Dorothydu committed on
Commit
bb40ab2
·
verified ·
1 Parent(s): eb6215d

Add files using upload-large-folder tool

Browse files
1001-sala-de-aula-master/env/lib64/python3.5/site-packages/rest_framework/schemas/inspectors.py ADDED
@@ -0,0 +1,79 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ """
2
+ inspectors.py # Per-endpoint view introspection
3
+
4
+ See schemas.__init__.py for package overview.
5
+ """
6
+ from weakref import WeakKeyDictionary
7
+
8
+ from rest_framework.settings import api_settings
9
+
10
+
11
+ class ViewInspector:
12
+ """
13
+ Descriptor class on APIView.
14
+
15
+ Provide subclass for per-view schema generation
16
+ """
17
+
18
+ def __init__(self):
19
+ self.instance_schemas = WeakKeyDictionary()
20
+
21
+ def __get__(self, instance, owner):
22
+ """
23
+ Enables `ViewInspector` as a Python _Descriptor_.
24
+
25
+ This is how `view.schema` knows about `view`.
26
+
27
+ `__get__` is called when the descriptor is accessed on the owner.
28
+ (That will be when view.schema is called in our case.)
29
+
30
+ `owner` is always the owner class. (An APIView, or subclass for us.)
31
+ `instance` is the view instance or `None` if accessed from the class,
32
+ rather than an instance.
33
+
34
+ See: https://docs.python.org/3/howto/descriptor.html for info on
35
+ descriptor usage.
36
+ """
37
+ if instance in self.instance_schemas:
38
+ return self.instance_schemas[instance]
39
+
40
+ self.view = instance
41
+ return self
42
+
43
+ def __set__(self, instance, other):
44
+ self.instance_schemas[instance] = other
45
+ if other is not None:
46
+ other.view = instance
47
+
48
+ @property
49
+ def view(self):
50
+ """View property."""
51
+ assert self._view is not None, (
52
+ "Schema generation REQUIRES a view instance. (Hint: you accessed "
53
+ "`schema` from the view class rather than an instance.)"
54
+ )
55
+ return self._view
56
+
57
+ @view.setter
58
+ def view(self, value):
59
+ self._view = value
60
+
61
+ @view.deleter
62
+ def view(self):
63
+ self._view = None
64
+
65
+
66
+ class DefaultSchema(ViewInspector):
67
+ """Allows overriding AutoSchema using DEFAULT_SCHEMA_CLASS setting"""
68
+ def __get__(self, instance, owner):
69
+ result = super().__get__(instance, owner)
70
+ if not isinstance(result, DefaultSchema):
71
+ return result
72
+
73
+ inspector_class = api_settings.DEFAULT_SCHEMA_CLASS
74
+ assert issubclass(inspector_class, ViewInspector), (
75
+ "DEFAULT_SCHEMA_CLASS must be set to a ViewInspector (usually an AutoSchema) subclass"
76
+ )
77
+ inspector = inspector_class()
78
+ inspector.view = instance
79
+ return inspector
1001-sala-de-aula-master/env/lib64/python3.5/site-packages/setuptools/command/easy_install.py ADDED
@@ -0,0 +1,2315 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ #!/usr/bin/env python
2
+
3
+ """
4
+ Easy Install
5
+ ------------
6
+
7
+ A tool for doing automatic download/extract/build of distutils-based Python
8
+ packages. For detailed documentation, see the accompanying EasyInstall.txt
9
+ file, or visit the `EasyInstall home page`__.
10
+
11
+ __ https://pythonhosted.org/setuptools/easy_install.html
12
+
13
+ """
14
+
15
+ from glob import glob
16
+ from distutils.util import get_platform
17
+ from distutils.util import convert_path, subst_vars
18
+ from distutils.errors import DistutilsArgError, DistutilsOptionError, \
19
+ DistutilsError, DistutilsPlatformError
20
+ from distutils.command.install import INSTALL_SCHEMES, SCHEME_KEYS
21
+ from distutils import log, dir_util
22
+ from distutils.command.build_scripts import first_line_re
23
+ from distutils.spawn import find_executable
24
+ import sys
25
+ import os
26
+ import zipimport
27
+ import shutil
28
+ import tempfile
29
+ import zipfile
30
+ import re
31
+ import stat
32
+ import random
33
+ import platform
34
+ import textwrap
35
+ import warnings
36
+ import site
37
+ import struct
38
+ import contextlib
39
+ import subprocess
40
+ import shlex
41
+ import io
42
+
43
+ from setuptools.extern import six
44
+ from setuptools.extern.six.moves import configparser, map
45
+
46
+ from setuptools import Command
47
+ from setuptools.sandbox import run_setup
48
+ from setuptools.py31compat import get_path, get_config_vars
49
+ from setuptools.command import setopt
50
+ from setuptools.archive_util import unpack_archive
51
+ from setuptools.package_index import PackageIndex
52
+ from setuptools.package_index import URL_SCHEME
53
+ from setuptools.command import bdist_egg, egg_info
54
+ from pkg_resources import (
55
+ yield_lines, normalize_path, resource_string, ensure_directory,
56
+ get_distribution, find_distributions, Environment, Requirement,
57
+ Distribution, PathMetadata, EggMetadata, WorkingSet, DistributionNotFound,
58
+ VersionConflict, DEVELOP_DIST,
59
+ )
60
+ import pkg_resources
61
+
62
+ # Turn on PEP440Warnings
63
+ warnings.filterwarnings("default", category=pkg_resources.PEP440Warning)
64
+
65
+
66
+ __all__ = [
67
+ 'samefile', 'easy_install', 'PthDistributions', 'extract_wininst_cfg',
68
+ 'main', 'get_exe_prefixes',
69
+ ]
70
+
71
+
72
+ def is_64bit():
73
+ return struct.calcsize("P") == 8
74
+
75
+
76
+ def samefile(p1, p2):
77
+ both_exist = os.path.exists(p1) and os.path.exists(p2)
78
+ use_samefile = hasattr(os.path, 'samefile') and both_exist
79
+ if use_samefile:
80
+ return os.path.samefile(p1, p2)
81
+ norm_p1 = os.path.normpath(os.path.normcase(p1))
82
+ norm_p2 = os.path.normpath(os.path.normcase(p2))
83
+ return norm_p1 == norm_p2
84
+
85
+
86
+ if six.PY2:
87
+ def _to_ascii(s):
88
+ return s
89
+
90
+ def isascii(s):
91
+ try:
92
+ six.text_type(s, 'ascii')
93
+ return True
94
+ except UnicodeError:
95
+ return False
96
+ else:
97
+ def _to_ascii(s):
98
+ return s.encode('ascii')
99
+
100
+ def isascii(s):
101
+ try:
102
+ s.encode('ascii')
103
+ return True
104
+ except UnicodeError:
105
+ return False
106
+
107
+
108
+ class easy_install(Command):
109
+ """Manage a download/build/install process"""
110
+ description = "Find/get/install Python packages"
111
+ command_consumes_arguments = True
112
+
113
+ user_options = [
114
+ ('prefix=', None, "installation prefix"),
115
+ ("zip-ok", "z", "install package as a zipfile"),
116
+ ("multi-version", "m", "make apps have to require() a version"),
117
+ ("upgrade", "U", "force upgrade (searches PyPI for latest versions)"),
118
+ ("install-dir=", "d", "install package to DIR"),
119
+ ("script-dir=", "s", "install scripts to DIR"),
120
+ ("exclude-scripts", "x", "Don't install scripts"),
121
+ ("always-copy", "a", "Copy all needed packages to install dir"),
122
+ ("index-url=", "i", "base URL of Python Package Index"),
123
+ ("find-links=", "f", "additional URL(s) to search for packages"),
124
+ ("build-directory=", "b",
125
+ "download/extract/build in DIR; keep the results"),
126
+ ('optimize=', 'O',
127
+ "also compile with optimization: -O1 for \"python -O\", "
128
+ "-O2 for \"python -OO\", and -O0 to disable [default: -O0]"),
129
+ ('record=', None,
130
+ "filename in which to record list of installed files"),
131
+ ('always-unzip', 'Z', "don't install as a zipfile, no matter what"),
132
+ ('site-dirs=', 'S', "list of directories where .pth files work"),
133
+ ('editable', 'e', "Install specified packages in editable form"),
134
+ ('no-deps', 'N', "don't install dependencies"),
135
+ ('allow-hosts=', 'H', "pattern(s) that hostnames must match"),
136
+ ('local-snapshots-ok', 'l',
137
+ "allow building eggs from local checkouts"),
138
+ ('version', None, "print version information and exit"),
139
+ ('install-layout=', None, "installation layout to choose (known values: deb)"),
140
+ ('force-installation-into-system-dir', '0', "force installation into /usr"),
141
+ ('no-find-links', None,
142
+ "Don't load find-links defined in packages being installed")
143
+ ]
144
+ boolean_options = [
145
+ 'zip-ok', 'multi-version', 'exclude-scripts', 'upgrade', 'always-copy',
146
+ 'editable',
147
+ 'no-deps', 'local-snapshots-ok', 'version', 'force-installation-into-system-dir'
148
+ ]
149
+
150
+ if site.ENABLE_USER_SITE:
151
+ help_msg = "install in user site-package '%s'" % site.USER_SITE
152
+ user_options.append(('user', None, help_msg))
153
+ boolean_options.append('user')
154
+
155
+ negative_opt = {'always-unzip': 'zip-ok'}
156
+ create_index = PackageIndex
157
+
158
+ def initialize_options(self):
159
+ # the --user option seems to be an opt-in one,
160
+ # so the default should be False.
161
+ self.user = 0
162
+ self.zip_ok = self.local_snapshots_ok = None
163
+ self.install_dir = self.script_dir = self.exclude_scripts = None
164
+ self.index_url = None
165
+ self.find_links = None
166
+ self.build_directory = None
167
+ self.args = None
168
+ self.optimize = self.record = None
169
+ self.upgrade = self.always_copy = self.multi_version = None
170
+ self.editable = self.no_deps = self.allow_hosts = None
171
+ self.root = self.prefix = self.no_report = None
172
+ self.version = None
173
+ self.install_purelib = None # for pure module distributions
174
+ self.install_platlib = None # non-pure (dists w/ extensions)
175
+ self.install_headers = None # for C/C++ headers
176
+ self.install_lib = None # set to either purelib or platlib
177
+ self.install_scripts = None
178
+ self.install_data = None
179
+ self.install_base = None
180
+ self.install_platbase = None
181
+ if site.ENABLE_USER_SITE:
182
+ self.install_userbase = site.USER_BASE
183
+ self.install_usersite = site.USER_SITE
184
+ else:
185
+ self.install_userbase = None
186
+ self.install_usersite = None
187
+ self.no_find_links = None
188
+
189
+ # Options not specifiable via command line
190
+ self.package_index = None
191
+ self.pth_file = self.always_copy_from = None
192
+ self.site_dirs = None
193
+ self.installed_projects = {}
194
+ self.sitepy_installed = False
195
+ # enable custom installation, known values: deb
196
+ self.install_layout = None
197
+ self.force_installation_into_system_dir = None
198
+ self.multiarch = None
199
+
200
+ # Always read easy_install options, even if we are subclassed, or have
201
+ # an independent instance created. This ensures that defaults will
202
+ # always come from the standard configuration file(s)' "easy_install"
203
+ # section, even if this is a "develop" or "install" command, or some
204
+ # other embedding.
205
+ self._dry_run = None
206
+ self.verbose = self.distribution.verbose
207
+ self.distribution._set_command_options(
208
+ self, self.distribution.get_option_dict('easy_install')
209
+ )
210
+
211
+ def delete_blockers(self, blockers):
212
+ extant_blockers = (
213
+ filename for filename in blockers
214
+ if os.path.exists(filename) or os.path.islink(filename)
215
+ )
216
+ list(map(self._delete_path, extant_blockers))
217
+
218
+ def _delete_path(self, path):
219
+ log.info("Deleting %s", path)
220
+ if self.dry_run:
221
+ return
222
+
223
+ is_tree = os.path.isdir(path) and not os.path.islink(path)
224
+ remover = rmtree if is_tree else os.unlink
225
+ remover(path)
226
+
227
+ @staticmethod
228
+ def _render_version():
229
+ """
230
+ Render the Setuptools version and installation details, then exit.
231
+ """
232
+ ver = sys.version[:3]
233
+ dist = get_distribution('setuptools')
234
+ tmpl = 'setuptools {dist.version} from {dist.location} (Python {ver})'
235
+ print(tmpl.format(**locals()))
236
+ raise SystemExit()
237
+
238
+ def finalize_options(self):
239
+ self.version and self._render_version()
240
+
241
+ py_version = sys.version.split()[0]
242
+ prefix, exec_prefix = get_config_vars('prefix', 'exec_prefix')
243
+
244
+ self.config_vars = {
245
+ 'dist_name': self.distribution.get_name(),
246
+ 'dist_version': self.distribution.get_version(),
247
+ 'dist_fullname': self.distribution.get_fullname(),
248
+ 'py_version': py_version,
249
+ 'py_version_short': py_version[0:3],
250
+ 'py_version_nodot': py_version[0] + py_version[2],
251
+ 'sys_prefix': prefix,
252
+ 'prefix': prefix,
253
+ 'sys_exec_prefix': exec_prefix,
254
+ 'exec_prefix': exec_prefix,
255
+ # Only python 3.2+ has abiflags
256
+ 'abiflags': getattr(sys, 'abiflags', ''),
257
+ }
258
+
259
+ if site.ENABLE_USER_SITE:
260
+ self.config_vars['userbase'] = self.install_userbase
261
+ self.config_vars['usersite'] = self.install_usersite
262
+
263
+ self._fix_install_dir_for_user_site()
264
+
265
+ self.expand_basedirs()
266
+ self.expand_dirs()
267
+
268
+ if self.install_layout:
269
+ if not self.install_layout.lower() in ['deb']:
270
+ raise DistutilsOptionError("unknown value for --install-layout")
271
+ self.install_layout = self.install_layout.lower()
272
+ import sysconfig
273
+ if sys.version_info[:2] >= (3, 3):
274
+ self.multiarch = sysconfig.get_config_var('MULTIARCH')
275
+ self._expand('install_dir', 'script_dir', 'build_directory',
276
+ 'site_dirs')
277
+ # If a non-default installation directory was specified, default the
278
+ # script directory to match it.
279
+ if self.script_dir is None:
280
+ self.script_dir = self.install_dir
281
+
282
+ if self.no_find_links is None:
283
+ self.no_find_links = False
284
+
285
+ # Let install_dir get set by install_lib command, which in turn
286
+ # gets its info from the install command, and takes into account
287
+ # --prefix and --home and all that other crud.
288
+ self.set_undefined_options(
289
+ 'install_lib', ('install_dir', 'install_dir')
290
+ )
291
+ # Likewise, set default script_dir from 'install_scripts.install_dir'
292
+ self.set_undefined_options(
293
+ 'install_scripts', ('install_dir', 'script_dir')
294
+ )
295
+
296
+ if self.user and self.install_purelib:
297
+ self.install_dir = self.install_purelib
298
+ self.script_dir = self.install_scripts
299
+
300
+ if self.prefix == '/usr' and not self.force_installation_into_system_dir:
301
+ raise DistutilsOptionError("""installation into /usr
302
+
303
+ Trying to install into the system managed parts of the file system. Please
304
+ consider to install to another location, or use the option
305
+ --force-installation-into-system-dir to overwrite this warning.
306
+ """)
307
+
308
+ # default --record from the install command
309
+ self.set_undefined_options('install', ('record', 'record'))
310
+ # Should this be moved to the if statement below? It's not used
311
+ # elsewhere
312
+ normpath = map(normalize_path, sys.path)
313
+ self.all_site_dirs = get_site_dirs()
314
+ if self.site_dirs is not None:
315
+ site_dirs = [
316
+ os.path.expanduser(s.strip()) for s in
317
+ self.site_dirs.split(',')
318
+ ]
319
+ for d in site_dirs:
320
+ if not os.path.isdir(d):
321
+ log.warn("%s (in --site-dirs) does not exist", d)
322
+ elif normalize_path(d) not in normpath:
323
+ raise DistutilsOptionError(
324
+ d + " (in --site-dirs) is not on sys.path"
325
+ )
326
+ else:
327
+ self.all_site_dirs.append(normalize_path(d))
328
+ if not self.editable:
329
+ self.check_site_dir()
330
+ self.index_url = self.index_url or "https://pypi.python.org/simple"
331
+ self.shadow_path = self.all_site_dirs[:]
332
+ for path_item in self.install_dir, normalize_path(self.script_dir):
333
+ if path_item not in self.shadow_path:
334
+ self.shadow_path.insert(0, path_item)
335
+
336
+ if self.allow_hosts is not None:
337
+ hosts = [s.strip() for s in self.allow_hosts.split(',')]
338
+ else:
339
+ hosts = ['*']
340
+ if self.package_index is None:
341
+ self.package_index = self.create_index(
342
+ self.index_url, search_path=self.shadow_path, hosts=hosts,
343
+ )
344
+ self.local_index = Environment(self.shadow_path + sys.path)
345
+
346
+ if self.find_links is not None:
347
+ if isinstance(self.find_links, six.string_types):
348
+ self.find_links = self.find_links.split()
349
+ else:
350
+ self.find_links = []
351
+ if self.local_snapshots_ok:
352
+ self.package_index.scan_egg_links(self.shadow_path + sys.path)
353
+ if not self.no_find_links:
354
+ self.package_index.add_find_links(self.find_links)
355
+ self.set_undefined_options('install_lib', ('optimize', 'optimize'))
356
+ if not isinstance(self.optimize, int):
357
+ try:
358
+ self.optimize = int(self.optimize)
359
+ if not (0 <= self.optimize <= 2):
360
+ raise ValueError
361
+ except ValueError:
362
+ raise DistutilsOptionError("--optimize must be 0, 1, or 2")
363
+
364
+ if self.editable and not self.build_directory:
365
+ raise DistutilsArgError(
366
+ "Must specify a build directory (-b) when using --editable"
367
+ )
368
+ if not self.args:
369
+ raise DistutilsArgError(
370
+ "No urls, filenames, or requirements specified (see --help)")
371
+
372
+ self.outputs = []
373
+
374
+ def _fix_install_dir_for_user_site(self):
375
+ """
376
+ Fix the install_dir if "--user" was used.
377
+ """
378
+ if not self.user or not site.ENABLE_USER_SITE:
379
+ return
380
+
381
+ self.create_home_path()
382
+ if self.install_userbase is None:
383
+ msg = "User base directory is not specified"
384
+ raise DistutilsPlatformError(msg)
385
+ self.install_base = self.install_platbase = self.install_userbase
386
+ scheme_name = os.name.replace('posix', 'unix') + '_user'
387
+ self.select_scheme(scheme_name)
388
+
389
+ def _expand_attrs(self, attrs):
390
+ for attr in attrs:
391
+ val = getattr(self, attr)
392
+ if val is not None:
393
+ if os.name == 'posix' or os.name == 'nt':
394
+ val = os.path.expanduser(val)
395
+ val = subst_vars(val, self.config_vars)
396
+ setattr(self, attr, val)
397
+
398
+ def expand_basedirs(self):
399
+ """Calls `os.path.expanduser` on install_base, install_platbase and
400
+ root."""
401
+ self._expand_attrs(['install_base', 'install_platbase', 'root'])
402
+
403
+ def expand_dirs(self):
404
+ """Calls `os.path.expanduser` on install dirs."""
405
+ self._expand_attrs(['install_purelib', 'install_platlib',
406
+ 'install_lib', 'install_headers',
407
+ 'install_scripts', 'install_data', ])
408
+
409
+ def run(self):
410
+ if self.verbose != self.distribution.verbose:
411
+ log.set_verbosity(self.verbose)
412
+ try:
413
+ for spec in self.args:
414
+ self.easy_install(spec, not self.no_deps)
415
+ if self.record:
416
+ outputs = self.outputs
417
+ if self.root: # strip any package prefix
418
+ root_len = len(self.root)
419
+ for counter in range(len(outputs)):
420
+ outputs[counter] = outputs[counter][root_len:]
421
+ from distutils import file_util
422
+
423
+ self.execute(
424
+ file_util.write_file, (self.record, outputs),
425
+ "writing list of installed files to '%s'" %
426
+ self.record
427
+ )
428
+ self.warn_deprecated_options()
429
+ finally:
430
+ log.set_verbosity(self.distribution.verbose)
431
+
432
+ def pseudo_tempname(self):
433
+ """Return a pseudo-tempname base in the install directory.
434
+ This code is intentionally naive; if a malicious party can write to
435
+ the target directory you're already in deep doodoo.
436
+ """
437
+ try:
438
+ pid = os.getpid()
439
+ except:
440
+ pid = random.randint(0, sys.maxsize)
441
+ return os.path.join(self.install_dir, "test-easy-install-%s" % pid)
442
+
443
+ def warn_deprecated_options(self):
444
+ pass
445
+
446
+ def check_site_dir(self):
447
+ """Verify that self.install_dir is .pth-capable dir, if needed"""
448
+
449
+ instdir = normalize_path(self.install_dir)
450
+ pth_file = os.path.join(instdir, 'easy-install.pth')
451
+
452
+ # Is it a configured, PYTHONPATH, implicit, or explicit site dir?
453
+ is_site_dir = instdir in self.all_site_dirs
454
+
455
+ if not is_site_dir and not self.multi_version:
456
+ # No? Then directly test whether it does .pth file processing
457
+ is_site_dir = self.check_pth_processing()
458
+ else:
459
+ # make sure we can write to target dir
460
+ testfile = self.pseudo_tempname() + '.write-test'
461
+ test_exists = os.path.exists(testfile)
462
+ try:
463
+ if test_exists:
464
+ os.unlink(testfile)
465
+ open(testfile, 'w').close()
466
+ os.unlink(testfile)
467
+ except (OSError, IOError):
468
+ self.cant_write_to_target()
469
+
470
+ if not is_site_dir and not self.multi_version:
471
+ # Can't install non-multi to non-site dir
472
+ raise DistutilsError(self.no_default_version_msg())
473
+
474
+ if is_site_dir:
475
+ if self.pth_file is None:
476
+ self.pth_file = PthDistributions(pth_file, self.all_site_dirs)
477
+ else:
478
+ self.pth_file = None
479
+
480
+ PYTHONPATH = os.environ.get('PYTHONPATH', '').split(os.pathsep)
481
+ if instdir not in map(normalize_path, filter(None, PYTHONPATH)):
482
+ # only PYTHONPATH dirs need a site.py, so pretend it's there
483
+ self.sitepy_installed = True
484
+ elif self.multi_version and not os.path.exists(pth_file):
485
+ self.sitepy_installed = True # don't need site.py in this case
486
+ self.pth_file = None # and don't create a .pth file
487
+ self.install_dir = instdir
488
+
489
+ __cant_write_msg = textwrap.dedent("""
490
+ can't create or remove files in install directory
491
+
492
+ The following error occurred while trying to add or remove files in the
493
+ installation directory:
494
+
495
+ %s
496
+
497
+ The installation directory you specified (via --install-dir, --prefix, or
498
+ the distutils default setting) was:
499
+
500
+ %s
501
+ """).lstrip()
502
+
503
+ __not_exists_id = textwrap.dedent("""
504
+ This directory does not currently exist. Please create it and try again, or
505
+ choose a different installation directory (using the -d or --install-dir
506
+ option).
507
+ """).lstrip()
508
+
509
+ __access_msg = textwrap.dedent("""
510
+ Perhaps your account does not have write access to this directory? If the
511
+ installation directory is a system-owned directory, you may need to sign in
512
+ as the administrator or "root" account. If you do not have administrative
513
+ access to this machine, you may wish to choose a different installation
514
+ directory, preferably one that is listed in your PYTHONPATH environment
515
+ variable.
516
+
517
+ For information on other options, you may wish to consult the
518
+ documentation at:
519
+
520
+ https://pythonhosted.org/setuptools/easy_install.html
521
+
522
+ Please make the appropriate changes for your system and try again.
523
+ """).lstrip()
524
+
525
+ def cant_write_to_target(self):
526
+ msg = self.__cant_write_msg % (sys.exc_info()[1], self.install_dir,)
527
+
528
+ if not os.path.exists(self.install_dir):
529
+ msg += '\n' + self.__not_exists_id
530
+ else:
531
+ msg += '\n' + self.__access_msg
532
+ raise DistutilsError(msg)
533
+
534
+ def check_pth_processing(self):
535
+ """Empirically verify whether .pth files are supported in inst. dir"""
536
+ instdir = self.install_dir
537
+ log.info("Checking .pth file support in %s", instdir)
538
+ pth_file = self.pseudo_tempname() + ".pth"
539
+ ok_file = pth_file + '.ok'
540
+ ok_exists = os.path.exists(ok_file)
541
+ try:
542
+ if ok_exists:
543
+ os.unlink(ok_file)
544
+ dirname = os.path.dirname(ok_file)
545
+ if not os.path.exists(dirname):
546
+ os.makedirs(dirname)
547
+ f = open(pth_file, 'w')
548
+ except (OSError, IOError):
549
+ self.cant_write_to_target()
550
+ else:
551
+ try:
552
+ f.write("import os; f = open(%r, 'w'); f.write('OK'); "
553
+ "f.close()\n" % (ok_file,))
554
+ f.close()
555
+ f = None
556
+ executable = sys.executable
557
+ if os.name == 'nt':
558
+ dirname, basename = os.path.split(executable)
559
+ alt = os.path.join(dirname, 'pythonw.exe')
560
+ if (basename.lower() == 'python.exe' and
561
+ os.path.exists(alt)):
562
+ # use pythonw.exe to avoid opening a console window
563
+ executable = alt
564
+
565
+ from distutils.spawn import spawn
566
+
567
+ spawn([executable, '-E', '-c', 'pass'], 0)
568
+
569
+ if os.path.exists(ok_file):
570
+ log.info(
571
+ "TEST PASSED: %s appears to support .pth files",
572
+ instdir
573
+ )
574
+ return True
575
+ finally:
576
+ if f:
577
+ f.close()
578
+ if os.path.exists(ok_file):
579
+ os.unlink(ok_file)
580
+ if os.path.exists(pth_file):
581
+ os.unlink(pth_file)
582
+ if not self.multi_version:
583
+ log.warn("TEST FAILED: %s does NOT support .pth files", instdir)
584
+ return False
585
+
586
+ def install_egg_scripts(self, dist):
587
+ """Write all the scripts for `dist`, unless scripts are excluded"""
588
+ if not self.exclude_scripts and dist.metadata_isdir('scripts'):
589
+ for script_name in dist.metadata_listdir('scripts'):
590
+ if dist.metadata_isdir('scripts/' + script_name):
591
+ # The "script" is a directory, likely a Python 3
592
+ # __pycache__ directory, so skip it.
593
+ continue
594
+ self.install_script(
595
+ dist, script_name,
596
+ dist.get_metadata('scripts/' + script_name)
597
+ )
598
+ self.install_wrapper_scripts(dist)
599
+
600
+ def add_output(self, path):
601
+ if os.path.isdir(path):
602
+ for base, dirs, files in os.walk(path):
603
+ for filename in files:
604
+ self.outputs.append(os.path.join(base, filename))
605
+ else:
606
+ self.outputs.append(path)
607
+
608
+ def not_editable(self, spec):
609
+ if self.editable:
610
+ raise DistutilsArgError(
611
+ "Invalid argument %r: you can't use filenames or URLs "
612
+ "with --editable (except via the --find-links option)."
613
+ % (spec,)
614
+ )
615
+
616
+ def check_editable(self, spec):
617
+ if not self.editable:
618
+ return
619
+
620
+ if os.path.exists(os.path.join(self.build_directory, spec.key)):
621
+ raise DistutilsArgError(
622
+ "%r already exists in %s; can't do a checkout there" %
623
+ (spec.key, self.build_directory)
624
+ )
625
+
626
+ def easy_install(self, spec, deps=False):
627
+ tmpdir = tempfile.mkdtemp(prefix="easy_install-")
628
+ download = None
629
+ if not self.editable:
630
+ self.install_site_py()
631
+
632
+ try:
633
+ if not isinstance(spec, Requirement):
634
+ if URL_SCHEME(spec):
635
+ # It's a url, download it to tmpdir and process
636
+ self.not_editable(spec)
637
+ download = self.package_index.download(spec, tmpdir)
638
+ return self.install_item(None, download, tmpdir, deps,
639
+ True)
640
+
641
+ elif os.path.exists(spec):
642
+ # Existing file or directory, just process it directly
643
+ self.not_editable(spec)
644
+ return self.install_item(None, spec, tmpdir, deps, True)
645
+ else:
646
+ spec = parse_requirement_arg(spec)
647
+
648
+ self.check_editable(spec)
649
+ dist = self.package_index.fetch_distribution(
650
+ spec, tmpdir, self.upgrade, self.editable,
651
+ not self.always_copy, self.local_index
652
+ )
653
+ if dist is None:
654
+ msg = "Could not find suitable distribution for %r" % spec
655
+ if self.always_copy:
656
+ msg += " (--always-copy skips system and development eggs)"
657
+ raise DistutilsError(msg)
658
+ elif dist.precedence == DEVELOP_DIST:
659
+ # .egg-info dists don't need installing, just process deps
660
+ self.process_distribution(spec, dist, deps, "Using")
661
+ return dist
662
+ else:
663
+ return self.install_item(spec, dist.location, tmpdir, deps)
664
+
665
+ finally:
666
+ if os.path.exists(tmpdir):
667
+ rmtree(tmpdir)
668
+
669
    def install_item(self, spec, download, tmpdir, deps, install_needed=False):
        """Install (or just register) the dist at path `download`.

        Decides whether a real install is required; otherwise the
        already-installed local egg is reused.  Returns the Distribution
        matching `spec`, or None.
        """
        # Installation is also needed if file in tmpdir or is not an egg
        install_needed = install_needed or self.always_copy
        install_needed = install_needed or os.path.dirname(download) == tmpdir
        install_needed = install_needed or not download.endswith('.egg')
        install_needed = install_needed or (
            self.always_copy_from is not None and
            os.path.dirname(normalize_path(download)) ==
            normalize_path(self.always_copy_from)
        )

        if spec and not install_needed:
            # at this point, we know it's a local .egg, we just don't know if
            # it's already installed.
            for dist in self.local_index[spec.project_name]:
                if dist.location == download:
                    break
            else:
                install_needed = True  # it's not in the local index

        log.info("Processing %s", os.path.basename(download))

        if install_needed:
            dists = self.install_eggs(spec, download, tmpdir)
            for dist in dists:
                self.process_distribution(spec, dist, deps)
        else:
            dists = [self.egg_distribution(download)]
            self.process_distribution(spec, dists[0], deps, "Using")

        # report back the dist that satisfies the original requirement
        if spec is not None:
            for dist in dists:
                if dist in spec:
                    return dist
+ def select_scheme(self, name):
706
+ """Sets the install directories by applying the install schemes."""
707
+ # it's the caller's problem if they supply a bad name!
708
+ scheme = INSTALL_SCHEMES[name]
709
+ for key in SCHEME_KEYS:
710
+ attrname = 'install_' + key
711
+ if getattr(self, attrname) is None:
712
+ setattr(self, attrname, scheme[key])
713
+
714
    def process_distribution(self, requirement, dist, deps=True, *info):
        """Register installed `dist` and then resolve/install its deps.

        Extra positional `info` (e.g. "Using") is forwarded to
        installation_report().
        """
        self.update_pth(dist)
        self.package_index.add(dist)
        if dist in self.local_index[dist.key]:
            self.local_index.remove(dist)
        self.local_index.add(dist)
        self.install_egg_scripts(dist)
        self.installed_projects[dist.key] = dist
        log.info(self.installation_report(requirement, dist, *info))
        if (dist.has_metadata('dependency_links.txt') and
                not self.no_find_links):
            self.package_index.add_find_links(
                dist.get_metadata_lines('dependency_links.txt')
            )
        if not deps and not self.always_copy:
            return
        elif requirement is not None and dist.key != requirement.key:
            log.warn("Skipping dependencies for %s", dist)
            return  # XXX this is not the distribution we were looking for
        elif requirement is None or dist not in requirement:
            # if we wound up with a different version, resolve what we've got
            distreq = dist.as_requirement()
            requirement = Requirement(str(distreq))
        log.info("Processing dependencies for %s", requirement)
        try:
            # easy_install is passed as the installer callback for misses
            distros = WorkingSet([]).resolve(
                [requirement], self.local_index, self.easy_install
            )
        except DistributionNotFound as e:
            raise DistutilsError(str(e))
        except VersionConflict as e:
            raise DistutilsError(e.report())
        if self.always_copy or self.always_copy_from:
            # Force all the relevant distros to be copied or activated
            for dist in distros:
                if dist.key not in self.installed_projects:
                    self.easy_install(dist.as_requirement())
        log.info("Finished processing dependencies for %s", requirement)
+ def should_unzip(self, dist):
754
+ if self.zip_ok is not None:
755
+ return not self.zip_ok
756
+ if dist.has_metadata('not-zip-safe'):
757
+ return True
758
+ if not dist.has_metadata('zip-safe'):
759
+ return True
760
+ return False
761
+
762
    def maybe_move(self, spec, dist_filename, setup_base):
        """Relocate an unpacked build tree into --build-directory.

        Returns the directory the build should proceed from; falls back
        to `setup_base` unchanged if the destination already exists.
        """
        dst = os.path.join(self.build_directory, spec.key)
        if os.path.exists(dst):
            msg = ("%r already exists in %s; build directory %s will not be "
                   "kept")
            log.warn(msg, spec.key, self.build_directory, setup_base)
            return setup_base
        if os.path.isdir(dist_filename):
            setup_base = dist_filename
        else:
            if os.path.dirname(dist_filename) == setup_base:
                os.unlink(dist_filename)  # get it out of the tmp dir
            contents = os.listdir(setup_base)
            if len(contents) == 1:
                dist_filename = os.path.join(setup_base, contents[0])
                if os.path.isdir(dist_filename):
                    # if the only thing there is a directory, move it instead
                    setup_base = dist_filename
        ensure_directory(dst)
        shutil.move(setup_base, dst)
        return dst
+ def install_wrapper_scripts(self, dist):
785
+ if self.exclude_scripts:
786
+ return
787
+ for args in ScriptWriter.best().get_args(dist):
788
+ self.write_script(*args)
789
+
790
    def install_script(self, dist, script_name, script_text, dev_path=None):
        """Generate a legacy script wrapper and install it"""
        # NOTE: the template is rendered with ``% locals()`` -- the local
        # names `spec` and `dev_path` are part of its contract; do not
        # rename them.
        spec = str(dist.as_requirement())
        is_script = is_python_script(script_text, script_name)

        if is_script:
            body = self._load_template(dev_path) % locals()
            script_text = ScriptWriter.get_header(script_text) + body
        # 'b' mode: write bytes so the shebang/header survive untouched
        self.write_script(script_name, _to_ascii(script_text), 'b')
+ @staticmethod
801
+ def _load_template(dev_path):
802
+ """
803
+ There are a couple of template scripts in the package. This
804
+ function loads one of them and prepares it for use.
805
+ """
806
+ # See https://github.com/pypa/setuptools/issues/134 for info
807
+ # on script file naming and downstream issues with SVR4
808
+ name = 'script.tmpl'
809
+ if dev_path:
810
+ name = name.replace('.tmpl', ' (dev).tmpl')
811
+
812
+ raw_bytes = resource_string('setuptools', name)
813
+ return raw_bytes.decode('utf-8')
814
+
815
    def write_script(self, script_name, contents, mode="t", blockers=()):
        """Write an executable file to the scripts directory"""
        self.delete_blockers(  # clean up old .py/.pyw w/o a script
            [os.path.join(self.script_dir, x) for x in blockers]
        )
        log.info("Installing %s script to %s", script_name, self.script_dir)
        target = os.path.join(self.script_dir, script_name)
        self.add_output(target)

        mask = current_umask()
        if not self.dry_run:
            ensure_directory(target)
            if os.path.exists(target):
                os.unlink(target)
            with open(target, "w" + mode) as f:
                f.write(contents)
            # make executable for everyone the umask allows
            chmod(target, 0o777 - mask)
    def install_eggs(self, spec, dist_filename, tmpdir):
        """Turn `dist_filename` into one or more installed eggs.

        Handles .egg files/dirs and bdist_wininst .exe directly; anything
        else is unpacked and built via its setup.py.  Returns a list of
        installed Distributions (empty in editable mode).
        """
        # .egg dirs or files are already built, so just return them
        if dist_filename.lower().endswith('.egg'):
            return [self.install_egg(dist_filename, tmpdir)]
        elif dist_filename.lower().endswith('.exe'):
            return [self.install_exe(dist_filename, tmpdir)]

        # Anything else, try to extract and build
        setup_base = tmpdir
        if os.path.isfile(dist_filename) and not dist_filename.endswith('.py'):
            unpack_archive(dist_filename, tmpdir, self.unpack_progress)
        elif os.path.isdir(dist_filename):
            setup_base = os.path.abspath(dist_filename)

        if (setup_base.startswith(tmpdir)  # something we downloaded
                and self.build_directory and spec is not None):
            setup_base = self.maybe_move(spec, dist_filename, setup_base)

        # Find the setup.py file
        setup_script = os.path.join(setup_base, 'setup.py')

        if not os.path.exists(setup_script):
            setups = glob(os.path.join(setup_base, '*', 'setup.py'))
            if not setups:
                raise DistutilsError(
                    "Couldn't find a setup script in %s" %
                    os.path.abspath(dist_filename)
                )
            if len(setups) > 1:
                raise DistutilsError(
                    "Multiple setup scripts in %s" %
                    os.path.abspath(dist_filename)
                )
            setup_script = setups[0]

        # Now run it, and return the result
        if self.editable:
            log.info(self.report_editable(spec, setup_script))
            return []
        else:
            return self.build_and_install(setup_script, setup_base)
    def egg_distribution(self, egg_path):
        """Return a Distribution for the egg at `egg_path` (dir or zip)."""
        if os.path.isdir(egg_path):
            metadata = PathMetadata(egg_path, os.path.join(egg_path,
                                                           'EGG-INFO'))
        else:
            # zipped egg: read metadata straight from the archive
            metadata = EggMetadata(zipimport.zipimporter(egg_path))
        return Distribution.from_filename(egg_path, metadata=metadata)
    def install_egg(self, egg_path, tmpdir):
        """Copy/move/extract an egg into install_dir and return its dist.

        Whether the egg ends up zipped or unzipped is decided by
        should_unzip(); zipimporter caches are fixed up afterwards.
        """
        destination = os.path.join(self.install_dir,
                                   os.path.basename(egg_path))
        destination = os.path.abspath(destination)
        if not self.dry_run:
            ensure_directory(destination)

        dist = self.egg_distribution(egg_path)
        if not samefile(egg_path, destination):
            # clear whatever currently occupies the destination
            if os.path.isdir(destination) and not os.path.islink(destination):
                dir_util.remove_tree(destination, dry_run=self.dry_run)
            elif os.path.exists(destination):
                self.execute(os.unlink, (destination,), "Removing " +
                             destination)
            try:
                new_dist_is_zipped = False
                if os.path.isdir(egg_path):
                    if egg_path.startswith(tmpdir):
                        f, m = shutil.move, "Moving"
                    else:
                        f, m = shutil.copytree, "Copying"
                elif self.should_unzip(dist):
                    self.mkpath(destination)
                    f, m = self.unpack_and_compile, "Extracting"
                else:
                    new_dist_is_zipped = True
                    if egg_path.startswith(tmpdir):
                        f, m = shutil.move, "Moving"
                    else:
                        f, m = shutil.copy2, "Copying"
                self.execute(f, (egg_path, destination),
                             (m + " %s to %s") %
                             (os.path.basename(egg_path),
                              os.path.dirname(destination)))
                update_dist_caches(destination,
                                   fix_zipimporter_caches=new_dist_is_zipped)
            except:
                # bare except is deliberate: fix caches then re-raise
                # whatever happened (including KeyboardInterrupt)
                update_dist_caches(destination, fix_zipimporter_caches=False)
                raise

        self.add_output(destination)
        return self.egg_distribution(destination)
    def install_exe(self, dist_filename, tmpdir):
        """Convert a bdist_wininst .exe into an egg and install it."""
        # See if it's valid, get data
        cfg = extract_wininst_cfg(dist_filename)
        if cfg is None:
            raise DistutilsError(
                "%s is not a valid distutils Windows .exe" % dist_filename
            )
        # Create a dummy distribution object until we build the real distro
        dist = Distribution(
            None,
            project_name=cfg.get('metadata', 'name'),
            version=cfg.get('metadata', 'version'), platform=get_platform(),
        )

        # Convert the .exe to an unpacked egg
        egg_path = dist.location = os.path.join(tmpdir, dist.egg_name() +
                                                '.egg')
        egg_tmp = egg_path + '.tmp'
        _egg_info = os.path.join(egg_tmp, 'EGG-INFO')
        pkg_inf = os.path.join(_egg_info, 'PKG-INFO')
        ensure_directory(pkg_inf)  # make sure EGG-INFO dir exists
        dist._provider = PathMetadata(egg_tmp, _egg_info)  # XXX
        self.exe_to_egg(dist_filename, egg_tmp)

        # Write EGG-INFO/PKG-INFO
        if not os.path.exists(pkg_inf):
            f = open(pkg_inf, 'w')
            f.write('Metadata-Version: 1.0\n')
            for k, v in cfg.items('metadata'):
                if k != 'target_version':
                    f.write('%s: %s\n' % (k.replace('_', '-').title(), v))
            f.close()
        script_dir = os.path.join(_egg_info, 'scripts')
        # delete entry-point scripts to avoid duping
        self.delete_blockers(
            [os.path.join(script_dir, args[0]) for args in
             ScriptWriter.get_args(dist)]
        )
        # Build .egg file from tmpdir
        bdist_egg.make_zipfile(
            egg_path, egg_tmp, verbose=self.verbose, dry_run=self.dry_run
        )
        # install the .egg
        return self.install_egg(egg_path, tmpdir)
    def exe_to_egg(self, dist_filename, egg_tmp):
        """Extract a bdist_wininst to the directories an egg would use"""
        # Check for .pth file and set up prefix translations
        prefixes = get_exe_prefixes(dist_filename)
        to_compile = []
        native_libs = []
        top_level = {}

        def process(src, dst):
            # unpack filter: remap wininst paths to egg layout, recording
            # extensions and .py files as a side effect
            s = src.lower()
            for old, new in prefixes:
                if s.startswith(old):
                    src = new + src[len(old):]
                    parts = src.split('/')
                    dst = os.path.join(egg_tmp, *parts)
                    dl = dst.lower()
                    if dl.endswith('.pyd') or dl.endswith('.dll'):
                        parts[-1] = bdist_egg.strip_module(parts[-1])
                        top_level[os.path.splitext(parts[0])[0]] = 1
                        native_libs.append(src)
                    elif dl.endswith('.py') and old != 'SCRIPTS/':
                        top_level[os.path.splitext(parts[0])[0]] = 1
                        to_compile.append(dst)
                    return dst
            if not src.endswith('.pth'):
                log.warn("WARNING: can't process %s", src)
            return None  # returning None skips the file

        # extract, tracking .pyd/.dll->native_libs and .py -> to_compile
        unpack_archive(dist_filename, egg_tmp, process)
        stubs = []
        for res in native_libs:
            if res.lower().endswith('.pyd'):  # create stubs for .pyd's
                parts = res.split('/')
                resource = parts[-1]
                parts[-1] = bdist_egg.strip_module(parts[-1]) + '.py'
                pyfile = os.path.join(egg_tmp, *parts)
                to_compile.append(pyfile)
                stubs.append(pyfile)
                bdist_egg.write_stub(resource, pyfile)
        self.byte_compile(to_compile)  # compile .py's
        bdist_egg.write_safety_flag(
            os.path.join(egg_tmp, 'EGG-INFO'),
            bdist_egg.analyze_egg(egg_tmp, stubs))  # write zip-safety flag

        # NOTE: locals()[name] deliberately picks up the `top_level` and
        # `native_libs` lists above -- do not rename them.
        for name in 'top_level', 'native_libs':
            if locals()[name]:
                txt = os.path.join(egg_tmp, 'EGG-INFO', name + '.txt')
                if not os.path.exists(txt):
                    f = open(txt, 'w')
                    f.write('\n'.join(locals()[name]) + '\n')
                    f.close()
    # Message templates interpolated with % by installation_report().
    __mv_warning = textwrap.dedent("""
        Because this distribution was installed --multi-version, before you can
        import modules from this package in an application, you will need to
        'import pkg_resources' and then use a 'require()' call similar to one of
        these examples, in order to select the desired version:

            pkg_resources.require("%(name)s")  # latest installed version
            pkg_resources.require("%(name)s==%(version)s")  # this exact version
            pkg_resources.require("%(name)s>=%(version)s")  # this version or higher
        """).lstrip()

    __id_warning = textwrap.dedent("""
        Note also that the installation directory must be on sys.path at runtime for
        this to work. (e.g. by being the application's script directory, by being on
        PYTHONPATH, or by being added to sys.path by your code.)
        """)
    def installation_report(self, req, dist, what="Installed"):
        """Helpful installation message for display to package users"""
        msg = "\n%(what)s %(eggloc)s%(extras)s"
        if self.multi_version and not self.no_report:
            msg += '\n' + self.__mv_warning
            if self.install_dir not in map(normalize_path, sys.path):
                msg += '\n' + self.__id_warning

        # NOTE: rendered with ``% locals()`` -- the names below (eggloc,
        # name, version, extras, what) feed the templates; don't rename.
        eggloc = dist.location
        name = dist.project_name
        version = dist.version
        extras = ''  # TODO: self.report_extras(req, dist)
        return msg % locals()
    # Template rendered by report_editable() via ``% locals()``.
    __editable_msg = textwrap.dedent("""
        Extracted editable version of %(spec)s to %(dirname)s

        If it uses setuptools in its setup script, you can activate it in
        "development" mode by going to that directory and running::

            %(python)s setup.py develop

        See the setuptools documentation for the "develop" command for more info.
        """).lstrip()
    def report_editable(self, spec, setup_script):
        """Render the message shown after extracting an editable checkout."""
        # locals `dirname` and `python` are consumed by the template
        dirname = os.path.dirname(setup_script)
        python = sys.executable
        return '\n' + self.__editable_msg % locals()
    def run_setup(self, setup_script, setup_base, args):
        """Run a third-party setup.py with setuptools' commands patched in."""
        # make sure distutils command lookups resolve to our versions
        sys.modules.setdefault('distutils.command.bdist_egg', bdist_egg)
        sys.modules.setdefault('distutils.command.egg_info', egg_info)

        args = list(args)
        if self.verbose > 2:
            v = 'v' * (self.verbose - 1)
            args.insert(0, '-' + v)
        elif self.verbose < 2:
            args.insert(0, '-q')
        if self.dry_run:
            args.insert(0, '-n')
        log.info(
            "Running %s %s", setup_script[len(setup_base) + 1:], ' '.join(args)
        )
        try:
            run_setup(setup_script, args)
        except SystemExit as v:
            # translate sys.exit() from the child script into a build error
            raise DistutilsError("Setup script exited with %s" % (v.args[0],))
    def build_and_install(self, setup_script, setup_base):
        """Run bdist_egg for the unpacked source and install the result.

        Returns the list of installed Distributions.
        """
        args = ['bdist_egg', '--dist-dir']

        dist_dir = tempfile.mkdtemp(
            prefix='egg-dist-tmp-', dir=os.path.dirname(setup_script)
        )
        try:
            # propagate our fetch options to the child setup.py
            self._set_fetcher_options(os.path.dirname(setup_script))
            args.append(dist_dir)

            self.run_setup(setup_script, setup_base, args)
            all_eggs = Environment([dist_dir])
            eggs = []
            for key in all_eggs:
                for dist in all_eggs[key]:
                    eggs.append(self.install_egg(dist.location, setup_base))
            if not eggs and not self.dry_run:
                log.warn("No eggs found in %s (setup script problem?)",
                         dist_dir)
            return eggs
        finally:
            rmtree(dist_dir)
            log.set_verbosity(self.verbose)  # restore our log verbosity
+ def _set_fetcher_options(self, base):
1116
+ """
1117
+ When easy_install is about to run bdist_egg on a source dist, that
1118
+ source dist might have 'setup_requires' directives, requiring
1119
+ additional fetching. Ensure the fetcher options given to easy_install
1120
+ are available to that command as well.
1121
+ """
1122
+ # find the fetch options from easy_install and write them out
1123
+ # to the setup.cfg file.
1124
+ ei_opts = self.distribution.get_option_dict('easy_install').copy()
1125
+ fetch_directives = (
1126
+ 'find_links', 'site_dirs', 'index_url', 'optimize',
1127
+ 'site_dirs', 'allow_hosts',
1128
+ )
1129
+ fetch_options = {}
1130
+ for key, val in ei_opts.items():
1131
+ if key not in fetch_directives:
1132
+ continue
1133
+ fetch_options[key.replace('_', '-')] = val[1]
1134
+ # create a settings dictionary suitable for `edit_config`
1135
+ settings = dict(easy_install=fetch_options)
1136
+ cfg_filename = os.path.join(base, 'setup.cfg')
1137
+ setopt.edit_config(cfg_filename, settings)
1138
+
1139
    def update_pth(self, dist):
        """Record `dist` in easy-install.pth (and protect setuptools.pth)."""
        if self.pth_file is None:
            # .pth management disabled (e.g. multi-version-only install dir)
            return

        for d in self.pth_file[dist.key]:  # drop old entries
            if self.multi_version or d.location != dist.location:
                log.info("Removing %s from easy-install.pth file", d)
                self.pth_file.remove(d)
                if d.location in self.shadow_path:
                    self.shadow_path.remove(d.location)

        if not self.multi_version:
            if dist.location in self.pth_file.paths:
                log.info(
                    "%s is already the active version in easy-install.pth",
                    dist
                )
            else:
                log.info("Adding %s to easy-install.pth file", dist)
                self.pth_file.add(dist)  # add new entry
                if dist.location not in self.shadow_path:
                    self.shadow_path.append(dist.location)

        if not self.dry_run:

            self.pth_file.save()

            if dist.key == 'setuptools':
                # Ensure that setuptools itself never becomes unavailable!
                # XXX should this check for latest version?
                filename = os.path.join(self.install_dir, 'setuptools.pth')
                if os.path.islink(filename):
                    os.unlink(filename)
                f = open(filename, 'wt')
                f.write(self.pth_file.make_relative(dist.location) + '\n')
                f.close()
    def unpack_progress(self, src, dst):
        """Progress filter for unpacking: log each member and keep it."""
        log.debug("Unpacking %s to %s", src, dst)
        return dst  # only unpack-and-compile skips files for dry run
    def unpack_and_compile(self, egg_path, destination):
        """Extract a zipped egg, byte-compile its .py's, chmod native libs."""
        to_compile = []
        to_chmod = []

        def pf(src, dst):
            # unpack filter: collect work lists while extracting
            if dst.endswith('.py') and not src.startswith('EGG-INFO/'):
                to_compile.append(dst)
            elif dst.endswith('.dll') or dst.endswith('.so'):
                to_chmod.append(dst)
            self.unpack_progress(src, dst)
            return not self.dry_run and dst or None

        unpack_archive(egg_path, destination, pf)
        self.byte_compile(to_compile)
        if not self.dry_run:
            for f in to_chmod:
                # ensure read/execute bits, drop setuid/setgid-ish bits
                mode = ((os.stat(f)[stat.ST_MODE]) | 0o555) & 0o7755
                chmod(f, mode)
    def byte_compile(self, to_compile):
        """Byte-compile the given .py files, honoring --optimize."""
        if sys.dont_write_bytecode:
            self.warn('byte-compiling is disabled, skipping.')
            return

        from distutils.util import byte_compile

        try:
            # try to make the byte compile messages quieter
            log.set_verbosity(self.verbose - 1)

            byte_compile(to_compile, optimize=0, force=1, dry_run=self.dry_run)
            if self.optimize:
                byte_compile(
                    to_compile, optimize=self.optimize, force=1,
                    dry_run=self.dry_run
                )
        finally:
            log.set_verbosity(self.verbose)  # restore original verbosity
    # Error template rendered by no_default_version_msg() with
    # (install_dir, PYTHONPATH).
    __no_default_msg = textwrap.dedent("""
        bad install directory or PYTHONPATH

        You are attempting to install a package to a directory that is not
        on PYTHONPATH and which Python does not read ".pth" files from. The
        installation directory you specified (via --install-dir, --prefix, or
        the distutils default setting) was:

            %s

        and your PYTHONPATH environment variable currently contains:

            %r

        Here are some of your options for correcting the problem:

        * You can choose a different installation directory, i.e., one that is
          on PYTHONPATH or supports .pth files

        * You can add the installation directory to the PYTHONPATH environment
          variable. (It must then also be on PYTHONPATH whenever you run
          Python and want to use the package(s) you are installing.)

        * You can set up the installation directory to support ".pth" files by
          using one of the approaches described here:

          https://pythonhosted.org/setuptools/easy_install.html#custom-installation-locations

        Please make the appropriate changes for your system and try again.""").lstrip()
+ def no_default_version_msg(self):
1251
+ template = self.__no_default_msg
1252
+ return template % (self.install_dir, os.environ.get('PYTHONPATH', ''))
1253
+
1254
    def install_site_py(self):
        """Make sure there's a site.py in the target dir, if needed"""

        if self.sitepy_installed:
            return  # already did it, or don't need to

        sitepy = os.path.join(self.install_dir, "site.py")
        source = resource_string("setuptools", "site-patch.py")
        source = source.decode('utf-8')
        current = ""

        if os.path.exists(sitepy):
            log.debug("Checking existing site.py in %s", self.install_dir)
            with io.open(sitepy) as strm:
                current = strm.read()

            # refuse to clobber a site.py that isn't ours
            if not current.startswith('def __boot():'):
                raise DistutilsError(
                    "%s is not a setuptools-generated site.py; please"
                    " remove it." % sitepy
                )

        if current != source:
            log.info("Creating %s", sitepy)
            if not self.dry_run:
                ensure_directory(sitepy)
                with io.open(sitepy, 'w', encoding='utf-8') as strm:
                    strm.write(source)
            self.byte_compile([sitepy])

        self.sitepy_installed = True
    def create_home_path(self):
        """Create directories under ~."""
        if not self.user:
            # only relevant for --user installs
            return
        home = convert_path(os.path.expanduser("~"))
        for name, path in six.iteritems(self.config_vars):
            if path.startswith(home) and not os.path.isdir(path):
                self.debug_print("os.makedirs('%s', 0o700)" % path)
                os.makedirs(path, 0o700)  # private to the user
    # Debian/Ubuntu layout support: older Pythons and virtualenvs
    # ('real_prefix') use 'site-packages'; Debian system Pythons use
    # 'dist-packages'.
    if sys.version[:3] in ('2.3', '2.4', '2.5') or 'real_prefix' in sys.__dict__:
        sitedir_name = 'site-packages'
    else:
        sitedir_name = 'dist-packages'

    # Per-scheme defaults applied by _expand() when --prefix or
    # --install-layout is given.
    INSTALL_SCHEMES = dict(
        posix=dict(
            install_dir='$base/lib/python$py_version_short/site-packages',
            script_dir='$base/bin',
        ),
        unix_local=dict(
            install_dir='$base/local/lib/python$py_version_short/%s' % sitedir_name,
            script_dir='$base/local/bin',
        ),
        posix_local=dict(
            install_dir='$base/local/lib/python$py_version_short/%s' % sitedir_name,
            script_dir='$base/local/bin',
        ),
        deb_system=dict(
            install_dir='$base/lib/python3/%s' % sitedir_name,
            script_dir='$base/bin',
        ),
    )

    # Windows-style fallback when no named scheme matches.
    DEFAULT_SCHEME = dict(
        install_dir='$base/Lib/site-packages',
        script_dir='$base/Scripts',
    )
    def _expand(self, *attrs):
        """Expand $-config variables in the named attributes in place."""
        config_vars = self.get_finalized_command('install').config_vars

        if self.prefix or self.install_layout:
            # pick an install scheme: Debian layout, OS default, or local
            if self.install_layout and self.install_layout in ['deb']:
                scheme_name = "deb_system"
                self.prefix = '/usr'
            elif self.prefix or 'real_prefix' in sys.__dict__:
                scheme_name = os.name
            else:
                scheme_name = "posix_local"
            # Set default install_dir/scripts from --prefix
            config_vars = config_vars.copy()
            config_vars['base'] = self.prefix
            scheme = self.INSTALL_SCHEMES.get(scheme_name, self.DEFAULT_SCHEME)
            for attr, val in scheme.items():
                if getattr(self, attr, None) is None:
                    setattr(self, attr, val)

        from distutils.util import subst_vars

        for attr in attrs:
            val = getattr(self, attr)
            if val is not None:
                val = subst_vars(val, config_vars)
                if os.name == 'posix':
                    val = os.path.expanduser(val)
                setattr(self, attr, val)
def get_site_dirs():
    """Return a list of 'site' dirs.

    Combines PYTHONPATH entries, prefix-derived site-packages locations,
    per-user Mac framework dirs, sysconfig's purelib/platlib, and the
    user site dir when enabled; all normalized.
    """
    sitedirs = [_f for _f in os.environ.get('PYTHONPATH',
                                            '').split(os.pathsep) if _f]
    prefixes = [sys.prefix]
    if sys.exec_prefix != sys.prefix:
        prefixes.append(sys.exec_prefix)
    # FIX: sys.version[:3] truncates '3.10.x' to '3.1'; derive the 'X.Y'
    # tag from version_info instead (identical result on Python <= 3.9).
    py_ver = '%d.%d' % sys.version_info[:2]
    for prefix in prefixes:
        if prefix:
            if sys.platform in ('os2emx', 'riscos'):
                sitedirs.append(os.path.join(prefix, "Lib", "site-packages"))
            elif os.sep == '/':
                sitedirs.extend([os.path.join(prefix,
                                              "lib",
                                              "python" + py_ver,
                                              "site-packages"),
                                 os.path.join(prefix, "lib", "site-python")])
            else:
                if py_ver in ('2.3', '2.4', '2.5'):
                    sdir = "site-packages"
                else:
                    sdir = "dist-packages"
                sitedirs.extend(
                    [os.path.join(prefix, "local/lib", "python" + py_ver, sdir),
                     os.path.join(prefix, "lib", "python" + py_ver, sdir)]
                )
            if sys.platform == 'darwin':
                # for framework builds *only* we add the standard Apple
                # locations. Currently only per-user, but /Library and
                # /Network/Library could be added too
                if 'Python.framework' in prefix:
                    home = os.environ.get('HOME')
                    if home:
                        sitedirs.append(
                            os.path.join(home,
                                         'Library',
                                         'Python',
                                         py_ver,
                                         'site-packages'))
    lib_paths = get_path('purelib'), get_path('platlib')
    for site_lib in lib_paths:
        if site_lib not in sitedirs:
            sitedirs.append(site_lib)

    if site.ENABLE_USER_SITE:
        sitedirs.append(site.USER_SITE)

    sitedirs = list(map(normalize_path, sitedirs))

    return sitedirs
def expand_paths(inputs):
    """Yield sys.path directories that might contain "old-style" packages"""

    seen = {}  # normalized dirs already yielded (dedupe across .pth files)

    for dirname in inputs:
        dirname = normalize_path(dirname)
        if dirname in seen:
            continue

        seen[dirname] = 1
        if not os.path.isdir(dirname):
            continue

        files = os.listdir(dirname)
        yield dirname, files

        for name in files:
            if not name.endswith('.pth'):
                # We only care about the .pth files
                continue
            if name in ('easy-install.pth', 'setuptools.pth'):
                # Ignore .pth files that we control
                continue

            # Read the .pth file
            f = open(os.path.join(dirname, name))
            lines = list(yield_lines(f))
            f.close()

            # Yield existing non-dupe, non-import directory lines from it
            for line in lines:
                if not line.startswith("import"):
                    line = normalize_path(line.rstrip())
                    if line not in seen:
                        seen[line] = 1
                        if not os.path.isdir(line):
                            continue
                        yield line, os.listdir(line)
def extract_wininst_cfg(dist_filename):
    """Extract configuration data from a bdist_wininst .exe

    Returns a configparser.RawConfigParser, or None
    """
    f = open(dist_filename, 'rb')
    try:
        endrec = zipfile._EndRecData(f)
        if endrec is None:
            return None

        # bytes prepended before the zip payload hold the wininst metadata
        prepended = (endrec[9] - endrec[5]) - endrec[6]
        if prepended < 12:  # no wininst data here
            return None
        f.seek(prepended - 12)

        tag, cfglen, bmlen = struct.unpack("<iii", f.read(12))
        if tag not in (0x1234567A, 0x1234567B):
            return None  # not a valid tag

        f.seek(prepended - (12 + cfglen))
        cfg = configparser.RawConfigParser(
            {'version': '', 'target_version': ''})
        try:
            part = f.read(cfglen)
            # Read up to the first null byte.
            config = part.split(b'\0', 1)[0]
            # Now the config is in bytes, but for RawConfigParser, it should
            # be text, so decode it.
            config = config.decode(sys.getfilesystemencoding())
            # readfp is the py2-compatible spelling (read_file on py3-only)
            cfg.readfp(six.StringIO(config))
        except configparser.Error:
            return None
        if not cfg.has_section('metadata') or not cfg.has_section('Setup'):
            return None
        return cfg

    finally:
        f.close()
1489
def get_exe_prefixes(exe_filename):
    """Get exe->egg path translations for a given .exe file"""

    prefixes = [
        ('PURELIB/', ''), ('PLATLIB/pywin32_system32', ''),
        ('PLATLIB/', ''),
        ('SCRIPTS/', 'EGG-INFO/scripts/'),
        ('DATA/lib/site-packages', ''),
    ]
    z = zipfile.ZipFile(exe_filename)
    try:
        for info in z.infolist():
            name = info.filename
            parts = name.split('/')
            # an .egg-info dir inside the archive maps to EGG-INFO/
            if len(parts) == 3 and parts[2] == 'PKG-INFO':
                if parts[1].endswith('.egg-info'):
                    prefixes.insert(0, ('/'.join(parts[:2]), 'EGG-INFO/'))
                    break
            if len(parts) != 2 or not name.endswith('.pth'):
                continue
            if name.endswith('-nspkg.pth'):
                continue
            if parts[0].upper() in ('PURELIB', 'PLATLIB'):
                # each non-import .pth line becomes a strippable prefix
                contents = z.read(name)
                if six.PY3:
                    contents = contents.decode()
                for pth in yield_lines(contents):
                    pth = pth.strip().replace('\\', '/')
                    if not pth.startswith('import'):
                        prefixes.append((('%s/%s/' % (parts[0], pth)), ''))
    finally:
        z.close()
    # longest-match-first: sort descending so specific prefixes win
    prefixes = [(x.lower(), y) for x, y in prefixes]
    prefixes.sort()
    prefixes.reverse()
    return prefixes
def parse_requirement_arg(spec):
    """Parse a command-line spec into a Requirement, or raise a build error."""
    try:
        req = Requirement.parse(spec)
    except ValueError:
        raise DistutilsError(
            "Not a URL, existing file, or requirement spec: %r" % (spec,)
        )
    return req
1536
class PthDistributions(Environment):
    """A .pth file with Distribution paths in it"""

    # True when the in-memory path list differs from the file on disk
    dirty = False

    def __init__(self, filename, sitedirs=()):
        self.filename = filename
        self.sitedirs = list(map(normalize_path, sitedirs))
        self.basedir = normalize_path(os.path.dirname(self.filename))
        self._load()
        Environment.__init__(self, [], None, None)
        # index every distribution found on the recorded paths
        for path in yield_lines(self.paths):
            list(map(self.add, find_distributions(path, True)))
    def _load(self):
        """Parse the .pth file into self.paths, pruning stale entries."""
        self.paths = []
        saw_import = False
        seen = dict.fromkeys(self.sitedirs)
        if os.path.isfile(self.filename):
            f = open(self.filename, 'rt')
            for line in f:
                if line.startswith('import'):
                    saw_import = True
                    continue
                path = line.rstrip()
                self.paths.append(path)
                if not path.strip() or path.strip().startswith('#'):
                    continue
                # skip non-existent paths, in case somebody deleted a package
                # manually, and duplicate paths as well
                path = self.paths[-1] = normalize_path(
                    os.path.join(self.basedir, path)
                )
                if not os.path.exists(path) or path in seen:
                    self.paths.pop()  # skip it
                    self.dirty = True  # we cleaned up, so we're dirty now :)
                    continue
                seen[path] = 1
            f.close()

        if self.paths and not saw_import:
            self.dirty = True  # ensure anything we touch has import wrappers
        while self.paths and not self.paths[-1].strip():
            self.paths.pop()
+ def save(self):
1582
+ """Write changed .pth file back to disk"""
1583
+ if not self.dirty:
1584
+ return
1585
+
1586
+ rel_paths = list(map(self.make_relative, self.paths))
1587
+ if rel_paths:
1588
+ log.debug("Saving %s", self.filename)
1589
+ lines = self._wrap_lines(rel_paths)
1590
+ data = '\n'.join(lines) + '\n'
1591
+
1592
+ if os.path.islink(self.filename):
1593
+ os.unlink(self.filename)
1594
+ with open(self.filename, 'wt') as f:
1595
+ f.write(data)
1596
+
1597
+ elif os.path.exists(self.filename):
1598
+ log.debug("Deleting empty %s", self.filename)
1599
+ os.unlink(self.filename)
1600
+
1601
+ self.dirty = False
1602
+
1603
+ @staticmethod
1604
+ def _wrap_lines(lines):
1605
+ return lines
1606
+
1607
+ def add(self, dist):
1608
+ """Add `dist` to the distribution map"""
1609
+ new_path = (
1610
+ dist.location not in self.paths and (
1611
+ dist.location not in self.sitedirs or
1612
+ # account for '.' being in PYTHONPATH
1613
+ dist.location == os.getcwd()
1614
+ )
1615
+ )
1616
+ if new_path:
1617
+ self.paths.append(dist.location)
1618
+ self.dirty = True
1619
+ Environment.add(self, dist)
1620
+
1621
+ def remove(self, dist):
1622
+ """Remove `dist` from the distribution map"""
1623
+ while dist.location in self.paths:
1624
+ self.paths.remove(dist.location)
1625
+ self.dirty = True
1626
+ Environment.remove(self, dist)
1627
+
1628
+ def make_relative(self, path):
1629
+ npath, last = os.path.split(normalize_path(path))
1630
+ baselen = len(self.basedir)
1631
+ parts = [last]
1632
+ sep = os.altsep == '/' and '/' or os.sep
1633
+ while len(npath) >= baselen:
1634
+ if npath == self.basedir:
1635
+ parts.append(os.curdir)
1636
+ parts.reverse()
1637
+ return sep.join(parts)
1638
+ npath, last = os.path.split(npath)
1639
+ parts.append(last)
1640
+ else:
1641
+ return path
1642
+
1643
+
1644
class RewritePthDistributions(PthDistributions):
    """PthDistributions variant that brackets the path entries with
    import-based prelude/postlude lines, which move the entries added by
    this .pth file to the front of sys.path at interpreter startup.
    """

    @classmethod
    def _wrap_lines(cls, lines):
        yield cls.prelude
        for line in lines:
            yield line
        yield cls.postlude

    # Collapse a multi-line snippet onto one 'import' line: the .pth
    # format only executes lines that start with 'import'.
    _inline = lambda text: textwrap.dedent(text).strip().replace('\n', '; ')
    # Record the current sys.path length before the entries are appended.
    prelude = _inline("""
        import sys
        sys.__plen = len(sys.path)
        """)
    # Move the newly appended entries to the insertion point
    # (sys.__egginsert), keeping relative egg ordering stable.
    postlude = _inline("""
        import sys
        new = sys.path[sys.__plen:]
        del sys.path[sys.__plen:]
        p = getattr(sys, '__egginsert', 0)
        sys.path[p:p] = new
        sys.__egginsert = p + len(new)
        """)
1666
+
1667
+
1668
# Unless explicitly overridden via SETUPTOOLS_SYS_PATH_TECHNIQUE=raw,
# use the sys.path-rewriting flavor as the default PthDistributions.
if os.environ.get('SETUPTOOLS_SYS_PATH_TECHNIQUE', 'rewrite') == 'rewrite':
    PthDistributions = RewritePthDistributions
1670
+
1671
+
1672
def _first_line_re():
    """
    Return a regular expression based on first_line_re suitable for matching
    strings.
    """
    if isinstance(first_line_re.pattern, str):
        # Already a text pattern: use it as-is.
        return first_line_re

    # first_line_re in Python >=3.1.4 and >=3.2.1 is a bytes pattern.
    return re.compile(first_line_re.pattern.decode())
1682
+
1683
+
1684
def auto_chmod(func, arg, exc):
    """rmtree() onerror handler.

    On Windows, a failed os.remove is usually a read-only file: clear
    the read-only bit and retry. Otherwise, re-raise the current
    exception with the failing function and path appended to its
    message for easier diagnosis.
    """
    if func is os.remove and os.name == 'nt':
        chmod(arg, stat.S_IWRITE)
        return func(arg)
    et, ev, _ = sys.exc_info()
    # BUG FIX: the old code did `(ev[0], ev[1] + ...)`, which indexes
    # the exception *instance*; that works on Python 2 but raises
    # TypeError on Python 3, masking the original error. Build an
    # augmented exception instance from str(ev) instead.
    six.reraise(et, et(str(ev) + (" %s %s" % (func, arg))))
1690
+
1691
+
1692
def update_dist_caches(dist_path, fix_zipimporter_caches):
    """
    Fix any globally cached `dist_path` related data

    `dist_path` should be a path of a newly installed egg distribution (zipped
    or unzipped).

    sys.path_importer_cache contains finder objects that have been cached when
    importing data from the original distribution. Any such finders need to be
    cleared since the replacement distribution might be packaged differently,
    e.g. a zipped egg distribution might get replaced with an unzipped egg
    folder or vice versa. Having the old finders cached may then cause Python
    to attempt loading modules from the replacement distribution using an
    incorrect loader.

    zipimport.zipimporter objects are Python loaders charged with importing
    data packaged inside zip archives. If stale loaders referencing the
    original distribution, are left behind, they can fail to load modules from
    the replacement distribution. E.g. if an old zipimport.zipimporter instance
    is used to load data from a new zipped egg archive, it may cause the
    operation to attempt to locate the requested data in the wrong location -
    one indicated by the original distribution's zip archive directory
    information. Such an operation may then fail outright, e.g. report having
    read a 'bad local file header', or even worse, it may fail silently &
    return invalid data.

    zipimport._zip_directory_cache contains cached zip archive directory
    information for all existing zipimport.zipimporter instances and all such
    instances connected to the same archive share the same cached directory
    information.

    If asked, and the underlying Python implementation allows it, we can fix
    all existing zipimport.zipimporter instances instead of having to track
    them down and remove them one by one, by updating their shared cached zip
    archive directory information. This, of course, assumes that the
    replacement distribution is packaged as a zipped egg.

    If not asked to fix existing zipimport.zipimporter instances, we still do
    our best to clear any remaining zipimport.zipimporter related cached data
    that might somehow later get used when attempting to load data from the new
    distribution and thus cause such load operations to fail. Note that when
    tracking down such remaining stale data, we can not catch every conceivable
    usage from here, and we clear only those that we know of and have found to
    cause problems if left alive. Any remaining caches should be updated by
    whomever is in charge of maintaining them, i.e. they should be ready to
    handle us replacing their zip archives with new distributions at runtime.

    """
    # There are several other known sources of stale zipimport.zipimporter
    # instances that we do not clear here, but might if ever given a reason to
    # do so:
    # * Global setuptools pkg_resources.working_set (a.k.a. 'master working
    #   set') may contain distributions which may in turn contain their
    #   zipimport.zipimporter loaders.
    # * Several zipimport.zipimporter loaders held by local variables further
    #   up the function call stack when running the setuptools installation.
    # * Already loaded modules may have their __loader__ attribute set to the
    #   exact loader instance used when importing them. Python 3.4 docs state
    #   that this information is intended mostly for introspection and so is
    #   not expected to cause us problems.
    normalized_path = normalize_path(dist_path)
    _uncache(normalized_path, sys.path_importer_cache)
    if fix_zipimporter_caches:
        _replace_zip_directory_cache_data(normalized_path)
    else:
        # Here, even though we do not want to fix existing and now stale
        # zipimporter cache information, we still want to remove it. Related to
        # Python's zip archive directory information cache, we clear each of
        # its stale entries in two phases:
        #   1. Clear the entry so attempting to access zip archive information
        #      via any existing stale zipimport.zipimporter instances fails.
        #   2. Remove the entry from the cache so any newly constructed
        #      zipimport.zipimporter instances do not end up using old stale
        #      zip archive directory information.
        # This whole stale data removal step does not seem strictly necessary,
        # but has been left in because it was done before we started replacing
        # the zip archive directory information cache content if possible, and
        # there are no relevant unit tests that we can depend on to tell us if
        # this is really needed.
        _remove_and_clear_zip_directory_cache_data(normalized_path)
1772
+
1773
+
1774
def _collect_zipimporter_cache_entries(normalized_path, cache):
    """
    Return zipimporter cache entry keys related to a given normalized path.

    Alternative path spellings (e.g. those using different character case or
    those using alternative path separators) related to the same path are
    included. Any sub-path entries are included as well, i.e. those
    corresponding to zip archives embedded in other zip archives.

    """
    matches = []
    plen = len(normalized_path)
    for key in cache:
        candidate = normalize_path(key)
        if not candidate.startswith(normalized_path):
            continue
        # Accept the path itself ('') or a true sub-path (a separator
        # immediately follows the prefix) — never a sibling that merely
        # shares the prefix string.
        if candidate[plen:plen + 1] in (os.sep, ''):
            matches.append(key)
    return matches
1792
+
1793
+
1794
def _update_zipimporter_cache(normalized_path, cache, updater=None):
    """
    Update zipimporter cache data for a given normalized path.

    Any sub-path entries are processed as well, i.e. those corresponding to zip
    archives embedded in other zip archives.

    Given updater is a callable taking a cache entry key and the original entry
    (after already removing the entry from the cache), and expected to update
    the entry and possibly return a new one to be inserted in its place.
    Returning None indicates that the entry should not be replaced with a new
    one. If no updater is given, the cache entries are simply removed without
    any additional processing, the same as if the updater simply returned None.

    """
    for p in _collect_zipimporter_cache_entries(normalized_path, cache):
        # N.B. pypy's custom zipimport._zip_directory_cache implementation does
        # not support the complete dict interface:
        # * Does not support item assignment, thus not allowing this function
        #   to be used only for removing existing cache entries.
        # * Does not support the dict.pop() method, forcing us to use the
        #   get/del patterns instead. For more detailed information see the
        #   following links:
        #   https://github.com/pypa/setuptools/issues/202#issuecomment-202913420
        #   https://bitbucket.org/pypy/pypy/src/dd07756a34a41f674c0cacfbc8ae1d4cc9ea2ae4/pypy/module/zipimport/interp_zipimport.py#cl-99
        old_entry = cache[p]
        del cache[p]
        # `updater and ...` short-circuits: with no updater the entry is
        # simply dropped, same as an updater returning None.
        new_entry = updater and updater(p, old_entry)
        if new_entry is not None:
            cache[p] = new_entry
1824
+
1825
+
1826
def _uncache(normalized_path, cache):
    # Remove all cache entries for the path (and its sub-paths) without
    # installing any replacements (no updater given).
    _update_zipimporter_cache(normalized_path, cache)
1828
+
1829
+
1830
def _remove_and_clear_zip_directory_cache_data(normalized_path):
    # Phase 1: clear each stale directory-info entry in place, so stale
    # zipimporter instances that still hold it fail fast.
    # Phase 2 (removal from the cache dict) is done by
    # _update_zipimporter_cache itself; returning None keeps it removed.
    def clear_and_remove_cached_zip_archive_directory_data(path, old_entry):
        old_entry.clear()

    _update_zipimporter_cache(
        normalized_path, zipimport._zip_directory_cache,
        updater=clear_and_remove_cached_zip_archive_directory_data)
1837
+
1838
# PyPy Python implementation does not allow directly writing to the
# zipimport._zip_directory_cache and so prevents us from attempting to correct
# its content. The best we can do there is clear the problematic cache content
# and have PyPy repopulate it as needed. The downside is that if there are any
# stale zipimport.zipimporter instances laying around, attempting to use them
# will fail due to not having its zip archive directory information available
# instead of being automatically corrected to use the new correct zip archive
# directory information.
if '__pypy__' in sys.builtin_module_names:
    _replace_zip_directory_cache_data = \
        _remove_and_clear_zip_directory_cache_data
else:
    def _replace_zip_directory_cache_data(normalized_path):
        def replace_cached_zip_archive_directory_data(path, old_entry):
            # N.B. In theory, we could load the zip directory information just
            # once for all updated path spellings, and then copy it locally and
            # update its contained path strings to contain the correct
            # spelling, but that seems like a way too invasive move (this cache
            # structure is not officially documented anywhere and could in
            # theory change with new Python releases) for no significant
            # benefit.
            old_entry.clear()
            # Constructing a fresh zipimporter for `path` repopulates the
            # shared _zip_directory_cache entry as a side effect.
            zipimport.zipimporter(path)
            old_entry.update(zipimport._zip_directory_cache[path])
            return old_entry

        _update_zipimporter_cache(
            normalized_path, zipimport._zip_directory_cache,
            updater=replace_cached_zip_archive_directory_data)
1867
+
1868
+
1869
def is_python(text, filename='<string>'):
    "Is this string a valid Python script?"
    try:
        compile(text, filename, 'exec')
    except (SyntaxError, TypeError):
        # SyntaxError: not parseable as Python.
        # TypeError: e.g. source containing null bytes.
        return False
    return True
1877
+
1878
+
1879
def is_sh(executable):
    """Determine if the specified executable is a .sh (contains a #! line)"""
    try:
        fp = io.open(executable, encoding='latin-1')
    except (OSError, IOError):
        # Unreadable/missing file: historical behavior is to return the
        # path itself (truthy) rather than raise.
        return executable
    with fp:
        return fp.read(2) == '#!'
1887
+
1888
+
1889
def nt_quote_arg(arg):
    """Quote a command line argument according to Windows parsing rules"""
    # The stdlib already implements MS C runtime quoting for a whole
    # argv; quote a single argument by passing a one-element list.
    return subprocess.list2cmdline([arg])
1892
+
1893
+
1894
def is_python_script(script_text, filename):
    """Is this text, as a whole, a Python script? (as opposed to shell/bat/etc.
    """
    # Trust the file extension first.
    if filename.endswith(('.py', '.pyw')):
        return True
    # Anything that compiles cleanly is treated as Python.
    if is_python(script_text, filename):
        return True
    # Finally, a shebang mentioning python counts.
    if script_text.startswith('#!'):
        shebang = script_text.splitlines()[0]
        return 'python' in shebang.lower()

    return False  # Not any Python I can recognize
1906
+
1907
+
1908
# os.chmod does not exist on all platforms; fall back to a no-op there.
try:
    from os import chmod as _chmod
except ImportError:
    # Jython compatibility
    def _chmod(*args):
        pass
1914
+
1915
+
1916
def chmod(path, mode):
    """Best-effort chmod: failures are logged at debug level, not raised."""
    log.debug("changing mode of %s to %o", path, mode)
    try:
        _chmod(path, mode)
    except os.error as e:
        log.debug("chmod failed: %s", e)
1922
+
1923
+
1924
class CommandSpec(list):
    """
    A command spec for a #! header, specified as a list of arguments akin to
    those passed to Popen.
    """

    # Extra interpreter options (e.g. ['-x']) prepended when rendering
    # the header; populated by install_options().
    options = []
    # Keyword arguments forwarded to shlex.split() in from_string().
    split_args = dict()

    @classmethod
    def best(cls):
        """
        Choose the best CommandSpec class based on environmental conditions.
        """
        return cls

    @classmethod
    def _sys_executable(cls):
        # Honor the macOS framework-build venv launcher override if set.
        _default = os.path.normpath(sys.executable)
        return os.environ.get('__PYVENV_LAUNCHER__', _default)

    @classmethod
    def from_param(cls, param):
        """
        Construct a CommandSpec from a parameter to build_scripts, which may
        be None.
        """
        if isinstance(param, cls):
            return param
        if isinstance(param, list):
            return cls(param)
        if param is None:
            return cls.from_environment()
        # otherwise, assume it's a string.
        return cls.from_string(param)

    @classmethod
    def from_environment(cls):
        # Default spec: just the current interpreter.
        return cls([cls._sys_executable()])

    @classmethod
    def from_string(cls, string):
        """
        Construct a command spec from a simple string representing a command
        line parseable by shlex.split.
        """
        items = shlex.split(string, **cls.split_args)
        return cls(items)

    def install_options(self, script_text):
        # Adopt any options found on the script's own #! line; prepend
        # '-x' when the rendered command line is not pure ASCII.
        self.options = shlex.split(self._extract_options(script_text))
        cmdline = subprocess.list2cmdline(self)
        if not isascii(cmdline):
            self.options[:0] = ['-x']

    @staticmethod
    def _extract_options(orig_script):
        """
        Extract any options from the first line of the script.
        """
        first = (orig_script + '\n').splitlines()[0]
        match = _first_line_re().match(first)
        options = match.group(1) or '' if match else ''
        return options.strip()

    def as_header(self):
        # Render '#!<command line>\n'.
        return self._render(self + list(self.options))

    @staticmethod
    def _render(items):
        cmdline = subprocess.list2cmdline(items)
        return '#!' + cmdline + '\n'
1996
+
1997
# For pbr compat; will be removed in a future version.
sys_executable = CommandSpec._sys_executable()
1999
+
2000
+
2001
class WindowsCommandSpec(CommandSpec):
    # Windows command lines are not POSIX: keep shlex from treating
    # backslashes in paths as escape characters.
    split_args = dict(posix=False)
2003
+
2004
+
2005
class ScriptWriter(object):
    """
    Encapsulates behavior around writing entry point scripts for console and
    gui apps.
    """

    # Stub script source; interpolated with spec/group/name in get_args().
    template = textwrap.dedent("""
        # EASY-INSTALL-ENTRY-SCRIPT: %(spec)r,%(group)r,%(name)r
        __requires__ = %(spec)r
        import sys
        from pkg_resources import load_entry_point

        if __name__ == '__main__':
            sys.exit(
                load_entry_point(%(spec)r, %(group)r, %(name)r)()
            )
    """).lstrip()

    command_spec_class = CommandSpec

    @classmethod
    def get_script_args(cls, dist, executable=None, wininst=False):
        # for backward compatibility
        warnings.warn("Use get_args", DeprecationWarning)
        writer = (WindowsScriptWriter if wininst else ScriptWriter).best()
        header = cls.get_script_header("", executable, wininst)
        return writer.get_args(dist, header)

    @classmethod
    def get_script_header(cls, script_text, executable=None, wininst=False):
        # for backward compatibility
        warnings.warn("Use get_header", DeprecationWarning)
        if wininst:
            executable = "python.exe"
        cmd = cls.command_spec_class.best().from_param(executable)
        cmd.install_options(script_text)
        return cmd.as_header()

    @classmethod
    def get_args(cls, dist, header=None):
        """
        Yield write_script() argument tuples for a distribution's
        console_scripts and gui_scripts entry points.
        """
        if header is None:
            header = cls.get_header()
        spec = str(dist.as_requirement())
        for type_ in 'console', 'gui':
            group = type_ + '_scripts'
            for name, ep in dist.get_entry_map(group).items():
                cls._ensure_safe_name(name)
                # The template pulls spec/group/name out of locals().
                script_text = cls.template % locals()
                args = cls._get_script_args(type_, name, header, script_text)
                for res in args:
                    yield res

    @staticmethod
    def _ensure_safe_name(name):
        """
        Prevent paths in *_scripts entry point names.
        """
        has_path_sep = re.search(r'[\\/]', name)
        if has_path_sep:
            raise ValueError("Path separators not allowed in script names")

    @classmethod
    def get_writer(cls, force_windows):
        # for backward compatibility
        warnings.warn("Use best", DeprecationWarning)
        return WindowsScriptWriter.best() if force_windows else cls.best()

    @classmethod
    def best(cls):
        """
        Select the best ScriptWriter for this environment.
        """
        # os._name == 'nt' detects Jython running on Windows.
        if sys.platform == 'win32' or (os.name == 'java' and os._name == 'nt'):
            return WindowsScriptWriter.best()
        else:
            return cls

    @classmethod
    def _get_script_args(cls, type_, name, header, script_text):
        # Simply write the stub with no extension.
        yield (name, header + script_text)

    @classmethod
    def get_header(cls, script_text="", executable=None):
        """Create a #! line, getting options (if any) from script_text"""
        cmd = cls.command_spec_class.best().from_param(executable)
        cmd.install_options(script_text)
        return cmd.as_header()
2097
+
2098
+
2099
class WindowsScriptWriter(ScriptWriter):
    """ScriptWriter variant producing Windows-friendly script files."""

    command_spec_class = WindowsCommandSpec

    @classmethod
    def get_writer(cls):
        # for backward compatibility
        warnings.warn("Use best", DeprecationWarning)
        return cls.best()

    @classmethod
    def best(cls):
        """
        Select the best ScriptWriter suitable for Windows
        """
        writer_lookup = dict(
            executable=WindowsExecutableLauncherWriter,
            natural=cls,
        )
        # for compatibility, use the executable launcher by default
        launcher = os.environ.get('SETUPTOOLS_LAUNCHER', 'executable')
        return writer_lookup[launcher]

    @classmethod
    def _get_script_args(cls, type_, name, header, script_text):
        "For Windows, add a .py extension"
        # NOTE(review): '.pya' (not '.py') is the extension this writer
        # historically uses for console scripts — kept as-is.
        ext = dict(console='.pya', gui='.pyw')[type_]
        if ext not in os.environ['PATHEXT'].lower().split(';'):
            warnings.warn("%s not listed in PATHEXT; scripts will not be "
                          "recognized as executables." % ext, UserWarning)
        # Remove any stale sibling files with competing extensions.
        old = ['.pya', '.py', '-script.py', '.pyc', '.pyo', '.pyw', '.exe']
        old.remove(ext)
        header = cls._adjust_header(type_, header)
        blockers = [name + x for x in old]
        yield name + ext, header + script_text, 't', blockers

    @classmethod
    def _adjust_header(cls, type_, orig_header):
        """
        Make sure 'pythonw' is used for gui and 'python' is used for
        console (regardless of what sys.executable is).
        """
        pattern = 'pythonw.exe'
        repl = 'python.exe'
        if type_ == 'gui':
            pattern, repl = repl, pattern
        pattern_ob = re.compile(re.escape(pattern), re.IGNORECASE)
        new_header = pattern_ob.sub(string=orig_header, repl=repl)
        return new_header if cls._use_header(new_header) else orig_header

    @staticmethod
    def _use_header(new_header):
        """
        Should _adjust_header use the replaced header?

        On non-windows systems, always use. On
        Windows systems, only use the replaced header if it resolves
        to an executable on the system.
        """
        # Strip the '#!' prefix, trailing newline, and surrounding quotes.
        clean_header = new_header[2:-1].strip('"')
        return sys.platform != 'win32' or find_executable(clean_header)
2159
+
2160
+
2161
class WindowsExecutableLauncherWriter(WindowsScriptWriter):
    @classmethod
    def _get_script_args(cls, type_, name, header, script_text):
        """
        For Windows, add a .py extension and an .exe launcher
        """
        if type_ == 'gui':
            launcher_type = 'gui'
            ext = '-script.pyw'
            old = ['.pyw']
        else:
            launcher_type = 'cli'
            ext = '-script.py'
            old = ['.py', '.pyc', '.pyo']
        hdr = cls._adjust_header(type_, header)
        blockers = [name + x for x in old]
        # The actual script, written in text mode ('t'), plus the names
        # of stale sibling files that must be removed.
        yield (name + ext, hdr + script_text, 't', blockers)
        # The binary .exe launcher that invokes the '-script' file.
        yield (
            name + '.exe', get_win_launcher(launcher_type),
            'b'  # write in binary mode
        )
        if not is_64bit():
            # install a manifest for the launcher to prevent Windows
            # from detecting it as an installer (which it will for
            # launchers like easy_install.exe). Consider only
            # adding a manifest for launchers detected as installers.
            # See Distribute #143 for details.
            m_name = name + '.exe.manifest'
            yield (m_name, load_launcher_manifest(name), 't')
2190
+
2191
+
2192
# for backward-compatibility
# Module-level aliases kept so legacy callers importing these names
# directly continue to work; both emit DeprecationWarning when called.
get_script_args = ScriptWriter.get_script_args
get_script_header = ScriptWriter.get_script_header
2195
+
2196
+
2197
def get_win_launcher(type):
    """
    Load the Windows launcher (executable) suitable for launching a script.

    `type` should be either 'cli' or 'gui'

    Returns the executable as a byte string.
    """
    launcher_fn = '%s.exe' % type
    # Build names like 'cli-32.exe', 'gui-64.exe', 'cli-arm-64.exe':
    # each replace() rewrites the first '.' in the current name.
    if platform.machine().lower() == 'arm':
        launcher_fn = launcher_fn.replace(".", "-arm.")
    if is_64bit():
        launcher_fn = launcher_fn.replace(".", "-64.")
    else:
        launcher_fn = launcher_fn.replace(".", "-32.")
    return resource_string('setuptools', launcher_fn)
2213
+
2214
+
2215
def load_launcher_manifest(name):
    """Return the launcher's XML manifest with %(name)s interpolated."""
    manifest = pkg_resources.resource_string(__name__, 'launcher manifest.xml')
    if six.PY2:
        # On Python 2 the resource is already a (byte) str.
        return manifest % vars()
    else:
        return manifest.decode('utf-8') % vars()
2221
+
2222
+
2223
def rmtree(path, ignore_errors=False, onerror=auto_chmod):
    """Recursively delete a directory tree.

    This code is taken from the Python 2.4 version of 'shutil', because
    the 2.3 version doesn't really work right.
    """
    if ignore_errors:
        def onerror(*args):
            # Swallow every failure silently.
            pass
    elif onerror is None:
        def onerror(*args):
            # Re-raise the exception currently being handled.
            raise

    entries = []
    try:
        entries = os.listdir(path)
    except os.error:
        onerror(os.listdir, path, sys.exc_info())

    for entry in entries:
        child = os.path.join(path, entry)
        try:
            child_is_dir = stat.S_ISDIR(os.lstat(child).st_mode)
        except os.error:
            # Unstat-able entry: treat as a plain file below.
            child_is_dir = False
        if child_is_dir:
            rmtree(child, ignore_errors, onerror)
        else:
            try:
                os.remove(child)
            except os.error:
                onerror(os.remove, child, sys.exc_info())

    try:
        os.rmdir(path)
    except os.error:
        onerror(os.rmdir, path, sys.exc_info())
2257
+
2258
+
2259
def current_umask():
    """Return the process umask without permanently changing it."""
    # There is no read-only umask API: set a throwaway value, then put
    # back the previous mask that os.umask() handed us.
    previous = os.umask(0o022)
    os.umask(previous)
    return previous
2263
+
2264
+
2265
def bootstrap():
    # This function is called when setuptools*.egg is run using /bin/sh
    import setuptools

    # Run easy_install on the egg itself: the directory containing the
    # setuptools package is both the script name and the install target.
    argv0 = os.path.dirname(setuptools.__path__[0])
    sys.argv[0] = argv0
    sys.argv.append(argv0)
    main()
2273
+
2274
+
2275
def main(argv=None, **kw):
    """Command-line entry point: run the easy_install command via setup()."""
    from setuptools import setup
    from setuptools.dist import Distribution

    class DistributionWithoutHelpCommands(Distribution):
        # Suppress distutils' generic usage banner in help output.
        common_usage = ""

        def _show_help(self, *args, **kw):
            with _patch_usage():
                Distribution._show_help(self, *args, **kw)

    if argv is None:
        argv = sys.argv[1:]

    with _patch_usage():
        setup(
            # '-q' quiets setup itself; '-v' keeps easy_install verbose.
            script_args=['-q', 'easy_install', '-v'] + argv,
            script_name=sys.argv[0] or 'easy_install',
            distclass=DistributionWithoutHelpCommands, **kw
        )
2295
+
2296
+
2297
@contextlib.contextmanager
def _patch_usage():
    """Temporarily swap distutils.core.gen_usage for an easy_install
    flavored usage message; always restore the original on exit."""
    import distutils.core
    USAGE = textwrap.dedent("""
        usage: %(script)s [options] requirement_or_url ...
           or: %(script)s --help
        """).lstrip()

    def gen_usage(script_name):
        return USAGE % dict(script=os.path.basename(script_name))

    original = distutils.core.gen_usage
    distutils.core.gen_usage = gen_usage
    try:
        yield
    finally:
        distutils.core.gen_usage = original
1001-sala-de-aula-master/env/lib64/python3.5/site-packages/setuptools/command/install_lib.py ADDED
@@ -0,0 +1,147 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import os
2
+ import sys
3
+ import imp
4
+ from itertools import product, starmap
5
+ import distutils.command.install_lib as orig
6
+
7
class install_lib(orig.install_lib):
    """Don't add compiled flags to filenames of non-Python files"""

    def initialize_options(self):
        orig.install_lib.initialize_options(self)
        # Debian-patch options: multiarch extension-suffix handling and
        # the 'deb' install layout.
        self.multiarch = None
        self.install_layout = None

    def finalize_options(self):
        orig.install_lib.finalize_options(self)
        self.set_undefined_options('install', ('install_layout', 'install_layout'))
        if self.install_layout == 'deb' and sys.version_info[:2] >= (3, 3):
            import sysconfig
            self.multiarch = sysconfig.get_config_var('MULTIARCH')

    def run(self):
        self.build()
        outfiles = self.install()
        if outfiles is not None:
            # always compile, in case we have any extension stubs to deal with
            self.byte_compile(outfiles)

    def get_exclusions(self):
        """
        Return a collections.Sized collections.Container of paths to be
        excluded for single_version_externally_managed installations.
        """
        all_packages = (
            pkg
            for ns_pkg in self._get_SVEM_NSPs()
            for pkg in self._all_packages(ns_pkg)
        )

        excl_specs = product(all_packages, self._gen_exclusion_paths())
        return set(starmap(self._exclude_pkg_path, excl_specs))

    def _exclude_pkg_path(self, pkg, exclusion_path):
        """
        Given a package name and exclusion path within that package,
        compute the full exclusion path.
        """
        parts = pkg.split('.') + [exclusion_path]
        return os.path.join(self.install_dir, *parts)

    @staticmethod
    def _all_packages(pkg_name):
        """
        >>> list(install_lib._all_packages('foo.bar.baz'))
        ['foo.bar.baz', 'foo.bar', 'foo']
        """
        while pkg_name:
            yield pkg_name
            pkg_name, sep, child = pkg_name.rpartition('.')

    def _get_SVEM_NSPs(self):
        """
        Get namespace packages (list) but only for
        single_version_externally_managed installations and empty otherwise.
        """
        # TODO: is it necessary to short-circuit here? i.e. what's the cost
        # if get_finalized_command is called even when namespace_packages is
        # False?
        if not self.distribution.namespace_packages:
            return []

        install_cmd = self.get_finalized_command('install')
        svem = install_cmd.single_version_externally_managed

        return self.distribution.namespace_packages if svem else []

    @staticmethod
    def _gen_exclusion_paths():
        """
        Generate file paths to be excluded for namespace packages (bytecode
        cache files).
        """
        # always exclude the package module itself
        yield '__init__.py'

        yield '__init__.pyc'
        yield '__init__.pyo'

        # __pycache__ naming requires imp.get_tag (absent on some impls).
        if not hasattr(imp, 'get_tag'):
            return

        base = os.path.join('__pycache__', '__init__.' + imp.get_tag())
        yield base + '.pyc'
        yield base + '.pyo'
        yield base + '.opt-1.pyc'
        yield base + '.opt-2.pyc'

    def copy_tree(
            self, infile, outfile,
            preserve_mode=1, preserve_times=1, preserve_symlinks=0, level=1
    ):
        # Only the default preserve_* values are supported here.
        assert preserve_mode and preserve_times and not preserve_symlinks
        exclude = self.get_exclusions()

        if not exclude:
            import distutils.dir_util
            # Pass the multiarch suffix through to the (Debian-patched)
            # dir_util before delegating to the stock implementation.
            distutils.dir_util._multiarch = self.multiarch
            return orig.install_lib.copy_tree(self, infile, outfile)

        # Exclude namespace package __init__.py* files from the output

        from setuptools.archive_util import unpack_directory
        from distutils import log

        outfiles = []

        if self.multiarch:
            import sysconfig
            ext_suffix = sysconfig.get_config_var('EXT_SUFFIX')
            # If the suffix already carries the multiarch tag, no rename
            # is needed; otherwise insert the tag before the final '.so'.
            if ext_suffix.endswith(self.multiarch + ext_suffix[-3:]):
                new_suffix = None
            else:
                new_suffix = "%s-%s%s" % (ext_suffix[:-3], self.multiarch, ext_suffix[-3:])

        def pf(src, dst):
            # unpack_directory filter: veto excluded files, rename
            # extension modules to their multiarch-tagged names.
            if dst in exclude:
                log.warn("Skipping installation of %s (namespace package)",
                         dst)
                return False

            if self.multiarch and new_suffix and dst.endswith(ext_suffix) and not dst.endswith(new_suffix):
                dst = dst.replace(ext_suffix, new_suffix)
                log.info("renaming extension to %s", os.path.basename(dst))

            log.info("copying %s -> %s", src, os.path.dirname(dst))
            outfiles.append(dst)
            return dst

        unpack_directory(infile, outfile, pf)
        return outfiles

    def get_outputs(self):
        outputs = orig.install_lib.get_outputs(self)
        exclude = self.get_exclusions()
        if exclude:
            return [f for f in outputs if f not in exclude]
        return outputs
1076LAB-master/1225test/arm.h ADDED
@@ -0,0 +1,99 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
#pragma once
#include "HRSDK.h"
#include <iostream>
#include <Windows.h>
//#include "gripper.h"

using namespace std;

// Wrapper around a HIWIN HRSDK robot-arm handle (HROBOT).  Bundles motion
// commands, speed configuration and task-specific poses used by the lab
// programs.  Declarations only; implementations live in the matching .cpp.
class ARM
{
private:
	//class robot declare
	HROBOT robot;  // SDK handle for the connected robot controller

	//class parameter
	int overrideRatio = 70; //1-100% //25-35
	int ptpSpeed = 45; //1-100% //25-35
	int linSpeed = 450; //1-2000 mm/s //700
	int AccelerationRatio = 40; //1-100% //25-35



public:
	//pos coordinate
	// Home pose; presumably {x, y, z, rx, ry, rz} — TODO confirm against HRSDK.
	const double pos_home[6] = { 0, 286.429, 282.85, 180, 0, 90 };

	//axis coordinate

	// (original section marker was a mojibake comment; likely "program flow")
	ARM(HROBOT r); // constructor (original comment was mojibake; likely "initialize")
	void goThePos(double* pos);              // move to a Cartesian pose
	void goThePos(const double* pos);
	void lineGoThePos(const double* pos);    // straight-line move to a pose
	void lineGoThePosRel(const double* pos); // straight-line move, relative offset
	void goTheAxis(double* pos);             // move in joint space
	void goTheAxis(const double* pos);
	void getNowPos(double* pos);             // read current Cartesian pose
	void getNowAxis(double* axis);           // read current joint angles
	void goHome();                           // move to pos_home
	void setSpeed(int speed);//1~100
	void setptpSpeed(int ptpspeed);
	// Single-component pose / joint setters.
	void setPosX(double x);
	void setPosY(double y);
	void setPosXY(double x, double y);
	void setPosZ(double z);
	void setPosRX(double rx);
	void setAxisJ1(double j1);
	void setAxisJ2(double j2);
	void setAxisJ3J5(double j3, double j5);
	void setAxisJ2J3J5(double j2, double j3, double j5);
	void setAxisJ5(double j5);
	void setAxisJ6(double j6);
	void cup_300ml();
	// (original closing section marker was a mojibake comment)

	void errorCheck();


	void goCheakPos();  // NOTE(review): "Cheak" typo kept — renaming would break callers
	void goCatch();



	// Task-specific poses (vision / pick-and-place routines).
	void goArticle1();
	void goArticle2();
	void goArticle3();
	void goPhotoPos();
	void goPhotoPlace();
	void goTest();
	void goPhotoPos(int num);
	void goPlanePhotoPos();
	void goSolidPhotoPos();
	void goMiddelePos();
	void goOriginalPos();
	void goBlockUp(double x, double y, double angle);

	// Small incremental motions.
	void goDown();
	void goDownDown();
	void goUp();
	void goUpUp();
	void goBigUp();
	void goMoveXY(double x, double y);
	void goHeadStright();
	void goHeadModeZero();
	void goHeadModeOne();
	void goSolidPos(double x, double y, double angle, int num);
	//double* getnowpos();
	void turnOverOnBox();
	void turnOverAboveBox();
	void goOnBox();
	void goAboveBox();
	void goOnBoxWithAngle(double angle);
	void goSmallAboveBoxWithAngle(double angle);

	// Gripper helpers (see the commented-out gripper.h include above).
	void gripperOpen();
	void gripperOpenLarge();
	void gripperClose();

};
1076LAB-master/Adafruit_Python_DHT/README.md ADDED
@@ -0,0 +1,104 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ *DEPRECATED LIBRARY* Adafruit Python DHT Sensor Library
2
+ =======================
3
+
4
+ This library has been deprecated! We are leaving this up for historical and research purposes but archiving the repository.
5
+
6
+ We are now only supporting the use of our CircuitPython libraries for use with Python.
7
+
8
+ Check out this guide for info on using DHT sensors with the CircuitPython library: https://learn.adafruit.com/dht-humidity-sensing-on-raspberry-pi-with-gdocs-logging/python-setup
9
+
10
+ ---------------------------------------
11
+
12
+ Python library to read the DHT series of humidity and temperature sensors on a
13
+ Raspberry Pi or Beaglebone Black.
14
+
15
+ Designed specifically to work with the Adafruit DHT series sensors ---->
16
+ https://www.adafruit.com/products/385
17
+
18
+ Currently the library is tested with Python 2.6, 2.7, 3.3 and 3.4. It should
19
+ work with Python greater than 3.4, too.
20
+
21
+ Installing
22
+ ----------
23
+
24
+ ### Dependencies
25
+
26
+ For all platforms (Raspberry Pi and Beaglebone Black) make sure your system is
27
+ able to compile and download Python extensions with **pip**:
28
+
29
+ On Raspbian or Beaglebone Black's Debian/Ubuntu image you can ensure your
30
+ system is ready by running one or two of the following sets of commands:
31
+
32
+ Python 2:
33
+
34
+ ````sh
35
+ sudo apt-get update
36
+ sudo apt-get install python-pip
37
+ sudo python -m pip install --upgrade pip setuptools wheel
38
+ ````
39
+
40
+ Python 3:
41
+
42
+ ````sh
43
+ sudo apt-get update
44
+ sudo apt-get install python3-pip
45
+ sudo python3 -m pip install --upgrade pip setuptools wheel
46
+ ````
47
+
48
+ ### Install with pip
49
+
50
+ Use `pip` to install from PyPI.
51
+
52
+ Python 2:
53
+
54
+ ```sh
55
+ sudo pip install Adafruit_DHT
56
+ ```
57
+
58
+ Python 3:
59
+
60
+ ```sh
61
+ sudo pip3 install Adafruit_DHT
62
+ ```
63
+
64
+ ### Compile and install from the repository
65
+
66
+ First download the library source code from the [GitHub releases
67
+ page](https://github.com/adafruit/Adafruit_Python_DHT/releases), unzipping the
68
+ archive, and execute:
69
+
70
+ Python 2:
71
+
72
+ ```sh
73
+ cd Adafruit_Python_DHT
74
+ sudo python setup.py install
75
+ ```
76
+
77
+ Python 3:
78
+
79
+ ```sh
80
+ cd Adafruit_Python_DHT
81
+ sudo python3 setup.py install
82
+ ```
83
+
84
+ You may also git clone the repository if you want to test an unreleased
85
+ version:
86
+
87
+ ```sh
88
+ git clone https://github.com/adafruit/Adafruit_Python_DHT.git
89
+ ```
90
+
91
+ Usage
92
+ -----
93
+
94
+ See example of usage in the examples folder.
95
+
96
+ Author
97
+ ------
98
+
99
+ Adafruit invests time and resources providing this open source code, please
100
+ support Adafruit and open-source hardware by purchasing products from Adafruit!
101
+
102
+ Written by Tony DiCola for Adafruit Industries.
103
+
104
+ MIT license, all text above must be included in any redistribution
1076LAB-master/Adafruit_Python_DHT/source/Raspberry_Pi/pi_mmio.c ADDED
@@ -0,0 +1,63 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ // Copyright (c) 2014 Adafruit Industries
2
+ // Author: Tony DiCola
3
+ // Based on code from Gert van Loo & Dom: http://elinux.org/RPi_Low-level_peripherals#GPIO_Code_examples
4
+
5
+ // Permission is hereby granted, free of charge, to any person obtaining a copy
6
+ // of this software and associated documentation files (the "Software"), to deal
7
+ // in the Software without restriction, including without limitation the rights
8
+ // to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
9
+ // copies of the Software, and to permit persons to whom the Software is
10
+ // furnished to do so, subject to the following conditions:
11
+
12
+ // The above copyright notice and this permission notice shall be included in all
13
+ // copies or substantial portions of the Software.
14
+
15
+ // THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
16
+ // IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
17
+ // FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
18
+ // AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
19
+ // LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
20
+ // OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
21
+ // SOFTWARE.
22
+ #include <fcntl.h>
23
+ #include <stdlib.h>
24
+ #include <string.h>
25
+ #include <sys/mman.h>
26
+ #include <sys/stat.h>
27
+ #include <sys/types.h>
28
+ #include <unistd.h>
29
+
30
+ #include "pi_mmio.h"
31
+
32
+ #define BASE 0x20000000
33
+ #define GPIO_BASE (BASE + 0x200000)
34
+ #define GPIO_LENGTH 4096
35
+
36
+ volatile uint32_t* pi_mmio_gpio = NULL;
37
+
38
+ int pi_mmio_init(void) {
39
+ if (pi_mmio_gpio == NULL) {
40
+ int fd;
41
+
42
+ // On older kernels user readable /dev/gpiomem might not exists.
43
+ // Falls back to root-only /dev/mem.
44
+ if( access( "/dev/gpiomem", F_OK ) != -1 ) {
45
+ fd = open("/dev/gpiomem", O_RDWR | O_SYNC);
46
+ } else {
47
+ fd = open("/dev/mem", O_RDWR | O_SYNC);
48
+ }
49
+ if (fd == -1) {
50
+ // Error opening /dev/gpiomem.
51
+ return MMIO_ERROR_DEVMEM;
52
+ }
53
+ // Map GPIO memory to location in process space.
54
+ pi_mmio_gpio = (uint32_t*)mmap(NULL, GPIO_LENGTH, PROT_READ | PROT_WRITE, MAP_SHARED, fd, GPIO_BASE);
55
+ close(fd);
56
+ if (pi_mmio_gpio == MAP_FAILED) {
57
+ // Don't save the result if the memory mapping failed.
58
+ pi_mmio_gpio = NULL;
59
+ return MMIO_ERROR_MMAP;
60
+ }
61
+ }
62
+ return MMIO_SUCCESS;
63
+ }
1076LAB-master/Adafruit_Python_DHT/source/Raspberry_Pi/pi_mmio.h ADDED
@@ -0,0 +1,61 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ // Copyright (c) 2014 Adafruit Industries
2
+ // Author: Tony DiCola
3
+ // Based on code from Gert van Loo & Dom: http://elinux.org/RPi_Low-level_peripherals#GPIO_Code_examples
4
+
5
+ // Permission is hereby granted, free of charge, to any person obtaining a copy
6
+ // of this software and associated documentation files (the "Software"), to deal
7
+ // in the Software without restriction, including without limitation the rights
8
+ // to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
9
+ // copies of the Software, and to permit persons to whom the Software is
10
+ // furnished to do so, subject to the following conditions:
11
+
12
+ // The above copyright notice and this permission notice shall be included in all
13
+ // copies or substantial portions of the Software.
14
+
15
+ // THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
16
+ // IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
17
+ // FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
18
+ // AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
19
+ // LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
20
+ // OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
21
+ // SOFTWARE.
22
+
23
+ // Simple fast memory-mapped GPIO library for the Raspberry Pi.
24
#ifndef PI_MMIO_H
#define PI_MMIO_H

#include <stdint.h>

#define MMIO_SUCCESS 0
#define MMIO_ERROR_DEVMEM -1
#define MMIO_ERROR_MMAP -2

// Base of the memory-mapped GPIO register block (set up by pi_mmio_init).
extern volatile uint32_t* pi_mmio_gpio;

int pi_mmio_init(void);

// Configure the pin as an input by clearing its 3-bit function field.
static inline void pi_mmio_set_input(const int gpio_number) {
  pi_mmio_gpio[gpio_number / 10] &= ~(7 << ((gpio_number % 10) * 3));
}

// Configure the pin as an output (function field 001).
static inline void pi_mmio_set_output(const int gpio_number) {
  // Clear the field first so it ends up exactly 001.
  pi_mmio_set_input(gpio_number);
  pi_mmio_gpio[gpio_number / 10] |= (1 << ((gpio_number % 10) * 3));
}

// Drive the pin high (write its bit to the set register at word offset 7).
static inline void pi_mmio_set_high(const int gpio_number) {
  pi_mmio_gpio[7] = 1 << gpio_number;
}

// Drive the pin low (write its bit to the clear register at word offset 10).
static inline void pi_mmio_set_low(const int gpio_number) {
  pi_mmio_gpio[10] = 1 << gpio_number;
}

// Sample the pin (level register at word offset 13); nonzero means high.
static inline uint32_t pi_mmio_input(const int gpio_number) {
  return pi_mmio_gpio[13] & (1 << gpio_number);
}

#endif
1076LAB-master/Adafruit_Python_DHT/source/Raspberry_Pi_2/pi_2_dht_read.h ADDED
@@ -0,0 +1,32 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ // Copyright (c) 2014 Adafruit Industries
2
+ // Author: Tony DiCola
3
+
4
+ // Permission is hereby granted, free of charge, to any person obtaining a copy
5
+ // of this software and associated documentation files (the "Software"), to deal
6
+ // in the Software without restriction, including without limitation the rights
7
+ // to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
8
+ // copies of the Software, and to permit persons to whom the Software is
9
+ // furnished to do so, subject to the following conditions:
10
+
11
+ // The above copyright notice and this permission notice shall be included in all
12
+ // copies or substantial portions of the Software.
13
+
14
+ // THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
15
+ // IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
16
+ // FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
17
+ // AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
18
+ // LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
19
+ // OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
20
+ // SOFTWARE.
21
+ #ifndef PI_2_DHT_READ_H
22
+ #define PI_2_DHT_READ_H
23
+
24
+ #include "../common_dht_read.h"
25
+
26
+ // Read DHT sensor connected to GPIO pin (using BCM numbering). Humidity and temperature will be
27
+ // returned in the provided parameters. If a successfull reading could be made a value of 0
28
+ // (DHT_SUCCESS) will be returned. If there was an error reading the sensor a negative value will
29
+ // be returned. Some errors can be ignored and retried, specifically DHT_ERROR_TIMEOUT or DHT_ERROR_CHECKSUM.
30
+ int pi_2_dht_read(int sensor, int pin, float* humidity, float* temperature);
31
+
32
+ #endif
1076LAB-master/Adafruit_Python_DHT/source/Raspberry_Pi_2/pi_2_mmio.c ADDED
@@ -0,0 +1,71 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ // Copyright (c) 2014 Adafruit Industries
2
+ // Author: Tony DiCola
3
+ // Based on code from Gert van Loo & Dom: http://elinux.org/RPi_Low-level_peripherals#GPIO_Code_examples
4
+
5
+ // Permission is hereby granted, free of charge, to any person obtaining a copy
6
+ // of this software and associated documentation files (the "Software"), to deal
7
+ // in the Software without restriction, including without limitation the rights
8
+ // to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
9
+ // copies of the Software, and to permit persons to whom the Software is
10
+ // furnished to do so, subject to the following conditions:
11
+
12
+ // The above copyright notice and this permission notice shall be included in all
13
+ // copies or substantial portions of the Software.
14
+
15
+ // THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
16
+ // IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
17
+ // FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
18
+ // AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
19
+ // LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
20
+ // OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
21
+ // SOFTWARE.
22
+ #include <fcntl.h>
23
+ #include <stdlib.h>
24
+ #include <stdio.h>
25
+ #include <string.h>
26
+ #include <sys/mman.h>
27
+ #include <sys/stat.h>
28
+ #include <sys/types.h>
29
+ #include <unistd.h>
30
+
31
+ #include "pi_2_mmio.h"
32
+
33
+ #define GPIO_BASE_OFFSET 0x200000
34
+ #define GPIO_LENGTH 4096
35
+
36
+ volatile uint32_t* pi_2_mmio_gpio = NULL;
37
+
38
+ int pi_2_mmio_init(void) {
39
+ if (pi_2_mmio_gpio == NULL) {
40
+ // Check for GPIO and peripheral addresses from device tree.
41
+ // Adapted from code in the RPi.GPIO library at:
42
+ // http://sourceforge.net/p/raspberry-gpio-python/
43
+ FILE *fp = fopen("/proc/device-tree/soc/ranges", "rb");
44
+ if (fp == NULL) {
45
+ return MMIO_ERROR_OFFSET;
46
+ }
47
+ fseek(fp, 4, SEEK_SET);
48
+ unsigned char buf[4];
49
+ if (fread(buf, 1, sizeof(buf), fp) != sizeof(buf)) {
50
+ return MMIO_ERROR_OFFSET;
51
+ }
52
+ uint32_t peri_base = buf[0] << 24 | buf[1] << 16 | buf[2] << 8 | buf[3] << 0;
53
+ uint32_t gpio_base = peri_base + GPIO_BASE_OFFSET;
54
+ fclose(fp);
55
+
56
+ int fd = open("/dev/gpiomem", O_RDWR | O_SYNC);
57
+ if (fd == -1) {
58
+ // Error opening /dev/gpiomem.
59
+ return MMIO_ERROR_DEVMEM;
60
+ }
61
+ // Map GPIO memory to location in process space.
62
+ pi_2_mmio_gpio = (uint32_t*)mmap(NULL, GPIO_LENGTH, PROT_READ | PROT_WRITE, MAP_SHARED, fd, gpio_base);
63
+ close(fd);
64
+ if (pi_2_mmio_gpio == MAP_FAILED) {
65
+ // Don't save the result if the memory mapping failed.
66
+ pi_2_mmio_gpio = NULL;
67
+ return MMIO_ERROR_MMAP;
68
+ }
69
+ }
70
+ return MMIO_SUCCESS;
71
+ }
1076LAB-master/Adafruit_Python_DHT/source/Test/test_dht_read.c ADDED
@@ -0,0 +1,34 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ // Copyright (c) 2014 Adafruit Industries
2
+ // Author: Tony DiCola
3
+
4
+ // Permission is hereby granted, free of charge, to any person obtaining a copy
5
+ // of this software and associated documentation files (the "Software"), to deal
6
+ // in the Software without restriction, including without limitation the rights
7
+ // to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
8
+ // copies of the Software, and to permit persons to whom the Software is
9
+ // furnished to do so, subject to the following conditions:
10
+
11
+ // The above copyright notice and this permission notice shall be included in all
12
+ // copies or substantial portions of the Software.
13
+
14
+ // THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
15
+ // IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
16
+ // FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
17
+ // AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
18
+ // LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
19
+ // OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
20
+ // SOFTWARE.
21
+ #include <stdlib.h>
22
+
23
+ #include "test_dht_read.h"
24
+
25
// Hardware-free stub of the DHT read routine: always reports fixed values
// so the Python bindings can be exercised without a real sensor.  The
// sensor type and pin are deliberately ignored.  Returns 0 on success,
// -1 if either output pointer is missing.
int test_dht_read(int type, int pin, float* humidity, float* temperature) {
  if (!humidity || !temperature) {
    return -1;
  }
  *humidity = 50.0f;
  *temperature = 42.0f;
  return 0;
}
1076LAB-master/nodes/turtlebot3_bot ADDED
@@ -0,0 +1,219 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ #!/usr/bin/env python
2
+
3
+ # Copyright (c) 2011, Willow Garage, Inc.
4
+ # All rights reserved.
5
+ #
6
+ # Redistribution and use in source and binary forms, with or without
7
+ # modification, are permitted provided that the following conditions are met:
8
+ #
9
+ # * Redistributions of source code must retain the above copyright
10
+ # notice, this list of conditions and the following disclaimer.
11
+ # * Redistributions in binary form must reproduce the above copyright
12
+ # notice, this list of conditions and the following disclaimer in the
13
+ # documentation and/or other materials provided with the distribution.
14
+ # * Neither the name of the Willow Garage, Inc. nor the names of its
15
+ # contributors may be used to endorse or promote products derived from
16
+ # this software without specific prior written permission.
17
+ #
18
+ # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
19
+ # AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
20
+ # IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
21
+ # ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
22
+ # LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
23
+ # CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
24
+ # SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
25
+ # INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
26
+ # CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
27
+ # ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
28
+ # POSSIBILITY OF SUCH DAMAGE.
29
+
30
+ import rospy
31
+ from geometry_msgs.msg import Twist
32
+ import sys, select, os
33
+ import socket
34
+ import time
35
+
36
+
37
+ #socket
38
+ import sys
39
+ import socket
40
+ import time
41
+
42
+ HOST = '192.168.0.121' #137
43
+ PORT =8001
44
+ u = socket.socket(socket.AF_INET,socket.SOCK_DGRAM)
45
+ u.bind((HOST, PORT))
46
+
47
+ #u.sendto('BOT',(HOST,PORT))
48
+ u.settimeout(0.05)
49
+
50
+ if os.name == 'nt':
51
+ import msvcrt
52
+ else:
53
+ import tty, termios
54
+
55
+ #the max linear velocity of turtlebot
56
+ BURGER_MAX_LIN_VEL = 0.22
57
+ BURGER_MAX_ANG_VEL = 2.84
58
+
59
+ WAFFLE_MAX_LIN_VEL = 0.26
60
+ WAFFLE_MAX_ANG_VEL = 1.82
61
+
62
+ #the increased value of velocity when you press the bottom
63
+ LIN_VEL_STEP_SIZE = 0.01
64
+ ANG_VEL_STEP_SIZE = 0.1
65
+
66
+ msg = """
67
+ Control Your TurtleBot3!
68
+ ---------------------------
69
+ Moving around:
70
+ w
71
+ a s d
72
+ x
73
+
74
+ w/x : increase/decrease linear velocity (Burger : ~ 0.22, Waffle and Waffle Pi : ~ 0.26)
75
+ a/d : increase/decrease angular velocity (Burger : ~ 2.84, Waffle and Waffle Pi : ~ 1.82)
76
+
77
+ space key, s : force stop
78
+
79
+ CTRL-C to quit
80
+ """
81
+
82
+ e = """
83
+ Communications Failed
84
+ """
85
+
86
def getKey():
    """Return the next pending command.

    On Windows this is a blocking console read.  On POSIX the terminal is
    put in raw mode, a keypress is polled for up to 0.1 s, and a UDP
    datagram (remote command from socket ``u``) takes precedence over the
    local key.  Returns the datagram, the key character, or '' if neither
    arrived.
    """
    if os.name == 'nt':
        return msvcrt.getch()

    tty.setraw(sys.stdin.fileno())
    try:
        rlist, _, _ = select.select([sys.stdin], [], [], 0.1)
        key = sys.stdin.read(1) if rlist else ''
        try:
            data, addr = u.recvfrom(1024)
            print(data)
            # A remote command overrides whatever was typed locally.
            return data
        except socket.timeout:
            # No datagram this cycle; fall through to the local key.
            pass
        return key
    finally:
        # BUG FIX: the original returned the UDP datagram *before* restoring
        # the terminal, leaving the console stuck in raw mode.  Restoring in
        # a finally block covers every exit path.
        termios.tcsetattr(sys.stdin, termios.TCSADRAIN, settings)
107
+
108
def vels(target_linear_vel, target_angular_vel):
    """Format the current velocity targets for console display."""
    template = "currently:\tlinear vel %s\t angular vel %s "
    return template % (target_linear_vel, target_angular_vel)
110
+
111
def makeSimpleProfile(output, input, slop):
    """Step *output* toward *input* by at most *slop*, never overshooting."""
    if output < input:
        return min(input, output + slop)
    if output > input:
        return max(input, output - slop)
    return input
120
+
121
def constrain(input, low, high):
    """Clamp *input* into the closed interval [low, high]."""
    if input < low:
        return low
    if input > high:
        return high
    return input
130
+
131
def checkLinearLimitVelocity(vel):
    """Clamp a linear velocity to the limits of the current robot model.

    Uses the module-level ``turtlebot3_model`` set at startup; any unknown
    model falls back to the (more conservative) burger limits.
    """
    if turtlebot3_model in ("waffle", "waffle_pi"):
        return constrain(vel, -WAFFLE_MAX_LIN_VEL, WAFFLE_MAX_LIN_VEL)
    return constrain(vel, -BURGER_MAX_LIN_VEL, BURGER_MAX_LIN_VEL)
140
+
141
def checkAngularLimitVelocity(vel):
    """Clamp an angular velocity to the limits of the current robot model.

    Uses the module-level ``turtlebot3_model`` set at startup; any unknown
    model falls back to the burger limits.
    """
    if turtlebot3_model in ("waffle", "waffle_pi"):
        return constrain(vel, -WAFFLE_MAX_ANG_VEL, WAFFLE_MAX_ANG_VEL)
    return constrain(vel, -BURGER_MAX_ANG_VEL, BURGER_MAX_ANG_VEL)
150
+
151
+ if __name__=="__main__":
152
+ if os.name != 'nt':
153
+ settings = termios.tcgetattr(sys.stdin)
154
+
155
+ rospy.init_node('turtlebot3_teleop')
156
+ pub = rospy.Publisher('cmd_vel', Twist, queue_size=10)
157
+
158
+ turtlebot3_model = rospy.get_param("model", "burger")
159
+
160
+ status = 0
161
+ target_linear_vel = 0.0
162
+ target_angular_vel = 0.0
163
+ control_linear_vel = 0.0
164
+ control_angular_vel = 0.0
165
+ status=0
166
+ try:
167
+ print(msg)
168
+ while(1):
169
+ key = getKey()
170
+ if key == 'w' :
171
+ target_linear_vel = checkLinearLimitVelocity(target_linear_vel + LIN_VEL_STEP_SIZE)
172
+ status = status - 1
173
+ print(vels(target_linear_vel,target_angular_vel))
174
+ elif key == 'x' :
175
+ target_linear_vel = checkLinearLimitVelocity(target_linear_vel - LIN_VEL_STEP_SIZE)
176
+ status = status - 1
177
+ print(vels(target_linear_vel,target_angular_vel))
178
+ elif key == 'a' :
179
+ target_angular_vel = checkAngularLimitVelocity(target_angular_vel + ANG_VEL_STEP_SIZE)
180
+ status = status - 1
181
+ print(vels(target_linear_vel,target_angular_vel))
182
+ elif key == 'd' :
183
+ target_angular_vel = checkAngularLimitVelocity(target_angular_vel - ANG_VEL_STEP_SIZE)
184
+ status = status -1
185
+ print(vels(target_linear_vel,target_angular_vel))
186
+ elif key == ' ' or key == 's' :
187
+ target_linear_vel = 0.0
188
+ control_linear_vel = 0.0
189
+ target_angular_vel = 0.0
190
+ control_angular_vel = 0.0
191
+ status = status -1
192
+ print(vels(target_linear_vel, target_angular_vel))
193
+ else:
194
+ if (key == '\x03'):
195
+ sock.close()
196
+ break
197
+
198
+
199
+ twist = Twist()
200
+
201
+ control_linear_vel = makeSimpleProfile(control_linear_vel, target_linear_vel, (LIN_VEL_STEP_SIZE/2.0))
202
+ twist.linear.x = control_linear_vel; twist.linear.y = 0.0; twist.linear.z = 0.0
203
+
204
+ control_angular_vel = makeSimpleProfile(control_angular_vel, target_angular_vel, (ANG_VEL_STEP_SIZE/2.0))
205
+ twist.angular.x = 0.0; twist.angular.y = 0.0; twist.angular.z = control_angular_vel
206
+
207
+ pub.publish(twist)
208
+
209
+ except:
210
+ print(e)
211
+
212
+ finally:
213
+ twist = Twist()
214
+ twist.linear.x = 0.0; twist.linear.y = 0.0; twist.linear.z = 0.0
215
+ twist.angular.x = 0.0; twist.angular.y = 0.0; twist.angular.z = 0.0
216
+ pub.publish(twist)
217
+
218
+ if os.name != 'nt':
219
+ termios.tcsetattr(sys.stdin, termios.TCSADRAIN, settings)
10pics-master/README.rst ADDED
@@ -0,0 +1,38 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ ======
2
+ 10pics
3
+ ======
4
+ This project is based on a team introduction activity: Use 10 pictures to describe yourself!
5
+
6
+ With this project you will be able to choose 10 pictures from your Google Photos Library and display them in a Jupyter notebook.
7
+
8
+ Setup
9
+ ========
10
+ Google Credentials
11
+ ------------------
12
+ In order to be able to access your photos via Google's API you'll need to download a secrets file. The steps below outline how this is done.
13
+
14
+ - Create a client id using these borrowed instructions_. When it comes to choosing the application type, please choose Desktop.
15
+ - Once the client ID is created, download it as ``secrets_file.json``
16
+
17
+ .. _instructions: https://docs.google.com/document/d/1ck1679H8ifmZ_4eVbDeD_-jezIcZ-j6MlaNaeQiz7y0
18
+
19
+ Poetry
20
+ ------
21
+ This project uses poetry. Install ``poetry`` by running the command ``python <(curl -sSL https://raw.githubusercontent.com/sdispater/poetry/master/get-poetry.py)``.
22
+ Once installed, run ``poetry install``, which will create the virtual env with the required packages.
23
+
24
+ Run example
25
+ -----------
26
+ Run ``poetry run jupyter notebook`` to start a local Jupyter notebook, then provide the two path files as indicated.
27
+ - ``secrets_file.json`` as explained above
28
+ - ``credentials.pickle`` will store your session credentials for you, for easy reauthentication
29
+
30
+ When instantiating the client, you will be directed to your browser to authorize your application. Make sure to follow through, even where it says *This app isn't verified*.
31
+
32
+ Contribute
33
+ ==========
34
+ Feel free to create a PR if you'd like to contribute.
35
+
36
+ Please make sure to run ``make lint-check`` before you do!
37
+
38
+
10pics-master/setup.py ADDED
@@ -0,0 +1,14 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ """Setup project."""
2
+ from setuptools import find_packages, setup
3
+
4
+ short_desc = "10 Pictures"
5
+ long_desc = "Show pictures from your Google Photo Library"
6
+
7
+ setup(
8
+ name="ten_pics",
9
+ description=short_desc,
10
+ long_description=long_desc,
11
+ author="Vincent Ketelaars",
12
+ author_email="admin@vincentketelaars.nl",
13
+ packages=find_packages(),
14
+ )
11777-Group11-master/.pipreqs/requirements_pipreqs.txt ADDED
@@ -0,0 +1,31 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
anytree==2.12.1
apex==0.9.10dev
boto3==1.37.20
botocore==1.37.20
ftfy==6.3.1
GitPython==3.1.44
ipdb==0.13.13
ipython==8.18.1
matplotlib==3.10.1
numpy==2.2.4
psutil==7.0.0
ptvsd==4.3.2
PyYAML==6.0.2
regex==2024.11.6
Requests==2.32.3
scikit_learn==1.6.1
sentencepiece==0.2.0
setuptools==74.0.0
six==1.16.0
spacy==3.8.4
tensorboardX==2.6.2.2
tensorflow==2.19.0
torch==2.1.0
tqdm==4.67.1
transformers==4.50.1
11777-Group11-master/attention_weight_vis/bertviz/model_view.js ADDED
@@ -0,0 +1,330 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ /**
2
+ * @fileoverview Transformer Visualization D3 javascript code.
3
+ *
4
+ * Based on: https://github.com/tensorflow/tensor2tensor/blob/master/tensor2tensor/visualization/attention.js
5
+ *
6
+ * Change log:
7
+ *
8
+ * 02/01/19 Jesse Vig Initial implementation
9
+ */
10
+
11
+
12
+ requirejs(['jquery', 'd3'], function($, d3) {
13
+
14
    var params = window.params;  // injected by the Python side before this script runs
    var config = {};             // mutable render state shared by every function below

    // Layout constants (pixel units).
    const MIN_X = 0;
    const MIN_Y = 0;
    const DIV_WIDTH = 970;
    const THUMBNAIL_PADDING = 5;
    const DETAIL_WIDTH = 300;
    const DETAIL_ATTENTION_WIDTH = 140;
    const DETAIL_BOX_WIDTH = 80;
    const DETAIL_BOX_HEIGHT = 20;
    const DETAIL_PADDING = 5;
    const ATTN_PADDING = 0;
    const DETAIL_HEADING_HEIGHT = 47;
    const DETAIL_HEADING_TEXT_SIZE = 15;
    const TEXT_SIZE = 13;
    const LAYER_COLORS = d3.schemeCategory10;  // 10-color palette, one color per layer (mod 10)
31
+
32
    function render() {
        // (Re)draw the full grid of attention thumbnails for the current filter.

        // Set global state variables
        var attData = config.attention[config.filter];
        config.leftText = attData.left_text;    // source tokens (left column)
        config.rightText = attData.right_text;  // target tokens (right column)
        config.attn = attData.attn;             // per-layer, per-head attention weights
        config.numLayers = config.attn.length;
        config.numHeads = config.attn[0].length;
        // Shrink the per-token row height as the number of heads grows so the grid fits.
        config.thumbnailBoxHeight = 7 * (12 / config.numHeads);
        config.thumbnailHeight = Math.max(config.leftText.length, config.rightText.length) * config.thumbnailBoxHeight + 2 * THUMBNAIL_PADDING;
        config.thumbnailWidth = DIV_WIDTH / config.numHeads;
        config.detailHeight = Math.max(config.leftText.length, config.rightText.length) * DETAIL_BOX_HEIGHT + 2 * DETAIL_PADDING + DETAIL_HEADING_HEIGHT;
        config.divHeight = config.numLayers * config.thumbnailHeight;

        // Rebuild the SVG from scratch on every render.
        $("#vis").empty();
        $("#vis").attr("height", config.divHeight);
        config.svg = d3.select("#vis")
            .append('svg')
            .attr("width", DIV_WIDTH)
            .attr("height", config.divHeight)
            .attr("fill", "black");

        // One thumbnail per (layer, head) cell.
        var i;
        var j;
        for (i = 0; i < config.numLayers; i++) {
            for (j = 0; j < config.numHeads; j++) {
                renderThumbnail(i, j);
            }
        }
    }
64
+
65
+ function renderThumbnail(layerIndex, headIndex) {
66
+ var x = headIndex * config.thumbnailWidth;
67
+ var y = layerIndex * config.thumbnailHeight;
68
+ renderThumbnailAttn(x, y, config.attn[layerIndex][headIndex], layerIndex, headIndex);
69
+ }
70
+
71
    function renderDetail(att, layerIndex, headIndex) {
        // Open the expanded attention popup next to the clicked thumbnail,
        // clamped so it stays inside the visualization area.
        var xOffset = .8 * config.thumbnailWidth;
        var maxX = DIV_WIDTH;
        var maxY = config.divHeight;
        var leftPos = (headIndex / config.numHeads) * DIV_WIDTH
        var x = leftPos + THUMBNAIL_PADDING + xOffset;
        if (x < MIN_X) {
            x = MIN_X;
        } else if (x + DETAIL_WIDTH > maxX) {
            // Not enough room on the right: flip the popup to the left of the thumbnail.
            x = leftPos + THUMBNAIL_PADDING - DETAIL_WIDTH + 8;
        }
        // Popup columns, left to right: source tokens | attention lines | target tokens.
        var posLeftText = x;
        var posAttention = posLeftText + DETAIL_BOX_WIDTH;
        var posRightText = posAttention + DETAIL_ATTENTION_WIDTH;
        var thumbnailHeight = Math.max(config.leftText.length, config.rightText.length) * config.thumbnailBoxHeight + 2 * THUMBNAIL_PADDING;
        var yOffset = 20;
        var y = layerIndex * thumbnailHeight + THUMBNAIL_PADDING + yOffset;
        if (y < MIN_Y) {
            y = MIN_Y;
        } else if (y + config.detailHeight > maxY) {
            // Clamp to the bottom edge of the drawing area.
            y = maxY - config.detailHeight;
        }
        renderDetailFrame(x, y, layerIndex);
        renderDetailHeading(x, y, layerIndex, headIndex);
        renderText(config.leftText, "leftText", posLeftText, y + DETAIL_HEADING_HEIGHT, layerIndex);
        renderDetailAttn(posAttention, y + DETAIL_HEADING_HEIGHT, att, layerIndex, headIndex);
        renderText(config.rightText, "rightText", posRightText, y + DETAIL_HEADING_HEIGHT, layerIndex);
    }
99
+
100
+ function renderDetailHeading(x, y, layerIndex, headIndex) {
101
+ var fillColor = getColor(layerIndex);
102
+ config.svg.append("text")
103
+ .classed("detail", true)
104
+ .text('Layer ' + layerIndex + ", Head " + headIndex)
105
+ .attr("font-size", DETAIL_HEADING_TEXT_SIZE + "px")
106
+ .style("cursor", "default")
107
+ .style("-webkit-user-select", "none")
108
+ .style("font-weight", "bold")
109
+ .attr("fill", fillColor)
110
+ .attr("x", x + 87)
111
+ .attr("y", y + 16)
112
+ .attr("height", DETAIL_HEADING_HEIGHT)
113
+ .attr("width", DETAIL_WIDTH)
114
+ .attr("dy", DETAIL_HEADING_TEXT_SIZE);
115
+ }
116
+
117
+ function renderText(text, id, x, y, layerIndex) {
118
+ var tokenContainer = config.svg.append("svg:g")
119
+ .classed("detail", true)
120
+ .selectAll("g")
121
+ .data(text)
122
+ .enter()
123
+ .append("g");
124
+
125
+ var fillColor = getColor(layerIndex);
126
+
127
+ tokenContainer.append("rect")
128
+ .classed("highlight", true)
129
+ .attr("fill", fillColor)
130
+ .style("opacity", 0.0)
131
+ .attr("height", DETAIL_BOX_HEIGHT)
132
+ .attr("width", DETAIL_BOX_WIDTH)
133
+ .attr("x", x)
134
+ .attr("y", function (d, i) {
135
+ return y + i * DETAIL_BOX_HEIGHT - 1;
136
+ });
137
+
138
+ var textContainer = tokenContainer.append("text")
139
+ .classed("token", true)
140
+ .text(function (d) {
141
+ return d;
142
+ })
143
+ .attr("font-size", TEXT_SIZE + "px")
144
+ .style("cursor", "default")
145
+ .style("-webkit-user-select", "none")
146
+ .attr("fill", fillColor)
147
+ .attr("x", x)
148
+ .attr("y", function (d, i) {
149
+ return i * DETAIL_BOX_HEIGHT + y;
150
+ })
151
+ .attr("height", DETAIL_BOX_HEIGHT)
152
+ .attr("width", DETAIL_BOX_WIDTH)
153
+ .attr("dy", TEXT_SIZE);
154
+
155
+ if (id == "leftText") {
156
+ textContainer.style("text-anchor", "end")
157
+ .attr("dx", DETAIL_BOX_WIDTH - 2);
158
+ tokenContainer.on("mouseover", function (d, index) {
159
+ highlightSelection(index);
160
+ });
161
+ tokenContainer.on("mouseleave", function () {
162
+ unhighlightSelection();
163
+ });
164
+ }
165
+ }
166
+
167
    function highlightSelection(index) {
        // Show only the hovered source token's highlight bar and its attention
        // line group; everything else is faded out.
        // NOTE(review): relies on the source-token group carrying id="leftText" —
        // verify renderText actually assigns that id, otherwise this selector
        // matches nothing.
        config.svg.select("#leftText")
            .selectAll(".highlight")
            .style("opacity", function (d, i) {
                return i == index ? 1.0 : 0.0;
            });
        config.svg.selectAll(".attn-line-group")
            .style("opacity", function (d, i) {
                return i == index ? 1.0 : 0.0;
            });
    }
178
+
179
+ function unhighlightSelection() {
180
+ config.svg.select("#leftText")
181
+ .selectAll(".highlight")
182
+ .style("opacity", 0.0);
183
+ config.svg.selectAll(".attn-line-group")
184
+ .style("opacity", 1);
185
+ }
186
+
187
    function renderThumbnailAttn(x, y, att, layerIndex, headIndex) {
        // Draw one small attention pattern: a line from every source-token row
        // to every target-token row, with opacity equal to the attention weight.

        var attnContainer = config.svg.append("svg:g");

        // Background rect; fill/stroke change to mark the selected thumbnail.
        var attnBackground = attnContainer.append("rect")
            .attr("id", 'attn_background_' + layerIndex + "_" + headIndex)
            .classed("attn_background", true)
            .attr("x", x)
            .attr("y", y)
            .attr("height", config.thumbnailHeight)
            .attr("width", config.thumbnailWidth)
            .attr("stroke-width", 2)
            .attr("stroke", getColor(layerIndex))
            .attr("stroke-opacity", 0);
        var x1 = x + THUMBNAIL_PADDING;
        var x2 = x1 + config.thumbnailWidth - 14;
        var y1 = y + THUMBNAIL_PADDING;

        attnContainer.selectAll("g")
            .data(att)
            .enter()
            .append("g") // Add group for each source token
            .attr("source-index", function (d, i) { // Save index of source token
                return i;
            })
            .selectAll("line")
            .data(function (d) { // Loop over all target tokens
                return d;
            })
            .enter() // When entering
            .append("line")
            .attr("x1", x1)
            .attr("y1", function (d) {
                // Read back the source-token index stored on the parent group.
                var sourceIndex = +this.parentNode.getAttribute("source-index");
                return y1 + (sourceIndex + .5) * config.thumbnailBoxHeight;
            })
            .attr("x2", x2)
            .attr("y2", function (d, targetIndex) {
                return y1 + (targetIndex + .5) * config.thumbnailBoxHeight;
            })
            .attr("stroke-width", 2.2)
            .attr("stroke", getColor(layerIndex))
            .attr("stroke-opacity", function (d) {
                return d;  // the attention weight itself is the line opacity
            });

        // Invisible rect on top that captures clicks for the whole thumbnail.
        var clickRegion = attnContainer.append("rect")
            .attr("x", x)
            .attr("y", y)
            .attr("height", config.thumbnailHeight)
            .attr("width", config.thumbnailWidth)
            .style("opacity", 0);

        clickRegion.on("click", function (d, index) {
            // Reset every thumbnail's background, then toggle the detail popup:
            // clicking a new thumbnail opens it; clicking the same one closes it.
            var attnBackgroundOther = config.svg.selectAll(".attn_background");
            attnBackgroundOther.attr("fill", "black");
            attnBackgroundOther.attr("stroke-opacity", 0);

            config.svg.selectAll(".detail").remove();
            if (config.detail_layer != layerIndex || config.detail_head != headIndex) {
                renderDetail(att, layerIndex, headIndex);
                config.detail_layer = layerIndex;
                config.detail_head = headIndex;
                attnBackground.attr("fill", "#202020");
                attnBackground.attr("stroke-opacity", .8);
            } else {
                config.detail_layer = null;
                config.detail_head = null;
                attnBackground.attr("fill", "black");
                attnBackground.attr("stroke-opacity", 0);
            }
        });

        clickRegion.on("mouseover", function (d) {
            d3.select(this).style("cursor", "pointer");
        });
    }
264
+
265
+ function renderDetailFrame(x, y, layerIndex) {
266
+ var detailFrame = config.svg.append("rect")
267
+ .classed("detail", true)
268
+ .attr("x", x)
269
+ .attr("y", y)
270
+ .attr("height", config.detailHeight)
271
+ .attr("width", DETAIL_WIDTH)
272
+ .style("opacity", 1)
273
+ .attr("fill", "white")
274
+ .attr("stroke-width", 2)
275
+ .attr("stroke-opacity", 0.7)
276
+ .attr("stroke", getColor(layerIndex));
277
+ }
278
+
279
    function renderDetailAttn(x, y, att, layerIndex) {
        // Draw the full-size attention lines inside the detail popup.
        // NOTE(review): renderDetail passes a fifth argument (headIndex) that this
        // signature drops; it is unused here — confirm that is intentional.
        var attnContainer = config.svg.append("svg:g")
            .classed("detail", true)
            .attr("pointer-events", "none");  // let hover events reach the token rows underneath
        attnContainer.selectAll("g")
            .data(att)
            .enter()
            .append("g") // Add group for each source token
            .classed('attn-line-group', true)
            .attr("source-index", function (d, i) { // Save index of source token
                return i;
            })
            .selectAll("line")
            .data(function (d) { // Loop over all target tokens
                return d;
            })
            .enter()
            .append("line")
            .attr("x1", x + ATTN_PADDING)
            .attr("y1", function (d) {
                // Recover the source-token index stored on the parent group.
                var sourceIndex = +this.parentNode.getAttribute("source-index");
                return y + (sourceIndex + .5) * DETAIL_BOX_HEIGHT;
            })
            .attr("x2", x + DETAIL_ATTENTION_WIDTH - ATTN_PADDING)
            .attr("y2", function (d, targetIndex) {
                return y + (targetIndex + .5) * DETAIL_BOX_HEIGHT;
            })
            .attr("stroke-width", 2)
            .attr("stroke", getColor(layerIndex))
            .attr("stroke-opacity", function (d) {
                return d;  // attention weight doubles as line opacity
            });
    }
312
+
313
+ function getColor(layer) {
314
+ return LAYER_COLORS[layer % 10];
315
+ }
316
+
317
    function initialize() {
        // Pull the attention payload and starting filter out of the globals the
        // Python side injects via window.params.
        config.attention = params['attention'];
        config.filter = params['default_filter'];
    }
321
+
322
    // Re-render whenever the user picks a different attention filter.
    $("#filter").on('change', function (e) {
        config.filter = e.currentTarget.value;
        render();
    });

    initialize();
    render();
329
+
330
+ });
11777-Group11-master/attention_weight_vis/bertviz/neuron_view.py ADDED
@@ -0,0 +1,257 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ # coding=utf-8
2
+ # Copyright 2018 The Tensor2Tensor Authors.
3
+ #
4
+ # Licensed under the Apache License, Version 2.0 (the "License");
5
+ # you may not use this file except in compliance with the License.
6
+ # You may obtain a copy of the License at
7
+ #
8
+ # http://www.apache.org/licenses/LICENSE-2.0
9
+ #
10
+ # Unless required by applicable law or agreed to in writing, software
11
+ # distributed under the License is distributed on an "AS IS" BASIS,
12
+ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13
+ # See the License for the specific language governing permissions and
14
+ # limitations under the License.
15
+ #
16
+ # Change log
17
+ # 12/12/18 Jesse Vig Adapted to BERT model
18
+ # 12/19/18 Jesse Vig Assorted cleanup. Changed orientation of attention matrices. Updated comments.
19
+
20
+
21
+ """Module for postprocessing and displaying transformer attentions.
22
+
23
+ This module is designed to be called from an ipython notebook.
24
+ """
25
+
26
+ import json
27
+ from IPython.core.display import display, HTML, Javascript
28
+ import os
29
+ import torch
30
+ from collections import defaultdict
31
+
32
def show(model, model_type, tokenizer, sentence_a, sentence_b=None):
    """Display the interactive neuron view for a sentence (or sentence pair) in a notebook.

    Args:
        model: model instance passed straight through to ``get_attention``
        model_type: model family name; ``get_attention`` validates it
            ('bert', 'gpt2', 'xlnet', 'roberta')
        tokenizer: tokenizer passed through to ``get_attention``
        sentence_a: Sentence A string (required by ``get_attention``)
        sentence_b: optional Sentence B string; when given, the rendered HTML
            additionally offers the A/B attention-filter dropdown
    """
    if sentence_b:
        vis_html = """
          <span style="user-select:none">
            Layer: <select id="layer"></select>
            Head: <select id="att_head"></select>
            Attention: <select id="filter">
              <option value="all">All</option>
              <option value="aa">Sentence A -> Sentence A</option>
              <option value="ab">Sentence A -> Sentence B</option>
              <option value="ba">Sentence B -> Sentence A</option>
              <option value="bb">Sentence B -> Sentence B</option>
            </select>
          </span>
          <div id='vis'></div>
        """
    else:
        vis_html = """
          <span style="user-select:none">
            Layer: <select id="layer"></select>
            Head: <select id="att_head"></select>
          </span>
          <div id='vis'></div>
        """
    display(HTML(vis_html))
    __location__ = os.path.realpath(
        os.path.join(os.getcwd(), os.path.dirname(__file__)))
    # Fix: close the JS file handle deterministically (the original used
    # open(...).read(), which leaks the handle until garbage collection).
    with open(os.path.join(__location__, 'neuron_view.js')) as js_file:
        vis_js = js_file.read()
    attn_data = get_attention(model, model_type, tokenizer, sentence_a, sentence_b, include_queries_and_keys=True)
    if model_type == 'gpt2':
        bidirectional = False
    else:
        bidirectional = True
    params = {
        'attention': attn_data,
        'default_filter': "all",
        'bidirectional': bidirectional
    }
    # Hand the attention payload to the browser, then run the D3 renderer.
    display(Javascript('window.params = %s' % json.dumps(params)))
    display(Javascript(vis_js))
72
+
73
+
74
def get_attention(model, model_type, tokenizer, sentence_a, sentence_b=None, include_queries_and_keys=False):
    """Compute representation of attention to pass to the d3 visualization

    Args:
        model: pytorch-transformers model
        model_type: type of model. Valid values 'bert', 'gpt2', 'xlnet', 'roberta'
        tokenizer: pytorch-transformers tokenizer
        sentence_a: Sentence A string
        sentence_b: Sentence B string
        include_queries_and_keys: Indicates whether to include queries/keys in results

    Returns:
        Dictionary of attn representations with the structure:
        {
            'all': All attention (source = AB, target = AB)
            'aa': Sentence A self-attention (source = A, target = A) (if sentence_b is not None)
            'bb': Sentence B self-attention (source = B, target = B) (if sentence_b is not None)
            'ab': Sentence A -> Sentence B attention (source = A, target = B) (if sentence_b is not None)
            'ba': Sentence B -> Sentence A attention (source = B, target = A) (if sentence_b is not None)
        }
        where each value is a dictionary:
        {
            'left_text': list of source tokens, to be displayed on the left of the vis
            'right_text': list of target tokens, to be displayed on the right of the vis
            'attn': list of attention matrices, one for each layer. Each has shape [num_heads, source_seq_len, target_seq_len]
            'queries' (optional): list of query vector arrays, one for each layer. Each has shape (num_heads, source_seq_len, vector_size)
            'keys' (optional): list of key vector arrays, one for each layer. Each has shape (num_heads, target_seq_len, vector_size)
        }
    """

    if model_type not in ('bert', 'gpt2', 'xlnet', 'roberta'):
        raise ValueError("Invalid model type:", model_type)
    if not sentence_a:
        raise ValueError("Sentence A is required")
    is_sentence_pair = bool(sentence_b)
    if is_sentence_pair and model_type not in ('bert', 'roberta', 'xlnet'):
        raise ValueError(f'Model {model_type} does not support sentence pairs')
    if is_sentence_pair and model_type == 'xlnet':
        raise NotImplementedError("Sentence-pair inputs for XLNet not currently supported.")

    # Prepare inputs to model
    tokens_a = None
    tokens_b = None
    token_type_ids = None
    if not is_sentence_pair:  # Single sentence
        if model_type in ('bert', 'roberta'):
            tokens_a = [tokenizer.cls_token] + tokenizer.tokenize(sentence_a) + [tokenizer.sep_token]
        elif model_type == 'xlnet':
            # XLNet places its special tokens at the END of the sequence.
            tokens_a = tokenizer.tokenize(sentence_a) + [tokenizer.sep_token] + [tokenizer.cls_token]
        else:
            tokens_a = tokenizer.tokenize(sentence_a)
    else:
        if model_type == 'bert':
            tokens_a = [tokenizer.cls_token] + tokenizer.tokenize(sentence_a) + [tokenizer.sep_token]
            tokens_b = tokenizer.tokenize(sentence_b) + [tokenizer.sep_token]
            # Segment ids: 0 for every A position, 1 for every B position.
            token_type_ids = torch.LongTensor([[0] * len(tokens_a) + [1] * len(tokens_b)])
        elif model_type == 'roberta':
            tokens_a = [tokenizer.cls_token] + tokenizer.tokenize(sentence_a) + [tokenizer.sep_token]
            tokens_b = [tokenizer.sep_token] + tokenizer.tokenize(sentence_b) + [tokenizer.sep_token]
            # Roberta doesn't use token type embeddings per https://github.com/huggingface/pytorch-transformers/blob/master/pytorch_transformers/convert_roberta_checkpoint_to_pytorch.py
        else:
            tokens_b = tokenizer.tokenize(sentence_b)

    token_ids = tokenizer.convert_tokens_to_ids(tokens_a + (tokens_b if tokens_b else []))
    tokens_tensor = torch.tensor(token_ids).unsqueeze(0)  # add batch dimension of 1

    # Call model to get attention data
    model.eval()
    if token_type_ids is not None:
        output = model(tokens_tensor, token_type_ids=token_type_ids)
    else:
        output = model(tokens_tensor)
    # NOTE(review): assumes the (modified) model returns the per-layer attention
    # data as the LAST element of its output tuple — verify against the model
    # classes shipped alongside this module.
    attn_data_list = output[-1]

    # Populate map with attn data and, optionally, query, key data
    attn_dict = defaultdict(list)
    if include_queries_and_keys:
        queries_dict = defaultdict(list)
        keys_dict = defaultdict(list)

    if is_sentence_pair:
        slice_a = slice(0, len(tokens_a))  # Positions corresponding to sentence A in input
        slice_b = slice(len(tokens_a), len(tokens_a) + len(tokens_b))  # Position corresponding to sentence B in input
    for layer, attn_data in enumerate(attn_data_list):
        # Process attention
        attn = attn_data['attn'][0]  # assume batch_size=1; shape = [num_heads, source_seq_len, target_seq_len]
        attn_dict['all'].append(attn.tolist())
        if is_sentence_pair:
            attn_dict['aa'].append(
                attn[:, slice_a, slice_a].tolist())  # Append A->A attention for layer, across all heads
            attn_dict['bb'].append(
                attn[:, slice_b, slice_b].tolist())  # Append B->B attention for layer, across all heads
            attn_dict['ab'].append(
                attn[:, slice_a, slice_b].tolist())  # Append A->B attention for layer, across all heads
            attn_dict['ba'].append(
                attn[:, slice_b, slice_a].tolist())  # Append B->A attention for layer, across all heads
        # Process queries and keys
        if include_queries_and_keys:
            queries = attn_data['queries'][0]  # assume batch_size=1; shape = [num_heads, seq_len, vector_size]
            keys = attn_data['keys'][0]  # assume batch_size=1; shape = [num_heads, seq_len, vector_size]
            queries_dict['all'].append(queries.tolist())
            keys_dict['all'].append(keys.tolist())
            if is_sentence_pair:
                queries_dict['a'].append(queries[:, slice_a, :].tolist())
                keys_dict['a'].append(keys[:, slice_a, :].tolist())
                queries_dict['b'].append(queries[:, slice_b, :].tolist())
                keys_dict['b'].append(keys[:, slice_b, :].tolist())

    # Clean up tokens for display: word-boundary markers become spaces and
    # (except for GPT-2) the tokenizer's special tokens become [SEP]/[CLS].
    tokens_a = format_special_chars(tokens_a)
    if tokens_b:
        tokens_b = format_special_chars(tokens_b)
    if model_type != 'gpt2':
        tokens_a = format_delimiters(tokens_a, tokenizer)
        if tokens_b:
            tokens_b = format_delimiters(tokens_b, tokenizer)

    results = {
        'all': {
            'attn': attn_dict['all'],
            'left_text': tokens_a + (tokens_b if tokens_b else []),
            'right_text': tokens_a + (tokens_b if tokens_b else [])
        }
    }
    if is_sentence_pair:
        results.update({
            'aa': {
                'attn': attn_dict['aa'],
                'left_text': tokens_a,
                'right_text': tokens_a
            },
            'bb': {
                'attn': attn_dict['bb'],
                'left_text': tokens_b,
                'right_text': tokens_b
            },
            'ab': {
                'attn': attn_dict['ab'],
                'left_text': tokens_a,
                'right_text': tokens_b
            },
            'ba': {
                'attn': attn_dict['ba'],
                'left_text': tokens_b,
                'right_text': tokens_a
            }
        })
    if include_queries_and_keys:
        results['all'].update({
            'queries': queries_dict['all'],
            'keys': keys_dict['all'],
        })
        if is_sentence_pair:
            results['aa'].update({
                'queries': queries_dict['a'],
                'keys': keys_dict['a'],
            })
            results['bb'].update({
                'queries': queries_dict['b'],
                'keys': keys_dict['b'],
            })
            # Cross-sentence views pair the source side's queries with the
            # target side's keys.
            results['ab'].update({
                'queries': queries_dict['a'],
                'keys': keys_dict['b'],
            })
            results['ba'].update({
                'queries': queries_dict['b'],
                'keys': keys_dict['a'],
            })
    return results
243
+
244
+
245
def format_special_chars(tokens):
    """Replace GPT-2/SentencePiece word-boundary markers with plain spaces."""
    cleaned = []
    for token in tokens:
        cleaned.append(token.replace('Ġ', ' ').replace('▁', ' '))
    return cleaned
247
+
248
+
249
def format_delimiters(tokens, tokenizer):
    """Rewrite the tokenizer's separator/classifier tokens as display labels [SEP]/[CLS]."""
    sep = tokenizer.sep_token
    cls = tokenizer.cls_token
    result = []
    for token in tokens:
        # A falsy special token (None/empty) means the tokenizer has none; skip.
        if sep:
            token = token.replace(sep, '[SEP]')
        if cls:
            token = token.replace(cls, '[CLS]')
        result.append(token)
    return result
11777-Group11-master/attention_weight_vis/bertviz/transformers_neuron_view/__init__.py ADDED
@@ -0,0 +1,46 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ __version__ = "1.1.0"
2
+ from .tokenization_bert import BertTokenizer, BasicTokenizer, WordpieceTokenizer
3
+ from .tokenization_openai import OpenAIGPTTokenizer
4
+ from .tokenization_transfo_xl import (TransfoXLTokenizer, TransfoXLCorpus)
5
+ from .tokenization_gpt2 import GPT2Tokenizer
6
+ from .tokenization_xlnet import XLNetTokenizer, SPIECE_UNDERLINE
7
+ from .tokenization_xlm import XLMTokenizer
8
+ from .tokenization_roberta import RobertaTokenizer
9
+
10
+ from .tokenization_utils import (PreTrainedTokenizer)
11
+
12
+ from .modeling_bert import (BertConfig, BertPreTrainedModel, BertModel, BertForPreTraining,
13
+ BertForMaskedLM, BertForNextSentencePrediction,
14
+ BertForSequenceClassification, BertForMultipleChoice,
15
+ BertForTokenClassification, BertForQuestionAnswering,
16
+ load_tf_weights_in_bert, BERT_PRETRAINED_MODEL_ARCHIVE_MAP,
17
+ BERT_PRETRAINED_CONFIG_ARCHIVE_MAP)
18
+ from .modeling_openai import (OpenAIGPTConfig, OpenAIGPTPreTrainedModel, OpenAIGPTModel,
19
+ OpenAIGPTLMHeadModel, OpenAIGPTDoubleHeadsModel,
20
+ load_tf_weights_in_openai_gpt, OPENAI_GPT_PRETRAINED_CONFIG_ARCHIVE_MAP,
21
+ OPENAI_GPT_PRETRAINED_MODEL_ARCHIVE_MAP)
22
+ from .modeling_transfo_xl import (TransfoXLConfig, TransfoXLModel, TransfoXLLMHeadModel,
23
+ load_tf_weights_in_transfo_xl, TRANSFO_XL_PRETRAINED_CONFIG_ARCHIVE_MAP,
24
+ TRANSFO_XL_PRETRAINED_MODEL_ARCHIVE_MAP)
25
+ from .modeling_gpt2 import (GPT2Config, GPT2PreTrainedModel, GPT2Model,
26
+ GPT2LMHeadModel, GPT2DoubleHeadsModel,
27
+ load_tf_weights_in_gpt2, GPT2_PRETRAINED_CONFIG_ARCHIVE_MAP,
28
+ GPT2_PRETRAINED_MODEL_ARCHIVE_MAP)
29
+ from .modeling_xlnet import (XLNetConfig,
30
+ XLNetPreTrainedModel, XLNetModel, XLNetLMHeadModel,
31
+ XLNetForSequenceClassification, XLNetForQuestionAnswering,
32
+ load_tf_weights_in_xlnet, XLNET_PRETRAINED_CONFIG_ARCHIVE_MAP,
33
+ XLNET_PRETRAINED_MODEL_ARCHIVE_MAP)
34
+ from .modeling_xlm import (XLMConfig, XLMPreTrainedModel , XLMModel,
35
+ XLMWithLMHeadModel, XLMForSequenceClassification,
36
+ XLMForQuestionAnswering, XLM_PRETRAINED_CONFIG_ARCHIVE_MAP,
37
+ XLM_PRETRAINED_MODEL_ARCHIVE_MAP)
38
+ from .modeling_roberta import (RobertaConfig, RobertaForMaskedLM, RobertaModel, RobertaForSequenceClassification,
39
+ ROBERTA_PRETRAINED_CONFIG_ARCHIVE_MAP, ROBERTA_PRETRAINED_MODEL_ARCHIVE_MAP)
40
+ from .modeling_utils import (WEIGHTS_NAME, CONFIG_NAME, TF_WEIGHTS_NAME,
41
+ PretrainedConfig, PreTrainedModel, prune_layer, Conv1D)
42
+
43
+ # from .optimization import (AdamW, ConstantLRSchedule, WarmupConstantSchedule, WarmupCosineSchedule,
44
+ # WarmupCosineWithHardRestartsSchedule, WarmupLinearSchedule)
45
+
46
+ from .file_utils import (PYTORCH_TRANSFORMERS_CACHE, PYTORCH_PRETRAINED_BERT_CACHE, cached_path)
11777-Group11-master/get_model.sh ADDED
@@ -0,0 +1,9 @@
 
 
 
 
 
 
 
 
 
 
1
#!/usr/bin/env bash
# Download and unpack the pretrained Oscar model checkpoint into ./model.
set -euo pipefail  # abort on any error or unset variable instead of continuing with a partial download

export MODEL_NAME=base-vg-labels
export MODEL_DIR=model
sudo apt-get update
sudo apt-get install -y unzip          # -y: non-interactive, don't hang CI prompts
mkdir -p "$MODEL_DIR"                  # -p: don't fail if the directory already exists
wget "https://biglmdiag.blob.core.windows.net/oscar/pretrained_models/$MODEL_NAME.zip"
unzip "$MODEL_NAME.zip" -d "$MODEL_DIR"
rm "$MODEL_NAME.zip"                   # free disk space once extracted
echo DONE
11777-Group11-master/oscar/distillation/run_squad_w_distillation.py ADDED
@@ -0,0 +1,866 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ # coding=utf-8
2
+ # Copyright 2018 The Google AI Language Team Authors and The HuggingFace Inc. team.
3
+ # Copyright (c) 2018, NVIDIA CORPORATION. All rights reserved.
4
+ #
5
+ # Licensed under the Apache License, Version 2.0 (the "License");
6
+ # you may not use this file except in compliance with the License.
7
+ # You may obtain a copy of the License at
8
+ #
9
+ # http://www.apache.org/licenses/LICENSE-2.0
10
+ #
11
+ # Unless required by applicable law or agreed to in writing, software
12
+ # distributed under the License is distributed on an "AS IS" BASIS,
13
+ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
14
+ # See the License for the specific language governing permissions and
15
+ # limitations under the License.
16
+ """ This is the exact same script as `examples/question-answering/run_squad.py` (as of 2020, January 8th) with an additional and optional step of distillation."""
17
+
18
+ import argparse
19
+ import glob
20
+ import logging
21
+ import os
22
+ import random
23
+ import timeit
24
+
25
+ import numpy as np
26
+ import torch
27
+ import torch.nn as nn
28
+ import torch.nn.functional as F
29
+ from torch.utils.data import DataLoader, RandomSampler, SequentialSampler
30
+ from torch.utils.data.distributed import DistributedSampler
31
+ from tqdm import tqdm, trange
32
+
33
+ from transformers import (
34
+ WEIGHTS_NAME,
35
+ AdamW,
36
+ BertConfig,
37
+ BertForQuestionAnswering,
38
+ BertTokenizer,
39
+ DistilBertConfig,
40
+ DistilBertForQuestionAnswering,
41
+ DistilBertTokenizer,
42
+ RobertaConfig,
43
+ RobertaForQuestionAnswering,
44
+ RobertaTokenizer,
45
+ XLMConfig,
46
+ XLMForQuestionAnswering,
47
+ XLMTokenizer,
48
+ XLNetConfig,
49
+ XLNetForQuestionAnswering,
50
+ XLNetTokenizer,
51
+ get_linear_schedule_with_warmup,
52
+ squad_convert_examples_to_features,
53
+ )
54
+ from transformers.data.metrics.squad_metrics import (
55
+ compute_predictions_log_probs,
56
+ compute_predictions_logits,
57
+ squad_evaluate,
58
+ )
59
+ from transformers.data.processors.squad import SquadResult, SquadV1Processor, SquadV2Processor
60
+
61
+
62
+ try:
63
+ from torch.utils.tensorboard import SummaryWriter
64
+ except ImportError:
65
+ from tensorboardX import SummaryWriter
66
+
67
+
68
+ logger = logging.getLogger(__name__)
69
+
70
+
71
# Registry of supported model types: name -> (config class, question-answering
# model class, tokenizer class).
MODEL_CLASSES = {
    "bert": (BertConfig, BertForQuestionAnswering, BertTokenizer),
    "xlnet": (XLNetConfig, XLNetForQuestionAnswering, XLNetTokenizer),
    "xlm": (XLMConfig, XLMForQuestionAnswering, XLMTokenizer),
    "distilbert": (DistilBertConfig, DistilBertForQuestionAnswering, DistilBertTokenizer),
    "roberta": (RobertaConfig, RobertaForQuestionAnswering, RobertaTokenizer),
}
78
+
79
+
80
def set_seed(args):
    """Seed the Python, NumPy and PyTorch RNGs from ``args.seed`` for reproducibility."""
    seed = args.seed
    for seeder in (random.seed, np.random.seed, torch.manual_seed):
        seeder(seed)
    if args.n_gpu > 0:
        # Also seed every visible CUDA device.
        torch.cuda.manual_seed_all(seed)
86
+
87
+
88
def to_list(tensor):
    """Return ``tensor`` as a plain (nested) Python list, detached and moved to CPU."""
    detached = tensor.detach()
    return detached.cpu().tolist()
90
+
91
+
92
def train(args, train_dataset, model, tokenizer, teacher=None):
    """Train (and optionally distill) the student model on SQuAD.

    When `teacher` is given, the total loss is
    args.alpha_ce * KL(student || teacher logits, softened by args.temperature)
    + args.alpha_squad * (original SQuAD loss).

    Returns:
        (global_step, tr_loss / global_step): total optimizer steps taken and
        the average accumulated training loss.
    """
    # TensorBoard writer only on the main process (rank 0 or single-process).
    if args.local_rank in [-1, 0]:
        tb_writer = SummaryWriter()

    args.train_batch_size = args.per_gpu_train_batch_size * max(1, args.n_gpu)
    train_sampler = RandomSampler(train_dataset) if args.local_rank == -1 else DistributedSampler(train_dataset)
    train_dataloader = DataLoader(train_dataset, sampler=train_sampler, batch_size=args.train_batch_size)

    # max_steps, when set, overrides num_train_epochs.
    if args.max_steps > 0:
        t_total = args.max_steps
        args.num_train_epochs = args.max_steps // (len(train_dataloader) // args.gradient_accumulation_steps) + 1
    else:
        t_total = len(train_dataloader) // args.gradient_accumulation_steps * args.num_train_epochs

    # Prepare optimizer and schedule (linear warmup and decay).
    # Biases and LayerNorm weights are excluded from weight decay.
    no_decay = ["bias", "LayerNorm.weight"]
    optimizer_grouped_parameters = [
        {
            "params": [p for n, p in model.named_parameters() if not any(nd in n for nd in no_decay)],
            "weight_decay": args.weight_decay,
        },
        {"params": [p for n, p in model.named_parameters() if any(nd in n for nd in no_decay)], "weight_decay": 0.0},
    ]
    optimizer = AdamW(optimizer_grouped_parameters, lr=args.learning_rate, eps=args.adam_epsilon)
    scheduler = get_linear_schedule_with_warmup(
        optimizer, num_warmup_steps=args.warmup_steps, num_training_steps=t_total
    )

    # Check if saved optimizer or scheduler states exist (resuming a checkpoint).
    if os.path.isfile(os.path.join(args.model_name_or_path, "optimizer.pt")) and os.path.isfile(
        os.path.join(args.model_name_or_path, "scheduler.pt")
    ):
        # Load in optimizer and scheduler states
        optimizer.load_state_dict(torch.load(os.path.join(args.model_name_or_path, "optimizer.pt")))
        scheduler.load_state_dict(torch.load(os.path.join(args.model_name_or_path, "scheduler.pt")))

    if args.fp16:
        try:
            from apex import amp
        except ImportError:
            raise ImportError("Please install apex from https://www.github.com/nvidia/apex to use fp16 training.")

        model, optimizer = amp.initialize(model, optimizer, opt_level=args.fp16_opt_level)

    # multi-gpu training (should be after apex fp16 initialization)
    if args.n_gpu > 1:
        model = torch.nn.DataParallel(model)

    # Distributed training (should be after apex fp16 initialization)
    if args.local_rank != -1:
        model = torch.nn.parallel.DistributedDataParallel(
            model, device_ids=[args.local_rank], output_device=args.local_rank, find_unused_parameters=True
        )

    # Train!
    logger.info("***** Running training *****")
    logger.info(" Num examples = %d", len(train_dataset))
    logger.info(" Num Epochs = %d", args.num_train_epochs)
    logger.info(" Instantaneous batch size per GPU = %d", args.per_gpu_train_batch_size)
    logger.info(
        " Total train batch size (w. parallel, distributed & accumulation) = %d",
        args.train_batch_size
        * args.gradient_accumulation_steps
        * (torch.distributed.get_world_size() if args.local_rank != -1 else 1),
    )
    logger.info(" Gradient Accumulation steps = %d", args.gradient_accumulation_steps)
    logger.info(" Total optimization steps = %d", t_total)

    global_step = 1
    epochs_trained = 0
    steps_trained_in_current_epoch = 0
    # Check if continuing training from a checkpoint
    if os.path.exists(args.model_name_or_path):
        try:
            # set global_step to gobal_step of last saved checkpoint from model path
            # (assumes a "checkpoint-<step>" directory naming scheme).
            checkpoint_suffix = args.model_name_or_path.split("-")[-1].split("/")[0]
            global_step = int(checkpoint_suffix)
            epochs_trained = global_step // (len(train_dataloader) // args.gradient_accumulation_steps)
            steps_trained_in_current_epoch = global_step % (len(train_dataloader) // args.gradient_accumulation_steps)

            logger.info(" Continuing training from checkpoint, will skip to saved global_step")
            logger.info(" Continuing training from epoch %d", epochs_trained)
            logger.info(" Continuing training from global step %d", global_step)
            logger.info(" Will skip the first %d steps in the first epoch", steps_trained_in_current_epoch)
        except ValueError:
            # Path did not encode a step number: start from scratch.
            logger.info(" Starting fine-tuning.")

    tr_loss, logging_loss = 0.0, 0.0
    model.zero_grad()
    train_iterator = trange(
        epochs_trained, int(args.num_train_epochs), desc="Epoch", disable=args.local_rank not in [-1, 0]
    )
    # Added here for reproductibility
    set_seed(args)

    for _ in train_iterator:
        epoch_iterator = tqdm(train_dataloader, desc="Iteration", disable=args.local_rank not in [-1, 0])
        for step, batch in enumerate(epoch_iterator):

            # Skip past any already trained steps if resuming training
            if steps_trained_in_current_epoch > 0:
                steps_trained_in_current_epoch -= 1
                continue

            model.train()
            if teacher is not None:
                teacher.eval()
            batch = tuple(t.to(args.device) for t in batch)

            # Batch layout (from squad_convert_examples_to_features):
            # 0=input_ids, 1=attention_mask, 2=token_type_ids,
            # 3=start_positions, 4=end_positions, then model-specific extras.
            inputs = {
                "input_ids": batch[0],
                "attention_mask": batch[1],
                "start_positions": batch[3],
                "end_positions": batch[4],
            }
            if args.model_type != "distilbert":
                inputs["token_type_ids"] = None if args.model_type == "xlm" else batch[2]
            if args.model_type in ["xlnet", "xlm"]:
                inputs.update({"cls_index": batch[5], "p_mask": batch[6]})
                if args.version_2_with_negative:
                    inputs.update({"is_impossible": batch[7]})
            outputs = model(**inputs)
            loss, start_logits_stu, end_logits_stu = outputs

            # Distillation loss
            if teacher is not None:
                if "token_type_ids" not in inputs:
                    inputs["token_type_ids"] = None if args.teacher_type == "xlm" else batch[2]
                with torch.no_grad():
                    start_logits_tea, end_logits_tea = teacher(
                        input_ids=inputs["input_ids"],
                        token_type_ids=inputs["token_type_ids"],
                        attention_mask=inputs["attention_mask"],
                    )
                assert start_logits_tea.size() == start_logits_stu.size()
                assert end_logits_tea.size() == end_logits_stu.size()

                # Temperature-scaled KL divergence; the T**2 factor keeps
                # gradient magnitudes comparable across temperatures.
                loss_fct = nn.KLDivLoss(reduction="batchmean")
                loss_start = (
                    loss_fct(
                        F.log_softmax(start_logits_stu / args.temperature, dim=-1),
                        F.softmax(start_logits_tea / args.temperature, dim=-1),
                    )
                    * (args.temperature ** 2)
                )
                loss_end = (
                    loss_fct(
                        F.log_softmax(end_logits_stu / args.temperature, dim=-1),
                        F.softmax(end_logits_tea / args.temperature, dim=-1),
                    )
                    * (args.temperature ** 2)
                )
                loss_ce = (loss_start + loss_end) / 2.0

                loss = args.alpha_ce * loss_ce + args.alpha_squad * loss

            if args.n_gpu > 1:
                loss = loss.mean()  # mean() to average on multi-gpu parallel (not distributed) training
            if args.gradient_accumulation_steps > 1:
                loss = loss / args.gradient_accumulation_steps

            if args.fp16:
                with amp.scale_loss(loss, optimizer) as scaled_loss:
                    scaled_loss.backward()
            else:
                loss.backward()

            tr_loss += loss.item()
            # Only step the optimizer every gradient_accumulation_steps batches.
            if (step + 1) % args.gradient_accumulation_steps == 0:
                if args.fp16:
                    torch.nn.utils.clip_grad_norm_(amp.master_params(optimizer), args.max_grad_norm)
                else:
                    torch.nn.utils.clip_grad_norm_(model.parameters(), args.max_grad_norm)

                optimizer.step()
                scheduler.step()  # Update learning rate schedule
                model.zero_grad()
                global_step += 1

                # Log metrics
                if args.local_rank in [-1, 0] and args.logging_steps > 0 and global_step % args.logging_steps == 0:
                    # Only evaluate when single GPU otherwise metrics may not average well
                    if args.local_rank == -1 and args.evaluate_during_training:
                        results = evaluate(args, model, tokenizer)
                        for key, value in results.items():
                            tb_writer.add_scalar("eval_{}".format(key), value, global_step)
                    tb_writer.add_scalar("lr", scheduler.get_lr()[0], global_step)
                    tb_writer.add_scalar("loss", (tr_loss - logging_loss) / args.logging_steps, global_step)
                    logging_loss = tr_loss

                if args.local_rank in [-1, 0] and args.save_steps > 0 and global_step % args.save_steps == 0:
                    # Save model checkpoint
                    output_dir = os.path.join(args.output_dir, "checkpoint-{}".format(global_step))
                    if not os.path.exists(output_dir):
                        os.makedirs(output_dir)
                    model_to_save = (
                        model.module if hasattr(model, "module") else model
                    )  # Take care of distributed/parallel training
                    model_to_save.save_pretrained(output_dir)
                    tokenizer.save_pretrained(output_dir)

                    torch.save(args, os.path.join(output_dir, "training_args.bin"))
                    logger.info("Saving model checkpoint to %s", output_dir)

                    torch.save(optimizer.state_dict(), os.path.join(output_dir, "optimizer.pt"))
                    torch.save(scheduler.state_dict(), os.path.join(output_dir, "scheduler.pt"))
                    logger.info("Saving optimizer and scheduler states to %s", output_dir)

            if args.max_steps > 0 and global_step > args.max_steps:
                epoch_iterator.close()
                break
        if args.max_steps > 0 and global_step > args.max_steps:
            train_iterator.close()
            break

    if args.local_rank in [-1, 0]:
        tb_writer.close()

    return global_step, tr_loss / global_step
312
+
313
+
314
def evaluate(args, model, tokenizer, prefix=""):
    """Run evaluation on the dev set and return SQuAD metrics.

    Writes predictions_{prefix}.json / nbest_predictions_{prefix}.json
    (and null_odds_{prefix}.json for SQuAD v2) into args.output_dir, then
    scores predictions with the official SQuAD evaluation.

    Returns:
        dict of metric name -> value from squad_evaluate (e.g. exact/F1).
    """
    dataset, examples, features = load_and_cache_examples(args, tokenizer, evaluate=True, output_examples=True)

    if not os.path.exists(args.output_dir) and args.local_rank in [-1, 0]:
        os.makedirs(args.output_dir)

    args.eval_batch_size = args.per_gpu_eval_batch_size * max(1, args.n_gpu)

    # Note that DistributedSampler samples randomly
    eval_sampler = SequentialSampler(dataset)
    eval_dataloader = DataLoader(dataset, sampler=eval_sampler, batch_size=args.eval_batch_size)

    # multi-gpu evaluate
    if args.n_gpu > 1 and not isinstance(model, torch.nn.DataParallel):
        model = torch.nn.DataParallel(model)

    # Eval!
    logger.info("***** Running evaluation {} *****".format(prefix))
    logger.info(" Num examples = %d", len(dataset))
    logger.info(" Batch size = %d", args.eval_batch_size)

    all_results = []
    start_time = timeit.default_timer()

    for batch in tqdm(eval_dataloader, desc="Evaluating"):
        model.eval()
        batch = tuple(t.to(args.device) for t in batch)

        with torch.no_grad():
            inputs = {"input_ids": batch[0], "attention_mask": batch[1]}
            if args.model_type != "distilbert":
                inputs["token_type_ids"] = None if args.model_type == "xlm" else batch[2]  # XLM don't use segment_ids
            # batch[3] holds the feature index, used to map outputs back
            # to their SquadFeatures entry below.
            example_indices = batch[3]
            if args.model_type in ["xlnet", "xlm"]:
                inputs.update({"cls_index": batch[4], "p_mask": batch[5]})

            outputs = model(**inputs)

        for i, example_index in enumerate(example_indices):
            eval_feature = features[example_index.item()]
            unique_id = int(eval_feature.unique_id)

            output = [to_list(output[i]) for output in outputs]

            # Some models (XLNet, XLM) use 5 arguments for their predictions, while the other "simpler"
            # models only use two.
            if len(output) >= 5:
                start_logits = output[0]
                start_top_index = output[1]
                end_logits = output[2]
                end_top_index = output[3]
                cls_logits = output[4]

                result = SquadResult(
                    unique_id,
                    start_logits,
                    end_logits,
                    start_top_index=start_top_index,
                    end_top_index=end_top_index,
                    cls_logits=cls_logits,
                )

            else:
                start_logits, end_logits = output
                result = SquadResult(unique_id, start_logits, end_logits)

            all_results.append(result)

    evalTime = timeit.default_timer() - start_time
    logger.info(" Evaluation done in total %f secs (%f sec per example)", evalTime, evalTime / len(dataset))

    # Compute predictions
    output_prediction_file = os.path.join(args.output_dir, "predictions_{}.json".format(prefix))
    output_nbest_file = os.path.join(args.output_dir, "nbest_predictions_{}.json".format(prefix))

    if args.version_2_with_negative:
        output_null_log_odds_file = os.path.join(args.output_dir, "null_odds_{}.json".format(prefix))
    else:
        output_null_log_odds_file = None

    if args.model_type in ["xlnet", "xlm"]:
        # XLNet uses a more complex post-processing procedure
        predictions = compute_predictions_log_probs(
            examples,
            features,
            all_results,
            args.n_best_size,
            args.max_answer_length,
            output_prediction_file,
            output_nbest_file,
            output_null_log_odds_file,
            model.config.start_n_top,
            model.config.end_n_top,
            args.version_2_with_negative,
            tokenizer,
            args.verbose_logging,
        )
    else:
        predictions = compute_predictions_logits(
            examples,
            features,
            all_results,
            args.n_best_size,
            args.max_answer_length,
            args.do_lower_case,
            output_prediction_file,
            output_nbest_file,
            output_null_log_odds_file,
            args.verbose_logging,
            args.version_2_with_negative,
            args.null_score_diff_threshold,
            tokenizer,
        )

    # Compute the F1 and exact scores.
    results = squad_evaluate(examples, predictions)
    return results
431
+
432
+
433
def load_and_cache_examples(args, tokenizer, evaluate=False, output_examples=False):
    """Load SQuAD features from a cache file, or build and cache them.

    Uses torch.distributed.barrier() so that in distributed training only
    rank 0 builds the cache while the other ranks wait, then read it.

    Returns:
        dataset, or (dataset, examples, features) when output_examples=True.
    """
    if args.local_rank not in [-1, 0] and not evaluate:
        # Make sure only the first process in distributed training process the dataset, and the others will use the cache
        torch.distributed.barrier()

    # Load data features from cache or dataset file
    input_file = args.predict_file if evaluate else args.train_file
    cached_features_file = os.path.join(
        os.path.dirname(input_file),
        "cached_distillation_{}_{}_{}".format(
            "dev" if evaluate else "train",
            list(filter(None, args.model_name_or_path.split("/"))).pop(),
            str(args.max_seq_length),
        ),
    )
    if os.path.exists(cached_features_file) and not args.overwrite_cache:
        logger.info("Loading features from cached file %s", cached_features_file)
        features_and_dataset = torch.load(cached_features_file)

        try:
            features, dataset, examples = (
                features_and_dataset["features"],
                features_and_dataset["dataset"],
                features_and_dataset["examples"],
            )
        except KeyError:
            # Older caches stored a different structure; force a rebuild.
            raise DeprecationWarning(
                "You seem to be loading features from an older version of this script please delete the "
                "file %s in order for it to be created again" % cached_features_file
            )
    else:
        logger.info("Creating features from dataset file at %s", input_file)
        processor = SquadV2Processor() if args.version_2_with_negative else SquadV1Processor()
        if evaluate:
            examples = processor.get_dev_examples(args.data_dir, filename=args.predict_file)
        else:
            examples = processor.get_train_examples(args.data_dir, filename=args.train_file)

        features, dataset = squad_convert_examples_to_features(
            examples=examples,
            tokenizer=tokenizer,
            max_seq_length=args.max_seq_length,
            doc_stride=args.doc_stride,
            max_query_length=args.max_query_length,
            is_training=not evaluate,
            return_dataset="pt",
            threads=args.threads,
        )

        if args.local_rank in [-1, 0]:
            logger.info("Saving features into cached file %s", cached_features_file)
            torch.save({"features": features, "dataset": dataset, "examples": examples}, cached_features_file)

    if args.local_rank == 0 and not evaluate:
        # Make sure only the first process in distributed training process the dataset, and the others will use the cache
        torch.distributed.barrier()

    if output_examples:
        return dataset, examples, features
    return dataset
493
+
494
+
495
def main():
    """CLI entry point: parse arguments, set up devices/logging, then run
    training and/or evaluation of a SQuAD model with optional distillation.

    Returns:
        dict of evaluation results (empty when --do_eval is not set).
    """
    parser = argparse.ArgumentParser()

    # Required parameters
    parser.add_argument(
        "--model_type",
        default=None,
        type=str,
        required=True,
        help="Model type selected in the list: " + ", ".join(MODEL_CLASSES.keys()),
    )
    parser.add_argument(
        "--model_name_or_path",
        default=None,
        type=str,
        required=True,
        help="Path to pretrained model or model identifier from huggingface.co/models",
    )
    parser.add_argument(
        "--output_dir",
        default=None,
        type=str,
        required=True,
        help="The output directory where the model checkpoints and predictions will be written.",
    )

    # Distillation parameters (optional)
    parser.add_argument(
        "--teacher_type",
        default=None,
        type=str,
        help="Teacher type. Teacher tokenizer and student (model) tokenizer must output the same tokenization. Only for distillation.",
    )
    parser.add_argument(
        "--teacher_name_or_path",
        default=None,
        type=str,
        help="Path to the already SQuAD fine-tuned teacher model. Only for distillation.",
    )
    parser.add_argument(
        "--alpha_ce", default=0.5, type=float, help="Distillation loss linear weight. Only for distillation."
    )
    parser.add_argument(
        "--alpha_squad", default=0.5, type=float, help="True SQuAD loss linear weight. Only for distillation."
    )
    parser.add_argument(
        "--temperature", default=2.0, type=float, help="Distillation temperature. Only for distillation."
    )

    # Other parameters
    parser.add_argument(
        "--data_dir",
        default=None,
        type=str,
        help="The input data dir. Should contain the .json files for the task."
        + "If no data dir or train/predict files are specified, will run with tensorflow_datasets.",
    )
    parser.add_argument(
        "--train_file",
        default=None,
        type=str,
        help="The input training file. If a data dir is specified, will look for the file there"
        + "If no data dir or train/predict files are specified, will run with tensorflow_datasets.",
    )
    parser.add_argument(
        "--predict_file",
        default=None,
        type=str,
        help="The input evaluation file. If a data dir is specified, will look for the file there"
        + "If no data dir or train/predict files are specified, will run with tensorflow_datasets.",
    )
    parser.add_argument(
        "--config_name", default="", type=str, help="Pretrained config name or path if not the same as model_name"
    )
    parser.add_argument(
        "--tokenizer_name",
        default="",
        type=str,
        help="Pretrained tokenizer name or path if not the same as model_name",
    )
    parser.add_argument(
        "--cache_dir",
        default="",
        type=str,
        help="Where do you want to store the pre-trained models downloaded from s3",
    )

    parser.add_argument(
        "--version_2_with_negative",
        action="store_true",
        help="If true, the SQuAD examples contain some that do not have an answer.",
    )
    parser.add_argument(
        "--null_score_diff_threshold",
        type=float,
        default=0.0,
        help="If null_score - best_non_null is greater than the threshold predict null.",
    )

    parser.add_argument(
        "--max_seq_length",
        default=384,
        type=int,
        help="The maximum total input sequence length after WordPiece tokenization. Sequences "
        "longer than this will be truncated, and sequences shorter than this will be padded.",
    )
    parser.add_argument(
        "--doc_stride",
        default=128,
        type=int,
        help="When splitting up a long document into chunks, how much stride to take between chunks.",
    )
    parser.add_argument(
        "--max_query_length",
        default=64,
        type=int,
        help="The maximum number of tokens for the question. Questions longer than this will "
        "be truncated to this length.",
    )
    parser.add_argument("--do_train", action="store_true", help="Whether to run training.")
    parser.add_argument("--do_eval", action="store_true", help="Whether to run eval on the dev set.")
    parser.add_argument(
        "--evaluate_during_training", action="store_true", help="Rul evaluation during training at each logging step."
    )
    parser.add_argument(
        "--do_lower_case", action="store_true", help="Set this flag if you are using an uncased model."
    )

    parser.add_argument("--per_gpu_train_batch_size", default=8, type=int, help="Batch size per GPU/CPU for training.")
    parser.add_argument(
        "--per_gpu_eval_batch_size", default=8, type=int, help="Batch size per GPU/CPU for evaluation."
    )
    parser.add_argument("--learning_rate", default=5e-5, type=float, help="The initial learning rate for Adam.")
    parser.add_argument(
        "--gradient_accumulation_steps",
        type=int,
        default=1,
        help="Number of updates steps to accumulate before performing a backward/update pass.",
    )
    parser.add_argument("--weight_decay", default=0.0, type=float, help="Weight decay if we apply some.")
    parser.add_argument("--adam_epsilon", default=1e-8, type=float, help="Epsilon for Adam optimizer.")
    parser.add_argument("--max_grad_norm", default=1.0, type=float, help="Max gradient norm.")
    parser.add_argument(
        "--num_train_epochs", default=3.0, type=float, help="Total number of training epochs to perform."
    )
    parser.add_argument(
        "--max_steps",
        default=-1,
        type=int,
        help="If > 0: set total number of training steps to perform. Override num_train_epochs.",
    )
    parser.add_argument("--warmup_steps", default=0, type=int, help="Linear warmup over warmup_steps.")
    parser.add_argument(
        "--n_best_size",
        default=20,
        type=int,
        help="The total number of n-best predictions to generate in the nbest_predictions.json output file.",
    )
    parser.add_argument(
        "--max_answer_length",
        default=30,
        type=int,
        help="The maximum length of an answer that can be generated. This is needed because the start "
        "and end predictions are not conditioned on one another.",
    )
    parser.add_argument(
        "--verbose_logging",
        action="store_true",
        help="If true, all of the warnings related to data processing will be printed. "
        "A number of warnings are expected for a normal SQuAD evaluation.",
    )

    parser.add_argument("--logging_steps", type=int, default=50, help="Log every X updates steps.")
    parser.add_argument("--save_steps", type=int, default=50, help="Save checkpoint every X updates steps.")
    parser.add_argument(
        "--eval_all_checkpoints",
        action="store_true",
        help="Evaluate all checkpoints starting with the same prefix as model_name ending and ending with step number",
    )
    parser.add_argument("--no_cuda", action="store_true", help="Whether not to use CUDA when available")
    parser.add_argument(
        "--overwrite_output_dir", action="store_true", help="Overwrite the content of the output directory"
    )
    parser.add_argument(
        "--overwrite_cache", action="store_true", help="Overwrite the cached training and evaluation sets"
    )
    parser.add_argument("--seed", type=int, default=42, help="random seed for initialization")

    parser.add_argument("--local_rank", type=int, default=-1, help="local_rank for distributed training on gpus")
    parser.add_argument(
        "--fp16",
        action="store_true",
        help="Whether to use 16-bit (mixed) precision (through NVIDIA apex) instead of 32-bit",
    )
    parser.add_argument(
        "--fp16_opt_level",
        type=str,
        default="O1",
        help="For fp16: Apex AMP optimization level selected in ['O0', 'O1', 'O2', and 'O3']."
        "See details at https://nvidia.github.io/apex/amp.html",
    )
    parser.add_argument("--server_ip", type=str, default="", help="Can be used for distant debugging.")
    parser.add_argument("--server_port", type=str, default="", help="Can be used for distant debugging.")

    parser.add_argument("--threads", type=int, default=1, help="multiple threads for converting example to features")
    args = parser.parse_args()

    # Refuse to clobber a non-empty output dir unless explicitly allowed.
    if (
        os.path.exists(args.output_dir)
        and os.listdir(args.output_dir)
        and args.do_train
        and not args.overwrite_output_dir
    ):
        raise ValueError(
            "Output directory ({}) already exists and is not empty. Use --overwrite_output_dir to overcome.".format(
                args.output_dir
            )
        )

    # Setup distant debugging if needed
    if args.server_ip and args.server_port:
        # Distant debugging - see https://code.visualstudio.com/docs/python/debugging#_attach-to-a-local-script
        import ptvsd

        print("Waiting for debugger attach")
        ptvsd.enable_attach(address=(args.server_ip, args.server_port), redirect_output=True)
        ptvsd.wait_for_attach()

    # Setup CUDA, GPU & distributed training
    if args.local_rank == -1 or args.no_cuda:
        device = torch.device("cuda" if torch.cuda.is_available() and not args.no_cuda else "cpu")
        args.n_gpu = 0 if args.no_cuda else torch.cuda.device_count()
    else:  # Initializes the distributed backend which will take care of sychronizing nodes/GPUs
        torch.cuda.set_device(args.local_rank)
        device = torch.device("cuda", args.local_rank)
        torch.distributed.init_process_group(backend="nccl")
        args.n_gpu = 1
    args.device = device

    # Setup logging
    logging.basicConfig(
        format="%(asctime)s - %(levelname)s - %(name)s - %(message)s",
        datefmt="%m/%d/%Y %H:%M:%S",
        level=logging.INFO if args.local_rank in [-1, 0] else logging.WARN,
    )
    logger.warning(
        "Process rank: %s, device: %s, n_gpu: %s, distributed training: %s, 16-bits training: %s",
        args.local_rank,
        device,
        args.n_gpu,
        bool(args.local_rank != -1),
        args.fp16,
    )

    # Set seed
    set_seed(args)

    # Load pretrained model and tokenizer
    if args.local_rank not in [-1, 0]:
        # Make sure only the first process in distributed training will download model & vocab
        torch.distributed.barrier()

    args.model_type = args.model_type.lower()
    config_class, model_class, tokenizer_class = MODEL_CLASSES[args.model_type]
    config = config_class.from_pretrained(
        args.config_name if args.config_name else args.model_name_or_path,
        cache_dir=args.cache_dir if args.cache_dir else None,
    )
    tokenizer = tokenizer_class.from_pretrained(
        args.tokenizer_name if args.tokenizer_name else args.model_name_or_path,
        do_lower_case=args.do_lower_case,
        cache_dir=args.cache_dir if args.cache_dir else None,
    )
    model = model_class.from_pretrained(
        args.model_name_or_path,
        from_tf=bool(".ckpt" in args.model_name_or_path),
        config=config,
        cache_dir=args.cache_dir if args.cache_dir else None,
    )

    # Optionally load a fine-tuned teacher model for distillation.
    if args.teacher_type is not None:
        assert args.teacher_name_or_path is not None
        assert args.alpha_ce > 0.0
        assert args.alpha_ce + args.alpha_squad > 0.0
        assert args.teacher_type != "distilbert", "We constraint teachers not to be of type DistilBERT."
        teacher_config_class, teacher_model_class, _ = MODEL_CLASSES[args.teacher_type]
        teacher_config = teacher_config_class.from_pretrained(
            args.teacher_name_or_path, cache_dir=args.cache_dir if args.cache_dir else None
        )
        teacher = teacher_model_class.from_pretrained(
            args.teacher_name_or_path, config=teacher_config, cache_dir=args.cache_dir if args.cache_dir else None
        )
        teacher.to(args.device)
    else:
        teacher = None

    if args.local_rank == 0:
        # Make sure only the first process in distributed training will download model & vocab
        torch.distributed.barrier()

    model.to(args.device)

    logger.info("Training/evaluation parameters %s", args)

    # Before we do anything with models, we want to ensure that we get fp16 execution of torch.einsum if args.fp16 is set.
    # Otherwise it'll default to "promote" mode, and we'll get fp32 operations. Note that running `--fp16_opt_level="O2"` will
    # remove the need for this code, but it is still valid.
    if args.fp16:
        try:
            import apex

            apex.amp.register_half_function(torch, "einsum")
        except ImportError:
            raise ImportError("Please install apex from https://www.github.com/nvidia/apex to use fp16 training.")

    # Training
    if args.do_train:
        train_dataset = load_and_cache_examples(args, tokenizer, evaluate=False, output_examples=False)
        global_step, tr_loss = train(args, train_dataset, model, tokenizer, teacher=teacher)
        logger.info(" global_step = %s, average loss = %s", global_step, tr_loss)

    # Save the trained model and the tokenizer
    if args.do_train and (args.local_rank == -1 or torch.distributed.get_rank() == 0):
        logger.info("Saving model checkpoint to %s", args.output_dir)
        # Save a trained model, configuration and tokenizer using `save_pretrained()`.
        # They can then be reloaded using `from_pretrained()`
        model_to_save = (
            model.module if hasattr(model, "module") else model
        )  # Take care of distributed/parallel training
        model_to_save.save_pretrained(args.output_dir)
        tokenizer.save_pretrained(args.output_dir)

        # Good practice: save your training arguments together with the trained model
        torch.save(args, os.path.join(args.output_dir, "training_args.bin"))

        # Load a trained model and vocabulary that you have fine-tuned
        model = model_class.from_pretrained(args.output_dir)
        tokenizer = tokenizer_class.from_pretrained(args.output_dir, do_lower_case=args.do_lower_case)
        model.to(args.device)

    # Evaluation - we can ask to evaluate all the checkpoints (sub-directories) in a directory
    results = {}
    if args.do_eval and args.local_rank in [-1, 0]:
        if args.do_train:
            logger.info("Loading checkpoints saved during training for evaluation")
        checkpoints = [args.output_dir]
        if args.eval_all_checkpoints:
            checkpoints = list(
                os.path.dirname(c) for c in sorted(glob.glob(args.output_dir + "/**/" + WEIGHTS_NAME, recursive=True))
            )

        logger.info("Evaluate the following checkpoints: %s", checkpoints)

        for checkpoint in checkpoints:
            # Reload the model
            global_step = checkpoint.split("-")[-1] if len(checkpoints) > 1 else ""
            model = model_class.from_pretrained(checkpoint)
            model.to(args.device)

            # Evaluate
            result = evaluate(args, model, tokenizer, prefix=global_step)

            result = dict((k + ("_{}".format(global_step) if global_step else ""), v) for k, v in result.items())
            results.update(result)

    logger.info("Results: {}".format(results))

    return results
863
+
864
+
865
# Script entry point.
if __name__ == "__main__":
    main()
11777-Group11-master/setup.py ADDED
@@ -0,0 +1,47 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
#!/usr/bin/python

from __future__ import print_function
import os
import sys
import re
import os.path as op
from setuptools import find_packages, setup

# change directory to this module path so relative file reads (README,
# version file) resolve regardless of the caller's working directory.
try:
    this_file = __file__
except NameError:
    # __file__ is undefined in some execution modes (e.g. exec);
    # fall back to the invoked script path.
    this_file = sys.argv[0]
this_file = os.path.abspath(this_file)
if op.dirname(this_file):
    os.chdir(op.dirname(this_file))
# Absolute directory containing this setup.py, used by readme().
script_dir = os.getcwd()
19
+
20
def readme(fname):
    """Read text out of a file in the same directory as setup.py.

    Fixed: the original called open(...).read() without closing the file,
    leaking the handle until garbage collection; a context manager now
    guarantees prompt closure.
    """
    with open(op.join(script_dir, fname)) as f:
        return f.read()
24
+
25
+
26
def find_version(fname):
    """Return the value of `__version__ = '...'` found in *fname*.

    Raises RuntimeError when no version assignment is present.
    """
    contents = readme(fname)
    match = re.search(r"^__version__ = ['\"]([^'\"]*)['\"]", contents, re.M)
    if not match:
        raise RuntimeError("Unable to find version string.")
    return match.group(1)
33
+
34
+
35
# Package metadata: version is read from oscar/__init__.py and the long
# description from README.md at build time.
setup(
    name="oscar",
    version=find_version("oscar/__init__.py"),
    url='https://github.com/xjli/Oscar',
    description="Oscar for vision and language tasks",
    long_description=readme('README.md'),
    packages=find_packages(),
    classifiers=[
        'Intended Audience :: Developers',
        "Programming Language :: Python",
        'Topic :: Software Development',
    ]
)