body_hash
stringlengths
64
64
body
stringlengths
23
109k
docstring
stringlengths
1
57k
path
stringlengths
4
198
name
stringlengths
1
115
repository_name
stringlengths
7
111
repository_stars
float64
0
191k
lang
stringclasses
1 value
body_without_docstring
stringlengths
14
108k
unified
stringlengths
45
133k
dc25a309006a204c3376064b6116afcd7e547e77ccf4fd41a945c3fe43da8c21
@staticmethod def _is_2_element(value): '\n Helper function to check a variable to see if it\n is a 2-tuple of Quantity objects.\n ' return ((len(value) == 2) and isinstance(value[0], u.Quantity) and isinstance(value[1], u.Quantity))
Helper function to check a variable to see if it is a 2-tuple of Quantity objects.
specutils/spectra/spectral_region.py
_is_2_element
havok2063/specutils
118
python
@staticmethod def _is_2_element(value): '\n Helper function to check a variable to see if it\n is a 2-tuple of Quantity objects.\n ' return ((len(value) == 2) and isinstance(value[0], u.Quantity) and isinstance(value[1], u.Quantity))
@staticmethod def _is_2_element(value): '\n Helper function to check a variable to see if it\n is a 2-tuple of Quantity objects.\n ' return ((len(value) == 2) and isinstance(value[0], u.Quantity) and isinstance(value[1], u.Quantity))<|docstring|>Helper function to check a variable to see if it is a 2-tuple of Quantity objects.<|endoftext|>
e58f0cfc6c8a0f1939877a76022908b83386898195512929fe08c3765e4ef06e
def _reorder(self): '\n Re-order the list based on lower bounds.\n ' self._subregions.sort(key=(lambda k: k[0]))
Re-order the list based on lower bounds.
specutils/spectra/spectral_region.py
_reorder
havok2063/specutils
118
python
def _reorder(self): '\n \n ' self._subregions.sort(key=(lambda k: k[0]))
def _reorder(self): '\n \n ' self._subregions.sort(key=(lambda k: k[0]))<|docstring|>Re-order the list based on lower bounds.<|endoftext|>
618a94fd815f93ff1eec7ffa82af00752fa5cc90ec5d317d11e57a0fcec60453
@property def subregions(self): '\n An iterable over ``(lower, upper)`` tuples that are each of the\n sub-regions.\n ' return self._subregions
An iterable over ``(lower, upper)`` tuples that are each of the sub-regions.
specutils/spectra/spectral_region.py
subregions
havok2063/specutils
118
python
@property def subregions(self): '\n An iterable over ``(lower, upper)`` tuples that are each of the\n sub-regions.\n ' return self._subregions
@property def subregions(self): '\n An iterable over ``(lower, upper)`` tuples that are each of the\n sub-regions.\n ' return self._subregions<|docstring|>An iterable over ``(lower, upper)`` tuples that are each of the sub-regions.<|endoftext|>
53dc0df84abc93674b04f9938b1e9be2c3eee07ffe1d0c6c8a0368f23868facc
@property def bounds(self): '\n Compute the lower and upper extent of the SpectralRegion.\n ' return (self.lower, self.upper)
Compute the lower and upper extent of the SpectralRegion.
specutils/spectra/spectral_region.py
bounds
havok2063/specutils
118
python
@property def bounds(self): '\n \n ' return (self.lower, self.upper)
@property def bounds(self): '\n \n ' return (self.lower, self.upper)<|docstring|>Compute the lower and upper extent of the SpectralRegion.<|endoftext|>
671e45bb9b135686a854ae6d8291dd00f744611b4cfb6d0006ea99e9fccfa34a
@property def lower(self): '\n The most minimum value of the sub-regions.\n\n The sub-regions are ordered based on the lower bound, so the\n lower bound for this instance is the lower bound of the first\n sub-region.\n ' return self._subregions[0][0]
The most minimum value of the sub-regions. The sub-regions are ordered based on the lower bound, so the lower bound for this instance is the lower bound of the first sub-region.
specutils/spectra/spectral_region.py
lower
havok2063/specutils
118
python
@property def lower(self): '\n The most minimum value of the sub-regions.\n\n The sub-regions are ordered based on the lower bound, so the\n lower bound for this instance is the lower bound of the first\n sub-region.\n ' return self._subregions[0][0]
@property def lower(self): '\n The most minimum value of the sub-regions.\n\n The sub-regions are ordered based on the lower bound, so the\n lower bound for this instance is the lower bound of the first\n sub-region.\n ' return self._subregions[0][0]<|docstring|>The most minimum value of the sub-regions. The sub-regions are ordered based on the lower bound, so the lower bound for this instance is the lower bound of the first sub-region.<|endoftext|>
9fd5039f5e89e48e72d16c4894ddcc6b477f9f8d1c4f15f1f3ca350f1c9e4984
@property def upper(self): '\n The most maximum value of the sub-regions.\n\n The sub-regions are ordered based on the lower bound, but the\n upper bound might not be the upper bound of the last sub-region\n so we have to look for it.\n ' return max((x[1] for x in self._subregions))
The most maximum value of the sub-regions. The sub-regions are ordered based on the lower bound, but the upper bound might not be the upper bound of the last sub-region so we have to look for it.
specutils/spectra/spectral_region.py
upper
havok2063/specutils
118
python
@property def upper(self): '\n The most maximum value of the sub-regions.\n\n The sub-regions are ordered based on the lower bound, but the\n upper bound might not be the upper bound of the last sub-region\n so we have to look for it.\n ' return max((x[1] for x in self._subregions))
@property def upper(self): '\n The most maximum value of the sub-regions.\n\n The sub-regions are ordered based on the lower bound, but the\n upper bound might not be the upper bound of the last sub-region\n so we have to look for it.\n ' return max((x[1] for x in self._subregions))<|docstring|>The most maximum value of the sub-regions. The sub-regions are ordered based on the lower bound, but the upper bound might not be the upper bound of the last sub-region so we have to look for it.<|endoftext|>
74fbb83426c3f6d2e4f45308fd7944215367584b557e40ab703e0c5902aebc7a
def invert_from_spectrum(self, spectrum): '\n Invert a SpectralRegion based on the extent of the\n input spectrum.\n\n See notes in SpectralRegion.invert() method.\n ' return self.invert(spectrum.spectral_axis[0], spectrum.spectral_axis[(- 1)])
Invert a SpectralRegion based on the extent of the input spectrum. See notes in SpectralRegion.invert() method.
specutils/spectra/spectral_region.py
invert_from_spectrum
havok2063/specutils
118
python
def invert_from_spectrum(self, spectrum): '\n Invert a SpectralRegion based on the extent of the\n input spectrum.\n\n See notes in SpectralRegion.invert() method.\n ' return self.invert(spectrum.spectral_axis[0], spectrum.spectral_axis[(- 1)])
def invert_from_spectrum(self, spectrum): '\n Invert a SpectralRegion based on the extent of the\n input spectrum.\n\n See notes in SpectralRegion.invert() method.\n ' return self.invert(spectrum.spectral_axis[0], spectrum.spectral_axis[(- 1)])<|docstring|>Invert a SpectralRegion based on the extent of the input spectrum. See notes in SpectralRegion.invert() method.<|endoftext|>
e7dc8863da2c71689543070f836ccebc36f8a0aa99ef577908898328638eca03
def invert(self, lower_bound, upper_bound): '\n Invert this spectral region. That is, given a set of sub-regions this\n object defines, create a new `SpectralRegion` such that the sub-regions\n are defined in the new one as regions *not* in this `SpectralRegion`.\n\n Parameters\n ----------\n lower_bound : `~astropy.units.Quantity`\n The lower bound of the region. Can be scalar with pixel or any\n valid ``spectral_axis`` unit\n upper_bound : `~astropy.units.Quantity`\n The upper bound of the region. Can be scalar with pixel or any\n valid ``spectral_axis`` unit\n\n Returns\n -------\n spectral_region : `~specutils.SpectralRegion`\n Spectral region of the non-selected regions\n\n Notes\n -----\n This is applicable if, for example, a `SpectralRegion` has sub-regions\n defined for peaks in a spectrum and then one wants to create a\n `SpectralRegion` defined as all the *non*-peaks, then one could use this\n function.\n\n As an example, assume this SpectralRegion is defined as\n ``sr = SpectralRegion([(0.45*u.um, 0.6*u.um), (0.8*u.um, 0.9*u.um)])``.\n If we call ``sr_invert = sr.invert(0.3*u.um, 1.0*u.um)`` then\n ``sr_invert`` will be\n ``SpectralRegion([(0.3*u.um, 0.45*u.um), (0.6*u.um, 0.8*u.um), (0.9*u.um, 1*u.um)])``\n\n ' min_num = ((- sys.maxsize) - 1) max_num = sys.maxsize rs = (self._subregions + [((min_num * u.um), lower_bound), (upper_bound, (max_num * u.um))]) sorted_regions = sorted(rs, key=(lambda k: k[0])) merged = [] for higher in sorted_regions: if (not merged): merged.append(higher) else: lower = merged[(- 1)] if (higher[0] <= lower[1]): upper_bound = max(lower[1], higher[1]) merged[(- 1)] = (lower[0], upper_bound) else: merged.append(higher) newlist = list(itertools.chain.from_iterable(merged)) newlist = newlist[1:(- 1)] return SpectralRegion([(x, y) for (x, y) in zip(newlist[0::2], newlist[1::2])])
Invert this spectral region. That is, given a set of sub-regions this object defines, create a new `SpectralRegion` such that the sub-regions are defined in the new one as regions *not* in this `SpectralRegion`. Parameters ---------- lower_bound : `~astropy.units.Quantity` The lower bound of the region. Can be scalar with pixel or any valid ``spectral_axis`` unit upper_bound : `~astropy.units.Quantity` The upper bound of the region. Can be scalar with pixel or any valid ``spectral_axis`` unit Returns ------- spectral_region : `~specutils.SpectralRegion` Spectral region of the non-selected regions Notes ----- This is applicable if, for example, a `SpectralRegion` has sub-regions defined for peaks in a spectrum and then one wants to create a `SpectralRegion` defined as all the *non*-peaks, then one could use this function. As an example, assume this SpectralRegion is defined as ``sr = SpectralRegion([(0.45*u.um, 0.6*u.um), (0.8*u.um, 0.9*u.um)])``. If we call ``sr_invert = sr.invert(0.3*u.um, 1.0*u.um)`` then ``sr_invert`` will be ``SpectralRegion([(0.3*u.um, 0.45*u.um), (0.6*u.um, 0.8*u.um), (0.9*u.um, 1*u.um)])``
specutils/spectra/spectral_region.py
invert
havok2063/specutils
118
python
def invert(self, lower_bound, upper_bound): '\n Invert this spectral region. That is, given a set of sub-regions this\n object defines, create a new `SpectralRegion` such that the sub-regions\n are defined in the new one as regions *not* in this `SpectralRegion`.\n\n Parameters\n ----------\n lower_bound : `~astropy.units.Quantity`\n The lower bound of the region. Can be scalar with pixel or any\n valid ``spectral_axis`` unit\n upper_bound : `~astropy.units.Quantity`\n The upper bound of the region. Can be scalar with pixel or any\n valid ``spectral_axis`` unit\n\n Returns\n -------\n spectral_region : `~specutils.SpectralRegion`\n Spectral region of the non-selected regions\n\n Notes\n -----\n This is applicable if, for example, a `SpectralRegion` has sub-regions\n defined for peaks in a spectrum and then one wants to create a\n `SpectralRegion` defined as all the *non*-peaks, then one could use this\n function.\n\n As an example, assume this SpectralRegion is defined as\n ``sr = SpectralRegion([(0.45*u.um, 0.6*u.um), (0.8*u.um, 0.9*u.um)])``.\n If we call ``sr_invert = sr.invert(0.3*u.um, 1.0*u.um)`` then\n ``sr_invert`` will be\n ``SpectralRegion([(0.3*u.um, 0.45*u.um), (0.6*u.um, 0.8*u.um), (0.9*u.um, 1*u.um)])``\n\n ' min_num = ((- sys.maxsize) - 1) max_num = sys.maxsize rs = (self._subregions + [((min_num * u.um), lower_bound), (upper_bound, (max_num * u.um))]) sorted_regions = sorted(rs, key=(lambda k: k[0])) merged = [] for higher in sorted_regions: if (not merged): merged.append(higher) else: lower = merged[(- 1)] if (higher[0] <= lower[1]): upper_bound = max(lower[1], higher[1]) merged[(- 1)] = (lower[0], upper_bound) else: merged.append(higher) newlist = list(itertools.chain.from_iterable(merged)) newlist = newlist[1:(- 1)] return SpectralRegion([(x, y) for (x, y) in zip(newlist[0::2], newlist[1::2])])
def invert(self, lower_bound, upper_bound): '\n Invert this spectral region. That is, given a set of sub-regions this\n object defines, create a new `SpectralRegion` such that the sub-regions\n are defined in the new one as regions *not* in this `SpectralRegion`.\n\n Parameters\n ----------\n lower_bound : `~astropy.units.Quantity`\n The lower bound of the region. Can be scalar with pixel or any\n valid ``spectral_axis`` unit\n upper_bound : `~astropy.units.Quantity`\n The upper bound of the region. Can be scalar with pixel or any\n valid ``spectral_axis`` unit\n\n Returns\n -------\n spectral_region : `~specutils.SpectralRegion`\n Spectral region of the non-selected regions\n\n Notes\n -----\n This is applicable if, for example, a `SpectralRegion` has sub-regions\n defined for peaks in a spectrum and then one wants to create a\n `SpectralRegion` defined as all the *non*-peaks, then one could use this\n function.\n\n As an example, assume this SpectralRegion is defined as\n ``sr = SpectralRegion([(0.45*u.um, 0.6*u.um), (0.8*u.um, 0.9*u.um)])``.\n If we call ``sr_invert = sr.invert(0.3*u.um, 1.0*u.um)`` then\n ``sr_invert`` will be\n ``SpectralRegion([(0.3*u.um, 0.45*u.um), (0.6*u.um, 0.8*u.um), (0.9*u.um, 1*u.um)])``\n\n ' min_num = ((- sys.maxsize) - 1) max_num = sys.maxsize rs = (self._subregions + [((min_num * u.um), lower_bound), (upper_bound, (max_num * u.um))]) sorted_regions = sorted(rs, key=(lambda k: k[0])) merged = [] for higher in sorted_regions: if (not merged): merged.append(higher) else: lower = merged[(- 1)] if (higher[0] <= lower[1]): upper_bound = max(lower[1], higher[1]) merged[(- 1)] = (lower[0], upper_bound) else: merged.append(higher) newlist = list(itertools.chain.from_iterable(merged)) newlist = newlist[1:(- 1)] return SpectralRegion([(x, y) for (x, y) in zip(newlist[0::2], newlist[1::2])])<|docstring|>Invert this spectral region. 
That is, given a set of sub-regions this object defines, create a new `SpectralRegion` such that the sub-regions are defined in the new one as regions *not* in this `SpectralRegion`. Parameters ---------- lower_bound : `~astropy.units.Quantity` The lower bound of the region. Can be scalar with pixel or any valid ``spectral_axis`` unit upper_bound : `~astropy.units.Quantity` The upper bound of the region. Can be scalar with pixel or any valid ``spectral_axis`` unit Returns ------- spectral_region : `~specutils.SpectralRegion` Spectral region of the non-selected regions Notes ----- This is applicable if, for example, a `SpectralRegion` has sub-regions defined for peaks in a spectrum and then one wants to create a `SpectralRegion` defined as all the *non*-peaks, then one could use this function. As an example, assume this SpectralRegion is defined as ``sr = SpectralRegion([(0.45*u.um, 0.6*u.um), (0.8*u.um, 0.9*u.um)])``. If we call ``sr_invert = sr.invert(0.3*u.um, 1.0*u.um)`` then ``sr_invert`` will be ``SpectralRegion([(0.3*u.um, 0.45*u.um), (0.6*u.um, 0.8*u.um), (0.9*u.um, 1*u.um)])``<|endoftext|>
9c7bbb3763b5884fc034c699e477c7204b8bfecb9e2f892e4e5db8bf027fb7c4
def get_credentials(): ' Gets credentials to access gCal API ' store = oauth2client.file.Storage(config.credential_store) credentials = store.get() if ((not credentials) or credentials.invalid): flow = client.flow_from_clientsecrets(config.client_secret, 'https://www.googleapis.com/auth/calendar') flow.user_agent = config.application parser = argparse.ArgumentParser(parents=[tools.argparser]) flags = parser.parse_args() credentials = tools.run_flow(flow, store, flags) print(('Storing credentials to ' + config.credential_store)) return credentials
Gets credentials to access gCal API
main.py
get_credentials
slamb2k/gCal-iCal-Sync
0
python
def get_credentials(): ' ' store = oauth2client.file.Storage(config.credential_store) credentials = store.get() if ((not credentials) or credentials.invalid): flow = client.flow_from_clientsecrets(config.client_secret, 'https://www.googleapis.com/auth/calendar') flow.user_agent = config.application parser = argparse.ArgumentParser(parents=[tools.argparser]) flags = parser.parse_args() credentials = tools.run_flow(flow, store, flags) print(('Storing credentials to ' + config.credential_store)) return credentials
def get_credentials(): ' ' store = oauth2client.file.Storage(config.credential_store) credentials = store.get() if ((not credentials) or credentials.invalid): flow = client.flow_from_clientsecrets(config.client_secret, 'https://www.googleapis.com/auth/calendar') flow.user_agent = config.application parser = argparse.ArgumentParser(parents=[tools.argparser]) flags = parser.parse_args() credentials = tools.run_flow(flow, store, flags) print(('Storing credentials to ' + config.credential_store)) return credentials<|docstring|>Gets credentials to access gCal API<|endoftext|>
b5720ade44efd36264ff5c1dd81c6d6d59244da80194ec56a3828ffab18fd849
def get_calendar_service(): ' Gets a service object to use to query gCal API ' credentials = get_credentials() http = credentials.authorize(httplib2.Http()) return discovery.build('calendar', 'v3', http=http)
Gets a service object to use to query gCal API
main.py
get_calendar_service
slamb2k/gCal-iCal-Sync
0
python
def get_calendar_service(): ' ' credentials = get_credentials() http = credentials.authorize(httplib2.Http()) return discovery.build('calendar', 'v3', http=http)
def get_calendar_service(): ' ' credentials = get_credentials() http = credentials.authorize(httplib2.Http()) return discovery.build('calendar', 'v3', http=http)<|docstring|>Gets a service object to use to query gCal API<|endoftext|>
b51a9152acf04c643ab90af050413c0da08d3f4493a55e3d220f50ef8baee477
def load_ical(url): ' Loads an iCal file from a URL and returns an events object ' (resp, content) = httplib2.Http(timeout=None).request(url) assert (resp['status'] == '200') content = content.decode('utf-8') content = content.replace('"tzone://Microsoft/Utc"', 'UTC') events = {} for event in re.findall('BEGIN:VEVENT.*?END:VEVENT', content, ((re.M | re.I) | re.DOTALL)): summary = re.search('summary:(.*)', event, re.I).group(1) if (summary is None): print(("Couldn't find summary. Skipping event.\nEvent Data: %s" % event)) continue allday = re.search('X-MICROSOFT-CDO-ALLDAYEVENT:TRUE', event, re.I) isAllDay = (allday is not None) if isAllDay: startDateRegEx = 'dtstart;VALUE=DATE:(?P<date>(.*))' endDateRegEx = 'dtend;VALUE=DATE:(?P<date>(.*))' else: startDateRegEx = 'dtstart;TZID=(?P<timezone>.*?):(?P<date>(.*))' endDateRegEx = 'dtend;TZID=(?P<timezone>.*?):(?P<date>(.*))' start = re.search(startDateRegEx, event, re.I) if (start is None): print(("Couldn't find start date. Skipping event - %s" % summary)) continue end = re.search(endDateRegEx, event, re.I) if (end is None): print(("Couldn't find end date. Skipping event - %s" % summary)) continue start_timezone_string = 'UTC' if (('timezone' in start.groupdict()) and (start.group('timezone') != 'UTC')): start_timezone_string = config.default_timezone try: start_date_string = start.group('date').replace('Z', '') parsed_start_date = parse(start_date_string) start_date_tz = gettz(start_timezone_string) parsed_start_date = parsed_start_date.replace(tzinfo=start_date_tz) except: print(("Couldn't parse start date: %s. 
Skipping event - %s" % (start_date_string, summary))) continue end_timezone_string = 'UTC' if (('timezone' in end.groupdict()) and (end.group('timezone') != 'UTC')): end_timezone_string = config.default_timezone try: end_date_string = end.group('date').replace('Z', '') parsed_end_date = parse(end_date_string) end_date_tz = gettz(end_timezone_string) parsed_end_date = parsed_end_date.replace(tzinfo=end_date_tz) except: print(("Couldn't parse end date: %s. Skipping event - %s" % (end_date_string, summary))) continue hash = hashlib.sha256(('%s %s %s' % (parsed_start_date.isoformat(), parsed_end_date.isoformat(), summary)).encode('utf-8')).hexdigest() if (parsed_start_date.replace(tzinfo=None) >= parse(config.start_date)): events[hash] = {'summary': summary, 'start': {'dateTime': str(parsed_start_date).replace(' ', 'T'), 'timeZone': start_timezone_string}, 'end': {'dateTime': str(parsed_end_date).replace(' ', 'T'), 'timeZone': end_timezone_string}, 'id': hash} return events
Loads an iCal file from a URL and returns an events object
main.py
load_ical
slamb2k/gCal-iCal-Sync
0
python
def load_ical(url): ' ' (resp, content) = httplib2.Http(timeout=None).request(url) assert (resp['status'] == '200') content = content.decode('utf-8') content = content.replace('"tzone://Microsoft/Utc"', 'UTC') events = {} for event in re.findall('BEGIN:VEVENT.*?END:VEVENT', content, ((re.M | re.I) | re.DOTALL)): summary = re.search('summary:(.*)', event, re.I).group(1) if (summary is None): print(("Couldn't find summary. Skipping event.\nEvent Data: %s" % event)) continue allday = re.search('X-MICROSOFT-CDO-ALLDAYEVENT:TRUE', event, re.I) isAllDay = (allday is not None) if isAllDay: startDateRegEx = 'dtstart;VALUE=DATE:(?P<date>(.*))' endDateRegEx = 'dtend;VALUE=DATE:(?P<date>(.*))' else: startDateRegEx = 'dtstart;TZID=(?P<timezone>.*?):(?P<date>(.*))' endDateRegEx = 'dtend;TZID=(?P<timezone>.*?):(?P<date>(.*))' start = re.search(startDateRegEx, event, re.I) if (start is None): print(("Couldn't find start date. Skipping event - %s" % summary)) continue end = re.search(endDateRegEx, event, re.I) if (end is None): print(("Couldn't find end date. Skipping event - %s" % summary)) continue start_timezone_string = 'UTC' if (('timezone' in start.groupdict()) and (start.group('timezone') != 'UTC')): start_timezone_string = config.default_timezone try: start_date_string = start.group('date').replace('Z', ) parsed_start_date = parse(start_date_string) start_date_tz = gettz(start_timezone_string) parsed_start_date = parsed_start_date.replace(tzinfo=start_date_tz) except: print(("Couldn't parse start date: %s. Skipping event - %s" % (start_date_string, summary))) continue end_timezone_string = 'UTC' if (('timezone' in end.groupdict()) and (end.group('timezone') != 'UTC')): end_timezone_string = config.default_timezone try: end_date_string = end.group('date').replace('Z', ) parsed_end_date = parse(end_date_string) end_date_tz = gettz(end_timezone_string) parsed_end_date = parsed_end_date.replace(tzinfo=end_date_tz) except: print(("Couldn't parse end date: %s. 
Skipping event - %s" % (end_date_string, summary))) continue hash = hashlib.sha256(('%s %s %s' % (parsed_start_date.isoformat(), parsed_end_date.isoformat(), summary)).encode('utf-8')).hexdigest() if (parsed_start_date.replace(tzinfo=None) >= parse(config.start_date)): events[hash] = {'summary': summary, 'start': {'dateTime': str(parsed_start_date).replace(' ', 'T'), 'timeZone': start_timezone_string}, 'end': {'dateTime': str(parsed_end_date).replace(' ', 'T'), 'timeZone': end_timezone_string}, 'id': hash} return events
def load_ical(url): ' ' (resp, content) = httplib2.Http(timeout=None).request(url) assert (resp['status'] == '200') content = content.decode('utf-8') content = content.replace('"tzone://Microsoft/Utc"', 'UTC') events = {} for event in re.findall('BEGIN:VEVENT.*?END:VEVENT', content, ((re.M | re.I) | re.DOTALL)): summary = re.search('summary:(.*)', event, re.I).group(1) if (summary is None): print(("Couldn't find summary. Skipping event.\nEvent Data: %s" % event)) continue allday = re.search('X-MICROSOFT-CDO-ALLDAYEVENT:TRUE', event, re.I) isAllDay = (allday is not None) if isAllDay: startDateRegEx = 'dtstart;VALUE=DATE:(?P<date>(.*))' endDateRegEx = 'dtend;VALUE=DATE:(?P<date>(.*))' else: startDateRegEx = 'dtstart;TZID=(?P<timezone>.*?):(?P<date>(.*))' endDateRegEx = 'dtend;TZID=(?P<timezone>.*?):(?P<date>(.*))' start = re.search(startDateRegEx, event, re.I) if (start is None): print(("Couldn't find start date. Skipping event - %s" % summary)) continue end = re.search(endDateRegEx, event, re.I) if (end is None): print(("Couldn't find end date. Skipping event - %s" % summary)) continue start_timezone_string = 'UTC' if (('timezone' in start.groupdict()) and (start.group('timezone') != 'UTC')): start_timezone_string = config.default_timezone try: start_date_string = start.group('date').replace('Z', ) parsed_start_date = parse(start_date_string) start_date_tz = gettz(start_timezone_string) parsed_start_date = parsed_start_date.replace(tzinfo=start_date_tz) except: print(("Couldn't parse start date: %s. Skipping event - %s" % (start_date_string, summary))) continue end_timezone_string = 'UTC' if (('timezone' in end.groupdict()) and (end.group('timezone') != 'UTC')): end_timezone_string = config.default_timezone try: end_date_string = end.group('date').replace('Z', ) parsed_end_date = parse(end_date_string) end_date_tz = gettz(end_timezone_string) parsed_end_date = parsed_end_date.replace(tzinfo=end_date_tz) except: print(("Couldn't parse end date: %s. 
Skipping event - %s" % (end_date_string, summary))) continue hash = hashlib.sha256(('%s %s %s' % (parsed_start_date.isoformat(), parsed_end_date.isoformat(), summary)).encode('utf-8')).hexdigest() if (parsed_start_date.replace(tzinfo=None) >= parse(config.start_date)): events[hash] = {'summary': summary, 'start': {'dateTime': str(parsed_start_date).replace(' ', 'T'), 'timeZone': start_timezone_string}, 'end': {'dateTime': str(parsed_end_date).replace(' ', 'T'), 'timeZone': end_timezone_string}, 'id': hash} return events<|docstring|>Loads an iCal file from a URL and returns an events object<|endoftext|>
e004d698617138478d1d3820ebc6c92c08a299d3e16cf0937ed48811c177f001
def handle_existing_events(service, new_events): ' Examines existing gCal events and prunes as needed ' if config.erase_all: print('Clearing calendar...') service.calendars().clear(calendarId=config.gcal_id).execute() for event in service.events().list(calendarId=config.gcal_id, maxResults=2500).execute()['items']: if (event['id'] in new_events): del new_events[event['id']] elif config.remove_stale: print(('Deleting stale event %s...' % event['id'][0:8])) service.events().delete(calendarId=config.gcal_id, eventId=event['id']).execute()
Examines existing gCal events and prunes as needed
main.py
handle_existing_events
slamb2k/gCal-iCal-Sync
0
python
def handle_existing_events(service, new_events): ' ' if config.erase_all: print('Clearing calendar...') service.calendars().clear(calendarId=config.gcal_id).execute() for event in service.events().list(calendarId=config.gcal_id, maxResults=2500).execute()['items']: if (event['id'] in new_events): del new_events[event['id']] elif config.remove_stale: print(('Deleting stale event %s...' % event['id'][0:8])) service.events().delete(calendarId=config.gcal_id, eventId=event['id']).execute()
def handle_existing_events(service, new_events): ' ' if config.erase_all: print('Clearing calendar...') service.calendars().clear(calendarId=config.gcal_id).execute() for event in service.events().list(calendarId=config.gcal_id, maxResults=2500).execute()['items']: if (event['id'] in new_events): del new_events[event['id']] elif config.remove_stale: print(('Deleting stale event %s...' % event['id'][0:8])) service.events().delete(calendarId=config.gcal_id, eventId=event['id']).execute()<|docstring|>Examines existing gCal events and prunes as needed<|endoftext|>
3d32af0fcf4339421fdcdd1304fa7b91b537ac4bb1a2b00f2f7746160100ebb8
def add_ical_to_gcal(service, events): ' Adds all events in event list to gCal ' for (i, event) in enumerate(events): print(('Adding %d/%d %s' % ((i + 1), len(events), events[event]['summary']))) try: sleep(0.3) service.events().insert(calendarId=config.gcal_id, body=events[event]).execute() except errors.HttpError as e: if (e.resp.status == 409): print('Event already exists. Updating...') sleep(0.3) service.events().update(calendarId=config.gcal_id, eventId=event, body=events[event]).execute() print('Event updated.') else: raise e
Adds all events in event list to gCal
main.py
add_ical_to_gcal
slamb2k/gCal-iCal-Sync
0
python
def add_ical_to_gcal(service, events): ' ' for (i, event) in enumerate(events): print(('Adding %d/%d %s' % ((i + 1), len(events), events[event]['summary']))) try: sleep(0.3) service.events().insert(calendarId=config.gcal_id, body=events[event]).execute() except errors.HttpError as e: if (e.resp.status == 409): print('Event already exists. Updating...') sleep(0.3) service.events().update(calendarId=config.gcal_id, eventId=event, body=events[event]).execute() print('Event updated.') else: raise e
def add_ical_to_gcal(service, events): ' ' for (i, event) in enumerate(events): print(('Adding %d/%d %s' % ((i + 1), len(events), events[event]['summary']))) try: sleep(0.3) service.events().insert(calendarId=config.gcal_id, body=events[event]).execute() except errors.HttpError as e: if (e.resp.status == 409): print('Event already exists. Updating...') sleep(0.3) service.events().update(calendarId=config.gcal_id, eventId=event, body=events[event]).execute() print('Event updated.') else: raise e<|docstring|>Adds all events in event list to gCal<|endoftext|>
9d0133b69cea20a9c2d3f5ed4f7dbb1b717d2be1d03a5dfbfa333bc981e0ed84
def sigmoid(input, eps=1e-07): 'Same as `torch.sigmoid`, plus clamping to `(eps,1-eps)' return input.sigmoid().clamp(eps, (1 - eps))
Same as `torch.sigmoid`, plus clamping to `(eps,1-eps)
isic/layers.py
sigmoid
bomcon123456/isic
0
python
def sigmoid(input, eps=1e-07): return input.sigmoid().clamp(eps, (1 - eps))
def sigmoid(input, eps=1e-07): return input.sigmoid().clamp(eps, (1 - eps))<|docstring|>Same as `torch.sigmoid`, plus clamping to `(eps,1-eps)<|endoftext|>
4108eaab425ef41c50117f3e6b676bf30011f2e514b4e6db9c4bf66d5d931a3b
def sigmoid_(input, eps=1e-07): 'Same as `torch.sigmoid_`, plus clamping to `(eps,1-eps)' return input.sigmoid_().clamp_(eps, (1 - eps))
Same as `torch.sigmoid_`, plus clamping to `(eps,1-eps)
isic/layers.py
sigmoid_
bomcon123456/isic
0
python
def sigmoid_(input, eps=1e-07): return input.sigmoid_().clamp_(eps, (1 - eps))
def sigmoid_(input, eps=1e-07): return input.sigmoid_().clamp_(eps, (1 - eps))<|docstring|>Same as `torch.sigmoid_`, plus clamping to `(eps,1-eps)<|endoftext|>
90b466523338c4ce5ab9d149c036e3163401ede255abb6740cb83dbfe9cd2607
def init_default(m, func=nn.init.kaiming_normal_): 'Initialize `m` weights with `func` and set `bias` to 0.' if func: if hasattr(m, 'weight'): func(m.weight) if (hasattr(m, 'bias') and hasattr(m.bias, 'data')): m.bias.data.fill_(0.0) return m
Initialize `m` weights with `func` and set `bias` to 0.
isic/layers.py
init_default
bomcon123456/isic
0
python
def init_default(m, func=nn.init.kaiming_normal_): if func: if hasattr(m, 'weight'): func(m.weight) if (hasattr(m, 'bias') and hasattr(m.bias, 'data')): m.bias.data.fill_(0.0) return m
def init_default(m, func=nn.init.kaiming_normal_): if func: if hasattr(m, 'weight'): func(m.weight) if (hasattr(m, 'bias') and hasattr(m.bias, 'data')): m.bias.data.fill_(0.0) return m<|docstring|>Initialize `m` weights with `func` and set `bias` to 0.<|endoftext|>
fcda858b9af65a1865996f0ec07e997f3435e0c83736d921d05ad63d06ff4dc6
def requires_grad(m): 'Check if the first parameter of `m` requires grad or not' ps = list(m.parameters()) return (ps[0].requires_grad if (len(ps) > 0) else False)
Check if the first parameter of `m` requires grad or not
isic/layers.py
requires_grad
bomcon123456/isic
0
python
def requires_grad(m): ps = list(m.parameters()) return (ps[0].requires_grad if (len(ps) > 0) else False)
def requires_grad(m): ps = list(m.parameters()) return (ps[0].requires_grad if (len(ps) > 0) else False)<|docstring|>Check if the first parameter of `m` requires grad or not<|endoftext|>
f63279d7b5f80bc6621639cf1b8c29e98cb28ca8069e7c877fc91276a55988e8
def cond_init(m, func): "Apply `init_default` to `m` unless it's a batchnorm module" if ((not isinstance(m, norm_types)) and requires_grad(m)): init_default(m, func)
Apply `init_default` to `m` unless it's a batchnorm module
isic/layers.py
cond_init
bomcon123456/isic
0
python
def cond_init(m, func): if ((not isinstance(m, norm_types)) and requires_grad(m)): init_default(m, func)
def cond_init(m, func): if ((not isinstance(m, norm_types)) and requires_grad(m)): init_default(m, func)<|docstring|>Apply `init_default` to `m` unless it's a batchnorm module<|endoftext|>
ec6b05a696bb0c0a2b7a86d3d1edd2753b9d2b060d7096d91230f2edae57cde3
def BatchNorm(nf, ndim=2, norm_type=NormType.Batch, **kwargs):
    """BatchNorm layer with `nf` features and `ndim`, initialized per `norm_type`."""
    # BatchZero requests zero-initialized scale (useful e.g. for residual blocks).
    zero_init = norm_type == NormType.BatchZero
    return _get_norm('BatchNorm', nf, ndim, zero=zero_init, **kwargs)
BatchNorm layer with `nf` features and `ndim` initialized depending on `norm_type`.
isic/layers.py
BatchNorm
bomcon123456/isic
0
python
def BatchNorm(nf, ndim=2, norm_type=NormType.Batch, **kwargs): return _get_norm('BatchNorm', nf, ndim, zero=(norm_type == NormType.BatchZero), **kwargs)
def BatchNorm(nf, ndim=2, norm_type=NormType.Batch, **kwargs): return _get_norm('BatchNorm', nf, ndim, zero=(norm_type == NormType.BatchZero), **kwargs)<|docstring|>BatchNorm layer with `nf` features and `ndim` initialized depending on `norm_type`.<|endoftext|>
9687db7e5da218fdd14040ce45a216bf924119e7fa3f264dfa03064e925b59ec
def InstanceNorm(nf, ndim=2, norm_type=NormType.Instance, affine=True, **kwargs):
    """InstanceNorm layer with `nf` features and `ndim`, initialized per `norm_type`."""
    # InstanceZero requests zero-initialized scale, mirroring BatchZero.
    zero_init = norm_type == NormType.InstanceZero
    return _get_norm('InstanceNorm', nf, ndim, zero=zero_init, affine=affine, **kwargs)
InstanceNorm layer with `nf` features and `ndim` initialized depending on `norm_type`.
isic/layers.py
InstanceNorm
bomcon123456/isic
0
python
def InstanceNorm(nf, ndim=2, norm_type=NormType.Instance, affine=True, **kwargs): return _get_norm('InstanceNorm', nf, ndim, zero=(norm_type == NormType.InstanceZero), affine=affine, **kwargs)
def InstanceNorm(nf, ndim=2, norm_type=NormType.Instance, affine=True, **kwargs): return _get_norm('InstanceNorm', nf, ndim, zero=(norm_type == NormType.InstanceZero), affine=affine, **kwargs)<|docstring|>InstanceNorm layer with `nf` features and `ndim` initialized depending on `norm_type`.<|endoftext|>
4e1637ce0bd532bf30c4d4d1c4dd57ed53eb964f803c107648e91a4438652dda
def _get_norm(prefix, nf, ndim=2, zero=False, **kwargs): 'Norm layer with `nf` features and `ndim` initialized depending on `norm_type`.' assert (1 <= ndim <= 3) bn = getattr(nn, f'{prefix}{ndim}d')(nf, **kwargs) if bn.affine: bn.bias.data.fill_(0.001) bn.weight.data.fill_((0.0 if zero else 1.0)) return bn
Norm layer with `nf` features and `ndim` initialized depending on `norm_type`.
isic/layers.py
_get_norm
bomcon123456/isic
0
python
def _get_norm(prefix, nf, ndim=2, zero=False, **kwargs): assert (1 <= ndim <= 3) bn = getattr(nn, f'{prefix}{ndim}d')(nf, **kwargs) if bn.affine: bn.bias.data.fill_(0.001) bn.weight.data.fill_((0.0 if zero else 1.0)) return bn
def _get_norm(prefix, nf, ndim=2, zero=False, **kwargs): assert (1 <= ndim <= 3) bn = getattr(nn, f'{prefix}{ndim}d')(nf, **kwargs) if bn.affine: bn.bias.data.fill_(0.001) bn.weight.data.fill_((0.0 if zero else 1.0)) return bn<|docstring|>Norm layer with `nf` features and `ndim` initialized depending on `norm_type`.<|endoftext|>
b13e30949d004883d89883e3a65bc10f7c496cdefd04aa5e2dc56ba862729b24
def _conv_func(ndim=2, transpose=False): 'Return the proper conv `ndim` function, potentially `transposed`.' assert (1 <= ndim <= 3) return getattr(nn, f"Conv{('Transpose' if transpose else '')}{ndim}d")
Return the proper conv `ndim` function, potentially `transposed`.
isic/layers.py
_conv_func
bomcon123456/isic
0
python
def _conv_func(ndim=2, transpose=False): assert (1 <= ndim <= 3) return getattr(nn, f"Conv{('Transpose' if transpose else '')}{ndim}d")
def _conv_func(ndim=2, transpose=False): assert (1 <= ndim <= 3) return getattr(nn, f"Conv{('Transpose' if transpose else '')}{ndim}d")<|docstring|>Return the proper conv `ndim` function, potentially `transposed`.<|endoftext|>
6f551d71f9e1224e79cabbef7f0b02bfe23bd569971edf827063cd4d090a32ca
def _matrix_operator(C, operator): ' Matrix equivalent of an operator. ' if (not np.isfinite(C).all()): raise ValueError('Covariance matrices must be positive definite. Add regularization to avoid this error.') (eigvals, eigvects) = scipy.linalg.eigh(C, check_finite=False) eigvals = np.diag(operator(eigvals)) C_out = ((eigvects @ eigvals) @ eigvects.T) return C_out
Matrix equivalent of an operator.
code/src/feature_extraction_functions/riemann.py
_matrix_operator
emihelj/cybathlon
8
python
def _matrix_operator(C, operator): ' ' if (not np.isfinite(C).all()): raise ValueError('Covariance matrices must be positive definite. Add regularization to avoid this error.') (eigvals, eigvects) = scipy.linalg.eigh(C, check_finite=False) eigvals = np.diag(operator(eigvals)) C_out = ((eigvects @ eigvals) @ eigvects.T) return C_out
def _matrix_operator(C, operator): ' ' if (not np.isfinite(C).all()): raise ValueError('Covariance matrices must be positive definite. Add regularization to avoid this error.') (eigvals, eigvects) = scipy.linalg.eigh(C, check_finite=False) eigvals = np.diag(operator(eigvals)) C_out = ((eigvects @ eigvals) @ eigvects.T) return C_out<|docstring|>Matrix equivalent of an operator.<|endoftext|>
eb01c665ebcfbcd21127f83b4a139141c9487161c6e9ed64b76640fed5dd70e8
def logm(C):
    """Matrix logarithm of a covariance matrix (eigenvalue-wise log)."""
    op = np.log
    return _matrix_operator(C, op)
Return the matrix logarithm of a covariance matrix.
code/src/feature_extraction_functions/riemann.py
logm
emihelj/cybathlon
8
python
def logm(C): ' ' return _matrix_operator(C, np.log)
def logm(C): ' ' return _matrix_operator(C, np.log)<|docstring|>Return the matrix logarithm of a covariance matrix.<|endoftext|>
6381a9c12bd3cbb1c8bf8b06b9b8aad0256828d4e37221d3887950c5aa579bd7
def expm(C):
    """Matrix exponential of a covariance matrix (eigenvalue-wise exp)."""
    op = np.exp
    return _matrix_operator(C, op)
Return the matrix exponential of a covariance matrix.
code/src/feature_extraction_functions/riemann.py
expm
emihelj/cybathlon
8
python
def expm(C): ' ' return _matrix_operator(C, np.exp)
def expm(C): ' ' return _matrix_operator(C, np.exp)<|docstring|>Return the matrix exponential of a covariance matrix.<|endoftext|>
1090d0dbf2ce0e4117e15ed5c941c7e295858f550f35da6009b95c2837618a8f
def sqrtm(C):
    """Matrix square root of a covariance matrix (eigenvalue-wise sqrt)."""
    op = np.sqrt
    return _matrix_operator(C, op)
Return the matrix square root of a covariance matrix.
code/src/feature_extraction_functions/riemann.py
sqrtm
emihelj/cybathlon
8
python
def sqrtm(C): ' ' return _matrix_operator(C, np.sqrt)
def sqrtm(C): ' ' return _matrix_operator(C, np.sqrt)<|docstring|>Return the matrix square root of a covariance matrix.<|endoftext|>
747f3b8f5cc1d59806c91aa7571e986d2057deef2f76953e5db4945560e757ee
def invsqrtm(C):
    """Inverse matrix square root of a covariance matrix (eigenvalue-wise 1/sqrt)."""
    return _matrix_operator(C, lambda x: 1.0 / np.sqrt(x))
Return the inverse matrix square root of a covariance matrix
code/src/feature_extraction_functions/riemann.py
invsqrtm
emihelj/cybathlon
8
python
def invsqrtm(C): ' ' def isqrt(x): return (1.0 / np.sqrt(x)) return _matrix_operator(C, isqrt)
def invsqrtm(C): ' ' def isqrt(x): return (1.0 / np.sqrt(x)) return _matrix_operator(C, isqrt)<|docstring|>Return the inverse matrix square root of a covariance matrix<|endoftext|>
27136dc415a8f04157e4867fed1a8982623a20ca0f5a63898b3f52f5c8a62794
def half_vectorization(C):
    """Half-vectorize a symmetric matrix.

    Input:
        C: SPD matrix of shape (n_channels, n_channels).
    Output:
        C_vec: 1-D array of length n_channels*(n_channels+1)/2, laid out as
        the diagonal first, then the strict upper triangle (row-major)
        scaled by sqrt(2) so the Euclidean norm is preserved.

    Bug fix: the original extracted off-diagonal entries with
    `tmp[tmp != 0]` after flattening `np.triu(C, k=1)`, which silently drops
    any off-diagonal element that is exactly zero and then fails (shape
    mismatch on assignment). Using `np.triu_indices` keeps every entry in
    the same row-major order.
    """
    n_channels, _ = C.shape
    n_elements = (n_channels + 1) * n_channels // 2
    C_vec = np.empty(n_elements)
    C_vec[:n_channels] = np.diag(C)
    # Row-major strict upper triangle, matching the original ordering.
    rows, cols = np.triu_indices(n_channels, k=1)
    C_vec[n_channels:] = np.sqrt(2) * C[rows, cols]
    return C_vec
Calculates half vectorization of a matrix. Input: - C: SPD matrix of shape (n_channel,n_channel) Output: - C_vec: Vectorized matrix of shape n_riemann
code/src/feature_extraction_functions/riemann.py
half_vectorization
emihelj/cybathlon
8
python
def half_vectorization(C): '\n Calculates half vectorization of a matrix.\n Input:\n - C: SPD matrix of shape (n_channel,n_channel)\n Output:\n - C_vec: Vectorized matrix of shape n_riemann\n ' (n_channels, _) = C.shape n_elements = int((((n_channels + 1) * n_channels) / 2)) C_vec = np.zeros(n_elements) C_vec[:n_channels] = np.diag(C) sqrt2 = np.sqrt(2) tmp = np.triu(C, k=1).flatten() C_vec[n_channels:] = (sqrt2 * tmp[(tmp != 0)]) return C_vec
def half_vectorization(C): '\n Calculates half vectorization of a matrix.\n Input:\n - C: SPD matrix of shape (n_channel,n_channel)\n Output:\n - C_vec: Vectorized matrix of shape n_riemann\n ' (n_channels, _) = C.shape n_elements = int((((n_channels + 1) * n_channels) / 2)) C_vec = np.zeros(n_elements) C_vec[:n_channels] = np.diag(C) sqrt2 = np.sqrt(2) tmp = np.triu(C, k=1).flatten() C_vec[n_channels:] = (sqrt2 * tmp[(tmp != 0)]) return C_vec<|docstring|>Calculates half vectorization of a matrix. Input: - C: SPD matrix of shape (n_channel,n_channel) Output: - C_vec: Vectorized matrix of shape n_riemann<|endoftext|>
86deb247b5b86e25bd5624d89987fc66d8f7f65835d6463c0ac669fd26f7f432
def load_bands(self, bandwidths, f_min, f_max, f_order, f_type):
    """Initialize filter bank bands.

    Inputs:
        bandwidths: list of filter bandwidths (ints).
        f_min, f_max: minimal and maximal filter frequencies (int).
        f_order: filter order (int) -- unused here, kept for interface parity.
        f_type: filter type string -- unused here, kept for interface parity.
    Output:
        np.ndarray of shape (n_bands, 2) with [low, high] band edges.
    """
    bands = []
    for bw in bandwidths:
        # Narrow bands (<4 Hz) slide in steps of 2 Hz, wider ones in 4 Hz.
        step = 2 if bw < 4 else 4
        low = f_min
        while low + bw <= f_max:
            bands.append([low, low + bw])
            low += step
    return np.array(bands)
Initialize filter bank bands. Inputs: - bandwidths: List of filter bandwidths (array of int). - f_min, f_max: minimal and maximal filter frequencies (int). - f_order: filter order (int). - f_type: filter type {'butter', 'cheby', 'ellip'} (string). Output: - f_bands: filter bank bands (array of shape (n_bands, 2)).
code/src/feature_extraction_functions/riemann.py
load_bands
emihelj/cybathlon
8
python
def load_bands(self, bandwidths, f_min, f_max, f_order, f_type): " Initialize filter bank bands.\n Inputs:\n - bandwidths: List of filter bandwidths (array of int).\n - f_min, f_max: minimal and maximal filter frequencies (int).\n - f_order: filter order (int).\n - f_type: filter type {'butter', 'cheby', 'ellip'} (string).\n Output:\n - f_bands: filter bank bands (array of shape (n_bands, 2)).\n " f_bands = [] for bw in bandwidths: f = f_min while ((f + bw) <= f_max): f_bands.append([f, (f + bw)]) f += (2 if (bw < 4) else 4) f_bands = np.array(f_bands) return f_bands
def load_bands(self, bandwidths, f_min, f_max, f_order, f_type): " Initialize filter bank bands.\n Inputs:\n - bandwidths: List of filter bandwidths (array of int).\n - f_min, f_max: minimal and maximal filter frequencies (int).\n - f_order: filter order (int).\n - f_type: filter type {'butter', 'cheby', 'ellip'} (string).\n Output:\n - f_bands: filter bank bands (array of shape (n_bands, 2)).\n " f_bands = [] for bw in bandwidths: f = f_min while ((f + bw) <= f_max): f_bands.append([f, (f + bw)]) f += (2 if (bw < 4) else 4) f_bands = np.array(f_bands) return f_bands<|docstring|>Initialize filter bank bands. Inputs: - bandwidths: List of filter bandwidths (array of int). - f_min, f_max: minimal and maximal filter frequencies (int). - f_order: filter order (int). - f_type: filter type {'butter', 'cheby', 'ellip'} (string). Output: - f_bands: filter bank bands (array of shape (n_bands, 2)).<|endoftext|>
b98c7b15b9851defc306ee19c86a7f5dda28e104935ffc45d26d85fc119568f0
def fit(self, X, y):
    """Fit the Riemannian feature extractor.

    Band-pass filters `X` in each filter-bank band, computes regularized
    sample covariance matrices, and stores the inverse square root of each
    band's mean covariance in `self.C_ref_invsqrt` (the tangent-space
    reference points used by `transform`).

    Input:
        X: EEG data, shape (n_trials, n_channels, n_samples).
        y: labels, shape (n_trials,) -- unused, kept for the sklearn API.
    Returns:
        self.

    Cleanup: removed the unused `now = time.time()` and `n_trials` locals
    from the original; behavior is otherwise unchanged.
    """
    _, n_channels, n_samples = X.shape
    self.C_ref_invsqrt = np.zeros((len(self.f_bands), n_channels, n_channels))
    for band_idx, f_band in enumerate(self.f_bands):
        X_filt = filtering(X, fs=self.fs, f_order=self.f_order,
                           f_low=f_band[0], f_high=f_band[1], f_type=self.f_type)
        # Unbiased sample covariance + diagonal (shrinkage-style) regularization.
        cov_matrices = np.array([
            (1 / (n_samples - 1)) * np.dot(X_t, X_t.T)
            + (self.rho / n_samples) * np.eye(n_channels)
            for X_t in X_filt
        ])
        C_ref = np.mean(cov_matrices, axis=0)
        self.C_ref_invsqrt[band_idx] = invsqrtm(C_ref)
    return self
Apply filtering to input signal and compute regularized covariance matrices. Compute the reference matrices of each filter block. Input: X: EEG data in numpy format (trials, channels, samples). y: EEG labels numpy format (trial).
code/src/feature_extraction_functions/riemann.py
fit
emihelj/cybathlon
8
python
def fit(self, X, y): '\n Apply filtering to input signal and compute regularized covariance matrices.\n Compute the reference matrices of each filter block.\n Input:\n X: EEG data in numpy format (trials, channels, samples).\n y: EEG labels numpy format (trial).\n ' now = time.time() (n_trials, n_channels, n_samples) = X.shape self.C_ref_invsqrt = np.zeros((len(self.f_bands), n_channels, n_channels)) for (band_idx, f_band) in enumerate(self.f_bands): X_filt = filtering(X, fs=self.fs, f_order=self.f_order, f_low=f_band[0], f_high=f_band[1], f_type=self.f_type) cov_matrices = np.array([(((1 / (n_samples - 1)) * np.dot(X_t, X_t.T)) + ((self.rho / n_samples) * np.eye(n_channels))) for X_t in X_filt]) C_ref = np.mean(cov_matrices, axis=0) self.C_ref_invsqrt[band_idx] = invsqrtm(C_ref) return self
def fit(self, X, y): '\n Apply filtering to input signal and compute regularized covariance matrices.\n Compute the reference matrices of each filter block.\n Input:\n X: EEG data in numpy format (trials, channels, samples).\n y: EEG labels numpy format (trial).\n ' now = time.time() (n_trials, n_channels, n_samples) = X.shape self.C_ref_invsqrt = np.zeros((len(self.f_bands), n_channels, n_channels)) for (band_idx, f_band) in enumerate(self.f_bands): X_filt = filtering(X, fs=self.fs, f_order=self.f_order, f_low=f_band[0], f_high=f_band[1], f_type=self.f_type) cov_matrices = np.array([(((1 / (n_samples - 1)) * np.dot(X_t, X_t.T)) + ((self.rho / n_samples) * np.eye(n_channels))) for X_t in X_filt]) C_ref = np.mean(cov_matrices, axis=0) self.C_ref_invsqrt[band_idx] = invsqrtm(C_ref) return self<|docstring|>Apply filtering to input signal and compute regularized covariance matrices. Compute the reference matrices of each filter block. Input: X: EEG data in numpy format (trials, channels, samples). y: EEG labels numpy format (trial).<|endoftext|>
1be9eb2e597fbe62cf649a033b70b0e153ab950a842d23b616378e60d80ea75e
def transform(self, X):
    """Compute multiscale Riemannian features.

    For each filter-bank band: band-pass filter, compute regularized
    covariances, project onto the tangent space at the reference point
    fitted by `fit` (log of the whitened covariance), then half-vectorize.
    Band features are horizontally stacked.

    Input:
        X: EEG array of shape (n_trials, n_channels, n_samples).
    Output:
        feats: array of shape (n_trials, n_features).

    Cleanup: removed the unused `now = time.time()` local from the
    original; behavior is otherwise unchanged.
    """
    n_trials, n_channels, n_samples = X.shape
    feats = []
    for band_idx, f_band in enumerate(self.f_bands):
        X_filt = filtering(X, fs=self.fs, f_order=self.f_order,
                           f_low=f_band[0], f_high=f_band[1], f_type=self.f_type)
        cov_matrices = np.array([
            (1 / (n_samples - 1)) * np.dot(X_t, X_t.T)
            + (self.rho / n_samples) * np.eye(n_channels)
            for X_t in X_filt
        ])
        c_ref_invsqrt = self.C_ref_invsqrt[band_idx]
        # Tangent-space projection: log(C_ref^-1/2 @ C @ C_ref^-1/2).
        S_projections = np.array([
            logm(c_ref_invsqrt @ cov_matrices[trial_idx] @ c_ref_invsqrt)
            for trial_idx in range(n_trials)
        ])
        S_projections_vec = np.array([
            half_vectorization(S_projections[trial_idx])
            for trial_idx in range(n_trials)
        ])
        feats = (S_projections_vec if len(feats) == 0
                 else np.hstack([feats, S_projections_vec]))
    return feats
Compute multiscale riemannian features, i.e. the vectorized covariance matrices of each filter block projected in the Riemannian tangent space. Input: - X: EEG array of shape (n_trials, n_channels, n_samples). Output: - feats: extracted features of shape (n_trials, n_features).
code/src/feature_extraction_functions/riemann.py
transform
emihelj/cybathlon
8
python
def transform(self, X): '\n Compute multiscale riemannian features, i.e. the vectorized covariance matrices of each filter block projected in the Riemannian tangent space.\n Input:\n - X: EEG array of shape (n_trials, n_channels, n_samples).\n Output:\n - feats: extracted features of shape (n_trials, n_features).\n ' (n_trials, n_channels, n_samples) = X.shape feats = [] now = time.time() for (band_idx, f_band) in enumerate(self.f_bands): X_filt = filtering(X, fs=self.fs, f_order=self.f_order, f_low=f_band[0], f_high=f_band[1], f_type=self.f_type) cov_matrices = np.array([(((1 / (n_samples - 1)) * np.dot(X_t, X_t.T)) + ((self.rho / n_samples) * np.eye(n_channels))) for X_t in X_filt]) c_ref_invsqrt = self.C_ref_invsqrt[band_idx] S_projections = np.array([logm(((c_ref_invsqrt @ cov_matrices[trial_idx]) @ c_ref_invsqrt)) for trial_idx in range(n_trials)]) S_projections_vec = np.array([half_vectorization(S_projections[trial_idx]) for trial_idx in range(n_trials)]) feats = (S_projections_vec if (len(feats) == 0) else np.hstack([feats, S_projections_vec])) return feats
def transform(self, X): '\n Compute multiscale riemannian features, i.e. the vectorized covariance matrices of each filter block projected in the Riemannian tangent space.\n Input:\n - X: EEG array of shape (n_trials, n_channels, n_samples).\n Output:\n - feats: extracted features of shape (n_trials, n_features).\n ' (n_trials, n_channels, n_samples) = X.shape feats = [] now = time.time() for (band_idx, f_band) in enumerate(self.f_bands): X_filt = filtering(X, fs=self.fs, f_order=self.f_order, f_low=f_band[0], f_high=f_band[1], f_type=self.f_type) cov_matrices = np.array([(((1 / (n_samples - 1)) * np.dot(X_t, X_t.T)) + ((self.rho / n_samples) * np.eye(n_channels))) for X_t in X_filt]) c_ref_invsqrt = self.C_ref_invsqrt[band_idx] S_projections = np.array([logm(((c_ref_invsqrt @ cov_matrices[trial_idx]) @ c_ref_invsqrt)) for trial_idx in range(n_trials)]) S_projections_vec = np.array([half_vectorization(S_projections[trial_idx]) for trial_idx in range(n_trials)]) feats = (S_projections_vec if (len(feats) == 0) else np.hstack([feats, S_projections_vec])) return feats<|docstring|>Compute multiscale riemannian features, i.e. the vectorized covariance matrices of each filter block projected in the Riemannian tangent space. Input: - X: EEG array of shape (n_trials, n_channels, n_samples). Output: - feats: extracted features of shape (n_trials, n_features).<|endoftext|>
617dbe28c6dde4b27b8cd1d4c32bcaa6225969de8cb35385b5032334dc479795
def is_leaf(self):
    """True iff this node has no children (all four child slots are None)."""
    children = (self.child1, self.child2, self.child3, self.child4)
    return all(child is None for child in children)
true, if and only if this node is a leaf, i.e. has no children
wordle/trees.py
is_leaf
felicitywk/visualization
17
python
def is_leaf(self): ' ' return ((self.child1 is None) and (self.child2 is None) and (self.child3 is None) and (self.child4 is None))
def is_leaf(self): ' ' return ((self.child1 is None) and (self.child2 is None) and (self.child3 is None) and (self.child4 is None))<|docstring|>true, if and only if this node is a leaf, i.e. has no children<|endoftext|>
d579ba05e9f043b5da3756ebf7ff912e793742b8689914127942d4888678d6ea
def get_children_list(self):
    """Return the non-None children as a list (empty list for a leaf)."""
    candidates = (self.child1, self.child2, self.child3, self.child4)
    return [child for child in candidates if child is not None]
return the list of children nodes, if any, otherwise an empty list
wordle/trees.py
get_children_list
felicitywk/visualization
17
python
def get_children_list(self): ' ' c = [] if (self.child1 is not None): c.append(self.child1) if (self.child2 is not None): c.append(self.child2) if (self.child3 is not None): c.append(self.child3) if (self.child4 is not None): c.append(self.child4) return c
def get_children_list(self): ' ' c = [] if (self.child1 is not None): c.append(self.child1) if (self.child2 is not None): c.append(self.child2) if (self.child3 is not None): c.append(self.child3) if (self.child4 is not None): c.append(self.child4) return c<|docstring|>return the list of children nodes, if any, otherwise an empty list<|endoftext|>
67445fc9c4b27d90c08e70e6b5e3597309e2f5de829ceefeef13deb42e1ece01
def all_children_are_leafs(self):
    """True iff no child of this node is a non-leaf node."""
    for child in (self.child1, self.child2, self.child3, self.child4):
        if child is not None and not child.is_leaf():
            return False
    return True
True, if and only if no child of this node is a non-leaf node
wordle/trees.py
all_children_are_leafs
felicitywk/visualization
17
python
def all_children_are_leafs(self): ' ' if (self.child1 is not None): if (not self.child1.is_leaf()): return False if (self.child2 is not None): if (not self.child2.is_leaf()): return False if (self.child3 is not None): if (not self.child3.is_leaf()): return False if (self.child4 is not None): if (not self.child4.is_leaf()): return False return True
def all_children_are_leafs(self): ' ' if (self.child1 is not None): if (not self.child1.is_leaf()): return False if (self.child2 is not None): if (not self.child2.is_leaf()): return False if (self.child3 is not None): if (not self.child3.is_leaf()): return False if (self.child4 is not None): if (not self.child4.is_leaf()): return False return True<|docstring|>True, if and only if no child of this node is a non-leaf node<|endoftext|>
9a0c2d6e857b178aa5a670b96d425e69b4b53d70867faaf889fe7132079ddff3
def __init__(self, root): ' root is a QuadTreeNode that serves as the root of this tree ' self.root = root
root is a QuadTreeNode that serves as the root of this tree
wordle/trees.py
__init__
felicitywk/visualization
17
python
def __init__(self, root): ' ' self.root = root
def __init__(self, root): ' ' self.root = root<|docstring|>root is a QuadTreeNode that serves as the root of this tree<|endoftext|>
5304053c230f3bdc6144232eac61bc6a4cd1d5798c579c7a61f0ff350aebedce
def get_leaf_list(self):
    """Return the leaves of the tree as a list (breadth-first order).

    Note: the root itself is never included, matching the original
    level-by-level walk that starts from the root's children.
    """
    if self.root is None:
        return []
    leaves = []
    frontier = self.root.get_children_list()
    while frontier:
        next_frontier = []
        for node in frontier:
            if node.is_leaf():
                leaves.append(node)
            else:
                next_frontier.extend(node.get_children_list())
        frontier = next_frontier
    return leaves
returns the leaves of the tree as a list
wordle/trees.py
get_leaf_list
felicitywk/visualization
17
python
def get_leaf_list(self): ' ' if (self.root is None): return [] res = [] c = self.root.get_children_list() while c: c1 = [] for x in c: if x.is_leaf(): res.append(x) else: for u in x.get_children_list(): c1.append(u) c = c1 return res
def get_leaf_list(self): ' ' if (self.root is None): return [] res = [] c = self.root.get_children_list() while c: c1 = [] for x in c: if x.is_leaf(): res.append(x) else: for u in x.get_children_list(): c1.append(u) c = c1 return res<|docstring|>returns the leaves of the tree as a list<|endoftext|>
2659605f8ca88eea2e35f8f37b406bed4edab6cc94f3e845ef46f4daa012c212
def get_number_of_nodes(self):
    """Return the total number of nodes in this tree (0 for an empty tree)."""
    if self.root is None:
        return 0
    count = 1  # the root itself
    frontier = self.root.get_children_list()
    while frontier:
        count += len(frontier)
        next_frontier = []
        for node in frontier:
            if not node.is_leaf():
                next_frontier.extend(node.get_children_list())
        frontier = next_frontier
    return count
get the total number of nodes of this tree
wordle/trees.py
get_number_of_nodes
felicitywk/visualization
17
python
def get_number_of_nodes(self): ' ' if (self.root is None): return 0 res = 1 c = self.root.get_children_list() while c: c1 = [] res += len(c) for x in c: if (not x.is_leaf()): for u in x.get_children_list(): c1.append(u) c = c1 return res
def get_number_of_nodes(self): ' ' if (self.root is None): return 0 res = 1 c = self.root.get_children_list() while c: c1 = [] res += len(c) for x in c: if (not x.is_leaf()): for u in x.get_children_list(): c1.append(u) c = c1 return res<|docstring|>get the total number of nodes of this tree<|endoftext|>
4d903d3a026c4817add6ca89608c628e3918e9df1d597a2c33be597558f6e1a7
def get_node_value_list(self, output=False):
    """Traverse the tree breadth-first and return the values of all nodes.

    When `output` is True, print each value (indented by depth).
    """
    if self.root is None:
        if output:
            print('The tree is empty', flush=True)
        return []
    values = [self.root.value]
    frontier = self.root.get_children_list()
    depth = 0
    while frontier:
        depth += 1
        next_frontier = []
        for node in frontier:
            if output:
                print('{} level {} : {}'.format(' ' * depth, depth, node.value), flush=True)
            values.append(node.value)
            if not node.is_leaf():
                next_frontier.extend(node.get_children_list())
        frontier = next_frontier
    return values
traverses the tree T from the root to its leaves and returns a list of all values of all nodes if output == True, print the values
wordle/trees.py
get_node_value_list
felicitywk/visualization
17
python
def get_node_value_list(self, output=False): '\n traverses the tree T from the root to its leaves and returns a list of all values of all nodes\n if output == True, print the values\n ' if (self.root is None): if output: print('The tree is empty', flush=True) return [] res = [self.root.value] c = self.root.get_children_list() i = 0 while c: i += 1 c1 = [] for x in c: if output: print('{} level {} : {}'.format((' ' * i), i, x.value), flush=True) res.append(x.value) if (not x.is_leaf()): for u in x.get_children_list(): c1.append(u) c = c1 return res
def get_node_value_list(self, output=False): '\n traverses the tree T from the root to its leaves and returns a list of all values of all nodes\n if output == True, print the values\n ' if (self.root is None): if output: print('The tree is empty', flush=True) return [] res = [self.root.value] c = self.root.get_children_list() i = 0 while c: i += 1 c1 = [] for x in c: if output: print('{} level {} : {}'.format((' ' * i), i, x.value), flush=True) res.append(x.value) if (not x.is_leaf()): for u in x.get_children_list(): c1.append(u) c = c1 return res<|docstring|>traverses the tree T from the root to its leaves and returns a list of all values of all nodes if output == True, print the values<|endoftext|>
c7866468a26c224eab7edc44c2fc32183a156d52d77ca18e3d3c4f1aca000840
def compress(self):
    """
    Compresses the tree T by removing all leaves whose siblings
    are leafs and whose parents have reached their full capacity,
    i.e. have MAX number of children (2 or 4 in our case).
    Performs this process from bottom-up until there is nothing to remove.
    """
    # Empty tree: nothing to compress.
    if (self.root is None):
        return
    # Phase 1: breadth-first walk recording each level's nodes.
    current_level = [self.root]
    # NOTE(review): this first entry is [[root]] (doubly nested) while every
    # later entry is a flat node list. It looks unintentional but is harmless:
    # the removal loop below never visits index 0 -- confirm.
    nodes_at_level = [[current_level]]
    while True:
        c = []
        for i in range(len(current_level)):
            x = current_level[i]
            if (x.child1 is not None):
                c.append(x.child1)
            if (x.child2 is not None):
                c.append(x.child2)
            if (x.child3 is not None):
                c.append(x.child3)
            if (x.child4 is not None):
                c.append(x.child4)
        # Stop once a level produces no children.
        if (not c):
            break
        nodes_at_level.append(c)
        current_level = c[:]
    # Phase 2: walk the recorded levels bottom-up (skipping index 0) and, for
    # each node's parent that is "full" and has only leaf children, detach all
    # of that parent's children (turning the parent into a leaf).
    # NOTE(review): despite the docstring, this is a single bottom-up pass, not
    # a repeat-until-fixpoint loop; `parent` and `node_is_full` are defined
    # outside this view -- confirm their semantics.
    for i in range((len(nodes_at_level) - 1), 0, (- 1)):
        for n in nodes_at_level[i]:
            if (n is None):
                continue
            p = n.parent
            if p.node_is_full:
                if p.all_children_are_leafs():
                    p.child1 = None
                    p.child2 = None
                    p.child3 = None
                    p.child4 = None
Compresses the tree T by removing all leaves whose siblings are leafs and whose parents have reached their full capacity, i.e. have MAX number of children (2 or 4 in our case). Performs this process from bottom-up until there is nothing to remove.
wordle/trees.py
compress
felicitywk/visualization
17
python
def compress(self): '\n Compresses the tree T by removing all leaves whose siblings\n are leafs and whose parents have reached their full capacity,\n i.e. have MAX number of children (2 or 4 in our case).\n Performs this process from bottom-up until there is nothing to remove.\n ' if (self.root is None): return current_level = [self.root] nodes_at_level = [[current_level]] while True: c = [] for i in range(len(current_level)): x = current_level[i] if (x.child1 is not None): c.append(x.child1) if (x.child2 is not None): c.append(x.child2) if (x.child3 is not None): c.append(x.child3) if (x.child4 is not None): c.append(x.child4) if (not c): break nodes_at_level.append(c) current_level = c[:] for i in range((len(nodes_at_level) - 1), 0, (- 1)): for n in nodes_at_level[i]: if (n is None): continue p = n.parent if p.node_is_full: if p.all_children_are_leafs(): p.child1 = None p.child2 = None p.child3 = None p.child4 = None
def compress(self): '\n Compresses the tree T by removing all leaves whose siblings\n are leafs and whose parents have reached their full capacity,\n i.e. have MAX number of children (2 or 4 in our case).\n Performs this process from bottom-up until there is nothing to remove.\n ' if (self.root is None): return current_level = [self.root] nodes_at_level = [[current_level]] while True: c = [] for i in range(len(current_level)): x = current_level[i] if (x.child1 is not None): c.append(x.child1) if (x.child2 is not None): c.append(x.child2) if (x.child3 is not None): c.append(x.child3) if (x.child4 is not None): c.append(x.child4) if (not c): break nodes_at_level.append(c) current_level = c[:] for i in range((len(nodes_at_level) - 1), 0, (- 1)): for n in nodes_at_level[i]: if (n is None): continue p = n.parent if p.node_is_full: if p.all_children_are_leafs(): p.child1 = None p.child2 = None p.child3 = None p.child4 = None<|docstring|>Compresses the tree T by removing all leaves whose siblings are leafs and whose parents have reached their full capacity, i.e. have MAX number of children (2 or 4 in our case). Performs this process from bottom-up until there is nothing to remove.<|endoftext|>
b5078e9374c6d4f27b5de89a7b5a6a59a3bc19a5f0d2a1d8a6773db221957cad
def area_covered(self):
    """Numerical 2-D area covered by this tree.

    The represented object is the disjoint union of the tree's leaves
    (rectangles), so the covered area is the sum of the leaf rectangle areas.
    """
    return sum(get_rectangle_area(leaf.value) for leaf in self.get_leaf_list())
compute the numerical value of the 2d area covered by this Tree the object represented by this tree is the disjoint union of its leaves; leaves are rectangles, thus we need to compute the sum of the areas of these rectangles
wordle/trees.py
area_covered
felicitywk/visualization
17
python
def area_covered(self): '\n compute the numerical value of the 2d area covered by this Tree\n the object represented by this tree is the disjoint union of its leaves;\n leaves are rectangles, thus we need to compute the sum of the areas of these rectangles\n ' a = 0 c = self.get_leaf_list() for r in c: a += get_rectangle_area(r.value) return a
def area_covered(self): '\n compute the numerical value of the 2d area covered by this Tree\n the object represented by this tree is the disjoint union of its leaves;\n leaves are rectangles, thus we need to compute the sum of the areas of these rectangles\n ' a = 0 c = self.get_leaf_list() for r in c: a += get_rectangle_area(r.value) return a<|docstring|>compute the numerical value of the 2d area covered by this Tree the object represented by this tree is the disjoint union of its leaves; leaves are rectangles, thus we need to compute the sum of the areas of these rectangles<|endoftext|>
1f195b99c0ec3178f4e2fdf5bf7063c9b7798949ad1f307e8c0f9cb9fc5983aa
def brick(self, prv: Brick, cur: Brick, nxt: Brick, is_last: bool, **kwargs) -> Tuple[(float, str)]:
    """
    Draw the symbol of a given Brick element.

    Args:
        prv (Brick): brick preceding the one being drawn
        cur (Brick): brick being drawn
        nxt (Brick): brick following the one being drawn
        is_last (bool): True when `cur` is the last brick of the lane
    Returns:
        Tuple[float, str]: the width rounding error (``(width - round(width)) * 8``),
        which the caller folds into the next brick's width, and the
        unicode text rendering of this brick
    """
    # 'x' (don't-care) bricks may shrink to a single cell; every other
    # shape needs at least 4 cells to stay legible.
    min_width = (1 if (cur.symbol == 'x') else 4)
    # Scale the brick's logical width to the available terminal columns.
    width = max((((self.width - self.offsetx) * cur.width) / self.draw_width), min_width)
    # Round to whole cells, keeping the remainder for the caller to carry.
    (error_width, width) = (((width - round(width)) * 8), round(width))
    half_width = (width // 2)
    # Transition lookup key: previous, current and next symbols concatenated.
    sequence = ''.join([prv.symbol, cur.symbol, nxt.symbol])
    # Data label (used by '=' bus bricks): centred when short, truncated when long.
    data = (str(cur.args.get('data', '')) or (' ' * (width - 1)))
    if (len(data) < (width - 1)):
        spaces_left = (' ' * (((width - 1) - len(data)) // 2))
        spaces_right = (' ' * (((width - 1) - len(data)) - len(spaces_left)))
        data = ((spaces_left + data) + spaces_right)
    else:
        data = data[:(width - 1)]
    # Rendering table: 1-char keys are context-free shapes of a symbol;
    # 2-char keys render the transition from the previous symbol into the
    # current one.
    sequences = {
        '0': ('▁' * width),
        'z': ('─' * width),
        'x': ('╳' * width),
        '1': ('▔' * width),
        'u': ('⎧' + ('▔' * (width - 1))),
        'd': ('⎩' + ('▁' * (width - 1))),
        'm': (('∿' * (width - 1)) + '╮'),
        'M': (('∿' * (width - 1)) + '╯'),
        'p': ((('╱' + ('▔' * (half_width - 1))) + '╲') + ('▁' * ((width - half_width) - 1))),
        'n': ((('╲' + ('▁' * (half_width - 1))) + '╱') + ('▔' * ((width - half_width) - 1))),
        '=': ('❬' + data),
        '00': ('▁' * width),
        '0z': ('╭' + ('─' * (width - 1))),
        '0x': ('╱' + ('╳' * (width - 1))),
        '01': ('╱' + ('▔' * (width - 1))),
        '0m': (('╭' + ('∿' * (width - 2))) + '╮'),
        '0M': (('╭' + ('∿' * (width - 2))) + '╯'),
        'up': ((('▔' + ('▔' * half_width)) + '╲') + ('▁' * ((width - half_width) - 1))),
        'd1': ('╱' + ('▔' * (width - 1))),
        'dn': ((('▁' + ('▁' * (half_width - 1))) + '╱') + ('▔' * ((width - half_width) - 1))),
        '0n': ((('▁' + ('▁' * half_width)) + '╱') + ('▔' * ((width - half_width) - 1))),
        '0=': ('╱' + data),
        'z0': ('╮' + ('▁' * (width - 1))),
        'zx': ('⧼' + ('╳' * (width - 1))),
        'z1': ('╯' + ('▔' * (width - 1))),
        'zp': ((('╯' + ('▔' * (half_width - 1))) + '╲') + ('▁' * ((width - half_width) - 1))),
        'zn': ((('╮' + ('▁' * (half_width - 1))) + '╱') + ('▔' * ((width - half_width) - 1))),
        'z=': ('⧼' + data),
        'x0': ('╲' + ('▁' * (width - 1))),
        'xz': ('⧽' + ('─' * (width - 1))),
        'x1': ('╱' + ('▔' * (width - 1))),
        'xm': (('╳' + ('∿' * (width - 2))) + '╮'),
        'xM': (('╳' + ('∿' * (width - 2))) + '╯'),
        'x=': ('╳' + data),
        '10': ('╲' + ('▁' * (width - 1))),
        '1z': ('╰' + ('─' * (width - 1))),
        '1x': ('╲' + ('╳' * (width - 1))),
        '1m': (('╰' + ('∿' * (width - 2))) + '╮'),
        '1M': (('╰' + ('∿' * (width - 2))) + '╯'),
        '11': ('▔' * width),
        '1p': ((('▔' + ('▔' * (half_width - 1))) + '╲') + ('▁' * ((width - half_width) - 1))),
        '1=': ('╲' + data),
        'p1': ('╱' + ('▔' * (width - 1))),
        'pz': ('╭' + ('─' * (width - 1))),
        'px': ('╱' + ('╳' * (width - 1))),
        'pd': ('▁' * width),
        'pm': (('╭' + ('∿' * (width - 2))) + '╮'),
        'pM': (('╭' + ('∿' * (width - 2))) + '╯'),
        'pn': ((('▁' + ('▁' * (half_width - 1))) + '╱') + ('▔' * ((width - half_width) - 1))),
        'p=': ('╱' + data),
        'n0': ('╲' + ('▁' * (width - 1))),
        'np': ((('▔' + ('▔' * (half_width - 1))) + '╲') + ('▁' * ((width - half_width) - 1))),
        'nx': ('╲' + ('╳' * (width - 1))),
        'nu': ('▔' * width),
        'nm': (('╰' + ('∿' * (width - 2))) + '╮'),
        'nM': (('╰' + ('∿' * (width - 2))) + '╯'),
        'n=': ('╲' + data),
        'mn': ((('▁' + ('▁' * (half_width - 1))) + '╱') + ('▔' * ((width - half_width) - 1))),
        'Mp': ((('▔' + ('▔' * (half_width - 1))) + '╲') + ('▁' * ((width - half_width) - 1))),
        '=0': ('╲' + ('▁' * (width - 1))),
        '=z': ('⧽' + ('─' * (width - 1))),
        '=x': ('╳' * width),
        '=1': ('╱' + ('▔' * (width - 1))),
        '==': ('╳' + data)
    }
    # Lookup order: full 3-symbol sequence, then the (prv, cur) pair,
    # then (cur, nxt) with a blank previous symbol, then the lone symbol.
    text = sequences.get(sequence, sequences.get(sequence[0:2], sequences.get((' ' + sequence[1:]), sequences.get(cur.symbol, ''))))
    # Highlight bus data with ANSI colors (white background, black
    # foreground) after the leading transition character.
    if (cur.symbol == '='):
        text = ((text[0] + '\x1b[47m\x1b[30m') + text[1:])
    # Reset background/foreground colors after every brick but the last.
    if (not is_last):
        text += '\x1b[49m\x1b[39m'
    return (error_width, text)
Draw the symbol of a given Brick element
undulate/renderers/termrenderer.py
brick
LudwigCRON/WavedromAnnotation
27
python
def brick(self, prv: Brick, cur: Brick, nxt: Brick, is_last: bool, **kwargs) -> Tuple[(float, str)]: '\n \n ' min_width = (1 if (cur.symbol == 'x') else 4) width = max((((self.width - self.offsetx) * cur.width) / self.draw_width), min_width) (error_width, width) = (((width - round(width)) * 8), round(width)) half_width = (width // 2) sequence = .join([prv.symbol, cur.symbol, nxt.symbol]) data = (str(cur.args.get('data', )) or (' ' * (width - 1))) if (len(data) < (width - 1)): spaces_left = (' ' * (((width - 1) - len(data)) // 2)) spaces_right = (' ' * (((width - 1) - len(data)) - len(spaces_left))) data = ((spaces_left + data) + spaces_right) else: data = data[:(width - 1)] sequences = {'0': ('▁' * width), 'z': ('─' * width), 'x': ('╳' * width), '1': ('▔' * width), 'u': ('⎧' + ('▔' * (width - 1))), 'd': ('⎩' + ('▁' * (width - 1))), 'm': (('∿' * (width - 1)) + '╮'), 'M': (('∿' * (width - 1)) + '╯'), 'p': ((('╱' + ('▔' * (half_width - 1))) + '╲') + ('▁' * ((width - half_width) - 1))), 'n': ((('╲' + ('▁' * (half_width - 1))) + '╱') + ('▔' * ((width - half_width) - 1))), '=': ('❬' + data), '00': ('▁' * width), '0z': ('╭' + ('─' * (width - 1))), '0x': ('╱' + ('╳' * (width - 1))), '01': ('╱' + ('▔' * (width - 1))), '0m': (('╭' + ('∿' * (width - 2))) + '╮'), '0M': (('╭' + ('∿' * (width - 2))) + '╯'), 'up': ((('▔' + ('▔' * half_width)) + '╲') + ('▁' * ((width - half_width) - 1))), 'd1': ('╱' + ('▔' * (width - 1))), 'dn': ((('▁' + ('▁' * (half_width - 1))) + '╱') + ('▔' * ((width - half_width) - 1))), '0n': ((('▁' + ('▁' * half_width)) + '╱') + ('▔' * ((width - half_width) - 1))), '0=': ('╱' + data), 'z0': ('╮' + ('▁' * (width - 1))), 'zx': ('⧼' + ('╳' * (width - 1))), 'z1': ('╯' + ('▔' * (width - 1))), 'zp': ((('╯' + ('▔' * (half_width - 1))) + '╲') + ('▁' * ((width - half_width) - 1))), 'zn': ((('╮' + ('▁' * (half_width - 1))) + '╱') + ('▔' * ((width - half_width) - 1))), 'z=': ('⧼' + data), 'x0': ('╲' + ('▁' * (width - 1))), 'xz': ('⧽' + ('─' * (width - 1))), 'x1': ('╱' 
+ ('▔' * (width - 1))), 'xm': (('╳' + ('∿' * (width - 2))) + '╮'), 'xM': (('╳' + ('∿' * (width - 2))) + '╯'), 'x=': ('╳' + data), '10': ('╲' + ('▁' * (width - 1))), '1z': ('╰' + ('─' * (width - 1))), '1x': ('╲' + ('╳' * (width - 1))), '1m': (('╰' + ('∿' * (width - 2))) + '╮'), '1M': (('╰' + ('∿' * (width - 2))) + '╯'), '11': ('▔' * width), '1p': ((('▔' + ('▔' * (half_width - 1))) + '╲') + ('▁' * ((width - half_width) - 1))), '1=': ('╲' + data), 'p1': ('╱' + ('▔' * (width - 1))), 'pz': ('╭' + ('─' * (width - 1))), 'px': ('╱' + ('╳' * (width - 1))), 'pd': ('▁' * width), 'pm': (('╭' + ('∿' * (width - 2))) + '╮'), 'pM': (('╭' + ('∿' * (width - 2))) + '╯'), 'pn': ((('▁' + ('▁' * (half_width - 1))) + '╱') + ('▔' * ((width - half_width) - 1))), 'p=': ('╱' + data), 'n0': ('╲' + ('▁' * (width - 1))), 'np': ((('▔' + ('▔' * (half_width - 1))) + '╲') + ('▁' * ((width - half_width) - 1))), 'nx': ('╲' + ('╳' * (width - 1))), 'nu': ('▔' * width), 'nm': (('╰' + ('∿' * (width - 2))) + '╮'), 'nM': (('╰' + ('∿' * (width - 2))) + '╯'), 'n=': ('╲' + data), 'mn': ((('▁' + ('▁' * (half_width - 1))) + '╱') + ('▔' * ((width - half_width) - 1))), 'Mp': ((('▔' + ('▔' * (half_width - 1))) + '╲') + ('▁' * ((width - half_width) - 1))), '=0': ('╲' + ('▁' * (width - 1))), '=z': ('⧽' + ('─' * (width - 1))), '=x': ('╳' * width), '=1': ('╱' + ('▔' * (width - 1))), '==': ('╳' + data)} text = sequences.get(sequence, sequences.get(sequence[0:2], sequences.get((' ' + sequence[1:]), sequences.get(cur.symbol, )))) if (cur.symbol == '='): text = ((text[0] + '\x1b[47m\x1b[30m') + text[1:]) if (not is_last): text += '\x1b[49m\x1b[39m' return (error_width, text)
def brick(self, prv: Brick, cur: Brick, nxt: Brick, is_last: bool, **kwargs) -> Tuple[(float, str)]: '\n \n ' min_width = (1 if (cur.symbol == 'x') else 4) width = max((((self.width - self.offsetx) * cur.width) / self.draw_width), min_width) (error_width, width) = (((width - round(width)) * 8), round(width)) half_width = (width // 2) sequence = .join([prv.symbol, cur.symbol, nxt.symbol]) data = (str(cur.args.get('data', )) or (' ' * (width - 1))) if (len(data) < (width - 1)): spaces_left = (' ' * (((width - 1) - len(data)) // 2)) spaces_right = (' ' * (((width - 1) - len(data)) - len(spaces_left))) data = ((spaces_left + data) + spaces_right) else: data = data[:(width - 1)] sequences = {'0': ('▁' * width), 'z': ('─' * width), 'x': ('╳' * width), '1': ('▔' * width), 'u': ('⎧' + ('▔' * (width - 1))), 'd': ('⎩' + ('▁' * (width - 1))), 'm': (('∿' * (width - 1)) + '╮'), 'M': (('∿' * (width - 1)) + '╯'), 'p': ((('╱' + ('▔' * (half_width - 1))) + '╲') + ('▁' * ((width - half_width) - 1))), 'n': ((('╲' + ('▁' * (half_width - 1))) + '╱') + ('▔' * ((width - half_width) - 1))), '=': ('❬' + data), '00': ('▁' * width), '0z': ('╭' + ('─' * (width - 1))), '0x': ('╱' + ('╳' * (width - 1))), '01': ('╱' + ('▔' * (width - 1))), '0m': (('╭' + ('∿' * (width - 2))) + '╮'), '0M': (('╭' + ('∿' * (width - 2))) + '╯'), 'up': ((('▔' + ('▔' * half_width)) + '╲') + ('▁' * ((width - half_width) - 1))), 'd1': ('╱' + ('▔' * (width - 1))), 'dn': ((('▁' + ('▁' * (half_width - 1))) + '╱') + ('▔' * ((width - half_width) - 1))), '0n': ((('▁' + ('▁' * half_width)) + '╱') + ('▔' * ((width - half_width) - 1))), '0=': ('╱' + data), 'z0': ('╮' + ('▁' * (width - 1))), 'zx': ('⧼' + ('╳' * (width - 1))), 'z1': ('╯' + ('▔' * (width - 1))), 'zp': ((('╯' + ('▔' * (half_width - 1))) + '╲') + ('▁' * ((width - half_width) - 1))), 'zn': ((('╮' + ('▁' * (half_width - 1))) + '╱') + ('▔' * ((width - half_width) - 1))), 'z=': ('⧼' + data), 'x0': ('╲' + ('▁' * (width - 1))), 'xz': ('⧽' + ('─' * (width - 1))), 'x1': ('╱' 
+ ('▔' * (width - 1))), 'xm': (('╳' + ('∿' * (width - 2))) + '╮'), 'xM': (('╳' + ('∿' * (width - 2))) + '╯'), 'x=': ('╳' + data), '10': ('╲' + ('▁' * (width - 1))), '1z': ('╰' + ('─' * (width - 1))), '1x': ('╲' + ('╳' * (width - 1))), '1m': (('╰' + ('∿' * (width - 2))) + '╮'), '1M': (('╰' + ('∿' * (width - 2))) + '╯'), '11': ('▔' * width), '1p': ((('▔' + ('▔' * (half_width - 1))) + '╲') + ('▁' * ((width - half_width) - 1))), '1=': ('╲' + data), 'p1': ('╱' + ('▔' * (width - 1))), 'pz': ('╭' + ('─' * (width - 1))), 'px': ('╱' + ('╳' * (width - 1))), 'pd': ('▁' * width), 'pm': (('╭' + ('∿' * (width - 2))) + '╮'), 'pM': (('╭' + ('∿' * (width - 2))) + '╯'), 'pn': ((('▁' + ('▁' * (half_width - 1))) + '╱') + ('▔' * ((width - half_width) - 1))), 'p=': ('╱' + data), 'n0': ('╲' + ('▁' * (width - 1))), 'np': ((('▔' + ('▔' * (half_width - 1))) + '╲') + ('▁' * ((width - half_width) - 1))), 'nx': ('╲' + ('╳' * (width - 1))), 'nu': ('▔' * width), 'nm': (('╰' + ('∿' * (width - 2))) + '╮'), 'nM': (('╰' + ('∿' * (width - 2))) + '╯'), 'n=': ('╲' + data), 'mn': ((('▁' + ('▁' * (half_width - 1))) + '╱') + ('▔' * ((width - half_width) - 1))), 'Mp': ((('▔' + ('▔' * (half_width - 1))) + '╲') + ('▁' * ((width - half_width) - 1))), '=0': ('╲' + ('▁' * (width - 1))), '=z': ('⧽' + ('─' * (width - 1))), '=x': ('╳' * width), '=1': ('╱' + ('▔' * (width - 1))), '==': ('╳' + data)} text = sequences.get(sequence, sequences.get(sequence[0:2], sequences.get((' ' + sequence[1:]), sequences.get(cur.symbol, )))) if (cur.symbol == '='): text = ((text[0] + '\x1b[47m\x1b[30m') + text[1:]) if (not is_last): text += '\x1b[49m\x1b[39m' return (error_width, text)<|docstring|>Draw the symbol of a given Brick element<|endoftext|>
a430b2369d0a5a76a81ff4fb973fbebcf07751ff86f36c76820b2cff4fcb24b0
def wavelane(self, name: str, wavelane: str, **kwargs) -> str:
    """
    Draw the internal Dict[str, Any] representing a waveform inside a waveform group.

    The internal Dict[str, Any] is expected to have at least the following two keys:

    - name : name of the waveform
    - wavelane : string which describes the waveform

    Args:
        name (str): name of the waveform
        wavelane (str): string of symbols describing the waveform
        extra (str): extra information given to self.group()
        y (float): global y position of the wavelane in the drawing context
    """
    offsetx = kwargs.get('offsetx', 0)
    depth = kwargs.get('depth', 0)
    eol = kwargs.get('eol', '\n')
    # Left margin: hierarchy indentation, the lane name, then padding so
    # that every waveform starts at the same column (offsetx).
    hier_spaces = (' ' * max((depth - 1), 0))
    spaces = (' ' * max((((offsetx - len(name)) - len(hier_spaces)) + 1), 1))
    print(f'{hier_spaces}{name}{spaces}', end='')
    _wavelane = self._reduce_wavelane(name, wavelane, [], **kwargs)
    # Collapse symbol aliases into the canonical subset drawn by brick():
    # digits/buses -> '=', low levels -> '0', high levels -> '1',
    # negative clocks -> 'n', positive clocks -> 'p'.
    for (i, w) in enumerate(_wavelane):
        if (w.symbol in '=23456789'):
            _wavelane[i].symbol = '='
        if (w.symbol in '0lL'):
            _wavelane[i].symbol = '0'
        if (w.symbol in '1hH'):
            _wavelane[i].symbol = '1'
        if (w.symbol in 'nN'):
            _wavelane[i].symbol = 'n'
        if (w.symbol in 'pP'):
            _wavelane[i].symbol = 'p'

    def previous_and_next(some_iterable):
        # Yield (previous, current, next) triplets over the lane,
        # padding both ends with a blank ' ' brick.
        (prevs, items, nexts) = tee(some_iterable, 3)
        prevs = chain([BrickFactory.create(' ')], prevs)
        nexts = chain(islice(nexts, 1, None), [BrickFactory.create(' ')])
        return zip(prevs, items, nexts)
    wave = []
    width_error = 0.0
    # Render each brick with knowledge of its neighbours; the rounding
    # error returned by brick() is folded into the next brick's width.
    for (prv, cur, nxt) in previous_and_next(_wavelane):
        cur.width += width_error
        (width_error, text) = self.brick(prv, cur, nxt, is_last=(nxt.symbol == ' '), **kwargs)
        wave.append(text)
    wave = ''.join(wave)
    # The ANSI escape sequences emitted by brick() are 5 characters each
    # (ESC included) and occupy no visible columns, so discount them when
    # checking the rendered line against the terminal width.
    nb_ctrl = sum(((5 if (c == '\x1b') else 0) for c in wave))
    if ((len(wave) - nb_ctrl) > ((self.width - offsetx) - 1)):
        # Too wide for the terminal: truncate and mark with an ellipsis.
        print(wave[:(((self.width + nb_ctrl) - offsetx) - 2)], end=f'⋯{eol}')
    else:
        print(wave, end=f'{eol}')
Draw the internal Dict[str, Any] representing a waveform inside a waveform group. the internal Dict[str, Any] is expected to have at least the following two keys: - name : name of the waveform - wavelane : string which describes the waveform Args: name (str): name of the waveform wavelane (str): string of symbols describing the waveform extra (str): extra information given to self.group() y (float): global y position of the wavelane in the drawing context
undulate/renderers/termrenderer.py
wavelane
LudwigCRON/WavedromAnnotation
27
python
def wavelane(self, name: str, wavelane: str, **kwargs) -> str: '\n Draw the internal Dict[str, Any] representing a waveform inside a waveform group.\n\n the internal Dict[str, Any] is expected to have at least the following two keys:\n\n - name : name of the waveform\n - wavelane : string which describes the waveform\n\n Args:\n name (str): name of the waveform\n wavelane (str): string of symbols describing the waveform\n extra (str): extra information given to self.group()\n y (float): global y position of the wavelane in the drawing context\n ' offsetx = kwargs.get('offsetx', 0) depth = kwargs.get('depth', 0) eol = kwargs.get('eol', '\n') hier_spaces = (' ' * max((depth - 1), 0)) spaces = (' ' * max((((offsetx - len(name)) - len(hier_spaces)) + 1), 1)) print(f'{hier_spaces}{name}{spaces}', end=) _wavelane = self._reduce_wavelane(name, wavelane, [], **kwargs) for (i, w) in enumerate(_wavelane): if (w.symbol in '=23456789'): _wavelane[i].symbol = '=' if (w.symbol in '0lL'): _wavelane[i].symbol = '0' if (w.symbol in '1hH'): _wavelane[i].symbol = '1' if (w.symbol in 'nN'): _wavelane[i].symbol = 'n' if (w.symbol in 'pP'): _wavelane[i].symbol = 'p' def previous_and_next(some_iterable): (prevs, items, nexts) = tee(some_iterable, 3) prevs = chain([BrickFactory.create(' ')], prevs) nexts = chain(islice(nexts, 1, None), [BrickFactory.create(' ')]) return zip(prevs, items, nexts) wave = [] width_error = 0.0 for (prv, cur, nxt) in previous_and_next(_wavelane): cur.width += width_error (width_error, text) = self.brick(prv, cur, nxt, is_last=(nxt.symbol == ' '), **kwargs) wave.append(text) wave = .join(wave) nb_ctrl = sum(((5 if (c == '\x1b') else 0) for c in wave)) if ((len(wave) - nb_ctrl) > ((self.width - offsetx) - 1)): print(wave[:(((self.width + nb_ctrl) - offsetx) - 2)], end=f'⋯{eol}') else: print(wave, end=f'{eol}')
def wavelane(self, name: str, wavelane: str, **kwargs) -> str: '\n Draw the internal Dict[str, Any] representing a waveform inside a waveform group.\n\n the internal Dict[str, Any] is expected to have at least the following two keys:\n\n - name : name of the waveform\n - wavelane : string which describes the waveform\n\n Args:\n name (str): name of the waveform\n wavelane (str): string of symbols describing the waveform\n extra (str): extra information given to self.group()\n y (float): global y position of the wavelane in the drawing context\n ' offsetx = kwargs.get('offsetx', 0) depth = kwargs.get('depth', 0) eol = kwargs.get('eol', '\n') hier_spaces = (' ' * max((depth - 1), 0)) spaces = (' ' * max((((offsetx - len(name)) - len(hier_spaces)) + 1), 1)) print(f'{hier_spaces}{name}{spaces}', end=) _wavelane = self._reduce_wavelane(name, wavelane, [], **kwargs) for (i, w) in enumerate(_wavelane): if (w.symbol in '=23456789'): _wavelane[i].symbol = '=' if (w.symbol in '0lL'): _wavelane[i].symbol = '0' if (w.symbol in '1hH'): _wavelane[i].symbol = '1' if (w.symbol in 'nN'): _wavelane[i].symbol = 'n' if (w.symbol in 'pP'): _wavelane[i].symbol = 'p' def previous_and_next(some_iterable): (prevs, items, nexts) = tee(some_iterable, 3) prevs = chain([BrickFactory.create(' ')], prevs) nexts = chain(islice(nexts, 1, None), [BrickFactory.create(' ')]) return zip(prevs, items, nexts) wave = [] width_error = 0.0 for (prv, cur, nxt) in previous_and_next(_wavelane): cur.width += width_error (width_error, text) = self.brick(prv, cur, nxt, is_last=(nxt.symbol == ' '), **kwargs) wave.append(text) wave = .join(wave) nb_ctrl = sum(((5 if (c == '\x1b') else 0) for c in wave)) if ((len(wave) - nb_ctrl) > ((self.width - offsetx) - 1)): print(wave[:(((self.width + nb_ctrl) - offsetx) - 2)], end=f'⋯{eol}') else: print(wave, end=f'{eol}')<|docstring|>Draw the internal Dict[str, Any] representing a waveform inside a waveform group. 
the internal Dict[str, Any] is expected to have at least the following two keys: - name : name of the waveform - wavelane : string which describes the waveform Args: name (str): name of the waveform wavelane (str): string of symbols describing the waveform extra (str): extra information given to self.group() y (float): global y position of the wavelane in the drawing context<|endoftext|>
d180f1e85d8f966275e316883cc0c3578fd3ed5b86841744ccee7a4080ea3cf6
def wavegroup(self, name: str, wavelanes, depth: int=1, **kwargs) -> str:
    """
    Draw a (possibly nested) group of waveforms.

    A child entry holding a 'wave' key is rendered as a single lane via
    self.wavelane(); any other entry is treated as a nested group and
    rendered recursively one level deeper.

    Args:
        name (str): name of the waveform group (blank names print no header)
        wavelanes (Dict[str, dict]): named waveforms composing the group
        depth (int): nesting depth used for hierarchy indentation
    Parameters:
        config (Dict[str, Any]): config section of the input file
        brick_width (float): width of a brick, default is 20.0
        brick_height (float): height of a brick, default is 20.0
        width (float): image width
        height (float): image height
    """
    if name.strip():
        # Group header, indented one column per nesting level.
        indent = ' ' * max(depth - 1, 0)
        print(indent + name + ':')
    child_depth = depth + 1
    for label, content in wavelanes.items():
        if 'wave' not in content:
            # Nested group: recurse one level deeper.
            self.wavegroup(label, content, depth=child_depth, **kwargs)
            continue
        # Leaf lane: merge the inherited drawing options into the lane
        # dict and hand everything to wavelane().
        content.update(**kwargs)
        content['depth'] = child_depth
        self.wavelane(label, content.get('wave', []), **content)
Draw a group of waveforms Args: name (str) : name of the waveform group wavelanes (Dict[str, dict]): named waveforms composing the group depth (int) : depth of nested groups to represent hierarchy Parameters: config (Dict[str, Any]): config section of the input file brick_width (float): width of a brick, default is 20.0 brick_height (float): height of a brick, default is 20.0 width (float): image width height (float): image height
undulate/renderers/termrenderer.py
wavegroup
LudwigCRON/WavedromAnnotation
27
python
def wavegroup(self, name: str, wavelanes, depth: int=1, **kwargs) -> str: '\n Draw a group of waveforms\n\n Args:\n name (str) : name of the waveform group\n wavelanes (Dict[str, dict]): named waveforms composing the group\n depth (int) : depth of nested groups to represent hierarchy\n Parameters:\n config (Dict[str, Any]): config section of the input file\n brick_width (float): width of a brick, default is 20.0\n brick_height (float): height of a brick, default is 20.0\n width (float): image width\n height (float): image height\n ' if name.strip(): hier_spaces = (' ' * max((depth - 1), 0)) print(f'{hier_spaces}{name}:') for (wavename, wavelane) in wavelanes.items(): if ('wave' in wavelane): wavelane.update(**kwargs) wavelane['depth'] = (depth + 1) self.wavelane(wavename, wavelane.get('wave', []), **wavelane) else: self.wavegroup(wavename, wavelane, depth=(depth + 1), **kwargs)
def wavegroup(self, name: str, wavelanes, depth: int=1, **kwargs) -> str: '\n Draw a group of waveforms\n\n Args:\n name (str) : name of the waveform group\n wavelanes (Dict[str, dict]): named waveforms composing the group\n depth (int) : depth of nested groups to represent hierarchy\n Parameters:\n config (Dict[str, Any]): config section of the input file\n brick_width (float): width of a brick, default is 20.0\n brick_height (float): height of a brick, default is 20.0\n width (float): image width\n height (float): image height\n ' if name.strip(): hier_spaces = (' ' * max((depth - 1), 0)) print(f'{hier_spaces}{name}:') for (wavename, wavelane) in wavelanes.items(): if ('wave' in wavelane): wavelane.update(**kwargs) wavelane['depth'] = (depth + 1) self.wavelane(wavename, wavelane.get('wave', []), **wavelane) else: self.wavegroup(wavename, wavelane, depth=(depth + 1), **kwargs)<|docstring|>Draw a group of waveforms Args: name (str) : name of the waveform group wavelanes (Dict[str, dict]): named waveforms composing the group depth (int) : depth of nested groups to represent hierarchy Parameters: config (Dict[str, Any]): config section of the input file brick_width (float): width of a brick, default is 20.0 brick_height (float): height of a brick, default is 20.0 width (float): image width height (float): image height<|endoftext|>
a0d5427d29b5b2db9216dfd50b1b45ddba3e2dc97541335fa16447909b9b5868
def draw(self, wavelanes: dict, **kwargs) -> str:
    """
    Entry point of the renderer: size the drawing, then emit every group.

    Args:
        wavelanes (dict): parsed dictionary from the input file
        filename (str): file name of the output generated file
        brick_width (int): by default 40
        brick_height (int): by default 20
        is_reg (bool): if True `wavelanes` represents a register,
            otherwise a bunch of signals
    """
    group_id = kwargs.get('id', '')
    # Drop sections that this renderer does not draw.
    for meta_key in ('annotations', 'edges', 'edge', 'config'):
        wavelanes.pop(meta_key, None)
    lkeys, width, height, _ = self.size(wavelanes, **kwargs)
    self.draw_width = width
    # Waveforms start after the longest name plus hierarchy indentation.
    self.offsetx = int(lkeys + self.depth(wavelanes) * 1.75)
    self.wavegroup(
        group_id,
        wavelanes,
        brick_width=kwargs.get('brick_width', 40),
        brick_height=kwargs.get('brick_height', 20),
        width=width,
        height=height,
        eol=kwargs.get('eol', '\n'),
        offsetx=self.offsetx,
    )
Business function calling all others Args: wavelanes (dict): parsed dictionary from the input file filename (str) : file name of the output generated file brick_width (int): by default 40 brick_height (int): by default 20 is_reg (bool): if True `wavelanes` given represents a register otherwise it represents a bunch of signals
undulate/renderers/termrenderer.py
draw
LudwigCRON/WavedromAnnotation
27
python
def draw(self, wavelanes: dict, **kwargs) -> str: '\n Business function calling all others\n\n Args:\n wavelanes (dict): parsed dictionary from the input file\n filename (str) : file name of the output generated file\n brick_width (int): by default 40\n brick_height (int): by default 20\n is_reg (bool):\n if True `wavelanes` given represents a register\n otherwise it represents a bunch of signals\n ' _id = kwargs.get('id', ) brick_width = kwargs.get('brick_width', 40) brick_height = kwargs.get('brick_height', 20) eol = kwargs.get('eol', '\n') wavelanes.pop('annotations', None) wavelanes.pop('edges', None) wavelanes.pop('edge', None) wavelanes.pop('config', None) (lkeys, width, height, n) = self.size(wavelanes, **kwargs) self.draw_width = width self.offsetx = int((lkeys + (self.depth(wavelanes) * 1.75))) self.wavegroup(_id, wavelanes, brick_width=brick_width, brick_height=brick_height, width=width, height=height, eol=eol, offsetx=self.offsetx)
def draw(self, wavelanes: dict, **kwargs) -> str: '\n Business function calling all others\n\n Args:\n wavelanes (dict): parsed dictionary from the input file\n filename (str) : file name of the output generated file\n brick_width (int): by default 40\n brick_height (int): by default 20\n is_reg (bool):\n if True `wavelanes` given represents a register\n otherwise it represents a bunch of signals\n ' _id = kwargs.get('id', ) brick_width = kwargs.get('brick_width', 40) brick_height = kwargs.get('brick_height', 20) eol = kwargs.get('eol', '\n') wavelanes.pop('annotations', None) wavelanes.pop('edges', None) wavelanes.pop('edge', None) wavelanes.pop('config', None) (lkeys, width, height, n) = self.size(wavelanes, **kwargs) self.draw_width = width self.offsetx = int((lkeys + (self.depth(wavelanes) * 1.75))) self.wavegroup(_id, wavelanes, brick_width=brick_width, brick_height=brick_height, width=width, height=height, eol=eol, offsetx=self.offsetx)<|docstring|>Business function calling all others Args: wavelanes (dict): parsed dictionary from the input file filename (str) : file name of the output generated file brick_width (int): by default 40 brick_height (int): by default 20 is_reg (bool): if True `wavelanes` given represents a register otherwise it represents a bunch of signals<|endoftext|>
61f2d1d22b0fa107b3e7793e0a6c343b850320222f7bfe0358a374f5d05da64d
def do_back(self, args):
    'Exit the module'
    # Returning True stops the command loop (cmd.Cmd convention,
    # presumably), dropping back to the parent prompt.
    return True
Exit the module
common/modules.py
do_back
grimlyreaper/CICADA
4
python
def do_back(self, args): return True
def do_back(self, args): return True<|docstring|>Exit the module<|endoftext|>
95a13874902e4ca18028ccf4fe32d9df7edf625ffdc50d90ce05298f1d2938fe
def do_exit(self, args):
    'Exit the module'
    # Same effect as do_back: True ends the interactive loop.
    return True
Exit the module
common/modules.py
do_exit
grimlyreaper/CICADA
4
python
def do_exit(self, args): return True
def do_exit(self, args): return True<|docstring|>Exit the module<|endoftext|>
da27d2b5013080cb8db08d84f8f329c34b88073c3f0584625108d6ba1215c038
def do_info(self, args):
    'Give info about module'
    # Delegate to the loaded module so it can render its own description.
    active = self.module
    active.info()
Give info about module
common/modules.py
do_info
grimlyreaper/CICADA
4
python
def do_info(self, args): self.module.info()
def do_info(self, args): self.module.info()<|docstring|>Give info about module<|endoftext|>
9de96981d4b70b499c4beb5a5026a15fec7c1649c5ccc6ea4fc64ad9e98449bb
def do_targets(self, args):
    'Lists of your targets'
    # NOTE(review): returning True terminates the command loop, which is
    # surprising for a listing command — confirm this stub is intended.
    return True
Lists of your targets
common/modules.py
do_targets
grimlyreaper/CICADA
4
python
def do_targets(self, args): return True
def do_targets(self, args): return True<|docstring|>Lists of your targets<|endoftext|>
24248edd61c42325caeed2bac61bd3a9460a47f158833e75d90a3b9f3549f7f1
def do_exploit(self, args):
    'Run the exploit'
    # Pick the attack matching the module's target platform, then fire it.
    attack = self.gitlabAttack if self.module.type == 'gitlab' else self.githubAttack
    attack()
Run the exploit
common/modules.py
do_exploit
grimlyreaper/CICADA
4
python
def do_exploit(self, args): if (self.module.type == 'gitlab'): self.gitlabAttack() else: self.githubAttack()
def do_exploit(self, args): if (self.module.type == 'gitlab'): self.gitlabAttack() else: self.githubAttack()<|docstring|>Run the exploit<|endoftext|>
dd0e4f3d91b1ba45217d8d2e92f8ae8fceac56703103ebd080dde28505f2c191
def do_run(self, args):
    'Run the exploit'
    # 'run' is an alias for 'exploit'; delegate instead of duplicating the
    # platform-dispatch logic (the previous body was a verbatim copy of
    # do_exploit, which risked the two drifting apart).
    self.do_exploit(args)
Run the exploit
common/modules.py
do_run
grimlyreaper/CICADA
4
python
def do_run(self, args): if (self.module.type == 'gitlab'): self.gitlabAttack() else: self.githubAttack()
def do_run(self, args): if (self.module.type == 'gitlab'): self.gitlabAttack() else: self.githubAttack()<|docstring|>Run the exploit<|endoftext|>
fd7666d3b4c8a68121345574201d1a2b2f98eb070376b5740ef0597280e41f84
def close_websocket(self):
    """
    Close the websocket and invalidate this object.

    Stops the event loop and clears the reference so further use of this
    object fails fast.
    """
    current = self.loop
    current.stop()
    self.loop = None
Close the websocket and invalidate this object.
mattermost/ws.py
close_websocket
someone-somenet-org/mm-python-api
0
python
def close_websocket(self): '\n \n ' self.loop.stop() self.loop = None
def close_websocket(self): '\n \n ' self.loop.stop() self.loop = None<|docstring|>Close the websocket and invalidate this object.<|endoftext|>
e5ae42afe2bef6a827157015e3cc0e7d817b9494ea85b644ed8e30d333e314d4
def test_valid(self):
    """
    Testing that valid type pairings are properly detected.
    """
    # Primitive types matching themselves.
    print('Testing valid primitive types...')
    self.assertTrue(ISerializable._is_type_valid(int, int))
    self.assertTrue(ISerializable._is_type_valid(str, str))
    self.assertTrue(ISerializable._is_type_valid(float, float))
    self.assertTrue(ISerializable._is_type_valid(bool, bool))
    # Bare container types.
    print('Testing valid list, dict, tuple, set...')
    self.assertTrue(ISerializable._is_type_valid(list, list))
    self.assertTrue(ISerializable._is_type_valid(dict, dict))
    self.assertTrue(ISerializable._is_type_valid(tuple, tuple))
    self.assertTrue(ISerializable._is_type_valid(set, set))
    # Parameterized container types against the runtime type of a value.
    print('Testing valid composed list, dict, tuple, set...')
    self.assertTrue(ISerializable._is_type_valid(list[(str, int)], type(['abc'])))
    self.assertTrue(ISerializable._is_type_valid(dict[(str, int)], type({'text': 'test', 'number': 123})))
    self.assertTrue(ISerializable._is_type_valid(tuple[(str, int)], type(('abc',))))
    # A plain list of candidate types, with listed-type processing enabled.
    print('Testing valid individual types in list...')
    self.assertTrue(ISerializable._is_type_valid([str, int], int, process_listed_types=True))
    # A serializable class is expected to match a dict value.
    print('Testing valid serializable class...')
    self.assertTrue(ISerializable._is_type_valid(TestedValidClass, dict))
    # typing special forms: Union, Optional, and the bare-None case.
    print('Testing valid typing special types...')
    self.assertTrue(ISerializable._is_type_valid(Union[(str, int)], int))
    self.assertTrue(ISerializable._is_type_valid(Optional[str], str))
    self.assertTrue(ISerializable._is_type_valid(Optional[str], None))
    self.assertTrue(ISerializable._is_type_valid(None, None))
    # Any must accept every tested type, including a list of types.
    print("Testing validity with 'Any'...")
    for tested_type in [int, str, float, bool, [str, int]]:
        self.assertTrue(ISerializable._is_type_valid(Any, tested_type, process_listed_types=True))
    # Any must also short-circuit before listed-type processing raises.
    print("Testing the absence of 'TypeError' with 'Any' and list of individual types...")
    self.assertTrue(ISerializable._is_type_valid(Any, int, process_listed_types=False))
Testing if validly types are properly detected.
tests/test_is_type_valid.py
test_valid
aziascreations/mooss-serialize
0
python
def test_valid(self): '\n \n ' print('Testing valid primitive types...') self.assertTrue(ISerializable._is_type_valid(int, int)) self.assertTrue(ISerializable._is_type_valid(str, str)) self.assertTrue(ISerializable._is_type_valid(float, float)) self.assertTrue(ISerializable._is_type_valid(bool, bool)) print('Testing valid list, dict, tuple, set...') self.assertTrue(ISerializable._is_type_valid(list, list)) self.assertTrue(ISerializable._is_type_valid(dict, dict)) self.assertTrue(ISerializable._is_type_valid(tuple, tuple)) self.assertTrue(ISerializable._is_type_valid(set, set)) print('Testing valid composed list, dict, tuple, set...') self.assertTrue(ISerializable._is_type_valid(list[(str, int)], type(['abc']))) self.assertTrue(ISerializable._is_type_valid(dict[(str, int)], type({'text': 'test', 'number': 123}))) self.assertTrue(ISerializable._is_type_valid(tuple[(str, int)], type(('abc',)))) print('Testing valid individual types in list...') self.assertTrue(ISerializable._is_type_valid([str, int], int, process_listed_types=True)) print('Testing valid serializable class...') self.assertTrue(ISerializable._is_type_valid(TestedValidClass, dict)) print('Testing valid typing special types...') self.assertTrue(ISerializable._is_type_valid(Union[(str, int)], int)) self.assertTrue(ISerializable._is_type_valid(Optional[str], str)) self.assertTrue(ISerializable._is_type_valid(Optional[str], None)) self.assertTrue(ISerializable._is_type_valid(None, None)) print("Testing validity with 'Any'...") for tested_type in [int, str, float, bool, [str, int]]: self.assertTrue(ISerializable._is_type_valid(Any, tested_type, process_listed_types=True)) print("Testing the absence of 'TypeError' with 'Any' and list of individual types...") self.assertTrue(ISerializable._is_type_valid(Any, int, process_listed_types=False))
def test_valid(self): '\n \n ' print('Testing valid primitive types...') self.assertTrue(ISerializable._is_type_valid(int, int)) self.assertTrue(ISerializable._is_type_valid(str, str)) self.assertTrue(ISerializable._is_type_valid(float, float)) self.assertTrue(ISerializable._is_type_valid(bool, bool)) print('Testing valid list, dict, tuple, set...') self.assertTrue(ISerializable._is_type_valid(list, list)) self.assertTrue(ISerializable._is_type_valid(dict, dict)) self.assertTrue(ISerializable._is_type_valid(tuple, tuple)) self.assertTrue(ISerializable._is_type_valid(set, set)) print('Testing valid composed list, dict, tuple, set...') self.assertTrue(ISerializable._is_type_valid(list[(str, int)], type(['abc']))) self.assertTrue(ISerializable._is_type_valid(dict[(str, int)], type({'text': 'test', 'number': 123}))) self.assertTrue(ISerializable._is_type_valid(tuple[(str, int)], type(('abc',)))) print('Testing valid individual types in list...') self.assertTrue(ISerializable._is_type_valid([str, int], int, process_listed_types=True)) print('Testing valid serializable class...') self.assertTrue(ISerializable._is_type_valid(TestedValidClass, dict)) print('Testing valid typing special types...') self.assertTrue(ISerializable._is_type_valid(Union[(str, int)], int)) self.assertTrue(ISerializable._is_type_valid(Optional[str], str)) self.assertTrue(ISerializable._is_type_valid(Optional[str], None)) self.assertTrue(ISerializable._is_type_valid(None, None)) print("Testing validity with 'Any'...") for tested_type in [int, str, float, bool, [str, int]]: self.assertTrue(ISerializable._is_type_valid(Any, tested_type, process_listed_types=True)) print("Testing the absence of 'TypeError' with 'Any' and list of individual types...") self.assertTrue(ISerializable._is_type_valid(Any, int, process_listed_types=False))<|docstring|>Testing if validly types are properly detected.<|endoftext|>
f5819d8bc0fc8505ea4922b5cee699a3f735658de1d4842edfe2f9160d186511
def test_invalid(self): '\n Testing if invalid types are properly detected.\n ' print('Testing invalid primitive types...') self.assertFalse(ISerializable._is_type_valid(int, float)) self.assertFalse(ISerializable._is_type_valid(str, int)) self.assertFalse(ISerializable._is_type_valid(float, bool)) self.assertFalse(ISerializable._is_type_valid(bool, str)) self.assertFalse(ISerializable._is_type_valid(bool, None)) self.assertFalse(ISerializable._is_type_valid(None, bool)) print('Testing invalid list, dict, tuple, set...') self.assertFalse(ISerializable._is_type_valid(list, set)) self.assertFalse(ISerializable._is_type_valid(dict, list)) self.assertFalse(ISerializable._is_type_valid(tuple, dict)) self.assertFalse(ISerializable._is_type_valid(set, tuple)) self.assertFalse(ISerializable._is_type_valid(list, None)) self.assertFalse(ISerializable._is_type_valid(None, list)) print('Testing invalid serializable class...') self.assertFalse(ISerializable._is_type_valid(TestedInvalidClass, int)) self.assertFalse(ISerializable._is_type_valid(TestedInvalidClass, None)) self.assertFalse(ISerializable._is_type_valid(None, TestedInvalidClass)) print('Testing invalid typing special types...') self.assertFalse(ISerializable._is_type_valid(Union[(str, int)], bool)) self.assertFalse(ISerializable._is_type_valid(Optional[str], float)) print('Testing invalid individual types in list...') self.assertRaises(TypeError, (lambda : ISerializable._is_type_valid([str, int], int, process_listed_types=False))) self.assertFalse(ISerializable._is_type_valid([str, int], bool, process_listed_types=True))
Testing if invalid types are properly detected.
tests/test_is_type_valid.py
test_invalid
aziascreations/mooss-serialize
0
python
def test_invalid(self): '\n \n ' print('Testing invalid primitive types...') self.assertFalse(ISerializable._is_type_valid(int, float)) self.assertFalse(ISerializable._is_type_valid(str, int)) self.assertFalse(ISerializable._is_type_valid(float, bool)) self.assertFalse(ISerializable._is_type_valid(bool, str)) self.assertFalse(ISerializable._is_type_valid(bool, None)) self.assertFalse(ISerializable._is_type_valid(None, bool)) print('Testing invalid list, dict, tuple, set...') self.assertFalse(ISerializable._is_type_valid(list, set)) self.assertFalse(ISerializable._is_type_valid(dict, list)) self.assertFalse(ISerializable._is_type_valid(tuple, dict)) self.assertFalse(ISerializable._is_type_valid(set, tuple)) self.assertFalse(ISerializable._is_type_valid(list, None)) self.assertFalse(ISerializable._is_type_valid(None, list)) print('Testing invalid serializable class...') self.assertFalse(ISerializable._is_type_valid(TestedInvalidClass, int)) self.assertFalse(ISerializable._is_type_valid(TestedInvalidClass, None)) self.assertFalse(ISerializable._is_type_valid(None, TestedInvalidClass)) print('Testing invalid typing special types...') self.assertFalse(ISerializable._is_type_valid(Union[(str, int)], bool)) self.assertFalse(ISerializable._is_type_valid(Optional[str], float)) print('Testing invalid individual types in list...') self.assertRaises(TypeError, (lambda : ISerializable._is_type_valid([str, int], int, process_listed_types=False))) self.assertFalse(ISerializable._is_type_valid([str, int], bool, process_listed_types=True))
def test_invalid(self): '\n \n ' print('Testing invalid primitive types...') self.assertFalse(ISerializable._is_type_valid(int, float)) self.assertFalse(ISerializable._is_type_valid(str, int)) self.assertFalse(ISerializable._is_type_valid(float, bool)) self.assertFalse(ISerializable._is_type_valid(bool, str)) self.assertFalse(ISerializable._is_type_valid(bool, None)) self.assertFalse(ISerializable._is_type_valid(None, bool)) print('Testing invalid list, dict, tuple, set...') self.assertFalse(ISerializable._is_type_valid(list, set)) self.assertFalse(ISerializable._is_type_valid(dict, list)) self.assertFalse(ISerializable._is_type_valid(tuple, dict)) self.assertFalse(ISerializable._is_type_valid(set, tuple)) self.assertFalse(ISerializable._is_type_valid(list, None)) self.assertFalse(ISerializable._is_type_valid(None, list)) print('Testing invalid serializable class...') self.assertFalse(ISerializable._is_type_valid(TestedInvalidClass, int)) self.assertFalse(ISerializable._is_type_valid(TestedInvalidClass, None)) self.assertFalse(ISerializable._is_type_valid(None, TestedInvalidClass)) print('Testing invalid typing special types...') self.assertFalse(ISerializable._is_type_valid(Union[(str, int)], bool)) self.assertFalse(ISerializable._is_type_valid(Optional[str], float)) print('Testing invalid individual types in list...') self.assertRaises(TypeError, (lambda : ISerializable._is_type_valid([str, int], int, process_listed_types=False))) self.assertFalse(ISerializable._is_type_valid([str, int], bool, process_listed_types=True))<|docstring|>Testing if invalid types are properly detected.<|endoftext|>
dfb1143d41c9bf290a81d855a3965970c01445650ce36448e88515ffde989c3e
def get_color(v, part='fill'): "\n In most svg renderers, if a color is unset, it will be rendered as black. This is\n different from the fill being none, or transparent.\n :param v: the dictionary of attributes from the xml tag\n :param part: the attribute that we're looking for.\n :return: a three item tuple\n " if (not isinstance(v, dict)): return [0, 0, 0] if ('style' not in v): if (part in v): if (isinstance(v[part], (list, tuple)) and (len(v[part]) == 3)): return v[part] if (v[part] in css2_names_to_hex): return hex_to_rgb(css2_names_to_hex[v[part]]) elif (v[part][0] == '#'): return hex_to_rgb(v[part]) elif (v[part] == 'none'): return None else: return None else: return None if (v['style'].find((part + ':')) >= 0): color = v['style'].split((part + ':'))[1].split(';')[0] if (color[0] == '#'): return hex_to_rgb(color) elif (color == 'none'): return None else: print(('not sure what to do with color: %s' % color)) return None else: return None
In most svg renderers, if a color is unset, it will be rendered as black. This is different from the fill being none, or transparent. :param v: the dictionary of attributes from the xml tag :param part: the attribute that we're looking for. :return: a three item tuple
svgutils.py
get_color
CatherineH/python-embroidery
21
python
def get_color(v, part='fill'): "\n In most svg renderers, if a color is unset, it will be rendered as black. This is\n different from the fill being none, or transparent.\n :param v: the dictionary of attributes from the xml tag\n :param part: the attribute that we're looking for.\n :return: a three item tuple\n " if (not isinstance(v, dict)): return [0, 0, 0] if ('style' not in v): if (part in v): if (isinstance(v[part], (list, tuple)) and (len(v[part]) == 3)): return v[part] if (v[part] in css2_names_to_hex): return hex_to_rgb(css2_names_to_hex[v[part]]) elif (v[part][0] == '#'): return hex_to_rgb(v[part]) elif (v[part] == 'none'): return None else: return None else: return None if (v['style'].find((part + ':')) >= 0): color = v['style'].split((part + ':'))[1].split(';')[0] if (color[0] == '#'): return hex_to_rgb(color) elif (color == 'none'): return None else: print(('not sure what to do with color: %s' % color)) return None else: return None
def get_color(v, part='fill'): "\n In most svg renderers, if a color is unset, it will be rendered as black. This is\n different from the fill being none, or transparent.\n :param v: the dictionary of attributes from the xml tag\n :param part: the attribute that we're looking for.\n :return: a three item tuple\n " if (not isinstance(v, dict)): return [0, 0, 0] if ('style' not in v): if (part in v): if (isinstance(v[part], (list, tuple)) and (len(v[part]) == 3)): return v[part] if (v[part] in css2_names_to_hex): return hex_to_rgb(css2_names_to_hex[v[part]]) elif (v[part][0] == '#'): return hex_to_rgb(v[part]) elif (v[part] == 'none'): return None else: return None else: return None if (v['style'].find((part + ':')) >= 0): color = v['style'].split((part + ':'))[1].split(';')[0] if (color[0] == '#'): return hex_to_rgb(color) elif (color == 'none'): return None else: print(('not sure what to do with color: %s' % color)) return None else: return None<|docstring|>In most svg renderers, if a color is unset, it will be rendered as black. This is different from the fill being none, or transparent. :param v: the dictionary of attributes from the xml tag :param part: the attribute that we're looking for. :return: a three item tuple<|endoftext|>
99a181e0695381f897fdbaea5987d4127e2f6ed67a273c65b0518ac7c8867d83
def write_debug(partial, parts, override=False): '\n write a set of shapes to an output file.\n\n :param partial: the filename part, i.e., if partial is xxxx, then the filename will\n be gen_xxxx_timestamp.svg\n :param parts: a list of shapes lists, where the first element of each shape is the\n svgpathtoolshape, the second value is the fill, and the third value is the stroke color.\n :return: nothing\n ' if ((not override) and (not DEBUG)): return debug_fh = open(gen_filename(partial), 'w') debug_dwg = svgwrite.Drawing(debug_fh, profile='tiny') for shape in parts: params = {} if (len(shape) > 2): shape[2] = get_color(shape[2]) if (shape[2] is not None): params['stroke'] = rgb_to_hex(shape[2]) if (len(shape) > 1): shape[1] = get_color(shape[1]) if (shape[1] is not None): params['fill'] = rgb_to_hex(shape[1]) if isinstance(shape[0], Path): debug_dwg.add(debug_dwg.path(d=shape[0].d(), **params)) elif isinstance(shape[0], Line): debug_dwg.add(debug_dwg.line(start=(shape[0].start.real, shape[0].start.imag), end=(shape[0].end.real, shape[0].end.imag), **params)) elif isinstance(shape[0], svgwrite.shapes.Rect): debug_dwg.add(shape[0]) elif isinstance(shape[0], svgwrite.shapes.Circle): debug_dwg.add(shape[0]) elif isinstance(shape[0], Text): debug_dwg.add(shape[0]) else: print("can't put shape", shape[0], ' in debug file') debug_dwg.write(debug_dwg.filename, pretty=False) debug_fh.close()
write a set of shapes to an output file. :param partial: the filename part, i.e., if partial is xxxx, then the filename will be gen_xxxx_timestamp.svg :param parts: a list of shapes lists, where the first element of each shape is the svgpathtoolshape, the second value is the fill, and the third value is the stroke color. :return: nothing
svgutils.py
write_debug
CatherineH/python-embroidery
21
python
def write_debug(partial, parts, override=False): '\n write a set of shapes to an output file.\n\n :param partial: the filename part, i.e., if partial is xxxx, then the filename will\n be gen_xxxx_timestamp.svg\n :param parts: a list of shapes lists, where the first element of each shape is the\n svgpathtoolshape, the second value is the fill, and the third value is the stroke color.\n :return: nothing\n ' if ((not override) and (not DEBUG)): return debug_fh = open(gen_filename(partial), 'w') debug_dwg = svgwrite.Drawing(debug_fh, profile='tiny') for shape in parts: params = {} if (len(shape) > 2): shape[2] = get_color(shape[2]) if (shape[2] is not None): params['stroke'] = rgb_to_hex(shape[2]) if (len(shape) > 1): shape[1] = get_color(shape[1]) if (shape[1] is not None): params['fill'] = rgb_to_hex(shape[1]) if isinstance(shape[0], Path): debug_dwg.add(debug_dwg.path(d=shape[0].d(), **params)) elif isinstance(shape[0], Line): debug_dwg.add(debug_dwg.line(start=(shape[0].start.real, shape[0].start.imag), end=(shape[0].end.real, shape[0].end.imag), **params)) elif isinstance(shape[0], svgwrite.shapes.Rect): debug_dwg.add(shape[0]) elif isinstance(shape[0], svgwrite.shapes.Circle): debug_dwg.add(shape[0]) elif isinstance(shape[0], Text): debug_dwg.add(shape[0]) else: print("can't put shape", shape[0], ' in debug file') debug_dwg.write(debug_dwg.filename, pretty=False) debug_fh.close()
def write_debug(partial, parts, override=False): '\n write a set of shapes to an output file.\n\n :param partial: the filename part, i.e., if partial is xxxx, then the filename will\n be gen_xxxx_timestamp.svg\n :param parts: a list of shapes lists, where the first element of each shape is the\n svgpathtoolshape, the second value is the fill, and the third value is the stroke color.\n :return: nothing\n ' if ((not override) and (not DEBUG)): return debug_fh = open(gen_filename(partial), 'w') debug_dwg = svgwrite.Drawing(debug_fh, profile='tiny') for shape in parts: params = {} if (len(shape) > 2): shape[2] = get_color(shape[2]) if (shape[2] is not None): params['stroke'] = rgb_to_hex(shape[2]) if (len(shape) > 1): shape[1] = get_color(shape[1]) if (shape[1] is not None): params['fill'] = rgb_to_hex(shape[1]) if isinstance(shape[0], Path): debug_dwg.add(debug_dwg.path(d=shape[0].d(), **params)) elif isinstance(shape[0], Line): debug_dwg.add(debug_dwg.line(start=(shape[0].start.real, shape[0].start.imag), end=(shape[0].end.real, shape[0].end.imag), **params)) elif isinstance(shape[0], svgwrite.shapes.Rect): debug_dwg.add(shape[0]) elif isinstance(shape[0], svgwrite.shapes.Circle): debug_dwg.add(shape[0]) elif isinstance(shape[0], Text): debug_dwg.add(shape[0]) else: print("can't put shape", shape[0], ' in debug file') debug_dwg.write(debug_dwg.filename, pretty=False) debug_fh.close()<|docstring|>write a set of shapes to an output file. :param partial: the filename part, i.e., if partial is xxxx, then the filename will be gen_xxxx_timestamp.svg :param parts: a list of shapes lists, where the first element of each shape is the svgpathtoolshape, the second value is the fill, and the third value is the stroke color. :return: nothing<|endoftext|>
f307c6402310c8c1b1c67c2eccdc60de66767b0d48fcdcb4a347ce660e10dd18
@blueprint.route(MEMBERS_QUERY, methods=['GET']) async def _members(request: Request, id: int): 'Get all members with a role\n ' guild: Guild = (await request.app.config.BOT_INSTANCE.guild()) role: Role = get(guild.roles, id=id) return json([{'name': member.name, 'id': member.id, 'discriminator': member.discriminator} for member in role.members])
Get all members with a role
litebot/server/routes/members_route.py
_members
rybot666/LiteBot
22
python
@blueprint.route(MEMBERS_QUERY, methods=['GET']) async def _members(request: Request, id: int): '\n ' guild: Guild = (await request.app.config.BOT_INSTANCE.guild()) role: Role = get(guild.roles, id=id) return json([{'name': member.name, 'id': member.id, 'discriminator': member.discriminator} for member in role.members])
@blueprint.route(MEMBERS_QUERY, methods=['GET']) async def _members(request: Request, id: int): '\n ' guild: Guild = (await request.app.config.BOT_INSTANCE.guild()) role: Role = get(guild.roles, id=id) return json([{'name': member.name, 'id': member.id, 'discriminator': member.discriminator} for member in role.members])<|docstring|>Get all members with a role<|endoftext|>
3b7bbda83dfa2aa47b053716aad8060934443b57950e1baf3f3b41525633734b
@blueprint.route(IN_GUILD_QUERY, methods=['GET']) async def in_guild(request: Request, id: int): 'Check if a member is in the main server guild\n\n ' guild = (await request.app.config.BOT_INSTANCE.guild()) member: discord.Member = (await guild.fetch_member(id)) if (not member): return json({'error': 'No member found!'}) return json({'res': f'success! the user is in the guild {guild.name}'})
Check if a member is in the main server guild
litebot/server/routes/members_route.py
in_guild
rybot666/LiteBot
22
python
@blueprint.route(IN_GUILD_QUERY, methods=['GET']) async def in_guild(request: Request, id: int): '\n\n ' guild = (await request.app.config.BOT_INSTANCE.guild()) member: discord.Member = (await guild.fetch_member(id)) if (not member): return json({'error': 'No member found!'}) return json({'res': f'success! the user is in the guild {guild.name}'})
@blueprint.route(IN_GUILD_QUERY, methods=['GET']) async def in_guild(request: Request, id: int): '\n\n ' guild = (await request.app.config.BOT_INSTANCE.guild()) member: discord.Member = (await guild.fetch_member(id)) if (not member): return json({'error': 'No member found!'}) return json({'res': f'success! the user is in the guild {guild.name}'})<|docstring|>Check if a member is in the main server guild<|endoftext|>
905773b347dbc36e137671c4ddd1e3fabd1d3aff9683a2577c5a98a083810642
@blueprint.route(ROLES_QUERY, methods=['GET']) async def fetch_roles(request: Request, id: int): 'Fetch the roles for a member\n ' guild = (await request.app.config.BOT_INSTANCE.guild()) member: discord.Member = (await guild.fetch_member(id)) if (not member): return json({'error': 'No member found!'}) return json({'res': [_serialize_role(role) for role in member.roles]})
Fetch the roles for a member
litebot/server/routes/members_route.py
fetch_roles
rybot666/LiteBot
22
python
@blueprint.route(ROLES_QUERY, methods=['GET']) async def fetch_roles(request: Request, id: int): '\n ' guild = (await request.app.config.BOT_INSTANCE.guild()) member: discord.Member = (await guild.fetch_member(id)) if (not member): return json({'error': 'No member found!'}) return json({'res': [_serialize_role(role) for role in member.roles]})
@blueprint.route(ROLES_QUERY, methods=['GET']) async def fetch_roles(request: Request, id: int): '\n ' guild = (await request.app.config.BOT_INSTANCE.guild()) member: discord.Member = (await guild.fetch_member(id)) if (not member): return json({'error': 'No member found!'}) return json({'res': [_serialize_role(role) for role in member.roles]})<|docstring|>Fetch the roles for a member<|endoftext|>
3846f72bae69bd9d05851ae2a15176403c874a886f90fa08e7c8e6183b931d5d
def calc_misc_Lynx(self, model): "\n These aren't really calculating right now. Just using defaults or forced values.\n Specifically registers that are not handled in inherited CALC_Misc_panther\n\n Args:\n model (ModelRoot) : Data model to read and write variables from\n " self._reg_write(model.vars.FRC_DFLCTRL_DFLBOIOFFSET, 0) self._reg_write(model.vars.FRC_CTRL_LPMODEDIS, 1) self._reg_write(model.vars.FRC_CTRL_WAITEOFEN, 0) self._reg_write(model.vars.FRC_TRAILTXDATACTRL_TRAILTXREPLEN, 0) self._reg_write(model.vars.FRC_TRAILTXDATACTRL_TXSUPPLENOVERIDE, 0) self._reg_write(model.vars.FRC_WCNTCMP3_SUPPLENFIELDLOC, 0) self._reg_write(model.vars.FRC_BOICTRL_BOIEN, 0) self._reg_write(model.vars.FRC_BOICTRL_BOIFIELDLOC, 0) self._reg_write(model.vars.FRC_BOICTRL_BOIBITPOS, 0) self._reg_write(model.vars.FRC_BOICTRL_BOIMATCHVAL, 0) self._reg_write(model.vars.FRC_DSLCTRL_DSLMODE, 0) self._reg_write(model.vars.FRC_DSLCTRL_DSLBITORDER, 0) self._reg_write(model.vars.FRC_DSLCTRL_DSLSHIFT, 0) self._reg_write(model.vars.FRC_DSLCTRL_DSLOFFSET, 0) self._reg_write(model.vars.FRC_DSLCTRL_DSLBITS, 0) self._reg_write(model.vars.FRC_DSLCTRL_DSLMINLENGTH, 0) self._reg_write(model.vars.FRC_DSLCTRL_RXSUPRECEPMODE, 0) self._reg_write(model.vars.FRC_DSLCTRL_STORESUP, 0) self._reg_write(model.vars.FRC_DSLCTRL_SUPSHFFACTOR, 0) self._reg_write(model.vars.FRC_WCNTCMP4_SUPPLENGTH, 0) self._reg_write(model.vars.MODEM_DIGMIXCTRL_DIGMIXFB, 0) self._reg_write(model.vars.MODEM_VTTRACK_SYNCTIMEOUTSEL, 1) self._reg_write(model.vars.MODEM_LRFRC_LRCORRMODE, 1) self._reg_write(model.vars.MODEM_REALTIMCFE_MINCOSTTHD, 500) self._reg_write(model.vars.MODEM_REALTIMCFE_RTSCHWIN, 0) self._reg_write(model.vars.MODEM_REALTIMCFE_RTSCHMODE, 0) self._reg_write(model.vars.MODEM_REALTIMCFE_TRACKINGWIN, 7) self._reg_write(model.vars.MODEM_REALTIMCFE_SYNCACQWIN, 7) self._reg_write(model.vars.MODEM_REALTIMCFE_SINEWEN, 0) self._reg_write(model.vars.MODEM_REALTIMCFE_VTAFCFRAME, 0) 
self._reg_write(model.vars.MODEM_REALTIMCFE_RTCFEEN, 0) self._reg_write(model.vars.MODEM_ETSCTRL_ETSLOC, 0) self._reg_write(model.vars.MODEM_ETSCTRL_CAPTRIG, 0) self._reg_write(model.vars.MODEM_ANTSWCTRL_ANTDFLTSEL, 0) self._reg_write(model.vars.MODEM_ANTSWCTRL_ANTCOUNT, 0) self._reg_write(model.vars.MODEM_ANTSWCTRL_ANTSWTYPE, 0) self._reg_write(model.vars.MODEM_ANTSWCTRL_CFGANTPATTEN, 0) self._reg_write(model.vars.MODEM_ANTSWCTRL_ANTSWENABLE, 0) self._reg_write(model.vars.MODEM_ANTSWCTRL_EXTDSTOPPULSECNT, 30) self._reg_write(model.vars.MODEM_ANTSWSTART_ANTSWSTARTTIM, 0) self._reg_write(model.vars.MODEM_ANTSWEND_ANTSWENDTIM, 0) self._reg_write(model.vars.MODEM_TRECPMPATT_PMEXPECTPATT, 1431655765) self._reg_write(model.vars.MODEM_TRECPMDET_PMACQUINGWIN, 7) self._reg_write(model.vars.MODEM_TRECPMDET_PMCOSTVALTHD, 2) self._reg_write(model.vars.MODEM_TRECPMDET_PMTIMEOUTSEL, 0) self._reg_write(model.vars.MODEM_TRECPMDET_PHSCALE, 0) self._reg_write(model.vars.MODEM_TRECPMDET_PMMINCOSTTHD, 0) self._reg_write(model.vars.MODEM_TRECPMDET_VTPMDETSEL, 0) self._reg_write(model.vars.MODEM_TRECPMDET_COSTHYST, 0) self._reg_write(model.vars.MODEM_TRECPMDET_PREAMSCH, 0) self._reg_write(model.vars.MODEM_CFGANTPATT_CFGANTPATTVAL, 0) self._reg_write(model.vars.MODEM_ETSTIM_ETSTIMVAL, 0) self._reg_write(model.vars.MODEM_ETSTIM_ETSCOUNTEREN, 0) self._reg_write(model.vars.MODEM_ANTSWCTRL1_TIMEPERIOD, 436906) self._reg_write(model.vars.MODEM_COCURRMODE_CONCURRENT, 0) self._reg_write(model.vars.MODEM_ANTDIVCTRL_ADPRETHRESH, 0) self._reg_write(model.vars.MODEM_ANTDIVCTRL_ENADPRETHRESH, 0) self._reg_write(model.vars.MODEM_BLEIQDSAEXT2_DISMAXPEAKTRACKMODE, 0) self._reg_write(model.vars.MODEM_BLEIQDSAEXT2_BBSSDEBOUNCETIM, 0) self._reg_write(model.vars.MODEM_BLEIQDSAEXT2_BBSSDIFFCHVAL, 0) self._reg_write(model.vars.RAC_CLKMULTEN0_CLKMULTENDRVN, 0) self._reg_write(model.vars.RAC_CLKMULTEN0_CLKMULTENDRVP, 0) self._reg_write(model.vars.RAC_CLKMULTEN0_CLKMULTENREG3, 0) 
self._reg_write(model.vars.RAC_CLKMULTEN0_CLKMULTENBYPASS40MHZ, 0) self._reg_write(model.vars.RAC_CLKMULTEN0_CLKMULTREG3ADJV, 2) self._reg_write(model.vars.RAC_CLKMULTEN1_CLKMULTDRVAMPSEL, 0) self._reg_write(model.vars.RAC_LNAMIXDEBUG_LNAMIXDISMXR, 0) self._reg_write(model.vars.RAC_LNAMIXTRIM0_LNAMIXRFPKDBWSEL, 2) self._reg_write(model.vars.RAC_LNAMIXTRIM0_LNAMIXRFPKDCALDM, 16) self._reg_write(model.vars.RAC_LNAMIXTRIM0_LNAMIXTRIMVREG, 8) self._reg_write(model.vars.RAC_LNAMIXTRIM1_LNAMIXLNACAPSEL, 0) self._reg_write(model.vars.RAC_LNAMIXTRIM1_LNAMIXMXRBIAS, 2) self._reg_write(model.vars.RAC_LNAMIXTRIM1_LNAMIXNCASADJ, 2) self._reg_write(model.vars.RAC_LNAMIXTRIM1_LNAMIXPCASADJ, 2) self._reg_write(model.vars.RAC_LNAMIXTRIM1_LNAMIXVOUTADJ, 8) self._reg_write(model.vars.RAC_PGACTRL_PGABWMODE, 0) self._reg_write(model.vars.RAC_SYTRIM1_SYLODIVSGTESTDIV, 0) self._reg_write(model.vars.RAC_SYMMDCTRL_SYMMDMODE, 2) self._reg_write(model.vars.RAC_SYNTHCTRL_MMDPOWERBALANCEDISABLE, 1) self._reg_write(model.vars.RAC_SYNTHREGCTRL_MMDLDOVREFTRIM, 3) self._reg_write_default(model.vars.RAC_IFADCTRIM0_IFADCSIDETONEAMP) self._reg_write_default(model.vars.FRC_AUTOCG_AUTOCGEN) if (model.vars.SYNTH_LPFCTRL1CAL_OP1BWCAL.value_forced is None): self._reg_do_not_care(model.vars.SYNTH_LPFCTRL1CAL_OP1BWCAL) if (model.vars.SYNTH_LPFCTRL1CAL_OP1COMPCAL.value_forced is None): self._reg_do_not_care(model.vars.SYNTH_LPFCTRL1CAL_OP1COMPCAL) if (model.vars.SYNTH_LPFCTRL1CAL_RZVALCAL.value_forced is None): self._reg_do_not_care(model.vars.SYNTH_LPFCTRL1CAL_RZVALCAL) if (model.vars.SYNTH_LPFCTRL1CAL_RPVALCAL.value_forced is None): self._reg_do_not_care(model.vars.SYNTH_LPFCTRL1CAL_RPVALCAL) if (model.vars.SYNTH_LPFCTRL1CAL_RFBVALCAL.value_forced is None): self._reg_do_not_care(model.vars.SYNTH_LPFCTRL1CAL_RFBVALCAL)
These aren't really calculating right now. Just using defaults or forced values. Specifically registers that are not handled in inherited CALC_Misc_panther Args: model (ModelRoot) : Data model to read and write variables from
platform/radio/efr32_multiphy_configurator/pyradioconfig/parts/lynx/calculators/calc_misc.py
calc_misc_Lynx
PascalGuenther/gecko_sdk
69
python
def calc_misc_Lynx(self, model): "\n These aren't really calculating right now. Just using defaults or forced values.\n Specifically registers that are not handled in inherited CALC_Misc_panther\n\n Args:\n model (ModelRoot) : Data model to read and write variables from\n " self._reg_write(model.vars.FRC_DFLCTRL_DFLBOIOFFSET, 0) self._reg_write(model.vars.FRC_CTRL_LPMODEDIS, 1) self._reg_write(model.vars.FRC_CTRL_WAITEOFEN, 0) self._reg_write(model.vars.FRC_TRAILTXDATACTRL_TRAILTXREPLEN, 0) self._reg_write(model.vars.FRC_TRAILTXDATACTRL_TXSUPPLENOVERIDE, 0) self._reg_write(model.vars.FRC_WCNTCMP3_SUPPLENFIELDLOC, 0) self._reg_write(model.vars.FRC_BOICTRL_BOIEN, 0) self._reg_write(model.vars.FRC_BOICTRL_BOIFIELDLOC, 0) self._reg_write(model.vars.FRC_BOICTRL_BOIBITPOS, 0) self._reg_write(model.vars.FRC_BOICTRL_BOIMATCHVAL, 0) self._reg_write(model.vars.FRC_DSLCTRL_DSLMODE, 0) self._reg_write(model.vars.FRC_DSLCTRL_DSLBITORDER, 0) self._reg_write(model.vars.FRC_DSLCTRL_DSLSHIFT, 0) self._reg_write(model.vars.FRC_DSLCTRL_DSLOFFSET, 0) self._reg_write(model.vars.FRC_DSLCTRL_DSLBITS, 0) self._reg_write(model.vars.FRC_DSLCTRL_DSLMINLENGTH, 0) self._reg_write(model.vars.FRC_DSLCTRL_RXSUPRECEPMODE, 0) self._reg_write(model.vars.FRC_DSLCTRL_STORESUP, 0) self._reg_write(model.vars.FRC_DSLCTRL_SUPSHFFACTOR, 0) self._reg_write(model.vars.FRC_WCNTCMP4_SUPPLENGTH, 0) self._reg_write(model.vars.MODEM_DIGMIXCTRL_DIGMIXFB, 0) self._reg_write(model.vars.MODEM_VTTRACK_SYNCTIMEOUTSEL, 1) self._reg_write(model.vars.MODEM_LRFRC_LRCORRMODE, 1) self._reg_write(model.vars.MODEM_REALTIMCFE_MINCOSTTHD, 500) self._reg_write(model.vars.MODEM_REALTIMCFE_RTSCHWIN, 0) self._reg_write(model.vars.MODEM_REALTIMCFE_RTSCHMODE, 0) self._reg_write(model.vars.MODEM_REALTIMCFE_TRACKINGWIN, 7) self._reg_write(model.vars.MODEM_REALTIMCFE_SYNCACQWIN, 7) self._reg_write(model.vars.MODEM_REALTIMCFE_SINEWEN, 0) self._reg_write(model.vars.MODEM_REALTIMCFE_VTAFCFRAME, 0) 
self._reg_write(model.vars.MODEM_REALTIMCFE_RTCFEEN, 0) self._reg_write(model.vars.MODEM_ETSCTRL_ETSLOC, 0) self._reg_write(model.vars.MODEM_ETSCTRL_CAPTRIG, 0) self._reg_write(model.vars.MODEM_ANTSWCTRL_ANTDFLTSEL, 0) self._reg_write(model.vars.MODEM_ANTSWCTRL_ANTCOUNT, 0) self._reg_write(model.vars.MODEM_ANTSWCTRL_ANTSWTYPE, 0) self._reg_write(model.vars.MODEM_ANTSWCTRL_CFGANTPATTEN, 0) self._reg_write(model.vars.MODEM_ANTSWCTRL_ANTSWENABLE, 0) self._reg_write(model.vars.MODEM_ANTSWCTRL_EXTDSTOPPULSECNT, 30) self._reg_write(model.vars.MODEM_ANTSWSTART_ANTSWSTARTTIM, 0) self._reg_write(model.vars.MODEM_ANTSWEND_ANTSWENDTIM, 0) self._reg_write(model.vars.MODEM_TRECPMPATT_PMEXPECTPATT, 1431655765) self._reg_write(model.vars.MODEM_TRECPMDET_PMACQUINGWIN, 7) self._reg_write(model.vars.MODEM_TRECPMDET_PMCOSTVALTHD, 2) self._reg_write(model.vars.MODEM_TRECPMDET_PMTIMEOUTSEL, 0) self._reg_write(model.vars.MODEM_TRECPMDET_PHSCALE, 0) self._reg_write(model.vars.MODEM_TRECPMDET_PMMINCOSTTHD, 0) self._reg_write(model.vars.MODEM_TRECPMDET_VTPMDETSEL, 0) self._reg_write(model.vars.MODEM_TRECPMDET_COSTHYST, 0) self._reg_write(model.vars.MODEM_TRECPMDET_PREAMSCH, 0) self._reg_write(model.vars.MODEM_CFGANTPATT_CFGANTPATTVAL, 0) self._reg_write(model.vars.MODEM_ETSTIM_ETSTIMVAL, 0) self._reg_write(model.vars.MODEM_ETSTIM_ETSCOUNTEREN, 0) self._reg_write(model.vars.MODEM_ANTSWCTRL1_TIMEPERIOD, 436906) self._reg_write(model.vars.MODEM_COCURRMODE_CONCURRENT, 0) self._reg_write(model.vars.MODEM_ANTDIVCTRL_ADPRETHRESH, 0) self._reg_write(model.vars.MODEM_ANTDIVCTRL_ENADPRETHRESH, 0) self._reg_write(model.vars.MODEM_BLEIQDSAEXT2_DISMAXPEAKTRACKMODE, 0) self._reg_write(model.vars.MODEM_BLEIQDSAEXT2_BBSSDEBOUNCETIM, 0) self._reg_write(model.vars.MODEM_BLEIQDSAEXT2_BBSSDIFFCHVAL, 0) self._reg_write(model.vars.RAC_CLKMULTEN0_CLKMULTENDRVN, 0) self._reg_write(model.vars.RAC_CLKMULTEN0_CLKMULTENDRVP, 0) self._reg_write(model.vars.RAC_CLKMULTEN0_CLKMULTENREG3, 0) 
self._reg_write(model.vars.RAC_CLKMULTEN0_CLKMULTENBYPASS40MHZ, 0) self._reg_write(model.vars.RAC_CLKMULTEN0_CLKMULTREG3ADJV, 2) self._reg_write(model.vars.RAC_CLKMULTEN1_CLKMULTDRVAMPSEL, 0) self._reg_write(model.vars.RAC_LNAMIXDEBUG_LNAMIXDISMXR, 0) self._reg_write(model.vars.RAC_LNAMIXTRIM0_LNAMIXRFPKDBWSEL, 2) self._reg_write(model.vars.RAC_LNAMIXTRIM0_LNAMIXRFPKDCALDM, 16) self._reg_write(model.vars.RAC_LNAMIXTRIM0_LNAMIXTRIMVREG, 8) self._reg_write(model.vars.RAC_LNAMIXTRIM1_LNAMIXLNACAPSEL, 0) self._reg_write(model.vars.RAC_LNAMIXTRIM1_LNAMIXMXRBIAS, 2) self._reg_write(model.vars.RAC_LNAMIXTRIM1_LNAMIXNCASADJ, 2) self._reg_write(model.vars.RAC_LNAMIXTRIM1_LNAMIXPCASADJ, 2) self._reg_write(model.vars.RAC_LNAMIXTRIM1_LNAMIXVOUTADJ, 8) self._reg_write(model.vars.RAC_PGACTRL_PGABWMODE, 0) self._reg_write(model.vars.RAC_SYTRIM1_SYLODIVSGTESTDIV, 0) self._reg_write(model.vars.RAC_SYMMDCTRL_SYMMDMODE, 2) self._reg_write(model.vars.RAC_SYNTHCTRL_MMDPOWERBALANCEDISABLE, 1) self._reg_write(model.vars.RAC_SYNTHREGCTRL_MMDLDOVREFTRIM, 3) self._reg_write_default(model.vars.RAC_IFADCTRIM0_IFADCSIDETONEAMP) self._reg_write_default(model.vars.FRC_AUTOCG_AUTOCGEN) if (model.vars.SYNTH_LPFCTRL1CAL_OP1BWCAL.value_forced is None): self._reg_do_not_care(model.vars.SYNTH_LPFCTRL1CAL_OP1BWCAL) if (model.vars.SYNTH_LPFCTRL1CAL_OP1COMPCAL.value_forced is None): self._reg_do_not_care(model.vars.SYNTH_LPFCTRL1CAL_OP1COMPCAL) if (model.vars.SYNTH_LPFCTRL1CAL_RZVALCAL.value_forced is None): self._reg_do_not_care(model.vars.SYNTH_LPFCTRL1CAL_RZVALCAL) if (model.vars.SYNTH_LPFCTRL1CAL_RPVALCAL.value_forced is None): self._reg_do_not_care(model.vars.SYNTH_LPFCTRL1CAL_RPVALCAL) if (model.vars.SYNTH_LPFCTRL1CAL_RFBVALCAL.value_forced is None): self._reg_do_not_care(model.vars.SYNTH_LPFCTRL1CAL_RFBVALCAL)
def calc_misc_Lynx(self, model): "\n These aren't really calculating right now. Just using defaults or forced values.\n Specifically registers that are not handled in inherited CALC_Misc_panther\n\n Args:\n model (ModelRoot) : Data model to read and write variables from\n " self._reg_write(model.vars.FRC_DFLCTRL_DFLBOIOFFSET, 0) self._reg_write(model.vars.FRC_CTRL_LPMODEDIS, 1) self._reg_write(model.vars.FRC_CTRL_WAITEOFEN, 0) self._reg_write(model.vars.FRC_TRAILTXDATACTRL_TRAILTXREPLEN, 0) self._reg_write(model.vars.FRC_TRAILTXDATACTRL_TXSUPPLENOVERIDE, 0) self._reg_write(model.vars.FRC_WCNTCMP3_SUPPLENFIELDLOC, 0) self._reg_write(model.vars.FRC_BOICTRL_BOIEN, 0) self._reg_write(model.vars.FRC_BOICTRL_BOIFIELDLOC, 0) self._reg_write(model.vars.FRC_BOICTRL_BOIBITPOS, 0) self._reg_write(model.vars.FRC_BOICTRL_BOIMATCHVAL, 0) self._reg_write(model.vars.FRC_DSLCTRL_DSLMODE, 0) self._reg_write(model.vars.FRC_DSLCTRL_DSLBITORDER, 0) self._reg_write(model.vars.FRC_DSLCTRL_DSLSHIFT, 0) self._reg_write(model.vars.FRC_DSLCTRL_DSLOFFSET, 0) self._reg_write(model.vars.FRC_DSLCTRL_DSLBITS, 0) self._reg_write(model.vars.FRC_DSLCTRL_DSLMINLENGTH, 0) self._reg_write(model.vars.FRC_DSLCTRL_RXSUPRECEPMODE, 0) self._reg_write(model.vars.FRC_DSLCTRL_STORESUP, 0) self._reg_write(model.vars.FRC_DSLCTRL_SUPSHFFACTOR, 0) self._reg_write(model.vars.FRC_WCNTCMP4_SUPPLENGTH, 0) self._reg_write(model.vars.MODEM_DIGMIXCTRL_DIGMIXFB, 0) self._reg_write(model.vars.MODEM_VTTRACK_SYNCTIMEOUTSEL, 1) self._reg_write(model.vars.MODEM_LRFRC_LRCORRMODE, 1) self._reg_write(model.vars.MODEM_REALTIMCFE_MINCOSTTHD, 500) self._reg_write(model.vars.MODEM_REALTIMCFE_RTSCHWIN, 0) self._reg_write(model.vars.MODEM_REALTIMCFE_RTSCHMODE, 0) self._reg_write(model.vars.MODEM_REALTIMCFE_TRACKINGWIN, 7) self._reg_write(model.vars.MODEM_REALTIMCFE_SYNCACQWIN, 7) self._reg_write(model.vars.MODEM_REALTIMCFE_SINEWEN, 0) self._reg_write(model.vars.MODEM_REALTIMCFE_VTAFCFRAME, 0) 
self._reg_write(model.vars.MODEM_REALTIMCFE_RTCFEEN, 0) self._reg_write(model.vars.MODEM_ETSCTRL_ETSLOC, 0) self._reg_write(model.vars.MODEM_ETSCTRL_CAPTRIG, 0) self._reg_write(model.vars.MODEM_ANTSWCTRL_ANTDFLTSEL, 0) self._reg_write(model.vars.MODEM_ANTSWCTRL_ANTCOUNT, 0) self._reg_write(model.vars.MODEM_ANTSWCTRL_ANTSWTYPE, 0) self._reg_write(model.vars.MODEM_ANTSWCTRL_CFGANTPATTEN, 0) self._reg_write(model.vars.MODEM_ANTSWCTRL_ANTSWENABLE, 0) self._reg_write(model.vars.MODEM_ANTSWCTRL_EXTDSTOPPULSECNT, 30) self._reg_write(model.vars.MODEM_ANTSWSTART_ANTSWSTARTTIM, 0) self._reg_write(model.vars.MODEM_ANTSWEND_ANTSWENDTIM, 0) self._reg_write(model.vars.MODEM_TRECPMPATT_PMEXPECTPATT, 1431655765) self._reg_write(model.vars.MODEM_TRECPMDET_PMACQUINGWIN, 7) self._reg_write(model.vars.MODEM_TRECPMDET_PMCOSTVALTHD, 2) self._reg_write(model.vars.MODEM_TRECPMDET_PMTIMEOUTSEL, 0) self._reg_write(model.vars.MODEM_TRECPMDET_PHSCALE, 0) self._reg_write(model.vars.MODEM_TRECPMDET_PMMINCOSTTHD, 0) self._reg_write(model.vars.MODEM_TRECPMDET_VTPMDETSEL, 0) self._reg_write(model.vars.MODEM_TRECPMDET_COSTHYST, 0) self._reg_write(model.vars.MODEM_TRECPMDET_PREAMSCH, 0) self._reg_write(model.vars.MODEM_CFGANTPATT_CFGANTPATTVAL, 0) self._reg_write(model.vars.MODEM_ETSTIM_ETSTIMVAL, 0) self._reg_write(model.vars.MODEM_ETSTIM_ETSCOUNTEREN, 0) self._reg_write(model.vars.MODEM_ANTSWCTRL1_TIMEPERIOD, 436906) self._reg_write(model.vars.MODEM_COCURRMODE_CONCURRENT, 0) self._reg_write(model.vars.MODEM_ANTDIVCTRL_ADPRETHRESH, 0) self._reg_write(model.vars.MODEM_ANTDIVCTRL_ENADPRETHRESH, 0) self._reg_write(model.vars.MODEM_BLEIQDSAEXT2_DISMAXPEAKTRACKMODE, 0) self._reg_write(model.vars.MODEM_BLEIQDSAEXT2_BBSSDEBOUNCETIM, 0) self._reg_write(model.vars.MODEM_BLEIQDSAEXT2_BBSSDIFFCHVAL, 0) self._reg_write(model.vars.RAC_CLKMULTEN0_CLKMULTENDRVN, 0) self._reg_write(model.vars.RAC_CLKMULTEN0_CLKMULTENDRVP, 0) self._reg_write(model.vars.RAC_CLKMULTEN0_CLKMULTENREG3, 0) 
self._reg_write(model.vars.RAC_CLKMULTEN0_CLKMULTENBYPASS40MHZ, 0) self._reg_write(model.vars.RAC_CLKMULTEN0_CLKMULTREG3ADJV, 2) self._reg_write(model.vars.RAC_CLKMULTEN1_CLKMULTDRVAMPSEL, 0) self._reg_write(model.vars.RAC_LNAMIXDEBUG_LNAMIXDISMXR, 0) self._reg_write(model.vars.RAC_LNAMIXTRIM0_LNAMIXRFPKDBWSEL, 2) self._reg_write(model.vars.RAC_LNAMIXTRIM0_LNAMIXRFPKDCALDM, 16) self._reg_write(model.vars.RAC_LNAMIXTRIM0_LNAMIXTRIMVREG, 8) self._reg_write(model.vars.RAC_LNAMIXTRIM1_LNAMIXLNACAPSEL, 0) self._reg_write(model.vars.RAC_LNAMIXTRIM1_LNAMIXMXRBIAS, 2) self._reg_write(model.vars.RAC_LNAMIXTRIM1_LNAMIXNCASADJ, 2) self._reg_write(model.vars.RAC_LNAMIXTRIM1_LNAMIXPCASADJ, 2) self._reg_write(model.vars.RAC_LNAMIXTRIM1_LNAMIXVOUTADJ, 8) self._reg_write(model.vars.RAC_PGACTRL_PGABWMODE, 0) self._reg_write(model.vars.RAC_SYTRIM1_SYLODIVSGTESTDIV, 0) self._reg_write(model.vars.RAC_SYMMDCTRL_SYMMDMODE, 2) self._reg_write(model.vars.RAC_SYNTHCTRL_MMDPOWERBALANCEDISABLE, 1) self._reg_write(model.vars.RAC_SYNTHREGCTRL_MMDLDOVREFTRIM, 3) self._reg_write_default(model.vars.RAC_IFADCTRIM0_IFADCSIDETONEAMP) self._reg_write_default(model.vars.FRC_AUTOCG_AUTOCGEN) if (model.vars.SYNTH_LPFCTRL1CAL_OP1BWCAL.value_forced is None): self._reg_do_not_care(model.vars.SYNTH_LPFCTRL1CAL_OP1BWCAL) if (model.vars.SYNTH_LPFCTRL1CAL_OP1COMPCAL.value_forced is None): self._reg_do_not_care(model.vars.SYNTH_LPFCTRL1CAL_OP1COMPCAL) if (model.vars.SYNTH_LPFCTRL1CAL_RZVALCAL.value_forced is None): self._reg_do_not_care(model.vars.SYNTH_LPFCTRL1CAL_RZVALCAL) if (model.vars.SYNTH_LPFCTRL1CAL_RPVALCAL.value_forced is None): self._reg_do_not_care(model.vars.SYNTH_LPFCTRL1CAL_RPVALCAL) if (model.vars.SYNTH_LPFCTRL1CAL_RFBVALCAL.value_forced is None): self._reg_do_not_care(model.vars.SYNTH_LPFCTRL1CAL_RFBVALCAL)<|docstring|>These aren't really calculating right now. Just using defaults or forced values. 
Specifically registers that are not handled in inherited CALC_Misc_panther Args: model (ModelRoot) : Data model to read and write variables from<|endoftext|>
765b9f6849f452ab665dd609e3006e1e90fd48e590752ed4082526cb0b848264
def isMatch(self, s, p): '\n :type s: str\n :type p: str\n :rtype: bool\n ' results = {} def dp(i, j): return results.get((i, j), False) results[((- 1), (- 1))] = True i = 1 while (i < len(p)): if (p[i] == '*'): results[((- 1), i)] = True else: break i += 2 for i in range(len(s)): for j in range(len(p)): one_match = ((s[i] == p[(j - 1)]) or (p[(j - 1)] == '.')) if (p[j] == '*'): match = (dp(i, (j - 2)) or (one_match and dp((i - 1), j))) else: match = (((s[i] == p[j]) or (p[j] == '.')) and dp((i - 1), (j - 1))) results[(i, j)] = match return dp((len(s) - 1), (len(p) - 1))
:type s: str :type p: str :rtype: bool
regular_expression_matching_dp.py
isMatch
luozhaoyu/leetcode
0
python
def isMatch(self, s, p): '\n :type s: str\n :type p: str\n :rtype: bool\n ' results = {} def dp(i, j): return results.get((i, j), False) results[((- 1), (- 1))] = True i = 1 while (i < len(p)): if (p[i] == '*'): results[((- 1), i)] = True else: break i += 2 for i in range(len(s)): for j in range(len(p)): one_match = ((s[i] == p[(j - 1)]) or (p[(j - 1)] == '.')) if (p[j] == '*'): match = (dp(i, (j - 2)) or (one_match and dp((i - 1), j))) else: match = (((s[i] == p[j]) or (p[j] == '.')) and dp((i - 1), (j - 1))) results[(i, j)] = match return dp((len(s) - 1), (len(p) - 1))
def isMatch(self, s, p): '\n :type s: str\n :type p: str\n :rtype: bool\n ' results = {} def dp(i, j): return results.get((i, j), False) results[((- 1), (- 1))] = True i = 1 while (i < len(p)): if (p[i] == '*'): results[((- 1), i)] = True else: break i += 2 for i in range(len(s)): for j in range(len(p)): one_match = ((s[i] == p[(j - 1)]) or (p[(j - 1)] == '.')) if (p[j] == '*'): match = (dp(i, (j - 2)) or (one_match and dp((i - 1), j))) else: match = (((s[i] == p[j]) or (p[j] == '.')) and dp((i - 1), (j - 1))) results[(i, j)] = match return dp((len(s) - 1), (len(p) - 1))<|docstring|>:type s: str :type p: str :rtype: bool<|endoftext|>
a30aa425cc8ab051619101667a3629d5c994f807ca5f7958a5cdea701847f03b
def __init__(self, node: Union[(NodeProto, ValueInfoProto, TensorProto)]): '\n ValueInfoProto for input, TensorProto for initializer\n ' if (not isinstance(node, (NodeProto, ValueInfoProto, TensorProto))): raise TypeError('need NodeProto, not {}'.format(type(node))) self._node = node
ValueInfoProto for input, TensorProto for initializer
built-in/ACL_PyTorch/Official/nlp/TransformerXL_for_Pytorch/om_gener/gener_core/mod_modify/onnx_node.py
__init__
Ascend/modelzoo
12
python
def __init__(self, node: Union[(NodeProto, ValueInfoProto, TensorProto)]): '\n \n ' if (not isinstance(node, (NodeProto, ValueInfoProto, TensorProto))): raise TypeError('need NodeProto, not {}'.format(type(node))) self._node = node
def __init__(self, node: Union[(NodeProto, ValueInfoProto, TensorProto)]): '\n \n ' if (not isinstance(node, (NodeProto, ValueInfoProto, TensorProto))): raise TypeError('need NodeProto, not {}'.format(type(node))) self._node = node<|docstring|>ValueInfoProto for input, TensorProto for initializer<|endoftext|>
9d7ca27f21316a4239850b89b36f67e00d94be645aee530314827c7c696e43c9
@property def node(self): '\n get original onnx xxproto\n ' return self._node
get original onnx xxproto
built-in/ACL_PyTorch/Official/nlp/TransformerXL_for_Pytorch/om_gener/gener_core/mod_modify/onnx_node.py
node
Ascend/modelzoo
12
python
@property def node(self): '\n \n ' return self._node
@property def node(self): '\n \n ' return self._node<|docstring|>get original onnx xxproto<|endoftext|>
cd91ba2fcfb2e9ea215e19ef79c796dfb6ea33fb8cd3cc1f8f3f2b451a87d19d
@property def op_type(self) -> str: '\n INPUT_TYPE for compatible with Tensorflowz\n ' if isinstance(self._node, NodeProto): return self._node.op_type if isinstance(self._node, ValueInfoProto): return INPUT_TYPE else: return INIT_TYPE
INPUT_TYPE for compatible with Tensorflowz
built-in/ACL_PyTorch/Official/nlp/TransformerXL_for_Pytorch/om_gener/gener_core/mod_modify/onnx_node.py
op_type
Ascend/modelzoo
12
python
@property def op_type(self) -> str: '\n \n ' if isinstance(self._node, NodeProto): return self._node.op_type if isinstance(self._node, ValueInfoProto): return INPUT_TYPE else: return INIT_TYPE
@property def op_type(self) -> str: '\n \n ' if isinstance(self._node, NodeProto): return self._node.op_type if isinstance(self._node, ValueInfoProto): return INPUT_TYPE else: return INIT_TYPE<|docstring|>INPUT_TYPE for compatible with Tensorflowz<|endoftext|>
f5d08a1c3789c7d30e11a14219b5620f057977a75b61a2445fa178c11056a3cd
def fetch_number_of_organizations_needing_twitter_update(): '\n Do not include individuals in this.\n :return: \n ' organization_we_vote_id_list_to_exclude = [] status = '' twitter_user_manager = TwitterUserManager() results = twitter_user_manager.retrieve_twitter_link_to_organization_list(return_we_vote_id_list_only=True, read_only=True) organization_we_vote_id_list_to_include = results['organization_we_vote_id_list'] if len(organization_we_vote_id_list_to_include): try: remote_request_query = RemoteRequestHistory.objects.using('readonly').all() one_month_of_seconds = (((60 * 60) * 24) * 30) one_month_ago = (now() - timedelta(seconds=one_month_of_seconds)) remote_request_query = remote_request_query.filter(datetime_of_action__gt=one_month_ago) remote_request_query = remote_request_query.filter(kind_of_action__iexact=RETRIEVE_UPDATE_DATA_FROM_TWITTER) remote_request_query = remote_request_query.exclude((Q(organization_we_vote_id__isnull=True) | Q(organization_we_vote_id=''))) remote_request_query = remote_request_query.values_list('organization_we_vote_id', flat=True).distinct() organization_we_vote_id_list_to_exclude = list(remote_request_query) except Exception as e: status += (('FAILED_FETCHING_ORGANIZATIONS_FROM_REMOTE_REQUEST_HISTORY: ' + str(e)) + ' ') return 0 organization_we_vote_id_list = list((set(organization_we_vote_id_list_to_include) - set(organization_we_vote_id_list_to_exclude))) queryset = Organization.objects.using('readonly').all() queryset = queryset.filter(we_vote_id__in=organization_we_vote_id_list) queryset = queryset.exclude(organization_twitter_updates_failing=True) queryset = queryset.exclude(organization_type__in=INDIVIDUAL) try: organization_count = queryset.count() except Exception as e: organization_count = 0 return organization_count
Do not include individuals in this. :return:
import_export_twitter/controllers.py
fetch_number_of_organizations_needing_twitter_update
wevote/WeVoteServer
44
python
def fetch_number_of_organizations_needing_twitter_update(): '\n Do not include individuals in this.\n :return: \n ' organization_we_vote_id_list_to_exclude = [] status = twitter_user_manager = TwitterUserManager() results = twitter_user_manager.retrieve_twitter_link_to_organization_list(return_we_vote_id_list_only=True, read_only=True) organization_we_vote_id_list_to_include = results['organization_we_vote_id_list'] if len(organization_we_vote_id_list_to_include): try: remote_request_query = RemoteRequestHistory.objects.using('readonly').all() one_month_of_seconds = (((60 * 60) * 24) * 30) one_month_ago = (now() - timedelta(seconds=one_month_of_seconds)) remote_request_query = remote_request_query.filter(datetime_of_action__gt=one_month_ago) remote_request_query = remote_request_query.filter(kind_of_action__iexact=RETRIEVE_UPDATE_DATA_FROM_TWITTER) remote_request_query = remote_request_query.exclude((Q(organization_we_vote_id__isnull=True) | Q(organization_we_vote_id=))) remote_request_query = remote_request_query.values_list('organization_we_vote_id', flat=True).distinct() organization_we_vote_id_list_to_exclude = list(remote_request_query) except Exception as e: status += (('FAILED_FETCHING_ORGANIZATIONS_FROM_REMOTE_REQUEST_HISTORY: ' + str(e)) + ' ') return 0 organization_we_vote_id_list = list((set(organization_we_vote_id_list_to_include) - set(organization_we_vote_id_list_to_exclude))) queryset = Organization.objects.using('readonly').all() queryset = queryset.filter(we_vote_id__in=organization_we_vote_id_list) queryset = queryset.exclude(organization_twitter_updates_failing=True) queryset = queryset.exclude(organization_type__in=INDIVIDUAL) try: organization_count = queryset.count() except Exception as e: organization_count = 0 return organization_count
def fetch_number_of_organizations_needing_twitter_update(): '\n Do not include individuals in this.\n :return: \n ' organization_we_vote_id_list_to_exclude = [] status = twitter_user_manager = TwitterUserManager() results = twitter_user_manager.retrieve_twitter_link_to_organization_list(return_we_vote_id_list_only=True, read_only=True) organization_we_vote_id_list_to_include = results['organization_we_vote_id_list'] if len(organization_we_vote_id_list_to_include): try: remote_request_query = RemoteRequestHistory.objects.using('readonly').all() one_month_of_seconds = (((60 * 60) * 24) * 30) one_month_ago = (now() - timedelta(seconds=one_month_of_seconds)) remote_request_query = remote_request_query.filter(datetime_of_action__gt=one_month_ago) remote_request_query = remote_request_query.filter(kind_of_action__iexact=RETRIEVE_UPDATE_DATA_FROM_TWITTER) remote_request_query = remote_request_query.exclude((Q(organization_we_vote_id__isnull=True) | Q(organization_we_vote_id=))) remote_request_query = remote_request_query.values_list('organization_we_vote_id', flat=True).distinct() organization_we_vote_id_list_to_exclude = list(remote_request_query) except Exception as e: status += (('FAILED_FETCHING_ORGANIZATIONS_FROM_REMOTE_REQUEST_HISTORY: ' + str(e)) + ' ') return 0 organization_we_vote_id_list = list((set(organization_we_vote_id_list_to_include) - set(organization_we_vote_id_list_to_exclude))) queryset = Organization.objects.using('readonly').all() queryset = queryset.filter(we_vote_id__in=organization_we_vote_id_list) queryset = queryset.exclude(organization_twitter_updates_failing=True) queryset = queryset.exclude(organization_type__in=INDIVIDUAL) try: organization_count = queryset.count() except Exception as e: organization_count = 0 return organization_count<|docstring|>Do not include individuals in this. :return:<|endoftext|>
d561a2625e4613c1bcb9355948db838dd80d2e97b136781834a64cfcb0e23f41
def refresh_twitter_organization_details(organization, twitter_user_id=0): '\n This function assumes TwitterLinkToOrganization is happening outside of this function. It relies on our caching\n organization_twitter_handle in the organization object.\n :param organization:\n :param twitter_user_id:\n :return:\n ' organization_manager = OrganizationManager() we_vote_image_manager = WeVoteImageManager() status = '' organization_twitter_handle = '' twitter_image_load_info = '' if (not organization): status += 'ORGANIZATION_TWITTER_DETAILS_NOT_RETRIEVED-ORG_MISSING ' results = {'success': False, 'status': status, 'organization': organization, 'twitter_user_found': False, 'twitter_user_id': twitter_user_id, 'twitter_handle': organization_twitter_handle} return results twitter_user_found = False twitter_json = {} if positive_value_exists(twitter_user_id): try: status += 'REACHING_OUT_TO_TWITTER_BY_USER_ID ' results = retrieve_twitter_user_info(twitter_user_id) if results['success']: twitter_json = results['twitter_json'] twitter_user_found = True twitter_user_id = results['twitter_user_id'] elif (results['twitter_user_not_found_in_twitter'] or results['twitter_user_suspended_by_twitter']): try: organization.organization_twitter_updates_failing = True organization.save() except Exception as e: status += (('COULD_NOT_MARK_ORGANIZATION_TWITTER_UPDATES_FAILING1: ' + str(e)) + ' ') except Exception as e: status += (('RETRIEVE_TWITTER_USER_INFO_BY_USER_ID_FAILS: ' + str(e)) + ' ') if ((not twitter_user_found) and positive_value_exists(organization.organization_twitter_handle)): twitter_user_id_zero = 0 try: results = retrieve_twitter_user_info(twitter_user_id_zero, organization.organization_twitter_handle) status += results['status'] if results['success']: twitter_json = results['twitter_json'] twitter_user_found = True twitter_user_id = results['twitter_user_id'] elif (results['twitter_user_not_found_in_twitter'] or results['twitter_user_suspended_by_twitter']): try: 
organization.organization_twitter_updates_failing = True organization.save() except Exception as e: status += (('COULD_NOT_MARK_ORGANIZATION_TWITTER_UPDATES_FAILING2: ' + str(e)) + ' ') except Exception as e: status += (('RETRIEVE_TWITTER_USER_INFO_BY_HANDLE_FAILS: ' + str(e)) + ' ') if twitter_user_found: status += (str(organization.organization_twitter_handle) + '-RETRIEVED_FROM_TWITTER ') profile_image_url_https = (twitter_json['profile_image_url_https'] if ('profile_image_url_https' in twitter_json) else None) twitter_profile_image_url_https = we_vote_image_manager.twitter_profile_image_url_https_original(profile_image_url_https) twitter_profile_background_image_url_https = (twitter_json['profile_background_image_url_https'] if ('profile_background_image_url_https' in twitter_json) else None) twitter_profile_banner_url_https = (twitter_json['profile_banner_url'] if ('profile_banner_url' in twitter_json) else None) twitter_image_load_info = {'organization': organization, 'twitter_user_id': organization.twitter_user_id, 'twitter_profile_image_url_https': twitter_profile_image_url_https, 'twitter_profile_background_image_url_https': twitter_profile_background_image_url_https, 'twitter_profile_banner_url_https': twitter_profile_banner_url_https, 'twitter_json': twitter_json} process_twitter_images(twitter_image_load_info) else: status += (str(organization.organization_twitter_handle) + '-NOT_RETRIEVED_CLEARING_TWITTER_DETAILS ') save_organization_results = organization_manager.clear_organization_twitter_details(organization) if save_organization_results['success']: results = update_social_media_statistics_in_other_tables(organization) else: status += 'ORGANIZATION_TWITTER_DETAILS_NOT_CLEARED_FROM_DB ' results = {'success': True, 'status': status, 'organization': organization, 'twitter_user_found': twitter_user_found, 'twitter_user_id': twitter_user_id, 'twitter_handle': organization_twitter_handle, 'twitter_image_load_info': twitter_image_load_info} return results
This function assumes TwitterLinkToOrganization is happening outside of this function. It relies on our caching organization_twitter_handle in the organization object. :param organization: :param twitter_user_id: :return:
import_export_twitter/controllers.py
refresh_twitter_organization_details
wevote/WeVoteServer
44
python
def refresh_twitter_organization_details(organization, twitter_user_id=0): '\n This function assumes TwitterLinkToOrganization is happening outside of this function. It relies on our caching\n organization_twitter_handle in the organization object.\n :param organization:\n :param twitter_user_id:\n :return:\n ' organization_manager = OrganizationManager() we_vote_image_manager = WeVoteImageManager() status = organization_twitter_handle = twitter_image_load_info = if (not organization): status += 'ORGANIZATION_TWITTER_DETAILS_NOT_RETRIEVED-ORG_MISSING ' results = {'success': False, 'status': status, 'organization': organization, 'twitter_user_found': False, 'twitter_user_id': twitter_user_id, 'twitter_handle': organization_twitter_handle} return results twitter_user_found = False twitter_json = {} if positive_value_exists(twitter_user_id): try: status += 'REACHING_OUT_TO_TWITTER_BY_USER_ID ' results = retrieve_twitter_user_info(twitter_user_id) if results['success']: twitter_json = results['twitter_json'] twitter_user_found = True twitter_user_id = results['twitter_user_id'] elif (results['twitter_user_not_found_in_twitter'] or results['twitter_user_suspended_by_twitter']): try: organization.organization_twitter_updates_failing = True organization.save() except Exception as e: status += (('COULD_NOT_MARK_ORGANIZATION_TWITTER_UPDATES_FAILING1: ' + str(e)) + ' ') except Exception as e: status += (('RETRIEVE_TWITTER_USER_INFO_BY_USER_ID_FAILS: ' + str(e)) + ' ') if ((not twitter_user_found) and positive_value_exists(organization.organization_twitter_handle)): twitter_user_id_zero = 0 try: results = retrieve_twitter_user_info(twitter_user_id_zero, organization.organization_twitter_handle) status += results['status'] if results['success']: twitter_json = results['twitter_json'] twitter_user_found = True twitter_user_id = results['twitter_user_id'] elif (results['twitter_user_not_found_in_twitter'] or results['twitter_user_suspended_by_twitter']): try: 
organization.organization_twitter_updates_failing = True organization.save() except Exception as e: status += (('COULD_NOT_MARK_ORGANIZATION_TWITTER_UPDATES_FAILING2: ' + str(e)) + ' ') except Exception as e: status += (('RETRIEVE_TWITTER_USER_INFO_BY_HANDLE_FAILS: ' + str(e)) + ' ') if twitter_user_found: status += (str(organization.organization_twitter_handle) + '-RETRIEVED_FROM_TWITTER ') profile_image_url_https = (twitter_json['profile_image_url_https'] if ('profile_image_url_https' in twitter_json) else None) twitter_profile_image_url_https = we_vote_image_manager.twitter_profile_image_url_https_original(profile_image_url_https) twitter_profile_background_image_url_https = (twitter_json['profile_background_image_url_https'] if ('profile_background_image_url_https' in twitter_json) else None) twitter_profile_banner_url_https = (twitter_json['profile_banner_url'] if ('profile_banner_url' in twitter_json) else None) twitter_image_load_info = {'organization': organization, 'twitter_user_id': organization.twitter_user_id, 'twitter_profile_image_url_https': twitter_profile_image_url_https, 'twitter_profile_background_image_url_https': twitter_profile_background_image_url_https, 'twitter_profile_banner_url_https': twitter_profile_banner_url_https, 'twitter_json': twitter_json} process_twitter_images(twitter_image_load_info) else: status += (str(organization.organization_twitter_handle) + '-NOT_RETRIEVED_CLEARING_TWITTER_DETAILS ') save_organization_results = organization_manager.clear_organization_twitter_details(organization) if save_organization_results['success']: results = update_social_media_statistics_in_other_tables(organization) else: status += 'ORGANIZATION_TWITTER_DETAILS_NOT_CLEARED_FROM_DB ' results = {'success': True, 'status': status, 'organization': organization, 'twitter_user_found': twitter_user_found, 'twitter_user_id': twitter_user_id, 'twitter_handle': organization_twitter_handle, 'twitter_image_load_info': twitter_image_load_info} return results
def refresh_twitter_organization_details(organization, twitter_user_id=0): '\n This function assumes TwitterLinkToOrganization is happening outside of this function. It relies on our caching\n organization_twitter_handle in the organization object.\n :param organization:\n :param twitter_user_id:\n :return:\n ' organization_manager = OrganizationManager() we_vote_image_manager = WeVoteImageManager() status = organization_twitter_handle = twitter_image_load_info = if (not organization): status += 'ORGANIZATION_TWITTER_DETAILS_NOT_RETRIEVED-ORG_MISSING ' results = {'success': False, 'status': status, 'organization': organization, 'twitter_user_found': False, 'twitter_user_id': twitter_user_id, 'twitter_handle': organization_twitter_handle} return results twitter_user_found = False twitter_json = {} if positive_value_exists(twitter_user_id): try: status += 'REACHING_OUT_TO_TWITTER_BY_USER_ID ' results = retrieve_twitter_user_info(twitter_user_id) if results['success']: twitter_json = results['twitter_json'] twitter_user_found = True twitter_user_id = results['twitter_user_id'] elif (results['twitter_user_not_found_in_twitter'] or results['twitter_user_suspended_by_twitter']): try: organization.organization_twitter_updates_failing = True organization.save() except Exception as e: status += (('COULD_NOT_MARK_ORGANIZATION_TWITTER_UPDATES_FAILING1: ' + str(e)) + ' ') except Exception as e: status += (('RETRIEVE_TWITTER_USER_INFO_BY_USER_ID_FAILS: ' + str(e)) + ' ') if ((not twitter_user_found) and positive_value_exists(organization.organization_twitter_handle)): twitter_user_id_zero = 0 try: results = retrieve_twitter_user_info(twitter_user_id_zero, organization.organization_twitter_handle) status += results['status'] if results['success']: twitter_json = results['twitter_json'] twitter_user_found = True twitter_user_id = results['twitter_user_id'] elif (results['twitter_user_not_found_in_twitter'] or results['twitter_user_suspended_by_twitter']): try: 
organization.organization_twitter_updates_failing = True organization.save() except Exception as e: status += (('COULD_NOT_MARK_ORGANIZATION_TWITTER_UPDATES_FAILING2: ' + str(e)) + ' ') except Exception as e: status += (('RETRIEVE_TWITTER_USER_INFO_BY_HANDLE_FAILS: ' + str(e)) + ' ') if twitter_user_found: status += (str(organization.organization_twitter_handle) + '-RETRIEVED_FROM_TWITTER ') profile_image_url_https = (twitter_json['profile_image_url_https'] if ('profile_image_url_https' in twitter_json) else None) twitter_profile_image_url_https = we_vote_image_manager.twitter_profile_image_url_https_original(profile_image_url_https) twitter_profile_background_image_url_https = (twitter_json['profile_background_image_url_https'] if ('profile_background_image_url_https' in twitter_json) else None) twitter_profile_banner_url_https = (twitter_json['profile_banner_url'] if ('profile_banner_url' in twitter_json) else None) twitter_image_load_info = {'organization': organization, 'twitter_user_id': organization.twitter_user_id, 'twitter_profile_image_url_https': twitter_profile_image_url_https, 'twitter_profile_background_image_url_https': twitter_profile_background_image_url_https, 'twitter_profile_banner_url_https': twitter_profile_banner_url_https, 'twitter_json': twitter_json} process_twitter_images(twitter_image_load_info) else: status += (str(organization.organization_twitter_handle) + '-NOT_RETRIEVED_CLEARING_TWITTER_DETAILS ') save_organization_results = organization_manager.clear_organization_twitter_details(organization) if save_organization_results['success']: results = update_social_media_statistics_in_other_tables(organization) else: status += 'ORGANIZATION_TWITTER_DETAILS_NOT_CLEARED_FROM_DB ' results = {'success': True, 'status': status, 'organization': organization, 'twitter_user_found': twitter_user_found, 'twitter_user_id': twitter_user_id, 'twitter_handle': organization_twitter_handle, 'twitter_image_load_info': twitter_image_load_info} return 
results<|docstring|>This function assumes TwitterLinkToOrganization is happening outside of this function. It relies on our caching organization_twitter_handle in the organization object. :param organization: :param twitter_user_id: :return:<|endoftext|>
5bd283f986dd14e81fe5bb7cbfc56c0e44798e0fc44ad5b605e0c2afcffa59bb
def twitter_sign_in_start_for_api(voter_device_id, return_url, cordova): '\n\n :param voter_device_id:\n :param return_url: Where to direct the browser at the very end of the process\n :param cordova:\n :return:\n ' results = is_voter_device_id_valid(voter_device_id) if (not results['success']): results = {'success': False, 'status': 'VALID_VOTER_DEVICE_ID_MISSING', 'voter_device_id': voter_device_id, 'twitter_redirect_url': '', 'voter_info_retrieved': False, 'switch_accounts': False, 'jump_to_request_voter_info': False, 'return_url': return_url} return results voter_manager = VoterManager() results = voter_manager.retrieve_voter_from_voter_device_id(voter_device_id, read_only=True) if (not positive_value_exists(results['voter_found'])): results = {'status': 'VALID_VOTER_MISSING', 'success': False, 'voter_device_id': voter_device_id, 'twitter_redirect_url': '', 'voter_info_retrieved': False, 'switch_accounts': False, 'jump_to_request_voter_info': False, 'return_url': return_url} return results voter = results['voter'] twitter_user_manager = TwitterUserManager() twitter_user_results = twitter_user_manager.retrieve_twitter_link_to_voter(voter.we_vote_id, read_only=True) if twitter_user_results['twitter_link_to_voter_found']: error_results = {'status': 'TWITTER_OWNER_VOTER_FOUND_WHEN_NOT_EXPECTED', 'success': False, 'voter_device_id': voter_device_id, 'twitter_redirect_url': '', 'voter_info_retrieved': False, 'switch_accounts': False, 'jump_to_request_voter_info': False, 'return_url': return_url} return error_results twitter_auth_manager = TwitterAuthManager() auth_response_results = twitter_auth_manager.retrieve_twitter_auth_response(voter_device_id) if auth_response_results['twitter_auth_response_found']: twitter_auth_response = auth_response_results['twitter_auth_response'] else: auth_create_results = twitter_auth_manager.update_or_create_twitter_auth_response(voter_device_id) if (not auth_create_results['twitter_auth_response_created']): error_results = {'status': 
auth_create_results['status'], 'success': False, 'voter_device_id': voter_device_id, 'twitter_redirect_url': '', 'voter_info_retrieved': False, 'switch_accounts': False, 'jump_to_request_voter_info': False, 'return_url': return_url} return error_results twitter_auth_response = auth_create_results['twitter_auth_response'] callback_url = (WE_VOTE_SERVER_ROOT_URL + '/apis/v1/twitterSignInRequest/') callback_url += '?voter_info_mode=0' callback_url += ('&voter_device_id=' + voter_device_id) callback_url += ('&return_url=' + return_url) callback_url += ('&cordova=' + str(cordova)) try: auth = tweepy.OAuthHandler(TWITTER_CONSUMER_KEY, TWITTER_CONSUMER_SECRET, callback_url) twitter_authorization_url = auth.get_authorization_url() request_token_dict = auth.request_token twitter_request_token = '' twitter_request_token_secret = '' if ('oauth_token' in request_token_dict): twitter_request_token = request_token_dict['oauth_token'] if ('oauth_token_secret' in request_token_dict): twitter_request_token_secret = request_token_dict['oauth_token_secret'] if (positive_value_exists(twitter_request_token) and positive_value_exists(twitter_request_token_secret)): twitter_auth_response.twitter_request_token = twitter_request_token twitter_auth_response.twitter_request_secret = twitter_request_token_secret twitter_auth_response.save() success = True status = 'TWITTER_REDIRECT_URL_RETRIEVED' else: success = False status = 'TWITTER_REDIRECT_URL_NOT_RETRIEVED' except tweepy.TooManyRequests: success = False status = 'TWITTER_RATE_LIMIT_ERROR' except tweepy.TweepyException as error_instance: success = False err_string = 'GENERAL_TWEEPY_EXCEPTION' try: err_string = error_instance.args[0].args[0].args[0] except Exception: pass print(err_string) status = 'TWITTER_SIGN_IN_START: {}'.format(err_string) except Exception as e1: success = False status = 'TWITTER_SIGN_IN_START: {}'.format(e1) if success: results = {'status': status, 'success': True, 'voter_device_id': voter_device_id, 
'twitter_redirect_url': twitter_authorization_url, 'voter_info_retrieved': False, 'switch_accounts': False, 'jump_to_request_voter_info': False, 'return_url': return_url} else: results = {'status': status, 'success': False, 'voter_device_id': voter_device_id, 'twitter_redirect_url': '', 'voter_info_retrieved': False, 'switch_accounts': False, 'jump_to_request_voter_info': False, 'return_url': return_url} return results
:param voter_device_id: :param return_url: Where to direct the browser at the very end of the process :param cordova: :return:
import_export_twitter/controllers.py
twitter_sign_in_start_for_api
wevote/WeVoteServer
44
python
def twitter_sign_in_start_for_api(voter_device_id, return_url, cordova): '\n\n :param voter_device_id:\n :param return_url: Where to direct the browser at the very end of the process\n :param cordova:\n :return:\n ' results = is_voter_device_id_valid(voter_device_id) if (not results['success']): results = {'success': False, 'status': 'VALID_VOTER_DEVICE_ID_MISSING', 'voter_device_id': voter_device_id, 'twitter_redirect_url': , 'voter_info_retrieved': False, 'switch_accounts': False, 'jump_to_request_voter_info': False, 'return_url': return_url} return results voter_manager = VoterManager() results = voter_manager.retrieve_voter_from_voter_device_id(voter_device_id, read_only=True) if (not positive_value_exists(results['voter_found'])): results = {'status': 'VALID_VOTER_MISSING', 'success': False, 'voter_device_id': voter_device_id, 'twitter_redirect_url': , 'voter_info_retrieved': False, 'switch_accounts': False, 'jump_to_request_voter_info': False, 'return_url': return_url} return results voter = results['voter'] twitter_user_manager = TwitterUserManager() twitter_user_results = twitter_user_manager.retrieve_twitter_link_to_voter(voter.we_vote_id, read_only=True) if twitter_user_results['twitter_link_to_voter_found']: error_results = {'status': 'TWITTER_OWNER_VOTER_FOUND_WHEN_NOT_EXPECTED', 'success': False, 'voter_device_id': voter_device_id, 'twitter_redirect_url': , 'voter_info_retrieved': False, 'switch_accounts': False, 'jump_to_request_voter_info': False, 'return_url': return_url} return error_results twitter_auth_manager = TwitterAuthManager() auth_response_results = twitter_auth_manager.retrieve_twitter_auth_response(voter_device_id) if auth_response_results['twitter_auth_response_found']: twitter_auth_response = auth_response_results['twitter_auth_response'] else: auth_create_results = twitter_auth_manager.update_or_create_twitter_auth_response(voter_device_id) if (not auth_create_results['twitter_auth_response_created']): error_results = {'status': 
auth_create_results['status'], 'success': False, 'voter_device_id': voter_device_id, 'twitter_redirect_url': , 'voter_info_retrieved': False, 'switch_accounts': False, 'jump_to_request_voter_info': False, 'return_url': return_url} return error_results twitter_auth_response = auth_create_results['twitter_auth_response'] callback_url = (WE_VOTE_SERVER_ROOT_URL + '/apis/v1/twitterSignInRequest/') callback_url += '?voter_info_mode=0' callback_url += ('&voter_device_id=' + voter_device_id) callback_url += ('&return_url=' + return_url) callback_url += ('&cordova=' + str(cordova)) try: auth = tweepy.OAuthHandler(TWITTER_CONSUMER_KEY, TWITTER_CONSUMER_SECRET, callback_url) twitter_authorization_url = auth.get_authorization_url() request_token_dict = auth.request_token twitter_request_token = twitter_request_token_secret = if ('oauth_token' in request_token_dict): twitter_request_token = request_token_dict['oauth_token'] if ('oauth_token_secret' in request_token_dict): twitter_request_token_secret = request_token_dict['oauth_token_secret'] if (positive_value_exists(twitter_request_token) and positive_value_exists(twitter_request_token_secret)): twitter_auth_response.twitter_request_token = twitter_request_token twitter_auth_response.twitter_request_secret = twitter_request_token_secret twitter_auth_response.save() success = True status = 'TWITTER_REDIRECT_URL_RETRIEVED' else: success = False status = 'TWITTER_REDIRECT_URL_NOT_RETRIEVED' except tweepy.TooManyRequests: success = False status = 'TWITTER_RATE_LIMIT_ERROR' except tweepy.TweepyException as error_instance: success = False err_string = 'GENERAL_TWEEPY_EXCEPTION' try: err_string = error_instance.args[0].args[0].args[0] except Exception: pass print(err_string) status = 'TWITTER_SIGN_IN_START: {}'.format(err_string) except Exception as e1: success = False status = 'TWITTER_SIGN_IN_START: {}'.format(e1) if success: results = {'status': status, 'success': True, 'voter_device_id': voter_device_id, 'twitter_redirect_url': 
twitter_authorization_url, 'voter_info_retrieved': False, 'switch_accounts': False, 'jump_to_request_voter_info': False, 'return_url': return_url} else: results = {'status': status, 'success': False, 'voter_device_id': voter_device_id, 'twitter_redirect_url': , 'voter_info_retrieved': False, 'switch_accounts': False, 'jump_to_request_voter_info': False, 'return_url': return_url} return results
def twitter_sign_in_start_for_api(voter_device_id, return_url, cordova): '\n\n :param voter_device_id:\n :param return_url: Where to direct the browser at the very end of the process\n :param cordova:\n :return:\n ' results = is_voter_device_id_valid(voter_device_id) if (not results['success']): results = {'success': False, 'status': 'VALID_VOTER_DEVICE_ID_MISSING', 'voter_device_id': voter_device_id, 'twitter_redirect_url': , 'voter_info_retrieved': False, 'switch_accounts': False, 'jump_to_request_voter_info': False, 'return_url': return_url} return results voter_manager = VoterManager() results = voter_manager.retrieve_voter_from_voter_device_id(voter_device_id, read_only=True) if (not positive_value_exists(results['voter_found'])): results = {'status': 'VALID_VOTER_MISSING', 'success': False, 'voter_device_id': voter_device_id, 'twitter_redirect_url': , 'voter_info_retrieved': False, 'switch_accounts': False, 'jump_to_request_voter_info': False, 'return_url': return_url} return results voter = results['voter'] twitter_user_manager = TwitterUserManager() twitter_user_results = twitter_user_manager.retrieve_twitter_link_to_voter(voter.we_vote_id, read_only=True) if twitter_user_results['twitter_link_to_voter_found']: error_results = {'status': 'TWITTER_OWNER_VOTER_FOUND_WHEN_NOT_EXPECTED', 'success': False, 'voter_device_id': voter_device_id, 'twitter_redirect_url': , 'voter_info_retrieved': False, 'switch_accounts': False, 'jump_to_request_voter_info': False, 'return_url': return_url} return error_results twitter_auth_manager = TwitterAuthManager() auth_response_results = twitter_auth_manager.retrieve_twitter_auth_response(voter_device_id) if auth_response_results['twitter_auth_response_found']: twitter_auth_response = auth_response_results['twitter_auth_response'] else: auth_create_results = twitter_auth_manager.update_or_create_twitter_auth_response(voter_device_id) if (not auth_create_results['twitter_auth_response_created']): error_results = {'status': 
auth_create_results['status'], 'success': False, 'voter_device_id': voter_device_id, 'twitter_redirect_url': , 'voter_info_retrieved': False, 'switch_accounts': False, 'jump_to_request_voter_info': False, 'return_url': return_url} return error_results twitter_auth_response = auth_create_results['twitter_auth_response'] callback_url = (WE_VOTE_SERVER_ROOT_URL + '/apis/v1/twitterSignInRequest/') callback_url += '?voter_info_mode=0' callback_url += ('&voter_device_id=' + voter_device_id) callback_url += ('&return_url=' + return_url) callback_url += ('&cordova=' + str(cordova)) try: auth = tweepy.OAuthHandler(TWITTER_CONSUMER_KEY, TWITTER_CONSUMER_SECRET, callback_url) twitter_authorization_url = auth.get_authorization_url() request_token_dict = auth.request_token twitter_request_token = twitter_request_token_secret = if ('oauth_token' in request_token_dict): twitter_request_token = request_token_dict['oauth_token'] if ('oauth_token_secret' in request_token_dict): twitter_request_token_secret = request_token_dict['oauth_token_secret'] if (positive_value_exists(twitter_request_token) and positive_value_exists(twitter_request_token_secret)): twitter_auth_response.twitter_request_token = twitter_request_token twitter_auth_response.twitter_request_secret = twitter_request_token_secret twitter_auth_response.save() success = True status = 'TWITTER_REDIRECT_URL_RETRIEVED' else: success = False status = 'TWITTER_REDIRECT_URL_NOT_RETRIEVED' except tweepy.TooManyRequests: success = False status = 'TWITTER_RATE_LIMIT_ERROR' except tweepy.TweepyException as error_instance: success = False err_string = 'GENERAL_TWEEPY_EXCEPTION' try: err_string = error_instance.args[0].args[0].args[0] except Exception: pass print(err_string) status = 'TWITTER_SIGN_IN_START: {}'.format(err_string) except Exception as e1: success = False status = 'TWITTER_SIGN_IN_START: {}'.format(e1) if success: results = {'status': status, 'success': True, 'voter_device_id': voter_device_id, 'twitter_redirect_url': 
twitter_authorization_url, 'voter_info_retrieved': False, 'switch_accounts': False, 'jump_to_request_voter_info': False, 'return_url': return_url} else: results = {'status': status, 'success': False, 'voter_device_id': voter_device_id, 'twitter_redirect_url': , 'voter_info_retrieved': False, 'switch_accounts': False, 'jump_to_request_voter_info': False, 'return_url': return_url} return results<|docstring|>:param voter_device_id: :param return_url: Where to direct the browser at the very end of the process :param cordova: :return:<|endoftext|>
55795fd2425795d7baaaf9ce44d490873b4ce6a30233d4ac2ec6c2803aaa146b
def twitter_native_sign_in_save_for_api(voter_device_id, twitter_access_token, twitter_access_secret): '\n For react-native-oauth, we receive the tokens from a single authenticate() call, and save them to the\n TwitterAuthManager(). This is equivalent to Steps 1 & 2 in the WebApp oAuth processing\n\n :param voter_device_id:\n :param twitter_access_token: react-native-oauth refers to this as the "access_token"\n :param twitter_access_secret: react-native-oauth refers to this as the "access_token_secret"\n :return:\n ' results = is_voter_device_id_valid(voter_device_id) if (not results['success']): results = {'success': False, 'status': 'VALID_VOTER_DEVICE_ID_MISSING', 'voter_device_id': voter_device_id} return results voter_manager = VoterManager() results = voter_manager.retrieve_voter_from_voter_device_id(voter_device_id, read_only=True) if (not positive_value_exists(results['voter_found'])): results = {'status': 'VALID_VOTER_MISSING', 'success': False, 'voter_device_id': voter_device_id} return results voter = results['voter'] twitter_user_manager = TwitterUserManager() twitter_user_results = twitter_user_manager.retrieve_twitter_link_to_voter(voter.we_vote_id, read_only=True) if twitter_user_results['twitter_link_to_voter_found']: error_results = {'status': 'TWITTER_OWNER_VOTER_FOUND_WHEN_NOT_EXPECTED', 'success': False, 'voter_device_id': voter_device_id} return error_results twitter_auth_manager = TwitterAuthManager() auth_response_results = twitter_auth_manager.retrieve_twitter_auth_response(voter_device_id) if auth_response_results['twitter_auth_response_found']: twitter_auth_response = auth_response_results['twitter_auth_response'] else: auth_create_results = twitter_auth_manager.update_or_create_twitter_auth_response(voter_device_id) if (not auth_create_results['twitter_auth_response_created']): error_results = {'status': auth_create_results['status'], 'success': False, 'voter_device_id': voter_device_id} return error_results twitter_auth_response = 
auth_create_results['twitter_auth_response'] try: if (positive_value_exists(twitter_access_token) and positive_value_exists(twitter_access_secret)): twitter_auth_response.twitter_access_token = twitter_access_token twitter_auth_response.twitter_access_secret = twitter_access_secret twitter_auth_response.twitter_request_token = TWITTER_NATIVE_INDICATOR twitter_auth_response.twitter_request_secret = TWITTER_NATIVE_INDICATOR twitter_auth_response.save() success = True status = 'TWITTER_TOKENS_STORED' else: success = False status = 'TWITTER_TOKENS_NOT_STORED_DUE_TO_BAD_PASSED_IN_TOKENS' logger.error('twitter_native_sign_in_save_for_api -- TWITTER_TOKENS_NOT_STORED_BAD_PASSED_IN_TOKENS') except Exception as e: success = False status = 'TWITTER_TOKEN_EXCEPTION_ON_FAILED_SAVE' logger.error(('twitter_native_sign_in_save_for_api -- save threw exception: ' + str(e))) if success: results = {'status': status, 'success': True, 'voter_device_id': voter_device_id, 'voter_info_retrieved': False, 'switch_accounts': False, 'jump_to_request_voter_info': False} else: results = {'status': status, 'success': False, 'voter_device_id': voter_device_id, 'voter_info_retrieved': False, 'switch_accounts': False, 'jump_to_request_voter_info': False} return results
For react-native-oauth, we receive the tokens from a single authenticate() call, and save them to the TwitterAuthManager(). This is equivalent to Steps 1 & 2 in the WebApp oAuth processing :param voter_device_id: :param twitter_access_token: react-native-oauth refers to this as the "access_token" :param twitter_access_secret: react-native-oauth refers to this as the "access_token_secret" :return:
import_export_twitter/controllers.py
twitter_native_sign_in_save_for_api
wevote/WeVoteServer
44
python
def twitter_native_sign_in_save_for_api(voter_device_id, twitter_access_token, twitter_access_secret): '\n For react-native-oauth, we receive the tokens from a single authenticate() call, and save them to the\n TwitterAuthManager(). This is equivalent to Steps 1 & 2 in the WebApp oAuth processing\n\n :param voter_device_id:\n :param twitter_access_token: react-native-oauth refers to this as the "access_token"\n :param twitter_access_secret: react-native-oauth refers to this as the "access_token_secret"\n :return:\n ' results = is_voter_device_id_valid(voter_device_id) if (not results['success']): results = {'success': False, 'status': 'VALID_VOTER_DEVICE_ID_MISSING', 'voter_device_id': voter_device_id} return results voter_manager = VoterManager() results = voter_manager.retrieve_voter_from_voter_device_id(voter_device_id, read_only=True) if (not positive_value_exists(results['voter_found'])): results = {'status': 'VALID_VOTER_MISSING', 'success': False, 'voter_device_id': voter_device_id} return results voter = results['voter'] twitter_user_manager = TwitterUserManager() twitter_user_results = twitter_user_manager.retrieve_twitter_link_to_voter(voter.we_vote_id, read_only=True) if twitter_user_results['twitter_link_to_voter_found']: error_results = {'status': 'TWITTER_OWNER_VOTER_FOUND_WHEN_NOT_EXPECTED', 'success': False, 'voter_device_id': voter_device_id} return error_results twitter_auth_manager = TwitterAuthManager() auth_response_results = twitter_auth_manager.retrieve_twitter_auth_response(voter_device_id) if auth_response_results['twitter_auth_response_found']: twitter_auth_response = auth_response_results['twitter_auth_response'] else: auth_create_results = twitter_auth_manager.update_or_create_twitter_auth_response(voter_device_id) if (not auth_create_results['twitter_auth_response_created']): error_results = {'status': auth_create_results['status'], 'success': False, 'voter_device_id': voter_device_id} return error_results twitter_auth_response = 
auth_create_results['twitter_auth_response'] try: if (positive_value_exists(twitter_access_token) and positive_value_exists(twitter_access_secret)): twitter_auth_response.twitter_access_token = twitter_access_token twitter_auth_response.twitter_access_secret = twitter_access_secret twitter_auth_response.twitter_request_token = TWITTER_NATIVE_INDICATOR twitter_auth_response.twitter_request_secret = TWITTER_NATIVE_INDICATOR twitter_auth_response.save() success = True status = 'TWITTER_TOKENS_STORED' else: success = False status = 'TWITTER_TOKENS_NOT_STORED_DUE_TO_BAD_PASSED_IN_TOKENS' logger.error('twitter_native_sign_in_save_for_api -- TWITTER_TOKENS_NOT_STORED_BAD_PASSED_IN_TOKENS') except Exception as e: success = False status = 'TWITTER_TOKEN_EXCEPTION_ON_FAILED_SAVE' logger.error(('twitter_native_sign_in_save_for_api -- save threw exception: ' + str(e))) if success: results = {'status': status, 'success': True, 'voter_device_id': voter_device_id, 'voter_info_retrieved': False, 'switch_accounts': False, 'jump_to_request_voter_info': False} else: results = {'status': status, 'success': False, 'voter_device_id': voter_device_id, 'voter_info_retrieved': False, 'switch_accounts': False, 'jump_to_request_voter_info': False} return results
def twitter_native_sign_in_save_for_api(voter_device_id, twitter_access_token, twitter_access_secret): '\n For react-native-oauth, we receive the tokens from a single authenticate() call, and save them to the\n TwitterAuthManager(). This is equivalent to Steps 1 & 2 in the WebApp oAuth processing\n\n :param voter_device_id:\n :param twitter_access_token: react-native-oauth refers to this as the "access_token"\n :param twitter_access_secret: react-native-oauth refers to this as the "access_token_secret"\n :return:\n ' results = is_voter_device_id_valid(voter_device_id) if (not results['success']): results = {'success': False, 'status': 'VALID_VOTER_DEVICE_ID_MISSING', 'voter_device_id': voter_device_id} return results voter_manager = VoterManager() results = voter_manager.retrieve_voter_from_voter_device_id(voter_device_id, read_only=True) if (not positive_value_exists(results['voter_found'])): results = {'status': 'VALID_VOTER_MISSING', 'success': False, 'voter_device_id': voter_device_id} return results voter = results['voter'] twitter_user_manager = TwitterUserManager() twitter_user_results = twitter_user_manager.retrieve_twitter_link_to_voter(voter.we_vote_id, read_only=True) if twitter_user_results['twitter_link_to_voter_found']: error_results = {'status': 'TWITTER_OWNER_VOTER_FOUND_WHEN_NOT_EXPECTED', 'success': False, 'voter_device_id': voter_device_id} return error_results twitter_auth_manager = TwitterAuthManager() auth_response_results = twitter_auth_manager.retrieve_twitter_auth_response(voter_device_id) if auth_response_results['twitter_auth_response_found']: twitter_auth_response = auth_response_results['twitter_auth_response'] else: auth_create_results = twitter_auth_manager.update_or_create_twitter_auth_response(voter_device_id) if (not auth_create_results['twitter_auth_response_created']): error_results = {'status': auth_create_results['status'], 'success': False, 'voter_device_id': voter_device_id} return error_results twitter_auth_response = 
auth_create_results['twitter_auth_response'] try: if (positive_value_exists(twitter_access_token) and positive_value_exists(twitter_access_secret)): twitter_auth_response.twitter_access_token = twitter_access_token twitter_auth_response.twitter_access_secret = twitter_access_secret twitter_auth_response.twitter_request_token = TWITTER_NATIVE_INDICATOR twitter_auth_response.twitter_request_secret = TWITTER_NATIVE_INDICATOR twitter_auth_response.save() success = True status = 'TWITTER_TOKENS_STORED' else: success = False status = 'TWITTER_TOKENS_NOT_STORED_DUE_TO_BAD_PASSED_IN_TOKENS' logger.error('twitter_native_sign_in_save_for_api -- TWITTER_TOKENS_NOT_STORED_BAD_PASSED_IN_TOKENS') except Exception as e: success = False status = 'TWITTER_TOKEN_EXCEPTION_ON_FAILED_SAVE' logger.error(('twitter_native_sign_in_save_for_api -- save threw exception: ' + str(e))) if success: results = {'status': status, 'success': True, 'voter_device_id': voter_device_id, 'voter_info_retrieved': False, 'switch_accounts': False, 'jump_to_request_voter_info': False} else: results = {'status': status, 'success': False, 'voter_device_id': voter_device_id, 'voter_info_retrieved': False, 'switch_accounts': False, 'jump_to_request_voter_info': False} return results<|docstring|>For react-native-oauth, we receive the tokens from a single authenticate() call, and save them to the TwitterAuthManager(). This is equivalent to Steps 1 & 2 in the WebApp oAuth processing :param voter_device_id: :param twitter_access_token: react-native-oauth refers to this as the "access_token" :param twitter_access_secret: react-native-oauth refers to this as the "access_token_secret" :return:<|endoftext|>
ab57e89778dca53f4c9bdc78946f49a01be4bb48c04388843fc6ec4b57c01fe3
def twitter_sign_in_request_access_token_for_api(voter_device_id, incoming_request_token, incoming_oauth_verifier, return_url, cordova): "\n twitterSignInRequestAccessToken\n After signing in and agreeing to the application's terms, the user is redirected back to the application with\n the same request token and another value, this time the OAuth verifier.\n\n Within this function we use\n 1) the request token and\n 2) request secret along with the\n 3) OAuth verifier to get an access token, also from Twitter.\n :param voter_device_id:\n :param incoming_request_token:\n :param incoming_oauth_verifier:\n :param return_url: If a value is provided, return to this URL when the whole process is complete\n :param cordova:\n :return:\n " status = '' results = is_voter_device_id_valid(voter_device_id) if (not results['success']): results = {'success': False, 'status': 'VALID_VOTER_DEVICE_ID_MISSING ', 'voter_device_id': voter_device_id, 'access_token_and_secret_returned': False, 'return_url': return_url, 'cordova': cordova} return results voter_manager = VoterManager() results = voter_manager.retrieve_voter_from_voter_device_id(voter_device_id) if (not positive_value_exists(results['voter_found'])): results = {'status': 'VALID_VOTER_MISSING', 'success': False, 'voter_device_id': voter_device_id, 'access_token_and_secret_returned': False, 'return_url': return_url, 'cordova': cordova} return results voter = results['voter'] twitter_auth_manager = TwitterAuthManager() auth_response_results = twitter_auth_manager.retrieve_twitter_auth_response(voter_device_id) if (not auth_response_results['twitter_auth_response_found']): results = {'status': 'REQUEST_ACCESS_TOKEN-TWITTER_AUTH_RESPONSE_NOT_FOUND ', 'success': False, 'voter_device_id': voter_device_id, 'access_token_and_secret_returned': False, 'return_url': return_url, 'cordova': cordova} return results twitter_auth_response = auth_response_results['twitter_auth_response'] if (not (twitter_auth_response.twitter_request_token 
== incoming_request_token)): results = {'status': 'TWITTER_REQUEST_TOKEN_DOES_NOT_MATCH_STORED_VOTER_VALUE ', 'success': False, 'voter_device_id': voter_device_id, 'access_token_and_secret_returned': False, 'return_url': return_url, 'cordova': cordova} return results twitter_access_token = '' twitter_access_token_secret = '' try: auth = tweepy.OAuthHandler(TWITTER_CONSUMER_KEY, TWITTER_CONSUMER_SECRET) auth.request_token = {'oauth_token': twitter_auth_response.twitter_request_token, 'oauth_token_secret': twitter_auth_response.twitter_request_secret} auth.get_access_token(incoming_oauth_verifier) if (positive_value_exists(auth.access_token) and positive_value_exists(auth.access_token_secret)): twitter_access_token = auth.access_token twitter_access_token_secret = auth.access_token_secret except tweepy.TooManyRequests: success = False status = 'TWITTER_RATE_LIMIT_ERROR' except tweepy.TweepyException as error_instance: success = False err_string = 'GENERAL_TWEEPY_EXCEPTION' try: err_string = error_instance.args[0].args[0].args[0] except Exception: pass print(err_string) status = 'TWITTER_SIGN_IN_REQUEST_ACCESS_TOKEN: {}'.format(err_string) except Exception as e: success = False status += (('TWEEPY_EXCEPTION: ' + str(e)) + ' ') try: if (positive_value_exists(twitter_access_token) and positive_value_exists(twitter_access_token_secret)): twitter_auth_response.twitter_access_token = twitter_access_token twitter_auth_response.twitter_access_secret = twitter_access_token_secret twitter_auth_response.save() success = True status += 'TWITTER_ACCESS_TOKEN_RETRIEVED_AND_SAVED ' else: success = False status += 'TWITTER_ACCESS_TOKEN_NOT_RETRIEVED ' except Exception as e: success = False status += 'TWITTER_ACCESS_TOKEN_NOT_SAVED ' if success: results = {'status': status, 'success': True, 'voter_device_id': voter_device_id, 'access_token_and_secret_returned': True, 'return_url': return_url, 'cordova': cordova} else: results = {'status': status, 'success': False, 'voter_device_id': 
voter_device_id, 'access_token_and_secret_returned': False, 'return_url': return_url, 'cordova': cordova} return results
twitterSignInRequestAccessToken After signing in and agreeing to the application's terms, the user is redirected back to the application with the same request token and another value, this time the OAuth verifier. Within this function we use 1) the request token and 2) request secret along with the 3) OAuth verifier to get an access token, also from Twitter. :param voter_device_id: :param incoming_request_token: :param incoming_oauth_verifier: :param return_url: If a value is provided, return to this URL when the whole process is complete :param cordova: :return:
import_export_twitter/controllers.py
twitter_sign_in_request_access_token_for_api
wevote/WeVoteServer
44
python
def twitter_sign_in_request_access_token_for_api(voter_device_id, incoming_request_token, incoming_oauth_verifier, return_url, cordova): "\n twitterSignInRequestAccessToken\n After signing in and agreeing to the application's terms, the user is redirected back to the application with\n the same request token and another value, this time the OAuth verifier.\n\n Within this function we use\n 1) the request token and\n 2) request secret along with the\n 3) OAuth verifier to get an access token, also from Twitter.\n :param voter_device_id:\n :param incoming_request_token:\n :param incoming_oauth_verifier:\n :param return_url: If a value is provided, return to this URL when the whole process is complete\n :param cordova:\n :return:\n " status = results = is_voter_device_id_valid(voter_device_id) if (not results['success']): results = {'success': False, 'status': 'VALID_VOTER_DEVICE_ID_MISSING ', 'voter_device_id': voter_device_id, 'access_token_and_secret_returned': False, 'return_url': return_url, 'cordova': cordova} return results voter_manager = VoterManager() results = voter_manager.retrieve_voter_from_voter_device_id(voter_device_id) if (not positive_value_exists(results['voter_found'])): results = {'status': 'VALID_VOTER_MISSING', 'success': False, 'voter_device_id': voter_device_id, 'access_token_and_secret_returned': False, 'return_url': return_url, 'cordova': cordova} return results voter = results['voter'] twitter_auth_manager = TwitterAuthManager() auth_response_results = twitter_auth_manager.retrieve_twitter_auth_response(voter_device_id) if (not auth_response_results['twitter_auth_response_found']): results = {'status': 'REQUEST_ACCESS_TOKEN-TWITTER_AUTH_RESPONSE_NOT_FOUND ', 'success': False, 'voter_device_id': voter_device_id, 'access_token_and_secret_returned': False, 'return_url': return_url, 'cordova': cordova} return results twitter_auth_response = auth_response_results['twitter_auth_response'] if (not (twitter_auth_response.twitter_request_token == 
incoming_request_token)): results = {'status': 'TWITTER_REQUEST_TOKEN_DOES_NOT_MATCH_STORED_VOTER_VALUE ', 'success': False, 'voter_device_id': voter_device_id, 'access_token_and_secret_returned': False, 'return_url': return_url, 'cordova': cordova} return results twitter_access_token = twitter_access_token_secret = try: auth = tweepy.OAuthHandler(TWITTER_CONSUMER_KEY, TWITTER_CONSUMER_SECRET) auth.request_token = {'oauth_token': twitter_auth_response.twitter_request_token, 'oauth_token_secret': twitter_auth_response.twitter_request_secret} auth.get_access_token(incoming_oauth_verifier) if (positive_value_exists(auth.access_token) and positive_value_exists(auth.access_token_secret)): twitter_access_token = auth.access_token twitter_access_token_secret = auth.access_token_secret except tweepy.TooManyRequests: success = False status = 'TWITTER_RATE_LIMIT_ERROR' except tweepy.TweepyException as error_instance: success = False err_string = 'GENERAL_TWEEPY_EXCEPTION' try: err_string = error_instance.args[0].args[0].args[0] except Exception: pass print(err_string) status = 'TWITTER_SIGN_IN_REQUEST_ACCESS_TOKEN: {}'.format(err_string) except Exception as e: success = False status += (('TWEEPY_EXCEPTION: ' + str(e)) + ' ') try: if (positive_value_exists(twitter_access_token) and positive_value_exists(twitter_access_token_secret)): twitter_auth_response.twitter_access_token = twitter_access_token twitter_auth_response.twitter_access_secret = twitter_access_token_secret twitter_auth_response.save() success = True status += 'TWITTER_ACCESS_TOKEN_RETRIEVED_AND_SAVED ' else: success = False status += 'TWITTER_ACCESS_TOKEN_NOT_RETRIEVED ' except Exception as e: success = False status += 'TWITTER_ACCESS_TOKEN_NOT_SAVED ' if success: results = {'status': status, 'success': True, 'voter_device_id': voter_device_id, 'access_token_and_secret_returned': True, 'return_url': return_url, 'cordova': cordova} else: results = {'status': status, 'success': False, 'voter_device_id': 
voter_device_id, 'access_token_and_secret_returned': False, 'return_url': return_url, 'cordova': cordova} return results
def twitter_sign_in_request_access_token_for_api(voter_device_id, incoming_request_token, incoming_oauth_verifier, return_url, cordova): "\n twitterSignInRequestAccessToken\n After signing in and agreeing to the application's terms, the user is redirected back to the application with\n the same request token and another value, this time the OAuth verifier.\n\n Within this function we use\n 1) the request token and\n 2) request secret along with the\n 3) OAuth verifier to get an access token, also from Twitter.\n :param voter_device_id:\n :param incoming_request_token:\n :param incoming_oauth_verifier:\n :param return_url: If a value is provided, return to this URL when the whole process is complete\n :param cordova:\n :return:\n " status = results = is_voter_device_id_valid(voter_device_id) if (not results['success']): results = {'success': False, 'status': 'VALID_VOTER_DEVICE_ID_MISSING ', 'voter_device_id': voter_device_id, 'access_token_and_secret_returned': False, 'return_url': return_url, 'cordova': cordova} return results voter_manager = VoterManager() results = voter_manager.retrieve_voter_from_voter_device_id(voter_device_id) if (not positive_value_exists(results['voter_found'])): results = {'status': 'VALID_VOTER_MISSING', 'success': False, 'voter_device_id': voter_device_id, 'access_token_and_secret_returned': False, 'return_url': return_url, 'cordova': cordova} return results voter = results['voter'] twitter_auth_manager = TwitterAuthManager() auth_response_results = twitter_auth_manager.retrieve_twitter_auth_response(voter_device_id) if (not auth_response_results['twitter_auth_response_found']): results = {'status': 'REQUEST_ACCESS_TOKEN-TWITTER_AUTH_RESPONSE_NOT_FOUND ', 'success': False, 'voter_device_id': voter_device_id, 'access_token_and_secret_returned': False, 'return_url': return_url, 'cordova': cordova} return results twitter_auth_response = auth_response_results['twitter_auth_response'] if (not (twitter_auth_response.twitter_request_token == 
incoming_request_token)): results = {'status': 'TWITTER_REQUEST_TOKEN_DOES_NOT_MATCH_STORED_VOTER_VALUE ', 'success': False, 'voter_device_id': voter_device_id, 'access_token_and_secret_returned': False, 'return_url': return_url, 'cordova': cordova} return results twitter_access_token = twitter_access_token_secret = try: auth = tweepy.OAuthHandler(TWITTER_CONSUMER_KEY, TWITTER_CONSUMER_SECRET) auth.request_token = {'oauth_token': twitter_auth_response.twitter_request_token, 'oauth_token_secret': twitter_auth_response.twitter_request_secret} auth.get_access_token(incoming_oauth_verifier) if (positive_value_exists(auth.access_token) and positive_value_exists(auth.access_token_secret)): twitter_access_token = auth.access_token twitter_access_token_secret = auth.access_token_secret except tweepy.TooManyRequests: success = False status = 'TWITTER_RATE_LIMIT_ERROR' except tweepy.TweepyException as error_instance: success = False err_string = 'GENERAL_TWEEPY_EXCEPTION' try: err_string = error_instance.args[0].args[0].args[0] except Exception: pass print(err_string) status = 'TWITTER_SIGN_IN_REQUEST_ACCESS_TOKEN: {}'.format(err_string) except Exception as e: success = False status += (('TWEEPY_EXCEPTION: ' + str(e)) + ' ') try: if (positive_value_exists(twitter_access_token) and positive_value_exists(twitter_access_token_secret)): twitter_auth_response.twitter_access_token = twitter_access_token twitter_auth_response.twitter_access_secret = twitter_access_token_secret twitter_auth_response.save() success = True status += 'TWITTER_ACCESS_TOKEN_RETRIEVED_AND_SAVED ' else: success = False status += 'TWITTER_ACCESS_TOKEN_NOT_RETRIEVED ' except Exception as e: success = False status += 'TWITTER_ACCESS_TOKEN_NOT_SAVED ' if success: results = {'status': status, 'success': True, 'voter_device_id': voter_device_id, 'access_token_and_secret_returned': True, 'return_url': return_url, 'cordova': cordova} else: results = {'status': status, 'success': False, 'voter_device_id': 
voter_device_id, 'access_token_and_secret_returned': False, 'return_url': return_url, 'cordova': cordova} return results<|docstring|>twitterSignInRequestAccessToken After signing in and agreeing to the application's terms, the user is redirected back to the application with the same request token and another value, this time the OAuth verifier. Within this function we use 1) the request token and 2) request secret along with the 3) OAuth verifier to get an access token, also from Twitter. :param voter_device_id: :param incoming_request_token: :param incoming_oauth_verifier: :param return_url: If a value is provided, return to this URL when the whole process is complete :param cordova: :return:<|endoftext|>
a8241879aba4d2301b3df06d432e15282707bf62f7d073c9ccedbf7fbb014374
def twitter_sign_in_request_voter_info_for_api(voter_device_id, return_url): '\n (not directly called by) twitterSignInRequestVoterInfo\n When here, the incoming voter_device_id should already be authenticated\n :param voter_device_id:\n :param return_url: Where to return the browser when sign in process is complete\n :return:\n ' status = '' twitter_handle = '' twitter_handle_found = False tweepy_user_object = None twitter_user_object_found = False voter_info_retrieved = False switch_accounts = False twitter_secret_key = '' results = is_voter_device_id_valid(voter_device_id) if (not results['success']): results = {'success': False, 'status': 'VALID_VOTER_DEVICE_ID_MISSING ', 'voter_device_id': voter_device_id, 'twitter_handle': twitter_handle, 'twitter_handle_found': twitter_handle_found, 'voter_info_retrieved': voter_info_retrieved, 'switch_accounts': switch_accounts, 'return_url': return_url, 'twitter_secret_key': twitter_secret_key} return results voter_manager = VoterManager() results = voter_manager.retrieve_voter_from_voter_device_id(voter_device_id, read_only=True) if (not positive_value_exists(results['voter_found'])): results = {'status': 'VALID_VOTER_MISSING ', 'success': False, 'voter_device_id': voter_device_id, 'twitter_handle': twitter_handle, 'twitter_handle_found': twitter_handle_found, 'voter_info_retrieved': voter_info_retrieved, 'switch_accounts': switch_accounts, 'return_url': return_url, 'twitter_secret_key': twitter_secret_key} return results voter = results['voter'] voter_we_vote_id = voter.we_vote_id twitter_auth_manager = TwitterAuthManager() auth_response_results = twitter_auth_manager.retrieve_twitter_auth_response(voter_device_id) if (not auth_response_results['twitter_auth_response_found']): results = {'status': 'TWITTER_AUTH_RESPONSE_NOT_FOUND ', 'success': False, 'voter_device_id': voter_device_id, 'twitter_handle': twitter_handle, 'twitter_handle_found': twitter_handle_found, 'voter_info_retrieved': voter_info_retrieved, 
'switch_accounts': switch_accounts, 'return_url': return_url, 'twitter_secret_key': twitter_secret_key} return results twitter_auth_response = auth_response_results['twitter_auth_response'] success = True auth = tweepy.OAuthHandler(TWITTER_CONSUMER_KEY, TWITTER_CONSUMER_SECRET) auth.set_access_token(twitter_auth_response.twitter_access_token, twitter_auth_response.twitter_access_secret) api = tweepy.API(auth) try: tweepy_user_object = api.verify_credentials() twitter_json = tweepy_user_object._json status += 'TWITTER_SIGN_IN_REQUEST_VOTER_INFO_SUCCESSFUL ' twitter_handle = tweepy_user_object.screen_name twitter_handle_found = True twitter_user_object_found = True except tweepy.TooManyRequests: success = False status = 'TWITTER_SIGN_IN_REQUEST_VOTER_INFO_RATE_LIMIT_ERROR ' except tweepy.TweepyException as error_instance: err_string = 'GENERAL_TWEEPY_EXCEPTION' try: err_string = error_instance.args[0].args[0].args[0] except Exception: pass print(err_string) status = 'TWITTER_SIGN_IN_REQUEST_VOTER_INFO_TWEEPY_ERROR: {}'.format(err_string) except Exception as e: success = False status += (('TWEEPY_EXCEPTION: ' + str(e)) + ' ') if twitter_user_object_found: status += 'TWITTER_SIGN_IN-ALREADY_LINKED_TO_OTHER_ACCOUNT ' success = True save_user_results = twitter_auth_manager.save_twitter_auth_values(twitter_auth_response, tweepy_user_object) if save_user_results['success']: voter_info_retrieved = True status += save_user_results['status'] twitter_user_manager = TwitterUserManager() twitter_link_to_voter_results = twitter_user_manager.retrieve_twitter_link_to_voter_from_voter_we_vote_id(voter_we_vote_id, read_only=True) if twitter_link_to_voter_results['twitter_link_to_voter_found']: twitter_link_to_voter = twitter_link_to_voter_results['twitter_link_to_voter'] twitter_secret_key = twitter_link_to_voter.secret_key results = {'status': status, 'success': success, 'voter_device_id': voter_device_id, 'twitter_handle': twitter_handle, 'twitter_handle_found': 
twitter_handle_found, 'voter_info_retrieved': voter_info_retrieved, 'switch_accounts': switch_accounts, 'return_url': return_url, 'twitter_secret_key': twitter_secret_key} return results
(not directly called by) twitterSignInRequestVoterInfo When here, the incoming voter_device_id should already be authenticated :param voter_device_id: :param return_url: Where to return the browser when sign in process is complete :return:
import_export_twitter/controllers.py
twitter_sign_in_request_voter_info_for_api
wevote/WeVoteServer
44
python
def twitter_sign_in_request_voter_info_for_api(voter_device_id, return_url): '\n (not directly called by) twitterSignInRequestVoterInfo\n When here, the incoming voter_device_id should already be authenticated\n :param voter_device_id:\n :param return_url: Where to return the browser when sign in process is complete\n :return:\n ' status = twitter_handle = twitter_handle_found = False tweepy_user_object = None twitter_user_object_found = False voter_info_retrieved = False switch_accounts = False twitter_secret_key = results = is_voter_device_id_valid(voter_device_id) if (not results['success']): results = {'success': False, 'status': 'VALID_VOTER_DEVICE_ID_MISSING ', 'voter_device_id': voter_device_id, 'twitter_handle': twitter_handle, 'twitter_handle_found': twitter_handle_found, 'voter_info_retrieved': voter_info_retrieved, 'switch_accounts': switch_accounts, 'return_url': return_url, 'twitter_secret_key': twitter_secret_key} return results voter_manager = VoterManager() results = voter_manager.retrieve_voter_from_voter_device_id(voter_device_id, read_only=True) if (not positive_value_exists(results['voter_found'])): results = {'status': 'VALID_VOTER_MISSING ', 'success': False, 'voter_device_id': voter_device_id, 'twitter_handle': twitter_handle, 'twitter_handle_found': twitter_handle_found, 'voter_info_retrieved': voter_info_retrieved, 'switch_accounts': switch_accounts, 'return_url': return_url, 'twitter_secret_key': twitter_secret_key} return results voter = results['voter'] voter_we_vote_id = voter.we_vote_id twitter_auth_manager = TwitterAuthManager() auth_response_results = twitter_auth_manager.retrieve_twitter_auth_response(voter_device_id) if (not auth_response_results['twitter_auth_response_found']): results = {'status': 'TWITTER_AUTH_RESPONSE_NOT_FOUND ', 'success': False, 'voter_device_id': voter_device_id, 'twitter_handle': twitter_handle, 'twitter_handle_found': twitter_handle_found, 'voter_info_retrieved': voter_info_retrieved, 'switch_accounts': 
switch_accounts, 'return_url': return_url, 'twitter_secret_key': twitter_secret_key} return results twitter_auth_response = auth_response_results['twitter_auth_response'] success = True auth = tweepy.OAuthHandler(TWITTER_CONSUMER_KEY, TWITTER_CONSUMER_SECRET) auth.set_access_token(twitter_auth_response.twitter_access_token, twitter_auth_response.twitter_access_secret) api = tweepy.API(auth) try: tweepy_user_object = api.verify_credentials() twitter_json = tweepy_user_object._json status += 'TWITTER_SIGN_IN_REQUEST_VOTER_INFO_SUCCESSFUL ' twitter_handle = tweepy_user_object.screen_name twitter_handle_found = True twitter_user_object_found = True except tweepy.TooManyRequests: success = False status = 'TWITTER_SIGN_IN_REQUEST_VOTER_INFO_RATE_LIMIT_ERROR ' except tweepy.TweepyException as error_instance: err_string = 'GENERAL_TWEEPY_EXCEPTION' try: err_string = error_instance.args[0].args[0].args[0] except Exception: pass print(err_string) status = 'TWITTER_SIGN_IN_REQUEST_VOTER_INFO_TWEEPY_ERROR: {}'.format(err_string) except Exception as e: success = False status += (('TWEEPY_EXCEPTION: ' + str(e)) + ' ') if twitter_user_object_found: status += 'TWITTER_SIGN_IN-ALREADY_LINKED_TO_OTHER_ACCOUNT ' success = True save_user_results = twitter_auth_manager.save_twitter_auth_values(twitter_auth_response, tweepy_user_object) if save_user_results['success']: voter_info_retrieved = True status += save_user_results['status'] twitter_user_manager = TwitterUserManager() twitter_link_to_voter_results = twitter_user_manager.retrieve_twitter_link_to_voter_from_voter_we_vote_id(voter_we_vote_id, read_only=True) if twitter_link_to_voter_results['twitter_link_to_voter_found']: twitter_link_to_voter = twitter_link_to_voter_results['twitter_link_to_voter'] twitter_secret_key = twitter_link_to_voter.secret_key results = {'status': status, 'success': success, 'voter_device_id': voter_device_id, 'twitter_handle': twitter_handle, 'twitter_handle_found': twitter_handle_found, 
'voter_info_retrieved': voter_info_retrieved, 'switch_accounts': switch_accounts, 'return_url': return_url, 'twitter_secret_key': twitter_secret_key} return results
def twitter_sign_in_request_voter_info_for_api(voter_device_id, return_url): '\n (not directly called by) twitterSignInRequestVoterInfo\n When here, the incoming voter_device_id should already be authenticated\n :param voter_device_id:\n :param return_url: Where to return the browser when sign in process is complete\n :return:\n ' status = twitter_handle = twitter_handle_found = False tweepy_user_object = None twitter_user_object_found = False voter_info_retrieved = False switch_accounts = False twitter_secret_key = results = is_voter_device_id_valid(voter_device_id) if (not results['success']): results = {'success': False, 'status': 'VALID_VOTER_DEVICE_ID_MISSING ', 'voter_device_id': voter_device_id, 'twitter_handle': twitter_handle, 'twitter_handle_found': twitter_handle_found, 'voter_info_retrieved': voter_info_retrieved, 'switch_accounts': switch_accounts, 'return_url': return_url, 'twitter_secret_key': twitter_secret_key} return results voter_manager = VoterManager() results = voter_manager.retrieve_voter_from_voter_device_id(voter_device_id, read_only=True) if (not positive_value_exists(results['voter_found'])): results = {'status': 'VALID_VOTER_MISSING ', 'success': False, 'voter_device_id': voter_device_id, 'twitter_handle': twitter_handle, 'twitter_handle_found': twitter_handle_found, 'voter_info_retrieved': voter_info_retrieved, 'switch_accounts': switch_accounts, 'return_url': return_url, 'twitter_secret_key': twitter_secret_key} return results voter = results['voter'] voter_we_vote_id = voter.we_vote_id twitter_auth_manager = TwitterAuthManager() auth_response_results = twitter_auth_manager.retrieve_twitter_auth_response(voter_device_id) if (not auth_response_results['twitter_auth_response_found']): results = {'status': 'TWITTER_AUTH_RESPONSE_NOT_FOUND ', 'success': False, 'voter_device_id': voter_device_id, 'twitter_handle': twitter_handle, 'twitter_handle_found': twitter_handle_found, 'voter_info_retrieved': voter_info_retrieved, 'switch_accounts': 
switch_accounts, 'return_url': return_url, 'twitter_secret_key': twitter_secret_key} return results twitter_auth_response = auth_response_results['twitter_auth_response'] success = True auth = tweepy.OAuthHandler(TWITTER_CONSUMER_KEY, TWITTER_CONSUMER_SECRET) auth.set_access_token(twitter_auth_response.twitter_access_token, twitter_auth_response.twitter_access_secret) api = tweepy.API(auth) try: tweepy_user_object = api.verify_credentials() twitter_json = tweepy_user_object._json status += 'TWITTER_SIGN_IN_REQUEST_VOTER_INFO_SUCCESSFUL ' twitter_handle = tweepy_user_object.screen_name twitter_handle_found = True twitter_user_object_found = True except tweepy.TooManyRequests: success = False status = 'TWITTER_SIGN_IN_REQUEST_VOTER_INFO_RATE_LIMIT_ERROR ' except tweepy.TweepyException as error_instance: err_string = 'GENERAL_TWEEPY_EXCEPTION' try: err_string = error_instance.args[0].args[0].args[0] except Exception: pass print(err_string) status = 'TWITTER_SIGN_IN_REQUEST_VOTER_INFO_TWEEPY_ERROR: {}'.format(err_string) except Exception as e: success = False status += (('TWEEPY_EXCEPTION: ' + str(e)) + ' ') if twitter_user_object_found: status += 'TWITTER_SIGN_IN-ALREADY_LINKED_TO_OTHER_ACCOUNT ' success = True save_user_results = twitter_auth_manager.save_twitter_auth_values(twitter_auth_response, tweepy_user_object) if save_user_results['success']: voter_info_retrieved = True status += save_user_results['status'] twitter_user_manager = TwitterUserManager() twitter_link_to_voter_results = twitter_user_manager.retrieve_twitter_link_to_voter_from_voter_we_vote_id(voter_we_vote_id, read_only=True) if twitter_link_to_voter_results['twitter_link_to_voter_found']: twitter_link_to_voter = twitter_link_to_voter_results['twitter_link_to_voter'] twitter_secret_key = twitter_link_to_voter.secret_key results = {'status': status, 'success': success, 'voter_device_id': voter_device_id, 'twitter_handle': twitter_handle, 'twitter_handle_found': twitter_handle_found, 
'voter_info_retrieved': voter_info_retrieved, 'switch_accounts': switch_accounts, 'return_url': return_url, 'twitter_secret_key': twitter_secret_key} return results<|docstring|>(not directly called by) twitterSignInRequestVoterInfo When here, the incoming voter_device_id should already be authenticated :param voter_device_id: :param return_url: Where to return the browser when sign in process is complete :return:<|endoftext|>
1ea368c04e9c5ea771ffeecff217368c8c093a4767ab753e2a1d57e70f86f907
def twitter_sign_in_retrieve_for_api(voter_device_id, image_load_deferred): '\n We are asking for the results of the most recent Twitter authentication\n\n July 2017: We want the TwitterUser class/table to be the authoritative source of twitter info, ideally\n TwitterUser feeds the duplicated columns in voter, organization, candidate, etc.\n Unfortunately Django Auth, pre-populates voter with some key info first, which is fine, but makes it less clean.\n\n December 2021: This function used to process the incoming image URLs from twitter, resize them and store them in\n AWS inline, which took more than 5 seconds. Then we would merge the temporary voter record with a record we found\n on disk, and process the images again, for another 5 seconds. Now the processing of the images is initiated after\n the signin is complete via a call to twitter_process_deferred_images_for_api\n\n :param voter_device_id:\n :return:\n ' voter_manager = VoterManager() voter_results = voter_manager.retrieve_voter_from_voter_device_id(voter_device_id, read_only=True) voter_id = voter_results['voter_id'] if (not positive_value_exists(voter_id)): success = False error_results = {'success': success, 'status': 'TWITTER_SIGN_IN_NO_VOTER', 'existing_twitter_account_found': False, 'twitter_access_secret': '', 'twitter_access_token': '', 'twitter_id': 0, 'twitter_image_load_info': '', 'twitter_name': '', 'twitter_profile_image_url_https': '', 'twitter_request_secret': '', 'twitter_request_token': '', 'twitter_screen_name': '', 'twitter_secret_key': '', 'twitter_sign_in_failed': True, 'twitter_sign_in_found': False, 'twitter_sign_in_verified': False, 'voter_device_id': voter_device_id, 'voter_has_data_to_preserve': False, 'voter_we_vote_id': '', 'voter_we_vote_id_attached_to_twitter': '', 'we_vote_hosted_profile_image_url_large': '', 'we_vote_hosted_profile_image_url_medium': '', 'we_vote_hosted_profile_image_url_tiny': ''} return error_results voter = voter_results['voter'] voter_we_vote_id = 
voter.we_vote_id voter_has_data_to_preserve = voter.has_data_to_preserve() twitter_auth_manager = TwitterAuthManager() auth_response_results = twitter_auth_manager.retrieve_twitter_auth_response(voter_device_id) status = auth_response_results['status'] if (not auth_response_results['twitter_auth_response_found']): success = False error_results = {'success': success, 'status': status, 'existing_twitter_account_found': False, 'twitter_access_secret': '', 'twitter_access_token': '', 'twitter_id': 0, 'twitter_image_load_info': '', 'twitter_name': '', 'twitter_profile_image_url_https': '', 'twitter_request_secret': '', 'twitter_request_token': '', 'twitter_screen_name': '', 'twitter_secret_key': '', 'twitter_sign_in_failed': True, 'twitter_sign_in_found': False, 'twitter_sign_in_verified': False, 'voter_device_id': voter_device_id, 'voter_has_data_to_preserve': False, 'voter_we_vote_id': voter_we_vote_id, 'voter_we_vote_id_attached_to_twitter': '', 'we_vote_hosted_profile_image_url_large': '', 'we_vote_hosted_profile_image_url_medium': '', 'we_vote_hosted_profile_image_url_tiny': ''} return error_results success = True twitter_auth_response = auth_response_results['twitter_auth_response'] twitter_id = twitter_auth_response.twitter_id if (not twitter_id): success = False error_results = {'success': success, 'status': status, 'existing_twitter_account_found': False, 'twitter_access_secret': '', 'twitter_access_token': '', 'twitter_id': 0, 'twitter_image_load_info': '', 'twitter_name': '', 'twitter_profile_image_url_https': '', 'twitter_request_secret': '', 'twitter_request_token': '', 'twitter_screen_name': '', 'twitter_secret_key': '', 'twitter_sign_in_failed': True, 'twitter_sign_in_found': False, 'twitter_sign_in_verified': False, 'voter_device_id': voter_device_id, 'voter_has_data_to_preserve': False, 'voter_we_vote_id': voter_we_vote_id, 'voter_we_vote_id_attached_to_twitter': '', 'we_vote_hosted_profile_image_url_large': '', 
'we_vote_hosted_profile_image_url_medium': '', 'we_vote_hosted_profile_image_url_tiny': ''} return error_results twitter_user_manager = TwitterUserManager() twitter_sign_in_verified = True twitter_sign_in_failed = False twitter_secret_key = '' existing_twitter_account_found = False voter_we_vote_id_attached_to_twitter = '' repair_twitter_related_voter_caching_now = False t0 = time() twitter_link_results = twitter_user_manager.retrieve_twitter_link_to_voter(twitter_id, read_only=True) if twitter_link_results['twitter_link_to_voter_found']: twitter_link_to_voter = twitter_link_results['twitter_link_to_voter'] status += (' ' + twitter_link_results['status']) voter_we_vote_id_attached_to_twitter = twitter_link_to_voter.voter_we_vote_id twitter_secret_key = twitter_link_to_voter.secret_key existing_twitter_account_found = True repair_twitter_related_voter_caching_now = True else: voter_results = voter_manager.retrieve_voter_by_twitter_id_old(twitter_id) if voter_results['voter_found']: voter_with_twitter_id = voter_results['voter'] voter_we_vote_id_attached_to_twitter = voter_with_twitter_id.we_vote_id if positive_value_exists(voter_we_vote_id_attached_to_twitter): save_results = twitter_user_manager.create_twitter_link_to_voter(twitter_id, voter_we_vote_id_attached_to_twitter) status += (' ' + save_results['status']) if save_results['success']: repair_twitter_related_voter_caching_now = True else: save_results = twitter_user_manager.create_twitter_link_to_voter(twitter_id, voter_we_vote_id) t1 = time() if twitter_id: organization_list_manager = OrganizationListManager() repair_results = organization_list_manager.repair_twitter_related_organization_caching(twitter_id) status += repair_results['status'] if repair_twitter_related_voter_caching_now: repair_results = voter_manager.repair_twitter_related_voter_caching(twitter_id) status += repair_results['status'] t2 = time() if positive_value_exists(voter_we_vote_id_attached_to_twitter): voter_we_vote_id_for_cache = 
voter_we_vote_id_attached_to_twitter else: voter_we_vote_id_for_cache = voter_we_vote_id twitter_image_load_info = {'status': status, 'success': success, 'twitter_id': twitter_id, 'twitter_name': twitter_auth_response.twitter_name, 'twitter_profile_banner_url_https': twitter_auth_response.twitter_profile_banner_url_https, 'twitter_profile_image_url_https': twitter_auth_response.twitter_profile_banner_url_https, 'twitter_secret_key': twitter_secret_key, 'twitter_screen_name': twitter_auth_response.twitter_screen_name, 'voter_we_vote_id_for_cache': voter_we_vote_id_for_cache} if (not positive_value_exists(image_load_deferred)): twitter_process_deferred_images_for_api(status, success, twitter_id, twitter_auth_response.twitter_name, twitter_auth_response.twitter_profile_banner_url_https, twitter_auth_response.twitter_profile_banner_url_https, twitter_secret_key, twitter_auth_response.twitter_screen_name, voter_we_vote_id_for_cache) results = retrieve_twitter_user_info(twitter_id, twitter_auth_response.twitter_screen_name) if (not results['success']): twitter_json = {'id': twitter_id, 'name': twitter_auth_response.twitter_name, 'screen_name': twitter_auth_response.twitter_screen_name, 'profile_image_url_https': twitter_auth_response.twitter_profile_image_url_https} else: twitter_json = results['twitter_json'] twitter_user_results = twitter_user_manager.update_or_create_twitter_user(twitter_json=twitter_json, twitter_id=twitter_id) json_data = {'success': success, 'status': status, 'existing_twitter_account_found': existing_twitter_account_found, 'twitter_access_secret': twitter_auth_response.twitter_access_secret, 'twitter_access_token': twitter_auth_response.twitter_access_token, 'twitter_id': twitter_id, 'twitter_image_load_info': twitter_image_load_info, 'twitter_name': twitter_auth_response.twitter_name, 'twitter_profile_image_url_https': None, 'twitter_request_secret': twitter_auth_response.twitter_request_secret, 'twitter_request_token': 
twitter_auth_response.twitter_request_token, 'twitter_screen_name': twitter_auth_response.twitter_screen_name, 'twitter_secret_key': twitter_secret_key, 'twitter_sign_in_failed': twitter_sign_in_failed, 'twitter_sign_in_found': auth_response_results['twitter_auth_response_found'], 'twitter_sign_in_verified': twitter_sign_in_verified, 'voter_device_id': voter_device_id, 'voter_has_data_to_preserve': voter_has_data_to_preserve, 'voter_we_vote_id': voter_we_vote_id, 'voter_we_vote_id_attached_to_twitter': voter_we_vote_id_attached_to_twitter, 'we_vote_hosted_profile_image_url_large': None, 'we_vote_hosted_profile_image_url_medium': None, 'we_vote_hosted_profile_image_url_tiny': None} t6 = time() return json_data
We are asking for the results of the most recent Twitter authentication July 2017: We want the TwitterUser class/table to be the authoritative source of twitter info, ideally TwitterUser feeds the duplicated columns in voter, organization, candidate, etc. Unfortunately Django Auth, pre-populates voter with some key info first, which is fine, but makes it less clean. December 2021: This function used to process the incoming image URLs from twitter, resize them and store them in AWS inline, which took more than 5 seconds. Then we would merge the temporary voter record with a record we found on disk, and process the images again, for another 5 seconds. Now the processing of the images is initiated after the signin is complete via a call to twitter_process_deferred_images_for_api :param voter_device_id: :return:
import_export_twitter/controllers.py
twitter_sign_in_retrieve_for_api
wevote/WeVoteServer
44
python
def twitter_sign_in_retrieve_for_api(voter_device_id, image_load_deferred): '\n We are asking for the results of the most recent Twitter authentication\n\n July 2017: We want the TwitterUser class/table to be the authoritative source of twitter info, ideally\n TwitterUser feeds the duplicated columns in voter, organization, candidate, etc.\n Unfortunately Django Auth, pre-populates voter with some key info first, which is fine, but makes it less clean.\n\n December 2021: This function used to process the incoming image URLs from twitter, resize them and store them in\n AWS inline, which took more than 5 seconds. Then we would merge the temporary voter record with a record we found\n on disk, and process the images again, for another 5 seconds. Now the processing of the images is initiated after\n the signin is complete via a call to twitter_process_deferred_images_for_api\n\n :param voter_device_id:\n :return:\n ' voter_manager = VoterManager() voter_results = voter_manager.retrieve_voter_from_voter_device_id(voter_device_id, read_only=True) voter_id = voter_results['voter_id'] if (not positive_value_exists(voter_id)): success = False error_results = {'success': success, 'status': 'TWITTER_SIGN_IN_NO_VOTER', 'existing_twitter_account_found': False, 'twitter_access_secret': , 'twitter_access_token': , 'twitter_id': 0, 'twitter_image_load_info': , 'twitter_name': , 'twitter_profile_image_url_https': , 'twitter_request_secret': , 'twitter_request_token': , 'twitter_screen_name': , 'twitter_secret_key': , 'twitter_sign_in_failed': True, 'twitter_sign_in_found': False, 'twitter_sign_in_verified': False, 'voter_device_id': voter_device_id, 'voter_has_data_to_preserve': False, 'voter_we_vote_id': , 'voter_we_vote_id_attached_to_twitter': , 'we_vote_hosted_profile_image_url_large': , 'we_vote_hosted_profile_image_url_medium': , 'we_vote_hosted_profile_image_url_tiny': } return error_results voter = voter_results['voter'] voter_we_vote_id = voter.we_vote_id 
voter_has_data_to_preserve = voter.has_data_to_preserve() twitter_auth_manager = TwitterAuthManager() auth_response_results = twitter_auth_manager.retrieve_twitter_auth_response(voter_device_id) status = auth_response_results['status'] if (not auth_response_results['twitter_auth_response_found']): success = False error_results = {'success': success, 'status': status, 'existing_twitter_account_found': False, 'twitter_access_secret': , 'twitter_access_token': , 'twitter_id': 0, 'twitter_image_load_info': , 'twitter_name': , 'twitter_profile_image_url_https': , 'twitter_request_secret': , 'twitter_request_token': , 'twitter_screen_name': , 'twitter_secret_key': , 'twitter_sign_in_failed': True, 'twitter_sign_in_found': False, 'twitter_sign_in_verified': False, 'voter_device_id': voter_device_id, 'voter_has_data_to_preserve': False, 'voter_we_vote_id': voter_we_vote_id, 'voter_we_vote_id_attached_to_twitter': , 'we_vote_hosted_profile_image_url_large': , 'we_vote_hosted_profile_image_url_medium': , 'we_vote_hosted_profile_image_url_tiny': } return error_results success = True twitter_auth_response = auth_response_results['twitter_auth_response'] twitter_id = twitter_auth_response.twitter_id if (not twitter_id): success = False error_results = {'success': success, 'status': status, 'existing_twitter_account_found': False, 'twitter_access_secret': , 'twitter_access_token': , 'twitter_id': 0, 'twitter_image_load_info': , 'twitter_name': , 'twitter_profile_image_url_https': , 'twitter_request_secret': , 'twitter_request_token': , 'twitter_screen_name': , 'twitter_secret_key': , 'twitter_sign_in_failed': True, 'twitter_sign_in_found': False, 'twitter_sign_in_verified': False, 'voter_device_id': voter_device_id, 'voter_has_data_to_preserve': False, 'voter_we_vote_id': voter_we_vote_id, 'voter_we_vote_id_attached_to_twitter': , 'we_vote_hosted_profile_image_url_large': , 'we_vote_hosted_profile_image_url_medium': , 'we_vote_hosted_profile_image_url_tiny': } return 
error_results twitter_user_manager = TwitterUserManager() twitter_sign_in_verified = True twitter_sign_in_failed = False twitter_secret_key = existing_twitter_account_found = False voter_we_vote_id_attached_to_twitter = repair_twitter_related_voter_caching_now = False t0 = time() twitter_link_results = twitter_user_manager.retrieve_twitter_link_to_voter(twitter_id, read_only=True) if twitter_link_results['twitter_link_to_voter_found']: twitter_link_to_voter = twitter_link_results['twitter_link_to_voter'] status += (' ' + twitter_link_results['status']) voter_we_vote_id_attached_to_twitter = twitter_link_to_voter.voter_we_vote_id twitter_secret_key = twitter_link_to_voter.secret_key existing_twitter_account_found = True repair_twitter_related_voter_caching_now = True else: voter_results = voter_manager.retrieve_voter_by_twitter_id_old(twitter_id) if voter_results['voter_found']: voter_with_twitter_id = voter_results['voter'] voter_we_vote_id_attached_to_twitter = voter_with_twitter_id.we_vote_id if positive_value_exists(voter_we_vote_id_attached_to_twitter): save_results = twitter_user_manager.create_twitter_link_to_voter(twitter_id, voter_we_vote_id_attached_to_twitter) status += (' ' + save_results['status']) if save_results['success']: repair_twitter_related_voter_caching_now = True else: save_results = twitter_user_manager.create_twitter_link_to_voter(twitter_id, voter_we_vote_id) t1 = time() if twitter_id: organization_list_manager = OrganizationListManager() repair_results = organization_list_manager.repair_twitter_related_organization_caching(twitter_id) status += repair_results['status'] if repair_twitter_related_voter_caching_now: repair_results = voter_manager.repair_twitter_related_voter_caching(twitter_id) status += repair_results['status'] t2 = time() if positive_value_exists(voter_we_vote_id_attached_to_twitter): voter_we_vote_id_for_cache = voter_we_vote_id_attached_to_twitter else: voter_we_vote_id_for_cache = voter_we_vote_id twitter_image_load_info 
= {'status': status, 'success': success, 'twitter_id': twitter_id, 'twitter_name': twitter_auth_response.twitter_name, 'twitter_profile_banner_url_https': twitter_auth_response.twitter_profile_banner_url_https, 'twitter_profile_image_url_https': twitter_auth_response.twitter_profile_banner_url_https, 'twitter_secret_key': twitter_secret_key, 'twitter_screen_name': twitter_auth_response.twitter_screen_name, 'voter_we_vote_id_for_cache': voter_we_vote_id_for_cache} if (not positive_value_exists(image_load_deferred)): twitter_process_deferred_images_for_api(status, success, twitter_id, twitter_auth_response.twitter_name, twitter_auth_response.twitter_profile_banner_url_https, twitter_auth_response.twitter_profile_banner_url_https, twitter_secret_key, twitter_auth_response.twitter_screen_name, voter_we_vote_id_for_cache) results = retrieve_twitter_user_info(twitter_id, twitter_auth_response.twitter_screen_name) if (not results['success']): twitter_json = {'id': twitter_id, 'name': twitter_auth_response.twitter_name, 'screen_name': twitter_auth_response.twitter_screen_name, 'profile_image_url_https': twitter_auth_response.twitter_profile_image_url_https} else: twitter_json = results['twitter_json'] twitter_user_results = twitter_user_manager.update_or_create_twitter_user(twitter_json=twitter_json, twitter_id=twitter_id) json_data = {'success': success, 'status': status, 'existing_twitter_account_found': existing_twitter_account_found, 'twitter_access_secret': twitter_auth_response.twitter_access_secret, 'twitter_access_token': twitter_auth_response.twitter_access_token, 'twitter_id': twitter_id, 'twitter_image_load_info': twitter_image_load_info, 'twitter_name': twitter_auth_response.twitter_name, 'twitter_profile_image_url_https': None, 'twitter_request_secret': twitter_auth_response.twitter_request_secret, 'twitter_request_token': twitter_auth_response.twitter_request_token, 'twitter_screen_name': twitter_auth_response.twitter_screen_name, 'twitter_secret_key': 
twitter_secret_key, 'twitter_sign_in_failed': twitter_sign_in_failed, 'twitter_sign_in_found': auth_response_results['twitter_auth_response_found'], 'twitter_sign_in_verified': twitter_sign_in_verified, 'voter_device_id': voter_device_id, 'voter_has_data_to_preserve': voter_has_data_to_preserve, 'voter_we_vote_id': voter_we_vote_id, 'voter_we_vote_id_attached_to_twitter': voter_we_vote_id_attached_to_twitter, 'we_vote_hosted_profile_image_url_large': None, 'we_vote_hosted_profile_image_url_medium': None, 'we_vote_hosted_profile_image_url_tiny': None} t6 = time() return json_data
def twitter_sign_in_retrieve_for_api(voter_device_id, image_load_deferred): '\n We are asking for the results of the most recent Twitter authentication\n\n July 2017: We want the TwitterUser class/table to be the authoritative source of twitter info, ideally\n TwitterUser feeds the duplicated columns in voter, organization, candidate, etc.\n Unfortunately Django Auth, pre-populates voter with some key info first, which is fine, but makes it less clean.\n\n December 2021: This function used to process the incoming image URLs from twitter, resize them and store them in\n AWS inline, which took more than 5 seconds. Then we would merge the temporary voter record with a record we found\n on disk, and process the images again, for another 5 seconds. Now the processing of the images is initiated after\n the signin is complete via a call to twitter_process_deferred_images_for_api\n\n :param voter_device_id:\n :return:\n ' voter_manager = VoterManager() voter_results = voter_manager.retrieve_voter_from_voter_device_id(voter_device_id, read_only=True) voter_id = voter_results['voter_id'] if (not positive_value_exists(voter_id)): success = False error_results = {'success': success, 'status': 'TWITTER_SIGN_IN_NO_VOTER', 'existing_twitter_account_found': False, 'twitter_access_secret': , 'twitter_access_token': , 'twitter_id': 0, 'twitter_image_load_info': , 'twitter_name': , 'twitter_profile_image_url_https': , 'twitter_request_secret': , 'twitter_request_token': , 'twitter_screen_name': , 'twitter_secret_key': , 'twitter_sign_in_failed': True, 'twitter_sign_in_found': False, 'twitter_sign_in_verified': False, 'voter_device_id': voter_device_id, 'voter_has_data_to_preserve': False, 'voter_we_vote_id': , 'voter_we_vote_id_attached_to_twitter': , 'we_vote_hosted_profile_image_url_large': , 'we_vote_hosted_profile_image_url_medium': , 'we_vote_hosted_profile_image_url_tiny': } return error_results voter = voter_results['voter'] voter_we_vote_id = voter.we_vote_id 
voter_has_data_to_preserve = voter.has_data_to_preserve() twitter_auth_manager = TwitterAuthManager() auth_response_results = twitter_auth_manager.retrieve_twitter_auth_response(voter_device_id) status = auth_response_results['status'] if (not auth_response_results['twitter_auth_response_found']): success = False error_results = {'success': success, 'status': status, 'existing_twitter_account_found': False, 'twitter_access_secret': , 'twitter_access_token': , 'twitter_id': 0, 'twitter_image_load_info': , 'twitter_name': , 'twitter_profile_image_url_https': , 'twitter_request_secret': , 'twitter_request_token': , 'twitter_screen_name': , 'twitter_secret_key': , 'twitter_sign_in_failed': True, 'twitter_sign_in_found': False, 'twitter_sign_in_verified': False, 'voter_device_id': voter_device_id, 'voter_has_data_to_preserve': False, 'voter_we_vote_id': voter_we_vote_id, 'voter_we_vote_id_attached_to_twitter': , 'we_vote_hosted_profile_image_url_large': , 'we_vote_hosted_profile_image_url_medium': , 'we_vote_hosted_profile_image_url_tiny': } return error_results success = True twitter_auth_response = auth_response_results['twitter_auth_response'] twitter_id = twitter_auth_response.twitter_id if (not twitter_id): success = False error_results = {'success': success, 'status': status, 'existing_twitter_account_found': False, 'twitter_access_secret': , 'twitter_access_token': , 'twitter_id': 0, 'twitter_image_load_info': , 'twitter_name': , 'twitter_profile_image_url_https': , 'twitter_request_secret': , 'twitter_request_token': , 'twitter_screen_name': , 'twitter_secret_key': , 'twitter_sign_in_failed': True, 'twitter_sign_in_found': False, 'twitter_sign_in_verified': False, 'voter_device_id': voter_device_id, 'voter_has_data_to_preserve': False, 'voter_we_vote_id': voter_we_vote_id, 'voter_we_vote_id_attached_to_twitter': , 'we_vote_hosted_profile_image_url_large': , 'we_vote_hosted_profile_image_url_medium': , 'we_vote_hosted_profile_image_url_tiny': } return 
error_results twitter_user_manager = TwitterUserManager() twitter_sign_in_verified = True twitter_sign_in_failed = False twitter_secret_key = existing_twitter_account_found = False voter_we_vote_id_attached_to_twitter = repair_twitter_related_voter_caching_now = False t0 = time() twitter_link_results = twitter_user_manager.retrieve_twitter_link_to_voter(twitter_id, read_only=True) if twitter_link_results['twitter_link_to_voter_found']: twitter_link_to_voter = twitter_link_results['twitter_link_to_voter'] status += (' ' + twitter_link_results['status']) voter_we_vote_id_attached_to_twitter = twitter_link_to_voter.voter_we_vote_id twitter_secret_key = twitter_link_to_voter.secret_key existing_twitter_account_found = True repair_twitter_related_voter_caching_now = True else: voter_results = voter_manager.retrieve_voter_by_twitter_id_old(twitter_id) if voter_results['voter_found']: voter_with_twitter_id = voter_results['voter'] voter_we_vote_id_attached_to_twitter = voter_with_twitter_id.we_vote_id if positive_value_exists(voter_we_vote_id_attached_to_twitter): save_results = twitter_user_manager.create_twitter_link_to_voter(twitter_id, voter_we_vote_id_attached_to_twitter) status += (' ' + save_results['status']) if save_results['success']: repair_twitter_related_voter_caching_now = True else: save_results = twitter_user_manager.create_twitter_link_to_voter(twitter_id, voter_we_vote_id) t1 = time() if twitter_id: organization_list_manager = OrganizationListManager() repair_results = organization_list_manager.repair_twitter_related_organization_caching(twitter_id) status += repair_results['status'] if repair_twitter_related_voter_caching_now: repair_results = voter_manager.repair_twitter_related_voter_caching(twitter_id) status += repair_results['status'] t2 = time() if positive_value_exists(voter_we_vote_id_attached_to_twitter): voter_we_vote_id_for_cache = voter_we_vote_id_attached_to_twitter else: voter_we_vote_id_for_cache = voter_we_vote_id twitter_image_load_info 
= {'status': status, 'success': success, 'twitter_id': twitter_id, 'twitter_name': twitter_auth_response.twitter_name, 'twitter_profile_banner_url_https': twitter_auth_response.twitter_profile_banner_url_https, 'twitter_profile_image_url_https': twitter_auth_response.twitter_profile_banner_url_https, 'twitter_secret_key': twitter_secret_key, 'twitter_screen_name': twitter_auth_response.twitter_screen_name, 'voter_we_vote_id_for_cache': voter_we_vote_id_for_cache} if (not positive_value_exists(image_load_deferred)): twitter_process_deferred_images_for_api(status, success, twitter_id, twitter_auth_response.twitter_name, twitter_auth_response.twitter_profile_banner_url_https, twitter_auth_response.twitter_profile_banner_url_https, twitter_secret_key, twitter_auth_response.twitter_screen_name, voter_we_vote_id_for_cache) results = retrieve_twitter_user_info(twitter_id, twitter_auth_response.twitter_screen_name) if (not results['success']): twitter_json = {'id': twitter_id, 'name': twitter_auth_response.twitter_name, 'screen_name': twitter_auth_response.twitter_screen_name, 'profile_image_url_https': twitter_auth_response.twitter_profile_image_url_https} else: twitter_json = results['twitter_json'] twitter_user_results = twitter_user_manager.update_or_create_twitter_user(twitter_json=twitter_json, twitter_id=twitter_id) json_data = {'success': success, 'status': status, 'existing_twitter_account_found': existing_twitter_account_found, 'twitter_access_secret': twitter_auth_response.twitter_access_secret, 'twitter_access_token': twitter_auth_response.twitter_access_token, 'twitter_id': twitter_id, 'twitter_image_load_info': twitter_image_load_info, 'twitter_name': twitter_auth_response.twitter_name, 'twitter_profile_image_url_https': None, 'twitter_request_secret': twitter_auth_response.twitter_request_secret, 'twitter_request_token': twitter_auth_response.twitter_request_token, 'twitter_screen_name': twitter_auth_response.twitter_screen_name, 'twitter_secret_key': 
twitter_secret_key, 'twitter_sign_in_failed': twitter_sign_in_failed, 'twitter_sign_in_found': auth_response_results['twitter_auth_response_found'], 'twitter_sign_in_verified': twitter_sign_in_verified, 'voter_device_id': voter_device_id, 'voter_has_data_to_preserve': voter_has_data_to_preserve, 'voter_we_vote_id': voter_we_vote_id, 'voter_we_vote_id_attached_to_twitter': voter_we_vote_id_attached_to_twitter, 'we_vote_hosted_profile_image_url_large': None, 'we_vote_hosted_profile_image_url_medium': None, 'we_vote_hosted_profile_image_url_tiny': None} t6 = time() return json_data<|docstring|>We are asking for the results of the most recent Twitter authentication July 2017: We want the TwitterUser class/table to be the authoritative source of twitter info, ideally TwitterUser feeds the duplicated columns in voter, organization, candidate, etc. Unfortunately Django Auth, pre-populates voter with some key info first, which is fine, but makes it less clean. December 2021: This function used to process the incoming image URLs from twitter, resize them and store them in AWS inline, which took more than 5 seconds. Then we would merge the temporary voter record with a record we found on disk, and process the images again, for another 5 seconds. Now the processing of the images is initiated after the signin is complete via a call to twitter_process_deferred_images_for_api :param voter_device_id: :return:<|endoftext|>
3c7f2d1ecbef776b94617e1a75c33bc2db7b143ad90f4de3c6f758a25245cf43
def twitter_retrieve_ids_i_follow_for_api(voter_device_id): '\n\n :param voter_device_id:\n :return:\n ' success = False twitter_auth_manager = TwitterAuthManager() auth_response_results = twitter_auth_manager.retrieve_twitter_auth_response(voter_device_id) status = auth_response_results['status'] if (not auth_response_results['twitter_auth_response_found']): error_results = {'success': success, 'status': status, 'voter_device_id': voter_device_id, 'twitter_ids_i_follow': []} return error_results twitter_auth_response = auth_response_results['twitter_auth_response'] if (not twitter_auth_response.twitter_id): success = False error_results = {'success': success, 'status': status, 'voter_device_id': voter_device_id, 'twitter_ids_i_follow': []} return error_results twitter_user_manager = TwitterUserManager() twitter_ids_i_follow_results = twitter_user_manager.retrieve_twitter_ids_i_follow_from_twitter(twitter_auth_response.twitter_id, twitter_auth_response.twitter_access_token, twitter_auth_response.twitter_access_secret) status += (' ' + twitter_ids_i_follow_results['status']) twitter_ids_i_follow = twitter_ids_i_follow_results['twitter_ids_i_follow'] if twitter_ids_i_follow_results['success']: twitter_who_i_follow_results = twitter_user_manager.create_twitter_who_i_follow_entries(twitter_auth_response.twitter_id, twitter_ids_i_follow) status += (' ' + twitter_who_i_follow_results['status']) success = twitter_who_i_follow_results['success'] results = {'success': success, 'status': status, 'voter_device_id': voter_device_id, 'twitter_ids_i_follow': twitter_ids_i_follow} return results
:param voter_device_id: :return:
import_export_twitter/controllers.py
twitter_retrieve_ids_i_follow_for_api
wevote/WeVoteServer
44
python
def twitter_retrieve_ids_i_follow_for_api(voter_device_id): '\n\n :param voter_device_id:\n :return:\n ' success = False twitter_auth_manager = TwitterAuthManager() auth_response_results = twitter_auth_manager.retrieve_twitter_auth_response(voter_device_id) status = auth_response_results['status'] if (not auth_response_results['twitter_auth_response_found']): error_results = {'success': success, 'status': status, 'voter_device_id': voter_device_id, 'twitter_ids_i_follow': []} return error_results twitter_auth_response = auth_response_results['twitter_auth_response'] if (not twitter_auth_response.twitter_id): success = False error_results = {'success': success, 'status': status, 'voter_device_id': voter_device_id, 'twitter_ids_i_follow': []} return error_results twitter_user_manager = TwitterUserManager() twitter_ids_i_follow_results = twitter_user_manager.retrieve_twitter_ids_i_follow_from_twitter(twitter_auth_response.twitter_id, twitter_auth_response.twitter_access_token, twitter_auth_response.twitter_access_secret) status += (' ' + twitter_ids_i_follow_results['status']) twitter_ids_i_follow = twitter_ids_i_follow_results['twitter_ids_i_follow'] if twitter_ids_i_follow_results['success']: twitter_who_i_follow_results = twitter_user_manager.create_twitter_who_i_follow_entries(twitter_auth_response.twitter_id, twitter_ids_i_follow) status += (' ' + twitter_who_i_follow_results['status']) success = twitter_who_i_follow_results['success'] results = {'success': success, 'status': status, 'voter_device_id': voter_device_id, 'twitter_ids_i_follow': twitter_ids_i_follow} return results
def twitter_retrieve_ids_i_follow_for_api(voter_device_id): '\n\n :param voter_device_id:\n :return:\n ' success = False twitter_auth_manager = TwitterAuthManager() auth_response_results = twitter_auth_manager.retrieve_twitter_auth_response(voter_device_id) status = auth_response_results['status'] if (not auth_response_results['twitter_auth_response_found']): error_results = {'success': success, 'status': status, 'voter_device_id': voter_device_id, 'twitter_ids_i_follow': []} return error_results twitter_auth_response = auth_response_results['twitter_auth_response'] if (not twitter_auth_response.twitter_id): success = False error_results = {'success': success, 'status': status, 'voter_device_id': voter_device_id, 'twitter_ids_i_follow': []} return error_results twitter_user_manager = TwitterUserManager() twitter_ids_i_follow_results = twitter_user_manager.retrieve_twitter_ids_i_follow_from_twitter(twitter_auth_response.twitter_id, twitter_auth_response.twitter_access_token, twitter_auth_response.twitter_access_secret) status += (' ' + twitter_ids_i_follow_results['status']) twitter_ids_i_follow = twitter_ids_i_follow_results['twitter_ids_i_follow'] if twitter_ids_i_follow_results['success']: twitter_who_i_follow_results = twitter_user_manager.create_twitter_who_i_follow_entries(twitter_auth_response.twitter_id, twitter_ids_i_follow) status += (' ' + twitter_who_i_follow_results['status']) success = twitter_who_i_follow_results['success'] results = {'success': success, 'status': status, 'voter_device_id': voter_device_id, 'twitter_ids_i_follow': twitter_ids_i_follow} return results<|docstring|>:param voter_device_id: :return:<|endoftext|>
1be21fb07a29059b1b0f6bd34d986022d81522bc831291c16363ddf535aa72dd
def voter_twitter_save_to_current_account_for_api(voter_device_id): '\n\n :param voter_device_id:\n :return:\n ' status = '' success = False twitter_account_created = False twitter_link_to_organization_exists = False twitter_link_to_organization_twitter_id = 0 results = is_voter_device_id_valid(voter_device_id) if (not results['success']): results = {'success': False, 'status': 'VALID_VOTER_DEVICE_ID_MISSING', 'voter_device_id': voter_device_id, 'twitter_account_created': twitter_account_created} return results voter_manager = VoterManager() results = voter_manager.retrieve_voter_from_voter_device_id(voter_device_id) if (not positive_value_exists(results['voter_found'])): results = {'success': False, 'status': 'VALID_VOTER_MISSING', 'voter_device_id': voter_device_id, 'twitter_account_created': twitter_account_created} return results voter = results['voter'] twitter_user_manager = TwitterUserManager() twitter_results = twitter_user_manager.retrieve_twitter_link_to_voter(0, voter.we_vote_id, read_only=True) if twitter_results['twitter_link_to_voter_found']: error_results = {'status': 'TWITTER_OWNER_VOTER_FOUND_WHEN_NOT_EXPECTED', 'success': False, 'voter_device_id': voter_device_id, 'twitter_account_created': twitter_account_created} return error_results twitter_auth_manager = TwitterAuthManager() auth_response_results = twitter_auth_manager.retrieve_twitter_auth_response(voter_device_id) if (not auth_response_results['twitter_auth_response_found']): error_results = {'status': 'TWITTER_AUTH_RESPONSE_COULD_NOT_BE_FOUND', 'success': False, 'voter_device_id': voter_device_id, 'twitter_account_created': twitter_account_created} return error_results twitter_auth_response = auth_response_results['twitter_auth_response'] twitter_collision_results = twitter_user_manager.retrieve_twitter_link_to_voter(twitter_auth_response.twitter_id, read_only=True) if twitter_collision_results['twitter_link_to_voter_found']: error_results = {'status': 
'TWITTER_OWNER_VOTER_FOUND_FOR_ANOTHER_VOTER_WHEN_NOT_EXPECTED', 'success': False, 'voter_device_id': voter_device_id, 'twitter_account_created': twitter_account_created} return error_results link_results = twitter_user_manager.create_twitter_link_to_voter(twitter_auth_response.twitter_id, voter.we_vote_id) if (not link_results['twitter_link_to_voter_saved']): error_results = {'status': link_results['status'], 'success': False, 'voter_device_id': voter_device_id, 'twitter_account_created': twitter_account_created} return error_results twitter_account_created = True twitter_link_to_voter = link_results['twitter_link_to_voter'] results = voter_manager.save_twitter_user_values(voter, twitter_auth_response) status += (results['status'] + ', ') success = results['success'] voter = results['voter'] twitter_results = twitter_user_manager.retrieve_twitter_link_to_organization(voter.we_vote_id, read_only=True) if twitter_results['twitter_link_to_organization_found']: twitter_link_to_organization = twitter_results['twitter_link_to_organization'] twitter_link_to_organization_exists = True twitter_link_to_organization_twitter_id = twitter_link_to_organization.twitter_id if voter.linked_organization_we_vote_id: if (positive_value_exists(twitter_link_to_organization.organization_we_vote_id) and positive_value_exists(voter.linked_organization_we_vote_id) and (twitter_link_to_organization.organization_we_vote_id != voter.linked_organization_we_vote_id)): twitter_link_to_organization_organization_id = 0 voter_linked_to_organization_organization_id = 0 move_organization_to_another_complete_results = move_organization_to_another_complete(twitter_link_to_organization_organization_id, twitter_link_to_organization.organization_we_vote_id, voter_linked_to_organization_organization_id, voter.linked_organization_we_vote_id, voter.id, voter.we_vote_id) status += (' ' + move_organization_to_another_complete_results['status']) else: try: voter.linked_organization_we_vote_id = 
twitter_link_to_organization.organization_we_vote_id voter.save() except Exception as e: success = False status += 'VOTER_LINKED_ORGANIZATION_WE_VOTE_ID_NOT_UPDATED ' else: organization_manager = OrganizationManager() organization_from_twitter_id_old_results = organization_manager.retrieve_organization_from_twitter_user_id_old(twitter_auth_response.twitter_id) new_organization_ready = False if organization_from_twitter_id_old_results['organization_found']: new_organization = organization_from_twitter_id_old_results['organization'] new_organization_ready = True else: organization_manager = OrganizationManager() create_results = organization_manager.create_organization(organization_name=voter.get_full_name(), organization_image=voter.voter_photo_url(), twitter_id=twitter_auth_response.twitter_id, organization_type=INDIVIDUAL, we_vote_hosted_profile_image_url_large=voter.we_vote_hosted_profile_image_url_large, we_vote_hosted_profile_image_url_medium=voter.we_vote_hosted_profile_image_url_medium, we_vote_hosted_profile_image_url_tiny=voter.we_vote_hosted_profile_image_url_tiny) if create_results['organization_created']: new_organization = create_results['organization'] new_organization_ready = True else: new_organization = Organization() status += 'NEW_ORGANIZATION_COULD_NOT_BE_CREATED ' if new_organization_ready: try: voter.linked_organization_we_vote_id = new_organization.organization_we_vote_id voter.save() except Exception as e: status += 'UNABLE_TO_UPDATE_VOTER_LINKED_ORGANIZATION_WE_VOTE_ID ' try: results = twitter_user_manager.create_twitter_link_to_organization(twitter_auth_response.twitter_id, voter.linked_organization_we_vote_id) if results['twitter_link_to_organization_saved']: status += 'TwitterLinkToOrganization_CREATED_AFTER_ORGANIZATION_CREATE ' twitter_link_to_organization_exists = True twitter_link_to_organization_twitter_id = twitter_auth_response.twitter_id else: status += results['status'] status += 
'TwitterLinkToOrganization_NOT_CREATED_AFTER_ORGANIZATION_CREATE ' except Exception as e: status += results['status'] status += 'UNABLE_TO_CREATE_TWITTER_LINK_TO_ORG ' if twitter_link_to_organization_exists: organization_list_manager = OrganizationListManager() repair_results = organization_list_manager.repair_twitter_related_organization_caching(twitter_link_to_organization_twitter_id) status += repair_results['status'] results = {'success': success, 'status': status, 'voter_device_id': voter_device_id, 'twitter_account_created': twitter_account_created} return results
:param voter_device_id: :return:
import_export_twitter/controllers.py
voter_twitter_save_to_current_account_for_api
wevote/WeVoteServer
44
python
def voter_twitter_save_to_current_account_for_api(voter_device_id): '\n\n :param voter_device_id:\n :return:\n ' status = success = False twitter_account_created = False twitter_link_to_organization_exists = False twitter_link_to_organization_twitter_id = 0 results = is_voter_device_id_valid(voter_device_id) if (not results['success']): results = {'success': False, 'status': 'VALID_VOTER_DEVICE_ID_MISSING', 'voter_device_id': voter_device_id, 'twitter_account_created': twitter_account_created} return results voter_manager = VoterManager() results = voter_manager.retrieve_voter_from_voter_device_id(voter_device_id) if (not positive_value_exists(results['voter_found'])): results = {'success': False, 'status': 'VALID_VOTER_MISSING', 'voter_device_id': voter_device_id, 'twitter_account_created': twitter_account_created} return results voter = results['voter'] twitter_user_manager = TwitterUserManager() twitter_results = twitter_user_manager.retrieve_twitter_link_to_voter(0, voter.we_vote_id, read_only=True) if twitter_results['twitter_link_to_voter_found']: error_results = {'status': 'TWITTER_OWNER_VOTER_FOUND_WHEN_NOT_EXPECTED', 'success': False, 'voter_device_id': voter_device_id, 'twitter_account_created': twitter_account_created} return error_results twitter_auth_manager = TwitterAuthManager() auth_response_results = twitter_auth_manager.retrieve_twitter_auth_response(voter_device_id) if (not auth_response_results['twitter_auth_response_found']): error_results = {'status': 'TWITTER_AUTH_RESPONSE_COULD_NOT_BE_FOUND', 'success': False, 'voter_device_id': voter_device_id, 'twitter_account_created': twitter_account_created} return error_results twitter_auth_response = auth_response_results['twitter_auth_response'] twitter_collision_results = twitter_user_manager.retrieve_twitter_link_to_voter(twitter_auth_response.twitter_id, read_only=True) if twitter_collision_results['twitter_link_to_voter_found']: error_results = {'status': 
'TWITTER_OWNER_VOTER_FOUND_FOR_ANOTHER_VOTER_WHEN_NOT_EXPECTED', 'success': False, 'voter_device_id': voter_device_id, 'twitter_account_created': twitter_account_created} return error_results link_results = twitter_user_manager.create_twitter_link_to_voter(twitter_auth_response.twitter_id, voter.we_vote_id) if (not link_results['twitter_link_to_voter_saved']): error_results = {'status': link_results['status'], 'success': False, 'voter_device_id': voter_device_id, 'twitter_account_created': twitter_account_created} return error_results twitter_account_created = True twitter_link_to_voter = link_results['twitter_link_to_voter'] results = voter_manager.save_twitter_user_values(voter, twitter_auth_response) status += (results['status'] + ', ') success = results['success'] voter = results['voter'] twitter_results = twitter_user_manager.retrieve_twitter_link_to_organization(voter.we_vote_id, read_only=True) if twitter_results['twitter_link_to_organization_found']: twitter_link_to_organization = twitter_results['twitter_link_to_organization'] twitter_link_to_organization_exists = True twitter_link_to_organization_twitter_id = twitter_link_to_organization.twitter_id if voter.linked_organization_we_vote_id: if (positive_value_exists(twitter_link_to_organization.organization_we_vote_id) and positive_value_exists(voter.linked_organization_we_vote_id) and (twitter_link_to_organization.organization_we_vote_id != voter.linked_organization_we_vote_id)): twitter_link_to_organization_organization_id = 0 voter_linked_to_organization_organization_id = 0 move_organization_to_another_complete_results = move_organization_to_another_complete(twitter_link_to_organization_organization_id, twitter_link_to_organization.organization_we_vote_id, voter_linked_to_organization_organization_id, voter.linked_organization_we_vote_id, voter.id, voter.we_vote_id) status += (' ' + move_organization_to_another_complete_results['status']) else: try: voter.linked_organization_we_vote_id = 
twitter_link_to_organization.organization_we_vote_id voter.save() except Exception as e: success = False status += 'VOTER_LINKED_ORGANIZATION_WE_VOTE_ID_NOT_UPDATED ' else: organization_manager = OrganizationManager() organization_from_twitter_id_old_results = organization_manager.retrieve_organization_from_twitter_user_id_old(twitter_auth_response.twitter_id) new_organization_ready = False if organization_from_twitter_id_old_results['organization_found']: new_organization = organization_from_twitter_id_old_results['organization'] new_organization_ready = True else: organization_manager = OrganizationManager() create_results = organization_manager.create_organization(organization_name=voter.get_full_name(), organization_image=voter.voter_photo_url(), twitter_id=twitter_auth_response.twitter_id, organization_type=INDIVIDUAL, we_vote_hosted_profile_image_url_large=voter.we_vote_hosted_profile_image_url_large, we_vote_hosted_profile_image_url_medium=voter.we_vote_hosted_profile_image_url_medium, we_vote_hosted_profile_image_url_tiny=voter.we_vote_hosted_profile_image_url_tiny) if create_results['organization_created']: new_organization = create_results['organization'] new_organization_ready = True else: new_organization = Organization() status += 'NEW_ORGANIZATION_COULD_NOT_BE_CREATED ' if new_organization_ready: try: voter.linked_organization_we_vote_id = new_organization.organization_we_vote_id voter.save() except Exception as e: status += 'UNABLE_TO_UPDATE_VOTER_LINKED_ORGANIZATION_WE_VOTE_ID ' try: results = twitter_user_manager.create_twitter_link_to_organization(twitter_auth_response.twitter_id, voter.linked_organization_we_vote_id) if results['twitter_link_to_organization_saved']: status += 'TwitterLinkToOrganization_CREATED_AFTER_ORGANIZATION_CREATE ' twitter_link_to_organization_exists = True twitter_link_to_organization_twitter_id = twitter_auth_response.twitter_id else: status += results['status'] status += 
'TwitterLinkToOrganization_NOT_CREATED_AFTER_ORGANIZATION_CREATE ' except Exception as e: status += results['status'] status += 'UNABLE_TO_CREATE_TWITTER_LINK_TO_ORG ' if twitter_link_to_organization_exists: organization_list_manager = OrganizationListManager() repair_results = organization_list_manager.repair_twitter_related_organization_caching(twitter_link_to_organization_twitter_id) status += repair_results['status'] results = {'success': success, 'status': status, 'voter_device_id': voter_device_id, 'twitter_account_created': twitter_account_created} return results
def voter_twitter_save_to_current_account_for_api(voter_device_id): '\n\n :param voter_device_id:\n :return:\n ' status = success = False twitter_account_created = False twitter_link_to_organization_exists = False twitter_link_to_organization_twitter_id = 0 results = is_voter_device_id_valid(voter_device_id) if (not results['success']): results = {'success': False, 'status': 'VALID_VOTER_DEVICE_ID_MISSING', 'voter_device_id': voter_device_id, 'twitter_account_created': twitter_account_created} return results voter_manager = VoterManager() results = voter_manager.retrieve_voter_from_voter_device_id(voter_device_id) if (not positive_value_exists(results['voter_found'])): results = {'success': False, 'status': 'VALID_VOTER_MISSING', 'voter_device_id': voter_device_id, 'twitter_account_created': twitter_account_created} return results voter = results['voter'] twitter_user_manager = TwitterUserManager() twitter_results = twitter_user_manager.retrieve_twitter_link_to_voter(0, voter.we_vote_id, read_only=True) if twitter_results['twitter_link_to_voter_found']: error_results = {'status': 'TWITTER_OWNER_VOTER_FOUND_WHEN_NOT_EXPECTED', 'success': False, 'voter_device_id': voter_device_id, 'twitter_account_created': twitter_account_created} return error_results twitter_auth_manager = TwitterAuthManager() auth_response_results = twitter_auth_manager.retrieve_twitter_auth_response(voter_device_id) if (not auth_response_results['twitter_auth_response_found']): error_results = {'status': 'TWITTER_AUTH_RESPONSE_COULD_NOT_BE_FOUND', 'success': False, 'voter_device_id': voter_device_id, 'twitter_account_created': twitter_account_created} return error_results twitter_auth_response = auth_response_results['twitter_auth_response'] twitter_collision_results = twitter_user_manager.retrieve_twitter_link_to_voter(twitter_auth_response.twitter_id, read_only=True) if twitter_collision_results['twitter_link_to_voter_found']: error_results = {'status': 
'TWITTER_OWNER_VOTER_FOUND_FOR_ANOTHER_VOTER_WHEN_NOT_EXPECTED', 'success': False, 'voter_device_id': voter_device_id, 'twitter_account_created': twitter_account_created} return error_results link_results = twitter_user_manager.create_twitter_link_to_voter(twitter_auth_response.twitter_id, voter.we_vote_id) if (not link_results['twitter_link_to_voter_saved']): error_results = {'status': link_results['status'], 'success': False, 'voter_device_id': voter_device_id, 'twitter_account_created': twitter_account_created} return error_results twitter_account_created = True twitter_link_to_voter = link_results['twitter_link_to_voter'] results = voter_manager.save_twitter_user_values(voter, twitter_auth_response) status += (results['status'] + ', ') success = results['success'] voter = results['voter'] twitter_results = twitter_user_manager.retrieve_twitter_link_to_organization(voter.we_vote_id, read_only=True) if twitter_results['twitter_link_to_organization_found']: twitter_link_to_organization = twitter_results['twitter_link_to_organization'] twitter_link_to_organization_exists = True twitter_link_to_organization_twitter_id = twitter_link_to_organization.twitter_id if voter.linked_organization_we_vote_id: if (positive_value_exists(twitter_link_to_organization.organization_we_vote_id) and positive_value_exists(voter.linked_organization_we_vote_id) and (twitter_link_to_organization.organization_we_vote_id != voter.linked_organization_we_vote_id)): twitter_link_to_organization_organization_id = 0 voter_linked_to_organization_organization_id = 0 move_organization_to_another_complete_results = move_organization_to_another_complete(twitter_link_to_organization_organization_id, twitter_link_to_organization.organization_we_vote_id, voter_linked_to_organization_organization_id, voter.linked_organization_we_vote_id, voter.id, voter.we_vote_id) status += (' ' + move_organization_to_another_complete_results['status']) else: try: voter.linked_organization_we_vote_id = 
twitter_link_to_organization.organization_we_vote_id voter.save() except Exception as e: success = False status += 'VOTER_LINKED_ORGANIZATION_WE_VOTE_ID_NOT_UPDATED ' else: organization_manager = OrganizationManager() organization_from_twitter_id_old_results = organization_manager.retrieve_organization_from_twitter_user_id_old(twitter_auth_response.twitter_id) new_organization_ready = False if organization_from_twitter_id_old_results['organization_found']: new_organization = organization_from_twitter_id_old_results['organization'] new_organization_ready = True else: organization_manager = OrganizationManager() create_results = organization_manager.create_organization(organization_name=voter.get_full_name(), organization_image=voter.voter_photo_url(), twitter_id=twitter_auth_response.twitter_id, organization_type=INDIVIDUAL, we_vote_hosted_profile_image_url_large=voter.we_vote_hosted_profile_image_url_large, we_vote_hosted_profile_image_url_medium=voter.we_vote_hosted_profile_image_url_medium, we_vote_hosted_profile_image_url_tiny=voter.we_vote_hosted_profile_image_url_tiny) if create_results['organization_created']: new_organization = create_results['organization'] new_organization_ready = True else: new_organization = Organization() status += 'NEW_ORGANIZATION_COULD_NOT_BE_CREATED ' if new_organization_ready: try: voter.linked_organization_we_vote_id = new_organization.organization_we_vote_id voter.save() except Exception as e: status += 'UNABLE_TO_UPDATE_VOTER_LINKED_ORGANIZATION_WE_VOTE_ID ' try: results = twitter_user_manager.create_twitter_link_to_organization(twitter_auth_response.twitter_id, voter.linked_organization_we_vote_id) if results['twitter_link_to_organization_saved']: status += 'TwitterLinkToOrganization_CREATED_AFTER_ORGANIZATION_CREATE ' twitter_link_to_organization_exists = True twitter_link_to_organization_twitter_id = twitter_auth_response.twitter_id else: status += results['status'] status += 
'TwitterLinkToOrganization_NOT_CREATED_AFTER_ORGANIZATION_CREATE ' except Exception as e: status += results['status'] status += 'UNABLE_TO_CREATE_TWITTER_LINK_TO_ORG ' if twitter_link_to_organization_exists: organization_list_manager = OrganizationListManager() repair_results = organization_list_manager.repair_twitter_related_organization_caching(twitter_link_to_organization_twitter_id) status += repair_results['status'] results = {'success': success, 'status': status, 'voter_device_id': voter_device_id, 'twitter_account_created': twitter_account_created} return results<|docstring|>:param voter_device_id: :return:<|endoftext|>
073066363f1ab3d86deda42be5c9f51804a5b8ca31196be5ace913a55bb47a61
def autodetect_dialects(root_path, resolver, logger): 'Auto-detects which providers to use based on the root path.\n\n Parameters\n ----------\n root_path: str\n The root path for the project provided by a user or detected by\n a LSP client\n\n resolver : PyImportResolver\n Resolver for orginial definition.\n\n logger : Logger object\n\n Returns\n -------\n dialects: list of provider\n ' dialects = [] if os.path.exists(os.path.join(root_path, 'python', 'tvm')): dialects.append(TVMProvider(resolver, logger)) elif os.path.exists(os.path.join(root_path, 'python', 'mxnet')): dialects.append(MXNetProvider(resolver, logger)) elif os.path.exists(os.path.join(root_path, 'torch')): dialects.append(TorchProvider(resolver, logger)) elif os.path.exists(os.path.join(root_path, 'python', 'dgl')): dialects.append(DGLProvider(resolver, logger)) elif os.path.exists(os.path.join(root_path, 'python', 'taichi')): dialects.append(TaichiProvider(resolver, logger)) return dialects
Auto-detects which providers to use based on the root path. Parameters ---------- root_path: str The root path for the project provided by a user or detected by a LSP client resolver : PyImportResolver Resolver for orginial definition. logger : Logger object Returns ------- dialects: list of provider
python/ffi_navigator/dialect/__init__.py
autodetect_dialects
comaniac/ffi-navigator
148
python
def autodetect_dialects(root_path, resolver, logger): 'Auto-detects which providers to use based on the root path.\n\n Parameters\n ----------\n root_path: str\n The root path for the project provided by a user or detected by\n a LSP client\n\n resolver : PyImportResolver\n Resolver for orginial definition.\n\n logger : Logger object\n\n Returns\n -------\n dialects: list of provider\n ' dialects = [] if os.path.exists(os.path.join(root_path, 'python', 'tvm')): dialects.append(TVMProvider(resolver, logger)) elif os.path.exists(os.path.join(root_path, 'python', 'mxnet')): dialects.append(MXNetProvider(resolver, logger)) elif os.path.exists(os.path.join(root_path, 'torch')): dialects.append(TorchProvider(resolver, logger)) elif os.path.exists(os.path.join(root_path, 'python', 'dgl')): dialects.append(DGLProvider(resolver, logger)) elif os.path.exists(os.path.join(root_path, 'python', 'taichi')): dialects.append(TaichiProvider(resolver, logger)) return dialects
def autodetect_dialects(root_path, resolver, logger): 'Auto-detects which providers to use based on the root path.\n\n Parameters\n ----------\n root_path: str\n The root path for the project provided by a user or detected by\n a LSP client\n\n resolver : PyImportResolver\n Resolver for orginial definition.\n\n logger : Logger object\n\n Returns\n -------\n dialects: list of provider\n ' dialects = [] if os.path.exists(os.path.join(root_path, 'python', 'tvm')): dialects.append(TVMProvider(resolver, logger)) elif os.path.exists(os.path.join(root_path, 'python', 'mxnet')): dialects.append(MXNetProvider(resolver, logger)) elif os.path.exists(os.path.join(root_path, 'torch')): dialects.append(TorchProvider(resolver, logger)) elif os.path.exists(os.path.join(root_path, 'python', 'dgl')): dialects.append(DGLProvider(resolver, logger)) elif os.path.exists(os.path.join(root_path, 'python', 'taichi')): dialects.append(TaichiProvider(resolver, logger)) return dialects<|docstring|>Auto-detects which providers to use based on the root path. Parameters ---------- root_path: str The root path for the project provided by a user or detected by a LSP client resolver : PyImportResolver Resolver for orginial definition. logger : Logger object Returns ------- dialects: list of provider<|endoftext|>
88cef5cc2088d7cb05a88a1dd7db601634dbfdbbc346aae982f37d0fa51d88f5
def add_image(self, filename, width='0.8\\textwidth', placement='\\centering'): 'Add an image.to the figure.\n\n :param filename:\n :param width:\n :param placement:\n\n :type filename: str\n :type width: str\n :type placement: str\n ' if (placement is not None): self.append(placement) if (width is not None): width = ('width=' + str(width)) self.append(Command('includegraphics', options=width, arguments=fix_filename(filename)))
Add an image.to the figure. :param filename: :param width: :param placement: :type filename: str :type width: str :type placement: str
pylatex/graphics.py
add_image
votti/PyLaTeX
0
python
def add_image(self, filename, width='0.8\\textwidth', placement='\\centering'): 'Add an image.to the figure.\n\n :param filename:\n :param width:\n :param placement:\n\n :type filename: str\n :type width: str\n :type placement: str\n ' if (placement is not None): self.append(placement) if (width is not None): width = ('width=' + str(width)) self.append(Command('includegraphics', options=width, arguments=fix_filename(filename)))
def add_image(self, filename, width='0.8\\textwidth', placement='\\centering'): 'Add an image.to the figure.\n\n :param filename:\n :param width:\n :param placement:\n\n :type filename: str\n :type width: str\n :type placement: str\n ' if (placement is not None): self.append(placement) if (width is not None): width = ('width=' + str(width)) self.append(Command('includegraphics', options=width, arguments=fix_filename(filename)))<|docstring|>Add an image.to the figure. :param filename: :param width: :param placement: :type filename: str :type width: str :type placement: str<|endoftext|>
65cf3c07609f11f28c08d83b7855cfcdaed4ed415f8f105d8671d11681bcf4ae
def add_image(self, filename, width='\\linewidth', placement=None): 'Add an image to the subfigure.\n\n :param filename:\n :param width:\n :param placement:\n\n :type filename: str\n :type width: str\n :type placement: str\n ' super().add_image(filename, width=width, placement=placement)
Add an image to the subfigure. :param filename: :param width: :param placement: :type filename: str :type width: str :type placement: str
pylatex/graphics.py
add_image
votti/PyLaTeX
0
python
def add_image(self, filename, width='\\linewidth', placement=None): 'Add an image to the subfigure.\n\n :param filename:\n :param width:\n :param placement:\n\n :type filename: str\n :type width: str\n :type placement: str\n ' super().add_image(filename, width=width, placement=placement)
def add_image(self, filename, width='\\linewidth', placement=None): 'Add an image to the subfigure.\n\n :param filename:\n :param width:\n :param placement:\n\n :type filename: str\n :type width: str\n :type placement: str\n ' super().add_image(filename, width=width, placement=placement)<|docstring|>Add an image to the subfigure. :param filename: :param width: :param placement: :type filename: str :type width: str :type placement: str<|endoftext|>
34dd31ef21eb6b4ad371872c27a6982b8837aad9fd9033bb746e7d1e636c74cf
def _save_plot(self, *args, **kwargs): 'Save the plot.\n\n :param plt: The matplotlib.pyplot module\n :type plt: matplotlib.pyplot\n\n :return: The basename with which the plot has been saved.\n :rtype: str\n ' tmp_path = make_temp_dir() filename = os.path.join(tmp_path, (str(uuid.uuid4()) + '.pdf')) self._plt.savefig(filename, *args, **kwargs) return filename
Save the plot. :param plt: The matplotlib.pyplot module :type plt: matplotlib.pyplot :return: The basename with which the plot has been saved. :rtype: str
pylatex/graphics.py
_save_plot
votti/PyLaTeX
0
python
def _save_plot(self, *args, **kwargs): 'Save the plot.\n\n :param plt: The matplotlib.pyplot module\n :type plt: matplotlib.pyplot\n\n :return: The basename with which the plot has been saved.\n :rtype: str\n ' tmp_path = make_temp_dir() filename = os.path.join(tmp_path, (str(uuid.uuid4()) + '.pdf')) self._plt.savefig(filename, *args, **kwargs) return filename
def _save_plot(self, *args, **kwargs): 'Save the plot.\n\n :param plt: The matplotlib.pyplot module\n :type plt: matplotlib.pyplot\n\n :return: The basename with which the plot has been saved.\n :rtype: str\n ' tmp_path = make_temp_dir() filename = os.path.join(tmp_path, (str(uuid.uuid4()) + '.pdf')) self._plt.savefig(filename, *args, **kwargs) return filename<|docstring|>Save the plot. :param plt: The matplotlib.pyplot module :type plt: matplotlib.pyplot :return: The basename with which the plot has been saved. :rtype: str<|endoftext|>
64f55aa1804feb0138d6440076369b0a27009b88a6f4450c99a8f924c74140a1
def add_plot(self, *args, **kwargs): 'Add a plot.\n\n Args\n ----\n args:\n Arguments passed to plt.savefig for displaying the plot.\n kwargs:\n Keyword arguments passed to plt.savefig for displaying the plot. In\n case these contain ``width`` or ``placement``, they will be used\n for the same purpose as in the add_image command. Namely the width\n and placement of the generated plot in the LaTeX document.\n ' add_image_kwargs = {} for key in ('width', 'placement'): if (key in kwargs): add_image_kwargs[key] = kwargs.pop(key) filename = self._save_plot(*args, **kwargs) self.add_image(filename, **add_image_kwargs)
Add a plot. Args ---- args: Arguments passed to plt.savefig for displaying the plot. kwargs: Keyword arguments passed to plt.savefig for displaying the plot. In case these contain ``width`` or ``placement``, they will be used for the same purpose as in the add_image command. Namely the width and placement of the generated plot in the LaTeX document.
pylatex/graphics.py
add_plot
votti/PyLaTeX
0
python
def add_plot(self, *args, **kwargs): 'Add a plot.\n\n Args\n ----\n args:\n Arguments passed to plt.savefig for displaying the plot.\n kwargs:\n Keyword arguments passed to plt.savefig for displaying the plot. In\n case these contain ``width`` or ``placement``, they will be used\n for the same purpose as in the add_image command. Namely the width\n and placement of the generated plot in the LaTeX document.\n ' add_image_kwargs = {} for key in ('width', 'placement'): if (key in kwargs): add_image_kwargs[key] = kwargs.pop(key) filename = self._save_plot(*args, **kwargs) self.add_image(filename, **add_image_kwargs)
def add_plot(self, *args, **kwargs): 'Add a plot.\n\n Args\n ----\n args:\n Arguments passed to plt.savefig for displaying the plot.\n kwargs:\n Keyword arguments passed to plt.savefig for displaying the plot. In\n case these contain ``width`` or ``placement``, they will be used\n for the same purpose as in the add_image command. Namely the width\n and placement of the generated plot in the LaTeX document.\n ' add_image_kwargs = {} for key in ('width', 'placement'): if (key in kwargs): add_image_kwargs[key] = kwargs.pop(key) filename = self._save_plot(*args, **kwargs) self.add_image(filename, **add_image_kwargs)<|docstring|>Add a plot. Args ---- args: Arguments passed to plt.savefig for displaying the plot. kwargs: Keyword arguments passed to plt.savefig for displaying the plot. In case these contain ``width`` or ``placement``, they will be used for the same purpose as in the add_image command. Namely the width and placement of the generated plot in the LaTeX document.<|endoftext|>
b34ca5fc6fb5b90d4652cb46a44927eb0a00a5834b95b2df2a93e1d72f6bcf53
def get_model_inputs(nested_utterances, data_dir='../data/len_500_data/', token_cutoff=500): ' Gets the input representations for running with the model - based on the input of utterances for a given BID\n \n args:\n nested_utterances: nested list of utterances (each nested list is a different DID)(and for each DID there utterances)(these utterances are only those by the author) - @@@ actually current implementation assumes all BID subarrays are already appended together: e.g.: [string1, string2, ...]\n data_dir: path to the directory which holds word_2_id and id_2_word, etc.\n token_cutoff: number of tokens in the input which should be send to model encoder\n\n Returns: x,x_indices,att_mask, x_indices_dict, index_to_word\n ' nlp = spacy.load('en_core_web_sm') with open((data_dir + 'word_to_index.json')) as in_file: word_to_index = json.load(in_file) with open((data_dir + 'index_to_word.json')) as in_file: index_to_word = json.load(in_file) num_fixed_words = len(word_to_index) all_token_lists = [[token.text.lower() for token in nlp(utterance)] for utterance in nested_utterances] all_tokens = [] for token_list in all_token_lists: all_tokens += token_list utterance_tokens = all_tokens[:token_cutoff] x = [] for token in utterance_tokens: if (token in word_to_index): x.append(word_to_index[token]) else: x.append(word_to_index['<UNK>']) att_mask = [0 for _ in range(len(x))] amount_to_pad = (token_cutoff - len(x)) att_mask += [(- np.inf) for _ in range(amount_to_pad)] x_indices = [] x_indices_dict = {} non_vocab_dict = {} index = num_fixed_words for token in utterance_tokens: if (token in word_to_index): x_indices.append(word_to_index[token]) elif (token in non_vocab_dict): x_indices.append(non_vocab_dict[token]) else: non_vocab_dict[token] = index x_indices_dict[index] = token x_indices.append(index) index += 1 x += [0 for _ in range(amount_to_pad)] x_indices += [0 for _ in range(amount_to_pad)] assert (len(x) == len(x_indices) == len(att_mask) == token_cutoff) x = 
np.expand_dims(np.asarray(x, dtype='int32'), axis=0) x_indices = np.expand_dims(np.asarray(x_indices, dtype='int32'), axis=0) att_mask = np.expand_dims(np.asarray(att_mask, dtype='float32'), axis=0) return (x, x_indices, att_mask, x_indices_dict, index_to_word, utterance_tokens)
Gets the input representations for running with the model - based on the input of utterances for a given BID args: nested_utterances: nested list of utterances (each nested list is a different DID)(and for each DID there utterances)(these utterances are only those by the author) - @@@ actually current implementation assumes all BID subarrays are already appended together: e.g.: [string1, string2, ...] data_dir: path to the directory which holds word_2_id and id_2_word, etc. token_cutoff: number of tokens in the input which should be send to model encoder Returns: x,x_indices,att_mask, x_indices_dict, index_to_word
pointer-gen_implementations/code/get_model_predictions.py
get_model_inputs
nateandre/machine_learning
1
python
def get_model_inputs(nested_utterances, data_dir='../data/len_500_data/', token_cutoff=500): ' Gets the input representations for running with the model - based on the input of utterances for a given BID\n \n args:\n nested_utterances: nested list of utterances (each nested list is a different DID)(and for each DID there utterances)(these utterances are only those by the author) - @@@ actually current implementation assumes all BID subarrays are already appended together: e.g.: [string1, string2, ...]\n data_dir: path to the directory which holds word_2_id and id_2_word, etc.\n token_cutoff: number of tokens in the input which should be send to model encoder\n\n Returns: x,x_indices,att_mask, x_indices_dict, index_to_word\n ' nlp = spacy.load('en_core_web_sm') with open((data_dir + 'word_to_index.json')) as in_file: word_to_index = json.load(in_file) with open((data_dir + 'index_to_word.json')) as in_file: index_to_word = json.load(in_file) num_fixed_words = len(word_to_index) all_token_lists = [[token.text.lower() for token in nlp(utterance)] for utterance in nested_utterances] all_tokens = [] for token_list in all_token_lists: all_tokens += token_list utterance_tokens = all_tokens[:token_cutoff] x = [] for token in utterance_tokens: if (token in word_to_index): x.append(word_to_index[token]) else: x.append(word_to_index['<UNK>']) att_mask = [0 for _ in range(len(x))] amount_to_pad = (token_cutoff - len(x)) att_mask += [(- np.inf) for _ in range(amount_to_pad)] x_indices = [] x_indices_dict = {} non_vocab_dict = {} index = num_fixed_words for token in utterance_tokens: if (token in word_to_index): x_indices.append(word_to_index[token]) elif (token in non_vocab_dict): x_indices.append(non_vocab_dict[token]) else: non_vocab_dict[token] = index x_indices_dict[index] = token x_indices.append(index) index += 1 x += [0 for _ in range(amount_to_pad)] x_indices += [0 for _ in range(amount_to_pad)] assert (len(x) == len(x_indices) == len(att_mask) == token_cutoff) x = 
np.expand_dims(np.asarray(x, dtype='int32'), axis=0) x_indices = np.expand_dims(np.asarray(x_indices, dtype='int32'), axis=0) att_mask = np.expand_dims(np.asarray(att_mask, dtype='float32'), axis=0) return (x, x_indices, att_mask, x_indices_dict, index_to_word, utterance_tokens)
def get_model_inputs(nested_utterances, data_dir='../data/len_500_data/', token_cutoff=500): ' Gets the input representations for running with the model - based on the input of utterances for a given BID\n \n args:\n nested_utterances: nested list of utterances (each nested list is a different DID)(and for each DID there utterances)(these utterances are only those by the author) - @@@ actually current implementation assumes all BID subarrays are already appended together: e.g.: [string1, string2, ...]\n data_dir: path to the directory which holds word_2_id and id_2_word, etc.\n token_cutoff: number of tokens in the input which should be send to model encoder\n\n Returns: x,x_indices,att_mask, x_indices_dict, index_to_word\n ' nlp = spacy.load('en_core_web_sm') with open((data_dir + 'word_to_index.json')) as in_file: word_to_index = json.load(in_file) with open((data_dir + 'index_to_word.json')) as in_file: index_to_word = json.load(in_file) num_fixed_words = len(word_to_index) all_token_lists = [[token.text.lower() for token in nlp(utterance)] for utterance in nested_utterances] all_tokens = [] for token_list in all_token_lists: all_tokens += token_list utterance_tokens = all_tokens[:token_cutoff] x = [] for token in utterance_tokens: if (token in word_to_index): x.append(word_to_index[token]) else: x.append(word_to_index['<UNK>']) att_mask = [0 for _ in range(len(x))] amount_to_pad = (token_cutoff - len(x)) att_mask += [(- np.inf) for _ in range(amount_to_pad)] x_indices = [] x_indices_dict = {} non_vocab_dict = {} index = num_fixed_words for token in utterance_tokens: if (token in word_to_index): x_indices.append(word_to_index[token]) elif (token in non_vocab_dict): x_indices.append(non_vocab_dict[token]) else: non_vocab_dict[token] = index x_indices_dict[index] = token x_indices.append(index) index += 1 x += [0 for _ in range(amount_to_pad)] x_indices += [0 for _ in range(amount_to_pad)] assert (len(x) == len(x_indices) == len(att_mask) == token_cutoff) x = 
np.expand_dims(np.asarray(x, dtype='int32'), axis=0) x_indices = np.expand_dims(np.asarray(x_indices, dtype='int32'), axis=0) att_mask = np.expand_dims(np.asarray(att_mask, dtype='float32'), axis=0) return (x, x_indices, att_mask, x_indices_dict, index_to_word, utterance_tokens)<|docstring|>Gets the input representations for running with the model - based on the input of utterances for a given BID args: nested_utterances: nested list of utterances (each nested list is a different DID)(and for each DID there utterances)(these utterances are only those by the author) - @@@ actually current implementation assumes all BID subarrays are already appended together: e.g.: [string1, string2, ...] data_dir: path to the directory which holds word_2_id and id_2_word, etc. token_cutoff: number of tokens in the input which should be send to model encoder Returns: x,x_indices,att_mask, x_indices_dict, index_to_word<|endoftext|>
14032c84f8d03a905aacddab2ddf41f6cf6728ea1f806c43bdcaf7bb057459d0
def apply_scatter_nd(updates, indices, tf_int, tf_float): ' applies scatter_nd over the batch dimension\n ' out = Lambda((lambda entry: K.map_fn((lambda entry: tf.scatter_nd(entry[0], entry[1], tf.constant([30100], dtype=tf_int))), entry, dtype=tf_float)))([indices, updates]) return out
applies scatter_nd over the batch dimension
pointer-gen_implementations/code/get_model_predictions.py
apply_scatter_nd
nateandre/machine_learning
1
python
def apply_scatter_nd(updates, indices, tf_int, tf_float): ' \n ' out = Lambda((lambda entry: K.map_fn((lambda entry: tf.scatter_nd(entry[0], entry[1], tf.constant([30100], dtype=tf_int))), entry, dtype=tf_float)))([indices, updates]) return out
def apply_scatter_nd(updates, indices, tf_int, tf_float): ' \n ' out = Lambda((lambda entry: K.map_fn((lambda entry: tf.scatter_nd(entry[0], entry[1], tf.constant([30100], dtype=tf_int))), entry, dtype=tf_float)))([indices, updates]) return out<|docstring|>applies scatter_nd over the batch dimension<|endoftext|>
be0b5cf0a7d6eb537e5c222d4a63e34abb58dcd49dffc5c4e850bf72f4f48eaa
def apply_scatter_nd_add(tensor, updates, indices, tf_int, tf_float): ' applies the tensor_scatter_nd_add over the batch dimension\n ' out = Lambda((lambda entry: K.map_fn((lambda entry: tf.tensor_scatter_nd_add(entry[0], entry[1], entry[2])), entry, dtype=tf_float)))([tensor, indices, updates]) return out
applies the tensor_scatter_nd_add over the batch dimension
pointer-gen_implementations/code/get_model_predictions.py
apply_scatter_nd_add
nateandre/machine_learning
1
python
def apply_scatter_nd_add(tensor, updates, indices, tf_int, tf_float): ' \n ' out = Lambda((lambda entry: K.map_fn((lambda entry: tf.tensor_scatter_nd_add(entry[0], entry[1], entry[2])), entry, dtype=tf_float)))([tensor, indices, updates]) return out
def apply_scatter_nd_add(tensor, updates, indices, tf_int, tf_float): ' \n ' out = Lambda((lambda entry: K.map_fn((lambda entry: tf.tensor_scatter_nd_add(entry[0], entry[1], entry[2])), entry, dtype=tf_float)))([tensor, indices, updates]) return out<|docstring|>applies the tensor_scatter_nd_add over the batch dimension<|endoftext|>
1713bcf552321ec2d1dbb503ed3e355733ce0f4972edabd99f34a47da031d545
def pointer_gen_encoder(embedding_layer, encoder_h=128, input_len=500, tf_int=tf.int32): ' Returns the encoder portion of the pointer-gen network\n ' x = Input(shape=input_len, dtype=tf_int) input_e = embedding_layer(x) h = Bidirectional(LSTM(encoder_h, activation='tanh', return_sequences=True), merge_mode='concat')(input_e) model = Model(inputs=[x], outputs=[h]) return model
Returns the encoder portion of the pointer-gen network
pointer-gen_implementations/code/get_model_predictions.py
pointer_gen_encoder
nateandre/machine_learning
1
python
def pointer_gen_encoder(embedding_layer, encoder_h=128, input_len=500, tf_int=tf.int32): ' \n ' x = Input(shape=input_len, dtype=tf_int) input_e = embedding_layer(x) h = Bidirectional(LSTM(encoder_h, activation='tanh', return_sequences=True), merge_mode='concat')(input_e) model = Model(inputs=[x], outputs=[h]) return model
def pointer_gen_encoder(embedding_layer, encoder_h=128, input_len=500, tf_int=tf.int32): ' \n ' x = Input(shape=input_len, dtype=tf_int) input_e = embedding_layer(x) h = Bidirectional(LSTM(encoder_h, activation='tanh', return_sequences=True), merge_mode='concat')(input_e) model = Model(inputs=[x], outputs=[h]) return model<|docstring|>Returns the encoder portion of the pointer-gen network<|endoftext|>
cb919d03b075b72a15dd60acfae856e012829c8c00e4b40d559c181abdf8ded4
def pointer_gen_decoder(embedding_layer, decoder_lstm, att_w1, att_w2, att_w3, att_v, vocab_d, vocab_d_pre, pgen_w1, pgen_w2, pgen_w3, encoder_h=128, input_len=500, tf_float=tf.float32, tf_int=tf.int32): ' Returns the decoder portion of the pointer-gen network \n -implemented so that it does only a single step\n ' h = Input(shape=(input_len, (encoder_h * 2)), dtype=tf_float) x_indices_ = Input(shape=input_len, dtype=tf_int) x_indices = tf.expand_dims(x_indices_, axis=(- 1)) fixed_vocab_indices_ = Input(shape=30000, dtype=tf_int) fixed_vocab_indices = tf.expand_dims(fixed_vocab_indices_, axis=(- 1)) att_mask = Input(shape=input_len, dtype=tf_float) decoder_x = Input(shape=1, dtype=tf_int) s_ = Input(shape=256, dtype=tf_float) c_ = Input(shape=256, dtype=tf_float) coverage_vector_ = Input(shape=input_len, dtype=tf_float) (s, c, coverage_vector) = (s_, c_, coverage_vector_) decoder_e = embedding_layer(decoder_x) decoder_input = decoder_e[(:, 0, :)] (s, _, c) = decoder_lstm(tf.expand_dims(decoder_input, axis=1), initial_state=[s, c]) s_rep = RepeatVector(input_len)(s) e = att_v(Activation('tanh')(((att_w1(h) + att_w2(s_rep)) + att_w3(tf.expand_dims(coverage_vector, axis=(- 1)))))) e = (tf.squeeze(e, axis=(- 1)) + att_mask) a = Activation('softmax')(e) coverage_vector += a context = Dot(axes=1)([a, h]) pre_vocab_prob = Concatenate()([s, context]) pre_vocab_prob = vocab_d_pre(pre_vocab_prob) pre_vocab_prob = vocab_d(pre_vocab_prob) vocab_prob = Activation('softmax')(pre_vocab_prob) pre_gen_prob = ((pgen_w1(context) + pgen_w2(s)) + pgen_w3(decoder_input)) gen_prob = Activation('sigmoid')(pre_gen_prob) vocab_prob *= gen_prob copy_prob = (a * (1 - gen_prob)) vocab_prob_projected = apply_scatter_nd(vocab_prob, fixed_vocab_indices, tf_int, tf_float) joint_prob = apply_scatter_nd_add(vocab_prob_projected, copy_prob, x_indices, tf_int, tf_float) model = Model(inputs=[h, x_indices_, decoder_x, att_mask, s_, c_, coverage_vector_, fixed_vocab_indices_], outputs=[joint_prob, s, c, 
coverage_vector]) return model
Returns the decoder portion of the pointer-gen network -implemented so that it does only a single step
pointer-gen_implementations/code/get_model_predictions.py
pointer_gen_decoder
nateandre/machine_learning
1
python
def pointer_gen_decoder(embedding_layer, decoder_lstm, att_w1, att_w2, att_w3, att_v, vocab_d, vocab_d_pre, pgen_w1, pgen_w2, pgen_w3, encoder_h=128, input_len=500, tf_float=tf.float32, tf_int=tf.int32): ' Returns the decoder portion of the pointer-gen network \n -implemented so that it does only a single step\n ' h = Input(shape=(input_len, (encoder_h * 2)), dtype=tf_float) x_indices_ = Input(shape=input_len, dtype=tf_int) x_indices = tf.expand_dims(x_indices_, axis=(- 1)) fixed_vocab_indices_ = Input(shape=30000, dtype=tf_int) fixed_vocab_indices = tf.expand_dims(fixed_vocab_indices_, axis=(- 1)) att_mask = Input(shape=input_len, dtype=tf_float) decoder_x = Input(shape=1, dtype=tf_int) s_ = Input(shape=256, dtype=tf_float) c_ = Input(shape=256, dtype=tf_float) coverage_vector_ = Input(shape=input_len, dtype=tf_float) (s, c, coverage_vector) = (s_, c_, coverage_vector_) decoder_e = embedding_layer(decoder_x) decoder_input = decoder_e[(:, 0, :)] (s, _, c) = decoder_lstm(tf.expand_dims(decoder_input, axis=1), initial_state=[s, c]) s_rep = RepeatVector(input_len)(s) e = att_v(Activation('tanh')(((att_w1(h) + att_w2(s_rep)) + att_w3(tf.expand_dims(coverage_vector, axis=(- 1)))))) e = (tf.squeeze(e, axis=(- 1)) + att_mask) a = Activation('softmax')(e) coverage_vector += a context = Dot(axes=1)([a, h]) pre_vocab_prob = Concatenate()([s, context]) pre_vocab_prob = vocab_d_pre(pre_vocab_prob) pre_vocab_prob = vocab_d(pre_vocab_prob) vocab_prob = Activation('softmax')(pre_vocab_prob) pre_gen_prob = ((pgen_w1(context) + pgen_w2(s)) + pgen_w3(decoder_input)) gen_prob = Activation('sigmoid')(pre_gen_prob) vocab_prob *= gen_prob copy_prob = (a * (1 - gen_prob)) vocab_prob_projected = apply_scatter_nd(vocab_prob, fixed_vocab_indices, tf_int, tf_float) joint_prob = apply_scatter_nd_add(vocab_prob_projected, copy_prob, x_indices, tf_int, tf_float) model = Model(inputs=[h, x_indices_, decoder_x, att_mask, s_, c_, coverage_vector_, fixed_vocab_indices_], outputs=[joint_prob, s, c, 
coverage_vector]) return model
def pointer_gen_decoder(embedding_layer, decoder_lstm, att_w1, att_w2, att_w3, att_v, vocab_d, vocab_d_pre, pgen_w1, pgen_w2, pgen_w3, encoder_h=128, input_len=500, tf_float=tf.float32, tf_int=tf.int32): ' Returns the decoder portion of the pointer-gen network \n -implemented so that it does only a single step\n ' h = Input(shape=(input_len, (encoder_h * 2)), dtype=tf_float) x_indices_ = Input(shape=input_len, dtype=tf_int) x_indices = tf.expand_dims(x_indices_, axis=(- 1)) fixed_vocab_indices_ = Input(shape=30000, dtype=tf_int) fixed_vocab_indices = tf.expand_dims(fixed_vocab_indices_, axis=(- 1)) att_mask = Input(shape=input_len, dtype=tf_float) decoder_x = Input(shape=1, dtype=tf_int) s_ = Input(shape=256, dtype=tf_float) c_ = Input(shape=256, dtype=tf_float) coverage_vector_ = Input(shape=input_len, dtype=tf_float) (s, c, coverage_vector) = (s_, c_, coverage_vector_) decoder_e = embedding_layer(decoder_x) decoder_input = decoder_e[(:, 0, :)] (s, _, c) = decoder_lstm(tf.expand_dims(decoder_input, axis=1), initial_state=[s, c]) s_rep = RepeatVector(input_len)(s) e = att_v(Activation('tanh')(((att_w1(h) + att_w2(s_rep)) + att_w3(tf.expand_dims(coverage_vector, axis=(- 1)))))) e = (tf.squeeze(e, axis=(- 1)) + att_mask) a = Activation('softmax')(e) coverage_vector += a context = Dot(axes=1)([a, h]) pre_vocab_prob = Concatenate()([s, context]) pre_vocab_prob = vocab_d_pre(pre_vocab_prob) pre_vocab_prob = vocab_d(pre_vocab_prob) vocab_prob = Activation('softmax')(pre_vocab_prob) pre_gen_prob = ((pgen_w1(context) + pgen_w2(s)) + pgen_w3(decoder_input)) gen_prob = Activation('sigmoid')(pre_gen_prob) vocab_prob *= gen_prob copy_prob = (a * (1 - gen_prob)) vocab_prob_projected = apply_scatter_nd(vocab_prob, fixed_vocab_indices, tf_int, tf_float) joint_prob = apply_scatter_nd_add(vocab_prob_projected, copy_prob, x_indices, tf_int, tf_float) model = Model(inputs=[h, x_indices_, decoder_x, att_mask, s_, c_, coverage_vector_, fixed_vocab_indices_], outputs=[joint_prob, s, c, 
coverage_vector]) return model<|docstring|>Returns the decoder portion of the pointer-gen network -implemented so that it does only a single step<|endoftext|>
866b518d955c6c50010dcc99f5b127dfce110d7f467fe98c0c79b8189b0e5b35
def get_pointer_gen_network(embedding_dim=100, input_len=500, tf_float=tf.float32, tf_int=tf.int32, model_save_path='../model_params/'): ' loads the encoder and decoder models from memory\n args:\n embedding_dim: the dimensionality of the word embeddings\n model_save_path: directory which stores the saved model parameters\n ' embedding_layer = Embedding(input_dim=30000, output_dim=embedding_dim, mask_zero=True) decoder_h = 256 encoder_h = 128 decoder_lstm = LSTM(decoder_h, activation='tanh', return_state=True) att_w1 = Dense(256, use_bias=True, activation=None) att_w2 = Dense(256, use_bias=True, activation=None) att_w3 = Dense(256, use_bias=True, activation=None) att_v = Dense(1, use_bias=False, activation=None) vocab_d_pre = Dense(512, use_bias=True, activation='relu') vocab_d = Dense(30000, use_bias=True, activation=None) pgen_w1 = Dense(1, use_bias=True, activation=None) pgen_w2 = Dense(1, use_bias=True, activation=None) pgen_w3 = Dense(1, use_bias=True, activation=None) encoder = pointer_gen_encoder(embedding_layer, encoder_h=encoder_h, input_len=input_len, tf_int=tf_int) encoder.load_weights((model_save_path + 'encoder')) decoder = pointer_gen_decoder(embedding_layer, decoder_lstm, att_w1, att_w2, att_w3, att_v, vocab_d, vocab_d_pre, pgen_w1, pgen_w2, pgen_w3, encoder_h=encoder_h, input_len=input_len, tf_float=tf_float, tf_int=tf_int) decoder.load_weights((model_save_path + 'decoder')) return (encoder, decoder)
loads the encoder and decoder models from memory args: embedding_dim: the dimensionality of the word embeddings model_save_path: directory which stores the saved model parameters
pointer-gen_implementations/code/get_model_predictions.py
get_pointer_gen_network
nateandre/machine_learning
1
python
def get_pointer_gen_network(embedding_dim=100, input_len=500, tf_float=tf.float32, tf_int=tf.int32, model_save_path='../model_params/'): ' loads the encoder and decoder models from memory\n args:\n embedding_dim: the dimensionality of the word embeddings\n model_save_path: directory which stores the saved model parameters\n ' embedding_layer = Embedding(input_dim=30000, output_dim=embedding_dim, mask_zero=True) decoder_h = 256 encoder_h = 128 decoder_lstm = LSTM(decoder_h, activation='tanh', return_state=True) att_w1 = Dense(256, use_bias=True, activation=None) att_w2 = Dense(256, use_bias=True, activation=None) att_w3 = Dense(256, use_bias=True, activation=None) att_v = Dense(1, use_bias=False, activation=None) vocab_d_pre = Dense(512, use_bias=True, activation='relu') vocab_d = Dense(30000, use_bias=True, activation=None) pgen_w1 = Dense(1, use_bias=True, activation=None) pgen_w2 = Dense(1, use_bias=True, activation=None) pgen_w3 = Dense(1, use_bias=True, activation=None) encoder = pointer_gen_encoder(embedding_layer, encoder_h=encoder_h, input_len=input_len, tf_int=tf_int) encoder.load_weights((model_save_path + 'encoder')) decoder = pointer_gen_decoder(embedding_layer, decoder_lstm, att_w1, att_w2, att_w3, att_v, vocab_d, vocab_d_pre, pgen_w1, pgen_w2, pgen_w3, encoder_h=encoder_h, input_len=input_len, tf_float=tf_float, tf_int=tf_int) decoder.load_weights((model_save_path + 'decoder')) return (encoder, decoder)
def get_pointer_gen_network(embedding_dim=100, input_len=500, tf_float=tf.float32, tf_int=tf.int32, model_save_path='../model_params/'): ' loads the encoder and decoder models from memory\n args:\n embedding_dim: the dimensionality of the word embeddings\n model_save_path: directory which stores the saved model parameters\n ' embedding_layer = Embedding(input_dim=30000, output_dim=embedding_dim, mask_zero=True) decoder_h = 256 encoder_h = 128 decoder_lstm = LSTM(decoder_h, activation='tanh', return_state=True) att_w1 = Dense(256, use_bias=True, activation=None) att_w2 = Dense(256, use_bias=True, activation=None) att_w3 = Dense(256, use_bias=True, activation=None) att_v = Dense(1, use_bias=False, activation=None) vocab_d_pre = Dense(512, use_bias=True, activation='relu') vocab_d = Dense(30000, use_bias=True, activation=None) pgen_w1 = Dense(1, use_bias=True, activation=None) pgen_w2 = Dense(1, use_bias=True, activation=None) pgen_w3 = Dense(1, use_bias=True, activation=None) encoder = pointer_gen_encoder(embedding_layer, encoder_h=encoder_h, input_len=input_len, tf_int=tf_int) encoder.load_weights((model_save_path + 'encoder')) decoder = pointer_gen_decoder(embedding_layer, decoder_lstm, att_w1, att_w2, att_w3, att_v, vocab_d, vocab_d_pre, pgen_w1, pgen_w2, pgen_w3, encoder_h=encoder_h, input_len=input_len, tf_float=tf_float, tf_int=tf_int) decoder.load_weights((model_save_path + 'decoder')) return (encoder, decoder)<|docstring|>loads the encoder and decoder models from memory args: embedding_dim: the dimensionality of the word embeddings model_save_path: directory which stores the saved model parameters<|endoftext|>
69202ef22bbc6125fd7a5e9144aeda9868d2b5d24161c38cfe5122c4f4ea62bf
def run_beam_search(x, x_indices, att_mask, x_indices_dict, index_to_word, encoder, decoder, max_tokens, beam_width, alpha, c=1e-18): ' Gets the top-prob. predictions based on beam search\n args:\n max_tokens: set maximum number of tokens for generated summary\n beam_width: the number of channels to use for beam search\n alpha: controls the length normalization for beam search\n ' vocab_size = len(index_to_word) models = defaultdict(dict) s = np.zeros((1, 256)).astype('float32') c = np.zeros((1, 256)).astype('float32') coverage_vector = np.zeros((1, 500)).astype('float32') fixed_vocab_indices = np.array([[i for i in range(30000)]]).astype('int32') decoder_x = np.ones((1, 1)).astype('int32') h = encoder([x]) (joint_prob, s, c, coverage_vector) = decoder([h, x_indices, decoder_x, att_mask, s, c, coverage_vector, fixed_vocab_indices]) joint_prob = joint_prob.numpy() for i in range(beam_width): arg_max = np.argmax(joint_prob) models[i]['prob'] = np.log(joint_prob[(0, arg_max)]) if (arg_max < vocab_size): models[i]['tokens'] = [index_to_word[str(arg_max)]] models[i]['next_input'] = np.array([[arg_max]]).astype('int32') else: models[i]['tokens'] = [x_indices_dict[arg_max]] models[i]['next_input'] = np.array([[2]]).astype('int32') (models[i]['s'], models[i]['c'], models[i]['coverage_vector']) = (s, c, coverage_vector) models[i]['done'] = ((arg_max == 1) or (len(models[i]['tokens']) == max_tokens)) joint_prob[(0, arg_max)] = (- np.inf) while (sum([models[i]['done'] for i in range(beam_width)]) != beam_width): all_joint_probs = [] for i in range(beam_width): if (models[i]['done'] is False): (s, c, coverage_vector, decoder_x) = (models[i]['s'], models[i]['c'], models[i]['coverage_vector'], models[i]['next_input']) (joint_prob, s, c, coverage_vector) = decoder([h, x_indices, decoder_x, att_mask, s, c, coverage_vector, fixed_vocab_indices]) joint_prob = ((models[i]['prob'] + np.log(joint_prob.numpy())) * (1 / ((len(models[i]['tokens']) + 1) ** alpha))) (models[i]['s'], 
models[i]['c'], models[i]['coverage_vector']) = (s, c, coverage_vector) else: joint_prob = np.full(joint_prob.shape, (- np.inf)).astype('float32') joint_prob[(0, 0)] = (models[i]['prob'] * (1 / (len(models[i]['tokens']) ** alpha))) all_joint_probs.append(joint_prob) all_joint_probs = np.hstack(all_joint_probs) new_models = defaultdict(dict) for i in range(beam_width): arg_max = np.argmax(all_joint_probs) model_no = (arg_max // joint_prob.shape[1]) if (models[model_no]['done'] is True): (new_models[i]['s'], new_models[i]['c'], new_models[i]['coverage_vector']) = (models[model_no]['s'], models[model_no]['c'], models[model_no]['coverage_vector']) (new_models[i]['prob'], new_models[i]['tokens'], new_models[i]['next_input'], new_models[i]['done']) = (models[model_no]['prob'], models[model_no]['tokens'], models[model_no]['next_input'], models[model_no]['done']) else: new_models[i]['prob'] = (all_joint_probs[(0, arg_max)] / (1 / ((len(models[model_no]['tokens']) + 1) ** alpha))) model_arg_max = (arg_max - (joint_prob.shape[1] * model_no)) if (model_arg_max < vocab_size): new_models[i]['tokens'] = (models[model_no]['tokens'] + [index_to_word[str(model_arg_max)]]) new_models[i]['next_input'] = np.array([[model_arg_max]]).astype('int32') else: new_models[i]['tokens'] = (models[model_no]['tokens'] + [x_indices_dict[model_arg_max]]) new_models[i]['next_input'] = np.array([[2]]).astype('int32') (new_models[i]['s'], new_models[i]['c'], new_models[i]['coverage_vector']) = (models[model_no]['s'], models[model_no]['c'], models[model_no]['coverage_vector']) new_models[i]['done'] = ((model_arg_max == 1) or (len(new_models[i]['tokens']) == max_tokens)) all_joint_probs[(0, arg_max)] = (- np.inf) models = new_models predicted_tokens = models[0]['tokens'] return predicted_tokens
Gets the top-prob. predictions based on beam search args: max_tokens: set maximum number of tokens for generated summary beam_width: the number of channels to use for beam search alpha: controls the length normalization for beam search
pointer-gen_implementations/code/get_model_predictions.py
run_beam_search
nateandre/machine_learning
1
python
def run_beam_search(x, x_indices, att_mask, x_indices_dict, index_to_word, encoder, decoder, max_tokens, beam_width, alpha, c=1e-18): ' Gets the top-prob. predictions based on beam search\n args:\n max_tokens: set maximum number of tokens for generated summary\n beam_width: the number of channels to use for beam search\n alpha: controls the length normalization for beam search\n ' vocab_size = len(index_to_word) models = defaultdict(dict) s = np.zeros((1, 256)).astype('float32') c = np.zeros((1, 256)).astype('float32') coverage_vector = np.zeros((1, 500)).astype('float32') fixed_vocab_indices = np.array([[i for i in range(30000)]]).astype('int32') decoder_x = np.ones((1, 1)).astype('int32') h = encoder([x]) (joint_prob, s, c, coverage_vector) = decoder([h, x_indices, decoder_x, att_mask, s, c, coverage_vector, fixed_vocab_indices]) joint_prob = joint_prob.numpy() for i in range(beam_width): arg_max = np.argmax(joint_prob) models[i]['prob'] = np.log(joint_prob[(0, arg_max)]) if (arg_max < vocab_size): models[i]['tokens'] = [index_to_word[str(arg_max)]] models[i]['next_input'] = np.array([[arg_max]]).astype('int32') else: models[i]['tokens'] = [x_indices_dict[arg_max]] models[i]['next_input'] = np.array([[2]]).astype('int32') (models[i]['s'], models[i]['c'], models[i]['coverage_vector']) = (s, c, coverage_vector) models[i]['done'] = ((arg_max == 1) or (len(models[i]['tokens']) == max_tokens)) joint_prob[(0, arg_max)] = (- np.inf) while (sum([models[i]['done'] for i in range(beam_width)]) != beam_width): all_joint_probs = [] for i in range(beam_width): if (models[i]['done'] is False): (s, c, coverage_vector, decoder_x) = (models[i]['s'], models[i]['c'], models[i]['coverage_vector'], models[i]['next_input']) (joint_prob, s, c, coverage_vector) = decoder([h, x_indices, decoder_x, att_mask, s, c, coverage_vector, fixed_vocab_indices]) joint_prob = ((models[i]['prob'] + np.log(joint_prob.numpy())) * (1 / ((len(models[i]['tokens']) + 1) ** alpha))) (models[i]['s'], 
models[i]['c'], models[i]['coverage_vector']) = (s, c, coverage_vector) else: joint_prob = np.full(joint_prob.shape, (- np.inf)).astype('float32') joint_prob[(0, 0)] = (models[i]['prob'] * (1 / (len(models[i]['tokens']) ** alpha))) all_joint_probs.append(joint_prob) all_joint_probs = np.hstack(all_joint_probs) new_models = defaultdict(dict) for i in range(beam_width): arg_max = np.argmax(all_joint_probs) model_no = (arg_max // joint_prob.shape[1]) if (models[model_no]['done'] is True): (new_models[i]['s'], new_models[i]['c'], new_models[i]['coverage_vector']) = (models[model_no]['s'], models[model_no]['c'], models[model_no]['coverage_vector']) (new_models[i]['prob'], new_models[i]['tokens'], new_models[i]['next_input'], new_models[i]['done']) = (models[model_no]['prob'], models[model_no]['tokens'], models[model_no]['next_input'], models[model_no]['done']) else: new_models[i]['prob'] = (all_joint_probs[(0, arg_max)] / (1 / ((len(models[model_no]['tokens']) + 1) ** alpha))) model_arg_max = (arg_max - (joint_prob.shape[1] * model_no)) if (model_arg_max < vocab_size): new_models[i]['tokens'] = (models[model_no]['tokens'] + [index_to_word[str(model_arg_max)]]) new_models[i]['next_input'] = np.array([[model_arg_max]]).astype('int32') else: new_models[i]['tokens'] = (models[model_no]['tokens'] + [x_indices_dict[model_arg_max]]) new_models[i]['next_input'] = np.array([[2]]).astype('int32') (new_models[i]['s'], new_models[i]['c'], new_models[i]['coverage_vector']) = (models[model_no]['s'], models[model_no]['c'], models[model_no]['coverage_vector']) new_models[i]['done'] = ((model_arg_max == 1) or (len(new_models[i]['tokens']) == max_tokens)) all_joint_probs[(0, arg_max)] = (- np.inf) models = new_models predicted_tokens = models[0]['tokens'] return predicted_tokens
def run_beam_search(x, x_indices, att_mask, x_indices_dict, index_to_word, encoder, decoder, max_tokens, beam_width, alpha, c=1e-18): ' Gets the top-prob. predictions based on beam search\n args:\n max_tokens: set maximum number of tokens for generated summary\n beam_width: the number of channels to use for beam search\n alpha: controls the length normalization for beam search\n ' vocab_size = len(index_to_word) models = defaultdict(dict) s = np.zeros((1, 256)).astype('float32') c = np.zeros((1, 256)).astype('float32') coverage_vector = np.zeros((1, 500)).astype('float32') fixed_vocab_indices = np.array([[i for i in range(30000)]]).astype('int32') decoder_x = np.ones((1, 1)).astype('int32') h = encoder([x]) (joint_prob, s, c, coverage_vector) = decoder([h, x_indices, decoder_x, att_mask, s, c, coverage_vector, fixed_vocab_indices]) joint_prob = joint_prob.numpy() for i in range(beam_width): arg_max = np.argmax(joint_prob) models[i]['prob'] = np.log(joint_prob[(0, arg_max)]) if (arg_max < vocab_size): models[i]['tokens'] = [index_to_word[str(arg_max)]] models[i]['next_input'] = np.array([[arg_max]]).astype('int32') else: models[i]['tokens'] = [x_indices_dict[arg_max]] models[i]['next_input'] = np.array([[2]]).astype('int32') (models[i]['s'], models[i]['c'], models[i]['coverage_vector']) = (s, c, coverage_vector) models[i]['done'] = ((arg_max == 1) or (len(models[i]['tokens']) == max_tokens)) joint_prob[(0, arg_max)] = (- np.inf) while (sum([models[i]['done'] for i in range(beam_width)]) != beam_width): all_joint_probs = [] for i in range(beam_width): if (models[i]['done'] is False): (s, c, coverage_vector, decoder_x) = (models[i]['s'], models[i]['c'], models[i]['coverage_vector'], models[i]['next_input']) (joint_prob, s, c, coverage_vector) = decoder([h, x_indices, decoder_x, att_mask, s, c, coverage_vector, fixed_vocab_indices]) joint_prob = ((models[i]['prob'] + np.log(joint_prob.numpy())) * (1 / ((len(models[i]['tokens']) + 1) ** alpha))) (models[i]['s'], 
models[i]['c'], models[i]['coverage_vector']) = (s, c, coverage_vector) else: joint_prob = np.full(joint_prob.shape, (- np.inf)).astype('float32') joint_prob[(0, 0)] = (models[i]['prob'] * (1 / (len(models[i]['tokens']) ** alpha))) all_joint_probs.append(joint_prob) all_joint_probs = np.hstack(all_joint_probs) new_models = defaultdict(dict) for i in range(beam_width): arg_max = np.argmax(all_joint_probs) model_no = (arg_max // joint_prob.shape[1]) if (models[model_no]['done'] is True): (new_models[i]['s'], new_models[i]['c'], new_models[i]['coverage_vector']) = (models[model_no]['s'], models[model_no]['c'], models[model_no]['coverage_vector']) (new_models[i]['prob'], new_models[i]['tokens'], new_models[i]['next_input'], new_models[i]['done']) = (models[model_no]['prob'], models[model_no]['tokens'], models[model_no]['next_input'], models[model_no]['done']) else: new_models[i]['prob'] = (all_joint_probs[(0, arg_max)] / (1 / ((len(models[model_no]['tokens']) + 1) ** alpha))) model_arg_max = (arg_max - (joint_prob.shape[1] * model_no)) if (model_arg_max < vocab_size): new_models[i]['tokens'] = (models[model_no]['tokens'] + [index_to_word[str(model_arg_max)]]) new_models[i]['next_input'] = np.array([[model_arg_max]]).astype('int32') else: new_models[i]['tokens'] = (models[model_no]['tokens'] + [x_indices_dict[model_arg_max]]) new_models[i]['next_input'] = np.array([[2]]).astype('int32') (new_models[i]['s'], new_models[i]['c'], new_models[i]['coverage_vector']) = (models[model_no]['s'], models[model_no]['c'], models[model_no]['coverage_vector']) new_models[i]['done'] = ((model_arg_max == 1) or (len(new_models[i]['tokens']) == max_tokens)) all_joint_probs[(0, arg_max)] = (- np.inf) models = new_models predicted_tokens = models[0]['tokens'] return predicted_tokens<|docstring|>Gets the top-prob. 
predictions based on beam search args: max_tokens: set maximum number of tokens for generated summary beam_width: the number of channels to use for beam search alpha: controls the length normalization for beam search<|endoftext|>
daba4cb8b37c3ef095d140fe1017a136f07b87dfc84ceccd59408dac0df80f09
def get_runtime_prediction(utterances, max_tokens=200, beam_width=3, alpha=1, embedding_dim=100, input_len=500, data_dir='../data/len_500_data/', model_save_path='../model_params/'): ' Gets runtime predictions using beam search\n args:\n utterances: 1D list of utterances (the second dimension of discussion id has been collapsed into the 1D)\n embedding_dim: the dimensionality of the word embeddings\n model_save_path: path to directory which holds pretrained model params\n data_dir: path to directory which holds preprocessed data\n max_tokens: set maximum number of tokens for generated summary\n beam_width: the number of channels to use for beam search\n alpha: controls the length normalization for beam search\n ' (x, x_indices, att_mask, x_indices_dict, index_to_word, _) = get_model_inputs(utterances, data_dir=data_dir, token_cutoff=input_len) (encoder, decoder) = get_pointer_gen_network(embedding_dim=embedding_dim, input_len=input_len, model_save_path=model_save_path) predicted_tokens = run_beam_search(x, x_indices, att_mask, x_indices_dict, index_to_word, encoder, decoder, max_tokens, beam_width, alpha) return predicted_tokens
Gets runtime predictions using beam search args: utterances: 1D list of utterances (the second dimension of discussion id has been collapsed into the 1D) embedding_dim: the dimensionality of the word embeddings model_save_path: path to directory which holds pretrained model params data_dir: path to directory which holds preprocessed data max_tokens: set maximum number of tokens for generated summary beam_width: the number of channels to use for beam search alpha: controls the length normalization for beam search
pointer-gen_implementations/code/get_model_predictions.py
get_runtime_prediction
nateandre/machine_learning
1
python
def get_runtime_prediction(utterances, max_tokens=200, beam_width=3, alpha=1, embedding_dim=100, input_len=500, data_dir='../data/len_500_data/', model_save_path='../model_params/'): ' Gets runtime predictions using beam search\n args:\n utterances: 1D list of utterances (the second dimension of discussion id has been collapsed into the 1D)\n embedding_dim: the dimensionality of the word embeddings\n model_save_path: path to directory which holds pretrained model params\n data_dir: path to directory which holds preprocessed data\n max_tokens: set maximum number of tokens for generated summary\n beam_width: the number of channels to use for beam search\n alpha: controls the length normalization for beam search\n ' (x, x_indices, att_mask, x_indices_dict, index_to_word, _) = get_model_inputs(utterances, data_dir=data_dir, token_cutoff=input_len) (encoder, decoder) = get_pointer_gen_network(embedding_dim=embedding_dim, input_len=input_len, model_save_path=model_save_path) predicted_tokens = run_beam_search(x, x_indices, att_mask, x_indices_dict, index_to_word, encoder, decoder, max_tokens, beam_width, alpha) return predicted_tokens
def get_runtime_prediction(utterances, max_tokens=200, beam_width=3, alpha=1, embedding_dim=100, input_len=500, data_dir='../data/len_500_data/', model_save_path='../model_params/'): ' Gets runtime predictions using beam search\n args:\n utterances: 1D list of utterances (the second dimension of discussion id has been collapsed into the 1D)\n embedding_dim: the dimensionality of the word embeddings\n model_save_path: path to directory which holds pretrained model params\n data_dir: path to directory which holds preprocessed data\n max_tokens: set maximum number of tokens for generated summary\n beam_width: the number of channels to use for beam search\n alpha: controls the length normalization for beam search\n ' (x, x_indices, att_mask, x_indices_dict, index_to_word, _) = get_model_inputs(utterances, data_dir=data_dir, token_cutoff=input_len) (encoder, decoder) = get_pointer_gen_network(embedding_dim=embedding_dim, input_len=input_len, model_save_path=model_save_path) predicted_tokens = run_beam_search(x, x_indices, att_mask, x_indices_dict, index_to_word, encoder, decoder, max_tokens, beam_width, alpha) return predicted_tokens<|docstring|>Gets runtime predictions using beam search args: utterances: 1D list of utterances (the second dimension of discussion id has been collapsed into the 1D) embedding_dim: the dimensionality of the word embeddings model_save_path: path to directory which holds pretrained model params data_dir: path to directory which holds preprocessed data max_tokens: set maximum number of tokens for generated summary beam_width: the number of channels to use for beam search alpha: controls the length normalization for beam search<|endoftext|>
933128ed1e2fcb1b0b4d634d607609264b454456f02b97aa372154b19f450509
@hook(cmds=['op'], trusted=True, ischannel=True, selfopped=True) def op(code, input): ' op <user> - Op users in a room. If no nick is given, input user is selected. ' nick = (input.group(2) if input.group(2) else input.nick) code.write(['MODE', input.sender, '+o', nick])
op <user> - Op users in a room. If no nick is given, input user is selected.
modules/moderation.py
op
CHCMATT/Code
15
python
@hook(cmds=['op'], trusted=True, ischannel=True, selfopped=True) def op(code, input): ' ' nick = (input.group(2) if input.group(2) else input.nick) code.write(['MODE', input.sender, '+o', nick])
@hook(cmds=['op'], trusted=True, ischannel=True, selfopped=True) def op(code, input): ' ' nick = (input.group(2) if input.group(2) else input.nick) code.write(['MODE', input.sender, '+o', nick])<|docstring|>op <user> - Op users in a room. If no nick is given, input user is selected.<|endoftext|>
978f4043eb18ef42b1f73f0fcf49602b903a83b743069f8d4badbdb9b16e3423
@hook(cmds=['deop'], trusted=True, ischannel=True, selfopped=True) def deop(code, input): ' deop <user> - Deop users in a room. If no nick is given, input user is selected. ' nick = (input.group(2) if input.group(2) else input.nick) code.write(['MODE', input.sender, '-o', nick])
deop <user> - Deop users in a room. If no nick is given, input user is selected.
modules/moderation.py
deop
CHCMATT/Code
15
python
@hook(cmds=['deop'], trusted=True, ischannel=True, selfopped=True) def deop(code, input): ' ' nick = (input.group(2) if input.group(2) else input.nick) code.write(['MODE', input.sender, '-o', nick])
@hook(cmds=['deop'], trusted=True, ischannel=True, selfopped=True) def deop(code, input): ' ' nick = (input.group(2) if input.group(2) else input.nick) code.write(['MODE', input.sender, '-o', nick])<|docstring|>deop <user> - Deop users in a room. If no nick is given, input user is selected.<|endoftext|>
7e2c28201b6182b360f1ac31166fc85d2fb4d4b1fdd1353c2c93a712a52fbc29
@hook(cmds=['voice'], trusted=True, ischannel=True, selfopped=True) def voice(code, input): ' voice <user> - Voice users in a room. If no nick is given, input user is selected. ' nick = (input.group(2) if input.group(2) else input.nick) code.write(['MODE', input.sender, '+v', nick])
voice <user> - Voice users in a room. If no nick is given, input user is selected.
modules/moderation.py
voice
CHCMATT/Code
15
python
@hook(cmds=['voice'], trusted=True, ischannel=True, selfopped=True) def voice(code, input): ' ' nick = (input.group(2) if input.group(2) else input.nick) code.write(['MODE', input.sender, '+v', nick])
@hook(cmds=['voice'], trusted=True, ischannel=True, selfopped=True) def voice(code, input): ' ' nick = (input.group(2) if input.group(2) else input.nick) code.write(['MODE', input.sender, '+v', nick])<|docstring|>voice <user> - Voice users in a room. If no nick is given, input user is selected.<|endoftext|>
9774352e612a161478ccd45a20f0512e1dd681c3f86fa8a68498accc9cd401ee
@hook(cmds=['devoice'], trusted=True, ischannel=True, selfopped=True) def devoice(code, input): ' devoice <user> - Devoice users in a room. If no nick is given, input user is selected. ' nick = (input.group(2) if input.group(2) else input.nick) code.write(['MODE', input.sender, '-v', nick])
devoice <user> - Devoice users in a room. If no nick is given, input user is selected.
modules/moderation.py
devoice
CHCMATT/Code
15
python
@hook(cmds=['devoice'], trusted=True, ischannel=True, selfopped=True) def devoice(code, input): ' ' nick = (input.group(2) if input.group(2) else input.nick) code.write(['MODE', input.sender, '-v', nick])
@hook(cmds=['devoice'], trusted=True, ischannel=True, selfopped=True) def devoice(code, input): ' ' nick = (input.group(2) if input.group(2) else input.nick) code.write(['MODE', input.sender, '-v', nick])<|docstring|>devoice <user> - Devoice users in a room. If no nick is given, input user is selected.<|endoftext|>
71e58c208de2218d881da37cbe0b8eca594cdcd14f5fc25521b06f429981f6b4
@hook(cmds=['kick'], trusted=True, ischannel=True, selfopped=True, ex='kick Liam Abuse!', args=True) def kick(code, input): ' kick <user> [reason] - Kicks a user from the current channel, with a reason if supplied. ' text = input.group(2).split() if (len(text) == 1): target = input.group(2) reason = False else: target = text[0] reason = ' '.join(text[1:]) if (not reason): reason = kick_reason() if (target != code.nick): return code.write(['KICK', input.sender, target], reason) else: return code.say('...')
kick <user> [reason] - Kicks a user from the current channel, with a reason if supplied.
modules/moderation.py
kick
CHCMATT/Code
15
python
@hook(cmds=['kick'], trusted=True, ischannel=True, selfopped=True, ex='kick Liam Abuse!', args=True) def kick(code, input): ' ' text = input.group(2).split() if (len(text) == 1): target = input.group(2) reason = False else: target = text[0] reason = ' '.join(text[1:]) if (not reason): reason = kick_reason() if (target != code.nick): return code.write(['KICK', input.sender, target], reason) else: return code.say('...')
@hook(cmds=['kick'], trusted=True, ischannel=True, selfopped=True, ex='kick Liam Abuse!', args=True) def kick(code, input): ' ' text = input.group(2).split() if (len(text) == 1): target = input.group(2) reason = False else: target = text[0] reason = ' '.join(text[1:]) if (not reason): reason = kick_reason() if (target != code.nick): return code.write(['KICK', input.sender, target], reason) else: return code.say('...')<|docstring|>kick <user> [reason] - Kicks a user from the current channel, with a reason if supplied.<|endoftext|>
fb77a480abc729d938a65274b2f42162eb8d3e76e02cad5781333f6441912f7f
@hook(cmds=['ban', 'b', 'kickban'], trusted=True, ischannel=True, selfopped=True, args=True) def ban(code, input): ' ban <user> - Bans a user from the current channel. Auto-kicks any users matching mask. ' banmask = matchmask(input.group(2)) if (not banmask): return code.say('Invalid banmask! For more info, see: https://github.com/lrstanley/Code/wiki/Masks') return code.write(['MODE', input.sender, '+b', banmask])
ban <user> - Bans a user from the current channel. Auto-kicks any users matching mask.
modules/moderation.py
ban
CHCMATT/Code
15
python
@hook(cmds=['ban', 'b', 'kickban'], trusted=True, ischannel=True, selfopped=True, args=True) def ban(code, input): ' ' banmask = matchmask(input.group(2)) if (not banmask): return code.say('Invalid banmask! For more info, see: https://github.com/lrstanley/Code/wiki/Masks') return code.write(['MODE', input.sender, '+b', banmask])
@hook(cmds=['ban', 'b', 'kickban'], trusted=True, ischannel=True, selfopped=True, args=True) def ban(code, input): ' ' banmask = matchmask(input.group(2)) if (not banmask): return code.say('Invalid banmask! For more info, see: https://github.com/lrstanley/Code/wiki/Masks') return code.write(['MODE', input.sender, '+b', banmask])<|docstring|>ban <user> - Bans a user from the current channel. Auto-kicks any users matching mask.<|endoftext|>
0af52fb1b07e7174c7f851db7425349901b8d1f12c7e103b72dee532d02032d6
@hook(cmds=['unban', 'ub'], trusted=True, ischannel=True, selfopped=True, args=True) def unban(code, input): ' unban <user> - Unbans a user from the current channel. ' banmask = matchmask(input.group(2)) if (not banmask): return code.say('Invalid banmask! For more info, see: https://github.com/lrstanley/Code/wiki/Masks') return code.write(['MODE', input.sender, '-b', banmask])
unban <user> - Unbans a user from the current channel.
modules/moderation.py
unban
CHCMATT/Code
15
python
@hook(cmds=['unban', 'ub'], trusted=True, ischannel=True, selfopped=True, args=True) def unban(code, input): ' ' banmask = matchmask(input.group(2)) if (not banmask): return code.say('Invalid banmask! For more info, see: https://github.com/lrstanley/Code/wiki/Masks') return code.write(['MODE', input.sender, '-b', banmask])
@hook(cmds=['unban', 'ub'], trusted=True, ischannel=True, selfopped=True, args=True) def unban(code, input): ' ' banmask = matchmask(input.group(2)) if (not banmask): return code.say('Invalid banmask! For more info, see: https://github.com/lrstanley/Code/wiki/Masks') return code.write(['MODE', input.sender, '-b', banmask])<|docstring|>unban <user> - Unbans a user from the current channel.<|endoftext|>
a1be070b0881f143612e6cf321518e344bde73327b3a68e721b1a78e10136eec
@hook(cmds=['topic'], trusted=True, ischannel=True, selfopped=True, args=True) def topic(code, input): ' topic <text> - Sets the topic of the current channel to the given text. ' code.write(['PRIVMSG', 'ChanServ'], ('TOPIC %s %s' % (input.sender, input.group(2))))
topic <text> - Sets the topic of the current channel to the given text.
modules/moderation.py
topic
CHCMATT/Code
15
python
@hook(cmds=['topic'], trusted=True, ischannel=True, selfopped=True, args=True) def topic(code, input): ' ' code.write(['PRIVMSG', 'ChanServ'], ('TOPIC %s %s' % (input.sender, input.group(2))))
@hook(cmds=['topic'], trusted=True, ischannel=True, selfopped=True, args=True) def topic(code, input): ' ' code.write(['PRIVMSG', 'ChanServ'], ('TOPIC %s %s' % (input.sender, input.group(2))))<|docstring|>topic <text> - Sets the topic of the current channel to the given text.<|endoftext|>
4133f37c5141c0278a1d1c879d2e96e945e9d52b5c607b8a613f79b74ef10918
def perceptualLoss(output, target, vggnet): '\n use vgg19 conv1_2, conv2_2, conv3_3 feature, before relu layer\n ' weights = [1, 0.2, 0.04] features_fake = vggnet(fakeIm) features_real = vggnet(realIm) features_real_no_grad = [f_real.detach() for f_real in features_real] mse_loss = nn.MSELoss() loss = 0 for i in range(len(features_real)): loss_i = mse_loss(features_fake[i], features_real_no_grad[i]) loss = (loss + (loss_i * weights[i])) return loss
use vgg19 conv1_2, conv2_2, conv3_3 feature, before relu layer
losses/losses.py
perceptualLoss
jamekuma/IGNN
2
python
def perceptualLoss(output, target, vggnet): '\n \n ' weights = [1, 0.2, 0.04] features_fake = vggnet(fakeIm) features_real = vggnet(realIm) features_real_no_grad = [f_real.detach() for f_real in features_real] mse_loss = nn.MSELoss() loss = 0 for i in range(len(features_real)): loss_i = mse_loss(features_fake[i], features_real_no_grad[i]) loss = (loss + (loss_i * weights[i])) return loss
def perceptualLoss(output, target, vggnet): '\n \n ' weights = [1, 0.2, 0.04] features_fake = vggnet(fakeIm) features_real = vggnet(realIm) features_real_no_grad = [f_real.detach() for f_real in features_real] mse_loss = nn.MSELoss() loss = 0 for i in range(len(features_real)): loss_i = mse_loss(features_fake[i], features_real_no_grad[i]) loss = (loss + (loss_i * weights[i])) return loss<|docstring|>use vgg19 conv1_2, conv2_2, conv3_3 feature, before relu layer<|endoftext|>
240b70571c2fb4120eddb9d41ad1ad57a2a42ce3ca129975441755e3f3cefcf8
def __init__(self, n_components=None, n_selected_components=None, contamination=0.1, copy=True, whiten=False, svd_solver='auto', tol=0.0, iterated_power='auto', random_state=None, weighted=True, standardization=True): "Principal component analysis (PCA)\n\n Parameters\n ----------\n n_components : int\n Number of components to keep.\n\n n_selected_components : int, optional (default=None)\n If not set, use\n all principal components.\n\n contamination : float in (0., 0.5), optional (default=0.1)\n The amount of contamination of the data set, i.e.\n the proportion of outliers in the data set. Used when fitting to\n define the threshold on the decision function.\n\n copy : bool (default True)\n If False, data passed to fit are overwritten and running\n fit(X).transform(X) will not yield the expected results,\n use fit_transform(X) instead.\n\n whiten : bool, optional (default False)\n\n svd_solver : string {'auto', 'full', 'arpack', 'randomized'}\n\n tol : float >= 0, optional (default .0)\n Tolerance for singular values computed by svd_solver == 'arpack'.\n\n iterated_power : int >= 0, or 'auto', (default 'auto')\n Number of iterations for the power method computed by\n svd_solver == 'randomized'.\n\n random_state : int\n\n weighted : bool, optional (default=True)\n If True, the eigenvalues are used in score computation.\n\n standardization : bool, optional (default=True)\n If True, perform standardization first to convert\n data to zero mean and unit variance.\n See http://scikit-learn.org/stable/auto_examples/preprocessing/plot_scaling_importance.html\n\n " self.n_components = n_components self.n_selected_components = n_selected_components self.copy = copy self.whiten = whiten self.svd_solver = svd_solver self.tol = tol self.iterated_power = iterated_power self.random_state = random_state self.weighted = weighted self.standardization = standardization self.score_name = 'reconstructed' self.contamination = contamination
Principal component analysis (PCA) Parameters ---------- n_components : int Number of components to keep. n_selected_components : int, optional (default=None) If not set, use all principal components. contamination : float in (0., 0.5), optional (default=0.1) The amount of contamination of the data set, i.e. the proportion of outliers in the data set. Used when fitting to define the threshold on the decision function. copy : bool (default True) If False, data passed to fit are overwritten and running fit(X).transform(X) will not yield the expected results, use fit_transform(X) instead. whiten : bool, optional (default False) svd_solver : string {'auto', 'full', 'arpack', 'randomized'} tol : float >= 0, optional (default .0) Tolerance for singular values computed by svd_solver == 'arpack'. iterated_power : int >= 0, or 'auto', (default 'auto') Number of iterations for the power method computed by svd_solver == 'randomized'. random_state : int weighted : bool, optional (default=True) If True, the eigenvalues are used in score computation. standardization : bool, optional (default=True) If True, perform standardization first to convert data to zero mean and unit variance. See http://scikit-learn.org/stable/auto_examples/preprocessing/plot_scaling_importance.html
ndm/pca.py
__init__
shinan6/odet
0
python
def __init__(self, n_components=None, n_selected_components=None, contamination=0.1, copy=True, whiten=False, svd_solver='auto', tol=0.0, iterated_power='auto', random_state=None, weighted=True, standardization=True): "Principal component analysis (PCA)\n\n Parameters\n ----------\n n_components : int\n Number of components to keep.\n\n n_selected_components : int, optional (default=None)\n If not set, use\n all principal components.\n\n contamination : float in (0., 0.5), optional (default=0.1)\n The amount of contamination of the data set, i.e.\n the proportion of outliers in the data set. Used when fitting to\n define the threshold on the decision function.\n\n copy : bool (default True)\n If False, data passed to fit are overwritten and running\n fit(X).transform(X) will not yield the expected results,\n use fit_transform(X) instead.\n\n whiten : bool, optional (default False)\n\n svd_solver : string {'auto', 'full', 'arpack', 'randomized'}\n\n tol : float >= 0, optional (default .0)\n Tolerance for singular values computed by svd_solver == 'arpack'.\n\n iterated_power : int >= 0, or 'auto', (default 'auto')\n Number of iterations for the power method computed by\n svd_solver == 'randomized'.\n\n random_state : int\n\n weighted : bool, optional (default=True)\n If True, the eigenvalues are used in score computation.\n\n standardization : bool, optional (default=True)\n If True, perform standardization first to convert\n data to zero mean and unit variance.\n See http://scikit-learn.org/stable/auto_examples/preprocessing/plot_scaling_importance.html\n\n " self.n_components = n_components self.n_selected_components = n_selected_components self.copy = copy self.whiten = whiten self.svd_solver = svd_solver self.tol = tol self.iterated_power = iterated_power self.random_state = random_state self.weighted = weighted self.standardization = standardization self.score_name = 'reconstructed' self.contamination = contamination
def __init__(self, n_components=None, n_selected_components=None, contamination=0.1, copy=True, whiten=False, svd_solver='auto', tol=0.0, iterated_power='auto', random_state=None, weighted=True, standardization=True): "Principal component analysis (PCA)\n\n Parameters\n ----------\n n_components : int\n Number of components to keep.\n\n n_selected_components : int, optional (default=None)\n If not set, use\n all principal components.\n\n contamination : float in (0., 0.5), optional (default=0.1)\n The amount of contamination of the data set, i.e.\n the proportion of outliers in the data set. Used when fitting to\n define the threshold on the decision function.\n\n copy : bool (default True)\n If False, data passed to fit are overwritten and running\n fit(X).transform(X) will not yield the expected results,\n use fit_transform(X) instead.\n\n whiten : bool, optional (default False)\n\n svd_solver : string {'auto', 'full', 'arpack', 'randomized'}\n\n tol : float >= 0, optional (default .0)\n Tolerance for singular values computed by svd_solver == 'arpack'.\n\n iterated_power : int >= 0, or 'auto', (default 'auto')\n Number of iterations for the power method computed by\n svd_solver == 'randomized'.\n\n random_state : int\n\n weighted : bool, optional (default=True)\n If True, the eigenvalues are used in score computation.\n\n standardization : bool, optional (default=True)\n If True, perform standardization first to convert\n data to zero mean and unit variance.\n See http://scikit-learn.org/stable/auto_examples/preprocessing/plot_scaling_importance.html\n\n " self.n_components = n_components self.n_selected_components = n_selected_components self.copy = copy self.whiten = whiten self.svd_solver = svd_solver self.tol = tol self.iterated_power = iterated_power self.random_state = random_state self.weighted = weighted self.standardization = standardization self.score_name = 'reconstructed' self.contamination = contamination<|docstring|>Principal component analysis 
(PCA) Parameters ---------- n_components : int Number of components to keep. n_selected_components : int, optional (default=None) If not set, use all principal components. contamination : float in (0., 0.5), optional (default=0.1) The amount of contamination of the data set, i.e. the proportion of outliers in the data set. Used when fitting to define the threshold on the decision function. copy : bool (default True) If False, data passed to fit are overwritten and running fit(X).transform(X) will not yield the expected results, use fit_transform(X) instead. whiten : bool, optional (default False) svd_solver : string {'auto', 'full', 'arpack', 'randomized'} tol : float >= 0, optional (default .0) Tolerance for singular values computed by svd_solver == 'arpack'. iterated_power : int >= 0, or 'auto', (default 'auto') Number of iterations for the power method computed by svd_solver == 'randomized'. random_state : int weighted : bool, optional (default=True) If True, the eigenvalues are used in score computation. standardization : bool, optional (default=True) If True, perform standardization first to convert data to zero mean and unit variance. See http://scikit-learn.org/stable/auto_examples/preprocessing/plot_scaling_importance.html<|endoftext|>
cf128539d672650e749b8ca3137c6a121424eb89e727813cb59c1e4f89b0c741
def fit(self, X_train, y_train=None): 'Fit detector. y is ignored in unsupervised methods.\n\n Parameters\n ----------\n X_train : numpy array of shape (n_samples, n_features)\n The input samples.\n\n y_train : Ignored\n Not used, present for API consistency by convention.\n\n Returns\n -------\n self : object\n the fitted estimator.\n ' X_train = check_array(X_train) self._set_n_classes(y_train) self.model_ = _PCA(n_components=self.n_components, copy=self.copy, whiten=self.whiten, svd_solver=self.svd_solver, tol=self.tol, iterated_power=self.iterated_power, random_state=self.random_state) self.model_.fit(X_train) return self
Fit detector. y is ignored in unsupervised methods. Parameters ---------- X_train : numpy array of shape (n_samples, n_features) The input samples. y_train : Ignored Not used, present for API consistency by convention. Returns ------- self : object the fitted estimator.
ndm/pca.py
fit
shinan6/odet
0
python
def fit(self, X_train, y_train=None): 'Fit detector. y is ignored in unsupervised methods.\n\n Parameters\n ----------\n X_train : numpy array of shape (n_samples, n_features)\n The input samples.\n\n y_train : Ignored\n Not used, present for API consistency by convention.\n\n Returns\n -------\n self : object\n the fitted estimator.\n ' X_train = check_array(X_train) self._set_n_classes(y_train) self.model_ = _PCA(n_components=self.n_components, copy=self.copy, whiten=self.whiten, svd_solver=self.svd_solver, tol=self.tol, iterated_power=self.iterated_power, random_state=self.random_state) self.model_.fit(X_train) return self
def fit(self, X_train, y_train=None): 'Fit detector. y is ignored in unsupervised methods.\n\n Parameters\n ----------\n X_train : numpy array of shape (n_samples, n_features)\n The input samples.\n\n y_train : Ignored\n Not used, present for API consistency by convention.\n\n Returns\n -------\n self : object\n the fitted estimator.\n ' X_train = check_array(X_train) self._set_n_classes(y_train) self.model_ = _PCA(n_components=self.n_components, copy=self.copy, whiten=self.whiten, svd_solver=self.svd_solver, tol=self.tol, iterated_power=self.iterated_power, random_state=self.random_state) self.model_.fit(X_train) return self<|docstring|>Fit detector. y is ignored in unsupervised methods. Parameters ---------- X_train : numpy array of shape (n_samples, n_features) The input samples. y_train : Ignored Not used, present for API consistency by convention. Returns ------- self : object the fitted estimator.<|endoftext|>
6c95c250fff3ec534295afcc58126f47925d5a56de18d01ecf694e3aeb6ba905
def decision_function(self, X): 'Predict raw anomaly score of X using the fitted detector.\n\n The anomaly score of an input sample is computed based on different\n detector algorithms. For consistency, outliers are assigned with\n larger anomaly scores.\n\n Parameters\n ----------\n X : numpy array of shape (n_samples, n_features)\n The training input samples. Sparse matrices are accepted only\n if they are supported by the base estimator.\n\n Returns\n -------\n anomaly_scores : numpy array of shape (n_samples,)\n The anomaly score of the input samples.\n ' return self.model_.decision_function(X)
Predict raw anomaly score of X using the fitted detector. The anomaly score of an input sample is computed based on different detector algorithms. For consistency, outliers are assigned with larger anomaly scores. Parameters ---------- X : numpy array of shape (n_samples, n_features) The training input samples. Sparse matrices are accepted only if they are supported by the base estimator. Returns ------- anomaly_scores : numpy array of shape (n_samples,) The anomaly score of the input samples.
ndm/pca.py
decision_function
shinan6/odet
0
python
def decision_function(self, X): 'Predict raw anomaly score of X using the fitted detector.\n\n The anomaly score of an input sample is computed based on different\n detector algorithms. For consistency, outliers are assigned with\n larger anomaly scores.\n\n Parameters\n ----------\n X : numpy array of shape (n_samples, n_features)\n The training input samples. Sparse matrices are accepted only\n if they are supported by the base estimator.\n\n Returns\n -------\n anomaly_scores : numpy array of shape (n_samples,)\n The anomaly score of the input samples.\n ' return self.model_.decision_function(X)
def decision_function(self, X): 'Predict raw anomaly score of X using the fitted detector.\n\n The anomaly score of an input sample is computed based on different\n detector algorithms. For consistency, outliers are assigned with\n larger anomaly scores.\n\n Parameters\n ----------\n X : numpy array of shape (n_samples, n_features)\n The training input samples. Sparse matrices are accepted only\n if they are supported by the base estimator.\n\n Returns\n -------\n anomaly_scores : numpy array of shape (n_samples,)\n The anomaly score of the input samples.\n ' return self.model_.decision_function(X)<|docstring|>Predict raw anomaly score of X using the fitted detector. The anomaly score of an input sample is computed based on different detector algorithms. For consistency, outliers are assigned with larger anomaly scores. Parameters ---------- X : numpy array of shape (n_samples, n_features) The training input samples. Sparse matrices are accepted only if they are supported by the base estimator. Returns ------- anomaly_scores : numpy array of shape (n_samples,) The anomaly score of the input samples.<|endoftext|>
f9a4a130fd1ac368cd54b2aee513bdf446b00cadc73b655d7892e8fb94eb8e73
def get_colours(self): 'Maps the cube to the 1D colour list that can be displayed on the cube\n\n This mapping is done based on the order in which each LED exists in sequence in the real cube.\n The first layer is ordered:\n 16 15 14 13\n 9 10 11 12\n 8 7 6 5\n 1 2 3 4\n The second layer reverses this ordering.\n\n For 8x8x8 cubes, the layer ordering is reversed.\n ' return [self.grid[(((self.size - 1) - x) if (self.size == 8) else x)][(((self.size - 1) - y) if ((x % 2) == 0) else y)][(((self.size - 1) - z) if ((((x * self.size) + y) % 2) == 0) else z)] for x in range(self.size) for y in range(self.size) for z in range(self.size)]
Maps the cube to the 1D colour list that can be displayed on the cube This mapping is done based on the order in which each LED exists in sequence in the real cube. The first layer is ordered: 16 15 14 13 9 10 11 12 8 7 6 5 1 2 3 4 The second layer reverses this ordering. For 8x8x8 cubes, the layer ordering is reversed.
visuals/cube.py
get_colours
daliasen/LED-Cube
4
python
def get_colours(self): 'Maps the cube to the 1D colour list that can be displayed on the cube\n\n This mapping is done based on the order in which each LED exists in sequence in the real cube.\n The first layer is ordered:\n 16 15 14 13\n 9 10 11 12\n 8 7 6 5\n 1 2 3 4\n The second layer reverses this ordering.\n\n For 8x8x8 cubes, the layer ordering is reversed.\n ' return [self.grid[(((self.size - 1) - x) if (self.size == 8) else x)][(((self.size - 1) - y) if ((x % 2) == 0) else y)][(((self.size - 1) - z) if ((((x * self.size) + y) % 2) == 0) else z)] for x in range(self.size) for y in range(self.size) for z in range(self.size)]
def get_colours(self): 'Maps the cube to the 1D colour list that can be displayed on the cube\n\n This mapping is done based on the order in which each LED exists in sequence in the real cube.\n The first layer is ordered:\n 16 15 14 13\n 9 10 11 12\n 8 7 6 5\n 1 2 3 4\n The second layer reverses this ordering.\n\n For 8x8x8 cubes, the layer ordering is reversed.\n ' return [self.grid[(((self.size - 1) - x) if (self.size == 8) else x)][(((self.size - 1) - y) if ((x % 2) == 0) else y)][(((self.size - 1) - z) if ((((x * self.size) + y) % 2) == 0) else z)] for x in range(self.size) for y in range(self.size) for z in range(self.size)]<|docstring|>Maps the cube to the 1D colour list that can be displayed on the cube This mapping is done based on the order in which each LED exists in sequence in the real cube. The first layer is ordered: 16 15 14 13 9 10 11 12 8 7 6 5 1 2 3 4 The second layer reverses this ordering. For 8x8x8 cubes, the layer ordering is reversed.<|endoftext|>
d58c647cd2968c4a5fd804346a07591f8989990ea167de0263be479996fdb173
def fill_layer(self, direction, layer, colours): 'Fills the given layer in the given direction with the given colour.\n\n As the layer number increases [0-3], the filled layer moves away from the given direction.' if (type(colours) is Colour): colours = [[colours for i in range(SIZE)] for j in range(SIZE)] for i in range(SIZE): for j in range(SIZE): coords = convert_face_coordinates(direction, (i, j), layer) self.grid[coords.x][coords.y][coords.z] = colours[i][j]
Fills the given layer in the given direction with the given colour. As the layer number increases [0-3], the filled layer moves away from the given direction.
visuals/cube.py
fill_layer
daliasen/LED-Cube
4
python
def fill_layer(self, direction, layer, colours): 'Fills the given layer in the given direction with the given colour.\n\n As the layer number increases [0-3], the filled layer moves away from the given direction.' if (type(colours) is Colour): colours = [[colours for i in range(SIZE)] for j in range(SIZE)] for i in range(SIZE): for j in range(SIZE): coords = convert_face_coordinates(direction, (i, j), layer) self.grid[coords.x][coords.y][coords.z] = colours[i][j]
def fill_layer(self, direction, layer, colours): 'Fills the given layer in the given direction with the given colour.\n\n As the layer number increases [0-3], the filled layer moves away from the given direction.' if (type(colours) is Colour): colours = [[colours for i in range(SIZE)] for j in range(SIZE)] for i in range(SIZE): for j in range(SIZE): coords = convert_face_coordinates(direction, (i, j), layer) self.grid[coords.x][coords.y][coords.z] = colours[i][j]<|docstring|>Fills the given layer in the given direction with the given colour. As the layer number increases [0-3], the filled layer moves away from the given direction.<|endoftext|>
c5e412647693e47a898be140a517162bebb437f3f40b0ff32203e02bed3f2853
def get_layer(self, direction, layer): 'Gets the given layer, as a 2D list of colours.' result = [[Colour.BLACK for i in range(SIZE)] for j in range(SIZE)] for i in range(SIZE): for j in range(SIZE): coords = convert_face_coordinates(direction, (i, j), layer) result[i][j] = self.grid[coords.x][coords.y][coords.z] return result
Gets the given layer, as a 2D list of colours.
visuals/cube.py
get_layer
daliasen/LED-Cube
4
python
def get_layer(self, direction, layer): result = [[Colour.BLACK for i in range(SIZE)] for j in range(SIZE)] for i in range(SIZE): for j in range(SIZE): coords = convert_face_coordinates(direction, (i, j), layer) result[i][j] = self.grid[coords.x][coords.y][coords.z] return result
def get_layer(self, direction, layer): result = [[Colour.BLACK for i in range(SIZE)] for j in range(SIZE)] for i in range(SIZE): for j in range(SIZE): coords = convert_face_coordinates(direction, (i, j), layer) result[i][j] = self.grid[coords.x][coords.y][coords.z] return result<|docstring|>Gets the given layer, as a 2D list of colours.<|endoftext|>