content stringlengths 0 1.05M | origin stringclasses 2
values | type stringclasses 2
values |
|---|---|---|
# Copyright (c) 2020 PaddlePaddle Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# TODO: define the initializers to create a Parameter in neural network
# Re-export the public initializer APIs from their implementation submodules.
# Each submodule declares its own __all__, which is aggregated into this
# package's __all__ below (order mirrors the import order).
from ...fluid.initializer import Bilinear  #DEFINE_ALIAS
from . import constant
from .constant import Constant  #DEFINE_ALIAS
from . import kaiming
from .kaiming import KaimingNormal  #DEFINE_ALIAS
from .kaiming import KaimingUniform  #DEFINE_ALIAS

# Bilinear comes from fluid and has no submodule __all__ to merge.
__all__ = ['Bilinear', ]
__all__ += constant.__all__
__all__ += kaiming.__all__

from . import xavier
from .xavier import XavierNormal  #DEFINE_ALIAS
from .xavier import XavierUniform  #DEFINE_ALIAS
from . import assign
from .assign import Assign  #DEFINE_ALIAS
from . import normal
from .normal import Normal  #DEFINE_ALIAS
from .normal import TruncatedNormal  #DEFINE_ALIAS
from . import uniform
from .uniform import Uniform  #DEFINE_ALIAS

__all__ += xavier.__all__
__all__ += assign.__all__
__all__ += normal.__all__
__all__ += uniform.__all__
| nilq/baby-python | python |
# ----- ---- --- -- -
# Copyright 2020 The Axiom Foundation. All Rights Reserved.
#
# Licensed under the Apache License 2.0 (the "License"). You may not use
# this file except in compliance with the License. You can obtain a copy
# in the file LICENSE in the source distribution or at
# https://www.apache.org/licenses/LICENSE-2.0.txt
# - -- --- ---- -----
#!/usr/bin/env python3
"""
Process csv for ETL script
"""
import sys # to print to stderr
import csv
import pymongo
from pymongo import MongoClient
import pdb
"""
# if getting timeout from MongoDB, you might have to whitelist your IP for access:
- login to AWS,
- go to EC2
- N Virginia east
- click on the 14 instances
- click on the one starting with Primary
- Click on the security groups below
- click anyone of them
- there will be a filter put in the above search
- take it out
- and put Mongo in there
- you should see 3 security groups
- click on each one of those and look at the Inbound tab below
- that is where you add the firewall rule
"""
def main():
    """Merge ndau avatar addresses from MongoDB into a spreadsheet CSV.

    Reads the input CSV, matches each MongoDB account record to spreadsheet
    rows by userId (CSV column 8), writes account addresses into column 3,
    assigns node addresses round-robin into column 12 for populated rows,
    and writes the merged rows to the output CSV.

    Fixes over the original:
    - `first_line` is reset per Mongo record and guarded before use
      (previously it could be unbound or stale from an earlier record,
      raising NameError / annotating the wrong row).
    - CSV files are opened with context managers (and newline='') so
      handles are closed deterministically.
    - argparse help text no longer concatenates without a separator.
    """
    import argparse
    parser = argparse.ArgumentParser(description="Merges ndau spreadsheet with MongoDB data")
    parser.add_argument('-v', '--verbose', action='store_true',
                        help='print verbose info for debugging. Default: false')
    parser.add_argument('-i', '--input', default="input.csv",
                        help='input .csv file, default: input.csv')
    parser.add_argument('-o', '--output', default="output.csv",
                        help='output .csv file, default output.csv')
    args = parser.parse_args()

    # allow verbose printing
    global verboseFlag
    verboseFlag = args.verbose
    if verboseFlag:
        for p in sys.path:
            print(p)

    # list of node addresses to be assigned to accounts (round robin);
    # these nodes come from the "data and procedures/genesis_nodes.txt" file
    # in the genesis repo
    node_list = ['ndarw5i7rmqtqstw4mtnchmfvxnrq4k3e2ytsyvsc7nxt2y7',
                 'ndaq3nqhez3vvxn8rx4m6s6n3kv7k9js8i3xw8hqnwvi2ete',
                 'ndahnsxr8zh7r6u685ka865wz77wb78xcn45rgskpeyiwuza',
                 'ndam75fnjn7cdues7ivi7ccfq8f534quieaccqibrvuzhqxa',
                 'ndaekyty73hd56gynsswuj5q9em68tp6ed5v7tpft872hvuc']
    node_list_index = 0

    # connect to MongoDB
    # SECURITY(review): credentials are hard-coded in the connection string.
    # Move them to an environment variable or config file before this script
    # is shared or deployed.
    client = MongoClient('mongodb://admin:0n13r0Nd3v@34.228.30.229:27017')
    if verboseFlag:
        print(f'client = {client}')
        print(f'db names = {client.list_database_names()}')
    db = client['ndau_dashboard']
    if verboseFlag:
        print(f'db = {db}')
        print(f'collection names = {db.list_collection_names()}')
    collection = db['accountaddresses']
    if verboseFlag:
        print(f'collection = {collection}')
    first = collection.find_one()
    if verboseFlag:
        print(f'item = {first}')

    # read the whole spreadsheet into memory so rows can be edited in place
    with open(args.input, newline='') as infile:
        lines = list(csv.reader(infile))
    if verboseFlag:
        print(f"addresses = {first['addresses']}")

    # loop through MongoDB records
    for record in collection.find():
        if verboseFlag:
            print(f'record = {record}')
        addr_index = 0
        addrs = record['addresses']
        first_line = None  # first spreadsheet row matching this record, if any
        # loop through rows in spreadsheet
        for line in lines:
            # if userID in Mongo = userID in spreadsheet row, insert address
            # into spreadsheet row
            if record['userId'] == line[8]:
                if addr_index == 0:
                    first_line = line
                # if we don't have enough addresses in Mongo for this userID,
                # print a message
                if addr_index >= len(addrs):
                    print(f'addr mismatch, num in Mongo: {len(addrs)}, num in CSV: {addr_index}')
                else:
                    line[3] = addrs[addr_index]
                addr_index += 1
                # if this row has data in it, assign a node address to this
                # account in round robin
                if line[13] != '':
                    line[12] = node_list[node_list_index]
                    node_list_index = (node_list_index + 1) % len(node_list)
        # if we have too many addresses in Mongo for this userID, print a
        # message. This happens on occasion because early on we generated too
        # many addresses for each userID
        if addr_index != len(addrs):
            print(f'addr mismatch, num in Mongo: {len(addrs)}, num in CSV: {addr_index}')
            if first_line is not None:
                first_line[11] = f'Mongo: {len(addrs)}'

    with open(args.output, 'w', newline='') as outfile:
        csv.writer(outfile).writerows(lines)
    print('All done.')
# Script entry point: run the merge only when executed directly.
if __name__ == '__main__':
    main()
| nilq/baby-python | python |
# uncompyle6 version 3.2.0
# Python bytecode 2.4 (62061)
# Decompiled from: Python 2.7.14 (v2.7.14:84471935ed, Sep 16 2017, 20:19:30) [MSC v.1500 32 bit (Intel)]
# Embedded file name: pirates.login.AvatarChooser
import math, time, os, random, sys
from pandac.PandaModules import *
from direct.gui.DirectGui import *
from direct.task.Task import Task
from direct.directnotify.DirectNotifyGlobal import directNotify
from direct.showbase.DirectObject import DirectObject
from direct.fsm.StateData import StateData
from direct.fsm.ClassicFSM import ClassicFSM
from direct.fsm.State import State
from direct.gui import DirectGuiGlobals
from direct.interval.IntervalGlobal import *
from direct.showbase.PythonUtil import quickProfile
from otp.otpgui import OTPDialog
from otp.otpbase import OTPGlobals
from pirates.audio import SoundGlobals
from pirates.piratesgui.GameOptions import GameOptions
from pirates.piratesbase import PLocalizer
from pirates.piratesgui import PiratesGuiGlobals
from pirates.piratesgui import PDialog
from pirates.piratesgui.BorderFrame import BorderFrame
from pirates.piratesgui.ShardPanel import ShardPanel
from pirates.piratesbase import PiratesGlobals
from pirates.piratesbase import TimeOfDayManager
from pirates.piratesbase import TODGlobals
from pirates.pirate import Pirate
from pirates.seapatch.SeaPatch import SeaPatch
from pirates.seapatch.Reflection import Reflection
from pirates.makeapirate import NameGUI
from pirates.piratesgui import NonPayerPanel
from pirates.piratesgui import TrialNonPayerPanel
from pirates.piratesbase import UserFunnel
from pirates.pirate import Human
from pirates.pirate import HumanDNA
from pirates.audio import SoundGlobals
from pirates.audio.SoundGlobals import loadSfx
# Name-wish review outcomes used by the avatar naming flow.
APPROVED = 1
DENIED = 2
class AvatarChooser(DirectObject, StateData):
__module__ = __name__
notify = directNotify.newCategory('AvatarChooser')
def __init__(self, parentFSM, doneEvent):
    """Initialize chooser state; no scene or GUI is built until load().

    parentFSM -- owning FSM (part of the StateData call convention; unused here)
    doneEvent -- messenger event fired with a doneStatus dict when done
    """
    StateData.__init__(self, doneEvent)
    self.choice = (0, 0)  # (subscription id, slot) of the highlighted avatar
    self.gameOptions = None
    self.av = None  # Pirate currently previewed in the 3D scene
    self.deleteConfirmDialog = None
    self.shareConfirmDialog = None
    self.firstAddDialog = None
    self.notQueueCompleteDialog = None
    self.notDownloadDialog = None
    self.notifications = {}
    self.subFrames = {}  # subscription id -> DirectFrame holding its buttons
    self.subAvButtons = {}  # subscription id -> list of avatar slot buttons
    self.handleDialogOnScreen = 0
    self.subIds = base.cr.avList.keys()
    # Velvet-rope (trial) accounts must not display member-only cosmetics:
    # zero out all tattoo and jewelry DNA before any avatar is shown.
    # (Integer entries in avList are slot placeholders, not avatars.)
    if base.cr.isPaid() == OTPGlobals.AccessVelvetRope:
        for subId in base.cr.avList:
            avSet = base.cr.avList[subId]
            for avatar in avSet:
                if type(avatar) != int:
                    avatar.dna.setTattooChest(0, 0, 0, 0, 0, 0)
                    avatar.dna.setTattooZone2(0, 0, 0, 0, 0, 0)
                    avatar.dna.setTattooZone3(0, 0, 0, 0, 0, 0)
                    avatar.dna.setTattooZone4(0, 0, 0, 0, 0, 0)
                    avatar.dna.setTattooZone5(0, 0, 0, 0, 0, 0)
                    avatar.dna.setTattooZone6(0, 0, 0, 0, 0, 0)
                    avatar.dna.setTattooZone7(0, 0, 0, 0, 0, 0)
                    avatar.dna.setJewelryZone1(0, 0, 0)
                    avatar.dna.setJewelryZone2(0, 0, 0)
                    avatar.dna.setJewelryZone3(0, 0, 0)
                    avatar.dna.setJewelryZone4(0, 0, 0)
                    avatar.dna.setJewelryZone5(0, 0, 0)
                    avatar.dna.setJewelryZone6(0, 0, 0)
                    avatar.dna.setJewelryZone7(0, 0, 0)
                    avatar.dna.setJewelryZone8(0, 0, 0)
    self.subIds.sort()  # NOTE(review): list-returning keys() implies Python 2
    self.currentSubIndex = 0
    self.currentSubId = 0
    self.nonPayerPanel = None
    self.trialNonPayerPanel = None
    self.httpClient = None
    self.loginTask = None
    self.loginStatusRequest = None
    self.queueTask = None
    self.queueRequest = None
    self.queueComplete = False  # set once the login queue lets us through
    self.allPhasesComplete = False  # set once all download phases finish
    self.lastMousePos = (0, 0)
    base.avc = self  # global debug handle to this chooser
    self.forceQueueStr = ''
    self.finalizeConfirmDialog = None
    self.deniedConfirmDialog = None
    return
def enter(self):
    """Activate the chooser: show the scene, start ambient audio and tasks,
    and hook input events. Triggers load() on first use."""
    base.options.display.restrictToEmbedded(True)
    taskMgr.setupTaskChain('phasePost', threadPriority=TPHigh)
    if self.isLoaded == 0:
        self.load()
    base.disableMouse()
    self.quitButton.show()
    if base.cr.loginInterface.supportsRelogin():
        self.logoutButton.show()
    self.scene.reparentTo(render)
    camera.reparentTo(render)
    camera.setPosHpr(-29.0187, 37.0125, 24.75, 4.09, 1.0, 0.0)
    # Start on the subscription the player actually logged in with.
    loggedInSubId = base.cr.accountDetailRecord.playerAccountId
    if loggedInSubId in self.subIds:
        index = self.subIds.index(loggedInSubId)
    else:
        index = 0
    self.showSub(index)
    if self.ship:
        taskMgr.add(self.__shipRockTask, 'avatarChooserShipRockTask')
    base.transitions.fadeScreen(1)
    base.transitions.fadeIn(3)
    # Extra renderFrame calls flush the pipeline so the fade starts cleanly.
    base.graphicsEngine.renderFrame()
    base.graphicsEngine.renderFrame()
    base.cr.loadingScreen.hide()
    globalClock.tick()
    base.graphicsEngine.renderFrame()
    base.playSfx(self.oceanSfx, looping=1, volume=0.6)
    base.playSfx(self.woodCreaksSfx, looping=1)
    base.musicMgr.request(SoundGlobals.MUSIC_AVATAR_CHOOSER, volume=0.4, priority=-2)
    # Mouse buttons drive avatar rotation via the mouse-read task.
    self.accept('mouse1', self._startMouseReadTask)
    self.accept('mouse1-up', self._stopMouseReadTask)
    self.accept('mouse3', self._startMouseReadTask)
    self.accept('mouse3-up', self._stopMouseReadTask)
    if not self.disableOptions:
        self.accept(PiratesGlobals.OptionsHotkey, self.__handleOptions)
    # React immediately if the download already finished, else wait for it.
    if base.launcher.getPhaseComplete(5):
        self.__allPhasesComplete()
    else:
        self.accept('launcherAllPhasesComplete', self.__allPhasesComplete)
    self._startLoginStatusTask()
    base.options.savePossibleWorking(base.options)
    if launcher.getValue('GAME_SHOW_FIRSTADD'):
        self.popupTrialPanel()
def exit(self):
    """Deactivate the chooser: stop audio and tasks, destroy transient
    dialogs, and unhook input events. No-op when not loaded."""
    if self.isLoaded == 0:
        return
    base.musicMgr.requestFadeOut(SoundGlobals.MUSIC_AVATAR_CHOOSER)
    self.oceanSfx.stop()
    self.woodCreaksSfx.stop()
    if self.deleteConfirmDialog:
        self.deleteConfirmDialog.destroy()
        self.deleteConfirmDialog = None
    if self.shareConfirmDialog:
        self.shareConfirmDialog.destroy()
        self.shareConfirmDialog = None
    if self.notDownloadDialog:
        self.notDownloadDialog.destroy()
        self.notDownloadDialog = None
    self.avatarListFrame.hide()
    self.highlightFrame.hide()
    self.quitFrame.hide()
    self.renameButton.hide()
    self.scene.detachNode()
    if self.ship:
        taskMgr.remove('avatarChooserShipRockTask')
    self.ignore('mouse1')
    self.ignore('mouse1-up')
    self.ignore('mouse3')
    self.ignore('mouse3-up')
    self.ignore('f7')
    self.ignore('launcherPercentPhaseComplete')
    self.ignore('launcherAllPhasesComplete')
    self._stopMouseReadTask()
    self._stopQueueTask()
    base.options.saveWorking()
    self.ignoreAll()
    if hasattr(self, 'fadeInterval'):
        self.fadeInterval.pause()
        del self.fadeInterval
    if hasattr(self, 'fadeFrame'):
        self.fadeFrame.destroy()
    # Only release the embedded-display restriction once an avatar was chosen.
    if self.doneStatus and self.doneStatus['mode'] == 'chose':
        base.options.display.restrictToEmbedded(False)
    taskMgr.setupTaskChain('phasePost', threadPriority=TPLow)
    return
def load(self):
    """Build the whole chooser: 3D backdrop (sky, piers, water, ship) and all
    GUI panels (avatar roster, highlight controls, quit/options panel,
    shard panel). Idempotent via self.isLoaded.

    NOTE(review): this file is decompiled output with indentation stripped;
    nesting below is a best-effort reconstruction -- verify ambiguous spots
    marked inline.
    """
    self.notify.debug('isPaid: %s' % str(base.cr.isPaid()))
    if self.isLoaded == 1:
        return
    self.disableOptions = base.config.GetBool('disable-pirates-options', 0) or base.config.GetBool('location-kiosk', 0)
    base.musicMgr.load('avchooser-theme')
    self.model = loader.loadModel('models/gui/avatar_chooser_rope')
    charGui = loader.loadModel('models/gui/char_gui')
    self.oceanSfx = loadSfx(SoundGlobals.SFX_FX_OCEAN_LOOP)
    self.woodCreaksSfx = loadSfx(SoundGlobals.SFX_SHIP_RIGGING)
    self.exclam = charGui.find('**/chargui_exclamation_mark')
    # --- 3D backdrop: time-of-day sky, two piers, sea patch ---
    self.scene = NodePath('AvatarChooserScene')
    self.todManager = TimeOfDayManager.TimeOfDayManager()
    self.todManager.request('EnvironmentTOD')
    self.todManager.setEnvironment(TODGlobals.ENV_AVATARCHOOSER, {})
    self.todManager.doEndTimeOfDay()
    self.todManager.skyGroup.setSunTrueAngle(Vec3(260, 0, 15))
    self.todManager.skyGroup.setSunLock(1)
    self.todManager.skyGroup.dirLightSun.node().setColor(Vec4(0.9, 0.7, 0.8, 1))
    pier = loader.loadModel('models/islands/pier_port_royal_2deck')
    pier.setPosHpr(-222.23, 360.08, 15.06, 251.57, 0.0, 0.0)
    pier.flattenStrong()
    pier.reparentTo(self.scene)
    pier2 = loader.loadModel('models/islands/pier_port_royal_1deck')
    pier2.setPosHpr(-35.0, 83.27, 19.26, 274.09, 0.0, 0.0)
    pier2.setScale(0.4, 0.3, 0.4)
    pier2.flattenStrong()
    pier2.reparentTo(self.scene)
    self.water = SeaPatch(render, Reflection.getGlobalReflection(), todMgr=self.todManager)
    self.water.loadSeaPatchFile('out.spf')
    self.water.updateWater(2)
    self.water.ignore('grid-detail-changed')
    self.ship = None
    # Ship assets live in download phase 3; only show one when available.
    if base.launcher.getPhaseComplete(3):
        from pirates.ship import ShipGlobals
        self.ship = base.shipFactory.getShip(ShipGlobals.INTERCEPTORL1)
        self.ship.modelRoot.setPosHpr(140.86, 538.97, -3.62, -133.04, 0.0, 0.0)
        self.ship.modelRoot.reparentTo(self.scene)
        self.shipRoot = self.ship.modelRoot
        self.ship.playIdle()
        lodNode = self.ship.lod.node()
        self.ship.lod.node().forceSwitch(0)
    # --- Left-hand avatar roster (rope art + per-subscription frames) ---
    self.avatarListFrame = DirectFrame(parent=base.a2dTopLeft, relief=None)
    self.ropeFrame = DirectFrame(parent=self.avatarListFrame, relief=None, image=self.model.find('**/avatar_c_A_rope'), image_scale=0.36, pos=(0, 0, -0.015))
    self.subFrame = BorderFrame(parent=self.avatarListFrame, frameSize=(-0.25, 0.25, -0.04, 0.09), borderScale=0.2, pos=(0, 0, -0.16), modelName='general_frame_f')
    triangleGui = loader.loadModel('models/gui/triangle')
    self.subLabel = DirectLabel(parent=self.subFrame, relief=None, text='', text_scale=0.045, text_fg=(1, 0.9, 0.7, 0.9), text_pos=(0, 0.035), textMayChange=1)
    # Prev/next subscription arrows only exist for linked accounts.
    if base.config.GetBool('allow-linked-accounts', 0):
        self.nextSubButton = DirectButton(parent=self.subFrame, relief=None, image=(triangleGui.find('**/triangle'), triangleGui.find('**/triangle_down'), triangleGui.find('**/triangle_over')), pos=(0.31, 0, 0.025), scale=0.08, command=self.changeSub, extraArgs=[1])
        self.prevSubButton = DirectButton(parent=self.subFrame, relief=None, image=(triangleGui.find('**/triangle'), triangleGui.find('**/triangle_down'), triangleGui.find('**/triangle_over')), hpr=(0, 0, 180), pos=(-0.31, 0, 0.025), scale=0.08, command=self.changeSub, extraArgs=[-1])
    self.__createAvatarButtons()
    self.ropeFrame.reparentTo(self.avatarListFrame)
    self.subFrame.reparentTo(self.avatarListFrame)
    self.versionLabel = DirectLabel(parent=base.a2dTopRight, relief=None, text_scale=0.04, text_fg=(1, 1, 1, 0.5), text='%s\n%s' % (base.cr.getServerVersion(), base.win.getPipe().getInterfaceName()), text_align=TextNode.ARight, pos=(-0.05, 0, -0.05))
    # --- Bottom-center highlight frame: share / play / delete ---
    self.highlightFrame = DirectFrame(parent=base.a2dBottomCenter, relief=None, image=self.model.find('**/avatar_c_B_frame'), image_scale=0.37, pos=(0, 0, 0.25), scale=0.9)
    self.highlightFrame.hide()
    if base.config.GetBool('allow-linked-accounts', 0):
        self.shareButton = DirectButton(parent=self.highlightFrame, relief=None, text_scale=0.045, text_fg=(1, 0.9, 0.7, 0.9), text_shadow=PiratesGuiGlobals.TextShadow, text=('', '', PLocalizer.AvatarChooserShared, ''), image=(self.model.find('**/avatar_c_B_unlock'), self.model.find('**/avatar_c_B_unlock'), self.model.find('**/avatar_c_B_unlock_over')), image_scale=0.37, text_pos=(0, -0.1), pos=(-0.51, 0, -0.08), scale=1.3, command=self.__handleShare)
    # Play button starts grey/disabled; its label tracks download/queue state.
    self.playButton = DirectButton(parent=self.highlightFrame, relief=None, text_scale=0.05, text_fg=(0.7, 0.7, 0.7, 0.7), text_shadow=PiratesGuiGlobals.TextShadow, text='\x01smallCaps\x01%s\x02' % PLocalizer.AvatarChooserLoading, image=(self.model.find('**/avatar_c_B_bottom'), self.model.find('**/avatar_c_B_bottom'), self.model.find('**/avatar_c_B_bottom_over')), image_scale=0.37, text_pos=(0, -0.015), pos=(0, 0, -0.08), scale=1.7, color=(0.7, 0.7, 0.7, 0.7), state=DGG.DISABLED, command=self.__handlePlay)
    if not self.allPhasesComplete:
        self.playButton['text'] = '\x01smallCaps\x01%s\x02' % PLocalizer.AvatarChooserLoading
    else:
        if not self.queueComplete:
            self.playButton['text'] = '\x01smallCaps\x01%s\x02' % PLocalizer.AvatarChooserInQueue
        else:
            self.playButton['text'] = '\x01smallCaps\x01%s\x02' % PLocalizer.AvatarChooserPlay
            # NOTE(review): indentation lost in decompile -- un-greying is
            # assumed to apply only when the button reads "Play".
            self.playButton.setColor(1, 1, 1, 1)
            self.playButton['text_fg'] = (1.0, 0.9, 0.7, 0.9)
    self.accept('enter', self.__handleEnter)
    self.accept('arrow_up', self.__handleArrowUp)
    self.accept('arrow_down', self.__handleArrowDown)
    self.deleteButton = DirectButton(parent=self.highlightFrame, relief=None, text_scale=0.045, text_fg=(1, 0.9, 0.7, 0.9), text_shadow=PiratesGuiGlobals.TextShadow, text=('', '', PLocalizer.AvatarChooserDelete, ''), image=(self.model.find('**/avatar_c_B_delete'), self.model.find('**/avatar_c_B_delete'), self.model.find('**/avatar_c_B_delete_over')), image_scale=0.37, text_pos=(0, -0.1), pos=(0.51, 0, -0.08), scale=1.3, command=self.__handleDelete)
    # --- Bottom-right panel: logout / options / upgrade / quit ---
    self.quitFrame = DirectFrame(parent=base.a2dBottomRight, relief=None, image=self.model.find('**/avatar_c_C_back'), image_scale=0.37, pos=(-0.4, 0, 0.21), scale=0.9)
    self.quitFrameForeground = DirectFrame(parent=self.quitFrame, relief=None, image=self.model.find('**/avatar_c_C_frame'), image_scale=0.37, pos=(0, 0, 0))
    self.logoutButton = DirectButton(parent=self.quitFrame, relief=None, text_scale=0.045, text_fg=(1, 0.9, 0.7, 0.9), text_shadow=PiratesGuiGlobals.TextShadow, text=PLocalizer.OptionsPageLogout, image=(self.model.find('**/avatar_c_C_box'), self.model.find('**/avatar_c_C_box'), self.model.find('**/avatar_c_C_box_over')), image_scale=0.37, text_pos=(0, -0.015), pos=(0, 0, 0.2), command=self.__handleLogoutWithoutConfirm)
    self.logoutButton.hide()
    if self.disableOptions:
        optionsState = DGG.DISABLED
    else:
        optionsState = DGG.NORMAL
    self.optionsButton = DirectButton(parent=self.quitFrame, relief=None, text_scale=0.05, text_fg=(1, 0.9, 0.7, 0.9), text_shadow=PiratesGuiGlobals.TextShadow, text='\x01smallCaps\x01%s\x02' % PLocalizer.AvatarChooserOptions, image=(self.model.find('**/avatar_c_C_box'), self.model.find('**/avatar_c_C_box'), self.model.find('**/avatar_c_C_box_over')), image_scale=0.37, text_pos=(0, -0.015), pos=(0, 0, 0.21), command=self.__handleOptions, state=optionsState)
    if self.disableOptions:
        self.optionsButton.setColorScale(Vec4(0.7, 0.7, 0.7, 0.7))
    self.upgradeButton = DirectButton(parent=self.quitFrame, relief=None, text_scale=0.05, text_fg=(1, 0.9, 0.7, 0.9), text_shadow=PiratesGuiGlobals.TextShadow, text='\x01smallCaps\x01%s\x02' % PLocalizer.AvatarChooserUpgrade, image=(self.model.find('**/avatar_c_C_box'), self.model.find('**/avatar_c_C_box'), self.model.find('**/avatar_c_C_box_over')), image_scale=0.37, text_pos=(0, -0.015), pos=(0, 0, 0.07), command=self.__handleUpgrade)
    # Fully-paid accounts don't need the upgrade button; move options down.
    if base.cr.isPaid() == OTPGlobals.AccessFull:
        self.upgradeButton.hide()
        self.optionsButton.setPos(0, 0, 0.07)
    self.disableQuit = base.config.GetBool('location-kiosk', 0)
    if self.disableQuit:
        quitState = DGG.DISABLED
    else:
        quitState = DGG.NORMAL
    self.quitButton = DirectButton(parent=self.quitFrame, state=quitState, relief=None, text_scale=0.05, text_fg=(1, 0.9, 0.7, 0.9), text_shadow=PiratesGuiGlobals.TextShadow, text='\x01smallCaps\x01%s\x02' % PLocalizer.AvatarChooserQuit, image=(self.model.find('**/avatar_c_C_box'), self.model.find('**/avatar_c_C_box'), self.model.find('**/avatar_c_C_box_over')), image_scale=0.37, text_pos=(0, -0.015), pos=(0, 0, -0.07), command=self.__handleQuit)
    if self.disableQuit:
        self.quitButton.setColorScale(Vec4(0.7, 0.7, 0.7, 0.7))
    self.renameButton = DirectButton(parent=base.a2dTopRight, relief=None, text_scale=0.05, text_fg=(1, 0.9, 0.7, 0.9), text_shadow=PiratesGuiGlobals.TextShadow, text='\x01smallCaps\x01%s\x02' % 'rename', image=(self.model.find('**/avatar_c_C_box'), self.model.find('**/avatar_c_C_box'), self.model.find('**/avatar_c_C_box_over')), image_scale=0.37, text_pos=(0, -0.015), pos=(-0.3, 0, -0.2), command=self.__handleRename)

    def shardSelected(shardId):
        # Remember the shard the player picked in the shard panel.
        base.cr.defaultShard = shardId

    self.shardPanel = ShardPanel(base.a2dBottomLeft, gear=NodePath('gear'), inverted=True, relief=None, scale=0.85, hpr=Vec3(0, 0, 180), pos=Vec3(0.415, 0, 0.02), uppos=Vec3(0.415, 0, 0.02), downpos=Vec3(0.415, 0, 0.6), shardSelected=shardSelected, buttonFont=PiratesGlobals.getInterfaceFont())
    self.shardPanel.setScissor(self.highlightFrame, Point3(-20, 0, -0.18), Point3(20, 0, 1.0))
    self.shardPanelBottom = loader.loadModel('models/gui/general_frame_bottom')
    self.shardPanelBottom.setPos(0.42, 0, 0.095)
    self.shardPanelBottom.setScale(0.273)
    self.shardPanelBottom.reparentTo(base.a2dBottomLeft)
    self.logo = loader.loadModel('models/gui/potcLogo')
    self.logo.reparentTo(self.avatarListFrame)
    self.logo.setPos(0, 0, 0.1)
    self.logo.setScale(0.66)
    charGui.removeNode()
    return
def __createAvatarButtons(self):
    """(Re)build one column of avatar slot buttons per subscription id from
    base.cr.avList; destroys any buttons from a previous build first."""
    subCard = loader.loadModel('models/gui/toplevel_gui')
    for subFrame in self.subFrames.values():
        subFrame.destroy()
    for buttonList in self.subAvButtons.values():
        for button in buttonList:
            button.destroy()
    self.subFrames = {}
    self.subAvButtons = {}
    i = 0
    for subId, avData in base.cr.avList.items():
        subFrame = DirectFrame(parent=self.avatarListFrame, relief=None, pos=(0, 0, -0.3))
        self.subFrames[subId] = subFrame
        avatarButtons = []
        self.subAvButtons[subId] = avatarButtons
        spacing = -0.1  # vertical distance between consecutive slot buttons
        for av, slot in zip(avData, range(len(avData))):
            x = 0.0
            imageColor = Vec4(1, 1, 1, 1)
            textScale = 0.045
            textFg = (1, 0.9, 0.7, 0.9)
            # Pick top/middle/bottom rope art depending on slot position.
            if slot == 0:
                z = -0.08
                textPos = (0, -0.02)
                image = (
                    self.model.find('**/avatar_c_A_top'), self.model.find('**/avatar_c_A_top'), self.model.find('**/avatar_c_A_top_over'), self.model.find('**/avatar_c_A_top'))
            else:
                if slot == len(avData) - 1:
                    z = slot * spacing - 0.125
                    textPos = (0, 0.033)
                    image = (self.model.find('**/avatar_c_A_bottom'), self.model.find('**/avatar_c_A_bottom'), self.model.find('**/avatar_c_A_bottom_over'), self.model.find('**/avatar_c_A_bottom'))
                else:
                    z = slot * spacing - 0.08
                    textPos = (0, -0.015)
                    image = (self.model.find('**/avatar_c_A_middle'), self.model.find('**/avatar_c_A_middle'), self.model.find('**/avatar_c_A_middle_over'), self.model.find('**/avatar_c_A_middle'))
            # Slot contents decide the label, click command, and enabled state.
            # (-3 in avData appears to gate direct creation vs. the upsell
            # browser -- semantics of the sentinel not visible here.)
            if av == OTPGlobals.AvatarSlotAvailable:
                text = '\x01smallCaps\x01%s\x02' % PLocalizer.AvatarChooserCreate
                if -3 in avData:
                    command = self.__handleCreate
                else:
                    command = self.popupFeatureBrowser
                state = DGG.NORMAL
            else:
                if av == OTPGlobals.AvatarPendingCreate:
                    text = PLocalizer.AvatarChooserUnderConstruction
                    command = None
                    state = DGG.DISABLED
                    imageColor = Vec4(0.7, 0.7, 0.7, 1)
                else:
                    if av == OTPGlobals.AvatarSlotUnavailable:
                        text = '\x01smallCaps\x01%s\x02' % PLocalizer.AvatarChooserCreate
                        if -3 in avData:
                            command = self.__handleCreate
                        else:
                            command = self.popupFeatureBrowser
                        state = DGG.NORMAL
                    else:
                        # A real avatar: label the button with its DNA name.
                        avName = av.dna.getDNAName()
                        text = avName
                        command = self.__handleHighlight
                        state = DGG.NORMAL
            dib = DirectButton(relief=None, parent=subFrame, state=state, text_fg=textFg, text_scale=textScale, text_shadow=PiratesGuiGlobals.TextShadow, text=text, image=image, image_color=imageColor, image_scale=0.37, text_pos=textPos, pos=(x, 0, z), command=command, extraArgs=[subId, slot])
            avatarButtons.append(dib)
        i += 1
    subCard.removeNode()
    self.isLoaded = 1
    # Creation stays locked until the login queue lets the player through.
    if self.queueComplete == False:
        self.__deactivateCreateButtons()
    return
def unload(self):
    """Fully tear down everything load() built; the chooser may be
    load()ed again afterwards. No-op when not loaded."""
    if self.isLoaded == 0:
        return
    loader.unloadSfx(self.oceanSfx)
    loader.unloadSfx(self.woodCreaksSfx)
    del self.oceanSfx
    del self.woodCreaksSfx
    loader.unloadModel(self.model)
    self.model.removeNode()
    del self.model
    self.todManager.skyGroup.setSunLock(0)
    self.logo.removeNode()
    if self.av:
        self.av.delete()
        del self.av
    if self.ship:
        self.ship.destroy()
        self.ship = None
        taskMgr.remove('avatarChooserShipRockTask')
        self.shipRoot = None
    self.water.delete()
    del self.water
    self.scene.removeNode()
    del self.scene
    self.todManager.disable()
    self.todManager.delete()
    del self.todManager
    cleanupDialog('globalDialog')
    for subFrame in self.subFrames.values():
        subFrame.destroy()
    for buttonList in self.subAvButtons.values():
        for button in buttonList:
            button.destroy()
    del self.subFrames
    del self.subAvButtons
    self.avatarListFrame.destroy()
    self.highlightFrame.destroy()
    self.quitFrame.destroy()
    self.renameButton.destroy()
    if self.nonPayerPanel:
        self.nonPayerPanel.destroy()
    del self.nonPayerPanel
    if self.trialNonPayerPanel:
        self.trialNonPayerPanel.destroy()
    del self.trialNonPayerPanel
    # Preserve any in-progress option edits back onto the global options.
    if self.gameOptions is not None:
        base.options = self.gameOptions.options
        self.gameOptions.destroy()
        del self.gameOptions
    self.versionLabel.destroy()
    del self.versionLabel
    self.shardPanel.destroy()
    del self.shardPanel
    self.shardPanelBottom.removeNode()
    self.ignoreAll()
    self.isLoaded = 0
    if self.finalizeConfirmDialog:
        self.finalizeConfirmDialog.destroy()
        self.finalizeConfirmDialog = None
    if self.deniedConfirmDialog:
        self.deniedConfirmDialog.destroy()
        self.deniedConfirmDialog = None
    return
def getChoice(self):
    """Return the (subscription id, slot) tuple last highlighted by the player."""
    return self.choice
def __showHighlightedAvatar(self):
    """Instantiate and display the currently highlighted pirate, then sync
    the share/play/delete/rename controls to that avatar's state.

    FIX(decompile): the decompiled source contained an invalid dangling
    `else:` and a `(cond) and 0` artifact around the lastLogout handling;
    that tail has been reconstructed (see inline note) -- verify against
    original game behavior where possible.
    """
    self.notify.debugCall()
    subId, slot = self.choice
    potAv = base.cr.avList[subId][slot]
    # Replace any previously previewed pirate and stale dialogs.
    if self.av:
        self.av.cleanupHuman()
        self.av.delete()
    if self.deleteConfirmDialog:
        self.deleteConfirmDialog.destroy()
        self.deleteConfirmDialog = None
    if self.shareConfirmDialog:
        self.shareConfirmDialog.destroy()
        self.shareConfirmDialog = None
    self.av = Pirate.Pirate()
    self.av.setDNAString(potAv.dna)
    self.av.generateHuman(self.av.style.gender, base.cr.humanHigh)
    self.av.setPosHpr(-29.69, 46.35, 22.05, 180.0, 0.0, 0.0)
    self.av.reparentTo(self.scene)
    self.av.bindAnim('idle')
    self.av.loop('idle')
    self.av.useLOD(2000)
    self.highlightFrame.show()
    if base.config.GetBool('allow-linked-accounts', 0):
        # Lock/share toggle art reflects the avatar's current shared state.
        if potAv.shared:
            self.shareButton['image'] = (self.model.find('**/avatar_c_B_unlock'), self.model.find('**/avatar_c_B_unlock'), self.model.find('**/avatar_c_B_unlock_over'))
            self.shareButton['text'] = ('', '', PLocalizer.AvatarChooserLocked, '')
        else:
            self.shareButton['image'] = (self.model.find('**/avatar_c_B_lock'), self.model.find('**/avatar_c_B_lock'), self.model.find('**/avatar_c_B_lock_over'))
            self.shareButton['text'] = ('', '', PLocalizer.AvatarChooserShared, '')
    # Delete/share are only usable on offline avatars the account owns
    # (or always, when linked accounts are disabled).
    if not potAv.online:
        if potAv.creator or not base.config.GetBool('allow-linked-accounts', 0):
            self.deleteButton['state'] = DGG.NORMAL
            if base.config.GetBool('allow-linked-accounts', 0):
                self.shareButton['state'] = DGG.NORMAL
        else:
            self.deleteButton['state'] = DGG.DISABLED
            if base.config.GetBool('allow-linked-accounts', 0):
                self.shareButton['state'] = DGG.DISABLED
    # Play button label/state: online elsewhere > still downloading >
    # queued > ready to play; locked avatars of other linked owners stay off.
    if potAv.online:
        self.playButton['text'] = PLocalizer.AvatarChooserAlreadyOnline
        self.playButton['state'] = DGG.DISABLED
    elif potAv.shared or potAv.creator or not base.config.GetBool('allow-linked-accounts', 0):
        if not self.allPhasesComplete:
            self.playButton['text'] = '\x01smallCaps\x01%s\x02' % PLocalizer.AvatarChooserLoading
        elif not self.queueComplete:
            self.playButton['text'] = '\x01smallCaps\x01%s\x02' % PLocalizer.AvatarChooserInQueue
        else:
            self.playButton['text'] = '\x01smallCaps\x01%s\x02' % PLocalizer.AvatarChooserPlay
        self.playButton['state'] = DGG.NORMAL
    else:
        self.playButton['text'] = '\x01smallCaps\x01%s\x02' % PLocalizer.AvatarChooserLockedByOwner
        self.playButton['state'] = DGG.DISABLED
    self.renameButton.hide()
    # Name-wish state drives the approve/deny dialogs and the rename button.
    if potAv.wishState == 'APPROVED':
        self.blockInput()
        self.finalizeConfirmDialog = PDialog.PDialog(text=PLocalizer.AvatarChooserNameAccepted, style=OTPDialog.Acknowledge, command=self.__handleFinalize)
    elif potAv.wishState == 'DENIED' or potAv.wishState == 'OPEN':
        if self.notifications.get(slot, 0):
            self.blockInput()
            if not self.handleDialogOnScreen:
                self.notify.info('deniedConfirmDialog on screen')
                self.deniedConfirmDialog = PDialog.PDialog(text=PLocalizer.AvatarChooserPleaseRename, style=OTPDialog.Acknowledge, command=self.__handleDenied)
                self.handleDialogOnScreen = 1
        self.renameButton.show()
    # Reconstructed from the decompiler artifact: an avatar counts as
    # "played recently" only if it logged out within the last hour;
    # otherwise fall back to shard 0. TODO(review): confirm reconstruction.
    if not potAv.lastLogout or int(time.time() / 60) - potAv.lastLogout > 60:
        potAv.defaultShard = 0
        base.cr.avPlayedRecently = False
    else:
        base.cr.avPlayedRecently = True
    if base.cr.defaultShard == 0:
        self.shardPanel['preferredShard'] = potAv.defaultShard
    return
def __hideHighlightedAvatar(self):
    """Remove the previewed pirate (if any) and hide the highlight controls."""
    preview = self.av
    if preview:
        preview.delete()
        self.av = None
    for gui in (self.highlightFrame, self.renameButton):
        gui.hide()
    return
def __handleRename(self):
    # Rename button: drop into the in-place name-entry mode.
    self.enterNameMode()
def __handleHighlight(self, subId, slot):
    """Select the avatar at (subId, slot): retint the roster buttons so the
    selection stands out, then bring up the 3D preview of that pirate."""
    self.choice = (subId, slot)
    unavailableColor = (0.5, 0.5, 0.5, 1)
    normalColor = (1, 0.9, 0.7, 0.9)
    selectedColor = (1, 1, 1, 1)
    buttons = self.subAvButtons[subId]
    for btn in buttons:
        if btn['text'] == PLocalizer.AvatarChooserSlotUnavailable:
            btn['text_fg'] = unavailableColor
        else:
            btn['text_fg'] = normalColor
    buttons[slot]['text_fg'] = selectedColor
    self.__showHighlightedAvatar()
def __rotateHighlightedAvatar(self, val):
    # Spin the previewed pirate to heading `val` (degrees), if one is shown.
    if self.av:
        self.av.setH(val)
def __handleArrowUp(self):
    """Keyboard navigation: move the highlight to the previous real avatar
    slot, wrapping from the top of the list to the bottom.

    FIX(decompile): the decompiler emitted the invalid sequence
    `slot = slot == 0 and numButtons - 1` followed by a dangling `else:`;
    reconstructed here as a wrap-around decrement.
    """
    # Ignore navigation while the options screen is up.
    if self.gameOptions is not None:
        if not self.gameOptions.isHidden():
            return
    sub = self.choice[0]
    slot = self.choice[1]
    initialSlot = slot
    if not sub:
        return
    numButtons = len(self.subAvButtons[sub])
    # Bail out unless this subscription has at least one real avatar
    # (placeholder sentinels mark empty/pending/unavailable slots).
    av = False
    for index in range(0, numButtons - 1):
        if base.cr.avList.get(sub)[index] not in (OTPGlobals.AvatarSlotUnavailable, OTPGlobals.AvatarSlotAvailable, OTPGlobals.AvatarPendingCreate):
            av = True
            break
    if not av:
        return
    # Wrap-around decrement.
    if slot == 0:
        slot = numButtons - 1
    else:
        slot = slot - 1
    # Skip past placeholder slots until a real avatar is found.
    while base.cr.avList.get(sub)[slot] in (OTPGlobals.AvatarSlotUnavailable, OTPGlobals.AvatarSlotAvailable, OTPGlobals.AvatarPendingCreate):
        if slot > 0:
            slot = slot - 1
        else:
            slot = numButtons - 1
    if self.subAvButtons[sub][slot]['state'] == DGG.NORMAL and initialSlot != slot:
        self.__handleHighlight(sub, slot)
    return
def __handleArrowDown(self):
    """Keyboard navigation: move the highlight to the next real avatar slot,
    wrapping from the bottom of the list to the top.

    FIX(decompile): the decompiler emitted the invalid sequence
    `slot = slot == numButtons - 1 and 0` followed by a dangling `else:`;
    reconstructed here as a wrap-around increment.
    """
    # Ignore navigation while the options screen is up.
    if self.gameOptions is not None:
        if not self.gameOptions.isHidden():
            return
    sub = self.choice[0]
    slot = self.choice[1]
    initialSlot = slot
    if not sub:
        return
    numButtons = len(self.subAvButtons[sub])
    # Bail out unless this subscription has at least one real avatar
    # (placeholder sentinels mark empty/pending/unavailable slots).
    av = False
    for index in range(0, numButtons - 1):
        if base.cr.avList.get(sub)[index] not in (OTPGlobals.AvatarSlotUnavailable, OTPGlobals.AvatarSlotAvailable, OTPGlobals.AvatarPendingCreate):
            av = True
            break
    if not av:
        return
    # Wrap-around increment.
    if slot == numButtons - 1:
        slot = 0
    else:
        slot = slot + 1
    # Skip past placeholder slots until a real avatar is found.
    while base.cr.avList.get(sub)[slot] in (OTPGlobals.AvatarSlotUnavailable, OTPGlobals.AvatarSlotAvailable, OTPGlobals.AvatarPendingCreate):
        if slot < numButtons - 1:
            slot = slot + 1
        else:
            slot = 0
    if self.subAvButtons[sub][slot]['state'] == DGG.NORMAL and initialSlot != slot:
        self.__handleHighlight(sub, slot)
    return
def __handleCreate(self, subId, slot):
    """Request a new avatar in (subId, slot); refused while still queued for login."""
    if not self.queueComplete:
        if not self.notQueueCompleteDialog:
            self.notQueueCompleteDialog = PDialog.PDialog(text=PLocalizer.AvatarChooserQueued, style=OTPDialog.Acknowledge, command=self.__handleNotQueueComplete)
        self.notQueueCompleteDialog.show()
        return
    self.choice = (
        subId, slot)
    # Block input and wait for the server to grant or reject the slot.
    self.accept('rejectAvatarSlot', self.__rejectAvatarSlot)
    self.accept('avatarSlotResponse', self.__avatarSlotResponse)
    base.cr.avatarManager.sendRequestAvatarSlot(subId, slot)
    base.cr.waitForDatabaseTimeout(requestName='WaitForCreateAvatarResponse')
    self.blockInput()
def __rejectAvatarSlot(self, reasonId, subId, slot):
    # Server denied the slot request: log the reason and restore input.
    self.notify.warning('rejectAvatarSlot: %s' % reasonId)
    self.ignore('rejectAvatarSlot')
    self.ignore('avatarSlotResponse')
    base.cr.cleanupWaitingForDatabase()
    self.allowInput()
def __avatarSlotResponse(self, subId, slot):
    """Slot granted: record funnel events and fade out into avatar creation."""
    UserFunnel.loggingAvID('write', 'NEW')
    UserFunnel.loggingSubID('write', subId)
    self.ignore('rejectAvatarSlot')
    self.ignore('avatarSlotResponse')
    base.cr.cleanupWaitingForDatabase()
    self.doneStatus = {'mode': 'create'}
    # Fire the done event only once the fade-out transition completes.
    self.acceptOnce(base.transitions.FadeOutEvent, lambda : messenger.send(self.doneEvent, [self.doneStatus]))
    base.transitions.fadeOut()
    def __handleShare(self):
        """Ask for confirmation before toggling the share/lock state of the selected avatar."""
        if self.shareConfirmDialog:
            self.shareConfirmDialog.destroy()
        subId, slot = self.choice
        potAv = base.cr.avList[subId][slot]
        name = potAv.dna.getDNAName()
        self.blockInput()
        # Dialog wording depends on the direction of the toggle (lock vs. share).
        if potAv.shared:
            self.shareConfirmDialog = PDialog.PDialog(text=PLocalizer.AvatarChooserConfirmLock % name, style=OTPDialog.TwoChoice, command=self.__handleShareConfirmation)
        else:
            self.shareConfirmDialog = PDialog.PDialog(text=PLocalizer.AvatarChooserConfirmShare % name, style=OTPDialog.TwoChoice, command=self.__handleShareConfirmation)
    def __shareAvatarResponse(self, avatarId, subId, shared):
        """Server confirmed the share toggle: update state and the button's look.

        Note the subId parameter is immediately shadowed by the stored
        self.choice; the server's value is not used.
        """
        base.cr.cleanupWaitingForDatabase()
        self.ignore('rejectShareAvatar')
        self.ignore('shareAvatarResponse')
        subId, slot = self.choice
        potAv = base.cr.avList[subId][slot]
        potAv.shared = shared
        # Flip the button art/label to advertise the *next* action (lock vs. share).
        if potAv.shared:
            self.shareButton['image'] = (
                self.model.find('**/avatar_c_B_unlock'), self.model.find('**/avatar_c_B_unlock'), self.model.find('**/avatar_c_B_unlock_over'))
            self.shareButton['text'] = (
                '', '', PLocalizer.AvatarChooserLocked, '')
        else:
            self.shareButton['image'] = (
                self.model.find('**/avatar_c_B_lock'), self.model.find('**/avatar_c_B_lock'), self.model.find('**/avatar_c_B_lock_over'))
            self.shareButton['text'] = (
                '', '', PLocalizer.AvatarChooserShared, '')
        self.allowInput()
    def __rejectShareAvatar(self, reasonId):
        """Server refused the share toggle: log and restore input."""
        self.notify.warning('rejectShareAvatar: %s' % reasonId)
        base.cr.cleanupWaitingForDatabase()
        self.ignore('rejectShareAvatar')
        self.ignore('shareAvatarResponse')
        self.allowInput()
    def __handleEnter(self):
        """Keyboard shortcut: treat Enter as a press of the Play button when it is enabled."""
        if self.playButton['state'] == DGG.NORMAL:
            self.__handlePlay()
    def __handlePlay(self):
        """Enter the game with the currently highlighted avatar.

        Early-outs: still queued (dialog), download phases incomplete (dialog),
        nothing selected (jump to avatar creation), or the selected slot holds
        no real avatar.  Otherwise starts a fade-to-black that, mid-sequence,
        sends the play request to the server.
        """
        if not self.queueComplete:
            if not self.notQueueCompleteDialog:
                self.notQueueCompleteDialog = PDialog.PDialog(text=PLocalizer.AvatarChooserQueued, style=OTPDialog.Acknowledge, command=self.__handleNotQueueComplete)
            self.notQueueCompleteDialog.show()
            return
        if not self.allPhasesComplete:
            if self.notDownloadDialog:
                self.notDownloadDialog.show()
            else:
                self.notDownloadDialog = PDialog.PDialog(text=PLocalizer.AvatarChooserNotDownload, style=OTPDialog.Acknowledge, command=self.__handleNotDownload)
                base.cr.centralLogger.writeClientEvent('User encountered phase blocker at pick-a-pirate')
                self.notDownloadDialog.show()
            return
        if (0, 0) == self.choice:
            # (0, 0) doubles as "no selection": fall through to avatar creation.
            self.__handleCreate(self.currentSubId, 0)
            return
        subId, slot = self.choice
        potAv = base.cr.avList[subId][slot]
        if potAv in (OTPGlobals.AvatarSlotUnavailable, OTPGlobals.AvatarSlotAvailable, OTPGlobals.AvatarPendingCreate):
            return
        self.notify.info('AvatarChooser: wants to play slot: %s avId: %s subId: %s' % (slot, potAv.id, subId))
        self.accept('rejectPlayAvatar', self.__rejectPlayAvatar)
        self.accept('playAvatarResponse', self.__playAvatarResponse)
        # Size the manual fade frame to the window's aspect ratio so it covers it.
        winInfo = base.win.getProperties()
        x = winInfo.getXSize()
        y = winInfo.getYSize()
        ratio = float(x) / y
        self.fadeFrame = DirectFrame(parent=aspect2dp, frameSize=(-1.0 * ratio, 1.0 * ratio, -1.0, 1.0))
        self.fadeFrame.setTransparency(1)
        # Fade to black, then fire the play request while the screen is dark.
        self.fadeInterval = Sequence(Func(self.blockInput), Func(self.fadeFrame.show), LerpColorScaleInterval(self.fadeFrame, 0.3, Vec4(0.0, 0.0, 0.0, 1.0), Vec4(0.0, 0.0, 0.0, 0.0), blendType='easeInOut'), Func(base.transitions.fadeOut, t=0), Func(base.cr.avatarManager.sendRequestPlayAvatar, potAv.id, subId), Func(base.cr.waitForDatabaseTimeout, requestName='WaitForPlayAvatarResponse'))
        self.fadeInterval.start()
        base.emoteGender = base.cr.avList[subId][slot].dna.gender
    def __rejectPlayAvatar(self, reasonId, avatarId):
        """Server refused the play request: log it and show a fatal dialog."""
        self.notify.warning('rejectPlayAvatar: %s' % reasonId)
        self.ignore('rejectPlayAvatar')
        self.ignore('playAvatarResponse')
        base.cr.cleanupWaitingForDatabase()
        self.rejectPlayAvatarDialog = PDialog.PDialog(text=PLocalizer.AvatarChooserRejectPlayAvatar, style=OTPDialog.Acknowledge, command=self.__handleRejectPlayAvatar)
    def __handleRejectPlayAvatar(self, value):
        """Acknowledge-dialog callback after a rejected play: drop back to shutdown."""
        base.cr.loginFSM.request('shutdown')
    def __playAvatarResponse(self, avatarId, subId, access, founder):
        """Server granted the play request: log funnel data and report 'chose'.

        The access/founder parameters are received but not used here —
        presumably consumed by other listeners of the same event; confirm.
        """
        subId, slot = self.choice
        self.notify.info('AvatarChooser: acquired avatar slot: %s avId: %s subId: %s' % (slot, avatarId, subId))
        UserFunnel.loggingAvID('write', avatarId)
        UserFunnel.loggingSubID('write', subId)
        self.ignore('rejectPlayAvatar')
        self.ignore('playAvatarResponse')
        base.cr.cleanupWaitingForDatabase()
        self.doneStatus = {'mode': 'chose'}
        messenger.send(self.doneEvent, [self.doneStatus])
        messenger.send('destroyFeedbackPanel')
    def __activatePlayButton(self):
        """Enable the Play button, or relabel it if downloads/queueing still block play."""
        if not self.allPhasesComplete:
            self.playButton['text'] = '\x01smallCaps\x01%s\x02' % PLocalizer.AvatarChooserLoading
            return
        if not self.queueComplete:
            self.playButton['text'] = '\x01smallCaps\x01%s\x02' % PLocalizer.AvatarChooserInQueue
            return
        self.playButton['state'] = DGG.NORMAL
        self.playButton['text'] = '\x01smallCaps\x01%s\x02' % PLocalizer.AvatarChooserPlay
        self.playButton.setColor(1, 1, 1, 1)
        self.playButton['text_fg'] = (1.0, 0.9, 0.7, 0.9)
    def __activateCreateButtons(self):
        """Re-light every empty slot's button as 'Create' once downloads and queueing finish."""
        if not self.allPhasesComplete:
            return
        if not self.queueComplete:
            return
        for currSubId, currSubVal in base.cr.avList.items():
            for currIdx in range(len(currSubVal)):
                # Only slots that are open for creation get the Create label.
                if currSubVal[currIdx] == OTPGlobals.AvatarSlotAvailable:
                    button = self.subAvButtons[currSubId][currIdx]
                    button.setColorScale(1, 1, 1, 1)
                    button['text'] = '\x01smallCaps\x01%s\x02' % PLocalizer.AvatarChooserCreate
    def __deactivatePlayButton(self):
        """Grey out the Play button and relabel it 'In Queue'."""
        self.playButton['text'] = '\x01smallCaps\x01%s\x02' % PLocalizer.AvatarChooserInQueue
        self.playButton.setColor(0.7, 0.7, 0.7, 0.7)
        self.playButton['text_fg'] = (0.7, 0.7, 0.7, 0.7)
    def __deactivateCreateButtons(self):
        """Grey out every empty slot's button and relabel it 'In Queue'."""
        for currSubId, currSubVal in base.cr.avList.items():
            for currIdx in range(len(currSubVal)):
                if currSubVal[currIdx] == OTPGlobals.AvatarSlotAvailable:
                    button = self.subAvButtons[currSubId][currIdx]
                    button.setColorScale(0.7, 0.7, 0.7, 0.7)
                    button['text'] = '\x01smallCaps\x01%s\x02' % PLocalizer.AvatarChooserInQueue
    def __allPhasesComplete(self):
        """Launcher finished downloading all phases: unlock Play/Create."""
        self.allPhasesComplete = True
        self.__activatePlayButton()
        self.__activateCreateButtons()
        self.ignore('launcherAllPhasesComplete')
    def _startLoginStatusTask(self):
        """Begin the server-queueing handshake.

        Depending on config/launcher state this either declares queueing done
        immediately, resumes a queue already assigned by the launcher, or
        starts a polling task against the web login RPC.
        """
        # Dev builds skip the queue by default; live builds honour it.
        if __dev__ or launcher.getValue('IS_DEV'):
            disableQueueDefault = 1
        else:
            disableQueueDefault = 0
        if config.GetBool('disable-server-queueing', disableQueueDefault):
            self._setQueueComplete()
            return
        self.httpClient = HTTPClient()
        # Python 2 module; local import keeps it off the cost of module load.
        import urllib2
        proxies = urllib2.getproxies()
        if proxies:
            # Decompiled shorthand: log only when an http proxy is configured.
            proxies.get('http') and self.notify.info('queuing proxy found')
            self.httpClient.setProxySpec(proxies.get('http'))
        else:
            self.notify.info('queuing proxy is none')
        loginTokenKey = config.GetString('queueing-token-1', 'SESSION_TOKEN')
        self.notify.info('using queueing token 1 of %s' % loginTokenKey)
        self.loginToken = launcher.getValue(loginTokenKey, None)
        self.queueComplete = False
        self.queueStatus = launcher.getValue('LOGIN_ACTION', None)
        if self.queueStatus and self.queueStatus == 'PLAY':
            # Launcher already cleared us to play.
            self._setQueueComplete()
            return
        self.queueFreqSeconds = launcher.getValue('QUEUE_FREQ_SECONDS', None)
        self.queueUrl = launcher.getValue('QUEUE_URL', None)
        if self.loginToken is not None and self.queueStatus == 'QUEUE' and self.queueFreqSeconds is not None and self.queueUrl is not None:
            # Launcher already queued us: poll the queue URL directly.
            self.queueFreqSeconds = int(self.queueFreqSeconds)
            self._startQueueTask()
            return
        # Otherwise poll the login RPC until it hands us PLAY or QUEUE.
        self.loginStatusRequest = None
        self.loginStatusTask = taskMgr.add(self._checkLoginStatus, 'AvatarChooser-CheckLoginStatus')
        self.loginStatusTask.delayTime = 0.1
        return
def _checkLoginStatus(self, task):
if not self.loginStatusRequest:
loginStatusUrl = launcher.getValue('WEB_PAGE_LOGIN_RPC', 'https://piratesonline.go.com/auth/piratesLogin') + '?'
if self.loginToken:
loginStatusUrl += 'login_token=%s' % (self.loginToken,)
else:
if __dev__ or launcher.getValue('IS_DEV'):
testLogin = config.GetString('queueing-login', 'xx')
testPass = config.GetString('queueing-pass', 'xx')
loginStatusUrl += 'username=%s&password=%s' % (testLogin, testPass)
if config.GetBool('server-queueing-force', 0):
self.forceQueueStr = '&wannaqueue=1'
loginStatusUrl += self.forceQueueStr
loginStatusUrl += '&fromGame=1'
self.notify.info('Checking login status at: %s' % (loginStatusUrl,))
self.statusRF = Ramfile()
self.loginStatusRequest = self.httpClient.makeChannel(False)
self.loginStatusRequest.beginGetDocument(DocumentSpec(loginStatusUrl))
self.loginStatusRequest.downloadToRam(self.statusRF)
if self.loginStatusRequest.run():
return task.again
requestData = ''
if self.loginStatusRequest.isValid():
if self.loginStatusRequest.isDownloadComplete():
requestData = self.statusRF.getData()
self.statusRF = None
else:
self.notify.info('LoginStatus check failed: %s' % (self.loginStatusRequest.getStatusString(),))
self.loginStatusRequest = None
return task.again
results = {}
for line in requestData.split('\n'):
pair = line.split('=', 1)
if len(pair) == 2:
results[pair[0].strip()] = pair[1].strip()
self.queueStatus = results.get('LOGIN_ACTION', None)
if self.queueStatus == 'PLAY':
self._setQueueComplete()
return task.done
if self.queueStatus != 'QUEUE':
self.notify.warning('Received invalid LOGIN_ACTION: %s' % (self.queueStatus,))
sys.exit(1)
loginTokenKey = config.GetString('queueing-token-2', 'SESSION_TOKEN')
self.notify.info('using queueing token 2 of %s' % loginTokenKey)
self.loginToken = results.get(loginTokenKey, self.loginToken)
self.queueFreqSeconds = int(results.get('QUEUE_FREQ_SECONDS', '10'))
self.queueUrl = results.get('QUEUE_URL', None)
if not self.loginToken or not self.queueUrl:
self.notify.warning('No login token or queueUrl, trying again:')
self.loginStatusRequest = None
return task.again
config.GetBool('server-queueing-force', 0) and self.notify.info('forcing queue')
self.forceQueueStr = '&wannaqueue=1'
def clearForceQueue(task=None):
if self.forceQueueStr:
self.notify.info('clearing force queue')
self.forceQueueStr = ''
else:
self.notify.info('setting force queue')
self.forceQueueStr = '&wannaqueue=1'
self._setQueueNotComplete()
self.accept('f1', clearForceQueue)
else:
self.forceQueueStr = ''
self._startQueueTask()
return task.done
    def _stopLoginStatusTask(self):
        """Tear down login-status polling and the queue task, releasing the HTTP client."""
        self._stopQueueTask()
        self.httpClient = None
        self.loginStatusRequest = None
        if self.loginStatusTask:
            taskMgr.remove(self.loginStatusTask)
            self.loginStatusTask = None
        return
    def _startQueueTask(self):
        """Kick off the periodic queue-status polling task."""
        self.notify.info('Checking queue status...')
        self.queueRequest = None
        self.queueTask = taskMgr.add(self._checkQueue, 'AvatarChooser-CheckQueue')
        return
def _checkQueue(self, task):
if not self.queueRequest:
self.notify.info('Checking queue status at: %s' % (self.queueUrl + self.forceQueueStr,))
self.queueRequest = self.httpClient.makeChannel(False)
self.queueRequest.beginGetDocument(DocumentSpec(self.queueUrl + self.forceQueueStr))
self.statusRF = Ramfile()
self.queueRequest.downloadToRam(self.statusRF)
task.delayTime = 0.1
if self.queueRequest.run():
return task.again
requestData = ''
if self.queueRequest.isValid():
self.queueRequest.isDownloadComplete() and self.notify.info('CheckQueue download complete')
requestData = self.statusRF.getData()
self.statusRF = None
else:
self.notify.info('CheckQueue check failed: %s' % (self.loginStatusRequest.getStatusString(),))
self.queueRequest = None
return task.again
task.delayTime = self.queueFreqSeconds
results = {}
for line in requestData.split('\n'):
pair = line.split('=', 1)
if len(pair) == 2:
results[pair[0].strip()] = pair[1].strip()
userError = results.get('USER_ERROR', None)
if userError:
self.notify.warning('Received USER_ERROR: %s fetching queue status' % (userError,))
sys.exit(1)
self.queueStatus = results.get('QUEUE_ACTION', None)
if self.queueStatus == 'PLAY':
self._setQueueComplete()
return task.done
if self.queueStatus != 'QUEUE':
self.notify.warning('Received invalid QUEUE_ACTION: %s' % (self.queueStatus,))
sys.exit(1)
self.notify.info('Queue not ready. Next check in %s seconds...' % (self.queueFreqSeconds,))
self.queueRequest = None
return task.again
    def _setQueueComplete(self):
        """Mark the login queue as cleared and unlock Play/Create."""
        self.notify.info('Queueing is complete!')
        self.queueTask = None
        self.queueComplete = True
        self.__activatePlayButton()
        self.__activateCreateButtons()
        return
    def _setQueueNotComplete(self):
        """Re-enter queued state: lock Play/Create and restart polling if needed."""
        self.notify.info('Queueing is not complete!')
        self.queueComplete = False
        self.__deactivatePlayButton()
        self.__deactivateCreateButtons()
        # Guard against double-starting the polling task.
        if not taskMgr.hasTaskNamed('AvatarChooser-CheckQueue'):
            self._startQueueTask()
    def _stopQueueTask(self):
        """Cancel queue polling and drop the in-flight request, if any."""
        self.queueRequest = None
        if self.queueTask:
            taskMgr.remove(self.queueTask)
            self.queueTask = None
        return
    def __handleDelete(self):
        """Ask for yes/no confirmation before deleting the selected avatar."""
        if self.deleteConfirmDialog:
            self.deleteConfirmDialog.destroy()
        subId, slot = self.choice
        potAv = base.cr.avList[subId][slot]
        name = potAv.dna.getDNAName()
        self.blockInput()
        self.deleteConfirmDialog = PDialog.PDialog(text=PLocalizer.AvatarChooserConfirmDelete % name, style=OTPDialog.YesNo, command=self.__handleDeleteConfirmation)
    def __handleDeleteConfirmation(self, value):
        """Confirmation-dialog callback: send the delete request on OK, else unblock."""
        self.deleteConfirmDialog.destroy()
        self.deleteConfirmDialog = None
        if value == DGG.DIALOG_OK:
            subId, slot = self.choice
            potAv = base.cr.avList[subId][slot]
            self.notify.info('AvatarChooser: request delete slot: %s avId: %s subId: %s' % (slot, potAv.id, subId))
            self.accept('rejectRemoveAvatar', self.__rejectRemoveAvatar)
            self.accept('removeAvatarResponse', self.__removeAvatarResponse)
            base.cr.avatarManager.sendRequestRemoveAvatar(potAv.id, subId, 'delete')
            base.cr.waitForDatabaseTimeout(requestName='WaitForDeleteAvatarResponse')
            # Input stays blocked until the server answers.
            self.blockInput()
        else:
            self.allowInput()
        return
    def __handleShareConfirmation(self, value):
        """Confirmation-dialog callback: send the share/lock toggle on OK, else unblock."""
        self.shareConfirmDialog.destroy()
        self.shareConfirmDialog = None
        if value == DGG.DIALOG_OK:
            subId, slot = self.choice
            potAv = base.cr.avList[subId][slot]
            self.notify.info('AvatarChooser: request share slot: %s avId: %s subId: %s' % (slot, potAv.id, subId))
            self.accept('rejectShareAvatar', self.__rejectShareAvatar)
            self.accept('shareAvatarResponse', self.__shareAvatarResponse)
            # Request the opposite of the current shared state.
            if potAv.shared:
                wantShared = 0
            else:
                wantShared = 1
            base.cr.avatarManager.sendRequestShareAvatar(potAv.id, subId, wantShared)
            base.cr.waitForDatabaseTimeout(requestName='WaitForShareAvatarResponse')
            self.blockInput()
        else:
            self.allowInput()
        return
    def __removeAvatarResponse(self, avatarId, subId):
        """Server confirmed the delete: re-fetch the avatar list to refresh the UI."""
        self.ignore('rejectRemoveAvatar')
        self.ignore('removeAvatarResponse')
        base.cr.cleanupWaitingForDatabase()
        base.cr.sendGetAvatarsMsg()
    def __rejectRemoveAvatar(self, reasonId):
        """Server refused the delete: log and restore input."""
        self.notify.warning('rejectRemoveAvatar: %s' % reasonId)
        self.ignore('rejectRemoveAvatar')
        self.ignore('removeAvatarResponse')
        base.cr.cleanupWaitingForDatabase()
        self.allowInput()
    def updateAvatarList(self):
        """Rebuild the slot buttons from base.cr.avList and re-select a sane default."""
        self.__hideHighlightedAvatar()
        self.__createAvatarButtons()
        # Python 2: .keys() returns a list, sorted in place.
        self.subIds = base.cr.avList.keys()
        self.subIds.sort()
        if self.currentSubId not in self.subIds:
            # NOTE: message interpolates currentSubIndex, not the missing subId.
            self.notify.warning('subId %s is no longer in family: %s' % (self.currentSubIndex, self.subIds))
            self.currentSubIndex = 0
        self.showSub(self.currentSubIndex)
        subAvs = base.cr.avList[self.currentSubId]
        # Auto-highlight slot 0 if it actually holds an avatar.
        if len(subAvs) > 0 and subAvs[0] not in (OTPGlobals.AvatarSlotUnavailable, OTPGlobals.AvatarSlotAvailable, OTPGlobals.AvatarPendingCreate):
            self.__handleHighlight(self.currentSubId, 0)
        if not self.handleDialogOnScreen:
            self.allowInput()
    def __handleOptions(self):
        """Toggle the game-options panel, creating it lazily on first use."""
        if self.gameOptions is not None:
            if self.gameOptions.isHidden():
                self.gameOptions.show()
            else:
                self.gameOptions.hide()
        else:
            # Panel is wider when custom key binding is enabled.
            if base.config.GetBool('want-custom-keys', 0):
                width = 1.8
            else:
                width = 1.6
            height = 1.6
            x = -width / 2
            y = -height / 2
            self.currentSubId = self.subIds[self.currentSubIndex]
            subAccess = base.cr.accountDetailRecord.subDetails[self.currentSubId].subAccess
            self.gameOptions = GameOptions('Game Options', x, y, width, height, base.options, access=subAccess, chooser=self)
            self.gameOptions.show()
        return
def __handleQuit(self):
self.doneStatus = {'mode': 'exit'}
messenger.send(self.doneEvent, [self.doneStatus])
    def __handleUpgrade(self):
        """Log the click and open the membership-upgrade web page in a browser."""
        base.cr.centralLogger.writeClientEvent('Upgrade button pressed on Pick-A-Pirate screen')
        base.popupBrowser('http://piratesonline.go.com/#/account_services/membership_options.html', True)
    def __handleLogoutWithoutConfirm(self):
        """Jump straight back to the login screen, skipping any confirmation."""
        base.cr.loginFSM.request('login')
def __shipRockTask(self, task):
h = self.shipRoot.getH()
p = 1.5 * math.sin(task.time * 0.9)
r = 1.5 * math.cos(task.time * 1.1) + 1.5 * math.cos(task.time * 1.8)
self.shipRoot.setHpr(h, p, r)
return Task.cont
def blockInput(self):
color = Vec4(0.7, 0.7, 0.7, 0.7)
for subButtons in self.subAvButtons.values():
for button in subButtons:
button['state'] = DGG.DISABLED
button.setColorScale(color)
self.renameButton['state'] = DGG.DISABLED
self.renameButton.setColorScale(color)
self.quitButton['state'] = DGG.DISABLED
self.quitButton.setColorScale(color)
self.logoutButton['state'] = DGG.DISABLED
self.logoutButton.setColorScale(color)
self.playButton['state'] = DGG.DISABLED
self.playButton.setColorScale(color)
if base.config.GetBool('allow-linked-accounts', 0):
self.shareButton['state'] = DGG.DISABLED
self.shareButton.setColorScale(color)
self.deleteButton['state'] = DGG.DISABLED
self.deleteButton.setColorScale(color)
self.optionsButton['state'] = DGG.DISABLED
self.optionsButton.setColorScale(color)
self.upgradeButton['state'] = DGG.DISABLED
self.upgradeButton.setColorScale(color)
if base.config.GetBool('allow-linked-accounts', 0):
self.nextSubButton['state'] = DGG.DISABLED
self.nextSubButton.setColorScale(color)
self.prevSubButton['state'] = DGG.DISABLED
self.prevSubButton.setColorScale(color)
    def allowInput(self):
        """Re-enable the UI, then re-restrict buttons based on the selected avatar.

        First pass: blanket-enable everything (subject to disableQuit /
        disableOptions / linked-accounts config).  Second pass: tighten
        delete/share/play according to the selected avatar's creator, shared
        and online flags.  Finally, if still queued, Play/Create stay dimmed.
        """
        for subButtons in self.subAvButtons.values():
            for button in subButtons:
                # Buttons with no label correspond to unusable slots.
                if button['text']:
                    button['state'] = DGG.NORMAL
                else:
                    button['state'] = DGG.DISABLED
                button.clearColorScale()
        self.renameButton['state'] = DGG.NORMAL
        self.renameButton.clearColorScale()
        if not self.disableQuit:
            self.quitButton['state'] = DGG.NORMAL
            self.quitButton.clearColorScale()
        self.logoutButton['state'] = DGG.NORMAL
        self.logoutButton.clearColorScale()
        self.playButton['state'] = DGG.NORMAL
        self.playButton.clearColorScale()
        if base.config.GetBool('allow-linked-accounts', 0):
            self.shareButton['state'] = DGG.NORMAL
            self.shareButton.clearColorScale()
        self.deleteButton['state'] = DGG.NORMAL
        self.deleteButton.clearColorScale()
        self.upgradeButton['state'] = DGG.NORMAL
        self.upgradeButton.clearColorScale()
        if not self.disableOptions:
            self.optionsButton['state'] = DGG.NORMAL
            self.optionsButton.clearColorScale()
        if base.config.GetBool('allow-linked-accounts', 0):
            self.nextSubButton['state'] = DGG.NORMAL
            self.nextSubButton.clearColorScale()
            self.prevSubButton['state'] = DGG.NORMAL
            self.prevSubButton.clearColorScale()
        # Resolve the currently selected avatar, if any ((0, 0) means none).
        if self.choice == (0, 0):
            potAv = None
        else:
            subId, slot = self.choice
            potAv = base.cr.avList[subId][slot]
        if potAv and potAv not in (OTPGlobals.AvatarSlotUnavailable, OTPGlobals.AvatarSlotAvailable, OTPGlobals.AvatarPendingCreate) and not potAv.online:
            # Only the creator (or a non-linked account) may delete/share.
            if potAv.creator or not base.config.GetBool('allow-linked-accounts', 0):
                self.deleteButton['state'] = DGG.NORMAL
                if base.config.GetBool('allow-linked-accounts', 0):
                    self.shareButton['state'] = DGG.NORMAL
            else:
                self.deleteButton['state'] = DGG.DISABLED
                if base.config.GetBool('allow-linked-accounts', 0):
                    self.shareButton['state'] = DGG.DISABLED
            if potAv.online:
                self.playButton['state'] = DGG.DISABLED
            elif potAv.shared or potAv.creator or not base.config.GetBool('allow-linked-accounts', 0):
                self.playButton['state'] = DGG.NORMAL
            else:
                self.playButton['state'] = DGG.DISABLED
        if self.queueComplete == False:
            self.__deactivatePlayButton()
            self.__deactivateCreateButtons()
        return
    def __handleFirstAdd(self, value):
        """Acknowledge-dialog callback: dismiss the first-add notice and unblock input."""
        self.firstAddDialog.destroy()
        self.firstAddDialog = None
        self.allowInput()
        return
    def __handleFinalize(self, value):
        """Dialog callback: commit the approved wish-name for the selected avatar.

        Sends the finalize request, applies the wish name locally to the
        potential-avatar record and its slot button, and clears the slot's
        notification badge.
        """
        subId, slot = self.choice
        self.notifications[slot].remove()
        del self.notifications[slot]
        self.finalizeConfirmDialog.destroy()
        self.finalizeConfirmDialog = None
        potAv = base.cr.avList[subId][slot]
        base.cr.avatarManager.sendRequestFinalize(potAv.id)
        potAv.name = potAv.wishName
        potAv.wishState = 'CLOSED'
        avButton = self.subAvButtons[subId][slot]
        avButton['text'] = potAv.name
        potAv.dna.setName(potAv.wishName)
        # No-arg setText() presumably refreshes the button geometry — confirm.
        avButton.setText()
        self.allowInput()
        return
    def __handleNotQueueComplete(self, value):
        """Acknowledge-dialog callback: dismiss the 'still queued' notice and unblock input."""
        self.notQueueCompleteDialog.destroy()
        self.notQueueCompleteDialog = None
        self.allowInput()
        return
    def __handleNotDownload(self, value):
        """Acknowledge-dialog callback: dismiss the 'still downloading' notice and unblock input."""
        self.notDownloadDialog.destroy()
        self.notDownloadDialog = None
        self.allowInput()
        return
    def __handleDenied(self, value):
        """Dialog callback after a denied wish-name: clear the badge and unblock input."""
        subId, slot = self.choice
        self.notifications[slot].remove()
        del self.notifications[slot]
        self.deniedConfirmDialog.destroy()
        self.deniedConfirmDialog = None
        self.handleDialogOnScreen = 0
        self.allowInput()
        return
    def enterNameMode(self):
        """Switch into the naming UI for the selected avatar.

        Fades out the chooser frames (alpha 0), moves the camera, and brings
        up NameGUI; 'q' or the NameGUIFinished event leaves via exitNameMode.
        """
        subId, slot = self.choice
        self.quitFrame.setColorScale(Vec4(1, 1, 1, 0))
        self.highlightFrame.setColorScale(Vec4(1, 1, 1, 0))
        self.avatarListFrame.setColorScale(Vec4(1, 1, 1, 0))
        base.camera.setX(-26)
        self.subFrame.hide()
        av = base.cr.avList[subId][slot]
        base.accept('q', self.exitNameMode)
        base.accept('NameGUIFinished', self.exitNameMode)
        self.renameButton.hide()
        self.nameGui = NameGUI.NameGUI(main=av, independent=True)
        self.nameGui.enter()
    def exitNameMode(self, value):
        """Leave the naming UI and restore the chooser.

        value == 1 means the user committed a name: a custom name goes to
        moderation ('REQUESTED'), a pick-a-name is applied immediately.
        Any other value cancels and just re-shows the rename button.
        """
        subId, slot = self.choice
        if value == 1:
            if self.nameGui.customName:
                # Custom names need server-side approval first.
                base.cr.avList[subId][slot].wishState = 'REQUESTED'
            else:
                potAv = base.cr.avList[subId][slot]
                potAv.name = self.nameGui._getName()
                potAv.wishState = 'CLOSED'
                avButton = self.subAvButtons[subId][slot]
                avButton['text'] = potAv.name
                potAv.dna.setName(potAv.name)
                avButton.setText()
                # Drop any stale wish-name notification badge for this slot.
                if self.notifications.get(slot, 0):
                    self.notifications[slot].remove()
                    del self.notifications[slot]
        else:
            self.renameButton.show()
        self.nameGui.unload()
        del self.nameGui
        base.ignore('q')
        base.ignore('NameGUIFinished')
        # Restore full opacity and the original camera position.
        self.quitFrame.setColorScale(Vec4(1, 1, 1, 1))
        self.highlightFrame.setColorScale(Vec4(1, 1, 1, 1))
        self.avatarListFrame.setColorScale(Vec4(1, 1, 1, 1))
        base.camera.setX(-29)
        self.subFrame.show()
    def placeNotification(self, slot, pos, style):
        """Attach an exclamation badge next to a slot's button at the given position.

        The ``style`` argument (APPROVED/DENIED at call sites) is currently
        unused — the same badge model is shown either way.
        """
        notification = self.exclam.copyTo(self.avatarListFrame)
        self.notifications[slot] = notification
        notification.setPos(pos[0], pos[1], pos[2])
        notification.setScale(0.14)
        notification.setR(25)
    def changeSub(self, delta):
        """Page to an adjacent subscription (delta of +1/-1); showSub clamps the index."""
        self.showSub(self.currentSubIndex + delta)
    def showSub(self, index):
        """Display the subscription at the given index (clamped to range).

        Clears the old sub's notification badges, updates the paging arrows
        and sub label, shows the matching frame, re-places wish-name badges,
        and repositions the avatar list depending on whether the family has
        any avatars at all.
        """
        # Remove badges belonging to the sub we are leaving.
        if self.subIds[self.currentSubIndex]:
            numAvs = len(self.subAvButtons[self.subIds[self.currentSubIndex]])
            for slot in range(0, numAvs):
                if self.notifications.get(slot, 0):
                    self.notifications[slot].remove()
                    del self.notifications[slot]
        self.currentSubIndex = index
        numSubs = len(self.subIds)
        # Clamp and hide the paging arrow at either end of the range.
        if self.currentSubIndex <= 0:
            self.currentSubIndex = 0
            if base.config.GetBool('allow-linked-accounts', 0):
                self.prevSubButton.hide()
        else:
            if base.config.GetBool('allow-linked-accounts', 0):
                self.prevSubButton.show()
        if self.currentSubIndex >= numSubs - 1:
            self.currentSubIndex = numSubs - 1
            if base.config.GetBool('allow-linked-accounts', 0):
                self.nextSubButton.hide()
        else:
            if base.config.GetBool('allow-linked-accounts', 0):
                self.nextSubButton.show()
        self.currentSubId = self.subIds[self.currentSubIndex]
        subName = base.cr.accountDetailRecord.subDetails[self.currentSubId].subName
        subAccess = base.cr.accountDetailRecord.subDetails[self.currentSubId].subAccess
        subAccessStr = PLocalizer.AccessLevel[subAccess]
        subLabelText = '\x01white\x01%s\x02\n\x01smallCaps\x01%s\x02' % (subName, subAccessStr)
        self.subLabel['text'] = subLabelText
        for frame in self.subFrames.values():
            frame.hide()
        self.subFrames[self.currentSubId].show()
        # Does *any* sub in the family own a real avatar?
        anyAvatars = False
        for avList in base.cr.avList.values():
            for av in avList:
                if av not in (OTPGlobals.AvatarSlotUnavailable, OTPGlobals.AvatarSlotAvailable, OTPGlobals.AvatarPendingCreate):
                    anyAvatars = True
                    break
            if anyAvatars:
                break
        # Place wish-name status badges for this sub's avatars.
        avList = base.cr.avList[self.currentSubId]
        for avIdx in range(0, len(avList)):
            if avList[avIdx] not in (OTPGlobals.AvatarSlotUnavailable, OTPGlobals.AvatarSlotAvailable, OTPGlobals.AvatarPendingCreate):
                if avList[avIdx].wishState == 'APPROVED':
                    self.placeNotification(avIdx, (0.32, 0, -0.37 - avIdx * 0.095), APPROVED)
                elif avList[avIdx].wishState == 'DENIED' or avList[avIdx].wishState == 'OPEN':
                    self.placeNotification(avIdx, (0.32, 0, -0.37 - avIdx * 0.095), DENIED)
        # With avatars: list docks top-left; without: centered and enlarged.
        if anyAvatars:
            self.avatarListFrame.reparentTo(base.a2dTopLeft)
            self.avatarListFrame.setPosHprScale(0.42, 0, -0.3, 0, 0, 0, 1, 1, 1)
        else:
            self.avatarListFrame.reparentTo(base.a2dTopCenter)
            self.avatarListFrame.setPosHprScale(0, 0, -0.3, 0, 0, 0, 1.1, 1.1, 1.1)
            self.renameButton.hide()
            self.shardPanel.hide()
            self.shardPanelBottom.hide()
        subAvs = base.cr.avList[self.currentSubId]
        if len(subAvs) > 0:
            # Decompiled shorthand: highlight slot 0 only if it holds a real avatar.
            subAvs[0] not in (OTPGlobals.AvatarSlotUnavailable, OTPGlobals.AvatarSlotAvailable, OTPGlobals.AvatarPendingCreate) and self.__handleHighlight(self.currentSubId, 0)
        else:
            self.__hideHighlightedAvatar()
    def popupTrialPanel(self):
        """Lazily create and show the trial-account upsell panel."""
        if not self.trialNonPayerPanel:
            self.trialNonPayerPanel = TrialNonPayerPanel.TrialNonPayerPanel(trial=True)
        self.trialNonPayerPanel.show()
    def popupFeatureBrowser(self, subId, slot):
        """Lazily create and show the non-payer feature panel (subId/slot unused here)."""
        if not self.nonPayerPanel:
            self.nonPayerPanel = TrialNonPayerPanel.TrialNonPayerPanel(trial=False)
            self.nonPayerPanel.fullText['text'] = PLocalizer.VR_FeaturePopLongTextAvatars
        self.nonPayerPanel.show()
    def _stopMouseReadTask(self):
        """Stop the per-frame mouse-drag rotation task, if running."""
        taskMgr.remove('AvatarChooser-MouseRead')
    def _startMouseReadTask(self):
        """(Re)start the per-frame mouse-drag rotation task, seeding the last position."""
        self._stopMouseReadTask()
        mouseData = base.win.getPointer(0)
        self.lastMousePos = (mouseData.getX(), mouseData.getY())
        taskMgr.add(self._mouseReadTask, 'AvatarChooser-MouseRead')
def _mouseReadTask(self, task):
if not base.mouseWatcherNode.hasMouse():
pass
else:
winSize = (
base.win.getXSize(), base.win.getYSize())
mouseData = base.win.getPointer(0)
if mouseData.getX() > winSize[0] or mouseData.getY() > winSize[1]:
pass
else:
dx = mouseData.getX() - self.lastMousePos[0]
mouseData = base.win.getPointer(0)
self.lastMousePos = (mouseData.getX(), mouseData.getY())
if self.av:
value = self.av.getH()
value = (value + dx * 0.7) % 360
self.__rotateHighlightedAvatar(value)
return Task.cont | nilq/baby-python | python |
import argparse
from pathlib import Path
def arg_parsing():
    """Build the command-line interface and return the parsed arguments as a dict."""
    parser = argparse.ArgumentParser()
    add = parser.add_argument

    add('--mode', '-m', type=int, default=0, help='0 - local, 1 - master, 2 - encoder')

    # Input / output / temp locations
    add('--input', '-i', nargs='+', type=Path, help='Input File')
    add('--temp', type=Path, default=Path('.temp'), help='Set temp folder path')
    add('--output_file', '-o', type=Path, default=None, help='Specify output file')

    # Splitting strategy
    add('--split_method', type=str, default='pyscene', help='Specify splitting method',
        choices=['pyscene', 'aom_keyframes'])
    add('--extra_split', '-xs', type=int, default=0, help='Number of frames after which make split')
    add('--min_scene_len', type=int, default=None, help='Minimum number of frames in a split')

    # PySceneDetect options
    add('--scenes', '-s', type=str, default=None, help='File location for scenes')
    add('--threshold', '-tr', type=float, default=50, help='PySceneDetect Threshold')

    # Encoder selection and settings
    add('--passes', '-p', type=int, default=2, help='Specify encoding passes')
    add('--video_params', '-v', type=str, default='', help='encoding settings')
    add('--encoder', '-enc', type=str, default='aom', help='Choosing encoder',
        choices=['aom', 'svt_av1', 'rav1e', 'vpx'])
    add('--workers', '-w', type=int, default=0, help='Number of workers')
    add('-cfg', '--config', type=Path, help='Parameters file. Save/Read: '
        'Video, Audio, Encoder, FFmpeg parameteres')

    # FFmpeg pass-through options
    add('--ffmpeg', '-ff', type=str, default='', help='FFmpeg commands')
    add('--audio_params', '-a', type=str, default='-c:a copy', help='FFmpeg audio settings')
    add('--pix_format', '-fmt', type=str, default='yuv420p', help='FFmpeg pixel format')

    # Miscellaneous behaviour flags
    add('--logging', '-log', type=str, default=None, help='Enable logging')
    add('--resume', '-r', help='Resuming previous session', action='store_true')
    add('--no_check', '-n', help='Do not check encodings', action='store_true')
    add('--keep', help='Keep temporally folder after encode', action='store_true')

    # Brightness-based CQ boosting
    add('--boost', help='Experimental feature, decrease CQ of clip based on brightness.'
        'Darker = lower CQ', action='store_true')
    add('--boost_range', default=15, type=int, help='Range/strength of CQ change')
    add('--boost_limit', default=10, type=int, help='CQ limit for boosting')

    # VMAF measurement
    add('--vmaf', help='Calculating vmaf after encode', action='store_true')
    add('--vmaf_path', type=Path, default=None, help='Path to vmaf models')

    # Target-VMAF search
    add('--vmaf_target', type=float, help='Value of Vmaf to target')
    add('--vmaf_steps', type=int, default=4, help='Steps between min and max qp for target vmaf')
    add('--min_cq', type=int, default=25, help='Min cq for target vmaf')
    add('--max_cq', type=int, default=50, help='Max cq for target vmaf')
    add('--vmaf_plots', help='Make plots of probes in temp folder', action='store_true')

    # Distributed-encoding hosts
    add('--host', nargs='+', type=str, help='ips of encoders')

    # Expose everything as a plain dict for the rest of the program.
    return vars(parser.parse_args())
| nilq/baby-python | python |
#!/usr/bin/env python
from cogent.app.parameters import FlagParameter, ValuedParameter
from cogent.app.util import CommandLineApplication, ResultPath
"""Application controller for sfffile"""
__author__ = "Kyle Bittinger"
__copyright__ = "Copyright 2007-2012, The Cogent Project"
__credits__ = ["Kyle Bittinger"]
__license__ = "GPL"
__version__ = "1.5.3"
__maintainer__ = "Kyle Bittinger"
__email__ = "kylebittinger@gmail.com"
__status__ = "Prototype"
class Sfffile(CommandLineApplication):
    """Application controller for Roche's ``sfffile`` command-line utility."""
    _options = {
        '-o': ValuedParameter('-', 'o', Delimiter=' '),      # output filepath
        '-i': ValuedParameter('-', 'i', Delimiter=' '),      # accession numbers to include
        '-e': ValuedParameter('-', 'e', Delimiter=' '),      # accession numbers to exclude
        '-t': ValuedParameter('-', 't', Delimiter=' '),      # custom trim points file
        '-c': ValuedParameter('-', 'c', Delimiter=' '),      # number of cycles in output sff
        '-gs20': FlagParameter('-', 'gs20'),                 # shortcut for -c 42
        '-gsflx': FlagParameter('-', 'gsflx'),               # shortcut for -c 100
        '-s': ValuedParameter('-', 's', Delimiter=' '),      # split multiplexed reads
        '-mcf': ValuedParameter('-', 'mcf', Delimiter=' '),  # custom MID configuration file
        '-nmft': FlagParameter('-', 'nmft'),                 # prevent propagation of sff index
    }
    _parameters = {}
    _parameters.update(_options)
    _input_handler = '_input_as_path'
    _command = 'sfffile'

    def _get_result_paths(self, data):
        """Collect the resultant SFF file in the results.

        Because cogent.app.util.CommandLineAppResult opens output files in
        text mode, this may not be portable on Windows; specifying the
        output path explicitly via '-o' is the portable alternative.
        """
        out_param = self.Parameters['-o']
        # sfffile writes to '454Reads.sff' when no explicit output is given.
        sff_path = out_param.Value if out_param.isOn() else '454Reads.sff'
        return {'sff': ResultPath(sff_path)}

    def _accept_exit_status(self, exit_status):
        """Only exit status 0 counts as success for sfffile."""
        return exit_status == 0
| nilq/baby-python | python |
import gi
gi.require_version('Gtk', '3.0')
gi.require_version('GtkSource', '3.0')
from gi.repository import Gtk, GtkSource
from diode.rendered_graph import RenderedGraph
from diode.abstract_sdfg import AbstractSDFG
from diode.images import ImageStore
from diode.property_renderer import PropertyRenderer, _get_edge_label
class PatternEditor:
    """GTK editor for SDFG transformation patterns.

    Renders the main SDFG plus two small editable graphs ("find" and
    "replace") and a toolbar of node/edge tools used to build them.
    """

    def __init__(self, builder):
        """Wire up drawing areas, property renderer and toolbar.

        :param builder: Gtk.Builder holding the Glade-defined widgets.
        """
        # Toolbar button descriptors: icon file, tool category, label.
        self.buttons = [
            {
                "image": "cursor.png",
                "type": "mouse",
                "tool": "Mouse"
            },
            {
                "image": "delete.png",
                "type": "delete",
                "tool": "Delete"
            },
            {
                "image": "array.png",
                "type": "node",
                "tool": "Array"
            },
            {
                "image": "edge_thin.png",
                "type": "edge",
                "tool": "Memlet"
            },
            {
                "image": "map.png",
                "type": "node",
                "tool": "Map"
            },
            {
                "image": "unmap.png",
                "type": "node",
                "tool": "Unmap"
            },
            {
                "image": "tasklet.png",
                "type": "node",
                "tool": "Tasklet"
            },
            {
                "image": "stream.png",
                "type": "node",
                "tool": "Stream"
            },
            {
                "image": "stream_map.png",
                "type": "node",
                "tool": "Stream Map"
            },
            {
                "image": "stream_unmap.png",
                "type": "node",
                "tool": "Stream Unmap"
            },
            {
                "image": "state.png",
                "type": "node",
                "tool": "State"
            },
            {
                "image": "state_trans.png",
                "type": "edge",
                "tool": "State Transition"
            },
        ]
        self.active_tool = None  # an element of self.buttons
        self.builder = builder
        self.main_sdfg = None
        # First endpoint remembered while an edge tool awaits its second click.
        self.first_selected_node_for_edge = None

        self.rendered_main_sdfg = RenderedGraph()
        sdfg_da = self.builder.get_object("patedmainsdfg")
        self.rendered_main_sdfg.set_drawing_area(sdfg_da)

        self.abstract_find_sdfg = AbstractSDFG()
        self.rendered_find_sdfg = RenderedGraph()
        find_da = self.builder.get_object("find_da")
        self.rendered_find_sdfg.set_drawing_area(find_da)

        self.abstract_replace_sdfg = AbstractSDFG()
        self.rendered_replace_sdfg = RenderedGraph()
        replace_da = self.builder.get_object("replace_da")
        self.rendered_replace_sdfg.set_drawing_area(replace_da)

        # init_syntax_highlighting fetches the buffer itself; the previous
        # unused `tbuffer = ...get_buffer()` binding was removed.
        self.init_syntax_highlighting("pe_sourceview", "python")
        self.image_store = ImageStore()
        plabel = self.builder.get_object("pe_propertylabel")
        pgrid = self.builder.get_object("pe_propertygrid")
        self.propren = PropertyRenderer(plabel, pgrid, self.OnSDFGUpdate)
        self.load_buttons()
        self.connect_signals()

    def OnSDFGUpdate(self, sdfg, nodeid, propname, newval):
        """Re-render the main SDFG after a property edit."""
        self.rendered_main_sdfg.set_dotcode(self.main_sdfg.draw())

    def connect_signals(self):
        """Connect draw/scroll/mouse signals of the three drawing areas."""
        find_da = self.builder.get_object("find_da")
        replace_da = self.builder.get_object("replace_da")
        sdfg_da = self.builder.get_object("patedmainsdfg")
        sdfg_da.connect("draw", self.OnDrawMainSDFG)
        find_da.connect("draw", self.OnDrawFindSDFG)
        replace_da.connect("draw", self.OnDrawReplaceSDFG)
        sdfg_da.connect("scroll-event", self.OnScrollMainSDFG)
        find_da.connect("scroll-event", self.OnScrollFindSDFG)
        replace_da.connect("scroll-event", self.OnScrollReplaceSDFG)
        sdfg_da.connect("button-press-event", self.OnButtonPressMainSDFG)
        sdfg_da.connect("button-release-event", self.OnButtonReleaseMainSDFG)
        sdfg_da.connect("motion-notify-event", self.OnMouseMoveMainSDFG)
        find_da.connect("button-press-event", self.OnButtonPressFindSDFG)
        replace_da.connect("button-press-event", self.OnButtonPressReplaceSDFG)

    def load_buttons(self):
        """Create one toggle button per tool and add it to the toolbar."""
        toolbar = self.builder.get_object("pated_toolbar")
        for b in self.buttons:
            pixbuf = self.image_store.get_image(b["image"])
            image = Gtk.Image.new_from_pixbuf(pixbuf)
            button = Gtk.ToggleToolButton()
            button.set_icon_widget(image)
            toolbar.add(button)
            b["button"] = button
            # The mouse tool is active by default.
            if b["tool"] == "Mouse":
                self.active_tool = b
            button.connect("toggled", self.OnToggleTBButton, b)

    def init_syntax_highlighting(self, widgetname, language):
        """Enable GtkSource syntax highlighting on a text view's buffer."""
        tbuffer = self.builder.get_object(widgetname).get_buffer()
        lang_manager = GtkSource.LanguageManager()
        language = lang_manager.get_language(language)
        tbuffer.set_language(language)
        tbuffer.set_highlight_syntax(True)

    def set_main_sdfg(self, sdfg):
        """Set and render the SDFG shown in the main drawing area."""
        self.main_sdfg = sdfg
        dotcode = sdfg.draw()
        self.rendered_main_sdfg.set_dotcode(dotcode)

    def OnDrawMainSDFG(self, widget, cr):
        self.rendered_main_sdfg.render(widget, cr)
        return False

    def OnDrawFindSDFG(self, widget, cr):
        self.rendered_find_sdfg.render(widget, cr)
        return False

    def OnDrawReplaceSDFG(self, widget, cr):
        self.rendered_replace_sdfg.render(widget, cr)
        return False

    def OnToggleTBButton(self, widget, button):
        """Activate a toolbar tool and show a hint in the status label."""
        # Untoggle the previously active tool.
        self.active_tool["button"].set_active(False)
        statuslabel = self.builder.get_object("run_status_text")
        if button["type"] == "node":
            statuslabel.set_text("Click \"find\" or \"replace\" pane to " + \
                "add a " + button["tool"] + " node.")
        elif button["type"] == "edge":
            statuslabel.set_text("In the \"find\" or \"replace\" pane, " + \
                "click two nodes between which you want to add a " + \
                button["tool"] + " edge.")
        elif button["type"] == "edge_redir":
            statuslabel.set_text("In the \"find\" or \"replace\" pane, " + \
                "click an edge, followed by the new node it should " + \
                "attach to.")
        elif button["tool"] == "Delete":
            # Fixed typo: "in oder" -> "in order".
            statuslabel.set_text("Click a node or edge in the \"find\" or " + \
                "\"replace\" pane in order to delete it.")
        self.active_tool = button
        return True

    def OnScrollMainSDFG(self, widget, ev):
        d = self.rendered_main_sdfg.determine_scroll_direction(ev)
        self.rendered_main_sdfg.zoom(d, pos=(ev.x, ev.y))
        widget.queue_draw()
        return False

    def OnScrollFindSDFG(self, widget, ev):
        d = self.rendered_find_sdfg.determine_scroll_direction(ev)
        self.rendered_find_sdfg.zoom(d, pos=(ev.x, ev.y))
        widget.queue_draw()
        return False

    def OnScrollReplaceSDFG(self, widget, ev):
        d = self.rendered_replace_sdfg.determine_scroll_direction(ev)
        self.rendered_replace_sdfg.zoom(d, pos=(ev.x, ev.y))
        widget.queue_draw()
        return False

    def OnButtonPressMainSDFG(self, widget, ev):
        """Left click: select and show properties; right click: highlight."""
        x, y = ev.x, ev.y
        elem = self.rendered_main_sdfg.get_element_by_coords(x, y)
        if ev.button == 1:
            self.rendered_main_sdfg.handle_button_press(ev)
            # Re-query after the press handler, which may adjust the view.
            elem = self.rendered_main_sdfg.get_element_by_coords(x, y)
            if elem is not None:
                self.rendered_main_sdfg.highlight_element(elem)
                self.propren.render_properties_for_element(
                    self.main_sdfg, elem)
        elif ev.button == 3:
            if elem is None:
                self.rendered_main_sdfg.clear_highlights()
            else:
                self.rendered_main_sdfg.highlight_element(elem)

    def OnButtonReleaseMainSDFG(self, widget, ev):
        self.rendered_main_sdfg.handle_button_release(ev)
        return False

    def OnMouseMoveMainSDFG(self, widget, ev):
        self.rendered_main_sdfg.handle_drag_motion(ev)
        return False

    def OnRepFindNodePropsChanged(self, widget, data):
        """Apply a label edit to node *data* in the find/replace graph."""
        elem_in_replace = False
        elem = self.abstract_find_sdfg.find_node(data)
        if elem is None:
            elem = self.abstract_replace_sdfg.find_node(data)
            elem_in_replace = True
        if elem is None:
            # Removed unreachable `return` that followed this raise.
            raise ValueError("Could not find node " + data)
        newval = widget.get_text()
        elem.set_label(newval)
        if not elem_in_replace:
            new_dot = self.abstract_find_sdfg.to_dot()
            self.rendered_find_sdfg.set_dotcode(new_dot)
        else:
            new_dot = self.abstract_replace_sdfg.to_dot()
            self.rendered_replace_sdfg.set_dotcode(new_dot)

    def OnRepFindEdgePropsChanged(self, widget, data):
        """Apply a label edit to the edge (data[0] -> data[1])."""
        elem_in_replace = False
        elem = self.abstract_find_sdfg.find_edge(data[0], data[1])
        if elem is None:
            elem = self.abstract_replace_sdfg.find_edge(data[0], data[1])
            elem_in_replace = True
        if elem is None:
            # Fixed: `data` is a [tail, head] list; the old message built
            # "node " + data, raising TypeError instead of ValueError.
            raise ValueError(
                "Could not find edge " + data[0] + " -> " + data[1])
        newval = widget.get_text()
        elem.set_label(newval)
        if not elem_in_replace:
            new_dot = self.abstract_find_sdfg.to_dot()
            self.rendered_find_sdfg.set_dotcode(new_dot)
        else:
            new_dot = self.abstract_replace_sdfg.to_dot()
            self.rendered_replace_sdfg.set_dotcode(new_dot)

    def render_properties_for_repfind_node(self, elem, abstract_graph):
        """Show an editable label row for a node of a find/replace graph."""
        nodeid = elem.id.decode('utf-8')
        node = abstract_graph.find_node(nodeid)
        grid = self.builder.get_object("pe_propertygrid")
        # NOTE(review): clear_property_list is not defined on this class;
        # presumably inherited/injected elsewhere or meant to be a
        # PropertyRenderer call -- verify.
        self.clear_property_list()
        rownum = 0
        label = Gtk.Label()
        label.set_label("Node Label")
        label.set_tooltip_text("set the label")
        grid.attach(label, 0, rownum, 1, 1)
        widget = Gtk.Entry()
        widget.set_text(node.get_label())
        nuid = node.get_uid()
        widget.connect("changed", self.OnRepFindNodePropsChanged, nuid)
        grid.attach(widget, 1, rownum, 1, 1)
        rownum += 1
        grid.show_all()

    def render_properties_for_repfind_edge(self, tailelem, headelem,
                                           abstract_graph):
        """Show an editable label row for an edge of a find/replace graph."""
        tail_nodeid = tailelem.id.decode('utf-8')
        tailnode = abstract_graph.find_node(tail_nodeid)
        head_nodeid = headelem.id.decode('utf-8')
        headnode = abstract_graph.find_node(head_nodeid)
        edge = abstract_graph.find_edge(tail_nodeid, head_nodeid)
        grid = self.builder.get_object("pe_propertygrid")
        # NOTE(review): see render_properties_for_repfind_node about
        # clear_property_list.
        self.clear_property_list()
        rownum = 0
        label = Gtk.Label()
        label.set_label("Edge Label")
        label.set_tooltip_text("set the label")
        grid.attach(label, 0, rownum, 1, 1)
        widget = Gtk.Entry()
        widget.set_text(_get_edge_label(edge))
        widget.connect("changed", self.OnRepFindEdgePropsChanged,
                       [tail_nodeid, head_nodeid])
        grid.attach(widget, 1, rownum, 1, 1)
        rownum += 1
        grid.show_all()

    def button_press_in_replace_or_find(self, widget, ev, graph):
        """Dispatch a click in the find/replace pane to the active tool.

        :param graph: "find" or "replace" -- which pane was clicked.
        :raises ValueError: if *graph* is neither "find" nor "replace".
        """
        rendered_graph = None
        abstract_graph = None  # was misnamed `abstract_sdfg` (never used)
        if graph == "replace":
            rendered_graph = self.rendered_replace_sdfg
            abstract_graph = self.abstract_replace_sdfg
        elif graph == "find":
            rendered_graph = self.rendered_find_sdfg
            abstract_graph = self.abstract_find_sdfg
        else:
            raise ValueError("graph must be find or replace")
        # if the active tool is the mouse, show properties of clicked elem
        if self.active_tool["tool"] == "Mouse":
            elem = rendered_graph.get_element_by_coords(ev.x, ev.y)
            rendered_graph.clear_highlights()
            rendered_graph.highlight_element(elem)
            label = self.builder.get_object("pe_propertylabel")
            self.clear_property_list()
            if type(elem).__name__ == "Node":
                label.set_text("Properties of: " + elem.id.decode('utf-8'))
                self.render_properties_for_repfind_node(elem, abstract_graph)
            elif type(elem).__name__ == "Edge":
                tailelem = elem.src
                headelem = elem.dst
                label.set_text("Properties of: " + tailelem.id.decode('utf-8') \
                       + " -> " + headelem.id.decode('utf-8'))
                self.render_properties_for_repfind_edge(
                    tailelem, headelem, abstract_graph)
            else:
                label.set_text("Properties of: (Nothing selected)")
            return False
        elif self.active_tool["type"] == "node":
            abstract_graph.add_node(self.active_tool["tool"])
            new_dot = abstract_graph.to_dot()
            rendered_graph.set_dotcode(new_dot)
        elif self.active_tool["type"] == "edge":
            elem = rendered_graph.get_element_by_coords(ev.x, ev.y)
            if elem is None:
                return
            # First click picks the tail, second click the head.
            if self.first_selected_node_for_edge is None:
                self.first_selected_node_for_edge = elem.id.decode('utf-8')
            else:
                second_selected_node_for_edge = elem.id.decode('utf-8')
                abstract_graph.add_edge(self.first_selected_node_for_edge,
                                        second_selected_node_for_edge)
                self.first_selected_node_for_edge = None
                new_dot = abstract_graph.to_dot()
                rendered_graph.set_dotcode(new_dot)
        elif self.active_tool["tool"] == "Delete":
            elem = rendered_graph.get_element_by_coords(ev.x, ev.y)
            if elem is None:
                # Fixed: clicking empty space used to crash on elem.id.
                return
            abstract_graph.delete_node(elem.id.decode('utf-8'))
            new_dot = abstract_graph.to_dot()
            rendered_graph.set_dotcode(new_dot)

    def OnButtonPressFindSDFG(self, widget, ev):
        self.button_press_in_replace_or_find(widget, ev, "find")

    def OnButtonPressReplaceSDFG(self, widget, ev):
        self.button_press_in_replace_or_find(widget, ev, "replace")
| nilq/baby-python | python |
# -*- coding: utf-8 -*-
class CreateFaceSetResult(object):
    """Result of create face set."""

    def __init__(self, content):
        """Parse the raw HTTP response body.

        :param content: JSON body returned by the create-face-set API.
        :type content: str
        """
        import json
        self.content_origin = content
        try:
            # The body is JSON, so parse it properly instead of eval()-ing
            # a string with true/false rewritten (which broke on null and
            # executed untrusted input).
            self.content_eval = json.loads(content)
        except ValueError:
            # Legacy fallback for non-strict-JSON payloads.
            # NOTE(review): eval() on external data is unsafe; kept only
            # for backward compatibility with the previous behavior.
            self.content_eval = eval(
                content.replace("true", "True").replace("false", "False"))

    def get_original_result(self):
        """Get original http content.

        :return: http content
        :rtype: str
        """
        return self.content_origin

    def get_eval_result(self):
        """Get parsed http content.

        :return: formatted http content, which is easy to use.
        :rtype: dict
        """
        return self.content_eval

    def get_face_set_info(self):
        """Get Face set info

        :rtype: dict
        """
        return self.content_eval.get("face_set_info")

    def get_create_date(self):
        """Get create_date

        :rtype: str
        """
        return self.content_eval.get("face_set_info").get("create_date")

    def get_face_set_capacity(self):
        """Get face_set_capacity

        :rtype: int
        """
        return self.content_eval.get("face_set_info").get("face_set_capacity")

    def get_face_set_id(self):
        """Get face_set_id

        :rtype: str
        """
        return self.content_eval.get("face_set_info").get("face_set_id")

    def get_face_number(self):
        """Get face_number

        :rtype: int
        """
        return self.content_eval.get("face_set_info").get("face_number")

    def get_external_fields(self):
        """Get external_fields

        :rtype: str
        """
        return self.content_eval.get("face_set_info").get("external_fields")

    def get_face_set_name(self):
        """Get face_set_name

        :rtype: str
        """
        return self.content_eval.get("face_set_info").get("face_set_name")
"""Geometric Operations"""
from typing import Dict, List, Tuple
import shapely.affinity
from shapely.geometry import Point
from util.files import ImageLayoutModel
def shift_label_points(label_data: Dict, x: int, y: int) -> Dict:
    """
    Translate every labelled circle in *label_data* by (x, y).

    :param label_data: label JSON containing a "shapes" list
    :param x: horizontal offset in pixels
    :param y: vertical offset in pixels
    :return: the mutated *label_data*
    """
    for entry in label_data["shapes"]:
        moved = Circle.from_json(entry["points"])
        moved.translate(x, y)
        entry["points"] = moved.to_json()
    return label_data
def is_shape_inside(shape: Dict, layout_model: ImageLayoutModel) -> bool:
    """
    Check whether the shape's first point (its center) lies inside the
    provided layout model.

    :param shape: labelme-style shape dict with a "points" list
    :param layout_model: model providing an ``is_inside(x, y)`` predicate
    :return: True if the shape's center is inside the model
    """
    center_x, center_y = shape["points"][0][0], shape["points"][0][1]
    return layout_model.is_inside(center_x, center_y)
class Circle:
    """Circular ROI Element defined by a center point and one point on the
    radius (both stored as shapely Points)."""

    # Fixed origin used for scaling, so scaled circles stay registered to
    # the image coordinate system.
    IMAGE_ORIGIN = Point(0, 0)

    def __init__(self, center: Tuple, point_on_radius: Tuple):
        """
        :param center: (x, y) circle center
        :param point_on_radius: (x, y) any point on the circle's perimeter
        """
        self.__centroid = Point(center[0], center[1])
        self.__radius_point = Point(point_on_radius[0], point_on_radius[1])

    @staticmethod
    def from_region_props(region) -> "Circle":
        """Build a circle from skimage-style region props (centroid is
        (row, col), hence the index swap)."""
        return Circle(
            (region.centroid[1], region.centroid[0]),
            (region.centroid[1] + region.equivalent_diameter / 2, region.centroid[0]),
        )

    @staticmethod
    def from_json(json_points) -> "Circle":
        """Build a circle from the two-point JSON form produced by to_json."""
        return Circle(json_points[0], json_points[1])

    def to_json(self) -> List:
        """Return [[cx, cy], [rx, ry]] — center followed by radius point.
        (Annotation corrected: this returns a list, not a dict.)"""
        return [
            [self.__centroid.x, self.__centroid.y],
            [self.__radius_point.x, self.__radius_point.y],
        ]

    @property
    def centroid(self) -> Point:
        """Center of the circle as a shapely Point (annotation corrected)."""
        return self.__centroid

    @property
    def radius(self) -> float:
        """Distance from the center to the stored radius point."""
        return self.__centroid.distance(self.__radius_point)

    @property
    def bounding_box(self) -> List:
        """Axis-aligned bounds [minx, miny, maxx, maxy] of the circle."""
        circle = self.centroid.buffer(self.centroid.distance(self.__radius_point))
        return list(circle.bounds)

    def translate(self, x: int, y: int) -> None:
        """Shift both defining points by (x, y)."""
        self.__centroid = shapely.affinity.translate(self.__centroid, x, y)
        self.__radius_point = shapely.affinity.translate(self.__radius_point, x, y)

    def scale(self, x_scale: float, y_scale: float) -> None:
        """Scale both defining points about IMAGE_ORIGIN."""
        self.__centroid = shapely.affinity.scale(
            self.__centroid, x_scale, y_scale, origin=self.IMAGE_ORIGIN
        )
        self.__radius_point = shapely.affinity.scale(
            self.__radius_point, x_scale, y_scale, origin=self.IMAGE_ORIGIN
        )

    def iou(self, circle: "Circle") -> float:
        """Intersection-over-union of this circle with another, computed on
        shapely polygon approximations of the discs."""
        union_area = (
            self.centroid.buffer(self.radius)
            .union(circle.centroid.buffer(circle.radius))
            .area
        )
        intersection_area = (
            self.centroid.buffer(self.radius)
            .intersection(circle.centroid.buffer(circle.radius))
            .area
        )
        return intersection_area / union_area
| nilq/baby-python | python |
from django.shortcuts import render
from django.http import HttpResponse
from django.http import JsonResponse
from django.core import serializers
from django.contrib.auth.models import Group
from api.groups.serializers import GroupSerializer
from api import auth
def _groups_callback(key, user, user_type, id_map=None):
    """Return the groups visible to *user* as a JSON array.

    Non-'Normal' users see all groups; normal users only their own.
    Each entry is {'id', 'n' (name), 'c' (placeholder color)}.

    :param key: API key passed through by auth.auth (unused here)
    :param user: authenticated Django user
    :param user_type: account type; 'Normal' restricts to own groups
    :param id_map: unused; kept for interface compatibility.  The default
        was a mutable ``{}`` (shared across calls) — now ``None``.
    """
    if user_type != 'Normal':
        groups = Group.objects.all().order_by('name')
    else:
        # Removed leftover debug print(user).
        groups = user.groups.all()
    data = [{'id': g.id, 'n': g.name, 'c': '#000000'} for g in groups]
    return JsonResponse(data, safe=False)
def groups(request):
    """HTTP endpoint: list the group records visible to the caller."""
    return auth.auth(request, _groups_callback, id_map={})
| nilq/baby-python | python |
# Generated by Django 3.1.5 on 2021-02-13 04:23
from django.db import migrations, models
class Migration(migrations.Migration):
    """Add opening-hours flags (24h, weekend) and a free-refill flag to the
    Distribuidor model and mirror them on its historical shadow model."""

    dependencies = [
        ('oxigeno', '0004_distribuidorpotencial_historicaldistribuidorpotencial'),
    ]

    operations = [
        # Flags on the live model.
        migrations.AddField(
            model_name='distribuidor',
            name='abre_24h',
            field=models.BooleanField(default=True),
        ),
        migrations.AddField(
            model_name='distribuidor',
            name='abre_fin_de_semana',
            field=models.BooleanField(default=True),
        ),
        migrations.AddField(
            model_name='distribuidor',
            name='recarga_gratis',
            field=models.BooleanField(default=False),
        ),
        # Same flags on the historical (audit) model.
        migrations.AddField(
            model_name='historicaldistribuidor',
            name='abre_24h',
            field=models.BooleanField(default=True),
        ),
        migrations.AddField(
            model_name='historicaldistribuidor',
            name='abre_fin_de_semana',
            field=models.BooleanField(default=True),
        ),
        migrations.AddField(
            model_name='historicaldistribuidor',
            name='recarga_gratis',
            field=models.BooleanField(default=False),
        ),
    ]
| nilq/baby-python | python |
"""
Defaults and globals.
Note, users will have to specify their own path to the Timbre Toolbox.
"""
import os
class RealPath:
    """
    Callable that resolves paths relative to this module's directory.
    """

    def __init__(self):
        # Directory containing this source file.
        self.here = os.path.dirname(__file__)

    def __call__(self, relative_path):
        joined = os.path.join(self.here, relative_path)
        return os.path.realpath(joined)
# Small value (numerical epsilon to avoid division by zero, etc.).
EPS = 1e-8
# Large value for data analysis: 2**53 - 1 is the largest integer below
# which every integer is exactly representable in an IEEE-754 double.
MAX_INTEGER = 2**53 - 1
# Sample rates (Hz): audio sample rate and pitch-track frame rate.
# NOTE(review): PITCH_RATE presumably means pitch frames per second — verify.
SAMPLE_RATE = 44100
PITCH_RATE = 200
# Relevant file paths, all resolved relative to this module.
real_path = RealPath()
ANA_PATH = real_path('../audio/ana')
SYN_PATH = real_path('../audio/syn')
DATA_PATH = real_path('../data')
TIMBRE_TOOLBOX_PATH = real_path('../matlab/timbretoolbox')
| nilq/baby-python | python |
# width:79
# height: 30
from ceefax import config
import os
from ceefax.page import PageManager
from ceefax.cupt import Screen
def is_page_file(f):
    """Return True if *f* names a page module: a real file in the pages
    directory whose name contains neither '_' nor 'pyc'."""
    full_path = os.path.join(config.pages_dir, f)
    return os.path.isfile(full_path) and "_" not in f and "pyc" not in f
def get_ceefax(test=None):
    """Return the process-wide Ceefax singleton, creating it on first use."""
    instance = Ceefax._instance
    if instance is None:
        instance = Ceefax(test)
        Ceefax._instance = instance
    return instance
class Ceefax:
    """Application object owning the screen and the page manager."""

    # Singleton slot filled in by get_ceefax().
    _instance = None

    def __init__(self, test=None):
        """Record the start time; a real page manager is swapped in by begin()."""
        self.start_time = config.now()
        self.test = test
        self.page_manager = DummyPM()

    def begin(self):
        """Open the screen and run the page loop inside its context."""
        with Screen() as screen:
            self.page_manager = PageManager(screen)
            self.start_loop()

    def start_loop(self):
        """Run the page manager's main loop."""
        self.page_manager.start_loop(self.test)

    def kill(self):
        """Stop the application by unwinding the main loop."""
        raise KeyboardInterrupt
class DummyPM:
    """Placeholder page manager used before the real one is constructed."""

    def sorted_pages(self):
        """No pages yet."""
        return []

    def index_pages(self):
        """No index pages yet."""
        return []
| nilq/baby-python | python |
import os
import pandas
import dateutil
from dfs.extdata.common.io import combine_dataframe_into_pickle_file
# Directory template for SportsBookReview odds files; '{sport}' is filled
# in per call.  NOTE(review): GLOBAL_ROOT is not defined by this module's
# visible imports — presumably injected by a project-wide config; verify.
sbr_data_dir = os.path.join(GLOBAL_ROOT, 'db/{sport}/odds/sportsbookreview/')
# Find the odds file for a specific day
def get_gameday_filename(sport, game_day):
    """Return the odds-file path for *sport* on *game_day*.

    The file is named after the day's ISO date inside the sport's
    sportsbookreview directory.
    """
    sport_dir = sbr_data_dir.format(sport=sport)
    return os.path.join(sport_dir, game_day.isoformat())
def save_sbr_odds_info(sport, game_day, dataframe):
    """
    Persist odds data for one game day into its pickle file.

    :param str sport: sport the odds belong to
    :param datetime.datetime game_day: day these odds are from
    :param pandas.DataFrame dataframe: dataframe of odds
    :return:
    """
    target = get_gameday_filename(sport, game_day)
    combine_dataframe_into_pickle_file(dataframe, target)
def load_sbr_odds_info(sport, game_day):
    """
    Load previously saved dataframes of odds information.

    :param str sport: sport to load odds info for
    :param str game_day: day to load odds info for (any dateutil-parseable
        date string)
    :return pandas.DataFrame: odds info for that day
    """
    game_date = dateutil.parser.parse(game_day)
    df = pandas.read_pickle(get_gameday_filename(sport, game_date))
    return df
| nilq/baby-python | python |
from os import environ
from datetime import datetime, timedelta
from flask import Blueprint, request, jsonify
from libs.connectors import STORAGE_CONNECTOR
from libs.utils.formatter import format_v1, format_v2
from libs.utils.mockup_data import get_mockup_data
# Blueprint registered by the app factory; all EHPAD routes attach to it.
ephad_bp = Blueprint("ephad_bp", __name__)
# BLUEPRINT FUNCTIONS
# ACTIVATE_MOCKUP env var ("0"/"1") switches fetch_data to canned data.
is_mockup_activated = bool(int(environ.get("ACTIVATE_MOCKUP", 0)))
def fetch_data(filename_format):
    """Return the latest dataset as a DataFrame.

    Serves mock data when ACTIVATE_MOCKUP is set; otherwise loads the most
    recent storage file whose name matches *filename_format* (';'-separated).
    """
    if is_mockup_activated:
        return get_mockup_data()
    latest = STORAGE_CONNECTOR.get_last_filename_version(filename_format)
    return STORAGE_CONNECTOR.open_as_dataframe(latest, sep=";")
def filter_data(df, date_query):
    """Apply *date_query* (a pandas query string) to *df* and return the
    matching rows as a list of record dicts."""
    matching = df.query(date_query)
    return matching.to_dict(orient="records")
@ephad_bp.route("/api/get/data/ehpad/<yyyy>/<mm>/<dd>", methods=["GET"])
def get_data_ehpad(yyyy=None, mm=None, dd=None):
# Get the request arguments
yyyy = request.args.get("yyyy", yyyy)
mm = request.args.get("mm", mm)
dd = request.args.get("dd", dd)
# Check if date is not sampled genuinely
endpoint_has_no_date = (yyyy is None) or (mm is None) or (dd is None)
if endpoint_has_no_date:
# Return Error 400 BAD REQUEST
return jsonify({"message": "error", "type": "bad_request"}), 400
# Fetch Data from Mockup/Storage
df = fetch_data(filename_format="covid-19-with-ephad_")
df['jour'] = df['jour'].apply(lambda x: '-'.join(x.split('/')[::-1]))
df = df[df['dc ehpad'].notnull()][['jour', 'dc ehpad', 'dc ehpad quot']]
df.columns = ['jour', 'total_deaths_ehpad', 'new_deaths_ehpad']
# Filter Data with regard to filters
data = filter_data(
df,
date_query=f'jour == "{yyyy}-{mm}-{dd}"'
)
return jsonify(data), 200
@ephad_bp.route("/api/get/until/data/ehpad/<yyyy>/<mm>/<dd>", methods=["GET"])
def get_until_ehpad(yyyy=None, mm=None, dd=None):
# Get the request arguments
yyyy = request.args.get("yyyy", yyyy)
mm = request.args.get("mm", mm)
dd = request.args.get("dd", dd)
# Check if date is not sampled genuinely
endpoint_has_no_date = (yyyy is None) or (mm is None) or (dd is None)
if endpoint_has_no_date:
# Return Error 400 BAD REQUEST
return jsonify({"message": "error", "type": "bad_request"}), 400
# Fetch Data from Mockup/Storage
df = fetch_data(filename_format="covid-19-with-ephad_")
df['jour'] = df['jour'].apply(lambda x: '-'.join(x.split('/')[::-1]))
df = df[df['dc ehpad'].notnull()][['jour', 'dc ehpad', 'dc ehpad quot']]
df.columns = ['jour', 'total_deaths_ehpad', 'new_deaths_ehpad']
# Compute Start Date
N_DAYS = 8
from_date = (
datetime.strptime(f"{yyyy}-{mm}-{dd}", "%Y-%m-%d") - timedelta(days=N_DAYS)
).strftime("%Y-%m-%d")
# Filter Data with regard to filters
data = filter_data(
df,
date_query=f'"{from_date}" < jour <= "{yyyy}-{mm}-{dd}"',
)
return jsonify(data), 200
# Get Last Update from data sources
@ephad_bp.route("/get/last_updated/ehpad", methods=["GET"])
def get_last_updated_ehpad():
    """Return the most recent (ISO) date present in the EHPAD dataset."""
    df = fetch_data(filename_format="covid-19-with-ephad_")
    iso_dates = df['jour'].apply(lambda d: '-'.join(d.split('/')[::-1]))
    return jsonify({"last_updated": iso_dates.max()}), 200
"""
Easing.
Move the mouse across the screen and the symbol will follow.
Between drawing each frame of the animation, the program
calculates the difference between the position of the
symbol and the cursor. If the distance is larger than
1 pixel, the symbol moves part of the distance (0.05) from its
current position toward the cursor.
"""
# Current position of the symbol; updated every frame in draw().
x = 0
y = 0
# Fraction of the remaining distance to the cursor covered per frame.
easing = 0.05
def setup():
    """Processing entry point: create the canvas and disable outlines."""
    size(640, 360)
    noStroke()
def draw():
    """Each frame, ease the ellipse 5% of the way toward the mouse."""
    global x, y
    background(51)
    dx = mouseX - x
    if abs(dx) > 1:
        x += dx * easing
    dy = mouseY - y
    if abs(dy) > 1:
        y += dy * easing
    ellipse(x, y, 66, 66)
| nilq/baby-python | python |
# SPDX-FileCopyrightText: 2019-2021 REFITT Team
# SPDX-License-Identifier: Apache-2.0
"""Data broker client integration tests."""
# internal libs
from refitt.database.model import ObjectType, Object, ObservationType, Observation, Alert
from tests.unit.test_data.test_broker.test_alert import MockAlert
from tests.unit.test_data.test_broker.test_client import MockClient
class TestMockClient:
    """Integrations for data broker client interface."""

    def test_stream_to_database(self) -> None:
        """Stream alerts from client to database."""
        credentials = ('key', 'secret')
        with MockClient(topic='topic', credentials=credentials) as stream:
            for count, alert in enumerate(stream):
                stored = alert.to_database()
                # Round-tripping through the database must be lossless.
                assert Alert.from_id(stored.id) == stored
                assert stored.observation.id == stored.observation_id
                if count > 100:
                    break
| nilq/baby-python | python |
# Auto-generated by generate_passthrough_modules.py - do not modify
from .v0_2.raw_nodes import *
| nilq/baby-python | python |
from sys import exit
from os import remove
import webbrowser
from operations import solution
from operations import trigerrcheck
from operations import logsolve
from operations import q
from PyQt5.QtWidgets import QApplication,QLabel,QWidget,QGridLayout,QLineEdit,QPushButton,QCheckBox,QSlider
from PyQt5 import QtGui
from PyQt5.Qt import Qt
import matplotlib.pyplot as pp
from numpy import linspace
# Link opened by the Help button.
HELP_LINK = 'https://github.com/anshmehta7x/calc/blob/master/README.md'

# Application object and main calculator window.
ap = QApplication([])
win = QWidget()
# Read the stylesheet with a context manager (the previous open().read()
# never closed the file handle).
with open('styles/parwindowstyling.css') as _css:
    win.setStyleSheet(_css.read())
win.setWindowTitle("Calculator")
win.setWindowIcon(QtGui.QIcon('assets/icon.png'))
lay = QGridLayout()
win.setLayout(lay)
#win.setGeometry(150,150,400,600)
#buttons
# Result/entry display.
line = QLineEdit()
# Digit keys.
one = QPushButton("1")
two = QPushButton("2")
three = QPushButton("3")
four = QPushButton("4")
five = QPushButton("5")
six = QPushButton("6")
seven = QPushButton("7")
eight = QPushButton("8")
nine = QPushButton("9")
zero = QPushButton("0")
dot = QPushButton(".")
# Operator keys.
plus = QPushButton("+")
minus = QPushButton("-")
multiply = QPushButton("x")
divide = QPushButton("÷")
equal = QPushButton("=")
power = QPushButton("^")
clear = QPushButton("CLR")
# Sub-window launchers (trailing underscore avoids shadowing builtin help).
trig = QPushButton("Trig")
log = QPushButton("Log")
quad = QPushButton("Quadratic")
help_ = QPushButton("Help")
#coloring
# Read each stylesheet once (the original re-opened the CSS file for every
# widget and never closed the handles) and apply it to its widget group.
with open("styles/styling.css") as _css:
    _digit_style = _css.read()
with open("styles/altstyling.css") as _css:
    _operator_style = _css.read()
with open("styles/parwindowstyling.css") as _css:
    _menu_style = _css.read()
# Digits, dot and the display share the base style.
for _widget in (one, two, three, four, five, six, seven, eight, nine,
                zero, dot, line):
    _widget.setStyleSheet(_digit_style)
# Operator keys use the alternate style.
for _widget in (plus, minus, multiply, divide, equal, power, clear):
    _widget.setStyleSheet(_operator_style)
# Sub-window launchers use the parent-window style.
for _widget in (trig, log, quad, help_):
    _widget.setStyleSheet(_menu_style)
#row 1: the display spans two rows and all four columns
lay.addWidget(line,1,1,2,4)
#row 2: digits 1-3 plus the + key
lay.addWidget(one,3,1)
lay.addWidget(two,3,2)
lay.addWidget(three,3,3)
lay.addWidget(plus,3,4)
#row 3: digits 4-6 plus the - key
lay.addWidget(four,4,1)
lay.addWidget(five,4,2)
lay.addWidget(six,4,3)
lay.addWidget(minus,4,4)
#row 4: digits 7-9 plus the x key
lay.addWidget(seven,5,1)
lay.addWidget(eight,5,2)
lay.addWidget(nine,5,3)
lay.addWidget(multiply,5,4)
#row 5: power, dot, 0 and divide
lay.addWidget(power,6,1)
lay.addWidget(dot,6,2)
lay.addWidget(zero,6,3)
lay.addWidget(divide,6,4)
#row 6: wide = key and clear
lay.addWidget(equal,7,1,1,3)
lay.addWidget(clear,7,4)
#row 7: sub-window launchers (each spans two columns)
lay.addWidget(trig,8,1,1,2)
lay.addWidget(log,8,3,1,2)
#row 8: more launchers
lay.addWidget(quad,9,1,1,2)
lay.addWidget(help_,9,3,1,2)
#trig window
class trigwin(QWidget):
    """Pop-up window providing trigonometric functions on a small keypad."""

    def __init__(self):
        """Build the full widget tree immediately on construction."""
        super().__init__()
        self.makeui()

    def inv(self):
        """Toggle inverse mode and relabel the six function buttons."""
        if self.invert.isChecked() == True:
            self.is_inverse = True
            self.sin.setText("sin⁻¹")
            self.cos.setText("cos⁻¹")
            self.tan.setText("tan⁻¹")
            self.csc.setText("csc⁻¹")
            self.sec.setText("sec⁻¹")
            self.cot.setText("cot⁻¹")
        else:
            self.is_inverse = False
            self.sin.setText("sin")
            self.cos.setText("cos")
            self.tan.setText("tan")
            self.csc.setText("csc")
            self.sec.setText("sec")
            self.cot.setText("cot")

    def angle_mode(self):
        """Sync self.mode and the label with the degrees/radians slider."""
        if self.degrad.value() == 2:
            self.mode = False
            self.switchlabel.setText("Rad")
        elif self.degrad.value() == 1:
            self.mode = True
            self.switchlabel.setText("Deg")

    def b(self, numbr):
        """Append the pressed key's value to the input line."""
        x = str(numbr)
        y = self.line.text()
        y += x
        self.line.setText(y)

    def beq(self, op):
        """Evaluate function *op* on the current input and show the result.

        trigerrcheck (from operations) also receives the inverse flag and
        the degrees/radians mode.
        """
        x = trigerrcheck(self.line.text(),op,self.is_inverse,self.mode)
        self.line.setText(str(x))

    def makeui(self):
        """Create widgets, lay them out, and connect all signals."""
        self.setWindowTitle("Trigonometry")
        self.setWindowIcon(QtGui.QIcon('assets/trigicon.png'))
        # Whether the inverse functions are currently selected.
        self.is_inverse = False
        '''True is degrees mode,
        False is Radians'''
        self.mode = True
        # NOTE(review): open().read() leaves the file handle unclosed.
        self.setStyleSheet(open('styles/parwindowstyling.css').read())
        self.l = QGridLayout()
        self.one = QPushButton("1")
        self.two = QPushButton("2")
        self.three = QPushButton("3")
        self.four = QPushButton("4")
        self.five = QPushButton("5")
        self.six = QPushButton("6")
        self.seven = QPushButton("7")
        self.eight = QPushButton("8")
        self.nine = QPushButton("9")
        self.zero = QPushButton("0")
        self.dot = QPushButton(".")
        self.pi = QPushButton("π")
        self.line = QLineEdit()
        self.sin = QPushButton("sin")
        self.cos = QPushButton("cos")
        self.tan = QPushButton("tan")
        self.csc = QPushButton("csc")
        self.sec = QPushButton("sec")
        self.cot = QPushButton("cot")
        self.invert = QCheckBox("Invert")
        self.degrad = QSlider(Qt.Horizontal)
        self.switchlabel = QLabel("Deg")
        #slider for degrees/radians: 1 = degrees, 2 = radians
        self.degrad.setMaximum(2)
        self.degrad.setMinimum(1)
        self.degrad.setValue(1)
        #colors
        self.sin.setStyleSheet(open("styles/altstyling.css").read())
        self.cos.setStyleSheet(open("styles/altstyling.css").read())
        self.tan.setStyleSheet(open("styles/altstyling.css").read())
        self.csc.setStyleSheet(open("styles/altstyling.css").read())
        self.sec.setStyleSheet(open("styles/altstyling.css").read())
        self.cot.setStyleSheet(open("styles/altstyling.css").read())
        self.invert.setStyleSheet(open("styles/altstyling.css").read())
        self.switchlabel.setStyleSheet(open("styles/altstyling.css").read())
        self.switchlabel.setAlignment(Qt.AlignCenter)
        # Layout: display on top, digit grid, function grid, mode row.
        self.l.addWidget(self.line,1,1,1,3)
        self.l.addWidget(self.one,2,1)
        self.l.addWidget(self.two,2,2)
        self.l.addWidget(self.three,2,3)
        self.l.addWidget(self.four,3,1)
        self.l.addWidget(self.five,3,2)
        self.l.addWidget(self.six,3,3)
        self.l.addWidget(self.seven,4,1)
        self.l.addWidget(self.eight,4,2)
        self.l.addWidget(self.nine,4,3)
        self.l.addWidget(self.dot,5,1)
        self.l.addWidget(self.zero,5,2)
        self.l.addWidget(self.pi,5,3)
        self.l.addWidget(self.sin,6,1)
        self.l.addWidget(self.cos,6,2)
        self.l.addWidget(self.tan,6,3)
        self.l.addWidget(self.csc,7,1)
        self.l.addWidget(self.sec,7,2)
        self.l.addWidget(self.cot,7,3)
        self.l.addWidget(self.invert,8,1)
        self.l.addWidget(self.switchlabel,8,3)
        self.l.addWidget(self.degrad,8,2)
        #connections
        self.invert.toggled.connect(self.inv)
        self.degrad.valueChanged.connect(self.angle_mode)
        self.one.clicked.connect(lambda: self.b(1))
        self.two.clicked.connect(lambda: self.b(2))
        self.three.clicked.connect(lambda: self.b(3))
        self.four.clicked.connect(lambda: self.b(4))
        self.five.clicked.connect(lambda: self.b(5))
        self.six.clicked.connect(lambda: self.b(6))
        self.seven.clicked.connect(lambda: self.b(7))
        self.eight.clicked.connect(lambda: self.b(8))
        self.nine.clicked.connect(lambda: self.b(9))
        self.zero.clicked.connect(lambda: self.b(0))
        self.dot.clicked.connect(lambda: self.b("."))
        self.pi.clicked.connect(lambda: self.b("π"))
        self.sin.clicked.connect(lambda: self.beq("sin"))
        self.cos.clicked.connect(lambda: self.beq("cos"))
        self.tan.clicked.connect(lambda: self.beq("tan"))
        self.csc.clicked.connect(lambda: self.beq("csc"))
        self.sec.clicked.connect(lambda: self.beq("sec"))
        self.cot.clicked.connect(lambda: self.beq("cot"))
        #set layout
        self.setLayout(self.l)
#log window
class logwin(QWidget):
    """Pop-up window for computing logarithms of arbitrary base."""

    def __init__(self):
        """Build the full widget tree immediately on construction."""
        super().__init__()
        self.makeui()
    #functions
    def switch(self):
        """Toggle whether the keypad types into the input or base field."""
        if self.inpbase == True:
            self.inpORbase.setText("base")
            self.inpORbase.setStyleSheet(open("styles/styling.css").read())
            self.inpbase = False
        elif self.inpbase == False:
            self.inpORbase.setText("input")
            self.inpORbase.setStyleSheet(open("styles/altstyling.css").read())
            self.inpbase = True

    def c(self, num):
        """Append the pressed key's value to the active field."""
        numb = str(num)
        if self.inpbase == True:
            x = self.inp.text()
            x += numb
            self.inp.setText(x)
        elif self.inpbase == False:
            x = self.base.text()
            x += numb
            self.base.setText(x)

    def out(self):
        """Compute log(input, base) via operations.logsolve and display it."""
        i = self.inp.text()
        b = self.base.text()
        to_set = logsolve(i,b)
        self.output.setText(str(to_set))

    def makeui(self):
        """Create widgets, lay them out, and connect signals."""
        # Tracks the keypad target: True types into input, False into base.
        self.inpbase = True
        ''' True is input
        False is base'''
        self.setWindowTitle("Logarithms")
        # NOTE(review): open().read() leaves the file handle unclosed.
        self.setStyleSheet(open('styles/parwindowstyling.css').read())
        self.setWindowIcon(QtGui.QIcon("assets/logicon.png"))
        self.l = QGridLayout()
        self.setLayout(self.l)
        #input fields
        self.inplabel = QLabel("Input:")
        self.inp = QLineEdit()
        self.baselabel = QLabel("Base:")
        self.base = QLineEdit()
        self.outputlabel = QLabel("Output:")
        self.output = QLabel()
        #numbers
        self.one = QPushButton("1")
        self.two = QPushButton("2")
        self.three = QPushButton("3")
        self.four = QPushButton("4")
        self.five = QPushButton("5")
        self.six = QPushButton("6")
        self.seven = QPushButton("7")
        self.eight = QPushButton("8")
        self.nine = QPushButton("9")
        self.zero = QPushButton("0")
        self.dot = QPushButton(".")
        self.e = QPushButton("e")
        #equal
        self.equal = QPushButton("=")
        self.equal.setStyleSheet(open("styles/altstyling.css").read())
        #switch
        self.inpORbase = QPushButton("input")
        self.inpORbase.setStyleSheet(open("styles/altstyling.css").read())
        #layout
        self.l.addWidget(self.inplabel,1,1)
        self.l.addWidget(self.inp,1,2,1,2)
        self.l.addWidget(self.baselabel,2,1)
        self.l.addWidget(self.base,2,2,1,2)
        self.l.addWidget(self.outputlabel,3,1)
        self.l.addWidget(self.output,3,2,1,2)
        self.l.addWidget(self.one,4,1)
        self.l.addWidget(self.two,4,2)
        self.l.addWidget(self.three,4,3)
        self.l.addWidget(self.four,5,1)
        self.l.addWidget(self.five,5,2)
        self.l.addWidget(self.six,5,3)
        self.l.addWidget(self.seven,6,1)
        self.l.addWidget(self.eight,6,2)
        self.l.addWidget(self.nine,6,3)
        self.l.addWidget(self.dot,7,1)
        self.l.addWidget(self.zero,7,2)
        self.l.addWidget(self.e,7,3)
        self.l.addWidget(self.equal,8,1,1,2)
        self.l.addWidget(self.inpORbase,8,3)
        #connections
        # NOTE(review): only the 'e' key is wired to self.c; the digit and
        # dot buttons are never connected (compare trigwin.makeui), so they
        # do nothing — likely missing connections; verify before fixing in
        # case they are connected elsewhere.
        self.inpORbase.clicked.connect(lambda: self.switch())
        self.equal.clicked.connect(lambda: self.out())
        self.e.clicked.connect(lambda: self.c("e"))
######################## logwin end
class quadwin(QWidget):
    """Pop-up window that solves and plots a quadratic a*x^2 + b*x + c."""
    def __init__(self):
        super().__init__()
        self.setWindowTitle("Quadratic Equations")
        self.l = QGridLayout()
        self.setLayout(self.l)
        self.setStyleSheet(open('styles/parwindowstyling.css').read())
        self.setWindowIcon(QtGui.QIcon('assets/quadicon.png'))
        self.makeui()
    def operation(self):
        """Read a/b/c, plot the parabola around its vertex, then show the result UI.

        NOTE(review): the enclosing bare ``except: pass`` silently swallows every
        error (non-numeric input, a == 0 division by zero, matplotlib failures),
        and the inner ``except: pass`` can leave A/B/C unbound before they are
        used below -- consider narrowing to ValueError/ZeroDivisionError and
        reporting the problem to the user.
        """
        try:
            a = self.abox.text()
            b = self.bbox.text()
            c = self.cbox.text()
            # Explicit signs so the equation renders as "a 𝑥² ± |b| 𝑥 ± |c|".
            if float(b) < 0:
                bsign = "-"
            else:
                bsign = "+"
            if float(c) < 0:
                csign = "-"
            else:
                csign = "+"
            stat = str(float(a)) + " 𝑥² "+ bsign + " " + str(abs(float(b))) + " 𝑥 "+ csign + " " + str(abs(float(c)))
            try:
                A = float(a)
                B = float(b)
                C = float(c)
            except:
                pass
            # Vertex x-coordinate of the parabola; the plotting window spans
            # 2.5x the vertex offset on each side (degenerates to one point
            # when the vertex is at x = 0).
            maxmin = -B/(2*A)
            front = maxmin + 2.5*(maxmin)
            back = maxmin - 2.5*(maxmin)
            x = linspace(back,front,20)
            y = A*(x**2) + B*x + C
            fig = pp.figure(figsize=(8,5))
            fig.set_facecolor('#66cdaa')
            ax = pp.axes()
            ax.set_facecolor('#66cdaa')
            pp.plot(x,y,'#006400')
            # Render to a temporary file, load it into a pixmap, then delete it.
            pp.savefig('sus.png')
            self.statement = QLabel(stat)
            self.maximaminima = QLabel("Maxima/minima at: x= {}".format(str(maxmin)))
            self.statement.setAlignment(Qt.AlignCenter)
            self.statement.setStyleSheet(open('styles/styling.css').read())
            self.output = QLabel(q(a,b,c))
            self.graph = QLabel("")
            pmap = QtGui.QPixmap('sus.png')
            remove('sus.png')
            self.graph.setPixmap(pmap)
            self.rst = QPushButton("Reset")
            self.outui()
        except:
            pass
    def clr(self):
        """Clear all three coefficient boxes."""
        self.abox.setText("")
        self.bbox.setText("")
        self.cbox.setText("")
    def setbox(self,mode):
        """Select which coefficient box ('a', 'b' or 'c') the keypad writes into."""
        if mode == "a":
            self.mode = self.abox
        elif mode == "b":
            self.mode = self.bbox
        else:
            self.mode = self.cbox
    def d(self,numbr):
        """Append a keypad character to the currently selected coefficient box."""
        x = str(numbr)
        y = self.mode.text()
        y += x
        self.mode.setText(y)
    def makeui(self):
        """Build (or rebuild, after Reset) the coefficient form and keypad."""
        self.resize(200,300)
        # Layout may not exist yet on the very first call; ignore that case.
        try:
            self.clearLayout(self.l)
        except:
            pass
        #fields
        self.toplabel = QLabel("a𝑥²+b𝑥+c")
        self.toplabel.setAlignment(Qt.AlignCenter)
        self.guidelabel = QLabel("Enter value for:")
        self.alabel = QPushButton(" a :")
        self.abox = QLineEdit()
        self.blabel = QPushButton(" b :")
        self.bbox = QLineEdit()
        self.clabel = QPushButton(" c :")
        self.cbox = QLineEdit()
        self.equal = QPushButton("=")
        self.clear = QPushButton("CLR")
        self.equal.setStyleSheet(open("styles/altstyling.css").read())
        self.clear.setStyleSheet(open("styles/altstyling.css").read())
        self.alabel.setStyleSheet(open("styles/styling.css").read())
        self.blabel.setStyleSheet(open("styles/styling.css").read())
        self.clabel.setStyleSheet(open("styles/styling.css").read())
        #default config: keypad writes into the 'a' box until a label is clicked
        self.mode = self.abox
        #numbers
        self.one = QPushButton("1")
        self.two = QPushButton("2")
        self.three = QPushButton("3")
        self.four = QPushButton("4")
        self.five = QPushButton("5")
        self.six = QPushButton("6")
        self.seven = QPushButton("7")
        self.eight = QPushButton("8")
        self.nine = QPushButton("9")
        self.zero = QPushButton("0")
        self.dot = QPushButton(".")
        #layout
        self.l.addWidget(self.toplabel,1,1,1,3)
        self.l.addWidget(self.guidelabel,2,1,1,3)
        self.l.addWidget(self.alabel,3,1)
        self.l.addWidget(self.abox,3,2,1,2)
        self.l.addWidget(self.blabel,4,1)
        self.l.addWidget(self.bbox,4,2,1,2)
        self.l.addWidget(self.clabel,5,1)
        self.l.addWidget(self.cbox,5,2,1,2)
        self.l.addWidget(self.equal,6,1,1,3)
        self.l.addWidget(self.one,8,1)
        self.l.addWidget(self.two,8,2)
        self.l.addWidget(self.three,8,3)
        self.l.addWidget(self.four,9,1)
        self.l.addWidget(self.five,9,2)
        self.l.addWidget(self.six,9,3)
        self.l.addWidget(self.seven,10,1)
        self.l.addWidget(self.eight,10,2)
        self.l.addWidget(self.nine,10,3)
        self.l.addWidget(self.dot,11,1)
        self.l.addWidget(self.zero,11,2)
        self.l.addWidget(self.clear,11,3)
        #connections
        self.clear.clicked.connect(self.clr)
        self.equal.clicked.connect(lambda: self.operation())
        self.abox.returnPressed.connect(lambda: self.operation())
        self.bbox.returnPressed.connect(lambda: self.operation())
        self.cbox.returnPressed.connect(lambda: self.operation())
        self.alabel.clicked.connect(lambda: self.setbox("a"))
        self.blabel.clicked.connect(lambda: self.setbox("b"))
        self.clabel.clicked.connect(lambda: self.setbox("c"))
        self.one.clicked.connect(lambda: self.d(1))
        self.two.clicked.connect(lambda: self.d(2))
        self.three.clicked.connect(lambda: self.d(3))
        self.four.clicked.connect(lambda: self.d(4))
        self.five.clicked.connect(lambda: self.d(5))
        self.six.clicked.connect(lambda: self.d(6))
        self.seven.clicked.connect(lambda: self.d(7))
        self.eight.clicked.connect(lambda: self.d(8))
        self.nine.clicked.connect(lambda: self.d(9))
        self.zero.clicked.connect(lambda: self.d(0))
        self.dot.clicked.connect(lambda: self.d("."))
    def outui(self):
        """Swap the layout for the results view (equation, plot, vertex, Reset)."""
        self.clearLayout(self.l)
        self.l.addWidget(self.statement,1,1,1,3)
        self.l.addWidget(self.output,3,1)
        self.l.addWidget(self.graph,2,1,1,3)
        self.l.addWidget(self.maximaminima,3,2)
        self.l.addWidget(self.rst,3,3)
        self.rst.clicked.connect(self.makeui)
    def clearLayout(self,layout):
        """Recursively delete every widget and sub-layout contained in *layout*."""
        if layout is not None:
            while layout.count():
                child = layout.takeAt(0)
                if child.widget() is not None:
                    child.widget().deleteLater()
                elif child.layout() is not None:
                    self.clearLayout(child.layout())
#functions
def a(n):
    """Append *n* (a digit or operator string) to the main display line."""
    line.setText(line.text() + str(n))
def clr():
    """Clear the main display line."""
    line.setText("")
def repl():
    """Normalize typed operators in the display: '/' -> '÷' and '*' -> 'x'."""
    line.setText(line.text().replace("/", "÷").replace("*", "x"))
def eq():
    """Evaluate the displayed expression via solution() and show the result."""
    line.setText(str(solution(line.text())))
def tr():
    """Open the trigonometry window.

    The global reference keeps the window object alive -- a local would be
    garbage-collected as soon as this handler returns, closing the window.
    """
    global polnareff
    polnareff = trigwin()
    polnareff.show()
def lo():
    """Open the logarithm window (global ref keeps it from being garbage-collected)."""
    global kakyoin
    kakyoin = logwin()
    kakyoin.show()
def qu():
    """Open the quadratic-equation window (global ref keeps it from being garbage-collected)."""
    global speedwagon
    speedwagon = quadwin()
    speedwagon.show()
def helper():
    """Open the online help page in a new browser tab."""
    webbrowser.open(HELP_LINK,new=2)
#connections: wire the main calculator's widgets to the handlers above.
# Digits/operators append via a(); textChanged re-normalizes '/' and '*' glyphs.
line.textChanged.connect(repl)
one.clicked.connect(lambda: a(1))
two.clicked.connect(lambda: a(2))
three.clicked.connect(lambda: a(3))
four.clicked.connect(lambda: a(4))
five.clicked.connect(lambda: a(5))
six.clicked.connect(lambda: a(6))
seven.clicked.connect(lambda: a(7))
eight.clicked.connect(lambda: a(8))
nine.clicked.connect(lambda: a(9))
zero.clicked.connect(lambda: a(0))
dot.clicked.connect(lambda: a("."))
plus.clicked.connect(lambda: a("+"))
minus.clicked.connect(lambda: a("-"))
multiply.clicked.connect(lambda: a("x"))
divide.clicked.connect(lambda: a("÷"))
power.clicked.connect(lambda: a("^"))
equal.clicked.connect(eq)
clear.clicked.connect(clr)
line.returnPressed.connect(eq)
# Buttons that open the secondary tool windows and the online help.
trig.clicked.connect(tr)
log.clicked.connect(lo)
quad.clicked.connect(qu)
help_.clicked.connect(helper)
# Show the main window and hand control to the Qt event loop.
if __name__ == "__main__":
    win.show()
    exit(ap.exec_())
# Test fixture: a JSON-LD DataResource descriptor whose `fk_relation` table
# declares a foreign key into `fk_table`, but the referenced `fk_table` is
# deliberately absent from `dataDictionary` (hence "missing foreign key table").
# NOTE(review): "ignore_validation": 1 presumably tells the consumer to skip
# schema validation for this fixture -- confirm against the test harness.
_MISSING_FOREIGN_KEY_TABLE = {
    "ignore_validation": 1,
    "@context": ["https://schema.org", {"bh": "https://schema.brighthive.io/"}],
    "@type": "bh:DataResource",
    "@id": "https://mydatatrust.brighthive.io/dr1",
    "name": "2020 Census Data",
    "description": "Description of data resource",
    "ownerOrg": [
        {
            "@type": "Organization",
            "@id": "#brighthive-org",
            "name": "BrightHive",
            "contactPoint": [
                {
                    "@type": "ContactPoint",
                    "@id": "#matt",
                    "name": "Matt Gee",
                    "telephone": "555-555-5555",
                    "email": "matt@company.io",
                    "contactType": "Developer",
                }
            ],
        }
    ],
    "published": True,
    "dateCreated": "date",
    "dateUpdated": "date",
    "privacyRegulations": ["https://datatrust.org/privacyregulations/HIPAA"],
    "category": "https://datatrust.org/catagory/external",
    "url": "https://mydatatrust.brighthive.io/dr1",
    "data": {
        "dataDictionary": [
            {
                "@id": "https://mydatatrust.brighthive.io/dr1/fk_relation",
                "@type": "bh:table",
                "name": "fk_relation",
                "tableSchema": {
                    "fields": [
                        {
                            "name": "id",
                            "type": "integer",
                            "title": "id",
                            "constraints": {},
                        },
                        {
                            "name": "string",
                            "title": "string",
                            "type": "string",
                            "constraints": {},
                        },
                    ],
                    "foreignKeys": [
                        {
                            "fields": "fk_field",
                            "reference": {"resource": "fk_table", "fields": "id"},
                        }
                    ],
                    "primaryKey": "id",
                    "missingValues": [],
                },
            }
        ],
        "databaseSchema": "url-to-something",
        "databaseType": "https://datatrust.org/databaseType/rdbms",
    },
    "api": {"apiType": "https://datatrust.org/apiType/rest", "apiSpec": {}},
}
# Test fixture: the valid counterpart of _MISSING_FOREIGN_KEY_TABLE.
# Here `fk_relation` actually declares the `fk_field` column, and the
# foreign-key target table `fk_table` is present in `dataDictionary`.
_VALID_FOREIGN_KEY = {
    "@context": ["https://schema.org", {"bh": "https://schema.brighthive.io/"}],
    "@type": "bh:DataResource",
    "@id": "https://mydatatrust.brighthive.io/dr1",
    "name": "2020 Census Data",
    "description": "Description of data resource",
    "ownerOrg": [
        {
            "@type": "Organization",
            "@id": "#brighthive-org",
            "name": "BrightHive",
            "contactPoint": [
                {
                    "@type": "ContactPoint",
                    "@id": "#matt",
                    "name": "Matt Gee",
                    "telephone": "555-555-5555",
                    "email": "matt@company.io",
                    "contactType": "Developer",
                }
            ],
        }
    ],
    "published": True,
    "dateCreated": "date",
    "dateUpdated": "date",
    "privacyRegulations": ["https://datatrust.org/privacyregulations/HIPAA"],
    "category": "https://datatrust.org/catagory/external",
    "url": "https://mydatatrust.brighthive.io/dr1",
    "data": {
        "dataDictionary": [
            {
                "@id": "https://mydatatrust.brighthive.io/dr1/fk_relation",
                "@type": "bh:table",
                "name": "fk_relation",
                "tableSchema": {
                    "fields": [
                        {
                            "name": "id",
                            "type": "integer",
                            "title": "id",
                            "constraints": {},
                        },
                        {
                            "name": "string",
                            "title": "string",
                            "type": "string",
                            "constraints": {},
                        },
                        {
                            "name": "fk_field",
                            "type": "integer",
                            "title": "fk_field",
                            "constraints": {},
                        },
                    ],
                    "foreignKeys": [
                        {
                            "fields": "fk_field",
                            "reference": {"resource": "fk_table", "fields": "id"},
                        }
                    ],
                    "primaryKey": "id",
                    "missingValues": [],
                },
            },
            {
                "@id": "https://mydatatrust.brighthive.io/dr1/fk_table",
                "@type": "bh:table",
                "name": "fk_table",
                "tableSchema": {
                    "fields": [
                        {
                            "name": "id",
                            "type": "integer",
                            "title": "id",
                            "constraints": {},
                        }
                    ],
                    "primaryKey": "id",
                    "missingValues": [],
                },
            },
        ],
        "databaseSchema": "url-to-something",
        "databaseType": "https://datatrust.org/databaseType/rdbms",
    },
    "api": {"apiType": "https://datatrust.org/apiType/rest", "apiSpec": {}},
}
| nilq/baby-python | python |
# Program 08c: The Belousov-Zhabotinski Reaction. See Figure 8.16.
# Plotting time series for a 3-dimensional ODE.
import numpy as np
from scipy.integrate import odeint
import matplotlib.pyplot as plt
# B_Z parameters and initial conditions.
# NOTE(review): `f` (the stoichiometric factor) is later rebound to the odeint
# solution array -- it is only safe because odeint is called first; consider
# renaming one of them.
q, f, eps, delta = 3.1746e-5, 1, 0.0099, 2.4802e-5
x0, y0, z0 = 0, 0, 0.1
# Maximum time point and total number of time points.
tmax, n = 50, 10000
def bz_reaction(X, t, q, f, eps, delta):
    """Right-hand side of the 3-variable Belousov-Zhabotinski ODE system.

    X is the state tuple (x, y, z); t is unused (the system is autonomous)
    but required by scipy's odeint signature. Returns (dx/dt, dy/dt, dz/dt).
    """
    x, y, z = X
    dxdt = (q*y - x*y + x*(1-x)) / eps
    dydt = (-q*y - x*y + f*z) / delta
    dzdt = x - z
    return (dxdt, dydt, dzdt)
# Integrate the system over [0, tmax] at n points.
t = np.linspace(0, tmax, n)
# NOTE: rebinding `f` here shadows the stoichiometric parameter `f`, which has
# already been consumed inside the args tuple on this same line.
f = odeint(bz_reaction, (x0, y0, z0), t, args=((q, f, eps, delta)))
x, y, z = f.T
# Plot time series: one panel per chemical species.
fig = plt.figure(figsize=(15, 5))
fig.subplots_adjust(wspace=0.5, hspace=0.3)
ax1 = fig.add_subplot(1, 3, 1)
ax1.set_title('Relative concentration bromous acid', fontsize=12)
ax2 = fig.add_subplot(1, 3, 2)
ax2.set_title('Relative concentration bromide ions', fontsize=12)
ax3 = fig.add_subplot(1, 3, 3)
ax3.set_title('Relative concentration cerium ions', fontsize=12)
ax1.plot(t, x, 'b-')
ax2.plot(t, y, 'r-')
ax3.plot(t, z, 'm-')
plt.show()
| nilq/baby-python | python |
# Best-effort package version lookup: fall back to "unknown" when the
# distribution metadata (or pkg_resources itself) is unavailable, e.g. when
# running from a source checkout.
try:
    import pkg_resources
    version = pkg_resources.require("cabot")[0].version
except Exception:
    # The original caught `(Exception, ImportError)` -- ImportError is already
    # a subclass of Exception, so the extra entry was redundant.
    version = "unknown"
| nilq/baby-python | python |
"""
Flask Application: run this script to create website and predict endpoint.
"""
import os
from flask import Flask, request, render_template
import numpy as np
import joblib
# Initialize app and model
app = Flask(__name__)
model = joblib.load("models/linear_model.pkl")
# Get version from VERSION file
with open("VERSION", 'r') as version_file:
__version = version_file.read().strip()
# Routes of app
@app.route("/")
def index():
return render_template("index.html")
@app.route("/version")
def version():
return f"Version: v{__version}"
@app.route("/predict", methods=["POST"])
def predict():
float_features = [float(x) for x in request.form.values()]
features = [np.array(float_features)]
prediction = model.predict(features)
return render_template("index.html", prediction_text=f"The predicted price is {round(prediction[0], 2)}")
if __name__ == "__main__":
port = int(os.environ.get("PORT", 5000))
app.run(debug=True, host="0.0.0.0", port=port)
| nilq/baby-python | python |
import numpy as np
from scipy.spatial.distance import cdist
class KMeans:
    """Naive k-means (Lloyd's algorithm) with a configurable scipy distance metric."""

    def __init__(
            self,
            k: int,
            metric: str = "euclidean",
            tol: float = 1e-6,
            max_iter: int = 100,
            seed: int = 42):
        """
        inputs:
            k: int
                the number of centroids to use in cluster fitting (must be >= 2)
            metric: str
                the name of the distance metric to use (any metric understood
                by scipy.spatial.distance.cdist)
            tol: float
                the minimum error tolerance from previous error during optimization to quit the model fit
            max_iter: int
                the maximum number of iterations before quitting model fit
            seed: int
                seed for numpy's global RNG (controls centroid initialization)
        """
        np.random.seed(seed)
        assert k > 0, "Error: The number of centroids must be greater than 0."
        assert k > 1, "Error: Setting k=1 means every point belongs to the same cluster."
        self.k = k
        self.metric = metric
        self.tol = tol
        self.max_iter = max_iter

    def _check_input_mat(self, mat: np.ndarray) -> np.ndarray:
        """
        Validate the input matrix.

        Checks that there are more observations than centroids, and reshapes
        1D input into a single-feature column matrix (with a warning).

        inputs:
            mat: np.ndarray
        outputs:
            mat: np.ndarray
        """
        assert self.k < mat.shape[0], "Error: The number of centroids must be less than the number of observations."
        if mat.ndim == 1:
            print("Warning: Reshaping 1D numpy array (-1,1).")
            print("Warning: Consider an alternative algorithm like KDE for one dimensional data.")
            mat = mat.reshape(-1, 1)
        return mat

    def _find_nearest_centroids(self, mat: np.ndarray) -> np.ndarray:
        """
        Assign each observation to its nearest centroid.

        Caches the full observation-by-centroid distance matrix in
        ``self._distances`` so ``_calculate_mse`` can reuse it.

        inputs:
            mat: np.ndarray
                A 2D matrix where the rows are observations and columns are features
        output:
            np.ndarray
                A 1D array with the cluster label for each observation
        """
        self._distances = cdist(mat, self._centroids, self.metric)
        return np.argmin(self._distances, axis=1)

    def _calculate_mse(self, mat: np.ndarray) -> float:
        """
        Mean squared distance from each observation to its assigned centroid.

        The distance to the assigned centroid is, by definition, the row
        minimum of the matrix cached by ``_find_nearest_centroids``. This
        replaces the original implementation, which built a full n-by-n cdist
        matrix only to read its diagonal (O(n^2) work and memory).
        """
        nearest_dist = np.min(self._distances, axis=1)
        return float(np.square(nearest_dist).mean())

    def fit(self, mat: np.ndarray):
        """
        fits the kmeans algorithm onto a provided 2D matrix

        inputs:
            mat: np.ndarray
                A 2D matrix where the rows are observations and columns are features
        """
        mat = self._check_input_mat(mat)
        # Forgy initialization: pick k distinct observations as the starting
        # centroids. The previous np.random.rand(k, n_features) sampled the
        # unit hypercube, which is wrong for data outside [0, 1): every point
        # could land in one cluster, producing empty clusters / NaN centroids.
        init_idx = np.random.choice(mat.shape[0], size=self.k, replace=False)
        self._centroids = mat[init_idx].astype(float)
        self._labels = self._find_nearest_centroids(mat)
        # Lloyd iterations: update centroids, re-assign points, and stop once
        # the MSE changes by at most `tol` (or max_iter is exhausted).
        self._mse = []
        for iter_step in range(self.max_iter):
            for k_cluster in range(self.k):
                members = mat[self._labels == k_cluster, :]
                # Guard against empty clusters: np.mean of an empty slice is
                # NaN and would poison every later distance computation; keep
                # the previous centroid instead.
                if members.shape[0] > 0:
                    self._centroids[k_cluster, :] = members.mean(axis=0)
            self._labels = self._find_nearest_centroids(mat)
            self._mse.append(self._calculate_mse(mat))
            if iter_step > 0 and abs(self._mse[iter_step] - self._mse[iter_step - 1]) <= self.tol:
                break
        # Collapse the history to the final error so get_error() always
        # returns a float. (Previously the whole list leaked out whenever
        # max_iter was reached before the tolerance check fired.)
        if self._mse:
            self._mse = self._mse[-1]

    def predict(self, mat: np.ndarray) -> np.ndarray:
        """
        predicts the cluster labels for a provided 2D matrix

        inputs:
            mat: np.ndarray
                A 2D matrix where the rows are observations and columns are features
        outputs:
            np.ndarray
                a 1D array with the cluster label for each of the observations in `mat`
        """
        test_mat = self._check_input_mat(mat)
        return self._find_nearest_centroids(test_mat)

    def get_error(self) -> float:
        """
        returns the final squared-mean error of the fit model

        outputs:
            float
                the squared-mean error of the fit model
        """
        return self._mse

    def get_centroids(self) -> np.ndarray:
        """
        returns the centroid locations of the fit model

        outputs:
            np.ndarray
                a `k x m` 2D matrix representing the cluster centroids of the fit model
        """
        return self._centroids
from collections import OrderedDict
from devito.core.autotuning import autotune
from devito.cgen_utils import printmark
from devito.ir.iet import (Call, List, HaloSpot, MetaCall, FindNodes, Transformer,
filter_iterations, retrieve_iteration_tree)
from devito.ir.support import align_accesses
from devito.parameters import configuration
from devito.mpi import copy, sendrecv, update_halo
from devito.operator import OperatorRunnable
from devito.tools import flatten
__all__ = ['Operator']
class OperatorCore(OperatorRunnable):
    """Concrete Operator: expression specialization, MPI halo exchange, autotuning."""
    def _specialize_exprs(self, expressions):
        """Align TensorFunction data accesses before the parent's specialization."""
        # Align data accesses to the computational domain
        key = lambda i: i.is_TensorFunction
        expressions = [align_accesses(e, key=key) for e in expressions]
        return super(OperatorCore, self)._specialize_exprs(expressions)
    def _generate_mpi(self, iet, **kwargs):
        """Inject halo-exchange routines and calls into `iet` when MPI is enabled.

        Returns `iet` untouched when configuration['mpi'] is off.
        """
        if configuration['mpi'] is False:
            return iet
        halo_spots = FindNodes(HaloSpot).visit(iet)
        # For each MPI-distributed TensorFunction, generate all necessary
        # C-level routines to perform a halo update
        callables = OrderedDict()
        for hs in halo_spots:
            for f, v in hs.fmapper.items():
                callables[f] = [update_halo(f, v.loc_indices)]
                callables[f].append(sendrecv(f, v.loc_indices))
                callables[f].append(copy(f, v.loc_indices))
                callables[f].append(copy(f, v.loc_indices, True))
        callables = flatten(callables.values())
        # Replace HaloSpots with suitable calls performing the halo update
        mapper = {}
        for hs in halo_spots:
            for f, v in hs.fmapper.items():
                stencil = [int(i) for i in hs.mask[f].values()]
                comm = f.grid.distributor._C_comm
                nb = f.grid.distributor._C_neighbours.obj
                loc_indices = list(v.loc_indices.values())
                dsizes = [d.symbolic_size for d in f.dimensions]
                parameters = [f] + stencil + [comm, nb] + loc_indices + dsizes
                call = Call('halo_exchange_%s' % f.name, parameters)
                mapper.setdefault(hs, []).append(call)
        # Sorting is for deterministic code generation. However, in practice,
        # we don't expect `cstructs` to contain more than one element because
        # there should always be one grid per Operator (though we're not really
        # enforcing it)
        cstructs = {f.grid.distributor._C_neighbours.cdef
                    for f in flatten(i.fmapper for i in halo_spots)}
        self._globals.extend(sorted(cstructs, key=lambda i: i.tpname))
        self._includes.append('mpi.h')
        self._func_table.update(OrderedDict([(i.name, MetaCall(i, True))
                                             for i in callables]))
        # Add in the halo update calls
        mapper = {k: List(body=v + list(k.body)) for k, v in mapper.items()}
        iet = Transformer(mapper, nested=True).visit(iet)
        return iet
    def _autotune(self, args, setup):
        """Run block-size autotuning per `setup`: bool, level name, or (level, mode)."""
        if setup is False:
            return args
        elif setup is True:
            level = configuration['autotuning'].level or 'basic'
            args, summary = autotune(self, args, level, configuration['autotuning'].mode)
        elif isinstance(setup, str):
            args, summary = autotune(self, args, setup, configuration['autotuning'].mode)
        elif isinstance(setup, tuple) and len(setup) == 2:
            level, mode = setup
            if level is False:
                return args
            else:
                args, summary = autotune(self, args, level, mode)
        else:
            raise ValueError("Expected bool, str, or 2-tuple, got `%s` instead"
                             % type(setup))
        # Record the tuned values
        self._state.setdefault('autotuning', []).append(summary)
        return args
class OperatorDebug(OperatorCore):
    """
    Decorate the generated code with useful print statements.
    """
    def __init__(self, expressions, **kwargs):
        """Build the operator, then instrument its IET body for debugging."""
        super(OperatorDebug, self).__init__(expressions, **kwargs)
        # printmark() emits printf-based markers, hence stdio.h is required.
        self._includes.append('stdio.h')
        # Minimize the trip count of the sequential loops
        iterations = set(flatten(retrieve_iteration_tree(self.body)))
        mapper = {i: i._rebuild(limits=(max(i.offsets) + 2))
                  for i in iterations if i.is_Sequential}
        self.body = Transformer(mapper).visit(self.body)
        # Mark entry/exit points of each non-sequential Iteration tree in the body
        iterations = [filter_iterations(i, lambda i: not i.is_Sequential, 'any')
                      for i in retrieve_iteration_tree(self.body)]
        iterations = [i[0] for i in iterations if i]
        mapper = {t: List(header=printmark('In nest %d' % i), body=t)
                  for i, t in enumerate(iterations)}
        self.body = Transformer(mapper).visit(self.body)
class Operator(object):
    """Factory that dispatches to OperatorDebug or OperatorCore.

    The `debug` keyword selects the concrete class and is consumed here so it
    never reaches the concrete constructor.
    """
    def __new__(cls, *args, **kwargs):
        debug_mode = kwargs.pop('debug', False)
        concrete = OperatorDebug if debug_mode else OperatorCore
        instance = concrete.__new__(concrete, *args, **kwargs)
        instance.__init__(*args, **kwargs)
        return instance
| nilq/baby-python | python |
from configparser import ConfigParser
class Role:
    """The Parent Role Object"""
    def __init__(self, name: str, gender: str, toa: int, team: str, night_actions, day_actions, on_attack_actions,
                 death_actions,
                 img: str,
                 scenario: str):
        """Create a role and immediately persist it to '<name>.ini'."""
        self.name = name
        self.gender = gender
        self.toa = toa # time of awakening 0:0R, 1:1R, 2:PR,3:KR,4:AR
        self.team = team
        self.night_actions = night_actions # dict
        self.day_actions = day_actions # dict
        self.on_attack_actions = on_attack_actions # dict
        self.death_actions = death_actions # dict
        self.scenario = scenario
        self.img = img
        self.write_to_file()
    def write_to_file(self):
        """Writes a Role to a Config-File named '<name>.ini'.

        NOTE(review): `<acts>.update({action})` only works if iterating the
        *_actions containers yields (key, value) pairs (e.g. lists of 2-tuples).
        If they really are plain dicts (as the comments above claim), iteration
        yields bare keys and dict.update() would raise -- confirm the expected
        container type with the callers.
        """
        file = ConfigParser(allow_no_value=True)
        file['GENERAL'] = {'name': self.name, 'gender': self.gender, 'toa': self.toa, 'team': self.team,
                           'img': self.img,
                           'scenario': self.scenario}
        night_acts = {}
        day_acts = {}
        on_attack_acts = {}
        death_acts = {}
        for action in self.night_actions:
            night_acts.update({action})
        for action in self.day_actions:
            day_acts.update({action})
        for action in self.on_attack_actions:
            on_attack_acts.update({action})
        for action in self.death_actions:
            death_acts.update({action})
        file['NIGHT_ACTIONS'] = night_acts
        file['DAY_ACTIONS'] = day_acts
        file['ON_ATTACK_ACTIONS'] = on_attack_acts
        file['DEATH_ACTIONS'] = death_acts
        file_name = self.name + '.ini'
        print('Write to \"' + file_name + '\"')
        with open(file_name, 'w') as f:
            file.write(f)
    def wake_up(self):
        """Triggers the Night-Actions of the Role."""
        print()
        # TODO role wake_up
        # audio: play wake-up cue (was: "Audio aufwachen")
        # trigger the role's night actions
        # audio: play fall-asleep cue (was: "Audio einschlafen")
| nilq/baby-python | python |
# This file is automatically generated by tools/idna-data
# vim: set fileencoding=utf-8 :
"""IDNA Mapping Table from UTS46."""
__version__ = "12.1.0"
def _seg_0():
return [
(0x0, "3"),
(0x1, "3"),
(0x2, "3"),
(0x3, "3"),
(0x4, "3"),
(0x5, "3"),
(0x6, "3"),
(0x7, "3"),
(0x8, "3"),
(0x9, "3"),
(0xa, "3"),
(0xb, "3"),
(0xc, "3"),
(0xd, "3"),
(0xe, "3"),
(0xf, "3"),
(0x10, "3"),
(0x11, "3"),
(0x12, "3"),
(0x13, "3"),
(0x14, "3"),
(0x15, "3"),
(0x16, "3"),
(0x17, "3"),
(0x18, "3"),
(0x19, "3"),
(0x1a, "3"),
(0x1b, "3"),
(0x1c, "3"),
(0x1d, "3"),
(0x1e, "3"),
(0x1f, "3"),
(0x20, "3"),
(0x21, "3"),
(0x22, "3"),
(0x23, "3"),
(0x24, "3"),
(0x25, "3"),
(0x26, "3"),
(0x27, "3"),
(0x28, "3"),
(0x29, "3"),
(0x2a, "3"),
(0x2b, "3"),
(0x2c, "3"),
(0x2d, "V"),
(0x2e, "V"),
(0x2f, "3"),
(0x30, "V"),
(0x31, "V"),
(0x32, "V"),
(0x33, "V"),
(0x34, "V"),
(0x35, "V"),
(0x36, "V"),
(0x37, "V"),
(0x38, "V"),
(0x39, "V"),
(0x3a, "3"),
(0x3b, "3"),
(0x3c, "3"),
(0x3d, "3"),
(0x3e, "3"),
(0x3f, "3"),
(0x40, "3"),
(0x41, "M", u"a"),
(0x42, "M", u"b"),
(0x43, "M", u"c"),
(0x44, "M", u"d"),
(0x45, "M", u"e"),
(0x46, "M", u"f"),
(0x47, "M", u"g"),
(0x48, "M", u"h"),
(0x49, "M", u"i"),
(0x4a, "M", u"j"),
(0x4b, "M", u"k"),
(0x4c, "M", u"l"),
(0x4d, "M", u"m"),
(0x4e, "M", u"n"),
(0x4f, "M", u"o"),
(0x50, "M", u"p"),
(0x51, "M", u"q"),
(0x52, "M", u"r"),
(0x53, "M", u"s"),
(0x54, "M", u"t"),
(0x55, "M", u"u"),
(0x56, "M", u"v"),
(0x57, "M", u"w"),
(0x58, "M", u"x"),
(0x59, "M", u"y"),
(0x5a, "M", u"z"),
(0x5b, "3"),
(0x5c, "3"),
(0x5d, "3"),
(0x5e, "3"),
(0x5f, "3"),
(0x60, "3"),
(0x61, "V"),
(0x62, "V"),
(0x63, "V"),
]
def _seg_1():
return [
(0x64, "V"),
(0x65, "V"),
(0x66, "V"),
(0x67, "V"),
(0x68, "V"),
(0x69, "V"),
(0x6a, "V"),
(0x6b, "V"),
(0x6c, "V"),
(0x6d, "V"),
(0x6e, "V"),
(0x6f, "V"),
(0x70, "V"),
(0x71, "V"),
(0x72, "V"),
(0x73, "V"),
(0x74, "V"),
(0x75, "V"),
(0x76, "V"),
(0x77, "V"),
(0x78, "V"),
(0x79, "V"),
(0x7a, "V"),
(0x7b, "3"),
(0x7c, "3"),
(0x7d, "3"),
(0x7e, "3"),
(0x7f, "3"),
(0x80, "X"),
(0x81, "X"),
(0x82, "X"),
(0x83, "X"),
(0x84, "X"),
(0x85, "X"),
(0x86, "X"),
(0x87, "X"),
(0x88, "X"),
(0x89, "X"),
(0x8a, "X"),
(0x8b, "X"),
(0x8c, "X"),
(0x8d, "X"),
(0x8e, "X"),
(0x8f, "X"),
(0x90, "X"),
(0x91, "X"),
(0x92, "X"),
(0x93, "X"),
(0x94, "X"),
(0x95, "X"),
(0x96, "X"),
(0x97, "X"),
(0x98, "X"),
(0x99, "X"),
(0x9a, "X"),
(0x9b, "X"),
(0x9c, "X"),
(0x9d, "X"),
(0x9e, "X"),
(0x9f, "X"),
(0xa0, "3", u" "),
(0xa1, "V"),
(0xa2, "V"),
(0xa3, "V"),
(0xa4, "V"),
(0xa5, "V"),
(0xa6, "V"),
(0xa7, "V"),
(0xa8, "3", u" ̈"),
(0xa9, "V"),
(0xaa, "M", u"a"),
(0xab, "V"),
(0xac, "V"),
(0xad, "I"),
(0xae, "V"),
(0xaf, "3", u" ̄"),
(0xb0, "V"),
(0xb1, "V"),
(0xb2, "M", u"2"),
(0xb3, "M", u"3"),
(0xb4, "3", u" ́"),
(0xb5, "M", u"μ"),
(0xb6, "V"),
(0xb7, "V"),
(0xb8, "3", u" ̧"),
(0xb9, "M", u"1"),
(0xba, "M", u"o"),
(0xbb, "V"),
(0xbc, "M", u"1⁄4"),
(0xbd, "M", u"1⁄2"),
(0xbe, "M", u"3⁄4"),
(0xbf, "V"),
(0xc0, "M", u"à"),
(0xc1, "M", u"á"),
(0xc2, "M", u"â"),
(0xc3, "M", u"ã"),
(0xc4, "M", u"ä"),
(0xc5, "M", u"å"),
(0xc6, "M", u"æ"),
(0xc7, "M", u"ç"),
]
def _seg_2():
return [
(0xc8, "M", u"è"),
(0xc9, "M", u"é"),
(0xca, "M", u"ê"),
(0xcb, "M", u"ë"),
(0xcc, "M", u"ì"),
(0xcd, "M", u"í"),
(0xce, "M", u"î"),
(0xcf, "M", u"ï"),
(0xd0, "M", u"ð"),
(0xd1, "M", u"ñ"),
(0xd2, "M", u"ò"),
(0xd3, "M", u"ó"),
(0xd4, "M", u"ô"),
(0xd5, "M", u"õ"),
(0xd6, "M", u"ö"),
(0xd7, "V"),
(0xd8, "M", u"ø"),
(0xd9, "M", u"ù"),
(0xda, "M", u"ú"),
(0xdb, "M", u"û"),
(0xdc, "M", u"ü"),
(0xdd, "M", u"ý"),
(0xde, "M", u"þ"),
(0xdf, "D", u"ss"),
(0xe0, "V"),
(0xe1, "V"),
(0xe2, "V"),
(0xe3, "V"),
(0xe4, "V"),
(0xe5, "V"),
(0xe6, "V"),
(0xe7, "V"),
(0xe8, "V"),
(0xe9, "V"),
(0xea, "V"),
(0xeb, "V"),
(0xec, "V"),
(0xed, "V"),
(0xee, "V"),
(0xef, "V"),
(0xf0, "V"),
(0xf1, "V"),
(0xf2, "V"),
(0xf3, "V"),
(0xf4, "V"),
(0xf5, "V"),
(0xf6, "V"),
(0xf7, "V"),
(0xf8, "V"),
(0xf9, "V"),
(0xfa, "V"),
(0xfb, "V"),
(0xfc, "V"),
(0xfd, "V"),
(0xfe, "V"),
(0xff, "V"),
(0x100, "M", u"ā"),
(0x101, "V"),
(0x102, "M", u"ă"),
(0x103, "V"),
(0x104, "M", u"ą"),
(0x105, "V"),
(0x106, "M", u"ć"),
(0x107, "V"),
(0x108, "M", u"ĉ"),
(0x109, "V"),
(0x10a, "M", u"ċ"),
(0x10b, "V"),
(0x10c, "M", u"č"),
(0x10d, "V"),
(0x10e, "M", u"ď"),
(0x10f, "V"),
(0x110, "M", u"đ"),
(0x111, "V"),
(0x112, "M", u"ē"),
(0x113, "V"),
(0x114, "M", u"ĕ"),
(0x115, "V"),
(0x116, "M", u"ė"),
(0x117, "V"),
(0x118, "M", u"ę"),
(0x119, "V"),
(0x11a, "M", u"ě"),
(0x11b, "V"),
(0x11c, "M", u"ĝ"),
(0x11d, "V"),
(0x11e, "M", u"ğ"),
(0x11f, "V"),
(0x120, "M", u"ġ"),
(0x121, "V"),
(0x122, "M", u"ģ"),
(0x123, "V"),
(0x124, "M", u"ĥ"),
(0x125, "V"),
(0x126, "M", u"ħ"),
(0x127, "V"),
(0x128, "M", u"ĩ"),
(0x129, "V"),
(0x12a, "M", u"ī"),
(0x12b, "V"),
]
def _seg_3():
return [
(0x12c, "M", u"ĭ"),
(0x12d, "V"),
(0x12e, "M", u"į"),
(0x12f, "V"),
(0x130, "M", u"i̇"),
(0x131, "V"),
(0x132, "M", u"ij"),
(0x134, "M", u"ĵ"),
(0x135, "V"),
(0x136, "M", u"ķ"),
(0x137, "V"),
(0x139, "M", u"ĺ"),
(0x13a, "V"),
(0x13b, "M", u"ļ"),
(0x13c, "V"),
(0x13d, "M", u"ľ"),
(0x13e, "V"),
(0x13f, "M", u"l·"),
(0x141, "M", u"ł"),
(0x142, "V"),
(0x143, "M", u"ń"),
(0x144, "V"),
(0x145, "M", u"ņ"),
(0x146, "V"),
(0x147, "M", u"ň"),
(0x148, "V"),
(0x149, "M", u"ʼn"),
(0x14a, "M", u"ŋ"),
(0x14b, "V"),
(0x14c, "M", u"ō"),
(0x14d, "V"),
(0x14e, "M", u"ŏ"),
(0x14f, "V"),
(0x150, "M", u"ő"),
(0x151, "V"),
(0x152, "M", u"œ"),
(0x153, "V"),
(0x154, "M", u"ŕ"),
(0x155, "V"),
(0x156, "M", u"ŗ"),
(0x157, "V"),
(0x158, "M", u"ř"),
(0x159, "V"),
(0x15a, "M", u"ś"),
(0x15b, "V"),
(0x15c, "M", u"ŝ"),
(0x15d, "V"),
(0x15e, "M", u"ş"),
(0x15f, "V"),
(0x160, "M", u"š"),
(0x161, "V"),
(0x162, "M", u"ţ"),
(0x163, "V"),
(0x164, "M", u"ť"),
(0x165, "V"),
(0x166, "M", u"ŧ"),
(0x167, "V"),
(0x168, "M", u"ũ"),
(0x169, "V"),
(0x16a, "M", u"ū"),
(0x16b, "V"),
(0x16c, "M", u"ŭ"),
(0x16d, "V"),
(0x16e, "M", u"ů"),
(0x16f, "V"),
(0x170, "M", u"ű"),
(0x171, "V"),
(0x172, "M", u"ų"),
(0x173, "V"),
(0x174, "M", u"ŵ"),
(0x175, "V"),
(0x176, "M", u"ŷ"),
(0x177, "V"),
(0x178, "M", u"ÿ"),
(0x179, "M", u"ź"),
(0x17a, "V"),
(0x17b, "M", u"ż"),
(0x17c, "V"),
(0x17d, "M", u"ž"),
(0x17e, "V"),
(0x17f, "M", u"s"),
(0x180, "V"),
(0x181, "M", u"ɓ"),
(0x182, "M", u"ƃ"),
(0x183, "V"),
(0x184, "M", u"ƅ"),
(0x185, "V"),
(0x186, "M", u"ɔ"),
(0x187, "M", u"ƈ"),
(0x188, "V"),
(0x189, "M", u"ɖ"),
(0x18a, "M", u"ɗ"),
(0x18b, "M", u"ƌ"),
(0x18c, "V"),
(0x18e, "M", u"ǝ"),
(0x18f, "M", u"ə"),
(0x190, "M", u"ɛ"),
(0x191, "M", u"ƒ"),
(0x192, "V"),
(0x193, "M", u"ɠ"),
]
def _seg_4():
return [
(0x194, "M", u"ɣ"),
(0x195, "V"),
(0x196, "M", u"ɩ"),
(0x197, "M", u"ɨ"),
(0x198, "M", u"ƙ"),
(0x199, "V"),
(0x19c, "M", u"ɯ"),
(0x19d, "M", u"ɲ"),
(0x19e, "V"),
(0x19f, "M", u"ɵ"),
(0x1a0, "M", u"ơ"),
(0x1a1, "V"),
(0x1a2, "M", u"ƣ"),
(0x1a3, "V"),
(0x1a4, "M", u"ƥ"),
(0x1a5, "V"),
(0x1a6, "M", u"ʀ"),
(0x1a7, "M", u"ƨ"),
(0x1a8, "V"),
(0x1a9, "M", u"ʃ"),
(0x1aa, "V"),
(0x1ac, "M", u"ƭ"),
(0x1ad, "V"),
(0x1ae, "M", u"ʈ"),
(0x1af, "M", u"ư"),
(0x1b0, "V"),
(0x1b1, "M", u"ʊ"),
(0x1b2, "M", u"ʋ"),
(0x1b3, "M", u"ƴ"),
(0x1b4, "V"),
(0x1b5, "M", u"ƶ"),
(0x1b6, "V"),
(0x1b7, "M", u"ʒ"),
(0x1b8, "M", u"ƹ"),
(0x1b9, "V"),
(0x1bc, "M", u"ƽ"),
(0x1bd, "V"),
(0x1c4, "M", u"dž"),
(0x1c7, "M", u"lj"),
(0x1ca, "M", u"nj"),
(0x1cd, "M", u"ǎ"),
(0x1ce, "V"),
(0x1cf, "M", u"ǐ"),
(0x1d0, "V"),
(0x1d1, "M", u"ǒ"),
(0x1d2, "V"),
(0x1d3, "M", u"ǔ"),
(0x1d4, "V"),
(0x1d5, "M", u"ǖ"),
(0x1d6, "V"),
(0x1d7, "M", u"ǘ"),
(0x1d8, "V"),
(0x1d9, "M", u"ǚ"),
(0x1da, "V"),
(0x1db, "M", u"ǜ"),
(0x1dc, "V"),
(0x1de, "M", u"ǟ"),
(0x1df, "V"),
(0x1e0, "M", u"ǡ"),
(0x1e1, "V"),
(0x1e2, "M", u"ǣ"),
(0x1e3, "V"),
(0x1e4, "M", u"ǥ"),
(0x1e5, "V"),
(0x1e6, "M", u"ǧ"),
(0x1e7, "V"),
(0x1e8, "M", u"ǩ"),
(0x1e9, "V"),
(0x1ea, "M", u"ǫ"),
(0x1eb, "V"),
(0x1ec, "M", u"ǭ"),
(0x1ed, "V"),
(0x1ee, "M", u"ǯ"),
(0x1ef, "V"),
(0x1f1, "M", u"dz"),
(0x1f4, "M", u"ǵ"),
(0x1f5, "V"),
(0x1f6, "M", u"ƕ"),
(0x1f7, "M", u"ƿ"),
(0x1f8, "M", u"ǹ"),
(0x1f9, "V"),
(0x1fa, "M", u"ǻ"),
(0x1fb, "V"),
(0x1fc, "M", u"ǽ"),
(0x1fd, "V"),
(0x1fe, "M", u"ǿ"),
(0x1ff, "V"),
(0x200, "M", u"ȁ"),
(0x201, "V"),
(0x202, "M", u"ȃ"),
(0x203, "V"),
(0x204, "M", u"ȅ"),
(0x205, "V"),
(0x206, "M", u"ȇ"),
(0x207, "V"),
(0x208, "M", u"ȉ"),
(0x209, "V"),
(0x20a, "M", u"ȋ"),
(0x20b, "V"),
(0x20c, "M", u"ȍ"),
]
def _seg_5():
    """Return segment 5 of the generated code point mapping table.

    Tuples are ``(codepoint, status[, replacement])``; generated data,
    do not hand-edit.
    """
    return [
        (0x20d, "V"),
        (0x20e, "M", u"ȏ"),
        (0x20f, "V"),
        (0x210, "M", u"ȑ"),
        (0x211, "V"),
        (0x212, "M", u"ȓ"),
        (0x213, "V"),
        (0x214, "M", u"ȕ"),
        (0x215, "V"),
        (0x216, "M", u"ȗ"),
        (0x217, "V"),
        (0x218, "M", u"ș"),
        (0x219, "V"),
        (0x21a, "M", u"ț"),
        (0x21b, "V"),
        (0x21c, "M", u"ȝ"),
        (0x21d, "V"),
        (0x21e, "M", u"ȟ"),
        (0x21f, "V"),
        (0x220, "M", u"ƞ"),
        (0x221, "V"),
        (0x222, "M", u"ȣ"),
        (0x223, "V"),
        (0x224, "M", u"ȥ"),
        (0x225, "V"),
        (0x226, "M", u"ȧ"),
        (0x227, "V"),
        (0x228, "M", u"ȩ"),
        (0x229, "V"),
        (0x22a, "M", u"ȫ"),
        (0x22b, "V"),
        (0x22c, "M", u"ȭ"),
        (0x22d, "V"),
        (0x22e, "M", u"ȯ"),
        (0x22f, "V"),
        (0x230, "M", u"ȱ"),
        (0x231, "V"),
        (0x232, "M", u"ȳ"),
        (0x233, "V"),
        (0x23a, "M", u"ⱥ"),
        (0x23b, "M", u"ȼ"),
        (0x23c, "V"),
        (0x23d, "M", u"ƚ"),
        (0x23e, "M", u"ⱦ"),
        (0x23f, "V"),
        (0x241, "M", u"ɂ"),
        (0x242, "V"),
        (0x243, "M", u"ƀ"),
        (0x244, "M", u"ʉ"),
        (0x245, "M", u"ʌ"),
        (0x246, "M", u"ɇ"),
        (0x247, "V"),
        (0x248, "M", u"ɉ"),
        (0x249, "V"),
        (0x24a, "M", u"ɋ"),
        (0x24b, "V"),
        (0x24c, "M", u"ɍ"),
        (0x24d, "V"),
        (0x24e, "M", u"ɏ"),
        (0x24f, "V"),
        (0x2b0, "M", u"h"),
        (0x2b1, "M", u"ɦ"),
        (0x2b2, "M", u"j"),
        (0x2b3, "M", u"r"),
        (0x2b4, "M", u"ɹ"),
        (0x2b5, "M", u"ɻ"),
        (0x2b6, "M", u"ʁ"),
        (0x2b7, "M", u"w"),
        (0x2b8, "M", u"y"),
        (0x2b9, "V"),
        (0x2d8, "3", u" ̆"),
        (0x2d9, "3", u" ̇"),
        (0x2da, "3", u" ̊"),
        (0x2db, "3", u" ̨"),
        (0x2dc, "3", u" ̃"),
        (0x2dd, "3", u" ̋"),
        (0x2de, "V"),
        (0x2e0, "M", u"ɣ"),
        (0x2e1, "M", u"l"),
        (0x2e2, "M", u"s"),
        (0x2e3, "M", u"x"),
        (0x2e4, "M", u"ʕ"),
        (0x2e5, "V"),
        (0x340, "M", u"̀"),
        (0x341, "M", u"́"),
        (0x342, "V"),
        (0x343, "M", u"̓"),
        (0x344, "M", u"̈́"),
        (0x345, "M", u"ι"),
        (0x346, "V"),
        (0x34f, "I"),
        (0x350, "V"),
        (0x370, "M", u"ͱ"),
        (0x371, "V"),
        (0x372, "M", u"ͳ"),
        (0x373, "V"),
        (0x374, "M", u"ʹ"),
        (0x375, "V"),
        (0x376, "M", u"ͷ"),
        (0x377, "V"),
    ]
def _seg_6():
    """Return segment 6 of the generated code point mapping table.

    Tuples are ``(codepoint, status[, replacement])``; generated data,
    do not hand-edit.
    """
    return [
        (0x378, "X"),
        (0x37a, "3", u" ι"),
        (0x37b, "V"),
        (0x37e, "3", u";"),
        (0x37f, "M", u"ϳ"),
        (0x380, "X"),
        (0x384, "3", u" ́"),
        (0x385, "3", u" ̈́"),
        (0x386, "M", u"ά"),
        (0x387, "M", u"·"),
        (0x388, "M", u"έ"),
        (0x389, "M", u"ή"),
        (0x38a, "M", u"ί"),
        (0x38b, "X"),
        (0x38c, "M", u"ό"),
        (0x38d, "X"),
        (0x38e, "M", u"ύ"),
        (0x38f, "M", u"ώ"),
        (0x390, "V"),
        (0x391, "M", u"α"),
        (0x392, "M", u"β"),
        (0x393, "M", u"γ"),
        (0x394, "M", u"δ"),
        (0x395, "M", u"ε"),
        (0x396, "M", u"ζ"),
        (0x397, "M", u"η"),
        (0x398, "M", u"θ"),
        (0x399, "M", u"ι"),
        (0x39a, "M", u"κ"),
        (0x39b, "M", u"λ"),
        (0x39c, "M", u"μ"),
        (0x39d, "M", u"ν"),
        (0x39e, "M", u"ξ"),
        (0x39f, "M", u"ο"),
        (0x3a0, "M", u"π"),
        (0x3a1, "M", u"ρ"),
        (0x3a2, "X"),
        (0x3a3, "M", u"σ"),
        (0x3a4, "M", u"τ"),
        (0x3a5, "M", u"υ"),
        (0x3a6, "M", u"φ"),
        (0x3a7, "M", u"χ"),
        (0x3a8, "M", u"ψ"),
        (0x3a9, "M", u"ω"),
        (0x3aa, "M", u"ϊ"),
        (0x3ab, "M", u"ϋ"),
        (0x3ac, "V"),
        (0x3c2, "D", u"σ"),
        (0x3c3, "V"),
        (0x3cf, "M", u"ϗ"),
        (0x3d0, "M", u"β"),
        (0x3d1, "M", u"θ"),
        (0x3d2, "M", u"υ"),
        (0x3d3, "M", u"ύ"),
        (0x3d4, "M", u"ϋ"),
        (0x3d5, "M", u"φ"),
        (0x3d6, "M", u"π"),
        (0x3d7, "V"),
        (0x3d8, "M", u"ϙ"),
        (0x3d9, "V"),
        (0x3da, "M", u"ϛ"),
        (0x3db, "V"),
        (0x3dc, "M", u"ϝ"),
        (0x3dd, "V"),
        (0x3de, "M", u"ϟ"),
        (0x3df, "V"),
        (0x3e0, "M", u"ϡ"),
        (0x3e1, "V"),
        (0x3e2, "M", u"ϣ"),
        (0x3e3, "V"),
        (0x3e4, "M", u"ϥ"),
        (0x3e5, "V"),
        (0x3e6, "M", u"ϧ"),
        (0x3e7, "V"),
        (0x3e8, "M", u"ϩ"),
        (0x3e9, "V"),
        (0x3ea, "M", u"ϫ"),
        (0x3eb, "V"),
        (0x3ec, "M", u"ϭ"),
        (0x3ed, "V"),
        (0x3ee, "M", u"ϯ"),
        (0x3ef, "V"),
        (0x3f0, "M", u"κ"),
        (0x3f1, "M", u"ρ"),
        (0x3f2, "M", u"σ"),
        (0x3f3, "V"),
        (0x3f4, "M", u"θ"),
        (0x3f5, "M", u"ε"),
        (0x3f6, "V"),
        (0x3f7, "M", u"ϸ"),
        (0x3f8, "V"),
        (0x3f9, "M", u"σ"),
        (0x3fa, "M", u"ϻ"),
        (0x3fb, "V"),
        (0x3fd, "M", u"ͻ"),
        (0x3fe, "M", u"ͼ"),
        (0x3ff, "M", u"ͽ"),
        (0x400, "M", u"ѐ"),
        (0x401, "M", u"ё"),
        (0x402, "M", u"ђ"),
    ]
def _seg_7():
    """Return segment 7 of the generated code point mapping table.

    Tuples are ``(codepoint, status[, replacement])``; generated data,
    do not hand-edit.
    """
    return [
        (0x403, "M", u"ѓ"),
        (0x404, "M", u"є"),
        (0x405, "M", u"ѕ"),
        (0x406, "M", u"і"),
        (0x407, "M", u"ї"),
        (0x408, "M", u"ј"),
        (0x409, "M", u"љ"),
        (0x40a, "M", u"њ"),
        (0x40b, "M", u"ћ"),
        (0x40c, "M", u"ќ"),
        (0x40d, "M", u"ѝ"),
        (0x40e, "M", u"ў"),
        (0x40f, "M", u"џ"),
        (0x410, "M", u"а"),
        (0x411, "M", u"б"),
        (0x412, "M", u"в"),
        (0x413, "M", u"г"),
        (0x414, "M", u"д"),
        (0x415, "M", u"е"),
        (0x416, "M", u"ж"),
        (0x417, "M", u"з"),
        (0x418, "M", u"и"),
        (0x419, "M", u"й"),
        (0x41a, "M", u"к"),
        (0x41b, "M", u"л"),
        (0x41c, "M", u"м"),
        (0x41d, "M", u"н"),
        (0x41e, "M", u"о"),
        (0x41f, "M", u"п"),
        (0x420, "M", u"р"),
        (0x421, "M", u"с"),
        (0x422, "M", u"т"),
        (0x423, "M", u"у"),
        (0x424, "M", u"ф"),
        (0x425, "M", u"х"),
        (0x426, "M", u"ц"),
        (0x427, "M", u"ч"),
        (0x428, "M", u"ш"),
        (0x429, "M", u"щ"),
        (0x42a, "M", u"ъ"),
        (0x42b, "M", u"ы"),
        (0x42c, "M", u"ь"),
        (0x42d, "M", u"э"),
        (0x42e, "M", u"ю"),
        (0x42f, "M", u"я"),
        (0x430, "V"),
        (0x460, "M", u"ѡ"),
        (0x461, "V"),
        (0x462, "M", u"ѣ"),
        (0x463, "V"),
        (0x464, "M", u"ѥ"),
        (0x465, "V"),
        (0x466, "M", u"ѧ"),
        (0x467, "V"),
        (0x468, "M", u"ѩ"),
        (0x469, "V"),
        (0x46a, "M", u"ѫ"),
        (0x46b, "V"),
        (0x46c, "M", u"ѭ"),
        (0x46d, "V"),
        (0x46e, "M", u"ѯ"),
        (0x46f, "V"),
        (0x470, "M", u"ѱ"),
        (0x471, "V"),
        (0x472, "M", u"ѳ"),
        (0x473, "V"),
        (0x474, "M", u"ѵ"),
        (0x475, "V"),
        (0x476, "M", u"ѷ"),
        (0x477, "V"),
        (0x478, "M", u"ѹ"),
        (0x479, "V"),
        (0x47a, "M", u"ѻ"),
        (0x47b, "V"),
        (0x47c, "M", u"ѽ"),
        (0x47d, "V"),
        (0x47e, "M", u"ѿ"),
        (0x47f, "V"),
        (0x480, "M", u"ҁ"),
        (0x481, "V"),
        (0x48a, "M", u"ҋ"),
        (0x48b, "V"),
        (0x48c, "M", u"ҍ"),
        (0x48d, "V"),
        (0x48e, "M", u"ҏ"),
        (0x48f, "V"),
        (0x490, "M", u"ґ"),
        (0x491, "V"),
        (0x492, "M", u"ғ"),
        (0x493, "V"),
        (0x494, "M", u"ҕ"),
        (0x495, "V"),
        (0x496, "M", u"җ"),
        (0x497, "V"),
        (0x498, "M", u"ҙ"),
        (0x499, "V"),
        (0x49a, "M", u"қ"),
        (0x49b, "V"),
        (0x49c, "M", u"ҝ"),
        (0x49d, "V"),
    ]
def _seg_8():
    """Return segment 8 of the generated code point mapping table.

    Tuples are ``(codepoint, status[, replacement])``; generated data,
    do not hand-edit.
    """
    return [
        (0x49e, "M", u"ҟ"),
        (0x49f, "V"),
        (0x4a0, "M", u"ҡ"),
        (0x4a1, "V"),
        (0x4a2, "M", u"ң"),
        (0x4a3, "V"),
        (0x4a4, "M", u"ҥ"),
        (0x4a5, "V"),
        (0x4a6, "M", u"ҧ"),
        (0x4a7, "V"),
        (0x4a8, "M", u"ҩ"),
        (0x4a9, "V"),
        (0x4aa, "M", u"ҫ"),
        (0x4ab, "V"),
        (0x4ac, "M", u"ҭ"),
        (0x4ad, "V"),
        (0x4ae, "M", u"ү"),
        (0x4af, "V"),
        (0x4b0, "M", u"ұ"),
        (0x4b1, "V"),
        (0x4b2, "M", u"ҳ"),
        (0x4b3, "V"),
        (0x4b4, "M", u"ҵ"),
        (0x4b5, "V"),
        (0x4b6, "M", u"ҷ"),
        (0x4b7, "V"),
        (0x4b8, "M", u"ҹ"),
        (0x4b9, "V"),
        (0x4ba, "M", u"һ"),
        (0x4bb, "V"),
        (0x4bc, "M", u"ҽ"),
        (0x4bd, "V"),
        (0x4be, "M", u"ҿ"),
        (0x4bf, "V"),
        (0x4c0, "X"),
        (0x4c1, "M", u"ӂ"),
        (0x4c2, "V"),
        (0x4c3, "M", u"ӄ"),
        (0x4c4, "V"),
        (0x4c5, "M", u"ӆ"),
        (0x4c6, "V"),
        (0x4c7, "M", u"ӈ"),
        (0x4c8, "V"),
        (0x4c9, "M", u"ӊ"),
        (0x4ca, "V"),
        (0x4cb, "M", u"ӌ"),
        (0x4cc, "V"),
        (0x4cd, "M", u"ӎ"),
        (0x4ce, "V"),
        (0x4d0, "M", u"ӑ"),
        (0x4d1, "V"),
        (0x4d2, "M", u"ӓ"),
        (0x4d3, "V"),
        (0x4d4, "M", u"ӕ"),
        (0x4d5, "V"),
        (0x4d6, "M", u"ӗ"),
        (0x4d7, "V"),
        (0x4d8, "M", u"ә"),
        (0x4d9, "V"),
        (0x4da, "M", u"ӛ"),
        (0x4db, "V"),
        (0x4dc, "M", u"ӝ"),
        (0x4dd, "V"),
        (0x4de, "M", u"ӟ"),
        (0x4df, "V"),
        (0x4e0, "M", u"ӡ"),
        (0x4e1, "V"),
        (0x4e2, "M", u"ӣ"),
        (0x4e3, "V"),
        (0x4e4, "M", u"ӥ"),
        (0x4e5, "V"),
        (0x4e6, "M", u"ӧ"),
        (0x4e7, "V"),
        (0x4e8, "M", u"ө"),
        (0x4e9, "V"),
        (0x4ea, "M", u"ӫ"),
        (0x4eb, "V"),
        (0x4ec, "M", u"ӭ"),
        (0x4ed, "V"),
        (0x4ee, "M", u"ӯ"),
        (0x4ef, "V"),
        (0x4f0, "M", u"ӱ"),
        (0x4f1, "V"),
        (0x4f2, "M", u"ӳ"),
        (0x4f3, "V"),
        (0x4f4, "M", u"ӵ"),
        (0x4f5, "V"),
        (0x4f6, "M", u"ӷ"),
        (0x4f7, "V"),
        (0x4f8, "M", u"ӹ"),
        (0x4f9, "V"),
        (0x4fa, "M", u"ӻ"),
        (0x4fb, "V"),
        (0x4fc, "M", u"ӽ"),
        (0x4fd, "V"),
        (0x4fe, "M", u"ӿ"),
        (0x4ff, "V"),
        (0x500, "M", u"ԁ"),
        (0x501, "V"),
        (0x502, "M", u"ԃ"),
    ]
def _seg_9():
    """Return segment 9 of the generated code point mapping table.

    Tuples are ``(codepoint, status[, replacement])``; generated data,
    do not hand-edit.
    """
    return [
        (0x503, "V"),
        (0x504, "M", u"ԅ"),
        (0x505, "V"),
        (0x506, "M", u"ԇ"),
        (0x507, "V"),
        (0x508, "M", u"ԉ"),
        (0x509, "V"),
        (0x50a, "M", u"ԋ"),
        (0x50b, "V"),
        (0x50c, "M", u"ԍ"),
        (0x50d, "V"),
        (0x50e, "M", u"ԏ"),
        (0x50f, "V"),
        (0x510, "M", u"ԑ"),
        (0x511, "V"),
        (0x512, "M", u"ԓ"),
        (0x513, "V"),
        (0x514, "M", u"ԕ"),
        (0x515, "V"),
        (0x516, "M", u"ԗ"),
        (0x517, "V"),
        (0x518, "M", u"ԙ"),
        (0x519, "V"),
        (0x51a, "M", u"ԛ"),
        (0x51b, "V"),
        (0x51c, "M", u"ԝ"),
        (0x51d, "V"),
        (0x51e, "M", u"ԟ"),
        (0x51f, "V"),
        (0x520, "M", u"ԡ"),
        (0x521, "V"),
        (0x522, "M", u"ԣ"),
        (0x523, "V"),
        (0x524, "M", u"ԥ"),
        (0x525, "V"),
        (0x526, "M", u"ԧ"),
        (0x527, "V"),
        (0x528, "M", u"ԩ"),
        (0x529, "V"),
        (0x52a, "M", u"ԫ"),
        (0x52b, "V"),
        (0x52c, "M", u"ԭ"),
        (0x52d, "V"),
        (0x52e, "M", u"ԯ"),
        (0x52f, "V"),
        (0x530, "X"),
        (0x531, "M", u"ա"),
        (0x532, "M", u"բ"),
        (0x533, "M", u"գ"),
        (0x534, "M", u"դ"),
        (0x535, "M", u"ե"),
        (0x536, "M", u"զ"),
        (0x537, "M", u"է"),
        (0x538, "M", u"ը"),
        (0x539, "M", u"թ"),
        (0x53a, "M", u"ժ"),
        (0x53b, "M", u"ի"),
        (0x53c, "M", u"լ"),
        (0x53d, "M", u"խ"),
        (0x53e, "M", u"ծ"),
        (0x53f, "M", u"կ"),
        (0x540, "M", u"հ"),
        (0x541, "M", u"ձ"),
        (0x542, "M", u"ղ"),
        (0x543, "M", u"ճ"),
        (0x544, "M", u"մ"),
        (0x545, "M", u"յ"),
        (0x546, "M", u"ն"),
        (0x547, "M", u"շ"),
        (0x548, "M", u"ո"),
        (0x549, "M", u"չ"),
        (0x54a, "M", u"պ"),
        (0x54b, "M", u"ջ"),
        (0x54c, "M", u"ռ"),
        (0x54d, "M", u"ս"),
        (0x54e, "M", u"վ"),
        (0x54f, "M", u"տ"),
        (0x550, "M", u"ր"),
        (0x551, "M", u"ց"),
        (0x552, "M", u"ւ"),
        (0x553, "M", u"փ"),
        (0x554, "M", u"ք"),
        (0x555, "M", u"օ"),
        (0x556, "M", u"ֆ"),
        (0x557, "X"),
        (0x559, "V"),
        (0x587, "M", u"եւ"),
        (0x588, "V"),
        (0x58b, "X"),
        (0x58d, "V"),
        (0x590, "X"),
        (0x591, "V"),
        (0x5c8, "X"),
        (0x5d0, "V"),
        (0x5eb, "X"),
        (0x5ef, "V"),
        (0x5f5, "X"),
        (0x606, "V"),
        (0x61c, "X"),
        (0x61e, "V"),
    ]
def _seg_10():
    """Return segment 10 of the generated code point mapping table.

    Tuples are ``(codepoint, status[, replacement])``; generated data,
    do not hand-edit.
    """
    return [
        (0x675, "M", u"اٴ"),
        (0x676, "M", u"وٴ"),
        (0x677, "M", u"ۇٴ"),
        (0x678, "M", u"يٴ"),
        (0x679, "V"),
        (0x6dd, "X"),
        (0x6de, "V"),
        (0x70e, "X"),
        (0x710, "V"),
        (0x74b, "X"),
        (0x74d, "V"),
        (0x7b2, "X"),
        (0x7c0, "V"),
        (0x7fb, "X"),
        (0x7fd, "V"),
        (0x82e, "X"),
        (0x830, "V"),
        (0x83f, "X"),
        (0x840, "V"),
        (0x85c, "X"),
        (0x85e, "V"),
        (0x85f, "X"),
        (0x860, "V"),
        (0x86b, "X"),
        (0x8a0, "V"),
        (0x8b5, "X"),
        (0x8b6, "V"),
        (0x8be, "X"),
        (0x8d3, "V"),
        (0x8e2, "X"),
        (0x8e3, "V"),
        (0x958, "M", u"क़"),
        (0x959, "M", u"ख़"),
        (0x95a, "M", u"ग़"),
        (0x95b, "M", u"ज़"),
        (0x95c, "M", u"ड़"),
        (0x95d, "M", u"ढ़"),
        (0x95e, "M", u"फ़"),
        (0x95f, "M", u"य़"),
        (0x960, "V"),
        (0x984, "X"),
        (0x985, "V"),
        (0x98d, "X"),
        (0x98f, "V"),
        (0x991, "X"),
        (0x993, "V"),
        (0x9a9, "X"),
        (0x9aa, "V"),
        (0x9b1, "X"),
        (0x9b2, "V"),
        (0x9b3, "X"),
        (0x9b6, "V"),
        (0x9ba, "X"),
        (0x9bc, "V"),
        (0x9c5, "X"),
        (0x9c7, "V"),
        (0x9c9, "X"),
        (0x9cb, "V"),
        (0x9cf, "X"),
        (0x9d7, "V"),
        (0x9d8, "X"),
        (0x9dc, "M", u"ড়"),
        (0x9dd, "M", u"ঢ়"),
        (0x9de, "X"),
        (0x9df, "M", u"য়"),
        (0x9e0, "V"),
        (0x9e4, "X"),
        (0x9e6, "V"),
        (0x9ff, "X"),
        (0xa01, "V"),
        (0xa04, "X"),
        (0xa05, "V"),
        (0xa0b, "X"),
        (0xa0f, "V"),
        (0xa11, "X"),
        (0xa13, "V"),
        (0xa29, "X"),
        (0xa2a, "V"),
        (0xa31, "X"),
        (0xa32, "V"),
        (0xa33, "M", u"ਲ਼"),
        (0xa34, "X"),
        (0xa35, "V"),
        (0xa36, "M", u"ਸ਼"),
        (0xa37, "X"),
        (0xa38, "V"),
        (0xa3a, "X"),
        (0xa3c, "V"),
        (0xa3d, "X"),
        (0xa3e, "V"),
        (0xa43, "X"),
        (0xa47, "V"),
        (0xa49, "X"),
        (0xa4b, "V"),
        (0xa4e, "X"),
        (0xa51, "V"),
        (0xa52, "X"),
        (0xa59, "M", u"ਖ਼"),
        (0xa5a, "M", u"ਗ਼"),
        (0xa5b, "M", u"ਜ਼"),
    ]
def _seg_11():
    """Return segment 11 of the generated code point mapping table.

    Tuples are ``(codepoint, status[, replacement])``; generated data,
    do not hand-edit.
    """
    return [
        (0xa5c, "V"),
        (0xa5d, "X"),
        (0xa5e, "M", u"ਫ਼"),
        (0xa5f, "X"),
        (0xa66, "V"),
        (0xa77, "X"),
        (0xa81, "V"),
        (0xa84, "X"),
        (0xa85, "V"),
        (0xa8e, "X"),
        (0xa8f, "V"),
        (0xa92, "X"),
        (0xa93, "V"),
        (0xaa9, "X"),
        (0xaaa, "V"),
        (0xab1, "X"),
        (0xab2, "V"),
        (0xab4, "X"),
        (0xab5, "V"),
        (0xaba, "X"),
        (0xabc, "V"),
        (0xac6, "X"),
        (0xac7, "V"),
        (0xaca, "X"),
        (0xacb, "V"),
        (0xace, "X"),
        (0xad0, "V"),
        (0xad1, "X"),
        (0xae0, "V"),
        (0xae4, "X"),
        (0xae6, "V"),
        (0xaf2, "X"),
        (0xaf9, "V"),
        (0xb00, "X"),
        (0xb01, "V"),
        (0xb04, "X"),
        (0xb05, "V"),
        (0xb0d, "X"),
        (0xb0f, "V"),
        (0xb11, "X"),
        (0xb13, "V"),
        (0xb29, "X"),
        (0xb2a, "V"),
        (0xb31, "X"),
        (0xb32, "V"),
        (0xb34, "X"),
        (0xb35, "V"),
        (0xb3a, "X"),
        (0xb3c, "V"),
        (0xb45, "X"),
        (0xb47, "V"),
        (0xb49, "X"),
        (0xb4b, "V"),
        (0xb4e, "X"),
        (0xb56, "V"),
        (0xb58, "X"),
        (0xb5c, "M", u"ଡ଼"),
        (0xb5d, "M", u"ଢ଼"),
        (0xb5e, "X"),
        (0xb5f, "V"),
        (0xb64, "X"),
        (0xb66, "V"),
        (0xb78, "X"),
        (0xb82, "V"),
        (0xb84, "X"),
        (0xb85, "V"),
        (0xb8b, "X"),
        (0xb8e, "V"),
        (0xb91, "X"),
        (0xb92, "V"),
        (0xb96, "X"),
        (0xb99, "V"),
        (0xb9b, "X"),
        (0xb9c, "V"),
        (0xb9d, "X"),
        (0xb9e, "V"),
        (0xba0, "X"),
        (0xba3, "V"),
        (0xba5, "X"),
        (0xba8, "V"),
        (0xbab, "X"),
        (0xbae, "V"),
        (0xbba, "X"),
        (0xbbe, "V"),
        (0xbc3, "X"),
        (0xbc6, "V"),
        (0xbc9, "X"),
        (0xbca, "V"),
        (0xbce, "X"),
        (0xbd0, "V"),
        (0xbd1, "X"),
        (0xbd7, "V"),
        (0xbd8, "X"),
        (0xbe6, "V"),
        (0xbfb, "X"),
        (0xc00, "V"),
        (0xc0d, "X"),
        (0xc0e, "V"),
        (0xc11, "X"),
        (0xc12, "V"),
    ]
def _seg_12():
    """Return segment 12 of the generated code point mapping table.

    Tuples are ``(codepoint, status[, replacement])``; generated data,
    do not hand-edit.
    """
    return [
        (0xc29, "X"),
        (0xc2a, "V"),
        (0xc3a, "X"),
        (0xc3d, "V"),
        (0xc45, "X"),
        (0xc46, "V"),
        (0xc49, "X"),
        (0xc4a, "V"),
        (0xc4e, "X"),
        (0xc55, "V"),
        (0xc57, "X"),
        (0xc58, "V"),
        (0xc5b, "X"),
        (0xc60, "V"),
        (0xc64, "X"),
        (0xc66, "V"),
        (0xc70, "X"),
        (0xc77, "V"),
        (0xc8d, "X"),
        (0xc8e, "V"),
        (0xc91, "X"),
        (0xc92, "V"),
        (0xca9, "X"),
        (0xcaa, "V"),
        (0xcb4, "X"),
        (0xcb5, "V"),
        (0xcba, "X"),
        (0xcbc, "V"),
        (0xcc5, "X"),
        (0xcc6, "V"),
        (0xcc9, "X"),
        (0xcca, "V"),
        (0xcce, "X"),
        (0xcd5, "V"),
        (0xcd7, "X"),
        (0xcde, "V"),
        (0xcdf, "X"),
        (0xce0, "V"),
        (0xce4, "X"),
        (0xce6, "V"),
        (0xcf0, "X"),
        (0xcf1, "V"),
        (0xcf3, "X"),
        (0xd00, "V"),
        (0xd04, "X"),
        (0xd05, "V"),
        (0xd0d, "X"),
        (0xd0e, "V"),
        (0xd11, "X"),
        (0xd12, "V"),
        (0xd45, "X"),
        (0xd46, "V"),
        (0xd49, "X"),
        (0xd4a, "V"),
        (0xd50, "X"),
        (0xd54, "V"),
        (0xd64, "X"),
        (0xd66, "V"),
        (0xd80, "X"),
        (0xd82, "V"),
        (0xd84, "X"),
        (0xd85, "V"),
        (0xd97, "X"),
        (0xd9a, "V"),
        (0xdb2, "X"),
        (0xdb3, "V"),
        (0xdbc, "X"),
        (0xdbd, "V"),
        (0xdbe, "X"),
        (0xdc0, "V"),
        (0xdc7, "X"),
        (0xdca, "V"),
        (0xdcb, "X"),
        (0xdcf, "V"),
        (0xdd5, "X"),
        (0xdd6, "V"),
        (0xdd7, "X"),
        (0xdd8, "V"),
        (0xde0, "X"),
        (0xde6, "V"),
        (0xdf0, "X"),
        (0xdf2, "V"),
        (0xdf5, "X"),
        (0xe01, "V"),
        (0xe33, "M", u"ํา"),
        (0xe34, "V"),
        (0xe3b, "X"),
        (0xe3f, "V"),
        (0xe5c, "X"),
        (0xe81, "V"),
        (0xe83, "X"),
        (0xe84, "V"),
        (0xe85, "X"),
        (0xe86, "V"),
        (0xe8b, "X"),
        (0xe8c, "V"),
        (0xea4, "X"),
        (0xea5, "V"),
        (0xea6, "X"),
        (0xea7, "V"),
    ]
def _seg_13():
    """Return segment 13 of the generated code point mapping table.

    Tuples are ``(codepoint, status[, replacement])``; generated data,
    do not hand-edit.
    """
    return [
        (0xeb3, "M", u"ໍາ"),
        (0xeb4, "V"),
        (0xebe, "X"),
        (0xec0, "V"),
        (0xec5, "X"),
        (0xec6, "V"),
        (0xec7, "X"),
        (0xec8, "V"),
        (0xece, "X"),
        (0xed0, "V"),
        (0xeda, "X"),
        (0xedc, "M", u"ຫນ"),
        (0xedd, "M", u"ຫມ"),
        (0xede, "V"),
        (0xee0, "X"),
        (0xf00, "V"),
        (0xf0c, "M", u"་"),
        (0xf0d, "V"),
        (0xf43, "M", u"གྷ"),
        (0xf44, "V"),
        (0xf48, "X"),
        (0xf49, "V"),
        (0xf4d, "M", u"ཌྷ"),
        (0xf4e, "V"),
        (0xf52, "M", u"དྷ"),
        (0xf53, "V"),
        (0xf57, "M", u"བྷ"),
        (0xf58, "V"),
        (0xf5c, "M", u"ཛྷ"),
        (0xf5d, "V"),
        (0xf69, "M", u"ཀྵ"),
        (0xf6a, "V"),
        (0xf6d, "X"),
        (0xf71, "V"),
        (0xf73, "M", u"ཱི"),
        (0xf74, "V"),
        (0xf75, "M", u"ཱུ"),
        (0xf76, "M", u"ྲྀ"),
        (0xf77, "M", u"ྲཱྀ"),
        (0xf78, "M", u"ླྀ"),
        (0xf79, "M", u"ླཱྀ"),
        (0xf7a, "V"),
        (0xf81, "M", u"ཱྀ"),
        (0xf82, "V"),
        (0xf93, "M", u"ྒྷ"),
        (0xf94, "V"),
        (0xf98, "X"),
        (0xf99, "V"),
        (0xf9d, "M", u"ྜྷ"),
        (0xf9e, "V"),
        (0xfa2, "M", u"ྡྷ"),
        (0xfa3, "V"),
        (0xfa7, "M", u"ྦྷ"),
        (0xfa8, "V"),
        (0xfac, "M", u"ྫྷ"),
        (0xfad, "V"),
        (0xfb9, "M", u"ྐྵ"),
        (0xfba, "V"),
        (0xfbd, "X"),
        (0xfbe, "V"),
        (0xfcd, "X"),
        (0xfce, "V"),
        (0xfdb, "X"),
        (0x1000, "V"),
        (0x10a0, "X"),
        (0x10c7, "M", u"ⴧ"),
        (0x10c8, "X"),
        (0x10cd, "M", u"ⴭ"),
        (0x10ce, "X"),
        (0x10d0, "V"),
        (0x10fc, "M", u"ნ"),
        (0x10fd, "V"),
        (0x115f, "X"),
        (0x1161, "V"),
        (0x1249, "X"),
        (0x124a, "V"),
        (0x124e, "X"),
        (0x1250, "V"),
        (0x1257, "X"),
        (0x1258, "V"),
        (0x1259, "X"),
        (0x125a, "V"),
        (0x125e, "X"),
        (0x1260, "V"),
        (0x1289, "X"),
        (0x128a, "V"),
        (0x128e, "X"),
        (0x1290, "V"),
        (0x12b1, "X"),
        (0x12b2, "V"),
        (0x12b6, "X"),
        (0x12b8, "V"),
        (0x12bf, "X"),
        (0x12c0, "V"),
        (0x12c1, "X"),
        (0x12c2, "V"),
        (0x12c6, "X"),
        (0x12c8, "V"),
        (0x12d7, "X"),
        (0x12d8, "V"),
    ]
def _seg_14():
    """Return segment 14 of the generated code point mapping table.

    Tuples are ``(codepoint, status[, replacement])``; generated data,
    do not hand-edit.
    """
    return [
        (0x1311, "X"),
        (0x1312, "V"),
        (0x1316, "X"),
        (0x1318, "V"),
        (0x135b, "X"),
        (0x135d, "V"),
        (0x137d, "X"),
        (0x1380, "V"),
        (0x139a, "X"),
        (0x13a0, "V"),
        (0x13f6, "X"),
        (0x13f8, "M", u"Ᏸ"),
        (0x13f9, "M", u"Ᏹ"),
        (0x13fa, "M", u"Ᏺ"),
        (0x13fb, "M", u"Ᏻ"),
        (0x13fc, "M", u"Ᏼ"),
        (0x13fd, "M", u"Ᏽ"),
        (0x13fe, "X"),
        (0x1400, "V"),
        (0x1680, "X"),
        (0x1681, "V"),
        (0x169d, "X"),
        (0x16a0, "V"),
        (0x16f9, "X"),
        (0x1700, "V"),
        (0x170d, "X"),
        (0x170e, "V"),
        (0x1715, "X"),
        (0x1720, "V"),
        (0x1737, "X"),
        (0x1740, "V"),
        (0x1754, "X"),
        (0x1760, "V"),
        (0x176d, "X"),
        (0x176e, "V"),
        (0x1771, "X"),
        (0x1772, "V"),
        (0x1774, "X"),
        (0x1780, "V"),
        (0x17b4, "X"),
        (0x17b6, "V"),
        (0x17de, "X"),
        (0x17e0, "V"),
        (0x17ea, "X"),
        (0x17f0, "V"),
        (0x17fa, "X"),
        (0x1800, "V"),
        (0x1806, "X"),
        (0x1807, "V"),
        (0x180b, "I"),
        (0x180e, "X"),
        (0x1810, "V"),
        (0x181a, "X"),
        (0x1820, "V"),
        (0x1879, "X"),
        (0x1880, "V"),
        (0x18ab, "X"),
        (0x18b0, "V"),
        (0x18f6, "X"),
        (0x1900, "V"),
        (0x191f, "X"),
        (0x1920, "V"),
        (0x192c, "X"),
        (0x1930, "V"),
        (0x193c, "X"),
        (0x1940, "V"),
        (0x1941, "X"),
        (0x1944, "V"),
        (0x196e, "X"),
        (0x1970, "V"),
        (0x1975, "X"),
        (0x1980, "V"),
        (0x19ac, "X"),
        (0x19b0, "V"),
        (0x19ca, "X"),
        (0x19d0, "V"),
        (0x19db, "X"),
        (0x19de, "V"),
        (0x1a1c, "X"),
        (0x1a1e, "V"),
        (0x1a5f, "X"),
        (0x1a60, "V"),
        (0x1a7d, "X"),
        (0x1a7f, "V"),
        (0x1a8a, "X"),
        (0x1a90, "V"),
        (0x1a9a, "X"),
        (0x1aa0, "V"),
        (0x1aae, "X"),
        (0x1ab0, "V"),
        (0x1abf, "X"),
        (0x1b00, "V"),
        (0x1b4c, "X"),
        (0x1b50, "V"),
        (0x1b7d, "X"),
        (0x1b80, "V"),
        (0x1bf4, "X"),
        (0x1bfc, "V"),
        (0x1c38, "X"),
        (0x1c3b, "V"),
    ]
def _seg_15():
    """Return segment 15 of the generated code point mapping table.

    Tuples are ``(codepoint, status[, replacement])``; generated data,
    do not hand-edit.
    """
    return [
        (0x1c4a, "X"),
        (0x1c4d, "V"),
        (0x1c80, "M", u"в"),
        (0x1c81, "M", u"д"),
        (0x1c82, "M", u"о"),
        (0x1c83, "M", u"с"),
        (0x1c84, "M", u"т"),
        (0x1c86, "M", u"ъ"),
        (0x1c87, "M", u"ѣ"),
        (0x1c88, "M", u"ꙋ"),
        (0x1c89, "X"),
        (0x1c90, "M", u"ა"),
        (0x1c91, "M", u"ბ"),
        (0x1c92, "M", u"გ"),
        (0x1c93, "M", u"დ"),
        (0x1c94, "M", u"ე"),
        (0x1c95, "M", u"ვ"),
        (0x1c96, "M", u"ზ"),
        (0x1c97, "M", u"თ"),
        (0x1c98, "M", u"ი"),
        (0x1c99, "M", u"კ"),
        (0x1c9a, "M", u"ლ"),
        (0x1c9b, "M", u"მ"),
        (0x1c9c, "M", u"ნ"),
        (0x1c9d, "M", u"ო"),
        (0x1c9e, "M", u"პ"),
        (0x1c9f, "M", u"ჟ"),
        (0x1ca0, "M", u"რ"),
        (0x1ca1, "M", u"ს"),
        (0x1ca2, "M", u"ტ"),
        (0x1ca3, "M", u"უ"),
        (0x1ca4, "M", u"ფ"),
        (0x1ca5, "M", u"ქ"),
        (0x1ca6, "M", u"ღ"),
        (0x1ca7, "M", u"ყ"),
        (0x1ca8, "M", u"შ"),
        (0x1ca9, "M", u"ჩ"),
        (0x1caa, "M", u"ც"),
        (0x1cab, "M", u"ძ"),
        (0x1cac, "M", u"წ"),
        (0x1cad, "M", u"ჭ"),
        (0x1cae, "M", u"ხ"),
        (0x1caf, "M", u"ჯ"),
        (0x1cb0, "M", u"ჰ"),
        (0x1cb1, "M", u"ჱ"),
        (0x1cb2, "M", u"ჲ"),
        (0x1cb3, "M", u"ჳ"),
        (0x1cb4, "M", u"ჴ"),
        (0x1cb5, "M", u"ჵ"),
        (0x1cb6, "M", u"ჶ"),
        (0x1cb7, "M", u"ჷ"),
        (0x1cb8, "M", u"ჸ"),
        (0x1cb9, "M", u"ჹ"),
        (0x1cba, "M", u"ჺ"),
        (0x1cbb, "X"),
        (0x1cbd, "M", u"ჽ"),
        (0x1cbe, "M", u"ჾ"),
        (0x1cbf, "M", u"ჿ"),
        (0x1cc0, "V"),
        (0x1cc8, "X"),
        (0x1cd0, "V"),
        (0x1cfb, "X"),
        (0x1d00, "V"),
        (0x1d2c, "M", u"a"),
        (0x1d2d, "M", u"æ"),
        (0x1d2e, "M", u"b"),
        (0x1d2f, "V"),
        (0x1d30, "M", u"d"),
        (0x1d31, "M", u"e"),
        (0x1d32, "M", u"ǝ"),
        (0x1d33, "M", u"g"),
        (0x1d34, "M", u"h"),
        (0x1d35, "M", u"i"),
        (0x1d36, "M", u"j"),
        (0x1d37, "M", u"k"),
        (0x1d38, "M", u"l"),
        (0x1d39, "M", u"m"),
        (0x1d3a, "M", u"n"),
        (0x1d3b, "V"),
        (0x1d3c, "M", u"o"),
        (0x1d3d, "M", u"ȣ"),
        (0x1d3e, "M", u"p"),
        (0x1d3f, "M", u"r"),
        (0x1d40, "M", u"t"),
        (0x1d41, "M", u"u"),
        (0x1d42, "M", u"w"),
        (0x1d43, "M", u"a"),
        (0x1d44, "M", u"ɐ"),
        (0x1d45, "M", u"ɑ"),
        (0x1d46, "M", u"ᴂ"),
        (0x1d47, "M", u"b"),
        (0x1d48, "M", u"d"),
        (0x1d49, "M", u"e"),
        (0x1d4a, "M", u"ə"),
        (0x1d4b, "M", u"ɛ"),
        (0x1d4c, "M", u"ɜ"),
        (0x1d4d, "M", u"g"),
        (0x1d4e, "V"),
        (0x1d4f, "M", u"k"),
        (0x1d50, "M", u"m"),
    ]
def _seg_16():
    """Return segment 16 of the generated code point mapping table.

    Tuples are ``(codepoint, status[, replacement])``; generated data,
    do not hand-edit.
    """
    return [
        (0x1d51, "M", u"ŋ"),
        (0x1d52, "M", u"o"),
        (0x1d53, "M", u"ɔ"),
        (0x1d54, "M", u"ᴖ"),
        (0x1d55, "M", u"ᴗ"),
        (0x1d56, "M", u"p"),
        (0x1d57, "M", u"t"),
        (0x1d58, "M", u"u"),
        (0x1d59, "M", u"ᴝ"),
        (0x1d5a, "M", u"ɯ"),
        (0x1d5b, "M", u"v"),
        (0x1d5c, "M", u"ᴥ"),
        (0x1d5d, "M", u"β"),
        (0x1d5e, "M", u"γ"),
        (0x1d5f, "M", u"δ"),
        (0x1d60, "M", u"φ"),
        (0x1d61, "M", u"χ"),
        (0x1d62, "M", u"i"),
        (0x1d63, "M", u"r"),
        (0x1d64, "M", u"u"),
        (0x1d65, "M", u"v"),
        (0x1d66, "M", u"β"),
        (0x1d67, "M", u"γ"),
        (0x1d68, "M", u"ρ"),
        (0x1d69, "M", u"φ"),
        (0x1d6a, "M", u"χ"),
        (0x1d6b, "V"),
        (0x1d78, "M", u"н"),
        (0x1d79, "V"),
        (0x1d9b, "M", u"ɒ"),
        (0x1d9c, "M", u"c"),
        (0x1d9d, "M", u"ɕ"),
        (0x1d9e, "M", u"ð"),
        (0x1d9f, "M", u"ɜ"),
        (0x1da0, "M", u"f"),
        (0x1da1, "M", u"ɟ"),
        (0x1da2, "M", u"ɡ"),
        (0x1da3, "M", u"ɥ"),
        (0x1da4, "M", u"ɨ"),
        (0x1da5, "M", u"ɩ"),
        (0x1da6, "M", u"ɪ"),
        (0x1da7, "M", u"ᵻ"),
        (0x1da8, "M", u"ʝ"),
        (0x1da9, "M", u"ɭ"),
        (0x1daa, "M", u"ᶅ"),
        (0x1dab, "M", u"ʟ"),
        (0x1dac, "M", u"ɱ"),
        (0x1dad, "M", u"ɰ"),
        (0x1dae, "M", u"ɲ"),
        (0x1daf, "M", u"ɳ"),
        (0x1db0, "M", u"ɴ"),
        (0x1db1, "M", u"ɵ"),
        (0x1db2, "M", u"ɸ"),
        (0x1db3, "M", u"ʂ"),
        (0x1db4, "M", u"ʃ"),
        (0x1db5, "M", u"ƫ"),
        (0x1db6, "M", u"ʉ"),
        (0x1db7, "M", u"ʊ"),
        (0x1db8, "M", u"ᴜ"),
        (0x1db9, "M", u"ʋ"),
        (0x1dba, "M", u"ʌ"),
        (0x1dbb, "M", u"z"),
        (0x1dbc, "M", u"ʐ"),
        (0x1dbd, "M", u"ʑ"),
        (0x1dbe, "M", u"ʒ"),
        (0x1dbf, "M", u"θ"),
        (0x1dc0, "V"),
        (0x1dfa, "X"),
        (0x1dfb, "V"),
        (0x1e00, "M", u"ḁ"),
        (0x1e01, "V"),
        (0x1e02, "M", u"ḃ"),
        (0x1e03, "V"),
        (0x1e04, "M", u"ḅ"),
        (0x1e05, "V"),
        (0x1e06, "M", u"ḇ"),
        (0x1e07, "V"),
        (0x1e08, "M", u"ḉ"),
        (0x1e09, "V"),
        (0x1e0a, "M", u"ḋ"),
        (0x1e0b, "V"),
        (0x1e0c, "M", u"ḍ"),
        (0x1e0d, "V"),
        (0x1e0e, "M", u"ḏ"),
        (0x1e0f, "V"),
        (0x1e10, "M", u"ḑ"),
        (0x1e11, "V"),
        (0x1e12, "M", u"ḓ"),
        (0x1e13, "V"),
        (0x1e14, "M", u"ḕ"),
        (0x1e15, "V"),
        (0x1e16, "M", u"ḗ"),
        (0x1e17, "V"),
        (0x1e18, "M", u"ḙ"),
        (0x1e19, "V"),
        (0x1e1a, "M", u"ḛ"),
        (0x1e1b, "V"),
        (0x1e1c, "M", u"ḝ"),
        (0x1e1d, "V"),
        (0x1e1e, "M", u"ḟ"),
    ]
def _seg_17():
    """Return segment 17 of the generated code point mapping table.

    Tuples are ``(codepoint, status[, replacement])``; generated data,
    do not hand-edit.
    """
    return [
        (0x1e1f, "V"),
        (0x1e20, "M", u"ḡ"),
        (0x1e21, "V"),
        (0x1e22, "M", u"ḣ"),
        (0x1e23, "V"),
        (0x1e24, "M", u"ḥ"),
        (0x1e25, "V"),
        (0x1e26, "M", u"ḧ"),
        (0x1e27, "V"),
        (0x1e28, "M", u"ḩ"),
        (0x1e29, "V"),
        (0x1e2a, "M", u"ḫ"),
        (0x1e2b, "V"),
        (0x1e2c, "M", u"ḭ"),
        (0x1e2d, "V"),
        (0x1e2e, "M", u"ḯ"),
        (0x1e2f, "V"),
        (0x1e30, "M", u"ḱ"),
        (0x1e31, "V"),
        (0x1e32, "M", u"ḳ"),
        (0x1e33, "V"),
        (0x1e34, "M", u"ḵ"),
        (0x1e35, "V"),
        (0x1e36, "M", u"ḷ"),
        (0x1e37, "V"),
        (0x1e38, "M", u"ḹ"),
        (0x1e39, "V"),
        (0x1e3a, "M", u"ḻ"),
        (0x1e3b, "V"),
        (0x1e3c, "M", u"ḽ"),
        (0x1e3d, "V"),
        (0x1e3e, "M", u"ḿ"),
        (0x1e3f, "V"),
        (0x1e40, "M", u"ṁ"),
        (0x1e41, "V"),
        (0x1e42, "M", u"ṃ"),
        (0x1e43, "V"),
        (0x1e44, "M", u"ṅ"),
        (0x1e45, "V"),
        (0x1e46, "M", u"ṇ"),
        (0x1e47, "V"),
        (0x1e48, "M", u"ṉ"),
        (0x1e49, "V"),
        (0x1e4a, "M", u"ṋ"),
        (0x1e4b, "V"),
        (0x1e4c, "M", u"ṍ"),
        (0x1e4d, "V"),
        (0x1e4e, "M", u"ṏ"),
        (0x1e4f, "V"),
        (0x1e50, "M", u"ṑ"),
        (0x1e51, "V"),
        (0x1e52, "M", u"ṓ"),
        (0x1e53, "V"),
        (0x1e54, "M", u"ṕ"),
        (0x1e55, "V"),
        (0x1e56, "M", u"ṗ"),
        (0x1e57, "V"),
        (0x1e58, "M", u"ṙ"),
        (0x1e59, "V"),
        (0x1e5a, "M", u"ṛ"),
        (0x1e5b, "V"),
        (0x1e5c, "M", u"ṝ"),
        (0x1e5d, "V"),
        (0x1e5e, "M", u"ṟ"),
        (0x1e5f, "V"),
        (0x1e60, "M", u"ṡ"),
        (0x1e61, "V"),
        (0x1e62, "M", u"ṣ"),
        (0x1e63, "V"),
        (0x1e64, "M", u"ṥ"),
        (0x1e65, "V"),
        (0x1e66, "M", u"ṧ"),
        (0x1e67, "V"),
        (0x1e68, "M", u"ṩ"),
        (0x1e69, "V"),
        (0x1e6a, "M", u"ṫ"),
        (0x1e6b, "V"),
        (0x1e6c, "M", u"ṭ"),
        (0x1e6d, "V"),
        (0x1e6e, "M", u"ṯ"),
        (0x1e6f, "V"),
        (0x1e70, "M", u"ṱ"),
        (0x1e71, "V"),
        (0x1e72, "M", u"ṳ"),
        (0x1e73, "V"),
        (0x1e74, "M", u"ṵ"),
        (0x1e75, "V"),
        (0x1e76, "M", u"ṷ"),
        (0x1e77, "V"),
        (0x1e78, "M", u"ṹ"),
        (0x1e79, "V"),
        (0x1e7a, "M", u"ṻ"),
        (0x1e7b, "V"),
        (0x1e7c, "M", u"ṽ"),
        (0x1e7d, "V"),
        (0x1e7e, "M", u"ṿ"),
        (0x1e7f, "V"),
        (0x1e80, "M", u"ẁ"),
        (0x1e81, "V"),
        (0x1e82, "M", u"ẃ"),
    ]
def _seg_18():
    """Return segment 18 of the generated code point mapping table.

    Tuples are ``(codepoint, status[, replacement])``; generated data,
    do not hand-edit.
    """
    return [
        (0x1e83, "V"),
        (0x1e84, "M", u"ẅ"),
        (0x1e85, "V"),
        (0x1e86, "M", u"ẇ"),
        (0x1e87, "V"),
        (0x1e88, "M", u"ẉ"),
        (0x1e89, "V"),
        (0x1e8a, "M", u"ẋ"),
        (0x1e8b, "V"),
        (0x1e8c, "M", u"ẍ"),
        (0x1e8d, "V"),
        (0x1e8e, "M", u"ẏ"),
        (0x1e8f, "V"),
        (0x1e90, "M", u"ẑ"),
        (0x1e91, "V"),
        (0x1e92, "M", u"ẓ"),
        (0x1e93, "V"),
        (0x1e94, "M", u"ẕ"),
        (0x1e95, "V"),
        (0x1e9a, "M", u"aʾ"),
        (0x1e9b, "M", u"ṡ"),
        (0x1e9c, "V"),
        (0x1e9e, "M", u"ss"),
        (0x1e9f, "V"),
        (0x1ea0, "M", u"ạ"),
        (0x1ea1, "V"),
        (0x1ea2, "M", u"ả"),
        (0x1ea3, "V"),
        (0x1ea4, "M", u"ấ"),
        (0x1ea5, "V"),
        (0x1ea6, "M", u"ầ"),
        (0x1ea7, "V"),
        (0x1ea8, "M", u"ẩ"),
        (0x1ea9, "V"),
        (0x1eaa, "M", u"ẫ"),
        (0x1eab, "V"),
        (0x1eac, "M", u"ậ"),
        (0x1ead, "V"),
        (0x1eae, "M", u"ắ"),
        (0x1eaf, "V"),
        (0x1eb0, "M", u"ằ"),
        (0x1eb1, "V"),
        (0x1eb2, "M", u"ẳ"),
        (0x1eb3, "V"),
        (0x1eb4, "M", u"ẵ"),
        (0x1eb5, "V"),
        (0x1eb6, "M", u"ặ"),
        (0x1eb7, "V"),
        (0x1eb8, "M", u"ẹ"),
        (0x1eb9, "V"),
        (0x1eba, "M", u"ẻ"),
        (0x1ebb, "V"),
        (0x1ebc, "M", u"ẽ"),
        (0x1ebd, "V"),
        (0x1ebe, "M", u"ế"),
        (0x1ebf, "V"),
        (0x1ec0, "M", u"ề"),
        (0x1ec1, "V"),
        (0x1ec2, "M", u"ể"),
        (0x1ec3, "V"),
        (0x1ec4, "M", u"ễ"),
        (0x1ec5, "V"),
        (0x1ec6, "M", u"ệ"),
        (0x1ec7, "V"),
        (0x1ec8, "M", u"ỉ"),
        (0x1ec9, "V"),
        (0x1eca, "M", u"ị"),
        (0x1ecb, "V"),
        (0x1ecc, "M", u"ọ"),
        (0x1ecd, "V"),
        (0x1ece, "M", u"ỏ"),
        (0x1ecf, "V"),
        (0x1ed0, "M", u"ố"),
        (0x1ed1, "V"),
        (0x1ed2, "M", u"ồ"),
        (0x1ed3, "V"),
        (0x1ed4, "M", u"ổ"),
        (0x1ed5, "V"),
        (0x1ed6, "M", u"ỗ"),
        (0x1ed7, "V"),
        (0x1ed8, "M", u"ộ"),
        (0x1ed9, "V"),
        (0x1eda, "M", u"ớ"),
        (0x1edb, "V"),
        (0x1edc, "M", u"ờ"),
        (0x1edd, "V"),
        (0x1ede, "M", u"ở"),
        (0x1edf, "V"),
        (0x1ee0, "M", u"ỡ"),
        (0x1ee1, "V"),
        (0x1ee2, "M", u"ợ"),
        (0x1ee3, "V"),
        (0x1ee4, "M", u"ụ"),
        (0x1ee5, "V"),
        (0x1ee6, "M", u"ủ"),
        (0x1ee7, "V"),
        (0x1ee8, "M", u"ứ"),
        (0x1ee9, "V"),
        (0x1eea, "M", u"ừ"),
        (0x1eeb, "V"),
    ]
def _seg_19():
    """Return segment 19 of the generated code point mapping table.

    Tuples are ``(codepoint, status[, replacement])``; generated data,
    do not hand-edit.
    """
    return [
        (0x1eec, "M", u"ử"),
        (0x1eed, "V"),
        (0x1eee, "M", u"ữ"),
        (0x1eef, "V"),
        (0x1ef0, "M", u"ự"),
        (0x1ef1, "V"),
        (0x1ef2, "M", u"ỳ"),
        (0x1ef3, "V"),
        (0x1ef4, "M", u"ỵ"),
        (0x1ef5, "V"),
        (0x1ef6, "M", u"ỷ"),
        (0x1ef7, "V"),
        (0x1ef8, "M", u"ỹ"),
        (0x1ef9, "V"),
        (0x1efa, "M", u"ỻ"),
        (0x1efb, "V"),
        (0x1efc, "M", u"ỽ"),
        (0x1efd, "V"),
        (0x1efe, "M", u"ỿ"),
        (0x1eff, "V"),
        (0x1f08, "M", u"ἀ"),
        (0x1f09, "M", u"ἁ"),
        (0x1f0a, "M", u"ἂ"),
        (0x1f0b, "M", u"ἃ"),
        (0x1f0c, "M", u"ἄ"),
        (0x1f0d, "M", u"ἅ"),
        (0x1f0e, "M", u"ἆ"),
        (0x1f0f, "M", u"ἇ"),
        (0x1f10, "V"),
        (0x1f16, "X"),
        (0x1f18, "M", u"ἐ"),
        (0x1f19, "M", u"ἑ"),
        (0x1f1a, "M", u"ἒ"),
        (0x1f1b, "M", u"ἓ"),
        (0x1f1c, "M", u"ἔ"),
        (0x1f1d, "M", u"ἕ"),
        (0x1f1e, "X"),
        (0x1f20, "V"),
        (0x1f28, "M", u"ἠ"),
        (0x1f29, "M", u"ἡ"),
        (0x1f2a, "M", u"ἢ"),
        (0x1f2b, "M", u"ἣ"),
        (0x1f2c, "M", u"ἤ"),
        (0x1f2d, "M", u"ἥ"),
        (0x1f2e, "M", u"ἦ"),
        (0x1f2f, "M", u"ἧ"),
        (0x1f30, "V"),
        (0x1f38, "M", u"ἰ"),
        (0x1f39, "M", u"ἱ"),
        (0x1f3a, "M", u"ἲ"),
        (0x1f3b, "M", u"ἳ"),
        (0x1f3c, "M", u"ἴ"),
        (0x1f3d, "M", u"ἵ"),
        (0x1f3e, "M", u"ἶ"),
        (0x1f3f, "M", u"ἷ"),
        (0x1f40, "V"),
        (0x1f46, "X"),
        (0x1f48, "M", u"ὀ"),
        (0x1f49, "M", u"ὁ"),
        (0x1f4a, "M", u"ὂ"),
        (0x1f4b, "M", u"ὃ"),
        (0x1f4c, "M", u"ὄ"),
        (0x1f4d, "M", u"ὅ"),
        (0x1f4e, "X"),
        (0x1f50, "V"),
        (0x1f58, "X"),
        (0x1f59, "M", u"ὑ"),
        (0x1f5a, "X"),
        (0x1f5b, "M", u"ὓ"),
        (0x1f5c, "X"),
        (0x1f5d, "M", u"ὕ"),
        (0x1f5e, "X"),
        (0x1f5f, "M", u"ὗ"),
        (0x1f60, "V"),
        (0x1f68, "M", u"ὠ"),
        (0x1f69, "M", u"ὡ"),
        (0x1f6a, "M", u"ὢ"),
        (0x1f6b, "M", u"ὣ"),
        (0x1f6c, "M", u"ὤ"),
        (0x1f6d, "M", u"ὥ"),
        (0x1f6e, "M", u"ὦ"),
        (0x1f6f, "M", u"ὧ"),
        (0x1f70, "V"),
        (0x1f71, "M", u"ά"),
        (0x1f72, "V"),
        (0x1f73, "M", u"έ"),
        (0x1f74, "V"),
        (0x1f75, "M", u"ή"),
        (0x1f76, "V"),
        (0x1f77, "M", u"ί"),
        (0x1f78, "V"),
        (0x1f79, "M", u"ό"),
        (0x1f7a, "V"),
        (0x1f7b, "M", u"ύ"),
        (0x1f7c, "V"),
        (0x1f7d, "M", u"ώ"),
        (0x1f7e, "X"),
        (0x1f80, "M", u"ἀι"),
        (0x1f81, "M", u"ἁι"),
        (0x1f82, "M", u"ἂι"),
    ]
def _seg_20():
    """Return segment 20 of the generated code point mapping table.

    Tuples are ``(codepoint, status[, replacement])``; generated data,
    do not hand-edit.
    """
    return [
        (0x1f83, "M", u"ἃι"),
        (0x1f84, "M", u"ἄι"),
        (0x1f85, "M", u"ἅι"),
        (0x1f86, "M", u"ἆι"),
        (0x1f87, "M", u"ἇι"),
        (0x1f88, "M", u"ἀι"),
        (0x1f89, "M", u"ἁι"),
        (0x1f8a, "M", u"ἂι"),
        (0x1f8b, "M", u"ἃι"),
        (0x1f8c, "M", u"ἄι"),
        (0x1f8d, "M", u"ἅι"),
        (0x1f8e, "M", u"ἆι"),
        (0x1f8f, "M", u"ἇι"),
        (0x1f90, "M", u"ἠι"),
        (0x1f91, "M", u"ἡι"),
        (0x1f92, "M", u"ἢι"),
        (0x1f93, "M", u"ἣι"),
        (0x1f94, "M", u"ἤι"),
        (0x1f95, "M", u"ἥι"),
        (0x1f96, "M", u"ἦι"),
        (0x1f97, "M", u"ἧι"),
        (0x1f98, "M", u"ἠι"),
        (0x1f99, "M", u"ἡι"),
        (0x1f9a, "M", u"ἢι"),
        (0x1f9b, "M", u"ἣι"),
        (0x1f9c, "M", u"ἤι"),
        (0x1f9d, "M", u"ἥι"),
        (0x1f9e, "M", u"ἦι"),
        (0x1f9f, "M", u"ἧι"),
        (0x1fa0, "M", u"ὠι"),
        (0x1fa1, "M", u"ὡι"),
        (0x1fa2, "M", u"ὢι"),
        (0x1fa3, "M", u"ὣι"),
        (0x1fa4, "M", u"ὤι"),
        (0x1fa5, "M", u"ὥι"),
        (0x1fa6, "M", u"ὦι"),
        (0x1fa7, "M", u"ὧι"),
        (0x1fa8, "M", u"ὠι"),
        (0x1fa9, "M", u"ὡι"),
        (0x1faa, "M", u"ὢι"),
        (0x1fab, "M", u"ὣι"),
        (0x1fac, "M", u"ὤι"),
        (0x1fad, "M", u"ὥι"),
        (0x1fae, "M", u"ὦι"),
        (0x1faf, "M", u"ὧι"),
        (0x1fb0, "V"),
        (0x1fb2, "M", u"ὰι"),
        (0x1fb3, "M", u"αι"),
        (0x1fb4, "M", u"άι"),
        (0x1fb5, "X"),
        (0x1fb6, "V"),
        (0x1fb7, "M", u"ᾶι"),
        (0x1fb8, "M", u"ᾰ"),
        (0x1fb9, "M", u"ᾱ"),
        (0x1fba, "M", u"ὰ"),
        (0x1fbb, "M", u"ά"),
        (0x1fbc, "M", u"αι"),
        (0x1fbd, "3", u" ̓"),
        (0x1fbe, "M", u"ι"),
        (0x1fbf, "3", u" ̓"),
        (0x1fc0, "3", u" ͂"),
        (0x1fc1, "3", u" ̈͂"),
        (0x1fc2, "M", u"ὴι"),
        (0x1fc3, "M", u"ηι"),
        (0x1fc4, "M", u"ήι"),
        (0x1fc5, "X"),
        (0x1fc6, "V"),
        (0x1fc7, "M", u"ῆι"),
        (0x1fc8, "M", u"ὲ"),
        (0x1fc9, "M", u"έ"),
        (0x1fca, "M", u"ὴ"),
        (0x1fcb, "M", u"ή"),
        (0x1fcc, "M", u"ηι"),
        (0x1fcd, "3", u" ̓̀"),
        (0x1fce, "3", u" ̓́"),
        (0x1fcf, "3", u" ̓͂"),
        (0x1fd0, "V"),
        (0x1fd3, "M", u"ΐ"),
        (0x1fd4, "X"),
        (0x1fd6, "V"),
        (0x1fd8, "M", u"ῐ"),
        (0x1fd9, "M", u"ῑ"),
        (0x1fda, "M", u"ὶ"),
        (0x1fdb, "M", u"ί"),
        (0x1fdc, "X"),
        (0x1fdd, "3", u" ̔̀"),
        (0x1fde, "3", u" ̔́"),
        (0x1fdf, "3", u" ̔͂"),
        (0x1fe0, "V"),
        (0x1fe3, "M", u"ΰ"),
        (0x1fe4, "V"),
        (0x1fe8, "M", u"ῠ"),
        (0x1fe9, "M", u"ῡ"),
        (0x1fea, "M", u"ὺ"),
        (0x1feb, "M", u"ύ"),
        (0x1fec, "M", u"ῥ"),
        (0x1fed, "3", u" ̈̀"),
        (0x1fee, "3", u" ̈́"),
        (0x1fef, "3", u"`"),
        (0x1ff0, "X"),
    ]
def _seg_21():
    """Return segment 21 of the generated code point mapping table.

    Tuples are ``(codepoint, status[, replacement])``; generated data,
    do not hand-edit.
    """
    return [
        (0x1ff2, "M", u"ὼι"),
        (0x1ff3, "M", u"ωι"),
        (0x1ff4, "M", u"ώι"),
        (0x1ff5, "X"),
        (0x1ff6, "V"),
        (0x1ff7, "M", u"ῶι"),
        (0x1ff8, "M", u"ὸ"),
        (0x1ff9, "M", u"ό"),
        (0x1ffa, "M", u"ὼ"),
        (0x1ffb, "M", u"ώ"),
        (0x1ffc, "M", u"ωι"),
        (0x1ffd, "3", u" ́"),
        (0x1ffe, "3", u" ̔"),
        (0x1fff, "X"),
        (0x2000, "3", u" "),
        (0x200b, "I"),
        (0x200c, "D", u""),
        (0x200e, "X"),
        (0x2010, "V"),
        (0x2011, "M", u"‐"),
        (0x2012, "V"),
        (0x2017, "3", u" ̳"),
        (0x2018, "V"),
        (0x2024, "X"),
        (0x2027, "V"),
        (0x2028, "X"),
        (0x202f, "3", u" "),
        (0x2030, "V"),
        (0x2033, "M", u"′′"),
        (0x2034, "M", u"′′′"),
        (0x2035, "V"),
        (0x2036, "M", u"‵‵"),
        (0x2037, "M", u"‵‵‵"),
        (0x2038, "V"),
        (0x203c, "3", u"!!"),
        (0x203d, "V"),
        (0x203e, "3", u" ̅"),
        (0x203f, "V"),
        (0x2047, "3", u"??"),
        (0x2048, "3", u"?!"),
        (0x2049, "3", u"!?"),
        (0x204a, "V"),
        (0x2057, "M", u"′′′′"),
        (0x2058, "V"),
        (0x205f, "3", u" "),
        (0x2060, "I"),
        (0x2061, "X"),
        (0x2064, "I"),
        (0x2065, "X"),
        (0x2070, "M", u"0"),
        (0x2071, "M", u"i"),
        (0x2072, "X"),
        (0x2074, "M", u"4"),
        (0x2075, "M", u"5"),
        (0x2076, "M", u"6"),
        (0x2077, "M", u"7"),
        (0x2078, "M", u"8"),
        (0x2079, "M", u"9"),
        (0x207a, "3", u"+"),
        (0x207b, "M", u"−"),
        (0x207c, "3", u"="),
        (0x207d, "3", u"("),
        (0x207e, "3", u")"),
        (0x207f, "M", u"n"),
        (0x2080, "M", u"0"),
        (0x2081, "M", u"1"),
        (0x2082, "M", u"2"),
        (0x2083, "M", u"3"),
        (0x2084, "M", u"4"),
        (0x2085, "M", u"5"),
        (0x2086, "M", u"6"),
        (0x2087, "M", u"7"),
        (0x2088, "M", u"8"),
        (0x2089, "M", u"9"),
        (0x208a, "3", u"+"),
        (0x208b, "M", u"−"),
        (0x208c, "3", u"="),
        (0x208d, "3", u"("),
        (0x208e, "3", u")"),
        (0x208f, "X"),
        (0x2090, "M", u"a"),
        (0x2091, "M", u"e"),
        (0x2092, "M", u"o"),
        (0x2093, "M", u"x"),
        (0x2094, "M", u"ə"),
        (0x2095, "M", u"h"),
        (0x2096, "M", u"k"),
        (0x2097, "M", u"l"),
        (0x2098, "M", u"m"),
        (0x2099, "M", u"n"),
        (0x209a, "M", u"p"),
        (0x209b, "M", u"s"),
        (0x209c, "M", u"t"),
        (0x209d, "X"),
        (0x20a0, "V"),
        (0x20a8, "M", u"rs"),
        (0x20a9, "V"),
        (0x20c0, "X"),
        (0x20d0, "V"),
        (0x20f1, "X"),
    ]
def _seg_22():
    """Return segment 22 of the generated code point mapping table.

    Tuples are ``(codepoint, status[, replacement])``; generated data,
    do not hand-edit.
    """
    return [
        (0x2100, "3", u"a/c"),
        (0x2101, "3", u"a/s"),
        (0x2102, "M", u"c"),
        (0x2103, "M", u"°c"),
        (0x2104, "V"),
        (0x2105, "3", u"c/o"),
        (0x2106, "3", u"c/u"),
        (0x2107, "M", u"ɛ"),
        (0x2108, "V"),
        (0x2109, "M", u"°f"),
        (0x210a, "M", u"g"),
        (0x210b, "M", u"h"),
        (0x210f, "M", u"ħ"),
        (0x2110, "M", u"i"),
        (0x2112, "M", u"l"),
        (0x2114, "V"),
        (0x2115, "M", u"n"),
        (0x2116, "M", u"no"),
        (0x2117, "V"),
        (0x2119, "M", u"p"),
        (0x211a, "M", u"q"),
        (0x211b, "M", u"r"),
        (0x211e, "V"),
        (0x2120, "M", u"sm"),
        (0x2121, "M", u"tel"),
        (0x2122, "M", u"tm"),
        (0x2123, "V"),
        (0x2124, "M", u"z"),
        (0x2125, "V"),
        (0x2126, "M", u"ω"),
        (0x2127, "V"),
        (0x2128, "M", u"z"),
        (0x2129, "V"),
        (0x212a, "M", u"k"),
        (0x212b, "M", u"å"),
        (0x212c, "M", u"b"),
        (0x212d, "M", u"c"),
        (0x212e, "V"),
        (0x212f, "M", u"e"),
        (0x2131, "M", u"f"),
        (0x2132, "X"),
        (0x2133, "M", u"m"),
        (0x2134, "M", u"o"),
        (0x2135, "M", u"א"),
        (0x2136, "M", u"ב"),
        (0x2137, "M", u"ג"),
        (0x2138, "M", u"ד"),
        (0x2139, "M", u"i"),
        (0x213a, "V"),
        (0x213b, "M", u"fax"),
        (0x213c, "M", u"π"),
        (0x213d, "M", u"γ"),
        (0x213f, "M", u"π"),
        (0x2140, "M", u"∑"),
        (0x2141, "V"),
        (0x2145, "M", u"d"),
        (0x2147, "M", u"e"),
        (0x2148, "M", u"i"),
        (0x2149, "M", u"j"),
        (0x214a, "V"),
        (0x2150, "M", u"1⁄7"),
        (0x2151, "M", u"1⁄9"),
        (0x2152, "M", u"1⁄10"),
        (0x2153, "M", u"1⁄3"),
        (0x2154, "M", u"2⁄3"),
        (0x2155, "M", u"1⁄5"),
        (0x2156, "M", u"2⁄5"),
        (0x2157, "M", u"3⁄5"),
        (0x2158, "M", u"4⁄5"),
        (0x2159, "M", u"1⁄6"),
        (0x215a, "M", u"5⁄6"),
        (0x215b, "M", u"1⁄8"),
        (0x215c, "M", u"3⁄8"),
        (0x215d, "M", u"5⁄8"),
        (0x215e, "M", u"7⁄8"),
        (0x215f, "M", u"1⁄"),
        (0x2160, "M", u"i"),
        (0x2161, "M", u"ii"),
        (0x2162, "M", u"iii"),
        (0x2163, "M", u"iv"),
        (0x2164, "M", u"v"),
        (0x2165, "M", u"vi"),
        (0x2166, "M", u"vii"),
        (0x2167, "M", u"viii"),
        (0x2168, "M", u"ix"),
        (0x2169, "M", u"x"),
        (0x216a, "M", u"xi"),
        (0x216b, "M", u"xii"),
        (0x216c, "M", u"l"),
        (0x216d, "M", u"c"),
        (0x216e, "M", u"d"),
        (0x216f, "M", u"m"),
        (0x2170, "M", u"i"),
        (0x2171, "M", u"ii"),
        (0x2172, "M", u"iii"),
        (0x2173, "M", u"iv"),
        (0x2174, "M", u"v"),
        (0x2175, "M", u"vi"),
        (0x2176, "M", u"vii"),
        (0x2177, "M", u"viii"),
    ]
def _seg_23():
    """Segment 23 of the generated code-point table.

    Each entry is ``(codepoint, status)`` or ``(codepoint, status, mapping)``;
    statuses seen are "V", "M", "3", "X" — presumably valid / mapped /
    disallowed_STD3 / disallowed per IDNA UTS #46 (TODO confirm against the
    table's consumer).  Entries are ordered by codepoint.
    """
    return [
        (0x2178, "M", u"ix"),
        (0x2179, "M", u"x"),
        (0x217a, "M", u"xi"),
        (0x217b, "M", u"xii"),
        (0x217c, "M", u"l"),
        (0x217d, "M", u"c"),
        (0x217e, "M", u"d"),
        (0x217f, "M", u"m"),
        (0x2180, "V"),
        (0x2183, "X"),
        (0x2184, "V"),
        (0x2189, "M", u"0⁄3"),
        (0x218a, "V"),
        (0x218c, "X"),
        (0x2190, "V"),
        (0x222c, "M", u"∫∫"),
        (0x222d, "M", u"∫∫∫"),
        (0x222e, "V"),
        (0x222f, "M", u"∮∮"),
        (0x2230, "M", u"∮∮∮"),
        (0x2231, "V"),
        (0x2260, "3"),
        (0x2261, "V"),
        (0x226e, "3"),
        (0x2270, "V"),
        (0x2329, "M", u"〈"),
        (0x232a, "M", u"〉"),
        (0x232b, "V"),
        (0x2427, "X"),
        (0x2440, "V"),
        (0x244b, "X"),
        (0x2460, "M", u"1"),
        (0x2461, "M", u"2"),
        (0x2462, "M", u"3"),
        (0x2463, "M", u"4"),
        (0x2464, "M", u"5"),
        (0x2465, "M", u"6"),
        (0x2466, "M", u"7"),
        (0x2467, "M", u"8"),
        (0x2468, "M", u"9"),
        (0x2469, "M", u"10"),
        (0x246a, "M", u"11"),
        (0x246b, "M", u"12"),
        (0x246c, "M", u"13"),
        (0x246d, "M", u"14"),
        (0x246e, "M", u"15"),
        (0x246f, "M", u"16"),
        (0x2470, "M", u"17"),
        (0x2471, "M", u"18"),
        (0x2472, "M", u"19"),
        (0x2473, "M", u"20"),
        (0x2474, "3", u"(1)"),
        (0x2475, "3", u"(2)"),
        (0x2476, "3", u"(3)"),
        (0x2477, "3", u"(4)"),
        (0x2478, "3", u"(5)"),
        (0x2479, "3", u"(6)"),
        (0x247a, "3", u"(7)"),
        (0x247b, "3", u"(8)"),
        (0x247c, "3", u"(9)"),
        (0x247d, "3", u"(10)"),
        (0x247e, "3", u"(11)"),
        (0x247f, "3", u"(12)"),
        (0x2480, "3", u"(13)"),
        (0x2481, "3", u"(14)"),
        (0x2482, "3", u"(15)"),
        (0x2483, "3", u"(16)"),
        (0x2484, "3", u"(17)"),
        (0x2485, "3", u"(18)"),
        (0x2486, "3", u"(19)"),
        (0x2487, "3", u"(20)"),
        (0x2488, "X"),
        (0x249c, "3", u"(a)"),
        (0x249d, "3", u"(b)"),
        (0x249e, "3", u"(c)"),
        (0x249f, "3", u"(d)"),
        (0x24a0, "3", u"(e)"),
        (0x24a1, "3", u"(f)"),
        (0x24a2, "3", u"(g)"),
        (0x24a3, "3", u"(h)"),
        (0x24a4, "3", u"(i)"),
        (0x24a5, "3", u"(j)"),
        (0x24a6, "3", u"(k)"),
        (0x24a7, "3", u"(l)"),
        (0x24a8, "3", u"(m)"),
        (0x24a9, "3", u"(n)"),
        (0x24aa, "3", u"(o)"),
        (0x24ab, "3", u"(p)"),
        (0x24ac, "3", u"(q)"),
        (0x24ad, "3", u"(r)"),
        (0x24ae, "3", u"(s)"),
        (0x24af, "3", u"(t)"),
        (0x24b0, "3", u"(u)"),
        (0x24b1, "3", u"(v)"),
        (0x24b2, "3", u"(w)"),
        (0x24b3, "3", u"(x)"),
        (0x24b4, "3", u"(y)"),
        (0x24b5, "3", u"(z)"),
        (0x24b6, "M", u"a"),
        (0x24b7, "M", u"b"),
    ]
def _seg_24():
    """Segment 24 of the generated code-point table.

    Each entry is ``(codepoint, status)`` or ``(codepoint, status, mapping)``;
    statuses seen are "V", "M", "3", "X" — presumably valid / mapped /
    disallowed_STD3 / disallowed per IDNA UTS #46 (TODO confirm against the
    table's consumer).  Entries are ordered by codepoint.
    """
    return [
        (0x24b8, "M", u"c"),
        (0x24b9, "M", u"d"),
        (0x24ba, "M", u"e"),
        (0x24bb, "M", u"f"),
        (0x24bc, "M", u"g"),
        (0x24bd, "M", u"h"),
        (0x24be, "M", u"i"),
        (0x24bf, "M", u"j"),
        (0x24c0, "M", u"k"),
        (0x24c1, "M", u"l"),
        (0x24c2, "M", u"m"),
        (0x24c3, "M", u"n"),
        (0x24c4, "M", u"o"),
        (0x24c5, "M", u"p"),
        (0x24c6, "M", u"q"),
        (0x24c7, "M", u"r"),
        (0x24c8, "M", u"s"),
        (0x24c9, "M", u"t"),
        (0x24ca, "M", u"u"),
        (0x24cb, "M", u"v"),
        (0x24cc, "M", u"w"),
        (0x24cd, "M", u"x"),
        (0x24ce, "M", u"y"),
        (0x24cf, "M", u"z"),
        (0x24d0, "M", u"a"),
        (0x24d1, "M", u"b"),
        (0x24d2, "M", u"c"),
        (0x24d3, "M", u"d"),
        (0x24d4, "M", u"e"),
        (0x24d5, "M", u"f"),
        (0x24d6, "M", u"g"),
        (0x24d7, "M", u"h"),
        (0x24d8, "M", u"i"),
        (0x24d9, "M", u"j"),
        (0x24da, "M", u"k"),
        (0x24db, "M", u"l"),
        (0x24dc, "M", u"m"),
        (0x24dd, "M", u"n"),
        (0x24de, "M", u"o"),
        (0x24df, "M", u"p"),
        (0x24e0, "M", u"q"),
        (0x24e1, "M", u"r"),
        (0x24e2, "M", u"s"),
        (0x24e3, "M", u"t"),
        (0x24e4, "M", u"u"),
        (0x24e5, "M", u"v"),
        (0x24e6, "M", u"w"),
        (0x24e7, "M", u"x"),
        (0x24e8, "M", u"y"),
        (0x24e9, "M", u"z"),
        (0x24ea, "M", u"0"),
        (0x24eb, "V"),
        (0x2a0c, "M", u"∫∫∫∫"),
        (0x2a0d, "V"),
        (0x2a74, "3", u"::="),
        (0x2a75, "3", u"=="),
        (0x2a76, "3", u"==="),
        (0x2a77, "V"),
        (0x2adc, "M", u"⫝̸"),
        (0x2add, "V"),
        (0x2b74, "X"),
        (0x2b76, "V"),
        (0x2b96, "X"),
        (0x2b98, "V"),
        (0x2c00, "M", u"ⰰ"),
        (0x2c01, "M", u"ⰱ"),
        (0x2c02, "M", u"ⰲ"),
        (0x2c03, "M", u"ⰳ"),
        (0x2c04, "M", u"ⰴ"),
        (0x2c05, "M", u"ⰵ"),
        (0x2c06, "M", u"ⰶ"),
        (0x2c07, "M", u"ⰷ"),
        (0x2c08, "M", u"ⰸ"),
        (0x2c09, "M", u"ⰹ"),
        (0x2c0a, "M", u"ⰺ"),
        (0x2c0b, "M", u"ⰻ"),
        (0x2c0c, "M", u"ⰼ"),
        (0x2c0d, "M", u"ⰽ"),
        (0x2c0e, "M", u"ⰾ"),
        (0x2c0f, "M", u"ⰿ"),
        (0x2c10, "M", u"ⱀ"),
        (0x2c11, "M", u"ⱁ"),
        (0x2c12, "M", u"ⱂ"),
        (0x2c13, "M", u"ⱃ"),
        (0x2c14, "M", u"ⱄ"),
        (0x2c15, "M", u"ⱅ"),
        (0x2c16, "M", u"ⱆ"),
        (0x2c17, "M", u"ⱇ"),
        (0x2c18, "M", u"ⱈ"),
        (0x2c19, "M", u"ⱉ"),
        (0x2c1a, "M", u"ⱊ"),
        (0x2c1b, "M", u"ⱋ"),
        (0x2c1c, "M", u"ⱌ"),
        (0x2c1d, "M", u"ⱍ"),
        (0x2c1e, "M", u"ⱎ"),
        (0x2c1f, "M", u"ⱏ"),
        (0x2c20, "M", u"ⱐ"),
        (0x2c21, "M", u"ⱑ"),
        (0x2c22, "M", u"ⱒ"),
        (0x2c23, "M", u"ⱓ"),
    ]
def _seg_25():
    """Segment 25 of the generated code-point table.

    Each entry is ``(codepoint, status)`` or ``(codepoint, status, mapping)``;
    statuses seen are "V", "M", "X" — presumably valid / mapped / disallowed
    per IDNA UTS #46 (TODO confirm against the table's consumer).  Entries
    are ordered by codepoint.
    """
    return [
        (0x2c24, "M", u"ⱔ"),
        (0x2c25, "M", u"ⱕ"),
        (0x2c26, "M", u"ⱖ"),
        (0x2c27, "M", u"ⱗ"),
        (0x2c28, "M", u"ⱘ"),
        (0x2c29, "M", u"ⱙ"),
        (0x2c2a, "M", u"ⱚ"),
        (0x2c2b, "M", u"ⱛ"),
        (0x2c2c, "M", u"ⱜ"),
        (0x2c2d, "M", u"ⱝ"),
        (0x2c2e, "M", u"ⱞ"),
        (0x2c2f, "X"),
        (0x2c30, "V"),
        (0x2c5f, "X"),
        (0x2c60, "M", u"ⱡ"),
        (0x2c61, "V"),
        (0x2c62, "M", u"ɫ"),
        (0x2c63, "M", u"ᵽ"),
        (0x2c64, "M", u"ɽ"),
        (0x2c65, "V"),
        (0x2c67, "M", u"ⱨ"),
        (0x2c68, "V"),
        (0x2c69, "M", u"ⱪ"),
        (0x2c6a, "V"),
        (0x2c6b, "M", u"ⱬ"),
        (0x2c6c, "V"),
        (0x2c6d, "M", u"ɑ"),
        (0x2c6e, "M", u"ɱ"),
        (0x2c6f, "M", u"ɐ"),
        (0x2c70, "M", u"ɒ"),
        (0x2c71, "V"),
        (0x2c72, "M", u"ⱳ"),
        (0x2c73, "V"),
        (0x2c75, "M", u"ⱶ"),
        (0x2c76, "V"),
        (0x2c7c, "M", u"j"),
        (0x2c7d, "M", u"v"),
        (0x2c7e, "M", u"ȿ"),
        (0x2c7f, "M", u"ɀ"),
        (0x2c80, "M", u"ⲁ"),
        (0x2c81, "V"),
        (0x2c82, "M", u"ⲃ"),
        (0x2c83, "V"),
        (0x2c84, "M", u"ⲅ"),
        (0x2c85, "V"),
        (0x2c86, "M", u"ⲇ"),
        (0x2c87, "V"),
        (0x2c88, "M", u"ⲉ"),
        (0x2c89, "V"),
        (0x2c8a, "M", u"ⲋ"),
        (0x2c8b, "V"),
        (0x2c8c, "M", u"ⲍ"),
        (0x2c8d, "V"),
        (0x2c8e, "M", u"ⲏ"),
        (0x2c8f, "V"),
        (0x2c90, "M", u"ⲑ"),
        (0x2c91, "V"),
        (0x2c92, "M", u"ⲓ"),
        (0x2c93, "V"),
        (0x2c94, "M", u"ⲕ"),
        (0x2c95, "V"),
        (0x2c96, "M", u"ⲗ"),
        (0x2c97, "V"),
        (0x2c98, "M", u"ⲙ"),
        (0x2c99, "V"),
        (0x2c9a, "M", u"ⲛ"),
        (0x2c9b, "V"),
        (0x2c9c, "M", u"ⲝ"),
        (0x2c9d, "V"),
        (0x2c9e, "M", u"ⲟ"),
        (0x2c9f, "V"),
        (0x2ca0, "M", u"ⲡ"),
        (0x2ca1, "V"),
        (0x2ca2, "M", u"ⲣ"),
        (0x2ca3, "V"),
        (0x2ca4, "M", u"ⲥ"),
        (0x2ca5, "V"),
        (0x2ca6, "M", u"ⲧ"),
        (0x2ca7, "V"),
        (0x2ca8, "M", u"ⲩ"),
        (0x2ca9, "V"),
        (0x2caa, "M", u"ⲫ"),
        (0x2cab, "V"),
        (0x2cac, "M", u"ⲭ"),
        (0x2cad, "V"),
        (0x2cae, "M", u"ⲯ"),
        (0x2caf, "V"),
        (0x2cb0, "M", u"ⲱ"),
        (0x2cb1, "V"),
        (0x2cb2, "M", u"ⲳ"),
        (0x2cb3, "V"),
        (0x2cb4, "M", u"ⲵ"),
        (0x2cb5, "V"),
        (0x2cb6, "M", u"ⲷ"),
        (0x2cb7, "V"),
        (0x2cb8, "M", u"ⲹ"),
        (0x2cb9, "V"),
        (0x2cba, "M", u"ⲻ"),
        (0x2cbb, "V"),
        (0x2cbc, "M", u"ⲽ"),
    ]
def _seg_26():
    """Segment 26 of the generated code-point table.

    Each entry is ``(codepoint, status)`` or ``(codepoint, status, mapping)``;
    statuses seen are "V", "M", "X" — presumably valid / mapped / disallowed
    per IDNA UTS #46 (TODO confirm against the table's consumer).  Entries
    are ordered by codepoint.
    """
    return [
        (0x2cbd, "V"),
        (0x2cbe, "M", u"ⲿ"),
        (0x2cbf, "V"),
        (0x2cc0, "M", u"ⳁ"),
        (0x2cc1, "V"),
        (0x2cc2, "M", u"ⳃ"),
        (0x2cc3, "V"),
        (0x2cc4, "M", u"ⳅ"),
        (0x2cc5, "V"),
        (0x2cc6, "M", u"ⳇ"),
        (0x2cc7, "V"),
        (0x2cc8, "M", u"ⳉ"),
        (0x2cc9, "V"),
        (0x2cca, "M", u"ⳋ"),
        (0x2ccb, "V"),
        (0x2ccc, "M", u"ⳍ"),
        (0x2ccd, "V"),
        (0x2cce, "M", u"ⳏ"),
        (0x2ccf, "V"),
        (0x2cd0, "M", u"ⳑ"),
        (0x2cd1, "V"),
        (0x2cd2, "M", u"ⳓ"),
        (0x2cd3, "V"),
        (0x2cd4, "M", u"ⳕ"),
        (0x2cd5, "V"),
        (0x2cd6, "M", u"ⳗ"),
        (0x2cd7, "V"),
        (0x2cd8, "M", u"ⳙ"),
        (0x2cd9, "V"),
        (0x2cda, "M", u"ⳛ"),
        (0x2cdb, "V"),
        (0x2cdc, "M", u"ⳝ"),
        (0x2cdd, "V"),
        (0x2cde, "M", u"ⳟ"),
        (0x2cdf, "V"),
        (0x2ce0, "M", u"ⳡ"),
        (0x2ce1, "V"),
        (0x2ce2, "M", u"ⳣ"),
        (0x2ce3, "V"),
        (0x2ceb, "M", u"ⳬ"),
        (0x2cec, "V"),
        (0x2ced, "M", u"ⳮ"),
        (0x2cee, "V"),
        (0x2cf2, "M", u"ⳳ"),
        (0x2cf3, "V"),
        (0x2cf4, "X"),
        (0x2cf9, "V"),
        (0x2d26, "X"),
        (0x2d27, "V"),
        (0x2d28, "X"),
        (0x2d2d, "V"),
        (0x2d2e, "X"),
        (0x2d30, "V"),
        (0x2d68, "X"),
        (0x2d6f, "M", u"ⵡ"),
        (0x2d70, "V"),
        (0x2d71, "X"),
        (0x2d7f, "V"),
        (0x2d97, "X"),
        (0x2da0, "V"),
        (0x2da7, "X"),
        (0x2da8, "V"),
        (0x2daf, "X"),
        (0x2db0, "V"),
        (0x2db7, "X"),
        (0x2db8, "V"),
        (0x2dbf, "X"),
        (0x2dc0, "V"),
        (0x2dc7, "X"),
        (0x2dc8, "V"),
        (0x2dcf, "X"),
        (0x2dd0, "V"),
        (0x2dd7, "X"),
        (0x2dd8, "V"),
        (0x2ddf, "X"),
        (0x2de0, "V"),
        (0x2e50, "X"),
        (0x2e80, "V"),
        (0x2e9a, "X"),
        (0x2e9b, "V"),
        (0x2e9f, "M", u"母"),
        (0x2ea0, "V"),
        (0x2ef3, "M", u"龟"),
        (0x2ef4, "X"),
        (0x2f00, "M", u"一"),
        (0x2f01, "M", u"丨"),
        (0x2f02, "M", u"丶"),
        (0x2f03, "M", u"丿"),
        (0x2f04, "M", u"乙"),
        (0x2f05, "M", u"亅"),
        (0x2f06, "M", u"二"),
        (0x2f07, "M", u"亠"),
        (0x2f08, "M", u"人"),
        (0x2f09, "M", u"儿"),
        (0x2f0a, "M", u"入"),
        (0x2f0b, "M", u"八"),
        (0x2f0c, "M", u"冂"),
        (0x2f0d, "M", u"冖"),
        (0x2f0e, "M", u"冫"),
        (0x2f0f, "M", u"几"),
    ]
def _seg_27():
    """Segment 27 of the generated code-point table.

    Each entry is ``(codepoint, "M", mapping)`` — this segment maps the
    Kangxi radical code points to their unified CJK equivalents, presumably
    per IDNA UTS #46 (TODO confirm against the table's consumer).  Entries
    are ordered by codepoint.
    """
    return [
        (0x2f10, "M", u"凵"),
        (0x2f11, "M", u"刀"),
        (0x2f12, "M", u"力"),
        (0x2f13, "M", u"勹"),
        (0x2f14, "M", u"匕"),
        (0x2f15, "M", u"匚"),
        (0x2f16, "M", u"匸"),
        (0x2f17, "M", u"十"),
        (0x2f18, "M", u"卜"),
        (0x2f19, "M", u"卩"),
        (0x2f1a, "M", u"厂"),
        (0x2f1b, "M", u"厶"),
        (0x2f1c, "M", u"又"),
        (0x2f1d, "M", u"口"),
        (0x2f1e, "M", u"囗"),
        (0x2f1f, "M", u"土"),
        (0x2f20, "M", u"士"),
        (0x2f21, "M", u"夂"),
        (0x2f22, "M", u"夊"),
        (0x2f23, "M", u"夕"),
        (0x2f24, "M", u"大"),
        (0x2f25, "M", u"女"),
        (0x2f26, "M", u"子"),
        (0x2f27, "M", u"宀"),
        (0x2f28, "M", u"寸"),
        (0x2f29, "M", u"小"),
        (0x2f2a, "M", u"尢"),
        (0x2f2b, "M", u"尸"),
        (0x2f2c, "M", u"屮"),
        (0x2f2d, "M", u"山"),
        (0x2f2e, "M", u"巛"),
        (0x2f2f, "M", u"工"),
        (0x2f30, "M", u"己"),
        (0x2f31, "M", u"巾"),
        (0x2f32, "M", u"干"),
        (0x2f33, "M", u"幺"),
        (0x2f34, "M", u"广"),
        (0x2f35, "M", u"廴"),
        (0x2f36, "M", u"廾"),
        (0x2f37, "M", u"弋"),
        (0x2f38, "M", u"弓"),
        (0x2f39, "M", u"彐"),
        (0x2f3a, "M", u"彡"),
        (0x2f3b, "M", u"彳"),
        (0x2f3c, "M", u"心"),
        (0x2f3d, "M", u"戈"),
        (0x2f3e, "M", u"戶"),
        (0x2f3f, "M", u"手"),
        (0x2f40, "M", u"支"),
        (0x2f41, "M", u"攴"),
        (0x2f42, "M", u"文"),
        (0x2f43, "M", u"斗"),
        (0x2f44, "M", u"斤"),
        (0x2f45, "M", u"方"),
        (0x2f46, "M", u"无"),
        (0x2f47, "M", u"日"),
        (0x2f48, "M", u"曰"),
        (0x2f49, "M", u"月"),
        (0x2f4a, "M", u"木"),
        (0x2f4b, "M", u"欠"),
        (0x2f4c, "M", u"止"),
        (0x2f4d, "M", u"歹"),
        (0x2f4e, "M", u"殳"),
        (0x2f4f, "M", u"毋"),
        (0x2f50, "M", u"比"),
        (0x2f51, "M", u"毛"),
        (0x2f52, "M", u"氏"),
        (0x2f53, "M", u"气"),
        (0x2f54, "M", u"水"),
        (0x2f55, "M", u"火"),
        (0x2f56, "M", u"爪"),
        (0x2f57, "M", u"父"),
        (0x2f58, "M", u"爻"),
        (0x2f59, "M", u"爿"),
        (0x2f5a, "M", u"片"),
        (0x2f5b, "M", u"牙"),
        (0x2f5c, "M", u"牛"),
        (0x2f5d, "M", u"犬"),
        (0x2f5e, "M", u"玄"),
        (0x2f5f, "M", u"玉"),
        (0x2f60, "M", u"瓜"),
        (0x2f61, "M", u"瓦"),
        (0x2f62, "M", u"甘"),
        (0x2f63, "M", u"生"),
        (0x2f64, "M", u"用"),
        (0x2f65, "M", u"田"),
        (0x2f66, "M", u"疋"),
        (0x2f67, "M", u"疒"),
        (0x2f68, "M", u"癶"),
        (0x2f69, "M", u"白"),
        (0x2f6a, "M", u"皮"),
        (0x2f6b, "M", u"皿"),
        (0x2f6c, "M", u"目"),
        (0x2f6d, "M", u"矛"),
        (0x2f6e, "M", u"矢"),
        (0x2f6f, "M", u"石"),
        (0x2f70, "M", u"示"),
        (0x2f71, "M", u"禸"),
        (0x2f72, "M", u"禾"),
        (0x2f73, "M", u"穴"),
    ]
def _seg_28():
    """Segment 28 of the generated code-point table.

    Each entry is ``(codepoint, status)`` or ``(codepoint, status, mapping)``;
    statuses seen are "M", "X", "3" — presumably mapped / disallowed /
    disallowed_STD3_mapped per IDNA UTS #46 (TODO confirm against the
    table's consumer).  Entries are ordered by codepoint.
    """
    return [
        (0x2f74, "M", u"立"),
        (0x2f75, "M", u"竹"),
        (0x2f76, "M", u"米"),
        (0x2f77, "M", u"糸"),
        (0x2f78, "M", u"缶"),
        (0x2f79, "M", u"网"),
        (0x2f7a, "M", u"羊"),
        (0x2f7b, "M", u"羽"),
        (0x2f7c, "M", u"老"),
        (0x2f7d, "M", u"而"),
        (0x2f7e, "M", u"耒"),
        (0x2f7f, "M", u"耳"),
        (0x2f80, "M", u"聿"),
        (0x2f81, "M", u"肉"),
        (0x2f82, "M", u"臣"),
        (0x2f83, "M", u"自"),
        (0x2f84, "M", u"至"),
        (0x2f85, "M", u"臼"),
        (0x2f86, "M", u"舌"),
        (0x2f87, "M", u"舛"),
        (0x2f88, "M", u"舟"),
        (0x2f89, "M", u"艮"),
        (0x2f8a, "M", u"色"),
        (0x2f8b, "M", u"艸"),
        (0x2f8c, "M", u"虍"),
        (0x2f8d, "M", u"虫"),
        (0x2f8e, "M", u"血"),
        (0x2f8f, "M", u"行"),
        (0x2f90, "M", u"衣"),
        (0x2f91, "M", u"襾"),
        (0x2f92, "M", u"見"),
        (0x2f93, "M", u"角"),
        (0x2f94, "M", u"言"),
        (0x2f95, "M", u"谷"),
        (0x2f96, "M", u"豆"),
        (0x2f97, "M", u"豕"),
        (0x2f98, "M", u"豸"),
        (0x2f99, "M", u"貝"),
        (0x2f9a, "M", u"赤"),
        (0x2f9b, "M", u"走"),
        (0x2f9c, "M", u"足"),
        (0x2f9d, "M", u"身"),
        (0x2f9e, "M", u"車"),
        (0x2f9f, "M", u"辛"),
        (0x2fa0, "M", u"辰"),
        (0x2fa1, "M", u"辵"),
        (0x2fa2, "M", u"邑"),
        (0x2fa3, "M", u"酉"),
        (0x2fa4, "M", u"釆"),
        (0x2fa5, "M", u"里"),
        (0x2fa6, "M", u"金"),
        (0x2fa7, "M", u"長"),
        (0x2fa8, "M", u"門"),
        (0x2fa9, "M", u"阜"),
        (0x2faa, "M", u"隶"),
        (0x2fab, "M", u"隹"),
        (0x2fac, "M", u"雨"),
        (0x2fad, "M", u"靑"),
        (0x2fae, "M", u"非"),
        (0x2faf, "M", u"面"),
        (0x2fb0, "M", u"革"),
        (0x2fb1, "M", u"韋"),
        (0x2fb2, "M", u"韭"),
        (0x2fb3, "M", u"音"),
        (0x2fb4, "M", u"頁"),
        (0x2fb5, "M", u"風"),
        (0x2fb6, "M", u"飛"),
        (0x2fb7, "M", u"食"),
        (0x2fb8, "M", u"首"),
        (0x2fb9, "M", u"香"),
        (0x2fba, "M", u"馬"),
        (0x2fbb, "M", u"骨"),
        (0x2fbc, "M", u"高"),
        (0x2fbd, "M", u"髟"),
        (0x2fbe, "M", u"鬥"),
        (0x2fbf, "M", u"鬯"),
        (0x2fc0, "M", u"鬲"),
        (0x2fc1, "M", u"鬼"),
        (0x2fc2, "M", u"魚"),
        (0x2fc3, "M", u"鳥"),
        (0x2fc4, "M", u"鹵"),
        (0x2fc5, "M", u"鹿"),
        (0x2fc6, "M", u"麥"),
        (0x2fc7, "M", u"麻"),
        (0x2fc8, "M", u"黃"),
        (0x2fc9, "M", u"黍"),
        (0x2fca, "M", u"黑"),
        (0x2fcb, "M", u"黹"),
        (0x2fcc, "M", u"黽"),
        (0x2fcd, "M", u"鼎"),
        (0x2fce, "M", u"鼓"),
        (0x2fcf, "M", u"鼠"),
        (0x2fd0, "M", u"鼻"),
        (0x2fd1, "M", u"齊"),
        (0x2fd2, "M", u"齒"),
        (0x2fd3, "M", u"龍"),
        (0x2fd4, "M", u"龜"),
        (0x2fd5, "M", u"龠"),
        (0x2fd6, "X"),
        (0x3000, "3", u" "),
    ]
def _seg_29():
    """Segment 29 of the generated code-point table.

    Each entry is ``(codepoint, status)`` or ``(codepoint, status, mapping)``;
    statuses seen are "V", "M", "3", "X" — presumably valid / mapped /
    disallowed_STD3 / disallowed per IDNA UTS #46 (TODO confirm against the
    table's consumer).  Entries are ordered by codepoint.
    """
    return [
        (0x3001, "V"),
        (0x3002, "M", u"."),
        (0x3003, "V"),
        (0x3036, "M", u"〒"),
        (0x3037, "V"),
        (0x3038, "M", u"十"),
        (0x3039, "M", u"卄"),
        (0x303a, "M", u"卅"),
        (0x303b, "V"),
        (0x3040, "X"),
        (0x3041, "V"),
        (0x3097, "X"),
        (0x3099, "V"),
        (0x309b, "3", u" ゙"),
        (0x309c, "3", u" ゚"),
        (0x309d, "V"),
        (0x309f, "M", u"より"),
        (0x30a0, "V"),
        (0x30ff, "M", u"コト"),
        (0x3100, "X"),
        (0x3105, "V"),
        (0x3130, "X"),
        (0x3131, "M", u"ᄀ"),
        (0x3132, "M", u"ᄁ"),
        (0x3133, "M", u"ᆪ"),
        (0x3134, "M", u"ᄂ"),
        (0x3135, "M", u"ᆬ"),
        (0x3136, "M", u"ᆭ"),
        (0x3137, "M", u"ᄃ"),
        (0x3138, "M", u"ᄄ"),
        (0x3139, "M", u"ᄅ"),
        (0x313a, "M", u"ᆰ"),
        (0x313b, "M", u"ᆱ"),
        (0x313c, "M", u"ᆲ"),
        (0x313d, "M", u"ᆳ"),
        (0x313e, "M", u"ᆴ"),
        (0x313f, "M", u"ᆵ"),
        (0x3140, "M", u"ᄚ"),
        (0x3141, "M", u"ᄆ"),
        (0x3142, "M", u"ᄇ"),
        (0x3143, "M", u"ᄈ"),
        (0x3144, "M", u"ᄡ"),
        (0x3145, "M", u"ᄉ"),
        (0x3146, "M", u"ᄊ"),
        (0x3147, "M", u"ᄋ"),
        (0x3148, "M", u"ᄌ"),
        (0x3149, "M", u"ᄍ"),
        (0x314a, "M", u"ᄎ"),
        (0x314b, "M", u"ᄏ"),
        (0x314c, "M", u"ᄐ"),
        (0x314d, "M", u"ᄑ"),
        (0x314e, "M", u"ᄒ"),
        (0x314f, "M", u"ᅡ"),
        (0x3150, "M", u"ᅢ"),
        (0x3151, "M", u"ᅣ"),
        (0x3152, "M", u"ᅤ"),
        (0x3153, "M", u"ᅥ"),
        (0x3154, "M", u"ᅦ"),
        (0x3155, "M", u"ᅧ"),
        (0x3156, "M", u"ᅨ"),
        (0x3157, "M", u"ᅩ"),
        (0x3158, "M", u"ᅪ"),
        (0x3159, "M", u"ᅫ"),
        (0x315a, "M", u"ᅬ"),
        (0x315b, "M", u"ᅭ"),
        (0x315c, "M", u"ᅮ"),
        (0x315d, "M", u"ᅯ"),
        (0x315e, "M", u"ᅰ"),
        (0x315f, "M", u"ᅱ"),
        (0x3160, "M", u"ᅲ"),
        (0x3161, "M", u"ᅳ"),
        (0x3162, "M", u"ᅴ"),
        (0x3163, "M", u"ᅵ"),
        (0x3164, "X"),
        (0x3165, "M", u"ᄔ"),
        (0x3166, "M", u"ᄕ"),
        (0x3167, "M", u"ᇇ"),
        (0x3168, "M", u"ᇈ"),
        (0x3169, "M", u"ᇌ"),
        (0x316a, "M", u"ᇎ"),
        (0x316b, "M", u"ᇓ"),
        (0x316c, "M", u"ᇗ"),
        (0x316d, "M", u"ᇙ"),
        (0x316e, "M", u"ᄜ"),
        (0x316f, "M", u"ᇝ"),
        (0x3170, "M", u"ᇟ"),
        (0x3171, "M", u"ᄝ"),
        (0x3172, "M", u"ᄞ"),
        (0x3173, "M", u"ᄠ"),
        (0x3174, "M", u"ᄢ"),
        (0x3175, "M", u"ᄣ"),
        (0x3176, "M", u"ᄧ"),
        (0x3177, "M", u"ᄩ"),
        (0x3178, "M", u"ᄫ"),
        (0x3179, "M", u"ᄬ"),
        (0x317a, "M", u"ᄭ"),
        (0x317b, "M", u"ᄮ"),
        (0x317c, "M", u"ᄯ"),
        (0x317d, "M", u"ᄲ"),
        (0x317e, "M", u"ᄶ"),
    ]
def _seg_30():
    """Segment 30 of the generated code-point table.

    Each entry is ``(codepoint, status)`` or ``(codepoint, status, mapping)``;
    statuses seen are "V", "M", "3", "X" — presumably valid / mapped /
    disallowed_STD3 / disallowed per IDNA UTS #46 (TODO confirm against the
    table's consumer).  Entries are ordered by codepoint.
    """
    return [
        (0x317f, "M", u"ᅀ"),
        (0x3180, "M", u"ᅇ"),
        (0x3181, "M", u"ᅌ"),
        (0x3182, "M", u"ᇱ"),
        (0x3183, "M", u"ᇲ"),
        (0x3184, "M", u"ᅗ"),
        (0x3185, "M", u"ᅘ"),
        (0x3186, "M", u"ᅙ"),
        (0x3187, "M", u"ᆄ"),
        (0x3188, "M", u"ᆅ"),
        (0x3189, "M", u"ᆈ"),
        (0x318a, "M", u"ᆑ"),
        (0x318b, "M", u"ᆒ"),
        (0x318c, "M", u"ᆔ"),
        (0x318d, "M", u"ᆞ"),
        (0x318e, "M", u"ᆡ"),
        (0x318f, "X"),
        (0x3190, "V"),
        (0x3192, "M", u"一"),
        (0x3193, "M", u"二"),
        (0x3194, "M", u"三"),
        (0x3195, "M", u"四"),
        (0x3196, "M", u"上"),
        (0x3197, "M", u"中"),
        (0x3198, "M", u"下"),
        (0x3199, "M", u"甲"),
        (0x319a, "M", u"乙"),
        (0x319b, "M", u"丙"),
        (0x319c, "M", u"丁"),
        (0x319d, "M", u"天"),
        (0x319e, "M", u"地"),
        (0x319f, "M", u"人"),
        (0x31a0, "V"),
        (0x31bb, "X"),
        (0x31c0, "V"),
        (0x31e4, "X"),
        (0x31f0, "V"),
        (0x3200, "3", u"(ᄀ)"),
        (0x3201, "3", u"(ᄂ)"),
        (0x3202, "3", u"(ᄃ)"),
        (0x3203, "3", u"(ᄅ)"),
        (0x3204, "3", u"(ᄆ)"),
        (0x3205, "3", u"(ᄇ)"),
        (0x3206, "3", u"(ᄉ)"),
        (0x3207, "3", u"(ᄋ)"),
        (0x3208, "3", u"(ᄌ)"),
        (0x3209, "3", u"(ᄎ)"),
        (0x320a, "3", u"(ᄏ)"),
        (0x320b, "3", u"(ᄐ)"),
        (0x320c, "3", u"(ᄑ)"),
        (0x320d, "3", u"(ᄒ)"),
        (0x320e, "3", u"(가)"),
        (0x320f, "3", u"(나)"),
        (0x3210, "3", u"(다)"),
        (0x3211, "3", u"(라)"),
        (0x3212, "3", u"(마)"),
        (0x3213, "3", u"(바)"),
        (0x3214, "3", u"(사)"),
        (0x3215, "3", u"(아)"),
        (0x3216, "3", u"(자)"),
        (0x3217, "3", u"(차)"),
        (0x3218, "3", u"(카)"),
        (0x3219, "3", u"(타)"),
        (0x321a, "3", u"(파)"),
        (0x321b, "3", u"(하)"),
        (0x321c, "3", u"(주)"),
        (0x321d, "3", u"(오전)"),
        (0x321e, "3", u"(오후)"),
        (0x321f, "X"),
        (0x3220, "3", u"(一)"),
        (0x3221, "3", u"(二)"),
        (0x3222, "3", u"(三)"),
        (0x3223, "3", u"(四)"),
        (0x3224, "3", u"(五)"),
        (0x3225, "3", u"(六)"),
        (0x3226, "3", u"(七)"),
        (0x3227, "3", u"(八)"),
        (0x3228, "3", u"(九)"),
        (0x3229, "3", u"(十)"),
        (0x322a, "3", u"(月)"),
        (0x322b, "3", u"(火)"),
        (0x322c, "3", u"(水)"),
        (0x322d, "3", u"(木)"),
        (0x322e, "3", u"(金)"),
        (0x322f, "3", u"(土)"),
        (0x3230, "3", u"(日)"),
        (0x3231, "3", u"(株)"),
        (0x3232, "3", u"(有)"),
        (0x3233, "3", u"(社)"),
        (0x3234, "3", u"(名)"),
        (0x3235, "3", u"(特)"),
        (0x3236, "3", u"(財)"),
        (0x3237, "3", u"(祝)"),
        (0x3238, "3", u"(労)"),
        (0x3239, "3", u"(代)"),
        (0x323a, "3", u"(呼)"),
        (0x323b, "3", u"(学)"),
        (0x323c, "3", u"(監)"),
        (0x323d, "3", u"(企)"),
        (0x323e, "3", u"(資)"),
    ]
def _seg_31():
    """Segment 31 of the generated code-point table.

    Each entry is ``(codepoint, status)`` or ``(codepoint, status, mapping)``;
    statuses seen are "V", "M", "3" — presumably valid / mapped /
    disallowed_STD3_mapped per IDNA UTS #46 (TODO confirm against the
    table's consumer).  Entries are ordered by codepoint.
    """
    return [
        (0x323f, "3", u"(協)"),
        (0x3240, "3", u"(祭)"),
        (0x3241, "3", u"(休)"),
        (0x3242, "3", u"(自)"),
        (0x3243, "3", u"(至)"),
        (0x3244, "M", u"問"),
        (0x3245, "M", u"幼"),
        (0x3246, "M", u"文"),
        (0x3247, "M", u"箏"),
        (0x3248, "V"),
        (0x3250, "M", u"pte"),
        (0x3251, "M", u"21"),
        (0x3252, "M", u"22"),
        (0x3253, "M", u"23"),
        (0x3254, "M", u"24"),
        (0x3255, "M", u"25"),
        (0x3256, "M", u"26"),
        (0x3257, "M", u"27"),
        (0x3258, "M", u"28"),
        (0x3259, "M", u"29"),
        (0x325a, "M", u"30"),
        (0x325b, "M", u"31"),
        (0x325c, "M", u"32"),
        (0x325d, "M", u"33"),
        (0x325e, "M", u"34"),
        (0x325f, "M", u"35"),
        (0x3260, "M", u"ᄀ"),
        (0x3261, "M", u"ᄂ"),
        (0x3262, "M", u"ᄃ"),
        (0x3263, "M", u"ᄅ"),
        (0x3264, "M", u"ᄆ"),
        (0x3265, "M", u"ᄇ"),
        (0x3266, "M", u"ᄉ"),
        (0x3267, "M", u"ᄋ"),
        (0x3268, "M", u"ᄌ"),
        (0x3269, "M", u"ᄎ"),
        (0x326a, "M", u"ᄏ"),
        (0x326b, "M", u"ᄐ"),
        (0x326c, "M", u"ᄑ"),
        (0x326d, "M", u"ᄒ"),
        (0x326e, "M", u"가"),
        (0x326f, "M", u"나"),
        (0x3270, "M", u"다"),
        (0x3271, "M", u"라"),
        (0x3272, "M", u"마"),
        (0x3273, "M", u"바"),
        (0x3274, "M", u"사"),
        (0x3275, "M", u"아"),
        (0x3276, "M", u"자"),
        (0x3277, "M", u"차"),
        (0x3278, "M", u"카"),
        (0x3279, "M", u"타"),
        (0x327a, "M", u"파"),
        (0x327b, "M", u"하"),
        (0x327c, "M", u"참고"),
        (0x327d, "M", u"주의"),
        (0x327e, "M", u"우"),
        (0x327f, "V"),
        (0x3280, "M", u"一"),
        (0x3281, "M", u"二"),
        (0x3282, "M", u"三"),
        (0x3283, "M", u"四"),
        (0x3284, "M", u"五"),
        (0x3285, "M", u"六"),
        (0x3286, "M", u"七"),
        (0x3287, "M", u"八"),
        (0x3288, "M", u"九"),
        (0x3289, "M", u"十"),
        (0x328a, "M", u"月"),
        (0x328b, "M", u"火"),
        (0x328c, "M", u"水"),
        (0x328d, "M", u"木"),
        (0x328e, "M", u"金"),
        (0x328f, "M", u"土"),
        (0x3290, "M", u"日"),
        (0x3291, "M", u"株"),
        (0x3292, "M", u"有"),
        (0x3293, "M", u"社"),
        (0x3294, "M", u"名"),
        (0x3295, "M", u"特"),
        (0x3296, "M", u"財"),
        (0x3297, "M", u"祝"),
        (0x3298, "M", u"労"),
        (0x3299, "M", u"秘"),
        (0x329a, "M", u"男"),
        (0x329b, "M", u"女"),
        (0x329c, "M", u"適"),
        (0x329d, "M", u"優"),
        (0x329e, "M", u"印"),
        (0x329f, "M", u"注"),
        (0x32a0, "M", u"項"),
        (0x32a1, "M", u"休"),
        (0x32a2, "M", u"写"),
        (0x32a3, "M", u"正"),
        (0x32a4, "M", u"上"),
        (0x32a5, "M", u"中"),
        (0x32a6, "M", u"下"),
        (0x32a7, "M", u"左"),
        (0x32a8, "M", u"右"),
        (0x32a9, "M", u"医"),
    ]
def _seg_32():
    """Segment 32 of the generated code-point table.

    Each entry is ``(codepoint, "M", mapping)`` — this segment covers
    circled/squared CJK and Katakana compatibility characters, presumably
    mapped per IDNA UTS #46 (TODO confirm against the table's consumer).
    Entries are ordered by codepoint.
    """
    return [
        (0x32aa, "M", u"宗"),
        (0x32ab, "M", u"学"),
        (0x32ac, "M", u"監"),
        (0x32ad, "M", u"企"),
        (0x32ae, "M", u"資"),
        (0x32af, "M", u"協"),
        (0x32b0, "M", u"夜"),
        (0x32b1, "M", u"36"),
        (0x32b2, "M", u"37"),
        (0x32b3, "M", u"38"),
        (0x32b4, "M", u"39"),
        (0x32b5, "M", u"40"),
        (0x32b6, "M", u"41"),
        (0x32b7, "M", u"42"),
        (0x32b8, "M", u"43"),
        (0x32b9, "M", u"44"),
        (0x32ba, "M", u"45"),
        (0x32bb, "M", u"46"),
        (0x32bc, "M", u"47"),
        (0x32bd, "M", u"48"),
        (0x32be, "M", u"49"),
        (0x32bf, "M", u"50"),
        (0x32c0, "M", u"1月"),
        (0x32c1, "M", u"2月"),
        (0x32c2, "M", u"3月"),
        (0x32c3, "M", u"4月"),
        (0x32c4, "M", u"5月"),
        (0x32c5, "M", u"6月"),
        (0x32c6, "M", u"7月"),
        (0x32c7, "M", u"8月"),
        (0x32c8, "M", u"9月"),
        (0x32c9, "M", u"10月"),
        (0x32ca, "M", u"11月"),
        (0x32cb, "M", u"12月"),
        (0x32cc, "M", u"hg"),
        (0x32cd, "M", u"erg"),
        (0x32ce, "M", u"ev"),
        (0x32cf, "M", u"ltd"),
        (0x32d0, "M", u"ア"),
        (0x32d1, "M", u"イ"),
        (0x32d2, "M", u"ウ"),
        (0x32d3, "M", u"エ"),
        (0x32d4, "M", u"オ"),
        (0x32d5, "M", u"カ"),
        (0x32d6, "M", u"キ"),
        (0x32d7, "M", u"ク"),
        (0x32d8, "M", u"ケ"),
        (0x32d9, "M", u"コ"),
        (0x32da, "M", u"サ"),
        (0x32db, "M", u"シ"),
        (0x32dc, "M", u"ス"),
        (0x32dd, "M", u"セ"),
        (0x32de, "M", u"ソ"),
        (0x32df, "M", u"タ"),
        (0x32e0, "M", u"チ"),
        (0x32e1, "M", u"ツ"),
        (0x32e2, "M", u"テ"),
        (0x32e3, "M", u"ト"),
        (0x32e4, "M", u"ナ"),
        (0x32e5, "M", u"ニ"),
        (0x32e6, "M", u"ヌ"),
        (0x32e7, "M", u"ネ"),
        (0x32e8, "M", u"ノ"),
        (0x32e9, "M", u"ハ"),
        (0x32ea, "M", u"ヒ"),
        (0x32eb, "M", u"フ"),
        (0x32ec, "M", u"ヘ"),
        (0x32ed, "M", u"ホ"),
        (0x32ee, "M", u"マ"),
        (0x32ef, "M", u"ミ"),
        (0x32f0, "M", u"ム"),
        (0x32f1, "M", u"メ"),
        (0x32f2, "M", u"モ"),
        (0x32f3, "M", u"ヤ"),
        (0x32f4, "M", u"ユ"),
        (0x32f5, "M", u"ヨ"),
        (0x32f6, "M", u"ラ"),
        (0x32f7, "M", u"リ"),
        (0x32f8, "M", u"ル"),
        (0x32f9, "M", u"レ"),
        (0x32fa, "M", u"ロ"),
        (0x32fb, "M", u"ワ"),
        (0x32fc, "M", u"ヰ"),
        (0x32fd, "M", u"ヱ"),
        (0x32fe, "M", u"ヲ"),
        (0x32ff, "M", u"令和"),
        (0x3300, "M", u"アパート"),
        (0x3301, "M", u"アルファ"),
        (0x3302, "M", u"アンペア"),
        (0x3303, "M", u"アール"),
        (0x3304, "M", u"イニング"),
        (0x3305, "M", u"インチ"),
        (0x3306, "M", u"ウォン"),
        (0x3307, "M", u"エスクード"),
        (0x3308, "M", u"エーカー"),
        (0x3309, "M", u"オンス"),
        (0x330a, "M", u"オーム"),
        (0x330b, "M", u"カイリ"),
        (0x330c, "M", u"カラット"),
        (0x330d, "M", u"カロリー"),
    ]
def _seg_33():
    """Segment 33 of the generated code-point table.

    Each entry is ``(codepoint, "M", mapping)`` — this segment covers
    squared Katakana unit/word compatibility characters, presumably mapped
    per IDNA UTS #46 (TODO confirm against the table's consumer).  Entries
    are ordered by codepoint.
    """
    return [
        (0x330e, "M", u"ガロン"),
        (0x330f, "M", u"ガンマ"),
        (0x3310, "M", u"ギガ"),
        (0x3311, "M", u"ギニー"),
        (0x3312, "M", u"キュリー"),
        (0x3313, "M", u"ギルダー"),
        (0x3314, "M", u"キロ"),
        (0x3315, "M", u"キログラム"),
        (0x3316, "M", u"キロメートル"),
        (0x3317, "M", u"キロワット"),
        (0x3318, "M", u"グラム"),
        (0x3319, "M", u"グラムトン"),
        (0x331a, "M", u"クルゼイロ"),
        (0x331b, "M", u"クローネ"),
        (0x331c, "M", u"ケース"),
        (0x331d, "M", u"コルナ"),
        (0x331e, "M", u"コーポ"),
        (0x331f, "M", u"サイクル"),
        (0x3320, "M", u"サンチーム"),
        (0x3321, "M", u"シリング"),
        (0x3322, "M", u"センチ"),
        (0x3323, "M", u"セント"),
        (0x3324, "M", u"ダース"),
        (0x3325, "M", u"デシ"),
        (0x3326, "M", u"ドル"),
        (0x3327, "M", u"トン"),
        (0x3328, "M", u"ナノ"),
        (0x3329, "M", u"ノット"),
        (0x332a, "M", u"ハイツ"),
        (0x332b, "M", u"パーセント"),
        (0x332c, "M", u"パーツ"),
        (0x332d, "M", u"バーレル"),
        (0x332e, "M", u"ピアストル"),
        (0x332f, "M", u"ピクル"),
        (0x3330, "M", u"ピコ"),
        (0x3331, "M", u"ビル"),
        (0x3332, "M", u"ファラッド"),
        (0x3333, "M", u"フィート"),
        (0x3334, "M", u"ブッシェル"),
        (0x3335, "M", u"フラン"),
        (0x3336, "M", u"ヘクタール"),
        (0x3337, "M", u"ペソ"),
        (0x3338, "M", u"ペニヒ"),
        (0x3339, "M", u"ヘルツ"),
        (0x333a, "M", u"ペンス"),
        (0x333b, "M", u"ページ"),
        (0x333c, "M", u"ベータ"),
        (0x333d, "M", u"ポイント"),
        (0x333e, "M", u"ボルト"),
        (0x333f, "M", u"ホン"),
        (0x3340, "M", u"ポンド"),
        (0x3341, "M", u"ホール"),
        (0x3342, "M", u"ホーン"),
        (0x3343, "M", u"マイクロ"),
        (0x3344, "M", u"マイル"),
        (0x3345, "M", u"マッハ"),
        (0x3346, "M", u"マルク"),
        (0x3347, "M", u"マンション"),
        (0x3348, "M", u"ミクロン"),
        (0x3349, "M", u"ミリ"),
        (0x334a, "M", u"ミリバール"),
        (0x334b, "M", u"メガ"),
        (0x334c, "M", u"メガトン"),
        (0x334d, "M", u"メートル"),
        (0x334e, "M", u"ヤード"),
        (0x334f, "M", u"ヤール"),
        (0x3350, "M", u"ユアン"),
        (0x3351, "M", u"リットル"),
        (0x3352, "M", u"リラ"),
        (0x3353, "M", u"ルピー"),
        (0x3354, "M", u"ルーブル"),
        (0x3355, "M", u"レム"),
        (0x3356, "M", u"レントゲン"),
        (0x3357, "M", u"ワット"),
        (0x3358, "M", u"0点"),
        (0x3359, "M", u"1点"),
        (0x335a, "M", u"2点"),
        (0x335b, "M", u"3点"),
        (0x335c, "M", u"4点"),
        (0x335d, "M", u"5点"),
        (0x335e, "M", u"6点"),
        (0x335f, "M", u"7点"),
        (0x3360, "M", u"8点"),
        (0x3361, "M", u"9点"),
        (0x3362, "M", u"10点"),
        (0x3363, "M", u"11点"),
        (0x3364, "M", u"12点"),
        (0x3365, "M", u"13点"),
        (0x3366, "M", u"14点"),
        (0x3367, "M", u"15点"),
        (0x3368, "M", u"16点"),
        (0x3369, "M", u"17点"),
        (0x336a, "M", u"18点"),
        (0x336b, "M", u"19点"),
        (0x336c, "M", u"20点"),
        (0x336d, "M", u"21点"),
        (0x336e, "M", u"22点"),
        (0x336f, "M", u"23点"),
        (0x3370, "M", u"24点"),
        (0x3371, "M", u"hpa"),
    ]
def _seg_34():
    """Segment 34 of the generated code-point table.

    Each entry is ``(codepoint, status)`` or ``(codepoint, status, mapping)``;
    statuses seen are "M", "X" — presumably mapped / disallowed per IDNA
    UTS #46 (TODO confirm against the table's consumer).  This segment
    covers CJK-compatibility unit abbreviations.  Entries are ordered by
    codepoint.
    """
    return [
        (0x3372, "M", u"da"),
        (0x3373, "M", u"au"),
        (0x3374, "M", u"bar"),
        (0x3375, "M", u"ov"),
        (0x3376, "M", u"pc"),
        (0x3377, "M", u"dm"),
        (0x3378, "M", u"dm2"),
        (0x3379, "M", u"dm3"),
        (0x337a, "M", u"iu"),
        (0x337b, "M", u"平成"),
        (0x337c, "M", u"昭和"),
        (0x337d, "M", u"大正"),
        (0x337e, "M", u"明治"),
        (0x337f, "M", u"株式会社"),
        (0x3380, "M", u"pa"),
        (0x3381, "M", u"na"),
        (0x3382, "M", u"μa"),
        (0x3383, "M", u"ma"),
        (0x3384, "M", u"ka"),
        (0x3385, "M", u"kb"),
        (0x3386, "M", u"mb"),
        (0x3387, "M", u"gb"),
        (0x3388, "M", u"cal"),
        (0x3389, "M", u"kcal"),
        (0x338a, "M", u"pf"),
        (0x338b, "M", u"nf"),
        (0x338c, "M", u"μf"),
        (0x338d, "M", u"μg"),
        (0x338e, "M", u"mg"),
        (0x338f, "M", u"kg"),
        (0x3390, "M", u"hz"),
        (0x3391, "M", u"khz"),
        (0x3392, "M", u"mhz"),
        (0x3393, "M", u"ghz"),
        (0x3394, "M", u"thz"),
        (0x3395, "M", u"μl"),
        (0x3396, "M", u"ml"),
        (0x3397, "M", u"dl"),
        (0x3398, "M", u"kl"),
        (0x3399, "M", u"fm"),
        (0x339a, "M", u"nm"),
        (0x339b, "M", u"μm"),
        (0x339c, "M", u"mm"),
        (0x339d, "M", u"cm"),
        (0x339e, "M", u"km"),
        (0x339f, "M", u"mm2"),
        (0x33a0, "M", u"cm2"),
        (0x33a1, "M", u"m2"),
        (0x33a2, "M", u"km2"),
        (0x33a3, "M", u"mm3"),
        (0x33a4, "M", u"cm3"),
        (0x33a5, "M", u"m3"),
        (0x33a6, "M", u"km3"),
        (0x33a7, "M", u"m∕s"),
        (0x33a8, "M", u"m∕s2"),
        (0x33a9, "M", u"pa"),
        (0x33aa, "M", u"kpa"),
        (0x33ab, "M", u"mpa"),
        (0x33ac, "M", u"gpa"),
        (0x33ad, "M", u"rad"),
        (0x33ae, "M", u"rad∕s"),
        (0x33af, "M", u"rad∕s2"),
        (0x33b0, "M", u"ps"),
        (0x33b1, "M", u"ns"),
        (0x33b2, "M", u"μs"),
        (0x33b3, "M", u"ms"),
        (0x33b4, "M", u"pv"),
        (0x33b5, "M", u"nv"),
        (0x33b6, "M", u"μv"),
        (0x33b7, "M", u"mv"),
        (0x33b8, "M", u"kv"),
        (0x33b9, "M", u"mv"),
        (0x33ba, "M", u"pw"),
        (0x33bb, "M", u"nw"),
        (0x33bc, "M", u"μw"),
        (0x33bd, "M", u"mw"),
        (0x33be, "M", u"kw"),
        (0x33bf, "M", u"mw"),
        (0x33c0, "M", u"kω"),
        (0x33c1, "M", u"mω"),
        (0x33c2, "X"),
        (0x33c3, "M", u"bq"),
        (0x33c4, "M", u"cc"),
        (0x33c5, "M", u"cd"),
        (0x33c6, "M", u"c∕kg"),
        (0x33c7, "X"),
        (0x33c8, "M", u"db"),
        (0x33c9, "M", u"gy"),
        (0x33ca, "M", u"ha"),
        (0x33cb, "M", u"hp"),
        (0x33cc, "M", u"in"),
        (0x33cd, "M", u"kk"),
        (0x33ce, "M", u"km"),
        (0x33cf, "M", u"kt"),
        (0x33d0, "M", u"lm"),
        (0x33d1, "M", u"ln"),
        (0x33d2, "M", u"log"),
        (0x33d3, "M", u"lx"),
        (0x33d4, "M", u"mb"),
        (0x33d5, "M", u"mil"),
    ]
def _seg_35():
    """Segment 35 of the generated code-point table.

    Each entry is ``(codepoint, status)`` or ``(codepoint, status, mapping)``;
    statuses seen are "V", "M", "X" — presumably valid / mapped / disallowed
    per IDNA UTS #46 (TODO confirm against the table's consumer).  Entries
    are ordered by codepoint.
    """
    return [
        (0x33d6, "M", u"mol"),
        (0x33d7, "M", u"ph"),
        (0x33d8, "X"),
        (0x33d9, "M", u"ppm"),
        (0x33da, "M", u"pr"),
        (0x33db, "M", u"sr"),
        (0x33dc, "M", u"sv"),
        (0x33dd, "M", u"wb"),
        (0x33de, "M", u"v∕m"),
        (0x33df, "M", u"a∕m"),
        (0x33e0, "M", u"1日"),
        (0x33e1, "M", u"2日"),
        (0x33e2, "M", u"3日"),
        (0x33e3, "M", u"4日"),
        (0x33e4, "M", u"5日"),
        (0x33e5, "M", u"6日"),
        (0x33e6, "M", u"7日"),
        (0x33e7, "M", u"8日"),
        (0x33e8, "M", u"9日"),
        (0x33e9, "M", u"10日"),
        (0x33ea, "M", u"11日"),
        (0x33eb, "M", u"12日"),
        (0x33ec, "M", u"13日"),
        (0x33ed, "M", u"14日"),
        (0x33ee, "M", u"15日"),
        (0x33ef, "M", u"16日"),
        (0x33f0, "M", u"17日"),
        (0x33f1, "M", u"18日"),
        (0x33f2, "M", u"19日"),
        (0x33f3, "M", u"20日"),
        (0x33f4, "M", u"21日"),
        (0x33f5, "M", u"22日"),
        (0x33f6, "M", u"23日"),
        (0x33f7, "M", u"24日"),
        (0x33f8, "M", u"25日"),
        (0x33f9, "M", u"26日"),
        (0x33fa, "M", u"27日"),
        (0x33fb, "M", u"28日"),
        (0x33fc, "M", u"29日"),
        (0x33fd, "M", u"30日"),
        (0x33fe, "M", u"31日"),
        (0x33ff, "M", u"gal"),
        (0x3400, "V"),
        (0x4db6, "X"),
        (0x4dc0, "V"),
        (0x9ff0, "X"),
        (0xa000, "V"),
        (0xa48d, "X"),
        (0xa490, "V"),
        (0xa4c7, "X"),
        (0xa4d0, "V"),
        (0xa62c, "X"),
        (0xa640, "M", u"ꙁ"),
        (0xa641, "V"),
        (0xa642, "M", u"ꙃ"),
        (0xa643, "V"),
        (0xa644, "M", u"ꙅ"),
        (0xa645, "V"),
        (0xa646, "M", u"ꙇ"),
        (0xa647, "V"),
        (0xa648, "M", u"ꙉ"),
        (0xa649, "V"),
        (0xa64a, "M", u"ꙋ"),
        (0xa64b, "V"),
        (0xa64c, "M", u"ꙍ"),
        (0xa64d, "V"),
        (0xa64e, "M", u"ꙏ"),
        (0xa64f, "V"),
        (0xa650, "M", u"ꙑ"),
        (0xa651, "V"),
        (0xa652, "M", u"ꙓ"),
        (0xa653, "V"),
        (0xa654, "M", u"ꙕ"),
        (0xa655, "V"),
        (0xa656, "M", u"ꙗ"),
        (0xa657, "V"),
        (0xa658, "M", u"ꙙ"),
        (0xa659, "V"),
        (0xa65a, "M", u"ꙛ"),
        (0xa65b, "V"),
        (0xa65c, "M", u"ꙝ"),
        (0xa65d, "V"),
        (0xa65e, "M", u"ꙟ"),
        (0xa65f, "V"),
        (0xa660, "M", u"ꙡ"),
        (0xa661, "V"),
        (0xa662, "M", u"ꙣ"),
        (0xa663, "V"),
        (0xa664, "M", u"ꙥ"),
        (0xa665, "V"),
        (0xa666, "M", u"ꙧ"),
        (0xa667, "V"),
        (0xa668, "M", u"ꙩ"),
        (0xa669, "V"),
        (0xa66a, "M", u"ꙫ"),
        (0xa66b, "V"),
        (0xa66c, "M", u"ꙭ"),
        (0xa66d, "V"),
        (0xa680, "M", u"ꚁ"),
        (0xa681, "V"),
    ]
def _seg_36():
    """Segment 36 of the generated code-point table.

    Each entry is ``(codepoint, status)`` or ``(codepoint, status, mapping)``;
    statuses seen are "V", "M", "X" — presumably valid / mapped / disallowed
    per IDNA UTS #46 (TODO confirm against the table's consumer).  Entries
    are ordered by codepoint.
    """
    return [
        (0xa682, "M", u"ꚃ"),
        (0xa683, "V"),
        (0xa684, "M", u"ꚅ"),
        (0xa685, "V"),
        (0xa686, "M", u"ꚇ"),
        (0xa687, "V"),
        (0xa688, "M", u"ꚉ"),
        (0xa689, "V"),
        (0xa68a, "M", u"ꚋ"),
        (0xa68b, "V"),
        (0xa68c, "M", u"ꚍ"),
        (0xa68d, "V"),
        (0xa68e, "M", u"ꚏ"),
        (0xa68f, "V"),
        (0xa690, "M", u"ꚑ"),
        (0xa691, "V"),
        (0xa692, "M", u"ꚓ"),
        (0xa693, "V"),
        (0xa694, "M", u"ꚕ"),
        (0xa695, "V"),
        (0xa696, "M", u"ꚗ"),
        (0xa697, "V"),
        (0xa698, "M", u"ꚙ"),
        (0xa699, "V"),
        (0xa69a, "M", u"ꚛ"),
        (0xa69b, "V"),
        (0xa69c, "M", u"ъ"),
        (0xa69d, "M", u"ь"),
        (0xa69e, "V"),
        (0xa6f8, "X"),
        (0xa700, "V"),
        (0xa722, "M", u"ꜣ"),
        (0xa723, "V"),
        (0xa724, "M", u"ꜥ"),
        (0xa725, "V"),
        (0xa726, "M", u"ꜧ"),
        (0xa727, "V"),
        (0xa728, "M", u"ꜩ"),
        (0xa729, "V"),
        (0xa72a, "M", u"ꜫ"),
        (0xa72b, "V"),
        (0xa72c, "M", u"ꜭ"),
        (0xa72d, "V"),
        (0xa72e, "M", u"ꜯ"),
        (0xa72f, "V"),
        (0xa732, "M", u"ꜳ"),
        (0xa733, "V"),
        (0xa734, "M", u"ꜵ"),
        (0xa735, "V"),
        (0xa736, "M", u"ꜷ"),
        (0xa737, "V"),
        (0xa738, "M", u"ꜹ"),
        (0xa739, "V"),
        (0xa73a, "M", u"ꜻ"),
        (0xa73b, "V"),
        (0xa73c, "M", u"ꜽ"),
        (0xa73d, "V"),
        (0xa73e, "M", u"ꜿ"),
        (0xa73f, "V"),
        (0xa740, "M", u"ꝁ"),
        (0xa741, "V"),
        (0xa742, "M", u"ꝃ"),
        (0xa743, "V"),
        (0xa744, "M", u"ꝅ"),
        (0xa745, "V"),
        (0xa746, "M", u"ꝇ"),
        (0xa747, "V"),
        (0xa748, "M", u"ꝉ"),
        (0xa749, "V"),
        (0xa74a, "M", u"ꝋ"),
        (0xa74b, "V"),
        (0xa74c, "M", u"ꝍ"),
        (0xa74d, "V"),
        (0xa74e, "M", u"ꝏ"),
        (0xa74f, "V"),
        (0xa750, "M", u"ꝑ"),
        (0xa751, "V"),
        (0xa752, "M", u"ꝓ"),
        (0xa753, "V"),
        (0xa754, "M", u"ꝕ"),
        (0xa755, "V"),
        (0xa756, "M", u"ꝗ"),
        (0xa757, "V"),
        (0xa758, "M", u"ꝙ"),
        (0xa759, "V"),
        (0xa75a, "M", u"ꝛ"),
        (0xa75b, "V"),
        (0xa75c, "M", u"ꝝ"),
        (0xa75d, "V"),
        (0xa75e, "M", u"ꝟ"),
        (0xa75f, "V"),
        (0xa760, "M", u"ꝡ"),
        (0xa761, "V"),
        (0xa762, "M", u"ꝣ"),
        (0xa763, "V"),
        (0xa764, "M", u"ꝥ"),
        (0xa765, "V"),
        (0xa766, "M", u"ꝧ"),
        (0xa767, "V"),
        (0xa768, "M", u"ꝩ"),
    ]
def _seg_37():
    """Segment 37 of the generated code-point table.

    Each entry is ``(codepoint, status)`` or ``(codepoint, status, mapping)``;
    statuses seen are "V", "M", "X" — presumably valid / mapped / disallowed
    per IDNA UTS #46 (TODO confirm against the table's consumer).  Entries
    are ordered by codepoint.
    """
    return [
        (0xa769, "V"),
        (0xa76a, "M", u"ꝫ"),
        (0xa76b, "V"),
        (0xa76c, "M", u"ꝭ"),
        (0xa76d, "V"),
        (0xa76e, "M", u"ꝯ"),
        (0xa76f, "V"),
        (0xa770, "M", u"ꝯ"),
        (0xa771, "V"),
        (0xa779, "M", u"ꝺ"),
        (0xa77a, "V"),
        (0xa77b, "M", u"ꝼ"),
        (0xa77c, "V"),
        (0xa77d, "M", u"ᵹ"),
        (0xa77e, "M", u"ꝿ"),
        (0xa77f, "V"),
        (0xa780, "M", u"ꞁ"),
        (0xa781, "V"),
        (0xa782, "M", u"ꞃ"),
        (0xa783, "V"),
        (0xa784, "M", u"ꞅ"),
        (0xa785, "V"),
        (0xa786, "M", u"ꞇ"),
        (0xa787, "V"),
        (0xa78b, "M", u"ꞌ"),
        (0xa78c, "V"),
        (0xa78d, "M", u"ɥ"),
        (0xa78e, "V"),
        (0xa790, "M", u"ꞑ"),
        (0xa791, "V"),
        (0xa792, "M", u"ꞓ"),
        (0xa793, "V"),
        (0xa796, "M", u"ꞗ"),
        (0xa797, "V"),
        (0xa798, "M", u"ꞙ"),
        (0xa799, "V"),
        (0xa79a, "M", u"ꞛ"),
        (0xa79b, "V"),
        (0xa79c, "M", u"ꞝ"),
        (0xa79d, "V"),
        (0xa79e, "M", u"ꞟ"),
        (0xa79f, "V"),
        (0xa7a0, "M", u"ꞡ"),
        (0xa7a1, "V"),
        (0xa7a2, "M", u"ꞣ"),
        (0xa7a3, "V"),
        (0xa7a4, "M", u"ꞥ"),
        (0xa7a5, "V"),
        (0xa7a6, "M", u"ꞧ"),
        (0xa7a7, "V"),
        (0xa7a8, "M", u"ꞩ"),
        (0xa7a9, "V"),
        (0xa7aa, "M", u"ɦ"),
        (0xa7ab, "M", u"ɜ"),
        (0xa7ac, "M", u"ɡ"),
        (0xa7ad, "M", u"ɬ"),
        (0xa7ae, "M", u"ɪ"),
        (0xa7af, "V"),
        (0xa7b0, "M", u"ʞ"),
        (0xa7b1, "M", u"ʇ"),
        (0xa7b2, "M", u"ʝ"),
        (0xa7b3, "M", u"ꭓ"),
        (0xa7b4, "M", u"ꞵ"),
        (0xa7b5, "V"),
        (0xa7b6, "M", u"ꞷ"),
        (0xa7b7, "V"),
        (0xa7b8, "M", u"ꞹ"),
        (0xa7b9, "V"),
        (0xa7ba, "M", u"ꞻ"),
        (0xa7bb, "V"),
        (0xa7bc, "M", u"ꞽ"),
        (0xa7bd, "V"),
        (0xa7be, "M", u"ꞿ"),
        (0xa7bf, "V"),
        (0xa7c0, "X"),
        (0xa7c2, "M", u"ꟃ"),
        (0xa7c3, "V"),
        (0xa7c4, "M", u"ꞔ"),
        (0xa7c5, "M", u"ʂ"),
        (0xa7c6, "M", u"ᶎ"),
        (0xa7c7, "X"),
        (0xa7f7, "V"),
        (0xa7f8, "M", u"ħ"),
        (0xa7f9, "M", u"œ"),
        (0xa7fa, "V"),
        (0xa82c, "X"),
        (0xa830, "V"),
        (0xa83a, "X"),
        (0xa840, "V"),
        (0xa878, "X"),
        (0xa880, "V"),
        (0xa8c6, "X"),
        (0xa8ce, "V"),
        (0xa8da, "X"),
        (0xa8e0, "V"),
        (0xa954, "X"),
        (0xa95f, "V"),
        (0xa97d, "X"),
        (0xa980, "V"),
        (0xa9ce, "X"),
    ]
def _seg_38():
    """Segment 38 of the generated code-point table.

    Each entry is ``(codepoint, status)`` or ``(codepoint, status, mapping)``;
    statuses seen are "V", "M", "X" — presumably valid / mapped / disallowed
    per IDNA UTS #46 (TODO confirm against the table's consumer).  Entries
    are ordered by codepoint.
    """
    return [
        (0xa9cf, "V"),
        (0xa9da, "X"),
        (0xa9de, "V"),
        (0xa9ff, "X"),
        (0xaa00, "V"),
        (0xaa37, "X"),
        (0xaa40, "V"),
        (0xaa4e, "X"),
        (0xaa50, "V"),
        (0xaa5a, "X"),
        (0xaa5c, "V"),
        (0xaac3, "X"),
        (0xaadb, "V"),
        (0xaaf7, "X"),
        (0xab01, "V"),
        (0xab07, "X"),
        (0xab09, "V"),
        (0xab0f, "X"),
        (0xab11, "V"),
        (0xab17, "X"),
        (0xab20, "V"),
        (0xab27, "X"),
        (0xab28, "V"),
        (0xab2f, "X"),
        (0xab30, "V"),
        (0xab5c, "M", u"ꜧ"),
        (0xab5d, "M", u"ꬷ"),
        (0xab5e, "M", u"ɫ"),
        (0xab5f, "M", u"ꭒ"),
        (0xab60, "V"),
        (0xab68, "X"),
        (0xab70, "M", u"Ꭰ"),
        (0xab71, "M", u"Ꭱ"),
        (0xab72, "M", u"Ꭲ"),
        (0xab73, "M", u"Ꭳ"),
        (0xab74, "M", u"Ꭴ"),
        (0xab75, "M", u"Ꭵ"),
        (0xab76, "M", u"Ꭶ"),
        (0xab77, "M", u"Ꭷ"),
        (0xab78, "M", u"Ꭸ"),
        (0xab79, "M", u"Ꭹ"),
        (0xab7a, "M", u"Ꭺ"),
        (0xab7b, "M", u"Ꭻ"),
        (0xab7c, "M", u"Ꭼ"),
        (0xab7d, "M", u"Ꭽ"),
        (0xab7e, "M", u"Ꭾ"),
        (0xab7f, "M", u"Ꭿ"),
        (0xab80, "M", u"Ꮀ"),
        (0xab81, "M", u"Ꮁ"),
        (0xab82, "M", u"Ꮂ"),
        (0xab83, "M", u"Ꮃ"),
        (0xab84, "M", u"Ꮄ"),
        (0xab85, "M", u"Ꮅ"),
        (0xab86, "M", u"Ꮆ"),
        (0xab87, "M", u"Ꮇ"),
        (0xab88, "M", u"Ꮈ"),
        (0xab89, "M", u"Ꮉ"),
        (0xab8a, "M", u"Ꮊ"),
        (0xab8b, "M", u"Ꮋ"),
        (0xab8c, "M", u"Ꮌ"),
        (0xab8d, "M", u"Ꮍ"),
        (0xab8e, "M", u"Ꮎ"),
        (0xab8f, "M", u"Ꮏ"),
        (0xab90, "M", u"Ꮐ"),
        (0xab91, "M", u"Ꮑ"),
        (0xab92, "M", u"Ꮒ"),
        (0xab93, "M", u"Ꮓ"),
        (0xab94, "M", u"Ꮔ"),
        (0xab95, "M", u"Ꮕ"),
        (0xab96, "M", u"Ꮖ"),
        (0xab97, "M", u"Ꮗ"),
        (0xab98, "M", u"Ꮘ"),
        (0xab99, "M", u"Ꮙ"),
        (0xab9a, "M", u"Ꮚ"),
        (0xab9b, "M", u"Ꮛ"),
        (0xab9c, "M", u"Ꮜ"),
        (0xab9d, "M", u"Ꮝ"),
        (0xab9e, "M", u"Ꮞ"),
        (0xab9f, "M", u"Ꮟ"),
        (0xaba0, "M", u"Ꮠ"),
        (0xaba1, "M", u"Ꮡ"),
        (0xaba2, "M", u"Ꮢ"),
        (0xaba3, "M", u"Ꮣ"),
        (0xaba4, "M", u"Ꮤ"),
        (0xaba5, "M", u"Ꮥ"),
        (0xaba6, "M", u"Ꮦ"),
        (0xaba7, "M", u"Ꮧ"),
        (0xaba8, "M", u"Ꮨ"),
        (0xaba9, "M", u"Ꮩ"),
        (0xabaa, "M", u"Ꮪ"),
        (0xabab, "M", u"Ꮫ"),
        (0xabac, "M", u"Ꮬ"),
        (0xabad, "M", u"Ꮭ"),
        (0xabae, "M", u"Ꮮ"),
        (0xabaf, "M", u"Ꮯ"),
        (0xabb0, "M", u"Ꮰ"),
        (0xabb1, "M", u"Ꮱ"),
        (0xabb2, "M", u"Ꮲ"),
        (0xabb3, "M", u"Ꮳ"),
        (0xabb4, "M", u"Ꮴ"),
    ]
def _seg_39():
return [
(0xabb5, "M", u"Ꮵ"),
(0xabb6, "M", u"Ꮶ"),
(0xabb7, "M", u"Ꮷ"),
(0xabb8, "M", u"Ꮸ"),
(0xabb9, "M", u"Ꮹ"),
(0xabba, "M", u"Ꮺ"),
(0xabbb, "M", u"Ꮻ"),
(0xabbc, "M", u"Ꮼ"),
(0xabbd, "M", u"Ꮽ"),
(0xabbe, "M", u"Ꮾ"),
(0xabbf, "M", u"Ꮿ"),
(0xabc0, "V"),
(0xabee, "X"),
(0xabf0, "V"),
(0xabfa, "X"),
(0xac00, "V"),
(0xd7a4, "X"),
(0xd7b0, "V"),
(0xd7c7, "X"),
(0xd7cb, "V"),
(0xd7fc, "X"),
(0xf900, "M", u"豈"),
(0xf901, "M", u"更"),
(0xf902, "M", u"車"),
(0xf903, "M", u"賈"),
(0xf904, "M", u"滑"),
(0xf905, "M", u"串"),
(0xf906, "M", u"句"),
(0xf907, "M", u"龜"),
(0xf909, "M", u"契"),
(0xf90a, "M", u"金"),
(0xf90b, "M", u"喇"),
(0xf90c, "M", u"奈"),
(0xf90d, "M", u"懶"),
(0xf90e, "M", u"癩"),
(0xf90f, "M", u"羅"),
(0xf910, "M", u"蘿"),
(0xf911, "M", u"螺"),
(0xf912, "M", u"裸"),
(0xf913, "M", u"邏"),
(0xf914, "M", u"樂"),
(0xf915, "M", u"洛"),
(0xf916, "M", u"烙"),
(0xf917, "M", u"珞"),
(0xf918, "M", u"落"),
(0xf919, "M", u"酪"),
(0xf91a, "M", u"駱"),
(0xf91b, "M", u"亂"),
(0xf91c, "M", u"卵"),
(0xf91d, "M", u"欄"),
(0xf91e, "M", u"爛"),
(0xf91f, "M", u"蘭"),
(0xf920, "M", u"鸞"),
(0xf921, "M", u"嵐"),
(0xf922, "M", u"濫"),
(0xf923, "M", u"藍"),
(0xf924, "M", u"襤"),
(0xf925, "M", u"拉"),
(0xf926, "M", u"臘"),
(0xf927, "M", u"蠟"),
(0xf928, "M", u"廊"),
(0xf929, "M", u"朗"),
(0xf92a, "M", u"浪"),
(0xf92b, "M", u"狼"),
(0xf92c, "M", u"郎"),
(0xf92d, "M", u"來"),
(0xf92e, "M", u"冷"),
(0xf92f, "M", u"勞"),
(0xf930, "M", u"擄"),
(0xf931, "M", u"櫓"),
(0xf932, "M", u"爐"),
(0xf933, "M", u"盧"),
(0xf934, "M", u"老"),
(0xf935, "M", u"蘆"),
(0xf936, "M", u"虜"),
(0xf937, "M", u"路"),
(0xf938, "M", u"露"),
(0xf939, "M", u"魯"),
(0xf93a, "M", u"鷺"),
(0xf93b, "M", u"碌"),
(0xf93c, "M", u"祿"),
(0xf93d, "M", u"綠"),
(0xf93e, "M", u"菉"),
(0xf93f, "M", u"錄"),
(0xf940, "M", u"鹿"),
(0xf941, "M", u"論"),
(0xf942, "M", u"壟"),
(0xf943, "M", u"弄"),
(0xf944, "M", u"籠"),
(0xf945, "M", u"聾"),
(0xf946, "M", u"牢"),
(0xf947, "M", u"磊"),
(0xf948, "M", u"賂"),
(0xf949, "M", u"雷"),
(0xf94a, "M", u"壘"),
(0xf94b, "M", u"屢"),
(0xf94c, "M", u"樓"),
(0xf94d, "M", u"淚"),
(0xf94e, "M", u"漏"),
(0xf94f, "M", u"累"),
]
def _seg_40():
return [
(0xf950, "M", u"縷"),
(0xf951, "M", u"陋"),
(0xf952, "M", u"勒"),
(0xf953, "M", u"肋"),
(0xf954, "M", u"凜"),
(0xf955, "M", u"凌"),
(0xf956, "M", u"稜"),
(0xf957, "M", u"綾"),
(0xf958, "M", u"菱"),
(0xf959, "M", u"陵"),
(0xf95a, "M", u"讀"),
(0xf95b, "M", u"拏"),
(0xf95c, "M", u"樂"),
(0xf95d, "M", u"諾"),
(0xf95e, "M", u"丹"),
(0xf95f, "M", u"寧"),
(0xf960, "M", u"怒"),
(0xf961, "M", u"率"),
(0xf962, "M", u"異"),
(0xf963, "M", u"北"),
(0xf964, "M", u"磻"),
(0xf965, "M", u"便"),
(0xf966, "M", u"復"),
(0xf967, "M", u"不"),
(0xf968, "M", u"泌"),
(0xf969, "M", u"數"),
(0xf96a, "M", u"索"),
(0xf96b, "M", u"參"),
(0xf96c, "M", u"塞"),
(0xf96d, "M", u"省"),
(0xf96e, "M", u"葉"),
(0xf96f, "M", u"說"),
(0xf970, "M", u"殺"),
(0xf971, "M", u"辰"),
(0xf972, "M", u"沈"),
(0xf973, "M", u"拾"),
(0xf974, "M", u"若"),
(0xf975, "M", u"掠"),
(0xf976, "M", u"略"),
(0xf977, "M", u"亮"),
(0xf978, "M", u"兩"),
(0xf979, "M", u"凉"),
(0xf97a, "M", u"梁"),
(0xf97b, "M", u"糧"),
(0xf97c, "M", u"良"),
(0xf97d, "M", u"諒"),
(0xf97e, "M", u"量"),
(0xf97f, "M", u"勵"),
(0xf980, "M", u"呂"),
(0xf981, "M", u"女"),
(0xf982, "M", u"廬"),
(0xf983, "M", u"旅"),
(0xf984, "M", u"濾"),
(0xf985, "M", u"礪"),
(0xf986, "M", u"閭"),
(0xf987, "M", u"驪"),
(0xf988, "M", u"麗"),
(0xf989, "M", u"黎"),
(0xf98a, "M", u"力"),
(0xf98b, "M", u"曆"),
(0xf98c, "M", u"歷"),
(0xf98d, "M", u"轢"),
(0xf98e, "M", u"年"),
(0xf98f, "M", u"憐"),
(0xf990, "M", u"戀"),
(0xf991, "M", u"撚"),
(0xf992, "M", u"漣"),
(0xf993, "M", u"煉"),
(0xf994, "M", u"璉"),
(0xf995, "M", u"秊"),
(0xf996, "M", u"練"),
(0xf997, "M", u"聯"),
(0xf998, "M", u"輦"),
(0xf999, "M", u"蓮"),
(0xf99a, "M", u"連"),
(0xf99b, "M", u"鍊"),
(0xf99c, "M", u"列"),
(0xf99d, "M", u"劣"),
(0xf99e, "M", u"咽"),
(0xf99f, "M", u"烈"),
(0xf9a0, "M", u"裂"),
(0xf9a1, "M", u"說"),
(0xf9a2, "M", u"廉"),
(0xf9a3, "M", u"念"),
(0xf9a4, "M", u"捻"),
(0xf9a5, "M", u"殮"),
(0xf9a6, "M", u"簾"),
(0xf9a7, "M", u"獵"),
(0xf9a8, "M", u"令"),
(0xf9a9, "M", u"囹"),
(0xf9aa, "M", u"寧"),
(0xf9ab, "M", u"嶺"),
(0xf9ac, "M", u"怜"),
(0xf9ad, "M", u"玲"),
(0xf9ae, "M", u"瑩"),
(0xf9af, "M", u"羚"),
(0xf9b0, "M", u"聆"),
(0xf9b1, "M", u"鈴"),
(0xf9b2, "M", u"零"),
(0xf9b3, "M", u"靈"),
]
def _seg_41():
return [
(0xf9b4, "M", u"領"),
(0xf9b5, "M", u"例"),
(0xf9b6, "M", u"禮"),
(0xf9b7, "M", u"醴"),
(0xf9b8, "M", u"隸"),
(0xf9b9, "M", u"惡"),
(0xf9ba, "M", u"了"),
(0xf9bb, "M", u"僚"),
(0xf9bc, "M", u"寮"),
(0xf9bd, "M", u"尿"),
(0xf9be, "M", u"料"),
(0xf9bf, "M", u"樂"),
(0xf9c0, "M", u"燎"),
(0xf9c1, "M", u"療"),
(0xf9c2, "M", u"蓼"),
(0xf9c3, "M", u"遼"),
(0xf9c4, "M", u"龍"),
(0xf9c5, "M", u"暈"),
(0xf9c6, "M", u"阮"),
(0xf9c7, "M", u"劉"),
(0xf9c8, "M", u"杻"),
(0xf9c9, "M", u"柳"),
(0xf9ca, "M", u"流"),
(0xf9cb, "M", u"溜"),
(0xf9cc, "M", u"琉"),
(0xf9cd, "M", u"留"),
(0xf9ce, "M", u"硫"),
(0xf9cf, "M", u"紐"),
(0xf9d0, "M", u"類"),
(0xf9d1, "M", u"六"),
(0xf9d2, "M", u"戮"),
(0xf9d3, "M", u"陸"),
(0xf9d4, "M", u"倫"),
(0xf9d5, "M", u"崙"),
(0xf9d6, "M", u"淪"),
(0xf9d7, "M", u"輪"),
(0xf9d8, "M", u"律"),
(0xf9d9, "M", u"慄"),
(0xf9da, "M", u"栗"),
(0xf9db, "M", u"率"),
(0xf9dc, "M", u"隆"),
(0xf9dd, "M", u"利"),
(0xf9de, "M", u"吏"),
(0xf9df, "M", u"履"),
(0xf9e0, "M", u"易"),
(0xf9e1, "M", u"李"),
(0xf9e2, "M", u"梨"),
(0xf9e3, "M", u"泥"),
(0xf9e4, "M", u"理"),
(0xf9e5, "M", u"痢"),
(0xf9e6, "M", u"罹"),
(0xf9e7, "M", u"裏"),
(0xf9e8, "M", u"裡"),
(0xf9e9, "M", u"里"),
(0xf9ea, "M", u"離"),
(0xf9eb, "M", u"匿"),
(0xf9ec, "M", u"溺"),
(0xf9ed, "M", u"吝"),
(0xf9ee, "M", u"燐"),
(0xf9ef, "M", u"璘"),
(0xf9f0, "M", u"藺"),
(0xf9f1, "M", u"隣"),
(0xf9f2, "M", u"鱗"),
(0xf9f3, "M", u"麟"),
(0xf9f4, "M", u"林"),
(0xf9f5, "M", u"淋"),
(0xf9f6, "M", u"臨"),
(0xf9f7, "M", u"立"),
(0xf9f8, "M", u"笠"),
(0xf9f9, "M", u"粒"),
(0xf9fa, "M", u"狀"),
(0xf9fb, "M", u"炙"),
(0xf9fc, "M", u"識"),
(0xf9fd, "M", u"什"),
(0xf9fe, "M", u"茶"),
(0xf9ff, "M", u"刺"),
(0xfa00, "M", u"切"),
(0xfa01, "M", u"度"),
(0xfa02, "M", u"拓"),
(0xfa03, "M", u"糖"),
(0xfa04, "M", u"宅"),
(0xfa05, "M", u"洞"),
(0xfa06, "M", u"暴"),
(0xfa07, "M", u"輻"),
(0xfa08, "M", u"行"),
(0xfa09, "M", u"降"),
(0xfa0a, "M", u"見"),
(0xfa0b, "M", u"廓"),
(0xfa0c, "M", u"兀"),
(0xfa0d, "M", u"嗀"),
(0xfa0e, "V"),
(0xfa10, "M", u"塚"),
(0xfa11, "V"),
(0xfa12, "M", u"晴"),
(0xfa13, "V"),
(0xfa15, "M", u"凞"),
(0xfa16, "M", u"猪"),
(0xfa17, "M", u"益"),
(0xfa18, "M", u"礼"),
(0xfa19, "M", u"神"),
]
def _seg_42():
return [
(0xfa1a, "M", u"祥"),
(0xfa1b, "M", u"福"),
(0xfa1c, "M", u"靖"),
(0xfa1d, "M", u"精"),
(0xfa1e, "M", u"羽"),
(0xfa1f, "V"),
(0xfa20, "M", u"蘒"),
(0xfa21, "V"),
(0xfa22, "M", u"諸"),
(0xfa23, "V"),
(0xfa25, "M", u"逸"),
(0xfa26, "M", u"都"),
(0xfa27, "V"),
(0xfa2a, "M", u"飯"),
(0xfa2b, "M", u"飼"),
(0xfa2c, "M", u"館"),
(0xfa2d, "M", u"鶴"),
(0xfa2e, "M", u"郞"),
(0xfa2f, "M", u"隷"),
(0xfa30, "M", u"侮"),
(0xfa31, "M", u"僧"),
(0xfa32, "M", u"免"),
(0xfa33, "M", u"勉"),
(0xfa34, "M", u"勤"),
(0xfa35, "M", u"卑"),
(0xfa36, "M", u"喝"),
(0xfa37, "M", u"嘆"),
(0xfa38, "M", u"器"),
(0xfa39, "M", u"塀"),
(0xfa3a, "M", u"墨"),
(0xfa3b, "M", u"層"),
(0xfa3c, "M", u"屮"),
(0xfa3d, "M", u"悔"),
(0xfa3e, "M", u"慨"),
(0xfa3f, "M", u"憎"),
(0xfa40, "M", u"懲"),
(0xfa41, "M", u"敏"),
(0xfa42, "M", u"既"),
(0xfa43, "M", u"暑"),
(0xfa44, "M", u"梅"),
(0xfa45, "M", u"海"),
(0xfa46, "M", u"渚"),
(0xfa47, "M", u"漢"),
(0xfa48, "M", u"煮"),
(0xfa49, "M", u"爫"),
(0xfa4a, "M", u"琢"),
(0xfa4b, "M", u"碑"),
(0xfa4c, "M", u"社"),
(0xfa4d, "M", u"祉"),
(0xfa4e, "M", u"祈"),
(0xfa4f, "M", u"祐"),
(0xfa50, "M", u"祖"),
(0xfa51, "M", u"祝"),
(0xfa52, "M", u"禍"),
(0xfa53, "M", u"禎"),
(0xfa54, "M", u"穀"),
(0xfa55, "M", u"突"),
(0xfa56, "M", u"節"),
(0xfa57, "M", u"練"),
(0xfa58, "M", u"縉"),
(0xfa59, "M", u"繁"),
(0xfa5a, "M", u"署"),
(0xfa5b, "M", u"者"),
(0xfa5c, "M", u"臭"),
(0xfa5d, "M", u"艹"),
(0xfa5f, "M", u"著"),
(0xfa60, "M", u"褐"),
(0xfa61, "M", u"視"),
(0xfa62, "M", u"謁"),
(0xfa63, "M", u"謹"),
(0xfa64, "M", u"賓"),
(0xfa65, "M", u"贈"),
(0xfa66, "M", u"辶"),
(0xfa67, "M", u"逸"),
(0xfa68, "M", u"難"),
(0xfa69, "M", u"響"),
(0xfa6a, "M", u"頻"),
(0xfa6b, "M", u"恵"),
(0xfa6c, "M", u"𤋮"),
(0xfa6d, "M", u"舘"),
(0xfa6e, "X"),
(0xfa70, "M", u"並"),
(0xfa71, "M", u"况"),
(0xfa72, "M", u"全"),
(0xfa73, "M", u"侀"),
(0xfa74, "M", u"充"),
(0xfa75, "M", u"冀"),
(0xfa76, "M", u"勇"),
(0xfa77, "M", u"勺"),
(0xfa78, "M", u"喝"),
(0xfa79, "M", u"啕"),
(0xfa7a, "M", u"喙"),
(0xfa7b, "M", u"嗢"),
(0xfa7c, "M", u"塚"),
(0xfa7d, "M", u"墳"),
(0xfa7e, "M", u"奄"),
(0xfa7f, "M", u"奔"),
(0xfa80, "M", u"婢"),
(0xfa81, "M", u"嬨"),
(0xfa82, "M", u"廒"),
]
def _seg_43():
return [
(0xfa83, "M", u"廙"),
(0xfa84, "M", u"彩"),
(0xfa85, "M", u"徭"),
(0xfa86, "M", u"惘"),
(0xfa87, "M", u"慎"),
(0xfa88, "M", u"愈"),
(0xfa89, "M", u"憎"),
(0xfa8a, "M", u"慠"),
(0xfa8b, "M", u"懲"),
(0xfa8c, "M", u"戴"),
(0xfa8d, "M", u"揄"),
(0xfa8e, "M", u"搜"),
(0xfa8f, "M", u"摒"),
(0xfa90, "M", u"敖"),
(0xfa91, "M", u"晴"),
(0xfa92, "M", u"朗"),
(0xfa93, "M", u"望"),
(0xfa94, "M", u"杖"),
(0xfa95, "M", u"歹"),
(0xfa96, "M", u"殺"),
(0xfa97, "M", u"流"),
(0xfa98, "M", u"滛"),
(0xfa99, "M", u"滋"),
(0xfa9a, "M", u"漢"),
(0xfa9b, "M", u"瀞"),
(0xfa9c, "M", u"煮"),
(0xfa9d, "M", u"瞧"),
(0xfa9e, "M", u"爵"),
(0xfa9f, "M", u"犯"),
(0xfaa0, "M", u"猪"),
(0xfaa1, "M", u"瑱"),
(0xfaa2, "M", u"甆"),
(0xfaa3, "M", u"画"),
(0xfaa4, "M", u"瘝"),
(0xfaa5, "M", u"瘟"),
(0xfaa6, "M", u"益"),
(0xfaa7, "M", u"盛"),
(0xfaa8, "M", u"直"),
(0xfaa9, "M", u"睊"),
(0xfaaa, "M", u"着"),
(0xfaab, "M", u"磌"),
(0xfaac, "M", u"窱"),
(0xfaad, "M", u"節"),
(0xfaae, "M", u"类"),
(0xfaaf, "M", u"絛"),
(0xfab0, "M", u"練"),
(0xfab1, "M", u"缾"),
(0xfab2, "M", u"者"),
(0xfab3, "M", u"荒"),
(0xfab4, "M", u"華"),
(0xfab5, "M", u"蝹"),
(0xfab6, "M", u"襁"),
(0xfab7, "M", u"覆"),
(0xfab8, "M", u"視"),
(0xfab9, "M", u"調"),
(0xfaba, "M", u"諸"),
(0xfabb, "M", u"請"),
(0xfabc, "M", u"謁"),
(0xfabd, "M", u"諾"),
(0xfabe, "M", u"諭"),
(0xfabf, "M", u"謹"),
(0xfac0, "M", u"變"),
(0xfac1, "M", u"贈"),
(0xfac2, "M", u"輸"),
(0xfac3, "M", u"遲"),
(0xfac4, "M", u"醙"),
(0xfac5, "M", u"鉶"),
(0xfac6, "M", u"陼"),
(0xfac7, "M", u"難"),
(0xfac8, "M", u"靖"),
(0xfac9, "M", u"韛"),
(0xfaca, "M", u"響"),
(0xfacb, "M", u"頋"),
(0xfacc, "M", u"頻"),
(0xfacd, "M", u"鬒"),
(0xface, "M", u"龜"),
(0xfacf, "M", u"𢡊"),
(0xfad0, "M", u"𢡄"),
(0xfad1, "M", u"𣏕"),
(0xfad2, "M", u"㮝"),
(0xfad3, "M", u"䀘"),
(0xfad4, "M", u"䀹"),
(0xfad5, "M", u"𥉉"),
(0xfad6, "M", u"𥳐"),
(0xfad7, "M", u"𧻓"),
(0xfad8, "M", u"齃"),
(0xfad9, "M", u"龎"),
(0xfada, "X"),
(0xfb00, "M", u"ff"),
(0xfb01, "M", u"fi"),
(0xfb02, "M", u"fl"),
(0xfb03, "M", u"ffi"),
(0xfb04, "M", u"ffl"),
(0xfb05, "M", u"st"),
(0xfb07, "X"),
(0xfb13, "M", u"մն"),
(0xfb14, "M", u"մե"),
(0xfb15, "M", u"մի"),
(0xfb16, "M", u"վն"),
(0xfb17, "M", u"մխ"),
]
def _seg_44():
return [
(0xfb18, "X"),
(0xfb1d, "M", u"יִ"),
(0xfb1e, "V"),
(0xfb1f, "M", u"ײַ"),
(0xfb20, "M", u"ע"),
(0xfb21, "M", u"א"),
(0xfb22, "M", u"ד"),
(0xfb23, "M", u"ה"),
(0xfb24, "M", u"כ"),
(0xfb25, "M", u"ל"),
(0xfb26, "M", u"ם"),
(0xfb27, "M", u"ר"),
(0xfb28, "M", u"ת"),
(0xfb29, "3", u"+"),
(0xfb2a, "M", u"שׁ"),
(0xfb2b, "M", u"שׂ"),
(0xfb2c, "M", u"שּׁ"),
(0xfb2d, "M", u"שּׂ"),
(0xfb2e, "M", u"אַ"),
(0xfb2f, "M", u"אָ"),
(0xfb30, "M", u"אּ"),
(0xfb31, "M", u"בּ"),
(0xfb32, "M", u"גּ"),
(0xfb33, "M", u"דּ"),
(0xfb34, "M", u"הּ"),
(0xfb35, "M", u"וּ"),
(0xfb36, "M", u"זּ"),
(0xfb37, "X"),
(0xfb38, "M", u"טּ"),
(0xfb39, "M", u"יּ"),
(0xfb3a, "M", u"ךּ"),
(0xfb3b, "M", u"כּ"),
(0xfb3c, "M", u"לּ"),
(0xfb3d, "X"),
(0xfb3e, "M", u"מּ"),
(0xfb3f, "X"),
(0xfb40, "M", u"נּ"),
(0xfb41, "M", u"סּ"),
(0xfb42, "X"),
(0xfb43, "M", u"ףּ"),
(0xfb44, "M", u"פּ"),
(0xfb45, "X"),
(0xfb46, "M", u"צּ"),
(0xfb47, "M", u"קּ"),
(0xfb48, "M", u"רּ"),
(0xfb49, "M", u"שּ"),
(0xfb4a, "M", u"תּ"),
(0xfb4b, "M", u"וֹ"),
(0xfb4c, "M", u"בֿ"),
(0xfb4d, "M", u"כֿ"),
(0xfb4e, "M", u"פֿ"),
(0xfb4f, "M", u"אל"),
(0xfb50, "M", u"ٱ"),
(0xfb52, "M", u"ٻ"),
(0xfb56, "M", u"پ"),
(0xfb5a, "M", u"ڀ"),
(0xfb5e, "M", u"ٺ"),
(0xfb62, "M", u"ٿ"),
(0xfb66, "M", u"ٹ"),
(0xfb6a, "M", u"ڤ"),
(0xfb6e, "M", u"ڦ"),
(0xfb72, "M", u"ڄ"),
(0xfb76, "M", u"ڃ"),
(0xfb7a, "M", u"چ"),
(0xfb7e, "M", u"ڇ"),
(0xfb82, "M", u"ڍ"),
(0xfb84, "M", u"ڌ"),
(0xfb86, "M", u"ڎ"),
(0xfb88, "M", u"ڈ"),
(0xfb8a, "M", u"ژ"),
(0xfb8c, "M", u"ڑ"),
(0xfb8e, "M", u"ک"),
(0xfb92, "M", u"گ"),
(0xfb96, "M", u"ڳ"),
(0xfb9a, "M", u"ڱ"),
(0xfb9e, "M", u"ں"),
(0xfba0, "M", u"ڻ"),
(0xfba4, "M", u"ۀ"),
(0xfba6, "M", u"ہ"),
(0xfbaa, "M", u"ھ"),
(0xfbae, "M", u"ے"),
(0xfbb0, "M", u"ۓ"),
(0xfbb2, "V"),
(0xfbc2, "X"),
(0xfbd3, "M", u"ڭ"),
(0xfbd7, "M", u"ۇ"),
(0xfbd9, "M", u"ۆ"),
(0xfbdb, "M", u"ۈ"),
(0xfbdd, "M", u"ۇٴ"),
(0xfbde, "M", u"ۋ"),
(0xfbe0, "M", u"ۅ"),
(0xfbe2, "M", u"ۉ"),
(0xfbe4, "M", u"ې"),
(0xfbe8, "M", u"ى"),
(0xfbea, "M", u"ئا"),
(0xfbec, "M", u"ئە"),
(0xfbee, "M", u"ئو"),
(0xfbf0, "M", u"ئۇ"),
(0xfbf2, "M", u"ئۆ"),
(0xfbf4, "M", u"ئۈ"),
]
def _seg_45():
return [
(0xfbf6, "M", u"ئې"),
(0xfbf9, "M", u"ئى"),
(0xfbfc, "M", u"ی"),
(0xfc00, "M", u"ئج"),
(0xfc01, "M", u"ئح"),
(0xfc02, "M", u"ئم"),
(0xfc03, "M", u"ئى"),
(0xfc04, "M", u"ئي"),
(0xfc05, "M", u"بج"),
(0xfc06, "M", u"بح"),
(0xfc07, "M", u"بخ"),
(0xfc08, "M", u"بم"),
(0xfc09, "M", u"بى"),
(0xfc0a, "M", u"بي"),
(0xfc0b, "M", u"تج"),
(0xfc0c, "M", u"تح"),
(0xfc0d, "M", u"تخ"),
(0xfc0e, "M", u"تم"),
(0xfc0f, "M", u"تى"),
(0xfc10, "M", u"تي"),
(0xfc11, "M", u"ثج"),
(0xfc12, "M", u"ثم"),
(0xfc13, "M", u"ثى"),
(0xfc14, "M", u"ثي"),
(0xfc15, "M", u"جح"),
(0xfc16, "M", u"جم"),
(0xfc17, "M", u"حج"),
(0xfc18, "M", u"حم"),
(0xfc19, "M", u"خج"),
(0xfc1a, "M", u"خح"),
(0xfc1b, "M", u"خم"),
(0xfc1c, "M", u"سج"),
(0xfc1d, "M", u"سح"),
(0xfc1e, "M", u"سخ"),
(0xfc1f, "M", u"سم"),
(0xfc20, "M", u"صح"),
(0xfc21, "M", u"صم"),
(0xfc22, "M", u"ضج"),
(0xfc23, "M", u"ضح"),
(0xfc24, "M", u"ضخ"),
(0xfc25, "M", u"ضم"),
(0xfc26, "M", u"طح"),
(0xfc27, "M", u"طم"),
(0xfc28, "M", u"ظم"),
(0xfc29, "M", u"عج"),
(0xfc2a, "M", u"عم"),
(0xfc2b, "M", u"غج"),
(0xfc2c, "M", u"غم"),
(0xfc2d, "M", u"فج"),
(0xfc2e, "M", u"فح"),
(0xfc2f, "M", u"فخ"),
(0xfc30, "M", u"فم"),
(0xfc31, "M", u"فى"),
(0xfc32, "M", u"في"),
(0xfc33, "M", u"قح"),
(0xfc34, "M", u"قم"),
(0xfc35, "M", u"قى"),
(0xfc36, "M", u"قي"),
(0xfc37, "M", u"كا"),
(0xfc38, "M", u"كج"),
(0xfc39, "M", u"كح"),
(0xfc3a, "M", u"كخ"),
(0xfc3b, "M", u"كل"),
(0xfc3c, "M", u"كم"),
(0xfc3d, "M", u"كى"),
(0xfc3e, "M", u"كي"),
(0xfc3f, "M", u"لج"),
(0xfc40, "M", u"لح"),
(0xfc41, "M", u"لخ"),
(0xfc42, "M", u"لم"),
(0xfc43, "M", u"لى"),
(0xfc44, "M", u"لي"),
(0xfc45, "M", u"مج"),
(0xfc46, "M", u"مح"),
(0xfc47, "M", u"مخ"),
(0xfc48, "M", u"مم"),
(0xfc49, "M", u"مى"),
(0xfc4a, "M", u"مي"),
(0xfc4b, "M", u"نج"),
(0xfc4c, "M", u"نح"),
(0xfc4d, "M", u"نخ"),
(0xfc4e, "M", u"نم"),
(0xfc4f, "M", u"نى"),
(0xfc50, "M", u"ني"),
(0xfc51, "M", u"هج"),
(0xfc52, "M", u"هم"),
(0xfc53, "M", u"هى"),
(0xfc54, "M", u"هي"),
(0xfc55, "M", u"يج"),
(0xfc56, "M", u"يح"),
(0xfc57, "M", u"يخ"),
(0xfc58, "M", u"يم"),
(0xfc59, "M", u"يى"),
(0xfc5a, "M", u"يي"),
(0xfc5b, "M", u"ذٰ"),
(0xfc5c, "M", u"رٰ"),
(0xfc5d, "M", u"ىٰ"),
(0xfc5e, "3", u" ٌّ"),
(0xfc5f, "3", u" ٍّ"),
(0xfc60, "3", u" َّ"),
]
def _seg_46():
return [
(0xfc61, "3", u" ُّ"),
(0xfc62, "3", u" ِّ"),
(0xfc63, "3", u" ّٰ"),
(0xfc64, "M", u"ئر"),
(0xfc65, "M", u"ئز"),
(0xfc66, "M", u"ئم"),
(0xfc67, "M", u"ئن"),
(0xfc68, "M", u"ئى"),
(0xfc69, "M", u"ئي"),
(0xfc6a, "M", u"بر"),
(0xfc6b, "M", u"بز"),
(0xfc6c, "M", u"بم"),
(0xfc6d, "M", u"بن"),
(0xfc6e, "M", u"بى"),
(0xfc6f, "M", u"بي"),
(0xfc70, "M", u"تر"),
(0xfc71, "M", u"تز"),
(0xfc72, "M", u"تم"),
(0xfc73, "M", u"تن"),
(0xfc74, "M", u"تى"),
(0xfc75, "M", u"تي"),
(0xfc76, "M", u"ثر"),
(0xfc77, "M", u"ثز"),
(0xfc78, "M", u"ثم"),
(0xfc79, "M", u"ثن"),
(0xfc7a, "M", u"ثى"),
(0xfc7b, "M", u"ثي"),
(0xfc7c, "M", u"فى"),
(0xfc7d, "M", u"في"),
(0xfc7e, "M", u"قى"),
(0xfc7f, "M", u"قي"),
(0xfc80, "M", u"كا"),
(0xfc81, "M", u"كل"),
(0xfc82, "M", u"كم"),
(0xfc83, "M", u"كى"),
(0xfc84, "M", u"كي"),
(0xfc85, "M", u"لم"),
(0xfc86, "M", u"لى"),
(0xfc87, "M", u"لي"),
(0xfc88, "M", u"ما"),
(0xfc89, "M", u"مم"),
(0xfc8a, "M", u"نر"),
(0xfc8b, "M", u"نز"),
(0xfc8c, "M", u"نم"),
(0xfc8d, "M", u"نن"),
(0xfc8e, "M", u"نى"),
(0xfc8f, "M", u"ني"),
(0xfc90, "M", u"ىٰ"),
(0xfc91, "M", u"ير"),
(0xfc92, "M", u"يز"),
(0xfc93, "M", u"يم"),
(0xfc94, "M", u"ين"),
(0xfc95, "M", u"يى"),
(0xfc96, "M", u"يي"),
(0xfc97, "M", u"ئج"),
(0xfc98, "M", u"ئح"),
(0xfc99, "M", u"ئخ"),
(0xfc9a, "M", u"ئم"),
(0xfc9b, "M", u"ئه"),
(0xfc9c, "M", u"بج"),
(0xfc9d, "M", u"بح"),
(0xfc9e, "M", u"بخ"),
(0xfc9f, "M", u"بم"),
(0xfca0, "M", u"به"),
(0xfca1, "M", u"تج"),
(0xfca2, "M", u"تح"),
(0xfca3, "M", u"تخ"),
(0xfca4, "M", u"تم"),
(0xfca5, "M", u"ته"),
(0xfca6, "M", u"ثم"),
(0xfca7, "M", u"جح"),
(0xfca8, "M", u"جم"),
(0xfca9, "M", u"حج"),
(0xfcaa, "M", u"حم"),
(0xfcab, "M", u"خج"),
(0xfcac, "M", u"خم"),
(0xfcad, "M", u"سج"),
(0xfcae, "M", u"سح"),
(0xfcaf, "M", u"سخ"),
(0xfcb0, "M", u"سم"),
(0xfcb1, "M", u"صح"),
(0xfcb2, "M", u"صخ"),
(0xfcb3, "M", u"صم"),
(0xfcb4, "M", u"ضج"),
(0xfcb5, "M", u"ضح"),
(0xfcb6, "M", u"ضخ"),
(0xfcb7, "M", u"ضم"),
(0xfcb8, "M", u"طح"),
(0xfcb9, "M", u"ظم"),
(0xfcba, "M", u"عج"),
(0xfcbb, "M", u"عم"),
(0xfcbc, "M", u"غج"),
(0xfcbd, "M", u"غم"),
(0xfcbe, "M", u"فج"),
(0xfcbf, "M", u"فح"),
(0xfcc0, "M", u"فخ"),
(0xfcc1, "M", u"فم"),
(0xfcc2, "M", u"قح"),
(0xfcc3, "M", u"قم"),
(0xfcc4, "M", u"كج"),
]
def _seg_47():
return [
(0xfcc5, "M", u"كح"),
(0xfcc6, "M", u"كخ"),
(0xfcc7, "M", u"كل"),
(0xfcc8, "M", u"كم"),
(0xfcc9, "M", u"لج"),
(0xfcca, "M", u"لح"),
(0xfccb, "M", u"لخ"),
(0xfccc, "M", u"لم"),
(0xfccd, "M", u"له"),
(0xfcce, "M", u"مج"),
(0xfccf, "M", u"مح"),
(0xfcd0, "M", u"مخ"),
(0xfcd1, "M", u"مم"),
(0xfcd2, "M", u"نج"),
(0xfcd3, "M", u"نح"),
(0xfcd4, "M", u"نخ"),
(0xfcd5, "M", u"نم"),
(0xfcd6, "M", u"نه"),
(0xfcd7, "M", u"هج"),
(0xfcd8, "M", u"هم"),
(0xfcd9, "M", u"هٰ"),
(0xfcda, "M", u"يج"),
(0xfcdb, "M", u"يح"),
(0xfcdc, "M", u"يخ"),
(0xfcdd, "M", u"يم"),
(0xfcde, "M", u"يه"),
(0xfcdf, "M", u"ئم"),
(0xfce0, "M", u"ئه"),
(0xfce1, "M", u"بم"),
(0xfce2, "M", u"به"),
(0xfce3, "M", u"تم"),
(0xfce4, "M", u"ته"),
(0xfce5, "M", u"ثم"),
(0xfce6, "M", u"ثه"),
(0xfce7, "M", u"سم"),
(0xfce8, "M", u"سه"),
(0xfce9, "M", u"شم"),
(0xfcea, "M", u"شه"),
(0xfceb, "M", u"كل"),
(0xfcec, "M", u"كم"),
(0xfced, "M", u"لم"),
(0xfcee, "M", u"نم"),
(0xfcef, "M", u"نه"),
(0xfcf0, "M", u"يم"),
(0xfcf1, "M", u"يه"),
(0xfcf2, "M", u"ـَّ"),
(0xfcf3, "M", u"ـُّ"),
(0xfcf4, "M", u"ـِّ"),
(0xfcf5, "M", u"طى"),
(0xfcf6, "M", u"طي"),
(0xfcf7, "M", u"عى"),
(0xfcf8, "M", u"عي"),
(0xfcf9, "M", u"غى"),
(0xfcfa, "M", u"غي"),
(0xfcfb, "M", u"سى"),
(0xfcfc, "M", u"سي"),
(0xfcfd, "M", u"شى"),
(0xfcfe, "M", u"شي"),
(0xfcff, "M", u"حى"),
(0xfd00, "M", u"حي"),
(0xfd01, "M", u"جى"),
(0xfd02, "M", u"جي"),
(0xfd03, "M", u"خى"),
(0xfd04, "M", u"خي"),
(0xfd05, "M", u"صى"),
(0xfd06, "M", u"صي"),
(0xfd07, "M", u"ضى"),
(0xfd08, "M", u"ضي"),
(0xfd09, "M", u"شج"),
(0xfd0a, "M", u"شح"),
(0xfd0b, "M", u"شخ"),
(0xfd0c, "M", u"شم"),
(0xfd0d, "M", u"شر"),
(0xfd0e, "M", u"سر"),
(0xfd0f, "M", u"صر"),
(0xfd10, "M", u"ضر"),
(0xfd11, "M", u"طى"),
(0xfd12, "M", u"طي"),
(0xfd13, "M", u"عى"),
(0xfd14, "M", u"عي"),
(0xfd15, "M", u"غى"),
(0xfd16, "M", u"غي"),
(0xfd17, "M", u"سى"),
(0xfd18, "M", u"سي"),
(0xfd19, "M", u"شى"),
(0xfd1a, "M", u"شي"),
(0xfd1b, "M", u"حى"),
(0xfd1c, "M", u"حي"),
(0xfd1d, "M", u"جى"),
(0xfd1e, "M", u"جي"),
(0xfd1f, "M", u"خى"),
(0xfd20, "M", u"خي"),
(0xfd21, "M", u"صى"),
(0xfd22, "M", u"صي"),
(0xfd23, "M", u"ضى"),
(0xfd24, "M", u"ضي"),
(0xfd25, "M", u"شج"),
(0xfd26, "M", u"شح"),
(0xfd27, "M", u"شخ"),
(0xfd28, "M", u"شم"),
]
def _seg_48():
return [
(0xfd29, "M", u"شر"),
(0xfd2a, "M", u"سر"),
(0xfd2b, "M", u"صر"),
(0xfd2c, "M", u"ضر"),
(0xfd2d, "M", u"شج"),
(0xfd2e, "M", u"شح"),
(0xfd2f, "M", u"شخ"),
(0xfd30, "M", u"شم"),
(0xfd31, "M", u"سه"),
(0xfd32, "M", u"شه"),
(0xfd33, "M", u"طم"),
(0xfd34, "M", u"سج"),
(0xfd35, "M", u"سح"),
(0xfd36, "M", u"سخ"),
(0xfd37, "M", u"شج"),
(0xfd38, "M", u"شح"),
(0xfd39, "M", u"شخ"),
(0xfd3a, "M", u"طم"),
(0xfd3b, "M", u"ظم"),
(0xfd3c, "M", u"اً"),
(0xfd3e, "V"),
(0xfd40, "X"),
(0xfd50, "M", u"تجم"),
(0xfd51, "M", u"تحج"),
(0xfd53, "M", u"تحم"),
(0xfd54, "M", u"تخم"),
(0xfd55, "M", u"تمج"),
(0xfd56, "M", u"تمح"),
(0xfd57, "M", u"تمخ"),
(0xfd58, "M", u"جمح"),
(0xfd5a, "M", u"حمي"),
(0xfd5b, "M", u"حمى"),
(0xfd5c, "M", u"سحج"),
(0xfd5d, "M", u"سجح"),
(0xfd5e, "M", u"سجى"),
(0xfd5f, "M", u"سمح"),
(0xfd61, "M", u"سمج"),
(0xfd62, "M", u"سمم"),
(0xfd64, "M", u"صحح"),
(0xfd66, "M", u"صمم"),
(0xfd67, "M", u"شحم"),
(0xfd69, "M", u"شجي"),
(0xfd6a, "M", u"شمخ"),
(0xfd6c, "M", u"شمم"),
(0xfd6e, "M", u"ضحى"),
(0xfd6f, "M", u"ضخم"),
(0xfd71, "M", u"طمح"),
(0xfd73, "M", u"طمم"),
(0xfd74, "M", u"طمي"),
(0xfd75, "M", u"عجم"),
(0xfd76, "M", u"عمم"),
(0xfd78, "M", u"عمى"),
(0xfd79, "M", u"غمم"),
(0xfd7a, "M", u"غمي"),
(0xfd7b, "M", u"غمى"),
(0xfd7c, "M", u"فخم"),
(0xfd7e, "M", u"قمح"),
(0xfd7f, "M", u"قمم"),
(0xfd80, "M", u"لحم"),
(0xfd81, "M", u"لحي"),
(0xfd82, "M", u"لحى"),
(0xfd83, "M", u"لجج"),
(0xfd85, "M", u"لخم"),
(0xfd87, "M", u"لمح"),
(0xfd89, "M", u"محج"),
(0xfd8a, "M", u"محم"),
(0xfd8b, "M", u"محي"),
(0xfd8c, "M", u"مجح"),
(0xfd8d, "M", u"مجم"),
(0xfd8e, "M", u"مخج"),
(0xfd8f, "M", u"مخم"),
(0xfd90, "X"),
(0xfd92, "M", u"مجخ"),
(0xfd93, "M", u"همج"),
(0xfd94, "M", u"همم"),
(0xfd95, "M", u"نحم"),
(0xfd96, "M", u"نحى"),
(0xfd97, "M", u"نجم"),
(0xfd99, "M", u"نجى"),
(0xfd9a, "M", u"نمي"),
(0xfd9b, "M", u"نمى"),
(0xfd9c, "M", u"يمم"),
(0xfd9e, "M", u"بخي"),
(0xfd9f, "M", u"تجي"),
(0xfda0, "M", u"تجى"),
(0xfda1, "M", u"تخي"),
(0xfda2, "M", u"تخى"),
(0xfda3, "M", u"تمي"),
(0xfda4, "M", u"تمى"),
(0xfda5, "M", u"جمي"),
(0xfda6, "M", u"جحى"),
(0xfda7, "M", u"جمى"),
(0xfda8, "M", u"سخى"),
(0xfda9, "M", u"صحي"),
(0xfdaa, "M", u"شحي"),
(0xfdab, "M", u"ضحي"),
(0xfdac, "M", u"لجي"),
(0xfdad, "M", u"لمي"),
(0xfdae, "M", u"يحي"),
(0xfdaf, "M", u"يجي"),
]
def _seg_49():
return [
(0xfdb0, "M", u"يمي"),
(0xfdb1, "M", u"ممي"),
(0xfdb2, "M", u"قمي"),
(0xfdb3, "M", u"نحي"),
(0xfdb4, "M", u"قمح"),
(0xfdb5, "M", u"لحم"),
(0xfdb6, "M", u"عمي"),
(0xfdb7, "M", u"كمي"),
(0xfdb8, "M", u"نجح"),
(0xfdb9, "M", u"مخي"),
(0xfdba, "M", u"لجم"),
(0xfdbb, "M", u"كمم"),
(0xfdbc, "M", u"لجم"),
(0xfdbd, "M", u"نجح"),
(0xfdbe, "M", u"جحي"),
(0xfdbf, "M", u"حجي"),
(0xfdc0, "M", u"مجي"),
(0xfdc1, "M", u"فمي"),
(0xfdc2, "M", u"بحي"),
(0xfdc3, "M", u"كمم"),
(0xfdc4, "M", u"عجم"),
(0xfdc5, "M", u"صمم"),
(0xfdc6, "M", u"سخي"),
(0xfdc7, "M", u"نجي"),
(0xfdc8, "X"),
(0xfdf0, "M", u"صلے"),
(0xfdf1, "M", u"قلے"),
(0xfdf2, "M", u"الله"),
(0xfdf3, "M", u"اكبر"),
(0xfdf4, "M", u"محمد"),
(0xfdf5, "M", u"صلعم"),
(0xfdf6, "M", u"رسول"),
(0xfdf7, "M", u"عليه"),
(0xfdf8, "M", u"وسلم"),
(0xfdf9, "M", u"صلى"),
(0xfdfa, "3", u"صلى الله عليه وسلم"),
(0xfdfb, "3", u"جل جلاله"),
(0xfdfc, "M", u"ریال"),
(0xfdfd, "V"),
(0xfdfe, "X"),
(0xfe00, "I"),
(0xfe10, "3", u","),
(0xfe11, "M", u"、"),
(0xfe12, "X"),
(0xfe13, "3", u":"),
(0xfe14, "3", u";"),
(0xfe15, "3", u"!"),
(0xfe16, "3", u"?"),
(0xfe17, "M", u"〖"),
(0xfe18, "M", u"〗"),
(0xfe19, "X"),
(0xfe20, "V"),
(0xfe30, "X"),
(0xfe31, "M", u"—"),
(0xfe32, "M", u"–"),
(0xfe33, "3", u"_"),
(0xfe35, "3", u"("),
(0xfe36, "3", u")"),
(0xfe37, "3", u"{"),
(0xfe38, "3", u"}"),
(0xfe39, "M", u"〔"),
(0xfe3a, "M", u"〕"),
(0xfe3b, "M", u"【"),
(0xfe3c, "M", u"】"),
(0xfe3d, "M", u"《"),
(0xfe3e, "M", u"》"),
(0xfe3f, "M", u"〈"),
(0xfe40, "M", u"〉"),
(0xfe41, "M", u"「"),
(0xfe42, "M", u"」"),
(0xfe43, "M", u"『"),
(0xfe44, "M", u"』"),
(0xfe45, "V"),
(0xfe47, "3", u"["),
(0xfe48, "3", u"]"),
(0xfe49, "3", u" ̅"),
(0xfe4d, "3", u"_"),
(0xfe50, "3", u","),
(0xfe51, "M", u"、"),
(0xfe52, "X"),
(0xfe54, "3", u";"),
(0xfe55, "3", u":"),
(0xfe56, "3", u"?"),
(0xfe57, "3", u"!"),
(0xfe58, "M", u"—"),
(0xfe59, "3", u"("),
(0xfe5a, "3", u")"),
(0xfe5b, "3", u"{"),
(0xfe5c, "3", u"}"),
(0xfe5d, "M", u"〔"),
(0xfe5e, "M", u"〕"),
(0xfe5f, "3", u"#"),
(0xfe60, "3", u"&"),
(0xfe61, "3", u"*"),
(0xfe62, "3", u"+"),
(0xfe63, "M", u"-"),
(0xfe64, "3", u"<"),
(0xfe65, "3", u">"),
(0xfe66, "3", u"="),
(0xfe67, "X"),
]
def _seg_50():
return [
(0xfe68, "3", u"\\"),
(0xfe69, "3", u"$"),
(0xfe6a, "3", u"%"),
(0xfe6b, "3", u"@"),
(0xfe6c, "X"),
(0xfe70, "3", u" ً"),
(0xfe71, "M", u"ـً"),
(0xfe72, "3", u" ٌ"),
(0xfe73, "V"),
(0xfe74, "3", u" ٍ"),
(0xfe75, "X"),
(0xfe76, "3", u" َ"),
(0xfe77, "M", u"ـَ"),
(0xfe78, "3", u" ُ"),
(0xfe79, "M", u"ـُ"),
(0xfe7a, "3", u" ِ"),
(0xfe7b, "M", u"ـِ"),
(0xfe7c, "3", u" ّ"),
(0xfe7d, "M", u"ـّ"),
(0xfe7e, "3", u" ْ"),
(0xfe7f, "M", u"ـْ"),
(0xfe80, "M", u"ء"),
(0xfe81, "M", u"آ"),
(0xfe83, "M", u"أ"),
(0xfe85, "M", u"ؤ"),
(0xfe87, "M", u"إ"),
(0xfe89, "M", u"ئ"),
(0xfe8d, "M", u"ا"),
(0xfe8f, "M", u"ب"),
(0xfe93, "M", u"ة"),
(0xfe95, "M", u"ت"),
(0xfe99, "M", u"ث"),
(0xfe9d, "M", u"ج"),
(0xfea1, "M", u"ح"),
(0xfea5, "M", u"خ"),
(0xfea9, "M", u"د"),
(0xfeab, "M", u"ذ"),
(0xfead, "M", u"ر"),
(0xfeaf, "M", u"ز"),
(0xfeb1, "M", u"س"),
(0xfeb5, "M", u"ش"),
(0xfeb9, "M", u"ص"),
(0xfebd, "M", u"ض"),
(0xfec1, "M", u"ط"),
(0xfec5, "M", u"ظ"),
(0xfec9, "M", u"ع"),
(0xfecd, "M", u"غ"),
(0xfed1, "M", u"ف"),
(0xfed5, "M", u"ق"),
(0xfed9, "M", u"ك"),
(0xfedd, "M", u"ل"),
(0xfee1, "M", u"م"),
(0xfee5, "M", u"ن"),
(0xfee9, "M", u"ه"),
(0xfeed, "M", u"و"),
(0xfeef, "M", u"ى"),
(0xfef1, "M", u"ي"),
(0xfef5, "M", u"لآ"),
(0xfef7, "M", u"لأ"),
(0xfef9, "M", u"لإ"),
(0xfefb, "M", u"لا"),
(0xfefd, "X"),
(0xfeff, "I"),
(0xff00, "X"),
(0xff01, "3", u"!"),
(0xff02, "3", u'"'),
(0xff03, "3", u"#"),
(0xff04, "3", u"$"),
(0xff05, "3", u"%"),
(0xff06, "3", u"&"),
(0xff07, "3", u"'"),
(0xff08, "3", u"("),
(0xff09, "3", u")"),
(0xff0a, "3", u"*"),
(0xff0b, "3", u"+"),
(0xff0c, "3", u","),
(0xff0d, "M", u"-"),
(0xff0e, "M", u"."),
(0xff0f, "3", u"/"),
(0xff10, "M", u"0"),
(0xff11, "M", u"1"),
(0xff12, "M", u"2"),
(0xff13, "M", u"3"),
(0xff14, "M", u"4"),
(0xff15, "M", u"5"),
(0xff16, "M", u"6"),
(0xff17, "M", u"7"),
(0xff18, "M", u"8"),
(0xff19, "M", u"9"),
(0xff1a, "3", u":"),
(0xff1b, "3", u";"),
(0xff1c, "3", u"<"),
(0xff1d, "3", u"="),
(0xff1e, "3", u">"),
(0xff1f, "3", u"?"),
(0xff20, "3", u"@"),
(0xff21, "M", u"a"),
(0xff22, "M", u"b"),
(0xff23, "M", u"c"),
(0xff24, "M", u"d"),
]
def _seg_51():
return [
(0xff25, "M", u"e"),
(0xff26, "M", u"f"),
(0xff27, "M", u"g"),
(0xff28, "M", u"h"),
(0xff29, "M", u"i"),
(0xff2a, "M", u"j"),
(0xff2b, "M", u"k"),
(0xff2c, "M", u"l"),
(0xff2d, "M", u"m"),
(0xff2e, "M", u"n"),
(0xff2f, "M", u"o"),
(0xff30, "M", u"p"),
(0xff31, "M", u"q"),
(0xff32, "M", u"r"),
(0xff33, "M", u"s"),
(0xff34, "M", u"t"),
(0xff35, "M", u"u"),
(0xff36, "M", u"v"),
(0xff37, "M", u"w"),
(0xff38, "M", u"x"),
(0xff39, "M", u"y"),
(0xff3a, "M", u"z"),
(0xff3b, "3", u"["),
(0xff3c, "3", u"\\"),
(0xff3d, "3", u"]"),
(0xff3e, "3", u"^"),
(0xff3f, "3", u"_"),
(0xff40, "3", u"`"),
(0xff41, "M", u"a"),
(0xff42, "M", u"b"),
(0xff43, "M", u"c"),
(0xff44, "M", u"d"),
(0xff45, "M", u"e"),
(0xff46, "M", u"f"),
(0xff47, "M", u"g"),
(0xff48, "M", u"h"),
(0xff49, "M", u"i"),
(0xff4a, "M", u"j"),
(0xff4b, "M", u"k"),
(0xff4c, "M", u"l"),
(0xff4d, "M", u"m"),
(0xff4e, "M", u"n"),
(0xff4f, "M", u"o"),
(0xff50, "M", u"p"),
(0xff51, "M", u"q"),
(0xff52, "M", u"r"),
(0xff53, "M", u"s"),
(0xff54, "M", u"t"),
(0xff55, "M", u"u"),
(0xff56, "M", u"v"),
(0xff57, "M", u"w"),
(0xff58, "M", u"x"),
(0xff59, "M", u"y"),
(0xff5a, "M", u"z"),
(0xff5b, "3", u"{"),
(0xff5c, "3", u"|"),
(0xff5d, "3", u"}"),
(0xff5e, "3", u"~"),
(0xff5f, "M", u"⦅"),
(0xff60, "M", u"⦆"),
(0xff61, "M", u"."),
(0xff62, "M", u"「"),
(0xff63, "M", u"」"),
(0xff64, "M", u"、"),
(0xff65, "M", u"・"),
(0xff66, "M", u"ヲ"),
(0xff67, "M", u"ァ"),
(0xff68, "M", u"ィ"),
(0xff69, "M", u"ゥ"),
(0xff6a, "M", u"ェ"),
(0xff6b, "M", u"ォ"),
(0xff6c, "M", u"ャ"),
(0xff6d, "M", u"ュ"),
(0xff6e, "M", u"ョ"),
(0xff6f, "M", u"ッ"),
(0xff70, "M", u"ー"),
(0xff71, "M", u"ア"),
(0xff72, "M", u"イ"),
(0xff73, "M", u"ウ"),
(0xff74, "M", u"エ"),
(0xff75, "M", u"オ"),
(0xff76, "M", u"カ"),
(0xff77, "M", u"キ"),
(0xff78, "M", u"ク"),
(0xff79, "M", u"ケ"),
(0xff7a, "M", u"コ"),
(0xff7b, "M", u"サ"),
(0xff7c, "M", u"シ"),
(0xff7d, "M", u"ス"),
(0xff7e, "M", u"セ"),
(0xff7f, "M", u"ソ"),
(0xff80, "M", u"タ"),
(0xff81, "M", u"チ"),
(0xff82, "M", u"ツ"),
(0xff83, "M", u"テ"),
(0xff84, "M", u"ト"),
(0xff85, "M", u"ナ"),
(0xff86, "M", u"ニ"),
(0xff87, "M", u"ヌ"),
(0xff88, "M", u"ネ"),
]
def _seg_52():
return [
(0xff89, "M", u"ノ"),
(0xff8a, "M", u"ハ"),
(0xff8b, "M", u"ヒ"),
(0xff8c, "M", u"フ"),
(0xff8d, "M", u"ヘ"),
(0xff8e, "M", u"ホ"),
(0xff8f, "M", u"マ"),
(0xff90, "M", u"ミ"),
(0xff91, "M", u"ム"),
(0xff92, "M", u"メ"),
(0xff93, "M", u"モ"),
(0xff94, "M", u"ヤ"),
(0xff95, "M", u"ユ"),
(0xff96, "M", u"ヨ"),
(0xff97, "M", u"ラ"),
(0xff98, "M", u"リ"),
(0xff99, "M", u"ル"),
(0xff9a, "M", u"レ"),
(0xff9b, "M", u"ロ"),
(0xff9c, "M", u"ワ"),
(0xff9d, "M", u"ン"),
(0xff9e, "M", u"゙"),
(0xff9f, "M", u"゚"),
(0xffa0, "X"),
(0xffa1, "M", u"ᄀ"),
(0xffa2, "M", u"ᄁ"),
(0xffa3, "M", u"ᆪ"),
(0xffa4, "M", u"ᄂ"),
(0xffa5, "M", u"ᆬ"),
(0xffa6, "M", u"ᆭ"),
(0xffa7, "M", u"ᄃ"),
(0xffa8, "M", u"ᄄ"),
(0xffa9, "M", u"ᄅ"),
(0xffaa, "M", u"ᆰ"),
(0xffab, "M", u"ᆱ"),
(0xffac, "M", u"ᆲ"),
(0xffad, "M", u"ᆳ"),
(0xffae, "M", u"ᆴ"),
(0xffaf, "M", u"ᆵ"),
(0xffb0, "M", u"ᄚ"),
(0xffb1, "M", u"ᄆ"),
(0xffb2, "M", u"ᄇ"),
(0xffb3, "M", u"ᄈ"),
(0xffb4, "M", u"ᄡ"),
(0xffb5, "M", u"ᄉ"),
(0xffb6, "M", u"ᄊ"),
(0xffb7, "M", u"ᄋ"),
(0xffb8, "M", u"ᄌ"),
(0xffb9, "M", u"ᄍ"),
(0xffba, "M", u"ᄎ"),
(0xffbb, "M", u"ᄏ"),
(0xffbc, "M", u"ᄐ"),
(0xffbd, "M", u"ᄑ"),
(0xffbe, "M", u"ᄒ"),
(0xffbf, "X"),
(0xffc2, "M", u"ᅡ"),
(0xffc3, "M", u"ᅢ"),
(0xffc4, "M", u"ᅣ"),
(0xffc5, "M", u"ᅤ"),
(0xffc6, "M", u"ᅥ"),
(0xffc7, "M", u"ᅦ"),
(0xffc8, "X"),
(0xffca, "M", u"ᅧ"),
(0xffcb, "M", u"ᅨ"),
(0xffcc, "M", u"ᅩ"),
(0xffcd, "M", u"ᅪ"),
(0xffce, "M", u"ᅫ"),
(0xffcf, "M", u"ᅬ"),
(0xffd0, "X"),
(0xffd2, "M", u"ᅭ"),
(0xffd3, "M", u"ᅮ"),
(0xffd4, "M", u"ᅯ"),
(0xffd5, "M", u"ᅰ"),
(0xffd6, "M", u"ᅱ"),
(0xffd7, "M", u"ᅲ"),
(0xffd8, "X"),
(0xffda, "M", u"ᅳ"),
(0xffdb, "M", u"ᅴ"),
(0xffdc, "M", u"ᅵ"),
(0xffdd, "X"),
(0xffe0, "M", u"¢"),
(0xffe1, "M", u"£"),
(0xffe2, "M", u"¬"),
(0xffe3, "3", u" ̄"),
(0xffe4, "M", u"¦"),
(0xffe5, "M", u"¥"),
(0xffe6, "M", u"₩"),
(0xffe7, "X"),
(0xffe8, "M", u"│"),
(0xffe9, "M", u"←"),
(0xffea, "M", u"↑"),
(0xffeb, "M", u"→"),
(0xffec, "M", u"↓"),
(0xffed, "M", u"■"),
(0xffee, "M", u"○"),
(0xffef, "X"),
(0x10000, "V"),
(0x1000c, "X"),
(0x1000d, "V"),
(0x10027, "X"),
]
def _seg_53():
return [
(0x10028, "V"),
(0x1003b, "X"),
(0x1003c, "V"),
(0x1003e, "X"),
(0x1003f, "V"),
(0x1004e, "X"),
(0x10050, "V"),
(0x1005e, "X"),
(0x10080, "V"),
(0x100fb, "X"),
(0x10100, "V"),
(0x10103, "X"),
(0x10107, "V"),
(0x10134, "X"),
(0x10137, "V"),
(0x1018f, "X"),
(0x10190, "V"),
(0x1019c, "X"),
(0x101a0, "V"),
(0x101a1, "X"),
(0x101d0, "V"),
(0x101fe, "X"),
(0x10280, "V"),
(0x1029d, "X"),
(0x102a0, "V"),
(0x102d1, "X"),
(0x102e0, "V"),
(0x102fc, "X"),
(0x10300, "V"),
(0x10324, "X"),
(0x1032d, "V"),
(0x1034b, "X"),
(0x10350, "V"),
(0x1037b, "X"),
(0x10380, "V"),
(0x1039e, "X"),
(0x1039f, "V"),
(0x103c4, "X"),
(0x103c8, "V"),
(0x103d6, "X"),
(0x10400, "M", u"𐐨"),
(0x10401, "M", u"𐐩"),
(0x10402, "M", u"𐐪"),
(0x10403, "M", u"𐐫"),
(0x10404, "M", u"𐐬"),
(0x10405, "M", u"𐐭"),
(0x10406, "M", u"𐐮"),
(0x10407, "M", u"𐐯"),
(0x10408, "M", u"𐐰"),
(0x10409, "M", u"𐐱"),
(0x1040a, "M", u"𐐲"),
(0x1040b, "M", u"𐐳"),
(0x1040c, "M", u"𐐴"),
(0x1040d, "M", u"𐐵"),
(0x1040e, "M", u"𐐶"),
(0x1040f, "M", u"𐐷"),
(0x10410, "M", u"𐐸"),
(0x10411, "M", u"𐐹"),
(0x10412, "M", u"𐐺"),
(0x10413, "M", u"𐐻"),
(0x10414, "M", u"𐐼"),
(0x10415, "M", u"𐐽"),
(0x10416, "M", u"𐐾"),
(0x10417, "M", u"𐐿"),
(0x10418, "M", u"𐑀"),
(0x10419, "M", u"𐑁"),
(0x1041a, "M", u"𐑂"),
(0x1041b, "M", u"𐑃"),
(0x1041c, "M", u"𐑄"),
(0x1041d, "M", u"𐑅"),
(0x1041e, "M", u"𐑆"),
(0x1041f, "M", u"𐑇"),
(0x10420, "M", u"𐑈"),
(0x10421, "M", u"𐑉"),
(0x10422, "M", u"𐑊"),
(0x10423, "M", u"𐑋"),
(0x10424, "M", u"𐑌"),
(0x10425, "M", u"𐑍"),
(0x10426, "M", u"𐑎"),
(0x10427, "M", u"𐑏"),
(0x10428, "V"),
(0x1049e, "X"),
(0x104a0, "V"),
(0x104aa, "X"),
(0x104b0, "M", u"𐓘"),
(0x104b1, "M", u"𐓙"),
(0x104b2, "M", u"𐓚"),
(0x104b3, "M", u"𐓛"),
(0x104b4, "M", u"𐓜"),
(0x104b5, "M", u"𐓝"),
(0x104b6, "M", u"𐓞"),
(0x104b7, "M", u"𐓟"),
(0x104b8, "M", u"𐓠"),
(0x104b9, "M", u"𐓡"),
(0x104ba, "M", u"𐓢"),
(0x104bb, "M", u"𐓣"),
(0x104bc, "M", u"𐓤"),
(0x104bd, "M", u"𐓥"),
(0x104be, "M", u"𐓦"),
(0x104bf, "M", u"𐓧"),
]
def _seg_54():
    """Return segment 54 of the generated IDNA UTS #46 mapping table.

    Each tuple is (codepoint, status[, mapping]): "V" = valid, "X" =
    disallowed, "M" = mapped to the replacement string, "I" = ignored.
    Machine-generated from Unicode data -- do not edit entries by hand.
    """
    return [
        (0x104c0, "M", u"𐓨"),
        (0x104c1, "M", u"𐓩"),
        (0x104c2, "M", u"𐓪"),
        (0x104c3, "M", u"𐓫"),
        (0x104c4, "M", u"𐓬"),
        (0x104c5, "M", u"𐓭"),
        (0x104c6, "M", u"𐓮"),
        (0x104c7, "M", u"𐓯"),
        (0x104c8, "M", u"𐓰"),
        (0x104c9, "M", u"𐓱"),
        (0x104ca, "M", u"𐓲"),
        (0x104cb, "M", u"𐓳"),
        (0x104cc, "M", u"𐓴"),
        (0x104cd, "M", u"𐓵"),
        (0x104ce, "M", u"𐓶"),
        (0x104cf, "M", u"𐓷"),
        (0x104d0, "M", u"𐓸"),
        (0x104d1, "M", u"𐓹"),
        (0x104d2, "M", u"𐓺"),
        (0x104d3, "M", u"𐓻"),
        (0x104d4, "X"),
        (0x104d8, "V"),
        (0x104fc, "X"),
        (0x10500, "V"),
        (0x10528, "X"),
        (0x10530, "V"),
        (0x10564, "X"),
        (0x1056f, "V"),
        (0x10570, "X"),
        (0x10600, "V"),
        (0x10737, "X"),
        (0x10740, "V"),
        (0x10756, "X"),
        (0x10760, "V"),
        (0x10768, "X"),
        (0x10800, "V"),
        (0x10806, "X"),
        (0x10808, "V"),
        (0x10809, "X"),
        (0x1080a, "V"),
        (0x10836, "X"),
        (0x10837, "V"),
        (0x10839, "X"),
        (0x1083c, "V"),
        (0x1083d, "X"),
        (0x1083f, "V"),
        (0x10856, "X"),
        (0x10857, "V"),
        (0x1089f, "X"),
        (0x108a7, "V"),
        (0x108b0, "X"),
        (0x108e0, "V"),
        (0x108f3, "X"),
        (0x108f4, "V"),
        (0x108f6, "X"),
        (0x108fb, "V"),
        (0x1091c, "X"),
        (0x1091f, "V"),
        (0x1093a, "X"),
        (0x1093f, "V"),
        (0x10940, "X"),
        (0x10980, "V"),
        (0x109b8, "X"),
        (0x109bc, "V"),
        (0x109d0, "X"),
        (0x109d2, "V"),
        (0x10a04, "X"),
        (0x10a05, "V"),
        (0x10a07, "X"),
        (0x10a0c, "V"),
        (0x10a14, "X"),
        (0x10a15, "V"),
        (0x10a18, "X"),
        (0x10a19, "V"),
        (0x10a36, "X"),
        (0x10a38, "V"),
        (0x10a3b, "X"),
        (0x10a3f, "V"),
        (0x10a49, "X"),
        (0x10a50, "V"),
        (0x10a59, "X"),
        (0x10a60, "V"),
        (0x10aa0, "X"),
        (0x10ac0, "V"),
        (0x10ae7, "X"),
        (0x10aeb, "V"),
        (0x10af7, "X"),
        (0x10b00, "V"),
        (0x10b36, "X"),
        (0x10b39, "V"),
        (0x10b56, "X"),
        (0x10b58, "V"),
        (0x10b73, "X"),
        (0x10b78, "V"),
        (0x10b92, "X"),
        (0x10b99, "V"),
        (0x10b9d, "X"),
        (0x10ba9, "V"),
        (0x10bb0, "X"),
        (0x10c00, "V"),
    ]
def _seg_55():
    """Return segment 55 of the generated IDNA UTS #46 mapping table.

    Each tuple is (codepoint, status[, mapping]): "V" = valid, "X" =
    disallowed, "M" = mapped to the replacement string, "I" = ignored.
    Machine-generated from Unicode data -- do not edit entries by hand.
    """
    return [
        (0x10c49, "X"),
        (0x10c80, "M", u"𐳀"),
        (0x10c81, "M", u"𐳁"),
        (0x10c82, "M", u"𐳂"),
        (0x10c83, "M", u"𐳃"),
        (0x10c84, "M", u"𐳄"),
        (0x10c85, "M", u"𐳅"),
        (0x10c86, "M", u"𐳆"),
        (0x10c87, "M", u"𐳇"),
        (0x10c88, "M", u"𐳈"),
        (0x10c89, "M", u"𐳉"),
        (0x10c8a, "M", u"𐳊"),
        (0x10c8b, "M", u"𐳋"),
        (0x10c8c, "M", u"𐳌"),
        (0x10c8d, "M", u"𐳍"),
        (0x10c8e, "M", u"𐳎"),
        (0x10c8f, "M", u"𐳏"),
        (0x10c90, "M", u"𐳐"),
        (0x10c91, "M", u"𐳑"),
        (0x10c92, "M", u"𐳒"),
        (0x10c93, "M", u"𐳓"),
        (0x10c94, "M", u"𐳔"),
        (0x10c95, "M", u"𐳕"),
        (0x10c96, "M", u"𐳖"),
        (0x10c97, "M", u"𐳗"),
        (0x10c98, "M", u"𐳘"),
        (0x10c99, "M", u"𐳙"),
        (0x10c9a, "M", u"𐳚"),
        (0x10c9b, "M", u"𐳛"),
        (0x10c9c, "M", u"𐳜"),
        (0x10c9d, "M", u"𐳝"),
        (0x10c9e, "M", u"𐳞"),
        (0x10c9f, "M", u"𐳟"),
        (0x10ca0, "M", u"𐳠"),
        (0x10ca1, "M", u"𐳡"),
        (0x10ca2, "M", u"𐳢"),
        (0x10ca3, "M", u"𐳣"),
        (0x10ca4, "M", u"𐳤"),
        (0x10ca5, "M", u"𐳥"),
        (0x10ca6, "M", u"𐳦"),
        (0x10ca7, "M", u"𐳧"),
        (0x10ca8, "M", u"𐳨"),
        (0x10ca9, "M", u"𐳩"),
        (0x10caa, "M", u"𐳪"),
        (0x10cab, "M", u"𐳫"),
        (0x10cac, "M", u"𐳬"),
        (0x10cad, "M", u"𐳭"),
        (0x10cae, "M", u"𐳮"),
        (0x10caf, "M", u"𐳯"),
        (0x10cb0, "M", u"𐳰"),
        (0x10cb1, "M", u"𐳱"),
        (0x10cb2, "M", u"𐳲"),
        (0x10cb3, "X"),
        (0x10cc0, "V"),
        (0x10cf3, "X"),
        (0x10cfa, "V"),
        (0x10d28, "X"),
        (0x10d30, "V"),
        (0x10d3a, "X"),
        (0x10e60, "V"),
        (0x10e7f, "X"),
        (0x10f00, "V"),
        (0x10f28, "X"),
        (0x10f30, "V"),
        (0x10f5a, "X"),
        (0x10fe0, "V"),
        (0x10ff7, "X"),
        (0x11000, "V"),
        (0x1104e, "X"),
        (0x11052, "V"),
        (0x11070, "X"),
        (0x1107f, "V"),
        (0x110bd, "X"),
        (0x110be, "V"),
        (0x110c2, "X"),
        (0x110d0, "V"),
        (0x110e9, "X"),
        (0x110f0, "V"),
        (0x110fa, "X"),
        (0x11100, "V"),
        (0x11135, "X"),
        (0x11136, "V"),
        (0x11147, "X"),
        (0x11150, "V"),
        (0x11177, "X"),
        (0x11180, "V"),
        (0x111ce, "X"),
        (0x111d0, "V"),
        (0x111e0, "X"),
        (0x111e1, "V"),
        (0x111f5, "X"),
        (0x11200, "V"),
        (0x11212, "X"),
        (0x11213, "V"),
        (0x1123f, "X"),
        (0x11280, "V"),
        (0x11287, "X"),
        (0x11288, "V"),
        (0x11289, "X"),
        (0x1128a, "V"),
    ]
def _seg_56():
    """Return segment 56 of the generated IDNA UTS #46 mapping table.

    Each tuple is (codepoint, status[, mapping]): "V" = valid, "X" =
    disallowed, "M" = mapped to the replacement string, "I" = ignored.
    Machine-generated from Unicode data -- do not edit entries by hand.
    """
    return [
        (0x1128e, "X"),
        (0x1128f, "V"),
        (0x1129e, "X"),
        (0x1129f, "V"),
        (0x112aa, "X"),
        (0x112b0, "V"),
        (0x112eb, "X"),
        (0x112f0, "V"),
        (0x112fa, "X"),
        (0x11300, "V"),
        (0x11304, "X"),
        (0x11305, "V"),
        (0x1130d, "X"),
        (0x1130f, "V"),
        (0x11311, "X"),
        (0x11313, "V"),
        (0x11329, "X"),
        (0x1132a, "V"),
        (0x11331, "X"),
        (0x11332, "V"),
        (0x11334, "X"),
        (0x11335, "V"),
        (0x1133a, "X"),
        (0x1133b, "V"),
        (0x11345, "X"),
        (0x11347, "V"),
        (0x11349, "X"),
        (0x1134b, "V"),
        (0x1134e, "X"),
        (0x11350, "V"),
        (0x11351, "X"),
        (0x11357, "V"),
        (0x11358, "X"),
        (0x1135d, "V"),
        (0x11364, "X"),
        (0x11366, "V"),
        (0x1136d, "X"),
        (0x11370, "V"),
        (0x11375, "X"),
        (0x11400, "V"),
        (0x1145a, "X"),
        (0x1145b, "V"),
        (0x1145c, "X"),
        (0x1145d, "V"),
        (0x11460, "X"),
        (0x11480, "V"),
        (0x114c8, "X"),
        (0x114d0, "V"),
        (0x114da, "X"),
        (0x11580, "V"),
        (0x115b6, "X"),
        (0x115b8, "V"),
        (0x115de, "X"),
        (0x11600, "V"),
        (0x11645, "X"),
        (0x11650, "V"),
        (0x1165a, "X"),
        (0x11660, "V"),
        (0x1166d, "X"),
        (0x11680, "V"),
        (0x116b9, "X"),
        (0x116c0, "V"),
        (0x116ca, "X"),
        (0x11700, "V"),
        (0x1171b, "X"),
        (0x1171d, "V"),
        (0x1172c, "X"),
        (0x11730, "V"),
        (0x11740, "X"),
        (0x11800, "V"),
        (0x1183c, "X"),
        (0x118a0, "M", u"𑣀"),
        (0x118a1, "M", u"𑣁"),
        (0x118a2, "M", u"𑣂"),
        (0x118a3, "M", u"𑣃"),
        (0x118a4, "M", u"𑣄"),
        (0x118a5, "M", u"𑣅"),
        (0x118a6, "M", u"𑣆"),
        (0x118a7, "M", u"𑣇"),
        (0x118a8, "M", u"𑣈"),
        (0x118a9, "M", u"𑣉"),
        (0x118aa, "M", u"𑣊"),
        (0x118ab, "M", u"𑣋"),
        (0x118ac, "M", u"𑣌"),
        (0x118ad, "M", u"𑣍"),
        (0x118ae, "M", u"𑣎"),
        (0x118af, "M", u"𑣏"),
        (0x118b0, "M", u"𑣐"),
        (0x118b1, "M", u"𑣑"),
        (0x118b2, "M", u"𑣒"),
        (0x118b3, "M", u"𑣓"),
        (0x118b4, "M", u"𑣔"),
        (0x118b5, "M", u"𑣕"),
        (0x118b6, "M", u"𑣖"),
        (0x118b7, "M", u"𑣗"),
        (0x118b8, "M", u"𑣘"),
        (0x118b9, "M", u"𑣙"),
        (0x118ba, "M", u"𑣚"),
        (0x118bb, "M", u"𑣛"),
        (0x118bc, "M", u"𑣜"),
    ]
def _seg_57():
    """Return segment 57 of the generated IDNA UTS #46 mapping table.

    Each tuple is (codepoint, status[, mapping]): "V" = valid, "X" =
    disallowed, "M" = mapped to the replacement string, "I" = ignored.
    Machine-generated from Unicode data -- do not edit entries by hand.
    """
    return [
        (0x118bd, "M", u"𑣝"),
        (0x118be, "M", u"𑣞"),
        (0x118bf, "M", u"𑣟"),
        (0x118c0, "V"),
        (0x118f3, "X"),
        (0x118ff, "V"),
        (0x11900, "X"),
        (0x119a0, "V"),
        (0x119a8, "X"),
        (0x119aa, "V"),
        (0x119d8, "X"),
        (0x119da, "V"),
        (0x119e5, "X"),
        (0x11a00, "V"),
        (0x11a48, "X"),
        (0x11a50, "V"),
        (0x11aa3, "X"),
        (0x11ac0, "V"),
        (0x11af9, "X"),
        (0x11c00, "V"),
        (0x11c09, "X"),
        (0x11c0a, "V"),
        (0x11c37, "X"),
        (0x11c38, "V"),
        (0x11c46, "X"),
        (0x11c50, "V"),
        (0x11c6d, "X"),
        (0x11c70, "V"),
        (0x11c90, "X"),
        (0x11c92, "V"),
        (0x11ca8, "X"),
        (0x11ca9, "V"),
        (0x11cb7, "X"),
        (0x11d00, "V"),
        (0x11d07, "X"),
        (0x11d08, "V"),
        (0x11d0a, "X"),
        (0x11d0b, "V"),
        (0x11d37, "X"),
        (0x11d3a, "V"),
        (0x11d3b, "X"),
        (0x11d3c, "V"),
        (0x11d3e, "X"),
        (0x11d3f, "V"),
        (0x11d48, "X"),
        (0x11d50, "V"),
        (0x11d5a, "X"),
        (0x11d60, "V"),
        (0x11d66, "X"),
        (0x11d67, "V"),
        (0x11d69, "X"),
        (0x11d6a, "V"),
        (0x11d8f, "X"),
        (0x11d90, "V"),
        (0x11d92, "X"),
        (0x11d93, "V"),
        (0x11d99, "X"),
        (0x11da0, "V"),
        (0x11daa, "X"),
        (0x11ee0, "V"),
        (0x11ef9, "X"),
        (0x11fc0, "V"),
        (0x11ff2, "X"),
        (0x11fff, "V"),
        (0x1239a, "X"),
        (0x12400, "V"),
        (0x1246f, "X"),
        (0x12470, "V"),
        (0x12475, "X"),
        (0x12480, "V"),
        (0x12544, "X"),
        (0x13000, "V"),
        (0x1342f, "X"),
        (0x14400, "V"),
        (0x14647, "X"),
        (0x16800, "V"),
        (0x16a39, "X"),
        (0x16a40, "V"),
        (0x16a5f, "X"),
        (0x16a60, "V"),
        (0x16a6a, "X"),
        (0x16a6e, "V"),
        (0x16a70, "X"),
        (0x16ad0, "V"),
        (0x16aee, "X"),
        (0x16af0, "V"),
        (0x16af6, "X"),
        (0x16b00, "V"),
        (0x16b46, "X"),
        (0x16b50, "V"),
        (0x16b5a, "X"),
        (0x16b5b, "V"),
        (0x16b62, "X"),
        (0x16b63, "V"),
        (0x16b78, "X"),
        (0x16b7d, "V"),
        (0x16b90, "X"),
        (0x16e40, "M", u"𖹠"),
        (0x16e41, "M", u"𖹡"),
        (0x16e42, "M", u"𖹢"),
    ]
def _seg_58():
    """Return segment 58 of the generated IDNA UTS #46 mapping table.

    Each tuple is (codepoint, status[, mapping]): "V" = valid, "X" =
    disallowed, "M" = mapped to the replacement string, "I" = ignored.
    Machine-generated from Unicode data -- do not edit entries by hand.
    """
    return [
        (0x16e43, "M", u"𖹣"),
        (0x16e44, "M", u"𖹤"),
        (0x16e45, "M", u"𖹥"),
        (0x16e46, "M", u"𖹦"),
        (0x16e47, "M", u"𖹧"),
        (0x16e48, "M", u"𖹨"),
        (0x16e49, "M", u"𖹩"),
        (0x16e4a, "M", u"𖹪"),
        (0x16e4b, "M", u"𖹫"),
        (0x16e4c, "M", u"𖹬"),
        (0x16e4d, "M", u"𖹭"),
        (0x16e4e, "M", u"𖹮"),
        (0x16e4f, "M", u"𖹯"),
        (0x16e50, "M", u"𖹰"),
        (0x16e51, "M", u"𖹱"),
        (0x16e52, "M", u"𖹲"),
        (0x16e53, "M", u"𖹳"),
        (0x16e54, "M", u"𖹴"),
        (0x16e55, "M", u"𖹵"),
        (0x16e56, "M", u"𖹶"),
        (0x16e57, "M", u"𖹷"),
        (0x16e58, "M", u"𖹸"),
        (0x16e59, "M", u"𖹹"),
        (0x16e5a, "M", u"𖹺"),
        (0x16e5b, "M", u"𖹻"),
        (0x16e5c, "M", u"𖹼"),
        (0x16e5d, "M", u"𖹽"),
        (0x16e5e, "M", u"𖹾"),
        (0x16e5f, "M", u"𖹿"),
        (0x16e60, "V"),
        (0x16e9b, "X"),
        (0x16f00, "V"),
        (0x16f4b, "X"),
        (0x16f4f, "V"),
        (0x16f88, "X"),
        (0x16f8f, "V"),
        (0x16fa0, "X"),
        (0x16fe0, "V"),
        (0x16fe4, "X"),
        (0x17000, "V"),
        (0x187f8, "X"),
        (0x18800, "V"),
        (0x18af3, "X"),
        (0x1b000, "V"),
        (0x1b11f, "X"),
        (0x1b150, "V"),
        (0x1b153, "X"),
        (0x1b164, "V"),
        (0x1b168, "X"),
        (0x1b170, "V"),
        (0x1b2fc, "X"),
        (0x1bc00, "V"),
        (0x1bc6b, "X"),
        (0x1bc70, "V"),
        (0x1bc7d, "X"),
        (0x1bc80, "V"),
        (0x1bc89, "X"),
        (0x1bc90, "V"),
        (0x1bc9a, "X"),
        (0x1bc9c, "V"),
        (0x1bca0, "I"),
        (0x1bca4, "X"),
        (0x1d000, "V"),
        (0x1d0f6, "X"),
        (0x1d100, "V"),
        (0x1d127, "X"),
        (0x1d129, "V"),
        (0x1d15e, "M", u"𝅗𝅥"),
        (0x1d15f, "M", u"𝅘𝅥"),
        (0x1d160, "M", u"𝅘𝅥𝅮"),
        (0x1d161, "M", u"𝅘𝅥𝅯"),
        (0x1d162, "M", u"𝅘𝅥𝅰"),
        (0x1d163, "M", u"𝅘𝅥𝅱"),
        (0x1d164, "M", u"𝅘𝅥𝅲"),
        (0x1d165, "V"),
        (0x1d173, "X"),
        (0x1d17b, "V"),
        (0x1d1bb, "M", u"𝆹𝅥"),
        (0x1d1bc, "M", u"𝆺𝅥"),
        (0x1d1bd, "M", u"𝆹𝅥𝅮"),
        (0x1d1be, "M", u"𝆺𝅥𝅮"),
        (0x1d1bf, "M", u"𝆹𝅥𝅯"),
        (0x1d1c0, "M", u"𝆺𝅥𝅯"),
        (0x1d1c1, "V"),
        (0x1d1e9, "X"),
        (0x1d200, "V"),
        (0x1d246, "X"),
        (0x1d2e0, "V"),
        (0x1d2f4, "X"),
        (0x1d300, "V"),
        (0x1d357, "X"),
        (0x1d360, "V"),
        (0x1d379, "X"),
        (0x1d400, "M", u"a"),
        (0x1d401, "M", u"b"),
        (0x1d402, "M", u"c"),
        (0x1d403, "M", u"d"),
        (0x1d404, "M", u"e"),
        (0x1d405, "M", u"f"),
        (0x1d406, "M", u"g"),
    ]
def _seg_59():
    """Return segment 59 of the generated IDNA UTS #46 mapping table.

    Each tuple is (codepoint, status[, mapping]): "V" = valid, "X" =
    disallowed, "M" = mapped to the replacement string, "I" = ignored.
    This segment case-folds mathematical alphanumeric letters to ASCII.
    Machine-generated from Unicode data -- do not edit entries by hand.
    """
    return [
        (0x1d407, "M", u"h"),
        (0x1d408, "M", u"i"),
        (0x1d409, "M", u"j"),
        (0x1d40a, "M", u"k"),
        (0x1d40b, "M", u"l"),
        (0x1d40c, "M", u"m"),
        (0x1d40d, "M", u"n"),
        (0x1d40e, "M", u"o"),
        (0x1d40f, "M", u"p"),
        (0x1d410, "M", u"q"),
        (0x1d411, "M", u"r"),
        (0x1d412, "M", u"s"),
        (0x1d413, "M", u"t"),
        (0x1d414, "M", u"u"),
        (0x1d415, "M", u"v"),
        (0x1d416, "M", u"w"),
        (0x1d417, "M", u"x"),
        (0x1d418, "M", u"y"),
        (0x1d419, "M", u"z"),
        (0x1d41a, "M", u"a"),
        (0x1d41b, "M", u"b"),
        (0x1d41c, "M", u"c"),
        (0x1d41d, "M", u"d"),
        (0x1d41e, "M", u"e"),
        (0x1d41f, "M", u"f"),
        (0x1d420, "M", u"g"),
        (0x1d421, "M", u"h"),
        (0x1d422, "M", u"i"),
        (0x1d423, "M", u"j"),
        (0x1d424, "M", u"k"),
        (0x1d425, "M", u"l"),
        (0x1d426, "M", u"m"),
        (0x1d427, "M", u"n"),
        (0x1d428, "M", u"o"),
        (0x1d429, "M", u"p"),
        (0x1d42a, "M", u"q"),
        (0x1d42b, "M", u"r"),
        (0x1d42c, "M", u"s"),
        (0x1d42d, "M", u"t"),
        (0x1d42e, "M", u"u"),
        (0x1d42f, "M", u"v"),
        (0x1d430, "M", u"w"),
        (0x1d431, "M", u"x"),
        (0x1d432, "M", u"y"),
        (0x1d433, "M", u"z"),
        (0x1d434, "M", u"a"),
        (0x1d435, "M", u"b"),
        (0x1d436, "M", u"c"),
        (0x1d437, "M", u"d"),
        (0x1d438, "M", u"e"),
        (0x1d439, "M", u"f"),
        (0x1d43a, "M", u"g"),
        (0x1d43b, "M", u"h"),
        (0x1d43c, "M", u"i"),
        (0x1d43d, "M", u"j"),
        (0x1d43e, "M", u"k"),
        (0x1d43f, "M", u"l"),
        (0x1d440, "M", u"m"),
        (0x1d441, "M", u"n"),
        (0x1d442, "M", u"o"),
        (0x1d443, "M", u"p"),
        (0x1d444, "M", u"q"),
        (0x1d445, "M", u"r"),
        (0x1d446, "M", u"s"),
        (0x1d447, "M", u"t"),
        (0x1d448, "M", u"u"),
        (0x1d449, "M", u"v"),
        (0x1d44a, "M", u"w"),
        (0x1d44b, "M", u"x"),
        (0x1d44c, "M", u"y"),
        (0x1d44d, "M", u"z"),
        (0x1d44e, "M", u"a"),
        (0x1d44f, "M", u"b"),
        (0x1d450, "M", u"c"),
        (0x1d451, "M", u"d"),
        (0x1d452, "M", u"e"),
        (0x1d453, "M", u"f"),
        (0x1d454, "M", u"g"),
        (0x1d455, "X"),
        (0x1d456, "M", u"i"),
        (0x1d457, "M", u"j"),
        (0x1d458, "M", u"k"),
        (0x1d459, "M", u"l"),
        (0x1d45a, "M", u"m"),
        (0x1d45b, "M", u"n"),
        (0x1d45c, "M", u"o"),
        (0x1d45d, "M", u"p"),
        (0x1d45e, "M", u"q"),
        (0x1d45f, "M", u"r"),
        (0x1d460, "M", u"s"),
        (0x1d461, "M", u"t"),
        (0x1d462, "M", u"u"),
        (0x1d463, "M", u"v"),
        (0x1d464, "M", u"w"),
        (0x1d465, "M", u"x"),
        (0x1d466, "M", u"y"),
        (0x1d467, "M", u"z"),
        (0x1d468, "M", u"a"),
        (0x1d469, "M", u"b"),
        (0x1d46a, "M", u"c"),
    ]
def _seg_60():
    """Return segment 60 of the generated IDNA UTS #46 mapping table.

    Each tuple is (codepoint, status[, mapping]): "V" = valid, "X" =
    disallowed, "M" = mapped to the replacement string, "I" = ignored.
    This segment case-folds mathematical alphanumeric letters to ASCII.
    Machine-generated from Unicode data -- do not edit entries by hand.
    """
    return [
        (0x1d46b, "M", u"d"),
        (0x1d46c, "M", u"e"),
        (0x1d46d, "M", u"f"),
        (0x1d46e, "M", u"g"),
        (0x1d46f, "M", u"h"),
        (0x1d470, "M", u"i"),
        (0x1d471, "M", u"j"),
        (0x1d472, "M", u"k"),
        (0x1d473, "M", u"l"),
        (0x1d474, "M", u"m"),
        (0x1d475, "M", u"n"),
        (0x1d476, "M", u"o"),
        (0x1d477, "M", u"p"),
        (0x1d478, "M", u"q"),
        (0x1d479, "M", u"r"),
        (0x1d47a, "M", u"s"),
        (0x1d47b, "M", u"t"),
        (0x1d47c, "M", u"u"),
        (0x1d47d, "M", u"v"),
        (0x1d47e, "M", u"w"),
        (0x1d47f, "M", u"x"),
        (0x1d480, "M", u"y"),
        (0x1d481, "M", u"z"),
        (0x1d482, "M", u"a"),
        (0x1d483, "M", u"b"),
        (0x1d484, "M", u"c"),
        (0x1d485, "M", u"d"),
        (0x1d486, "M", u"e"),
        (0x1d487, "M", u"f"),
        (0x1d488, "M", u"g"),
        (0x1d489, "M", u"h"),
        (0x1d48a, "M", u"i"),
        (0x1d48b, "M", u"j"),
        (0x1d48c, "M", u"k"),
        (0x1d48d, "M", u"l"),
        (0x1d48e, "M", u"m"),
        (0x1d48f, "M", u"n"),
        (0x1d490, "M", u"o"),
        (0x1d491, "M", u"p"),
        (0x1d492, "M", u"q"),
        (0x1d493, "M", u"r"),
        (0x1d494, "M", u"s"),
        (0x1d495, "M", u"t"),
        (0x1d496, "M", u"u"),
        (0x1d497, "M", u"v"),
        (0x1d498, "M", u"w"),
        (0x1d499, "M", u"x"),
        (0x1d49a, "M", u"y"),
        (0x1d49b, "M", u"z"),
        (0x1d49c, "M", u"a"),
        (0x1d49d, "X"),
        (0x1d49e, "M", u"c"),
        (0x1d49f, "M", u"d"),
        (0x1d4a0, "X"),
        (0x1d4a2, "M", u"g"),
        (0x1d4a3, "X"),
        (0x1d4a5, "M", u"j"),
        (0x1d4a6, "M", u"k"),
        (0x1d4a7, "X"),
        (0x1d4a9, "M", u"n"),
        (0x1d4aa, "M", u"o"),
        (0x1d4ab, "M", u"p"),
        (0x1d4ac, "M", u"q"),
        (0x1d4ad, "X"),
        (0x1d4ae, "M", u"s"),
        (0x1d4af, "M", u"t"),
        (0x1d4b0, "M", u"u"),
        (0x1d4b1, "M", u"v"),
        (0x1d4b2, "M", u"w"),
        (0x1d4b3, "M", u"x"),
        (0x1d4b4, "M", u"y"),
        (0x1d4b5, "M", u"z"),
        (0x1d4b6, "M", u"a"),
        (0x1d4b7, "M", u"b"),
        (0x1d4b8, "M", u"c"),
        (0x1d4b9, "M", u"d"),
        (0x1d4ba, "X"),
        (0x1d4bb, "M", u"f"),
        (0x1d4bc, "X"),
        (0x1d4bd, "M", u"h"),
        (0x1d4be, "M", u"i"),
        (0x1d4bf, "M", u"j"),
        (0x1d4c0, "M", u"k"),
        (0x1d4c1, "M", u"l"),
        (0x1d4c2, "M", u"m"),
        (0x1d4c3, "M", u"n"),
        (0x1d4c4, "X"),
        (0x1d4c5, "M", u"p"),
        (0x1d4c6, "M", u"q"),
        (0x1d4c7, "M", u"r"),
        (0x1d4c8, "M", u"s"),
        (0x1d4c9, "M", u"t"),
        (0x1d4ca, "M", u"u"),
        (0x1d4cb, "M", u"v"),
        (0x1d4cc, "M", u"w"),
        (0x1d4cd, "M", u"x"),
        (0x1d4ce, "M", u"y"),
        (0x1d4cf, "M", u"z"),
        (0x1d4d0, "M", u"a"),
        (0x1d4d1, "M", u"b"),
    ]
def _seg_61():
    """Return segment 61 of the generated IDNA UTS #46 mapping table.

    Each tuple is (codepoint, status[, mapping]): "V" = valid, "X" =
    disallowed, "M" = mapped to the replacement string, "I" = ignored.
    This segment case-folds mathematical alphanumeric letters to ASCII.
    Machine-generated from Unicode data -- do not edit entries by hand.
    """
    return [
        (0x1d4d2, "M", u"c"),
        (0x1d4d3, "M", u"d"),
        (0x1d4d4, "M", u"e"),
        (0x1d4d5, "M", u"f"),
        (0x1d4d6, "M", u"g"),
        (0x1d4d7, "M", u"h"),
        (0x1d4d8, "M", u"i"),
        (0x1d4d9, "M", u"j"),
        (0x1d4da, "M", u"k"),
        (0x1d4db, "M", u"l"),
        (0x1d4dc, "M", u"m"),
        (0x1d4dd, "M", u"n"),
        (0x1d4de, "M", u"o"),
        (0x1d4df, "M", u"p"),
        (0x1d4e0, "M", u"q"),
        (0x1d4e1, "M", u"r"),
        (0x1d4e2, "M", u"s"),
        (0x1d4e3, "M", u"t"),
        (0x1d4e4, "M", u"u"),
        (0x1d4e5, "M", u"v"),
        (0x1d4e6, "M", u"w"),
        (0x1d4e7, "M", u"x"),
        (0x1d4e8, "M", u"y"),
        (0x1d4e9, "M", u"z"),
        (0x1d4ea, "M", u"a"),
        (0x1d4eb, "M", u"b"),
        (0x1d4ec, "M", u"c"),
        (0x1d4ed, "M", u"d"),
        (0x1d4ee, "M", u"e"),
        (0x1d4ef, "M", u"f"),
        (0x1d4f0, "M", u"g"),
        (0x1d4f1, "M", u"h"),
        (0x1d4f2, "M", u"i"),
        (0x1d4f3, "M", u"j"),
        (0x1d4f4, "M", u"k"),
        (0x1d4f5, "M", u"l"),
        (0x1d4f6, "M", u"m"),
        (0x1d4f7, "M", u"n"),
        (0x1d4f8, "M", u"o"),
        (0x1d4f9, "M", u"p"),
        (0x1d4fa, "M", u"q"),
        (0x1d4fb, "M", u"r"),
        (0x1d4fc, "M", u"s"),
        (0x1d4fd, "M", u"t"),
        (0x1d4fe, "M", u"u"),
        (0x1d4ff, "M", u"v"),
        (0x1d500, "M", u"w"),
        (0x1d501, "M", u"x"),
        (0x1d502, "M", u"y"),
        (0x1d503, "M", u"z"),
        (0x1d504, "M", u"a"),
        (0x1d505, "M", u"b"),
        (0x1d506, "X"),
        (0x1d507, "M", u"d"),
        (0x1d508, "M", u"e"),
        (0x1d509, "M", u"f"),
        (0x1d50a, "M", u"g"),
        (0x1d50b, "X"),
        (0x1d50d, "M", u"j"),
        (0x1d50e, "M", u"k"),
        (0x1d50f, "M", u"l"),
        (0x1d510, "M", u"m"),
        (0x1d511, "M", u"n"),
        (0x1d512, "M", u"o"),
        (0x1d513, "M", u"p"),
        (0x1d514, "M", u"q"),
        (0x1d515, "X"),
        (0x1d516, "M", u"s"),
        (0x1d517, "M", u"t"),
        (0x1d518, "M", u"u"),
        (0x1d519, "M", u"v"),
        (0x1d51a, "M", u"w"),
        (0x1d51b, "M", u"x"),
        (0x1d51c, "M", u"y"),
        (0x1d51d, "X"),
        (0x1d51e, "M", u"a"),
        (0x1d51f, "M", u"b"),
        (0x1d520, "M", u"c"),
        (0x1d521, "M", u"d"),
        (0x1d522, "M", u"e"),
        (0x1d523, "M", u"f"),
        (0x1d524, "M", u"g"),
        (0x1d525, "M", u"h"),
        (0x1d526, "M", u"i"),
        (0x1d527, "M", u"j"),
        (0x1d528, "M", u"k"),
        (0x1d529, "M", u"l"),
        (0x1d52a, "M", u"m"),
        (0x1d52b, "M", u"n"),
        (0x1d52c, "M", u"o"),
        (0x1d52d, "M", u"p"),
        (0x1d52e, "M", u"q"),
        (0x1d52f, "M", u"r"),
        (0x1d530, "M", u"s"),
        (0x1d531, "M", u"t"),
        (0x1d532, "M", u"u"),
        (0x1d533, "M", u"v"),
        (0x1d534, "M", u"w"),
        (0x1d535, "M", u"x"),
        (0x1d536, "M", u"y"),
    ]
def _seg_62():
    """Return segment 62 of the generated IDNA UTS #46 mapping table.

    Each tuple is (codepoint, status[, mapping]): "V" = valid, "X" =
    disallowed, "M" = mapped to the replacement string, "I" = ignored.
    This segment case-folds mathematical alphanumeric letters to ASCII.
    Machine-generated from Unicode data -- do not edit entries by hand.
    """
    return [
        (0x1d537, "M", u"z"),
        (0x1d538, "M", u"a"),
        (0x1d539, "M", u"b"),
        (0x1d53a, "X"),
        (0x1d53b, "M", u"d"),
        (0x1d53c, "M", u"e"),
        (0x1d53d, "M", u"f"),
        (0x1d53e, "M", u"g"),
        (0x1d53f, "X"),
        (0x1d540, "M", u"i"),
        (0x1d541, "M", u"j"),
        (0x1d542, "M", u"k"),
        (0x1d543, "M", u"l"),
        (0x1d544, "M", u"m"),
        (0x1d545, "X"),
        (0x1d546, "M", u"o"),
        (0x1d547, "X"),
        (0x1d54a, "M", u"s"),
        (0x1d54b, "M", u"t"),
        (0x1d54c, "M", u"u"),
        (0x1d54d, "M", u"v"),
        (0x1d54e, "M", u"w"),
        (0x1d54f, "M", u"x"),
        (0x1d550, "M", u"y"),
        (0x1d551, "X"),
        (0x1d552, "M", u"a"),
        (0x1d553, "M", u"b"),
        (0x1d554, "M", u"c"),
        (0x1d555, "M", u"d"),
        (0x1d556, "M", u"e"),
        (0x1d557, "M", u"f"),
        (0x1d558, "M", u"g"),
        (0x1d559, "M", u"h"),
        (0x1d55a, "M", u"i"),
        (0x1d55b, "M", u"j"),
        (0x1d55c, "M", u"k"),
        (0x1d55d, "M", u"l"),
        (0x1d55e, "M", u"m"),
        (0x1d55f, "M", u"n"),
        (0x1d560, "M", u"o"),
        (0x1d561, "M", u"p"),
        (0x1d562, "M", u"q"),
        (0x1d563, "M", u"r"),
        (0x1d564, "M", u"s"),
        (0x1d565, "M", u"t"),
        (0x1d566, "M", u"u"),
        (0x1d567, "M", u"v"),
        (0x1d568, "M", u"w"),
        (0x1d569, "M", u"x"),
        (0x1d56a, "M", u"y"),
        (0x1d56b, "M", u"z"),
        (0x1d56c, "M", u"a"),
        (0x1d56d, "M", u"b"),
        (0x1d56e, "M", u"c"),
        (0x1d56f, "M", u"d"),
        (0x1d570, "M", u"e"),
        (0x1d571, "M", u"f"),
        (0x1d572, "M", u"g"),
        (0x1d573, "M", u"h"),
        (0x1d574, "M", u"i"),
        (0x1d575, "M", u"j"),
        (0x1d576, "M", u"k"),
        (0x1d577, "M", u"l"),
        (0x1d578, "M", u"m"),
        (0x1d579, "M", u"n"),
        (0x1d57a, "M", u"o"),
        (0x1d57b, "M", u"p"),
        (0x1d57c, "M", u"q"),
        (0x1d57d, "M", u"r"),
        (0x1d57e, "M", u"s"),
        (0x1d57f, "M", u"t"),
        (0x1d580, "M", u"u"),
        (0x1d581, "M", u"v"),
        (0x1d582, "M", u"w"),
        (0x1d583, "M", u"x"),
        (0x1d584, "M", u"y"),
        (0x1d585, "M", u"z"),
        (0x1d586, "M", u"a"),
        (0x1d587, "M", u"b"),
        (0x1d588, "M", u"c"),
        (0x1d589, "M", u"d"),
        (0x1d58a, "M", u"e"),
        (0x1d58b, "M", u"f"),
        (0x1d58c, "M", u"g"),
        (0x1d58d, "M", u"h"),
        (0x1d58e, "M", u"i"),
        (0x1d58f, "M", u"j"),
        (0x1d590, "M", u"k"),
        (0x1d591, "M", u"l"),
        (0x1d592, "M", u"m"),
        (0x1d593, "M", u"n"),
        (0x1d594, "M", u"o"),
        (0x1d595, "M", u"p"),
        (0x1d596, "M", u"q"),
        (0x1d597, "M", u"r"),
        (0x1d598, "M", u"s"),
        (0x1d599, "M", u"t"),
        (0x1d59a, "M", u"u"),
        (0x1d59b, "M", u"v"),
        (0x1d59c, "M", u"w"),
    ]
def _seg_63():
    """Return segment 63 of the generated IDNA UTS #46 mapping table.

    Each tuple is (codepoint, status[, mapping]): "V" = valid, "X" =
    disallowed, "M" = mapped to the replacement string, "I" = ignored.
    This segment case-folds mathematical alphanumeric letters to ASCII.
    Machine-generated from Unicode data -- do not edit entries by hand.
    """
    return [
        (0x1d59d, "M", u"x"),
        (0x1d59e, "M", u"y"),
        (0x1d59f, "M", u"z"),
        (0x1d5a0, "M", u"a"),
        (0x1d5a1, "M", u"b"),
        (0x1d5a2, "M", u"c"),
        (0x1d5a3, "M", u"d"),
        (0x1d5a4, "M", u"e"),
        (0x1d5a5, "M", u"f"),
        (0x1d5a6, "M", u"g"),
        (0x1d5a7, "M", u"h"),
        (0x1d5a8, "M", u"i"),
        (0x1d5a9, "M", u"j"),
        (0x1d5aa, "M", u"k"),
        (0x1d5ab, "M", u"l"),
        (0x1d5ac, "M", u"m"),
        (0x1d5ad, "M", u"n"),
        (0x1d5ae, "M", u"o"),
        (0x1d5af, "M", u"p"),
        (0x1d5b0, "M", u"q"),
        (0x1d5b1, "M", u"r"),
        (0x1d5b2, "M", u"s"),
        (0x1d5b3, "M", u"t"),
        (0x1d5b4, "M", u"u"),
        (0x1d5b5, "M", u"v"),
        (0x1d5b6, "M", u"w"),
        (0x1d5b7, "M", u"x"),
        (0x1d5b8, "M", u"y"),
        (0x1d5b9, "M", u"z"),
        (0x1d5ba, "M", u"a"),
        (0x1d5bb, "M", u"b"),
        (0x1d5bc, "M", u"c"),
        (0x1d5bd, "M", u"d"),
        (0x1d5be, "M", u"e"),
        (0x1d5bf, "M", u"f"),
        (0x1d5c0, "M", u"g"),
        (0x1d5c1, "M", u"h"),
        (0x1d5c2, "M", u"i"),
        (0x1d5c3, "M", u"j"),
        (0x1d5c4, "M", u"k"),
        (0x1d5c5, "M", u"l"),
        (0x1d5c6, "M", u"m"),
        (0x1d5c7, "M", u"n"),
        (0x1d5c8, "M", u"o"),
        (0x1d5c9, "M", u"p"),
        (0x1d5ca, "M", u"q"),
        (0x1d5cb, "M", u"r"),
        (0x1d5cc, "M", u"s"),
        (0x1d5cd, "M", u"t"),
        (0x1d5ce, "M", u"u"),
        (0x1d5cf, "M", u"v"),
        (0x1d5d0, "M", u"w"),
        (0x1d5d1, "M", u"x"),
        (0x1d5d2, "M", u"y"),
        (0x1d5d3, "M", u"z"),
        (0x1d5d4, "M", u"a"),
        (0x1d5d5, "M", u"b"),
        (0x1d5d6, "M", u"c"),
        (0x1d5d7, "M", u"d"),
        (0x1d5d8, "M", u"e"),
        (0x1d5d9, "M", u"f"),
        (0x1d5da, "M", u"g"),
        (0x1d5db, "M", u"h"),
        (0x1d5dc, "M", u"i"),
        (0x1d5dd, "M", u"j"),
        (0x1d5de, "M", u"k"),
        (0x1d5df, "M", u"l"),
        (0x1d5e0, "M", u"m"),
        (0x1d5e1, "M", u"n"),
        (0x1d5e2, "M", u"o"),
        (0x1d5e3, "M", u"p"),
        (0x1d5e4, "M", u"q"),
        (0x1d5e5, "M", u"r"),
        (0x1d5e6, "M", u"s"),
        (0x1d5e7, "M", u"t"),
        (0x1d5e8, "M", u"u"),
        (0x1d5e9, "M", u"v"),
        (0x1d5ea, "M", u"w"),
        (0x1d5eb, "M", u"x"),
        (0x1d5ec, "M", u"y"),
        (0x1d5ed, "M", u"z"),
        (0x1d5ee, "M", u"a"),
        (0x1d5ef, "M", u"b"),
        (0x1d5f0, "M", u"c"),
        (0x1d5f1, "M", u"d"),
        (0x1d5f2, "M", u"e"),
        (0x1d5f3, "M", u"f"),
        (0x1d5f4, "M", u"g"),
        (0x1d5f5, "M", u"h"),
        (0x1d5f6, "M", u"i"),
        (0x1d5f7, "M", u"j"),
        (0x1d5f8, "M", u"k"),
        (0x1d5f9, "M", u"l"),
        (0x1d5fa, "M", u"m"),
        (0x1d5fb, "M", u"n"),
        (0x1d5fc, "M", u"o"),
        (0x1d5fd, "M", u"p"),
        (0x1d5fe, "M", u"q"),
        (0x1d5ff, "M", u"r"),
        (0x1d600, "M", u"s"),
    ]
def _seg_64():
    """Return segment 64 of the generated IDNA UTS #46 mapping table.

    Each tuple is (codepoint, status[, mapping]): "V" = valid, "X" =
    disallowed, "M" = mapped to the replacement string, "I" = ignored.
    This segment case-folds mathematical alphanumeric letters to ASCII.
    Machine-generated from Unicode data -- do not edit entries by hand.
    """
    return [
        (0x1d601, "M", u"t"),
        (0x1d602, "M", u"u"),
        (0x1d603, "M", u"v"),
        (0x1d604, "M", u"w"),
        (0x1d605, "M", u"x"),
        (0x1d606, "M", u"y"),
        (0x1d607, "M", u"z"),
        (0x1d608, "M", u"a"),
        (0x1d609, "M", u"b"),
        (0x1d60a, "M", u"c"),
        (0x1d60b, "M", u"d"),
        (0x1d60c, "M", u"e"),
        (0x1d60d, "M", u"f"),
        (0x1d60e, "M", u"g"),
        (0x1d60f, "M", u"h"),
        (0x1d610, "M", u"i"),
        (0x1d611, "M", u"j"),
        (0x1d612, "M", u"k"),
        (0x1d613, "M", u"l"),
        (0x1d614, "M", u"m"),
        (0x1d615, "M", u"n"),
        (0x1d616, "M", u"o"),
        (0x1d617, "M", u"p"),
        (0x1d618, "M", u"q"),
        (0x1d619, "M", u"r"),
        (0x1d61a, "M", u"s"),
        (0x1d61b, "M", u"t"),
        (0x1d61c, "M", u"u"),
        (0x1d61d, "M", u"v"),
        (0x1d61e, "M", u"w"),
        (0x1d61f, "M", u"x"),
        (0x1d620, "M", u"y"),
        (0x1d621, "M", u"z"),
        (0x1d622, "M", u"a"),
        (0x1d623, "M", u"b"),
        (0x1d624, "M", u"c"),
        (0x1d625, "M", u"d"),
        (0x1d626, "M", u"e"),
        (0x1d627, "M", u"f"),
        (0x1d628, "M", u"g"),
        (0x1d629, "M", u"h"),
        (0x1d62a, "M", u"i"),
        (0x1d62b, "M", u"j"),
        (0x1d62c, "M", u"k"),
        (0x1d62d, "M", u"l"),
        (0x1d62e, "M", u"m"),
        (0x1d62f, "M", u"n"),
        (0x1d630, "M", u"o"),
        (0x1d631, "M", u"p"),
        (0x1d632, "M", u"q"),
        (0x1d633, "M", u"r"),
        (0x1d634, "M", u"s"),
        (0x1d635, "M", u"t"),
        (0x1d636, "M", u"u"),
        (0x1d637, "M", u"v"),
        (0x1d638, "M", u"w"),
        (0x1d639, "M", u"x"),
        (0x1d63a, "M", u"y"),
        (0x1d63b, "M", u"z"),
        (0x1d63c, "M", u"a"),
        (0x1d63d, "M", u"b"),
        (0x1d63e, "M", u"c"),
        (0x1d63f, "M", u"d"),
        (0x1d640, "M", u"e"),
        (0x1d641, "M", u"f"),
        (0x1d642, "M", u"g"),
        (0x1d643, "M", u"h"),
        (0x1d644, "M", u"i"),
        (0x1d645, "M", u"j"),
        (0x1d646, "M", u"k"),
        (0x1d647, "M", u"l"),
        (0x1d648, "M", u"m"),
        (0x1d649, "M", u"n"),
        (0x1d64a, "M", u"o"),
        (0x1d64b, "M", u"p"),
        (0x1d64c, "M", u"q"),
        (0x1d64d, "M", u"r"),
        (0x1d64e, "M", u"s"),
        (0x1d64f, "M", u"t"),
        (0x1d650, "M", u"u"),
        (0x1d651, "M", u"v"),
        (0x1d652, "M", u"w"),
        (0x1d653, "M", u"x"),
        (0x1d654, "M", u"y"),
        (0x1d655, "M", u"z"),
        (0x1d656, "M", u"a"),
        (0x1d657, "M", u"b"),
        (0x1d658, "M", u"c"),
        (0x1d659, "M", u"d"),
        (0x1d65a, "M", u"e"),
        (0x1d65b, "M", u"f"),
        (0x1d65c, "M", u"g"),
        (0x1d65d, "M", u"h"),
        (0x1d65e, "M", u"i"),
        (0x1d65f, "M", u"j"),
        (0x1d660, "M", u"k"),
        (0x1d661, "M", u"l"),
        (0x1d662, "M", u"m"),
        (0x1d663, "M", u"n"),
        (0x1d664, "M", u"o"),
    ]
def _seg_65():
    """Return segment 65 of the generated IDNA UTS #46 mapping table.

    Each tuple is (codepoint, status[, mapping]): "V" = valid, "X" =
    disallowed, "M" = mapped to the replacement string, "I" = ignored.
    Latin entries fold to ASCII; Greek entries fold to lowercase Greek.
    Machine-generated from Unicode data -- do not edit entries by hand.
    """
    return [
        (0x1d665, "M", u"p"),
        (0x1d666, "M", u"q"),
        (0x1d667, "M", u"r"),
        (0x1d668, "M", u"s"),
        (0x1d669, "M", u"t"),
        (0x1d66a, "M", u"u"),
        (0x1d66b, "M", u"v"),
        (0x1d66c, "M", u"w"),
        (0x1d66d, "M", u"x"),
        (0x1d66e, "M", u"y"),
        (0x1d66f, "M", u"z"),
        (0x1d670, "M", u"a"),
        (0x1d671, "M", u"b"),
        (0x1d672, "M", u"c"),
        (0x1d673, "M", u"d"),
        (0x1d674, "M", u"e"),
        (0x1d675, "M", u"f"),
        (0x1d676, "M", u"g"),
        (0x1d677, "M", u"h"),
        (0x1d678, "M", u"i"),
        (0x1d679, "M", u"j"),
        (0x1d67a, "M", u"k"),
        (0x1d67b, "M", u"l"),
        (0x1d67c, "M", u"m"),
        (0x1d67d, "M", u"n"),
        (0x1d67e, "M", u"o"),
        (0x1d67f, "M", u"p"),
        (0x1d680, "M", u"q"),
        (0x1d681, "M", u"r"),
        (0x1d682, "M", u"s"),
        (0x1d683, "M", u"t"),
        (0x1d684, "M", u"u"),
        (0x1d685, "M", u"v"),
        (0x1d686, "M", u"w"),
        (0x1d687, "M", u"x"),
        (0x1d688, "M", u"y"),
        (0x1d689, "M", u"z"),
        (0x1d68a, "M", u"a"),
        (0x1d68b, "M", u"b"),
        (0x1d68c, "M", u"c"),
        (0x1d68d, "M", u"d"),
        (0x1d68e, "M", u"e"),
        (0x1d68f, "M", u"f"),
        (0x1d690, "M", u"g"),
        (0x1d691, "M", u"h"),
        (0x1d692, "M", u"i"),
        (0x1d693, "M", u"j"),
        (0x1d694, "M", u"k"),
        (0x1d695, "M", u"l"),
        (0x1d696, "M", u"m"),
        (0x1d697, "M", u"n"),
        (0x1d698, "M", u"o"),
        (0x1d699, "M", u"p"),
        (0x1d69a, "M", u"q"),
        (0x1d69b, "M", u"r"),
        (0x1d69c, "M", u"s"),
        (0x1d69d, "M", u"t"),
        (0x1d69e, "M", u"u"),
        (0x1d69f, "M", u"v"),
        (0x1d6a0, "M", u"w"),
        (0x1d6a1, "M", u"x"),
        (0x1d6a2, "M", u"y"),
        (0x1d6a3, "M", u"z"),
        (0x1d6a4, "M", u"ı"),
        (0x1d6a5, "M", u"ȷ"),
        (0x1d6a6, "X"),
        (0x1d6a8, "M", u"α"),
        (0x1d6a9, "M", u"β"),
        (0x1d6aa, "M", u"γ"),
        (0x1d6ab, "M", u"δ"),
        (0x1d6ac, "M", u"ε"),
        (0x1d6ad, "M", u"ζ"),
        (0x1d6ae, "M", u"η"),
        (0x1d6af, "M", u"θ"),
        (0x1d6b0, "M", u"ι"),
        (0x1d6b1, "M", u"κ"),
        (0x1d6b2, "M", u"λ"),
        (0x1d6b3, "M", u"μ"),
        (0x1d6b4, "M", u"ν"),
        (0x1d6b5, "M", u"ξ"),
        (0x1d6b6, "M", u"ο"),
        (0x1d6b7, "M", u"π"),
        (0x1d6b8, "M", u"ρ"),
        (0x1d6b9, "M", u"θ"),
        (0x1d6ba, "M", u"σ"),
        (0x1d6bb, "M", u"τ"),
        (0x1d6bc, "M", u"υ"),
        (0x1d6bd, "M", u"φ"),
        (0x1d6be, "M", u"χ"),
        (0x1d6bf, "M", u"ψ"),
        (0x1d6c0, "M", u"ω"),
        (0x1d6c1, "M", u"∇"),
        (0x1d6c2, "M", u"α"),
        (0x1d6c3, "M", u"β"),
        (0x1d6c4, "M", u"γ"),
        (0x1d6c5, "M", u"δ"),
        (0x1d6c6, "M", u"ε"),
        (0x1d6c7, "M", u"ζ"),
        (0x1d6c8, "M", u"η"),
        (0x1d6c9, "M", u"θ"),
    ]
def _seg_66():
    """Return segment 66 of the generated IDNA UTS #46 mapping table.

    Each tuple is (codepoint, status[, mapping]): "V" = valid, "X" =
    disallowed, "M" = mapped to the replacement string, "I" = ignored.
    This segment folds mathematical Greek letters to lowercase Greek.
    Machine-generated from Unicode data -- do not edit entries by hand.
    """
    return [
        (0x1d6ca, "M", u"ι"),
        (0x1d6cb, "M", u"κ"),
        (0x1d6cc, "M", u"λ"),
        (0x1d6cd, "M", u"μ"),
        (0x1d6ce, "M", u"ν"),
        (0x1d6cf, "M", u"ξ"),
        (0x1d6d0, "M", u"ο"),
        (0x1d6d1, "M", u"π"),
        (0x1d6d2, "M", u"ρ"),
        (0x1d6d3, "M", u"σ"),
        (0x1d6d5, "M", u"τ"),
        (0x1d6d6, "M", u"υ"),
        (0x1d6d7, "M", u"φ"),
        (0x1d6d8, "M", u"χ"),
        (0x1d6d9, "M", u"ψ"),
        (0x1d6da, "M", u"ω"),
        (0x1d6db, "M", u"∂"),
        (0x1d6dc, "M", u"ε"),
        (0x1d6dd, "M", u"θ"),
        (0x1d6de, "M", u"κ"),
        (0x1d6df, "M", u"φ"),
        (0x1d6e0, "M", u"ρ"),
        (0x1d6e1, "M", u"π"),
        (0x1d6e2, "M", u"α"),
        (0x1d6e3, "M", u"β"),
        (0x1d6e4, "M", u"γ"),
        (0x1d6e5, "M", u"δ"),
        (0x1d6e6, "M", u"ε"),
        (0x1d6e7, "M", u"ζ"),
        (0x1d6e8, "M", u"η"),
        (0x1d6e9, "M", u"θ"),
        (0x1d6ea, "M", u"ι"),
        (0x1d6eb, "M", u"κ"),
        (0x1d6ec, "M", u"λ"),
        (0x1d6ed, "M", u"μ"),
        (0x1d6ee, "M", u"ν"),
        (0x1d6ef, "M", u"ξ"),
        (0x1d6f0, "M", u"ο"),
        (0x1d6f1, "M", u"π"),
        (0x1d6f2, "M", u"ρ"),
        (0x1d6f3, "M", u"θ"),
        (0x1d6f4, "M", u"σ"),
        (0x1d6f5, "M", u"τ"),
        (0x1d6f6, "M", u"υ"),
        (0x1d6f7, "M", u"φ"),
        (0x1d6f8, "M", u"χ"),
        (0x1d6f9, "M", u"ψ"),
        (0x1d6fa, "M", u"ω"),
        (0x1d6fb, "M", u"∇"),
        (0x1d6fc, "M", u"α"),
        (0x1d6fd, "M", u"β"),
        (0x1d6fe, "M", u"γ"),
        (0x1d6ff, "M", u"δ"),
        (0x1d700, "M", u"ε"),
        (0x1d701, "M", u"ζ"),
        (0x1d702, "M", u"η"),
        (0x1d703, "M", u"θ"),
        (0x1d704, "M", u"ι"),
        (0x1d705, "M", u"κ"),
        (0x1d706, "M", u"λ"),
        (0x1d707, "M", u"μ"),
        (0x1d708, "M", u"ν"),
        (0x1d709, "M", u"ξ"),
        (0x1d70a, "M", u"ο"),
        (0x1d70b, "M", u"π"),
        (0x1d70c, "M", u"ρ"),
        (0x1d70d, "M", u"σ"),
        (0x1d70f, "M", u"τ"),
        (0x1d710, "M", u"υ"),
        (0x1d711, "M", u"φ"),
        (0x1d712, "M", u"χ"),
        (0x1d713, "M", u"ψ"),
        (0x1d714, "M", u"ω"),
        (0x1d715, "M", u"∂"),
        (0x1d716, "M", u"ε"),
        (0x1d717, "M", u"θ"),
        (0x1d718, "M", u"κ"),
        (0x1d719, "M", u"φ"),
        (0x1d71a, "M", u"ρ"),
        (0x1d71b, "M", u"π"),
        (0x1d71c, "M", u"α"),
        (0x1d71d, "M", u"β"),
        (0x1d71e, "M", u"γ"),
        (0x1d71f, "M", u"δ"),
        (0x1d720, "M", u"ε"),
        (0x1d721, "M", u"ζ"),
        (0x1d722, "M", u"η"),
        (0x1d723, "M", u"θ"),
        (0x1d724, "M", u"ι"),
        (0x1d725, "M", u"κ"),
        (0x1d726, "M", u"λ"),
        (0x1d727, "M", u"μ"),
        (0x1d728, "M", u"ν"),
        (0x1d729, "M", u"ξ"),
        (0x1d72a, "M", u"ο"),
        (0x1d72b, "M", u"π"),
        (0x1d72c, "M", u"ρ"),
        (0x1d72d, "M", u"θ"),
        (0x1d72e, "M", u"σ"),
        (0x1d72f, "M", u"τ"),
    ]
def _seg_67():
    """Return segment 67 of the generated IDNA UTS #46 mapping table.

    Each tuple is (codepoint, status[, mapping]): "V" = valid, "X" =
    disallowed, "M" = mapped to the replacement string, "I" = ignored.
    This segment folds mathematical Greek letters to lowercase Greek.
    Machine-generated from Unicode data -- do not edit entries by hand.
    """
    return [
        (0x1d730, "M", u"υ"),
        (0x1d731, "M", u"φ"),
        (0x1d732, "M", u"χ"),
        (0x1d733, "M", u"ψ"),
        (0x1d734, "M", u"ω"),
        (0x1d735, "M", u"∇"),
        (0x1d736, "M", u"α"),
        (0x1d737, "M", u"β"),
        (0x1d738, "M", u"γ"),
        (0x1d739, "M", u"δ"),
        (0x1d73a, "M", u"ε"),
        (0x1d73b, "M", u"ζ"),
        (0x1d73c, "M", u"η"),
        (0x1d73d, "M", u"θ"),
        (0x1d73e, "M", u"ι"),
        (0x1d73f, "M", u"κ"),
        (0x1d740, "M", u"λ"),
        (0x1d741, "M", u"μ"),
        (0x1d742, "M", u"ν"),
        (0x1d743, "M", u"ξ"),
        (0x1d744, "M", u"ο"),
        (0x1d745, "M", u"π"),
        (0x1d746, "M", u"ρ"),
        (0x1d747, "M", u"σ"),
        (0x1d749, "M", u"τ"),
        (0x1d74a, "M", u"υ"),
        (0x1d74b, "M", u"φ"),
        (0x1d74c, "M", u"χ"),
        (0x1d74d, "M", u"ψ"),
        (0x1d74e, "M", u"ω"),
        (0x1d74f, "M", u"∂"),
        (0x1d750, "M", u"ε"),
        (0x1d751, "M", u"θ"),
        (0x1d752, "M", u"κ"),
        (0x1d753, "M", u"φ"),
        (0x1d754, "M", u"ρ"),
        (0x1d755, "M", u"π"),
        (0x1d756, "M", u"α"),
        (0x1d757, "M", u"β"),
        (0x1d758, "M", u"γ"),
        (0x1d759, "M", u"δ"),
        (0x1d75a, "M", u"ε"),
        (0x1d75b, "M", u"ζ"),
        (0x1d75c, "M", u"η"),
        (0x1d75d, "M", u"θ"),
        (0x1d75e, "M", u"ι"),
        (0x1d75f, "M", u"κ"),
        (0x1d760, "M", u"λ"),
        (0x1d761, "M", u"μ"),
        (0x1d762, "M", u"ν"),
        (0x1d763, "M", u"ξ"),
        (0x1d764, "M", u"ο"),
        (0x1d765, "M", u"π"),
        (0x1d766, "M", u"ρ"),
        (0x1d767, "M", u"θ"),
        (0x1d768, "M", u"σ"),
        (0x1d769, "M", u"τ"),
        (0x1d76a, "M", u"υ"),
        (0x1d76b, "M", u"φ"),
        (0x1d76c, "M", u"χ"),
        (0x1d76d, "M", u"ψ"),
        (0x1d76e, "M", u"ω"),
        (0x1d76f, "M", u"∇"),
        (0x1d770, "M", u"α"),
        (0x1d771, "M", u"β"),
        (0x1d772, "M", u"γ"),
        (0x1d773, "M", u"δ"),
        (0x1d774, "M", u"ε"),
        (0x1d775, "M", u"ζ"),
        (0x1d776, "M", u"η"),
        (0x1d777, "M", u"θ"),
        (0x1d778, "M", u"ι"),
        (0x1d779, "M", u"κ"),
        (0x1d77a, "M", u"λ"),
        (0x1d77b, "M", u"μ"),
        (0x1d77c, "M", u"ν"),
        (0x1d77d, "M", u"ξ"),
        (0x1d77e, "M", u"ο"),
        (0x1d77f, "M", u"π"),
        (0x1d780, "M", u"ρ"),
        (0x1d781, "M", u"σ"),
        (0x1d783, "M", u"τ"),
        (0x1d784, "M", u"υ"),
        (0x1d785, "M", u"φ"),
        (0x1d786, "M", u"χ"),
        (0x1d787, "M", u"ψ"),
        (0x1d788, "M", u"ω"),
        (0x1d789, "M", u"∂"),
        (0x1d78a, "M", u"ε"),
        (0x1d78b, "M", u"θ"),
        (0x1d78c, "M", u"κ"),
        (0x1d78d, "M", u"φ"),
        (0x1d78e, "M", u"ρ"),
        (0x1d78f, "M", u"π"),
        (0x1d790, "M", u"α"),
        (0x1d791, "M", u"β"),
        (0x1d792, "M", u"γ"),
        (0x1d793, "M", u"δ"),
        (0x1d794, "M", u"ε"),
        (0x1d795, "M", u"ζ"),
    ]
def _seg_68():
    """Return segment 68 of the generated IDNA UTS #46 mapping table.

    Each tuple is (codepoint, status[, mapping]): "V" = valid, "X" =
    disallowed, "M" = mapped to the replacement string, "I" = ignored.
    Greek entries fold to lowercase Greek; digit entries fold styled
    mathematical digits to ASCII "0"-"9".
    Machine-generated from Unicode data -- do not edit entries by hand.
    """
    return [
        (0x1d796, "M", u"η"),
        (0x1d797, "M", u"θ"),
        (0x1d798, "M", u"ι"),
        (0x1d799, "M", u"κ"),
        (0x1d79a, "M", u"λ"),
        (0x1d79b, "M", u"μ"),
        (0x1d79c, "M", u"ν"),
        (0x1d79d, "M", u"ξ"),
        (0x1d79e, "M", u"ο"),
        (0x1d79f, "M", u"π"),
        (0x1d7a0, "M", u"ρ"),
        (0x1d7a1, "M", u"θ"),
        (0x1d7a2, "M", u"σ"),
        (0x1d7a3, "M", u"τ"),
        (0x1d7a4, "M", u"υ"),
        (0x1d7a5, "M", u"φ"),
        (0x1d7a6, "M", u"χ"),
        (0x1d7a7, "M", u"ψ"),
        (0x1d7a8, "M", u"ω"),
        (0x1d7a9, "M", u"∇"),
        (0x1d7aa, "M", u"α"),
        (0x1d7ab, "M", u"β"),
        (0x1d7ac, "M", u"γ"),
        (0x1d7ad, "M", u"δ"),
        (0x1d7ae, "M", u"ε"),
        (0x1d7af, "M", u"ζ"),
        (0x1d7b0, "M", u"η"),
        (0x1d7b1, "M", u"θ"),
        (0x1d7b2, "M", u"ι"),
        (0x1d7b3, "M", u"κ"),
        (0x1d7b4, "M", u"λ"),
        (0x1d7b5, "M", u"μ"),
        (0x1d7b6, "M", u"ν"),
        (0x1d7b7, "M", u"ξ"),
        (0x1d7b8, "M", u"ο"),
        (0x1d7b9, "M", u"π"),
        (0x1d7ba, "M", u"ρ"),
        (0x1d7bb, "M", u"σ"),
        (0x1d7bd, "M", u"τ"),
        (0x1d7be, "M", u"υ"),
        (0x1d7bf, "M", u"φ"),
        (0x1d7c0, "M", u"χ"),
        (0x1d7c1, "M", u"ψ"),
        (0x1d7c2, "M", u"ω"),
        (0x1d7c3, "M", u"∂"),
        (0x1d7c4, "M", u"ε"),
        (0x1d7c5, "M", u"θ"),
        (0x1d7c6, "M", u"κ"),
        (0x1d7c7, "M", u"φ"),
        (0x1d7c8, "M", u"ρ"),
        (0x1d7c9, "M", u"π"),
        (0x1d7ca, "M", u"ϝ"),
        (0x1d7cc, "X"),
        (0x1d7ce, "M", u"0"),
        (0x1d7cf, "M", u"1"),
        (0x1d7d0, "M", u"2"),
        (0x1d7d1, "M", u"3"),
        (0x1d7d2, "M", u"4"),
        (0x1d7d3, "M", u"5"),
        (0x1d7d4, "M", u"6"),
        (0x1d7d5, "M", u"7"),
        (0x1d7d6, "M", u"8"),
        (0x1d7d7, "M", u"9"),
        (0x1d7d8, "M", u"0"),
        (0x1d7d9, "M", u"1"),
        (0x1d7da, "M", u"2"),
        (0x1d7db, "M", u"3"),
        (0x1d7dc, "M", u"4"),
        (0x1d7dd, "M", u"5"),
        (0x1d7de, "M", u"6"),
        (0x1d7df, "M", u"7"),
        (0x1d7e0, "M", u"8"),
        (0x1d7e1, "M", u"9"),
        (0x1d7e2, "M", u"0"),
        (0x1d7e3, "M", u"1"),
        (0x1d7e4, "M", u"2"),
        (0x1d7e5, "M", u"3"),
        (0x1d7e6, "M", u"4"),
        (0x1d7e7, "M", u"5"),
        (0x1d7e8, "M", u"6"),
        (0x1d7e9, "M", u"7"),
        (0x1d7ea, "M", u"8"),
        (0x1d7eb, "M", u"9"),
        (0x1d7ec, "M", u"0"),
        (0x1d7ed, "M", u"1"),
        (0x1d7ee, "M", u"2"),
        (0x1d7ef, "M", u"3"),
        (0x1d7f0, "M", u"4"),
        (0x1d7f1, "M", u"5"),
        (0x1d7f2, "M", u"6"),
        (0x1d7f3, "M", u"7"),
        (0x1d7f4, "M", u"8"),
        (0x1d7f5, "M", u"9"),
        (0x1d7f6, "M", u"0"),
        (0x1d7f7, "M", u"1"),
        (0x1d7f8, "M", u"2"),
        (0x1d7f9, "M", u"3"),
        (0x1d7fa, "M", u"4"),
        (0x1d7fb, "M", u"5"),
        (0x1d7fc, "M", u"6"),
    ]
def _seg_69():
    """Generated UTS #46 table segment (combined into ``uts46data`` below);
    entries are ``(codepoint, status[, mapping])`` tuples."""
    return [
        (0x1d7fd, "M", u"7"),
        (0x1d7fe, "M", u"8"),
        (0x1d7ff, "M", u"9"),
        (0x1d800, "V"),
        (0x1da8c, "X"),
        (0x1da9b, "V"),
        (0x1daa0, "X"),
        (0x1daa1, "V"),
        (0x1dab0, "X"),
        (0x1e000, "V"),
        (0x1e007, "X"),
        (0x1e008, "V"),
        (0x1e019, "X"),
        (0x1e01b, "V"),
        (0x1e022, "X"),
        (0x1e023, "V"),
        (0x1e025, "X"),
        (0x1e026, "V"),
        (0x1e02b, "X"),
        (0x1e100, "V"),
        (0x1e12d, "X"),
        (0x1e130, "V"),
        (0x1e13e, "X"),
        (0x1e140, "V"),
        (0x1e14a, "X"),
        (0x1e14e, "V"),
        (0x1e150, "X"),
        (0x1e2c0, "V"),
        (0x1e2fa, "X"),
        (0x1e2ff, "V"),
        (0x1e300, "X"),
        (0x1e800, "V"),
        (0x1e8c5, "X"),
        (0x1e8c7, "V"),
        (0x1e8d7, "X"),
        (0x1e900, "M", u"𞤢"),
        (0x1e901, "M", u"𞤣"),
        (0x1e902, "M", u"𞤤"),
        (0x1e903, "M", u"𞤥"),
        (0x1e904, "M", u"𞤦"),
        (0x1e905, "M", u"𞤧"),
        (0x1e906, "M", u"𞤨"),
        (0x1e907, "M", u"𞤩"),
        (0x1e908, "M", u"𞤪"),
        (0x1e909, "M", u"𞤫"),
        (0x1e90a, "M", u"𞤬"),
        (0x1e90b, "M", u"𞤭"),
        (0x1e90c, "M", u"𞤮"),
        (0x1e90d, "M", u"𞤯"),
        (0x1e90e, "M", u"𞤰"),
        (0x1e90f, "M", u"𞤱"),
        (0x1e910, "M", u"𞤲"),
        (0x1e911, "M", u"𞤳"),
        (0x1e912, "M", u"𞤴"),
        (0x1e913, "M", u"𞤵"),
        (0x1e914, "M", u"𞤶"),
        (0x1e915, "M", u"𞤷"),
        (0x1e916, "M", u"𞤸"),
        (0x1e917, "M", u"𞤹"),
        (0x1e918, "M", u"𞤺"),
        (0x1e919, "M", u"𞤻"),
        (0x1e91a, "M", u"𞤼"),
        (0x1e91b, "M", u"𞤽"),
        (0x1e91c, "M", u"𞤾"),
        (0x1e91d, "M", u"𞤿"),
        (0x1e91e, "M", u"𞥀"),
        (0x1e91f, "M", u"𞥁"),
        (0x1e920, "M", u"𞥂"),
        (0x1e921, "M", u"𞥃"),
        (0x1e922, "V"),
        (0x1e94c, "X"),
        (0x1e950, "V"),
        (0x1e95a, "X"),
        (0x1e95e, "V"),
        (0x1e960, "X"),
        (0x1ec71, "V"),
        (0x1ecb5, "X"),
        (0x1ed01, "V"),
        (0x1ed3e, "X"),
        (0x1ee00, "M", u"ا"),
        (0x1ee01, "M", u"ب"),
        (0x1ee02, "M", u"ج"),
        (0x1ee03, "M", u"د"),
        (0x1ee04, "X"),
        (0x1ee05, "M", u"و"),
        (0x1ee06, "M", u"ز"),
        (0x1ee07, "M", u"ح"),
        (0x1ee08, "M", u"ط"),
        (0x1ee09, "M", u"ي"),
        (0x1ee0a, "M", u"ك"),
        (0x1ee0b, "M", u"ل"),
        (0x1ee0c, "M", u"م"),
        (0x1ee0d, "M", u"ن"),
        (0x1ee0e, "M", u"س"),
        (0x1ee0f, "M", u"ع"),
        (0x1ee10, "M", u"ف"),
        (0x1ee11, "M", u"ص"),
        (0x1ee12, "M", u"ق"),
        (0x1ee13, "M", u"ر"),
        (0x1ee14, "M", u"ش"),
    ]
def _seg_70():
    """Generated UTS #46 table segment (combined into ``uts46data`` below);
    entries are ``(codepoint, status[, mapping])`` tuples."""
    return [
        (0x1ee15, "M", u"ت"),
        (0x1ee16, "M", u"ث"),
        (0x1ee17, "M", u"خ"),
        (0x1ee18, "M", u"ذ"),
        (0x1ee19, "M", u"ض"),
        (0x1ee1a, "M", u"ظ"),
        (0x1ee1b, "M", u"غ"),
        (0x1ee1c, "M", u"ٮ"),
        (0x1ee1d, "M", u"ں"),
        (0x1ee1e, "M", u"ڡ"),
        (0x1ee1f, "M", u"ٯ"),
        (0x1ee20, "X"),
        (0x1ee21, "M", u"ب"),
        (0x1ee22, "M", u"ج"),
        (0x1ee23, "X"),
        (0x1ee24, "M", u"ه"),
        (0x1ee25, "X"),
        (0x1ee27, "M", u"ح"),
        (0x1ee28, "X"),
        (0x1ee29, "M", u"ي"),
        (0x1ee2a, "M", u"ك"),
        (0x1ee2b, "M", u"ل"),
        (0x1ee2c, "M", u"م"),
        (0x1ee2d, "M", u"ن"),
        (0x1ee2e, "M", u"س"),
        (0x1ee2f, "M", u"ع"),
        (0x1ee30, "M", u"ف"),
        (0x1ee31, "M", u"ص"),
        (0x1ee32, "M", u"ق"),
        (0x1ee33, "X"),
        (0x1ee34, "M", u"ش"),
        (0x1ee35, "M", u"ت"),
        (0x1ee36, "M", u"ث"),
        (0x1ee37, "M", u"خ"),
        (0x1ee38, "X"),
        (0x1ee39, "M", u"ض"),
        (0x1ee3a, "X"),
        (0x1ee3b, "M", u"غ"),
        (0x1ee3c, "X"),
        (0x1ee42, "M", u"ج"),
        (0x1ee43, "X"),
        (0x1ee47, "M", u"ح"),
        (0x1ee48, "X"),
        (0x1ee49, "M", u"ي"),
        (0x1ee4a, "X"),
        (0x1ee4b, "M", u"ل"),
        (0x1ee4c, "X"),
        (0x1ee4d, "M", u"ن"),
        (0x1ee4e, "M", u"س"),
        (0x1ee4f, "M", u"ع"),
        (0x1ee50, "X"),
        (0x1ee51, "M", u"ص"),
        (0x1ee52, "M", u"ق"),
        (0x1ee53, "X"),
        (0x1ee54, "M", u"ش"),
        (0x1ee55, "X"),
        (0x1ee57, "M", u"خ"),
        (0x1ee58, "X"),
        (0x1ee59, "M", u"ض"),
        (0x1ee5a, "X"),
        (0x1ee5b, "M", u"غ"),
        (0x1ee5c, "X"),
        (0x1ee5d, "M", u"ں"),
        (0x1ee5e, "X"),
        (0x1ee5f, "M", u"ٯ"),
        (0x1ee60, "X"),
        (0x1ee61, "M", u"ب"),
        (0x1ee62, "M", u"ج"),
        (0x1ee63, "X"),
        (0x1ee64, "M", u"ه"),
        (0x1ee65, "X"),
        (0x1ee67, "M", u"ح"),
        (0x1ee68, "M", u"ط"),
        (0x1ee69, "M", u"ي"),
        (0x1ee6a, "M", u"ك"),
        (0x1ee6b, "X"),
        (0x1ee6c, "M", u"م"),
        (0x1ee6d, "M", u"ن"),
        (0x1ee6e, "M", u"س"),
        (0x1ee6f, "M", u"ع"),
        (0x1ee70, "M", u"ف"),
        (0x1ee71, "M", u"ص"),
        (0x1ee72, "M", u"ق"),
        (0x1ee73, "X"),
        (0x1ee74, "M", u"ش"),
        (0x1ee75, "M", u"ت"),
        (0x1ee76, "M", u"ث"),
        (0x1ee77, "M", u"خ"),
        (0x1ee78, "X"),
        (0x1ee79, "M", u"ض"),
        (0x1ee7a, "M", u"ظ"),
        (0x1ee7b, "M", u"غ"),
        (0x1ee7c, "M", u"ٮ"),
        (0x1ee7d, "X"),
        (0x1ee7e, "M", u"ڡ"),
        (0x1ee7f, "X"),
        (0x1ee80, "M", u"ا"),
        (0x1ee81, "M", u"ب"),
        (0x1ee82, "M", u"ج"),
        (0x1ee83, "M", u"د"),
    ]
def _seg_71():
    """Generated UTS #46 table segment (combined into ``uts46data`` below);
    entries are ``(codepoint, status[, mapping])`` tuples."""
    return [
        (0x1ee84, "M", u"ه"),
        (0x1ee85, "M", u"و"),
        (0x1ee86, "M", u"ز"),
        (0x1ee87, "M", u"ح"),
        (0x1ee88, "M", u"ط"),
        (0x1ee89, "M", u"ي"),
        (0x1ee8a, "X"),
        (0x1ee8b, "M", u"ل"),
        (0x1ee8c, "M", u"م"),
        (0x1ee8d, "M", u"ن"),
        (0x1ee8e, "M", u"س"),
        (0x1ee8f, "M", u"ع"),
        (0x1ee90, "M", u"ف"),
        (0x1ee91, "M", u"ص"),
        (0x1ee92, "M", u"ق"),
        (0x1ee93, "M", u"ر"),
        (0x1ee94, "M", u"ش"),
        (0x1ee95, "M", u"ت"),
        (0x1ee96, "M", u"ث"),
        (0x1ee97, "M", u"خ"),
        (0x1ee98, "M", u"ذ"),
        (0x1ee99, "M", u"ض"),
        (0x1ee9a, "M", u"ظ"),
        (0x1ee9b, "M", u"غ"),
        (0x1ee9c, "X"),
        (0x1eea1, "M", u"ب"),
        (0x1eea2, "M", u"ج"),
        (0x1eea3, "M", u"د"),
        (0x1eea4, "X"),
        (0x1eea5, "M", u"و"),
        (0x1eea6, "M", u"ز"),
        (0x1eea7, "M", u"ح"),
        (0x1eea8, "M", u"ط"),
        (0x1eea9, "M", u"ي"),
        (0x1eeaa, "X"),
        (0x1eeab, "M", u"ل"),
        (0x1eeac, "M", u"م"),
        (0x1eead, "M", u"ن"),
        (0x1eeae, "M", u"س"),
        (0x1eeaf, "M", u"ع"),
        (0x1eeb0, "M", u"ف"),
        (0x1eeb1, "M", u"ص"),
        (0x1eeb2, "M", u"ق"),
        (0x1eeb3, "M", u"ر"),
        (0x1eeb4, "M", u"ش"),
        (0x1eeb5, "M", u"ت"),
        (0x1eeb6, "M", u"ث"),
        (0x1eeb7, "M", u"خ"),
        (0x1eeb8, "M", u"ذ"),
        (0x1eeb9, "M", u"ض"),
        (0x1eeba, "M", u"ظ"),
        (0x1eebb, "M", u"غ"),
        (0x1eebc, "X"),
        (0x1eef0, "V"),
        (0x1eef2, "X"),
        (0x1f000, "V"),
        (0x1f02c, "X"),
        (0x1f030, "V"),
        (0x1f094, "X"),
        (0x1f0a0, "V"),
        (0x1f0af, "X"),
        (0x1f0b1, "V"),
        (0x1f0c0, "X"),
        (0x1f0c1, "V"),
        (0x1f0d0, "X"),
        (0x1f0d1, "V"),
        (0x1f0f6, "X"),
        (0x1f101, "3", u"0,"),
        (0x1f102, "3", u"1,"),
        (0x1f103, "3", u"2,"),
        (0x1f104, "3", u"3,"),
        (0x1f105, "3", u"4,"),
        (0x1f106, "3", u"5,"),
        (0x1f107, "3", u"6,"),
        (0x1f108, "3", u"7,"),
        (0x1f109, "3", u"8,"),
        (0x1f10a, "3", u"9,"),
        (0x1f10b, "V"),
        (0x1f10d, "X"),
        (0x1f110, "3", u"(a)"),
        (0x1f111, "3", u"(b)"),
        (0x1f112, "3", u"(c)"),
        (0x1f113, "3", u"(d)"),
        (0x1f114, "3", u"(e)"),
        (0x1f115, "3", u"(f)"),
        (0x1f116, "3", u"(g)"),
        (0x1f117, "3", u"(h)"),
        (0x1f118, "3", u"(i)"),
        (0x1f119, "3", u"(j)"),
        (0x1f11a, "3", u"(k)"),
        (0x1f11b, "3", u"(l)"),
        (0x1f11c, "3", u"(m)"),
        (0x1f11d, "3", u"(n)"),
        (0x1f11e, "3", u"(o)"),
        (0x1f11f, "3", u"(p)"),
        (0x1f120, "3", u"(q)"),
        (0x1f121, "3", u"(r)"),
        (0x1f122, "3", u"(s)"),
        (0x1f123, "3", u"(t)"),
        (0x1f124, "3", u"(u)"),
    ]
def _seg_72():
    """Generated UTS #46 table segment (combined into ``uts46data`` below);
    entries are ``(codepoint, status[, mapping])`` tuples."""
    return [
        (0x1f125, "3", u"(v)"),
        (0x1f126, "3", u"(w)"),
        (0x1f127, "3", u"(x)"),
        (0x1f128, "3", u"(y)"),
        (0x1f129, "3", u"(z)"),
        (0x1f12a, "M", u"〔s〕"),
        (0x1f12b, "M", u"c"),
        (0x1f12c, "M", u"r"),
        (0x1f12d, "M", u"cd"),
        (0x1f12e, "M", u"wz"),
        (0x1f12f, "V"),
        (0x1f130, "M", u"a"),
        (0x1f131, "M", u"b"),
        (0x1f132, "M", u"c"),
        (0x1f133, "M", u"d"),
        (0x1f134, "M", u"e"),
        (0x1f135, "M", u"f"),
        (0x1f136, "M", u"g"),
        (0x1f137, "M", u"h"),
        (0x1f138, "M", u"i"),
        (0x1f139, "M", u"j"),
        (0x1f13a, "M", u"k"),
        (0x1f13b, "M", u"l"),
        (0x1f13c, "M", u"m"),
        (0x1f13d, "M", u"n"),
        (0x1f13e, "M", u"o"),
        (0x1f13f, "M", u"p"),
        (0x1f140, "M", u"q"),
        (0x1f141, "M", u"r"),
        (0x1f142, "M", u"s"),
        (0x1f143, "M", u"t"),
        (0x1f144, "M", u"u"),
        (0x1f145, "M", u"v"),
        (0x1f146, "M", u"w"),
        (0x1f147, "M", u"x"),
        (0x1f148, "M", u"y"),
        (0x1f149, "M", u"z"),
        (0x1f14a, "M", u"hv"),
        (0x1f14b, "M", u"mv"),
        (0x1f14c, "M", u"sd"),
        (0x1f14d, "M", u"ss"),
        (0x1f14e, "M", u"ppv"),
        (0x1f14f, "M", u"wc"),
        (0x1f150, "V"),
        (0x1f16a, "M", u"mc"),
        (0x1f16b, "M", u"md"),
        (0x1f16c, "M", u"mr"),
        (0x1f16d, "X"),
        (0x1f170, "V"),
        (0x1f190, "M", u"dj"),
        (0x1f191, "V"),
        (0x1f1ad, "X"),
        (0x1f1e6, "V"),
        (0x1f200, "M", u"ほか"),
        (0x1f201, "M", u"ココ"),
        (0x1f202, "M", u"サ"),
        (0x1f203, "X"),
        (0x1f210, "M", u"手"),
        (0x1f211, "M", u"字"),
        (0x1f212, "M", u"双"),
        (0x1f213, "M", u"デ"),
        (0x1f214, "M", u"二"),
        (0x1f215, "M", u"多"),
        (0x1f216, "M", u"解"),
        (0x1f217, "M", u"天"),
        (0x1f218, "M", u"交"),
        (0x1f219, "M", u"映"),
        (0x1f21a, "M", u"無"),
        (0x1f21b, "M", u"料"),
        (0x1f21c, "M", u"前"),
        (0x1f21d, "M", u"後"),
        (0x1f21e, "M", u"再"),
        (0x1f21f, "M", u"新"),
        (0x1f220, "M", u"初"),
        (0x1f221, "M", u"終"),
        (0x1f222, "M", u"生"),
        (0x1f223, "M", u"販"),
        (0x1f224, "M", u"声"),
        (0x1f225, "M", u"吹"),
        (0x1f226, "M", u"演"),
        (0x1f227, "M", u"投"),
        (0x1f228, "M", u"捕"),
        (0x1f229, "M", u"一"),
        (0x1f22a, "M", u"三"),
        (0x1f22b, "M", u"遊"),
        (0x1f22c, "M", u"左"),
        (0x1f22d, "M", u"中"),
        (0x1f22e, "M", u"右"),
        (0x1f22f, "M", u"指"),
        (0x1f230, "M", u"走"),
        (0x1f231, "M", u"打"),
        (0x1f232, "M", u"禁"),
        (0x1f233, "M", u"空"),
        (0x1f234, "M", u"合"),
        (0x1f235, "M", u"満"),
        (0x1f236, "M", u"有"),
        (0x1f237, "M", u"月"),
        (0x1f238, "M", u"申"),
        (0x1f239, "M", u"割"),
        (0x1f23a, "M", u"営"),
    ]
def _seg_73():
    """Generated UTS #46 table segment (combined into ``uts46data`` below);
    entries are ``(codepoint, status[, mapping])`` tuples."""
    return [
        (0x1f23b, "M", u"配"),
        (0x1f23c, "X"),
        (0x1f240, "M", u"〔本〕"),
        (0x1f241, "M", u"〔三〕"),
        (0x1f242, "M", u"〔二〕"),
        (0x1f243, "M", u"〔安〕"),
        (0x1f244, "M", u"〔点〕"),
        (0x1f245, "M", u"〔打〕"),
        (0x1f246, "M", u"〔盗〕"),
        (0x1f247, "M", u"〔勝〕"),
        (0x1f248, "M", u"〔敗〕"),
        (0x1f249, "X"),
        (0x1f250, "M", u"得"),
        (0x1f251, "M", u"可"),
        (0x1f252, "X"),
        (0x1f260, "V"),
        (0x1f266, "X"),
        (0x1f300, "V"),
        (0x1f6d6, "X"),
        (0x1f6e0, "V"),
        (0x1f6ed, "X"),
        (0x1f6f0, "V"),
        (0x1f6fb, "X"),
        (0x1f700, "V"),
        (0x1f774, "X"),
        (0x1f780, "V"),
        (0x1f7d9, "X"),
        (0x1f7e0, "V"),
        (0x1f7ec, "X"),
        (0x1f800, "V"),
        (0x1f80c, "X"),
        (0x1f810, "V"),
        (0x1f848, "X"),
        (0x1f850, "V"),
        (0x1f85a, "X"),
        (0x1f860, "V"),
        (0x1f888, "X"),
        (0x1f890, "V"),
        (0x1f8ae, "X"),
        (0x1f900, "V"),
        (0x1f90c, "X"),
        (0x1f90d, "V"),
        (0x1f972, "X"),
        (0x1f973, "V"),
        (0x1f977, "X"),
        (0x1f97a, "V"),
        (0x1f9a3, "X"),
        (0x1f9a5, "V"),
        (0x1f9ab, "X"),
        (0x1f9ae, "V"),
        (0x1f9cb, "X"),
        (0x1f9cd, "V"),
        (0x1fa54, "X"),
        (0x1fa60, "V"),
        (0x1fa6e, "X"),
        (0x1fa70, "V"),
        (0x1fa74, "X"),
        (0x1fa78, "V"),
        (0x1fa7b, "X"),
        (0x1fa80, "V"),
        (0x1fa83, "X"),
        (0x1fa90, "V"),
        (0x1fa96, "X"),
        (0x20000, "V"),
        (0x2a6d7, "X"),
        (0x2a700, "V"),
        (0x2b735, "X"),
        (0x2b740, "V"),
        (0x2b81e, "X"),
        (0x2b820, "V"),
        (0x2cea2, "X"),
        (0x2ceb0, "V"),
        (0x2ebe1, "X"),
        (0x2f800, "M", u"丽"),
        (0x2f801, "M", u"丸"),
        (0x2f802, "M", u"乁"),
        (0x2f803, "M", u"𠄢"),
        (0x2f804, "M", u"你"),
        (0x2f805, "M", u"侮"),
        (0x2f806, "M", u"侻"),
        (0x2f807, "M", u"倂"),
        (0x2f808, "M", u"偺"),
        (0x2f809, "M", u"備"),
        (0x2f80a, "M", u"僧"),
        (0x2f80b, "M", u"像"),
        (0x2f80c, "M", u"㒞"),
        (0x2f80d, "M", u"𠘺"),
        (0x2f80e, "M", u"免"),
        (0x2f80f, "M", u"兔"),
        (0x2f810, "M", u"兤"),
        (0x2f811, "M", u"具"),
        (0x2f812, "M", u"𠔜"),
        (0x2f813, "M", u"㒹"),
        (0x2f814, "M", u"內"),
        (0x2f815, "M", u"再"),
        (0x2f816, "M", u"𠕋"),
        (0x2f817, "M", u"冗"),
        (0x2f818, "M", u"冤"),
        (0x2f819, "M", u"仌"),
        (0x2f81a, "M", u"冬"),
    ]
def _seg_74():
    """Generated UTS #46 table segment (combined into ``uts46data`` below);
    entries are ``(codepoint, status[, mapping])`` tuples."""
    return [
        (0x2f81b, "M", u"况"),
        (0x2f81c, "M", u"𩇟"),
        (0x2f81d, "M", u"凵"),
        (0x2f81e, "M", u"刃"),
        (0x2f81f, "M", u"㓟"),
        (0x2f820, "M", u"刻"),
        (0x2f821, "M", u"剆"),
        (0x2f822, "M", u"割"),
        (0x2f823, "M", u"剷"),
        (0x2f824, "M", u"㔕"),
        (0x2f825, "M", u"勇"),
        (0x2f826, "M", u"勉"),
        (0x2f827, "M", u"勤"),
        (0x2f828, "M", u"勺"),
        (0x2f829, "M", u"包"),
        (0x2f82a, "M", u"匆"),
        (0x2f82b, "M", u"北"),
        (0x2f82c, "M", u"卉"),
        (0x2f82d, "M", u"卑"),
        (0x2f82e, "M", u"博"),
        (0x2f82f, "M", u"即"),
        (0x2f830, "M", u"卽"),
        (0x2f831, "M", u"卿"),
        (0x2f834, "M", u"𠨬"),
        (0x2f835, "M", u"灰"),
        (0x2f836, "M", u"及"),
        (0x2f837, "M", u"叟"),
        (0x2f838, "M", u"𠭣"),
        (0x2f839, "M", u"叫"),
        (0x2f83a, "M", u"叱"),
        (0x2f83b, "M", u"吆"),
        (0x2f83c, "M", u"咞"),
        (0x2f83d, "M", u"吸"),
        (0x2f83e, "M", u"呈"),
        (0x2f83f, "M", u"周"),
        (0x2f840, "M", u"咢"),
        (0x2f841, "M", u"哶"),
        (0x2f842, "M", u"唐"),
        (0x2f843, "M", u"啓"),
        (0x2f844, "M", u"啣"),
        (0x2f845, "M", u"善"),
        (0x2f847, "M", u"喙"),
        (0x2f848, "M", u"喫"),
        (0x2f849, "M", u"喳"),
        (0x2f84a, "M", u"嗂"),
        (0x2f84b, "M", u"圖"),
        (0x2f84c, "M", u"嘆"),
        (0x2f84d, "M", u"圗"),
        (0x2f84e, "M", u"噑"),
        (0x2f84f, "M", u"噴"),
        (0x2f850, "M", u"切"),
        (0x2f851, "M", u"壮"),
        (0x2f852, "M", u"城"),
        (0x2f853, "M", u"埴"),
        (0x2f854, "M", u"堍"),
        (0x2f855, "M", u"型"),
        (0x2f856, "M", u"堲"),
        (0x2f857, "M", u"報"),
        (0x2f858, "M", u"墬"),
        (0x2f859, "M", u"𡓤"),
        (0x2f85a, "M", u"売"),
        (0x2f85b, "M", u"壷"),
        (0x2f85c, "M", u"夆"),
        (0x2f85d, "M", u"多"),
        (0x2f85e, "M", u"夢"),
        (0x2f85f, "M", u"奢"),
        (0x2f860, "M", u"𡚨"),
        (0x2f861, "M", u"𡛪"),
        (0x2f862, "M", u"姬"),
        (0x2f863, "M", u"娛"),
        (0x2f864, "M", u"娧"),
        (0x2f865, "M", u"姘"),
        (0x2f866, "M", u"婦"),
        (0x2f867, "M", u"㛮"),
        (0x2f868, "X"),
        (0x2f869, "M", u"嬈"),
        (0x2f86a, "M", u"嬾"),
        (0x2f86c, "M", u"𡧈"),
        (0x2f86d, "M", u"寃"),
        (0x2f86e, "M", u"寘"),
        (0x2f86f, "M", u"寧"),
        (0x2f870, "M", u"寳"),
        (0x2f871, "M", u"𡬘"),
        (0x2f872, "M", u"寿"),
        (0x2f873, "M", u"将"),
        (0x2f874, "X"),
        (0x2f875, "M", u"尢"),
        (0x2f876, "M", u"㞁"),
        (0x2f877, "M", u"屠"),
        (0x2f878, "M", u"屮"),
        (0x2f879, "M", u"峀"),
        (0x2f87a, "M", u"岍"),
        (0x2f87b, "M", u"𡷤"),
        (0x2f87c, "M", u"嵃"),
        (0x2f87d, "M", u"𡷦"),
        (0x2f87e, "M", u"嵮"),
        (0x2f87f, "M", u"嵫"),
        (0x2f880, "M", u"嵼"),
        (0x2f881, "M", u"巡"),
        (0x2f882, "M", u"巢"),
    ]
def _seg_75():
    """Generated UTS #46 table segment (combined into ``uts46data`` below);
    entries are ``(codepoint, status[, mapping])`` tuples."""
    return [
        (0x2f883, "M", u"㠯"),
        (0x2f884, "M", u"巽"),
        (0x2f885, "M", u"帨"),
        (0x2f886, "M", u"帽"),
        (0x2f887, "M", u"幩"),
        (0x2f888, "M", u"㡢"),
        (0x2f889, "M", u"𢆃"),
        (0x2f88a, "M", u"㡼"),
        (0x2f88b, "M", u"庰"),
        (0x2f88c, "M", u"庳"),
        (0x2f88d, "M", u"庶"),
        (0x2f88e, "M", u"廊"),
        (0x2f88f, "M", u"𪎒"),
        (0x2f890, "M", u"廾"),
        (0x2f891, "M", u"𢌱"),
        (0x2f893, "M", u"舁"),
        (0x2f894, "M", u"弢"),
        (0x2f896, "M", u"㣇"),
        (0x2f897, "M", u"𣊸"),
        (0x2f898, "M", u"𦇚"),
        (0x2f899, "M", u"形"),
        (0x2f89a, "M", u"彫"),
        (0x2f89b, "M", u"㣣"),
        (0x2f89c, "M", u"徚"),
        (0x2f89d, "M", u"忍"),
        (0x2f89e, "M", u"志"),
        (0x2f89f, "M", u"忹"),
        (0x2f8a0, "M", u"悁"),
        (0x2f8a1, "M", u"㤺"),
        (0x2f8a2, "M", u"㤜"),
        (0x2f8a3, "M", u"悔"),
        (0x2f8a4, "M", u"𢛔"),
        (0x2f8a5, "M", u"惇"),
        (0x2f8a6, "M", u"慈"),
        (0x2f8a7, "M", u"慌"),
        (0x2f8a8, "M", u"慎"),
        (0x2f8a9, "M", u"慌"),
        (0x2f8aa, "M", u"慺"),
        (0x2f8ab, "M", u"憎"),
        (0x2f8ac, "M", u"憲"),
        (0x2f8ad, "M", u"憤"),
        (0x2f8ae, "M", u"憯"),
        (0x2f8af, "M", u"懞"),
        (0x2f8b0, "M", u"懲"),
        (0x2f8b1, "M", u"懶"),
        (0x2f8b2, "M", u"成"),
        (0x2f8b3, "M", u"戛"),
        (0x2f8b4, "M", u"扝"),
        (0x2f8b5, "M", u"抱"),
        (0x2f8b6, "M", u"拔"),
        (0x2f8b7, "M", u"捐"),
        (0x2f8b8, "M", u"𢬌"),
        (0x2f8b9, "M", u"挽"),
        (0x2f8ba, "M", u"拼"),
        (0x2f8bb, "M", u"捨"),
        (0x2f8bc, "M", u"掃"),
        (0x2f8bd, "M", u"揤"),
        (0x2f8be, "M", u"𢯱"),
        (0x2f8bf, "M", u"搢"),
        (0x2f8c0, "M", u"揅"),
        (0x2f8c1, "M", u"掩"),
        (0x2f8c2, "M", u"㨮"),
        (0x2f8c3, "M", u"摩"),
        (0x2f8c4, "M", u"摾"),
        (0x2f8c5, "M", u"撝"),
        (0x2f8c6, "M", u"摷"),
        (0x2f8c7, "M", u"㩬"),
        (0x2f8c8, "M", u"敏"),
        (0x2f8c9, "M", u"敬"),
        (0x2f8ca, "M", u"𣀊"),
        (0x2f8cb, "M", u"旣"),
        (0x2f8cc, "M", u"書"),
        (0x2f8cd, "M", u"晉"),
        (0x2f8ce, "M", u"㬙"),
        (0x2f8cf, "M", u"暑"),
        (0x2f8d0, "M", u"㬈"),
        (0x2f8d1, "M", u"㫤"),
        (0x2f8d2, "M", u"冒"),
        (0x2f8d3, "M", u"冕"),
        (0x2f8d4, "M", u"最"),
        (0x2f8d5, "M", u"暜"),
        (0x2f8d6, "M", u"肭"),
        (0x2f8d7, "M", u"䏙"),
        (0x2f8d8, "M", u"朗"),
        (0x2f8d9, "M", u"望"),
        (0x2f8da, "M", u"朡"),
        (0x2f8db, "M", u"杞"),
        (0x2f8dc, "M", u"杓"),
        (0x2f8dd, "M", u"𣏃"),
        (0x2f8de, "M", u"㭉"),
        (0x2f8df, "M", u"柺"),
        (0x2f8e0, "M", u"枅"),
        (0x2f8e1, "M", u"桒"),
        (0x2f8e2, "M", u"梅"),
        (0x2f8e3, "M", u"𣑭"),
        (0x2f8e4, "M", u"梎"),
        (0x2f8e5, "M", u"栟"),
        (0x2f8e6, "M", u"椔"),
        (0x2f8e7, "M", u"㮝"),
        (0x2f8e8, "M", u"楂"),
    ]
def _seg_76():
    """Generated UTS #46 table segment (combined into ``uts46data`` below);
    entries are ``(codepoint, status[, mapping])`` tuples."""
    return [
        (0x2f8e9, "M", u"榣"),
        (0x2f8ea, "M", u"槪"),
        (0x2f8eb, "M", u"檨"),
        (0x2f8ec, "M", u"𣚣"),
        (0x2f8ed, "M", u"櫛"),
        (0x2f8ee, "M", u"㰘"),
        (0x2f8ef, "M", u"次"),
        (0x2f8f0, "M", u"𣢧"),
        (0x2f8f1, "M", u"歔"),
        (0x2f8f2, "M", u"㱎"),
        (0x2f8f3, "M", u"歲"),
        (0x2f8f4, "M", u"殟"),
        (0x2f8f5, "M", u"殺"),
        (0x2f8f6, "M", u"殻"),
        (0x2f8f7, "M", u"𣪍"),
        (0x2f8f8, "M", u"𡴋"),
        (0x2f8f9, "M", u"𣫺"),
        (0x2f8fa, "M", u"汎"),
        (0x2f8fb, "M", u"𣲼"),
        (0x2f8fc, "M", u"沿"),
        (0x2f8fd, "M", u"泍"),
        (0x2f8fe, "M", u"汧"),
        (0x2f8ff, "M", u"洖"),
        (0x2f900, "M", u"派"),
        (0x2f901, "M", u"海"),
        (0x2f902, "M", u"流"),
        (0x2f903, "M", u"浩"),
        (0x2f904, "M", u"浸"),
        (0x2f905, "M", u"涅"),
        (0x2f906, "M", u"𣴞"),
        (0x2f907, "M", u"洴"),
        (0x2f908, "M", u"港"),
        (0x2f909, "M", u"湮"),
        (0x2f90a, "M", u"㴳"),
        (0x2f90b, "M", u"滋"),
        (0x2f90c, "M", u"滇"),
        (0x2f90d, "M", u"𣻑"),
        (0x2f90e, "M", u"淹"),
        (0x2f90f, "M", u"潮"),
        (0x2f910, "M", u"𣽞"),
        (0x2f911, "M", u"𣾎"),
        (0x2f912, "M", u"濆"),
        (0x2f913, "M", u"瀹"),
        (0x2f914, "M", u"瀞"),
        (0x2f915, "M", u"瀛"),
        (0x2f916, "M", u"㶖"),
        (0x2f917, "M", u"灊"),
        (0x2f918, "M", u"災"),
        (0x2f919, "M", u"灷"),
        (0x2f91a, "M", u"炭"),
        (0x2f91b, "M", u"𠔥"),
        (0x2f91c, "M", u"煅"),
        (0x2f91d, "M", u"𤉣"),
        (0x2f91e, "M", u"熜"),
        (0x2f91f, "X"),
        (0x2f920, "M", u"爨"),
        (0x2f921, "M", u"爵"),
        (0x2f922, "M", u"牐"),
        (0x2f923, "M", u"𤘈"),
        (0x2f924, "M", u"犀"),
        (0x2f925, "M", u"犕"),
        (0x2f926, "M", u"𤜵"),
        (0x2f927, "M", u"𤠔"),
        (0x2f928, "M", u"獺"),
        (0x2f929, "M", u"王"),
        (0x2f92a, "M", u"㺬"),
        (0x2f92b, "M", u"玥"),
        (0x2f92c, "M", u"㺸"),
        (0x2f92e, "M", u"瑇"),
        (0x2f92f, "M", u"瑜"),
        (0x2f930, "M", u"瑱"),
        (0x2f931, "M", u"璅"),
        (0x2f932, "M", u"瓊"),
        (0x2f933, "M", u"㼛"),
        (0x2f934, "M", u"甤"),
        (0x2f935, "M", u"𤰶"),
        (0x2f936, "M", u"甾"),
        (0x2f937, "M", u"𤲒"),
        (0x2f938, "M", u"異"),
        (0x2f939, "M", u"𢆟"),
        (0x2f93a, "M", u"瘐"),
        (0x2f93b, "M", u"𤾡"),
        (0x2f93c, "M", u"𤾸"),
        (0x2f93d, "M", u"𥁄"),
        (0x2f93e, "M", u"㿼"),
        (0x2f93f, "M", u"䀈"),
        (0x2f940, "M", u"直"),
        (0x2f941, "M", u"𥃳"),
        (0x2f942, "M", u"𥃲"),
        (0x2f943, "M", u"𥄙"),
        (0x2f944, "M", u"𥄳"),
        (0x2f945, "M", u"眞"),
        (0x2f946, "M", u"真"),
        (0x2f948, "M", u"睊"),
        (0x2f949, "M", u"䀹"),
        (0x2f94a, "M", u"瞋"),
        (0x2f94b, "M", u"䁆"),
        (0x2f94c, "M", u"䂖"),
        (0x2f94d, "M", u"𥐝"),
        (0x2f94e, "M", u"硎"),
    ]
def _seg_77():
    """Generated UTS #46 table segment (combined into ``uts46data`` below);
    entries are ``(codepoint, status[, mapping])`` tuples."""
    return [
        (0x2f94f, "M", u"碌"),
        (0x2f950, "M", u"磌"),
        (0x2f951, "M", u"䃣"),
        (0x2f952, "M", u"𥘦"),
        (0x2f953, "M", u"祖"),
        (0x2f954, "M", u"𥚚"),
        (0x2f955, "M", u"𥛅"),
        (0x2f956, "M", u"福"),
        (0x2f957, "M", u"秫"),
        (0x2f958, "M", u"䄯"),
        (0x2f959, "M", u"穀"),
        (0x2f95a, "M", u"穊"),
        (0x2f95b, "M", u"穏"),
        (0x2f95c, "M", u"𥥼"),
        (0x2f95d, "M", u"𥪧"),
        (0x2f95f, "X"),
        (0x2f960, "M", u"䈂"),
        (0x2f961, "M", u"𥮫"),
        (0x2f962, "M", u"篆"),
        (0x2f963, "M", u"築"),
        (0x2f964, "M", u"䈧"),
        (0x2f965, "M", u"𥲀"),
        (0x2f966, "M", u"糒"),
        (0x2f967, "M", u"䊠"),
        (0x2f968, "M", u"糨"),
        (0x2f969, "M", u"糣"),
        (0x2f96a, "M", u"紀"),
        (0x2f96b, "M", u"𥾆"),
        (0x2f96c, "M", u"絣"),
        (0x2f96d, "M", u"䌁"),
        (0x2f96e, "M", u"緇"),
        (0x2f96f, "M", u"縂"),
        (0x2f970, "M", u"繅"),
        (0x2f971, "M", u"䌴"),
        (0x2f972, "M", u"𦈨"),
        (0x2f973, "M", u"𦉇"),
        (0x2f974, "M", u"䍙"),
        (0x2f975, "M", u"𦋙"),
        (0x2f976, "M", u"罺"),
        (0x2f977, "M", u"𦌾"),
        (0x2f978, "M", u"羕"),
        (0x2f979, "M", u"翺"),
        (0x2f97a, "M", u"者"),
        (0x2f97b, "M", u"𦓚"),
        (0x2f97c, "M", u"𦔣"),
        (0x2f97d, "M", u"聠"),
        (0x2f97e, "M", u"𦖨"),
        (0x2f97f, "M", u"聰"),
        (0x2f980, "M", u"𣍟"),
        (0x2f981, "M", u"䏕"),
        (0x2f982, "M", u"育"),
        (0x2f983, "M", u"脃"),
        (0x2f984, "M", u"䐋"),
        (0x2f985, "M", u"脾"),
        (0x2f986, "M", u"媵"),
        (0x2f987, "M", u"𦞧"),
        (0x2f988, "M", u"𦞵"),
        (0x2f989, "M", u"𣎓"),
        (0x2f98a, "M", u"𣎜"),
        (0x2f98b, "M", u"舁"),
        (0x2f98c, "M", u"舄"),
        (0x2f98d, "M", u"辞"),
        (0x2f98e, "M", u"䑫"),
        (0x2f98f, "M", u"芑"),
        (0x2f990, "M", u"芋"),
        (0x2f991, "M", u"芝"),
        (0x2f992, "M", u"劳"),
        (0x2f993, "M", u"花"),
        (0x2f994, "M", u"芳"),
        (0x2f995, "M", u"芽"),
        (0x2f996, "M", u"苦"),
        (0x2f997, "M", u"𦬼"),
        (0x2f998, "M", u"若"),
        (0x2f999, "M", u"茝"),
        (0x2f99a, "M", u"荣"),
        (0x2f99b, "M", u"莭"),
        (0x2f99c, "M", u"茣"),
        (0x2f99d, "M", u"莽"),
        (0x2f99e, "M", u"菧"),
        (0x2f99f, "M", u"著"),
        (0x2f9a0, "M", u"荓"),
        (0x2f9a1, "M", u"菊"),
        (0x2f9a2, "M", u"菌"),
        (0x2f9a3, "M", u"菜"),
        (0x2f9a4, "M", u"𦰶"),
        (0x2f9a5, "M", u"𦵫"),
        (0x2f9a6, "M", u"𦳕"),
        (0x2f9a7, "M", u"䔫"),
        (0x2f9a8, "M", u"蓱"),
        (0x2f9a9, "M", u"蓳"),
        (0x2f9aa, "M", u"蔖"),
        (0x2f9ab, "M", u"𧏊"),
        (0x2f9ac, "M", u"蕤"),
        (0x2f9ad, "M", u"𦼬"),
        (0x2f9ae, "M", u"䕝"),
        (0x2f9af, "M", u"䕡"),
        (0x2f9b0, "M", u"𦾱"),
        (0x2f9b1, "M", u"𧃒"),
        (0x2f9b2, "M", u"䕫"),
        (0x2f9b3, "M", u"虐"),
    ]
def _seg_78():
    """Generated UTS #46 table segment (combined into ``uts46data`` below);
    entries are ``(codepoint, status[, mapping])`` tuples."""
    return [
        (0x2f9b4, "M", u"虜"),
        (0x2f9b5, "M", u"虧"),
        (0x2f9b6, "M", u"虩"),
        (0x2f9b7, "M", u"蚩"),
        (0x2f9b8, "M", u"蚈"),
        (0x2f9b9, "M", u"蜎"),
        (0x2f9ba, "M", u"蛢"),
        (0x2f9bb, "M", u"蝹"),
        (0x2f9bc, "M", u"蜨"),
        (0x2f9bd, "M", u"蝫"),
        (0x2f9be, "M", u"螆"),
        (0x2f9bf, "X"),
        (0x2f9c0, "M", u"蟡"),
        (0x2f9c1, "M", u"蠁"),
        (0x2f9c2, "M", u"䗹"),
        (0x2f9c3, "M", u"衠"),
        (0x2f9c4, "M", u"衣"),
        (0x2f9c5, "M", u"𧙧"),
        (0x2f9c6, "M", u"裗"),
        (0x2f9c7, "M", u"裞"),
        (0x2f9c8, "M", u"䘵"),
        (0x2f9c9, "M", u"裺"),
        (0x2f9ca, "M", u"㒻"),
        (0x2f9cb, "M", u"𧢮"),
        (0x2f9cc, "M", u"𧥦"),
        (0x2f9cd, "M", u"䚾"),
        (0x2f9ce, "M", u"䛇"),
        (0x2f9cf, "M", u"誠"),
        (0x2f9d0, "M", u"諭"),
        (0x2f9d1, "M", u"變"),
        (0x2f9d2, "M", u"豕"),
        (0x2f9d3, "M", u"𧲨"),
        (0x2f9d4, "M", u"貫"),
        (0x2f9d5, "M", u"賁"),
        (0x2f9d6, "M", u"贛"),
        (0x2f9d7, "M", u"起"),
        (0x2f9d8, "M", u"𧼯"),
        (0x2f9d9, "M", u"𠠄"),
        (0x2f9da, "M", u"跋"),
        (0x2f9db, "M", u"趼"),
        (0x2f9dc, "M", u"跰"),
        (0x2f9dd, "M", u"𠣞"),
        (0x2f9de, "M", u"軔"),
        (0x2f9df, "M", u"輸"),
        (0x2f9e0, "M", u"𨗒"),
        (0x2f9e1, "M", u"𨗭"),
        (0x2f9e2, "M", u"邔"),
        (0x2f9e3, "M", u"郱"),
        (0x2f9e4, "M", u"鄑"),
        (0x2f9e5, "M", u"𨜮"),
        (0x2f9e6, "M", u"鄛"),
        (0x2f9e7, "M", u"鈸"),
        (0x2f9e8, "M", u"鋗"),
        (0x2f9e9, "M", u"鋘"),
        (0x2f9ea, "M", u"鉼"),
        (0x2f9eb, "M", u"鏹"),
        (0x2f9ec, "M", u"鐕"),
        (0x2f9ed, "M", u"𨯺"),
        (0x2f9ee, "M", u"開"),
        (0x2f9ef, "M", u"䦕"),
        (0x2f9f0, "M", u"閷"),
        (0x2f9f1, "M", u"𨵷"),
        (0x2f9f2, "M", u"䧦"),
        (0x2f9f3, "M", u"雃"),
        (0x2f9f4, "M", u"嶲"),
        (0x2f9f5, "M", u"霣"),
        (0x2f9f6, "M", u"𩅅"),
        (0x2f9f7, "M", u"𩈚"),
        (0x2f9f8, "M", u"䩮"),
        (0x2f9f9, "M", u"䩶"),
        (0x2f9fa, "M", u"韠"),
        (0x2f9fb, "M", u"𩐊"),
        (0x2f9fc, "M", u"䪲"),
        (0x2f9fd, "M", u"𩒖"),
        (0x2f9fe, "M", u"頋"),
        (0x2fa00, "M", u"頩"),
        (0x2fa01, "M", u"𩖶"),
        (0x2fa02, "M", u"飢"),
        (0x2fa03, "M", u"䬳"),
        (0x2fa04, "M", u"餩"),
        (0x2fa05, "M", u"馧"),
        (0x2fa06, "M", u"駂"),
        (0x2fa07, "M", u"駾"),
        (0x2fa08, "M", u"䯎"),
        (0x2fa09, "M", u"𩬰"),
        (0x2fa0a, "M", u"鬒"),
        (0x2fa0b, "M", u"鱀"),
        (0x2fa0c, "M", u"鳽"),
        (0x2fa0d, "M", u"䳎"),
        (0x2fa0e, "M", u"䳭"),
        (0x2fa0f, "M", u"鵧"),
        (0x2fa10, "M", u"𪃎"),
        (0x2fa11, "M", u"䳸"),
        (0x2fa12, "M", u"𪄅"),
        (0x2fa13, "M", u"𪈎"),
        (0x2fa14, "M", u"𪊑"),
        (0x2fa15, "M", u"麻"),
        (0x2fa16, "M", u"䵖"),
        (0x2fa17, "M", u"黹"),
        (0x2fa18, "M", u"黾"),
    ]
def _seg_79():
    """Generated UTS #46 table segment (combined into ``uts46data`` below);
    entries are ``(codepoint, status[, mapping])`` tuples."""
    return [
        (0x2fa19, "M", u"鼅"),
        (0x2fa1a, "M", u"鼏"),
        (0x2fa1b, "M", u"鼖"),
        (0x2fa1c, "M", u"鼻"),
        (0x2fa1d, "M", u"𪘀"),
        (0x2fa1e, "X"),
        (0xe0100, "I"),
        (0xe01f0, "X"),
    ]
# The complete UTS #46 mapping table: the concatenation of every generated
# segment above, exposed as one immutable tuple.  Segments are split into
# separate functions only to keep each code object small.
uts46data = tuple(
    _seg_0()
    + _seg_1()
    + _seg_2()
    + _seg_3()
    + _seg_4()
    + _seg_5()
    + _seg_6()
    + _seg_7()
    + _seg_8()
    + _seg_9()
    + _seg_10()
    + _seg_11()
    + _seg_12()
    + _seg_13()
    + _seg_14()
    + _seg_15()
    + _seg_16()
    + _seg_17()
    + _seg_18()
    + _seg_19()
    + _seg_20()
    + _seg_21()
    + _seg_22()
    + _seg_23()
    + _seg_24()
    + _seg_25()
    + _seg_26()
    + _seg_27()
    + _seg_28()
    + _seg_29()
    + _seg_30()
    + _seg_31()
    + _seg_32()
    + _seg_33()
    + _seg_34()
    + _seg_35()
    + _seg_36()
    + _seg_37()
    + _seg_38()
    + _seg_39()
    + _seg_40()
    + _seg_41()
    + _seg_42()
    + _seg_43()
    + _seg_44()
    + _seg_45()
    + _seg_46()
    + _seg_47()
    + _seg_48()
    + _seg_49()
    + _seg_50()
    + _seg_51()
    + _seg_52()
    + _seg_53()
    + _seg_54()
    + _seg_55()
    + _seg_56()
    + _seg_57()
    + _seg_58()
    + _seg_59()
    + _seg_60()
    + _seg_61()
    + _seg_62()
    + _seg_63()
    + _seg_64()
    + _seg_65()
    + _seg_66()
    + _seg_67()
    + _seg_68()
    + _seg_69()
    + _seg_70()
    + _seg_71()
    + _seg_72()
    + _seg_73()
    + _seg_74()
    + _seg_75()
    + _seg_76()
    + _seg_77()
    + _seg_78()
    + _seg_79()
)
| nilq/baby-python | python |
def read_input():
    """Yield one integer per line of ``input.txt``.

    For puzzles where each input line is a single number.  Iterates the
    file handle lazily instead of loading every line into memory with
    ``readlines()``.
    """
    with open('input.txt') as fh:
        for line in fh:
            yield int(line.strip())
def is_sum_of_two_in(num, buf):
    """Return True if ``num`` equals the sum of two entries of ``buf`` taken
    from distinct positions (the two values may be equal).

    Uses a set of previously seen values so the check runs in O(len(buf))
    instead of scanning all O(n^2) index pairs.
    """
    seen = set()
    for value in buf:
        if num - value in seen:
            return True
        seen.add(value)
    return False
def main(preamble=25):
    """Print the first input number that is not the sum of two of the
    ``preamble`` numbers immediately before it, then stop.

    preamble: size of the sliding window of recent numbers.  Defaults to 25,
    matching the original hard-coded ring-buffer size, so existing callers
    are unaffected.
    """
    from collections import deque

    # deque(maxlen=...) keeps exactly the last `preamble` numbers for us,
    # replacing the manual ring buffer + wrap-around index bookkeeping.
    window = deque(maxlen=preamble)
    for num in read_input():
        # Only start checking once a full preamble has been seen.
        if len(window) == preamble and not is_sum_of_two_in(num, window):
            print(num)
            return
        window.append(num)
# Script entry point: run the solver when executed directly.
if __name__ == '__main__':
    main()
"""
Background job servicers
"""
import logging
from datetime import timedelta
from sqlalchemy.sql import func, or_
from couchers import config, email, urls
from couchers.db import session_scope
from couchers.email.dev import print_dev_email
from couchers.email.smtp import send_smtp_email
from couchers.models import GroupChat, GroupChatSubscription, LoginToken, Message, MessageType, SignupToken, User
from couchers.utils import now
logger = logging.getLogger(__name__)
def process_send_email(payload):
    """Deliver one queued email and record the sent message in the database.

    payload: job payload carrying ``sender_name``, ``sender_email``,
    ``recipient``, ``subject``, ``plain`` and ``html`` fields.
    """
    logger.info(f"Sending email with subject '{payload.subject}' to '{payload.recipient}'")
    # selects a "sender", which either prints the email to the logger or sends it out with SMTP
    sender = send_smtp_email if config.config["ENABLE_EMAIL"] else print_dev_email
    # the sender must return a models.Email object that can be added to the database.
    # Named `email_entry` (not `email`) so it does not shadow the imported
    # couchers.email module.
    email_entry = sender(
        sender_name=payload.sender_name,
        sender_email=payload.sender_email,
        recipient=payload.recipient,
        subject=payload.subject,
        plain=payload.plain,
        html=payload.html,
    )
    with session_scope() as session:
        session.add(email_entry)
def process_purge_login_tokens(payload):
    """Bulk-delete all login tokens that are no longer valid."""
    logger.info(f"Purging login tokens")
    with session_scope() as session:
        invalid_tokens = session.query(LoginToken).filter(LoginToken.is_valid == False)
        invalid_tokens.delete(synchronize_session=False)
def process_purge_signup_tokens(payload):
    """Bulk-delete all signup tokens that are no longer valid."""
    logger.info(f"Purging signup tokens")
    with session_scope() as session:
        invalid_tokens = session.query(SignupToken).filter(SignupToken.is_valid == False)
        invalid_tokens.delete(synchronize_session=False)
def process_send_message_notifications(payload):
    """
    Sends out email notifications for messages that have been unseen for a long enough time.

    Two-phase approach: first find users with any text message that is older
    than 5 minutes, newer than what they were last notified about, and still
    unseen; then, for each such user, gather ALL their unseen messages
    (regardless of age) grouped per group chat, and send one digest email.
    """
    # very crude and dumb algorithm
    logger.info(f"Sending out email notifications for unseen messages")
    with session_scope() as session:
        # users who have unnotified messages older than 5 minutes in any group chat
        users = (
            session.query(User)
            .join(GroupChatSubscription, GroupChatSubscription.user_id == User.id)
            .join(Message, Message.conversation_id == GroupChatSubscription.group_chat_id)
            # only messages sent while the user was a member of the chat
            .filter(Message.time >= GroupChatSubscription.joined)
            .filter(or_(Message.time <= GroupChatSubscription.left, GroupChatSubscription.left == None))
            # not yet notified about, and not yet seen in the app
            .filter(Message.id > User.last_notified_message_id)
            .filter(Message.id > GroupChatSubscription.last_seen_message_id)
            # grace period: give the user 5 minutes to read it themselves
            .filter(Message.time < now() - timedelta(minutes=5))
            .filter(Message.message_type == MessageType.text)  # TODO: only text messages for now
            .all()
        )

        for user in users:
            # now actually grab all the group chats, not just less than 5 min old
            # per group chat: the latest unseen message id and the unseen count
            subquery = (
                session.query(
                    GroupChatSubscription.group_chat_id.label("group_chat_id"),
                    func.max(GroupChatSubscription.id).label("group_chat_subscriptions_id"),
                    func.max(Message.id).label("message_id"),
                    func.count(Message.id).label("count_unseen"),
                )
                .join(Message, Message.conversation_id == GroupChatSubscription.group_chat_id)
                .filter(GroupChatSubscription.user_id == user.id)
                .filter(Message.id > user.last_notified_message_id)
                .filter(Message.id > GroupChatSubscription.last_seen_message_id)
                .filter(Message.time >= GroupChatSubscription.joined)
                .filter(Message.message_type == MessageType.text)  # TODO: only text messages for now
                .filter(or_(Message.time <= GroupChatSubscription.left, GroupChatSubscription.left == None))
                .group_by(GroupChatSubscription.group_chat_id)
                .order_by(func.max(Message.id).desc())
                .subquery()
            )

            # join back to fetch the chat and its latest unseen message object
            unseen_messages = (
                session.query(GroupChat, Message, subquery.c.count_unseen)
                .join(subquery, subquery.c.message_id == Message.id)
                .join(GroupChat, GroupChat.conversation_id == subquery.c.group_chat_id)
                .order_by(subquery.c.message_id.desc())
                .all()
            )

            # advance the high-water mark BEFORE sending, so a crash between
            # commit and send cannot cause repeated notifications
            user.last_notified_message_id = max(message.id for _, message, _ in unseen_messages)
            session.commit()

            total_unseen_message_count = sum(count for _, _, count in unseen_messages)

            email.enqueue_email_from_template(
                user.email,
                "unseen_messages",
                template_args={
                    "user": user,
                    "total_unseen_message_count": total_unseen_message_count,
                    "unseen_messages": [
                        (group_chat, latest_message, count) for group_chat, latest_message, count in unseen_messages
                    ],
                    "group_chats_link": urls.messages_link(),
                },
            )
| nilq/baby-python | python |
import random
import math
from scytale.ciphers.base import Cipher
from scytale.exceptions import ScytaleError
class Fleissner(Cipher):
    """Fleissner (turning-grille) transposition cipher.

    The key is a flat string of "X" (hole) and "o" (solid) cells describing a
    square grille.  Writing fills the holes left-to-right/top-to-bottom,
    rotates the grille 90 degrees, and repeats until the square is full;
    reading reverses the process.  A valid key's holes must tile the square
    exactly over the four rotations (no overlap, no gap).
    """

    name = "Fleissner"
    default = "XooXooooooXoXoooXoooXXoXoooooooooXoXoooXooooXoooXoXoooXXoooooooo"

    def __init__(self, key=None):
        """Validate the key and verify the grille tiles the square.

        Raises ScytaleError if the key is malformed, or if its holes overlap
        or fail to cover every cell across the four rotations.
        """
        self.key = self.validate(key)
        self.grille = self.init_grille(self.key)
        self.key_size = len(self.key)
        self.grille_size = len(self.grille)
        # Encrypting a full block of "A"s exercises every rotation; if any
        # hole overlaps another (or leaves a gap), some cells keep their "X"
        # padding and fewer than key_size letters survive.
        all_a = self.encrypt("A" * self.key_size)
        all_a = all_a.replace("X", "")
        if len(all_a) != self.key_size:
            raise ScytaleError(
                "Either a space in the grille overlaps another, or your gaps do not cover the grid."
            )

    def compare_ciphertext(self, a, b):
        """Returns true if the two ciphertexts are equivalent in this cipher"""
        return self.compare_plaintext(self.decrypt(a), self.decrypt(b))

    def compare_plaintext(self, a, b):
        """Returns true if the two plaintexts are equivalent in this cipher"""
        a = self.make_comparable(a)
        b = self.make_comparable(b)
        return b.startswith(a) or a.startswith(
            b
        )  # i.e. ignore any final random letters

    def validate(self, key):
        """Return a checked key string, or raise ScytaleError.

        A key must consist only of "X"/"o" characters and have a length that
        forms a perfect square.
        """
        if key is None:
            key = self.default
        xo = set(list(key))
        if xo != set(["X", "o"]):
            raise ScytaleError(
                "The Fleissner Grille key must be a string of X (cut) and o (don't cut) letters only"
            )
        length = len(key)
        sqrt = int(math.sqrt(length))
        if math.pow(sqrt, 2) != length:
            raise ScytaleError("You cannot form a square from {0} cells".format(length))
        return key

    def init_grille(self, key):
        """Reshape the flat key string into a square grid (list of rows)."""
        size = int(math.sqrt(len(key)))
        return [list(key[i:i + size]) for i in range(0, len(key), size)]

    def rotate(self, grille, clockwise=True):
        """Return the grille rotated 90 degrees (clockwise by default)."""
        if clockwise:
            return list(zip(*grille[::-1]))
        return list(zip(*grille))[::-1]

    def next_cell(self, grille, row, column):
        """Advance one cell in reading order, rotating the grille after the
        last cell of the square is passed."""
        if column == self.grille_size - 1:
            column = 0
            if row == self.grille_size - 1:
                row = 0
                grille = self.rotate(grille)
            else:
                row += 1
        else:
            column += 1
        return grille, row, column

    def space_at(self, grille, row=0, column=0):
        """Scan forward from (row, column) to the next hole ("X") cell."""
        space = grille[row][column]
        while space != "X":
            grille, row, column = self.next_cell(grille, row, column)
            space = grille[row][column]
        return grille, row, column

    def write(self, text):
        """Write one block of plaintext through the grille; unfilled cells
        remain "X" padding."""
        ciphertext = ["X" for _ in range(self.key_size)]
        row, column = 0, 0
        grille = self.grille
        while text:
            grille, row, column = self.space_at(grille, row=row, column=column)
            ciphertext[self.grille_size * row + column] = text.pop(0)
            grille, row, column = self.next_cell(grille, row, column)
        return "".join(ciphertext)

    def read(self, text):
        """Read one full block of ciphertext back through the grille."""
        plaintext = []
        row, column = 0, 0
        grille = self.grille
        for _ in range(self.key_size):
            grille, row, column = self.space_at(grille, row=row, column=column)
            plaintext.append(text[self.grille_size * row + column])
            grille, row, column = self.next_cell(grille, row, column)
        return "".join(plaintext)

    def encrypt(self, plaintext):
        """Encrypt plaintext, one key_size block at a time."""
        plaintext = self.clean(plaintext.upper())
        texts = [
            list(plaintext[i:i + self.key_size])
            for i in range(0, len(plaintext), self.key_size)
        ]
        return "".join([self.write(t) for t in texts])

    def decrypt(self, ciphertext):
        """Decrypt ciphertext block by block.

        NOTE(review): rstrip("X") removes padding, but would also strip a
        genuine trailing X from the plaintext.
        """
        texts = [
            list(ciphertext[i:i + self.key_size])
            for i in range(0, len(ciphertext), self.key_size)
        ]
        plaintext = "".join([self.read(t) for t in texts])
        return plaintext.rstrip("X")

    @staticmethod
    def generate_key():
        """Generate a random valid key by rejection sampling: shuffle the
        default key until the arrangement validates.

        Shuffling preserves the hole count, and any invalid arrangement is
        rejected by __init__ with a ScytaleError, so this terminates with a
        usable key.  Crude, but correct.
        """
        key = list(Fleissner.default)
        random.shuffle(key)
        while True:
            try:
                Fleissner(key="".join(key))
                return "".join(key)
            except ScytaleError:
                # Overlapping or uncovered cells: reshuffle and retry.
                # Narrowed from a bare `except:` so real bugs are not hidden.
                random.shuffle(key)
| nilq/baby-python | python |
from collections import defaultdict
from factored_reps.models.parents_net import ParentsNet
import numpy as np
import torch
import torch.nn
from markov_abstr.gridworld.models.nnutils import Network
from markov_abstr.gridworld.models.phinet import PhiNet
from markov_abstr.gridworld.models.invnet import InvNet
from markov_abstr.gridworld.models.fwdnet import FwdNet
from markov_abstr.gridworld.models.contrastivenet import ContrastiveNet
class FactorNet(Network):
    """Latent-state model that jointly trains an encoder (phi) with inverse,
    contrastive ("ratio"), distance, parent-selection and factored forward
    models.

    The training objective is sum_k coefs[k] * L_k over
    {L_inv, L_rat, L_dis, L_fwd, L_fac}; any coefficient not supplied in
    `coefs` defaults to 1.0, and a coefficient of 0.0 skips computing that
    loss entirely.
    """
    def __init__(self,
                 n_actions,
                 input_shape=2,
                 n_latent_dims=4,
                 n_hidden_layers=1,
                 n_units_per_layer=32,
                 lr=0.001,
                 max_dz=0.1,
                 coefs=None):
        """
        :param n_actions: size of the discrete action space
        :param input_shape: shape of raw observations fed to the encoder
        :param n_latent_dims: dimensionality of the latent state z
        :param n_hidden_layers: hidden layers in each sub-network
        :param n_units_per_layer: units per hidden layer
        :param lr: Adam learning rate
        :param max_dz: latent displacement allowed before L_dis penalizes
        :param coefs: optional dict of per-loss weights (missing keys -> 1.0)
        """
        super().__init__()
        self.n_actions = n_actions
        self.n_latent_dims = n_latent_dims
        self.lr = lr
        self.max_dz = max_dz
        # Unspecified loss coefficients default to 1.0.
        self.coefs = defaultdict(lambda: 1.0)
        if coefs is not None:
            for k, v in coefs.items():
                self.coefs[k] = v

        # Encoder: observation -> latent state z.
        self.phi = PhiNet(input_shape=input_shape,
                          n_latent_dims=n_latent_dims,
                          n_units_per_layer=n_units_per_layer,
                          n_hidden_layers=n_hidden_layers)
        # Inverse model: (z0, z1) -> action logits.
        self.inv_model = InvNet(n_actions=n_actions,
                                n_latent_dims=n_latent_dims,
                                n_units_per_layer=n_units_per_layer,
                                n_hidden_layers=n_hidden_layers)
        # Discriminator for the contrastive loss: real vs. shuffled (z0, z1).
        self.discriminator = ContrastiveNet(n_latent_dims=n_latent_dims,
                                            n_hidden_layers=1,
                                            n_units_per_layer=n_units_per_layer)
        # Predicts, per latent factor, which parent factors it depends on.
        self.parents_model = ParentsNet(n_actions=n_actions,
                                        n_latent_dims=n_latent_dims,
                                        n_units_per_layer=n_units_per_layer,
                                        n_hidden_layers=n_hidden_layers,
                                        factored=True)
        # Factored forward model: predicts dz from (z0, a, parent deps).
        self.fwd_model = FwdNet(n_actions=n_actions,
                                n_latent_dims=n_latent_dims,
                                n_hidden_layers=n_hidden_layers,
                                n_units_per_layer=n_units_per_layer,
                                factored=True)
        self.cross_entropy = torch.nn.CrossEntropyLoss()
        self.bce_loss = torch.nn.BCELoss()
        self.mse = torch.nn.MSELoss()
        self.optimizer = torch.optim.Adam(self.parameters(), lr=self.lr)

    def inverse_loss(self, z0, z1, a):
        """Cross-entropy between predicted and actually-taken actions."""
        if self.coefs['L_inv'] == 0.0:
            return torch.tensor(0.0)
        a_hat = self.inv_model(z0, z1)
        return self.cross_entropy(input=a_hat, target=a)

    def ratio_loss(self, z0, z1):
        """Contrastive loss: the discriminator must separate real (z0, z1)
        transitions from pairs whose z1 was shuffled within the batch."""
        if self.coefs['L_rat'] == 0.0:
            return torch.tensor(0.0)
        N = len(z0)
        # shuffle next states to build negative examples
        idx = torch.randperm(N)
        z1_neg = z1.view(N, -1)[idx].view(z1.size())

        # concatenate positive and negative examples
        z0_extended = torch.cat([z0, z0], dim=0)
        z1_pos_neg = torch.cat([z1, z1_neg], dim=0)
        is_fake = torch.cat([torch.zeros(N), torch.ones(N)], dim=0)

        # Compute which ones are fakes
        fakes = self.discriminator(z0_extended, z1_pos_neg)
        return self.bce_loss(input=fakes, target=is_fake.float())

    def compute_fwd_loss(self, z1, z1_hat):
        """MSE between the encoded and the predicted next latent state."""
        if self.coefs['L_fwd'] == 0.0:
            return torch.tensor(0.0)
        return self.mse(z1, z1_hat)

    def distance_loss(self, z0, z1):
        """Penalizes only the part of the latent displacement that exceeds
        max_dz (soft constraint on step size)."""
        if self.coefs['L_dis'] == 0.0:
            return torch.tensor(0.0)
        dz = torch.norm(z1 - z0, dim=-1, p=2)
        excess = torch.nn.functional.relu(dz - self.max_dz)
        return self.mse(excess, torch.zeros_like(excess))

    def compute_factored_loss(self, parent_likelihood):
        """Aggregate the parent-dependency likelihoods into a scalar loss.

        NOTE(review): the aggregation strategy is still unsettled (see TODO
        below); currently it averages over the last dim, over dim 0 when the
        tensor has more than 2 dims, and over dim 0 again at the end.
        """
        if self.coefs['L_fac'] == 0.0:
            return torch.tensor(0.0)

        # TODO: how to compute factored loss?

        # 1. mean?
        loss = torch.mean(parent_likelihood, dim=-1)
        if parent_likelihood.ndim > 2:
            loss = torch.mean(loss, dim=0)
        # 2. sum?
        # loss = torch.sum(parent_likelihood, dim=-1)
        # 3. ???
        loss = torch.mean(loss, dim=0)
        return loss

    def encode(self, x):
        """Encode a raw observation into the latent space via phi."""
        z = self.phi(x)
        return z

    def forward(self, *args, **kwargs):
        # Training goes through train_batch(); a plain forward pass is
        # deliberately unsupported.
        raise NotImplementedError

    def predict_a(self, z0, z1):
        # Action prediction is intentionally disabled (kept for reference).
        raise NotImplementedError
        # a_logits = self.inv_model(z0, z1)
        # return torch.argmax(a_logits, dim=-1)

    def compute_loss(self, z0, a, parent_likelihood, z1, z1_hat):
        """Compute every component loss and the weighted total (key 'L')."""
        loss_info = {
            'L_inv': self.inverse_loss(z0, z1, a),
            'L_rat': self.ratio_loss(z0, z1),
            'L_dis': self.distance_loss(z0, z1),
            'L_fwd': self.compute_fwd_loss(z1, z1_hat),
            'L_fac': self.compute_factored_loss(parent_likelihood),
        }
        loss = 0
        for loss_type in ['L_inv', 'L_rat', 'L_dis', 'L_fwd', 'L_fac']:
            loss += self.coefs[loss_type] * loss_info[loss_type]
        loss_info['L'] = loss
        return loss_info

    def train_batch(self, x0, a, x1, test=False):
        """Run one optimization step on a batch of (x0, a, x1) transitions.

        When test=True the losses are computed but no gradient step is taken.
        Returns (z0, z1, loss_info).
        """
        if not test:
            self.train()
            self.optimizer.zero_grad()
        z0 = self.phi(x0)
        parent_dependencies, parent_likelihood = self.parents_model(z0, a)
        dz_hat = self.fwd_model(z0, a, parent_dependencies)
        z1_hat = z0 + dz_hat
        z1 = self.phi(x1)
        loss_info = self.compute_loss(z0, a, parent_likelihood, z1, z1_hat)
        if not test:
            loss_info['L'].backward()
            self.optimizer.step()
        return z0, z1, loss_info
| nilq/baby-python | python |
import os
from typing import List
from typing import Dict
from typing import Any
from ase.build import bulk
from ase.data import atomic_numbers
from ase.data import reference_states
from ase.data import ground_state_magnetic_moments
from autocat.data.lattice_parameters import BULK_PBE_FD
from autocat.data.lattice_parameters import BULK_PBE_PW
from autocat.data.lattice_parameters import BULK_BEEFVDW_FD
from autocat.data.lattice_parameters import BULK_BEEFVDW_PW
def generate_bulk_structures(
    species_list: List[str],
    crystal_structures: Dict[str, str] = None,
    default_lat_param_lib: str = None,
    a_dict: Dict[str, float] = None,
    c_dict: Dict[str, float] = None,
    set_magnetic_moments: List[str] = None,
    magnetic_moments: Dict[str, float] = None,
    write_to_disk: bool = False,
    write_location: str = ".",
    dirs_exist_ok: bool = False,
) -> Dict[str, Dict[str, Any]]:
    """
    Generates bulk crystal structures and writes them to separate
    directories, if specified.

    Parameters
    ----------

    species_list (REQUIRED):
        List of chemical symbols of the bulk structures to be constructed.

    crystal_structures:
        Dictionary with crystal structure to be used for each species.
        These will be passed on as input to `ase.build.bulk`. So, must be one
        of sc, fcc, bcc, tetragonal, bct, hcp, rhombohedral, orthorhombic,
        diamond, zincblende, rocksalt, cesiumchloride, fluorite or wurtzite.
        If not specified, the default reference crystal structure for each
        species from `ase.data` will be used.

    default_lat_param_lib:
        String indicating which library the lattice constants should be pulled
        from if not specified in either a_dict or c_dict.
        Defaults to lattice constants defined in `ase.data`.

        Options:
        pbe_fd: parameters calculated using xc=PBE and finite-difference
        beefvdw_fd: parameters calculated using xc=BEEF-vdW and finite-difference
        pbe_pw: parameters calculated using xc=PBE and a plane-wave basis set
        beefvdw_pw: parameters calculated using xc=BEEF-vdW and a plane-wave basis set

        N.B. if there is a species present in species_list that is NOT in the
        reference library specified, it will be pulled from `ase.data`.

    a_dict:
        Dictionary with lattice parameters <a> to be used for each species.
        If not specified, defaults from `default_lat_param_lib` are used.

    c_dict:
        Dictionary with lattice parameters <c> to be used for each species.
        If not specified, defaults from `default_lat_param_lib` are used.

    set_magnetic_moments:
        List of species for which magnetic moments need to be set.
        If not specified, magnetic moments will be set only for Fe, Co, Ni
        (the ferromagnetic elements).

    magnetic_moments:
        Dictionary with the magnetic moments to be set for the chemical
        species listed previously.
        If not specified, default ground state magnetic moments from
        `ase.data` are used.

    write_to_disk:
        Boolean specifying whether the bulk structures generated should be
        written to disk.
        Defaults to False.

    write_location:
        String with the location where the per-species/per-crystal structure
        directories must be constructed and structure files written to disk.
        In the specified write_location, the following directory structure
        will be created:
        [species_1]_bulk_[crystal_structure_1]/input.traj
        [species_1]_bulk_[crystal_structure_2]/input.traj
        ...
        [species_2]_bulk_[crystal_structure_2]/input.traj
        ...
        Defaults to the current working directory.

    dirs_exist_ok:
        Boolean specifying whether existing directories/files should be
        overwritten or not. This is passed on to the `os.makedirs` builtin.
        Defaults to False (raises an error if directories corresponding the
        species and crystal structure already exist).

    Returns
    -------

    Dictionary with bulk structures (as `ase.Atoms` objects) and
    write-location (if any) for each input species.
    Example:
    {
        "Pt": {"structure": Pt_ase_obj, "traj_file_path": "/path/to/Pt/traj/file"},
        "Cr": ...
    }
    """
    # Available precomputed lattice-parameter libraries.
    lpl = {
        "pbe_fd": BULK_PBE_FD,
        "beefvdw_fd": BULK_BEEFVDW_FD,
        "pbe_pw": BULK_PBE_PW,
        "beefvdw_pw": BULK_BEEFVDW_PW,
    }

    if crystal_structures is None:
        crystal_structures = {}
    if a_dict is None:
        a_dict = {}
    if c_dict is None:
        c_dict = {}
    if set_magnetic_moments is None:
        set_magnetic_moments = ["Fe", "Co", "Ni"]
    if magnetic_moments is None:
        magnetic_moments = {}

    # load crystal structure defaults from `ase.data`, override with user input
    cs_library = {
        species: reference_states[atomic_numbers[species]].get("symmetry")
        for species in species_list
    }
    cs_library.update(crystal_structures)

    # load lattice params <a>, <c> from reference library, override with user input
    a_library = {}
    c_library = {}
    if default_lat_param_lib is not None:
        a_library.update(
            {
                species: lpl[default_lat_param_lib].get(species, {}).get("a")
                for species in species_list
            }
        )
        c_library.update(
            {
                species: lpl[default_lat_param_lib].get(species, {}).get("c")
                for species in species_list
            }
        )
    a_library.update(a_dict)
    c_library.update(c_dict)

    # load magnetic moment defaults from `ase.data`, override with user input
    mm_library = {
        species: ground_state_magnetic_moments[atomic_numbers[species]]
        for species in species_list
    }
    mm_library.update(magnetic_moments)

    bulk_structures = {}
    for species in species_list:
        cs = cs_library.get(species)
        a = a_library.get(species)
        c = c_library.get(species)

        bs = bulk(species, crystalstructure=cs, a=a, c=c)
        if species in set_magnetic_moments:
            bs.set_initial_magnetic_moments([mm_library[species]] * len(bs))

        traj_file_path = None
        if write_to_disk:
            dir_path = os.path.join(write_location, f"{species}_bulk_{cs}")
            os.makedirs(dir_path, exist_ok=dirs_exist_ok)
            traj_file_path = os.path.join(dir_path, "input.traj")
            bs.write(traj_file_path)
            print(f"{species}_bulk_{cs} structure written to {traj_file_path}")

        bulk_structures[species] = {
            "structure": bs,
            "traj_file_path": traj_file_path,
        }

    return bulk_structures
| nilq/baby-python | python |
#!/usr/bin/env python3
# -*- coding:utf-8 -*-
# author: bigfoolliu
"""
redis 临时状态存储接口
键:
1. 计数器 str
2. order对应关系、超时队列、成功队列 —— 字典 uuid-orderinfo
"order:"+str(goods_id)
"order:"+str(goods_id)+":"+"overtime" 超时订单
"order:"+str(goods_id)+":"+"deal" 完成的订单
改:
除了order对应关系需要获取值 仍然用字典
由于只需要判断是否存在 redis 中集合的底层实现是整数链表或哈希表
所以此处用集合可以达到同样的效率,而不需要设置键值。
"""
from typing import Dict
from conn import redis_conn
def plus_counter(goods_id: int, storage: int = 1000) -> bool:
    """
    Atomically increment the purchase counter for a goods id.

    :param goods_id: id of the goods being purchased
    :param storage: stock limit; increments beyond this mean sold out
    :return: True while the incremented count is within stock, else False
    """
    updated = redis_conn.incr(f'counter:{str(goods_id)}')
    return updated <= storage
def create_order(order_info: Dict) -> bool:
    """
    Create an order: record the order_id -> user_id mapping under the
    goods' order hash, so payment can later be verified by check_order().

    :param order_info: dict with 'user_id', 'goods_id' and 'order_id' keys
    :return: True on success
    """
    user_id = order_info.get('user_id')
    goods_id = order_info.get('goods_id')
    order_id = order_info.get('order_id')
    # Bug fix: the original called hset() with a single concatenated string,
    # which is not a valid redis-py call and stored no mapping. Store hash
    # "order:<goods_id>" with field order_id -> user_id, matching the
    # hget(f'order:{goods_id}', order_id) lookup in check_order().
    redis_conn.hset(f'order:{str(goods_id)}', order_id, user_id)
    return True
"""
订单的几种状态:
order:goods_id 一般订单
order:goods_i:overtime 超时队列
order:goods_id:deal 成功处理的队列
"""
def check_order(order_info: Dict) -> bool:
    """
    Verify an order at payment time.

    :param order_info: dict with 'user_id', 'goods_id' and 'order_id' keys
    :return: -1 if the order has expired; otherwise True/False depending on
        whether the stored user id matches the caller's.
        NOTE(review): despite the -> bool annotation, -1 is used as an
        "expired" sentinel — callers must distinguish it from False.
    """
    user_id = order_info.get('user_id')
    order_id = order_info.get('order_id')
    goods_id = order_info.get('goods_id')
    if redis_conn.sismember(f'order:{str(goods_id)}:overtime', order_id):  # order has expired
        return -1
    else:
        # hget returns bytes, so decode before comparing.
        # NOTE(review): assumes user_id is passed (and was stored) as str —
        # confirm against callers; an int user_id would never compare equal.
        return user_id == str(redis_conn.hget(f'order:{goods_id}', order_id), encoding='utf-8')
def enter_overtime(order_info: Dict) -> bool:
    """
    Mark an unpaid order as timed out by adding it to the overtime set,
    unless it has already been completed (paid).

    :param order_info: dict with 'goods_id' and 'order_id' keys
    :return: True if the order was moved to the overtime set, False if it
        had already been completed
    """
    order_id = order_info.get('order_id')
    goods_id = order_info.get('goods_id')
    if _is_deal(order_info):
        return False
    # Bug fix: the original used a plain string literal
    # 'order:{str(goods_id)}:overtime' (missing the f prefix), so every
    # goods id shared one malformed key. Use a real f-string.
    redis_conn.sadd(f'order:{str(goods_id)}:overtime', order_id)
    return True
def _is_deal(order_info: Dict) -> bool:
    """
    Return True if the order has already been completed (paid).

    :param order_info: dict with 'goods_id' and 'order_id' keys
    """
    # Removed the unused user_id local from the original.
    order_id = order_info.get('order_id')
    goods_id = order_info.get('goods_id')
    return redis_conn.sismember(f'order:{str(goods_id)}:deal', order_id)
def _is_overtime(order_info: Dict) -> bool:
    """
    Return True if the order has already timed out.

    :param order_info: dict with 'goods_id' and 'order_id' keys
    """
    # Removed the unused user_id local from the original.
    order_id = order_info.get('order_id')
    goods_id = order_info.get('goods_id')
    return redis_conn.sismember(f'order:{str(goods_id)}:overtime', order_id)
def paid_order(order_info: Dict) -> bool:
    """
    Mark an order as paid by adding it to the deal set, unless it has
    already timed out.

    :param order_info: dict with 'goods_id' and 'order_id' keys
    :return: True if the order was marked paid, False if it had expired
    """
    order_id = order_info.get('order_id')
    goods_id = order_info.get('goods_id')
    if _is_overtime(order_info):
        return False
    # Bug fix: missing f prefix meant all goods shared the literal key
    # 'order:{str(goods_id)}:deal'. Use a real f-string.
    redis_conn.sadd(f'order:{str(goods_id)}:deal', order_id)
    return True
if __name__ == '__main__':
    # Ad-hoc smoke test; requires a reachable redis instance (see conn).
    create_order(order_info={'goods_id': 1, 'user_id': 1, 'order_id': 'a'})
| nilq/baby-python | python |
# Main network and testnet3 definitions
# AXE src/chainparams.cpp
# Address-prefix bytes and genesis hashes for the AXE main and test networks,
# transcribed from AXE src/chainparams.cpp (the #Lnnn markers give the line
# in that file).
params = {
    'axe_main': {
        'pubkey_address': 55, #L120 base58 version byte for P2PKH addresses
        'script_address': 16, #L122 base58 version byte for P2SH addresses
        'genesis_hash': '00000c33631ca6f2f61368991ce2dc03306b5bb50bf7cede5cfbba6db38e52e6' #L110
    },
    'axe_test': {
        'pubkey_address': 140, #L220
        'script_address': 19, #L222
        'genesis_hash': '00000bafbc94add76cb75e2ec92894837288a481e5c005f6563d91623bf8bc2c' #L210
    }
}
| nilq/baby-python | python |
"""
Unit tests for the ESV schema package.
"""
from aura.file.esv import ESV
import unittest
import os
def _locate(filename):
    """
    Build the path of *filename* relative to the directory containing this
    test module.
    """
    here = os.path.dirname(__file__)
    return os.path.join(here, filename)
class ESV_test(unittest.TestCase):
    """
    Unit tests for schema parsing.
    """

    def check_invalid(self, basename):
        """
        Workhorse: loading *basename* must raise ValueError.
        """
        path = _locate(basename)
        with open(path) as handle:
            with self.assertRaises(ValueError):
                ESV.load(handle)

    def test_invalid_schemas(self):
        """
        Test that we catch different types of invalid files.
        """
        self.check_invalid('invalid2.esv')

    def test_simple_schema(self):
        """
        Test that we can create some valid schemas.
        """
        valid_files = ('valid1.esv',)
        for name in valid_files:
            with open(_locate(name)) as handle:
                ESV.load(handle)
| nilq/baby-python | python |
# -*- coding: utf-8 -*-
"""
CCR plotting module
Kiri Choi (c) 2018
"""
import os, sys
import tellurium as te
import roadrunner
import numpy as np
import matplotlib.pyplot as plt
import seaborn as sb
def plotAllProgress(listOfDistances, labels=None, SAVE_PATH=None):
    """
    Plots multiple convergence progress curves on one figure.

    :param listOfDistances: 2D array of distances, one row per run
    :param labels: list of strings to use as legend labels
    :param SAVE_PATH: directory or file path to save to; None to skip saving
    """
    for progress in listOfDistances:
        plt.plot(progress)
    if labels:
        plt.legend(labels)
    plt.xlabel("Generations", fontsize=15)
    plt.ylabel("Distance", fontsize=15)
    plt.xticks(fontsize=15)
    plt.yticks(fontsize=15)
    if SAVE_PATH is not None:
        # A path without an extension is treated as an output directory.
        if os.path.splitext(SAVE_PATH)[1] == '':
            target = os.path.join(SAVE_PATH, 'images/convergence.pdf')
        else:
            target = SAVE_PATH
        plt.savefig(target, bbox_inches='tight')
    plt.show()
def plotProgress(distance, SAVE_PATH=None):
    """
    Plots a single convergence progress curve.

    :param distance: array of distances over generations
    :param SAVE_PATH: directory or file path to save to; None to skip saving
    """
    plt.plot(distance)
    plt.xlabel("Generations", fontsize=15)
    plt.ylabel("Distance", fontsize=15)
    plt.xticks(fontsize=15)
    plt.yticks(fontsize=15)
    if SAVE_PATH is not None:
        # A path without an extension is treated as an output directory.
        target = SAVE_PATH
        if os.path.splitext(SAVE_PATH)[1] == '':
            target = os.path.join(SAVE_PATH, 'images/convergence.pdf')
        plt.savefig(target, bbox_inches='tight')
    plt.show()
def plotResidual(realModel, ens_model, ens_dist, SAVE_PATH=None):
    """
    Plots the average residual between the reference model and the best
    ensemble models over simulated time courses.

    :param realModel: reference model (Antimony string, loaded via te.loada)
    :param ens_model: model ensemble (list of Antimony strings)
    :param ens_dist: model distances (smaller is better)
    :param SAVE_PATH: directory or file path to save to; None to skip saving
    """
    r_real = te.loada(realModel)
    result_real = r_real.simulate(0, 100, 100)
    top_result = []
    top_diff = []
    # Simulate each ensemble member in order of increasing distance.
    for i in range(len(ens_model)):
        r = te.loada(ens_model[np.argsort(ens_dist)[i]])
        top_sim = r.simulate(0, 100, 100)
        top_result.append(top_sim)
        # Residual excludes the time column (column 0).
        top_diff.append(np.subtract(result_real[:,1:], top_sim[:,1:]))
    # Fraction of the ensemble to average over (top 10%).
    percentage = 0.1#float(pass_size)/ens_size
    ave_diff = np.average(top_diff[:int(len(ens_model)*percentage)], axis=0)
    plt.plot(ave_diff)
    plt.xlabel("Time (s)", fontsize=15)
    plt.ylabel("Residual", fontsize=15)
    # NOTE(review): uses the species ids of the *last* simulated model for
    # the legend — assumes all ensemble members share the same species.
    plt.legend(r.getFloatingSpeciesIds())
    plt.xticks(fontsize=15)
    plt.yticks(fontsize=15)
    if SAVE_PATH is not None:
        # A path without an extension is treated as an output directory.
        if os.path.splitext(SAVE_PATH)[1] == '':
            plt.savefig(os.path.join(SAVE_PATH, 'images/average_residual.pdf'), bbox_inches='tight')
        else:
            plt.savefig(SAVE_PATH, bbox_inches='tight')
    plt.show()
def plotHistogram():
    """
    Placeholder — not implemented yet.
    """
def plotDistanceHistogram(ens_dist, nbin=25, SAVE_PATH=None):
    """
    Plots a normalized histogram of ensemble distances.

    :param ens_dist: array of model distances
    :param nbin: number of histogram bins
    :param SAVE_PATH: directory or file path to save to; None to skip saving
    """
    plt.hist(ens_dist, bins=nbin, density=True)
    plt.xlabel("Distance", fontsize=15)
    plt.ylabel("Normalized Frequency", fontsize=15)
    plt.xticks(fontsize=15)
    plt.yticks(fontsize=15)
    if SAVE_PATH is not None:
        # A path without an extension is treated as an output directory.
        is_dir = os.path.splitext(SAVE_PATH)[1] == ''
        target = os.path.join(SAVE_PATH, 'images/distance_hist.pdf') if is_dir else SAVE_PATH
        plt.savefig(target, bbox_inches='tight')
    plt.show()
def plotDistanceHistogramWithKDE(dist_top, log_dens, minInd, nbin=40, SAVE_PATH=None):
    """
    Plots a distance histogram overlaid with a KDE curve and a dashed
    vertical line at the first listed local minimum of the density.

    :param dist_top: sorted distances of the top ensemble models
    :param log_dens: log-density of the KDE evaluated at dist_top
    :param minInd: indices of local minima of the density
    :param nbin: number of histogram bins
    :param SAVE_PATH: directory or file path to save to; None to skip saving
    """
    counts, _edges, _patches = plt.hist(dist_top, bins=nbin, density=True)
    plt.vlines(dist_top[minInd[0][0]], 0, np.max(counts), linestyles='dashed')
    plt.plot(dist_top, np.exp(log_dens), color='tab:red')
    plt.xlabel("Distance", fontsize=15)
    plt.ylabel("Normalized Frequency", fontsize=15)
    plt.xticks(fontsize=15)
    plt.yticks(fontsize=15)
    if SAVE_PATH is not None:
        # A path without an extension is treated as an output directory.
        if os.path.splitext(SAVE_PATH)[1] == '':
            target = os.path.join(SAVE_PATH, 'images/distance_hist_w_KDE.pdf')
        else:
            target = SAVE_PATH
        plt.savefig(target, bbox_inches='tight')
    plt.show()
def plotNetwork(path, scale=1.5):
    """
    Plot a network diagram for a single model.

    :param path: path to a model
    :param scale: diagram scale
    """
    import netplotlib as npl

    diagram = npl.Network(path)
    diagram.scale = scale
    diagram.draw()
def plotNetworkEnsemble(path, index=None, threshold=0., scale=1.5):
    """
    Plot a weighted network-ensemble diagram from the models stored in an
    output folder.

    :param path: path to an output folder (expects a 'models' subdirectory)
    :param index: number of models to include; None includes all models
    :param threshold: threshold of reactions to be plotted
    :param scale: diagram scale
    :raises Exception: if index exceeds the number of models found
    """
    import netplotlib as npl

    model_dir = os.path.join(path, 'models')
    modelfiles = [f for f in os.listdir(model_dir) if os.path.isfile(os.path.join(model_dir, f))]

    modelcontent = []
    for fname in modelfiles:
        # Context manager ensures file handles are closed even on error
        # (the original leaked handles by never closing on exceptions).
        with open(os.path.join(model_dir, fname), 'r') as sbmlstr:
            modelcontent.append(sbmlstr.read())

    # Bug fix: the original evaluated `index >= len(...)` unconditionally,
    # which raises TypeError for the default index=None in Python 3.
    if index is not None and index >= len(modelcontent):
        raise Exception("Specified index value is larger than the size of the list")

    # modelcontent[:None] keeps the whole list, matching the documented
    # "None includes all models" behavior.
    net = npl.NetworkEnsemble(modelcontent[:index])
    net.plottingThreshold = threshold
    net.scale = scale
    net.drawWeightedDiagram()
| nilq/baby-python | python |
# -*- coding: utf-8 -*-
# MIT License
# Copyright (c) 2021 Alexsandro Thomas
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
# The above copyright notice and this permission notice shall be included in all
# copies or substantial portions of the Software.
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
import aqt, os
from ...path import ICONS_PATH
from . import ui_SelectWord, ui_SelectExamples
class SelectWord(aqt.QDialog) :
    """Select which word from a dictionary query to use.

    Results stream in incrementally from the selected dictionary's search
    generator. After exec(), the caller reads self.searchResults (rows),
    self.selected (chosen row index), self.skipped and self.closed.
    """
    def __init__(self, parent, dictionaries, note, internal_config) :
        """Build the dialog, wire signals, and optionally auto-start a search.

        :param parent: parent Qt widget
        :param dictionaries: mapping of dictionary name -> dictionary object
        :param note: the Anki note being processed
        :param internal_config: mutable dict of persisted dialog state
        """
        #init window
        super(SelectWord,self).__init__(parent)
        self.setWindowFlags(aqt.Qt.Dialog | aqt.Qt.MSWindowsFixedSizeDialogHint)

        #data and methods
        self.closed = False #window is closed (bool)
        self.skipped = False #user skipped (bool)
        self.searchResults = None #search result rows (List[dict])
        self.selected = None #selected row (int)
        self.gen = None #search generator
        self.dictionaries = dictionaries
        self.note = note
        self.internal_config = internal_config

        #setup UI
        self.ui = ui_SelectWord.Ui_diagSelectWord()
        self.ui.setupUi(self)
        self.ui.pbSearch.setHidden(True)
        self.ui.bContinue.setHidden(True)
        self.__updateDropdowns()
        icon = aqt.QIcon(os.path.join(ICONS_PATH, "nihongo_companion.png"))
        self.setWindowIcon(icon)

        #hooks
        self.ui.bSearch.clicked.connect(self.__search)
        self.ui.bCancel.clicked.connect(self.__cancel)
        self.ui.bSkip.clicked.connect(self.__skip)
        self.ui.bConfirm.clicked.connect(self.__confirm)
        self.ui.bContinue.clicked.connect(self.__continue)
        self.ui.listResults.doubleClicked.connect(self.__confirm)
        self.ui.cbDict.currentTextChanged.connect(self.__changeDict)

        #begin search
        configObj = aqt.mw.addonManager.getConfig(__name__)
        if configObj["autoSearch"] and self.internal_config["auto_search"] and self.dictionaries[self.internal_config["dict"]].needsSearch : self.__search()

    def __changeDict(self, value) :
        """Reset results and button state when the dictionary selection changes.

        Dictionaries with needsSearch require an explicit search before
        confirming; others can be confirmed immediately.
        """
        self.ui.pbSearch.setHidden(True)
        self.ui.bContinue.setHidden(True)
        self.ui.listResults.setEnabled(False)
        self.ui.listResults.clear()
        self.ui.bConfirm.setEnabled(False if self.dictionaries[value].needsSearch else True)
        self.ui.bSearch.setEnabled(True if self.dictionaries[value].needsSearch else False)
        aqt.QApplication.processEvents() #update

    def __updateDropdowns(self) -> None :
        """Populate the input-field and dictionary dropdowns from the note and
        persisted config, restoring the previous selections."""
        for field,_ in self.note.items() :
            self.ui.cbField_in.addItem(field)
        self.ui.cbField_in.setCurrentIndex(self.internal_config["in_field"])
        self.ui.bConfirm.setEnabled(False if self.dictionaries[self.internal_config["dict"]].needsSearch else True)
        self.ui.bSearch.setEnabled(True if self.dictionaries[self.internal_config["dict"]].needsSearch else False)
        for field,_ in self.dictionaries.items() :
            self.ui.cbDict.addItem(field)
        self.ui.cbDict.setCurrentText(self.internal_config["dict"])

    def __search(self) -> None :
        """Start a new incremental search for the selected field's text and
        consume its first chunk via __continue()."""
        self.ui.bSearch.setEnabled(False)
        self.ui.listResults.setEnabled(False)
        self.ui.bConfirm.setEnabled(False)
        self.ui.bContinue.setEnabled(True)
        self.ui.listResults.clear()
        query = self.note.values()[self.ui.cbField_in.currentIndex()]
        self.ui.pbSearch.setValue(0)
        self.ui.pbSearch.setHidden(False)
        self.ui.bContinue.setHidden(False)
        self.gen = self.dictionaries[self.ui.cbDict.currentText()].search(query)
        self.searchResults = []
        self.__continue()

    def __continue(self) -> None :
        """Pull the next (results, current, total) chunk from the search
        generator, append rows to the list and update the progress bar."""
        if self.gen is None : return
        results, cur, tot = None, None, None
        try :
            results, cur, tot = next(self.gen)
        except StopIteration:
            # Generator exhausted: nothing more to fetch.
            self.ui.bContinue.setEnabled(False)
            self.gen = None
            return
        if self.closed : return
        self.ui.bSearch.setEnabled(False)
        self.ui.bContinue.setEnabled(False)
        self.ui.listResults.setEnabled(False)
        self.ui.bConfirm.setEnabled(False)
        if results!=None and len(results)>0 :
            aqt.QApplication.processEvents() #update
            self.ui.pbSearch.setValue(100*cur//tot)
            if 100*cur//tot == 100 :
                self.ui.bContinue.setEnabled(False)
                self.gen = None
            self.searchResults += results
            for result in results :
                item = aqt.QListWidgetItem()
                item.setText(
                    result["title"]+'\n'+
                    result["kana"]+" ["+result["type"]+"]\n - "+
                    "\n - ".join(result["english"])
                )
                self.ui.listResults.addItem(item)
                # Pre-select the first row ever added.
                if self.selected == None :
                    self.selected = 0
                    self.ui.listResults.setCurrentItem(item)
            aqt.QApplication.processEvents() #update
        else :
            # NOTE(review): an empty chunk aborts the whole search and shows
            # "Nothing found!" even if earlier chunks returned rows — confirm
            # this is the intended behavior.
            self.searchResults = None
            self.ui.pbSearch.setValue(0)
            self.ui.bSearch.setEnabled(True)
            self.ui.pbSearch.setHidden(True)
            self.ui.bContinue.setHidden(True)
            aqt.utils.showInfo("Nothing found!")
            return
        self.ui.listResults.setEnabled(True)
        self.ui.bConfirm.setEnabled(True)
        if self.gen is not None : self.ui.bContinue.setEnabled(True)
        self.ui.bSearch.setEnabled(True)

    #overload close event if needed
    def closeEvent(self, a0) -> None:
        return super().closeEvent(a0)

    def __cancel(self) -> None :
        """Mark the dialog closed and dismiss it."""
        self.closed = True
        self.close()

    def __confirm(self) -> None :
        """Record the selected row (or synthesize one for no-search
        dictionaries), persist dropdown state, and accept the dialog."""
        if self.dictionaries[self.ui.cbDict.currentText()].needsSearch :
            if len(self.ui.listResults.selectedIndexes())==0 :
                aqt.utils.showInfo("No word selected!")
                return
            self.selected = self.ui.listResults.selectedIndexes()[0].row()
        else :
            # No search needed: treat the raw field text as the single result.
            sel = self.note.values()[self.ui.cbField_in.currentIndex()]
            self.searchResults = [{
                "title": sel,
                "uri": sel,
                "kana": ''
            }]
            self.selected = 0
        self.internal_config["in_field"] = self.ui.cbField_in.currentIndex()
        self.internal_config["auto_search"] = True
        self.internal_config["dict"] = self.ui.cbDict.currentText()
        self.accept()

    def __skip(self) -> None :
        """Mark this note skipped and close the dialog."""
        self.skipped = True
        self.__cancel()
class SelectExamples(aqt.QDialog) :
    """Select which examples from a dictionary query to use.

    After exec(), the caller reads self.selected (sorted row indices),
    self.field (output field name), self.extraReadings and self.error.
    """
    def __init__(self, parent, dictionary, queryWord:dict, note, internal_config) :
        """Build the dialog and wire signals; call search() to populate it.

        :param parent: parent Qt widget
        :param dictionary: dictionary object providing get_examples()
        :param queryWord: the word row chosen in SelectWord (needs 'uri')
        :param note: the Anki note being processed
        :param internal_config: mutable dict of persisted dialog state
        """
        #init window
        super(SelectExamples,self).__init__(parent)
        self.setWindowFlags(aqt.Qt.Dialog | aqt.Qt.MSWindowsFixedSizeDialogHint)

        #data and methods
        self.closed = False #bool
        self.dictionary = dictionary #dictionary
        self.queryWord = queryWord #dict
        self.internal_config = internal_config #dict
        self.searchResults = None #list
        self.selected = None #list
        self.error = False #bool
        self.field = None #str
        self.extraReadings = None #set[str]
        self.note = note #note

        #setup UI
        self.ui = ui_SelectExamples.Ui_diagSelectExamples()
        self.ui.setupUi(self)
        self.__resizeHeaders()
        self.__updateDropdowns()
        self.ui.cbField_out.setCurrentIndex(self.internal_config["out_field"])
        # Temporarily drop exclusivity so both radio buttons can be set
        # directly from config without Qt toggling the other.
        self.ui.buttonGroup.setExclusive(False)
        self.ui.rbOverwrite.setChecked(not(self.internal_config["append"]))
        self.ui.rbAppend.setChecked(self.internal_config["append"])
        self.ui.buttonGroup.setExclusive(True)
        icon = aqt.QIcon(os.path.join(ICONS_PATH, "nihongo_companion.png"))
        self.setWindowIcon(icon)

        #hooks
        self.ui.bCancel.clicked.connect(self.__cancel)
        self.ui.bConfirm.clicked.connect(self.__confirm)
        self.ui.rbAppend.toggled.connect(self.__onClickedAppend)
        quit = aqt.QAction("Quit", self)
        quit.triggered.connect(self.__cancel)

    def __onClickedAppend(self) :
        """Persist the append/overwrite choice whenever rbAppend toggles."""
        radioBtn = self.sender()
        if radioBtn.isChecked():
            self.internal_config["append"] = True
            return
        self.internal_config["append"] = False

    def __radioChanged(self) -> None :
        """Keep the radio buttons mutually exclusive.

        NOTE(review): not connected to any signal in this class — appears to
        be dead code superseded by the exclusive buttonGroup.
        """
        if self.ui.rbAppend.isChecked() :
            self.ui.rbOverwrite.setChecked(False)
            return
        self.ui.rbAppend.setChecked(False)
        aqt.QApplication.processEvents() #update

    def __updateDropdowns(self) -> None :
        """Populate the output-field dropdown from the note's fields."""
        for field,_ in self.note.items() :
            self.ui.cbField_out.addItem(field)

    def __resizeHeaders(self) -> None:
        """Stretch both example columns and select whole rows."""
        #TODO: Wrap text
        header = self.ui.tExamples.horizontalHeader()
        header.setSectionResizeMode(aqt.QHeaderView.ResizeToContents)
        header.setSectionResizeMode(0, aqt.QHeaderView.Stretch)
        header.setSectionResizeMode(1, aqt.QHeaderView.Stretch)
        self.ui.tExamples.setSelectionBehavior(aqt.QAbstractItemView.SelectRows)

    def search(self) -> None :
        """Fetch examples for the chosen word and fill the table.

        Sets self.error when the dictionary returns nothing.
        """
        self.searchResults = self.dictionary.get_examples(self.queryWord['uri'])
        if self.searchResults!=None and len(self.searchResults)>0 :
            if self.closed : return
            self.ui.tExamples.setRowCount(len(self.searchResults))
            i = 0
            for example in self.searchResults :
                item = aqt.QTableWidgetItem()
                item.setText(example['japanese'])
                self.ui.tExamples.setItem(i,0,item)
                item = aqt.QTableWidgetItem()
                item.setText(example['english'])
                self.ui.tExamples.setItem(i,1,item)
                i+=1
            aqt.QApplication.processEvents() #update
            self.ui.tExamples.selectRow(0)
            self.ui.tExamples.setEnabled(True)
            self.ui.bConfirm.setEnabled(True)
            self.ui.tExamples.resizeRowsToContents()
        else :
            self.error = True

    def __cancel(self) -> None :
        """Mark the dialog closed and dismiss it."""
        self.closed = True
        self.close()

    def __confirm(self) -> None :
        """Record the selected rows, output field and extra readings, persist
        dropdown state, and accept the dialog."""
        if len(self.ui.tExamples.selectedIndexes())==0 :
            aqt.utils.showInfo("No example selected!")
            return
        self.field = self.note.keys()[self.ui.cbField_out.currentIndex()]
        # Selected indexes are per-cell; collapse them to unique sorted rows.
        self.selected = list(sorted(set(map(lambda index : index.row(), self.ui.tExamples.selectedIndexes()))))
        self.internal_config["out_field"] = self.ui.cbField_out.currentIndex()
        # Accept both ASCII and fullwidth comma as separators.
        self.extraReadings = set(self.ui.leReadings.text().replace('、',',').split(','))
        self.accept()
| nilq/baby-python | python |
#!/usr/bin/env python
import pytest
from versionner.version import Version
class TestVersionCompare:
    """Comparison-operator tests for versionner.version.Version."""

    def test_eq(self):
        base = Version('1.2.3')
        other = Version('2.3.4')
        same = Version('1.2.3')

        assert base == same
        assert base != other
        assert base == '1.2.3'
        assert base != '2.3.4'

    def test_lt(self):
        low = Version('1.2.3')
        middle = Version('2.3.4')
        high = '7.6.5'

        assert low < middle
        assert low < high
        assert middle < high

    def test_gt(self):
        low = Version('1.2.3')
        middle = Version('2.3.4')
        high = '7.6.5'

        assert middle > low
        assert high > low
        assert high > middle

    def test_sort(self):
        first = Version('1.2.3')
        third = Version('2.3.4')
        second = Version('1.2.3')
        last = '7.6.5'

        # Stable sort keeps `first` ahead of the equal `second`.
        ordered = sorted([first, third, second, last])

        assert [id(v) for v in ordered] == [id(first), id(second), id(third), id(last)]
if __name__ == '__main__':
    # Allow running this test module directly.
    pytest.main()
| nilq/baby-python | python |
"""empty message
Revision ID: a92b018b8c85
Revises: 875879b0cbcc
Create Date: 2020-06-18 18:00:12.242034
"""
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = 'a92b018b8c85'
down_revision = '875879b0cbcc'
branch_labels = None
depends_on = None
def upgrade():
    """Apply: add nullable boolean column teams.login_before."""
    # ### commands auto generated by Alembic - please adjust! ###
    op.add_column('teams', sa.Column('login_before', sa.Boolean(), nullable=True))
    # ### end Alembic commands ###
def downgrade():
    """Revert: drop column teams.login_before."""
    # ### commands auto generated by Alembic - please adjust! ###
    op.drop_column('teams', 'login_before')
    # ### end Alembic commands ###
| nilq/baby-python | python |
# Generated by Django 3.1.1 on 2020-09-13 10:53
from django.db import migrations, models
class Migration(migrations.Migration):
    """Widen users.UserBMI height and weight to
    DecimalField(max_digits=6, decimal_places=2)."""

    dependencies = [
        ('users', '0001_initial'),
    ]

    operations = [
        migrations.AlterField(
            model_name='userbmi',
            name='height',
            field=models.DecimalField(decimal_places=2, max_digits=6),
        ),
        migrations.AlterField(
            model_name='userbmi',
            name='weight',
            field=models.DecimalField(decimal_places=2, max_digits=6),
        ),
    ]
| nilq/baby-python | python |
import json
from logger import log
menuStore = "data/menu.json"
class MenuBot():
    """Simple menu-driven reply bot backed by a JSON menu store.

    The menu is a list of rows, each a dict with at least the keys
    'active' (bool), 'query' (str) and 'reply' (str).
    """

    def __init__(self):
        # Bug fix: status/menu used to be *class* attributes, so every
        # MenuBot instance shared (and mutated) the same menu dict.
        # They are now per-instance state; getStatus() and the existing
        # attribute names are unchanged for callers.
        self.status = "Empty"
        self.menu = dict()

    def getStatus(self):
        """Return the loader status: "Empty", "Ready" or "Read Error"."""
        return self.status

    def saveMenu(self):
        """Serialize the current menu to the JSON store on disk."""
        with open(menuStore, 'w') as outfile:
            outfile.write(json.dumps(self.menu))

    def loadMenu(self):
        """Load the menu from the JSON store, updating self.status."""
        try:
            with open(menuStore) as json_file:
                self.menu = json.load(json_file)
            log(self.menu)
            self.status = "Ready"
        except Exception as e:
            # Best-effort: report and record the failure instead of raising.
            print(e)
            self.status = "Read Error"

    def determineReply(self, req):
        """Return the reply for *req* (case-insensitive, trimmed), or a help
        message listing every active choice when nothing matches.
        """
        choices = ""  # renamed from `all`, which shadowed the builtin
        delim = ""
        try:
            text = req.lower().strip()
            for row in self.menu:
                if not row['active']:
                    continue
                choice = row['query']
                if text == choice.lower():
                    return row['reply']
                choices += delim + choice
                delim = ', '
        except Exception as e:
            print(e)
            return "Sorry, error formulating response to " + req
        return 'I can reply to ' + choices
| nilq/baby-python | python |
#!/usr/bin/env python
## -*- coding: utf-8 -*-
# Licensed to Cloudera, Inc. under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. Cloudera, Inc. licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import json
from nose.tools import assert_equal, assert_true, assert_false
from django.contrib.auth.models import User
from django.urls import reverse
from azure.conf import is_adls_enabled
from desktop.lib.django_test_util import make_logged_in_client
from desktop.lib.test_utils import grant_access, add_permission
from desktop.models import Directory, Document, Document2
from hadoop import cluster as originalCluster
import notebook.connectors.hiveserver2
from notebook.api import _historify
from notebook.connectors.base import Notebook, QueryError, Api
from notebook.decorators import api_error_handler
from notebook.conf import get_ordered_interpreters, INTERPRETERS_SHOWN_ON_WHEEL, INTERPRETERS
try:
from collections import OrderedDict
except ImportError:
from ordereddict import OrderedDict # Python 2.6
class TestNotebookApi(object):
    """Integration tests for the notebook REST API: save, historify, history
    listing/clearing and trashing.  Uses a real test database and two
    non-superuser accounts ("test" owns the fixtures, "not_perm_user" does not).
    """

    def setUp(self):
        self.client = make_logged_in_client(username="test", groupname="default", recreate=True, is_superuser=False)
        self.client_not_me = make_logged_in_client(username="not_perm_user", groupname="default", recreate=True, is_superuser=False)
        self.user = User.objects.get(username="test")
        self.user_not_me = User.objects.get(username="not_perm_user")
        grant_access("test", "default", "notebook")
        grant_access("not_perm_user", "default", "notebook")
        # Canonical saved Hive notebook payload reused by most tests below.
        self.notebook_json = """
{
"selectedSnippet": "hive",
"showHistory": false,
"description": "Test Hive Query",
"name": "Test Hive Query",
"sessions": [
{
"type": "hive",
"properties": [],
"id": null
}
],
"type": "query-hive",
"id": 50010,
"snippets": [{"id":"2b7d1f46-17a0-30af-efeb-33d4c29b1055","type":"hive","status":"running","statement_raw":"select * from default.web_logs where app = '${app_name}';","variables":[{"name":"app_name","value":"metastore"}],"statement":"select * from default.web_logs where app = 'metastore';","properties":{"settings":[],"files":[],"functions":[]},"result":{"id":"b424befa-f4f5-8799-a0b4-79753f2552b1","type":"table","handle":{"log_context":null,"statements_count":1,"end":{"column":21,"row":0},"statement_id":0,"has_more_statements":false,"start":{"column":0,"row":0},"secret":"rVRWw7YPRGqPT7LZ/TeFaA==an","has_result_set":true,"statement":"select * from default.web_logs where app = 'metastore';","operation_type":0,"modified_row_count":null,"guid":"7xm6+epkRx6dyvYvGNYePA==an"}},"lastExecuted": 1462554843817,"database":"default"}],
"uuid": "5982a274-de78-083c-2efc-74f53dce744c",
"isSaved": false,
"parentUuid": null
}
"""
        self.notebook = json.loads(self.notebook_json)
        # Persist the notebook as a Document2 and link the legacy Document row to it.
        self.doc2 = Document2.objects.create(id=50010, name=self.notebook['name'], type=self.notebook['type'], owner=self.user)
        self.doc1 = Document.objects.link(self.doc2, owner=self.user, name=self.doc2.name,
                                          description=self.doc2.description, extra=self.doc2.type)

    def test_save_notebook(self):
        """Saving maps the document to the requested directory, or to the
        user's home directory when no parent is given, and indexes the first
        statement text in the search field."""
        # Test that saving a new document with a new parent will set the parent_directory
        home_dir = Document2.objects.get_home_directory(self.user)
        assert_equal(home_dir.uuid, self.doc2.parent_directory.uuid)
        new_dir = Directory.objects.create(name='new_dir', owner=self.user, parent_directory=home_dir)
        notebook_cp = self.notebook.copy()
        notebook_cp.pop('id')
        notebook_cp['directoryUuid'] = new_dir.uuid
        notebook_json = json.dumps(notebook_cp)
        response = self.client.post(reverse('notebook:save_notebook'), {'notebook': notebook_json})
        data = json.loads(response.content)
        assert_equal(0, data['status'], data)
        doc = Document2.objects.get(pk=data['id'])
        assert_equal(new_dir.uuid, doc.parent_directory.uuid)
        # Test that saving a new document with a no parent will map it to its home dir
        notebook_json = """
{
"selectedSnippet": "hive",
"showHistory": false,
"description": "Test Hive Query",
"name": "Test Hive Query",
"sessions": [
{
"type": "hive",
"properties": [],
"id": null
}
],
"type": "query-hive",
"id": null,
"snippets": [{"id":"2b7d1f46-17a0-30af-efeb-33d4c29b1055","type":"hive","status":"running","statement_raw":"select * from default.web_logs where app = '${app_name}';","variables":[{"name":"app_name","value":"metastore"}],"statement":"select * from default.web_logs where app = 'metastore';","properties":{"settings":[],"files":[],"functions":[]},"result":{"id":"b424befa-f4f5-8799-a0b4-79753f2552b1","type":"table","handle":{"log_context":null,"statements_count":1,"end":{"column":21,"row":0},"statement_id":0,"has_more_statements":false,"start":{"column":0,"row":0},"secret":"rVRWw7YPRGqPT7LZ/TeFaA==an","has_result_set":true,"statement":"select * from default.web_logs where app = 'metastore';","operation_type":0,"modified_row_count":null,"guid":"7xm6+epkRx6dyvYvGNYePA==an"}},"lastExecuted": 1462554843817,"database":"default"}],
"uuid": "d9efdee1-ef25-4d43-b8f9-1a170f69a05a"
}
"""
        response = self.client.post(reverse('notebook:save_notebook'), {'notebook': notebook_json})
        data = json.loads(response.content)
        assert_equal(0, data['status'], data)
        doc = Document2.objects.get(pk=data['id'])
        assert_equal(Document2.objects.get_home_directory(self.user).uuid, doc.parent_directory.uuid)
        # Test that saving a notebook will save the search field to the first statement text
        assert_equal(doc.search, "select * from default.web_logs where app = 'metastore';")

    def test_historify(self):
        """_historify creates a new history Document2 plus a linked legacy
        Document each time it runs."""
        # Starts with no history
        assert_equal(0, Document2.objects.filter(name__contains=self.notebook['name'], is_history=True).count())
        assert_equal(1, Document.objects.filter(name__contains=self.notebook['name']).count())
        history_doc = _historify(self.notebook, self.user)
        assert_true(history_doc.id > 0)
        # Test that historify creates new Doc2 and linked Doc1
        assert_equal(1, Document2.objects.filter(name__contains=self.notebook['name'], is_history=True).count())
        assert_equal(2, Document.objects.filter(name__contains=self.notebook['name']).count())
        # Historify again
        history_doc = _historify(self.notebook, self.user)
        assert_equal(2, Document2.objects.filter(name__contains=self.notebook['name'], is_history=True).count())
        assert_equal(3, Document.objects.filter(name__contains=self.notebook['name']).count())

    def test_get_history(self):
        """The history endpoint only returns docs of the requested type."""
        assert_equal(0, Document2.objects.filter(name__contains=self.notebook['name'], is_history=True).count())
        _historify(self.notebook, self.user)
        _historify(self.notebook, self.user)
        _historify(self.notebook, self.user)
        assert_equal(3, Document2.objects.filter(name__contains=self.notebook['name'], is_history=True).count())
        # History should not return history objects that don't have the given doc type
        Document2.objects.create(name='Impala History', type='query-impala', data=self.notebook_json, owner=self.user, is_history=True)
        # Verify that get_history API returns history objects for given type and current user
        response = self.client.get(reverse('notebook:get_history'), {'doc_type': 'hive'})
        data = json.loads(response.content)
        assert_equal(0, data['status'], data)
        assert_equal(3, len(data['history']), data)
        assert_true(all(doc['type'] == 'query-hive' for doc in data['history']), data)
        # TODO: test that query history for shared query only returns docs accessible by current user

    def test_clear_history(self):
        """clear_history wipes only history docs of the given type and keeps
        the original saved document."""
        assert_equal(0, Document2.objects.filter(name__contains=self.notebook['name'], is_history=True).count())
        _historify(self.notebook, self.user)
        _historify(self.notebook, self.user)
        _historify(self.notebook, self.user)
        assert_equal(3, Document2.objects.filter(name__contains=self.notebook['name'], is_history=True).count())
        # Clear history should not clear history objects that don't have the given doc type
        Document2.objects.create(name='Impala History', type='query-impala', owner=self.user, is_history=True)
        # clear history should retain original document but wipe history
        response = self.client.post(reverse('notebook:clear_history'), {'notebook': self.notebook_json, 'doc_type': 'hive'})
        data = json.loads(response.content)
        assert_equal(0, data['status'], data)
        assert_false(Document2.objects.filter(type='query-hive', is_history=True).exists())
        assert_true(Document2.objects.filter(type='query-hive', is_history=False).exists())
        assert_true(Document2.objects.filter(type='query-impala', is_history=True).exists())

    def test_delete_notebook(self):
        """Deleting moves a saved notebook into the user's Trash folder, and
        failures to delete are reported per-uuid in the response."""
        trash_notebook_json = """
{
"selectedSnippet": "hive",
"showHistory": false,
"description": "Test Hive Query",
"name": "Test Hive Query",
"sessions": [
{
"type": "hive",
"properties": [],
"id": null
}
],
"type": "query-hive",
"id": null,
"snippets": [{"id": "e069ef32-5c95-4507-b961-e79c090b5abf","type":"hive","status":"ready","database":"default","statement":"select * from web_logs","statement_raw":"select * from web_logs","variables":[],"properties":{"settings":[],"files":[],"functions":[]},"result":{}}],
"uuid": "8a20da5f-b69c-4843-b17d-dea5c74c41d1"
}
"""
        # Assert that the notebook is first saved
        response = self.client.post(reverse('notebook:save_notebook'), {'notebook': trash_notebook_json})
        data = json.loads(response.content)
        assert_equal(0, data['status'], data)
        # Test that deleting it moves it to the user's Trash folder
        notebook_doc = Document2.objects.get(id=data['id'])
        trash_notebooks = [Notebook(notebook_doc).get_data()]
        response = self.client.post(reverse('notebook:delete'), {'notebooks': json.dumps(trash_notebooks)})
        data = json.loads(response.content)
        assert_equal(0, data['status'], data)
        assert_equal('Trashed 1 notebook(s)', data['message'], data)
        response = self.client.get('/desktop/api2/doc', {'path': '/.Trash'})
        data = json.loads(response.content)
        trash_uuids = [doc['uuid'] for doc in data['children']]
        assert_true(notebook_doc.uuid in trash_uuids, data)
        # Test that any errors are reported in the response
        nonexistant_doc = {
            "id": 12345,
            "uuid": "ea22da5f-b69c-4843-b17d-dea5c74c41d1",
            "selectedSnippet": "hive",
            "showHistory": False,
            "description": "Test Hive Query",
            "name": "Test Hive Query",
            "sessions": [
                {
                    "type": "hive",
                    "properties": [],
                    "id": None,
                }
            ],
            "type": "query-hive",
            "snippets": [{
                "id": "e069ef32-5c95-4507-b961-e79c090b5abf",
                "type": "hive",
                "status": "ready",
                "database": "default",
                "statement": "select * from web_logs",
                "statement_raw": "select * from web_logs",
                "variables": [],
                "properties": {"settings": [], "files": [], "functions": []},
                "result": {}
            }]
        }
        trash_notebooks = [nonexistant_doc]
        response = self.client.post(reverse('notebook:delete'), {'notebooks': json.dumps(trash_notebooks)})
        data = json.loads(response.content)
        assert_equal(0, data['status'], data)
        assert_equal('Trashed 0 notebook(s) and failed to delete 1 notebook(s).', data['message'], data)
        assert_equal(['ea22da5f-b69c-4843-b17d-dea5c74c41d1'], data['errors'])

    def test_query_error_encoding(self):
        """QueryError messages with ASCII, unicode-space escapes and non-ASCII
        text must all survive serialisation by @api_error_handler."""
        @api_error_handler
        def send_exception(message):
            raise QueryError(message=message)
        message = """SELECT
a.key,
a.*
FROM customers c, c.addresses a"""
        response = send_exception(message)
        data = json.loads(response.content)
        assert_equal(1, data['status'])
        message = """SELECT
\u2002\u2002a.key,
\u2002\u2002a.*
FROM customers c, c.addresses a"""
        response = send_exception(message)
        data = json.loads(response.content)
        assert_equal(1, data['status'])
        message = u"""SELECT
a.key,
a.*
FROM déclenché c, c.addresses a"""
        response = send_exception(message)
        data = json.loads(response.content)
        assert_equal(1, data['status'])
class MockedApi(Api):
    """HS2 API stand-in: pretends a result export succeeded immediately."""
    def export_data_as_hdfs_file(self, snippet, target_file, overwrite):
        # Echo the destination back in the shape callers read from the real API.
        return {'destination': target_file}
class MockFs():
    """Minimal in-memory stand-in for the HDFS client used by the tests.

    Bug fix: the original declared a ``user`` property whose getter returned
    ``self.user`` (infinite recursion), while ``__init__`` and ``setuser``
    assigned ``self.user`` directly — which raises ``AttributeError`` because
    the property has no setter.  The value now lives in the private backing
    attribute ``self._user``; the public read-only ``user`` property and the
    ``setuser()`` mutator keep the original interface.
    """
    def __init__(self, logical_name=None):
        self.fs_defaultfs = 'hdfs://curacao:8020'
        self.logical_name = logical_name if logical_name else ''
        self.DEFAULT_USER = 'test'
        self._user = 'test'          # effective user, exposed via the `user` property
        self._filebrowser_action = ''

    def setuser(self, user):
        """Record the effective user, mirroring the real FS client API."""
        self._user = user

    @property
    def user(self):
        return self._user

    def do_as_user(self, username, fn, *args, **kwargs):
        # Tests only need a string back; nothing is actually executed.
        return ''

    def exists(self, path):
        # Every path "exists" in the mock.
        return True

    def isdir(self, path):
        # Only the test user's home directory counts as a directory.
        return path == '/user/hue'

    def filebrowser_action(self):
        return self._filebrowser_action
class TestNotebookApiMocked(object):
    """Notebook API tests that replace HS2 and HDFS with in-process mocks
    (MockedApi / MockFs) so export_result can be exercised without a cluster."""

    def setUp(self):
        self.client = make_logged_in_client(username="test", groupname="default", recreate=True, is_superuser=False)
        self.client_not_me = make_logged_in_client(username="not_perm_user", groupname="default", recreate=True, is_superuser=False)
        self.user = User.objects.get(username="test")
        self.user_not_me = User.objects.get(username="not_perm_user")
        # Beware: Monkey patch HS2API Mock API
        if not hasattr(notebook.connectors.hiveserver2, 'original_HS2Api'):  # Could not monkey patch base.get_api
            notebook.connectors.hiveserver2.original_HS2Api = notebook.connectors.hiveserver2.HS2Api
        notebook.connectors.hiveserver2.HS2Api = MockedApi
        # Prime the FS cache, then swap the default filesystem for the mock.
        originalCluster.get_hdfs()
        self.original_fs = originalCluster.FS_CACHE["default"]
        originalCluster.FS_CACHE["default"] = MockFs()
        grant_access("test", "default", "notebook")
        grant_access("test", "default", "beeswax")
        grant_access("not_perm_user", "default", "notebook")
        grant_access("not_perm_user", "default", "beeswax")
        add_permission('test', 'has_adls', permname='adls_access', appname='filebrowser')

    def tearDown(self):
        # Undo the monkey patches so later test classes see the real API/FS.
        notebook.connectors.hiveserver2.HS2Api = notebook.connectors.hiveserver2.original_HS2Api
        if originalCluster.FS_CACHE is None:
            originalCluster.FS_CACHE = {}
        originalCluster.FS_CACHE["default"] = self.original_fs

    def test_export_result(self):
        """Exporting to HDFS uses '<name>.csv' for directory destinations and
        the exact path otherwise; ADLS paths are covered when ADLS is enabled."""
        notebook_json = """
{
"selectedSnippet": "hive",
"showHistory": false,
"description": "Test Hive Query",
"name": "Test Hive Query",
"sessions": [
{
"type": "hive",
"properties": [],
"id": null
}
],
"type": "query-hive",
"id": null,
"snippets": [{"id":"2b7d1f46-17a0-30af-efeb-33d4c29b1055","type":"hive","status":"running","statement":"select * from web_logs","properties":{"settings":[],"variables":[],"files":[],"functions":[]},"result":{"id":"b424befa-f4f5-8799-a0b4-79753f2552b1","type":"table","handle":{"log_context":null,"statements_count":1,"end":{"column":21,"row":0},"statement_id":0,"has_more_statements":false,"start":{"column":0,"row":0},"secret":"rVRWw7YPRGqPT7LZ/TeFaA==an","has_result_set":true,"statement":"select * from web_logs","operation_type":0,"modified_row_count":null,"guid":"7xm6+epkRx6dyvYvGNYePA==an"}},"lastExecuted": 1462554843817,"database":"default"}],
"uuid": "d9efdee1-ef25-4d43-b8f9-1a170f69a05a"
}
"""
        # Directory destination: a file named after the query is created inside it.
        response = self.client.post(reverse('notebook:export_result'), {
            'notebook': notebook_json,
            'snippet': json.dumps(json.loads(notebook_json)['snippets'][0]),
            'format': json.dumps('hdfs-file'),
            'destination': json.dumps('/user/hue'),
            'overwrite': json.dumps(False)
        })
        data = json.loads(response.content)
        assert_equal(0, data['status'], data)
        assert_equal('/user/hue/Test Hive Query.csv', data['watch_url']['destination'], data)
        # Explicit file destination: used verbatim.
        response = self.client.post(reverse('notebook:export_result'), {
            'notebook': notebook_json,
            'snippet': json.dumps(json.loads(notebook_json)['snippets'][0]),
            'format': json.dumps('hdfs-file'),
            'destination': json.dumps('/user/hue/path.csv'),
            'overwrite': json.dumps(False)
        })
        data = json.loads(response.content)
        assert_equal(0, data['status'], data)
        assert_equal('/user/hue/path.csv', data['watch_url']['destination'], data)
        if is_adls_enabled():
            response = self.client.post(reverse('notebook:export_result'), {
                'notebook': notebook_json,
                'snippet': json.dumps(json.loads(notebook_json)['snippets'][0]),
                'format': json.dumps('hdfs-file'),
                'destination': json.dumps('adl:/user/hue/path.csv'),
                'overwrite': json.dumps(False)
            })
            data = json.loads(response.content)
            assert_equal(0, data['status'], data)
            assert_equal('adl:/user/hue/path.csv', data['watch_url']['destination'], data)
def test_get_interpreters_to_show():
    """get_ordered_interpreters() must honour INTERPRETERS_SHOWN_ON_WHEEL.

    Unset: interpreters come back in their configured order.  Set to
    'java,pig': those two are promoted to the front while the rest keep
    their relative order.

    NOTE(review): comparing dict ``.values()`` with assert_equal assumes the
    Python 2 behaviour where values() returns a list (see the ordereddict
    import at the top of this file); on Python 3 this would compare a view.
    """
    default_interpreters = OrderedDict((
        ('hive', {
            'name': 'Hive', 'interface': 'hiveserver2', 'type': 'hive', 'is_sql': True, 'options': {}
        }),
        ('spark', {
            'name': 'Scala', 'interface': 'livy', 'type': 'spark', 'is_sql': False, 'options': {}
        }),
        ('pig', {
            'name': 'Pig', 'interface': 'pig', 'type': 'pig', 'is_sql': False, 'options': {}
        }),
        ('java', {
            'name': 'Java', 'interface': 'oozie', 'type': 'java', 'is_sql': False, 'options': {}
        })
    ))
    expected_interpreters = OrderedDict((
        ('java', {
            'name': 'Java', 'interface': 'oozie', 'type': 'java', 'is_sql': False, 'options': {}
        }),
        ('pig', {
            'name': 'Pig', 'interface': 'pig', 'is_sql': False, 'type': 'pig', 'options': {}
        }),
        ('hive', {
            'name': 'Hive', 'interface': 'hiveserver2', 'is_sql': True, 'type': 'hive', 'options': {}
        }),
        ('spark', {
            'name': 'Scala', 'interface': 'livy', 'type': 'spark', 'is_sql': False, 'options': {}
        })
    ))
    try:
        # Override the config for the duration of the test; undone in finally.
        resets = [INTERPRETERS.set_for_testing(default_interpreters)]
        interpreters_shown_on_wheel_unset = get_ordered_interpreters()
        assert_equal(default_interpreters.values(), interpreters_shown_on_wheel_unset,
                     'get_interpreters_to_show should return the same as get_interpreters when '
                     'interpreters_shown_on_wheel is unset. expected: %s, actual: %s'
                     % (default_interpreters.values(), interpreters_shown_on_wheel_unset))
        resets.append(INTERPRETERS_SHOWN_ON_WHEEL.set_for_testing('java,pig'))
        assert_equal(expected_interpreters.values(), get_ordered_interpreters(),
                     'get_interpreters_to_show did not return interpreters in the correct order expected: %s, actual: %s'
                     % (expected_interpreters.values(), get_ordered_interpreters()))
    finally:
        for reset in resets:
            reset()
| nilq/baby-python | python |
"""empty message
Revision ID: 3b84fe4459c9
Revises: 0149ad5e844c
Create Date: 2021-05-08 13:32:44.910084
"""
from alembic import op
import sqlalchemy as sa
from sqlalchemy.dialects import mysql
# revision identifiers, used by Alembic.
revision = '3b84fe4459c9'
down_revision = '0149ad5e844c'
branch_labels = None
depends_on = None
def upgrade():
    """Apply: replace integer ``dataset.sentence_count`` with float
    ``dataset.avg_sentence_count``.

    NOTE(review): the old sentence_count values are dropped, not migrated.
    """
    # ### commands auto generated by Alembic - please adjust! ###
    op.add_column('dataset', sa.Column('avg_sentence_count', sa.Float(), nullable=True))
    op.drop_column('dataset', 'sentence_count')
    # ### end Alembic commands ###
def downgrade():
    """Revert: restore integer ``dataset.sentence_count`` and drop
    ``dataset.avg_sentence_count`` (data in the dropped column is lost)."""
    # ### commands auto generated by Alembic - please adjust! ###
    op.add_column('dataset', sa.Column('sentence_count', mysql.INTEGER(), autoincrement=False, nullable=True))
    op.drop_column('dataset', 'avg_sentence_count')
    # ### end Alembic commands ###
| nilq/baby-python | python |
from rest_framework.generics import RetrieveUpdateAPIView
from rest_framework.permissions import *
from rest_framework.request import Request
from rest_framework.schemas.openapi import AutoSchema
from user.permissons import IsAdminUserOrReadOnly
from .models import SiteConfiguration
from .serializers import SiteConfigurationSerializer, PublicSiteConfigurationSerializer
class SiteConfigurationView(RetrieveUpdateAPIView):
    """Site-wide configuration endpoint (singleton; read for everyone,
    write for admins only).

    Fields exposed by the serializer:
      publisher_email -- the publisher's e-mail address
      discount_rate   -- discount rate in [0, 1]; 0 means no discount, 1 means free
      old_rate        -- depreciation (second-hand) rate in [0, 1]; 0 means no discount, 1 means free
      recycle_rate    -- buy-back (recycling) rate in [0, 1]; 0 means no discount, 1 means free
    """
    permission_classes = (IsAdminUserOrReadOnly,)
    serializer_class = SiteConfigurationSerializer
    schema = AutoSchema(
        tags=['config', 'admin']
    )
    def get_object(self):
        # django-solo style singleton: there is exactly one configuration row.
        return SiteConfiguration.get_solo()
    def get_queryset(self):
        # NOTE(review): returns the solo instance rather than a QuerySet;
        # appears unused since get_object() is overridden — confirm before relying on it.
        return SiteConfiguration.get_solo()
| nilq/baby-python | python |
import copy
from django.test import override_settings
from rest_framework.test import APIClient
from users.models import User
from saef.models import DatasetSession
from saefportal.settings import MSG_ERROR_INVALID_INPUT, MSG_ERROR_REQUIRED_INPUT, MSG_ERROR_MISSING_OBJECT_INPUT, \
MSG_ERROR_EXISTING
from utils.test_utils import load_test_json, load_test_db
from django.test import TransactionTestCase
from analyzer.celery_conf import app
from celery.contrib.testing.worker import start_worker
test_data = load_test_json('restapi')
def setUpTestDatabase():
    """Load the dataset-session fixture rows for the restapi tests."""
    load_test_db('restapi', 'test_dataset_session')
class DatasetSessionStartTests(TransactionTestCase):
    """End-to-end tests for POST /restapi/dataset_session/calculate/.

    Runs a real in-process Celery worker for the whole class so the
    calculation task actually executes.
    """
    @classmethod
    def setUpClass(cls):
        super().setUpClass()
        # One shared Celery worker for all tests in this class.
        cls.celery_worker = start_worker(app)
        cls.celery_worker.__enter__()
    @classmethod
    def tearDownClass(cls):
        super().tearDownClass()
        cls.celery_worker.__exit__(None, None, None)
    def setUp(self):
        super().setUp()
        setUpTestDatabase()
        # Deep-copy the fixture payloads so each test can mutate its own copy.
        self.data = copy.deepcopy(test_data)
        self.user = User.objects.create_user(**self.data['Credentials'])
        self.client = APIClient()
        self.client.force_authenticate(user=self.user)
    def test_dataset_status_post_success(self):
        """A valid payload creates exactly one DatasetSession."""
        dataset_sessions = DatasetSession.objects.filter().count()
        self.assertEqual(dataset_sessions, 0)
        response = self.client.post('http://localhost:8000/restapi/dataset_session/calculate/',
                                    self.data['DatasetSessionCalculate'], format='json',
                                    timeout=(5, 120))
        self.assertEqual(response.status_code, 200)
        dataset_sessions = DatasetSession.objects.filter().count()
        self.assertEqual(dataset_sessions, 1)
    def test_dataset_status_post_required(self):
        """Omitting job_execution_id yields HTTP 400 with the 'required' message."""
        self.data['DatasetSessionCalculate'].pop('job_execution_id')
        response = self.client.post('http://localhost:8000/restapi/dataset_session/calculate/',
                                    self.data['DatasetSessionCalculate'], format='json',
                                    timeout=(5, 120))
        self.assertEqual(response.status_code, 400)
        self.assertEqual(response.data['error'], MSG_ERROR_REQUIRED_INPUT('job_execution_id and name'))
    def test_dataset_status_post_invalid(self):
        """A malformed UUID yields HTTP 400 with the 'invalid input' message."""
        self.data['DatasetSessionCalculate']['job_execution_id'] = 'notvalid'
        response = self.client.post('http://localhost:8000/restapi/dataset_session/calculate/',
                                    self.data['DatasetSessionCalculate'], format='json',
                                    timeout=(5, 120))
        self.assertEqual(response.status_code, 400)
        self.assertEqual(response.data['error'], MSG_ERROR_INVALID_INPUT('UUID'))
    def test_dataset_status_post_missing_object(self):
        """A well-formed but unknown UUID yields HTTP 400 'missing object'."""
        self.data['DatasetSessionCalculate']['job_execution_id'] = '11a1a11a-a11a-1111-1a11-a1a1aaa11111'
        response = self.client.post('http://localhost:8000/restapi/dataset_session/calculate/',
                                    self.data['DatasetSessionCalculate'], format='json',
                                    timeout=(5, 120))
        self.assertEqual(response.status_code, 400)
        self.assertEqual(response.data['error'], MSG_ERROR_MISSING_OBJECT_INPUT("job execution id or dataset name"))
| nilq/baby-python | python |
#!/bin/python3
import math
import os
import random
import re
import sys
if __name__ == '__main__':
    # Read a base number from stdin and print its 1..10 multiplication table.
    base = int(input())
    for factor in range(1, 11):
        print('{} x {} = {}'.format(base, factor, base * factor))
| nilq/baby-python | python |
def generate_explanation_text(concept_qid, defining_formula,
                              identifier_properties, identifier_values):
    """Build a human-readable explanation for a Wikidata-derived formula.

    Each identifier property is a sequence ``(pid, symbol, unit)``; the symbol
    characters occurring in ``defining_formula`` are replaced with
    `` <value> <unit> `` taken from ``identifier_values`` (matched by index).

    Fallbacks mirror the original behaviour: if substitution fails (e.g.
    mismatched property/value lengths) the text omits the values; if even the
    formula cannot be concatenated, an empty string is returned.

    Fix: the bare ``except:`` clauses (which also swallowed SystemExit /
    KeyboardInterrupt) were narrowed to ``except Exception``, and the
    per-character try/except was replaced by an equivalent ``dict.get``.
    """
    #url = "https://www.wikidata.org/wiki/" + concept_qid
    url = "www.wikidata.org/wiki/" + concept_qid
    try:
        # Map each identifier symbol to " <value> <unit> ".
        # Indexing (not zip) is deliberate: a length mismatch must raise and
        # trigger the values-free fallback below, as in the original code.
        symbol_value_unit = {}
        for idx in range(len(identifier_properties)):
            symbol_value_unit[identifier_properties[idx][1]] = \
                " " + str(identifier_values[idx]) + " " + identifier_properties[idx][2] + " "
        values_inserted = "".join(
            symbol_value_unit.get(character, character) for character in defining_formula
        )
        explanation_text = ("Solution from " + url + " formula " + defining_formula
                            + " with " + values_inserted + ".")
    except Exception:
        try:
            explanation_text = "Solution from " + url + " formula " + defining_formula + " ."
        except Exception:
            explanation_text = ""
    return explanation_text
#!/usr/bin/env python3
# -*- coding: UTF-8 -*-
"""
This is an bayes module.
It seems that it has to have THIS docstring with a summary line, a blank line
and sume more text like here. Wow.
"""
from loguru import logger
from system.other import TARANTOOL_CONN
import sys
import mailparser
from datetime import datetime
from smtp_an.general_func import BAYES_FROMAT_STRING
def startLearn() -> str:
    """Train the naive-Bayes spam filter from one e-mail file.

    Usage: ``script spam|ham <file name>``.  Parses the mail, tokenises its
    first text/plain part with BAYES_FROMAT_STRING, then bumps per-word
    spam/ham counters in the Tarantool 'bayes' space.  Returns a
    human-readable status string (also used as the error message).
    """
    start_time = datetime.now()
    tmp_date = int(start_time.timestamp())  # last-seen timestamp stored per word
    result = ""
    # argv: [script, spam|ham, filename]
    if len(sys.argv) != 3:
        logger.error("Error. Input spam/ham [file name]")
        result = "Error. Input spam/ham [file name]"
        return result
    if sys.argv[1] != "spam" and sys.argv[1] != "ham":
        result = "Error. Input spam/ham [file name]"
        return result
    file_byte_data = b""
    try:
        f = open(sys.argv[2], "rb")
        file_byte_data = f.read()
        f.close()
    except Exception:
        logger.error("Error. Can't open file {}".format(sys.argv[2]))
        result = "Error. Can't open file {}".format(sys.argv[2])
        return result
    mail = mailparser.mailparser.parse_from_bytes(file_byte_data)
    # NOTE(review): assumes the mail has at least one text/plain part; an
    # empty text_plain list would raise IndexError here — confirm upstream.
    words = BAYES_FROMAT_STRING(mail.text_plain[0])
    space_bayes, tarantool_space_status = TARANTOOL_CONN("bayes")
    if tarantool_space_status is False:
        logger.error("Erorr connection tarntool.")
        result = "Erorr connection tarntool."
        return result
    for row_word in words:
        tr_result = space_bayes.select(row_word)
        if tr_result.rowcount > 0:
            # Known word: bump spam (field 1) or ham (field 2) counter and
            # refresh the last-seen timestamp (field 3).
            if sys.argv[1] == "spam":
                tr_result = space_bayes.update(row_word, [('+', 1, 1), ('=', 3, tmp_date)])
            elif sys.argv[1] == "ham":
                tr_result = space_bayes.update(row_word, [('+', 2, 1), ('=', 3, tmp_date)])
        else:
            # New word: insert (word, spam_count, ham_count, first_seen, last_seen).
            if sys.argv[1] == "spam":
                tr_result = space_bayes.insert((row_word, 1, 0, tmp_date, tmp_date))
            elif sys.argv[1] == "ham":
                tr_result = space_bayes.insert((row_word, 0, 1, tmp_date, tmp_date))
        logger.info(str(tr_result[0]))
    result = "Learn comlited."
    return result
if __name__ == "__main__":
print(startLearn())
| nilq/baby-python | python |
# 002: print the numbers in input.txt that are common multiples of 6 and 11.
# Fix: use a context manager so the file is closed even if int() raises on a
# malformed line (the original open()/close() pair leaked the handle on error).
with open("input.txt", mode='r') as f:
    for line in f.readlines():
        num = int(line)
        if num % 6 == 0 and num % 11 == 0:
            print(num)
| nilq/baby-python | python |
#!/bin/python3
# answer to this question:
# https://www.hackerrank.com/challenges/python-time-delta/problem?h_r=next-challenge&h_v=zen&h_r=next-challenge&h_v=zen
from datetime import datetime as dtf #date time functionalities
# why import datetime from datetime?:
# datetime module has datetime class so datetime.datetime.*
# https://stackoverflow.com/questions/19480028/attributeerror-datetime-module-has-no-attribute-strptime#19480045
def time_delta(t1, t2):
    """Return the absolute difference between two timestamps, in seconds.

    Timestamps look like ``Sun 10 May 2015 13:54:36 -0700``; the trailing
    ``%z`` offset makes both datetimes timezone-aware, so the subtraction
    compares absolute instants.
    (format codes: https://docs.python.org/3/library/datetime.html#strftime-strptime-behavior)
    """
    layout = "%a %d %b %Y %H:%M:%S %z"
    first, second = (dtf.strptime(stamp, layout) for stamp in (t1, t2))
    return int(abs(first - second).total_seconds())
# Driver: first stdin line is the number of test cases; each case is a pair
# of timestamp lines whose difference in seconds is printed.
t = int(input())
for t_itr in range(t):
    t1 = input()
    t2 = input()
    delta = time_delta(t1, t2)
    print(delta)  # end of loop
# one = t1.split()
# two = t2.split()
# hourMinSec1 = one[4].split(sep = ":")
# hourMinSec2 = two[4].split(sep = ":")
# d1 = datetime.datetime(int(one[3]), one[2], int(one[1]), hour = int(hourMinSec1[0]), minute = int(hourMinSec1[1]), second = int(hourMinSec1[2]), tzinfo = one[5])
# d2 = datetime.datetime(two[3], two[2], two[1], hour = hourMinSec2[0], minute = hourMinSec2[1], second = hourMinSec2[2], tzinfo = two[5])
# def timeZoneDiff(t1s, t2s)
# print(t1s[5][0])
# print(t2s[5][0])
# if((t2s[5][0] == "-" and t1s[5][0] == "-") or (t2s[5][0] == "+" and t1s[5][0] == "+")): #signs match
# zone = int(t1s[5][1:]) - int(t2s[5][1:])
# else: #signs don't match -- wraps around 0
# zone = int(t1s[5][1:]) + int(t2s[5][1:])
# return zone;
# def yearConverter(year1, year2):
# diff = year1 - year2
# return diff * 365 * 24 * 60 * 60; #assumes neither has leap days -- must be accounted for elsewhere
# # Complete the time_delta function below.
# def time_delta(t1, t2):
# t1s = t1.split()
# t2s = t2.split()
# zone = timeZoneDiff(t1s, t2s)
# diff = abs(int(t1s[1]) - int(t2s[1])) * 24 * 60 * 60 #diff of days in seconds
# hourMinSec1 = t1s[4].split(sep=":")
# hourMinSec2 = t2s[4].split(sep=":")
# diff += int(hourMinSec1[0]) - int(hourMinSec2[0]) * 60 * 60
# diff += int(hourMinSec1[1]) - int(hourMinSec2[1]) * 60
# diff += int(hourMinSec1[2]) - int(hourMinSec2[2])
# # diff += monthConverter() #converts difference between months
# diff += yearConverter(int(t1s[3]), int(t2s[3]))
# diff += zone
# return str(diff)
# diff += abs(t1s[] - t2s[])
# print(t1.split()[5])
# Sun 10 May 2015 13:54:36 -0700
# scratch work:
# t1 = +0500
# t2 = +0300
# 0200 = abs (0500-0300)
# means t1 is 2 hours ahead of t2
# t1 = -0200
# t2 = +0300
# diff = abs(0200 + 0300)
# means they are 5 hours apart
# t1 = -0500
# t2 = -0300
# diff = abs(0500-0300)
# means they are 2 hours apart
# t1 = +0400
# t2 = -1200
# diff = abs(1200 + 0400)
# means they are 16 hours apart
| nilq/baby-python | python |
from asset_manager.data.schemas.developer import DeveloperMongo
from pymongo.database import Database
from asset_manager.data.repos.base import MongoRepository
class DeveloperRepository(MongoRepository[DeveloperMongo]):
    """Mongo-backed repository for developer documents.

    Binds the generic ``MongoRepository`` to the ``developers`` collection
    and the ``DeveloperMongo`` schema.
    """
    def __init__(self, db: Database):
        super().__init__(db, "developers", DeveloperMongo)
| nilq/baby-python | python |
#!/usr/bin/env python
#! coding:utf8
import sys
import numpy as np
# approximation valid for
# 0 degrees Celsius < T < 60 degrees Celcius
# 1% < RH < 100%
# 0 degrees Celcius < Td < 50 degrees Celcius
# constants
# Magnus-formula constants (valid roughly for 0-60 degrees Celsius, 1-100% RH)
a = 17.271
b = 237.7  # in units of degrees Celcius

def dewpoint_approximation(T, RH):
    """Approximate the dewpoint (degrees Celsius) from temperature ``T``
    (degrees Celsius) and relative humidity ``RH`` (percent)."""
    g = gamma(T, RH)
    return (b * g) / (a - g)

def gamma(T, RH):
    """Magnus-formula helper term used by :func:`dewpoint_approximation`."""
    saturation_term = a * T / (b + T)
    return saturation_term + np.log(RH / 100.0)
if __name__ == '__main__':
    # CLI: dewpoint.py <temperature C> <relative humidity %>
    # sys.argv[0] is program name
    T = float(sys.argv[1])
    RH = float(sys.argv[2])
    Td = dewpoint_approximation(T, RH)
    print('T, RH: '+str(T)+u'°C, '+str(RH)+'%')
    print('Td: '+str(Td))
# USAGE: python dewpoint.py T RH
# e.g. python dewpoint.py 10 98 | nilq/baby-python | python |
from flask import render_template, url_for, flash, redirect, request
from todo_project import app, db, bcrypt
# Import the forms
from todo_project.forms import (LoginForm, RegistrationForm, UpdateUserInfoForm,
UpdateUserPassword, TaskForm, UpdateTaskForm)
# Import the Models
from todo_project.models import User, Task
# Import
from flask_login import login_required, current_user, login_user, logout_user
@app.errorhandler(404)
def error_404(error):
    """Serve the custom 'page not found' template with status 404."""
    page = render_template('errors/404.html')
    return page, 404
@app.errorhandler(403)
def error_403(error):
    """Serve the custom 'forbidden' template with status 403."""
    page = render_template('errors/403.html')
    return page, 403
@app.errorhandler(500)
def error_500(error):
    """Serve the custom 'internal server error' template with status 500."""
    page = render_template('errors/500.html')
    return page, 500
@app.route("/")
@app.route("/about")
def about():
    """Public landing / about page (also mounted at the site root)."""
    template = 'about.html'
    return render_template(template, title='About')
@app.route("/login", methods=['POST', 'GET'])
def login():
    """Log a user in.

    GET renders the login form (already-authenticated users are sent straight
    to the task list).  POST verifies the credentials against the stored
    bcrypt hash and starts a session via flask_login.

    Fix: removed the dead ``task_form = TaskForm()`` local that was created
    on successful login but never used.
    """
    if current_user.is_authenticated:
        return redirect(url_for('all_tasks'))
    form = LoginForm()
    # After you submit the form
    if form.validate_on_submit():
        user = User.query.filter_by(username=form.username.data).first()
        # Check if the user exists and the password is valid
        if user and bcrypt.check_password_hash(user.password, form.password.data):
            login_user(user)
            flash('Login Successfull', 'success')
            return redirect(url_for('all_tasks'))
        else:
            flash('Login Unsuccessful. Please check Username Or Password', 'danger')
    return render_template('login.html', title='Login', form=form)
@app.route("/logout")
def logout():
    """End the current session and return to the login page."""
    logout_user()
    return redirect(url_for('login'))
@app.route("/register", methods=['POST', 'GET'])
def register():
    """Create a new account, storing the password as a bcrypt hash."""
    if current_user.is_authenticated:
        return redirect(url_for('all_tasks'))
    form = RegistrationForm()
    if not form.validate_on_submit():
        return render_template('register.html', title='Register', form=form)
    pw_hash = bcrypt.generate_password_hash(form.password.data).decode('utf-8')
    new_user = User(username=form.username.data, password=pw_hash)
    db.session.add(new_user)
    db.session.commit()
    flash(f'Account Created For {form.username.data}', 'success')
    return redirect(url_for('login'))
@app.route("/all_tasks")
@login_required
def all_tasks():
    """List every task owned by the logged-in user.

    Fix: the original re-queried the User row by ``current_user.username``
    just to reach its ``tasks`` relation; ``current_user`` already proxies
    that same row (flask_login), so the extra round-trip is dropped.
    """
    tasks = current_user.tasks
    return render_template('all_tasks.html', title='All Tasks', tasks=tasks)
@app.route("/add_task", methods=['POST', 'GET'])
@login_required
def add_task():
    """Show the task-creation form and persist a new task on valid submit."""
    form = TaskForm()
    if not form.validate_on_submit():
        return render_template('add_task.html', form=form, title='Add Task')
    new_task = Task(content=form.task_name.data, author=current_user)
    db.session.add(new_task)
    db.session.commit()
    flash('Task Created', 'success')
    return redirect(url_for('add_task'))
@app.route("/all_tasks/<int:task_id>/update_task", methods=['GET', 'POST'])
@login_required
def update_task(task_id):
    """Edit an existing task's text.

    GET pre-fills the form with the current content; POST saves the new
    content only when it actually changed, otherwise warns "No Changes Made".
    """
    task = Task.query.get_or_404(task_id)
    form = UpdateTaskForm()
    if form.validate_on_submit():
        if form.task_name.data != task.content:
            task.content = form.task_name.data
            db.session.commit()
            flash('Task Updated', 'success')
            return redirect(url_for('all_tasks'))
        else:
            flash('No Changes Made', 'warning')
            return redirect(url_for('all_tasks'))
    elif request.method == 'GET':
        # Pre-fill the edit form with the task's current text.
        form.task_name.data = task.content
    return render_template('add_task.html', title='Update Task', form=form)
@app.route("/all_tasks/<int:task_id>/delete_task")
@login_required
def delete_task(task_id):
    """Remove one task (404 when it does not exist) and return to the list."""
    doomed = Task.query.get_or_404(task_id)
    db.session.delete(doomed)
    db.session.commit()
    flash('Task Deleted', 'info')
    return redirect(url_for('all_tasks'))
@app.route("/account", methods=['POST', 'GET'])
@login_required
def account():
    """Let the logged-in user view and change their username."""
    form = UpdateUserInfoForm()
    if form.validate_on_submit():
        if form.username.data != current_user.username:
            current_user.username = form.username.data
            db.session.commit()
            flash('Username Updated Successfully', 'success')
            return redirect(url_for('account'))
    elif request.method == 'GET':
        # Pre-fill the form with the current username.
        form.username.data = current_user.username
    return render_template('account.html', title='Account Settings', form=form)
@app.route("/account/change_password", methods=['POST', 'GET'])
@login_required
def change_password():
    """Change the logged-in user's password after verifying the old one.

    Fix: the success branch computed ``redirect(url_for('account'))`` but
    discarded the result (missing ``return``), so the user was left on the
    change-password form after a successful change.  The redirect is now
    actually returned, matching the pattern used by the ``account`` view.
    """
    form = UpdateUserPassword()
    if form.validate_on_submit():
        if bcrypt.check_password_hash(current_user.password, form.old_password.data):
            current_user.password = bcrypt.generate_password_hash(form.new_password.data).decode('utf-8')
            db.session.commit()
            flash('Password Changed Successfully', 'success')
            return redirect(url_for('account'))
        else:
            flash('Please Enter Correct Password', 'danger')
    return render_template('change_password.html', title='Change Password', form=form)
| nilq/baby-python | python |
"""
Intent
-------
Define an interface for creating an object, but let subclasses decide which class to
instantiate. Factory Method lets a class defer instantiation to subclasses.
People often use Factory Method as the standard way to create objects; but it isn't
necessary if: the class that's instantiated never changes, or instantiation takes
place in an operation that subclasses can easily override (such as an initialization
operation).
The client is totally decoupled from the implementation details of derived classes.
Polymorphic creation is now possible.
The Factory Method defines an interface for creating objects, but lets subclasses
decide which classes to instantiate.
Summary:
--------
Define an interface for creating an object, but let subclasses decide which class to
instantiate. Factory Method lets a class defer instantiation to subclasses.
Consider making all constructors private or protected. Some Factory Method advocates recommend
that as a matter of language design (or failing that, as a matter of style) absolutely all
constructors should be private or protected. It's no one else's business whether a class
manufactures a new object or recycles an old one. The new operator considered harmful. There
is a difference between requesting an object and creating one. The new operator always creates
an object, and fails to encapsulate object creation. A Factory Method enforces that
encapsulation, and allows an object to be requested without inextricable coupling to the
act of creation.
The advantage of a Factory Method is that it can return the same instance multiple times, or can
return a subclass rather than an object of that exact type.
"""
import abc
# it is important to write metaclass here
class Creator(metaclass=abc.ABCMeta):
    """Abstract creator in the Factory Method pattern.

    Declares the factory hook returning a Product; concrete subclasses
    decide which Product class gets instantiated.  The base class stores
    the created product and exposes a client-facing API that delegates
    to it, keeping clients decoupled from concrete product types.
    """

    def __init__(self):
        # Defer the choice of concrete Product to the subclass hook.
        self.product = self._factory_method()

    @abc.abstractmethod
    def _factory_method(self):
        """Return a Product instance; implemented by concrete creators."""

    def client_uses_this_api_to_call_product_function(self):
        """Delegate to the created product's interface()."""
        return self.product.interface()
class ConcreteCreatorA(Creator):
    """Concrete creator whose factory hook yields ConcreteProductA."""

    def _factory_method(self):
        # Product selection happens here, never in client code.
        return ConcreteProductA()
class ConcreteCreatorB(Creator):
    """Concrete creator whose factory hook yields ConcreteProductB."""

    def _factory_method(self):
        # Product selection happens here, never in client code.
        return ConcreteProductB()
class Product(metaclass=abc.ABCMeta):
    """Interface implemented by every object a factory method creates."""

    @abc.abstractmethod
    def interface(self):
        """Product-specific behaviour, supplied by each concrete product."""

    def common_function(self):
        """Behaviour shared by all product variants."""
        return "Product: common function"
class ConcreteProductA(Product):
    """First concrete Product variant."""

    def interface(self):
        return "Concrete Product A"
class ConcreteProductB(Product):
    """Second concrete Product variant."""

    def interface(self):
        return "Concrete Product B"
# Client code: it never instantiates a Product directly; the creator
# subclass chosen here decides which Product class gets built.

# Example 1: creator A manufactures ConcreteProductA.
creator = ConcreteCreatorA()
assert creator.client_uses_this_api_to_call_product_function() == "Concrete Product A"
assert creator.product.common_function() == "Product: common function"

# Example 2: creator B manufactures ConcreteProductB.
creator = ConcreteCreatorB()
assert creator.client_uses_this_api_to_call_product_function() == "Concrete Product B"
assert creator.product.common_function() == "Product: common function"
| nilq/baby-python | python |
#! python
# coding:utf-8
"""API の Undo/Redo 用プラグイン"""
import sys
import maya.api.OpenMaya as om
# コマンド名
kPluginCmdName = "nnSnapshotState"
def maya_useNewAPI():
"""プラグインが API2.0 ベースであることの明示"""
pass
class NnSnapshotState(om.MPxCommand):
    """Undoable command that snapshots and restores mesh state.

    Captures, per target mesh, any combination of vertex normals, point
    positions, vertex colors and edge smoothings, then writes the
    captured state back on undo/redo.
    """

    def __init__(self):
        om.MPxCommand.__init__(self)
        self.targets = []
        self.to_store_normals = False
        self.to_store_positions = False
        self.to_store_colors = False
        self.to_store_smooths = False
        # Captured state keyed by target name.  The original kept a single
        # shared set of attributes, so with several -t targets only the
        # LAST target's state survived and was restored onto every target;
        # a per-target dict fixes that.
        self.states = {}

    @staticmethod
    def _fn_mesh(target):
        """Return an MFnMesh function set for the named DAG object."""
        slist = om.MSelectionList()
        slist.add(target)
        return om.MFnMesh(slist.getDagPath(0))

    def doIt(self, args):
        """Parse arguments and capture the requested state of each target."""
        self.parseArguments(args)
        for target in self.targets:
            fn_mesh = self._fn_mesh(target)
            state = {}
            if self.to_store_smooths or self.to_store_normals:
                # Smoothing flags are captured whenever either smooths or
                # normals are requested (mirrors the original behaviour).
                edge_ids = range(fn_mesh.numEdges)
                state['smooths'] = [fn_mesh.isEdgeSmooth(ei) for ei in edge_ids]
            if self.to_store_normals:
                state['normals'] = fn_mesh.getNormals()
            if self.to_store_positions:
                state['positions'] = fn_mesh.getPoints()
            if self.to_store_colors:
                state['colors'] = fn_mesh.getColors()
            self.states[target] = state

    def parseArguments(self, args):
        """Extract flag values from the raw argument list."""
        argData = om.MArgParser(self.syntax(), args)

        # Names of the objects to snapshot (-t may be used repeatedly).
        self.targets = []
        num_targets = argData.numberOfFlagUses('-t')
        for i in range(num_targets):
            argsList = argData.getFlagArgumentList('-t', i)
            self.targets.append(argsList.asString(0))

        # Which kinds of data the snapshot should include.
        if argData.isFlagSet('-n'):
            self.to_store_normals = argData.flagArgumentBool('-n', 0)
        if argData.isFlagSet('-p'):
            self.to_store_positions = argData.flagArgumentBool('-p', 0)
        if argData.isFlagSet('-c'):
            self.to_store_colors = argData.flagArgumentBool('-c', 0)
        if argData.isFlagSet('-sm'):
            self.to_store_smooths = argData.flagArgumentBool('-sm', 0)

    def _restoreState(self):
        """Write every captured per-target state back onto its mesh.

        Shared by undoIt/redoIt, whose bodies were identical copies in
        the original implementation.
        """
        for target in self.targets:
            fn_mesh = self._fn_mesh(target)
            state = self.states.get(target, {})
            if 'smooths' in state:
                edge_ids = range(fn_mesh.numEdges)
                fn_mesh.setEdgeSmoothings(edge_ids, state['smooths'])
            if 'normals' in state:
                fn_mesh.setNormals(state['normals'])
            if 'positions' in state:
                fn_mesh.setPoints(state['positions'])
            if 'colors' in state:
                fn_mesh.setColors(state['colors'])
            fn_mesh.updateSurface()

    def redoIt(self):
        """Re-apply the captured state (redo)."""
        self._restoreState()

    def undoIt(self):
        """Re-apply the captured state (undo)."""
        self._restoreState()

    def isUndoable(self):
        """This command supports undo."""
        return True
def cmdCreator():
    """Factory used by Maya to build a fresh command instance."""
    return NnSnapshotState()
def syntaxCreator():
    """Build the MSyntax object describing the command's flags."""
    syntax = om.MSyntax()

    # Target objects; -t may be repeated once per object.
    syntax.addFlag('-t', '-targets', om.MSyntax.kString)
    syntax.makeFlagMultiUse('-t')

    # Boolean switches selecting which data the snapshot includes.
    for short_name, long_name in (('-n', '-normal'),
                                  ('-p', '-position'),
                                  ('-c', '-color'),
                                  ('-sm', '-smooth')):
        syntax.addFlag(short_name, long_name, om.MSyntax.kBoolean)

    return syntax
def initializePlugin(mobject):
    """Called by Maya when the plug-in is loaded: register the command."""
    mplugin = om.MFnPlugin(mobject)
    try:
        mplugin.registerCommand(kPluginCmdName, cmdCreator, syntaxCreator)
    except Exception:
        # A bare ``except:`` would also swallow SystemExit/KeyboardInterrupt
        # and hide the failure from Maya; log and re-raise so Maya reports
        # the plug-in as failed to load.
        sys.stderr.write('Failed to register command: ' + kPluginCmdName)
        raise
def uninitializePlugin(mobject):
    """Called by Maya when the plug-in is unloaded: deregister the command."""
    mplugin = om.MFnPlugin(mobject)
    try:
        mplugin.deregisterCommand(kPluginCmdName)
    except Exception:
        # A bare ``except:`` would also swallow SystemExit/KeyboardInterrupt
        # and hide the failure from Maya; log and re-raise.
        sys.stderr.write('Failed to unregister command: ' + kPluginCmdName)
        raise
| nilq/baby-python | python |
import numpy as np
from sklearn import datasets
from sklearn.preprocessing import StandardScaler
from sklearn.metrics import accuracy_score
from sklearn.linear_model import LogisticRegression
from sklearn.linear_model import Perceptron
from sklearn.svm import SVC
from sklearn.tree import DecisionTreeClassifier
from sklearn.ensemble import RandomForestClassifier
from sklearn.neighbors import KNeighborsClassifier
# from sklearn.tree import export_graphviz
from matplotlib.colors import ListedColormap
import matplotlib.pyplot as plt
import warnings
from plot_decision_regions import *
# for sklearn 0.18's alternative syntax
from distutils.version import LooseVersion as Version
from sklearn import __version__ as sklearn_version
if Version(sklearn_version) < '0.18':
    # Before scikit-learn 0.18, train_test_split lived in
    # sklearn.cross_validation; sklearn.grid_search never provided it,
    # so the original import would always fail on old versions.
    from sklearn.cross_validation import train_test_split
else:
    from sklearn.model_selection import train_test_split
#############################################################################
# Load the iris data set and keep only petal length / petal width
# (feature columns 2 and 3) so decision regions can be plotted in 2D.
iris = datasets.load_iris()
X = iris.data[:, [2, 3]]
y = iris.target
print('Class labels:', np.unique(y))

# Hold out 30% of the samples for testing.
X_train, X_test, y_train, y_test = train_test_split(
    X, y, test_size=0.3, random_state=0)

# Standardize features using statistics estimated on the training set only,
# then apply the same transform to the test set.
sc = StandardScaler()
sc.fit(X_train)
X_train_std = sc.transform(X_train)
X_test_std = sc.transform(X_test)

# Combined (train + test) arrays used later for plotting decision regions.
X_combined_std = np.vstack((X_train_std, X_test_std))
y_combined = np.hstack((y_train, y_test))
#############################################################################
print(50 * '=')
print('Section: Training a logistic regression model with scikit-learn')
print(50 * '-')

# C is the INVERSE regularization strength, so C=1000 means very weak L2
# regularization (the commented alternative shows a strongly regularized fit).
lr = LogisticRegression(C=1000.0, random_state=0)
#lr = LogisticRegression(C=0.01, random_state=0)
lr.fit(X_train_std, y_train)

# Plot train + test points; test samples (indices 105-149 of the combined
# array) are highlighted by plot_decision_regions.
plot_decision_regions(X_combined_std, y_combined,
                      classifier=lr, test_idx=range(105, 150))
plt.xlabel('petal length [standardized]')
plt.ylabel('petal width [standardized]')
plt.legend(loc='upper left')
# plt.tight_layout()
# plt.savefig('./figures/logistic_regression.png', dpi=300)
plt.show()

# predict_proba expects a 2D array, hence the reshape of the single sample.
print('Predicted probabilities', lr.predict_proba(X_test_std[0, :]
      .reshape(1, -1)))
def sigmoid(z):
    """Logistic sigmoid 1 / (1 + exp(-z)); accepts scalars or ndarrays."""
    denominator = 1.0 + np.exp(-z)
    return 1.0 / denominator
print('coef_', lr.coef_)
print('intercept_', lr.intercept_)
print('X_test_std[0]', X_test_std[0, :])
# Sample values printed above (for reference):
# [[-7.34015187 -6.64685581]
# [ 2.54373335 -2.3421979 ]
# [ 9.46617627 6.44380858]]
# [-9.31757401 -0.89462847 -8.85765974]
# [ 0.70793846 1.50872803]
# Fold the three intercepts into W as its first column and prepend a 1 to
# x, so that sigmoid(W^t x) reproduces the per-class logistic scores.
w = np.c_[lr.intercept_.reshape(3,1), lr.coef_]
print('W', w)
x = np.r_[np.array([1]), X_test_std[0, :]]
print('x', x)
print('W^t x', np.dot(w, x))
print('sigmoid(W^t, x)', sigmoid(np.dot(w, x)))
| nilq/baby-python | python |
# Written by Bram Cohen
# see LICENSE.txt for license information
#
# $Id: btformats.py 68 2006-04-26 20:14:35Z sgrayban $
#
from types import StringType, LongType, IntType, ListType, DictType
from re import compile
reg = compile(r'^[^/\\.~][^/\\]*$')
ints = (LongType, IntType)
def check_info(info):
    """Validate the 'info' dictionary of a torrent metainfo file (Python 2).

    Raises ValueError with a descriptive message on any malformed field:
    piece hashes, piece length, the (security-restricted) name, and the
    single-file / multi-file layout.
    """
    if type(info) != DictType:
        raise ValueError, 'bad metainfo - not a dictionary'
    # 'pieces' is a concatenation of 20-byte SHA-1 digests.
    pieces = info.get('pieces')
    if type(pieces) != StringType or len(pieces) % 20 != 0:
        raise ValueError, 'bad metainfo - bad pieces key'
    piecelength = info.get('piece length')
    if type(piecelength) not in ints or piecelength <= 0:
        raise ValueError, 'bad metainfo - illegal piece length'
    name = info.get('name')
    if type(name) != StringType:
        raise ValueError, 'bad metainfo - bad name'
    # reg forbids a leading '.', '~' and any path separators, preventing
    # path traversal via the torrent name.
    if not reg.match(name):
        raise ValueError, 'name %s disallowed for security reasons' % name
    # Exactly one of 'files' (multi-file) and 'length' (single-file) must
    # be present.
    if info.has_key('files') == info.has_key('length'):
        raise ValueError, 'single/multiple file mix'
    if info.has_key('length'):
        length = info.get('length')
        if type(length) not in ints or length < 0:
            raise ValueError, 'bad metainfo - bad length'
    else:
        files = info.get('files')
        if type(files) != ListType:
            raise ValueError
        for f in files:
            if type(f) != DictType:
                raise ValueError, 'bad metainfo - bad file value'
            length = f.get('length')
            if type(length) not in ints or length < 0:
                raise ValueError, 'bad metainfo - bad length'
            path = f.get('path')
            if type(path) != ListType or path == []:
                raise ValueError, 'bad metainfo - bad path'
            for p in path:
                if type(p) != StringType:
                    raise ValueError, 'bad metainfo - bad path dir'
                if not reg.match(p):
                    raise ValueError, 'path %s disallowed for security reasons' % p
        # O(n^2) duplicate-path scan; acceptable for typical file counts.
        for i in xrange(len(files)):
            for j in xrange(i):
                if files[i]['path'] == files[j]['path']:
                    raise ValueError, 'bad metainfo - duplicate path'
def check_message(message):
    """Validate a complete torrent metainfo dictionary (Python 2).

    Requires a valid 'info' dictionary (see check_info) and a string
    'announce' URL; raises ValueError otherwise.
    """
    if type(message) != DictType:
        raise ValueError
    check_info(message.get('info'))
    if type(message.get('announce')) != StringType:
        raise ValueError
def check_peers(message):
    """Validate a tracker response dictionary (Python 2).

    Accepts either a failure response (a string 'failure reason') or a
    success response whose 'peers' key is a list of peer dictionaries or
    a compact 6-bytes-per-peer string.  Raises ValueError on any
    malformed field.
    """
    if type(message) != DictType:
        raise ValueError
    if message.has_key('failure reason'):
        if type(message['failure reason']) != StringType:
            raise ValueError
        return
    peers = message.get('peers')
    if type(peers) == ListType:
        for p in peers:
            if type(p) != DictType:
                raise ValueError
            if type(p.get('ip')) != StringType:
                raise ValueError
            port = p.get('port')
            # Bug fix: the original tested ``p <= 0`` (the peer dict)
            # instead of the port number, so non-positive ports were
            # never rejected.
            if type(port) not in ints or port <= 0:
                raise ValueError
            if p.has_key('peer id'):
                id = p.get('peer id')
                if type(id) != StringType or len(id) != 20:
                    raise ValueError
    elif type(peers) != StringType or len(peers) % 6 != 0:
        # Compact format: 4 bytes IP + 2 bytes port per peer.
        raise ValueError
    # Optional bookkeeping fields must be positive/non-negative ints or
    # strings when present.
    interval = message.get('interval', 1)
    if type(interval) not in ints or interval <= 0:
        raise ValueError
    minint = message.get('min interval', 1)
    if type(minint) not in ints or minint <= 0:
        raise ValueError
    if type(message.get('tracker id', '')) != StringType:
        raise ValueError
    npeers = message.get('num peers', 0)
    if type(npeers) not in ints or npeers < 0:
        raise ValueError
    dpeers = message.get('done peers', 0)
    if type(dpeers) not in ints or dpeers < 0:
        raise ValueError
    last = message.get('last', 0)
    if type(last) not in ints or last < 0:
        raise ValueError
| nilq/baby-python | python |
from typing import Any, Mapping, Optional

from .net import BayesNet
from .vertex.base import Vertex
class Model:
    """Attribute-style container for named vertices.

    Setting an attribute on a Model stores the value in an internal
    mapping (labelling Vertex values with the attribute name); reading
    an attribute looks it up in that mapping.  Also usable as a no-op
    context manager (``with Model() as m:``).
    """

    def __init__(self, vertices: Optional[Mapping[str, Vertex]] = None) -> None:
        # Write through __dict__ directly so our own __setattr__ (which
        # would treat "_vertices" as a vertex name) is bypassed.  A None
        # default replaces the original mutable ``{}`` default.
        self.__dict__["_vertices"] = {}
        if vertices is not None:
            self.__dict__["_vertices"].update(vertices)

    def to_bayes_net(self) -> BayesNet:
        """Build a BayesNet from the stored values that are Vertex instances."""
        # Non-Vertex values stored on the model are skipped.
        return BayesNet(v for v in self._vertices.values()
                        if isinstance(v, Vertex))

    def __setattr__(self, k: str, v: Any) -> None:
        if k in self.__dict__:
            # Genuine instance attributes (currently only "_vertices")
            # keep normal assignment semantics.
            super(Model, self).__setattr__(k, v)
        else:
            self._vertices[k] = v
            if isinstance(v, Vertex):
                v.set_label(k)

    def __getattr__(self, k: str) -> Any:
        # __getattr__ only fires after normal attribute lookup fails, so
        # the original's "k in self.__dict__" branch was unreachable.
        # NOTE(review): a missing name raises KeyError (not the idiomatic
        # AttributeError) -- kept for backward compatibility with callers.
        return self._vertices[k]

    def __enter__(self, *args: Any, **kwargs: Any) -> 'Model':
        return self

    def __exit__(self, *args: Any, **kwargs: Any) -> None:
        pass
| nilq/baby-python | python |
import h5py
import numpy as np
from versioned_hdf5.replay import (modify_metadata, delete_version,
delete_versions, _recreate_raw_data,
_recreate_hashtable,
_recreate_virtual_dataset)
from versioned_hdf5.hashtable import Hashtable
def setup_vfile(file):
    """Populate a versioned HDF5 file with two versions of test datasets.

    version1: 'test_data' (10k floats, fillvalue 1.0, element 0 set to 0),
    'test_data2' ([1, 2, 3]) and 'group/test_data4' ([1, 2, 3, 4]).
    version2: edits test_data[2000] and test_data4[0], adds 'test_data3'.
    """
    with file.stage_version('version1') as g:
        # Dataset filled entirely from the fill value; element 0 is then
        # overwritten so the data is not completely uniform.
        data = g.create_dataset('test_data', data=None, fillvalue=1., shape=(10000,), chunks=(1000,))
        data[0] = 0.
        g.create_dataset('test_data2', data=[1, 2, 3], chunks=(1000,))
        group = g.create_group('group')
        group.create_dataset('test_data4', data=[1, 2, 3, 4], chunks=(1000,))
    with file.stage_version('version2') as g:
        g['test_data'][2000] = 2.
        g.create_dataset('test_data3', data=[1, 2, 3, 4], chunks=(1000,))
        g['group']['test_data4'][0] = 5
def check_data(file, test_data_fillvalue=1., version2=True, test_data4_fillvalue=0):
    """Assert that the datasets created by setup_vfile are intact.

    Parameters let the expected fill values be overridden after a
    modify_metadata call, and ``version2=False`` skips the checks for a
    deleted 'version2'.
    """
    # version1 contents.
    assert set(file['version1']) == {'test_data', 'test_data2', 'group'}
    assert file['version1']['test_data'].shape == (10000,)
    assert file['version1']['test_data'][0] == 0.
    assert np.all(file['version1']['test_data'][1:] == test_data_fillvalue)

    if version2:
        # version2 adds test_data3 and edits test_data[2000].
        assert set(file['version2']) == {'test_data', 'test_data2',
                                         'test_data3', 'group'}
        assert file['version2']['test_data'].shape == (10000,)
        assert file['version2']['test_data'][0] == 0.
        assert np.all(file['version2']['test_data'][1:2000] == test_data_fillvalue)
        assert file['version2']['test_data'][2000] == 2.
        assert np.all(file['version2']['test_data'][2001:] == test_data_fillvalue)

    assert file['version1']['test_data2'].shape == (3,)
    assert np.all(file['version1']['test_data2'][:] == [1, 2, 3])

    if version2:
        assert file['version2']['test_data2'].shape == (3,)
        assert np.all(file['version2']['test_data2'][:] == [1, 2, 3])

    # test_data3 exists only in version2.
    assert 'test_data3' not in file['version1']

    if version2:
        assert file['version2']['test_data3'].shape == (4,)
        assert np.all(file['version2']['test_data3'][:] == [1, 2, 3, 4])

    # Nested dataset; version2 changed its first element from 1 to 5.
    assert set(file['version1']['group']) == {'test_data4'}
    assert file['version1']['group']['test_data4'].shape == (4,)
    np.testing.assert_equal(file['version1']['group']['test_data4'][:4],
                            [1, 2, 3, 4])
    assert np.all(file['version1']['group']['test_data4'][4:] == test_data4_fillvalue)

    if version2:
        assert set(file['version2']['group']) == {'test_data4'}
        assert file['version2']['group']['test_data4'].shape == (4,)
        np.testing.assert_equal(file['version2']['group']['test_data4'][:4],
                                [5, 2, 3, 4])
        assert np.all(file['version2']['group']['test_data4'][4:] == test_data4_fillvalue)
def test_modify_metadata_compression(vfile):
    """Changing compression on one dataset must not disturb the others."""
    setup_vfile(vfile)
    f = vfile.f

    names = ['test_data', 'test_data2', 'test_data3', 'group/test_data4']

    def assert_compression(expected):
        # expected maps dataset path -> (compression, compression_opts),
        # checked on every version containing the dataset and on the
        # backing raw data.
        for name, (comp, opts) in expected.items():
            versions = ['version2'] if name == 'test_data3' else ['version1', 'version2']
            for version in versions:
                node = vfile[version]
                for part in name.split('/'):
                    node = node[part]
                assert node.compression == comp
                assert node.compression_opts == opts
            node = f['_version_data']
            for part in name.split('/'):
                node = node[part]
            assert node['raw_data'].compression == comp
            assert node['raw_data'].compression_opts == opts

    assert_compression({name: (None, None) for name in names})

    modify_metadata(f, 'test_data2', compression='gzip', compression_opts=3)
    check_data(vfile)

    expected = {name: (None, None) for name in names}
    expected['test_data2'] = ('gzip', 3)
    assert_compression(expected)

    # Make sure the temporary working group has been destroyed.
    assert set(f['_version_data']) == {'test_data', 'test_data2',
                                       'test_data3', 'group', 'versions'}
    assert set(f['_version_data']['group']) == {'test_data4'}
def test_modify_metadata_compressio2(vfile):
    """Changing compression on a nested dataset must not disturb the others.

    NOTE(review): the name looks like a typo for ...compression2; kept
    as-is to avoid changing the public test identifier.
    """
    setup_vfile(vfile)
    f = vfile.f

    names = ['test_data', 'test_data2', 'test_data3', 'group/test_data4']

    def assert_compression(expected):
        # expected maps dataset path -> (compression, compression_opts),
        # checked on every version containing the dataset and on the
        # backing raw data.
        for name, (comp, opts) in expected.items():
            versions = ['version2'] if name == 'test_data3' else ['version1', 'version2']
            for version in versions:
                node = vfile[version]
                for part in name.split('/'):
                    node = node[part]
                assert node.compression == comp
                assert node.compression_opts == opts
            node = f['_version_data']
            for part in name.split('/'):
                node = node[part]
            assert node['raw_data'].compression == comp
            assert node['raw_data'].compression_opts == opts

    assert_compression({name: (None, None) for name in names})

    modify_metadata(f, 'group/test_data4', compression='gzip', compression_opts=3)
    check_data(vfile)

    expected = {name: (None, None) for name in names}
    expected['group/test_data4'] = ('gzip', 3)
    assert_compression(expected)

    # Make sure the temporary working group has been destroyed.
    assert set(f['_version_data']) == {'test_data', 'test_data2',
                                       'test_data3', 'group', 'versions'}
    assert set(f['_version_data']['group']) == {'test_data4'}
def test_modify_metadata_chunks(vfile):
    """Changing the chunk size of one dataset must not disturb the others."""
    setup_vfile(vfile)
    f = vfile.f

    names = ['test_data', 'test_data2', 'test_data3', 'group/test_data4']

    def assert_chunks(expected):
        # expected maps dataset path -> chunk shape, checked on every
        # version containing the dataset and on the backing raw data.
        for name, chunks in expected.items():
            versions = ['version2'] if name == 'test_data3' else ['version1', 'version2']
            for version in versions:
                node = vfile[version]
                for part in name.split('/'):
                    node = node[part]
                assert node.chunks == chunks
            node = f['_version_data']
            for part in name.split('/'):
                node = node[part]
            assert node['raw_data'].chunks == chunks

    assert_chunks({name: (1000,) for name in names})

    modify_metadata(f, 'test_data2', chunks=(500,))
    check_data(vfile)

    expected = {name: (1000,) for name in names}
    expected['test_data2'] = (500,)
    assert_chunks(expected)

    # Make sure the temporary working group has been destroyed.
    assert set(f['_version_data']) == {'test_data', 'test_data2',
                                       'test_data3', 'group', 'versions'}
    assert set(f['_version_data']['group']) == {'test_data4'}
def test_modify_metadata_chunk2(vfile):
    """Changing the chunk size of a nested dataset must not disturb the others.

    NOTE(review): the name looks like a typo for ...chunks2; kept as-is
    to avoid changing the public test identifier.
    """
    setup_vfile(vfile)
    f = vfile.f

    names = ['test_data', 'test_data2', 'test_data3', 'group/test_data4']

    def assert_chunks(expected):
        # expected maps dataset path -> chunk shape, checked on every
        # version containing the dataset and on the backing raw data.
        for name, chunks in expected.items():
            versions = ['version2'] if name == 'test_data3' else ['version1', 'version2']
            for version in versions:
                node = vfile[version]
                for part in name.split('/'):
                    node = node[part]
                assert node.chunks == chunks
            node = f['_version_data']
            for part in name.split('/'):
                node = node[part]
            assert node['raw_data'].chunks == chunks

    assert_chunks({name: (1000,) for name in names})

    modify_metadata(f, 'group/test_data4', chunks=(500,))
    check_data(vfile)

    expected = {name: (1000,) for name in names}
    expected['group/test_data4'] = (500,)
    assert_chunks(expected)

    # Make sure the temporary working group has been destroyed.
    assert set(f['_version_data']) == {'test_data', 'test_data2',
                                       'test_data3', 'group', 'versions'}
    assert set(f['_version_data']['group']) == {'test_data4'}
def test_modify_metadata_dtype(vfile):
    """Changing the dtype of one dataset must not disturb the others."""
    setup_vfile(vfile)
    f = vfile.f

    def assert_dtypes(expected):
        # expected maps dataset path -> dtype, checked on every version
        # containing the dataset and on the backing raw data.
        for name, dtype in expected.items():
            versions = ['version2'] if name == 'test_data3' else ['version1', 'version2']
            for version in versions:
                node = vfile[version]
                for part in name.split('/'):
                    node = node[part]
                assert node.dtype == dtype
            node = f['_version_data']
            for part in name.split('/'):
                node = node[part]
            assert node['raw_data'].dtype == dtype

    before = {'test_data': np.float64, 'test_data2': np.int64,
              'test_data3': np.int64, 'group/test_data4': np.int64}
    assert_dtypes(before)

    modify_metadata(f, 'test_data2', dtype=np.float64)
    check_data(vfile)

    assert_dtypes(dict(before, test_data2=np.float64))

    # Make sure the temporary working group has been destroyed.
    assert set(f['_version_data']) == {'test_data', 'test_data2',
                                       'test_data3', 'group', 'versions'}
    assert set(f['_version_data']['group']) == {'test_data4'}
def test_modify_metadata_dtype2(vfile):
    """Changing the dtype of a nested dataset must not disturb the others."""
    setup_vfile(vfile)
    f = vfile.f

    def assert_dtypes(expected):
        # expected maps dataset path -> dtype, checked on every version
        # containing the dataset and on the backing raw data.
        for name, dtype in expected.items():
            versions = ['version2'] if name == 'test_data3' else ['version1', 'version2']
            for version in versions:
                node = vfile[version]
                for part in name.split('/'):
                    node = node[part]
                assert node.dtype == dtype
            node = f['_version_data']
            for part in name.split('/'):
                node = node[part]
            assert node['raw_data'].dtype == dtype

    before = {'test_data': np.float64, 'test_data2': np.int64,
              'test_data3': np.int64, 'group/test_data4': np.int64}
    assert_dtypes(before)

    modify_metadata(f, 'group/test_data4', dtype=np.float64)
    check_data(vfile)

    assert_dtypes({**before, 'group/test_data4': np.float64})

    # Make sure the temporary working group has been destroyed.
    assert set(f['_version_data']) == {'test_data', 'test_data2',
                                       'test_data3', 'group', 'versions'}
    assert set(f['_version_data']['group']) == {'test_data4'}
def test_modify_metadata_fillvalue1(vfile):
    """Changing the fill value of 'test_data' must not affect other datasets."""
    setup_vfile(vfile)
    f = vfile.f

    def assert_fillvalues(expected):
        # expected maps dataset path -> fill value, checked on every
        # version containing the dataset and on the backing raw data.
        for name, value in expected.items():
            versions = ['version2'] if name == 'test_data3' else ['version1', 'version2']
            for version in versions:
                node = vfile[version]
                for part in name.split('/'):
                    node = node[part]
                assert node.fillvalue == value
            node = f['_version_data']
            for part in name.split('/'):
                node = node[part]
            assert node['raw_data'].fillvalue == value

    before = {'test_data': 1., 'test_data2': 0, 'test_data3': 0,
              'group/test_data4': 0}
    assert_fillvalues(before)

    modify_metadata(f, 'test_data', fillvalue=3.)
    check_data(vfile, test_data_fillvalue=3.)

    assert_fillvalues(dict(before, test_data=3.))

    # Make sure the temporary working group has been destroyed.
    assert set(f['_version_data']) == {'test_data', 'test_data2',
                                       'test_data3', 'group', 'versions'}
    assert set(f['_version_data']['group']) == {'test_data4'}
def test_modify_metadata_fillvalue2(vfile):
    """Changing the fill value of 'test_data2' must not affect other datasets."""
    setup_vfile(vfile)
    f = vfile.f

    def assert_fillvalues(expected):
        # expected maps dataset path -> fill value, checked on every
        # version containing the dataset and on the backing raw data.
        for name, value in expected.items():
            versions = ['version2'] if name == 'test_data3' else ['version1', 'version2']
            for version in versions:
                node = vfile[version]
                for part in name.split('/'):
                    node = node[part]
                assert node.fillvalue == value
            node = f['_version_data']
            for part in name.split('/'):
                node = node[part]
            assert node['raw_data'].fillvalue == value

    before = {'test_data': 1., 'test_data2': 0, 'test_data3': 0,
              'group/test_data4': 0}
    assert_fillvalues(before)

    modify_metadata(f, 'test_data2', fillvalue=3)
    check_data(vfile)

    assert_fillvalues(dict(before, test_data2=3))

    # Make sure the temporary working group has been destroyed.
    assert set(f['_version_data']) == {'test_data', 'test_data2',
                                       'test_data3', 'group', 'versions'}
    assert set(f['_version_data']['group']) == {'test_data4'}
def test_modify_metadata_fillvalue3(vfile):
    """modify_metadata() changes the fillvalue of a dataset nested inside a
    group in every version while leaving all other datasets untouched."""
    setup_vfile(vfile)
    f = vfile.f

    def check_fillvalues(expected_td4):
        # Per-version virtual views.
        assert vfile['version1']['test_data'].fillvalue == 1.
        assert vfile['version2']['test_data'].fillvalue == 1.
        assert vfile['version1']['test_data2'].fillvalue == 0
        assert vfile['version2']['test_data2'].fillvalue == 0
        assert vfile['version2']['test_data3'].fillvalue == 0
        assert vfile['version1']['group']['test_data4'].fillvalue == expected_td4
        assert vfile['version2']['group']['test_data4'].fillvalue == expected_td4
        # Raw datasets backing the versions.
        assert f['_version_data']['test_data']['raw_data'].fillvalue == 1.
        assert f['_version_data']['test_data2']['raw_data'].fillvalue == 0
        assert f['_version_data']['test_data3']['raw_data'].fillvalue == 0
        assert f['_version_data']['group']['test_data4']['raw_data'].fillvalue == expected_td4

    check_fillvalues(0)
    modify_metadata(f, 'group/test_data4', fillvalue=2)
    check_data(vfile)
    check_fillvalues(2)
    # Make sure the tmp group has been destroyed.
    assert set(f['_version_data']) == {'test_data', 'test_data2',
                                       'test_data3', 'group', 'versions'}
    assert set(f['_version_data']['group']) == {'test_data4'}
def test_delete_version(vfile):
    """Deleting one version removes it from the file and prunes raw data
    that only that version referenced."""
    setup_vfile(vfile)
    f = vfile.f
    delete_version(f, 'version2')
    check_data(vfile, version2=False)
    assert list(vfile) == ['version1']
    assert set(f['_version_data']) == {'group', 'test_data', 'test_data2', 'versions'}
    assert set(f['_version_data']['group']) == {'test_data4'}
    # Values written only by version2 must be gone from the raw datasets.
    for value, name in ((2., 'test_data'), (5, 'group/test_data4')):
        assert not np.isin(value, f['_version_data'][name]['raw_data'][:])
def test_delete_versions(vfile):
    """delete_versions() removes several versions at once and prunes raw
    data referenced only by the deleted versions."""
    setup_vfile(vfile)
    # Add a third version so more than one version gets deleted below.
    with vfile.stage_version('version3') as g:
        g['test_data'][2000] = 3.
        g.create_dataset('test_data4', data=[1, 2, 3, 4], chunks=(1000,))
    f = vfile.f
    delete_versions(f, ['version2', 'version3'])
    check_data(vfile, version2=False)
    assert list(vfile) == ['version1']
    assert set(f['_version_data']) == {'group', 'test_data', 'test_data2', 'versions'}
    assert set(f['_version_data']['group']) == {'test_data4'}
    # Values written only by the deleted versions must be gone.
    for value, name in ((2., 'test_data'), (5, 'group/test_data4')):
        assert not np.isin(value, f['_version_data'][name]['raw_data'][:])
def test_delete_versions_no_data(vfile):
    """Deleting the only version that wrote data keeps a fillvalue-only
    dataset intact in the surviving version."""
    with vfile.stage_version('version1') as g:
        g.create_dataset('data', maxshape=(None, None), chunks=(20, 20),
                         shape=(5, 5), dtype=np.dtype('int8'), fillvalue=0)
    with vfile.stage_version('version2') as g:
        g['data'][0] = 1
    delete_versions(vfile.f, ['version2'])
    assert list(vfile) == ['version1']
    remaining = vfile['version1']
    assert list(remaining) == ['data']
    assert remaining['data'].shape == (5, 5)
    # version1 never wrote real data, so everything is still the fillvalue.
    assert np.all(remaining['data'][:] == 0)
def test_delete_versions_no_data2(vfile):
    """Deleting the fillvalue-only base version keeps the later version's
    written data intact."""
    with vfile.stage_version('version1') as g:
        g.create_dataset('data', maxshape=(None, None), chunks=(20, 20),
                         shape=(5, 5), dtype=np.dtype('int8'), fillvalue=0)
    with vfile.stage_version('version2') as g:
        g['data'][0] = 1
    delete_versions(vfile.f, ['version1'])
    assert list(vfile) == ['version2']
    remaining = vfile['version2']
    assert list(remaining) == ['data']
    assert remaining['data'].shape == (5, 5)
    # Row 0 was overwritten with ones; the rest is still the fillvalue.
    assert np.all(remaining['data'][0] == 1)
    assert np.all(remaining['data'][1:] == 0)
def test_delete_versions_nested_groups(vfile):
    """delete_versions() copes with datasets inside nested groups, and the
    surviving versions keep their data."""
    data = []
    with vfile.stage_version('r0') as sv:
        nested = sv.create_group('group1/group2')
        data.append(np.arange(500))
        nested.create_dataset('test_data', maxshape=(None,), chunks=(1000),
                              data=data[0])
    for i in range(1, 11):
        with vfile.stage_version(f'r{i}') as sv:
            data.append(np.random.randint(0, 1000, size=500))
            sv['group1']['group2']['test_data'][:] = data[-1]

    def check_versions(skip=()):
        # Each surviving version holds exactly its own snapshot of the data.
        for i in range(11):
            if i in skip:
                continue
            version = vfile[f'r{i}']
            assert list(version) == ['group1'], i
            assert list(version['group1']) == ['group2']
            assert list(version['group1']['group2']) == ['test_data']
            np.testing.assert_equal(
                version['group1']['group2']['test_data'][:], data[i])

    all_names = {f'r{i}' for i in range(11)}
    assert set(vfile) == all_names
    check_versions()
    delete_versions(vfile, ['r3', 'r6'])
    assert set(vfile) == all_names - {'r3', 'r6'}
    check_versions(skip=(3, 6))
def setup2(vfile):
    """Stage 'test_data' (1000x20, chunks 101x11) in version1, then negate
    every 200th row in version2."""
    base = np.arange(20000).reshape((1000, 20))
    with vfile.stage_version('version1') as g:
        g.create_dataset('test_data', data=base, chunks=(101, 11))
    with vfile.stage_version('version2') as g:
        g['test_data'][::200] = -g['test_data'][::200]
def test_recreate_raw_data(vfile):
    """_recreate_raw_data() builds a tmp raw dataset without the chunks that
    belong only to the versions being removed, preserving chunk contents."""
    setup2(vfile)
    chunks_map = _recreate_raw_data(vfile.f, 'test_data', ['version1'], tmp=True)
    assert len(chunks_map) == 20
    old_raw = vfile.f['_version_data/test_data/raw_data']
    new_raw = vfile.f['_version_data/test_data/_tmp_raw_data']
    assert old_raw.shape == (3030, 11)
    assert new_raw.shape == (2020, 11)
    # Every remapped chunk must carry its data over unchanged.
    for old_slice, new_slice in chunks_map.items():
        before = old_raw[old_slice.raw]
        after = new_raw[new_slice.raw]
        assert before.shape == after.shape
        np.testing.assert_equal(before, after)
def test_recreate_hashtable(vfile):
    """_recreate_hashtable() rebuilds the chunk hash table after raw data is
    recreated, so it matches what a from-scratch dataset would produce."""
    setup2(vfile)
    chunks_map = _recreate_raw_data(vfile.f, 'test_data', ['version1'], tmp=False)
    # Recreate a separate, independent version, with the dataset as it would
    # be with version1 deleted.
    with vfile.stage_version('version2_2', prev_version='') as g:
        g.create_dataset('test_data2',
                         data=np.arange(20000).reshape((1000, 20)),
                         chunks=(101,11))
        g['test_data2'][::200] = -g['test_data2'][::200]
    # orig_hashtable = Hashtable(vfile.f, 'test_data')
    # tmp=True writes the rebuilt table under the temporary name below.
    _recreate_hashtable(vfile.f, 'test_data', chunks_map, tmp=True)
    new_hash_table = Hashtable(vfile.f, 'test_data',
                               hash_table_name='_tmp_hash_table')
    new_hash_table2 = Hashtable(vfile.f, 'test_data2')
    d1 = dict(new_hash_table)
    d2 = dict(new_hash_table2)
    # Both tables must index the same set of chunk hashes.
    assert d1.keys() == d2.keys()
    # The exact slices won't be the same because raw data won't be in the same
    # order
    for h in d1:
        np.testing.assert_equal(
            vfile.f['_version_data/test_data/raw_data'][d1[h].raw],
            vfile.f['_version_data/test_data2/raw_data'][d2[h].raw],
        )
def test_recreate_virtual_dataset(vfile):
    """_recreate_virtual_dataset() rebuilds a version's virtual dataset on
    top of the recreated raw data without changing its contents."""
    setup2(vfile)
    before = vfile.f['_version_data/versions/version2/test_data'][:]
    chunks_map = _recreate_raw_data(vfile.f, 'test_data', ['version1'], tmp=False)
    _recreate_hashtable(vfile.f, 'test_data', chunks_map, tmp=False)
    # tmp=True writes the rebuilt virtual dataset under a temporary name.
    _recreate_virtual_dataset(vfile.f, 'test_data', ['version2'], chunks_map, tmp=True)
    after = vfile.f['_version_data/versions/version2/_tmp_test_data'][:]
    np.testing.assert_equal(before, after)
def test_delete_versions2(vfile):
    """After deleting version1, version2 keeps its data and the raw dataset
    holds exactly the values version2 references."""
    setup2(vfile)
    expected = np.arange(20000).reshape((1000, 20))
    expected[::200] = -expected[::200]
    assert vfile['version2']['test_data'].shape == expected.shape
    delete_versions(vfile, ['version1'])
    assert list(vfile) == ['version2']
    assert list(vfile['version2']) == ['test_data']
    assert vfile['version2']['test_data'].shape == expected.shape
    np.testing.assert_equal(vfile['version2']['test_data'][:], expected)
    # No stale values from version1 may survive in the raw data.
    raw = vfile.f['_version_data/test_data/raw_data'][:]
    assert set(raw.flat) == set(expected.flat)
def test_delete_versions_variable_length_strings(vfile):
    """delete_versions() must cope with variable-length string dtypes."""
    with vfile.stage_version('r0') as sv:
        initial = np.array(['foo'], dtype='O')
        sv.create_dataset('bar', data=initial,
                          dtype=h5py.string_dtype(encoding='ascii'))
    for i in range(1, 11):
        with vfile.stage_version(f'r{i}') as sv:
            sv['bar'].resize((i + 1,))
            sv['bar'][i] = 'foo'
    # Should not raise.
    delete_versions(vfile, ['r2', 'r4', 'r6'])
def test_delete_versions_fillvalue_only_dataset(vfile):
    """Deleting the version that created a fillvalue-only dataset keeps the
    dataset usable and writable in later versions."""
    with vfile.stage_version('r0') as sv:
        sv.create_dataset('fillvalue_only', shape=(6,),
                          dtype=np.dtype('int64'), data=None,
                          maxshape=(None,), chunks=(10000,), fillvalue=0)
        sv.create_dataset('has_data', shape=(6,), dtype=np.dtype('int64'),
                          data=np.arange(6), maxshape=(None,),
                          chunks=(10000,), fillvalue=0)
    with vfile.stage_version('r1') as sv:
        sv['has_data'] = np.arange(5, -1, -1)
    delete_versions(vfile, ['r0'])
    # The fillvalue-only dataset must still be writable after the delete.
    with vfile.stage_version('r2') as sv:
        sv['fillvalue_only'][0] = 1
    assert set(vfile) == {'r1', 'r2'}
    for version in ('r1', 'r2'):
        assert set(vfile[version]) == {'fillvalue_only', 'has_data'}
        np.testing.assert_equal(vfile[version]['has_data'][:],
                                np.arange(5, -1, -1))
    np.testing.assert_equal(vfile['r1']['fillvalue_only'][:], 0)
    np.testing.assert_equal(vfile['r2']['fillvalue_only'][:],
                            np.array([1, 0, 0, 0, 0, 0]))
def test_delete_versions_current_version(vfile):
    """When the current version is deleted, the newest surviving version
    becomes current."""
    with vfile.stage_version('r0') as sv:
        sv.create_dataset('bar', data=np.arange(10))
    for i in range(1, 11):
        with vfile.stage_version(f'r{i}') as sv:
            sv['bar'] = np.arange(10 + i)
    delete_versions(vfile, ['r2', 'r4', 'r6', 'r8', 'r9', 'r10'])
    current = vfile.current_version
    assert current == 'r7'
    np.testing.assert_equal(vfile[current]['bar'][:], np.arange(17))
| nilq/baby-python | python |
import copy
from geometry_utils.three_d.point3 import is_point3
from geometry_utils.two_d.path2 import Path2
from geometry_utils.three_d.path3 import is_path3
from geometry_utils.two_d.edge2 import Edge2
from geometry_utils.two_d.point2 import Point2, is_point2
class PathFieldInterpreter(Path2, object):
    """Serialises Path2 objects to the compact 'PathField' string format and
    parses PathField strings back into Path2 instances.

    BUG FIX in this revision: load_path() tested ``str.find`` results with
    ``!= 1`` instead of ``!= -1``, so attribute tags and type delimiters were
    mis-handled whenever present (find() returns -1 only when not found).
    """
    # Symbols used in the PathField
    NEW_PATH_CHAR = '|'
    LAYER_CHAR = '&'
    NAME_CHAR = '@'
    POINT_SEPARATOR = ';'
    POINT_ELEMENT_SEPARATOR = ':'
    CLOSED_PATH_INDICATOR = '#'
    MIRRORED_PATH_INDICATOR = '^'
    MIRRORED_PATH_POINT_INDICATOR = '*'
    LINE_STYLE_INDICATOR = '%'
    FILL_INDICATOR = '#'
    CURVE_LARGE_CLOCK = '{'
    CURVE_LARGE_ANTICLOCK = '}'
    CURVE_SMALL_CLOCK = '('
    CURVE_SMALL_ANTICLOCK = ')'
    RELATIVE_CHAR = '~'
    TYPE_DELIMITER_CHAR = '"'
    INCLUDE_START = '?'
    INCLUDE_DELIMITER = ','
    INCLUDE_CONDITION_DELIMITER = '?'
    SPECIAL_SHAPES = '_'
    FUNCTION_CHAR = '!'
    TAG_START_CHAR = '<'
    TAG_END_CHAR = '>'
    def __init__(self):
        super(PathFieldInterpreter, self).__init__()
        # Accumulates serialized paths across add_path() calls.
        self.write_buffer = ''
        # Holds the raw string being parsed by load_path().
        self.read_buffer = ''
        # Named path-field snippets usable via the '?' include syntax.
        self.variables = {}
    def clear_path(self):
        """Reset both buffers and the interpreter's own edge list."""
        self.write_buffer = ''
        self.read_buffer = ''
        self.list_of_edges = []
    def add_path(self, path):
        """
        Add a Path2() to the PathField and return the PathField string.
        The paths accumulate in the buffer, so multiple calls to this function will
        build up a PathField string containing multiple paths.
        @param path: Path2() instance
        @return: PathField string
        """
        def format_num(num):
            """
            Formats a number to PathField spec:
            - Rounded to 2dp.
            - Any trailing 0's and .'s removed.
            - eg: 12.00003535 -> 12
            - eg: 12.300 -> 12.3
            - eg: 12.000000 -> 12
            @param num: float or integer.
            @return: formatted number as string
            """
            try:
                str_num = "%.2f" % float(num)
            except ValueError:
                # Non-numeric values (e.g. symbolic entries) pass through as-is.
                return "%s" % num
            if str_num == '0.00':
                return '0'
            return str_num.rstrip('0').rstrip('.')
        def format_point(point):
            """Return the point's coordinates as a list of formatted strings
            (2 entries for Point2, 3 for Point3)."""
            if is_point2(point):
                point_string = [format_num(point.x), format_num(point.y)]
                return point_string
            elif is_point3(point):
                point_string = [format_num(point.x), format_num(point.y), format_num(point.z)]
                return point_string
            else:
                raise TypeError('Argument must be a type of Point2 or Point3')
        def get_curve_indicator(_edge):
            """
            Retrieves the correct curve indicator given large and clockwise parameters
            for a curve.
            @param _edge:
            @return:
            """
            if _edge.large and _edge.clockwise:
                return self.CURVE_LARGE_CLOCK
            elif _edge.large and not _edge.clockwise:
                return self.CURVE_LARGE_ANTICLOCK
            elif not _edge.large and _edge.clockwise:
                return self.CURVE_SMALL_CLOCK
            elif not _edge.large and not _edge.clockwise:
                return self.CURVE_SMALL_ANTICLOCK
        def add_point(_index, point, _last):
            # Writes only the coordinate components that differ from the
            # previously written point, per the PathField compression spec.
            # NOTE(review): the `self.path_length` reads below use the
            # interpreter's own inherited path, not `path` being serialized —
            # looks suspicious; confirm intent against callers.
            delimiter_buffer = ''
            point_string = format_point(point)
            if point_string[0] != _last[0]:
                self.write_buffer += point_string[0]
                _last[0] = point_string[0]
            elif _index == 0 and self.path_length == 1:
                self.write_buffer += _last[0]
            delimiter_buffer += self.POINT_ELEMENT_SEPARATOR
            if point_string[1] != _last[1]:
                self.write_buffer += delimiter_buffer + point_string[1]
                _last[1] = point_string[1]
                delimiter_buffer = self.POINT_ELEMENT_SEPARATOR
            elif _index == 0 and self.path_length == 1:
                self.write_buffer += delimiter_buffer + _last[1]
                delimiter_buffer = self.POINT_ELEMENT_SEPARATOR
            else:
                delimiter_buffer += self.POINT_ELEMENT_SEPARATOR
            if is_point3(point):
                if point_string[2] != _last[2]:
                    self.write_buffer += delimiter_buffer + point_string[2]
                    _last[2] = format_num(point.z)
            return _last
        # If there is already a path in the buffer, append the path separator first
        if self.write_buffer != '':
            self.write_buffer += self.NEW_PATH_CHAR
        # Write out layer names if given
        if path.layers:
            first = True
            for layer in path.layers:
                if not first:
                    self.write_buffer += ','
                self.write_buffer += layer
                first = False
            self.write_buffer += self.LAYER_CHAR
        # Write out path name if given
        if path.name != '':
            self.write_buffer += path.name + self.NAME_CHAR
        # State variables, initialised to 0 so if first point is 0, 0, 0 the values wont be written
        # as required by the spec
        last = ['0', '0', '0']
        last_r = '0'
        indicator_buffer = ''
        path_length = path.path_length
        last_index = path_length - 1
        # Loop through the points and write them out
        for index, edge in enumerate(path.list_of_edges):
            # If this is the last point in a closed path, output the closed path indicator, rather than the xyz pos
            if path.is_closed and index == last_index:
                self.write_buffer += self.CLOSED_PATH_INDICATOR
            else:
                if index == 0 or edge.p1 != path.list_of_edges[index - 1].p2:
                    last = add_point(index, edge.p1, last)
                    if index != last_index:
                        self.write_buffer += self.POINT_SEPARATOR
                last = add_point(index, edge.p2, last)
            # Only a valid curve if all three curve parameters are present
            if edge.is_arc():
                self.write_buffer += get_curve_indicator(edge)
                if format_num(edge.radius) != last_r:
                    self.write_buffer += format_num(edge.radius)
                    last_r = format_num(edge.radius)
            indicator_buffer = ''
            # Add point name if given
            # Skip the point name if its the last point in a closed path, as path name is invalid
            # and extra comma not needed
            if not (index == last_index and path.is_closed):
                indicator_buffer += ','
                if edge.p1.name:
                    self.write_buffer += indicator_buffer + edge.p1.name
                elif edge.p2.name:
                    self.write_buffer += indicator_buffer + edge.p2.name
                    indicator_buffer = ''
            # Add edge name if given
            indicator_buffer += ','
            if edge.name:
                self.write_buffer += indicator_buffer + edge.name
                indicator_buffer = ''
            # Add edge style if given
            if edge.style:
                self.write_buffer += indicator_buffer + self.LINE_STYLE_INDICATOR + edge.style
                indicator_buffer = ''
            if index != last_index:
                self.write_buffer += self.POINT_SEPARATOR
        if path.fill != '':
            if indicator_buffer != '':
                if path.list_of_edges[-1].is_arc():
                    self.write_buffer += indicator_buffer + self.FILL_INDICATOR
                    self.write_buffer += path.fill
            else:
                self.write_buffer += self.FILL_INDICATOR + path.fill
        outbuf = self.write_buffer.replace(';;', ';')
        return outbuf
    def parse_curve_def(self, curve_def, edit_mode):
        """
        Turns arc definition into clockwise, large and radius attributes.
        @param curve_def: arc definition eg: '(10'
        @param edit_mode:
        @return: clockwise (bool), large (bool), radius (num) (if radius is not given, returns -1)
        """
        if curve_def[0] == self.CURVE_LARGE_ANTICLOCK:
            clockwise = False
            large = True
        elif curve_def[0] == self.CURVE_LARGE_CLOCK:
            clockwise = True
            large = True
        elif curve_def[0] == self.CURVE_SMALL_ANTICLOCK:
            clockwise = False
            large = False
        else:
            clockwise = True
            large = False
        if edit_mode:
            # In edit mode the radius is kept as the raw string.
            return clockwise, large, curve_def[1:]
        elif len(curve_def) == 1:
            # No radius given; caller substitutes the previous radius.
            return clockwise, large, -1
        else:
            return clockwise, large, float(curve_def[1:])
    def split_into_paths(self, path_field):
        """Split a PathField string into its per-path substrings."""
        paths = path_field.split(self.NEW_PATH_CHAR)
        return paths
    def load_path(self, path_field, edit_mode=False, override_data=None, return_single=None,
                  point_name_prefix='', round_value=2, enlarge_offset=0):
        """
        Reads a PathField string and outputs a list of Path2s
        @param path_field: string
        @param edit_mode: boolean used for the shape editor
        @param override_data:
        @param return_single:
        @return: [Path2]
        @param point_name_prefix:
        @param round_value: int required number of decimal places
        @param enlarge_offset: enlarge_offset only works for pre-defined shapes ie rect / diamond etc
        """
        if override_data is None:
            override_data = {}
        out_paths = []
        self.read_buffer = path_field
        path_fields = self.split_into_paths(self.read_buffer)
        for path_str in path_fields:
            if len(path_str) == 0:
                continue
            path = Path2()
            if path_str[0] == self.TAG_START_CHAR:
                index = path_str[1:].find(self.TAG_END_CHAR)
                # BUG FIX: was 'index != 1' — str.find returns -1 when the
                # closing tag is missing, so the old test processed malformed
                # tags and skipped nothing.
                if index != -1:
                    self.decode_attributes(path, path_str[1:index + 1])
                    path_str = path_str[index + 2:]
            if path_str[0] == self.TYPE_DELIMITER_CHAR:
                index = path_str[1:].find(self.TYPE_DELIMITER_CHAR)
                # BUG FIX: was 'index != 1' (see above).
                if index != -1:
                    path.type = path_str[1:index + 1]
                    path_str = path_str[index + 2:]
            # Check if layers are specified
            index = path_str.find(self.LAYER_CHAR)
            if index != -1:
                path.layers = path_str[:index].split(',')
                path_str = path_str[index + 1:]
            # Check if a path name has been specified
            index = path_str.find(self.NAME_CHAR)
            if index != -1:
                path.name = path_str[:index]
                # Check if the name has been overridden
                if path.name in override_data and 'rename' in override_data[path.name]:
                    path.name = override_data[path.name]['rename']
                path_str = path_str[index + 1:]  # strip off the name now we've processed it
            # Check for special shapes
            if path_str.startswith(self.SPECIAL_SHAPES):
                point_separator = path_str.find(';')
                if point_separator == -1:
                    function_data = path_str[1:]
                    path_str = ''
                else:
                    function_data = path_str[1:point_separator]
                    path_str = path_str[point_separator + 1:]
                # NOTE(review): PathFieldShapes is not imported in this module —
                # confirm it is supplied elsewhere, otherwise this raises NameError.
                special_paths = PathFieldShapes.process_special_functions(path_field_interpreter=self,
                                                                          function_data=function_data,
                                                                          path2=path,
                                                                          previous_paths=out_paths,
                                                                          override_data=override_data,
                                                                          enlarge_offset=enlarge_offset)
                for special_path in special_paths:
                    out_paths.append(special_path)
                    if return_single is not None and special_path.name == return_single:
                        return special_path
                if path_str in ('', ';'):
                    continue
            points = path_str.split(self.POINT_SEPARATOR)
            # State variables
            last_edge = Edge2()
            last_r = 0.0
            is_closed = False
            is_mirrored = False
            mirrored_point = -1
            if self.CLOSED_PATH_INDICATOR in points[len(points) - 1]:  # Check if path is closed
                is_closed = True
            if self.MIRRORED_PATH_INDICATOR in points[len(points) - 1]:  # Check if path is mirrored
                is_mirrored = True
            for index, point in enumerate(points):
                default_point_name = "%s%d" % (point_name_prefix, index)
                edge_d = Edge2(Point2(), Point2(), 0, False, False)
                # if the path is closed, process the last point differently as the format could be quite different,
                # especially if there is a fill colour specified
                if point.startswith(self.INCLUDE_START):
                    if self.process_include_tag(point, path, last_edge, edit_mode):
                        continue
                elif point.startswith(self.FUNCTION_CHAR):
                    path_field_functions = PathFieldFunctions()
                    path_field_functions.process(point, path)
                elif is_closed and point is points[len(points) - 1]:  # last point of a closed path
                    self.process_closed_point(point, path, last_edge, last_r, edit_mode)
                    break
                elif is_mirrored:  # mirrored point
                    if point is points[len(points) - 1]:
                        self.process_mirrored_points(point, edge_d, path,
                                                     last_edge, last_r, mirrored_point, edit_mode, default_point_name,
                                                     round_value=round_value)
                        break
                    else:
                        if len(point) > 0 and point[0] == self.MIRRORED_PATH_POINT_INDICATOR:
                            mirrored_point = path.path_length - 1
                            point = point[1:]
                            # if edit_mode:
                            #     path.points[-1]['mirror'] = self.MIRRORED_PATH_POINT_INDICATOR
                        self.process_normal_point(point, edge_d, path, last_edge, last_r,
                                                  edit_mode, default_point_name,
                                                  round_value=round_value)
                else:  # Normal point
                    self.process_normal_point(point, edge_d, path, last_edge, last_r,
                                              edit_mode, default_point_name,
                                              round_value=round_value)
                if last_edge.is_arc():
                    last_r = last_edge.radius
                last_edge = path.list_of_edges[-1]
            if not is_closed and path.path_length > 1:
                del path.list_of_edges[-1]
            if path.is_incomplete_circle():
                path.complete_circle()
            if return_single is not None and path.name == return_single:
                return path
            out_paths.append(path)
        if return_single is None:
            return out_paths
        else:
            return None
    def process_include_tag(self, tag, path, last_edge, edit_mode):
        """Expand a '?variable,...' include entry into *path*.

        Returns True when the include was consumed (caller should continue
        with the next point), False when edit mode recorded it as an edge.
        """
        function_data = tag.lstrip(self.INCLUDE_START)
        edge_type = 'pp'
        offset_vector = last_edge.p1.to_vector2()
        valid = True
        main_include_data = function_data.split(self.INCLUDE_CONDITION_DELIMITER)
        if len(main_include_data) > 1 and main_include_data[1] != '':
            try:
                valid = bool(int(main_include_data[1]))
            except ValueError:
                valid = True
        include_data = main_include_data[0].split(self.INCLUDE_DELIMITER)
        variable_name = include_data[0]
        if len(include_data) > 1 and include_data[1] != '':
            edge_type = include_data[1]
        if len(include_data) > 2 and include_data[2] != '':
            try:
                offset_vector.x = float(include_data[2])
            except ValueError:
                offset_vector.x = include_data[2]
        if len(include_data) > 3 and include_data[3] != '':
            try:
                offset_vector.y = float(include_data[3])
            except ValueError:
                offset_vector.y = include_data[3]
        if edit_mode:
            edge = Edge2(Point2(offset_vector.x, offset_vector.y), Point2())
            edge.name = variable_name
            edge.type = edge_type
            path.list_of_edges.append(edge)
            return False
        if valid:
            # Parse the stored snippet and splice it in at the offset.
            path_string = self.variables.get(variable_name, ';')
            new_path2 = self.load_path(path_string, point_name_prefix=variable_name + '_')[0]
            result = new_path2.offset(offset_vector)
            path += result
            return True
        else:
            path.list_of_edges.append(Edge2(Point2(offset_vector.x, offset_vector.y), Point2()))
            return True
    def process_mirrored_points(self, point, edge_d, path, last_edge, last_r, mirrored_point, edit_mode, default_point_name,
                                round_value):
        """Process the final point of a mirrored path: append the point, then
        reflect the earlier edges about the mirror axis (vertical when start
        and mirror points share x, horizontal when they share y)."""
        self.process_normal_point(point[:-1], edge_d, path, last_edge, last_r, edit_mode, default_point_name, round_value)
        if edit_mode:
            # path.list_of_edges.append('mirror')
            return
        local_path_edges = copy.deepcopy(path.list_of_edges)
        if (path.list_of_edges[0].p1.y == path.list_of_edges[mirrored_point].p1.y or
                path.list_of_edges[0].p1.x == path.list_of_edges[mirrored_point].p1.x):
            held_arc = None
            if path.list_of_edges[0].p1.x == path.list_of_edges[mirrored_point].p1.x:
                offset = path.list_of_edges[0].p1.x * 2
                mirror_x = True
            else:
                offset = path.list_of_edges[0].p1.y * 2
                mirror_x = False
            if mirrored_point != -1:
                # Skip edges that would duplicate the mirror point itself.
                end_point = path.list_of_edges[-1].p1
                for local_path_edge in reversed(local_path_edges[:mirrored_point]):
                    mirrored_point -= 1
                    if (not mirror_x and offset - local_path_edge.p1.y == end_point.y and
                            local_path_edge.p1.x == end_point.x):
                        break
                    elif (mirror_x and local_path_edge.p1.y == end_point.y and
                            offset - local_path_edge.p1.x == end_point.x):
                        break
            for local_path_edge in reversed(local_path_edges[:mirrored_point]):
                if mirror_x:
                    edge_d.p1.x = offset - local_path_edge.p1.x
                    edge_d.p1.y = local_path_edge.p1.y
                    edge_d.p2.x = offset - local_path_edge.p2.x
                    edge_d.p2.y = local_path_edge.p2.y
                else:
                    edge_d.p1.x = local_path_edge.p1.x
                    edge_d.p1.y = offset - local_path_edge.p1.y
                    edge_d.p2.x = local_path_edge.p2.x
                    edge_d.p2.y = offset - local_path_edge.p2.y
                if is_path3(path):
                    edge_d.p1.z = local_path_edge.p1.z
                    edge_d.p2.z = local_path_edge.p2.z
                if held_arc is not None:
                    edge_d.radius = held_arc.radius
                    edge_d.clockwise = held_arc.clockwise
                    edge_d.large = held_arc.large
                    held_arc = None
                if local_path_edge.radius:
                    held_arc = local_path_edge
                path.list_of_edges.append(edge_d)
        else:
            return
    def process_closed_point(self, point, path, last_edge, last_r, edit_mode):
        """
        Closed path, last point xyz is same as first point
        @param point:
        @param path:
        @param last_edge:
        @param edit_mode:
        """
        path.list_of_edges[-1].p2 = copy.deepcopy(path.list_of_edges[0].p1)
        if len(point) == 1:
            return
        point = point[1:]  # Strip off the closed path indicator, now we've processed the position
        edge_d = path.list_of_edges[-1]
        if (point[0] == self.CURVE_SMALL_CLOCK or point[0] == self.CURVE_SMALL_ANTICLOCK or
                point[0] == self.CURVE_LARGE_CLOCK or point[0] == self.CURVE_LARGE_ANTICLOCK):
            idx = point.find(',')
            if idx == -1:
                curve_def = point
                point = ''
            else:
                curve_def = point[:idx]
                point = point[idx + 1:]
            clock, large, radius = self.parse_curve_def(curve_def, edit_mode)
            edge_d.clockwise = clock
            edge_d.large = large
            if radius == -1:
                # No radius in the definition: reuse the previous one.
                edge_d.radius = last_r
            else:
                edge_d.radius = radius
        if len(point) == 0:
            #path.list_of_edges.append(edge_d)
            return
        if point[0] == ',':
            point = point[1:]
        idx = point.find(self.FILL_INDICATOR)
        if idx == -1:
            edge_def = point
            point = ''
        else:
            edge_def = point[:idx]
            point = point[idx + 1:]
        parts = edge_def.split(self.LINE_STYLE_INDICATOR)
        if parts[0] != '':
            edge_d.name = parts[0]
        if len(parts) > 1 and parts[1] != '':
            edge_d.style = parts[1]
        if len(point) > 0 and point[0] == self.FILL_INDICATOR:
            point = point[1:]
        path.fill = point
    @staticmethod
    def decode_attributes(path, attributes_str):
        """Parse 'key:value;key2;...' pairs into path.attributes
        (a bare key maps to True)."""
        attributes = attributes_str.split(';')
        for attribute_str in attributes:
            attribute = attribute_str.split(':')
            if len(attribute) == 1:
                value = True
            else:
                value = attribute[1]
            path.attributes[attribute[0]] = value
    def join_paths_left_right(self, path_field_left, path_field_right, merge_flip=True, edit_mode=False):
        """Parse two path fields and join each left path to its right
        counterpart (flipping the right one vertically when merge_flip).
        Returns a list of joined Path2s, or the non-empty side alone."""
        path_left_list = []
        path_right_list = []
        if path_field_left is not None and path_field_left != '':
            path_left_list = self.load_path(path_field_left, edit_mode=edit_mode)
        if path_field_right is not None and path_field_right != '':
            path_right_list = self.load_path(path_field_right, edit_mode=edit_mode)
        if ((path_field_left == '' or len(path_left_list) == 0) and
                (path_field_right == '' or len(path_right_list) == 0)):
            return [None]
        elif path_field_left == '' or len(path_left_list) == 0:
            return path_right_list
        elif path_field_right == '' or len(path_right_list) == 0:
            return path_left_list
        paths = []
        for path_left, path_right in zip(path_left_list, path_right_list):
            path = Path2()
            if not edit_mode:
                offset_y = max(edge.maximum_y() for edge in path_left.list_of_edges)
                if merge_flip:
                    path_right.flip_vertical(offset_y=offset_y)
            path.list_of_edges = path_left + path_right[1:]
            paths.append(path)
        return paths
    def process_normal_point(self, point, edge_d, path, last_edge, last_r, edit_mode, default_point_name, round_value):
        """Parse one regular point entry ('x:y[:z][curve][,name[,edge...]]')
        into edge_d and append it to *path*."""
        idx1 = point.find(self.CURVE_SMALL_CLOCK)
        if idx1 == -1:
            idx1 = point.find(self.CURVE_SMALL_ANTICLOCK)
            if idx1 == -1:
                idx1 = point.find(self.CURVE_LARGE_CLOCK)
                if idx1 == -1:
                    idx1 = point.find(self.CURVE_LARGE_ANTICLOCK)
                    if idx1 == -1:
                        idx1 = point.find(',')
        # extract the position part of the point.
        if idx1 != -1:
            position = point[:idx1]
            point = point[idx1:]
        else:
            position = point
            point = ''
        xyz = position.split(self.POINT_ELEMENT_SEPARATOR)
        while len(xyz) < 3:
            xyz.append('')
        # Missing components inherit the previous point's coordinates.
        edge_d.p1.x = self.get_value(xyz[0], last_edge.p1.x, round_value)
        edge_d.p1.y = self.get_value(xyz[1], last_edge.p1.y, round_value)
        # if is_path3(path):
        #     edge_d.p1.z = self.get_value(xyz[2], last_edge.p1.z, round_value)
        # Now process the curve definition if there is one
        if len(point) == 0:
            edge_d.p1.name = default_point_name
            path.list_of_edges.append(edge_d)
            path.make_continuous()
            return
        # Look for a curve definition, it should be terminated either by a comma or be the whole string
        # Extract it from the point
        if point[0] in [self.CURVE_LARGE_ANTICLOCK,
                        self.CURVE_LARGE_CLOCK,
                        self.CURVE_SMALL_ANTICLOCK,
                        self.CURVE_SMALL_CLOCK]:
            idx = point.find(',')
            if idx == -1:
                curve_def = point
                point = ''
            else:
                curve_def = point[:idx]
                point = point[idx:]
            # Process the curve def
            clock, large, radius = self.parse_curve_def(curve_def, edit_mode)
            edge_d.clockwise = clock
            edge_d.large = large
            if radius == -1:
                edge_d.radius = last_r
            else:
                edge_d.radius = radius
        point = point[1:]
        if len(point) == 0:
            path.list_of_edges.append(edge_d)
            edge_d.p1.name = default_point_name
            path.make_continuous()
            return
        # Look for a point name and edge def if given
        parts = point.split(',')
        if parts[0] != '':
            edge_d.p1.name = parts[0]
        else:
            edge_d.p1.name = default_point_name
        if len(parts) > 1 and self.LINE_STYLE_INDICATOR in parts[1]:
            edge_def = parts[1].split(self.LINE_STYLE_INDICATOR)
            if edge_def[0] != '':
                edge_d.name = edge_def[0]
            edge_d.style = edge_def[1]
        elif len(parts) > 1 and parts[1] != '':
            edge_d.name = parts[1]
        if len(parts) > 2 and parts[2] != '':
            edge_d.left_name = parts[2]
        if len(parts) > 3 and parts[3] != '':
            edge_d.right_name = parts[3]
        path.list_of_edges.append(edge_d)
        path.make_continuous()
    def get_value(self, in_value, last_value, round_value):
        """Convert a coordinate token to a float, resolving '' to the
        previous value and a leading '~' to a relative offset; non-numeric
        tokens are returned unchanged."""
        if in_value == '':
            r_value = last_value
            return r_value
        relative = False
        if in_value.startswith(self.RELATIVE_CHAR):
            relative = True
            in_value = in_value[1:]
        try:
            r_value = float(in_value)
            if relative:
                r_value += last_value
            r_value = round(r_value, round_value)
        except ValueError:
            r_value = in_value
        return r_value
class PathFieldFunctions:
    """Dispatcher for '!' function entries embedded in a PathField string."""
    def __init__(self):
        pass
    def process(self, point, path):
        """Parse a function point (e.g. '!STR,arg1,...') and dispatch to the
        matching handler, mutating *path* in place."""
        arguments = point.split(',')
        function_type = arguments[0][1:].upper()
        if function_type == 'STR':
            return self.swept_top_rail(arguments[1:], path)
        else:
            # BUG FIX: was `assert False, ...` — asserts are stripped under
            # `python -O`, so bad input would fall through silently.
            raise ValueError('unknown function type: %s' % function_type)
    def swept_top_rail(self, arguments, path):
        """Append the edges of a swept (arched) top rail to *path*.

        arguments: [end_style, chord_height, end_x, (n_bars, bar_width)].
        NOTE(review): radius_of_chord is not imported in this module —
        confirm it is provided by the application before use.
        """
        current_edge = path.list_of_edges[-1]
        end_style = arguments[0]
        chord_height = float(arguments[1])
        end_x = float(arguments[2])
        if len(arguments) > 3:
            number_of_inclusive_bars = float(arguments[3])
            inclusive_bars_width = float(arguments[4])
        else:
            number_of_inclusive_bars = 0
            inclusive_bars_width = 0
        if end_style == "":
            chord_width = ((end_x - current_edge.p1.x - number_of_inclusive_bars * inclusive_bars_width) /
                           (number_of_inclusive_bars + 1))
            if chord_height > chord_width / 2:
                chord_height = chord_width / 2
            new_x = current_edge.p1.x + chord_width
            radius = radius_of_chord(chord_width / 2, chord_height)
            # BUG FIX: was `current_edge.y` (three occurrences below) — Edge2
            # has no 'y' attribute; the rail runs at the first point's
            # y coordinate, current_edge.p1.y, matching the p1.x use above.
            path.list_of_edges.append(Edge2(Point2(new_x, current_edge.p1.y), Point2(), radius, True, False))
            while number_of_inclusive_bars > 0:
                new_x += inclusive_bars_width
                path.list_of_edges.append(Edge2(Point2(new_x, current_edge.p1.y)))
                new_x += chord_width
                path.list_of_edges.append(Edge2(Point2(new_x, current_edge.p1.y), Point2(), radius, True, False))
                number_of_inclusive_bars -= 1
        elif end_style in ('l', 'L', 'r', 'R'):
            chord_width = (end_x - current_edge.p1.x) * 2
            if chord_height > chord_width:
                chord_height = chord_width
            radius = radius_of_chord(chord_width / 2, chord_height)
            if end_style in ('r', 'R'):
                chord_height = - chord_height
            end_y = current_edge.p1.y + chord_height
            # BUG FIX: was `path.points.append` — Path2 stores its edges in
            # list_of_edges everywhere else in this module.
            path.list_of_edges.append(Edge2(Point2(end_x, end_y), Point2(), radius, True, False))
| nilq/baby-python | python |
################################
# OpenCTI Backup Files #
################################
import os
import yaml
import json
from pycti import OpenCTIConnectorHelper, get_config_variable
class BackupFilesConnector:
    """OpenCTI stream connector that mirrors every entity bundle to JSON
    files under ``<backup_path>/opencti_data/<type>/<first uuid char>/``.

    Improvements over the previous revision: the duplicated create/update
    branches are merged, path building uses os.path.join, and the racy
    ``exists() + mkdir()`` pairs are replaced with ``os.makedirs(exist_ok=True)``.
    """
    def __init__(self):
        config_file_path = os.path.dirname(os.path.abspath(__file__)) + "/config.yml"
        config = (
            yaml.load(open(config_file_path), Loader=yaml.FullLoader)
            if os.path.isfile(config_file_path)
            else {}
        )
        self.helper = OpenCTIConnectorHelper(config)
        # Extra config
        self.backup_protocol = get_config_variable(
            "BACKUP_PROTOCOL", ["backup", "protocol"], config
        )
        self.backup_path = get_config_variable(
            "BACKUP_PATH", ["backup", "path"], config
        )
    def _entity_dir(self, entity_type, entity_id):
        """Return the directory holding *entity_id*'s file, sharded by the
        first character of the UUID part of the STIX id."""
        shard = entity_id.split("--")[1][0]
        return os.path.join(self.backup_path, "opencti_data", entity_type, shard)
    def write_file(self, entity_type, entity_id, bundle):
        """Write *bundle* as pretty-printed JSON, creating directories as needed."""
        directory = self._entity_dir(entity_type, entity_id)
        os.makedirs(directory, exist_ok=True)  # race-free vs. exists()+mkdir()
        with open(os.path.join(directory, entity_id + ".json"), "w") as file:
            json.dump(bundle, file, indent=4)
    def delete_file(self, entity_type, entity_id):
        """Delete the JSON file for *entity_id* if it exists (no-op otherwise)."""
        file_path = os.path.join(
            self._entity_dir(entity_type, entity_id), entity_id + ".json"
        )
        if os.path.isfile(file_path):
            os.unlink(file_path)
    def _process_message(self, msg):
        """Handle one stream event: write the bundle on create/update,
        remove the backing file on delete."""
        if msg.event not in ("create", "update", "delete"):
            return
        self.helper.log_info("Processing event " + msg.id)
        data = json.loads(msg.data)
        entity = data["data"]
        if msg.event == "delete":
            self.delete_file(entity["type"], entity["id"])
        else:
            # create and update produce an identical bundle payload.
            bundle = {
                "type": "bundle",
                "x_opencti_event_version": data["version"],
                "objects": [entity],
            }
            self.write_file(entity["type"], entity["id"], bundle)
    def start(self):
        """Validate the backup directory and start consuming the stream."""
        # Check if the directory exists
        if not os.path.exists(self.backup_path):
            raise ValueError("Backup path does not exist")
        os.makedirs(os.path.join(self.backup_path, "opencti_data"), exist_ok=True)
        self.helper.listen_stream(self._process_message)
if __name__ == "__main__":
    # Entrypoint: build the connector and block on the event stream.
    connector = BackupFilesConnector()
    connector.start()
| nilq/baby-python | python |
# import asyncio
import streamlit as st
from constants import *
from utils import get_client

# Page header and sidebar scaffolding for the news browser UI.
st.title('News Nuggets 📰')
st.sidebar.title("News App preferences! 📝")
# Country selector; index=5 picks the default entry of `countries`
# (defined in constants — TODO confirm which country that is).
country_choice = st.sidebar.selectbox("Country 🎌:", options=countries,
                                      index=5,
                                      help='Choose the country whose news you want to see👇')
# Either browse curated headlines by topic or run a free-text search.
search_choice = st.sidebar.radio('Search News by : ', options=['Top Headlines', 'Search Term'])
# Build the gnews client and fetch `news_ls` according to the chosen mode.
if search_choice == 'Top Headlines':
    Client = get_client()
    # Topic category (e.g. GENERAL, SPORTS) from constants.topics.
    category = st.sidebar.selectbox('Topics:',
                                    options=topics, index=0)
    st.sidebar.write("## Enter search specs 🔎")
    time_span = st.sidebar.text_input("Time Span: ⏲ ", '7d',
                                      help="""
                                      - h = hours (eg: 12h)
                                      - d = days (eg: 7d)
                                      - m = months (eg: 6m)
                                      - y = years (eg: 1y)
                                      """)
    article_num = st.sidebar.number_input("Number of Articles 🔢 ", 1, 100, 10)
    lang = st.sidebar.selectbox("Language 🔠:", options=languages,
                                index=0,
                                help='Language of news to be fetched')
    # Configure the shared client from the sidebar inputs.
    Client.period = time_span
    Client.country = country_choice
    Client.max_results = article_num
    Client.language = lang
    if category == "GENERAL":
        st.write(f'**You are seeing articles about** _{category.upper()}_ **!!**')
        # General call of gnews client
        news_ls = Client.get_top_news()
    else:
        st.write(f'**You are seeing articles about** _{category.upper()}_ **!!**')
        # Topic call of gnews client
        news_ls = Client.get_news_by_topic(category.upper())
elif search_choice == 'Search Term':
    Client = get_client()
    search_term = st.sidebar.text_input('Enter Search Term:', value= 'Interesting Search term here')
    st.sidebar.write("## Enter search specs 🔎")
    time_span = st.sidebar.text_input("Time Span: ⏲ ", '7d',
                                      help="""
                                      - h = hours (eg: 12h)
                                      - d = days (eg: 7d)
                                      - m = months (eg: 6m)
                                      - y = years (eg: 1y)
                                      """)
    # NOTE(review): minimum article count is 5 here but 1 in the headlines
    # branch — confirm whether the asymmetry is intentional.
    article_num = st.sidebar.number_input("Number of Articles 🔢 ", 5, 100, 10)
    lang = st.sidebar.selectbox("Language 🔠:", options=languages,
                                index=0,
                                help='Language of news to be fetched')
    Client.period = time_span
    Client.country = country_choice
    Client.max_results = article_num
    Client.language = lang
    st.write(f'**You are seeing articles about** _{search_term.upper()}_ **!!**')
    # Free-text search call of gnews client.
    news_ls = Client.get_news(search_term)
# def get_tasks():
# tasks = []
# for i in range(len(news_ls)):
# tasks.append(asyncio.create_task(Client.get_full_article(news_ls[i]['url'])))
# # print("Tasks:")
# # print(tasks)
# return tasks
#
#
# articles = list()
#
#
# async def get_article_conc():
# tasks = get_tasks()
# responses = await asyncio.gather(*tasks)
# for response in responses:
# articles.append(await response)
#
#
# asyncio.run(get_article_conc())
#
# for i in range(len(articles)):
# st.title(i.title)
# st.image(i.top_image)
# st.write(f"###### Published at: {news_ls[i]['published date']}")
# st.write(f"###### Source: {news_ls[i]['publisher']['title']}")
# st.write(i.text)
# st.write(f"Read more [here]({news_ls[i]['url']})")
# Render each fetched article: headline, hero image, metadata, and a
# collapsible full-text body with a link to the original source.
for i in range(len(news_ls)):
    try:
        article = Client.get_full_article(news_ls[i]['url'])
        st.title(article.title)
        st.image(article.top_image)
        st.write(f"###### Published at: {news_ls[i]['published date']}")
        st.write(f"###### Source: {news_ls[i]['publisher']['title']}")
        with st.expander("Read Full News 📖 "):
            st.write(article.text)
            st.write(f"[Original article here]({news_ls[i]['url']})")
    except Exception as err:
        # NOTE(review): failures are only printed to the server console and
        # the article is silently skipped in the UI — consider st.warning.
        print(err)
| nilq/baby-python | python |
# SPDX-License-Identifier: BSD-3-Clause
import argparse
import json
import logging
import os.path
import sys
from operator_manifest.operator import ImageName, OperatorManifest
from operator_manifest.resolver import resolve_image_reference
logger = logging.getLogger(__name__)
DEFAULT_OUTPUT_EXTRACT = 'references.json'
DEFAULT_OUTPUT_REPLACE = 'replacements.json'
CLI_LOGGER_FORMAT = '%(message)s'
def main(args=None):
    """CLI entry point: parse *args* (defaults to ``sys.argv[1:]``) and
    dispatch to the selected sub-command.
    """
    logging.basicConfig(level=logging.INFO, format=CLI_LOGGER_FORMAT)
    parser = _make_parser()
    namespace = parser.parse_args(args)
    if namespace.command == 'extract':
        extract_image_references(namespace.manifest_dir, output=namespace.output)
    elif namespace.command == 'resolve':
        resolve_image_references(
            namespace.images_file, authfile=namespace.authfile, output=namespace.output
        )
    elif namespace.command == 'replace':
        replace_image_references(
            namespace.manifest_dir, namespace.replacements_file, dry_run=namespace.dry_run
        )
    elif namespace.command == 'pin':
        # pin_image_references requires that the output_* parameters are each a seekable file and
        # will raise an error otherwise. In order to provide a more meaningful error to the user,
        # we explicitly check for stdout since that's likely the only case where a non-seekable
        # file is used from the CLI.
        if namespace.output_replace.fileno() == sys.stdout.fileno():
            raise ValueError('Cannot use stdout for --output-replace parameter')
        if namespace.output_extract.fileno() == sys.stdout.fileno():
            raise ValueError('Cannot use stdout for --output-extract parameter')
        pin_image_references(
            namespace.manifest_dir,
            output_extract=namespace.output_extract,
            output_replace=namespace.output_replace,
            authfile=namespace.authfile,
            dry_run=namespace.dry_run,
        )
    else:
        # No sub-command given on the command line.
        parser.error('Insufficient parameters! See usage above')
def _make_parser():
    """Build the argparse parser with the extract/resolve/replace/pin
    sub-commands used by :func:`main`.
    """
    parser = argparse.ArgumentParser(description='Process operator manifest files')
    subparsers = parser.add_subparsers(dest='command')
    # extract: list the image references found in the CSVs
    extract_parser = subparsers.add_parser(
        'extract',
        description='Identify all the image references in the CSVs found in MANIFEST_DIR.',
    )
    extract_parser.add_argument(
        'manifest_dir',
        metavar='MANIFEST_DIR',
        help='The path to the directory containing the manifest files.',
    )
    extract_parser.add_argument(
        '--output',
        metavar='OUTPUT',
        default='-',
        type=argparse.FileType('w'),
        help=(
            'The path to store the extracted image references. Use - to specify stdout.'
            ' By default - is used.'
        ),
    )
    # resolve: tag -> digest resolution via the registry
    resolve_parser = subparsers.add_parser(
        'resolve',
        description=(
            'Resolve a list of image references into their corresponding image reference digests.'
        ),
    )
    resolve_parser.add_argument(
        'images_file',
        metavar='IMAGES_FILE',
        type=argparse.FileType('r'),
        help=(
            'The path to the file containing the image references to be resolved. The format of'
            ' this file is a JSON Array of Strings where each item is an image reference. Use -'
            ' to specify stdin.'
        ),
    )
    resolve_parser.add_argument(
        '--output',
        metavar='OUTPUT',
        default='-',
        type=argparse.FileType('w+'),
        help=(
            'The path to store the image reference replacements. Use - to specify stdout.'
            ' By default - is used.'
        ),
    )
    resolve_parser.add_argument(
        '--authfile',
        metavar='AUTHFILE',
        help='The path to the authentication file for registry communication.',
    )
    # replace: apply a replacements mapping to the CSVs
    replace_parser = subparsers.add_parser(
        'replace',
        description=(
            'Modify the image references in the CSVs found in the MANIFEST_DIR based on the given'
            ' REPLACEMENTS_FILE.'
        ),
    )
    replace_parser.add_argument(
        'manifest_dir',
        metavar='MANIFEST_DIR',
        help='The path to the directory containing the manifest files.',
    )
    replace_parser.add_argument(
        'replacements_file',
        metavar='REPLACEMENTS_FILE',
        type=argparse.FileType('r'),
        help=(
            'The path to the replacements file. The format of this file is a simple JSON object'
            ' where each attribute is a string representing the original image reference and the'
            ' value is a string representing the new value for the image reference. Use - to'
            ' specify stdin.'
        ),
    )
    replace_parser.add_argument(
        '--dry-run',
        default=False,
        action='store_true',
        help=(
            'When set, replacements are not performed. This is useful to determine if the CSV is'
            ' in a state that accepts replacements. By default this option is not set.'
        ),
    )
    # pin: extract + resolve + replace in one pass
    pin_parser = subparsers.add_parser(
        'pin',
        description=(
            'Pins to digest all the image references from the CSVs found in MANIFEST_DIR. For'
            ' each image reference, if a tag is used, it is resolved to a digest by querying the'
            ' container image registry. Then, replaces all the image references in the CSVs with'
            ' the resolved, pinned, version.'
        ),
    )
    pin_parser.add_argument(
        'manifest_dir',
        metavar='MANIFEST_DIR',
        help='The path to the directory containing the manifest files.',
    )
    pin_parser.add_argument(
        '--dry-run',
        default=False,
        action='store_true',
        help=('When set, replacements are not performed. By default this option is not set.'),
    )
    pin_parser.add_argument(
        '--output-extract',
        metavar='OUTPUT_EXTRACT',
        default=DEFAULT_OUTPUT_EXTRACT,
        type=argparse.FileType('w+'),
        help=(
            'The path to store the extracted image references from the CSVs.'
            f' By default {DEFAULT_OUTPUT_EXTRACT} is used.'
        ),
    )
    pin_parser.add_argument(
        '--output-replace',
        metavar='OUTPUT_REPLACE',
        default=DEFAULT_OUTPUT_REPLACE,
        type=argparse.FileType('w+'),
        help=(
            'The path to store the extracted image reference replacements from the CSVs.'
            f' By default {DEFAULT_OUTPUT_REPLACE} is used.'
        ),
    )
    pin_parser.add_argument(
        '--authfile',
        metavar='AUTHFILE',
        help='The path to the authentication file for registry communication.',
    )
    return parser
def extract_image_references(manifest_dir, output):
    """
    Identify all the image references from the CSVs found in manifest_dir.

    :param str manifest_dir: the path to the directory where the manifest files are stored
    :param file output: the file-like object to store the extracted image references
    :return: the list of image references extracted from the CSVs
    :rtype: list<str>
    :raises ValueError: if more than one CSV in manifest_dir
    """
    manifest_path = _normalize_dir_path(manifest_dir)

    logger.info('Extracting image references from %s', manifest_path)

    manifest = OperatorManifest.from_directory(manifest_path)
    pullspecs = manifest.csv.get_pullspecs()
    references = [str(spec) for spec in pullspecs]
    json.dump(references, output)

    return references
def resolve_image_references(images_file, output, authfile=None):
    """
    Resolve the image references into their corresponding image reference digests.

    :param file images_file: the file-like object to read the image references
    :param file output: the file-like object to store the resolved image references
    :param str authfile: the path to the authentication file for registry communication
    :return: the dict of the original image references mapped to their resolved image references
    :rtype: dict<str:str>
    """
    references = json.load(images_file)

    # References already containing '@' are digest-pinned and are skipped.
    replacements = {
        reference: resolve_image_reference(reference, authfile=authfile)
        for reference in references
        if '@' not in reference
    }

    json.dump(replacements, output)
    return replacements
def replace_image_references(manifest_dir, replacements_file, dry_run=False):
    """
    Use replacements_file to modify the image references in the CSVs found in the manifest_dir.

    :param str manifest_dir: the path to the directory where the manifest files are stored
    :param file replacements_file: the file-like object to the replacements file. The format of
        this file is a simple JSON object where each attribute is a string representing the
        original image reference and the value is a string representing the new value for the
        image reference
    :param bool dry_run: whether or not to apply the replacements
    :raises ValueError: if more than one CSV in manifest_dir
    :raises ValueError: if validation fails
    """
    manifest_path = _normalize_dir_path(manifest_dir)

    logger.info('Replacing image references in CSV')

    manifest = OperatorManifest.from_directory(manifest_path)
    raw_replacements = json.load(replacements_file)
    replacements = {}
    for original, resolved in raw_replacements.items():
        replacements[ImageName.parse(original)] = ImageName.parse(resolved)
        logger.info('%s -> %s', original, resolved)

    manifest.csv.replace_pullspecs_everywhere(replacements)

    logger.info('Setting related images section')
    manifest.csv.set_related_images()

    if not dry_run:
        manifest.csv.dump()
    logger.info('Image references replaced')
def pin_image_references(
    manifest_dir,
    output_extract,
    output_replace,
    authfile=None,
    dry_run=False,
):
    """
    Pin every image reference in the CSVs under *manifest_dir* to a digest.

    Tagged references are resolved to digests by querying the container image
    registry; the CSVs are then rewritten with the pinned references.

    :param str manifest_dir: the path to the directory where the manifest files are stored
    :param file output_extract: seekable file-like object for the extracted references
    :param file output_replace: seekable file-like object for the replacement mapping
    :param str authfile: the path to the authentication file for registry communication
    :param bool dry_run: whether or not to apply the replacements
    :raises ValueError: if more than one CSV in manifest_dir, if validation
        fails, or if either output object is not seekable
    """
    if not output_replace.seekable():
        raise ValueError('output_replace must be a seekable object')
    if not output_extract.seekable():
        raise ValueError('output_extract must be a seekable object')

    def _flush_and_rewind(stream):
        # Make the freshly written content readable by the next stage.
        stream.flush()
        stream.seek(0)

    extract_image_references(manifest_dir, output=output_extract)
    _flush_and_rewind(output_extract)
    resolve_image_references(output_extract, output_replace, authfile=authfile)
    _flush_and_rewind(output_replace)
    replace_image_references(manifest_dir, output_replace, dry_run=dry_run)
def _normalize_dir_path(path):
abs_path = _normalize_path(path)
if not os.path.isdir(abs_path):
raise ValueError(f'{path} is not a directory or does not exist')
return abs_path
def _normalize_path(path):
return os.path.abspath(os.path.expanduser(path))
# Allow running this module directly as a script.
if __name__ == '__main__':
    main()
| nilq/baby-python | python |
#!/usr/bin/env python
# -*- coding:utf-8 -*-
"""
@author: george wang
@datetime: 2019-07-09
@file: clock.py
@contact: georgewang1994@163.com
@desc: 定期处理任务
"""
import datetime
import logging
import threading
import time
logger = logging.getLogger(__name__)
class Schedule(threading.Thread):
    """
    Daemon thread that periodically checks the registered timed tasks and,
    once a task's start time arrives, pushes ``(task_body, task_msg)`` onto
    ``ready_queue`` for execution.
    """

    def __init__(self, ready_queue, *args, **kwargs):
        """
        :param ready_queue: queue receiving due ``(task_body, task_msg)`` pairs
        """
        super(Schedule, self).__init__(*args, **kwargs)
        # Modern replacement for the deprecated Thread.setDaemon(True).
        self.daemon = True
        self.ready_queue = ready_queue
        self.start_task_list = []  # pending (task_body, task_msg) pairs, sorted by start_time
        self.last_sync_time = None  # timestamp of the most recent dispatch

    def collect_tasks(self, start_time):
        """Drop tasks whose start_time is already past, then sort ascending."""
        self.start_task_list = [(task[0], task[1]) for task in self.start_task_list
                                if task[0]["start_time"] >= start_time]
        self.start_task_list.sort(key=lambda x: x[0]["start_time"])

    def register_task(self, task_body, task_msg):
        """
        Register a timed task.

        :param task_body: task payload; must contain a future ``start_time``,
            a ``task_id`` and at least one of ``eta``/``wait``
        :param task_msg: message object forwarded untouched to the ready queue
        :return: None — invalid tasks are logged and dropped
        """
        if not task_body.get("start_time"):
            logger.error("任务start_time不存在")
            return

        if not task_body.get("eta") and not task_body.get("wait"):
            logger.error("任务不符合定时任务标准")
            return

        start_time = datetime.datetime.now().timestamp()
        if task_body["start_time"] < start_time:
            logger.error("任务起始时间应当大于当前时间")
            return

        if not task_body.get("task_id"):
            logger.error("task_id不存在")
            return

        self.start_task_list.append((task_body, task_msg))
        self.collect_tasks(start_time)

    def get_need_execute_task(self, start_time):
        """
        Return ``(task_body, task_msg, sleep_seconds)`` for the earliest task.

        ``task_body`` is None unless that task is due within the next second.
        ``sleep_seconds`` is how long the caller may sleep before re-checking,
        or None when the next task is less than one second away.
        """
        task_body, task_msg = self.start_task_list[0]
        task_start_time = task_body["start_time"]

        need_task_body = None
        if start_time < task_start_time < start_time + 1:
            need_task_body = task_body

        # The list is kept sorted, so the second entry (if any) is the task
        # that will become due next.
        next_task_body, _ = self.start_task_list[1] if len(self.start_task_list) > 1 else (None, None)
        if next_task_body:
            next_internal_time = next_task_body["start_time"] - datetime.datetime.now().timestamp()
            next_internal_time = next_internal_time if next_internal_time > 1 else None  # below 1s: don't sleep
        else:
            next_internal_time = 1

        return need_task_body, task_msg, next_internal_time

    def run(self):
        # Main loop: poll the task list and enqueue tasks as they become due.
        # (Was logging.info — use the module logger for consistency.)
        logger.info("schedule start running...")
        while True:
            if not self.start_task_list:
                time.sleep(1)
                continue

            start_time = datetime.datetime.now().timestamp()
            task_body, task_msg, next_internal_time = self.get_need_execute_task(start_time)
            if task_body:
                self.ready_queue.put((task_body, task_msg))
                logger.debug("schedule execute task:[%s] from start_time:[%s] to end_time:[%s]" %
                             (task_body, self.last_sync_time, start_time))
                self.last_sync_time = start_time

            if next_internal_time:
                time.sleep(next_internal_time)

    def stop(self):
        # NOTE(review): join() on this daemon thread blocks forever because
        # run() never exits; callers likely rely on daemon teardown instead.
        self.join()
| nilq/baby-python | python |
# Copyright (c) 2011-2014 Berkeley Model United Nations. All rights reserved.
# Use of this source code is governed by a BSD License (see LICENSE).
from huxley.api.tests import (CreateAPITestCase, DestroyAPITestCase,
ListAPITestCase, PartialUpdateAPITestCase,
RetrieveAPITestCase, UpdateAPITestCase)
from huxley.utils.test import TestCommittees, TestUsers
class CommitteeDetailGetTestCase(RetrieveAPITestCase):
    """GET on the committee detail endpoint requires no authentication."""
    url_name = 'api:committee_detail'

    def test_anonymous_user(self):
        '''It should return the correct fields for a committee.'''
        c = TestCommittees.new_committee()
        response = self.get_response(c.id)
        # The serialized representation exposes exactly these five fields.
        self.assertEqual(response.data, {
            'id': c.id,
            'name': c.name,
            'full_name': c.full_name,
            'delegation_size': c.delegation_size,
            'special': c.special})
class CommitteeDetailPutTestCase(UpdateAPITestCase):
    """PUT on the committee detail endpoint is rejected for every role."""
    url_name = 'api:committee_detail'
    params = {'name': 'DISC', 'special': True}

    def setUp(self):
        self.committee = TestCommittees.new_committee()

    def test_anonymous_user(self):
        '''Unauthenticated users shouldn't be able to update committees.'''
        res = self.get_response(self.committee.id, params=self.params)
        self.assertMethodNotAllowed(res, 'PUT')

    def test_authenticated_user(self):
        '''Authenticated users shouldn't be able to update committees.'''
        TestUsers.new_user(username='user', password='user')
        self.client.login(username='user', password='user')

        res = self.get_response(self.committee.id, params=self.params)
        self.assertMethodNotAllowed(res, 'PUT')

    def test_superuser(self):
        '''Superusers shouldn't be able to update committees.'''
        TestUsers.new_superuser(username='user', password='user')
        self.client.login(username='user', password='user')

        res = self.get_response(self.committee.id, params=self.params)
        self.assertMethodNotAllowed(res, 'PUT')
class CommitteeDetailPatchTestCase(PartialUpdateAPITestCase):
    """PATCH on the committee detail endpoint is rejected for every role."""
    url_name = 'api:committee_detail'
    params = {'name': 'DISC', 'special': True}

    def setUp(self):
        self.committee = TestCommittees.new_committee()

    def test_anonymous_user(self):
        '''Unauthenticated users shouldn't be able to update committees.'''
        res = self.get_response(self.committee.id, params=self.params)
        self.assertMethodNotAllowed(res, 'PATCH')

    def test_authenticated_user(self):
        '''Authenticated users shouldn't be able to update committees.'''
        TestUsers.new_user(username='user', password='user')
        self.client.login(username='user', password='user')

        res = self.get_response(self.committee.id, params=self.params)
        self.assertMethodNotAllowed(res, 'PATCH')

    def test_superuser(self):
        '''Superusers shouldn't be able to update committees.'''
        TestUsers.new_superuser(username='user', password='user')
        self.client.login(username='user', password='user')

        res = self.get_response(self.committee.id, params=self.params)
        self.assertMethodNotAllowed(res, 'PATCH')
class CommitteeDetailDeleteTestCase(DestroyAPITestCase):
    """DELETE on the committee detail endpoint is rejected for every role."""
    url_name = 'api:committee_detail'

    def setUp(self):
        self.committee = TestCommittees.new_committee()

    def _assert_delete_rejected(self):
        # Shared assertion: the endpoint must refuse the DELETE verb.
        res = self.get_response(self.committee.id)
        self.assertMethodNotAllowed(res, 'DELETE')

    def test_anonymous_user(self):
        '''Unauthenticated users should not be able to delete committees.'''
        self._assert_delete_rejected()

    def test_self(self):
        '''Authenticated users shouldn't have permission to delete committees.'''
        TestUsers.new_user(username='user', password='user')
        self.client.login(username='user', password='user')
        self._assert_delete_rejected()

    def test_super_user(self):
        '''Countries should not be able to be deleted'''
        TestUsers.new_superuser(username='user', password='user')
        self.client.login(username='user', password='user')
        self._assert_delete_rejected()
class CommitteeListGetTestCase(ListAPITestCase):
    """GET on the committee list endpoint is public and returns all committees."""
    url_name = 'api:committee_list'

    def test_anonymous_user(self):
        '''Anyone should be able to access a list of all the committees.'''
        c1 = TestCommittees.new_committee(name='DISC', delegation_size=100)
        c2 = TestCommittees.new_committee(name='JCC', special=True,
                                          delegation_size=30)
        response = self.get_response()
        # Both committees are serialized with the same five fields.
        self.assertEqual(response.data, [
            {'delegation_size': c1.delegation_size,
             'special': c1.special,
             'id': c1.id,
             'full_name': c1.full_name,
             'name': c1.name},
            {'delegation_size': c2.delegation_size,
             'special': c2.special,
             'id': c2.id,
             'full_name': c2.full_name,
             'name': c2.name}])
class CommitteeListPostTestCase(CreateAPITestCase):
    """POST to the committee list endpoint is rejected for every role."""
    url_name = 'api:committee_list'
    params = {'name': 'DISC',
              'full_name': 'Disarmament and International Security',
              'delegation_size': 100}

    def test_anonymous_user(self):
        '''Unauthenticated users shouldn't be able to create committees.'''
        res = self.get_response(self.params)
        self.assertMethodNotAllowed(res, 'POST')

    def test_authenticated_user(self):
        '''Authenticated users shouldn't be able to create committees.'''
        TestUsers.new_user(username='user', password='user')
        self.client.login(username='user', password='user')

        res = self.get_response(self.params)
        self.assertMethodNotAllowed(res, 'POST')

    def test_superuser(self):
        '''Superusers shouldn't be able to create committees.'''
        TestUsers.new_superuser(username='user', password='user')
        self.client.login(username='user', password='user')

        res = self.get_response(self.params)
        self.assertMethodNotAllowed(res, 'POST')
| nilq/baby-python | python |
"""Test run."""
import logging
import re
from pathlib import Path
from types import ModuleType
from unittest.mock import patch
import pytest
from tests.conftest import import_module
_LOGGER = logging.getLogger(__name__)
@pytest.fixture
def run() -> ModuleType:
    """Import the run module."""
    runmod = import_module("run", "hass-addon-sunsynk")
    # Log the module surface to aid debugging when the addon layout changes.
    _LOGGER.warning("Module run: %s", dir(runmod))
    return runmod
@pytest.mark.addon
def test_run(run):
    """Test Run."""
    # The module starts out unconfigured.
    assert not run.SENSORS
    assert not run.OPT.mqtt_host
    # startup() reads argv: the positional args are MQTT host and password.
    testargs = ["run.py", "host1", "passw"]
    with patch.object(run.sys, "argv", testargs):
        run.startup()
    assert run.SENSORS
    assert run.OPT.mqtt_host == "host1"
    assert run.OPT.mqtt_password == "passw"
@pytest.mark.addon
def test_versions(run):
    """Test versions.

    config.json - contains the HASS addon version
    Dockerfile - installs the specific sunsynk library from pypi
    setup.py - sunsynk library on pypi
    """

    def _get_version(filename, regex):
        """Return the version captured by *regex* in *filename*."""
        txt = Path(filename).read_text()
        res = re.compile(regex).search(txt)
        # Fix: the old message always blamed setup.py, even when the match
        # failed in the Dockerfile or config.json.
        assert res, f"version not found in {filename}"
        return res.group(1)

    v_setup = _get_version(
        filename="setup.py",
        regex=r'VERSION = "(.+)"',
    )
    v_docker = _get_version(
        filename="hass-addon-sunsynk/Dockerfile",
        regex=r"sunsynk==(.+)",
    )
    v_config = _get_version(
        filename="hass-addon-sunsynk/config.json",
        regex=r'"version": ".+-(.+)"',
    )
    # All three sources must agree on the sunsynk library version.
    assert v_setup == v_docker
    assert v_setup == v_config
| nilq/baby-python | python |
import gdspy
import pp
from pp.compare_cells import hash_cells
from pp.components.mzi2x2 import mzi2x2
def debug():
    """Manually compare geometry hashes for the mzi2x2 component.

    Writes the component to GDS once, re-imports three GDS files bypassing
    the cache, and prints per-cell hashes alongside gdspy's whole-file
    hashes so the different hashing strategies can be compared by eye.
    """
    c = mzi2x2()
    h0 = c.hash_geometry()
    gdspath1 = "{}.gds".format(c.name)
    gdspath2 = "{}_2.gds".format(c.name)
    gdspath3 = "{}_3.gds".format(c.name)
    pp.write_gds(c, gdspath1)
    c1 = pp.import_gds(gdspath1, overwrite_cache=True)
    c2 = pp.import_gds(gdspath2, overwrite_cache=True)
    c3 = pp.import_gds(gdspath3, overwrite_cache=True)
    dbg = False
    dh1 = hash_cells(c1, {}, dbg=dbg)
    dh2 = hash_cells(c2, {}, dbg=dbg)
    dh3 = hash_cells(c3, {}, dbg=dbg)
    h1 = dh1[c1.name]
    h2 = dh2[c2.name]
    h3 = dh3[c3.name]
    print(h1)
    print(h2)
    print(h3)
    print(h0)
    # NOTE(review): gdspath2/gdspath3 are never written by this function —
    # presumably produced by earlier runs; verify they exist before relying
    # on this routine.
    print(gdspy.gdsii_hash(gdspath1))
    print(gdspy.gdsii_hash(gdspath2))
    print(gdspy.gdsii_hash(gdspath3))
def test_hash():
    """Waveguides of different lengths must hash to different geometries."""
    short_wg = pp.c.waveguide(length=10)
    long_wg = pp.c.waveguide(length=11)
    assert short_wg.hash_geometry() != long_wg.hash_geometry()
if __name__ == "__main__":
    # Run the manual hash-comparison routine, not the pytest test.
    debug()
| nilq/baby-python | python |
from machin.frame.buffers import DistributedPrioritizedBuffer
from test.util_run_multi import *
from test.util_platforms import linux_only_forall
import random
import torch as t
import numpy as np
linux_only_forall()
class TestDistributedPrioritizedBuffer:
    """Multi-process integration tests for DistributedPrioritizedBuffer.

    Each test spawns three RPC processes: ranks 0 and 1 act as storers and
    rank 2 acts as the sampler.
    """
    BUFFER_SIZE = 1
    SAMPLE_BUFFER_SIZE = 10

    ########################################################################
    # Test for DistributedPrioritizedBuffer.store_episode and sample_batch
    ########################################################################
    # A five-step episode of all-zero transitions; only "index" differs.
    full_episode = [
        {
            "state": {"state_1": t.zeros([1, 2])},
            "action": {"action_1": t.zeros([1, 3])},
            "next_state": {"next_state_1": t.zeros([1, 2])},
            "reward": 1,
            "terminal": True,
            "index": 0,
        },
        {
            "state": {"state_1": t.zeros([1, 2])},
            "action": {"action_1": t.zeros([1, 3])},
            "next_state": {"next_state_1": t.zeros([1, 2])},
            "reward": 1,
            "terminal": True,
            "index": 1,
        },
        {
            "state": {"state_1": t.zeros([1, 2])},
            "action": {"action_1": t.zeros([1, 3])},
            "next_state": {"next_state_1": t.zeros([1, 2])},
            "reward": 1,
            "terminal": True,
            "index": 2,
        },
        {
            "state": {"state_1": t.zeros([1, 2])},
            "action": {"action_1": t.zeros([1, 3])},
            "next_state": {"next_state_1": t.zeros([1, 2])},
            "reward": 1,
            "terminal": True,
            "index": 3,
        },
        {
            "state": {"state_1": t.zeros([1, 2])},
            "action": {"action_1": t.zeros([1, 3])},
            "next_state": {"next_state_1": t.zeros([1, 2])},
            "reward": 1,
            "terminal": True,
            "index": 4,
        },
    ]
    # Per-step priorities matching full_episode.
    full_priorities = [1, 1, 1, 0.3, 0.3]

    # test a normal sampling process, where p0 and p1 store to the buffer
    # periodically, and p2 sample from the buffer periodically.
    @staticmethod
    @run_multi(
        expected_results=[True, True, True],
        args_list=[(full_episode, full_priorities)] * 3,
    )
    @setup_world
    def test_store_episode_and_sample_batch_random(rank, episode, priorities):
        world = get_world()
        count = 0
        default_logger.info(f"{rank} started")
        group = world.create_rpc_group("group", ["0", "1", "2"])
        buffer = DistributedPrioritizedBuffer("buffer", group, 5)
        if rank in (0, 1):
            # Storer processes: keep appending the episode for ~10 seconds.
            begin = time()
            while time() - begin < 10:
                buffer.store_episode(episode, priorities=priorities)
                default_logger.info(f"{rank} store episode {count} success")
                count += 1
                sleep(random.random() * 0.5)
        else:
            # Sampler process: wait for data, then sample for ~5 seconds.
            sleep(5)
            begin = time()
            while time() - begin < 5:
                batch_size, sample, indexes, priorities = buffer.sample_batch(10)
                default_logger.info(f"sampled batch size: {batch_size}")
                assert batch_size > 0
                # state
                assert list(sample[0]["state_1"].shape) == [batch_size, 2]
                # action
                assert list(sample[1]["action_1"].shape) == [batch_size, 3]
                # next state
                assert list(sample[2]["next_state_1"].shape) == [batch_size, 2]
                # reward
                assert list(sample[3].shape) == [batch_size, 1]
                # terminal
                assert list(sample[4].shape) == [batch_size, 1]
                # index
                assert len(sample[5]) == batch_size
                # simulate the time to perform a backward process
                sleep(1)
                buffer.update_priority(priorities, indexes)
                default_logger.info(f"{rank} sample {count} success")
                count += 1
                sleep(1)
        return True

    # controlled test sampling process, where p0 and p1 store to the buffer
    # periodically, and p2 sample from the buffer periodically. however, p0 and
    # p1 will finish storing before p2, so the test result is always the same.
    @staticmethod
    @run_multi(
        expected_results=[True, True, True],
        args_list=[(full_episode, full_priorities)] * 3,
    )
    @setup_world
    def test_store_episode_and_sample_batch_controlled(
        rank, episode, priorities,
    ):
        world = get_world()
        default_logger.info(f"{rank} started")
        # Fixed seed makes the sampled indexes/priorities deterministic.
        np.random.seed(0)
        group = world.create_rpc_group("group", ["0", "1", "2"])
        buffer = DistributedPrioritizedBuffer("buffer", group, 5)
        if rank in (0, 1):
            buffer.store_episode(episode, priorities=priorities)
            sleep(5)
        else:
            sleep(2)
            batch_size, sample, indexes, priorities = buffer.sample_batch(
                10, sample_attrs=["index"]
            )
            default_logger.info(f"sampled batch size: {batch_size}")
            default_logger.info(sample)
            default_logger.info(indexes)
            default_logger.info(priorities)
            assert batch_size == 10
            # Expected deterministic draw given the seed above.
            assert sample[0] == [0, 1, 2, 2, 4, 0, 1, 2, 2, 4]
            assert list(indexes.keys()) == ["0", "1"]
            assert np.all(
                np.abs(
                    priorities
                    - [
                        0.75316421,
                        0.75316421,
                        0.75316421,
                        0.75316421,
                        1.0,
                        0.75316421,
                        0.75316421,
                        0.75316421,
                        0.75316421,
                        1.0,
                    ]
                )
                < 1e-6
            )
            buffer.update_priority(priorities, indexes)
        return True

    # sample from two empty buffers
    @staticmethod
    @run_multi(expected_results=[True, True, True])
    @setup_world
    def test_store_episode_and_sample_batch_from_empty(rank):
        world = get_world()
        default_logger.info(f"{rank} started")
        np.random.seed(0)
        group = world.create_rpc_group("group", ["0", "1", "2"])
        buffer = DistributedPrioritizedBuffer("buffer", group, 5)
        if rank in (0, 1):
            sleep(5)
        else:
            sleep(2)
            # Nothing was stored, so sampling must return the empty tuple.
            batch_size, sample, indexes, priorities = buffer.sample_batch(
                10, sample_attrs=["index"]
            )
            assert batch_size == 0
            assert sample is None
            assert indexes is None
            assert priorities is None
        return True

    @staticmethod
    @run_multi(
        expected_results=[True, True, True],
        args_list=[(full_episode, full_priorities)] * 3,
    )
    @setup_world
    def test_store_episode_and_sample_empty_batch(rank, episode, priorities):
        world = get_world()
        default_logger.info(f"{rank} started")
        np.random.seed(0)
        group = world.create_rpc_group("group", ["0", "1", "2"])
        buffer = DistributedPrioritizedBuffer("buffer", group, 5)
        if rank in (0, 1):
            buffer.store_episode(episode, priorities=priorities)
            sleep(5)
        else:
            sleep(2)
            # Requesting a zero-sized batch is a no-op returning empty values.
            batch_size, sample, indexes, priorities = buffer.sample_batch(0)
            assert batch_size == 0
            assert sample is None
            assert indexes is None
            assert priorities is None
        return True

    ########################################################################
    # Test for DistributedPrioritizedBuffer.size and all_size
    ########################################################################
    @staticmethod
    @run_multi(
        expected_results=[True, True, True],
        args_list=[(full_episode, full_priorities)] * 3,
    )
    @setup_world
    def test_size_and_all_size(rank, episode, priorities):
        world = get_world()
        default_logger.info(f"{rank} started")
        np.random.seed(0)
        group = world.create_rpc_group("group", ["0", "1", "2"])
        buffer = DistributedPrioritizedBuffer("buffer", group, 5)
        if rank in (0, 1):
            if rank == 0:
                # Only rank 0 stores; size() reflects the local shard only.
                buffer.store_episode(episode, priorities=priorities)
                assert buffer.size() == 5
            else:
                assert buffer.size() == 0
            sleep(5)
        else:
            sleep(2)
            # Rank 2 holds nothing locally but sees the global total.
            assert buffer.size() == 0
            assert buffer.all_size() == 5
        return True

    ########################################################################
    # Test for DistributedPrioritizedBuffer.clear
    ########################################################################
    @staticmethod
    @run_multi(
        expected_results=[True, True, True],
        args_list=[(full_episode, full_priorities)] * 3,
    )
    @setup_world
    def test_clear(rank, episode, priorities):
        world = get_world()
        default_logger.info(f"{rank} started")
        np.random.seed(0)
        group = world.create_rpc_group("group", ["0", "1", "2"])
        buffer = DistributedPrioritizedBuffer("buffer", group, 5)
        if rank in (0, 1):
            buffer.store_episode(episode, priorities=priorities)
            if rank == 0:
                # clear() only empties the local shard of rank 0.
                buffer.clear()
                assert buffer.size() == 0
            sleep(5)
        else:
            sleep(2)
            # Rank 1's shard remains; all_clear() wipes every shard.
            assert buffer.all_size() == 5
            buffer.all_clear()
            assert buffer.all_size() == 0
        return True
| nilq/baby-python | python |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# Ed Mountjoy
#
# //genetics-portal-raw/uk_biobank_sumstats/neale_v2/raw/135.gwas.imputed_v3.both_sexes.tsv.bgz
import subprocess as sp
import os
import sys
def main():
    """Rename raw Neale UKB sumstat objects in GCS so that each file name
    also carries the phenotype's data type (taken from the manifest).

    Returns:
        int: 0 on completion (shell-style success code).
    """
    # Args
    in_pheno = 'manifest/phenotypes.both_sexes.filtered.tsv'
    # Iterate over manifest entries; enumerate replaces the manual counter.
    for c, (name, dtype) in enumerate(yeild_name_type(in_pheno), start=1):
        print('Processing ({0}) {1}...'.format(c, name))
        old_name = 'gs://genetics-portal-raw/uk_biobank_sumstats/neale_v2/raw/{name}.gwas.imputed_v3.both_sexes.tsv.gz'.format(name=name)
        new_name = 'gs://genetics-portal-raw/uk_biobank_sumstats/neale_v2/raw/{name}.{dtype}.gwas.imputed_v3.both_sexes.tsv.gz'.format(name=name, dtype=dtype)
        cmd = 'gsutil mv {old} {new}'.format(old=old_name, new=new_name)
        # Run
        # NOTE(review): shell=True with manifest-derived names; acceptable for
        # a trusted manifest, but an argv list with shell=False would be safer.
        print(cmd)
        sp.run(cmd, shell=True)
    print('Done')
    return 0
def yeild_name_type(manifest):
    ''' Reads manifest and yields the name and type of file
    Params:
        manifest (file): input Neale phenotype file
    Returns:
        (str, str): source and dest paths
    '''
    with open(manifest, 'r') as handle:
        next(handle, None)  # Skip header row (None default: empty file is ok)
        for record in handle:
            fields = record.rstrip().split('\t')
            yield fields[0], fields[2]
# Allow the module to be executed directly as a script.
if __name__ == '__main__':
    main()
| nilq/baby-python | python |
"""
implement a shuffleNet by pytorch
"""
import torch
import torch.nn as nn
import torch.nn.functional as F
from torch.autograd import Variable
import time
dtype = torch.FloatTensor
from collections import OrderedDict
from .ShapeSpec import ShapeSpec
def shuffle_channels(x, groups):
    """Channel-shuffle a 4-D NCHW tensor.

    Interleaves the channel dimension across *groups* so that following
    group convolutions mix information between groups.
    """
    n, c, h, w = x.size()
    assert c % groups == 0
    per_group = c // groups
    # (n, c, h, w) -> (n, groups, per_group, h, w)
    shuffled = x.view(n, groups, per_group, h, w)
    # Swap the group / per-group axes, then flatten back to (n, c, h, w).
    shuffled = shuffled.transpose(1, 2).contiguous()
    return shuffled.view(n, c, h, w)
class ShuffleNetUnitA(nn.Module):
    """ShuffleNet unit for stride=1"""
    # Structure: 1x1 group conv -> BN/ReLU -> channel shuffle ->
    # 3x3 depthwise conv -> BN -> 1x1 group conv -> BN -> add identity.

    def __init__(self, in_channels, out_channels, groups=3):
        super(ShuffleNetUnitA, self).__init__()
        # Identity residual requires matching channel counts.
        assert in_channels == out_channels
        assert out_channels % 4 == 0
        # Bottleneck width is a quarter of the output channels.
        bottleneck_channels = out_channels // 4
        self.groups = groups
        self.group_conv1 = nn.Conv2d(in_channels, bottleneck_channels,
                                     1, groups=groups, stride=1)
        self.bn2 = nn.BatchNorm2d(bottleneck_channels)
        # Depthwise: one filter per channel (groups == channels).
        self.depthwise_conv3 = nn.Conv2d(bottleneck_channels,
                                         bottleneck_channels,
                                         3, padding=1, stride=1,
                                         groups=bottleneck_channels)
        self.bn4 = nn.BatchNorm2d(bottleneck_channels)
        self.group_conv5 = nn.Conv2d(bottleneck_channels, out_channels,
                                     1, stride=1, groups=groups)
        self.bn6 = nn.BatchNorm2d(out_channels)

    def forward(self, x):
        """Run the unit; input and output are both (N, C, H, W)."""
        out = self.group_conv1(x)
        out = F.relu(self.bn2(out))
        # Shuffle so the next group conv mixes information across groups.
        out = shuffle_channels(out, groups=self.groups)
        out = self.depthwise_conv3(out)
        out = self.bn4(out)
        out = self.group_conv5(out)
        out = self.bn6(out)
        # Residual add: stride=1 keeps shapes identical.
        out = F.relu(x + out)
        return out
class ShuffleNetUnitB(nn.Module):
    """ShuffleNet unit for stride=2"""
    # Downsampling unit: the conv branch halves the spatial size while the
    # input is average-pooled, then both are concatenated on channels.

    def __init__(self, in_channels, out_channels, groups=3):
        super(ShuffleNetUnitB, self).__init__()
        # The pooled shortcut contributes in_channels to the concat output,
        # so the conv branch only needs to produce the remainder.
        out_channels -= in_channels
        assert out_channels % 4 == 0
        bottleneck_channels = out_channels // 4
        self.groups = groups
        self.group_conv1 = nn.Conv2d(in_channels, bottleneck_channels,
                                     1, groups=groups, stride=1)
        self.bn2 = nn.BatchNorm2d(bottleneck_channels)
        # Depthwise conv with stride 2 halves H and W.
        self.depthwise_conv3 = nn.Conv2d(bottleneck_channels,
                                         bottleneck_channels,
                                         3, padding=1, stride=2,
                                         groups=bottleneck_channels)
        self.bn4 = nn.BatchNorm2d(bottleneck_channels)
        self.group_conv5 = nn.Conv2d(bottleneck_channels, out_channels,
                                     1, stride=1, groups=groups)
        self.bn6 = nn.BatchNorm2d(out_channels)

    def forward(self, x):
        """Downsample: output is (N, out_channels, H/2, W/2)."""
        out = self.group_conv1(x)
        out = F.relu(self.bn2(out))
        out = shuffle_channels(out, groups=self.groups)
        out = self.depthwise_conv3(out)
        out = self.bn4(out)
        out = self.group_conv5(out)
        out = self.bn6(out)
        # Shortcut: pool the input to the same spatial size, then join the
        # two branches along the channel axis.
        x = F.avg_pool2d(x, 3, stride=2, padding=1)
        out = F.relu(torch.cat([x, out], dim=1))
        return out
class ShuffleNet(nn.Module):
    """ShuffleNet for groups=3"""
    # Backbone variant that records intermediate stage activations in
    # ``self.features`` for downstream (FPN-style) consumers.

    def __init__(self, groups=3, in_channels=3):
        super(ShuffleNet, self).__init__()
        # Stem: 3x3 stride-2 conv to 24 channels.
        self.conv1 = nn.Conv2d(in_channels, 24, 3, stride=2, padding=1)
        # Each stage opens with a stride-2 unit (B) then stride-1 units (A).
        stage1_seq = [ShuffleNetUnitB(24, 60, groups=groups)] + \
                     [ShuffleNetUnitA(60, 60, groups=groups) for _ in range(5)]
        self.stage1 = nn.Sequential(*stage1_seq)
        stage2_seq = [ShuffleNetUnitB(60, 240, groups=groups)] + \
                     [ShuffleNetUnitA(240, 240, groups=groups) for _ in range(5)]
        self.stage2 = nn.Sequential(*stage2_seq)
        stage3_seq = [ShuffleNetUnitB(240, 480, groups=groups)] + \
                     [ShuffleNetUnitA(480, 480, groups=groups) for _ in range(7)]
        self.stage3 = nn.Sequential(*stage3_seq)
        stage4_seq = [ShuffleNetUnitB(480, 960, groups=groups)] + \
                     [ShuffleNetUnitA(960, 960, groups=groups) for _ in range(3)]
        self.stage4 = nn.Sequential(*stage4_seq)
        # Channels / cumulative strides of conv1 and the four stages.
        self._out_features_channels = [24, 60, 240, 480, 960]
        self._out_features_strides = [2 ** i for i in range(1, 6)]

    def forward(self, x):
        # Rebuilt every call; stores per-stage activations by name.
        self.features = OrderedDict()
        net = self.conv1(x)
        # net = F.max_pool2d(net, 3, stride=2, padding=1)
        net = self.stage1(net)
        self.features['stage_1'] = net
        net = self.stage2(net)
        self.features['stage_2'] = net
        net = self.stage3(net)
        self.features['stage_3'] = net
        net = self.stage4(net)
        self.features['stage_4'] = net
        return net

    @property
    def OutShapeSpec(self):
        """Map of stage name -> ShapeSpec(channels, stride)."""
        # NOTE(review): keys here run 'stage_0'..'stage_4' (stage_0 being the
        # conv1 stem) while forward() records 'stage_1'..'stage_4' only —
        # confirm consumers expect this off-by-one naming.
        specs = OrderedDict()
        for i, layer in enumerate(self._out_features_channels):
            specs['stage_{}'.format(i)] = ShapeSpec(channels=self._out_features_channels[i],
                                                    stride=self._out_features_strides[i])
        return specs
if __name__ == "__main__":
    # Smoke test: time 10 inference passes on a random 224x224 image.
    shuffleNet = ShuffleNet()
    shuffleNet.eval()
    for _ in range(10):
        with torch.no_grad():
            x = Variable(torch.randn([1, 3, 224, 224]).type(dtype),
                         requires_grad=False)
            time_st = time.time()
            out = shuffleNet(x)
            det_t = time.time() - time_st
            print('time: ', det_t)
    print(shuffleNet.OutShapeSpec)
| nilq/baby-python | python |
# -*- coding: utf-8 -*-
# Copyright 2014 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Integration tests for multiple bucket configuration commands."""
from __future__ import absolute_import
from __future__ import print_function
from __future__ import division
from __future__ import unicode_literals
import json
import gslib.tests.testcase as testcase
from gslib.tests.testcase.integration_testcase import SkipForS3
from gslib.tests.util import ObjectToURI as suri
from gslib.utils.constants import UTF8
class TestBucketConfig(testcase.GsUtilIntegrationTestCase):
  """Integration tests for multiple bucket configuration commands."""

  # gsutil sub-command argument lists for each config surface under test.
  _set_cors_command = ['cors', 'set']
  _get_cors_command = ['cors', 'get']
  empty_cors = '[]'
  # Two CORS rules exercising origins, response headers, methods and TTL.
  cors_doc = (
      '[{"origin": ["http://origin1.example.com", '
      '"http://origin2.example.com"], '
      '"responseHeader": ["foo", "bar"], "method": ["GET", "PUT", "POST"], '
      '"maxAgeSeconds": 3600},'
      '{"origin": ["http://origin3.example.com"], '
      '"responseHeader": ["foo2", "bar2"], "method": ["GET", "DELETE"]}]\n')
  cors_json_obj = json.loads(cors_doc)
  _set_lifecycle_command = ['lifecycle', 'set']
  _get_lifecycle_command = ['lifecycle', 'get']
  empty_lifecycle = '{}'
  # Single lifecycle rule: delete objects older than one year.
  lifecycle_doc = (
      '{"rule": [{"action": {"type": "Delete"}, "condition": {"age": 365}}]}\n')
  lifecycle_json_obj = json.loads(lifecycle_doc)
  _set_acl_command = ['acl', 'set']
  _get_acl_command = ['acl', 'get']
  _set_defacl_command = ['defacl', 'set']
  _get_defacl_command = ['defacl', 'get']

  @SkipForS3('A number of configs in this test are not supported by S3')
  def test_set_multi_config(self):
    """Tests that bucket config patching affects only the desired config."""
    bucket_uri = self.CreateBucket()
    lifecycle_path = self.CreateTempFile(
        contents=self.lifecycle_doc.encode(UTF8))
    cors_path = self.CreateTempFile(contents=self.cors_doc.encode(UTF8))
    # Set CORS and confirm it round-trips.
    self.RunGsUtil(self._set_cors_command + [cors_path, suri(bucket_uri)])
    cors_out = self.RunGsUtil(self._get_cors_command + [suri(bucket_uri)],
                              return_stdout=True)
    self.assertEqual(json.loads(cors_out), self.cors_json_obj)
    # Setting lifecycle must leave the CORS config untouched.
    self.RunGsUtil(self._set_lifecycle_command +
                   [lifecycle_path, suri(bucket_uri)])
    cors_out = self.RunGsUtil(self._get_cors_command + [suri(bucket_uri)],
                              return_stdout=True)
    lifecycle_out = self.RunGsUtil(self._get_lifecycle_command +
                                   [suri(bucket_uri)],
                                   return_stdout=True)
    self.assertEqual(json.loads(cors_out), self.cors_json_obj)
    self.assertEqual(json.loads(lifecycle_out), self.lifecycle_json_obj)
    if not self._ServiceAccountCredentialsPresent():
      # See comments in _ServiceAccountCredentialsPresent
      self.RunGsUtil(
          self._set_acl_command +
          ['authenticated-read', suri(bucket_uri)])
    # Setting the bucket ACL must leave CORS and lifecycle untouched.
    cors_out = self.RunGsUtil(self._get_cors_command + [suri(bucket_uri)],
                              return_stdout=True)
    lifecycle_out = self.RunGsUtil(self._get_lifecycle_command +
                                   [suri(bucket_uri)],
                                   return_stdout=True)
    self.assertEqual(json.loads(cors_out), self.cors_json_obj)
    self.assertEqual(json.loads(lifecycle_out), self.lifecycle_json_obj)
    if not self._ServiceAccountCredentialsPresent():
      acl_out = self.RunGsUtil(self._get_acl_command + [suri(bucket_uri)],
                               return_stdout=True)
      self.assertIn('allAuthenticatedUsers', acl_out)
    # Setting the default object ACL must leave all other configs untouched.
    self.RunGsUtil(self._set_defacl_command + ['public-read', suri(bucket_uri)])
    cors_out = self.RunGsUtil(self._get_cors_command + [suri(bucket_uri)],
                              return_stdout=True)
    lifecycle_out = self.RunGsUtil(self._get_lifecycle_command +
                                   [suri(bucket_uri)],
                                   return_stdout=True)
    def_acl_out = self.RunGsUtil(self._get_defacl_command + [suri(bucket_uri)],
                                 return_stdout=True)
    self.assertEqual(json.loads(cors_out), self.cors_json_obj)
    self.assertEqual(json.loads(lifecycle_out), self.lifecycle_json_obj)
    self.assertIn('allUsers', def_acl_out)
    if not self._ServiceAccountCredentialsPresent():
      acl_out = self.RunGsUtil(self._get_acl_command + [suri(bucket_uri)],
                               return_stdout=True)
      self.assertIn('allAuthenticatedUsers', acl_out)
| nilq/baby-python | python |
import torch
import torch.nn as nn
import torch.nn.functional as F
import jmodt.ops.pointnet2.pytorch_utils as pt_utils
from jmodt.config import cfg
from jmodt.detection.layers.proposal_target_layer import ProposalTargetLayer
from jmodt.ops.pointnet2.pointnet2_modules import PointnetSAModule
from jmodt.utils import loss_utils
class RCNN(nn.Module):
    """ROI refinement head with joint detection and re-identification.

    Encodes the points inside each proposal with a PointNet++ set-abstraction
    backbone and predicts, per ROI: classification, bin-based box regression,
    an optional IoU score, and — in TRAIN mode — ReID link / start-end scores
    between consecutive frames.
    """

    def __init__(self, num_classes, input_channels=0, use_xyz=True, mode='TRAIN'):
        super().__init__()
        self.mode = mode
        # PointNet++ set-abstraction (SA) encoder, configured from cfg.RCNN.
        self.SA_modules = nn.ModuleList()
        channel_in = input_channels
        if cfg.RCNN.USE_RPN_FEATURES:
            # Per-point raw inputs: xyz plus optional intensity/mask/depth.
            self.rcnn_input_channel = 3 + int(cfg.RCNN.USE_INTENSITY) + int(cfg.RCNN.USE_MASK) + int(cfg.RCNN.USE_DEPTH)
            self.xyz_up_layer = pt_utils.SharedMLP([self.rcnn_input_channel] + cfg.RCNN.XYZ_UP_LAYER,
                                                   bn=cfg.RCNN.USE_BN)
            c_out = cfg.RCNN.XYZ_UP_LAYER[-1]
            # Fuses the lifted xyz features with the RPN features.
            self.merge_down_layer = pt_utils.SharedMLP([c_out * 2, c_out], bn=cfg.RCNN.USE_BN)
        for k in range(cfg.RCNN.SA_CONFIG.NPOINTS.__len__()):
            mlps = [channel_in] + cfg.RCNN.SA_CONFIG.MLPS[k]
            # NPOINTS == -1 means group all remaining points (no sampling).
            npoint = cfg.RCNN.SA_CONFIG.NPOINTS[k] if cfg.RCNN.SA_CONFIG.NPOINTS[k] != -1 else None
            self.SA_modules.append(
                PointnetSAModule(
                    npoint=npoint,
                    radius=cfg.RCNN.SA_CONFIG.RADIUS[k],
                    nsample=cfg.RCNN.SA_CONFIG.NSAMPLE[k],
                    mlp=mlps,
                    use_xyz=use_xyz,
                    bn=cfg.RCNN.USE_BN
                )
            )
            channel_in = mlps[-1]
        # classification layer
        # Binary classification uses a single logit channel.
        cls_channel = 1 if num_classes == 2 else num_classes
        cls_layers = []
        pre_channel = channel_in
        for k in range(0, cfg.RCNN.CLS_FC.__len__()):
            cls_layers.append(pt_utils.Conv1d(pre_channel, cfg.RCNN.CLS_FC[k], bn=cfg.RCNN.USE_BN))
            pre_channel = cfg.RCNN.CLS_FC[k]
        cls_layers.append(pt_utils.Conv1d(pre_channel, cls_channel, activation=None))
        if cfg.RCNN.DP_RATIO >= 0:
            cls_layers.insert(1, nn.Dropout(cfg.RCNN.DP_RATIO))
        self.cls_layer = nn.Sequential(*cls_layers)
        # Classification loss selected by configuration.
        if cfg.RCNN.LOSS_CLS == 'SigmoidFocalLoss':
            self.cls_loss_func = loss_utils.SigmoidFocalClassificationLoss(alpha=cfg.RCNN.FOCAL_ALPHA[0],
                                                                           gamma=cfg.RCNN.FOCAL_GAMMA)
        elif cfg.RCNN.LOSS_CLS == 'BinaryCrossEntropy':
            self.cls_loss_func = F.binary_cross_entropy
        elif cfg.RCNN.LOSS_CLS == 'CrossEntropy':
            cls_weight = torch.from_numpy(cfg.RCNN.CLS_WEIGHT).float()
            self.cls_loss_func = nn.CrossEntropyLoss(ignore_index=-1, reduce=False, weight=cls_weight)
        else:
            raise NotImplementedError
        if cfg.USE_IOU_BRANCH:
            # Optional branch predicting one IoU score per ROI.
            iou_branch = []
            iou_branch.append(pt_utils.Conv1d(channel_in, cfg.RCNN.REG_FC[0], bn=cfg.RCNN.USE_BN))
            iou_branch.append(pt_utils.Conv1d(cfg.RCNN.REG_FC[0], cfg.RCNN.REG_FC[1], bn=cfg.RCNN.USE_BN))
            iou_branch.append(pt_utils.Conv1d(cfg.RCNN.REG_FC[1], 1, activation=None))
            if cfg.RCNN.DP_RATIO >= 0:
                iou_branch.insert(1, nn.Dropout(cfg.RCNN.DP_RATIO))
            self.iou_branch = nn.Sequential(*iou_branch)
        # regression layer
        # Bin-based localization: x/z bins + residuals, heading bins, size
        # residuals, and y either direct (1 ch) or binned.
        per_loc_bin_num = int(cfg.RCNN.LOC_SCOPE / cfg.RCNN.LOC_BIN_SIZE) * 2
        loc_y_bin_num = int(cfg.RCNN.LOC_Y_SCOPE / cfg.RCNN.LOC_Y_BIN_SIZE) * 2
        reg_channel = per_loc_bin_num * 4 + cfg.RCNN.NUM_HEAD_BIN * 2 + 3
        reg_channel += (1 if not cfg.RCNN.LOC_Y_BY_BIN else loc_y_bin_num * 2)
        reg_layers = []
        pre_channel = channel_in
        for k in range(0, cfg.RCNN.REG_FC.__len__()):
            reg_layers.append(pt_utils.Conv1d(pre_channel, cfg.RCNN.REG_FC[k], bn=cfg.RCNN.USE_BN))
            pre_channel = cfg.RCNN.REG_FC[k]
        reg_layers.append(pt_utils.Conv1d(pre_channel, reg_channel, activation=None))
        if cfg.RCNN.DP_RATIO >= 0:
            reg_layers.insert(1, nn.Dropout(cfg.RCNN.DP_RATIO))
        self.reg_layer = nn.Sequential(*reg_layers)
        # link layer
        # Scores whether two ROIs in consecutive frames are the same object.
        link_layers = []
        pre_channel = channel_in
        for k in range(0, cfg.REID.LINK_FC.__len__()):
            link_layers.append(pt_utils.Conv1d(pre_channel, cfg.REID.LINK_FC[k], bn=cfg.REID.USE_BN))
            pre_channel = cfg.REID.LINK_FC[k]
        link_layers.append(pt_utils.Conv1d(pre_channel, 1, activation=None))
        if cfg.REID.DP_RATIO >= 0:
            link_layers.insert(1, nn.Dropout(cfg.RCNN.DP_RATIO))
        self.link_layer = nn.Sequential(*link_layers)
        # start-end layer
        # Scores track births (start) and deaths (end) between frame pairs.
        se_layers = []
        pre_channel = channel_in
        for k in range(0, cfg.REID.SE_FC.__len__()):
            se_layers.append(pt_utils.Conv1d(pre_channel, cfg.REID.SE_FC[k], bn=cfg.REID.USE_BN))
            pre_channel = cfg.REID.SE_FC[k]
        se_layers.append(pt_utils.Conv1d(pre_channel, 1, activation=None))
        if cfg.REID.DP_RATIO >= 0:
            se_layers.insert(1, nn.Dropout(cfg.RCNN.DP_RATIO))
        self.se_layer = nn.Sequential(*se_layers)
        self.proposal_target_layer = ProposalTargetLayer(mode=self.mode)
        self.init_weights(weight_init='xavier')

    def init_weights(self, weight_init='xavier'):
        """Initialize conv weights ('kaiming', 'xavier' or 'normal')."""
        if weight_init == 'kaiming':
            init_func = nn.init.kaiming_normal_
        elif weight_init == 'xavier':
            init_func = nn.init.xavier_normal_
        elif weight_init == 'normal':
            init_func = nn.init.normal_
        else:
            raise NotImplementedError
        for m in self.modules():
            if isinstance(m, nn.Conv2d) or isinstance(m, nn.Conv1d):
                if weight_init == 'normal':
                    init_func(m.weight, mean=0, std=0.001)
                else:
                    init_func(m.weight)
                if m.bias is not None:
                    nn.init.constant_(m.bias, 0)
        # Final regression conv always gets a tight normal init.
        nn.init.normal_(self.reg_layer[-1].conv.weight, mean=0, std=0.001)

    @staticmethod
    def _break_up_pc(pc):
        """Split a point cloud (..., 3+C) into xyz and (B, C, N) features."""
        xyz = pc[..., 0:3].contiguous()
        features = (
            pc[..., 3:].transpose(1, 2).contiguous()
            if pc.size(-1) > 3 else None
        )
        return xyz, features

    @staticmethod
    def get_unique_tid_feature(prev_fg_tid: torch.Tensor, prev_fg_feat: torch.Tensor):
        """Average the features of ROIs sharing a track id.

        Returns the unique track ids and, for each, the mean feature over all
        ROIs carrying that id (via a row-normalized one-hot matmul).
        """
        prev_tid_diff = torch.min(prev_fg_tid)
        prev_fg_tid_clip = (prev_fg_tid - prev_tid_diff).long()
        m = prev_fg_tid.new_zeros(torch.max(prev_fg_tid_clip) + 1, len(prev_fg_tid))
        m[prev_fg_tid_clip, torch.arange(len(prev_fg_tid))] = 1
        m = F.normalize(m, p=1, dim=1)
        prev_tid_feat_mean = torch.mm(m, prev_fg_feat)
        prev_fg_tid_clip_unique = torch.unique(prev_fg_tid_clip)
        prev_unique_feat = prev_tid_feat_mean[prev_fg_tid_clip_unique]
        prev_fg_tid_unique = prev_fg_tid_clip_unique + prev_tid_diff
        return prev_fg_tid_unique, prev_unique_feat

    def forward(self, input_data):
        """
        :param input_data: input dict
        :return:
        """
        if cfg.RCNN.ROI_SAMPLE_JIT:
            with torch.no_grad():
                pts_input, target_dict = self.proposal_target_layer(input_data)  # generate labels
        else:
            pts_input = input_data['pts_input']
            target_dict = {}
            target_dict['pts_input'] = input_data['pts_input']
            target_dict['roi_boxes3d'] = input_data['roi_boxes3d']
            if self.training:
                target_dict['cls_label'] = input_data['cls_label']
                target_dict['reg_valid_mask'] = input_data['reg_valid_mask']
                target_dict['gt_of_rois'] = input_data['gt_boxes3d']
        xyz, features = self._break_up_pc(pts_input)
        if cfg.RCNN.USE_RPN_FEATURES:
            # Lift the raw per-point channels, then fuse with RPN features.
            xyz_input = pts_input[..., 0:self.rcnn_input_channel].transpose(1, 2).contiguous().unsqueeze(dim=3)
            xyz_feature = self.xyz_up_layer(xyz_input)
            rpn_feature = pts_input[..., self.rcnn_input_channel:].transpose(1, 2).contiguous().unsqueeze(dim=3)
            merged_feature = torch.cat((xyz_feature, rpn_feature), dim=1)
            merged_feature = self.merge_down_layer(merged_feature)
            l_xyz, l_features = [xyz], [merged_feature.squeeze(dim=3)]
        else:
            l_xyz, l_features = [xyz], [features]
        # Run the SA encoder; l_features[-1] is the per-ROI feature vector.
        for i in range(len(self.SA_modules)):
            li_xyz, li_features, _ = self.SA_modules[i](l_xyz[i], l_features[i])
            l_xyz.append(li_xyz)
            l_features.append(li_features)
        rcnn_cls = self.cls_layer(l_features[-1]).squeeze(-1)  # (B, 1)
        rcnn_reg = self.reg_layer(l_features[-1]).squeeze(-1)  # (B, C)
        if cfg.USE_IOU_BRANCH:
            rcnn_iou_branch = self.iou_branch(l_features[-1]).squeeze(-1)  # (B,1)
            ret_dict = {'rcnn_cls': rcnn_cls, 'rcnn_reg': rcnn_reg, 'rcnn_iou_branch': rcnn_iou_branch}
        else:
            ret_dict = {'rcnn_cls': rcnn_cls, 'rcnn_reg': rcnn_reg}
        if self.mode == 'TRAIN':
            ret_dict.update(target_dict)
            # reid
            # NOTE(review): 'gt_tids' is presumably produced by the proposal
            # target layer — confirm it is present in the non-JIT branch too.
            gt_tids = target_dict['gt_tids']
            num_frames = gt_tids.shape[0]
            # assert num_frames == 2, str(num_frames)
            input_features = l_features[-1]
            # split rois into prev and next
            # Frames are interleaved: even indices = previous, odd = next.
            prev_tids = gt_tids[range(0, num_frames, 2)]
            next_tids = gt_tids[range(1, num_frames, 2)]
            feat_len = input_features.shape[-2]
            input_features = input_features.view(num_frames, -1, feat_len)
            prev_features = input_features[range(0, num_frames, 2)]
            next_features = input_features[range(1, num_frames, 2)]
            # tid > 0 marks foreground ROIs carrying a valid track id.
            prev_fg_mask = prev_tids > 0
            next_fg_mask = next_tids > 0
            rcnn_link = []
            start_features = []
            end_features = []
            gt_links = []
            gt_starts = []
            gt_ends = []
            for i in range(num_frames // 2):
                prev_fg_tid = prev_tids[i][prev_fg_mask[i]]
                next_fg_tid = next_tids[i][next_fg_mask[i]]
                prev_fg_feat = prev_features[i][prev_fg_mask[i]]
                next_fg_feat = next_features[i][next_fg_mask[i]]
                n_prev = len(prev_fg_feat)
                n_next = len(next_fg_feat)
                if n_prev > 0 and n_next > 0:
                    # link
                    prev_tid_unique, prev_feat_unique = self.get_unique_tid_feature(prev_fg_tid, prev_fg_feat)
                    next_tid_unique, next_feat_unique = self.get_unique_tid_feature(next_fg_tid, next_fg_feat)
                    # GT link matrix: 1 where the same track id appears in
                    # both frames.
                    unique_link = (prev_tid_unique.unsqueeze(1) == next_tid_unique).float()
                    gt_links.append(unique_link.view(-1))
                    # Pairwise |feature difference| for every (prev, next)
                    # track combination.
                    cor_feat = torch.abs(
                        prev_feat_unique.unsqueeze(1).repeat(1, len(next_tid_unique), 1)
                        - next_feat_unique.unsqueeze(0).repeat(len(prev_tid_unique), 1, 1)
                    )
                    # link + softmax
                    link_feat = cor_feat.view(len(prev_tid_unique) * len(next_tid_unique), feat_len, 1)
                    link_scores = self.link_layer(link_feat).view(len(prev_tid_unique), len(next_tid_unique))
                    # Bidirectional assignment: average row-wise and
                    # column-wise softmax probabilities.
                    link_prev = torch.softmax(link_scores, dim=1)
                    link_next = torch.softmax(link_scores, dim=0)
                    link_scores = (link_prev + link_next) / 2
                    rcnn_link.append(link_scores.view(len(prev_tid_unique) * len(next_tid_unique), 1))
                    # start end
                    # A next-frame track with no link is a start; a
                    # prev-frame track with no link is an end.
                    gt_start = 1 - unique_link.sum(0)
                    gt_end = 1 - unique_link.sum(1)
                    gt_starts.append(gt_start)
                    gt_ends.append(gt_end)
                    start_feat = cor_feat.mean(dim=0)
                    end_feat = cor_feat.mean(dim=1)
                    start_features.append(start_feat)
                    end_features.append(end_feat)
            if len(gt_links) > 0:
                gt_links = torch.cat(gt_links)
                rcnn_link = torch.cat(rcnn_link)
                ret_dict['gt_links'] = gt_links
                ret_dict['rcnn_link'] = rcnn_link
            else:
                # Empty tensors keep downstream loss code shape-safe.
                ret_dict['gt_links'] = gt_tids.new(0)
                ret_dict['rcnn_link'] = gt_tids.new(0, 1)
            if len(gt_starts) > 0:
                gt_starts = torch.cat(gt_starts)
                start_features = torch.cat(start_features).unsqueeze(-1)
                rcnn_start = self.se_layer(start_features).squeeze(-1)
                ret_dict['gt_starts'] = gt_starts
                ret_dict['rcnn_start'] = rcnn_start
            else:
                ret_dict['gt_starts'] = gt_tids.new(0)
                ret_dict['rcnn_start'] = gt_tids.new(0, 1)
            if len(gt_ends) > 0:
                gt_ends = torch.cat(gt_ends)
                end_features = torch.cat(end_features).unsqueeze(-1)
                rcnn_end = self.se_layer(end_features).squeeze(-1)
                ret_dict['gt_ends'] = gt_ends
                ret_dict['rcnn_end'] = rcnn_end
            else:
                ret_dict['gt_ends'] = gt_tids.new(0)
                ret_dict['rcnn_end'] = gt_tids.new(0, 1)
        else:
            # Inference: expose the ROI features for the tracker.
            ret_dict['rcnn_feat'] = l_features[-1]
        return ret_dict
| nilq/baby-python | python |
# -*- coding: utf-8 -*-
'''
助手函数
'''
__author__ = 'alex'
import os
from pathlib import Path
def find_all_file_by_path(suffix='', path=''):
    '''
    Find all files with the given file-name suffix directly under a directory.
    :param suffix: file-name suffix (extension without the dot), e.g. 'txt'
    :param path: directory to search; defaults to the current working directory
    :return: list of pathlib.Path objects matching '*.<suffix>'
    '''
    # An empty suffix matches nothing, mirroring the original contract.
    if not suffix:
        return []
    if not path:
        path = os.getcwd()  # default to the current directory
    # Non-recursive glob for the requested extension.
    return list(Path(path).glob('*.' + suffix))
def mkdir(dir=''):
    '''
    Create a directory (including parent directories) if it does not exist.
    :param dir: directory path to create
    :return: False when dir is empty, otherwise True
    '''
    if not dir:
        return False
    if not os.path.exists(dir):
        # exist_ok=True closes the TOCTOU race between the exists() check
        # and makedirs() when another process creates the directory first.
        os.makedirs(dir, exist_ok=True)
    return True
| nilq/baby-python | python |
# Copyright 2016 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
# Recipe module dependencies; '<repo>/<module>' entries come from other
# recipe repositories (e.g. depot_tools), bare names are local modules.
DEPS = [
  'depot_tools/bot_update',
  'depot_tools/gclient',
  'file',
  'depot_tools/gsutil',
  'recipe_engine/context',
  'recipe_engine/path',
  'recipe_engine/platform',
  'recipe_engine/properties',
  'recipe_engine/python',
  'recipe_engine/step',
  'test_utils',
  'zip',
]
def RunSteps(api):
  """Checks out Dart, builds the SDK, and runs the DDC test suite.

  The builder name is expected to encode the configuration as
  'ddc-<system>-release-<channel>'.
  """
  builder_name = api.properties.get('buildername')
  builder_fragments = builder_name.split('-')
  assert len(builder_fragments) == 4
  assert builder_fragments[0] == 'ddc'
  system = builder_fragments[1]
  assert system in ['linux', 'mac', 'win']
  mode = builder_fragments[2]
  assert mode == 'release'
  channel = builder_fragments[3]
  assert channel in ['be', 'dev', 'stable', 'integration']
  api.gclient.set_config('dart')
  api.path.c.dynamic_paths['tools'] = None
  api.bot_update.ensure_checkout()
  api.path['tools'] = api.path['checkout'].join('tools')
  api.gclient.runhooks()
  with api.context(cwd=api.path['checkout']):
    with api.step.defer_results():
      # Kill stray processes from a previous run before building;
      # ok_ret='any' makes this best-effort.
      api.python('taskkill before building',
                 api.path['checkout'].join('tools', 'task_kill.py'),
                 args=['--kill_browsers=True'],
                 ok_ret='any')
      build_args = ['-mrelease', 'dart2js_bot']
      api.python('build dart',
                 api.path['checkout'].join('tools', 'build.py'),
                 args=build_args)
    with api.step.defer_results():
      api.python('ddc tests',
                 api.path['checkout'].join('tools', 'bots', 'ddc_tests.py'),
                 args=[])
      # Clean up again afterwards, even when the tests failed
      # (defer_results keeps later steps running).
      api.python('taskkill after testing',
                 api.path['checkout'].join('tools', 'task_kill.py'),
                 args=['--kill_browsers=True'],
                 ok_ret='any')
def GenTests(api):
  """Simulation coverage: one representative Linux 64-bit BE builder."""
  yield (
      api.test('ddc-linux-release-be') +
      api.platform('linux', 64) +
      api.properties.generic(
          mastername='client.dart',
          buildername='ddc-linux-release-be',
          revision='hash_of_revision'))
| nilq/baby-python | python |
# encoding: utf-8
from sdsstools import get_config, get_logger, get_package_version
# pip package name
NAME = 'sdss-tron-lite'
# Loads config. config name is the package name.
config = get_config('tron_lite')
log = get_logger(NAME)
__version__ = get_package_version(path=__file__, package_name=NAME)
| nilq/baby-python | python |
from clang.cindex import Index
from .sample import Sample
from .context import Context
from .path import Path
from .ast_utils import ast_to_graph, is_function, is_class, is_operator_token, is_namespace, make_ast_err_message
from networkx.algorithms import shortest_path
from networkx.drawing.nx_agraph import to_agraph
from itertools import combinations
import uuid
import os
import re
import random
def debug_save_graph(func_node, g):
    """Render the AST graph of *func_node* to a uniquely named PNG.

    Debug helper only; writes into the current working directory.
    """
    file_name = func_node.spelling + ".png"
    num = 0
    # Append a counter so overloads / repeated names don't clobber files.
    while os.path.exists(file_name):
        file_name = func_node.spelling + str(num) + ".png"
        num += 1
    a = to_agraph(g)
    a.draw(file_name, prog='dot')
    a.clear()
def tokenize(name, max_subtokens_num):
    """Split an identifier into sub-tokens (snake_case and camelCase aware).

    Operator tokens are returned verbatim as a single-element list. When
    max_subtokens_num is non-zero the result is truncated to that length.
    """
    if is_operator_token(name):
        return [name]
    subtokens = [
        piece
        for chunk in name.split('_')
        for piece in re.findall('[a-z]+|[A-Z]+[a-z]*|[0-9.]+|[-*/&|%=()]+', chunk)
        if piece
    ]
    assert len(subtokens) > 0, "Can't tokenize expr: {0}".format(name)
    return subtokens[:max_subtokens_num] if max_subtokens_num != 0 else subtokens
class AstParser:
    """Extracts code2seq-style path-context samples from C/C++ sources."""

    def __init__(self, max_contexts_num, max_path_len, max_subtokens_num, max_ast_depth, out_path):
        # When True, Sample/Context/Path run their internal consistency checks.
        self.validate = False
        # Number of samples buffered in memory before flushing to disk.
        self.save_buffer_size = 1000
        self.out_path = out_path
        self.max_subtokens_num = max_subtokens_num
        self.max_contexts_num = max_contexts_num
        self.max_path_len = max_path_len
        self.max_ast_depth = max_ast_depth
        self.index = Index.create()
        self.samples = set()
        # (file, line) marks of header-defined functions, used to skip
        # duplicates when a header is parsed via several translation units.
        self.header_only_functions = set()

    def __del__(self):
        # Best-effort final flush of any buffered samples.
        self.save()

    def __parse_node(self, node):
        """Recursively collect free functions and class methods under *node*."""
        try:
            namespaces = [x for x in node.get_children() if is_namespace(x)]
            for n in namespaces:
                # ignore standard library functions
                if n.displayname != 'std' and not n.displayname.startswith('__'):
                    self.__parse_node(n)
            functions = [x for x in node.get_children() if is_function(x)]
            for f in functions:
                self.__parse_function(f)
            classes = [x for x in node.get_children() if is_class(x)]
            for c in classes:
                methods = [x for x in c.get_children() if is_function(x)]
                for m in methods:
                    self.__parse_function(m)
        except Exception as e:
            # Known libclang quirk is ignored; anything else is re-raised
            # with cursor context attached.
            if 'Unknown template argument kind' not in str(e):
                msg = make_ast_err_message(str(e), node)
                raise Exception(msg)
        self.__dump_samples()

    def parse(self, compiler_args, file_path=None):
        """Parse one translation unit and harvest its samples."""
        ast = self.index.parse(file_path, compiler_args)
        self.__parse_node(ast.cursor)

    def __dump_samples(self):
        # Flush only when the in-memory buffer is full.
        if len(self.samples) >= self.save_buffer_size:
            self.save()

    def save(self):
        """Write buffered samples to a uniquely named .c2s file and clear them."""
        if not self.out_path:
            return
        if not os.path.exists(self.out_path):
            os.makedirs(self.out_path)
        if len(self.samples) > 0:
            file_name = os.path.join(self.out_path, str(uuid.uuid4().hex) + ".c2s")
            # print(file_name)
            with open(file_name, "w") as file:
                for sample in self.samples:
                    file.write(str(sample.source_mark) + str(sample) + "\n")
            self.samples.clear()

    def __parse_function(self, func_node):
        """Build path-context samples for a single function definition."""
        try:
            # ignore standard library functions
            if func_node.displayname.startswith('__'):
                return
            # detect header only function duplicates
            file_name = func_node.location.file.name
            source_mark = (file_name, func_node.extent.start.line)
            if file_name.endswith('.h') and func_node.is_definition:
                # print('Header only function: {0}'.format(func_node.displayname))
                if source_mark in self.header_only_functions:
                    # print('Duplicate')
                    return
                else:
                    self.header_only_functions.add(source_mark)
            # Sample label: function name split into sub-tokens.
            key = tokenize(func_node.spelling, self.max_subtokens_num)
            g = ast_to_graph(func_node, self.max_ast_depth)
            # debug_save_graph(func_node, g)
            # Path contexts connect pairs of AST leaves (degree-1 nodes).
            terminal_nodes = [node for (node, degree) in g.degree() if degree == 1]
            random.shuffle(terminal_nodes)
            contexts = set()
            ends = combinations(terminal_nodes, 2)
            for start, end in ends:
                path = shortest_path(g, start, end)
                if path:
                    if self.max_path_len != 0 and len(path) > self.max_path_len:
                        continue  # skip too long paths
                    # Drop the two endpoints; keep only the interior path.
                    path = path[1:-1]
                    start_node = g.nodes[start]['label']
                    tokenize_start_node = not g.nodes[start]['is_reserved']
                    end_node = g.nodes[end]['label']
                    tokenize_end_node = not g.nodes[end]['is_reserved']
                    path_tokens = []
                    for path_item in path:
                        path_node = g.nodes[path_item]['label']
                        path_tokens.append(path_node)
                    context = Context(
                        tokenize(start_node, self.max_subtokens_num) if tokenize_start_node else [start_node],
                        tokenize(end_node, self.max_subtokens_num) if tokenize_end_node else [end_node],
                        Path(path_tokens, self.validate), self.validate)
                    contexts.add(context)
                if len(contexts) > self.max_contexts_num:
                    break
            if len(contexts) > 0:
                sample = Sample(key, contexts, source_mark, self.validate)
                self.samples.add(sample)
        except Exception as e:
            # skip unknown cursor exceptions
            if 'Unknown template argument kind' not in str(e):
                print('Failed to parse function : ')
                print('Filename : ' + func_node.location.file.name)
                print('Start {0}:{1}'.format(func_node.extent.start.line, func_node.extent.start.column))
                print('End {0}:{1}'.format(func_node.extent.end.line, func_node.extent.end.column))
                print(e)
| nilq/baby-python | python |
import onnx
from onnxruntime.quantize import quantize, QuantizationMode
# Load the onnx model
model = onnx.load('/home/lh/pretrain-models/pose_higher_hrnet_256_sim.onnx')
# Quantize
# IntegerOps mode: dynamic (weight) quantization converting supported
# operators to their integer variants.
# NOTE(review): 'onnxruntime.quantize' import path — verify against the
# installed onnxruntime version (newer releases use onnxruntime.quantization).
quantized_model = quantize(model, quantization_mode=QuantizationMode.IntegerOps)
# Save the quantized model
onnx.save(quantized_model, '/home/lh/pretrain-models/pose_higher_hrnet_256_sim_int8.onnx')
# Copyright Fortior Blockchain, LLLP 2021
# Open Source under Apache License
from flask import Flask, request, render_template, redirect, url_for
from flask_sock import Sock
from algosdk import account, encoding, mnemonic
from vote import election_voting, hashing, count_votes
from algosdk.future.transaction import AssetTransferTxn, PaymentTxn
from algosdk.v2client import algod
import rsa
import hashlib
import sqlite3 as sl
# Added new sqlite functionality for local devices
con = sl.connect('voters.db', check_same_thread=False)
cur = con.cursor()
app = Flask(__name__)
sock = Sock(app)
finished = False
adminLogin = False
corporate_finished = False
validated = False
my_key = hashing("tee")
# Landing page.
@app.route("/")
def start():
    """ Start page """
    return render_template('index.html')
@app.route('/start', methods=['POST', 'GET'])
def start_voting():
    """Admin endpoint: reopen the petition when the correct key is posted."""
    error = ''
    message = ''
    global finished
    if request.method == 'POST':
        # Compare hash-to-hash; the stored key is already hashed.
        key = hashing(str(request.form.get('Key')))
        if key == my_key:
            # message = reset_votes()
            finished = False
            message = 'Petition Started'
        else:
            error = "Incorrect admin key"
    return render_template("startprocess.html", message=message, error=error)
@app.route('/overview', methods=['POST', 'GET'])
def create():
    """Render the admin overview page."""
    return render_template('overview.html')
@app.route('/admin', methods=['POST', 'GET'])
def verify():
    """Admin login: show the overview page when the submitted key hashes
    to the configured admin key; otherwise (or on GET) show the login form.
    """
    if request.method == 'POST':
        # The original also hashed the 'Social' and 'Drivers' form fields
        # into locals that were never used; those dead reads are removed.
        key = hashing(str(request.form.get('Key')))
        if key == my_key:
            return render_template('overview.html')
    return render_template('adminLogin.html')
@app.route('/end', methods=['POST', 'GET'])
def end():
    """Admin endpoint: tally the votes and mark the petition as finished."""
    error = ''
    message = ''
    global finished
    if request.method == 'POST':
        key = hashing(str(request.form.get('Key')))
        if key == my_key:
            message = count_votes()
            finished = True
        else:
            error = "Incorrect admin key"
    return render_template("endprocess.html", message=message, error=error)
@app.route('/view', methods=['POST', 'GET'])
def view():
    """Refresh the tally, then show the progress page."""
    count_votes()
    return render_template("viewprogress.html")
@app.route('/vote', methods=['POST', 'GET'])
def vote():
    """Cast a vote on POST, then refresh the running tally."""
    message = ''
    if request.method == 'POST':
        message = election_voting()
        count_votes()
    return render_template('vote.html', message=message)
@app.route('/about/')
def about():
    """about"""
    # Static informational page.
    return render_template('about.html')
if __name__ == "__main__":
    # Local development server only; use a production WSGI server otherwise.
    app.run(host='127.0.0.1', debug=True)
| nilq/baby-python | python |
"""
TODO: Shall check that all the needed packages are available before running the program
""" | nilq/baby-python | python |
import os
import time
import logging
from sarpy.io.nitf.nitf_head import NITFDetails
from sarpy.io.nitf.image import ImageSegmentHeader
from sarpy.io.nitf.des import DataExtensionHeader
from . import unittest
def generic_nitf_header_test(instance, test_file):
    """Exercise NITF header parse/serialize round-trips on *test_file*.

    Intended to be called from a ``unittest.TestCase`` method: *instance*
    is the running test case, and each phase is reported as a sub-test.
    """
    assert isinstance(instance, unittest.TestCase)

    # Phase 1: can we parse it at all? how long does it take?
    with instance.subTest(msg="header parsing"):
        start = time.time()
        details = NITFDetails(test_file)
        # how long does it take?
        logging.info('unpacked nitf details in {}'.format(time.time() - start))
        # how does it look?
        logging.debug(details.nitf_header)

    # Phase 2: the re-serialized header must be exactly HL bytes long.
    with instance.subTest(msg="header length match"):
        header_string = details.nitf_header.to_bytes()
        equality = (len(header_string) == details.nitf_header.HL)
        if not equality:
            logging.error(
                'len(produced header) = {}, nitf_header.HL = {}'.format(len(header_string),
                                                                        details.nitf_header.HL))
        instance.assertTrue(equality)

    # Phase 3: the re-serialized header must match the file bytes exactly.
    with instance.subTest(msg="header content match"):
        with open(test_file, 'rb') as fi:
            file_header = fi.read(details.nitf_header.HL)
        equality = (file_header == header_string)
        if not equality:
            # Dump both headers in fixed-size chunks so the mismatch is readable.
            chunk_size = 80
            start_chunk = 0
            while start_chunk < len(header_string):
                end_chunk = min(start_chunk + chunk_size, len(header_string))
                logging.error('real[{}:{}] = {}'.format(
                    start_chunk, end_chunk, file_header[start_chunk:end_chunk]))
                logging.error('prod[{}:{}] = {}'.format(
                    start_chunk, end_chunk, header_string[start_chunk:end_chunk]))
                start_chunk = end_chunk
        instance.assertTrue(equality)

    # Phase 4: every image subheader must round-trip byte-for-byte.
    for i in range(details.img_segment_offsets.size):
        with instance.subTest('image subheader {} match'.format(i)):
            img_bytes = details.get_image_subheader_bytes(i)
            img_sub = ImageSegmentHeader.from_bytes(img_bytes, start=0)
            instance.assertEqual(
                len(img_bytes), img_sub.get_bytes_length(), msg='image subheader as long as expected')
            instance.assertEqual(
                img_bytes, img_sub.to_bytes(), msg='image subheader serializes and deserializes as expected')

    # Phase 5: every data extension (DES) subheader must round-trip too.
    for i in range(details.des_segment_offsets.size):
        with instance.subTest('des subheader {} match'.format(i)):
            des_bytes = details.get_des_subheader_bytes(i)
            des_sub = DataExtensionHeader.from_bytes(des_bytes, start=0)
            instance.assertEqual(
                len(des_bytes), des_sub.get_bytes_length(), msg='des subheader as long as expected')
            instance.assertEqual(
                des_bytes, des_sub.to_bytes(), msg='des subheader serializes and deserializes as expected')
class TestNITFHeader(unittest.TestCase):
    """Runs the generic NITF header round-trip test over local sample files."""

    @classmethod
    def setUp(cls):
        # NOTE(review): a @classmethod named setUp is unusual -- unittest calls
        # setUp() once per test (here the classmethod binding supplies cls).
        # setUpClass was probably intended; confirm before renaming, since the
        # current behavior is per-test execution.
        cls.test_root = os.path.expanduser(os.path.join('~', 'Desktop', 'sarpy_testing', 'sicd'))

    def test_nitf_header(self):
        """Run the header test on every sample file present on this machine."""
        tested = 0
        for fil in [
                'sicd_example_RMA_RGZERO_RE16I_IM16I.nitf',
                'sicd_example_RMA_RGZERO_RE32F_IM32F.nitf',
                'sicd_example_RMA_RGZERO_RE32F_IM32F_cropped_multiple_image_segments_v1.2.nitf']:
            test_file = os.path.join(self.test_root, fil)
            if os.path.exists(test_file):
                tested += 1
                generic_nitf_header_test(self, test_file)
            else:
                logging.info('No file {} found'.format(test_file))
        # Fail loudly if none of the fixture files were present.
        self.assertTrue(tested > 0, msg="No files for testing found")
| nilq/baby-python | python |
# Copyright 2020 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Presubmit tests for ProductionSupportedFlagList.java
"""
import os
import sys
def _SetupImportPath(input_api):
android_webview_common_dir = input_api.PresubmitLocalPath()
_CHROMIUM_SRC = os.path.join(android_webview_common_dir, os.pardir, os.pardir,
os.pardir, os.pardir, os.pardir, os.pardir, os.pardir)
sys.path.append(os.path.join(_CHROMIUM_SRC, 'android_webview', 'tools'))
def CheckChangeOnUpload(input_api, output_api):
    """Presubmit entry point: flag WebView flags that lack enum labels."""
    _SetupImportPath(input_api)
    # Imported lazily: only resolvable once _SetupImportPath has run.
    import generate_flag_labels
    findings = []
    findings += generate_flag_labels.CheckMissingWebViewEnums(input_api,
                                                              output_api)
    return findings
| nilq/baby-python | python |
"""This is the stock insertion generator"""
import numpy as np
import mitty.lib
import mitty.lib.util as mutil
from mitty.plugins.variants import scale_probability_and_validate
import logging
logger = logging.getLogger(__name__)
__example_param_text = """
{
"p": 0.01, # Per-base probability of having an insertion
"t_mat": [[ 0.32654629, 0.17292732, 0.24524503, 0.25528135], # Base transition matrix
[ 0.3489394, 0.25942695, 0.04942584, 0.3422078],
[ 0.28778188, 0.21087004, 0.25963262, 0.24171546],
[ 0.21644706, 0.20588717, 0.24978216, 0.32788362]],
"p_end": 0.1, # Probability of chain ending
"max_len": 1000 # Maximum length of insertion
}
"""
_description = """
Stock insertion model that generates sequences with same base transition matrix as the human genome and creates a
power-law distribution of insertion lengths.
A typical parameter set resembles
""" + __example_param_text
_example_params = eval(__example_param_text)
class Model:
    """Stock insertion model: genome-like base transition matrix plus a
    power-law distribution of insertion lengths (see module docstring)."""

    def __init__(self, p=0.01, t_mat=None, p_end=0.1, max_len=1000, **kwargs):
        """
        :param p: per-base probability of an insertion
        :param t_mat: 4x4 base transition matrix (defaults to human-genome-like values)
        :param p_end: per-base probability that the inserted chain terminates
        :param max_len: maximum insertion length (must be >= 1)
        """
        assert 0 <= p <= 1.0, "Probability out of range"
        assert 0 <= p_end <= 1.0, "Probability out of range"
        assert 0 < max_len, 'max_len needs to be 1 or more'
        if t_mat is None:
            t_mat = [[0.32654629, 0.17292732, 0.24524503, 0.25528135],
                     [0.3489394, 0.25942695, 0.04942584, 0.3422078],
                     [0.28778188, 0.21087004, 0.25963262, 0.24171546],
                     [0.21644706, 0.20588717, 0.24978216, 0.32788362]]
        self.p, self.t_mat, self.p_end, self.max_len = p, t_mat, p_end, max_len

    def get_variants(self, ref, p=None, f=None, seed=1, **kwargs):
        """This function is called by the simulator to obtain variants.

        :param ref: reference sequence as a string
        :param p: array/list of probability values
        :param f: array/list of frequency values
        :param seed: seed for the random number generators
        :return: 5 arrays/lists/iterables all of the same length
          pos  - positions of insertions
          stop - stop locations (pos + 1 for insertions)
          ref  - reference base at each position
          alt  - inserted sequence (first character is the reference base)
          p    - per-variant value derived from insertion length (longer
                 insertions map to smaller values)
        """
        assert 0 < seed < mitty.lib.SEED_MAX
        logger.debug('Master seed: {:d}'.format(seed))
        # Two independent RNG streams: one places insertions, the other drives
        # the Markov chain generating the inserted bases.
        base_loc_rng, ins_markov_rng = mutil.initialize_rngs(seed, 2)
        # Fold the chain-termination probability into the transition matrix.
        pt_mat = mutil.add_p_end_to_t_mat(self.t_mat, self.p_end)
        p_eff = scale_probability_and_validate(self.p, p, f)
        # Poisson placement over the reference (the sequence-aware helper
        # replaces the earlier explicit 'N'-filtering shown in the old code).
        ins_locs = mutil.place_poisson_seq(base_loc_rng, p_eff, 0, len(ref), ref)  #np.array([x for x in mutil.place_poisson(base_loc_rng, p_eff, 0, len(ref)) if ref[x] != 'N'], dtype='i4')
        ins_list, len_list = mutil.markov_sequences(ref, ins_locs, self.max_len, pt_mat, ins_markov_rng)
        lengths = np.array(len_list, dtype='i4')
        return ins_locs, ins_locs + 1, [ins[0] for ins in ins_list], ins_list, (1.0 - lengths / float(lengths.max())) if lengths.shape[0] else []
def test0():
    """Edge case: with a tiny probability no insertions should be generated."""
    ref_seq = 'ACTGACTGACTGACTGACTGACTGACTGACTGACTG'
    model = Model(p=0.00001)
    pos, stop, ref, alt, p = model.get_variants(ref_seq, seed=10)
    assert len(pos) == 0  # This should just run and not crash
def test1():
    """Every reported alt allele must start with the reference base."""
    ref_seq = 'ACTGACTGACTGACTGACTGACTGACTGACTGACTG'
    model = Model(p=0.1)
    pos, stop, ref, alt, p = model.get_variants(ref_seq, seed=10)
    for location, allele in zip(pos, alt):
        assert allele[0] == ref_seq[location]
def test2():
    """Positions covered by 'N' in the reference must not get insertions."""
    clean_ref = 'ACTGACTGACTGACTGACTGACTGACTGACTGACTG'
    pos, stop, ref, alt, p = Model(p=0.1).get_variants(clean_ref, seed=10)
    assert 20 in pos, pos
    # Same sequence but position 20 masked with 'N': no insertion allowed there.
    masked_ref = 'ACTGACTGACTGACTGACTGNCTGACTGACTGACTG'
    pos, stop, ref, alt, p = Model(p=0.1).get_variants(masked_ref, seed=10)
    assert 20 not in pos
if __name__ == "__main__":
    # Parenthesized call: the bare `print _description` statement is a
    # SyntaxError on Python 3; this form runs on both Python 2 and 3.
    print(_description)
#! /usr/bin/python
#coding: utf-8
# Static configuration tables driving the SQL generation below.
fields = {}

# One row per brand: BrandId, BrandType (data-store offset), BE_ID, BE_CODE.
fields["brand"] = (
    [
        #BrandId    #BrandType  #BE_ID  #BE_CODE
        [380043552,  0,          103,    '103']
    ])

# Brand type name -> data-store-id offset added to each prefix's base id.
fields["BrandTypes"] = (
    [
        #name    #offset
        ["pps",   0],
        ["lca",   1],
        ["mctu",  2],
        ["mvno",  3]
    ])

# CDR file prefix, sequence name, [eventid+cdrType ...], base data store id.
fields["prefix"] = (
    [   #prefix      #squence                       #eventid+cdrType                    #data_store_id start number
        ["rec",      "SEQ_FILE_SEQ_REC",            [[1101, 1]],                        1000000],
        ["sms",      "SEQ_FILE_SEQ_SMS",            [[1102, 1]],                        1000010],
        ["mms",      "SEQ_FILE_SEQ_MMS",            [[1103, 1]],                        1000020],
        ["data",     "SEQ_FILE_SEQ_DATA",           [[1104, 1]],                        1000030],
        ["com",      "CDRSERIALNO_NORMAL_COM_PPS",  [[1206, 1]],                        1000040],
        ["mgr",      "SEQ_FILE_SEQ_MGR",            [[1304, 4], [1329, 1]],             1000050],
        ["vou",      "SEQ_FILE_SEQ_VOU",            [[1302, 2], [1306, 1], [1350, 2]],  1000060],
        ["Dump",     "SEQ_CDR_ID",                  [[1415, 1]],                        1000070],
        ["clr",      "SEQ_FILE_SEQ_CLR",            [[1408, 1]],                        1000080],
        ["mon",      "SEQ_FILE_SEQ_MON",            [[1401, 1]],                        1000090],
        ["b_modif",  "SEQ_CDR_ID",                  [[1, 6, 20000]],                    1000100],
        ["b_del",    "SEQ_CDR_ID",                  [[1, 6, 20001]],                    1000110],
        ["b_create", "SEQ_CDR_ID",                  [[1, 6, 20002]],                    1000120]
    ])
def create_BP_DATA_STORE():
    """Emit SQL that (re)creates BP_DATA_STORE rows for every prefix/brand pair."""
    print "delete from BP_DATA_STORE where DATA_STORE_ID >= 1000000;"
    for prefix in fields["prefix"]:
        for brand in fields["BrandTypes"]:
            # Dump CDRs are written under ${CBP_CDRPATH}/output/{BrandID}/dump.
            # NOTE(review): the table entry is "Dump" (capital D) but this
            # compares lowercase "dump", so this branch never fires as
            # written -- confirm which spelling is intended.
            if(prefix[0] == "dump"):
                print "insert into BP_DATA_STORE values ('" + str(prefix[3] + brand[1]) + "', 'R5_" + prefix[0] + "', '.unl', '36700160', '100000', '600', '${CBP_CDRPATH}/output/" + brand[0] + "/dump', '${CBP_CDRPATH}/output/" + brand[0] + "/dump/temp', 'Y', 'Y');"
            # CDRs with a 'b_' prefix (BMP CDRs) live under ${HOME}/cdr/output/{BrandID}/normal.
            elif prefix[0][:2] == 'b_':
                print "insert into BP_DATA_STORE values ('" + str(prefix[3] + brand[1]) + "', 'R5_" + prefix[0] + "', 'unl', '36700160', '100000', '600', '${HOME}/cdr/output/" + brand[0] + "/normal', '${HOME}/cdr/output/" + brand[0] + "/normal/temp', 'N', 'Y');"
            else:
                print "insert into BP_DATA_STORE values ('" + str(prefix[3] + brand[1]) + "', 'R5_" + prefix[0] + "', '.unl', '36700160', '100000', '600', '${CBP_CDRPATH}/output/" + brand[0] + "/normal', '${CBP_CDRPATH}/output/" + brand[0] + "/normal/temp', 'Y', 'Y');"
def modify_EF_CDR_OUCTPUT_CFG():
    """Emit SQL updating existing ef_cdr_output_cfg rows to the new data stores."""
    for prefix in fields["prefix"]:
        DATA_STORE_ID = prefix[3]
        for event in prefix[2]:
            STD_EVT_TYPE_ID = event[0]
            if STD_EVT_TYPE_ID == 1:
                # Type-1 entries carry their file-out id as a third element.
                CDR_FILE_OUT_ID = event[2]
            else:
                CDR_FILE_OUT_ID = STD_EVT_TYPE_ID
            NORMAL_DATA_STORE_ID = DATA_STORE_ID
            ERROR_DATA_STORE_ID = DATA_STORE_ID
            RERATING_DATA_STORE_ID = DATA_STORE_ID
            if STD_EVT_TYPE_ID != 1 :
                # Non-type-1 events keep the shared error/rollback stores.
                ERROR_ORI_DATA_STORE_ID = 103
                ROLLBACK_DATA_STORE_ID = 108
            else:
                ERROR_ORI_DATA_STORE_ID = DATA_STORE_ID
                ROLLBACK_DATA_STORE_ID = DATA_STORE_ID
            # Update the existing rows (translated from the Chinese original).
            print "update ef_cdr_output_cfg set NORMAL_DATA_STORE_ID = %d, ERROR_DATA_STORE_ID = %d, RERATING_DATA_STORE_ID = %d, ERROR_ORI_DATA_STORE_ID = %d, ROLLBACK_DATA_STORE_ID = %d where CDR_FILE_OUT_ID = %d;"\
                %(NORMAL_DATA_STORE_ID,
                  ERROR_DATA_STORE_ID,
                  RERATING_DATA_STORE_ID,
                  ERROR_ORI_DATA_STORE_ID,
                  ROLLBACK_DATA_STORE_ID,
                  CDR_FILE_OUT_ID)
def create_EF_CDR_OUCTPUT_CFG():
    """Emit SQL inserting new ef_cdr_output_cfg rows for every brand/prefix/event."""
    REC_ID = 1000000
    print "delete from ef_cdr_output_cfg where rec_id >= 1000000;"
    for brand in fields["brand"]:
        BRAND_ID = brand[0]
        BE_ID = brand[2]
        BE_CODE = brand[3]
        DATA_STORE_ID_offset = brand[1]
        for prefix in fields["prefix"]:
            SEQ_NORMAL_CDR_ID = prefix[1]
            # Per-brand data store: base id from the prefix plus brand offset.
            DATA_STORE_ID = prefix[3] + DATA_STORE_ID_offset
            for event_cdrType in prefix[2]:
                STD_EVT_TYPE_ID = event_cdrType[0]
                if STD_EVT_TYPE_ID != 1:
                    CDR_FILE_OUT_ID = STD_EVT_TYPE_ID
                else:
                    # Type-1 entries carry their file-out id as a third element.
                    CDR_FILE_OUT_ID = event_cdrType[2]
                CDR_FILE_OUT_TYPE = event_cdrType[1]
                NORMAL_DATA_STORE_ID = DATA_STORE_ID
                ERROR_DATA_STORE_ID = DATA_STORE_ID
                RERATING_DATA_STORE_ID = DATA_STORE_ID
                if STD_EVT_TYPE_ID != 1 :
                    # Non-type-1 events keep the shared error/rollback stores.
                    ERROR_ORI_DATA_STORE_ID = 103
                    ROLLBACK_DATA_STORE_ID = 108
                else:
                    ERROR_ORI_DATA_STORE_ID = DATA_STORE_ID
                    ROLLBACK_DATA_STORE_ID = DATA_STORE_ID
                print "insert into ef_cdr_output_cfg values (%d, '%d', %d, '%s', '%s', %d, %d, %d, %d, '%s', %d, %d, '%s', '%s', %d, %d, '%s', '%s');" \
                    %(CDR_FILE_OUT_ID,
                      CDR_FILE_OUT_TYPE,
                      STD_EVT_TYPE_ID,
                      '*',  #PAYMENT_MODE,
                      'N',  #TEST_CDR_FLAG,
                      NORMAL_DATA_STORE_ID,
                      ERROR_DATA_STORE_ID,
                      ERROR_ORI_DATA_STORE_ID,
                      BE_ID,
                      BE_CODE,
                      RERATING_DATA_STORE_ID,
                      ROLLBACK_DATA_STORE_ID,
                      '',  #COND_EXPR_TEXT,
                      '',  #COND_EXPR_CODE,
                      BRAND_ID,
                      REC_ID,
                      "SEQ_ERR_CDR_ID",  #SEQ_ERR_CDR_ID,
                      SEQ_NORMAL_CDR_ID)
                REC_ID += 1
if __name__ == '__main__':
    # Emit the full SQL script: recreate data stores, patch the existing
    # output-config rows, then insert the new per-brand rows.
    create_BP_DATA_STORE()
    modify_EF_CDR_OUCTPUT_CFG()
    create_EF_CDR_OUCTPUT_CFG()
| nilq/baby-python | python |
import logging
from django.core.management import BaseCommand
from django.core.management import call_command
class Command(BaseCommand):
    help = 'This command invoke all the importing data command'

    # Import sub-commands to run, in order. A failure in one importer must
    # not prevent the remaining importers from running.
    IMPORT_COMMANDS = (
        'import_organization_data',
        'import_affiliated_committers_data',
        'import_outside_committers_data',
        'import_outside_projects_data',
        'import_portfolio_projects_data',
    )

    def handle(self, *args, **options):
        """Run every OpenHub import command, logging (not raising) failures.

        Bug fix: the final "all imported" log line was previously placed
        after a `return` statement and therefore never executed.
        """
        logger = logging.getLogger(__name__)
        for name in self.IMPORT_COMMANDS:
            try:
                call_command(name)
            except Exception as ex:
                # Best-effort: record the failure and continue with the rest.
                logger.error(ex)
        logger.info('All OpenHub data is imported')
| nilq/baby-python | python |
"""Preprocess"""
import numpy as np
from scipy.sparse import (
csr_matrix,
)
from sklearn.utils import sparsefuncs
from skmisc.loess import loess
def select_variable_genes(adata,
                          layer='raw',
                          span=0.3,
                          n_top_genes=2000,
                          ):
    """Select highly variable genes.

    This function implements the method 'vst' in Seurat v3.
    Inspired by Scanpy.

    Parameters
    ----------
    adata: AnnData
        Annotated data matrix.
    layer: `str`, optional (default: 'raw')
        The layer to use for calculating variable genes.
        If None, ``adata.X`` is used instead.
    span: `float`, optional (default: 0.3)
        Loess smoothing factor
    n_top_genes: `int`, optional (default: 2000)
        The number of genes to keep

    Returns
    -------
    updates `adata` with the following fields.
    variances_norm: `float`, (`adata.var['variances_norm']`)
        Normalized variance per gene
    variances: `float`, (`adata.var['variances']`)
        Variance per gene.
    means: `float`, (`adata.var['means']`)
        Means per gene
    highly_variable: `bool` (`adata.var['highly_variable']`)
        Indicator of variable genes
    """
    if layer is None:
        X = adata.X
    else:
        X = adata.layers[layer].astype(np.float64).copy()
    mean, variance = sparsefuncs.mean_variance_axis(X, axis=0)
    variance_expected = np.zeros(adata.shape[1], dtype=np.float64)
    not_const = variance > 0

    # Fit the mean-variance trend in log10 space with a degree-2 loess;
    # constant genes (zero variance) are excluded from the fit.
    model = loess(np.log10(mean[not_const]),
                  np.log10(variance[not_const]),
                  span=span,
                  degree=2)
    model.fit()
    variance_expected[not_const] = 10**model.outputs.fitted_values

    # Clip observed values at mean + sqrt(N) expected standard deviations to
    # limit the influence of outlier cells on the variance estimate.
    N = adata.shape[0]
    clip_max = np.sqrt(N)
    clip_val = np.sqrt(variance_expected) * clip_max + mean
    X = csr_matrix(X)
    mask = X.data > clip_val[X.indices]
    X.data[mask] = clip_val[X.indices[mask]]

    # Variance of the clipped values, standardized by the expected variance.
    squared_X_sum = np.array(X.power(2).sum(axis=0))
    X_sum = np.array(X.sum(axis=0))
    norm_gene_var = (1 / ((N - 1) * variance_expected)) \
        * ((N * np.square(mean))
           + squared_X_sum
           - 2 * X_sum * mean
           )
    norm_gene_var = norm_gene_var.flatten()
    adata.var['variances_norm'] = norm_gene_var
    adata.var['variances'] = variance
    adata.var['means'] = mean
    # Keep the n_top_genes with the highest normalized variance.
    ids_top = norm_gene_var.argsort()[-n_top_genes:][::-1]
    adata.var['highly_variable'] = np.isin(range(adata.shape[1]), ids_top)
    print(f'{n_top_genes} variable genes are selected.')
| nilq/baby-python | python |
import asyncio
import pandas as pd # type:ignore
from PoEQuery import account_name, league_id, realm
from PoEQuery.official_api_async import stash_tab
from PoEQuery.stash_tab_result import StashTabResult
STASH_URL = "https://www.pathofexile.com/character-window/get-stash-items"
def get_tab_overview():
    """Fetch stash metadata (including the tab list) for the configured account."""
    query = {
        "accountName": account_name,
        "realm": realm,
        "league": league_id,
        "tabIndex": 0,
        "tabs": 1,  # ask the API to include the tab listing
    }
    return asyncio.run(stash_tab(params=query)).json()
def get_tab_index(tab_index):
    """Fetch the contents of a single stash tab identified by *tab_index*."""
    query = {
        "accountName": account_name,
        "realm": realm,
        "league": league_id,
        "tabIndex": tab_index,
    }
    return asyncio.run(stash_tab(params=query)).json()
df = pd.DataFrame()

# List all tabs, then dump item type/count for the tabs of interest.
stash_tab_results = StashTabResult(get_tab_overview())
print(stash_tab_results.tabs)
for tab in stash_tab_results.tabs:
    if tab.name in ["LOW LEVEL BREACH"]:
        df = pd.DataFrame()  # fresh frame per matching tab
        tab_results = StashTabResult(get_tab_index(tab_index=tab.index))
        for item in tab_results.items:
            # NOTE(review): DataFrame.append was removed in pandas 2.0 --
            # this script requires pandas < 2 or a pd.concat rewrite.
            df = df.append(
                {"type": item.type, "count": item.stack_size}, ignore_index=True
            )
        print(tab.name, df)
| nilq/baby-python | python |
import aws_cdk as cdk
import constants
from deployment import UserManagementBackend
from toolchain import Toolchain
app = cdk.App()

# Development stage
UserManagementBackend(
    app,
    f"{constants.APP_NAME}-Dev",
    env=constants.DEV_ENV,
    api_lambda_reserved_concurrency=constants.DEV_API_LAMBDA_RESERVED_CONCURRENCY,
    database_dynamodb_billing_mode=constants.DEV_DATABASE_DYNAMODB_BILLING_MODE,
)

# Continuous deployment and pull request validation
Toolchain(
    app,
    f"{constants.APP_NAME}-Toolchain",
    env=constants.TOOLCHAIN_ENV,
)

# Emit the CloudFormation templates for everything defined above.
app.synth()
| nilq/baby-python | python |
import os
from psycopg2 import connect
def connect_to_db(config=None):
    """Open a psycopg2 connection (autocommit) using $DATABASE_URL.

    ``config`` is accepted for interface compatibility but is currently unused.
    """
    dsn = os.getenv("DATABASE_URL")
    connection = connect(dsn)
    connection.set_session(autocommit=True)
    return connection
def create_users_table(cur):
    """Create the politico.user table if it does not already exist.

    :param cur: an open database cursor (schema 'politico' must exist).
    """
    cur.execute(
        """CREATE TABLE IF NOT EXISTS politico.user (
        id SERIAL NOT NULL,
        national_id int NOT NULL PRIMARY KEY,
        firstname VARCHAR (100) NOT NULL,
        lastname VARCHAR (100) NOT NULL,
        othername VARCHAR (100),
        email VARCHAR (100) NOT NULL,
        phone VARCHAR (100) NOT NULL,
        isadmin BOOLEAN NOT NULL,
        password VARCHAR (250) NOT NULL,
        passporturl VARCHAR (100) NOT NULL,
        created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP);""")
def init_db(config=None):
    """Create the politico schema and its tables, then report success.

    :param config: accepted for interface compatibility; currently unused.
    """
    conn = connect_to_db()
    cur = conn.cursor()
    cur.execute("""CREATE SCHEMA IF NOT EXISTS politico;""")
    create_users_table(cur)
    print('Database created successfully')


if __name__ == '__main__':
    init_db()
| nilq/baby-python | python |
# -*- encoding: utf-8 -*-
# Copyright 2015 - Alcatel-Lucent
# Copyright © 2014-2015 eNovance
# Copyright 2010 United States Government as represented by the
# Administrator of the National Aeronautics and Space Administration.
# Copyright 2011 Justin Santa Barbara
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from oslo_config import cfg
import cProfile
def recursive_keypairs(d, separator='.'):
    """Flatten a nested dict into (joined-key, value) pairs.

    Keys at each level are visited in sorted order; nested dict values are
    recursed into, joining key components with ``separator``.
    (Taken from ceilometer and gnocchi.)
    """
    for key, value in sorted(d.items()):
        if not isinstance(value, dict):
            yield key, value
        else:
            for inner_key, inner_value in recursive_keypairs(value, separator):
                yield f'{key}{separator}{inner_key}', inner_value
def opt_exists(conf_parent, opt):
    """Return the option's value if it exists under *conf_parent*, else False."""
    try:
        return conf_parent[opt]
    except cfg.NoSuchOptError:
        # The option is not registered on this config group.
        return False
def do_cprofile(func):
    """Decorator that profiles *func* with cProfile.

    Stats (sorted by cumulative time) are printed even if the wrapped call
    raises. ``functools.wraps`` preserves the wrapped function's metadata
    (__name__, __doc__, ...), which the previous version lost.
    """
    import functools

    @functools.wraps(func)
    def profiled_func(*args, **kwargs):
        profile = cProfile.Profile()
        try:
            profile.enable()
            result = func(*args, **kwargs)
            profile.disable()
            return result
        finally:
            profile.print_stats('cumulative')
    return profiled_func
| nilq/baby-python | python |
# Copyright 2016 Chr. Hansen A/S and The Novo Nordisk Foundation Center for Biosustainability, DTU.
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
# http://www.apache.org/licenses/LICENSE-2.0
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import os
from cameo import fba
from cameo.core.strain_design import StrainDesign
from cameo.strain_design.heuristic.evolutionary.objective_functions import biomass_product_coupled_yield
from marsi.cobra.strain_design.evolutionary import OptMet, process_metabolite_knockout_solution
CURRENT_DIRECTORY = os.path.dirname(__file__)
FIXTURES = os.path.join(CURRENT_DIRECTORY, 'fixtures')
def test_design_processing_function(model):
    """A mal__D knockout under anaerobic conditions yields a valid succinate design."""
    original_o2_bound = model.reactions.EX_o2_e.lower_bound
    target = "EX_succ_e"
    substrate = "EX_glc__D_e"
    objective = biomass_product_coupled_yield(model.biomass, target, substrate)
    knockouts = ["mal__D"]
    try:
        model.reactions.EX_o2_e.lower_bound = 0  # force anaerobic growth
        result = process_metabolite_knockout_solution(
            model, knockouts, fba, {}, model.biomass, target, substrate, objective)
    finally:
        # Restore the oxygen bound so the shared model fixture stays clean.
        model.reactions.EX_o2_e.lower_bound = original_o2_bound

    design, size, fva_min, fva_max, target_flux, biomass_flux, product_yield, fitness = result
    assert isinstance(design, StrainDesign)
    assert size == len(knockouts)
    assert size == 1
    assert fva_min > 0
    assert fva_max >= fva_min
    assert target_flux > 0
    assert biomass_flux > 0
    assert product_yield > 0
    assert fitness > 0
def test_succinate(model):
    """Smoke test: OptMet must target metabolites (full run kept disabled for speed)."""
    optimization = OptMet(model=model, plot=False)
    # optimization_kwargs = dict(max_evaluations=1000, max_knockouts=6, target="succ_e",
    #                            substrate="EX_glc__D_e", biomass=model.biomass)
    assert optimization.manipulation_type == "metabolites"
    # result = optimization.run(**optimization_kwargs)
    #
    # assert isinstance(result, OptMetResult)
    # assert len(result) > 0
| nilq/baby-python | python |
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
'''
"""
Created on Sun Sep 13 15:45:26 2020
@author: samuel
"""
import numpy as np
import pandas as pd
df = pd.read_csv(
'/home/samuel/Bureau/zip.train', sep=" ", header=None)
digits = df.to_numpy()
classes = digits[:, 0]
digits = digits[:, 1:-1]
# %%
bdd = []
X = []
y = []
for i in range(10):
bdd.append(digits[classes == i][:100])
X.append(digits[classes == i][:100][:128])
y.append(digits[classes == i][:100][128:])
# %%
gamma = 0.01
kernel = ("gaussian", gamma)
# %%
from sklearn.utils.random import sample_without_replacement
from sklearn.model_selection import train_test_split
n_train = 800
bdd_train = [None] * 10
bdd_test = [None] * 10
for i in range(10):
# bdd_train.append(bdd[i][sample_without_replacement(n_population=100, n_samples=n_train//10)])
bdd_train[i], bdd_test[i] = train_test_split(bdd[i], train_size=n_train // 10)
bdd_train = np.concatenate(bdd_train)
bdd_test = np.concatenate(bdd_test)
np.random.shuffle(bdd_train)
np.random.shuffle(bdd_test)
X_train = bdd_train[:, :128]
y_train = bdd_train[:, 128:]
X_test = bdd_test[:, :128]
y_test = bdd_test[:, 128:]
# %%
from stpredictions.models.OK3._classes import OK3Regressor, ExtraOK3Regressor
from stpredictions.models.OK3._forest import RandomOKForestRegressor, ExtraOKTreesRegressor
ok3 = OK3Regressor(kernel=kernel, max_leaf_nodes=50).fit(X_train, y_train)
extraok3 = ExtraOK3Regressor(kernel=kernel, max_leaf_nodes=50).fit(X_train, y_train)
okforest = RandomOKForestRegressor(kernel=kernel, max_leaf_nodes=50).fit(X_train, y_train)
extraokforest = ExtraOKTreesRegressor(kernel=kernel, max_leaf_nodes=50).fit(X_train, y_train)
# %%
y_pred1 = ok3.predict(X_test)
y_pred2 = extraok3.predict(X_test)
y_pred3 = okforest.predict(X_test)
y_pred4 = extraokforest.predict(X_test)
# %%
mse1 = np.mean(
np.sum((y_test - y_pred1) ** 2, axis=1)) # gamma 0.01, maxleaf=50 ==> 70 ; gamma 0.01, maxleaf=10 ==> 77
mse2 = np.mean(np.sum((y_test - y_pred2) ** 2, axis=1))
mse3 = np.mean(np.sum((y_test - y_pred3) ** 2, axis=1))
mse4 = np.mean(
np.sum((y_test - y_pred4) ** 2, axis=1)) # gamma 0.01, maxleaf=50 ==> 55 ; gamma 0.01, maxleaf=10 ==> 70
rbf_loss1 = 2 * (1 - np.exp(- gamma * mse1))
rbf_loss2 = 2 * (1 - np.exp(- gamma * mse2))
rbf_loss3 = 2 * (1 - np.exp(- gamma * mse3))
rbf_loss4 = 2 * (1 - np.exp(- gamma * mse4))
print("MSE 1 :", mse1)
print("MSE 2 :", mse2)
print("MSE 3 :", mse3)
print("MSE 4 :", mse4)
print("RBF loss 1 : ", rbf_loss1)
print("RBF loss 2 : ", rbf_loss2)
print("RBF loss 3 : ", rbf_loss3)
print("RBF loss 4 : ", rbf_loss4)
# %%
# import matplotlib.pyplot as plt
test_ex = 3
plt.imshow(X_test[test_ex].reshape(8, 16), cmap='gray')
plt.title("Input upper image")
plt.show()
plt.imshow(y_test[test_ex].reshape(8, 16), cmap='gray')
plt.title("True output lower image")
plt.show()
plt.imshow(y_pred[test_ex].reshape(8, 16), cmap='gray')
plt.title("Predicted output lower image")
plt.show()
plt.imshow(np.vstack((X_test[test_ex].reshape(8, 16),
y_test[test_ex].reshape(8, 16),
-np.ones((1, 16)),
X_test[test_ex].reshape(8, 16),
y_pred[test_ex].reshape(8, 16))),
cmap='gray')
plt.title("Up : True image\nDown : Image with the predicted lower half")
# plt.imsave('/home/samuel/Bureau/prediction_ex_'+str(test_ex)+'.png', np.vstack((X_test[test_ex].reshape(8,16),
# y_test[test_ex].reshape(8,16),
# -np.ones((1,16)),
# X_test[test_ex].reshape(8,16),
# y_pred[test_ex].reshape(8,16))),
# cmap='gray')
# %%
pixels_importances = ok3.feature_importances_
plt.imshow(pixels_importances.reshape(8, 16), cmap='gray')
plt.title("Image of pixels (features) importances")
plt.show()
''' | nilq/baby-python | python |
# -*- python -*-
import os
import crochet
from twisted.application.internet import StreamServerEndpointService
from twisted.application import service
from twisted.internet import reactor, endpoints
from twisted.web.wsgi import WSGIResource
import weasyl.polecat
import weasyl.wsgi
import weasyl.define as d
from libweasyl import cache
# Bound the reactor thread pool that runs the WSGI application.
threadPool = reactor.getThreadPool()
threadPool.adjustPoolsize(minthreads=6, maxthreads=12)

weasylResource = WSGIResource(reactor, threadPool, weasyl.wsgi.wsgi_app)
if os.environ.get('WEASYL_SERVE_STATIC_FILES'):
    # Serve /static and /css straight from disk (development convenience).
    weasylResource = weasyl.polecat.TryChildrenBeforeLeaf(weasylResource)
    staticResource = weasyl.polecat.NoDirectoryListingFile(
        os.path.join(os.environ['WEASYL_APP_ROOT'], 'static'))
    cssResource = weasyl.polecat.NoDirectoryListingFile(
        os.path.join(os.environ['WEASYL_APP_ROOT'], 'build/css'))
    weasylResource.putChild('static', staticResource)
    weasylResource.putChild('css', cssResource)

    rewriters = [weasyl.polecat.rewriteSubmissionUploads]
    if os.environ.get('WEASYL_REVERSE_PROXY_STATIC'):
        # Proxy static assets from production instead of serving local copies.
        from twisted.web import proxy
        weasylResource.putChild(
            '_weasyl_static', proxy.ReverseProxyResource('www.weasyl.com', 80, '/static'))
        rewriters.append(weasyl.polecat.rewriteNonlocalImages)

    from twisted.web.rewrite import RewriterResource
    weasylResource = RewriterResource(weasylResource, *rewriters)

# Optional "host:port" destination for request logging.
requestLogHost = d.config_read_setting('request_log_host', section='backend')
if requestLogHost:
    requestLogHost, _, requestLogPort = requestLogHost.partition(':')
    requestLogPort = int(requestLogPort)
    requestLogHost = requestLogHost, requestLogPort

site = weasyl.polecat.WeasylSite(weasylResource)
siteStats = weasyl.polecat.WeasylSiteStatsFactory(site, threadPool, reactor, requestLogHost=requestLogHost)
weasyl.define.statsFactory = siteStats

application = service.Application('weasyl')
def attachServerEndpoint(factory, endpointEnvironKey, defaultString=None):
    """Generate a server endpoint from an environment variable and attach it
    to the application. Does nothing when neither the environment variable
    nor a default endpoint string is provided."""
    spec = os.environ.get(endpointEnvironKey, defaultString)
    if not spec:
        return
    ep = endpoints.serverFromString(reactor, spec)
    StreamServerEndpointService(ep, factory).setServiceParent(application)
attachServerEndpoint(site, 'WEASYL_WEB_ENDPOINT', 'tcp:8080:interface=127.0.0.1')
attachServerEndpoint(siteStats, 'WEASYL_WEB_STATS_ENDPOINT', 'tcp:8267:interface=127.0.0.1')

if d.config_read_bool('run_periodic_tasks', section='backend'):
    # Run cron-style maintenance inside this process.
    from weasyl.cron import run_periodic_tasks
    weasyl.polecat.PeriodicTasksService(reactor, run_periodic_tasks).setServiceParent(application)

if not d.config_read_bool('rough_shutdowns', section='backend'):
    # Drain in-flight requests before the reactor stops.
    reactor.addSystemEventTrigger('before', 'shutdown', site.gracefullyStopActiveClients)

# Optional statsd metrics reporting ("host:port" in the statsd config section).
statsdServer = d.config_read_setting('server', section='statsd')
if statsdServer:
    statsdHost, _, statsdPort = statsdServer.rpartition(':')
    statsdPort = int(statsdPort)
    import socket
    from txstatsd.client import TwistedStatsDClient, StatsDClientProtocol
    from txstatsd.metrics.metrics import Metrics
    from txstatsd.report import ReportingService
    # Metric namespace precedence: config setting > environment > short hostname.
    namespace = d.config_read_setting('namespace', section='statsd')
    if namespace is None:
        namespace = os.environ.get('WEASYL_STATSD_NAMESPACE')
    if namespace is None:
        namespace = socket.gethostname().split('.')[0]
    statsdClient = TwistedStatsDClient.create(statsdHost, statsdPort)
    site.metrics = Metrics(connection=statsdClient, namespace=namespace)
    reporting = ReportingService()
    reporting.setServiceParent(application)
    siteStats.metricService().setServiceParent(application)
    protocol = StatsDClientProtocol(statsdClient)
    reactor.listenUDP(0, protocol)

crochet.no_setup()
# Memcached-backed cache region shared with the web application code.
cache.region.configure(
    'txyam',
    arguments=dict(
        reactor=reactor,
        url=d.config_read_setting(
            'servers', 'tcp:127.0.0.1:11211', 'memcached').split(),
        retryDelay=10,
        timeOut=0.4,
    ),
    wrap=[cache.ThreadCacheProxy, cache.JSONProxy],
    replace_existing_backend=True
)
| nilq/baby-python | python |
# -*- coding: utf8 -*-
from datetime import date
from nba.model.utils import oddsshark_team_id_lookup
from sqlalchemy import Column, Date, Float, Integer, String, ForeignKey
from sqlalchemy.ext.declarative import declarative_base
from sqlalchemy.orm import relationship, backref
# Franchise-abbreviation cutover dates used by Game.get_br_url to pick the
# historical home-team abbreviation for basketball-reference URLs.
NOP_TO_NOH_DATE = date(2013, 10, 29)
CHA_TO_CHO_DATE = date(2014, 10, 27)

Base = declarative_base()
class Team(Base):
    """
    Represents an NBA team
    """
    __tablename__ = 'team'
    __table_args__ = {'sqlite_autoincrement': True}

    id = Column(Integer, primary_key=True)
    name = Column(String)  # full name; keys into oddsshark_team_id_lookup
    abbr = Column(String)  # team abbreviation (e.g. used in box-score URLs)
    city = Column(String)

    def get_odds_url(self, year):
        """Return the oddsshark game-log URL for this team and season *year*."""
        return "http://www.oddsshark.com/stats/gamelog/basketball/nba/{0}/{1}".format(oddsshark_team_id_lookup.get(self.name), year)
class GameFeature(Base):
    """
    Represents the statistics associated with a game or range of games.
    """
    __tablename__ = 'game_feature'
    __table_args__ = {'sqlite_autoincrement': True}

    id = Column(Integer, primary_key=True)

    # --- box-score counting stats ---
    score = Column(Integer)  # Final score of team
    fg = Column(Integer)  # Field Goals made
    fga = Column(Integer)  # Field Goals attempted
    fgp = Column(Float)  # Field goal percentage
    threep = Column(Integer)  # three pointers made
    threepa = Column(Integer)  # three pointers attempted
    threepp = Column(Float)  # three pointers percentage
    ft = Column(Integer)  # Free Throws made
    fta = Column(Integer)  # Free Throws attempted
    ftp = Column(Float)  # Free throws %
    orb = Column(Integer)  # Offensive Rebounds
    drb = Column(Integer)  # Defensive Rebounds
    trb = Column(Integer)  # Total Rebounds
    ast = Column(Integer)  # Assists
    stl = Column(Integer)  # Steals
    blk = Column(Integer)  # Blocks
    tov = Column(Integer)  # Turnovers
    pf = Column(Integer)  # Personal Fouls

    # --- advanced / rate stats ---
    tsp = Column(Float)  # True Shooting Percentage
    efgp = Column(Float)  # Effective Field Goal Percentage
    threepar = Column(Float)  # three Point attempt rate
    ftr = Column(Float)  # FT attempt rate
    orbp = Column(Float)  # Offensive Rebound Percentage
    drbp = Column(Float)  # Defensive Rebound Percentage
    trpb = Column(Float)  # Total Rebound Percentage
    astp = Column(Float)  # Assist rate percentage
    stlp = Column(Float)  # Steal rate percentage
    blkp = Column(Float)  # Block rate percentage
    tovp = Column(Float)  # Turn over rate percentage
    ortg = Column(Float)  # Offensive Rating
    drtg = Column(Float)  # Defensive Rating
    ftfga = Column(Float)  # Ft/FGA Rating
    pace = Column(Float)  # PACE
class Odds(Base):
    """Betting line for a game: point spread and over/under total."""
    __tablename__ = 'odds'
    __table_args__ = {'sqlite_autoincrement': True}

    id = Column(Integer, primary_key=True)
    spread = Column(Float)     # point spread
    overunder = Column(Float)  # over/under total
class Game(Base):
    """
    Represents a game with keys to the teams and features
    """
    __tablename__ = 'game'
    __table_args__ = {'sqlite_autoincrement': True}

    id = Column(Integer, primary_key=True)
    home_id = Column(ForeignKey('team.id'))
    home = relationship("Team", backref=backref("game_home", order_by=id), foreign_keys=[home_id])
    home_features_id = Column(ForeignKey('game_feature.id'))
    home_features = relationship("GameFeature", backref=backref("game_home_features", order_by=id), foreign_keys=[home_features_id])
    away_id = Column(ForeignKey('team.id'))
    away = relationship("Team", backref=backref("game_away", order_by=id), foreign_keys=[away_id])
    away_features_id = Column(ForeignKey('game_feature.id'))
    away_features = relationship("GameFeature", backref=backref("game_away_features", order_by=id), foreign_keys=[away_features_id])
    date = Column(Date)
    odds_id = Column(ForeignKey('odds.id'))
    odds = relationship("Odds", backref=backref("game", order_by=id))

    def get_br_url(self):
        """Returns the URL for the basketball-reference.com box scores"""
        # Box scores are keyed by the *home* team's historical abbreviation,
        # so account for franchise renames around the module-level cutoffs.
        if self.home.abbr == 'NOP' and self.date < NOP_TO_NOH_DATE:
            abbr = 'NOH'
        elif self.home.abbr == "CHA" and self.date > CHA_TO_CHO_DATE:
            # NOTE(review): strict '>' here vs '<' in the NOP case -- a game
            # exactly on CHA_TO_CHO_DATE keeps 'CHA'; confirm that's intended.
            abbr = "CHO"
        else:
            abbr = self.home.abbr
        return "http://www.basketball-reference.com/boxscores/{0}{1}{2}0{3}.html".format(self.date.year, str(self.date.month).zfill(2), str(self.date.day).zfill(2), abbr)
class Rollup(Base):
    """
    Contains rollup data for a set of features between an inclusive
    range of games.
    """
    __tablename__ = "game_rollup"
    __table_args__ = {'sqlite_autoincrement': True}
    id = Column(Integer, primary_key=True)
    # Team the rollup is computed for.
    team_id = Column(ForeignKey('team.id'))
    team = relationship("Team", backref=backref("game_rollup", order_by=id))
    # First game of the (inclusive) range.
    start_id = Column(ForeignKey('game.id'))
    start = relationship("Game", backref=backref("game_rollup_start", order_by=id), foreign_keys=[start_id])
    # Last game of the (inclusive) range.
    end_id = Column(ForeignKey('game.id'))
    end = relationship("Game", backref=backref("game_rollup_end", order_by=id), foreign_keys=[end_id])
    # Aggregated feature values over the range.
    features_id = Column(ForeignKey('game_feature.id'))
    features = relationship("GameFeature", backref=backref("game_rollup", order_by=id))
| nilq/baby-python | python |
"""
Quick and dirty MQTT door sensor
"""
import time
import network
import ubinascii
import machine
from umqttsimple import MQTTClient
import esp
import adcmode
try:
import secrets
except:
import secrets_sample as secrets
try:
    ### Create wifi network
    sta_if = network.WLAN(network.STA_IF)
    sta_if.active(True)
    print("wifi: connecting")
    sta_if.connect(secrets.SSID, secrets.PASSWD) # Connect to an AP
    # Prefer a static IP configuration; fall back to DHCP when the secrets
    # module does not define the static-address fields.
    try:
        sta_if.ifconfig((secrets.IPADDR, secrets.MASK, secrets.GW, secrets.DNS))
    except:
        print("using DHCP...")
    ### Setup ADC to measure VCC
    # Switching the ADC mode rewrites flash and requires a reboot to apply.
    if not adcmode.set_adc_mode(adcmode.ADC_MODE_VCC):
        print("ADC mdode changed in flash - restart needed")
        machine.reset()
    # Raw ADC counts scaled down; presumably approximates supply volts -- confirm scaling.
    vcc = machine.ADC(1).read()/1024.0
    while not sta_if.isconnected():
        time.sleep(0.5)
    print("wifi connected: ", sta_if.ifconfig())
    ### connect to MQTT
    CLIENT_ID = ubinascii.hexlify(machine.unique_id())
    client = MQTTClient(CLIENT_ID, secrets.MQTT_SVR, user=secrets.MQTT_USER, password=secrets.MQTT_PWD )
    client.connect()
    print("mqtt: connected")
    # Publish a single message (formatted with the measured voltage), then
    # disconnect cleanly.
    payload = secrets.MQTT_PAYLOAD.format(vcc)
    client.publish(secrets.MQTT_TOPIC, payload)
    print("mqtt: published %s: %s"%(secrets.MQTT_TOPIC, payload))
    client.disconnect()
    print("mqtt: disconnected")
except Exception as e:
    # Never crash: log the failure and still fall through to deep sleep so
    # the battery is not drained by a hung board.
    print( "FATAL: ", type(e) )
    print( "       ", repr(e) )
time.sleep(0.1) # without this, deepsleep doesn't work well
# Sleep with no wake timer (0); wake-up presumably comes from external
# reset wiring (door switch) -- confirm against the hardware.
esp.deepsleep(0)
| nilq/baby-python | python |
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
import re
def test_invoked_commands_still_work_even_though_they_are_no_customizable(lib, pythondir):
    """ctx.invoke runs the target command with its defaults, bypassing
    path-based parameter customization (no path, hence no customization)."""
    # given a command that is calling another using ctx.invoke
    (pythondir / 'mygroup.py').write_text("""
import click
from clk.decorators import group, flag
@group()
def mygroup():
    pass
@mygroup.command()
@flag("--shout")
def invokedcommand(shout):
    message = "invokedcommand"
    if shout:
        message = message.upper()
    print(message)
@mygroup.command()
def invokingcommand():
    ctx = click.get_current_context()
    ctx.invoke(invokedcommand)
""")
    # and I customize the invokedcommand
    lib.cmd('parameter set mygroup.invokedcommand --shout')
    # when I call the customized command alone
    output = lib.cmd('mygroup invokedcommand')
    # then I can see the customization in action
    assert output == 'INVOKEDCOMMAND'
    # when I call the invoking command
    output = lib.cmd('mygroup invokingcommand')
    # then I can see the output of the invokedcommand but without the
    # customization (because it was not called using a path, hence the notion of
    # path itself does not make sense in this context).
    assert output == 'invokedcommand'
def test_broken_command_dont_make_clk_crash(lib, pythondir):
    """A custom command module that raises at import time must degrade to an
    error message, not crash clk itself."""
    # given a command that is poorly written
    (pythondir / 'a.py').write_text("""
raise Exception("test")
""")
    # when I create an alias to that command
    output = lib.cmd('alias set b a', with_err=True)
    # then the output indicates the command could not be loaded
    assert 'error: Found the command a in the resolver customcommand but could not load it.' in output
def test_param_config_default_value_callback_that_depends_on_another_param(pythondir, lib):
    """A param_config default callback may read another param_config value
    (here --url's default is derived from --api) at evaluation time."""
    # given a command to perform http request with a default url lazily computed
    # that depends on some other value
    (pythondir / 'http.py').write_text("""
from clk.config import config
from clk.decorators import group, param_config
def default():
    if config.http.api:
        return f"http://{config.http.api}"
@group()
@param_config('http', '--api')
@param_config('http', '--url', default=default)
def http():
    ""
@http.command()
def get():
    print("Getting " + config.http.url)
""")
    # when I use the command without providing the first value, then I get the
    # appropriate default value
    assert lib.cmd('http --api myapi get') == 'Getting http://myapi'
def test_dynamic_option(pythondir, lib):
    """Options declared with dynamic=SomeClass are collected onto a config
    object of that class, and may have lazily computed defaults."""
    # given a command to perform http request with a default url lazily computed
    # that depends on some other value
    (pythondir / 'http.py').write_text("""
from clk.config import config
from clk.decorators import group, option
class Http:
    def dump(self):
        print(self.url)
def default():
    if config.http.api:
        return f"http://{config.http.api}"
@group()
@option('--api', dynamic=Http)
@option('--url', dynamic=Http, default=default)
def http(api, url):
    ""
@http.command()
def get():
    print("Getting " + config.http.url)
@http.command()
def dump():
    config.http.dump()
""")
    # when I use the command without providing the first value, then I get the
    # appropriate default value
    assert lib.cmd('http --api myapi get') == 'Getting http://myapi'
    assert lib.cmd('http --api myapi dump') == 'http://myapi'
def test_param_config_default_value_callback(pythondir, lib):
    """A param_config default may be a zero-argument callback, evaluated when
    the value is first read."""
    # given a command to perform http request with a default url lazily computed
    (pythondir / 'http.py').write_text("""
from clk.config import config
from clk.decorators import group, param_config
def default():
    return 'http://myapi'
@group()
@param_config('http', '--url', default=default)
def http():
    ""
@http.command()
def get():
    print("Getting " + config.http.url)
""")
    # when I use the command without providing a value, then I get the default value
    assert lib.cmd('http get') == 'Getting http://myapi'
def test_param_config_default_value(pythondir, lib):
    """A plain literal default on param_config is honored."""
    # given a command to perform http request with a default url
    (pythondir / 'http.py').write_text("""
from clk.config import config
from clk.decorators import group, param_config
@group()
@param_config('http', '--url', default='http://myapi')
def http():
    ""
@http.command()
def get():
    print("Getting " + config.http.url)
""")
    # when I use the command without providing a value, then I get the default value
    assert lib.cmd('http get') == 'Getting http://myapi'
def test_command(lib):
    """`command display` lists known commands with their short descriptions."""
    output = lib.cmd('command display')
    assert re.search(r'flowdep\s+Manipulate command flow dependencies\.', output)
| nilq/baby-python | python |
from unittest import TestCase
import pytest
import torch
import pyro
import pyro.infer
from pyro.distributions import Bernoulli, Normal
from pyro.infer import EmpiricalMarginal
from tests.common import assert_equal
class HMMSamplingTestCase(TestCase):
    """Fixture: a small HMM with Bernoulli transitions and emissions."""
    def setUp(self):
        # simple Bernoulli-emission HMM (NOTE: the original comment said
        # "Gaussian-emission", but both latents and observations below are
        # sampled from Bernoulli distributions)
        def model():
            # p1/p2 index transition/emission probabilities by previous latent.
            p_latent = pyro.param("p1", torch.tensor([[0.7], [0.3]]))
            p_obs = pyro.param("p2", torch.tensor([[0.9], [0.1]]))
            latents = [torch.ones(1, 1)]
            observes = []
            for t in range(self.model_steps):
                latents.append(
                    pyro.sample("latent_{}".format(str(t)),
                                Bernoulli(torch.index_select(p_latent, 0, latents[-1].view(-1).long()))))
                observes.append(
                    pyro.sample("observe_{}".format(str(t)),
                                Bernoulli(torch.index_select(p_obs, 0, latents[-1].view(-1).long())),
                                obs=self.data[t]))
            return torch.sum(torch.cat(latents))
        self.model_steps = 3
        # All observations fixed to 1.
        self.data = [torch.ones(1, 1) for _ in range(self.model_steps)]
        self.model = model
class NormalNormalSamplingTestCase(TestCase):
    """Fixture: conjugate normal-normal model with 50 zero observations, so
    the posterior over `loc` has mean 0 and variance 1/51."""
    def setUp(self):
        pyro.clear_param_store()
        def model():
            loc = pyro.sample("loc", Normal(torch.zeros(1),
                                            torch.ones(1)))
            xd = Normal(loc, torch.ones(1))
            pyro.sample("xs", xd, obs=self.data)
            return loc
        def guide():
            # Guide samples loc from the prior (no learned parameters).
            return pyro.sample("loc", Normal(torch.zeros(1),
                                             torch.ones(1)))
        # data
        self.data = torch.zeros(50, 1)
        # Analytic posterior moments: N(0, 1/(n+1)) with n = 50.
        self.loc_mean = torch.zeros(1)
        self.loc_stddev = torch.sqrt(torch.ones(1) / 51.0)
        # model and guide
        self.model = model
        self.guide = guide
class ImportanceTest(NormalNormalSamplingTestCase):
    """Importance sampling must recover the analytic posterior moments."""
    @pytest.mark.init(rng_seed=0)
    def test_importance_guide(self):
        # Sampling with an explicit guide: moments should match loc_mean /
        # loc_stddev from setUp within the given precision.
        posterior = pyro.infer.Importance(self.model, guide=self.guide, num_samples=5000).run()
        marginal = EmpiricalMarginal(posterior)
        assert_equal(0, torch.norm(marginal.mean - self.loc_mean).item(), prec=0.01)
        assert_equal(0, torch.norm(marginal.variance.sqrt() - self.loc_stddev).item(), prec=0.1)
    @pytest.mark.init(rng_seed=0)
    def test_importance_prior(self):
        # guide=None falls back to proposing from the model's prior.
        posterior = pyro.infer.Importance(self.model, guide=None, num_samples=10000).run()
        marginal = EmpiricalMarginal(posterior)
        assert_equal(0, torch.norm(marginal.mean - self.loc_mean).item(), prec=0.01)
        assert_equal(0, torch.norm(marginal.variance.sqrt() - self.loc_stddev).item(), prec=0.1)
| nilq/baby-python | python |
#!/usr/bin/env python
"""
Setup script for fio-buffer
"""
import os
from setuptools import setup
from setuptools import find_packages
with open('README.rst') as f:
    readme = f.read().strip()
# Package metadata is parsed out of fio_buffer/__init__.py so it is defined
# in exactly one place (dunder assignments like __version__ = '1.0').
version = None
author = None
email = None
source = None
with open(os.path.join('fio_buffer', '__init__.py')) as f:
    for line in f:
        if line.strip().startswith('__version__'):
            version = line.split('=')[1].strip().replace('"', '').replace("'", '')
        elif line.strip().startswith('__author__'):
            author = line.split('=')[1].strip().replace('"', '').replace("'", '')
        elif line.strip().startswith('__email__'):
            email = line.split('=')[1].strip().replace('"', '').replace("'", '')
        elif line.strip().startswith('__source__'):
            source = line.split('=')[1].strip().replace('"', '').replace("'", '')
        elif None not in (version, author, email, source):
            # Stop once all four fields are found. NOTE(review): this branch
            # is only reached on a line matching none of the prefixes above,
            # so the loop may scan further than strictly needed -- harmless,
            # but confirm the early-exit placement is intended.
            break
setup(
    author=author,
    author_email=email,
    classifiers=[
        'Development Status :: 4 - Beta',
        'Intended Audience :: Developers',
        'Intended Audience :: Information Technology',
        'Intended Audience :: Science/Research',
        'License :: OSI Approved :: BSD License',
        'Programming Language :: Python :: 2.7',
        'Programming Language :: Python :: 3.3',
        'Programming Language :: Python :: 3.4',
        'Topic :: Scientific/Engineering :: GIS'
    ],
    description="A Fiona CLI plugin for buffering geometries.",
    entry_points="""
        [fiona.fio_plugins]
        buffer=fio_buffer.core:buffer
    """,
    extras_require={
        'dev': ['pytest', 'pytest-cov']
    },
    include_package_data=True,
    install_requires=[
        'click>=0.3',
        'shapely',
        'fiona>=1.6'
    ],
    keywords='Fiona fio GIS vector buffer plugin',
    license="New BSD",
    long_description=readme,
    name='fio-buffer',
    packages=find_packages(),
    url=source,
    version=version,
    zip_safe=True
)
| nilq/baby-python | python |
# from http://www.calazan.com/a-simple-python-script-for-backing-up-a-postgresql-database-and-uploading-it-to-amazon-s3/
import os
import sys
import subprocess
from optparse import OptionParser
from datetime import date, datetime, timedelta
import boto
from boto.s3.key import Key
# Amazon S3 settings.
AWS_ACCESS_KEY_ID = os.getenv("AWS_ACCESS_KEY_ID")
AWS_SECRET_ACCESS_KEY = os.getenv("AWS_SECRET_ACCESS_KEY")
DB_APP_NAME = "total-impact-core" # should be in sync with AWS bucket name wrt staging/production
def get_database_cred_from_heroku_app():
    """Fetch DATABASE credentials for DB_APP_NAME via the heroku CLI.

    Parses the second line of `heroku pg:credentials` output into a dict;
    assumes that line holds quoted space-separated key=value pairs
    (dbname/host/port/user/password) -- confirm against the CLI version.
    """
    cmd_list = ['heroku', 'pg:credentials', 'DATABASE', '--app', DB_APP_NAME]
    ps = subprocess.Popen(cmd_list, stdout=subprocess.PIPE)
    output = ps.communicate()[0]
    cred_dict = dict([t.split("=") for t in output.splitlines()[1].replace('"',"").split(' ') if t])
    return cred_dict
def call_pg_dump(cred_dict, tablename, dumped_file):
    """Run pg_dump with the given credentials, writing to dumped_file.

    Produces a compressed (-Fc), data-only dump. If tablename is truthy the
    dump is restricted to that table. Returns pg_dump's stdout.
    NOTE(review): the password is injected as PGPASSWORD=... into a
    shell=True command string, so it is visible in the process list.
    """
    # -Fc is a compressed format
    cmd_list = ['PGPASSWORD='+cred_dict["password"],
                'pg_dump',
                '-h', cred_dict["host"],
                '-p', cred_dict["port"],
                '-U', cred_dict["user"],
                '-Fc', cred_dict["dbname"],
                '-f', dumped_file,
                '--verbose',
                '--data-only']
    if tablename:
        cmd_list += ['-t', tablename]
    print cmd_list
    # shell=True so the leading PGPASSWORD=... acts as an env assignment.
    ps = subprocess.Popen(" ".join(cmd_list), stdout=subprocess.PIPE, shell=True)
    output = ps.communicate()[0]
    print output
    return output
def upload_to_s3(dumped_file, aws_filename, bucket_name=None):
    """
    Upload a file to an AWS S3 bucket.

    dumped_file: local path to upload.
    aws_filename: key to store the object under.
    bucket_name: target bucket; defaults to the AWS_BUCKET env var.
    """
    if not bucket_name:
        bucket_name = os.getenv("AWS_BUCKET", "impactstory-uploads-local")
    conn = boto.connect_s3(AWS_ACCESS_KEY_ID, AWS_SECRET_ACCESS_KEY)
    bucket = conn.get_bucket(bucket_name)
    k = Key(bucket)
    k.key = aws_filename
    k.set_contents_from_filename(dumped_file)
def backup_table(cred_dict, tablename):
dumped_file = tablename
aws_filename='old-snaps/' + dumped_file + ".dump"
output = call_pg_dump(cred_dict, tablename, dumped_file)
upload_to_s3(dumped_file, aws_filename, bucket_name)
try:
print 'Uploading %s to Amazon S3...' % aws_filename
upload_to_s3(dumped_file, aws_filename)
except boto.exception.S3ResponseError:
print 'Upload did not complete'
# from http://stackoverflow.com/questions/10688006/generate-a-list-of-datetimes-between-an-interval-in-python
def perdelta(start, end, delta):
    """Yield start, start+delta, start+2*delta, ... strictly less than end.

    Works for any types supporting `<` and `+` (datetimes with timedeltas,
    plain numbers, ...). Yields nothing when start >= end.
    """
    current = start
    while current < end:
        yield current
        current = current + delta
def main():
    """Back up one monthly snap_YYYYMM table per 31-day step in the window."""
    # NOTE(review): parser is created but never used.
    parser = OptionParser()
    now = datetime.now()
    # four_months_ago = now + timedelta(days=-124)
    # two_months_ago = now + timedelta(days=-62)
    # NOTE(review): the 4/3-day window below looks like a leftover test
    # override of the commented-out 124/62-day values -- confirm before use.
    four_months_ago = now + timedelta(days=-4)
    two_months_ago = now + timedelta(days=-3)
    cred_dict = get_database_cred_from_heroku_app()
    for a_month in perdelta(four_months_ago, two_months_ago, timedelta(days=31)):
        tablename = a_month.strftime("snap_%Y%m")
        print tablename
        backup_table(cred_dict, tablename)
# restore the tables again with this
# heroku pgbackups:restore DATABASE 'https://s3.amazonaws.com/bucket_name/properties.dump'
| nilq/baby-python | python |
"""
Для поступления в вуз абитуриент должен предъявить результаты трех экзаменов в виде ЕГЭ, каждый из них оценивается
целым числом от 0 до 100 баллов. При этом абитуриенты, набравшие менее 40 баллов (неудовлетворительную оценку) по
любому экзамену из конкурса выбывают. Остальные абитуриенты участвуют в конкурсе по сумме баллов за три экзамена.
В конкурсе участвует N человек, при этом количество мест равно K. Определите проходной балл, то есть такое количество
баллов, что количество участников, набравших столько или больше баллов не превосходит K, а при добавлении к ним
абитуриентов, набравших наибольшее количество баллов среди непринятых абитуриентов, общее число принятых абитуриентов
станет больше K.
Формат ввода
Программа получает на вход количество мест K. Далее идут строки с информацией об абитуриентах, каждая из которых
состоит из имени (текстовая строка содержащая произвольное число пробелов) и трех чисел от 0 до 100, разделенных
пробелами.
Используйте для ввода файл input.txt с указанием кодировки utf8 (для создания такого файла на своем компьютере в
программе Notepad++ следует использовать кодировку UTF-8 без BOM).
Формат вывода
Программа должна вывести проходной балл в конкурсе. Выведенное значение должно быть минимальным баллом,
который набрал абитуриент, прошедший по конкурсу.
Также возможны две ситуации, когда проходной балл не определен.
Если будут зачислены все абитуриенты, не имеющие неудовлетворительных оценок, программа должна вывести число 0.
Если количество имеющих равный максимальный балл абитуриентов больше чем K, программа должна вывести число 1.
Используйте для вывода файл output.txt с указанием кодировки utf8.
Предупреждение
Пожалуйста, тестируйте файловый ввод и вывод на своем компьютере. В этой задаче слушатели часто получают ошибки вроде
RE на первом тесте, протестировав у себя с помощью консоли и просто заменив input() на чтение из файла перед сдачей.
К сожалению, такую замену не всегда удается сделать без ошибок, и решение слушателей действительно перестает
правильно работать даже на первом тесте.
"""
# Read the number of available places, then one applicant per line: a name
# (which may contain spaces) followed by three exam scores 0..100.
myFile = open("input.txt", "r", encoding="utf8")
k = int(myFile.readline())
myList = []
for line in myFile:
    newLine = line.split()
    # Keep only applicants with all three scores >= 40 (no failing marks).
    # The last three whitespace-separated tokens are the scores, so names
    # with spaces are handled correctly.
    if int(newLine[-1]) >= 40 and int(newLine[-2]) >= 40 \
            and int(newLine[-3]) >= 40:
        myList.append(newLine)
myFile.close()
# Sort by total score, highest first.
myList.sort(key=lambda a: int(a[-1]) + int(a[-2]) + int(a[-3]))
myList.reverse()
# konk: descending list of total scores for eligible applicants.
konk = []
for i in myList:
    sum = int(i[-1]) + int(i[-2]) + int(i[-3])
    konk.append(sum)
n = len(konk)
def konkurs(n, k, konk):
    """Return the pass mark for k places given n eligible applicants.

    konk must hold the applicants' total scores sorted in descending order.
    Returns 0 when every eligible applicant is admitted, 1 when more than k
    applicants share the top score (pass mark undefined), otherwise the
    lowest total score among the admitted applicants.
    """
    if n <= k:
        # No competition: everyone without a failing mark is admitted.
        return 0
    if konk[k] == konk[0]:
        # At least k+1 applicants tie for the maximum score.
        return 1
    # Walk upward from position k to the last strict drop in score; the
    # score just above that drop is the weakest admitted applicant's total.
    for idx in range(k, 0, -1):
        if konk[idx] < konk[idx - 1]:
            return konk[idx - 1]
print(konkurs(n, k, konk))
| nilq/baby-python | python |
# -*- coding: utf-8 -*-
from discord.ext.commands import context
import settings
class GeneralContext(context.Context):
    """Expanded version of the Discord Context class.
    This class can be used outside of command functions, such as
    inside event handlers. It needs to be created manually.
    Attributes:
        channel(discord.Channel): channel the message was sent in.
        server(discord.Server): server the message was sent in.
        user(discord.Member/User): author of the message.
    """
    def __init__(self, **attrs):
        # The base Context needs a prefix; supply the bot's configured one.
        attrs["prefix"] = settings.BOT_PREFIX
        super().__init__(**attrs)
        # Explicit overrides; any left as None are filled in from the
        # wrapped message by _extract_message() below.
        self.channel = attrs.pop("channel", None)
        self.context = attrs.pop("context", None)
        self.server = attrs.pop("server", None)
        self.user = attrs.pop("user", None)
        self._extract_message()
    def _extract_message(self):
        """Assigns some of the message variables to this class's variables."""
        if self.context:
            self.message = self.context.message
        if self.message:
            # Only fill attributes the caller did not set explicitly.
            self.channel = self.message.channel if not self.channel else self.channel
            self.server = self.message.server if not self.server else self.server
            self.user = self.message.author if not self.user else self.user
| nilq/baby-python | python |
# coding=utf-8
# Copyright 2020 The TensorFlow Datasets Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# Lint as: python3
"""Classes and functions to generate the OI Challenge 2019 dataset using Apache Beam."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import collections
import csv
import io
import json
import os
from absl import logging
import numpy as np
import tensorflow.compat.v2 as tf
import tensorflow_datasets.public_api as tfds
beam = tfds.core.lazy_imports.apache_beam
cv2 = tfds.core.lazy_imports.cv2
Metrics = beam.metrics.Metrics
class ReadZipFn(beam.DoFn):
  """Iterates a zip file, yielding filenames and file contents.

  Only ".jpg" entries are emitted; other archive members are skipped.
  """
  def process(self, zip_filepath):
    for filename, file in tfds.download.iter_archive(
        zip_filepath, tfds.download.ExtractMethod.ZIP):
      if filename.endswith(".jpg"):
        yield filename, file.read()
class ProcessImageFn(beam.DoFn):
  """Resizes images, re-compresses them in JPEG and yields the result.

  Fixes: np.fromstring and ndarray.tostring are deprecated (fromstring is
  removed for bytes input in current NumPy); replaced with np.frombuffer
  and ndarray.tobytes. The bare `except:` is narrowed to `except Exception:`
  -- behavior is unchanged since the handler re-raises either way.
  """
  def __init__(self, target_pixels, jpeg_quality=72):
    # target_pixels: maximum height*width kept after resizing (0/None keeps
    # the original size).
    self._target_pixels = target_pixels
    self._jpeg_quality = [int(cv2.IMWRITE_JPEG_QUALITY), jpeg_quality]
    self._images_failed = Metrics.counter(self.__class__, "images_failed")
  def __getstate__(self):
    return (self._target_pixels, self._jpeg_quality, self._images_failed)
  def __setstate__(self, state):
    self._target_pixels, self._jpeg_quality, self._images_failed = state
  def process(self, element):
    filename, content = element
    try:
      # np.frombuffer replaces the deprecated np.fromstring.
      image = cv2.imdecode(np.frombuffer(content, dtype=np.uint8), flags=3)
    except Exception:
      logging.info("Exception raised while decoding image %s", filename)
      raise
    if image is None:
      # cv2.imdecode signals failure by returning None rather than raising.
      self._images_failed.inc()
      logging.info("Image %s could not be decoded", filename)
    else:
      # GIF images contain a single frame.
      if len(image.shape) == 4:  # rank=4 -> rank=3
        image = image.reshape(image.shape[1:])
      # Get image height and width.
      height, width, _ = image.shape
      actual_pixels = height * width
      # If necessary, resize the image to have at most self._target_pixels,
      # keeping the aspect ratio.
      if self._target_pixels and actual_pixels > self._target_pixels:
        factor = np.sqrt(self._target_pixels / actual_pixels)
        image = cv2.resize(image, dsize=None, fx=factor, fy=factor)
      # Encode the image with the configured quality into a BytesIO object.
      _, buff = cv2.imencode(".jpg", image, self._jpeg_quality)
      # tobytes() replaces the deprecated ndarray.tostring().
      yield filename, io.BytesIO(buff.tobytes())
class CreateDetectionExampleFn(beam.DoFn):
  """Creates TFDS examples for the Detection track.

  Any of the annotation filepaths may be None; the corresponding fields of
  the generated example are then left empty.
  """
  def __init__(self, image_labels_filepath, box_labels_filepath,
               hierarchy_filepath, classes_filepath):
    self._image_labels_filepath = image_labels_filepath
    self._box_labels_filepath = box_labels_filepath
    self._hierarchy_filepath = hierarchy_filepath
    self._classes_filepath = classes_filepath
    self._load_info_from_files()
  def __getstate__(self):
    # Pickle only the filepaths; the lookup tables are rebuilt on unpickle.
    return (self._image_labels_filepath, self._box_labels_filepath,
            self._hierarchy_filepath, self._classes_filepath)
  def __setstate__(self, state):
    (self._image_labels_filepath, self._box_labels_filepath,
     self._hierarchy_filepath, self._classes_filepath) = state
    self._load_info_from_files()
  def _load_info_from_files(self):
    """Loads annotation lookup tables from the configured CSV/JSON files."""
    self._image2labels = None
    self._image2boxes = None
    self._hierarchy = None
    self._mid2int = None
    if self._image_labels_filepath:
      self._image2labels = load_image_level_labels(self._image_labels_filepath)
    if self._box_labels_filepath:
      self._image2boxes = load_box_level_labels(self._box_labels_filepath)
    if self._hierarchy_filepath:
      self._hierarchy = load_class_hierarchy(self._hierarchy_filepath)
    if self._classes_filepath:
      class_descriptions = load_class_descriptions(self._classes_filepath)
      # Map class MIDs to dense integer ids in file order; must match the
      # names set by fill_class_names_in_tfds_info().
      self._mid2int = {mid: i for i, (mid, _) in enumerate(class_descriptions)}
  def process(self, element):
    filename, image_bytes = element
    # Image id is the basename without extension ("abc.jpg" -> "abc").
    image_id = os.path.basename(filename).split(".")[0]
    # Image-level annotations.
    objects = []
    if self._image2labels:
      for label, source, confidence in self._image2labels[image_id]:
        objects.append({
            "label": self._mid2int[label],
            "source": source,
            "confidence": confidence,
        })
    # Bounding box-level annotations.
    bobjects = []
    if self._image2boxes:
      for annotation in self._image2boxes[image_id]:
        label, xmin, xmax, ymin, ymax, is_group_of = annotation
        bbox = tfds.features.BBox(xmin=xmin, xmax=xmax, ymin=ymin, ymax=ymax)
        bobjects.append({
            "label": self._mid2int[label],
            "bbox": bbox,
            "is_group_of": is_group_of,
        })
    yield image_id, {
        "id": image_id,
        "image": image_bytes,
        "objects": objects,
        "bobjects": bobjects,
    }
def load_image_level_labels(filepath):
  """Returns a dictionary mapping image IDs to a list of image-level labels.

  Each list entry is a (label_mid, source, confidence) tuple. Two CSV
  layouts are accepted: 3 columns (id, label, confidence) where the source
  defaults to "verification", and 4 columns (id, source, label, confidence).
  """
  image2labels = collections.defaultdict(list)
  with tf.io.gfile.GFile(filepath, "r") as csvfile:
    reader = csv.reader(csvfile)
    next(reader)  # Skip header.
    for row in reader:
      if len(row) == 3:
        image_id, label, confidence = row
        source = "verification"
      elif len(row) == 4:
        image_id, source, label, confidence = row
      image2labels[image_id].append((label, source, float(confidence)))
  return image2labels
def load_box_level_labels(filepath):
  """Returns a dictionary mapping image IDs to a list of bounding box annotations.

  Each list entry is (label_mid, xmin, xmax, ymin, ymax, is_group_of).
  Two CSV layouts are accepted: a 7-column one with the fields in order,
  and a 13-column one with the same fields at fixed offsets (presumably
  the challenge-2019 export format -- confirm against the source CSVs).
  """
  image2boxes = collections.defaultdict(list)
  with tf.io.gfile.GFile(filepath, "r") as csvfile:
    reader = csv.reader(csvfile)
    next(reader)  # Skip header.
    for row in reader:
      if len(row) == 7:
        image_id, label, xmin_s, xmax_s, ymin_s, ymax_s, is_group_of_s = row
      elif len(row) == 13:
        image_id, label = row[0], row[2]
        xmin_s, xmax_s, ymin_s, ymax_s = row[4:8]
        is_group_of_s = row[10]
      xmin, xmax, ymin, ymax = map(float, (xmin_s, xmax_s, ymin_s, ymax_s))
      is_group_of = bool(int(is_group_of_s))
      image2boxes[image_id].append((label, xmin, xmax, ymin, ymax, is_group_of))
  return image2boxes
def load_class_hierarchy(filepath):
  """Loads and returns the JSON-encoded class hierarchy at filepath."""
  with tf.io.gfile.GFile(filepath, "r") as fobj:
    return json.load(fobj)
def load_class_descriptions(filepath):
  """Returns the class-descriptions CSV rows as a list of [mid, name] lists.

  Idiom fix: `[row for row in reader]` was an identity comprehension;
  `list(reader)` is equivalent and clearer.
  """
  with tf.io.gfile.GFile(filepath, "r") as csvfile:
    # Note: this file doesn't have any header.
    return list(csv.reader(csvfile))
def fill_class_names_in_tfds_info(classes_filepath, tfds_info_features):
  """Fills the class names in ClassLabel features.

  Uses the MIDs (first CSV column) as label names, in file order -- the same
  order CreateDetectionExampleFn uses to build its mid->int mapping.
  """
  class_descriptions = load_class_descriptions(classes_filepath)
  mids = [mid for mid, _ in class_descriptions]
  tfds_info_features["objects"]["label"].names = mids
  tfds_info_features["bobjects"]["label"].names = mids
| nilq/baby-python | python |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.