text
stringlengths 12
1.05M
| repo_name
stringlengths 5
86
| path
stringlengths 4
191
| language
stringclasses 1
value | license
stringclasses 15
values | size
int32 12
1.05M
| keyword
listlengths 1
23
| text_hash
stringlengths 64
64
|
|---|---|---|---|---|---|---|---|
dictionary = {
"000000": "XEROX CORPORATION",
"000001": "XEROX CORPORATION",
"000002": "XEROX CORPORATION",
"000003": "XEROX CORPORATION",
"000004": "XEROX CORPORATION",
"000005": "XEROX CORPORATION",
"000006": "XEROX CORPORATION",
"000007": "XEROX CORPORATION",
"000008": "XEROX CORPORATION",
"000009": "XEROX CORPORATION",
"00000A": "OMRON TATEISI ELECTRONICS CO.",
"00000B": "MATRIX CORPORATION",
"00000C": "CISCO SYSTEMS, INC.",
"00000D": "FIBRONICS LTD.",
"00000E": "FUJITSU LIMITED",
"00000F": "NEXT, INC.",
"000010": "SYTEK INC.",
"000011": "NORMEREL SYSTEMES",
"000012": "INFORMATION TECHNOLOGY LIMITED",
"000013": "CAMEX",
"000014": "NETRONIX",
"000015": "DATAPOINT CORPORATION",
"000016": "DU PONT PIXEL SYSTEMS",
"000017": "Oracle",
"000018": "WEBSTER COMPUTER CORPORATION",
"000019": "APPLIED DYNAMICS INTERNATIONAL",
"00001A": "ADVANCED MICRO DEVICES",
"00001B": "NOVELL INC.",
"00001C": "BELL TECHNOLOGIES",
"00001D": "CABLETRON SYSTEMS, INC.",
"00001E": "TELSIST INDUSTRIA ELECTRONICA",
"00001F": "Telco Systems, Inc.",
"000020": "DATAINDUSTRIER DIAB AB",
"000021": "SUREMAN COMP. & COMMUN. CORP.",
"000022": "VISUAL TECHNOLOGY INC.",
"000023": "ABB INDUSTRIAL SYSTEMS AB",
"000024": "CONNECT AS",
"000025": "RAMTEK CORP.",
"000026": "SHA-KEN CO., LTD.",
"000027": "JAPAN RADIO COMPANY",
"000028": "PRODIGY SYSTEMS CORPORATION",
"000029": "IMC NETWORKS CORP.",
"00002A": "TRW - SEDD/INP",
"00002B": "CRISP AUTOMATION, INC",
"00002C": "AUTOTOTE LIMITED",
"00002D": "CHROMATICS INC",
"00002E": "SOCIETE EVIRA",
"00002F": "TIMEPLEX INC.",
"000030": "VG LABORATORY SYSTEMS LTD",
"000031": "QPSX COMMUNICATIONS PTY LTD",
"000032": "Marconi plc",
"000033": "EGAN MACHINERY COMPANY",
"000034": "NETWORK RESOURCES CORPORATION",
"000035": "SPECTRAGRAPHICS CORPORATION",
"000036": "ATARI CORPORATION",
"000037": "OXFORD METRICS LIMITED",
"000038": "CSS LABS",
"000039": "TOSHIBA CORPORATION",
"00003A": "CHYRON CORPORATION",
"00003B": "i Controls, Inc.",
"00003C": "AUSPEX SYSTEMS INC.",
"00003D": "UNISYS",
"00003E": "SIMPACT",
"00003F": "SYNTREX, INC.",
"000040": "APPLICON, INC.",
"000041": "ICE CORPORATION",
"000042": "METIER MANAGEMENT SYSTEMS LTD.",
"000043": "MICRO TECHNOLOGY",
"000044": "CASTELLE CORPORATION",
"000045": "FORD AEROSPACE & COMM. CORP.",
"000046": "OLIVETTI NORTH AMERICA",
"000047": "NICOLET INSTRUMENTS CORP.",
"000048": "SEIKO EPSON CORPORATION",
"000049": "APRICOT COMPUTERS, LTD",
"00004A": "ADC CODENOLL TECHNOLOGY CORP.",
"00004B": "ICL DATA OY",
"00004C": "NEC CORPORATION",
"00004D": "DCI CORPORATION",
"00004E": "AMPEX CORPORATION",
"00004F": "LOGICRAFT, INC.",
"000050": "RADISYS CORPORATION",
"000051": "HOB ELECTRONIC GMBH & CO. KG",
"000052": "Intrusion.com, Inc.",
"000053": "COMPUCORP",
"000054": "Schnieder Electric",
"000055": "COMMISSARIAT A L`ENERGIE ATOM.",
"000056": "DR. B. STRUCK",
"000057": "SCITEX CORPORATION LTD.",
"000058": "RACORE COMPUTER PRODUCTS INC.",
"000059": "HELLIGE GMBH",
"00005A": "SysKonnect GmbH",
"00005B": "ELTEC ELEKTRONIK AG",
"00005C": "TELEMATICS INTERNATIONAL INC.",
"00005D": "CS TELECOM",
"00005E": "ICANN, IANA Department",
"00005F": "SUMITOMO ELECTRIC IND., LTD.",
"000060": "KONTRON ELEKTRONIK GMBH",
"000061": "GATEWAY COMMUNICATIONS",
"000062": "BULL HN INFORMATION SYSTEMS",
"000063": "BARCO CONTROL ROOMS GMBH",
"000064": "YOKOGAWA DIGITAL COMPUTER CORP",
"000065": "Network General Corporation",
"000066": "TALARIS SYSTEMS, INC.",
"000067": "SOFT * RITE, INC.",
"000068": "ROSEMOUNT CONTROLS",
"000069": "CONCORD COMMUNICATIONS INC",
"00006A": "COMPUTER CONSOLES INC.",
"00006B": "SILICON GRAPHICS INC./MIPS",
"00006C": "PRIVATE",
"00006D": "CRAY COMMUNICATIONS, LTD.",
"00006E": "ARTISOFT, INC.",
"00006F": "Madge Ltd.",
"000070": "HCL LIMITED",
"000071": "ADRA SYSTEMS INC.",
"000072": "MINIWARE TECHNOLOGY",
"000073": "SIECOR CORPORATION",
"000074": "RICOH COMPANY LTD.",
"000075": "Nortel Networks",
"000076": "ABEKAS VIDEO SYSTEM",
"000077": "INTERPHASE CORPORATION",
"000078": "LABTAM LIMITED",
"000079": "NETWORTH INCORPORATED",
"00007A": "DANA COMPUTER INC.",
"00007B": "RESEARCH MACHINES",
"00007C": "AMPERE INCORPORATED",
"00007D": "Oracle Corporation",
"00007E": "CLUSTRIX CORPORATION",
"00007F": "LINOTYPE-HELL AG",
"000080": "CRAY COMMUNICATIONS A/S",
"000081": "BAY NETWORKS",
"000082": "LECTRA SYSTEMES SA",
"000083": "TADPOLE TECHNOLOGY PLC",
"000084": "SUPERNET",
"000085": "CANON INC.",
"000086": "MEGAHERTZ CORPORATION",
"000087": "HITACHI, LTD.",
"000088": "Brocade Communications Systems, Inc.",
"000089": "CAYMAN SYSTEMS INC.",
"00008A": "DATAHOUSE INFORMATION SYSTEMS",
"00008B": "INFOTRON",
"00008C": "Alloy Computer Products (Australia) Pty Ltd",
"00008D": "Cryptek Inc.",
"00008E": "SOLBOURNE COMPUTER, INC.",
"00008F": "Raytheon",
"000090": "MICROCOM",
"000091": "ANRITSU CORPORATION",
"000092": "COGENT DATA TECHNOLOGIES",
"000093": "PROTEON INC.",
"000094": "ASANTE TECHNOLOGIES",
"000095": "SONY TEKTRONIX CORP.",
"000096": "MARCONI ELECTRONICS LTD.",
"000097": "EMC Corporation",
"000098": "CROSSCOMM CORPORATION",
"000099": "MTX, INC.",
"00009A": "RC COMPUTER A/S",
"00009B": "INFORMATION INTERNATIONAL, INC",
"00009C": "ROLM MIL-SPEC COMPUTERS",
"00009D": "LOCUS COMPUTING CORPORATION",
"00009E": "MARLI S.A.",
"00009F": "AMERISTAR TECHNOLOGIES INC.",
"0000A0": "SANYO Electric Co., Ltd.",
"0000A1": "MARQUETTE ELECTRIC CO.",
"0000A2": "BAY NETWORKS",
"0000A3": "NETWORK APPLICATION TECHNOLOGY",
"0000A4": "ACORN COMPUTERS LIMITED",
"0000A5": "Tattile SRL",
"0000A6": "NETWORK GENERAL CORPORATION",
"0000A7": "NETWORK COMPUTING DEVICES INC.",
"0000A8": "STRATUS COMPUTER INC.",
"0000A9": "NETWORK SYSTEMS CORP.",
"0000AA": "XEROX CORPORATION",
"0000AB": "LOGIC MODELING CORPORATION",
"0000AC": "CONWARE COMPUTER CONSULTING",
"0000AD": "BRUKER INSTRUMENTS INC.",
"0000AE": "DASSAULT ELECTRONIQUE",
"0000AF": "NUCLEAR DATA INSTRUMENTATION",
"0000B0": "RND-RAD NETWORK DEVICES",
"0000B1": "ALPHA MICROSYSTEMS INC.",
"0000B2": "TELEVIDEO SYSTEMS, INC.",
"0000B3": "CIMLINC INCORPORATED",
"0000B4": "EDIMAX COMPUTER COMPANY",
"0000B5": "DATABILITY SOFTWARE SYS. INC.",
"0000B6": "MICRO-MATIC RESEARCH",
"0000B7": "DOVE COMPUTER CORPORATION",
"0000B8": "SEIKOSHA CO., LTD.",
"0000B9": "MCDONNELL DOUGLAS COMPUTER SYS",
"0000BA": "SIIG, INC.",
"0000BB": "TRI-DATA",
"0000BC": "Rockwell Automation",
"0000BD": "MITSUBISHI CABLE COMPANY",
"0000BE": "THE NTI GROUP",
"0000BF": "SYMMETRIC COMPUTER SYSTEMS",
"0000C0": "WESTERN DIGITAL CORPORATION",
"0000C1": "Madge Ltd.",
"0000C2": "INFORMATION PRESENTATION TECH.",
"0000C3": "HARRIS CORP COMPUTER SYS DIV",
"0000C4": "WATERS DIV. OF MILLIPORE",
"0000C5": "FARALLON COMPUTING/NETOPIA",
"0000C6": "EON SYSTEMS",
"0000C7": "ARIX CORPORATION",
"0000C8": "ALTOS COMPUTER SYSTEMS",
"0000C9": "Emulex Corporation",
"0000CA": "ARRIS International",
"0000CB": "COMPU-SHACK ELECTRONIC GMBH",
"0000CC": "DENSAN CO., LTD.",
"0000CD": "Allied Telesis Labs Ltd",
"0000CE": "MEGADATA CORP.",
"0000CF": "HAYES MICROCOMPUTER PRODUCTS",
"0000D0": "DEVELCON ELECTRONICS LTD.",
"0000D1": "ADAPTEC INCORPORATED",
"0000D2": "SBE, INC.",
"0000D3": "WANG LABORATORIES INC.",
"0000D4": "PURE DATA LTD.",
"0000D5": "MICROGNOSIS INTERNATIONAL",
"0000D6": "PUNCH LINE HOLDING",
"0000D7": "DARTMOUTH COLLEGE",
"0000D8": "NOVELL, INC.",
"0000D9": "NIPPON TELEGRAPH & TELEPHONE",
"0000DA": "ATEX",
"0000DB": "British Telecommunications plc",
"0000DC": "HAYES MICROCOMPUTER PRODUCTS",
"0000DD": "TCL INCORPORATED",
"0000DE": "CETIA",
"0000DF": "BELL & HOWELL PUB SYS DIV",
"0000E0": "QUADRAM CORP.",
"0000E1": "GRID SYSTEMS",
"0000E2": "ACER TECHNOLOGIES CORP.",
"0000E3": "INTEGRATED MICRO PRODUCTS LTD",
"0000E4": "IN2 GROUPE INTERTECHNIQUE",
"0000E5": "SIGMEX LTD.",
"0000E6": "APTOR PRODUITS DE COMM INDUST",
"0000E7": "STAR GATE TECHNOLOGIES",
"0000E8": "ACCTON TECHNOLOGY CORP.",
"0000E9": "ISICAD, INC.",
"0000EA": "UPNOD AB",
"0000EB": "MATSUSHITA COMM. IND. CO. LTD.",
"0000EC": "MICROPROCESS",
"0000ED": "APRIL",
"0000EE": "NETWORK DESIGNERS, LTD.",
"0000EF": "KTI",
"0000F0": "SAMSUNG ELECTRONICS CO., LTD.",
"0000F1": "MAGNA COMPUTER CORPORATION",
"0000F2": "SPIDER COMMUNICATIONS",
"0000F3": "GANDALF DATA LIMITED",
"0000F4": "Allied Telesis",
"0000F5": "DIAMOND SALES LIMITED",
"0000F6": "APPLIED MICROSYSTEMS CORP.",
"0000F7": "YOUTH KEEP ENTERPRISE CO LTD",
"0000F8": "DIGITAL EQUIPMENT CORPORATION",
"0000F9": "QUOTRON SYSTEMS INC.",
"0000FA": "MICROSAGE COMPUTER SYSTEMS INC",
"0000FB": "RECHNER ZUR KOMMUNIKATION",
"0000FC": "MEIKO",
"0000FD": "HIGH LEVEL HARDWARE",
"0000FE": "ANNAPOLIS MICRO SYSTEMS",
"0000FF": "CAMTEC ELECTRONICS LTD.",
"000100": "EQUIP'TRANS",
"000101": "PRIVATE",
"000102": "3COM CORPORATION",
"000103": "3COM CORPORATION",
"000104": "DVICO Co., Ltd.",
"000105": "Beckhoff Automation GmbH",
"000106": "Tews Datentechnik GmbH",
"000107": "Leiser GmbH",
"000108": "AVLAB Technology, Inc.",
"000109": "Nagano Japan Radio Co., Ltd.",
"00010A": "CIS TECHNOLOGY INC.",
"00010B": "Space CyberLink, Inc.",
"00010C": "System Talks Inc.",
"00010D": "CORECO, INC.",
"00010E": "Bri-Link Technologies Co., Ltd",
"00010F": "Brocade Communications Systems, Inc.",
"000110": "Gotham Networks",
"000111": "iDigm Inc.",
"000112": "Shark Multimedia Inc.",
"000113": "OLYMPUS CORPORATION",
"000114": "KANDA TSUSHIN KOGYO CO., LTD.",
"000115": "EXTRATECH CORPORATION",
"000116": "Netspect Technologies, Inc.",
"000117": "CANAL +",
"000118": "EZ Digital Co., Ltd.",
"000119": "RTUnet (Australia)",
"00011A": "Hoffmann und Burmeister GbR",
"00011B": "Unizone Technologies, Inc.",
"00011C": "Universal Talkware Corporation",
"00011D": "Centillium Communications",
"00011E": "Precidia Technologies, Inc.",
"00011F": "RC Networks, Inc.",
"000120": "OSCILLOQUARTZ S.A.",
"000121": "Watchguard Technologies, Inc.",
"000122": "Trend Communications, Ltd.",
"000123": "DIGITAL ELECTRONICS CORP.",
"000124": "Acer Incorporated",
"000125": "YAESU MUSEN CO., LTD.",
"000126": "PAC Labs",
"000127": "OPEN Networks Pty Ltd",
"000128": "EnjoyWeb, Inc.",
"000129": "DFI Inc.",
"00012A": "Telematica Sistems Inteligente",
"00012B": "TELENET Co., Ltd.",
"00012C": "Aravox Technologies, Inc.",
"00012D": "Komodo Technology",
"00012E": "PC Partner Ltd.",
"00012F": "Twinhead International Corp",
"000130": "Extreme Networks",
"000131": "Bosch Security Systems, Inc.",
"000132": "Dranetz - BMI",
"000133": "KYOWA Electronic Instruments C",
"000134": "Selectron Systems AG",
"000135": "KDC Corp.",
"000136": "CyberTAN Technology, Inc.",
"000137": "IT Farm Corporation",
"000138": "XAVi Technologies Corp.",
"000139": "Point Multimedia Systems",
"00013A": "SHELCAD COMMUNICATIONS, LTD.",
"00013B": "BNA SYSTEMS",
"00013C": "TIW SYSTEMS",
"00013D": "RiscStation Ltd.",
"00013E": "Ascom Tateco AB",
"00013F": "Neighbor World Co., Ltd.",
"000140": "Sendtek Corporation",
"000141": "CABLE PRINT",
"000142": "CISCO SYSTEMS, INC.",
"000143": "CISCO SYSTEMS, INC.",
"000144": "EMC Corporation",
"000145": "WINSYSTEMS, INC.",
"000146": "Tesco Controls, Inc.",
"000147": "Zhone Technologies",
"000148": "X-traWeb Inc.",
"000149": "T.D.T. Transfer Data Test GmbH",
"00014A": "Sony Corporation",
"00014B": "Ennovate Networks, Inc.",
"00014C": "Berkeley Process Control",
"00014D": "Shin Kin Enterprises Co., Ltd",
"00014E": "WIN Enterprises, Inc.",
"00014F": "ADTRAN INC",
"000150": "GILAT COMMUNICATIONS, LTD.",
"000151": "Ensemble Communications",
"000152": "CHROMATEK INC.",
"000153": "ARCHTEK TELECOM CORPORATION",
"000154": "G3M Corporation",
"000155": "Promise Technology, Inc.",
"000156": "FIREWIREDIRECT.COM, INC.",
"000157": "SYSWAVE CO., LTD",
"000158": "Electro Industries/Gauge Tech",
"000159": "S1 Corporation",
"00015A": "Digital Video Broadcasting",
"00015B": "ITALTEL S.p.A/RF-UP-I",
"00015C": "CADANT INC.",
"00015D": "Oracle Corporation",
"00015E": "BEST TECHNOLOGY CO., LTD.",
"00015F": "DIGITAL DESIGN GmbH",
"000160": "ELMEX Co., LTD.",
"000161": "Meta Machine Technology",
"000162": "Cygnet Technologies, Inc.",
"000163": "CISCO SYSTEMS, INC.",
"000164": "CISCO SYSTEMS, INC.",
"000165": "AirSwitch Corporation",
"000166": "TC GROUP A/S",
"000167": "HIOKI E.E. CORPORATION",
"000168": "VITANA CORPORATION",
"000169": "Celestix Networks Pte Ltd.",
"00016A": "ALITEC",
"00016B": "LightChip, Inc.",
"00016C": "FOXCONN",
"00016D": "CarrierComm Inc.",
"00016E": "Conklin Corporation",
"00016F": "Inkel Corp.",
"000170": "ESE Embedded System Engineer'g",
"000171": "Allied Data Technologies",
"000172": "TechnoLand Co., LTD.",
"000173": "AMCC",
"000174": "CyberOptics Corporation",
"000175": "Radiant Communications Corp.",
"000176": "Orient Silver Enterprises",
"000177": "EDSL",
"000178": "MARGI Systems, Inc.",
"000179": "WIRELESS TECHNOLOGY, INC.",
"00017A": "Chengdu Maipu Electric Industrial Co., Ltd.",
"00017B": "Heidelberger Druckmaschinen AG",
"00017C": "AG-E GmbH",
"00017D": "ThermoQuest",
"00017E": "ADTEK System Science Co., Ltd.",
"00017F": "Experience Music Project",
"000180": "AOpen, Inc.",
"000181": "Nortel Networks",
"000182": "DICA TECHNOLOGIES AG",
"000183": "ANITE TELECOMS",
"000184": "SIEB & MEYER AG",
"000185": "Hitachi Aloka Medical, Ltd.",
"000186": "Uwe Disch",
"000187": "I2SE GmbH",
"000188": "LXCO Technologies ag",
"000189": "Refraction Technology, Inc.",
"00018A": "ROI COMPUTER AG",
"00018B": "NetLinks Co., Ltd.",
"00018C": "Mega Vision",
"00018D": "AudeSi Technologies",
"00018E": "Logitec Corporation",
"00018F": "Kenetec, Inc.",
"000190": "SMK-M",
"000191": "SYRED Data Systems",
"000192": "Texas Digital Systems",
"000193": "Hanbyul Telecom Co., Ltd.",
"000194": "Capital Equipment Corporation",
"000195": "Sena Technologies, Inc.",
"000196": "CISCO SYSTEMS, INC.",
"000197": "CISCO SYSTEMS, INC.",
"000198": "Darim Vision",
"000199": "HeiSei Electronics",
"00019A": "LEUNIG GmbH",
"00019B": "Kyoto Microcomputer Co., Ltd.",
"00019C": "JDS Uniphase Inc.",
"00019D": "E-Control Systems, Inc.",
"00019E": "ESS Technology, Inc.",
"00019F": "ReadyNet",
"0001A0": "Infinilink Corporation",
"0001A1": "Mag-Tek, Inc.",
"0001A2": "Logical Co., Ltd.",
"0001A3": "GENESYS LOGIC, INC.",
"0001A4": "Microlink Corporation",
"0001A5": "Nextcomm, Inc.",
"0001A6": "Scientific-Atlanta Arcodan A/S",
"0001A7": "UNEX TECHNOLOGY CORPORATION",
"0001A8": "Welltech Computer Co., Ltd.",
"0001A9": "BMW AG",
"0001AA": "Airspan Communications, Ltd.",
"0001AB": "Main Street Networks",
"0001AC": "Sitara Networks, Inc.",
"0001AD": "Coach Master International d.b.a. CMI Worldwide, Inc.",
"0001AE": "Trex Enterprises",
"0001AF": "Emerson Network Power",
"0001B0": "Fulltek Technology Co., Ltd.",
"0001B1": "General Bandwidth",
"0001B2": "Digital Processing Systems, Inc.",
"0001B3": "Precision Electronic Manufacturing",
"0001B4": "Wayport, Inc.",
"0001B5": "Turin Networks, Inc.",
"0001B6": "SAEJIN T&M Co., Ltd.",
"0001B7": "Centos, Inc.",
"0001B8": "Netsensity, Inc.",
"0001B9": "SKF Condition Monitoring",
"0001BA": "IC-Net, Inc.",
"0001BB": "Frequentis",
"0001BC": "Brains Corporation",
"0001BD": "Peterson Electro-Musical Products, Inc.",
"0001BE": "Gigalink Co., Ltd.",
"0001BF": "Teleforce Co., Ltd.",
"0001C0": "CompuLab, Ltd.",
"0001C1": "Vitesse Semiconductor Corporation",
"0001C2": "ARK Research Corp.",
"0001C3": "Acromag, Inc.",
"0001C4": "NeoWave, Inc.",
"0001C5": "Simpler Networks",
"0001C6": "Quarry Technologies",
"0001C7": "CISCO SYSTEMS, INC.",
"0001C8": "CONRAD CORP.",
"0001C9": "CISCO SYSTEMS, INC.",
"0001CA": "Geocast Network Systems, Inc.",
"0001CB": "EVR",
"0001CC": "Japan Total Design Communication Co., Ltd.",
"0001CD": "ARtem",
"0001CE": "Custom Micro Products, Ltd.",
"0001CF": "Alpha Data Parallel Systems, Ltd.",
"0001D0": "VitalPoint, Inc.",
"0001D1": "CoNet Communications, Inc.",
"0001D2": "inXtron, Inc.",
"0001D3": "PAXCOMM, Inc.",
"0001D4": "Leisure Time, Inc.",
"0001D5": "HAEDONG INFO & COMM CO., LTD",
"0001D6": "manroland AG",
"0001D7": "F5 Networks, Inc.",
"0001D8": "Teltronics, Inc.",
"0001D9": "Sigma, Inc.",
"0001DA": "WINCOMM Corporation",
"0001DB": "Freecom Technologies GmbH",
"0001DC": "Activetelco",
"0001DD": "Avail Networks",
"0001DE": "Trango Systems, Inc.",
"0001DF": "ISDN Communications, Ltd.",
"0001E0": "Fast Systems, Inc.",
"0001E1": "Kinpo Electronics, Inc.",
"0001E2": "Ando Electric Corporation",
"0001E3": "Siemens AG",
"0001E4": "Sitera, Inc.",
"0001E5": "Supernet, Inc.",
"0001E6": "Hewlett-Packard Company",
"0001E7": "Hewlett-Packard Company",
"0001E8": "Force10 Networks, Inc.",
"0001E9": "Litton Marine Systems B.V.",
"0001EA": "Cirilium Corp.",
"0001EB": "C-COM Corporation",
"0001EC": "Ericsson Group",
"0001ED": "SETA Corp.",
"0001EE": "Comtrol Europe, Ltd.",
"0001EF": "Camtel Technology Corp.",
"0001F0": "Tridium, Inc.",
"0001F1": "Innovative Concepts, Inc.",
"0001F2": "Mark of the Unicorn, Inc.",
"0001F3": "QPS, Inc.",
"0001F4": "Enterasys Networks",
"0001F5": "ERIM S.A.",
"0001F6": "Association of Musical Electronics Industry",
"0001F7": "Image Display Systems, Inc.",
"0001F8": "Texio Technology Corporation",
"0001F9": "TeraGlobal Communications Corp.",
"0001FA": "HOROSCAS",
"0001FB": "DoTop Technology, Inc.",
"0001FC": "Keyence Corporation",
"0001FD": "Digital Voice Systems, Inc.",
"0001FE": "DIGITAL EQUIPMENT CORPORATION",
"0001FF": "Data Direct Networks, Inc.",
"000200": "Net & Sys Co., Ltd.",
"000201": "IFM Electronic gmbh",
"000202": "Amino Communications, Ltd.",
"000203": "Woonsang Telecom, Inc.",
"000204": "Bodmann Industries Elektronik GmbH",
"000205": "Hitachi Denshi, Ltd.",
"000206": "Telital R&D Denmark A/S",
"000207": "VisionGlobal Network Corp.",
"000208": "Unify Networks, Inc.",
"000209": "Shenzhen SED Information Technology Co., Ltd.",
"00020A": "Gefran Spa",
"00020B": "Native Networks, Inc.",
"00020C": "Metro-Optix",
"00020D": "Micronpc.com",
"00020E": "ECI Telecom, Ltd",
"00020F": "AATR",
"000210": "Fenecom",
"000211": "Nature Worldwide Technology Corp.",
"000212": "SierraCom",
"000213": "S.D.E.L.",
"000214": "DTVRO",
"000215": "Cotas Computer Technology A/B",
"000216": "CISCO SYSTEMS, INC.",
"000217": "CISCO SYSTEMS, INC.",
"000218": "Advanced Scientific Corp",
"000219": "Paralon Technologies",
"00021A": "Zuma Networks",
"00021B": "Kollmorgen-Servotronix",
"00021C": "Network Elements, Inc.",
"00021D": "Data General Communication Ltd.",
"00021E": "SIMTEL S.R.L.",
"00021F": "Aculab PLC",
"000220": "CANON FINETECH INC.",
"000221": "DSP Application, Ltd.",
"000222": "Chromisys, Inc.",
"000223": "ClickTV",
"000224": "C-COR",
"000225": "One Stop Systems",
"000226": "XESystems, Inc.",
"000227": "ESD Electronic System Design GmbH",
"000228": "Necsom, Ltd.",
"000229": "Adtec Corporation",
"00022A": "Asound Electronic",
"00022B": "SAXA, Inc.",
"00022C": "ABB Bomem, Inc.",
"00022D": "Agere Systems",
"00022E": "TEAC Corp. R& D",
"00022F": "P-Cube, Ltd.",
"000230": "Intersoft Electronics",
"000231": "Ingersoll-Rand",
"000232": "Avision, Inc.",
"000233": "Mantra Communications, Inc.",
"000234": "Imperial Technology, Inc.",
"000235": "Paragon Networks International",
"000236": "INIT GmbH",
"000237": "Cosmo Research Corp.",
"000238": "Serome Technology, Inc.",
"000239": "Visicom",
"00023A": "ZSK Stickmaschinen GmbH",
"00023B": "Ericsson",
"00023C": "Creative Technology, Ltd.",
"00023D": "Cisco Systems, Inc.",
"00023E": "Selta Telematica S.p.a",
"00023F": "Compal Electronics, Inc.",
"000240": "Seedek Co., Ltd.",
"000241": "Amer.com",
"000242": "Videoframe Systems",
"000243": "Raysis Co., Ltd.",
"000244": "SURECOM Technology Co.",
"000245": "Lampus Co, Ltd.",
"000246": "All-Win Tech Co., Ltd.",
"000247": "Great Dragon Information Technology (Group) Co., Ltd.",
"000248": "Pilz GmbH & Co.",
"000249": "Aviv Infocom Co, Ltd.",
"00024A": "CISCO SYSTEMS, INC.",
"00024B": "CISCO SYSTEMS, INC.",
"00024C": "SiByte, Inc.",
"00024D": "Mannesman Dematic Colby Pty. Ltd.",
"00024E": "Datacard Group",
"00024F": "IPM Datacom S.R.L.",
"000250": "Geyser Networks, Inc.",
"000251": "Soma Networks, Inc.",
"000252": "Carrier Corporation",
"000253": "Televideo, Inc.",
"000254": "WorldGate",
"000255": "IBM Corp",
"000256": "Alpha Processor, Inc.",
"000257": "Microcom Corp.",
"000258": "Flying Packets Communications",
"000259": "Tsann Kuen China (Shanghai)Enterprise Co., Ltd. IT Group",
"00025A": "Catena Networks",
"00025B": "Cambridge Silicon Radio",
"00025C": "SCI Systems (Kunshan) Co., Ltd.",
"00025D": "Calix Networks",
"00025E": "High Technology Ltd",
"00025F": "Nortel Networks",
"000260": "Accordion Networks, Inc.",
"000261": "Tilgin AB",
"000262": "Soyo Group Soyo Com Tech Co., Ltd",
"000263": "UPS Manufacturing SRL",
"000264": "AudioRamp.com",
"000265": "Virditech Co. Ltd.",
"000266": "Thermalogic Corporation",
"000267": "NODE RUNNER, INC.",
"000268": "Harris Government Communications",
"000269": "Nadatel Co., Ltd",
"00026A": "Cocess Telecom Co., Ltd.",
"00026B": "BCM Computers Co., Ltd.",
"00026C": "Philips CFT",
"00026D": "Adept Telecom",
"00026E": "NeGeN Access, Inc.",
"00026F": "Senao International Co., Ltd.",
"000270": "Crewave Co., Ltd.",
"000271": "Zhone Technologies",
"000272": "CC&C Technologies, Inc.",
"000273": "Coriolis Networks",
"000274": "Tommy Technologies Corp.",
"000275": "SMART Technologies, Inc.",
"000276": "Primax Electronics Ltd.",
"000277": "Cash Systemes Industrie",
"000278": "Samsung Electro-Mechanics Co., Ltd.",
"000279": "Control Applications, Ltd.",
"00027A": "IOI Technology Corporation",
"00027B": "Amplify Net, Inc.",
"00027C": "Trilithic, Inc.",
"00027D": "CISCO SYSTEMS, INC.",
"00027E": "CISCO SYSTEMS, INC.",
"00027F": "ask-technologies.com",
"000280": "Mu Net, Inc.",
"000281": "Madge Ltd.",
"000282": "ViaClix, Inc.",
"000283": "Spectrum Controls, Inc.",
"000284": "AREVA T&D",
"000285": "Riverstone Networks",
"000286": "Occam Networks",
"000287": "Adapcom",
"000288": "GLOBAL VILLAGE COMMUNICATION",
"000289": "DNE Technologies",
"00028A": "Ambit Microsystems Corporation",
"00028B": "VDSL Systems OY",
"00028C": "Micrel-Synergy Semiconductor",
"00028D": "Movita Technologies, Inc.",
"00028E": "Rapid 5 Networks, Inc.",
"00028F": "Globetek, Inc.",
"000290": "Woorigisool, Inc.",
"000291": "Open Network Co., Ltd.",
"000292": "Logic Innovations, Inc.",
"000293": "Solid Data Systems",
"000294": "Tokyo Sokushin Co., Ltd.",
"000295": "IP.Access Limited",
"000296": "Lectron Co,. Ltd.",
"000297": "C-COR.net",
"000298": "Broadframe Corporation",
"000299": "Apex, Inc.",
"00029A": "Storage Apps",
"00029B": "Kreatel Communications AB",
"00029C": "3COM",
"00029D": "Merix Corp.",
"00029E": "Information Equipment Co., Ltd.",
"00029F": "L-3 Communication Aviation Recorders",
"0002A0": "Flatstack Ltd.",
"0002A1": "World Wide Packets",
"0002A2": "Hilscher GmbH",
"0002A3": "ABB Switzerland Ltd, Power Systems",
"0002A4": "AddPac Technology Co., Ltd.",
"0002A5": "Hewlett-Packard Company",
"0002A6": "Effinet Systems Co., Ltd.",
"0002A7": "Vivace Networks",
"0002A8": "Air Link Technology",
"0002A9": "RACOM, s.r.o.",
"0002AA": "PLcom Co., Ltd.",
"0002AB": "CTC Union Technologies Co., Ltd.",
"0002AC": "3PAR data",
"0002AD": "HOYA Corporation",
"0002AE": "Scannex Electronics Ltd.",
"0002AF": "TeleCruz Technology, Inc.",
"0002B0": "Hokubu Communication & Industrial Co., Ltd.",
"0002B1": "Anritsu, Ltd.",
"0002B2": "Cablevision",
"0002B3": "Intel Corporation",
"0002B4": "DAPHNE",
"0002B5": "Avnet, Inc.",
"0002B6": "Acrosser Technology Co., Ltd.",
"0002B7": "Watanabe Electric Industry Co., Ltd.",
"0002B8": "WHI KONSULT AB",
"0002B9": "CISCO SYSTEMS, INC.",
"0002BA": "CISCO SYSTEMS, INC.",
"0002BB": "Continuous Computing Corp",
"0002BC": "LVL 7 Systems, Inc.",
"0002BD": "Bionet Co., Ltd.",
"0002BE": "Totsu Engineering, Inc.",
"0002BF": "dotRocket, Inc.",
"0002C0": "Bencent Tzeng Industry Co., Ltd.",
"0002C1": "Innovative Electronic Designs, Inc.",
"0002C2": "Net Vision Telecom",
"0002C3": "Arelnet Ltd.",
"0002C4": "Vector International BVBA",
"0002C5": "Evertz Microsystems Ltd.",
"0002C6": "Data Track Technology PLC",
"0002C7": "ALPS ELECTRIC Co., Ltd.",
"0002C8": "Technocom Communications Technology (pte) Ltd",
"0002C9": "Mellanox Technologies",
"0002CA": "EndPoints, Inc.",
"0002CB": "TriState Ltd.",
"0002CC": "M.C.C.I",
"0002CD": "TeleDream, Inc.",
"0002CE": "FoxJet, Inc.",
"0002CF": "ZyGate Communications, Inc.",
"0002D0": "Comdial Corporation",
"0002D1": "Vivotek, Inc.",
"0002D2": "Workstation AG",
"0002D3": "NetBotz, Inc.",
"0002D4": "PDA Peripherals, Inc.",
"0002D5": "ACR",
"0002D6": "NICE Systems",
"0002D7": "EMPEG Ltd",
"0002D8": "BRECIS Communications Corporation",
"0002D9": "Reliable Controls",
"0002DA": "ExiO Communications, Inc.",
"0002DB": "NETSEC",
"0002DC": "Fujitsu General Limited",
"0002DD": "Bromax Communications, Ltd.",
"0002DE": "Astrodesign, Inc.",
"0002DF": "Net Com Systems, Inc.",
"0002E0": "ETAS GmbH",
"0002E1": "Integrated Network Corporation",
"0002E2": "NDC Infared Engineering",
"0002E3": "LITE-ON Communications, Inc.",
"0002E4": "JC HYUN Systems, Inc.",
"0002E5": "Timeware Ltd.",
"0002E6": "Gould Instrument Systems, Inc.",
"0002E7": "CAB GmbH & Co KG",
"0002E8": "E.D.&A.",
"0002E9": "CS Systemes De Securite - C3S",
"0002EA": "Focus Enhancements",
"0002EB": "Pico Communications",
"0002EC": "Maschoff Design Engineering",
"0002ED": "DXO Telecom Co., Ltd.",
"0002EE": "Nokia Danmark A/S",
"0002EF": "CCC Network Systems Group Ltd.",
"0002F0": "AME Optimedia Technology Co., Ltd.",
"0002F1": "Pinetron Co., Ltd.",
"0002F2": "eDevice, Inc.",
"0002F3": "Media Serve Co., Ltd.",
"0002F4": "PCTEL, Inc.",
"0002F5": "VIVE Synergies, Inc.",
"0002F6": "Equipe Communications",
"0002F7": "ARM",
"0002F8": "SEAKR Engineering, Inc.",
"0002F9": "MIMOS Berhad",
"0002FA": "DX Antenna Co., Ltd.",
"0002FB": "Baumuller Aulugen-Systemtechnik GmbH",
"0002FC": "CISCO SYSTEMS, INC.",
"0002FD": "CISCO SYSTEMS, INC.",
"0002FE": "Viditec, Inc.",
"0002FF": "Handan BroadInfoCom",
"000300": "Barracuda Networks, Inc.",
"000301": "EXFO",
"000302": "Charles Industries, Ltd.",
"000303": "JAMA Electronics Co., Ltd.",
"000304": "Pacific Broadband Communications",
"000305": "MSC Vertriebs GmbH",
"000306": "Fusion In Tech Co., Ltd.",
"000307": "Secure Works, Inc.",
"000308": "AM Communications, Inc.",
"000309": "Texcel Technology PLC",
"00030A": "Argus Technologies",
"00030B": "Hunter Technology, Inc.",
"00030C": "Telesoft Technologies Ltd.",
"00030D": "Uniwill Computer Corp.",
"00030E": "Core Communications Co., Ltd.",
"00030F": "Digital China (Shanghai) Networks Ltd.",
"000310": "E-Globaledge Corporation",
"000311": "Micro Technology Co., Ltd.",
"000312": "TR-Systemtechnik GmbH",
"000313": "Access Media SPA",
"000314": "Teleware Network Systems",
"000315": "Cidco Incorporated",
"000316": "Nobell Communications, Inc.",
"000317": "Merlin Systems, Inc.",
"000318": "Cyras Systems, Inc.",
"000319": "Infineon AG",
"00031A": "Beijing Broad Telecom Ltd., China",
"00031B": "Cellvision Systems, Inc.",
"00031C": "Svenska Hardvarufabriken AB",
"00031D": "Taiwan Commate Computer, Inc.",
"00031E": "Optranet, Inc.",
"00031F": "Condev Ltd.",
"000320": "Xpeed, Inc.",
"000321": "Reco Research Co., Ltd.",
"000322": "IDIS Co., Ltd.",
"000323": "Cornet Technology, Inc.",
"000324": "SANYO Consumer Electronics Co., Ltd.",
"000325": "Arima Computer Corp.",
"000326": "Iwasaki Information Systems Co., Ltd.",
"000327": "ACT'L",
"000328": "Mace Group, Inc.",
"000329": "F3, Inc.",
"00032A": "UniData Communication Systems, Inc.",
"00032B": "GAI Datenfunksysteme GmbH",
"00032C": "ABB Switzerland Ltd",
"00032D": "IBASE Technology, Inc.",
"00032E": "Scope Information Management, Ltd.",
"00032F": "Global Sun Technology, Inc.",
"000330": "Imagenics, Co., Ltd.",
"000331": "CISCO SYSTEMS, INC.",
"000332": "CISCO SYSTEMS, INC.",
"000333": "Digitel Co., Ltd.",
"000334": "Newport Electronics",
"000335": "Mirae Technology",
"000336": "Zetes Technologies",
"000337": "Vaone, Inc.",
"000338": "Oak Technology",
"000339": "Eurologic Systems, Ltd.",
"00033A": "Silicon Wave, Inc.",
"00033B": "TAMI Tech Co., Ltd.",
"00033C": "Daiden Co., Ltd.",
"00033D": "ILSHin Lab",
"00033E": "Tateyama System Laboratory Co., Ltd.",
"00033F": "BigBand Networks, Ltd.",
"000340": "Floware Wireless Systems, Ltd.",
"000341": "Axon Digital Design",
"000342": "Nortel Networks",
"000343": "Martin Professional A/S",
"000344": "Tietech.Co., Ltd.",
"000345": "Routrek Networks Corporation",
"000346": "Hitachi Kokusai Electric, Inc.",
"000347": "Intel Corporation",
"000348": "Norscan Instruments, Ltd.",
"000349": "Vidicode Datacommunicatie B.V.",
"00034A": "RIAS Corporation",
"00034B": "Nortel Networks",
"00034C": "Shanghai DigiVision Technology Co., Ltd.",
"00034D": "Chiaro Networks, Ltd.",
"00034E": "Pos Data Company, Ltd.",
"00034F": "Sur-Gard Security",
"000350": "BTICINO SPA",
"000351": "Diebold, Inc.",
"000352": "Colubris Networks",
"000353": "Mitac, Inc.",
"000354": "Fiber Logic Communications",
"000355": "TeraBeam Internet Systems",
"000356": "Wincor Nixdorf International GmbH",
"000357": "Intervoice-Brite, Inc.",
"000358": "Hanyang Digitech Co., Ltd.",
"000359": "DigitalSis",
"00035A": "Photron Limited",
"00035B": "BridgeWave Communications",
"00035C": "Saint Song Corp.",
"00035D": "Bosung Hi-Net Co., Ltd.",
"00035E": "Metropolitan Area Networks, Inc.",
"00035F": "Pr\u00fcftechnik Condition Monitoring GmbH & Co. KG",
"000360": "PAC Interactive Technology, Inc.",
"000361": "Widcomm, Inc.",
"000362": "Vodtel Communications, Inc.",
"000363": "Miraesys Co., Ltd.",
"000364": "Scenix Semiconductor, Inc.",
"000365": "Kira Information & Communications, Ltd.",
"000366": "ASM Pacific Technology",
"000367": "Jasmine Networks, Inc.",
"000368": "Embedone Co., Ltd.",
"000369": "Nippon Antenna Co., Ltd.",
"00036A": "Mainnet, Ltd.",
"00036B": "CISCO SYSTEMS, INC.",
"00036C": "CISCO SYSTEMS, INC.",
"00036D": "Runtop, Inc.",
"00036E": "Nicon Systems (Pty) Limited",
"00036F": "Telsey SPA",
"000370": "NXTV, Inc.",
"000371": "Acomz Networks Corp.",
"000372": "ULAN",
"000373": "Aselsan A.S",
"000374": "Control Microsystems",
"000375": "NetMedia, Inc.",
"000376": "Graphtec Technology, Inc.",
"000377": "Gigabit Wireless",
"000378": "HUMAX Co., Ltd.",
"000379": "Proscend Communications, Inc.",
"00037A": "Taiyo Yuden Co., Ltd.",
"00037B": "IDEC IZUMI Corporation",
"00037C": "Coax Media",
"00037D": "Stellcom",
"00037E": "PORTech Communications, Inc.",
"00037F": "Atheros Communications, Inc.",
"000380": "SSH Communications Security Corp.",
"000381": "Ingenico International",
"000382": "A-One Co., Ltd.",
"000383": "Metera Networks, Inc.",
"000384": "AETA",
"000385": "Actelis Networks, Inc.",
"000386": "Ho Net, Inc.",
"000387": "Blaze Network Products",
"000388": "Fastfame Technology Co., Ltd.",
"000389": "Plantronics",
"00038A": "America Online, Inc.",
"00038B": "PLUS-ONE I&T, Inc.",
"00038C": "Total Impact",
"00038D": "PCS Revenue Control Systems, Inc.",
"00038E": "Atoga Systems, Inc.",
"00038F": "Weinschel Corporation",
"000390": "Digital Video Communications, Inc.",
"000391": "Advanced Digital Broadcast, Ltd.",
"000392": "Hyundai Teletek Co., Ltd.",
"000393": "Apple",
"000394": "Connect One",
"000395": "California Amplifier",
"000396": "EZ Cast Co., Ltd.",
"000397": "Watchfront Limited",
"000398": "WISI",
"000399": "Dongju Informations & Communications Co., Ltd.",
"00039A": "SiConnect",
"00039B": "NetChip Technology, Inc.",
"00039C": "OptiMight Communications, Inc.",
"00039D": "Qisda Corporation",
"00039E": "Tera System Co., Ltd.",
"00039F": "CISCO SYSTEMS, INC.",
"0003A0": "CISCO SYSTEMS, INC.",
"0003A1": "HIPER Information & Communication, Inc.",
"0003A2": "Catapult Communications",
"0003A3": "MAVIX, Ltd.",
"0003A4": "Imation Corp.",
"0003A5": "Medea Corporation",
"0003A6": "Traxit Technology, Inc.",
"0003A7": "Unixtar Technology, Inc.",
"0003A8": "IDOT Computers, Inc.",
"0003A9": "AXCENT Media AG",
"0003AA": "Watlow",
"0003AB": "Bridge Information Systems",
"0003AC": "Fronius Schweissmaschinen",
"0003AD": "Emerson Energy Systems AB",
"0003AE": "Allied Advanced Manufacturing Pte, Ltd.",
"0003AF": "Paragea Communications",
"0003B0": "Xsense Technology Corp.",
"0003B1": "Hospira Inc.",
"0003B2": "Radware",
"0003B3": "IA Link Systems Co., Ltd.",
"0003B4": "Macrotek International Corp.",
"0003B5": "Entra Technology Co.",
"0003B6": "QSI Corporation",
"0003B7": "ZACCESS Systems",
"0003B8": "NetKit Solutions, LLC",
"0003B9": "Hualong Telecom Co., Ltd.",
"0003BA": "Oracle Corporation",
"0003BB": "Signal Communications Limited",
"0003BC": "COT GmbH",
"0003BD": "OmniCluster Technologies, Inc.",
"0003BE": "Netility",
"0003BF": "Centerpoint Broadband Technologies, Inc.",
"0003C0": "RFTNC Co., Ltd.",
"0003C1": "Packet Dynamics Ltd",
"0003C2": "Solphone K.K.",
"0003C3": "Micronik Multimedia",
"0003C4": "Tomra Systems ASA",
"0003C5": "Mobotix AG",
"0003C6": "ICUE Systems, Inc.",
"0003C7": "hopf Elektronik GmbH",
"0003C8": "CML Emergency Services",
"0003C9": "TECOM Co., Ltd.",
"0003CA": "MTS Systems Corp.",
"0003CB": "Nippon Systems Development Co., Ltd.",
"0003CC": "Momentum Computer, Inc.",
"0003CD": "Clovertech, Inc.",
"0003CE": "ETEN Technologies, Inc.",
"0003CF": "Muxcom, Inc.",
"0003D0": "KOANKEISO Co., Ltd.",
"0003D1": "Takaya Corporation",
"0003D2": "Crossbeam Systems, Inc.",
"0003D3": "Internet Energy Systems, Inc.",
"0003D4": "Alloptic, Inc.",
"0003D5": "Advanced Communications Co., Ltd.",
"0003D6": "RADVision, Ltd.",
"0003D7": "NextNet Wireless, Inc.",
"0003D8": "iMPath Networks, Inc.",
"0003D9": "Secheron SA",
"0003DA": "Takamisawa Cybernetics Co., Ltd.",
"0003DB": "Apogee Electronics Corp.",
"0003DC": "Lexar Media, Inc.",
"0003DD": "Comark Corp.",
"0003DE": "OTC Wireless",
"0003DF": "Desana Systems",
"0003E0": "ARRIS Group, Inc.",
"0003E1": "Winmate Communication, Inc.",
"0003E2": "Comspace Corporation",
"0003E3": "CISCO SYSTEMS, INC.",
"0003E4": "CISCO SYSTEMS, INC.",
"0003E5": "Hermstedt SG",
"0003E6": "Entone, Inc.",
"0003E7": "Logostek Co. Ltd.",
"0003E8": "Wavelength Digital Limited",
"0003E9": "Akara Canada, Inc.",
"0003EA": "Mega System Technologies, Inc.",
"0003EB": "Atrica",
"0003EC": "ICG Research, Inc.",
"0003ED": "Shinkawa Electric Co., Ltd.",
"0003EE": "MKNet Corporation",
"0003EF": "Oneline AG",
"0003F0": "Redfern Broadband Networks",
"0003F1": "Cicada Semiconductor, Inc.",
"0003F2": "Seneca Networks",
"0003F3": "Dazzle Multimedia, Inc.",
"0003F4": "NetBurner",
"0003F5": "Chip2Chip",
"0003F6": "Allegro Networks, Inc.",
"0003F7": "Plast-Control GmbH",
"0003F8": "SanCastle Technologies, Inc.",
"0003F9": "Pleiades Communications, Inc.",
"0003FA": "TiMetra Networks",
"0003FB": "ENEGATE Co.,Ltd.",
"0003FC": "Intertex Data AB",
"0003FD": "CISCO SYSTEMS, INC.",
"0003FE": "CISCO SYSTEMS, INC.",
"0003FF": "Microsoft Corporation",
"000400": "LEXMARK INTERNATIONAL, INC.",
"000401": "Osaki Electric Co., Ltd.",
"000402": "Nexsan Technologies, Ltd.",
"000403": "Nexsi Corporation",
"000404": "Makino Milling Machine Co., Ltd.",
"000405": "ACN Technologies",
"000406": "Fa. Metabox AG",
"000407": "Topcon Positioning Systems, Inc.",
"000408": "Sanko Electronics Co., Ltd.",
"000409": "Cratos Networks",
"00040A": "Sage Systems",
"00040B": "3com Europe Ltd.",
"00040C": "Kanno Works, Ltd.",
"00040D": "Avaya, Inc.",
"00040E": "AVM GmbH",
"00040F": "Asus Network Technologies, Inc.",
"000410": "Spinnaker Networks, Inc.",
"000411": "Inkra Networks, Inc.",
"000412": "WaveSmith Networks, Inc.",
"000413": "SNOM Technology AG",
"000414": "Umezawa Musen Denki Co., Ltd.",
"000415": "Rasteme Systems Co., Ltd.",
"000416": "Parks S/A Comunicacoes Digitais",
"000417": "ELAU AG",
"000418": "Teltronic S.A.U.",
"000419": "Fibercycle Networks, Inc.",
"00041A": "Ines Test and Measurement GmbH & CoKG",
"00041B": "Bridgeworks Ltd.",
"00041C": "ipDialog, Inc.",
"00041D": "Corega of America",
"00041E": "Shikoku Instrumentation Co., Ltd.",
"00041F": "Sony Computer Entertainment, Inc.",
"000420": "Slim Devices, Inc.",
"000421": "Ocular Networks",
"000422": "Gordon Kapes, Inc.",
"000423": "Intel Corporation",
"000424": "TMC s.r.l.",
"000425": "Atmel Corporation",
"000426": "Autosys",
"000427": "CISCO SYSTEMS, INC.",
"000428": "CISCO SYSTEMS, INC.",
"000429": "Pixord Corporation",
"00042A": "Wireless Networks, Inc.",
"00042B": "IT Access Co., Ltd.",
"00042C": "Minet, Inc.",
"00042D": "Sarian Systems, Ltd.",
"00042E": "Netous Technologies, Ltd.",
"00042F": "International Communications Products, Inc.",
"000430": "Netgem",
"000431": "GlobalStreams, Inc.",
"000432": "Voyetra Turtle Beach, Inc.",
"000433": "Cyberboard A/S",
"000434": "Accelent Systems, Inc.",
"000435": "Comptek International, Inc.",
"000436": "ELANsat Technologies, Inc.",
"000437": "Powin Information Technology, Inc.",
"000438": "Nortel Networks",
"000439": "Rosco Entertainment Technology, Inc.",
"00043A": "Intelligent Telecommunications, Inc.",
"00043B": "Lava Computer Mfg., Inc.",
"00043C": "SONOS Co., Ltd.",
"00043D": "INDEL AG",
"00043E": "Telencomm",
"00043F": "ESTeem Wireless Modems, Inc",
"000440": "cyberPIXIE, Inc.",
"000441": "Half Dome Systems, Inc.",
"000442": "NACT",
"000443": "Agilent Technologies, Inc.",
"000444": "Western Multiplex Corporation",
"000445": "LMS Skalar Instruments GmbH",
"000446": "CYZENTECH Co., Ltd.",
"000447": "Acrowave Systems Co., Ltd.",
"000448": "Polaroid Corporation",
"000449": "Mapletree Networks",
"00044A": "iPolicy Networks, Inc.",
"00044B": "NVIDIA",
"00044C": "JENOPTIK",
"00044D": "CISCO SYSTEMS, INC.",
"00044E": "CISCO SYSTEMS, INC.",
"00044F": "Leukhardt Systemelektronik GmbH",
"000450": "DMD Computers SRL",
"000451": "Medrad, Inc.",
"000452": "RocketLogix, Inc.",
"000453": "YottaYotta, Inc.",
"000454": "Quadriga UK",
"000455": "ANTARA.net",
"000456": "Cambium Networks Limited",
"000457": "Universal Access Technology, Inc.",
"000458": "Fusion X Co., Ltd.",
"000459": "Veristar Corporation",
"00045A": "The Linksys Group, Inc.",
"00045B": "Techsan Electronics Co., Ltd.",
"00045C": "Mobiwave Pte Ltd",
"00045D": "BEKA Elektronik",
"00045E": "PolyTrax Information Technology AG",
"00045F": "Avalue Technology, Inc.",
"000460": "Knilink Technology, Inc.",
"000461": "EPOX Computer Co., Ltd.",
"000462": "DAKOS Data & Communication Co., Ltd.",
"000463": "Bosch Security Systems",
"000464": "Pulse-Link Inc",
"000465": "i.s.t isdn-support technik GmbH",
"000466": "ARMITEL Co.",
"000467": "Wuhan Research Institute of MII",
"000468": "Vivity, Inc.",
"000469": "Innocom, Inc.",
"00046A": "Navini Networks",
"00046B": "Palm Wireless, Inc.",
"00046C": "Cyber Technology Co., Ltd.",
"00046D": "CISCO SYSTEMS, INC.",
"00046E": "CISCO SYSTEMS, INC.",
"00046F": "Digitel S/A Industria Eletronica",
"000470": "ipUnplugged AB",
"000471": "IPrad",
"000472": "Telelynx, Inc.",
"000473": "Photonex Corporation",
"000474": "LEGRAND",
"000475": "3 Com Corporation",
"000476": "3 Com Corporation",
"000477": "Scalant Systems, Inc.",
"000478": "G. Star Technology Corporation",
"000479": "Radius Co., Ltd.",
"00047A": "AXXESSIT ASA",
"00047B": "Schlumberger",
"00047C": "Skidata AG",
"00047D": "Pelco",
"00047E": "Siqura B.V.",
"00047F": "Chr. Mayr GmbH & Co. KG",
"000480": "Brocade Communications Systems, Inc",
"000481": "Econolite Control Products, Inc.",
"000482": "Medialogic Corp.",
"000483": "Deltron Technology, Inc.",
"000484": "Amann GmbH",
"000485": "PicoLight",
"000486": "ITTC, University of Kansas",
"000487": "Cogency Semiconductor, Inc.",
"000488": "Eurotherm Controls",
"000489": "YAFO Networks, Inc.",
"00048A": "Temia Vertriebs GmbH",
"00048B": "Poscon Corporation",
"00048C": "Nayna Networks, Inc.",
"00048D": "Tone Commander Systems, Inc.",
"00048E": "Ohm Tech Labs, Inc.",
"00048F": "TD Systems Corporation",
"000490": "Optical Access",
"000491": "Technovision, Inc.",
"000492": "Hive Internet, Ltd.",
"000493": "Tsinghua Unisplendour Co., Ltd.",
"000494": "Breezecom, Ltd.",
"000495": "Tejas Networks India Limited",
"000496": "Extreme Networks",
"000497": "MacroSystem Digital Video AG",
"000498": "Mahi Networks",
"000499": "Chino Corporation",
"00049A": "CISCO SYSTEMS, INC.",
"00049B": "CISCO SYSTEMS, INC.",
"00049C": "Surgient Networks, Inc.",
"00049D": "Ipanema Technologies",
"00049E": "Wirelink Co., Ltd.",
"00049F": "Freescale Semiconductor",
"0004A0": "Verity Instruments, Inc.",
"0004A1": "Pathway Connectivity",
"0004A2": "L.S.I. Japan Co., Ltd.",
"0004A3": "Microchip Technology, Inc.",
"0004A4": "NetEnabled, Inc.",
"0004A5": "Barco Projection Systems NV",
"0004A6": "SAF Tehnika Ltd.",
"0004A7": "FabiaTech Corporation",
"0004A8": "Broadmax Technologies, Inc.",
"0004A9": "SandStream Technologies, Inc.",
"0004AA": "Jetstream Communications",
"0004AB": "Comverse Network Systems, Inc.",
"0004AC": "IBM Corp",
"0004AD": "Malibu Networks",
"0004AE": "Sullair Corporation",
"0004AF": "Digital Fountain, Inc.",
"0004B0": "ELESIGN Co., Ltd.",
"0004B1": "Signal Technology, Inc.",
"0004B2": "ESSEGI SRL",
"0004B3": "Videotek, Inc.",
"0004B4": "CIAC",
"0004B5": "Equitrac Corporation",
"0004B6": "Stratex Networks, Inc.",
"0004B7": "AMB i.t. Holding",
"0004B8": "Kumahira Co., Ltd.",
"0004B9": "S.I. Soubou, Inc.",
"0004BA": "KDD Media Will Corporation",
"0004BB": "Bardac Corporation",
"0004BC": "Giantec, Inc.",
"0004BD": "ARRIS Group, Inc.",
"0004BE": "OptXCon, Inc.",
"0004BF": "VersaLogic Corp.",
"0004C0": "CISCO SYSTEMS, INC.",
"0004C1": "CISCO SYSTEMS, INC.",
"0004C2": "Magnipix, Inc.",
"0004C3": "CASTOR Informatique",
"0004C4": "Allen & Heath Limited",
"0004C5": "ASE Technologies, USA",
"0004C6": "Yamaha Motor Co., Ltd.",
"0004C7": "NetMount",
"0004C8": "LIBA Maschinenfabrik GmbH",
"0004C9": "Micro Electron Co., Ltd.",
"0004CA": "FreeMs Corp.",
"0004CB": "Tdsoft Communication, Ltd.",
"0004CC": "Peek Traffic B.V.",
"0004CD": "Extenway Solutions Inc",
"0004CE": "Patria Ailon",
"0004CF": "Seagate Technology",
"0004D0": "Softlink s.r.o.",
"0004D1": "Drew Technologies, Inc.",
"0004D2": "Adcon Telemetry GmbH",
"0004D3": "Toyokeiki Co., Ltd.",
"0004D4": "Proview Electronics Co., Ltd.",
"0004D5": "Hitachi Information & Communication Engineering, Ltd.",
"0004D6": "Takagi Industrial Co., Ltd.",
"0004D7": "Omitec Instrumentation Ltd.",
"0004D8": "IPWireless, Inc.",
"0004D9": "Titan Electronics, Inc.",
"0004DA": "Relax Technology, Inc.",
"0004DB": "Tellus Group Corp.",
"0004DC": "Nortel Networks",
"0004DD": "CISCO SYSTEMS, INC.",
"0004DE": "CISCO SYSTEMS, INC.",
"0004DF": "Teracom Telematica Ltda.",
"0004E0": "Procket Networks",
"0004E1": "Infinior Microsystems",
"0004E2": "SMC Networks, Inc.",
"0004E3": "Accton Technology Corp.",
"0004E4": "Daeryung Ind., Inc.",
"0004E5": "Glonet Systems, Inc.",
"0004E6": "Banyan Network Private Limited",
"0004E7": "Lightpointe Communications, Inc",
"0004E8": "IER, Inc.",
"0004E9": "Infiniswitch Corporation",
"0004EA": "Hewlett-Packard Company",
"0004EB": "Paxonet Communications, Inc.",
"0004EC": "Memobox SA",
"0004ED": "Billion Electric Co., Ltd.",
"0004EE": "Lincoln Electric Company",
"0004EF": "Polestar Corp.",
"0004F0": "International Computers, Ltd",
"0004F1": "WhereNet",
"0004F2": "Polycom",
"0004F3": "FS FORTH-SYSTEME GmbH",
"0004F4": "Infinite Electronics Inc.",
"0004F5": "SnowShore Networks, Inc.",
"0004F6": "Amphus",
"0004F7": "Omega Band, Inc.",
"0004F8": "QUALICABLE TV Industria E Com., Ltda",
"0004F9": "Xtera Communications, Inc.",
"0004FA": "NBS Technologies Inc.",
"0004FB": "Commtech, Inc.",
"0004FC": "Stratus Computer (DE), Inc.",
"0004FD": "Japan Control Engineering Co., Ltd.",
"0004FE": "Pelago Networks",
"0004FF": "Acronet Co., Ltd.",
"000500": "CISCO SYSTEMS, INC.",
"000501": "CISCO SYSTEMS, INC.",
"000502": "Apple",
"000503": "ICONAG",
"000504": "Naray Information & Communication Enterprise",
"000505": "Systems Integration Solutions, Inc.",
"000506": "Reddo Networks AB",
"000507": "Fine Appliance Corp.",
"000508": "Inetcam, Inc.",
"000509": "AVOC Nishimura Ltd.",
"00050A": "ICS Spa",
"00050B": "SICOM Systems, Inc.",
"00050C": "Network Photonics, Inc.",
"00050D": "Midstream Technologies, Inc.",
"00050E": "3ware, Inc.",
"00050F": "Tanaka S/S Ltd.",
"000510": "Infinite Shanghai Communication Terminals Ltd.",
"000511": "Complementary Technologies Ltd",
"000512": "MeshNetworks, Inc.",
"000513": "VTLinx Multimedia Systems, Inc.",
"000514": "KDT Systems Co., Ltd.",
"000515": "Nuark Co., Ltd.",
"000516": "SMART Modular Technologies",
"000517": "Shellcomm, Inc.",
"000518": "Jupiters Technology",
"000519": "Siemens Building Technologies AG,",
"00051A": "3Com Europe Ltd.",
"00051B": "Magic Control Technology Corporation",
"00051C": "Xnet Technology Corp.",
"00051D": "Airocon, Inc.",
"00051E": "Brocade Communications Systems, Inc.",
"00051F": "Taijin Media Co., Ltd.",
"000520": "Smartronix, Inc.",
"000521": "Control Microsystems",
"000522": "LEA*D Corporation, Inc.",
"000523": "AVL List GmbH",
"000524": "BTL System (HK) Limited",
"000525": "Puretek Industrial Co., Ltd.",
"000526": "IPAS GmbH",
"000527": "SJ Tek Co. Ltd",
"000528": "New Focus, Inc.",
"000529": "Shanghai Broadan Communication Technology Co., Ltd",
"00052A": "Ikegami Tsushinki Co., Ltd.",
"00052B": "HORIBA, Ltd.",
"00052C": "Supreme Magic Corporation",
"00052D": "Zoltrix International Limited",
"00052E": "Cinta Networks",
"00052F": "Leviton Network Solutions",
"000530": "Andiamo Systems, Inc.",
"000531": "CISCO SYSTEMS, INC.",
"000532": "CISCO SYSTEMS, INC.",
"000533": "Brocade Communications Systems, Inc.",
"000534": "Northstar Engineering Ltd.",
"000535": "Chip PC Ltd.",
"000536": "Danam Communications, Inc.",
"000537": "Nets Technology Co., Ltd.",
"000538": "Merilus, Inc.",
"000539": "A Brand New World in Sweden AB",
"00053A": "Willowglen Services Pte Ltd",
"00053B": "Harbour Networks Ltd., Co. Beijing",
"00053C": "Xircom",
"00053D": "Agere Systems",
"00053E": "KID Systeme GmbH",
"00053F": "VisionTek, Inc.",
"000540": "FAST Corporation",
"000541": "Advanced Systems Co., Ltd.",
"000542": "Otari, Inc.",
"000543": "IQ Wireless GmbH",
"000544": "Valley Technologies, Inc.",
"000545": "Internet Photonics",
"000546": "KDDI Network & Solultions Inc.",
"000547": "Starent Networks",
"000548": "Disco Corporation",
"000549": "Salira Optical Network Systems",
"00054A": "Ario Data Networks, Inc.",
"00054B": "Eaton Automation AG",
"00054C": "RF Innovations Pty Ltd",
"00054D": "Brans Technologies, Inc.",
"00054E": "Philips",
"00054F": "PRIVATE",
"000550": "Vcomms Connect Limited",
"000551": "F & S Elektronik Systeme GmbH",
"000552": "Xycotec Computer GmbH",
"000553": "DVC Company, Inc.",
"000554": "Rangestar Wireless",
"000555": "Japan Cash Machine Co., Ltd.",
"000556": "360 Systems",
"000557": "Agile TV Corporation",
"000558": "Synchronous, Inc.",
"000559": "Intracom S.A.",
"00055A": "Power Dsine Ltd.",
"00055B": "Charles Industries, Ltd.",
"00055C": "Kowa Company, Ltd.",
"00055D": "D-Link Systems, Inc.",
"00055E": "CISCO SYSTEMS, INC.",
"00055F": "CISCO SYSTEMS, INC.",
"000560": "LEADER COMM.CO., LTD",
"000561": "nac Image Technology, Inc.",
"000562": "Digital View Limited",
"000563": "J-Works, Inc.",
"000564": "Tsinghua Bitway Co., Ltd.",
"000565": "Tailyn Communication Company Ltd.",
"000566": "Secui.com Corporation",
"000567": "Etymonic Design, Inc.",
"000568": "Piltofish Networks AB",
"000569": "VMware, Inc.",
"00056A": "Heuft Systemtechnik GmbH",
"00056B": "C.P. Technology Co., Ltd.",
"00056C": "Hung Chang Co., Ltd.",
"00056D": "Pacific Corporation",
"00056E": "National Enhance Technology, Inc.",
"00056F": "Innomedia Technologies Pvt. Ltd.",
"000570": "Baydel Ltd.",
"000571": "Seiwa Electronics Co.",
"000572": "Deonet Co., Ltd.",
"000573": "CISCO SYSTEMS, INC.",
"000574": "CISCO SYSTEMS, INC.",
"000575": "CDS-Electronics BV",
"000576": "NSM Technology Ltd.",
"000577": "SM Information & Communication",
"000578": "PRIVATE",
"000579": "Universal Control Solution Corp.",
"00057A": "Overture Networks",
"00057B": "Chung Nam Electronic Co., Ltd.",
"00057C": "RCO Security AB",
"00057D": "Sun Communications, Inc.",
"00057E": "Eckelmann Steuerungstechnik GmbH",
"00057F": "Acqis Technology",
"000580": "FibroLAN Ltd.",
"000581": "Snell",
"000582": "ClearCube Technology",
"000583": "ImageCom Limited",
"000584": "AbsoluteValue Systems, Inc.",
"000585": "Juniper Networks, Inc.",
"000586": "Lucent Technologies",
"000587": "Locus, Incorporated",
"000588": "Sensoria Corp.",
"000589": "National Datacomputer",
"00058A": "Netcom Co., Ltd.",
"00058B": "IPmental, Inc.",
"00058C": "Opentech Inc.",
"00058D": "Lynx Photonic Networks, Inc.",
"00058E": "Flextronics International GmbH & Co. Nfg. KG",
"00058F": "CLCsoft co.",
"000590": "Swissvoice Ltd.",
"000591": "Active Silicon Ltd",
"000592": "Pultek Corp.",
"000593": "Grammar Engine Inc.",
"000594": "IXXAT Automation GmbH",
"000595": "Alesis Corporation",
"000596": "Genotech Co., Ltd.",
"000597": "Eagle Traffic Control Systems",
"000598": "CRONOS S.r.l.",
"000599": "DRS Test and Energy Management or DRS-TEM",
"00059A": "CISCO SYSTEMS, INC.",
"00059B": "CISCO SYSTEMS, INC.",
"00059C": "Kleinknecht GmbH, Ing. B\u00fcro",
"00059D": "Daniel Computing Systems, Inc.",
"00059E": "Zinwell Corporation",
"00059F": "Yotta Networks, Inc.",
"0005A0": "MOBILINE Kft.",
"0005A1": "Zenocom",
"0005A2": "CELOX Networks",
"0005A3": "QEI, Inc.",
"0005A4": "Lucid Voice Ltd.",
"0005A5": "KOTT",
"0005A6": "Extron Electronics",
"0005A7": "Hyperchip, Inc.",
"0005A8": "WYLE ELECTRONICS",
"0005A9": "Princeton Networks, Inc.",
"0005AA": "Moore Industries International Inc.",
"0005AB": "Cyber Fone, Inc.",
"0005AC": "Northern Digital, Inc.",
"0005AD": "Topspin Communications, Inc.",
"0005AE": "Mediaport USA",
"0005AF": "InnoScan Computing A/S",
"0005B0": "Korea Computer Technology Co., Ltd.",
"0005B1": "ASB Technology BV",
"0005B2": "Medison Co., Ltd.",
"0005B3": "Asahi-Engineering Co., Ltd.",
"0005B4": "Aceex Corporation",
"0005B5": "Broadcom Technologies",
"0005B6": "INSYS Microelectronics GmbH",
"0005B7": "Arbor Technology Corp.",
"0005B8": "Electronic Design Associates, Inc.",
"0005B9": "Airvana, Inc.",
"0005BA": "Area Netwoeks, Inc.",
"0005BB": "Myspace AB",
"0005BC": "Resource Data Management Ltd",
"0005BD": "ROAX BV",
"0005BE": "Kongsberg Seatex AS",
"0005BF": "JustEzy Technology, Inc.",
"0005C0": "Digital Network Alacarte Co., Ltd.",
"0005C1": "A-Kyung Motion, Inc.",
"0005C2": "Soronti, Inc.",
"0005C3": "Pacific Instruments, Inc.",
"0005C4": "Telect, Inc.",
"0005C5": "Flaga HF",
"0005C6": "Triz Communications",
"0005C7": "I/F-COM A/S",
"0005C8": "VERYTECH",
"0005C9": "LG Innotek Co., Ltd.",
"0005CA": "Hitron Technology, Inc.",
"0005CB": "ROIS Technologies, Inc.",
"0005CC": "Sumtel Communications, Inc.",
"0005CD": "Denon, Ltd.",
"0005CE": "Prolink Microsystems Corporation",
"0005CF": "Thunder River Technologies, Inc.",
"0005D0": "Solinet Systems",
"0005D1": "Metavector Technologies",
"0005D2": "DAP Technologies",
"0005D3": "eProduction Solutions, Inc.",
"0005D4": "FutureSmart Networks, Inc.",
"0005D5": "Speedcom Wireless",
"0005D6": "L-3 Linkabit",
"0005D7": "Vista Imaging, Inc.",
"0005D8": "Arescom, Inc.",
"0005D9": "Techno Valley, Inc.",
"0005DA": "Apex Automationstechnik",
"0005DB": "PSI Nentec GmbH",
"0005DC": "CISCO SYSTEMS, INC.",
"0005DD": "CISCO SYSTEMS, INC.",
"0005DE": "Gi Fone Korea, Inc.",
"0005DF": "Electronic Innovation, Inc.",
"0005E0": "Empirix Corp.",
"0005E1": "Trellis Photonics, Ltd.",
"0005E2": "Creativ Network Technologies",
"0005E3": "LightSand Communications, Inc.",
"0005E4": "Red Lion Controls Inc.",
"0005E5": "Renishaw PLC",
"0005E6": "Egenera, Inc.",
"0005E7": "Netrake an AudioCodes Company",
"0005E8": "TurboWave, Inc.",
"0005E9": "Unicess Network, Inc.",
"0005EA": "Rednix",
"0005EB": "Blue Ridge Networks, Inc.",
"0005EC": "Mosaic Systems Inc.",
"0005ED": "Technikum Joanneum GmbH",
"0005EE": "Siemens AB, Infrastructure & Cities, Building Technologies Division, IC BT SSP SP BA PR",
"0005EF": "ADOIR Digital Technology",
"0005F0": "SATEC",
"0005F1": "Vrcom, Inc.",
"0005F2": "Power R, Inc.",
"0005F3": "Webyn",
"0005F4": "System Base Co., Ltd.",
"0005F5": "Geospace Technologies",
"0005F6": "Young Chang Co. Ltd.",
"0005F7": "Analog Devices, Inc.",
"0005F8": "Real Time Access, Inc.",
"0005F9": "TOA Corporation",
"0005FA": "IPOptical, Inc.",
"0005FB": "ShareGate, Inc.",
"0005FC": "Schenck Pegasus Corp.",
"0005FD": "PacketLight Networks Ltd.",
"0005FE": "Traficon N.V.",
"0005FF": "SNS Solutions, Inc.",
"000600": "Toshiba Teli Corporation",
"000601": "Otanikeiki Co., Ltd.",
"000602": "Cirkitech Electronics Co.",
"000603": "Baker Hughes Inc.",
"000604": "@Track Communications, Inc.",
"000605": "Inncom International, Inc.",
"000606": "RapidWAN, Inc.",
"000607": "Omni Directional Control Technology Inc.",
"000608": "At-Sky SAS",
"000609": "Crossport Systems",
"00060A": "Blue2space",
"00060B": "Emerson Network Power",
"00060C": "Melco Industries, Inc.",
"00060D": "Wave7 Optics",
"00060E": "IGYS Systems, Inc.",
"00060F": "Narad Networks Inc",
"000610": "Abeona Networks Inc",
"000611": "Zeus Wireless, Inc.",
"000612": "Accusys, Inc.",
"000613": "Kawasaki Microelectronics Incorporated",
"000614": "Prism Holdings",
"000615": "Kimoto Electric Co., Ltd.",
"000616": "Tel Net Co., Ltd.",
"000617": "Redswitch Inc.",
"000618": "DigiPower Manufacturing Inc.",
"000619": "Connection Technology Systems",
"00061A": "Zetari Inc.",
"00061B": "Notebook Development Lab. Lenovo Japan Ltd.",
"00061C": "Hoshino Metal Industries, Ltd.",
"00061D": "MIP Telecom, Inc.",
"00061E": "Maxan Systems",
"00061F": "Vision Components GmbH",
"000620": "Serial System Ltd.",
"000621": "Hinox, Co., Ltd.",
"000622": "Chung Fu Chen Yeh Enterprise Corp.",
"000623": "MGE UPS Systems France",
"000624": "Gentner Communications Corp.",
"000625": "The Linksys Group, Inc.",
"000626": "MWE GmbH",
"000627": "Uniwide Technologies, Inc.",
"000628": "CISCO SYSTEMS, INC.",
"000629": "IBM Corp",
"00062A": "CISCO SYSTEMS, INC.",
"00062B": "INTRASERVER TECHNOLOGY",
"00062C": "Bivio Networks",
"00062D": "TouchStar Technologies, L.L.C.",
"00062E": "Aristos Logic Corp.",
"00062F": "Pivotech Systems Inc.",
"000630": "Adtranz Sweden",
"000631": "Calix",
"000632": "Mesco Engineering GmbH",
"000633": "Cross Match Technologies GmbH",
"000634": "GTE Airfone Inc.",
"000635": "PacketAir Networks, Inc.",
"000636": "Jedai Broadband Networks",
"000637": "Toptrend-Meta Information (ShenZhen) Inc.",
"000638": "Sungjin C&C Co., Ltd.",
"000639": "Newtec",
"00063A": "Dura Micro, Inc.",
"00063B": "Arcturus Networks Inc.",
"00063C": "Intrinsyc Software International Inc.",
"00063D": "Microwave Data Systems Inc.",
"00063E": "Opthos Inc.",
"00063F": "Everex Communications Inc.",
"000640": "White Rock Networks",
"000641": "ITCN",
"000642": "Genetel Systems Inc.",
"000643": "SONO Computer Co., Ltd.",
"000644": "Neix,Inc",
"000645": "Meisei Electric Co. Ltd.",
"000646": "ShenZhen XunBao Network Technology Co Ltd",
"000647": "Etrali S.A.",
"000648": "Seedsware, Inc.",
"000649": "3M Deutschland GmbH",
"00064A": "Honeywell Co., Ltd. (KOREA)",
"00064B": "Alexon Co., Ltd.",
"00064C": "Invicta Networks, Inc.",
"00064D": "Sencore",
"00064E": "Broad Net Technology Inc.",
"00064F": "PRO-NETS Technology Corporation",
"000650": "Tiburon Networks, Inc.",
"000651": "Aspen Networks Inc.",
"000652": "CISCO SYSTEMS, INC.",
"000653": "CISCO SYSTEMS, INC.",
"000654": "Winpresa Building Automation Technologies GmbH",
"000655": "Yipee, Inc.",
"000656": "Tactel AB",
"000657": "Market Central, Inc.",
"000658": "Helmut Fischer GmbH Institut f\u00fcr Elektronik und Messtechnik",
"000659": "EAL (Apeldoorn) B.V.",
"00065A": "Strix Systems",
"00065B": "Dell Computer Corp.",
"00065C": "Malachite Technologies, Inc.",
"00065D": "Heidelberg Web Systems",
"00065E": "Photuris, Inc.",
"00065F": "ECI Telecom - NGTS Ltd.",
"000660": "NADEX Co., Ltd.",
"000661": "NIA Home Technologies Corp.",
"000662": "MBM Technology Ltd.",
"000663": "Human Technology Co., Ltd.",
"000664": "Fostex Corporation",
"000665": "Sunny Giken, Inc.",
"000666": "Roving Networks",
"000667": "Tripp Lite",
"000668": "Vicon Industries Inc.",
"000669": "Datasound Laboratories Ltd",
"00066A": "InfiniCon Systems, Inc.",
"00066B": "Sysmex Corporation",
"00066C": "Robinson Corporation",
"00066D": "Compuprint S.P.A.",
"00066E": "Delta Electronics, Inc.",
"00066F": "Korea Data Systems",
"000670": "Upponetti Oy",
"000671": "Softing AG",
"000672": "Netezza",
"000673": "TKH Security Solutions USA",
"000674": "Spectrum Control, Inc.",
"000675": "Banderacom, Inc.",
"000676": "Novra Technologies Inc.",
"000677": "SICK AG",
"000678": "Marantz Brand Company",
"000679": "Konami Corporation",
"00067A": "JMP Systems",
"00067B": "Toplink C&C Corporation",
"00067C": "CISCO SYSTEMS, INC.",
"00067D": "Takasago Ltd.",
"00067E": "WinCom Systems, Inc.",
"00067F": "Digeo, Inc.",
"000680": "Card Access, Inc.",
"000681": "Goepel Electronic GmbH",
"000682": "Convedia",
"000683": "Bravara Communications, Inc.",
"000684": "Biacore AB",
"000685": "NetNearU Corporation",
"000686": "ZARDCOM Co., Ltd.",
"000687": "Omnitron Systems Technology, Inc.",
"000688": "Telways Communication Co., Ltd.",
"000689": "yLez Technologies Pte Ltd",
"00068A": "NeuronNet Co. Ltd. R&D Center",
"00068B": "AirRunner Technologies, Inc.",
"00068C": "3Com Corporation",
"00068D": "SEPATON, Inc.",
"00068E": "HID Corporation",
"00068F": "Telemonitor, Inc.",
"000690": "Euracom Communication GmbH",
"000691": "PT Inovacao",
"000692": "Intruvert Networks, Inc.",
"000693": "Flexus Computer Technology, Inc.",
"000694": "Mobillian Corporation",
"000695": "Ensure Technologies, Inc.",
"000696": "Advent Networks",
"000697": "R & D Center",
"000698": "egnite GmbH",
"000699": "Vida Design Co.",
"00069A": "e & Tel",
"00069B": "AVT Audio Video Technologies GmbH",
"00069C": "Transmode Systems AB",
"00069D": "Petards Ltd",
"00069E": "UNIQA, Inc.",
"00069F": "Kuokoa Networks",
"0006A0": "Mx Imaging",
"0006A1": "Celsian Technologies, Inc.",
"0006A2": "Microtune, Inc.",
"0006A3": "Bitran Corporation",
"0006A4": "INNOWELL Corp.",
"0006A5": "PINON Corp.",
"0006A6": "Artistic Licence Engineering Ltd",
"0006A7": "Primarion",
"0006A8": "KC Technology, Inc.",
"0006A9": "Universal Instruments Corp.",
"0006AA": "VT Miltope",
"0006AB": "W-Link Systems, Inc.",
"0006AC": "Intersoft Co.",
"0006AD": "KB Electronics Ltd.",
"0006AE": "Himachal Futuristic Communications Ltd",
"0006AF": "Xalted Networks",
"0006B0": "Comtech EF Data Corp.",
"0006B1": "Sonicwall",
"0006B2": "Linxtek Co.",
"0006B3": "Diagraph Corporation",
"0006B4": "Vorne Industries, Inc.",
"0006B5": "Source Photonics, Inc.",
"0006B6": "Nir-Or Israel Ltd.",
"0006B7": "TELEM GmbH",
"0006B8": "Bandspeed Pty Ltd",
"0006B9": "A5TEK Corp.",
"0006BA": "Westwave Communications",
"0006BB": "ATI Technologies Inc.",
"0006BC": "Macrolink, Inc.",
"0006BD": "BNTECHNOLOGY Co., Ltd.",
"0006BE": "Baumer Optronic GmbH",
"0006BF": "Accella Technologies Co., Ltd.",
"0006C0": "United Internetworks, Inc.",
"0006C1": "CISCO SYSTEMS, INC.",
"0006C2": "Smartmatic Corporation",
"0006C3": "Schindler Elevator Ltd.",
"0006C4": "Piolink Inc.",
"0006C5": "INNOVI Technologies Limited",
"0006C6": "lesswire AG",
"0006C7": "RFNET Technologies Pte Ltd (S)",
"0006C8": "Sumitomo Metal Micro Devices, Inc.",
"0006C9": "Technical Marketing Research, Inc.",
"0006CA": "American Computer & Digital Components, Inc. (ACDC)",
"0006CB": "Jotron Electronics A/S",
"0006CC": "JMI Electronics Co., Ltd.",
"0006CD": "Leaf Imaging Ltd.",
"0006CE": "DATENO",
"0006CF": "Thales Avionics In-Flight Systems, LLC",
"0006D0": "Elgar Electronics Corp.",
"0006D1": "Tahoe Networks, Inc.",
"0006D2": "Tundra Semiconductor Corp.",
"0006D3": "Alpha Telecom, Inc. U.S.A.",
"0006D4": "Interactive Objects, Inc.",
"0006D5": "Diamond Systems Corp.",
"0006D6": "CISCO SYSTEMS, INC.",
"0006D7": "CISCO SYSTEMS, INC.",
"0006D8": "Maple Optical Systems",
"0006D9": "IPM-Net S.p.A.",
"0006DA": "ITRAN Communications Ltd.",
"0006DB": "ICHIPS Co., Ltd.",
"0006DC": "Syabas Technology (Amquest)",
"0006DD": "AT & T Laboratories - Cambridge Ltd",
"0006DE": "Flash Technology",
"0006DF": "AIDONIC Corporation",
"0006E0": "MAT Co., Ltd.",
"0006E1": "Techno Trade s.a",
"0006E2": "Ceemax Technology Co., Ltd.",
"0006E3": "Quantitative Imaging Corporation",
"0006E4": "Citel Technologies Ltd.",
"0006E5": "Fujian Newland Computer Ltd. Co.",
"0006E6": "DongYang Telecom Co., Ltd.",
"0006E7": "Bit Blitz Communications Inc.",
"0006E8": "Optical Network Testing, Inc.",
"0006E9": "Intime Corp.",
"0006EA": "ELZET80 Mikrocomputer GmbH&Co. KG",
"0006EB": "Global Data",
"0006EC": "Harris Corporation",
"0006ED": "Inara Networks",
"0006EE": "Shenyang Neu-era Information & Technology Stock Co., Ltd",
"0006EF": "Maxxan Systems, Inc.",
"0006F0": "Digeo, Inc.",
"0006F1": "Optillion",
"0006F2": "Platys Communications",
"0006F3": "AcceLight Networks",
"0006F4": "Prime Electronics & Satellitics Inc.",
"0006F5": "ALPS Co,. Ltd.",
"0006F6": "CISCO SYSTEMS, INC.",
"0006F7": "ALPS Co,. Ltd.",
"0006F8": "The Boeing Company",
"0006F9": "Mitsui Zosen Systems Research Inc.",
"0006FA": "IP SQUARE Co, Ltd.",
"0006FB": "Hitachi Printing Solutions, Ltd.",
"0006FC": "Fnet Co., Ltd.",
"0006FD": "Comjet Information Systems Corp.",
"0006FE": "Ambrado, Inc",
"0006FF": "Sheba Systems Co., Ltd.",
"000700": "Zettamedia Korea",
"000701": "RACAL-DATACOM",
"000702": "Varian Medical Systems",
"000703": "CSEE Transport",
"000704": "ALPS Co,. Ltd.",
"000705": "Endress & Hauser GmbH & Co",
"000706": "Sanritz Corporation",
"000707": "Interalia Inc.",
"000708": "Bitrage Inc.",
"000709": "Westerstrand Urfabrik AB",
"00070A": "Unicom Automation Co., Ltd.",
"00070B": "Novabase SGPS, SA",
"00070C": "SVA-Intrusion.com Co. Ltd.",
"00070D": "CISCO SYSTEMS, INC.",
"00070E": "CISCO SYSTEMS, INC.",
"00070F": "Fujant, Inc.",
"000710": "Adax, Inc.",
"000711": "Acterna",
"000712": "JAL Information Technology",
"000713": "IP One, Inc.",
"000714": "Brightcom",
"000715": "General Research of Electronics, Inc.",
"000716": "J & S Marine Ltd.",
"000717": "Wieland Electric GmbH",
"000718": "iCanTek Co., Ltd.",
"000719": "Mobiis Co., Ltd.",
"00071A": "Finedigital Inc.",
"00071B": "CDVI Americas Ltd",
"00071C": "AT&T Fixed Wireless Services",
"00071D": "Satelsa Sistemas Y Aplicaciones De Telecomunicaciones, S.A.",
"00071E": "Tri-M Engineering / Nupak Dev. Corp.",
"00071F": "European Systems Integration",
"000720": "Trutzschler GmbH & Co. KG",
"000721": "Formac Elektronik GmbH",
"000722": "The Nielsen Company",
"000723": "ELCON Systemtechnik GmbH",
"000724": "Telemax Co., Ltd.",
"000725": "Bematech International Corp.",
"000726": "Shenzhen Gongjin Electronics Co., Ltd.",
"000727": "Zi Corporation (HK) Ltd.",
"000728": "Neo Telecom",
"000729": "Kistler Instrumente AG",
"00072A": "Innovance Networks",
"00072B": "Jung Myung Telecom Co., Ltd.",
"00072C": "Fabricom",
"00072D": "CNSystems",
"00072E": "North Node AB",
"00072F": "Intransa, Inc.",
"000730": "Hutchison OPTEL Telecom Technology Co., Ltd.",
"000731": "Ophir-Spiricon LLC",
"000732": "AAEON Technology Inc.",
"000733": "DANCONTROL Engineering",
"000734": "ONStor, Inc.",
"000735": "Flarion Technologies, Inc.",
"000736": "Data Video Technologies Co., Ltd.",
"000737": "Soriya Co. Ltd.",
"000738": "Young Technology Co., Ltd.",
"000739": "Scotty Group Austria Gmbh",
"00073A": "Inventel Systemes",
"00073B": "Tenovis GmbH & Co KG",
"00073C": "Telecom Design",
"00073D": "Nanjing Postel Telecommunications Co., Ltd.",
"00073E": "China Great-Wall Computer Shenzhen Co., Ltd.",
"00073F": "Woojyun Systec Co., Ltd.",
"000740": "Buffalo Inc.",
"000741": "Sierra Automated Systems",
"000742": "Current Technologies, LLC",
"000743": "Chelsio Communications",
"000744": "Unico, Inc.",
"000745": "Radlan Computer Communications Ltd.",
"000746": "TURCK, Inc.",
"000747": "Mecalc",
"000748": "The Imaging Source Europe",
"000749": "CENiX Inc.",
"00074A": "Carl Valentin GmbH",
"00074B": "Daihen Corporation",
"00074C": "Beicom Inc.",
"00074D": "Zebra Technologies Corp.",
"00074E": "IPFRONT Inc",
"00074F": "CISCO SYSTEMS, INC.",
"000750": "CISCO SYSTEMS, INC.",
"000751": "m-u-t AG",
"000752": "Rhythm Watch Co., Ltd.",
"000753": "Beijing Qxcomm Technology Co., Ltd.",
"000754": "Xyterra Computing, Inc.",
"000755": "Lafon",
"000756": "Juyoung Telecom",
"000757": "Topcall International AG",
"000758": "Dragonwave",
"000759": "Boris Manufacturing Corp.",
"00075A": "Air Products and Chemicals, Inc.",
"00075B": "Gibson Guitars",
"00075C": "Eastman Kodak Company",
"00075D": "Celleritas Inc.",
"00075E": "Ametek Power Instruments",
"00075F": "VCS Video Communication Systems AG",
"000760": "TOMIS Information & Telecom Corp.",
"000761": "Logitech Europe SA",
"000762": "Group Sense Limited",
"000763": "Sunniwell Cyber Tech. Co., Ltd.",
"000764": "YoungWoo Telecom Co. Ltd.",
"000765": "Jade Quantum Technologies, Inc.",
"000766": "Chou Chin Industrial Co., Ltd.",
"000767": "Yuxing Electronics Company Limited",
"000768": "Danfoss A/S",
"000769": "Italiana Macchi SpA",
"00076A": "NEXTEYE Co., Ltd.",
"00076B": "Stralfors AB",
"00076C": "Daehanet, Inc.",
"00076D": "Flexlight Networks",
"00076E": "Sinetica Corporation Limited",
"00076F": "Synoptics Limited",
"000770": "Ubiquoss Inc",
"000771": "Embedded System Corporation",
"000772": "Alcatel Shanghai Bell Co., Ltd.",
"000773": "Ascom Powerline Communications Ltd.",
"000774": "GuangZhou Thinker Technology Co. Ltd.",
"000775": "Valence Semiconductor, Inc.",
"000776": "Federal APD",
"000777": "Motah Ltd.",
"000778": "GERSTEL GmbH & Co. KG",
"000779": "Sungil Telecom Co., Ltd.",
"00077A": "Infoware System Co., Ltd.",
"00077B": "Millimetrix Broadband Networks",
"00077C": "Westermo Teleindustri AB",
"00077D": "CISCO SYSTEMS, INC.",
"00077E": "Elrest GmbH",
"00077F": "J Communications Co., Ltd.",
"000780": "Bluegiga Technologies OY",
"000781": "Itron Inc.",
"000782": "Oracle Corporation",
"000783": "SynCom Network, Inc.",
"000784": "CISCO SYSTEMS, INC.",
"000785": "CISCO SYSTEMS, INC.",
"000786": "Wireless Networks Inc.",
"000787": "Idea System Co., Ltd.",
"000788": "Clipcomm, Inc.",
"000789": "DONGWON SYSTEMS",
"00078A": "Mentor Data System Inc.",
"00078B": "Wegener Communications, Inc.",
"00078C": "Elektronikspecialisten i Borlange AB",
"00078D": "NetEngines Ltd.",
"00078E": "Garz & Friche GmbH",
"00078F": "Emkay Innovative Products",
"000790": "Tri-M Technologies (s) Limited",
"000791": "International Data Communications, Inc.",
"000792": "S\u00fctron Electronic GmbH",
"000793": "Shin Satellite Public Company Limited",
"000794": "Simple Devices, Inc.",
"000795": "Elitegroup Computer System Co. (ECS)",
"000796": "LSI Systems, Inc.",
"000797": "Netpower Co., Ltd.",
"000798": "Selea SRL",
"000799": "Tipping Point Technologies, Inc.",
"00079A": "Verint Systems Inc",
"00079B": "Aurora Networks",
"00079C": "Golden Electronics Technology Co., Ltd.",
"00079D": "Musashi Co., Ltd.",
"00079E": "Ilinx Co., Ltd.",
"00079F": "Action Digital Inc.",
"0007A0": "e-Watch Inc.",
"0007A1": "VIASYS Healthcare GmbH",
"0007A2": "Opteon Corporation",
"0007A3": "Ositis Software, Inc.",
"0007A4": "GN Netcom Ltd.",
"0007A5": "Y.D.K Co. Ltd.",
"0007A6": "Home Automation, Inc.",
"0007A7": "A-Z Inc.",
"0007A8": "Haier Group Technologies Ltd.",
"0007A9": "Novasonics",
"0007AA": "Quantum Data Inc.",
"0007AB": "Samsung Electronics Co.,Ltd",
"0007AC": "Eolring",
"0007AD": "Pentacon GmbH Foto-und Feinwerktechnik",
"0007AE": "Britestream Networks, Inc.",
"0007AF": "N-TRON Corporation",
"0007B0": "Office Details, Inc.",
"0007B1": "Equator Technologies",
"0007B2": "Transaccess S.A.",
"0007B3": "CISCO SYSTEMS, INC.",
"0007B4": "CISCO SYSTEMS, INC.",
"0007B5": "Any One Wireless Ltd.",
"0007B6": "Telecom Technology Ltd.",
"0007B7": "Samurai Ind. Prods Eletronicos Ltda",
"0007B8": "Corvalent Corporation",
"0007B9": "Ginganet Corporation",
"0007BA": "UTStarcom, Inc.",
"0007BB": "Candera Inc.",
"0007BC": "Identix Inc.",
"0007BD": "Radionet Ltd.",
"0007BE": "DataLogic SpA",
"0007BF": "Armillaire Technologies, Inc.",
"0007C0": "NetZerver Inc.",
"0007C1": "Overture Networks, Inc.",
"0007C2": "Netsys Telecom",
"0007C3": "Thomson",
"0007C4": "JEAN Co. Ltd.",
"0007C5": "Gcom, Inc.",
"0007C6": "VDS Vosskuhler GmbH",
"0007C7": "Synectics Systems Limited",
"0007C8": "Brain21, Inc.",
"0007C9": "Technol Seven Co., Ltd.",
"0007CA": "Creatix Polymedia Ges Fur Kommunikaitonssysteme",
"0007CB": "Freebox SA",
"0007CC": "Kaba Benzing GmbH",
"0007CD": "Kumoh Electronic Co, Ltd",
"0007CE": "Cabletime Limited",
"0007CF": "Anoto AB",
"0007D0": "Automat Engenharia de Automa\u00e7\u00e3o Ltda.",
"0007D1": "Spectrum Signal Processing Inc.",
"0007D2": "Logopak Systeme GmbH & Co. KG",
"0007D3": "SPGPrints B.V.",
"0007D4": "Zhejiang Yutong Network Communication Co Ltd.",
"0007D5": "3e Technologies Int;., Inc.",
"0007D6": "Commil Ltd.",
"0007D7": "Caporis Networks AG",
"0007D8": "Hitron Systems Inc.",
"0007D9": "Splicecom",
"0007DA": "Neuro Telecom Co., Ltd.",
"0007DB": "Kirana Networks, Inc.",
"0007DC": "Atek Co, Ltd.",
"0007DD": "Cradle Technologies",
"0007DE": "eCopilt AB",
"0007DF": "Vbrick Systems Inc.",
"0007E0": "Palm Inc.",
"0007E1": "WIS Communications Co. Ltd.",
"0007E2": "Bitworks, Inc.",
"0007E3": "Navcom Technology, Inc.",
"0007E4": "SoftRadio Co., Ltd.",
"0007E5": "Coup Corporation",
"0007E6": "edgeflow Canada Inc.",
"0007E7": "FreeWave Technologies",
"0007E8": "EdgeWave",
"0007E9": "Intel Corporation",
"0007EA": "Massana, Inc.",
"0007EB": "CISCO SYSTEMS, INC.",
"0007EC": "CISCO SYSTEMS, INC.",
"0007ED": "Altera Corporation",
"0007EE": "telco Informationssysteme GmbH",
"0007EF": "Lockheed Martin Tactical Systems",
"0007F0": "LogiSync LLC",
"0007F1": "TeraBurst Networks Inc.",
"0007F2": "IOA Corporation",
"0007F3": "Thinkengine Networks",
"0007F4": "Eletex Co., Ltd.",
"0007F5": "Bridgeco Co AG",
"0007F6": "Qqest Software Systems",
"0007F7": "Galtronics",
"0007F8": "ITDevices, Inc.",
"0007F9": "Sensaphone",
"0007FA": "ITT Co., Ltd.",
"0007FB": "Giga Stream UMTS Technologies GmbH",
"0007FC": "Adept Systems Inc.",
"0007FD": "LANergy Ltd.",
"0007FE": "Rigaku Corporation",
"0007FF": "Gluon Networks",
"000800": "MULTITECH SYSTEMS, INC.",
"000801": "HighSpeed Surfing Inc.",
"000802": "Hewlett-Packard Company",
"000803": "Cos Tron",
"000804": "ICA Inc.",
"000805": "Techno-Holon Corporation",
"000806": "Raonet Systems, Inc.",
"000807": "Access Devices Limited",
"000808": "PPT Vision, Inc.",
"000809": "Systemonic AG",
"00080A": "Espera-Werke GmbH",
"00080B": "Birka BPA Informationssystem AB",
"00080C": "VDA Elettronica spa",
"00080D": "Toshiba",
"00080E": "ARRIS Group, Inc.",
"00080F": "Proximion Fiber Optics AB",
"000810": "Key Technology, Inc.",
"000811": "VOIX Corporation",
"000812": "GM-2 Corporation",
"000813": "Diskbank, Inc.",
"000814": "TIL Technologies",
"000815": "CATS Co., Ltd.",
"000816": "Bluelon ApS",
"000817": "EmergeCore Networks LLC",
"000818": "Pixelworks, Inc.",
"000819": "Banksys",
"00081A": "Sanrad Intelligence Storage Communications (2000) Ltd.",
"00081B": "Windigo Systems",
"00081C": "@pos.com",
"00081D": "Ipsil, Incorporated",
"00081E": "Repeatit AB",
"00081F": "Pou Yuen Tech Corp. Ltd.",
"000820": "CISCO SYSTEMS, INC.",
"000821": "CISCO SYSTEMS, INC.",
"000822": "InPro Comm",
"000823": "Texa Corp.",
"000824": "Nuance Document Imaging",
"000825": "Acme Packet",
"000826": "Colorado Med Tech",
"000827": "ADB Broadband Italia",
"000828": "Koei Engineering Ltd.",
"000829": "Aval Nagasaki Corporation",
"00082A": "Powerwallz Network Security",
"00082B": "Wooksung Electronics, Inc.",
"00082C": "Homag AG",
"00082D": "Indus Teqsite Private Limited",
"00082E": "Multitone Electronics PLC",
"00082F": "CISCO SYSTEMS, INC.",
"000830": "CISCO SYSTEMS, INC.",
"000831": "CISCO SYSTEMS, INC.",
"000832": "Cisco",
"00084E": "DivergeNet, Inc.",
"00084F": "Qualstar Corporation",
"000850": "Arizona Instrument Corp.",
"000851": "Canadian Bank Note Company, Ltd.",
"000852": "Davolink Co. Inc.",
"000853": "Schleicher GmbH & Co. Relaiswerke KG",
"000854": "Netronix, Inc.",
"000855": "NASA-Goddard Space Flight Center",
"000856": "Gamatronic Electronic Industries Ltd.",
"000857": "Polaris Networks, Inc.",
"000858": "Novatechnology Inc.",
"000859": "ShenZhen Unitone Electronics Co., Ltd.",
"00085A": "IntiGate Inc.",
"00085B": "Hanbit Electronics Co., Ltd.",
"00085C": "Shanghai Dare Technologies Co. Ltd.",
"00085D": "Aastra",
"00085E": "PCO AG",
"00085F": "Picanol N.V.",
"000860": "LodgeNet Entertainment Corp.",
"000861": "SoftEnergy Co., Ltd.",
"000862": "NEC Eluminant Technologies, Inc.",
"000863": "Entrisphere Inc.",
"000864": "Fasy S.p.A.",
"000865": "JASCOM CO., LTD",
"000866": "DSX Access Systems, Inc.",
"000867": "Uptime Devices",
"000868": "PurOptix",
"000869": "Command-e Technology Co.,Ltd.",
"00086A": "Securiton Gmbh",
"00086B": "MIPSYS",
"00086C": "Plasmon LMS",
"00086D": "Missouri FreeNet",
"00086E": "Hyglo AB",
"00086F": "Resources Computer Network Ltd.",
"000870": "Rasvia Systems, Inc.",
"000871": "NORTHDATA Co., Ltd.",
"000872": "Sorenson Communications",
"000873": "DapTechnology B.V.",
"000874": "Dell Computer Corp.",
"000875": "Acorp Electronics Corp.",
"000876": "SDSystem",
"000877": "Liebert-Hiross Spa",
"000878": "Benchmark Storage Innovations",
"000879": "CEM Corporation",
"00087A": "Wipotec GmbH",
"00087B": "RTX Telecom A/S",
"00087C": "CISCO SYSTEMS, INC.",
"00087D": "CISCO SYSTEMS, INC.",
"00087E": "Bon Electro-Telecom Inc.",
"00087F": "SPAUN electronic GmbH & Co. KG",
"000880": "BroadTel Canada Communications inc.",
"000881": "DIGITAL HANDS CO.,LTD.",
"000882": "SIGMA CORPORATION",
"000883": "Hewlett-Packard Company",
"000884": "Index Braille AB",
"000885": "EMS Dr. Thomas W\u00fcnsche",
"000886": "Hansung Teliann, Inc.",
"000887": "Maschinenfabrik Reinhausen GmbH",
"000888": "OULLIM Information Technology Inc,.",
"000889": "Echostar Technologies Corp",
"00088A": "Minds@Work",
"00088B": "Tropic Networks Inc.",
"00088C": "Quanta Network Systems Inc.",
"00088D": "Sigma-Links Inc.",
"00088E": "Nihon Computer Co., Ltd.",
"00088F": "ADVANCED DIGITAL TECHNOLOGY",
"000890": "AVILINKS SA",
"000891": "Lyan Inc.",
"000892": "EM Solutions",
"000893": "LE INFORMATION COMMUNICATION INC.",
"000894": "InnoVISION Multimedia Ltd.",
"000895": "DIRC Technologie GmbH & Co.KG",
"000896": "Printronix, Inc.",
"000897": "Quake Technologies",
"000898": "Gigabit Optics Corporation",
"000899": "Netbind, Inc.",
"00089A": "Alcatel Microelectronics",
"00089B": "ICP Electronics Inc.",
"00089C": "Elecs Industry Co., Ltd.",
"00089D": "UHD-Elektronik",
"00089E": "Beijing Enter-Net co.LTD",
"00089F": "EFM Networks",
"0008A0": "Stotz Feinmesstechnik GmbH",
"0008A1": "CNet Technology Inc.",
"0008A2": "ADI Engineering, Inc.",
"0008A3": "CISCO SYSTEMS, INC.",
"0008A4": "CISCO SYSTEMS, INC.",
"0008A5": "Peninsula Systems Inc.",
"0008A6": "Multiware & Image Co., Ltd.",
"0008A7": "iLogic Inc.",
"0008A8": "Systec Co., Ltd.",
"0008A9": "SangSang Technology, Inc.",
"0008AA": "KARAM",
"0008AB": "EnerLinx.com, Inc.",
"0008AC": "Eltromat GmbH",
"0008AD": "Toyo-Linx Co., Ltd.",
"0008AE": "PacketFront Network Products AB",
"0008AF": "Novatec Corporation",
"0008B0": "BKtel communications GmbH",
"0008B1": "ProQuent Systems",
"0008B2": "SHENZHEN COMPASS TECHNOLOGY DEVELOPMENT CO.,LTD",
"0008B3": "Fastwel",
"0008B4": "SYSPOL",
"0008B5": "TAI GUEN ENTERPRISE CO., LTD",
"0008B6": "RouteFree, Inc.",
"0008B7": "HIT Incorporated",
"0008B8": "E.F. Johnson",
"0008B9": "KAON MEDIA Co., Ltd.",
"0008BA": "Erskine Systems Ltd",
"0008BB": "NetExcell",
"0008BC": "Ilevo AB",
"0008BD": "TEPG-US",
"0008BE": "XENPAK MSA Group",
"0008BF": "Aptus Elektronik AB",
"0008C0": "ASA SYSTEMS",
"0008C1": "Avistar Communications Corporation",
"0008C2": "CISCO SYSTEMS, INC.",
"0008C3": "Contex A/S",
"0008C4": "Hikari Co.,Ltd.",
"0008C5": "Liontech Co., Ltd.",
"0008C6": "Philips Consumer Communications",
"0008C7": "Hewlett-Packard Company",
"0008C8": "Soneticom, Inc.",
"0008C9": "TechniSat Digital GmbH",
"0008CA": "TwinHan Technology Co.,Ltd",
"0008CB": "Zeta Broadband Inc.",
"0008CC": "Remotec, Inc.",
"0008CD": "With-Net Inc",
"0008CE": "IPMobileNet Inc.",
"0008CF": "Nippon Koei Power Systems Co., Ltd.",
"0008D0": "Musashi Engineering Co., LTD.",
"0008D1": "KAREL INC.",
"0008D2": "ZOOM Networks Inc.",
"0008D3": "Hercules Technologies S.A.S.",
"0008D4": "IneoQuest Technologies, Inc",
"0008D5": "Vanguard Networks Solutions, LLC",
"0008D6": "HASSNET Inc.",
"0008D7": "HOW CORPORATION",
"0008D8": "Dowkey Microwave",
"0008D9": "Mitadenshi Co.,LTD",
"0008DA": "SofaWare Technologies Ltd.",
"0008DB": "Corrigent Systems",
"0008DC": "Wiznet",
"0008DD": "Telena Communications, Inc.",
"0008DE": "3UP Systems",
"0008DF": "Alistel Inc.",
"0008E0": "ATO Technology Ltd.",
"0008E1": "Barix AG",
"0008E2": "CISCO SYSTEMS, INC.",
"0008E3": "CISCO SYSTEMS, INC.",
"0008E4": "Envenergy Inc",
"0008E5": "IDK Corporation",
"0008E6": "Littlefeet",
"0008E7": "SHI ControlSystems,Ltd.",
"0008E8": "Excel Master Ltd.",
"0008E9": "NextGig",
"0008EA": "Motion Control Engineering, Inc",
"0008EB": "ROMWin Co.,Ltd.",
"0008EC": "Optical Zonu Corporation",
"0008ED": "ST&T Instrument Corp.",
"0008EE": "Logic Product Development",
"0008EF": "DIBAL,S.A.",
"0008F0": "Next Generation Systems, Inc.",
"0008F1": "Voltaire",
"0008F2": "C&S Technology",
"0008F3": "WANY",
"0008F4": "Bluetake Technology Co., Ltd.",
"0008F5": "YESTECHNOLOGY Co.,Ltd.",
"0008F6": "Sumitomo Electric System Solutions Co., Ltd.",
"0008F7": "Hitachi Ltd, Semiconductor & Integrated Circuits Gr",
"0008F8": "UTC CCS",
"0008F9": "Emerson Network Power",
"0008FA": "Karl E.Brinkmann GmbH",
"0008FB": "SonoSite, Inc.",
"0008FC": "Gigaphoton Inc.",
"0008FD": "BlueKorea Co., Ltd.",
"0008FE": "UNIK C&C Co.,Ltd.",
"0008FF": "Trilogy Communications Ltd",
"000900": "TMT",
"000901": "Shenzhen Shixuntong Information & Technoligy Co",
"000902": "Redline Communications Inc.",
"000903": "Panasas, Inc",
"000904": "MONDIAL electronic",
"000905": "iTEC Technologies Ltd.",
"000906": "Esteem Networks",
"000907": "Chrysalis Development",
"000908": "VTech Technology Corp.",
"000909": "Telenor Connect A/S",
"00090A": "SnedFar Technology Co., Ltd.",
"00090B": "MTL Instruments PLC",
"00090C": "Mayekawa Mfg. Co. Ltd.",
"00090D": "LEADER ELECTRONICS CORP.",
"00090E": "Helix Technology Inc.",
"00090F": "Fortinet Inc.",
"000910": "Simple Access Inc.",
"000911": "CISCO SYSTEMS, INC.",
"000912": "CISCO SYSTEMS, INC.",
"000913": "SystemK Corporation",
"000914": "COMPUTROLS INC.",
"000915": "CAS Corp.",
"000916": "Listman Home Technologies, Inc.",
"000917": "WEM Technology Inc",
"000918": "SAMSUNG TECHWIN CO.,LTD",
"000919": "MDS Gateways",
"00091A": "Macat Optics & Electronics Co., Ltd.",
"00091B": "Digital Generation Inc.",
"00091C": "CacheVision, Inc",
"00091D": "Proteam Computer Corporation",
"00091E": "Firstech Technology Corp.",
"00091F": "A&D Co., Ltd.",
"000920": "EpoX COMPUTER CO.,LTD.",
"000921": "Planmeca Oy",
"000922": "TST Biometrics GmbH",
"000923": "Heaman System Co., Ltd",
"000924": "Telebau GmbH",
"000925": "VSN Systemen BV",
"000926": "YODA COMMUNICATIONS, INC.",
"000927": "TOYOKEIKI CO.,LTD.",
"000928": "Telecore",
"000929": "Sanyo Industries (UK) Limited",
"00092A": "MYTECS Co.,Ltd.",
"00092B": "iQstor Networks, Inc.",
"00092C": "Hitpoint Inc.",
"00092D": "HTC Corporation",
"00092E": "B&Tech System Inc.",
"00092F": "Akom Technology Corporation",
"000930": "AeroConcierge Inc.",
"000931": "Future Internet, Inc.",
"000932": "Omnilux",
"000933": "Ophit Co.Ltd.",
"000934": "Dream-Multimedia-Tv GmbH",
"000935": "Sandvine Incorporated",
"000936": "Ipetronik GmbH & Co. KG",
"000937": "Inventec Appliance Corp",
"000938": "Allot Communications",
"000939": "ShibaSoku Co.,Ltd.",
"00093A": "Molex Fiber Optics",
"00093B": "HYUNDAI NETWORKS INC.",
"00093C": "Jacques Technologies P/L",
"00093D": "Newisys,Inc.",
"00093E": "C&I Technologies",
"00093F": "Double-Win Enterpirse CO., LTD",
"000940": "AGFEO GmbH & Co. KG",
"000941": "Allied Telesis K.K.",
"000942": "Wireless Technologies, Inc",
"000943": "CISCO SYSTEMS, INC.",
"000944": "CISCO SYSTEMS, INC.",
"000945": "Palmmicro Communications Inc",
"000946": "Cluster Labs GmbH",
"000947": "Aztek, Inc.",
"000948": "Vista Control Systems, Corp.",
"000949": "Glyph Technologies Inc.",
"00094A": "Homenet Communications",
"00094B": "FillFactory NV",
"00094C": "Communication Weaver Co.,Ltd.",
"00094D": "Braintree Communications Pty Ltd",
"00094E": "BARTECH SYSTEMS INTERNATIONAL, INC",
"00094F": "elmegt GmbH & Co. KG",
"000950": "Independent Storage Corporation",
"000951": "Apogee Imaging Systems",
"000952": "Auerswald GmbH & Co. KG",
"000953": "Linkage System Integration Co.Ltd.",
"000954": "AMiT spol. s. r. o.",
"000955": "Young Generation International Corp.",
"000956": "Network Systems Group, Ltd. (NSG)",
"000957": "Supercaller, Inc.",
"000958": "INTELNET S.A.",
"000959": "Sitecsoft",
"00095A": "RACEWOOD TECHNOLOGY",
"00095B": "Netgear, Inc.",
"00095C": "Philips Medical Systems - Cardiac and Monitoring Systems (CM",
"00095D": "Dialogue Technology Corp.",
"00095E": "Masstech Group Inc.",
"00095F": "Telebyte, Inc.",
"000960": "YOZAN Inc.",
"000961": "Switchgear and Instrumentation Ltd",
"000962": "Sonitor Technologies AS",
"000963": "Dominion Lasercom Inc.",
"000964": "Hi-Techniques, Inc.",
"000965": "HyunJu Computer Co., Ltd.",
"000966": "Thales Navigation",
"000967": "Tachyon, Inc",
"000968": "TECHNOVENTURE, INC.",
"000969": "Meret Optical Communications",
"00096A": "Cloverleaf Communications Inc.",
"00096B": "IBM Corp",
"00096C": "Imedia Semiconductor Corp.",
"00096D": "Powernet Technologies Corp.",
"00096E": "GIANT ELECTRONICS LTD.",
"00096F": "Beijing Zhongqing Elegant Tech. Corp.,Limited",
"000970": "Vibration Research Corporation",
"000971": "Time Management, Inc.",
"000972": "Securebase,Inc",
"000973": "Lenten Technology Co., Ltd.",
"000974": "Innopia Technologies, Inc.",
"000975": "fSONA Communications Corporation",
"000976": "Datasoft ISDN Systems GmbH",
"000977": "Brunner Elektronik AG",
"000978": "AIJI System Co., Ltd.",
"000979": "Advanced Television Systems Committee, Inc.",
"00097A": "Louis Design Labs.",
"00097B": "CISCO SYSTEMS, INC.",
"00097C": "CISCO SYSTEMS, INC.",
"00097D": "SecWell Networks Oy",
"00097E": "IMI TECHNOLOGY CO., LTD",
"00097F": "Vsecure 2000 LTD.",
"000980": "Power Zenith Inc.",
"000981": "Newport Networks",
"000982": "Loewe Opta GmbH",
"000983": "GlobalTop Technology, Inc.",
"000984": "MyCasa Network Inc.",
"000985": "Auto Telecom Company",
"000986": "Metalink LTD.",
"000987": "NISHI NIPPON ELECTRIC WIRE & CABLE CO.,LTD.",
"000988": "Nudian Electron Co., Ltd.",
"000989": "VividLogic Inc.",
"00098A": "EqualLogic Inc",
"00098B": "Entropic Communications, Inc.",
"00098C": "Option Wireless Sweden",
"00098D": "Velocity Semiconductor",
"00098E": "ipcas GmbH",
"00098F": "Cetacean Networks",
"000990": "ACKSYS Communications & systems",
"000991": "GE Fanuc Automation Manufacturing, Inc.",
"000992": "InterEpoch Technology,INC.",
"000993": "Visteon Corporation",
"000994": "Cronyx Engineering",
"000995": "Castle Technology Ltd",
"000996": "RDI",
"000997": "Nortel Networks",
"000998": "Capinfo Company Limited",
"000999": "CP GEORGES RENAULT",
"00099A": "ELMO COMPANY, LIMITED",
"00099B": "Western Telematic Inc.",
"00099C": "Naval Research Laboratory",
"00099D": "Haliplex Communications",
"00099E": "Testech, Inc.",
"00099F": "VIDEX INC.",
"0009A0": "Microtechno Corporation",
"0009A1": "Telewise Communications, Inc.",
"0009A2": "Interface Co., Ltd.",
"0009A3": "Leadfly Techologies Corp. Ltd.",
"0009A4": "HARTEC Corporation",
"0009A5": "HANSUNG ELETRONIC INDUSTRIES DEVELOPMENT CO., LTD",
"0009A6": "Ignis Optics, Inc.",
"0009A7": "Bang & Olufsen A/S",
"0009A8": "Eastmode Pte Ltd",
"0009A9": "Ikanos Communications",
"0009AA": "Data Comm for Business, Inc.",
"0009AB": "Netcontrol Oy",
"0009AC": "LANVOICE",
"0009AD": "HYUNDAI SYSCOMM, INC.",
"0009AE": "OKANO ELECTRIC CO.,LTD",
"0009AF": "e-generis",
"0009B0": "Onkyo Corporation",
"0009B1": "Kanematsu Electronics, Ltd.",
"0009B2": "L&F Inc.",
"0009B3": "MCM Systems Ltd",
"0009B4": "KISAN TELECOM CO., LTD.",
"0009B5": "3J Tech. Co., Ltd.",
"0009B6": "CISCO SYSTEMS, INC.",
"0009B7": "CISCO SYSTEMS, INC.",
"0009B8": "Entise Systems",
"0009B9": "Action Imaging Solutions",
"0009BA": "MAKU Informationstechik GmbH",
"0009BB": "MathStar, Inc.",
"0009BC": "Digital Safety Technologies, Inc",
"0009BD": "Epygi Technologies, Ltd.",
"0009BE": "Mamiya-OP Co.,Ltd.",
"0009BF": "Nintendo Co., Ltd.",
"0009C0": "6WIND",
"0009C1": "PROCES-DATA A/S",
"0009C2": "Onity, Inc.",
"0009C3": "NETAS",
"0009C4": "Medicore Co., Ltd",
"0009C5": "KINGENE Technology Corporation",
"0009C6": "Visionics Corporation",
"0009C7": "Movistec",
"0009C8": "SINAGAWA TSUSHIN KEISOU SERVICE",
"0009C9": "BlueWINC Co., Ltd.",
"0009CA": "iMaxNetworks(Shenzhen)Limited.",
"0009CB": "HBrain",
"0009CC": "Moog GmbH",
"0009CD": "HUDSON SOFT CO.,LTD.",
"0009CE": "SpaceBridge Semiconductor Corp.",
"0009CF": "iAd GmbH",
"0009D0": "Solacom Technologies Inc.",
"0009D1": "SERANOA NETWORKS INC",
"0009D2": "Mai Logic Inc.",
"0009D3": "Western DataCom Co., Inc.",
"0009D4": "Transtech Networks",
"0009D5": "Signal Communication, Inc.",
"0009D6": "KNC One GmbH",
"0009D7": "DC Security Products",
"0009D8": "F\u00e4lt Communications AB",
"0009D9": "Neoscale Systems, Inc",
"0009DA": "Control Module Inc.",
"0009DB": "eSpace",
"0009DC": "Galaxis Technology AG",
"0009DD": "Mavin Technology Inc.",
"0009DE": "Samjin Information & Communications Co., Ltd.",
"0009DF": "Vestel Komunikasyon Sanayi ve Ticaret A.S.",
"0009E0": "XEMICS S.A.",
"0009E1": "Gemtek Technology Co., Ltd.",
"0009E2": "Sinbon Electronics Co., Ltd.",
"0009E3": "Angel Iglesias S.A.",
"0009E4": "K Tech Infosystem Inc.",
"0009E5": "Hottinger Baldwin Messtechnik GmbH",
"0009E6": "Cyber Switching Inc.",
"0009E7": "ADC Techonology",
"0009E8": "CISCO SYSTEMS, INC.",
"0009E9": "CISCO SYSTEMS, INC.",
"0009EA": "YEM Inc.",
"0009EB": "HuMANDATA LTD.",
"0009EC": "Daktronics, Inc.",
"0009ED": "CipherOptics",
"0009EE": "MEIKYO ELECTRIC CO.,LTD",
"0009EF": "Vocera Communications",
"0009F0": "Shimizu Technology Inc.",
"0009F1": "Yamaki Electric Corporation",
"0009F2": "Cohu, Inc., Electronics Division",
"0009F3": "WELL Communication Corp.",
"0009F4": "Alcon Laboratories, Inc.",
"0009F5": "Emerson Network Power Co.,Ltd",
"0009F6": "Shenzhen Eastern Digital Tech Ltd.",
"0009F7": "SED, a division of Calian",
"0009F8": "UNIMO TECHNOLOGY CO., LTD.",
"0009F9": "ART JAPAN CO., LTD.",
"0009FB": "Philips Patient Monitoring",
"0009FC": "IPFLEX Inc.",
"0009FD": "Ubinetics Limited",
"0009FE": "Daisy Technologies, Inc.",
"0009FF": "X.net 2000 GmbH",
"000A00": "Mediatek Corp.",
"000A01": "SOHOware, Inc.",
"000A02": "ANNSO CO., LTD.",
"000A03": "ENDESA SERVICIOS, S.L.",
"000A04": "3Com Ltd",
"000A05": "Widax Corp.",
"000A06": "Teledex LLC",
"000A07": "WebWayOne Ltd",
"000A08": "ALPINE ELECTRONICS, INC.",
"000A09": "TaraCom Integrated Products, Inc.",
"000A0A": "SUNIX Co., Ltd.",
"000A0B": "Sealevel Systems, Inc.",
"000A0C": "Scientific Research Corporation",
"000A0D": "FCI Deutschland GmbH",
"000A0E": "Invivo Research Inc.",
"000A0F": "Ilryung Telesys, Inc",
"000A10": "FAST media integrations AG",
"000A11": "ExPet Technologies, Inc",
"000A12": "Azylex Technology, Inc",
"000A13": "Honeywell Video Systems",
"000A14": "TECO a.s.",
"000A15": "Silicon Data, Inc",
"000A16": "Lassen Research",
"000A17": "NESTAR COMMUNICATIONS, INC",
"000A18": "Vichel Inc.",
"000A19": "Valere Power, Inc.",
"000A1A": "Imerge Ltd",
"000A1B": "Stream Labs",
"000A1C": "Bridge Information Co., Ltd.",
"000A1D": "Optical Communications Products Inc.",
"000A1E": "Red-M Products Limited",
"000A1F": "ART WARE Telecommunication Co., Ltd.",
"000A20": "SVA Networks, Inc.",
"000A21": "Integra Telecom Co. Ltd",
"000A22": "Amperion Inc",
"000A23": "Parama Networks Inc",
"000A24": "Octave Communications",
"000A25": "CERAGON NETWORKS",
"000A26": "CEIA S.p.A.",
"000A27": "Apple",
"000A28": "Motorola",
"000A29": "Pan Dacom Networking AG",
"000A2A": "QSI Systems Inc.",
"000A2B": "Etherstuff",
"000A2C": "Active Tchnology Corporation",
"000A2D": "Cabot Communications Limited",
"000A2E": "MAPLE NETWORKS CO., LTD",
"000A2F": "Artnix Inc.",
"000A30": "Johnson Controls-ASG",
"000A31": "HCV Consulting",
"000A32": "Xsido Corporation",
"000A33": "Emulex Corporation",
"000A34": "Identicard Systems Incorporated",
"000A35": "Xilinx",
"000A36": "Synelec Telecom Multimedia",
"000A37": "Procera Networks, Inc.",
"000A38": "Apani Networks",
"000A39": "LoPA Information Technology",
"000A3A": "J-THREE INTERNATIONAL Holding Co., Ltd.",
"000A3B": "GCT Semiconductor, Inc",
"000A3C": "Enerpoint Ltd.",
"000A3D": "Elo Sistemas Eletronicos S.A.",
"000A3E": "EADS Telecom",
"000A3F": "Data East Corporation",
"000A40": "Crown Audio -- Harmanm International",
"000A41": "CISCO SYSTEMS, INC.",
"000A42": "CISCO SYSTEMS, INC.",
"000A43": "Chunghwa Telecom Co., Ltd.",
"000A44": "Avery Dennison Deutschland GmbH",
"000A45": "Audio-Technica Corp.",
"000A46": "ARO WELDING TECHNOLOGIES SAS",
"000A47": "Allied Vision Technologies",
"000A48": "Albatron Technology",
"000A49": "F5 Networks, Inc.",
"000A4A": "Targa Systems Ltd.",
"000A4B": "DataPower Technology, Inc.",
"000A4C": "Molecular Devices Corporation",
"000A4D": "Noritz Corporation",
"000A4E": "UNITEK Electronics INC.",
"000A4F": "Brain Boxes Limited",
"000A50": "REMOTEK CORPORATION",
"000A51": "GyroSignal Technology Co., Ltd.",
"000A52": "AsiaRF Ltd.",
"000A53": "Intronics, Incorporated",
"000A54": "Laguna Hills, Inc.",
"000A55": "MARKEM Corporation",
"000A56": "HITACHI Maxell Ltd.",
"000A57": "Hewlett-Packard Company - Standards",
"000A58": "Freyer & Siegel Elektronik GmbH & Co. KG",
"000A59": "HW server",
"000A5A": "GreenNET Technologies Co.,Ltd.",
"000A5B": "Power-One as",
"000A5C": "Carel s.p.a.",
"000A5D": "FingerTec Worldwide Sdn Bhd",
"000A5E": "3COM Corporation",
"000A5F": "almedio inc.",
"000A60": "Autostar Technology Pte Ltd",
"000A61": "Cellinx Systems Inc.",
"000A62": "Crinis Networks, Inc.",
"000A63": "DHD GmbH",
"000A64": "Eracom Technologies",
"000A65": "GentechMedia.co.,ltd.",
"000A66": "MITSUBISHI ELECTRIC SYSTEM & SERVICE CO.,LTD.",
"000A67": "OngCorp",
"000A68": "SolarFlare Communications, Inc.",
"000A69": "SUNNY bell Technology Co., Ltd.",
"000A6A": "SVM Microwaves s.r.o.",
"000A6B": "Tadiran Telecom Business Systems LTD",
"000A6C": "Walchem Corporation",
"000A6D": "EKS Elektronikservice GmbH",
"000A6E": "Harmonic, Inc",
"000A6F": "ZyFLEX Technologies Inc",
"000A70": "MPLS Forum",
"000A71": "Avrio Technologies, Inc",
"000A72": "STEC, INC.",
"000A73": "Scientific Atlanta",
"000A74": "Manticom Networks Inc.",
"000A75": "Caterpillar, Inc",
"000A76": "Beida Jade Bird Huaguang Technology Co.,Ltd",
"000A77": "Bluewire Technologies LLC",
"000A78": "OLITEC",
"000A79": "corega K.K",
"000A7A": "Kyoritsu Electric Co., Ltd.",
"000A7B": "Cornelius Consult",
"000A7C": "Tecton Ltd",
"000A7D": "Valo, Inc.",
"000A7E": "The Advantage Group",
"000A7F": "Teradon Industries, Inc",
"000A80": "Telkonet Inc.",
"000A81": "TEIMA Audiotex S.L.",
"000A82": "TATSUTA SYSTEM ELECTRONICS CO.,LTD.",
"000A83": "SALTO SYSTEMS S.L.",
"000A84": "Rainsun Enterprise Co., Ltd.",
"000A85": "PLAT'C2,Inc",
"000A86": "Lenze",
"000A87": "Integrated Micromachines Inc.",
"000A88": "InCypher S.A.",
"000A89": "Creval Systems, Inc.",
"000A8A": "CISCO SYSTEMS, INC.",
"000A8B": "CISCO SYSTEMS, INC.",
"000A8C": "Guardware Systems Ltd.",
"000A8D": "EUROTHERM LIMITED",
"000A8E": "Invacom Ltd",
"000A8F": "Aska International Inc.",
"000A90": "Bayside Interactive, Inc.",
"000A91": "HemoCue AB",
"000A92": "Presonus Corporation",
"000A93": "W2 Networks, Inc.",
"000A94": "ShangHai cellink CO., LTD",
"000A95": "Apple",
"000A96": "MEWTEL TECHNOLOGY INC.",
"000A97": "SONICblue, Inc.",
"000A98": "M+F Gwinner GmbH & Co",
"000A99": "Calamp Wireless Networks Inc",
"000A9A": "Aiptek International Inc",
"000A9B": "TB Group Inc",
"000A9C": "Server Technology, Inc.",
"000A9D": "King Young Technology Co. Ltd.",
"000A9E": "BroadWeb Corportation",
"000A9F": "Pannaway Technologies, Inc.",
"000AA0": "Cedar Point Communications",
"000AA1": "V V S Limited",
"000AA2": "SYSTEK INC.",
"000AA3": "SHIMAFUJI ELECTRIC CO.,LTD.",
"000AA4": "SHANGHAI SURVEILLANCE TECHNOLOGY CO,LTD",
"000AA5": "MAXLINK INDUSTRIES LIMITED",
"000AA6": "Hochiki Corporation",
"000AA7": "FEI Electron Optics",
"000AA8": "ePipe Pty. Ltd.",
"000AA9": "Brooks Automation GmbH",
"000AAA": "AltiGen Communications Inc.",
"000AAB": "Toyota Technical Development Corporation",
"000AAC": "TerraTec Electronic GmbH",
"000AAD": "Stargames Corporation",
"000AAE": "Rosemount Process Analytical",
"000AAF": "Pipal Systems",
"000AB0": "LOYTEC electronics GmbH",
"000AB1": "GENETEC Corporation",
"000AB2": "Fresnel Wireless Systems",
"000AB3": "Fa. GIRA",
"000AB4": "ETIC Telecommunications",
"000AB5": "Digital Electronic Network",
"000AB6": "COMPUNETIX, INC",
"000AB7": "CISCO SYSTEMS, INC.",
"000AB8": "CISCO SYSTEMS, INC.",
"000AB9": "Astera Technologies Corp.",
"000ABA": "Arcon Technology Limited",
"000ABB": "Taiwan Secom Co,. Ltd",
"000ABC": "Seabridge Ltd.",
"000ABD": "Rupprecht & Patashnick Co.",
"000ABE": "OPNET Technologies CO., LTD.",
"000ABF": "HIROTA SS",
"000AC0": "Fuyoh Video Industry CO., LTD.",
"000AC1": "Futuretel",
"000AC2": "FiberHome Telecommunication Technologies CO.,LTD",
"000AC3": "eM Technics Co., Ltd.",
"000AC4": "Daewoo Teletech Co., Ltd",
"000AC5": "Color Kinetics",
"000AC6": "Overture Networks.",
"000AC7": "Unication Group",
"000AC8": "ZPSYS CO.,LTD. (Planning&Management)",
"000AC9": "Zambeel Inc",
"000ACA": "YOKOYAMA SHOKAI CO.,Ltd.",
"000ACB": "XPAK MSA Group",
"000ACC": "Winnow Networks, Inc.",
"000ACD": "Sunrich Technology Limited",
"000ACE": "RADIANTECH, INC.",
"000ACF": "PROVIDEO Multimedia Co. Ltd.",
"000AD0": "Niigata Develoment Center, F.I.T. Co., Ltd.",
"000AD1": "MWS",
"000AD2": "JEPICO Corporation",
"000AD3": "INITECH Co., Ltd",
"000AD4": "CoreBell Systems Inc.",
"000AD5": "Brainchild Electronic Co., Ltd.",
"000AD6": "BeamReach Networks",
"000AD7": "Origin ELECTRIC CO.,LTD.",
"000AD8": "IPCserv Technology Corp.",
"000AD9": "Sony Ericsson Mobile Communications AB",
"000ADA": "Vindicator Technologies",
"000ADB": "SkyPilot Network, Inc",
"000ADC": "RuggedCom Inc.",
"000ADD": "Allworx Corp.",
"000ADE": "Happy Communication Co., Ltd.",
"000ADF": "Gennum Corporation",
"000AE0": "Fujitsu Softek",
"000AE1": "EG Technology",
"000AE2": "Binatone Electronics International, Ltd",
"000AE3": "YANG MEI TECHNOLOGY CO., LTD",
"000AE4": "Wistron Corp.",
"000AE5": "ScottCare Corporation",
"000AE6": "Elitegroup Computer System Co. (ECS)",
"000AE7": "ELIOP S.A.",
"000AE8": "Cathay Roxus Information Technology Co. LTD",
"000AE9": "AirVast Technology Inc.",
"000AEA": "ADAM ELEKTRONIK LTD. \u015eTI",
"000AEB": "Shenzhen Tp-Link Technology Co; Ltd.",
"000AEC": "Koatsu Gas Kogyo Co., Ltd.",
"000AED": "HARTING Systems GmbH & Co KG",
"000AEE": "GCD Hard- & Software GmbH",
"000AEF": "OTRUM ASA",
"000AF0": "SHIN-OH ELECTRONICS CO., LTD. R&D",
"000AF1": "Clarity Design, Inc.",
"000AF2": "NeoAxiom Corp.",
"000AF3": "CISCO SYSTEMS, INC.",
"000AF4": "CISCO SYSTEMS, INC.",
"000AF5": "Airgo Networks, Inc.",
"000AF6": "Emerson Climate Technologies Retail Solutions, Inc.",
"000AF7": "Broadcom Corp.",
"000AF8": "American Telecare Inc.",
"000AF9": "HiConnect, Inc.",
"000AFA": "Traverse Technologies Australia",
"000AFB": "Ambri Limited",
"000AFC": "Core Tec Communications, LLC",
"000AFD": "Viking Electronic Services",
"000AFE": "NovaPal Ltd",
"000AFF": "Kilchherr Elektronik AG",
"000B00": "FUJIAN START COMPUTER EQUIPMENT CO.,LTD",
"000B01": "DAIICHI ELECTRONICS CO., LTD.",
"000B02": "Dallmeier electronic",
"000B03": "Taekwang Industrial Co., Ltd",
"000B04": "Volktek Corporation",
"000B05": "Pacific Broadband Networks",
"000B06": "ARRIS Group, Inc.",
"000B07": "Voxpath Networks",
"000B08": "Pillar Data Systems",
"000B09": "Ifoundry Systems Singapore",
"000B0A": "dBm Optics",
"000B0B": "Corrent Corporation",
"000B0C": "Agile Systems Inc.",
"000B0D": "Air2U, Inc.",
"000B0E": "Trapeze Networks",
"000B0F": "Bosch Rexroth",
"000B10": "11wave Technonlogy Co.,Ltd",
"000B11": "HIMEJI ABC TRADING CO.,LTD.",
"000B12": "NURI Telecom Co., Ltd.",
"000B13": "ZETRON INC",
"000B14": "ViewSonic Corporation",
"000B15": "Platypus Technology",
"000B16": "Communication Machinery Corporation",
"000B17": "MKS Instruments",
"000B18": "PRIVATE",
"000B19": "Vernier Networks, Inc.",
"000B1A": "Industrial Defender, Inc.",
"000B1B": "Systronix, Inc.",
"000B1C": "SIBCO bv",
"000B1D": "LayerZero Power Systems, Inc.",
"000B1E": "KAPPA opto-electronics GmbH",
"000B1F": "I CON Computer Co.",
"000B20": "Hirata corporation",
"000B21": "G-Star Communications Inc.",
"000B22": "Environmental Systems and Services",
"000B23": "Siemens Subscriber Networks",
"000B24": "AirLogic",
"000B25": "Aeluros",
"000B26": "Wetek Corporation",
"000B27": "Scion Corporation",
"000B28": "Quatech Inc.",
"000B29": "LS(LG) Industrial Systems co.,Ltd",
"000B2A": "HOWTEL Co., Ltd.",
"000B2B": "HOSTNET CORPORATION",
"000B2C": "Eiki Industrial Co. Ltd.",
"000B2D": "Danfoss Inc.",
"000B2E": "Cal-Comp Electronics (Thailand) Public Company Limited Taipe",
"000B2F": "bplan GmbH",
"000B30": "Beijing Gongye Science & Technology Co.,Ltd",
"000B31": "Yantai ZhiYang Scientific and technology industry CO., LTD",
"000B32": "VORMETRIC, INC.",
"000B33": "Vivato Technologies",
"000B34": "ShangHai Broadband Technologies CO.LTD",
"000B35": "Quad Bit System co., Ltd.",
"000B36": "Productivity Systems, Inc.",
"000B37": "MANUFACTURE DES MONTRES ROLEX SA",
"000B38": "Kn\u00fcrr GmbH",
"000B39": "Keisoku Giken Co.,Ltd.",
"000B3A": "QuStream Corporation",
"000B3B": "devolo AG",
"000B3C": "Cygnal Integrated Products, Inc.",
"000B3D": "CONTAL OK Ltd.",
"000B3E": "BittWare, Inc",
"000B3F": "Anthology Solutions Inc.",
"000B40": "Oclaro",
"000B41": "Ing. B\u00fcro Dr. Beutlhauser",
"000B42": "commax Co., Ltd.",
"000B43": "Microscan Systems, Inc.",
"000B44": "Concord IDea Corp.",
"000B45": "CISCO SYSTEMS, INC.",
"000B46": "CISCO SYSTEMS, INC.",
"000B47": "Advanced Energy",
"000B48": "sofrel",
"000B49": "RF-Link System Inc.",
"000B4A": "Visimetrics (UK) Ltd",
"000B4B": "VISIOWAVE SA",
"000B4C": "Clarion (M) Sdn Bhd",
"000B4D": "Emuzed",
"000B4E": "VertexRSI, General Dynamics SatCOM Technologies, Inc.",
"000B4F": "Verifone, INC.",
"000B50": "Oxygnet",
"000B51": "Micetek International Inc.",
"000B52": "JOYMAX ELECTRONICS CO. LTD.",
"000B53": "INITIUM Co., Ltd.",
"000B54": "BiTMICRO Networks, Inc.",
"000B55": "ADInstruments",
"000B56": "Cybernetics",
"000B57": "Silicon Laboratories",
"000B58": "Astronautics C.A LTD",
"000B59": "ScriptPro, LLC",
"000B5A": "HyperEdge",
"000B5B": "Rincon Research Corporation",
"000B5C": "Newtech Co.,Ltd",
"000B5D": "FUJITSU LIMITED",
"000B5E": "Audio Engineering Society Inc.",
"000B5F": "CISCO SYSTEMS, INC.",
"000B60": "CISCO SYSTEMS, INC.",
"000B61": "Friedrich L\u00fctze GmbH & Co. KG",
"000B62": "ib-mohnen KG",
"000B63": "Kaleidescape",
"000B64": "Kieback & Peter GmbH & Co KG",
"000B65": "Sy.A.C. srl",
"000B66": "Teralink Communications",
"000B67": "Topview Technology Corporation",
"000B68": "Addvalue Communications Pte Ltd",
"000B69": "Franke Finland Oy",
"000B6A": "Asiarock Incorporation",
"000B6B": "Wistron Neweb Corp.",
"000B6C": "Sychip Inc.",
"000B6D": "SOLECTRON JAPAN NAKANIIDA",
"000B6E": "Neff Instrument Corp.",
"000B6F": "Media Streaming Networks Inc",
"000B70": "Load Technology, Inc.",
"000B71": "Litchfield Communications Inc.",
"000B72": "Lawo AG",
"000B73": "Kodeos Communications",
"000B74": "Kingwave Technology Co., Ltd.",
"000B75": "Iosoft Ltd.",
"000B76": "ET&T Technology Co. Ltd.",
"000B77": "Cogent Systems, Inc.",
"000B78": "TAIFATECH INC.",
"000B79": "X-COM, Inc.",
"000B7A": "L-3 Linkabit",
"000B7B": "Test-Um Inc.",
"000B7C": "Telex Communications",
"000B7D": "SOLOMON EXTREME INTERNATIONAL LTD.",
"000B7E": "SAGINOMIYA Seisakusho Inc.",
"000B7F": "Align Engineering LLC",
"000B80": "Lycium Networks",
"000B81": "Kaparel Corporation",
"000B82": "Grandstream Networks, Inc.",
"000B83": "DATAWATT B.V.",
"000B84": "BODET",
"000B85": "CISCO SYSTEMS, INC.",
"000B86": "Aruba Networks",
"000B87": "American Reliance Inc.",
"000B88": "Vidisco ltd.",
"000B89": "Top Global Technology, Ltd.",
"000B8A": "MITEQ Inc.",
"000B8B": "KERAJET, S.A.",
"000B8C": "Flextronics",
"000B8D": "Avvio Networks",
"000B8E": "Ascent Corporation",
"000B8F": "AKITA ELECTRONICS SYSTEMS CO.,LTD.",
"000B90": "ADVA Optical Networking Ltd.",
"000B91": "Aglaia Gesellschaft f\u00fcr Bildverarbeitung und Kommunikation mbH",
"000B92": "Ascom Danmark A/S",
"000B93": "Ritter Elektronik",
"000B94": "Digital Monitoring Products, Inc.",
"000B95": "eBet Gaming Systems Pty Ltd",
"000B96": "Innotrac Diagnostics Oy",
"000B97": "Matsushita Electric Industrial Co.,Ltd.",
"000B98": "NiceTechVision",
"000B99": "SensAble Technologies, Inc.",
"000B9A": "Shanghai Ulink Telecom Equipment Co. Ltd.",
"000B9B": "Sirius System Co, Ltd.",
"000B9C": "TriBeam Technologies, Inc.",
"000B9D": "TwinMOS Technologies Inc.",
"000B9E": "Yasing Technology Corp.",
"000B9F": "Neue ELSA GmbH",
"000BA0": "T&L Information Inc.",
"000BA1": "SYSCOM Ltd.",
"000BA2": "Sumitomo Electric Networks, Inc",
"000BA3": "Siemens AG, I&S",
"000BA4": "Shiron Satellite Communications Ltd. (1996)",
"000BA5": "Quasar Cipta Mandiri, PT",
"000BA6": "Miyakawa Electric Works Ltd.",
"000BA7": "Maranti Networks",
"000BA8": "HANBACK ELECTRONICS CO., LTD.",
"000BA9": "CloudShield Technologies, Inc.",
"000BAA": "Aiphone co.,Ltd",
"000BAB": "Advantech Technology (CHINA) Co., Ltd.",
"000BAC": "3Com Ltd",
"000BAD": "PC-PoS Inc.",
"000BAE": "Vitals System Inc.",
"000BAF": "WOOJU COMMUNICATIONS Co,.Ltd",
"000BB0": "Sysnet Telematica srl",
"000BB1": "Super Star Technology Co., Ltd.",
"000BB2": "SMALLBIG TECHNOLOGY",
"000BB3": "RiT technologies Ltd.",
"000BB4": "RDC Semiconductor Inc.,",
"000BB5": "nStor Technologies, Inc.",
"000BB6": "Metalligence Technology Corp.",
"000BB7": "Micro Systems Co.,Ltd.",
"000BB8": "Kihoku Electronic Co.",
"000BB9": "Imsys AB",
"000BBA": "Harmonic, Inc",
"000BBB": "Etin Systems Co., Ltd",
"000BBC": "En Garde Systems, Inc.",
"000BBD": "Connexionz Limited",
"000BBE": "CISCO SYSTEMS, INC.",
"000BBF": "CISCO SYSTEMS, INC.",
"000BC0": "China IWNComm Co., Ltd.",
"000BC1": "Bay Microsystems, Inc.",
"000BC2": "Corinex Communication Corp.",
"000BC3": "Multiplex, Inc.",
"000BC4": "BIOTRONIK GmbH & Co",
"000BC5": "SMC Networks, Inc.",
"000BC6": "ISAC, Inc.",
"000BC7": "ICET S.p.A.",
"000BC8": "AirFlow Networks",
"000BC9": "Electroline Equipment",
"000BCA": "DATAVAN International Corporation",
"000BCB": "Fagor Automation , S. Coop",
"000BCC": "JUSAN, S.A.",
"000BCD": "Hewlett-Packard Company",
"000BCE": "Free2move AB",
"000BCF": "AGFA NDT INC.",
"000BD0": "XiMeta Technology Americas Inc.",
"000BD1": "Aeronix, Inc.",
"000BD2": "Remopro Technology Inc.",
"000BD3": "cd3o",
"000BD4": "Beijing Wise Technology & Science Development Co.Ltd",
"000BD5": "Nvergence, Inc.",
"000BD6": "Paxton Access Ltd",
"000BD7": "DORMA Time + Access GmbH",
"000BD8": "Industrial Scientific Corp.",
"000BD9": "General Hydrogen",
"000BDA": "EyeCross Co.,Inc.",
"000BDB": "Dell Inc",
"000BDC": "AKCP",
"000BDD": "TOHOKU RICOH Co., LTD.",
"000BDE": "TELDIX GmbH",
"000BDF": "Shenzhen RouterD Networks Limited",
"000BE0": "SercoNet Ltd.",
"000BE1": "Nokia NET Product Operations",
"000BE2": "Lumenera Corporation",
"000BE3": "Key Stream Co., Ltd.",
"000BE4": "Hosiden Corporation",
"000BE5": "HIMS International Corporation",
"000BE6": "Datel Electronics",
"000BE7": "COMFLUX TECHNOLOGY INC.",
"000BE8": "AOIP",
"000BE9": "Actel Corporation",
"000BEA": "Zultys Technologies",
"000BEB": "Systegra AG",
"000BEC": "NIPPON ELECTRIC INSTRUMENT, INC.",
"000BED": "ELM Inc.",
"000BEE": "inc.jet, Incorporated",
"000BEF": "Code Corporation",
"000BF0": "MoTEX Products Co., Ltd.",
"000BF1": "LAP Laser Applikations",
"000BF2": "Chih-Kan Technology Co., Ltd.",
"000BF3": "BAE SYSTEMS",
"000BF4": "PRIVATE",
"000BF5": "Shanghai Sibo Telecom Technology Co.,Ltd",
"000BF6": "Nitgen Co., Ltd",
"000BF7": "NIDEK CO.,LTD",
"000BF8": "Infinera",
"000BF9": "Gemstone communications, Inc.",
"000BFA": "EXEMYS SRL",
"000BFB": "D-NET International Corporation",
"000BFC": "CISCO SYSTEMS, INC.",
"000BFD": "CISCO SYSTEMS, INC.",
"000BFE": "CASTEL Broadband Limited",
"000BFF": "Berkeley Camera Engineering",
"000C00": "BEB Industrie-Elektronik AG",
"000C01": "Abatron AG",
"000C02": "ABB Oy",
"000C03": "HDMI Licensing, LLC",
"000C04": "Tecnova",
"000C05": "RPA Reserch Co., Ltd.",
"000C06": "Nixvue Systems Pte Ltd",
"000C07": "Iftest AG",
"000C08": "HUMEX Technologies Corp.",
"000C09": "Hitachi IE Systems Co., Ltd",
"000C0A": "Guangdong Province Electronic Technology Research Institute",
"000C0B": "Broadbus Technologies",
"000C0C": "APPRO TECHNOLOGY INC.",
"000C0D": "Communications & Power Industries / Satcom Division",
"000C0E": "XtremeSpectrum, Inc.",
"000C0F": "Techno-One Co., Ltd",
"000C10": "PNI Corporation",
"000C11": "NIPPON DEMPA CO.,LTD.",
"000C12": "Micro-Optronic-Messtechnik GmbH",
"000C13": "MediaQ",
"000C14": "Diagnostic Instruments, Inc.",
"000C15": "CyberPower Systems, Inc.",
"000C16": "Concorde Microsystems Inc.",
"000C17": "AJA Video Systems Inc",
"000C18": "Zenisu Keisoku Inc.",
"000C19": "Telio Communications GmbH",
"000C1A": "Quest Technical Solutions Inc.",
"000C1B": "ORACOM Co, Ltd.",
"000C1C": "MicroWeb Co., Ltd.",
"000C1D": "Mettler & Fuchs AG",
"000C1E": "Global Cache",
"000C1F": "Glimmerglass Networks",
"000C20": "Fi WIn, Inc.",
"000C21": "Faculty of Science and Technology, Keio University",
"000C22": "Double D Electronics Ltd",
"000C23": "Beijing Lanchuan Tech. Co., Ltd.",
"000C24": "ANATOR",
"000C25": "Allied Telesis Labs, Inc.",
"000C26": "Weintek Labs. Inc.",
"000C27": "Sammy Corporation",
"000C28": "RIFATRON",
"000C29": "VMware, Inc.",
"000C2A": "OCTTEL Communication Co., Ltd.",
"000C2B": "ELIAS Technology, Inc.",
"000C2C": "Enwiser Inc.",
"000C2D": "FullWave Technology Co., Ltd.",
"000C2E": "Openet information technology(shenzhen) Co., Ltd.",
"000C2F": "SeorimTechnology Co.,Ltd.",
"000C30": "CISCO SYSTEMS, INC.",
"000C31": "CISCO SYSTEMS, INC.",
"000C32": "Avionic Design Development GmbH",
"000C33": "Compucase Enterprise Co. Ltd.",
"000C34": "Vixen Co., Ltd.",
"000C35": "KaVo Dental GmbH & Co. KG",
"000C36": "SHARP TAKAYA ELECTRONICS INDUSTRY CO.,LTD.",
"000C37": "Geomation, Inc.",
"000C38": "TelcoBridges Inc.",
"000C39": "Sentinel Wireless Inc.",
"000C3A": "Oxance",
"000C3B": "Orion Electric Co., Ltd.",
"000C3C": "MediaChorus, Inc.",
"000C3D": "Glsystech Co., Ltd.",
"000C3E": "Crest Audio",
"000C3F": "Cogent Defence & Security Networks,",
"000C40": "Altech Controls",
"000C41": "Cisco-Linksys",
"000C42": "Routerboard.com",
"000C43": "Ralink Technology, Corp.",
"000C44": "Automated Interfaces, Inc.",
"000C45": "Animation Technologies Inc.",
"000C46": "Allied Telesyn Inc.",
"000C47": "SK Teletech(R&D Planning Team)",
"000C48": "QoStek Corporation",
"000C49": "Dangaard Telecom RTC Division A/S",
"000C4A": "Cygnus Microsystems (P) Limited",
"000C4B": "Cheops Elektronik",
"000C4C": "Arcor AG&Co.",
"000C4D": "Curtiss-Wright Controls Avionics & Electronics",
"000C4E": "Winbest Technology CO,LT",
"000C4F": "UDTech Japan Corporation",
"000C50": "Seagate Technology",
"000C51": "Scientific Technologies Inc.",
"000C52": "Roll Systems Inc.",
"000C53": "PRIVATE",
"000C54": "Pedestal Networks, Inc",
"000C55": "Microlink Communications Inc.",
"000C56": "Megatel Computer (1986) Corp.",
"000C57": "MACKIE Engineering Services Belgium BVBA",
"000C58": "M&S Systems",
"000C59": "Indyme Electronics, Inc.",
"000C5A": "IBSmm Embedded Electronics Consulting",
"000C5B": "HANWANG TECHNOLOGY CO.,LTD",
"000C5C": "GTN Systems B.V.",
"000C5D": "CHIC TECHNOLOGY (CHINA) CORP.",
"000C5E": "Calypso Medical",
"000C5F": "Avtec, Inc.",
"000C60": "ACM Systems",
"000C61": "AC Tech corporation DBA Advanced Digital",
"000C62": "ABB AB, Cewe-Control",
"000C63": "Zenith Electronics Corporation",
"000C64": "X2 MSA Group",
"000C65": "Sunin Telecom",
"000C66": "Pronto Networks Inc",
"000C67": "OYO ELECTRIC CO.,LTD",
"000C68": "SigmaTel, Inc.",
"000C69": "National Radio Astronomy Observatory",
"000C6A": "MBARI",
"000C6B": "Kurz Industrie-Elektronik GmbH",
"000C6C": "Elgato Systems LLC",
"000C6D": "Edwards Ltd.",
"000C6E": "ASUSTEK COMPUTER INC.",
"000C6F": "Amtek system co.,LTD.",
"000C70": "ACC GmbH",
"000C71": "Wybron, Inc",
"000C72": "Tempearl Industrial Co., Ltd.",
"000C73": "TELSON ELECTRONICS CO., LTD",
"000C74": "RIVERTEC CORPORATION",
"000C75": "Oriental integrated electronics. LTD",
"000C76": "MICRO-STAR INTERNATIONAL CO., LTD.",
"000C77": "Life Racing Ltd",
"000C78": "In-Tech Electronics Limited",
"000C79": "Extel Communications P/L",
"000C7A": "DaTARIUS Technologies GmbH",
"000C7B": "ALPHA PROJECT Co.,Ltd.",
"000C7C": "Internet Information Image Inc.",
"000C7D": "TEIKOKU ELECTRIC MFG. CO., LTD",
"000C7E": "Tellium Incorporated",
"000C7F": "synertronixx GmbH",
"000C80": "Opelcomm Inc.",
"000C81": "Schneider Electric (Australia)",
"000C82": "NETWORK TECHNOLOGIES INC",
"000C83": "Logical Solutions",
"000C84": "Eazix, Inc.",
"000C85": "CISCO SYSTEMS, INC.",
"000C86": "CISCO SYSTEMS, INC.",
"000C87": "AMD",
"000C88": "Apache Micro Peripherals, Inc.",
"000C89": "AC Electric Vehicles, Ltd.",
"000C8A": "Bose Corporation",
"000C8B": "Connect Tech Inc",
"000C8C": "KODICOM CO.,LTD.",
"000C8D": "MATRIX VISION GmbH",
"000C8E": "Mentor Engineering Inc",
"000C8F": "Nergal s.r.l.",
"000C90": "Octasic Inc.",
"000C91": "Riverhead Networks Inc.",
"000C92": "WolfVision Gmbh",
"000C93": "Xeline Co., Ltd.",
"000C94": "United Electronic Industries, Inc. (EUI)",
"000C95": "PrimeNet",
"000C96": "OQO, Inc.",
"000C97": "NV ADB TTV Technologies SA",
"000C98": "LETEK Communications Inc.",
"000C99": "HITEL LINK Co.,Ltd",
"000C9A": "Hitech Electronics Corp.",
"000C9B": "EE Solutions, Inc",
"000C9C": "Chongho information & communications",
"000C9D": "UbeeAirWalk, Inc.",
"000C9E": "MemoryLink Corp.",
"000C9F": "NKE Corporation",
"000CA0": "StorCase Technology, Inc.",
"000CA1": "SIGMACOM Co., LTD.",
"000CA2": "Harmonic Video Network",
"000CA3": "Rancho Technology, Inc.",
"000CA4": "Prompttec Product Management GmbH",
"000CA5": "Naman NZ LTd",
"000CA6": "Mintera Corporation",
"000CA7": "Metro (Suzhou) Technologies Co., Ltd.",
"000CA8": "Garuda Networks Corporation",
"000CA9": "Ebtron Inc.",
"000CAA": "Cubic Transportation Systems Inc",
"000CAB": "COMMEND International",
"000CAC": "Citizen Watch Co., Ltd.",
"000CAD": "BTU International",
"000CAE": "Ailocom Oy",
"000CAF": "TRI TERM CO.,LTD.",
"000CB0": "Star Semiconductor Corporation",
"000CB1": "Salland Engineering (Europe) BV",
"000CB2": "Comstar Co., Ltd.",
"000CB3": "ROUND Co.,Ltd.",
"000CB4": "AutoCell Laboratories, Inc.",
"000CB5": "Premier Technolgies, Inc",
"000CB6": "NANJING SEU MOBILE & INTERNET TECHNOLOGY CO.,LTD",
"000CB7": "Nanjing Huazhuo Electronics Co., Ltd.",
"000CB8": "MEDION AG",
"000CB9": "LEA",
"000CBA": "Jamex, Inc.",
"000CBB": "ISKRAEMECO",
"000CBC": "Iscutum",
"000CBD": "Interface Masters, Inc",
"000CBE": "Innominate Security Technologies AG",
"000CBF": "Holy Stone Ent. Co., Ltd.",
"000CC0": "Genera Oy",
"000CC1": "Cooper Industries Inc.",
"000CC2": "ControlNet (India) Private Limited",
"000CC3": "BeWAN systems",
"000CC4": "Tiptel AG",
"000CC5": "Nextlink Co., Ltd.",
"000CC6": "Ka-Ro electronics GmbH",
"000CC7": "Intelligent Computer Solutions Inc.",
"000CC8": "Xytronix Research & Design, Inc.",
"000CC9": "ILWOO DATA & TECHNOLOGY CO.,LTD",
"000CCA": "HGST a Western Digital Company",
"000CCB": "Design Combus Ltd",
"000CCC": "Aeroscout Ltd.",
"000CCD": "IEC - TC57",
"000CCE": "CISCO SYSTEMS, INC.",
"000CCF": "CISCO SYSTEMS, INC.",
"000CD0": "Symetrix",
"000CD1": "SFOM Technology Corp.",
"000CD2": "Schaffner EMV AG",
"000CD3": "Prettl Elektronik Radeberg GmbH",
"000CD4": "Positron Public Safety Systems inc.",
"000CD5": "Passave Inc.",
"000CD6": "PARTNER TECH",
"000CD7": "Nallatech Ltd",
"000CD8": "M. K. Juchheim GmbH & Co",
"000CD9": "Itcare Co., Ltd",
"000CDA": "FreeHand Systems, Inc.",
"000CDB": "Brocade Communications Systems, Inc",
"000CDC": "BECS Technology, Inc",
"000CDD": "AOS Technologies AG",
"000CDE": "ABB STOTZ-KONTAKT GmbH",
"000CDF": "PULNiX America, Inc",
"000CE0": "Trek Diagnostics Inc.",
"000CE1": "The Open Group",
"000CE2": "Rolls-Royce",
"000CE3": "Option International N.V.",
"000CE4": "NeuroCom International, Inc.",
"000CE5": "ARRIS Group, Inc.",
"000CE6": "Meru Networks Inc",
"000CE7": "MediaTek Inc.",
"000CE8": "GuangZhou AnJuBao Co., Ltd",
"000CE9": "BLOOMBERG L.P.",
"000CEA": "aphona Kommunikationssysteme",
"000CEB": "CNMP Networks, Inc.",
"000CEC": "Spectracom Corp.",
"000CED": "Real Digital Media",
"000CEE": "jp-embedded",
"000CEF": "Open Networks Engineering Ltd",
"000CF0": "M & N GmbH",
"000CF1": "Intel Corporation",
"000CF2": "GAMESA E\u00f3lica",
"000CF3": "CALL IMAGE SA",
"000CF4": "AKATSUKI ELECTRIC MFG.CO.,LTD.",
"000CF5": "InfoExpress",
"000CF6": "Sitecom Europe BV",
"000CF7": "Nortel Networks",
"000CF8": "Nortel Networks",
"000CF9": "Xylem Water Solutions",
"000CFA": "Digital Systems Corp",
"000CFB": "Korea Network Systems",
"000CFC": "S2io Technologies Corp",
"000CFD": "Hyundai ImageQuest Co.,Ltd.",
"000CFE": "Grand Electronic Co., Ltd",
"000CFF": "MRO-TEK LIMITED",
"000D00": "Seaway Networks Inc.",
"000D01": "P&E Microcomputer Systems, Inc.",
"000D02": "NEC AccessTechnica, Ltd.",
"000D03": "Matrics, Inc.",
"000D04": "Foxboro Eckardt Development GmbH",
"000D05": "cybernet manufacturing inc.",
"000D06": "Compulogic Limited",
"000D07": "Calrec Audio Ltd",
"000D08": "AboveCable, Inc.",
"000D09": "Yuehua(Zhuhai) Electronic CO. LTD",
"000D0A": "Projectiondesign as",
"000D0B": "Buffalo Inc.",
"000D0C": "MDI Security Systems",
"000D0D": "ITSupported, LLC",
"000D0E": "Inqnet Systems, Inc.",
"000D0F": "Finlux Ltd",
"000D10": "Embedtronics Oy",
"000D11": "DENTSPLY - Gendex",
"000D12": "AXELL Corporation",
"000D13": "Wilhelm Rutenbeck GmbH&Co.KG",
"000D14": "Vtech Innovation LP dba Advanced American Telephones",
"000D15": "Voipac s.r.o.",
"000D16": "UHS Systems Pty Ltd",
"000D17": "Turbo Networks Co.Ltd",
"000D18": "Mega-Trend Electronics CO., LTD.",
"000D19": "ROBE Show lighting",
"000D1A": "Mustek System Inc.",
"000D1B": "Kyoto Electronics Manufacturing Co., Ltd.",
"000D1C": "Amesys Defense",
"000D1D": "HIGH-TEK HARNESS ENT. CO., LTD.",
"000D1E": "Control Techniques",
"000D1F": "AV Digital",
"000D20": "ASAHIKASEI TECHNOSYSTEM CO.,LTD.",
"000D21": "WISCORE Inc.",
"000D22": "Unitronics LTD",
"000D23": "Smart Solution, Inc",
"000D24": "SENTEC E&E CO., LTD.",
"000D25": "SANDEN CORPORATION",
"000D26": "Primagraphics Limited",
"000D27": "MICROPLEX Printware AG",
"000D28": "CISCO SYSTEMS, INC.",
"000D29": "CISCO SYSTEMS, INC.",
"000D2A": "Scanmatic AS",
"000D2B": "Racal Instruments",
"000D2C": "Patapsco Designs Ltd",
"000D2D": "NCT Deutschland GmbH",
"000D2E": "Matsushita Avionics Systems Corporation",
"000D2F": "AIN Comm.Tech.Co., LTD",
"000D30": "IceFyre Semiconductor",
"000D31": "Compellent Technologies, Inc.",
"000D32": "DispenseSource, Inc.",
"000D33": "Prediwave Corp.",
"000D34": "Shell International Exploration and Production, Inc.",
"000D35": "PAC International Ltd",
"000D36": "Wu Han Routon Electronic Co., Ltd",
"000D37": "WIPLUG",
"000D38": "NISSIN INC.",
"000D39": "Network Electronics",
"000D3A": "Microsoft Corp.",
"000D3B": "Microelectronics Technology Inc.",
"000D3C": "i.Tech Dynamic Ltd",
"000D3D": "Hammerhead Systems, Inc.",
"000D3E": "APLUX Communications Ltd.",
"000D3F": "VTI Instruments Corporation",
"000D40": "Verint Loronix Video Solutions",
"000D41": "Siemens AG ICM MP UC RD IT KLF1",
"000D42": "Newbest Development Limited",
"000D43": "DRS Tactical Systems Inc.",
"000D44": "Audio BU - Logitech",
"000D45": "Tottori SANYO Electric Co., Ltd.",
"000D46": "Parker SSD Drives",
"000D47": "Collex",
"000D48": "AEWIN Technologies Co., Ltd.",
"000D49": "Triton Systems of Delaware, Inc.",
"000D4A": "Steag ETA-Optik",
"000D4B": "Roku, LLC",
"000D4C": "Outline Electronics Ltd.",
"000D4D": "Ninelanes",
"000D4E": "NDR Co.,LTD.",
"000D4F": "Kenwood Corporation",
"000D50": "Galazar Networks",
"000D51": "DIVR Systems, Inc.",
"000D52": "Comart system",
"000D53": "Beijing 5w Communication Corp.",
"000D54": "3Com Ltd",
"000D55": "SANYCOM Technology Co.,Ltd",
"000D56": "Dell Inc",
"000D57": "Fujitsu I-Network Systems Limited.",
"000D58": "PRIVATE",
"000D59": "Amity Systems, Inc.",
"000D5A": "Tiesse SpA",
"000D5B": "Smart Empire Investments Limited",
"000D5C": "Robert Bosch GmbH, VT-ATMO",
"000D5D": "Raritan Computer, Inc",
"000D5E": "NEC Personal Products",
"000D5F": "Minds Inc",
"000D60": "IBM Corp",
"000D61": "Giga-Byte Technology Co., Ltd.",
"000D62": "Funkwerk Dabendorf GmbH",
"000D63": "DENT Instruments, Inc.",
"000D64": "COMAG Handels AG",
"000D65": "CISCO SYSTEMS, INC.",
"000D66": "CISCO SYSTEMS, INC.",
"000D67": "Ericsson",
"000D68": "Vinci Systems, Inc.",
"000D69": "TMT&D Corporation",
"000D6A": "Redwood Technologies LTD",
"000D6B": "Mita-Teknik A/S",
"000D6C": "M-Audio",
"000D6D": "K-Tech Devices Corp.",
"000D6E": "K-Patents Oy",
"000D6F": "Ember Corporation",
"000D70": "Datamax Corporation",
"000D71": "boca systems",
"000D72": "2Wire, Inc",
"000D73": "Technical Support, Inc.",
"000D74": "Sand Network Systems, Inc.",
"000D75": "Kobian Pte Ltd - Taiwan Branch",
"000D76": "Hokuto Denshi Co,. Ltd.",
"000D77": "FalconStor Software",
"000D78": "Engineering & Security",
"000D79": "Dynamic Solutions Co,.Ltd.",
"000D7A": "DiGATTO Asia Pacific Pte Ltd",
"000D7B": "Consensys Computers Inc.",
"000D7C": "Codian Ltd",
"000D7D": "Afco Systems",
"000D7E": "Axiowave Networks, Inc.",
"000D7F": "MIDAS COMMUNICATION TECHNOLOGIES PTE LTD ( Foreign Branch)",
"000D80": "Online Development Inc",
"000D81": "Pepperl+Fuchs GmbH",
"000D82": "PHS srl",
"000D83": "Sanmina-SCI Hungary Ltd.",
"000D84": "Makus Inc.",
"000D85": "Tapwave, Inc.",
"000D86": "Huber + Suhner AG",
"000D87": "Elitegroup Computer System Co. (ECS)",
"000D88": "D-Link Corporation",
"000D89": "Bils Technology Inc",
"000D8A": "Winners Electronics Co., Ltd.",
"000D8B": "T&D Corporation",
"000D8C": "Shanghai Wedone Digital Ltd. CO.",
"000D8D": "Prosoft Technology, Inc",
"000D8E": "Koden Electronics Co., Ltd.",
"000D8F": "King Tsushin Kogyo Co., LTD.",
"000D90": "Factum Electronics AB",
"000D91": "Eclipse (HQ Espana) S.L.",
"000D92": "Arima Communication Corporation",
"000D93": "Apple",
"000D94": "AFAR Communications,Inc",
"000D95": "Opti-cell, Inc.",
"000D96": "Vtera Technology Inc.",
"000D97": "Tropos Networks, Inc.",
"000D98": "S.W.A.C. Schmitt-Walter Automation Consult GmbH",
"000D99": "Orbital Sciences Corp.; Launch Systems Group",
"000D9A": "INFOTEC LTD",
"000D9B": "Heraeus Electro-Nite International N.V.",
"000D9C": "Elan GmbH & Co KG",
"000D9D": "Hewlett-Packard Company",
"000D9E": "TOKUDEN OHIZUMI SEISAKUSYO Co.,Ltd.",
"000D9F": "RF Micro Devices",
"000DA0": "NEDAP N.V.",
"000DA1": "MIRAE ITS Co.,LTD.",
"000DA2": "Infrant Technologies, Inc.",
"000DA3": "Emerging Technologies Limited",
"000DA4": "DOSCH & AMAND SYSTEMS AG",
"000DA5": "Fabric7 Systems, Inc",
"000DA6": "Universal Switching Corporation",
"000DA7": "PRIVATE",
"000DA8": "Teletronics Technology Corporation",
"000DA9": "T.E.A.M. S.L.",
"000DAA": "S.A.Tehnology co.,Ltd.",
"000DAB": "Parker Hannifin GmbH Electromechanical Division Europe",
"000DAC": "Japan CBM Corporation",
"000DAD": "Dataprobe, Inc.",
"000DAE": "SAMSUNG HEAVY INDUSTRIES CO., LTD.",
"000DAF": "Plexus Corp (UK) Ltd",
"000DB0": "Olym-tech Co.,Ltd.",
"000DB1": "Japan Network Service Co., Ltd.",
"000DB2": "Ammasso, Inc.",
"000DB3": "SDO Communication Corperation",
"000DB4": "NETASQ",
"000DB5": "GLOBALSAT TECHNOLOGY CORPORATION",
"000DB6": "Broadcom Corporation",
"000DB7": "SANKO ELECTRIC CO,.LTD",
"000DB8": "SCHILLER AG",
"000DB9": "PC Engines GmbH",
"000DBA": "Oc\u00e9 Document Technologies GmbH",
"000DBB": "Nippon Dentsu Co.,Ltd.",
"000DBC": "CISCO SYSTEMS, INC.",
"000DBD": "CISCO SYSTEMS, INC.",
"000DBE": "Bel Fuse Europe Ltd.,UK",
"000DBF": "TekTone Sound & Signal Mfg., Inc.",
"000DC0": "Spagat AS",
"000DC1": "SafeWeb Inc",
"000DC2": "PRIVATE",
"000DC3": "First Communication, Inc.",
"000DC4": "Emcore Corporation",
"000DC5": "EchoStar Global B.V.",
"000DC6": "DigiRose Technology Co., Ltd.",
"000DC7": "COSMIC ENGINEERING INC.",
"000DC8": "AirMagnet, Inc",
"000DC9": "THALES Elektronik Systeme GmbH",
"000DCA": "Tait Electronics",
"000DCB": "Petcomkorea Co., Ltd.",
"000DCC": "NEOSMART Corp.",
"000DCD": "GROUPE TXCOM",
"000DCE": "Dynavac Technology Pte Ltd",
"000DCF": "Cidra Corp.",
"000DD0": "TetraTec Instruments GmbH",
"000DD1": "Stryker Corporation",
"000DD2": "Simrad Optronics ASA",
"000DD3": "SAMWOO Telecommunication Co.,Ltd.",
"000DD4": "Symantec Corporation",
"000DD5": "O'RITE TECHNOLOGY CO.,LTD",
"000DD6": "ITI LTD",
"000DD7": "Bright",
"000DD8": "BBN",
"000DD9": "Anton Paar GmbH",
"000DDA": "ALLIED TELESIS K.K.",
"000DDB": "AIRWAVE TECHNOLOGIES INC.",
"000DDC": "VAC",
"000DDD": "Profilo Telra Elektronik Sanayi ve Ticaret. A.\u015e",
"000DDE": "Joyteck Co., Ltd.",
"000DDF": "Japan Image & Network Inc.",
"000DE0": "ICPDAS Co.,LTD",
"000DE1": "Control Products, Inc.",
"000DE2": "CMZ Sistemi Elettronici",
"000DE3": "AT Sweden AB",
"000DE4": "DIGINICS, Inc.",
"000DE5": "Samsung Thales",
"000DE6": "YOUNGBO ENGINEERING CO.,LTD",
"000DE7": "Snap-on OEM Group",
"000DE8": "Nasaco Electronics Pte. Ltd",
"000DE9": "Napatech Aps",
"000DEA": "Kingtel Telecommunication Corp.",
"000DEB": "CompXs Limited",
"000DEC": "CISCO SYSTEMS, INC.",
"000DED": "CISCO SYSTEMS, INC.",
"000DEE": "Andrew RF Power Amplifier Group",
"000DEF": "Soc. Coop. Bilanciai",
"000DF0": "QCOM TECHNOLOGY INC.",
"000DF1": "IONIX INC.",
"000DF2": "PRIVATE",
"000DF3": "Asmax Solutions",
"000DF4": "Watertek Co.",
"000DF5": "Teletronics International Inc.",
"000DF6": "Technology Thesaurus Corp.",
"000DF7": "Space Dynamics Lab",
"000DF8": "ORGA Kartensysteme GmbH",
"000DF9": "NDS Limited",
"000DFA": "Micro Control Systems Ltd.",
"000DFB": "Komax AG",
"000DFC": "ITFOR Inc.",
"000DFD": "Huges Hi-Tech Inc.,",
"000DFE": "Hauppauge Computer Works, Inc.",
"000DFF": "CHENMING MOLD INDUSTRY CORP.",
"000E00": "Atrie",
"000E01": "ASIP Technologies Inc.",
"000E02": "Advantech AMT Inc.",
"000E03": "Emulex Corporation",
"000E04": "CMA/Microdialysis AB",
"000E05": "WIRELESS MATRIX CORP.",
"000E06": "Team Simoco Ltd",
"000E07": "Sony Ericsson Mobile Communications AB",
"000E08": "Cisco Linksys LLC",
"000E09": "Shenzhen Coship Software Co.,LTD.",
"000E0A": "SAKUMA DESIGN OFFICE",
"000E0B": "Netac Technology Co., Ltd.",
"000E0C": "Intel Corporation",
"000E0D": "Hesch Schr\u00f6der GmbH",
"000E0E": "ESA elettronica S.P.A.",
"000E0F": "ERMME",
"000E10": "C-guys, Inc.",
"000E11": "BDT B\u00fcro und Datentechnik GmbH & Co.KG",
"000E12": "Adaptive Micro Systems Inc.",
"000E13": "Accu-Sort Systems inc.",
"000E14": "Visionary Solutions, Inc.",
"000E15": "Tadlys LTD",
"000E16": "SouthWing S.L.",
"000E17": "PRIVATE",
"000E18": "MyA Technology",
"000E19": "LogicaCMG Pty Ltd",
"000E1A": "JPS Communications",
"000E1B": "IAV GmbH",
"000E1C": "Hach Company",
"000E1D": "ARION Technology Inc.",
"000E1E": "QLogic Corporation",
"000E1F": "TCL Networks Equipment Co., Ltd.",
"000E20": "ACCESS Systems Americas, Inc.",
"000E21": "MTU Friedrichshafen GmbH",
"000E22": "PRIVATE",
"000E23": "Incipient, Inc.",
"000E24": "Huwell Technology Inc.",
"000E25": "Hannae Technology Co., Ltd",
"000E26": "Gincom Technology Corp.",
"000E27": "Crere Networks, Inc.",
"000E28": "Dynamic Ratings P/L",
"000E29": "Shester Communications Inc",
"000E2A": "PRIVATE",
"000E2B": "Safari Technologies",
"000E2C": "Netcodec co.",
"000E2D": "Hyundai Digital Technology Co.,Ltd.",
"000E2E": "Edimax Technology Co., Ltd.",
"000E2F": "Roche Diagnostics GmbH",
"000E30": "AERAS Networks, Inc.",
"000E31": "Olympus Soft Imaging Solutions GmbH",
"000E32": "Kontron Medical",
"000E33": "Shuko Electronics Co.,Ltd",
"000E34": "NexGen City, LP",
"000E35": "Intel Corp",
"000E36": "HEINESYS, Inc.",
"000E37": "Harms & Wende GmbH & Co.KG",
"000E38": "CISCO SYSTEMS, INC.",
"000E39": "CISCO SYSTEMS, INC.",
"000E3A": "Cirrus Logic",
"000E3B": "Hawking Technologies, Inc.",
"000E3C": "Transact Technologies Inc",
"000E3D": "Televic N.V.",
"000E3E": "Sun Optronics Inc",
"000E3F": "Soronti, Inc.",
"000E40": "Nortel Networks",
"000E41": "NIHON MECHATRONICS CO.,LTD.",
"000E42": "Motic Incoporation Ltd.",
"000E43": "G-Tek Electronics Sdn. Bhd.",
"000E44": "Digital 5, Inc.",
"000E45": "Beijing Newtry Electronic Technology Ltd",
"000E46": "Niigata Seimitsu Co.,Ltd.",
"000E47": "NCI System Co.,Ltd.",
"000E48": "Lipman TransAction Solutions",
"000E49": "Forsway Scandinavia AB",
"000E4A": "Changchun Huayu WEBPAD Co.,LTD",
"000E4B": "atrium c and i",
"000E4C": "Bermai Inc.",
"000E4D": "Numesa Inc.",
"000E4E": "Waveplus Technology Co., Ltd.",
"000E4F": "Trajet GmbH",
"000E50": "Thomson Telecom Belgium",
"000E51": "tecna elettronica srl",
"000E52": "Optium Corporation",
"000E53": "AV TECH CORPORATION",
"000E54": "AlphaCell Wireless Ltd.",
"000E55": "AUVITRAN",
"000E56": "4G Systems GmbH & Co. KG",
"000E57": "Iworld Networking, Inc.",
"000E58": "Sonos, Inc.",
"000E59": "SAGEM SA",
"000E5A": "TELEFIELD inc.",
"000E5B": "ParkerVision - Direct2Data",
"000E5C": "ARRIS Group, Inc.",
"000E5D": "Triple Play Technologies A/S",
"000E5E": "Raisecom Technology",
"000E5F": "activ-net GmbH & Co. KG",
"000E60": "360SUN Digital Broadband Corporation",
"000E61": "MICROTROL LIMITED",
"000E62": "Nortel Networks",
"000E63": "Lemke Diagnostics GmbH",
"000E64": "Elphel, Inc",
"000E65": "TransCore",
"000E66": "Hitachi Advanced Digital, Inc.",
"000E67": "Eltis Microelectronics Ltd.",
"000E68": "E-TOP Network Technology Inc.",
"000E69": "China Electric Power Research Institute",
"000E6A": "3Com Ltd",
"000E6B": "Janitza electronics GmbH",
"000E6C": "Device Drivers Limited",
"000E6D": "Murata Manufacturing Co., Ltd.",
"000E6E": "MAT S.A. (Mircrelec Advanced Technology)",
"000E6F": "IRIS Corporation Berhad",
"000E70": "in2 Networks",
"000E71": "Gemstar Technology Development Ltd.",
"000E72": "CTS electronics",
"000E73": "Tpack A/S",
"000E74": "Solar Telecom. Tech",
"000E75": "New York Air Brake Corp.",
"000E76": "GEMSOC INNOVISION INC.",
"000E77": "Decru, Inc.",
"000E78": "Amtelco",
"000E79": "Ample Communications Inc.",
"000E7A": "GemWon Communications Co., Ltd.",
"000E7B": "Toshiba",
"000E7C": "Televes S.A.",
"000E7D": "Electronics Line 3000 Ltd.",
"000E7E": "ionSign Oy",
"000E7F": "Hewlett-Packard Company",
"000E80": "Thomson Technology Inc",
"000E81": "Devicescape Software, Inc.",
"000E82": "Commtech Wireless",
"000E83": "CISCO SYSTEMS, INC.",
"000E84": "CISCO SYSTEMS, INC.",
"000E85": "Catalyst Enterprises, Inc.",
"000E86": "Alcatel North America",
"000E87": "adp Gauselmann GmbH",
"000E88": "VIDEOTRON CORP.",
"000E89": "CLEMATIC",
"000E8A": "Avara Technologies Pty. Ltd.",
"000E8B": "Astarte Technology Co, Ltd.",
"000E8C": "Siemens AG A&D ET",
"000E8D": "Systems in Progress Holding GmbH",
"000E8E": "SparkLAN Communications, Inc.",
"000E8F": "Sercomm Corp.",
"000E90": "PONICO CORP.",
"000E91": "Navico Auckland Ltd",
"000E92": "Open Telecom",
"000E93": "Mil\u00e9nio 3 Sistemas Electr\u00f3nicos, Lda.",
"000E94": "Maas International BV",
"000E95": "Fujiya Denki Seisakusho Co.,Ltd.",
"000E96": "Cubic Defense Applications, Inc.",
"000E97": "Ultracker Technology CO., Inc",
"000E98": "HME Clear-Com LTD.",
"000E99": "Spectrum Digital, Inc",
"000E9A": "BOE TECHNOLOGY GROUP CO.,LTD",
"000E9B": "Ambit Microsystems Corporation",
"000E9C": "Benchmark Electronics",
"000E9D": "Tiscali UK Ltd",
"000E9E": "Topfield Co., Ltd",
"000E9F": "TEMIC SDS GmbH",
"000EA0": "NetKlass Technology Inc.",
"000EA1": "Formosa Teletek Corporation",
"000EA2": "McAfee, Inc",
"000EA3": "CNCR-IT CO.,LTD,HangZhou P.R.CHINA",
"000EA4": "Certance Inc.",
"000EA5": "BLIP Systems",
"000EA6": "ASUSTEK COMPUTER INC.",
"000EA7": "Endace Technology",
"000EA8": "United Technologists Europe Limited",
"000EA9": "Shanghai Xun Shi Communications Equipment Ltd. Co.",
"000EAA": "Scalent Systems, Inc.",
"000EAB": "Cray Inc",
"000EAC": "MINTRON ENTERPRISE CO., LTD.",
"000EAD": "Metanoia Technologies, Inc.",
"000EAE": "GAWELL TECHNOLOGIES CORP.",
"000EAF": "CASTEL",
"000EB0": "Solutions Radio BV",
"000EB1": "Newcotech,Ltd",
"000EB2": "Micro-Research Finland Oy",
"000EB3": "Hewlett-Packard",
"000EB4": "GUANGZHOU GAOKE COMMUNICATIONS TECHNOLOGY CO.LTD.",
"000EB5": "Ecastle Electronics Co., Ltd.",
"000EB6": "Riverbed Technology, Inc.",
"000EB7": "Knovative, Inc.",
"000EB8": "Iiga co.,Ltd",
"000EB9": "HASHIMOTO Electronics Industry Co.,Ltd.",
"000EBA": "HANMI SEMICONDUCTOR CO., LTD.",
"000EBB": "Everbee Networks",
"000EBC": "Paragon Fidelity GmbH",
"000EBD": "Burdick, a Quinton Compny",
"000EBE": "B&B Electronics Manufacturing Co.",
"000EBF": "Remsdaq Limited",
"000EC0": "Nortel Networks",
"000EC1": "MYNAH Technologies",
"000EC2": "Lowrance Electronics, Inc.",
"000EC3": "Logic Controls, Inc.",
"000EC4": "Iskra Transmission d.d.",
"000EC5": "Digital Multitools Inc",
"000EC6": "ASIX ELECTRONICS CORP.",
"000EC7": "Motorola Korea",
"000EC8": "Zoran Corporation",
"000EC9": "YOKO Technology Corp.",
"000ECA": "WTSS Inc",
"000ECB": "VineSys Technology",
"000ECC": "Tableau, LLC",
"000ECD": "SKOV A/S",
"000ECE": "S.I.T.T.I. S.p.A.",
"000ECF": "PROFIBUS Nutzerorganisation e.V.",
"000ED0": "Privaris, Inc.",
"000ED1": "Osaka Micro Computer.",
"000ED2": "Filtronic plc",
"000ED3": "Epicenter, Inc.",
"000ED4": "CRESITT INDUSTRIE",
"000ED5": "COPAN Systems Inc.",
"000ED6": "CISCO SYSTEMS, INC.",
"000ED7": "CISCO SYSTEMS, INC.",
"000ED8": "Aktino, Inc.",
"000ED9": "Aksys, Ltd.",
"000EDA": "C-TECH UNITED CORP.",
"000EDB": "XiNCOM Corp.",
"000EDC": "Tellion INC.",
"000EDD": "SHURE INCORPORATED",
"000EDE": "REMEC, Inc.",
"000EDF": "PLX Technology",
"000EE0": "Mcharge",
"000EE1": "ExtremeSpeed Inc.",
"000EE2": "Custom Engineering S.p.A.",
"000EE3": "Chiyu Technology Co.,Ltd",
"000EE4": "BOE TECHNOLOGY GROUP CO.,LTD",
"000EE5": "bitWallet, Inc.",
"000EE6": "Adimos Systems LTD",
"000EE7": "AAC ELECTRONICS CORP.",
"000EE8": "zioncom",
"000EE9": "WayTech Development, Inc.",
"000EEA": "Shadong Luneng Jicheng Electronics,Co.,Ltd",
"000EEB": "Sandmartin(zhong shan)Electronics Co.,Ltd",
"000EEC": "Orban",
"000EED": "Nokia Danmark A/S",
"000EEE": "Muco Industrie BV",
"000EEF": "PRIVATE",
"000EF0": "Festo AG & Co. KG",
"000EF1": "EZQUEST INC.",
"000EF2": "Infinico Corporation",
"000EF3": "Smarthome",
"000EF4": "Kasda Digital Technology Co.,Ltd",
"000EF5": "iPAC Technology Co., Ltd.",
"000EF6": "E-TEN Information Systems Co., Ltd.",
"000EF7": "Vulcan Portals Inc",
"000EF8": "SBC ASI",
"000EF9": "REA Elektronik GmbH",
"000EFA": "Optoway Technology Incorporation",
"000EFB": "Macey Enterprises",
"000EFC": "JTAG Technologies B.V.",
"000EFD": "FUJINON CORPORATION",
"000EFE": "EndRun Technologies LLC",
"000EFF": "Megasolution,Inc.",
"000F00": "Legra Systems, Inc.",
"000F01": "DIGITALKS INC",
"000F02": "Digicube Technology Co., Ltd",
"000F03": "COM&C CO., LTD",
"000F04": "cim-usa inc",
"000F05": "3B SYSTEM INC.",
"000F06": "Nortel Networks",
"000F07": "Mangrove Systems, Inc.",
"000F08": "Indagon Oy",
"000F09": "PRIVATE",
"000F0A": "Clear Edge Networks",
"000F0B": "Kentima Technologies AB",
"000F0C": "SYNCHRONIC ENGINEERING",
"000F0D": "Hunt Electronic Co., Ltd.",
"000F0E": "WaveSplitter Technologies, Inc.",
"000F0F": "Real ID Technology Co., Ltd.",
"000F10": "RDM Corporation",
"000F11": "Prodrive B.V.",
"000F12": "Panasonic Europe Ltd.",
"000F13": "Nisca corporation",
"000F14": "Mindray Co., Ltd.",
"000F15": "Kjaerulff1 A/S",
"000F16": "JAY HOW TECHNOLOGY CO.,",
"000F17": "Insta Elektro GmbH",
"000F18": "Industrial Control Systems",
"000F19": "Boston Scientific",
"000F1A": "Gaming Support B.V.",
"000F1B": "Ego Systems Inc.",
"000F1C": "DigitAll World Co., Ltd",
"000F1D": "Cosmo Techs Co., Ltd.",
"000F1E": "Chengdu KT Electric Co.of High & New Technology",
"000F1F": "Dell Inc",
"000F20": "Hewlett-Packard Company",
"000F21": "Scientific Atlanta, Inc",
"000F22": "Helius, Inc.",
"000F23": "CISCO SYSTEMS, INC.",
"000F24": "CISCO SYSTEMS, INC.",
"000F25": "AimValley B.V.",
"000F26": "WorldAccxx LLC",
"000F27": "TEAL Electronics, Inc.",
"000F28": "Itronix Corporation",
"000F29": "Augmentix Corporation",
"000F2A": "Cableware Electronics",
"000F2B": "GREENBELL SYSTEMS",
"000F2C": "Uplogix, Inc.",
"000F2D": "CHUNG-HSIN ELECTRIC & MACHINERY MFG.CORP.",
"000F2E": "Megapower International Corp.",
"000F2F": "W-LINX TECHNOLOGY CO., LTD.",
"000F30": "Raza Microelectronics Inc",
"000F31": "Allied Vision Technologies Canada Inc",
"000F32": "Lootom Telcovideo Network Wuxi Co Ltd",
"000F33": "DUALi Inc.",
"000F34": "CISCO SYSTEMS, INC.",
"000F35": "CISCO SYSTEMS, INC.",
"000F36": "Accurate Techhnologies, Inc.",
"000F37": "Xambala Incorporated",
"000F38": "Netstar",
"000F39": "IRIS SENSORS",
"000F3A": "HISHARP",
"000F3B": "Fuji System Machines Co., Ltd.",
"000F3C": "Endeleo Limited",
"000F3D": "D-Link Corporation",
"000F3E": "CardioNet, Inc",
"000F3F": "Big Bear Networks",
"000F40": "Optical Internetworking Forum",
"000F41": "Zipher Ltd",
"000F42": "Xalyo Systems",
"000F43": "Wasabi Systems Inc.",
"000F44": "Tivella Inc.",
"000F45": "Stretch, Inc.",
"000F46": "SINAR AG",
"000F47": "ROBOX SPA",
"000F48": "Polypix Inc.",
"000F49": "Northover Solutions Limited",
"000F4A": "Kyushu-kyohan co.,ltd",
"000F4B": "Oracle Corporation",
"000F4C": "Elextech INC",
"000F4D": "TalkSwitch",
"000F4E": "Cellink",
"000F4F": "Cadmus Technology Ltd",
"000F50": "StreamScale Limited",
"000F51": "Azul Systems, Inc.",
"000F52": "YORK Refrigeration, Marine & Controls",
"000F53": "Solarflare Communications Inc",
"000F54": "Entrelogic Corporation",
"000F55": "Datawire Communication Networks Inc.",
"000F56": "Continuum Photonics Inc",
"000F57": "CABLELOGIC Co., Ltd.",
"000F58": "Adder Technology Limited",
"000F59": "Phonak Communications AG",
"000F5A": "Peribit Networks",
"000F5B": "Delta Information Systems, Inc.",
"000F5C": "Day One Digital Media Limited",
"000F5D": "Genexis BV",
"000F5E": "Veo",
"000F5F": "Nicety Technologies Inc. (NTS)",
"000F60": "Lifetron Co.,Ltd",
"000F61": "Hewlett-Packard Company",
"000F62": "Alcatel Bell Space N.V.",
"000F63": "Obzerv Technologies",
"000F64": "D&R Electronica Weesp BV",
"000F65": "icube Corp.",
"000F66": "Cisco-Linksys",
"000F67": "West Instruments",
"000F68": "Vavic Network Technology, Inc.",
"000F69": "SEW Eurodrive GmbH & Co. KG",
"000F6A": "Nortel Networks",
"000F6B": "GateWare Communications GmbH",
"000F6C": "ADDI-DATA GmbH",
"000F6D": "Midas Engineering",
"000F6E": "BBox",
"000F6F": "FTA Communication Technologies",
"000F70": "Wintec Industries, inc.",
"000F71": "Sanmei Electronics Co.,Ltd",
"000F72": "Sandburst",
"000F73": "RS Automation Co., Ltd",
"000F74": "Qamcom Technology AB",
"000F75": "First Silicon Solutions",
"000F76": "Digital Keystone, Inc.",
"000F77": "DENTUM CO.,LTD",
"000F78": "Datacap Systems Inc",
"000F79": "Bluetooth Interest Group Inc.",
"000F7A": "BeiJing NuQX Technology CO.,LTD",
"000F7B": "Arce Sistemas, S.A.",
"000F7C": "ACTi Corporation",
"000F7D": "Xirrus",
"000F7E": "Ablerex Electronics Co., LTD",
"000F7F": "UBSTORAGE Co.,Ltd.",
"000F80": "Trinity Security Systems,Inc.",
"000F81": "PAL Pacific Inc.",
"000F82": "Mortara Instrument, Inc.",
"000F83": "Brainium Technologies Inc.",
"000F84": "Astute Networks, Inc.",
"000F85": "ADDO-Japan Corporation",
"000F86": "Research In Motion Limited",
"000F87": "Maxcess International",
"000F88": "AMETEK, Inc.",
"000F89": "Winnertec System Co., Ltd.",
"000F8A": "WideView",
"000F8B": "Orion MultiSystems Inc",
"000F8C": "Gigawavetech Pte Ltd",
"000F8D": "FAST TV-Server AG",
"000F8E": "DONGYANG TELECOM CO.,LTD.",
"000F8F": "CISCO SYSTEMS, INC.",
"000F90": "CISCO SYSTEMS, INC.",
"000F91": "Aerotelecom Co.,Ltd.",
"000F92": "Microhard Systems Inc.",
"000F93": "Landis+Gyr Ltd.",
"000F94": "Genexis BV",
"000F95": "ELECOM Co.,LTD Laneed Division",
"000F96": "Telco Systems, Inc.",
"000F97": "Avanex Corporation",
"000F98": "Avamax Co. Ltd.",
"000F99": "APAC opto Electronics Inc.",
"000F9A": "Synchrony, Inc.",
"000F9B": "Ross Video Limited",
"000F9C": "Panduit Corp",
"000F9D": "DisplayLink (UK) Ltd",
"000F9E": "Murrelektronik GmbH",
"000F9F": "ARRIS Group, Inc.",
"000FA0": "CANON KOREA BUSINESS SOLUTIONS INC.",
"000FA1": "Gigabit Systems Inc.",
"000FA2": "2xWireless",
"000FA3": "Alpha Networks Inc.",
"000FA4": "Sprecher Automation GmbH",
"000FA5": "BWA Technology GmbH",
"000FA6": "S2 Security Corporation",
"000FA7": "Raptor Networks Technology",
"000FA8": "Photometrics, Inc.",
"000FA9": "PC Fabrik",
"000FAA": "Nexus Technologies",
"000FAB": "Kyushu Electronics Systems Inc.",
"000FAC": "IEEE 802.11",
"000FAD": "FMN communications GmbH",
"000FAE": "E2O Communications",
"000FAF": "Dialog Inc.",
"000FB0": "Compal Electronics,INC.",
"000FB1": "Cognio Inc.",
"000FB2": "Broadband Pacenet (India) Pvt. Ltd.",
"000FB3": "Actiontec Electronics, Inc",
"000FB4": "Timespace Technology",
"000FB5": "NETGEAR Inc",
"000FB6": "Europlex Technologies",
"000FB7": "Cavium Networks",
"000FB8": "CallURL Inc.",
"000FB9": "Adaptive Instruments",
"000FBA": "Tevebox AB",
"000FBB": "Nokia Siemens Networks GmbH & Co. KG.",
"000FBC": "Onkey Technologies, Inc.",
"000FBD": "MRV Communications (Networks) LTD",
"000FBE": "e-w/you Inc.",
"000FBF": "DGT Sp. z o.o.",
"000FC0": "DELCOMp",
"000FC1": "WAVE Corporation",
"000FC2": "Uniwell Corporation",
"000FC3": "PalmPalm Technology, Inc.",
"000FC4": "NST co.,LTD.",
"000FC5": "KeyMed Ltd",
"000FC6": "Eurocom Industries A/S",
"000FC7": "Dionica R&D Ltd.",
"000FC8": "Chantry Networks",
"000FC9": "Allnet GmbH",
"000FCA": "A-JIN TECHLINE CO, LTD",
"000FCB": "3Com Ltd",
"000FCC": "Netopia, Inc.",
"000FCD": "Nortel Networks",
"000FCE": "Kikusui Electronics Corp.",
"000FCF": "Datawind Research",
"000FD0": "ASTRI",
"000FD1": "Applied Wireless Identifications Group, Inc.",
"000FD2": "EWA Technologies, Inc.",
"000FD3": "Digium",
"000FD4": "Soundcraft",
"000FD5": "Schwechat - RISE",
"000FD6": "Sarotech Co., Ltd",
"000FD7": "Harman Music Group",
"000FD8": "Force, Inc.",
"000FD9": "FlexDSL Telecommunications AG",
"000FDA": "YAZAKI CORPORATION",
"000FDB": "Westell Technologies",
"000FDC": "Ueda Japan Radio Co., Ltd.",
"000FDD": "SORDIN AB",
"000FDE": "Sony Ericsson Mobile Communications AB",
"000FDF": "SOLOMON Technology Corp.",
"000FE0": "NComputing Co.,Ltd.",
"000FE1": "ID DIGITAL CORPORATION",
"000FE2": "Hangzhou H3C Technologies Co., Ltd.",
"000FE3": "Damm Cellular Systems A/S",
"000FE4": "Pantech Co.,Ltd",
"000FE5": "MERCURY SECURITY CORPORATION",
"000FE6": "MBTech Systems, Inc.",
"000FE7": "Lutron Electronics Co., Inc.",
"000FE8": "Lobos, Inc.",
"000FE9": "GW TECHNOLOGIES CO.,LTD.",
"000FEA": "Giga-Byte Technology Co.,LTD.",
"000FEB": "Cylon Controls",
"000FEC": "ARKUS Inc.",
"000FED": "Anam Electronics Co., Ltd",
"000FEE": "XTec, Incorporated",
"000FEF": "Thales e-Transactions GmbH",
"000FF0": "Sunray Co. Ltd.",
"000FF1": "nex-G Systems Pte.Ltd",
"000FF2": "Loud Technologies Inc.",
"000FF3": "Jung Myoung Communications&Technology",
"000FF4": "Guntermann & Drunck GmbH",
"000FF5": "GN&S company",
"000FF6": "Darfon Electronics Corp.",
"000FF7": "CISCO SYSTEMS, INC.",
"000FF8": "CISCO SYSTEMS, INC.",
"000FF9": "Valcretec, Inc.",
"000FFA": "Optinel Systems, Inc.",
"000FFB": "Nippon Denso Industry Co., Ltd.",
"000FFC": "Merit Li-Lin Ent.",
"000FFD": "Glorytek Network Inc.",
"000FFE": "G-PRO COMPUTER",
"000FFF": "Control4",
"001000": "CABLE TELEVISION LABORATORIES, INC.",
"001001": "Citel",
"001002": "ACTIA",
"001003": "IMATRON, INC.",
"001004": "THE BRANTLEY COILE COMPANY,INC",
"001005": "UEC COMMERCIAL",
"001006": "Thales Contact Solutions Ltd.",
"001007": "CISCO SYSTEMS, INC.",
"001008": "VIENNA SYSTEMS CORPORATION",
"001009": "HORO QUARTZ",
"00100A": "WILLIAMS COMMUNICATIONS GROUP",
"00100B": "CISCO SYSTEMS, INC.",
"00100C": "ITO CO., LTD.",
"00100D": "CISCO SYSTEMS, INC.",
"00100E": "MICRO LINEAR COPORATION",
"00100F": "INDUSTRIAL CPU SYSTEMS",
"001010": "INITIO CORPORATION",
"001011": "CISCO SYSTEMS, INC.",
"001012": "PROCESSOR SYSTEMS (I) PVT LTD",
"001013": "Kontron America, Inc.",
"001014": "CISCO SYSTEMS, INC.",
"001015": "OOmon Inc.",
"001016": "T.SQWARE",
"001017": "Bosch Access Systems GmbH",
"001018": "BROADCOM CORPORATION",
"001019": "SIRONA DENTAL SYSTEMS GmbH & Co. KG",
"00101A": "PictureTel Corp.",
"00101B": "CORNET TECHNOLOGY, INC.",
"00101C": "OHM TECHNOLOGIES INTL, LLC",
"00101D": "WINBOND ELECTRONICS CORP.",
"00101E": "MATSUSHITA ELECTRONIC INSTRUMENTS CORP.",
"00101F": "CISCO SYSTEMS, INC.",
"001020": "Hand Held Products Inc",
"001021": "ENCANTO NETWORKS, INC.",
"001022": "SatCom Media Corporation",
"001023": "Network Equipment Technologies",
"001024": "NAGOYA ELECTRIC WORKS CO., LTD",
"001025": "Grayhill, Inc",
"001026": "ACCELERATED NETWORKS, INC.",
"001027": "L-3 COMMUNICATIONS EAST",
"001028": "COMPUTER TECHNICA, INC.",
"001029": "CISCO SYSTEMS, INC.",
"00102A": "ZF MICROSYSTEMS, INC.",
"00102B": "UMAX DATA SYSTEMS, INC.",
"00102C": "Lasat Networks A/S",
"00102D": "HITACHI SOFTWARE ENGINEERING",
"00102E": "NETWORK SYSTEMS & TECHNOLOGIES PVT. LTD.",
"00102F": "CISCO SYSTEMS, INC.",
"001030": "EION Inc.",
"001031": "OBJECTIVE COMMUNICATIONS, INC.",
"001032": "ALTA TECHNOLOGY",
"001033": "ACCESSLAN COMMUNICATIONS, INC.",
"001034": "GNP Computers",
"001035": "ELITEGROUP COMPUTER SYSTEMS CO., LTD",
"001036": "INTER-TEL INTEGRATED SYSTEMS",
"001037": "CYQ've Technology Co., Ltd.",
"001038": "MICRO RESEARCH INSTITUTE, INC.",
"001039": "Vectron Systems AG",
"00103A": "DIAMOND NETWORK TECH",
"00103B": "HIPPI NETWORKING FORUM",
"00103C": "IC ENSEMBLE, INC.",
"00103D": "PHASECOM, LTD.",
"00103E": "NETSCHOOLS CORPORATION",
"00103F": "TOLLGRADE COMMUNICATIONS, INC.",
"001040": "INTERMEC CORPORATION",
"001041": "BRISTOL BABCOCK, INC.",
"001042": "Alacritech, Inc.",
"001043": "A2 CORPORATION",
"001044": "InnoLabs Corporation",
"001045": "Nortel Networks",
"001046": "ALCORN MCBRIDE INC.",
"001047": "ECHO ELETRIC CO. LTD.",
"001048": "HTRC AUTOMATION, INC.",
"001049": "ShoreTel, Inc",
"00104A": "The Parvus Corporation",
"00104B": "3COM CORPORATION",
"00104C": "Teledyne LeCroy, Inc",
"00104D": "SURTEC INDUSTRIES, INC.",
"00104E": "CEOLOGIC",
"00104F": "Oracle Corporation",
"001050": "RION CO., LTD.",
"001051": "CMICRO CORPORATION",
"001052": "METTLER-TOLEDO (ALBSTADT) GMBH",
"001053": "COMPUTER TECHNOLOGY CORP.",
"001054": "CISCO SYSTEMS, INC.",
"001055": "FUJITSU MICROELECTRONICS, INC.",
"001056": "SODICK CO., LTD.",
"001057": "Rebel.com, Inc.",
"001058": "ArrowPoint Communications",
"001059": "DIABLO RESEARCH CO. LLC",
"00105A": "3COM CORPORATION",
"00105B": "NET INSIGHT AB",
"00105C": "QUANTUM DESIGNS (H.K.) LTD.",
"00105D": "Draeger Medical",
"00105E": "HEKIMIAN LABORATORIES, INC.",
"00105F": "ZODIAC DATA SYSTEMS",
"001060": "BILLIONTON SYSTEMS, INC.",
"001061": "HOSTLINK CORP.",
"001062": "NX SERVER, ILNC.",
"001063": "STARGUIDE DIGITAL NETWORKS",
"001064": "DNPG, LLC",
"001065": "RADYNE CORPORATION",
"001066": "ADVANCED CONTROL SYSTEMS, INC.",
"001067": "Ericsson",
"001068": "COMOS TELECOM",
"001069": "HELIOSS COMMUNICATIONS, INC.",
"00106A": "DIGITAL MICROWAVE CORPORATION",
"00106B": "SONUS NETWORKS, INC.",
"00106C": "EDNT GmbH",
"00106D": "Axxcelera Broadband Wireless",
"00106E": "TADIRAN COM. LTD.",
"00106F": "TRENTON TECHNOLOGY INC.",
"001070": "CARADON TREND LTD.",
"001071": "ADVANET INC.",
"001072": "GVN TECHNOLOGIES, INC.",
"001073": "Technobox, Inc.",
"001074": "ATEN INTERNATIONAL CO., LTD.",
"001075": "Segate Technology LLC",
"001076": "EUREM GmbH",
"001077": "SAF DRIVE SYSTEMS, LTD.",
"001078": "NUERA COMMUNICATIONS, INC.",
"001079": "CISCO SYSTEMS, INC.",
"00107A": "AmbiCom, Inc.",
"00107B": "CISCO SYSTEMS, INC.",
"00107C": "P-COM, INC.",
"00107D": "AURORA COMMUNICATIONS, LTD.",
"00107E": "BACHMANN ELECTRONIC GmbH",
"00107F": "CRESTRON ELECTRONICS, INC.",
"001080": "METAWAVE COMMUNICATIONS",
"001081": "DPS, INC.",
"001082": "JNA TELECOMMUNICATIONS LIMITED",
"001083": "HEWLETT-PACKARD COMPANY",
"001084": "K-BOT COMMUNICATIONS",
"001085": "POLARIS COMMUNICATIONS, INC.",
"001086": "ATTO Technology, Inc.",
"001087": "Xstreamis PLC",
"001088": "AMERICAN NETWORKS INC.",
"001089": "WebSonic",
"00108A": "TeraLogic, Inc.",
"00108B": "LASERANIMATION SOLLINGER GmbH",
"00108C": "FUJITSU TELECOMMUNICATIONS EUROPE, LTD.",
"00108D": "Johnson Controls, Inc.",
"00108E": "HUGH SYMONS CONCEPT Technologies Ltd.",
"00108F": "RAPTOR SYSTEMS",
"001090": "CIMETRICS, INC.",
"001091": "NO WIRES NEEDED BV",
"001092": "NETCORE INC.",
"001093": "CMS COMPUTERS, LTD.",
"001094": "Performance Analysis Broadband, Spirent plc",
"001095": "Thomson Inc.",
"001096": "TRACEWELL SYSTEMS, INC.",
"001097": "WinNet Metropolitan Communications Systems, Inc.",
"001098": "STARNET TECHNOLOGIES, INC.",
"001099": "InnoMedia, Inc.",
"00109A": "NETLINE",
"00109B": "Emulex Corporation",
"00109C": "M-SYSTEM CO., LTD.",
"00109D": "CLARINET SYSTEMS, INC.",
"00109E": "AWARE, INC.",
"00109F": "PAVO, INC.",
"0010A0": "INNOVEX TECHNOLOGIES, INC.",
"0010A1": "KENDIN SEMICONDUCTOR, INC.",
"0010A2": "TNS",
"0010A3": "OMNITRONIX, INC.",
"0010A4": "XIRCOM",
"0010A5": "OXFORD INSTRUMENTS",
"0010A6": "CISCO SYSTEMS, INC.",
"0010A7": "UNEX TECHNOLOGY CORPORATION",
"0010A8": "RELIANCE COMPUTER CORP.",
"0010A9": "ADHOC TECHNOLOGIES",
"0010AA": "MEDIA4, INC.",
"0010AB": "KOITO ELECTRIC INDUSTRIES, LTD.",
"0010AC": "IMCI TECHNOLOGIES",
"0010AD": "SOFTRONICS USB, INC.",
"0010AE": "SHINKO ELECTRIC INDUSTRIES CO.",
"0010AF": "TAC SYSTEMS, INC.",
"0010B0": "MERIDIAN TECHNOLOGY CORP.",
"0010B1": "FOR-A CO., LTD.",
"0010B2": "COACTIVE AESTHETICS",
"0010B3": "NOKIA MULTIMEDIA TERMINALS",
"0010B4": "ATMOSPHERE NETWORKS",
"0010B5": "ACCTON TECHNOLOGY CORPORATION",
"0010B6": "ENTRATA COMMUNICATIONS CORP.",
"0010B7": "COYOTE TECHNOLOGIES, LLC",
"0010B8": "ISHIGAKI COMPUTER SYSTEM CO.",
"0010B9": "MAXTOR CORP.",
"0010BA": "MARTINHO-DAVIS SYSTEMS, INC.",
"0010BB": "DATA & INFORMATION TECHNOLOGY",
"0010BC": "Aastra Telecom",
"0010BD": "THE TELECOMMUNICATION TECHNOLOGY COMMITTEE (TTC)",
"0010BE": "MARCH NETWORKS CORPORATION",
"0010BF": "InterAir Wireless",
"0010C0": "ARMA, Inc.",
"0010C1": "OI ELECTRIC CO., LTD.",
"0010C2": "WILLNET, INC.",
"0010C3": "CSI-CONTROL SYSTEMS",
"0010C4": "MEDIA LINKS CO., LTD.",
"0010C5": "PROTOCOL TECHNOLOGIES, INC.",
"0010C6": "Universal Global Scientific Industrial Co., Ltd.",
"0010C7": "DATA TRANSMISSION NETWORK",
"0010C8": "COMMUNICATIONS ELECTRONICS SECURITY GROUP",
"0010C9": "MITSUBISHI ELECTRONICS LOGISTIC SUPPORT CO.",
"0010CA": "Telco Systems, Inc.",
"0010CB": "FACIT K.K.",
"0010CC": "CLP COMPUTER LOGISTIK PLANUNG GmbH",
"0010CD": "INTERFACE CONCEPT",
"0010CE": "VOLAMP, LTD.",
"0010CF": "FIBERLANE COMMUNICATIONS",
"0010D0": "WITCOM, LTD.",
"0010D1": "Top Layer Networks, Inc.",
"0010D2": "NITTO TSUSHINKI CO., LTD",
"0010D3": "GRIPS ELECTRONIC GMBH",
"0010D4": "STORAGE COMPUTER CORPORATION",
"0010D5": "IMASDE CANARIAS, S.A.",
"0010D6": "Exelis",
"0010D7": "ARGOSY RESEARCH INC.",
"0010D8": "CALISTA",
"0010D9": "IBM JAPAN, FUJISAWA MT+D",
"0010DA": "Kollmorgen Corp",
"0010DB": "Juniper Networks, Inc.",
"0010DC": "MICRO-STAR INTERNATIONAL CO., LTD.",
"0010DD": "ENABLE SEMICONDUCTOR, INC.",
"0010DE": "INTERNATIONAL DATACASTING CORPORATION",
"0010DF": "RISE COMPUTER INC.",
"0010E0": "Oracle Corporation",
"0010E1": "S.I. TECH, INC.",
"0010E2": "ArrayComm, Inc.",
"0010E3": "Hewlett-Packard Company",
"0010E4": "NSI CORPORATION",
"0010E5": "SOLECTRON TEXAS",
"0010E6": "APPLIED INTELLIGENT SYSTEMS, INC.",
"0010E7": "BreezeCom",
"0010E8": "TELOCITY, INCORPORATED",
"0010E9": "RAIDTEC LTD.",
"0010EA": "ADEPT TECHNOLOGY",
"0010EB": "SELSIUS SYSTEMS, INC.",
"0010EC": "RPCG, LLC",
"0010ED": "SUNDANCE TECHNOLOGY, INC.",
"0010EE": "CTI PRODUCTS, INC.",
"0010EF": "DBTEL INCORPORATED",
"0010F0": "RITTAL-WERK RUDOLF LOH GmbH & Co.",
"0010F1": "I-O CORPORATION",
"0010F2": "ANTEC",
"0010F3": "Nexcom International Co., Ltd.",
"0010F4": "Vertical Communications",
"0010F5": "AMHERST SYSTEMS, INC.",
"0010F6": "CISCO SYSTEMS, INC.",
"0010F7": "IRIICHI TECHNOLOGIES Inc.",
"0010F8": "TEXIO TECHNOLOGY CORPORATION",
"0010F9": "UNIQUE SYSTEMS, INC.",
"0010FA": "Apple",
"0010FB": "ZIDA TECHNOLOGIES LIMITED",
"0010FC": "BROADBAND NETWORKS, INC.",
"0010FD": "COCOM A/S",
"0010FE": "DIGITAL EQUIPMENT CORPORATION",
"0010FF": "CISCO SYSTEMS, INC.",
"001100": "Schneider Electric",
"001101": "CET Technologies Pte Ltd",
"001102": "Aurora Multimedia Corp.",
"001103": "kawamura electric inc.",
"001104": "TELEXY",
"001105": "Sunplus Technology Co., Ltd.",
"001106": "Siemens NV (Belgium)",
"001107": "RGB Networks Inc.",
"001108": "Orbital Data Corporation",
"001109": "Micro-Star International",
"00110A": "Hewlett-Packard Company",
"00110B": "Franklin Technology Systems",
"00110C": "Atmark Techno, Inc.",
"00110D": "SANBlaze Technology, Inc.",
"00110E": "Tsurusaki Sealand Transportation Co. Ltd.",
"00110F": "netplat,Inc.",
"001110": "Maxanna Technology Co., Ltd.",
"001111": "Intel Corporation",
"001112": "Honeywell CMSS",
"001113": "Fraunhofer FOKUS",
"001114": "EverFocus Electronics Corp.",
"001115": "EPIN Technologies, Inc.",
"001116": "COTEAU VERT CO., LTD.",
"001117": "CESNET",
"001118": "BLX IC Design Corp., Ltd.",
"001119": "Solteras, Inc.",
"00111A": "ARRIS Group, Inc.",
"00111B": "Targa Systems Div L-3 Communications Canada",
"00111C": "Pleora Technologies Inc.",
"00111D": "Hectrix Limited",
"00111E": "EPSG (Ethernet Powerlink Standardization Group)",
"00111F": "Doremi Labs, Inc.",
"001120": "CISCO SYSTEMS, INC.",
"001121": "CISCO SYSTEMS, INC.",
"001122": "CIMSYS Inc",
"001123": "Appointech, Inc.",
"001124": "Apple",
"001125": "IBM Corp",
"001126": "Venstar Inc.",
"001127": "TASI, Inc",
"001128": "Streamit",
"001129": "Paradise Datacom Ltd.",
"00112A": "Niko NV",
"00112B": "NetModule AG",
"00112C": "IZT GmbH",
"00112D": "iPulse Systems",
"00112E": "CEICOM",
"00112F": "ASUSTek Computer Inc.",
"001130": "Allied Telesis (Hong Kong) Ltd.",
"001131": "UNATECH. CO.,LTD",
"001132": "Synology Incorporated",
"001133": "Siemens Austria SIMEA",
"001134": "MediaCell, Inc.",
"001135": "Grandeye Ltd",
"001136": "Goodrich Sensor Systems",
"001137": "AICHI ELECTRIC CO., LTD.",
"001138": "TAISHIN CO., LTD.",
"001139": "STOEBER ANTRIEBSTECHNIK GmbH + Co. KG.",
"00113A": "SHINBORAM",
"00113B": "Micronet Communications Inc.",
"00113C": "Micronas GmbH",
"00113D": "KN SOLTEC CO.,LTD.",
"00113E": "JL Corporation",
"00113F": "Alcatel DI",
"001140": "Nanometrics Inc.",
"001141": "GoodMan Corporation",
"001142": "e-SMARTCOM INC.",
"001143": "Dell Inc",
"001144": "Assurance Technology Corp",
"001145": "ValuePoint Networks",
"001146": "Telecard-Pribor Ltd",
"001147": "Secom-Industry co.LTD.",
"001148": "Prolon Control Systems",
"001149": "Proliphix Inc.",
"00114A": "KAYABA INDUSTRY Co,.Ltd.",
"00114B": "Francotyp-Postalia GmbH",
"00114C": "caffeina applied research ltd.",
"00114D": "Atsumi Electric Co.,LTD.",
"00114E": "690885 Ontario Inc.",
"00114F": "US Digital Television, Inc",
"001150": "Belkin Corporation",
"001151": "Mykotronx",
"001152": "Eidsvoll Electronics AS",
"001153": "Trident Tek, Inc.",
"001154": "Webpro Technologies Inc.",
"001155": "Sevis Systems",
"001156": "Pharos Systems NZ",
"001157": "OF Networks Co., Ltd.",
"001158": "Nortel Networks",
"001159": "MATISSE NETWORKS INC",
"00115A": "Ivoclar Vivadent AG",
"00115B": "Elitegroup Computer System Co. (ECS)",
"00115C": "CISCO SYSTEMS, INC.",
"00115D": "CISCO SYSTEMS, INC.",
"00115E": "ProMinent Dosiertechnik GmbH",
"00115F": "ITX Security Co., Ltd.",
"001160": "ARTDIO Company Co., LTD",
"001161": "NetStreams, LLC",
"001162": "STAR MICRONICS CO.,LTD.",
"001163": "SYSTEM SPA DEPT. ELECTRONICS",
"001164": "ACARD Technology Corp.",
"001165": "Znyx Networks",
"001166": "Taelim Electronics Co., Ltd.",
"001167": "Integrated System Solution Corp.",
"001168": "HomeLogic LLC",
"001169": "EMS Satcom",
"00116A": "Domo Ltd",
"00116B": "Digital Data Communications Asia Co.,Ltd",
"00116C": "Nanwang Multimedia Inc.,Ltd",
"00116D": "American Time and Signal",
"00116E": "PePLink Ltd.",
"00116F": "Netforyou Co., LTD.",
"001170": "GSC SRL",
"001171": "DEXTER Communications, Inc.",
"001172": "COTRON CORPORATION",
"001173": "SMART Storage Systems",
"001174": "Wibhu Technologies, Inc.",
"001175": "PathScale, Inc.",
"001176": "Intellambda Systems, Inc.",
"001177": "Coaxial Networks, Inc.",
"001178": "Chiron Technology Ltd",
"001179": "Singular Technology Co. Ltd.",
"00117A": "Singim International Corp.",
"00117B": "B\u00fcchi Labortechnik AG",
"00117C": "e-zy.net",
"00117D": "ZMD America, Inc.",
"00117E": "Progeny, A division of Midmark Corp",
"00117F": "Neotune Information Technology Corporation,.LTD",
"001180": "ARRIS Group, Inc.",
"001181": "InterEnergy Co.Ltd,",
"001182": "IMI Norgren Ltd",
"001183": "Datalogic ADC, Inc.",
"001184": "Humo Laboratory,Ltd.",
"001185": "Hewlett-Packard Company",
"001186": "Prime Systems, Inc.",
"001187": "Category Solutions, Inc",
"001188": "Enterasys",
"001189": "Aerotech Inc",
"00118A": "Viewtran Technology Limited",
"00118B": "Alcatel-Lucent, Enterprise Business Group",
"00118C": "Missouri Department of Transportation",
"00118D": "Hanchang System Corp.",
"00118E": "Halytech Mace",
"00118F": "EUTECH INSTRUMENTS PTE. LTD.",
"001190": "Digital Design Corporation",
"001191": "CTS-Clima Temperatur Systeme GmbH",
"001192": "CISCO SYSTEMS, INC.",
"001193": "CISCO SYSTEMS, INC.",
"001194": "Chi Mei Communication Systems, Inc.",
"001195": "D-Link Corporation",
"001196": "Actuality Systems, Inc.",
"001197": "Monitoring Technologies Limited",
"001198": "Prism Media Products Limited",
"001199": "2wcom Systems GmbH",
"00119A": "Alkeria srl",
"00119B": "Telesynergy Research Inc.",
"00119C": "EP&T Energy",
"00119D": "Diginfo Technology Corporation",
"00119E": "Solectron Brazil",
"00119F": "Nokia Danmark A/S",
"0011A0": "Vtech Engineering Canada Ltd",
"0011A1": "VISION NETWARE CO.,LTD",
"0011A2": "Manufacturing Technology Inc",
"0011A3": "LanReady Technologies Inc.",
"0011A4": "JStream Technologies Inc.",
"0011A5": "Fortuna Electronic Corp.",
"0011A6": "Sypixx Networks",
"0011A7": "Infilco Degremont Inc.",
"0011A8": "Quest Technologies",
"0011A9": "MOIMSTONE Co., LTD",
"0011AA": "Uniclass Technology, Co., LTD",
"0011AB": "TRUSTABLE TECHNOLOGY CO.,LTD.",
"0011AC": "Simtec Electronics",
"0011AD": "Shanghai Ruijie Technology",
"0011AE": "ARRIS Group, Inc.",
"0011AF": "Medialink-i,Inc",
"0011B0": "Fortelink Inc.",
"0011B1": "BlueExpert Technology Corp.",
"0011B2": "2001 Technology Inc.",
"0011B3": "YOSHIMIYA CO.,LTD.",
"0011B4": "Westermo Teleindustri AB",
"0011B5": "Shenzhen Powercom Co.,Ltd",
"0011B6": "Open Systems International",
"0011B7": "Octalix B.V.",
"0011B8": "Liebherr - Elektronik GmbH",
"0011B9": "Inner Range Pty. Ltd.",
"0011BA": "Elexol Pty Ltd",
"0011BB": "CISCO SYSTEMS, INC.",
"0011BC": "CISCO SYSTEMS, INC.",
"0011BD": "Bombardier Transportation",
"0011BE": "AGP Telecom Co. Ltd",
"0011BF": "AESYS S.p.A.",
"0011C0": "Aday Technology Inc",
"0011C1": "4P MOBILE DATA PROCESSING",
"0011C2": "United Fiber Optic Communication",
"0011C3": "Transceiving System Technology Corporation",
"0011C4": "Terminales de Telecomunicacion Terrestre, S.L.",
"0011C5": "TEN Technology",
"0011C6": "Seagate Technology",
"0011C7": "Raymarine UK Ltd",
"0011C8": "Powercom Co., Ltd.",
"0011C9": "MTT Corporation",
"0011CA": "Long Range Systems, Inc.",
"0011CB": "Jacobsons AB",
"0011CC": "Guangzhou Jinpeng Group Co.,Ltd.",
"0011CD": "Axsun Technologies",
"0011CE": "Ubisense Limited",
"0011CF": "Thrane & Thrane A/S",
"0011D0": "Tandberg Data ASA",
"0011D1": "Soft Imaging System GmbH",
"0011D2": "Perception Digital Ltd",
"0011D3": "NextGenTel Holding ASA",
"0011D4": "NetEnrich, Inc",
"0011D5": "Hangzhou Sunyard System Engineering Co.,Ltd.",
"0011D6": "HandEra, Inc.",
"0011D7": "eWerks Inc",
"0011D8": "ASUSTek Computer Inc.",
"0011D9": "TiVo",
"0011DA": "Vivaas Technology Inc.",
"0011DB": "Land-Cellular Corporation",
"0011DC": "Glunz & Jensen",
"0011DD": "FROMUS TEC. Co., Ltd.",
"0011DE": "EURILOGIC",
"0011DF": "Current Energy",
"0011E0": "U-MEDIA Communications, Inc.",
"0011E1": "Arcelik A.S",
"0011E2": "Hua Jung Components Co., Ltd.",
"0011E3": "Thomson, Inc.",
"0011E4": "Danelec Electronics A/S",
"0011E5": "KCodes Corporation",
"0011E6": "Scientific Atlanta",
"0011E7": "WORLDSAT - Texas de France",
"0011E8": "Tixi.Com",
"0011E9": "STARNEX CO., LTD.",
"0011EA": "IWICS Inc.",
"0011EB": "Innovative Integration",
"0011EC": "AVIX INC.",
"0011ED": "802 Global",
"0011EE": "Estari, Inc.",
"0011EF": "Conitec Datensysteme GmbH",
"0011F0": "Wideful Limited",
"0011F1": "QinetiQ Ltd",
"0011F2": "Institute of Network Technologies",
"0011F3": "NeoMedia Europe AG",
"0011F4": "woori-net",
"0011F5": "ASKEY COMPUTER CORP.",
"0011F6": "Asia Pacific Microsystems , Inc.",
"0011F7": "Shenzhen Forward Industry Co., Ltd",
"0011F8": "AIRAYA Corp",
"0011F9": "Nortel Networks",
"0011FA": "Rane Corporation",
"0011FB": "Heidelberg Engineering GmbH",
"0011FC": "HARTING Electric Gmbh & Co.KG",
"0011FD": "KORG INC.",
"0011FE": "Keiyo System Research, Inc.",
"0011FF": "Digitro Tecnologia Ltda",
"001200": "CISCO SYSTEMS, INC.",
"001201": "CISCO SYSTEMS, INC.",
"001202": "Decrane Aerospace - Audio International Inc.",
"001203": "ActivNetworks",
"001204": "u10 Networks, Inc.",
"001205": "Terrasat Communications, Inc.",
"001206": "iQuest (NZ) Ltd",
"001207": "Head Strong International Limited",
"001208": "Gantner Instruments GmbH",
"001209": "Fastrax Ltd",
"00120A": "Emerson Climate Technologies GmbH",
"00120B": "Chinasys Technologies Limited",
"00120C": "CE-Infosys Pte Ltd",
"00120D": "Advanced Telecommunication Technologies, Inc.",
"00120E": "AboCom",
"00120F": "IEEE 802.3",
"001210": "WideRay Corp",
"001211": "Protechna Herbst GmbH & Co. KG",
"001212": "PLUS Corporation",
"001213": "Metrohm AG",
"001214": "Koenig & Bauer AG",
"001215": "iStor Networks, Inc.",
"001216": "ICP Internet Communication Payment AG",
"001217": "Cisco-Linksys, LLC",
"001218": "ARUZE Corporation",
"001219": "Ahead Communication Systems Inc",
"00121A": "Techno Soft Systemnics Inc.",
"00121B": "Sound Devices, LLC",
"00121C": "PARROT S.A.",
"00121D": "Netfabric Corporation",
"00121E": "Juniper Networks, Inc.",
"00121F": "Harding Instruments",
"001220": "Cadco Systems",
"001221": "B.Braun Melsungen AG",
"001222": "Skardin (UK) Ltd",
"001223": "Pixim",
"001224": "NexQL Corporation",
"001225": "ARRIS Group, Inc.",
"001226": "Japan Direx Corporation",
"001227": "Franklin Electric Co., Inc.",
"001228": "Data Ltd.",
"001229": "BroadEasy Technologies Co.,Ltd",
"00122A": "VTech Telecommunications Ltd.",
"00122B": "Virbiage Pty Ltd",
"00122C": "Soenen Controls N.V.",
"00122D": "SiNett Corporation",
"00122E": "Signal Technology - AISD",
"00122F": "Sanei Electric Inc.",
"001230": "Picaso Infocommunication CO., LTD.",
"001231": "Motion Control Systems, Inc.",
"001232": "LeWiz Communications Inc.",
"001233": "JRC TOKKI Co.,Ltd.",
"001234": "Camille Bauer",
"001235": "Andrew Corporation",
"001236": "ConSentry Networks",
"001237": "Texas Instruments",
"001238": "SetaBox Technology Co., Ltd.",
"001239": "S Net Systems Inc.",
"00123A": "Posystech Inc., Co.",
"00123B": "KeRo Systems ApS",
"00123C": "Second Rule LLC",
"00123D": "GES",
"00123E": "ERUNE technology Co., Ltd.",
"00123F": "Dell Inc",
"001240": "AMOI ELECTRONICS CO.,LTD",
"001241": "a2i marketing center",
"001242": "Millennial Net",
"001243": "CISCO SYSTEMS, INC.",
"001244": "CISCO SYSTEMS, INC.",
"001245": "Zellweger Analytics, Inc.",
"001246": "T.O.M TECHNOLOGY INC..",
"001247": "Samsung Electronics Co., Ltd.",
"001248": "EMC Corporation (Kashya)",
"001249": "Delta Elettronica S.p.A.",
"00124A": "Dedicated Devices, Inc.",
"00124B": "Texas Instruments",
"00124C": "BBWM Corporation",
"00124D": "Inducon BV",
"00124E": "XAC AUTOMATION CORP.",
"00124F": "Pentair Thermal Management",
"001250": "Tokyo Aircaft Instrument Co., Ltd.",
"001251": "SILINK",
"001252": "Citronix, LLC",
"001253": "AudioDev AB",
"001254": "Spectra Technologies Holdings Company Ltd",
"001255": "NetEffect Incorporated",
"001256": "LG INFORMATION & COMM.",
"001257": "LeapComm Communication Technologies Inc.",
"001258": "Activis Polska",
"001259": "THERMO ELECTRON KARLSRUHE",
"00125A": "Microsoft Corporation",
"00125B": "KAIMEI ELECTRONI",
"00125C": "Green Hills Software, Inc.",
"00125D": "CyberNet Inc.",
"00125E": "CAEN",
"00125F": "AWIND Inc.",
"001260": "Stanton Magnetics,inc.",
"001261": "Adaptix, Inc",
"001262": "Nokia Danmark A/S",
"001263": "Data Voice Technologies GmbH",
"001264": "daum electronic gmbh",
"001265": "Enerdyne Technologies, Inc.",
"001266": "Swisscom Hospitality Services SA",
"001267": "Panasonic Corporation",
"001268": "IPS d.o.o.",
"001269": "Value Electronics",
"00126A": "OPTOELECTRONICS Co., Ltd.",
"00126B": "Ascalade Communications Limited",
"00126C": "Visonic Ltd.",
"00126D": "University of California, Berkeley",
"00126E": "Seidel Elektronik GmbH Nfg.KG",
"00126F": "Rayson Technology Co., Ltd.",
"001270": "NGES Denro Systems",
"001271": "Measurement Computing Corp",
"001272": "Redux Communications Ltd.",
"001273": "Stoke Inc",
"001274": "NIT lab",
"001275": "Sentilla Corporation",
"001276": "CG Power Systems Ireland Limited",
"001277": "Korenix Technologies Co., Ltd.",
"001278": "International Bar Code",
"001279": "Hewlett-Packard Company",
"00127A": "Sanyu Industry Co.,Ltd.",
"00127B": "VIA Networking Technologies, Inc.",
"00127C": "SWEGON AB",
"00127D": "MobileAria",
"00127E": "Digital Lifestyles Group, Inc.",
"00127F": "CISCO SYSTEMS, INC.",
"001280": "CISCO SYSTEMS, INC.",
"001281": "March Networks S.p.A.",
"001282": "Qovia",
"001283": "Nortel Networks",
"001284": "Lab33 Srl",
"001285": "Gizmondo Europe Ltd",
"001286": "ENDEVCO CORP",
"001287": "Digital Everywhere Unterhaltungselektronik GmbH",
"001288": "2Wire, Inc",
"001289": "Advance Sterilization Products",
"00128A": "ARRIS Group, Inc.",
"00128B": "Sensory Networks Inc",
"00128C": "Woodward Governor",
"00128D": "STB Datenservice GmbH",
"00128E": "Q-Free ASA",
"00128F": "Montilio",
"001290": "KYOWA Electric & Machinery Corp.",
"001291": "KWS Computersysteme GmbH",
"001292": "Griffin Technology",
"001293": "GE Energy",
"001294": "SUMITOMO ELECTRIC DEVICE INNOVATIONS, INC",
"001295": "Aiware Inc.",
"001296": "Addlogix",
"001297": "O2Micro, Inc.",
"001298": "MICO ELECTRIC(SHENZHEN) LIMITED",
"001299": "Ktech Telecommunications Inc",
"00129A": "IRT Electronics Pty Ltd",
"00129B": "E2S Electronic Engineering Solutions, S.L.",
"00129C": "Yulinet",
"00129D": "First International Computer do Brasil",
"00129E": "Surf Communications Inc.",
"00129F": "RAE Systems",
"0012A0": "NeoMeridian Sdn Bhd",
"0012A1": "BluePacket Communications Co., Ltd.",
"0012A2": "VITA",
"0012A3": "Trust International B.V.",
"0012A4": "ThingMagic, LLC",
"0012A5": "Stargen, Inc.",
"0012A6": "Dolby Australia",
"0012A7": "ISR TECHNOLOGIES Inc",
"0012A8": "intec GmbH",
"0012A9": "3Com Ltd",
"0012AA": "IEE, Inc.",
"0012AB": "WiLife, Inc.",
"0012AC": "ONTIMETEK INC.",
"0012AD": "IDS GmbH",
"0012AE": "HLS HARD-LINE Solutions Inc.",
"0012AF": "ELPRO Technologies",
"0012B0": "Efore Oyj (Plc)",
"0012B1": "Dai Nippon Printing Co., Ltd",
"0012B2": "AVOLITES LTD.",
"0012B3": "Advance Wireless Technology Corp.",
"0012B4": "Work Microwave GmbH",
"0012B5": "Vialta, Inc.",
"0012B6": "Santa Barbara Infrared, Inc.",
"0012B7": "PTW Freiburg",
"0012B8": "G2 Microsystems",
"0012B9": "Fusion Digital Technology",
"0012BA": "FSI Systems, Inc.",
"0012BB": "Telecommunications Industry Association TR-41 Committee",
"0012BC": "Echolab LLC",
"0012BD": "Avantec Manufacturing Limited",
"0012BE": "Astek Corporation",
"0012BF": "Arcadyan Technology Corporation",
"0012C0": "HotLava Systems, Inc.",
"0012C1": "Check Point Software Technologies",
"0012C2": "Apex Electronics Factory",
"0012C3": "WIT S.A.",
"0012C4": "Viseon, Inc.",
"0012C5": "V-Show Technology (China) Co.,Ltd",
"0012C6": "TGC America, Inc",
"0012C7": "SECURAY Technologies Ltd.Co.",
"0012C8": "Perfect tech",
"0012C9": "ARRIS Group, Inc.",
"0012CA": "Mechatronic Brick Aps",
"0012CB": "CSS Inc.",
"0012CC": "Bitatek CO., LTD",
"0012CD": "ASEM SpA",
"0012CE": "Advanced Cybernetics Group",
"0012CF": "Accton Technology Corporation",
"0012D0": "Gossen-Metrawatt-GmbH",
"0012D1": "Texas Instruments Inc",
"0012D2": "Texas Instruments",
"0012D3": "Zetta Systems, Inc.",
"0012D4": "Princeton Technology, Ltd",
"0012D5": "Motion Reality Inc.",
"0012D6": "Jiangsu Yitong High-Tech Co.,Ltd",
"0012D7": "Invento Networks, Inc.",
"0012D8": "International Games System Co., Ltd.",
"0012D9": "CISCO SYSTEMS, INC.",
"0012DA": "CISCO SYSTEMS, INC.",
"0012DB": "ZIEHL industrie-elektronik GmbH + Co KG",
"0012DC": "SunCorp Industrial Limited",
"0012DD": "Shengqu Information Technology (Shanghai) Co., Ltd.",
"0012DE": "Radio Components Sweden AB",
"0012DF": "Novomatic AG",
"0012E0": "Codan Limited",
"0012E1": "Alliant Networks, Inc",
"0012E2": "ALAXALA Networks Corporation",
"0012E3": "Agat-RT, Ltd.",
"0012E4": "ZIEHL industrie-electronik GmbH + Co KG",
"0012E5": "Time America, Inc.",
"0012E6": "SPECTEC COMPUTER CO., LTD.",
"0012E7": "Projectek Networking Electronics Corp.",
"0012E8": "Fraunhofer IMS",
"0012E9": "Abbey Systems Ltd",
"0012EA": "Trane",
"0012EB": "PDH Solutions, LLC",
"0012EC": "Movacolor b.v.",
"0012ED": "AVG Advanced Technologies",
"0012EE": "Sony Ericsson Mobile Communications AB",
"0012EF": "OneAccess SA",
"0012F0": "Intel Corporate",
"0012F1": "IFOTEC",
"0012F2": "Brocade Communications Systems, Inc",
"0012F3": "connectBlue AB",
"0012F4": "Belco International Co.,Ltd.",
"0012F5": "Imarda New Zealand Limited",
"0012F6": "MDK CO.,LTD.",
"0012F7": "Xiamen Xinglian Electronics Co., Ltd.",
"0012F8": "WNI Resources, LLC",
"0012F9": "URYU SEISAKU, LTD.",
"0012FA": "THX LTD",
"0012FB": "Samsung Electronics",
"0012FC": "PLANET System Co.,LTD",
"0012FD": "OPTIMUS IC S.A.",
"0012FE": "Lenovo Mobile Communication Technology Ltd.",
"0012FF": "Lely Industries N.V.",
"001300": "IT-FACTORY, INC.",
"001301": "IronGate S.L.",
"001302": "Intel Corporate",
"001303": "GateConnect",
"001304": "Flaircomm Technologies Co. LTD",
"001305": "Epicom, Inc.",
"001306": "Always On Wireless",
"001307": "Paravirtual Corporation",
"001308": "Nuvera Fuel Cells",
"001309": "Ocean Broadband Networks",
"00130A": "Nortel",
"00130B": "Mextal B.V.",
"00130C": "HF System Corporation",
"00130D": "GALILEO AVIONICA",
"00130E": "Focusrite Audio Engineering Limited",
"00130F": "EGEMEN Bilgisayar Muh San ve Tic LTD STI",
"001310": "Cisco-Linksys, LLC",
"001311": "ARRIS International",
"001312": "Amedia Networks Inc.",
"001313": "GuangZhou Post & Telecom Equipment ltd",
"001314": "Asiamajor Inc.",
"001315": "SONY Computer Entertainment inc,",
"001316": "L-S-B Broadcast Technologies GmbH",
"001317": "GN Netcom as",
"001318": "DGSTATION Co., Ltd.",
"001319": "CISCO SYSTEMS, INC.",
"00131A": "CISCO SYSTEMS, INC.",
"00131B": "BeCell Innovations Corp.",
"00131C": "LiteTouch, Inc.",
"00131D": "Scanvaegt International A/S",
"00131E": "Peiker acustic GmbH & Co. KG",
"00131F": "NxtPhase T&D, Corp.",
"001320": "Intel Corporate",
"001321": "Hewlett-Packard Company",
"001322": "DAQ Electronics, Inc.",
"001323": "Cap Co., Ltd.",
"001324": "Schneider Electric Ultra Terminal",
"001325": "Cortina Systems Inc",
"001326": "ECM Systems Ltd",
"001327": "Data Acquisitions limited",
"001328": "Westech Korea Inc.,",
"001329": "VSST Co., LTD",
"00132A": "Sitronics Telecom Solutions",
"00132B": "Phoenix Digital",
"00132C": "MAZ Brandenburg GmbH",
"00132D": "iWise Communications",
"00132E": "ITian Coporation",
"00132F": "Interactek",
"001330": "EURO PROTECTION SURVEILLANCE",
"001331": "CellPoint Connect",
"001332": "Beijing Topsec Network Security Technology Co., Ltd.",
"001333": "BaudTec Corporation",
"001334": "Arkados, Inc.",
"001335": "VS Industry Berhad",
"001336": "Tianjin 712 Communication Broadcasting co., ltd.",
"001337": "Orient Power Home Network Ltd.",
"001338": "FRESENIUS-VIAL",
"001339": "CCV Deutschland GmbH",
"00133A": "VadaTech Inc.",
"00133B": "Speed Dragon Multimedia Limited",
"00133C": "QUINTRON SYSTEMS INC.",
"00133D": "Micro Memory Curtiss Wright Co",
"00133E": "MetaSwitch",
"00133F": "Eppendorf Instrumente GmbH",
"001340": "AD.EL s.r.l.",
"001341": "Shandong New Beiyang Information Technology Co.,Ltd",
"001342": "Vision Research, Inc.",
"001343": "Matsushita Electronic Components (Europe) GmbH",
"001344": "Fargo Electronics Inc.",
"001345": "Eaton Corporation",
"001346": "D-Link Corporation",
"001347": "BlueTree Wireless Data Inc.",
"001348": "Artila Electronics Co., Ltd.",
"001349": "ZyXEL Communications Corporation",
"00134A": "Engim, Inc.",
"00134B": "ToGoldenNet Technology Inc.",
"00134C": "YDT Technology International",
"00134D": "Inepro BV",
"00134E": "Valox Systems, Inc.",
"00134F": "Tranzeo Wireless Technologies Inc.",
"001350": "Silver Spring Networks, Inc",
"001351": "Niles Audio Corporation",
"001352": "Naztec, Inc.",
"001353": "HYDAC Filtertechnik GMBH",
"001354": "Zcomax Technologies, Inc.",
"001355": "TOMEN Cyber-business Solutions, Inc.",
"001356": "FLIR Radiation Inc",
"001357": "Soyal Technology Co., Ltd.",
"001358": "Realm Systems, Inc.",
"001359": "ProTelevision Technologies A/S",
"00135A": "Project T&E Limited",
"00135B": "PanelLink Cinema, LLC",
"00135C": "OnSite Systems, Inc.",
"00135D": "NTTPC Communications, Inc.",
"00135E": "EAB/RWI/K",
"00135F": "CISCO SYSTEMS, INC.",
"001360": "CISCO SYSTEMS, INC.",
"001361": "Biospace Co., Ltd.",
"001362": "ShinHeung Precision Co., Ltd.",
"001363": "Verascape, Inc.",
"001364": "Paradigm Technology Inc..",
"001365": "Nortel",
"001366": "Neturity Technologies Inc.",
"001367": "Narayon. Co., Ltd.",
"001368": "Saab Danmark A/S",
"001369": "Honda Electron Co., LED.",
"00136A": "Hach Lange Sarl",
"00136B": "E-TEC",
"00136C": "TomTom",
"00136D": "Tentaculus AB",
"00136E": "Techmetro Corp.",
"00136F": "PacketMotion, Inc.",
"001370": "Nokia Danmark A/S",
"001371": "ARRIS Group, Inc.",
"001372": "Dell Inc",
"001373": "BLwave Electronics Co., Ltd",
"001374": "Atheros Communications, Inc.",
"001375": "American Security Products Co.",
"001376": "Tabor Electronics Ltd.",
"001377": "Samsung Electronics CO., LTD",
"001378": "Qsan Technology, Inc.",
"001379": "PONDER INFORMATION INDUSTRIES LTD.",
"00137A": "Netvox Technology Co., Ltd.",
"00137B": "Movon Corporation",
"00137C": "Kaicom co., Ltd.",
"00137D": "Dynalab, Inc.",
"00137E": "CorEdge Networks, Inc.",
"00137F": "CISCO SYSTEMS, INC.",
"001380": "CISCO SYSTEMS, INC.",
"001381": "CHIPS & Systems, Inc.",
"001382": "Cetacea Networks Corporation",
"001383": "Application Technologies and Engineering Research Laboratory",
"001384": "Advanced Motion Controls",
"001385": "Add-On Technology Co., LTD.",
"001386": "ABB Inc./Totalflow",
"001387": "27M Technologies AB",
"001388": "WiMedia Alliance",
"001389": "Redes de Telefon\u00eda M\u00f3vil S.A.",
"00138A": "QINGDAO GOERTEK ELECTRONICS CO.,LTD.",
"00138B": "Phantom Technologies LLC",
"00138C": "Kumyoung.Co.Ltd",
"00138D": "Kinghold",
"00138E": "FOAB Elektronik AB",
"00138F": "Asiarock Incorporation",
"001390": "Termtek Computer Co., Ltd",
"001391": "OUEN CO.,LTD.",
"001392": "Ruckus Wireless",
"001393": "Panta Systems, Inc.",
"001394": "Infohand Co.,Ltd",
"001395": "congatec AG",
"001396": "Acbel Polytech Inc.",
"001397": "Oracle Corporation",
"001398": "TrafficSim Co.,Ltd",
"001399": "STAC Corporation.",
"00139A": "K-ubique ID Corp.",
"00139B": "ioIMAGE Ltd.",
"00139C": "Exavera Technologies, Inc.",
"00139D": "Marvell Hispana S.L.",
"00139E": "Ciara Technologies Inc.",
"00139F": "Electronics Design Services, Co., Ltd.",
"0013A0": "ALGOSYSTEM Co., Ltd.",
"0013A1": "Crow Electronic Engeneering",
"0013A2": "MaxStream, Inc",
"0013A3": "Siemens Com CPE Devices",
"0013A4": "KeyEye Communications",
"0013A5": "General Solutions, LTD.",
"0013A6": "Extricom Ltd",
"0013A7": "BATTELLE MEMORIAL INSTITUTE",
"0013A8": "Tanisys Technology",
"0013A9": "Sony Corporation",
"0013AA": "ALS & TEC Ltd.",
"0013AB": "Telemotive AG",
"0013AC": "Sunmyung Electronics Co., LTD",
"0013AD": "Sendo Ltd",
"0013AE": "Radiance Technologies, Inc.",
"0013AF": "NUMA Technology,Inc.",
"0013B0": "Jablotron",
"0013B1": "Intelligent Control Systems (Asia) Pte Ltd",
"0013B2": "Carallon Limited",
"0013B3": "Ecom Communications Technology Co., Ltd.",
"0013B4": "Appear TV",
"0013B5": "Wavesat",
"0013B6": "Sling Media, Inc.",
"0013B7": "Scantech ID",
"0013B8": "RyCo Electronic Systems Limited",
"0013B9": "BM SPA",
"0013BA": "ReadyLinks Inc",
"0013BB": "Smartvue Corporation",
"0013BC": "Artimi Ltd",
"0013BD": "HYMATOM SA",
"0013BE": "Virtual Conexions",
"0013BF": "Media System Planning Corp.",
"0013C0": "Trix Tecnologia Ltda.",
"0013C1": "Asoka USA Corporation",
"0013C2": "WACOM Co.,Ltd",
"0013C3": "CISCO SYSTEMS, INC.",
"0013C4": "CISCO SYSTEMS, INC.",
"0013C5": "LIGHTRON FIBER-OPTIC DEVICES INC.",
"0013C6": "OpenGear, Inc",
"0013C7": "IONOS Co.,Ltd.",
"0013C8": "ADB Broadband Italia",
"0013C9": "Beyond Achieve Enterprises Ltd.",
"0013CA": "Pico Digital",
"0013CB": "Zenitel Norway AS",
"0013CC": "Tall Maple Systems",
"0013CD": "MTI co. LTD",
"0013CE": "Intel Corporate",
"0013CF": "4Access Communications",
"0013D0": "t+ Medical Ltd",
"0013D1": "KIRK telecom A/S",
"0013D2": "PAGE IBERICA, S.A.",
"0013D3": "MICRO-STAR INTERNATIONAL CO., LTD.",
"0013D4": "ASUSTek COMPUTER INC.",
"0013D5": "RuggedCom",
"0013D6": "TII NETWORK TECHNOLOGIES, INC.",
"0013D7": "SPIDCOM Technologies SA",
"0013D8": "Princeton Instruments",
"0013D9": "Matrix Product Development, Inc.",
"0013DA": "Diskware Co., Ltd",
"0013DB": "SHOEI Electric Co.,Ltd",
"0013DC": "IBTEK INC.",
"0013DD": "Abbott Diagnostics",
"0013DE": "Adapt4, LLC",
"0013DF": "Ryvor Corp.",
"0013E0": "Murata Manufacturing Co., Ltd.",
"0013E1": "Iprobe AB",
"0013E2": "GeoVision Inc.",
"0013E3": "CoVi Technologies, Inc.",
"0013E4": "YANGJAE SYSTEMS CORP.",
"0013E5": "TENOSYS, INC.",
"0013E6": "Technolution",
"0013E7": "Halcro",
"0013E8": "Intel Corporate",
"0013E9": "VeriWave, Inc.",
"0013EA": "Kamstrup A/S",
"0013EB": "Sysmaster Corporation",
"0013EC": "Sunbay Software AG",
"0013ED": "PSIA",
"0013EE": "JBX Designs Inc.",
"0013EF": "Kingjon Digital Technology Co.,Ltd",
"0013F0": "Wavefront Semiconductor",
"0013F1": "AMOD Technology Co., Ltd.",
"0013F2": "Klas Ltd",
"0013F3": "Giga-byte Communications Inc.",
"0013F4": "Psitek (Pty) Ltd",
"0013F5": "Akimbi Systems",
"0013F6": "Cintech",
"0013F7": "SMC Networks, Inc.",
"0013F8": "Dex Security Solutions",
"0013F9": "Cavera Systems",
"0013FA": "LifeSize Communications, Inc",
"0013FB": "RKC INSTRUMENT INC.",
"0013FC": "SiCortex, Inc",
"0013FD": "Nokia Danmark A/S",
"0013FE": "GRANDTEC ELECTRONIC CORP.",
"0013FF": "Dage-MTI of MC, Inc.",
"001400": "MINERVA KOREA CO., LTD",
"001401": "Rivertree Networks Corp.",
"001402": "kk-electronic a/s",
"001403": "Renasis, LLC",
"001404": "ARRIS Group, Inc.",
"001405": "OpenIB, Inc.",
"001406": "Go Networks",
"001407": "Sperian Protection Instrumentation",
"001408": "Eka Systems Inc.",
"001409": "MAGNETI MARELLI S.E. S.p.A.",
"00140A": "WEPIO Co., Ltd.",
"00140B": "FIRST INTERNATIONAL COMPUTER, INC.",
"00140C": "GKB CCTV CO., LTD.",
"00140D": "Nortel",
"00140E": "Nortel",
"00140F": "Federal State Unitary Enterprise Leningrad R&D Institute of",
"001410": "Suzhou Keda Technology CO.,Ltd",
"001411": "Deutschmann Automation GmbH & Co. KG",
"001412": "S-TEC electronics AG",
"001413": "Trebing & Himstedt Proze\u00dfautomation GmbH & Co. KG",
"001414": "Jumpnode Systems LLC.",
"001415": "Intec Automation Inc.",
"001416": "Scosche Industries, Inc.",
"001417": "RSE Informations Technologie GmbH",
"001418": "C4Line",
"001419": "SIDSA",
"00141A": "DEICY CORPORATION",
"00141B": "CISCO SYSTEMS, INC.",
"00141C": "CISCO SYSTEMS, INC.",
"00141D": "LTi DRIVES GmbH",
"00141E": "P.A. Semi, Inc.",
"00141F": "SunKwang Electronics Co., Ltd",
"001420": "G-Links networking company",
"001421": "Total Wireless Technologies Pte. Ltd.",
"001422": "Dell Inc",
"001423": "J-S Co. NEUROCOM",
"001424": "Merry Electrics CO., LTD.",
"001425": "Galactic Computing Corp.",
"001426": "NL Technology",
"001427": "JazzMutant",
"001428": "Vocollect, Inc",
"001429": "V Center Technologies Co., Ltd.",
"00142A": "Elitegroup Computer System Co., Ltd",
"00142B": "Edata Communication Inc.",
"00142C": "Koncept International, Inc.",
"00142D": "Toradex AG",
"00142E": "77 Elektronika Kft.",
"00142F": "WildPackets",
"001430": "ViPowER, Inc",
"001431": "PDL Electronics Ltd",
"001432": "Tarallax Wireless, Inc.",
"001433": "Empower Technologies(Canada) Inc.",
"001434": "Keri Systems, Inc",
"001435": "CityCom Corp.",
"001436": "Qwerty Elektronik AB",
"001437": "GSTeletech Co.,Ltd.",
"001438": "Hewlett-Packard Company",
"001439": "Blonder Tongue Laboratories, Inc.",
"00143A": "RAYTALK INTERNATIONAL SRL",
"00143B": "Sensovation AG",
"00143C": "Rheinmetall Canada Inc.",
"00143D": "Aevoe Inc.",
"00143E": "AirLink Communications, Inc.",
"00143F": "Hotway Technology Corporation",
"001440": "ATOMIC Corporation",
"001441": "Innovation Sound Technology Co., LTD.",
"001442": "ATTO CORPORATION",
"001443": "Consultronics Europe Ltd",
"001444": "Grundfos Holding",
"001445": "Telefon-Gradnja d.o.o.",
"001446": "SuperVision Solutions LLC",
"001447": "BOAZ Inc.",
"001448": "Inventec Multimedia & Telecom Corporation",
"001449": "Sichuan Changhong Electric Ltd.",
"00144A": "Taiwan Thick-Film Ind. Corp.",
"00144B": "Hifn, Inc.",
"00144C": "General Meters Corp.",
"00144D": "Intelligent Systems",
"00144E": "SRISA",
"00144F": "Oracle Corporation",
"001450": "Heim Systems GmbH",
"001451": "Apple",
"001452": "CALCULEX,INC.",
"001453": "ADVANTECH TECHNOLOGIES CO.,LTD",
"001454": "Symwave",
"001455": "Coder Electronics Corporation",
"001456": "Edge Products",
"001457": "T-VIPS AS",
"001458": "HS Automatic ApS",
"001459": "Moram Co., Ltd.",
"00145A": "Neratec Solutions AG",
"00145B": "SeekerNet Inc.",
"00145C": "Intronics B.V.",
"00145D": "WJ Communications, Inc.",
"00145E": "IBM Corp",
"00145F": "ADITEC CO. LTD",
"001460": "Kyocera Wireless Corp.",
"001461": "CORONA CORPORATION",
"001462": "Digiwell Technology, inc",
"001463": "IDCS N.V.",
"001464": "Cryptosoft",
"001465": "Novo Nordisk A/S",
"001466": "Kleinhenz Elektronik GmbH",
"001467": "ArrowSpan Inc.",
"001468": "CelPlan International, Inc.",
"001469": "CISCO SYSTEMS, INC.",
"00146A": "CISCO SYSTEMS, INC.",
"00146B": "Anagran, Inc.",
"00146C": "Netgear Inc.",
"00146D": "RF Technologies",
"00146E": "H. Stoll GmbH & Co. KG",
"00146F": "Kohler Co",
"001470": "Prokom Software SA",
"001471": "Eastern Asia Technology Limited",
"001472": "China Broadband Wireless IP Standard Group",
"001473": "Bookham Inc",
"001474": "K40 Electronics",
"001475": "Wiline Networks, Inc.",
"001476": "MultiCom Industries Limited",
"001477": "Nertec Inc.",
"001478": "ShenZhen TP-LINK Technologies Co., Ltd.",
"001479": "NEC Magnus Communications,Ltd.",
"00147A": "Eubus GmbH",
"00147B": "Iteris, Inc.",
"00147C": "3Com Ltd",
"00147D": "Aeon Digital International",
"00147E": "InnerWireless",
"00147F": "Thomson Telecom Belgium",
"001480": "Hitachi-LG Data Storage Korea, Inc",
"001481": "Multilink Inc",
"001482": "Aurora Networks",
"001483": "eXS Inc.",
"001484": "Cermate Technologies Inc.",
"001485": "Giga-Byte",
"001486": "Echo Digital Audio Corporation",
"001487": "American Technology Integrators",
"001488": "Akorri",
"001489": "B15402100 - JANDEI, S.L.",
"00148A": "Elin Ebg Traction Gmbh",
"00148B": "Globo Electronic GmbH & Co. KG",
"00148C": "Fortress Technologies",
"00148D": "Cubic Defense Simulation Systems",
"00148E": "Tele Power Inc.",
"00148F": "Protronic (Far East) Ltd.",
"001490": "ASP Corporation",
"001491": "Daniels Electronics Ltd. dbo Codan Rado Communications",
"001492": "Liteon, Mobile Media Solution SBU",
"001493": "Systimax Solutions",
"001494": "ESU AG",
"001495": "2Wire, Inc.",
"001496": "Phonic Corp.",
"001497": "ZHIYUAN Eletronics co.,ltd.",
"001498": "Viking Design Technology",
"001499": "Helicomm Inc",
"00149A": "ARRIS Group, Inc.",
"00149B": "Nokota Communications, LLC",
"00149C": "HF Company",
"00149D": "Sound ID Inc.",
"00149E": "UbONE Co., Ltd",
"00149F": "System and Chips, Inc.",
"0014A0": "Accsense, Inc.",
"0014A1": "Synchronous Communication Corp",
"0014A2": "Core Micro Systems Inc.",
"0014A3": "Vitelec BV",
"0014A4": "Hon Hai Precision Ind. Co., Ltd.",
"0014A5": "Gemtek Technology Co., Ltd.",
"0014A6": "Teranetics, Inc.",
"0014A7": "Nokia Danmark A/S",
"0014A8": "CISCO SYSTEMS, INC.",
"0014A9": "CISCO SYSTEMS, INC.",
"0014AA": "Ashly Audio, Inc.",
"0014AB": "Senhai Electronic Technology Co., Ltd.",
"0014AC": "Bountiful WiFi",
"0014AD": "Gassner Wiege- und Me\u00dftechnik GmbH",
"0014AE": "Wizlogics Co., Ltd.",
"0014AF": "Datasym POS Inc.",
"0014B0": "Naeil Community",
"0014B1": "Avitec AB",
"0014B2": "mCubelogics Corporation",
"0014B3": "CoreStar International Corp",
"0014B4": "General Dynamics United Kingdom Ltd",
"0014B5": "PHYSIOMETRIX,INC",
"0014B6": "Enswer Technology Inc.",
"0014B7": "AR Infotek Inc.",
"0014B8": "Hill-Rom",
"0014B9": "MSTAR SEMICONDUCTOR",
"0014BA": "Carvers SA de CV",
"0014BB": "Open Interface North America",
"0014BC": "SYNECTIC TELECOM EXPORTS PVT. LTD.",
"0014BD": "incNETWORKS, Inc",
"0014BE": "Wink communication technology CO.LTD",
"0014BF": "Cisco-Linksys LLC",
"0014C0": "Symstream Technology Group Ltd",
"0014C1": "U.S. Robotics Corporation",
"0014C2": "Hewlett-Packard Company",
"0014C3": "Seagate Technology",
"0014C4": "Vitelcom Mobile Technology",
"0014C5": "Alive Technologies Pty Ltd",
"0014C6": "Quixant Ltd",
"0014C7": "Nortel",
"0014C8": "Contemporary Research Corp",
"0014C9": "Brocade Communications Systems, Inc.",
"0014CA": "Key Radio Systems Limited",
"0014CB": "LifeSync Corporation",
"0014CC": "Zetec, Inc.",
"0014CD": "DigitalZone Co., Ltd.",
"0014CE": "NF CORPORATION",
"0014CF": "INVISIO Communications",
"0014D0": "BTI Systems Inc.",
"0014D1": "TRENDnet",
"0014D2": "Kyuden Technosystems Corporation",
"0014D3": "SEPSA",
"0014D4": "K Technology Corporation",
"0014D5": "Datang Telecom Technology CO. , LCD,Optical Communication Br",
"0014D6": "Jeongmin Electronics Co.,Ltd.",
"0014D7": "Datastore Technology Corp",
"0014D8": "bio-logic SA",
"0014D9": "IP Fabrics, Inc.",
"0014DA": "Huntleigh Healthcare",
"0014DB": "Elma Trenew Electronic GmbH",
"0014DC": "Communication System Design & Manufacturing (CSDM)",
"0014DD": "Covergence Inc.",
"0014DE": "Sage Instruments Inc.",
"0014DF": "HI-P Tech Corporation",
"0014E0": "LET'S Corporation",
"0014E1": "Data Display AG",
"0014E2": "datacom systems inc.",
"0014E3": "mm-lab GmbH",
"0014E4": "infinias, LLC",
"0014E5": "Alticast",
"0014E6": "AIM Infrarotmodule GmbH",
"0014E7": "Stolinx,. Inc",
"0014E8": "ARRIS Group, Inc.",
"0014E9": "Nortech International",
"0014EA": "S Digm Inc. (Safe Paradigm Inc.)",
"0014EB": "AwarePoint Corporation",
"0014EC": "Acro Telecom",
"0014ED": "Airak, Inc.",
"0014EE": "Western Digital Technologies, Inc.",
"0014EF": "TZero Technologies, Inc.",
"0014F0": "Business Security OL AB",
"0014F1": "CISCO SYSTEMS, INC.",
"0014F2": "CISCO SYSTEMS, INC.",
"0014F3": "ViXS Systems Inc",
"0014F4": "DekTec Digital Video B.V.",
"0014F5": "OSI Security Devices",
"0014F6": "Juniper Networks, Inc.",
"0014F7": "CREVIS Co., LTD",
"0014F8": "Scientific Atlanta",
"0014F9": "Vantage Controls",
"0014FA": "AsGa S.A.",
"0014FB": "Technical Solutions Inc.",
"0014FC": "Extandon, Inc.",
"0014FD": "Thecus Technology Corp.",
"0014FE": "Artech Electronics",
"0014FF": "Precise Automation, Inc.",
"001500": "Intel Corporate",
"001501": "LexBox",
"001502": "BETA tech",
"001503": "PROFIcomms s.r.o.",
"001504": "GAME PLUS CO., LTD.",
"001505": "Actiontec Electronics, Inc",
"001506": "Neo Photonics",
"001507": "Renaissance Learning Inc",
"001508": "Global Target Enterprise Inc",
"001509": "Plus Technology Co., Ltd",
"00150A": "Sonoa Systems, Inc",
"00150B": "SAGE INFOTECH LTD.",
"00150C": "AVM GmbH",
"00150D": "Hoana Medical, Inc.",
"00150E": "OPENBRAIN TECHNOLOGIES CO., LTD.",
"00150F": "mingjong",
"001510": "Techsphere Co., Ltd",
"001511": "Data Center Systems",
"001512": "Zurich University of Applied Sciences",
"001513": "EFS sas",
"001514": "Hu Zhou NAVA Networks&Electronics Ltd.",
"001515": "Leipold+Co.GmbH",
"001516": "URIEL SYSTEMS INC.",
"001517": "Intel Corporate",
"001518": "Shenzhen 10MOONS Technology Development CO.,Ltd",
"001519": "StoreAge Networking Technologies",
"00151A": "Hunter Engineering Company",
"00151B": "Isilon Systems Inc.",
"00151C": "LENECO",
"00151D": "M2I CORPORATION",
"00151E": "Ethernet Powerlink Standardization Group (EPSG)",
"00151F": "Multivision Intelligent Surveillance (Hong Kong) Ltd",
"001520": "Radiocrafts AS",
"001521": "Horoquartz",
"001522": "Dea Security",
"001523": "Meteor Communications Corporation",
"001524": "Numatics, Inc.",
"001525": "Chamberlain Access Solutions",
"001526": "Remote Technologies Inc",
"001527": "Balboa Instruments",
"001528": "Beacon Medical Products LLC d.b.a. BeaconMedaes",
"001529": "N3 Corporation",
"00152A": "Nokia GmbH",
"00152B": "CISCO SYSTEMS, INC.",
"00152C": "CISCO SYSTEMS, INC.",
"00152D": "TenX Networks, LLC",
"00152E": "PacketHop, Inc.",
"00152F": "ARRIS Group, Inc.",
"001530": "EMC Corporation",
"001531": "KOCOM",
"001532": "Consumer Technologies Group, LLC",
"001533": "NADAM.CO.,LTD",
"001534": "A Beltr\u00f3nica-Companhia de Comunica\u00e7\u00f5es, Lda",
"001535": "OTE Spa",
"001536": "Powertech co.,Ltd",
"001537": "Ventus Networks",
"001538": "RFID, Inc.",
"001539": "Technodrive SRL",
"00153A": "Shenzhen Syscan Technology Co.,Ltd.",
"00153B": "EMH metering GmbH & Co. KG",
"00153C": "Kprotech Co., Ltd.",
"00153D": "ELIM PRODUCT CO.",
"00153E": "Q-Matic Sweden AB",
"00153F": "Alcatel Alenia Space Italia",
"001540": "Nortel",
"001541": "StrataLight Communications, Inc.",
"001542": "MICROHARD S.R.L.",
"001543": "Aberdeen Test Center",
"001544": "coM.s.a.t. AG",
"001545": "SEECODE Co., Ltd.",
"001546": "ITG Worldwide Sdn Bhd",
"001547": "AiZen Solutions Inc.",
"001548": "CUBE TECHNOLOGIES",
"001549": "Dixtal Biomedica Ind. Com. Ltda",
"00154A": "WANSHIH ELECTRONIC CO., LTD",
"00154B": "Wonde Proud Technology Co., Ltd",
"00154C": "Saunders Electronics",
"00154D": "Netronome Systems, Inc.",
"00154E": "IEC",
"00154F": "one RF Technology",
"001550": "Nits Technology Inc",
"001551": "RadioPulse Inc.",
"001552": "Wi-Gear Inc.",
"001553": "Cytyc Corporation",
"001554": "Atalum Wireless S.A.",
"001555": "DFM GmbH",
"001556": "SAGEM COMMUNICATION",
"001557": "Olivetti",
"001558": "FOXCONN",
"001559": "Securaplane Technologies, Inc.",
"00155A": "DAINIPPON PHARMACEUTICAL CO., LTD.",
"00155B": "Sampo Corporation",
"00155C": "Dresser Wayne",
"00155D": "Microsoft Corporation",
"00155E": "Morgan Stanley",
"00155F": "GreenPeak Technologies",
"001560": "Hewlett-Packard Company",
"001561": "JJPlus Corporation",
"001562": "CISCO SYSTEMS, INC.",
"001563": "CISCO SYSTEMS, INC.",
"001564": "BEHRINGER Spezielle Studiotechnik GmbH",
"001565": "XIAMEN YEALINK NETWORK TECHNOLOGY CO.,LTD",
"001566": "A-First Technology Co., Ltd.",
"001567": "RADWIN Inc.",
"001568": "Dilithium Networks",
"001569": "PECO II, Inc.",
"00156A": "DG2L Technologies Pvt. Ltd.",
"00156B": "Perfisans Networks Corp.",
"00156C": "SANE SYSTEM CO., LTD",
"00156D": "Ubiquiti Networks Inc.",
"00156E": "A. W. Communication Systems Ltd",
"00156F": "Xiranet Communications GmbH",
"001570": "Symbol TechnologiesWholly owned Subsidiary of Motorola",
"001571": "Nolan Systems",
"001572": "Red-Lemon",
"001573": "NewSoft Technology Corporation",
"001574": "Horizon Semiconductors Ltd.",
"001575": "Nevis Networks Inc.",
"001576": "LABiTec - Labor Biomedical Technologies GmbH",
"001577": "Allied Telesis",
"001578": "Audio / Video Innovations",
"001579": "Lunatone Industrielle Elektronik GmbH",
"00157A": "Telefin S.p.A.",
"00157B": "Leuze electronic GmbH + Co. KG",
"00157C": "Dave Networks, Inc.",
"00157D": "POSDATA CO., LTD.",
"00157E": "Weidm\u00fcller Interface GmbH & Co. KG",
"00157F": "ChuanG International Holding CO.,LTD.",
"001580": "U-WAY CORPORATION",
"001581": "MAKUS Inc.",
"001582": "Pulse Eight Limited",
"001583": "IVT corporation",
"001584": "Schenck Process GmbH",
"001585": "Aonvision Technolopy Corp.",
"001586": "Xiamen Overseas Chinese Electronic Co., Ltd.",
"001587": "Takenaka Seisakusho Co.,Ltd",
"001588": "Salutica Allied Solutions Sdn Bhd",
"001589": "D-MAX Technology Co.,Ltd",
"00158A": "SURECOM Technology Corp.",
"00158B": "Park Air Systems Ltd",
"00158C": "Liab ApS",
"00158D": "Jennic Ltd",
"00158E": "Plustek.INC",
"00158F": "NTT Advanced Technology Corporation",
"001590": "Hectronic GmbH",
"001591": "RLW Inc.",
"001592": "Facom UK Ltd (Melksham)",
"001593": "U4EA Technologies Inc.",
"001594": "BIXOLON CO.,LTD",
"001595": "Quester Tangent Corporation",
"001596": "ARRIS International",
"001597": "AETA AUDIO SYSTEMS",
"001598": "Kolektor group",
"001599": "Samsung Electronics Co., LTD",
"00159A": "ARRIS Group, Inc.",
"00159B": "Nortel",
"00159C": "B-KYUNG SYSTEM Co.,Ltd.",
"00159D": "Tripp Lite",
"00159E": "Mad Catz Interactive Inc",
"00159F": "Terascala, Inc.",
"0015A0": "Nokia Danmark A/S",
"0015A1": "ECA-SINTERS",
"0015A2": "ARRIS International",
"0015A3": "ARRIS International",
"0015A4": "ARRIS International",
"0015A5": "DCI Co., Ltd.",
"0015A6": "Digital Electronics Products Ltd.",
"0015A7": "Robatech AG",
"0015A8": "ARRIS Group, Inc.",
"0015A9": "KWANG WOO I&C CO.,LTD",
"0015AA": "Rextechnik International Co.,",
"0015AB": "PRO CO SOUND INC",
"0015AC": "Capelon AB",
"0015AD": "Accedian Networks",
"0015AE": "kyung il",
"0015AF": "AzureWave Technologies, Inc.",
"0015B0": "AUTOTELENET CO.,LTD",
"0015B1": "Ambient Corporation",
"0015B2": "Advanced Industrial Computer, Inc.",
"0015B3": "Caretech AB",
"0015B4": "Polymap Wireless LLC",
"0015B5": "CI Network Corp.",
"0015B6": "ShinMaywa Industries, Ltd.",
"0015B7": "Toshiba",
"0015B8": "Tahoe",
"0015B9": "Samsung Electronics Co., Ltd.",
"0015BA": "iba AG",
"0015BB": "SMA Solar Technology AG",
"0015BC": "Develco",
"0015BD": "Group 4 Technology Ltd",
"0015BE": "Iqua Ltd.",
"0015BF": "technicob",
"0015C0": "DIGITAL TELEMEDIA CO.,LTD.",
"0015C1": "SONY Computer Entertainment inc,",
"0015C2": "3M Germany",
"0015C3": "Ruf Telematik AG",
"0015C4": "FLOVEL CO., LTD.",
"0015C5": "Dell Inc",
"0015C6": "CISCO SYSTEMS, INC.",
"0015C7": "CISCO SYSTEMS, INC.",
"0015C8": "FlexiPanel Ltd",
"0015C9": "Gumstix, Inc",
"0015CA": "TeraRecon, Inc.",
"0015CB": "Surf Communication Solutions Ltd.",
"0015CC": "UQUEST, LTD.",
"0015CD": "Exartech International Corp.",
"0015CE": "ARRIS International",
"0015CF": "ARRIS International",
"0015D0": "ARRIS International",
"0015D1": "ARRIS Group, Inc.",
"0015D2": "Xantech Corporation",
"0015D3": "Pantech&Curitel Communications, Inc.",
"0015D4": "Emitor AB",
"0015D5": "NICEVT",
"0015D6": "OSLiNK Sp. z o.o.",
"0015D7": "Reti Corporation",
"0015D8": "Interlink Electronics",
"0015D9": "PKC Electronics Oy",
"0015DA": "IRITEL A.D.",
"0015DB": "Canesta Inc.",
"0015DC": "KT&C Co., Ltd.",
"0015DD": "IP Control Systems Ltd.",
"0015DE": "Nokia Danmark A/S",
"0015DF": "Clivet S.p.A.",
"0015E0": "ST-Ericsson",
"0015E1": "Picochip Ltd",
"0015E2": "Dr.Ing. Herbert Knauer GmbH",
"0015E3": "Dream Technologies Corporation",
"0015E4": "Zimmer Elektromedizin",
"0015E5": "Cheertek Inc.",
"0015E6": "MOBILE TECHNIKA Inc.",
"0015E7": "Quantec Tontechnik",
"0015E8": "Nortel",
"0015E9": "D-Link Corporation",
"0015EA": "Tellumat (Pty) Ltd",
"0015EB": "ZTE CORPORATION",
"0015EC": "Boca Devices LLC",
"0015ED": "Fulcrum Microsystems, Inc.",
"0015EE": "Omnex Control Systems",
"0015EF": "NEC TOKIN Corporation",
"0015F0": "EGO BV",
"0015F1": "KYLINK Communications Corp.",
"0015F2": "ASUSTek COMPUTER INC.",
"0015F3": "PELTOR AB",
"0015F4": "Eventide",
"0015F5": "Sustainable Energy Systems",
"0015F6": "SCIENCE AND ENGINEERING SERVICES, INC.",
"0015F7": "Wintecronics Ltd.",
"0015F8": "Kingtronics Industrial Co. Ltd.",
"0015F9": "CISCO SYSTEMS, INC.",
"0015FA": "CISCO SYSTEMS, INC.",
"0015FB": "setex schermuly textile computer gmbh",
"0015FC": "Littelfuse Startco",
"0015FD": "Complete Media Systems",
"0015FE": "SCHILLING ROBOTICS LLC",
"0015FF": "Novatel Wireless, Inc.",
"001600": "CelleBrite Mobile Synchronization",
"001601": "Buffalo Inc.",
"001602": "CEYON TECHNOLOGY CO.,LTD.",
"001603": "COOLKSKY Co., LTD",
"001604": "Sigpro",
"001605": "YORKVILLE SOUND INC.",
"001606": "Ideal Industries",
"001607": "Curves International Inc.",
"001608": "Sequans Communications",
"001609": "Unitech electronics co., ltd.",
"00160A": "SWEEX Europe BV",
"00160B": "TVWorks LLC",
"00160C": "LPL DEVELOPMENT S.A. DE C.V",
"00160D": "Be Here Corporation",
"00160E": "Optica Technologies Inc.",
"00160F": "BADGER METER INC",
"001610": "Carina Technology",
"001611": "Altecon Srl",
"001612": "Otsuka Electronics Co., Ltd.",
"001613": "LibreStream Technologies Inc.",
"001614": "Picosecond Pulse Labs",
"001615": "Nittan Company, Limited",
"001616": "BROWAN COMMUNICATION INC.",
"001617": "MSI",
"001618": "HIVION Co., Ltd.",
"001619": "Lancelan Technologies S.L.",
"00161A": "Dametric AB",
"00161B": "Micronet Corporation",
"00161C": "e:cue",
"00161D": "Innovative Wireless Technologies, Inc.",
"00161E": "Woojinnet",
"00161F": "SUNWAVETEC Co., Ltd.",
"001620": "Sony Ericsson Mobile Communications AB",
"001621": "Colorado Vnet",
"001622": "BBH SYSTEMS GMBH",
"001623": "Interval Media",
"001624": "Teneros, Inc.",
"001625": "Impinj, Inc.",
"001626": "ARRIS Group, Inc.",
"001627": "embedded-logic DESIGN AND MORE GmbH",
"001628": "Ultra Electronics Manufacturing and Card Systems",
"001629": "Nivus GmbH",
"00162A": "Antik computers & communications s.r.o.",
"00162B": "Togami Electric Mfg.co.,Ltd.",
"00162C": "Xanboo",
"00162D": "STNet Co., Ltd.",
"00162E": "Space Shuttle Hi-Tech Co., Ltd.",
"00162F": "Geutebr\u00fcck GmbH",
"001630": "Vativ Technologies",
"001631": "Xteam",
"001632": "SAMSUNG ELECTRONICS CO., LTD.",
"001633": "Oxford Diagnostics Ltd.",
"001634": "Mathtech, Inc.",
"001635": "Hewlett-Packard Company",
"001636": "Quanta Computer Inc.",
"001637": "CITEL SpA",
"001638": "TECOM Co., Ltd.",
"001639": "UBIQUAM Co.,Ltd",
"00163A": "YVES TECHNOLOGY CO., LTD.",
"00163B": "VertexRSI/General Dynamics",
"00163C": "Rebox B.V.",
"00163D": "Tsinghua Tongfang Legend Silicon Tech. Co., Ltd.",
"00163E": "Xensource, Inc.",
"00163F": "CReTE SYSTEMS Inc.",
"001640": "Asmobile Communication Inc.",
"001641": "Universal Global Scientific Industrial Co., Ltd.",
"001642": "Pangolin",
"001643": "Sunhillo Corporation",
"001644": "LITE-ON Technology Corp.",
"001645": "Power Distribution, Inc.",
"001646": "CISCO SYSTEMS, INC.",
"001647": "CISCO SYSTEMS, INC.",
"001648": "SSD Company Limited",
"001649": "SetOne GmbH",
"00164A": "Vibration Technology Limited",
"00164B": "Quorion Data Systems GmbH",
"00164C": "PLANET INT Co., Ltd",
"00164D": "Alcatel North America IP Division",
"00164E": "Nokia Danmark A/S",
"00164F": "World Ethnic Broadcastin Inc.",
"001650": "Herley General Microwave Israel.",
"001651": "Exeo Systems",
"001652": "Hoatech Technologies, Inc.",
"001653": "LEGO System A/S IE Electronics Division",
"001654": "Flex-P Industries Sdn. Bhd.",
"001655": "FUHO TECHNOLOGY Co., LTD",
"001656": "Nintendo Co., Ltd.",
"001657": "Aegate Ltd",
"001658": "Fusiontech Technologies Inc.",
"001659": "Z.M.P. RADWAG",
"00165A": "Harman Specialty Group",
"00165B": "Grip Audio",
"00165C": "Trackflow Ltd",
"00165D": "AirDefense, Inc.",
"00165E": "Precision I/O",
"00165F": "Fairmount Automation",
"001660": "Nortel",
"001661": "Novatium Solutions (P) Ltd",
"001662": "Liyuh Technology Ltd.",
"001663": "KBT Mobile",
"001664": "Prod-El SpA",
"001665": "Cellon France",
"001666": "Quantier Communication Inc.",
"001667": "A-TEC Subsystem INC.",
"001668": "Eishin Electronics",
"001669": "MRV Communication (Networks) LTD",
"00166A": "TPS",
"00166B": "Samsung Electronics",
"00166C": "Samsung Electonics Digital Video System Division",
"00166D": "Yulong Computer Telecommunication Scientific(shenzhen)Co.,Lt",
"00166E": "Arbitron Inc.",
"00166F": "Intel Corporate",
"001670": "SKNET Corporation",
"001671": "Symphox Information Co.",
"001672": "Zenway enterprise ltd",
"001673": "Bury GmbH & Co. KG",
"001674": "EuroCB (Phils.), Inc.",
"001675": "ARRIS Group, Inc.",
"001676": "Intel Corporate",
"001677": "Bihl + Wiedemann GmbH",
"001678": "SHENZHEN BAOAN GAOKE ELECTRONICS CO., LTD",
"001679": "eOn Communications",
"00167A": "Skyworth Overseas Dvelopment Ltd.",
"00167B": "Haver&Boecker",
"00167C": "iRex Technologies BV",
"00167D": "Sky-Line Information Co., Ltd.",
"00167E": "DIBOSS.CO.,LTD",
"00167F": "Bluebird Soft Inc.",
"001680": "Bally Gaming + Systems",
"001681": "Vector Informatik GmbH",
"001682": "Pro Dex, Inc",
"001683": "WEBIO International Co.,.Ltd.",
"001684": "Donjin Co.,Ltd.",
"001685": "Elisa Oyj",
"001686": "Karl Storz Imaging",
"001687": "Chubb CSC-Vendor AP",
"001688": "ServerEngines LLC",
"001689": "Pilkor Electronics Co., Ltd",
"00168A": "id-Confirm Inc",
"00168B": "Paralan Corporation",
"00168C": "DSL Partner AS",
"00168D": "KORWIN CO., Ltd.",
"00168E": "Vimicro corporation",
"00168F": "GN Netcom as",
"001690": "J-TEK INCORPORATION",
"001691": "Moser-Baer AG",
"001692": "Scientific-Atlanta, Inc.",
"001693": "PowerLink Technology Inc.",
"001694": "Sennheiser Communications A/S",
"001695": "AVC Technology (International) Limited",
"001696": "QDI Technology (H.K.) Limited",
"001697": "NEC Corporation",
"001698": "T&A Mobile Phones",
"001699": "Tonic DVB Marketing Ltd",
"00169A": "Quadrics Ltd",
"00169B": "Alstom Transport",
"00169C": "CISCO SYSTEMS, INC.",
"00169D": "CISCO SYSTEMS, INC.",
"00169E": "TV One Ltd",
"00169F": "Vimtron Electronics Co., Ltd.",
"0016A0": "Auto-Maskin",
"0016A1": "3Leaf Networks",
"0016A2": "CentraLite Systems, Inc.",
"0016A3": "Ingeteam Transmission&Distribution, S.A.",
"0016A4": "Ezurio Ltd",
"0016A5": "Tandberg Storage ASA",
"0016A6": "Dovado FZ-LLC",
"0016A7": "AWETA G&P",
"0016A8": "CWT CO., LTD.",
"0016A9": "2EI",
"0016AA": "Kei Communication Technology Inc.",
"0016AB": "Dansensor A/S",
"0016AC": "Toho Technology Corp.",
"0016AD": "BT-Links Company Limited",
"0016AE": "INVENTEL",
"0016AF": "Shenzhen Union Networks Equipment Co.,Ltd.",
"0016B0": "VK Corporation",
"0016B1": "KBS",
"0016B2": "DriveCam Inc",
"0016B3": "Photonicbridges (China) Co., Ltd.",
"0016B4": "PRIVATE",
"0016B5": "ARRIS Group, Inc.",
"0016B6": "Cisco-Linksys",
"0016B7": "Seoul Commtech",
"0016B8": "Sony Ericsson Mobile Communications",
"0016B9": "ProCurve Networking",
"0016BA": "WEATHERNEWS INC.",
"0016BB": "Law-Chain Computer Technology Co Ltd",
"0016BC": "Nokia Danmark A/S",
"0016BD": "ATI Industrial Automation",
"0016BE": "INFRANET, Inc.",
"0016BF": "PaloDEx Group Oy",
"0016C0": "Semtech Corporation",
"0016C1": "Eleksen Ltd",
"0016C2": "Avtec Systems Inc",
"0016C3": "BA Systems Inc",
"0016C4": "SiRF Technology, Inc.",
"0016C5": "Shenzhen Xing Feng Industry Co.,Ltd",
"0016C6": "North Atlantic Industries",
"0016C7": "CISCO SYSTEMS, INC.",
"0016C8": "CISCO SYSTEMS, INC.",
"0016C9": "NAT Seattle, Inc.",
"0016CA": "Nortel",
"0016CB": "Apple",
"0016CC": "Xcute Mobile Corp.",
"0016CD": "HIJI HIGH-TECH CO., LTD.",
"0016CE": "Hon Hai Precision Ind. Co., Ltd.",
"0016CF": "Hon Hai Precision Ind. Co., Ltd.",
"0016D0": "ATech elektronika d.o.o.",
"0016D1": "ZAT a.s.",
"0016D2": "Caspian",
"0016D3": "Wistron Corporation",
"0016D4": "Compal Communications, Inc.",
"0016D5": "Synccom Co., Ltd",
"0016D6": "TDA Tech Pty Ltd",
"0016D7": "Sunways AG",
"0016D8": "Senea AB",
"0016D9": "NINGBO BIRD CO.,LTD.",
"0016DA": "Futronic Technology Co. Ltd.",
"0016DB": "Samsung Electronics Co., Ltd.",
"0016DC": "ARCHOS",
"0016DD": "Gigabeam Corporation",
"0016DE": "FAST Inc",
"0016DF": "Lundinova AB",
"0016E0": "3Com Ltd",
"0016E1": "SiliconStor, Inc.",
"0016E2": "American Fibertek, Inc.",
"0016E3": "ASKEY COMPUTER CORP.",
"0016E4": "VANGUARD SECURITY ENGINEERING CORP.",
"0016E5": "FORDLEY DEVELOPMENT LIMITED",
"0016E6": "GIGA-BYTE TECHNOLOGY CO.,LTD.",
"0016E7": "Dynamix Promotions Limited",
"0016E8": "Sigma Designs, Inc.",
"0016E9": "Tiba Medical Inc",
"0016EA": "Intel Corporate",
"0016EB": "Intel Corporate",
"0016EC": "Elitegroup Computer Systems Co., Ltd.",
"0016ED": "Digital Safety Technologies, Inc",
"0016EE": "RoyalDigital Inc.",
"0016EF": "Koko Fitness, Inc.",
"0016F0": "Dell",
"0016F1": "OmniSense, LLC",
"0016F2": "Dmobile System Co., Ltd.",
"0016F3": "CAST Information Co., Ltd",
"0016F4": "Eidicom Co., Ltd.",
"0016F5": "Dalian Golden Hualu Digital Technology Co.,Ltd",
"0016F6": "Video Products Group",
"0016F7": "L-3 Communications, Aviation Recorders",
"0016F8": "AVIQTECH TECHNOLOGY CO., LTD.",
"0016F9": "CETRTA POT, d.o.o., Kranj",
"0016FA": "ECI Telecom Ltd.",
"0016FB": "SHENZHEN MTC CO.,LTD.",
"0016FC": "TOHKEN CO.,LTD.",
"0016FD": "Jaty Electronics",
"0016FE": "Alps Electric Co., Ltd",
"0016FF": "Wamin Optocomm Mfg Corp",
"001700": "ARRIS Group, Inc.",
"001701": "KDE, Inc.",
"001702": "Osung Midicom Co., Ltd",
"001703": "MOSDAN Internation Co.,Ltd",
"001704": "Shinco Electronics Group Co.,Ltd",
"001705": "Methode Electronics",
"001706": "Techfaith Wireless Communication Technology Limited.",
"001707": "InGrid, Inc",
"001708": "Hewlett-Packard Company",
"001709": "Exalt Communications",
"00170A": "INEW DIGITAL COMPANY",
"00170B": "Contela, Inc.",
"00170C": "Twig Com Ltd.",
"00170D": "Dust Networks Inc.",
"00170E": "CISCO SYSTEMS, INC.",
"00170F": "CISCO SYSTEMS, INC.",
"001710": "Casa Systems Inc.",
"001711": "GE Healthcare Bio-Sciences AB",
"001712": "ISCO International",
"001713": "Tiger NetCom",
"001714": "BR Controls Nederland bv",
"001715": "Qstik",
"001716": "Qno Technology Inc.",
"001717": "Leica Geosystems AG",
"001718": "Vansco Electronics Oy",
"001719": "AudioCodes USA, Inc",
"00171A": "Winegard Company",
"00171B": "Innovation Lab Corp.",
"00171C": "NT MicroSystems, Inc.",
"00171D": "DIGIT",
"00171E": "Theo Benning GmbH & Co. KG",
"00171F": "IMV Corporation",
"001720": "Image Sensing Systems, Inc.",
"001721": "FITRE S.p.A.",
"001722": "Hanazeder Electronic GmbH",
"001723": "Summit Data Communications",
"001724": "Studer Professional Audio GmbH",
"001725": "Liquid Computing",
"001726": "m2c Electronic Technology Ltd.",
"001727": "Thermo Ramsey Italia s.r.l.",
"001728": "Selex Communications",
"001729": "Ubicod Co.LTD",
"00172A": "Proware Technology Corp.(By Unifosa)",
"00172B": "Global Technologies Inc.",
"00172C": "TAEJIN INFOTECH",
"00172D": "Axcen Photonics Corporation",
"00172E": "FXC Inc.",
"00172F": "NeuLion Incorporated",
"001730": "Automation Electronics",
"001731": "ASUSTek COMPUTER INC.",
"001732": "Science-Technical Center \"RISSA\"",
"001733": "SFR",
"001734": "ADC Telecommunications",
"001735": "PRIVATE",
"001736": "iiTron Inc.",
"001737": "Industrie Dial Face S.p.A.",
"001738": "International Business Machines",
"001739": "Bright Headphone Electronics Company",
"00173A": "Reach Systems Inc.",
"00173B": "Cisco Systems, Inc.",
"00173C": "Extreme Engineering Solutions",
"00173D": "Neology",
"00173E": "LeucotronEquipamentos Ltda.",
"00173F": "Belkin Corporation",
"001740": "Bluberi Gaming Technologies Inc",
"001741": "DEFIDEV",
"001742": "FUJITSU LIMITED",
"001743": "Deck Srl",
"001744": "Araneo Ltd.",
"001745": "INNOTZ CO., Ltd",
"001746": "Freedom9 Inc.",
"001747": "Trimble",
"001748": "Neokoros Brasil Ltda",
"001749": "HYUNDAE YONG-O-SA CO.,LTD",
"00174A": "SOCOMEC",
"00174B": "Nokia Danmark A/S",
"00174C": "Millipore",
"00174D": "DYNAMIC NETWORK FACTORY, INC.",
"00174E": "Parama-tech Co.,Ltd.",
"00174F": "iCatch Inc.",
"001750": "GSI Group, MicroE Systems",
"001751": "Online Corporation",
"001752": "DAGS, Inc",
"001753": "nFore Technology Inc.",
"001754": "Arkino HiTOP Corporation Limited",
"001755": "GE Security",
"001756": "Vinci Labs Oy",
"001757": "RIX TECHNOLOGY LIMITED",
"001758": "ThruVision Ltd",
"001759": "CISCO SYSTEMS, INC.",
"00175A": "CISCO SYSTEMS, INC.",
"00175B": "ACS Solutions Switzerland Ltd.",
"00175C": "SHARP CORPORATION",
"00175D": "Dongseo system.",
"00175E": "Zed-3",
"00175F": "XENOLINK Communications Co., Ltd.",
"001760": "Naito Densei Machida MFG.CO.,LTD",
"001761": "PRIVATE",
"001762": "Solar Technology, Inc.",
"001763": "Essentia S.p.A.",
"001764": "ATMedia GmbH",
"001765": "Nortel",
"001766": "Accense Technology, Inc.",
"001767": "Earforce AS",
"001768": "Zinwave Ltd",
"001769": "Cymphonix Corp",
"00176A": "Avago Technologies",
"00176B": "Kiyon, Inc.",
"00176C": "Pivot3, Inc.",
"00176D": "CORE CORPORATION",
"00176E": "DUCATI SISTEMI",
"00176F": "PAX Computer Technology(Shenzhen) Ltd.",
"001770": "Arti Industrial Electronics Ltd.",
"001771": "APD Communications Ltd",
"001772": "ASTRO Strobel Kommunikationssysteme GmbH",
"001773": "Laketune Technologies Co. Ltd",
"001774": "Elesta GmbH",
"001775": "TTE Germany GmbH",
"001776": "Meso Scale Diagnostics, LLC",
"001777": "Obsidian Research Corporation",
"001778": "Central Music Co.",
"001779": "QuickTel",
"00177A": "ASSA ABLOY AB",
"00177B": "Azalea Networks inc",
"00177C": "Smartlink Network Systems Limited",
"00177D": "IDT International Limited",
"00177E": "Meshcom Technologies Inc.",
"00177F": "Worldsmart Retech",
"001780": "Applied Biosystems B.V.",
"001781": "Greystone Data System, Inc.",
"001782": "LoBenn Inc.",
"001783": "Texas Instruments",
"001784": "ARRIS Group, Inc.",
"001785": "Sparr Electronics Ltd",
"001786": "wisembed",
"001787": "Brother, Brother & Sons ApS",
"001788": "Philips Lighting BV",
"001789": "Zenitron Corporation",
"00178A": "DARTS TECHNOLOGIES CORP.",
"00178B": "Teledyne Technologies Incorporated",
"00178C": "Independent Witness, Inc",
"00178D": "Checkpoint Systems, Inc.",
"00178E": "Gunnebo Cash Automation AB",
"00178F": "NINGBO YIDONG ELECTRONIC CO.,LTD.",
"001790": "HYUNDAI DIGITECH Co, Ltd.",
"001791": "LinTech GmbH",
"001792": "Falcom Wireless Comunications Gmbh",
"001793": "Tigi Corporation",
"001794": "CISCO SYSTEMS, INC.",
"001795": "CISCO SYSTEMS, INC.",
"001796": "Rittmeyer AG",
"001797": "Telsy Elettronica S.p.A.",
"001798": "Azonic Technology Co., LTD",
"001799": "SmarTire Systems Inc.",
"00179A": "D-Link Corporation",
"00179B": "Chant Sincere CO., LTD.",
"00179C": "DEPRAG SCHULZ GMBH u. CO.",
"00179D": "Kelman Limited",
"00179E": "Sirit Inc",
"00179F": "Apricorn",
"0017A0": "RoboTech srl",
"0017A1": "3soft inc.",
"0017A2": "Camrivox Ltd.",
"0017A3": "MIX s.r.l.",
"0017A4": "Hewlett-Packard Company",
"0017A5": "Ralink Technology Corp",
"0017A6": "YOSIN ELECTRONICS CO., LTD.",
"0017A7": "Mobile Computing Promotion Consortium",
"0017A8": "EDM Corporation",
"0017A9": "Sentivision",
"0017AA": "elab-experience inc.",
"0017AB": "Nintendo Co., Ltd.",
"0017AC": "O'Neil Product Development Inc.",
"0017AD": "AceNet Corporation",
"0017AE": "GAI-Tronics",
"0017AF": "Enermet",
"0017B0": "Nokia Danmark A/S",
"0017B1": "ACIST Medical Systems, Inc.",
"0017B2": "SK Telesys",
"0017B3": "Aftek Infosys Limited",
"0017B4": "Remote Security Systems, LLC",
"0017B5": "Peerless Systems Corporation",
"0017B6": "Aquantia",
"0017B7": "Tonze Technology Co.",
"0017B8": "NOVATRON CO., LTD.",
"0017B9": "Gambro Lundia AB",
"0017BA": "SEDO CO., LTD.",
"0017BB": "Syrinx Industrial Electronics",
"0017BC": "Touchtunes Music Corporation",
"0017BD": "Tibetsystem",
"0017BE": "Tratec Telecom B.V.",
"0017BF": "Coherent Research Limited",
"0017C0": "PureTech Systems, Inc.",
"0017C1": "CM Precision Technology LTD.",
"0017C2": "ADB Broadband Italia",
"0017C3": "KTF Technologies Inc.",
"0017C4": "Quanta Microsystems, INC.",
"0017C5": "SonicWALL",
"0017C6": "Cross Match Technologies Inc",
"0017C7": "MARA Systems Consulting AB",
"0017C8": "KYOCERA Document Solutions Inc.",
"0017C9": "Samsung Electronics Co., Ltd.",
"0017CA": "Qisda Corporation",
"0017CB": "Juniper Networks",
"0017CC": "Alcatel-Lucent",
"0017CD": "CEC Wireless R&D Ltd.",
"0017CE": "Screen Service Spa",
"0017CF": "iMCA-GmbH",
"0017D0": "Opticom Communications, LLC",
"0017D1": "Nortel",
"0017D2": "THINLINX PTY LTD",
"0017D3": "Etymotic Research, Inc.",
"0017D4": "Monsoon Multimedia, Inc",
"0017D5": "Samsung Electronics Co., Ltd.",
"0017D6": "Bluechips Microhouse Co.,Ltd.",
"0017D7": "ION Geophysical Corporation Inc.",
"0017D8": "Magnum Semiconductor, Inc.",
"0017D9": "AAI Corporation",
"0017DA": "Spans Logic",
"0017DB": "CANKO TECHNOLOGIES INC.",
"0017DC": "DAEMYUNG ZERO1",
"0017DD": "Clipsal Australia",
"0017DE": "Advantage Six Ltd",
"0017DF": "CISCO SYSTEMS, INC.",
"0017E0": "CISCO SYSTEMS, INC.",
"0017E1": "DACOS Technologies Co., Ltd.",
"0017E2": "ARRIS Group, Inc.",
"0017E3": "Texas Instruments",
"0017E4": "Texas Instruments",
"0017E5": "Texas Instruments",
"0017E6": "Texas Instruments",
"0017E7": "Texas Instruments",
"0017E8": "Texas Instruments",
"0017E9": "Texas Instruments",
"0017EA": "Texas Instruments",
"0017EB": "Texas Instruments",
"0017EC": "Texas Instruments",
"0017ED": "WooJooIT Ltd.",
"0017EE": "ARRIS Group, Inc.",
"0017EF": "IBM Corp",
"0017F0": "SZCOM Broadband Network Technology Co.,Ltd",
"0017F1": "Renu Electronics Pvt Ltd",
"0017F2": "Apple",
"0017F3": "Harris Corparation",
"0017F4": "ZERON ALLIANCE",
"0017F5": "LIG NEOPTEK",
"0017F6": "Pyramid Meriden Inc.",
"0017F7": "CEM Solutions Pvt Ltd",
"0017F8": "Motech Industries Inc.",
"0017F9": "Forcom Sp. z o.o.",
"0017FA": "Microsoft Corporation",
"0017FB": "FA",
"0017FC": "Suprema Inc.",
"0017FD": "Amulet Hotkey",
"0017FE": "TALOS SYSTEM INC.",
"0017FF": "PLAYLINE Co.,Ltd.",
"001800": "UNIGRAND LTD",
"001801": "Actiontec Electronics, Inc",
"001802": "Alpha Networks Inc.",
"001803": "ArcSoft Shanghai Co. LTD",
"001804": "E-TEK DIGITAL TECHNOLOGY LIMITED",
"001805": "Beijing InHand Networking Technology Co.,Ltd.",
"001806": "Hokkei Industries Co., Ltd.",
"001807": "Fanstel Corp.",
"001808": "SightLogix, Inc.",
"001809": "CRESYN",
"00180A": "Meraki, Inc.",
"00180B": "Brilliant Telecommunications",
"00180C": "Optelian Access Networks",
"00180D": "Terabytes Server Storage Tech Corp",
"00180E": "Avega Systems",
"00180F": "Nokia Danmark A/S",
"001810": "IPTrade S.A.",
"001811": "Neuros Technology International, LLC.",
"001812": "Beijing Xinwei Telecom Technology Co., Ltd.",
"001813": "Sony Ericsson Mobile Communications",
"001814": "Mitutoyo Corporation",
"001815": "GZ Technologies, Inc.",
"001816": "Ubixon Co., Ltd.",
"001817": "D. E. Shaw Research, LLC",
"001818": "CISCO SYSTEMS, INC.",
"001819": "CISCO SYSTEMS, INC.",
"00181A": "AVerMedia Information Inc.",
"00181B": "TaiJin Metal Co., Ltd.",
"00181C": "Exterity Limited",
"00181D": "ASIA ELECTRONICS CO.,LTD",
"00181E": "GDX Technologies Ltd.",
"00181F": "Palmmicro Communications",
"001820": "w5networks",
"001821": "SINDORICOH",
"001822": "CEC TELECOM CO.,LTD.",
"001823": "Delta Electronics, Inc.",
"001824": "Kimaldi Electronics, S.L.",
"001825": "PRIVATE",
"001826": "Cale Access AB",
"001827": "NEC UNIFIED SOLUTIONS NEDERLAND B.V.",
"001828": "e2v technologies (UK) ltd.",
"001829": "Gatsometer",
"00182A": "Taiwan Video & Monitor",
"00182B": "Softier",
"00182C": "Ascend Networks, Inc.",
"00182D": "Artec Design",
"00182E": "XStreamHD, LLC",
"00182F": "Texas Instruments",
"001830": "Texas Instruments",
"001831": "Texas Instruments",
"001832": "Texas Instruments",
"001833": "Texas Instruments",
"001834": "Texas Instruments",
"001835": "Thoratec / ITC",
"001836": "Reliance Electric Limited",
"001837": "Universal ABIT Co., Ltd.",
"001838": "PanAccess Communications,Inc.",
"001839": "Cisco-Linksys LLC",
"00183A": "Westell Technologies",
"00183B": "CENITS Co., Ltd.",
"00183C": "Encore Software Limited",
"00183D": "Vertex Link Corporation",
"00183E": "Digilent, Inc",
"00183F": "2Wire, Inc",
"001840": "3 Phoenix, Inc.",
"001841": "High Tech Computer Corp",
"001842": "Nokia Danmark A/S",
"001843": "Dawevision Ltd",
"001844": "Heads Up Technologies, Inc.",
"001845": "Pulsar-Telecom LLC.",
"001846": "Crypto S.A.",
"001847": "AceNet Technology Inc.",
"001848": "Vecima Networks Inc.",
"001849": "Pigeon Point Systems LLC",
"00184A": "Catcher, Inc.",
"00184B": "Las Vegas Gaming, Inc.",
"00184C": "Bogen Communications",
"00184D": "Netgear Inc.",
"00184E": "Lianhe Technologies, Inc.",
"00184F": "8 Ways Technology Corp.",
"001850": "Secfone Kft",
"001851": "SWsoft",
"001852": "StorLink Semiconductors, Inc.",
"001853": "Atera Networks LTD.",
"001854": "Argard Co., Ltd",
"001855": "Aeromaritime Systembau GmbH",
"001856": "EyeFi, Inc",
"001857": "Unilever R&D",
"001858": "TagMaster AB",
"001859": "Strawberry Linux Co.,Ltd.",
"00185A": "uControl, Inc.",
"00185B": "Network Chemistry, Inc",
"00185C": "EDS Lab Pte Ltd",
"00185D": "TAIGUEN TECHNOLOGY (SHEN-ZHEN) CO., LTD.",
"00185E": "Nexterm Inc.",
"00185F": "TAC Inc.",
"001860": "SIM Technology Group Shanghai Simcom Ltd.,",
"001861": "Ooma, Inc.",
"001862": "Seagate Technology",
"001863": "Veritech Electronics Limited",
"001864": "Eaton Corporation",
"001865": "Siemens Healthcare Diagnostics Manufacturing Ltd",
"001866": "Leutron Vision",
"001867": "Datalogic ADC",
"001868": "Scientific Atlanta, A Cisco Company",
"001869": "KINGJIM",
"00186A": "Global Link Digital Technology Co,.LTD",
"00186B": "Sambu Communics CO., LTD.",
"00186C": "Neonode AB",
"00186D": "Zhenjiang Sapphire Electronic Industry CO.",
"00186E": "3Com Ltd",
"00186F": "Setha Industria Eletronica LTDA",
"001870": "E28 Shanghai Limited",
"001871": "Hewlett-Packard Company",
"001872": "Expertise Engineering",
"001873": "CISCO SYSTEMS, INC.",
"001874": "CISCO SYSTEMS, INC.",
"001875": "AnaCise Testnology Pte Ltd",
"001876": "WowWee Ltd.",
"001877": "Amplex A/S",
"001878": "Mackware GmbH",
"001879": "dSys",
"00187A": "Wiremold",
"00187B": "4NSYS Co. Ltd.",
"00187C": "INTERCROSS, LLC",
"00187D": "Armorlink shanghai Co. Ltd",
"00187E": "RGB Spectrum",
"00187F": "ZODIANET",
"001880": "Maxim Integrated Products",
"001881": "Buyang Electronics Industrial Co., Ltd",
"001882": "Huawei Technologies Co., Ltd.",
"001883": "FORMOSA21 INC.",
"001884": "Fon Technology S.L.",
"001885": "Avigilon Corporation",
"001886": "EL-TECH, INC.",
"001887": "Metasystem SpA",
"001888": "GOTIVE a.s.",
"001889": "WinNet Solutions Limited",
"00188A": "Infinova LLC",
"00188B": "Dell Inc",
"00188C": "Mobile Action Technology Inc.",
"00188D": "Nokia Danmark A/S",
"00188E": "Ekahau, Inc.",
"00188F": "Montgomery Technology, Inc.",
"001890": "RadioCOM, s.r.o.",
"001891": "Zhongshan General K-mate Electronics Co., Ltd",
"001892": "ads-tec GmbH",
"001893": "SHENZHEN PHOTON BROADBAND TECHNOLOGY CO.,LTD",
"001894": "zimocom",
"001895": "Hansun Technologies Inc.",
"001896": "Great Well Electronic LTD",
"001897": "JESS-LINK PRODUCTS Co., LTD",
"001898": "KINGSTATE ELECTRONICS CORPORATION",
"001899": "ShenZhen jieshun Science&Technology Industry CO,LTD.",
"00189A": "HANA Micron Inc.",
"00189B": "Thomson Inc.",
"00189C": "Weldex Corporation",
"00189D": "Navcast Inc.",
"00189E": "OMNIKEY GmbH.",
"00189F": "Lenntek Corporation",
"0018A0": "Cierma Ascenseurs",
"0018A1": "Tiqit Computers, Inc.",
"0018A2": "XIP Technology AB",
"0018A3": "ZIPPY TECHNOLOGY CORP.",
"0018A4": "ARRIS Group, Inc.",
"0018A5": "ADigit Technologies Corp.",
"0018A6": "Persistent Systems, LLC",
"0018A7": "Yoggie Security Systems LTD.",
"0018A8": "AnNeal Technology Inc.",
"0018A9": "Ethernet Direct Corporation",
"0018AA": "Protec Fire Detection plc",
"0018AB": "BEIJING LHWT MICROELECTRONICS INC.",
"0018AC": "Shanghai Jiao Da HISYS Technology Co. Ltd.",
"0018AD": "NIDEC SANKYO CORPORATION",
"0018AE": "TVT CO.,LTD",
"0018AF": "Samsung Electronics Co., Ltd.",
"0018B0": "Nortel",
"0018B1": "IBM Corp",
"0018B2": "ADEUNIS RF",
"0018B3": "TEC WizHome Co., Ltd.",
"0018B4": "Dawon Media Inc.",
"0018B5": "Magna Carta",
"0018B6": "S3C, Inc.",
"0018B7": "D3 LED, LLC",
"0018B8": "New Voice International AG",
"0018B9": "CISCO SYSTEMS, INC.",
"0018BA": "CISCO SYSTEMS, INC.",
"0018BB": "Eliwell Controls srl",
"0018BC": "ZAO NVP Bolid",
"0018BD": "SHENZHEN DVBWORLD TECHNOLOGY CO., LTD.",
"0018BE": "ANSA Corporation",
"0018BF": "Essence Technology Solution, Inc.",
"0018C0": "ARRIS Group, Inc.",
"0018C1": "Almitec Inform\u00e1tica e Com\u00e9rcio",
"0018C2": "Firetide, Inc",
"0018C3": "CS Corporation",
"0018C4": "Raba Technologies LLC",
"0018C5": "Nokia Danmark A/S",
"0018C6": "OPW Fuel Management Systems",
"0018C7": "Real Time Automation",
"0018C8": "ISONAS Inc.",
"0018C9": "EOps Technology Limited",
"0018CA": "Viprinet GmbH",
"0018CB": "Tecobest Technology Limited",
"0018CC": "AXIOHM SAS",
"0018CD": "Erae Electronics Industry Co., Ltd",
"0018CE": "Dreamtech Co., Ltd",
"0018CF": "Baldor Electric Company",
"0018D0": "AtRoad, A Trimble Company",
"0018D1": "Siemens Home & Office Comm. Devices",
"0018D2": "High-Gain Antennas LLC",
"0018D3": "TEAMCAST",
"0018D4": "Unified Display Interface SIG",
"0018D5": "REIGNCOM",
"0018D6": "Swirlnet A/S",
"0018D7": "Javad Navigation Systems Inc.",
"0018D8": "ARCH METER Corporation",
"0018D9": "Santosha Internatonal, Inc",
"0018DA": "AMBER wireless GmbH",
"0018DB": "EPL Technology Ltd",
"0018DC": "Prostar Co., Ltd.",
"0018DD": "Silicondust Engineering Ltd",
"0018DE": "Intel Corporate",
"0018DF": "The Morey Corporation",
"0018E0": "ANAVEO",
"0018E1": "Verkerk Service Systemen",
"0018E2": "Topdata Sistemas de Automacao Ltda",
"0018E3": "Visualgate Systems, Inc.",
"0018E4": "YIGUANG",
"0018E5": "Adhoco AG",
"0018E6": "Computer Hardware Design SIA",
"0018E7": "Cameo Communications, INC.",
"0018E8": "Hacetron Corporation",
"0018E9": "Numata Corporation",
"0018EA": "Alltec GmbH",
"0018EB": "BroVis Wireless Networks",
"0018EC": "Welding Technology Corporation",
"0018ED": "Accutech Ultrasystems Co., Ltd.",
"0018EE": "Videology Imaging Solutions, Inc.",
"0018EF": "Escape Communications, Inc.",
"0018F0": "JOYTOTO Co., Ltd.",
"0018F1": "Chunichi Denshi Co.,LTD.",
"0018F2": "Beijing Tianyu Communication Equipment Co., Ltd",
"0018F3": "ASUSTek COMPUTER INC.",
"0018F4": "EO TECHNICS Co., Ltd.",
"0018F5": "Shenzhen Streaming Video Technology Company Limited",
"0018F6": "Thomson Telecom Belgium",
"0018F7": "Kameleon Technologies",
"0018F8": "Cisco-Linksys LLC",
"0018F9": "VVOND, Inc.",
"0018FA": "Yushin Precision Equipment Co.,Ltd.",
"0018FB": "Compro Technology",
"0018FC": "Altec Electronic AG",
"0018FD": "Optimal Technologies International Inc.",
"0018FE": "Hewlett-Packard Company",
"0018FF": "PowerQuattro Co.",
"001900": "Intelliverese - DBA Voicecom",
"001901": "F1MEDIA",
"001902": "Cambridge Consultants Ltd",
"001903": "Bigfoot Networks Inc",
"001904": "WB Electronics Sp. z o.o.",
"001905": "SCHRACK Seconet AG",
"001906": "CISCO SYSTEMS, INC.",
"001907": "CISCO SYSTEMS, INC.",
"001908": "Duaxes Corporation",
"001909": "DEVI - Danfoss A/S",
"00190A": "HASWARE INC.",
"00190B": "Southern Vision Systems, Inc.",
"00190C": "Encore Electronics, Inc.",
"00190D": "IEEE 1394c",
"00190E": "Atech Technology Co., Ltd.",
"00190F": "Advansus Corp.",
"001910": "Knick Elektronische Messgeraete GmbH & Co. KG",
"001911": "Just In Mobile Information Technologies (Shanghai) Co., Ltd.",
"001912": "Welcat Inc",
"001913": "Chuang-Yi Network Equipment Co.Ltd.",
"001914": "Winix Co., Ltd",
"001915": "TECOM Co., Ltd.",
"001916": "PayTec AG",
"001917": "Posiflex Inc.",
"001918": "Interactive Wear AG",
"001919": "ASTEL Inc.",
"00191A": "IRLINK",
"00191B": "Sputnik Engineering AG",
"00191C": "Sensicast Systems",
"00191D": "Nintendo Co., Ltd.",
"00191E": "Beyondwiz Co., Ltd.",
"00191F": "Microlink communications Inc.",
"001920": "KUME electric Co.,Ltd.",
"001921": "Elitegroup Computer System Co.",
"001922": "CM Comandos Lineares",
"001923": "Phonex Korea Co., LTD.",
"001924": "LBNL Engineering",
"001925": "Intelicis Corporation",
"001926": "BitsGen Co., Ltd.",
"001927": "ImCoSys Ltd",
"001928": "Siemens AG, Transportation Systems",
"001929": "2M2B Montadora de Maquinas Bahia Brasil LTDA",
"00192A": "Antiope Associates",
"00192B": "Aclara RF Systems Inc.",
"00192C": "ARRIS Group, Inc.",
"00192D": "Nokia Corporation",
"00192E": "Spectral Instruments, Inc.",
"00192F": "CISCO SYSTEMS, INC.",
"001930": "CISCO SYSTEMS, INC.",
"001931": "Balluff GmbH",
"001932": "Gude Analog- und Digialsysteme GmbH",
"001933": "Strix Systems, Inc.",
"001934": "TRENDON TOUCH TECHNOLOGY CORP.",
"001935": "DUERR DENTAL AG",
"001936": "STERLITE OPTICAL TECHNOLOGIES LIMITED",
"001937": "CommerceGuard AB",
"001938": "UMB Communications Co., Ltd.",
"001939": "Gigamips",
"00193A": "OESOLUTIONS",
"00193B": "Wilibox Deliberant Group LLC",
"00193C": "HighPoint Technologies Incorporated",
"00193D": "GMC Guardian Mobility Corp.",
"00193E": "ADB Broadband Italia",
"00193F": "RDI technology(Shenzhen) Co.,LTD",
"001940": "Rackable Systems",
"001941": "Pitney Bowes, Inc",
"001942": "ON SOFTWARE INTERNATIONAL LIMITED",
"001943": "Belden",
"001944": "Fossil Partners, L.P.",
"001945": "Ten-Tec Inc.",
"001946": "Cianet Industria e Comercio S/A",
"001947": "Scientific Atlanta, A Cisco Company",
"001948": "AireSpider Networks",
"001949": "TENTEL COMTECH CO., LTD.",
"00194A": "TESTO AG",
"00194B": "SAGEM COMMUNICATION",
"00194C": "Fujian Stelcom information & Technology CO.,Ltd",
"00194D": "Avago Technologies Sdn Bhd",
"00194E": "Ultra Electronics - TCS (Tactical Communication Systems)",
"00194F": "Nokia Danmark A/S",
"001950": "Harman Multimedia",
"001951": "NETCONS, s.r.o.",
"001952": "ACOGITO Co., Ltd",
"001953": "Chainleader Communications Corp.",
"001954": "Leaf Corporation.",
"001955": "CISCO SYSTEMS, INC.",
"001956": "CISCO SYSTEMS, INC.",
"001957": "Saafnet Canada Inc.",
"001958": "Bluetooth SIG, Inc.",
"001959": "Staccato Communications Inc.",
"00195A": "Jenaer Antriebstechnik GmbH",
"00195B": "D-Link Corporation",
"00195C": "Innotech Corporation",
"00195D": "ShenZhen XinHuaTong Opto Electronics Co.,Ltd",
"00195E": "ARRIS Group, Inc.",
"00195F": "Valemount Networks Corporation",
"001960": "DoCoMo Systems, Inc.",
"001961": "Blaupunkt Embedded Systems GmbH",
"001962": "Commerciant, LP",
"001963": "Sony Ericsson Mobile Communications AB",
"001964": "Doorking Inc.",
"001965": "YuHua TelTech (ShangHai) Co., Ltd.",
"001966": "Asiarock Technology Limited",
"001967": "TELDAT Sp.J.",
"001968": "Digital Video Networks(Shanghai) CO. LTD.",
"001969": "Nortel",
"00196A": "MikroM GmbH",
"00196B": "Danpex Corporation",
"00196C": "ETROVISION TECHNOLOGY",
"00196D": "Raybit Systems Korea, Inc",
"00196E": "Metacom (Pty) Ltd.",
"00196F": "SensoPart GmbH",
"001970": "Z-Com, Inc.",
"001971": "Guangzhou Unicomp Technology Co.,Ltd",
"001972": "Plexus (Xiamen) Co.,ltd",
"001973": "Zeugma Systems",
"001974": "AboCom Systems, Inc.",
"001975": "Beijing Huisen networks technology Inc",
"001976": "Xipher Technologies, LLC",
"001977": "Aerohive Networks, Inc.",
"001978": "Datum Systems, Inc.",
"001979": "Nokia Danmark A/S",
"00197A": "MAZeT GmbH",
"00197B": "Picotest Corp.",
"00197C": "Riedel Communications GmbH",
"00197D": "Hon Hai Precision Ind. Co., Ltd",
"00197E": "Hon Hai Precision Ind. Co., Ltd",
"00197F": "PLANTRONICS, INC.",
"001980": "Gridpoint Systems",
"001981": "Vivox Inc",
"001982": "SmarDTV",
"001983": "CCT R&D Limited",
"001984": "ESTIC Corporation",
"001985": "IT Watchdogs, Inc",
"001986": "Cheng Hongjian",
"001987": "Panasonic Mobile Communications Co., Ltd.",
"001988": "Wi2Wi, Inc",
"001989": "Sonitrol Corporation",
"00198A": "Northrop Grumman Systems Corp.",
"00198B": "Novera Optics Korea, Inc.",
"00198C": "iXSea",
"00198D": "Ocean Optics, Inc.",
"00198E": "Oticon A/S",
"00198F": "Alcatel Bell N.V.",
"001990": "ELM DATA Co., Ltd.",
"001991": "avinfo",
"001992": "ADTRAN INC.",
"001993": "Changshu Switchgear MFG. Co.,Ltd. (Former Changshu Switchgea",
"001994": "Jorjin Technologies Inc.",
"001995": "Jurong Hi-Tech (Suzhou)Co.ltd",
"001996": "TurboChef Technologies Inc.",
"001997": "Soft Device Sdn Bhd",
"001998": "SATO CORPORATION",
"001999": "Fujitsu Technology Solutions",
"00199A": "EDO-EVI",
"00199B": "Diversified Technical Systems, Inc.",
"00199C": "CTRING",
"00199D": "VIZIO, Inc.",
"00199E": "Nifty",
"00199F": "DKT A/S",
"0019A0": "NIHON DATA SYSTENS, INC.",
"0019A1": "LG INFORMATION & COMM.",
"0019A2": "ORDYN TECHNOLOGIES",
"0019A3": "asteel electronique atlantique",
"0019A4": "Austar Technology (hang zhou) Co.,Ltd",
"0019A5": "RadarFind Corporation",
"0019A6": "ARRIS Group, Inc.",
"0019A7": "ITU-T",
"0019A8": "WiQuest Communications",
"0019A9": "CISCO SYSTEMS, INC.",
"0019AA": "CISCO SYSTEMS, INC.",
"0019AB": "Raycom CO ., LTD",
"0019AC": "GSP SYSTEMS Inc.",
"0019AD": "BOBST SA",
"0019AE": "Hopling Technologies b.v.",
"0019AF": "Rigol Technologies, Inc.",
"0019B0": "HanYang System",
"0019B1": "Arrow7 Corporation",
"0019B2": "XYnetsoft Co.,Ltd",
"0019B3": "Stanford Research Systems",
"0019B4": "VideoCast Ltd.",
"0019B5": "Famar Fueguina S.A.",
"0019B6": "Euro Emme s.r.l.",
"0019B7": "Nokia Danmark A/S",
"0019B8": "Boundary Devices",
"0019B9": "Dell Inc.",
"0019BA": "Paradox Security Systems Ltd",
"0019BB": "Hewlett-Packard Company",
"0019BC": "ELECTRO CHANCE SRL",
"0019BD": "New Media Life",
"0019BE": "Altai Technologies Limited",
"0019BF": "Citiway technology Co.,ltd",
"0019C0": "ARRIS Group, Inc.",
"0019C1": "Alps Electric Co., Ltd",
"0019C2": "Equustek Solutions, Inc.",
"0019C3": "Qualitrol",
"0019C4": "Infocrypt Inc.",
"0019C5": "SONY Computer Entertainment inc,",
"0019C6": "ZTE Corporation",
"0019C7": "Cambridge Industries(Group) Co.,Ltd.",
"0019C8": "AnyDATA Corporation",
"0019C9": "S&C ELECTRIC COMPANY",
"0019CA": "Broadata Communications, Inc",
"0019CB": "ZyXEL Communications Corporation",
"0019CC": "RCG (HK) Ltd",
"0019CD": "Chengdu ethercom information technology Ltd.",
"0019CE": "Progressive Gaming International",
"0019CF": "SALICRU, S.A.",
"0019D0": "Cathexis",
"0019D1": "Intel Corporate",
"0019D2": "Intel Corporate",
"0019D3": "TRAK Microwave",
"0019D4": "ICX Technologies",
"0019D5": "IP Innovations, Inc.",
"0019D6": "LS Cable and System Ltd.",
"0019D7": "FORTUNETEK CO., LTD",
"0019D8": "MAXFOR",
"0019D9": "Zeutschel GmbH",
"0019DA": "Welltrans O&E Technology Co. , Ltd.",
"0019DB": "MICRO-STAR INTERNATIONAL CO., LTD.",
"0019DC": "ENENSYS Technologies",
"0019DD": "FEI-Zyfer, Inc.",
"0019DE": "MOBITEK",
"0019DF": "Thomson Inc.",
"0019E0": "TP-LINK Technologies Co., Ltd.",
"0019E1": "Nortel",
"0019E2": "Juniper Networks",
"0019E3": "Apple",
"0019E4": "2Wire, Inc",
"0019E5": "Lynx Studio Technology, Inc.",
"0019E6": "TOYO MEDIC CO.,LTD.",
"0019E7": "CISCO SYSTEMS, INC.",
"0019E8": "CISCO SYSTEMS, INC.",
"0019E9": "S-Information Technolgy, Co., Ltd.",
"0019EA": "TeraMage Technologies Co., Ltd.",
"0019EB": "Pyronix Ltd",
"0019EC": "Sagamore Systems, Inc.",
"0019ED": "Axesstel Inc.",
"0019EE": "CARLO GAVAZZI CONTROLS SPA-Controls Division",
"0019EF": "SHENZHEN LINNKING ELECTRONICS CO.,LTD",
"0019F0": "UNIONMAN TECHNOLOGY CO.,LTD",
"0019F1": "Star Communication Network Technology Co.,Ltd",
"0019F2": "Teradyne K.K.",
"0019F3": "Cetis, Inc",
"0019F4": "Convergens Oy Ltd",
"0019F5": "Imagination Technologies Ltd",
"0019F6": "Acconet (PTE) Ltd",
"0019F7": "Onset Computer Corporation",
"0019F8": "Embedded Systems Design, Inc.",
"0019F9": "TDK-Lambda",
"0019FA": "Cable Vision Electronics CO., LTD.",
"0019FB": "BSkyB Ltd",
"0019FC": "PT. Ufoakses Sukses Luarbiasa",
"0019FD": "Nintendo Co., Ltd.",
"0019FE": "SHENZHEN SEECOMM TECHNOLOGY CO.,LTD.",
"0019FF": "Finnzymes",
"001A00": "MATRIX INC.",
"001A01": "Smiths Medical",
"001A02": "SECURE CARE PRODUCTS, INC",
"001A03": "Angel Electronics Co., Ltd.",
"001A04": "Interay Solutions BV",
"001A05": "OPTIBASE LTD",
"001A06": "OpVista, Inc.",
"001A07": "Arecont Vision",
"001A08": "Simoco Ltd.",
"001A09": "Wayfarer Transit Systems Ltd",
"001A0A": "Adaptive Micro-Ware Inc.",
"001A0B": "BONA TECHNOLOGY INC.",
"001A0C": "Swe-Dish Satellite Systems AB",
"001A0D": "HandHeld entertainment, Inc.",
"001A0E": "Cheng Uei Precision Industry Co.,Ltd",
"001A0F": "Sistemas Avanzados de Control, S.A.",
"001A10": "LUCENT TRANS ELECTRONICS CO.,LTD",
"001A11": "Google Inc.",
"001A12": "Essilor",
"001A13": "Wanlida Group Co., LTD",
"001A14": "Xin Hua Control Engineering Co.,Ltd.",
"001A15": "gemalto e-Payment",
"001A16": "Nokia Danmark A/S",
"001A17": "Teak Technologies, Inc.",
"001A18": "Advanced Simulation Technology inc.",
"001A19": "Computer Engineering Limited",
"001A1A": "Gentex Corporation/Electro-Acoustic Products",
"001A1B": "ARRIS Group, Inc.",
"001A1C": "GT&T Engineering Pte Ltd",
"001A1D": "PChome Online Inc.",
"001A1E": "Aruba Networks",
"001A1F": "Coastal Environmental Systems",
"001A20": "CMOTECH Co. Ltd.",
"001A21": "Indac B.V.",
"001A22": "eQ-3 Entwicklung GmbH",
"001A23": "Ice Qube, Inc",
"001A24": "Galaxy Telecom Technologies Ltd",
"001A25": "DELTA DORE",
"001A26": "Deltanode Solutions AB",
"001A27": "Ubistar",
"001A28": "ASWT Co., LTD. Taiwan Branch H.K.",
"001A29": "Johnson Outdoors Marine Electronics, Inc",
"001A2A": "Arcadyan Technology Corporation",
"001A2B": "Ayecom Technology Co., Ltd.",
"001A2C": "SATEC Co.,LTD",
"001A2D": "The Navvo Group",
"001A2E": "Ziova Coporation",
"001A2F": "CISCO SYSTEMS, INC.",
"001A30": "CISCO SYSTEMS, INC.",
"001A31": "SCAN COIN Industries AB",
"001A32": "ACTIVA MULTIMEDIA",
"001A33": "ASI Communications, Inc.",
"001A34": "Konka Group Co., Ltd.",
"001A35": "BARTEC GmbH",
"001A36": "Aipermon GmbH & Co. KG",
"001A37": "Lear Corporation",
"001A38": "Sanmina-SCI",
"001A39": "Merten GmbH&CoKG",
"001A3A": "Dongahelecomm",
"001A3B": "Doah Elecom Inc.",
"001A3C": "Technowave Ltd.",
"001A3D": "Ajin Vision Co.,Ltd",
"001A3E": "Faster Technology LLC",
"001A3F": "intelbras",
"001A40": "A-FOUR TECH CO., LTD.",
"001A41": "INOCOVA Co.,Ltd",
"001A42": "Techcity Technology co., Ltd.",
"001A43": "Logical Link Communications",
"001A44": "JWTrading Co., Ltd",
"001A45": "GN Netcom as",
"001A46": "Digital Multimedia Technology Co., Ltd",
"001A47": "Agami Systems, Inc.",
"001A48": "Takacom Corporation",
"001A49": "Micro Vision Co.,LTD",
"001A4A": "Qumranet Inc.",
"001A4B": "Hewlett-Packard Company",
"001A4C": "Crossbow Technology, Inc",
"001A4D": "GIGA-BYTE TECHNOLOGY CO.,LTD.",
"001A4E": "NTI AG / LinMot",
"001A4F": "AVM GmbH",
"001A50": "PheeNet Technology Corp.",
"001A51": "Alfred Mann Foundation",
"001A52": "Meshlinx Wireless Inc.",
"001A53": "Zylaya",
"001A54": "Hip Shing Electronics Ltd.",
"001A55": "ACA-Digital Corporation",
"001A56": "ViewTel Co,. Ltd.",
"001A57": "Matrix Design Group, LLC",
"001A58": "CCV Deutschland GmbH - Celectronic eHealth Div.",
"001A59": "Ircona",
"001A5A": "Korea Electric Power Data Network (KDN) Co., Ltd",
"001A5B": "NetCare Service Co., Ltd.",
"001A5C": "Euchner GmbH+Co. KG",
"001A5D": "Mobinnova Corp.",
"001A5E": "Thincom Technology Co.,Ltd",
"001A5F": "KitWorks.fi Ltd.",
"001A60": "Wave Electronics Co.,Ltd.",
"001A61": "PacStar Corp.",
"001A62": "Data Robotics, Incorporated",
"001A63": "Elster Solutions, LLC,",
"001A64": "IBM Corp",
"001A65": "Seluxit",
"001A66": "ARRIS Group, Inc.",
"001A67": "Infinite QL Sdn Bhd",
"001A68": "Weltec Enterprise Co., Ltd.",
"001A69": "Wuhan Yangtze Optical Technology CO.,Ltd.",
"001A6A": "Tranzas, Inc.",
"001A6B": "Universal Global Scientific Industrial Co., Ltd.",
"001A6C": "CISCO SYSTEMS, INC.",
"001A6D": "CISCO SYSTEMS, INC.",
"001A6E": "Impro Technologies",
"001A6F": "MI.TEL s.r.l.",
"001A70": "Cisco-Linksys, LLC",
"001A71": "Diostech Co., Ltd.",
"001A72": "Mosart Semiconductor Corp.",
"001A73": "Gemtek Technology Co., Ltd.",
"001A74": "Procare International Co",
"001A75": "Sony Ericsson Mobile Communications",
"001A76": "SDT information Technology Co.,LTD.",
"001A77": "ARRIS Group, Inc.",
"001A78": "ubtos",
"001A79": "TELECOMUNICATION TECHNOLOGIES LTD.",
"001A7A": "Lismore Instruments Limited",
"001A7B": "Teleco, Inc.",
"001A7C": "Hirschmann Multimedia B.V.",
"001A7D": "cyber-blue(HK)Ltd",
"001A7E": "LN Srithai Comm Ltd.",
"001A7F": "GCI Science&Technology Co.,Ltd.",
"001A80": "Sony Corporation",
"001A81": "Zelax",
"001A82": "PROBA Building Automation Co.,LTD",
"001A83": "Pegasus Technologies Inc.",
"001A84": "V One Multimedia Pte Ltd",
"001A85": "NV Michel Van de Wiele",
"001A86": "AdvancedIO Systems Inc",
"001A87": "Canhold International Limited",
"001A88": "Venergy,Co,Ltd",
"001A89": "Nokia Danmark A/S",
"001A8A": "Samsung Electronics Co., Ltd.",
"001A8B": "CHUNIL ELECTRIC IND., CO.",
"001A8C": "Astaro AG",
"001A8D": "AVECS Bergen GmbH",
"001A8E": "3Way Networks Ltd",
"001A8F": "Nortel",
"001A90": "Tr\u00f3pico Sistemas e Telecomunica\u00e7\u00f5es da Amaz\u00f4nia LTDA.",
"001A91": "FusionDynamic Ltd.",
"001A92": "ASUSTek COMPUTER INC.",
"001A93": "ERCO Leuchten GmbH",
"001A94": "Votronic GmbH",
"001A95": "Hisense Mobile Communications Technoligy Co.,Ltd.",
"001A96": "ECLER S.A.",
"001A97": "fitivision technology Inc.",
"001A98": "Asotel Communication Limited Taiwan Branch",
"001A99": "Smarty (HZ) Information Electronics Co., Ltd",
"001A9A": "Skyworth Digital technology(shenzhen)co.ltd.",
"001A9B": "ADEC & Parter AG",
"001A9C": "RightHand Technologies, Inc.",
"001A9D": "Skipper Wireless, Inc.",
"001A9E": "ICON Digital International Limited",
"001A9F": "A-Link Ltd",
"001AA0": "Dell Inc",
"001AA1": "CISCO SYSTEMS, INC.",
"001AA2": "CISCO SYSTEMS, INC.",
"001AA3": "DELORME",
"001AA4": "Future University-Hakodate",
"001AA5": "BRN Phoenix",
"001AA6": "Telefunken Radio Communication Systems GmbH &CO.KG",
"001AA7": "Torian Wireless",
"001AA8": "Mamiya Digital Imaging Co., Ltd.",
"001AA9": "FUJIAN STAR-NET COMMUNICATION CO.,LTD",
"001AAA": "Analogic Corp.",
"001AAB": "eWings s.r.l.",
"001AAC": "Corelatus AB",
"001AAD": "ARRIS Group, Inc.",
"001AAE": "Savant Systems LLC",
"001AAF": "BLUSENS TECHNOLOGY",
"001AB0": "Signal Networks Pvt. Ltd.,",
"001AB1": "Asia Pacific Satellite Industries Co., Ltd.",
"001AB2": "Cyber Solutions Inc.",
"001AB3": "VISIONITE INC.",
"001AB4": "FFEI Ltd.",
"001AB5": "Home Network System",
"001AB6": "Texas Instruments",
"001AB7": "Ethos Networks LTD.",
"001AB8": "Anseri Corporation",
"001AB9": "PMC",
"001ABA": "Caton Overseas Limited",
"001ABB": "Fontal Technology Incorporation",
"001ABC": "U4EA Technologies Ltd",
"001ABD": "Impatica Inc.",
"001ABE": "COMPUTER HI-TECH INC.",
"001ABF": "TRUMPF Laser Marking Systems AG",
"001AC0": "JOYBIEN TECHNOLOGIES CO., LTD.",
"001AC1": "3Com Ltd",
"001AC2": "YEC Co.,Ltd.",
"001AC3": "Scientific-Atlanta, Inc",
"001AC4": "2Wire, Inc",
"001AC5": "BreakingPoint Systems, Inc.",
"001AC6": "Micro Control Designs",
"001AC7": "UNIPOINT",
"001AC8": "ISL (Instrumentation Scientifique de Laboratoire)",
"001AC9": "SUZUKEN CO.,LTD",
"001ACA": "Tilera Corporation",
"001ACB": "Autocom Products Ltd",
"001ACC": "Celestial Semiconductor, Ltd",
"001ACD": "Tidel Engineering LP",
"001ACE": "YUPITERU CORPORATION",
"001ACF": "C.T. ELETTRONICA",
"001AD0": "Albis Technologies AG",
"001AD1": "FARGO CO., LTD.",
"001AD2": "Eletronica Nitron Ltda",
"001AD3": "Vamp Ltd.",
"001AD4": "iPOX Technology Co., Ltd.",
"001AD5": "KMC CHAIN INDUSTRIAL CO., LTD.",
"001AD6": "JIAGNSU AETNA ELECTRIC CO.,LTD",
"001AD7": "Christie Digital Systems, Inc.",
"001AD8": "AlsterAero GmbH",
"001AD9": "International Broadband Electric Communications, Inc.",
"001ADA": "Biz-2-Me Inc.",
"001ADB": "ARRIS Group, Inc.",
"001ADC": "Nokia Danmark A/S",
"001ADD": "PePWave Ltd",
"001ADE": "ARRIS Group, Inc.",
"001ADF": "Interactivetv Pty Limited",
"001AE0": "Mythology Tech Express Inc.",
"001AE1": "EDGE ACCESS INC",
"001AE2": "CISCO SYSTEMS, INC.",
"001AE3": "CISCO SYSTEMS, INC.",
"001AE4": "Medicis Technologies Corporation",
"001AE5": "Mvox Technologies Inc.",
"001AE6": "Atlanta Advanced Communications Holdings Limited",
"001AE7": "Aztek Networks, Inc.",
"001AE8": "Unify GmbH and Co KG",
"001AE9": "Nintendo Co., Ltd.",
"001AEA": "Radio Terminal Systems Pty Ltd",
"001AEB": "Allied Telesis K.K.",
"001AEC": "Keumbee Electronics Co.,Ltd.",
"001AED": "INCOTEC GmbH",
"001AEE": "Shenztech Ltd",
"001AEF": "Loopcomm Technology, Inc.",
"001AF0": "Alcatel - IPD",
"001AF1": "Embedded Artists AB",
"001AF2": "Dynavisions Schweiz AG",
"001AF3": "Samyoung Electronics",
"001AF4": "Handreamnet",
"001AF5": "PENTAONE. CO., LTD.",
"001AF6": "Woven Systems, Inc.",
"001AF7": "dataschalt e+a GmbH",
"001AF8": "Copley Controls Corporation",
"001AF9": "AeroVIronment (AV Inc)",
"001AFA": "Welch Allyn, Inc.",
"001AFB": "Joby Inc.",
"001AFC": "ModusLink Corporation",
"001AFD": "EVOLIS",
"001AFE": "SOFACREAL",
"001AFF": "Wizyoung Tech.",
"001B00": "Neopost Technologies",
"001B01": "Applied Radio Technologies",
"001B02": "ED Co.Ltd",
"001B03": "Action Technology (SZ) Co., Ltd",
"001B04": "Affinity International S.p.a",
"001B05": "YMC AG",
"001B06": "Ateliers R. LAUMONIER",
"001B07": "Mendocino Software",
"001B08": "Danfoss Drives A/S",
"001B09": "Matrix Telecom Pvt. Ltd.",
"001B0A": "Intelligent Distributed Controls Ltd",
"001B0B": "Phidgets Inc.",
"001B0C": "CISCO SYSTEMS, INC.",
"001B0D": "CISCO SYSTEMS, INC.",
"001B0E": "InoTec GmbH Organisationssysteme",
"001B0F": "Petratec",
"001B10": "ShenZhen Kang Hui Technology Co.,ltd",
"001B11": "D-Link Corporation",
"001B12": "Apprion",
"001B13": "Icron Technologies Corporation",
"001B14": "Carex Lighting Equipment Factory",
"001B15": "Voxtel, Inc.",
"001B16": "Celtro Ltd.",
"001B17": "Palo Alto Networks",
"001B18": "Tsuken Electric Ind. Co.,Ltd",
"001B19": "IEEE I&M Society TC9",
"001B1A": "e-trees Japan, Inc.",
"001B1B": "Siemens AG,",
"001B1C": "Coherent",
"001B1D": "Phoenix International Co., Ltd",
"001B1E": "HART Communication Foundation",
"001B1F": "DELTA - Danish Electronics, Light & Acoustics",
"001B20": "TPine Technology",
"001B21": "Intel Corporate",
"001B22": "Palit Microsystems ( H.K.) Ltd.",
"001B23": "SimpleComTools",
"001B24": "Quanta Computer Inc.",
"001B25": "Nortel",
"001B26": "RON-Telecom ZAO",
"001B27": "Merlin CSI",
"001B28": "POLYGON, JSC",
"001B29": "Avantis.Co.,Ltd",
"001B2A": "CISCO SYSTEMS, INC.",
"001B2B": "CISCO SYSTEMS, INC.",
"001B2C": "ATRON electronic GmbH",
"001B2D": "Med-Eng Systems Inc.",
"001B2E": "Sinkyo Electron Inc",
"001B2F": "NETGEAR Inc.",
"001B30": "Solitech Inc.",
"001B31": "Neural Image. Co. Ltd.",
"001B32": "QLogic Corporation",
"001B33": "Nokia Danmark A/S",
"001B34": "Focus System Inc.",
"001B35": "ChongQing JINOU Science & Technology Development CO.,Ltd",
"001B36": "Tsubata Engineering Co.,Ltd. (Head Office)",
"001B37": "Computec Oy",
"001B38": "COMPAL INFORMATION (KUNSHAN) CO., LTD.",
"001B39": "Proxicast",
"001B3A": "SIMS Corp.",
"001B3B": "Yi-Qing CO., LTD",
"001B3C": "Software Technologies Group,Inc.",
"001B3D": "EuroTel Spa",
"001B3E": "Curtis, Inc.",
"001B3F": "ProCurve Networking by HP",
"001B40": "Network Automation mxc AB",
"001B41": "General Infinity Co.,Ltd.",
"001B42": "Wise & Blue",
"001B43": "Beijing DG Telecommunications equipment Co.,Ltd",
"001B44": "SanDisk Corporation",
"001B45": "ABB AS, Division Automation Products",
"001B46": "Blueone Technology Co.,Ltd",
"001B47": "Futarque A/S",
"001B48": "Shenzhen Lantech Electronics Co., Ltd.",
"001B49": "Roberts Radio limited",
"001B4A": "W&W Communications, Inc.",
"001B4B": "SANION Co., Ltd.",
"001B4C": "Signtech",
"001B4D": "Areca Technology Corporation",
"001B4E": "Navman New Zealand",
"001B4F": "Avaya Inc.",
"001B50": "Nizhny Novgorod Factory named after M.Frunze, FSUE (NZiF)",
"001B51": "Vector Technology Corp.",
"001B52": "ARRIS Group, Inc.",
"001B53": "CISCO SYSTEMS, INC.",
"001B54": "CISCO SYSTEMS, INC.",
"001B55": "Hurco Automation Ltd.",
"001B56": "Tehuti Networks Ltd.",
"001B57": "SEMINDIA SYSTEMS PRIVATE LIMITED",
"001B58": "ACE CAD Enterprise Co., Ltd.",
"001B59": "Sony Ericsson Mobile Communications AB",
"001B5A": "Apollo Imaging Technologies, Inc.",
"001B5B": "2Wire, Inc.",
"001B5C": "Azuretec Co., Ltd.",
"001B5D": "Vololink Pty Ltd",
"001B5E": "BPL Limited",
"001B5F": "Alien Technology",
"001B60": "NAVIGON AG",
"001B61": "Digital Acoustics, LLC",
"001B62": "JHT Optoelectronics Co.,Ltd.",
"001B63": "Apple",
"001B64": "IsaacLandKorea Co., Ltd,",
"001B65": "China Gridcom Co., Ltd",
"001B66": "Sennheiser electronic GmbH & Co. KG",
"001B67": "Cisco Systems Inc",
"001B68": "Modnnet Co., Ltd",
"001B69": "Equaline Corporation",
"001B6A": "Powerwave Technologies Sweden AB",
"001B6B": "Swyx Solutions AG",
"001B6C": "LookX Digital Media BV",
"001B6D": "Midtronics, Inc.",
"001B6E": "Anue Systems, Inc.",
"001B6F": "Teletrak Ltd",
"001B70": "IRI Ubiteq, INC.",
"001B71": "Telular Corp.",
"001B72": "Sicep s.p.a.",
"001B73": "DTL Broadcast Ltd",
"001B74": "MiraLink Corporation",
"001B75": "Hypermedia Systems",
"001B76": "Ripcode, Inc.",
"001B77": "Intel Corporate",
"001B78": "Hewlett-Packard Company",
"001B79": "FAIVELEY TRANSPORT",
"001B7A": "Nintendo Co., Ltd.",
"001B7B": "The Tintometer Ltd",
"001B7C": "A & R Cambridge",
"001B7D": "CXR Anderson Jacobson",
"001B7E": "Beckmann GmbH",
"001B7F": "TMN Technologies Telecomunicacoes Ltda",
"001B80": "LORD Corporation",
"001B81": "DATAQ Instruments, Inc.",
"001B82": "Taiwan Semiconductor Co., Ltd.",
"001B83": "Finsoft Ltd",
"001B84": "Scan Engineering Telecom",
"001B85": "MAN Diesel SE",
"001B86": "Bosch Access Systems GmbH",
"001B87": "Deepsound Tech. Co., Ltd",
"001B88": "Divinet Access Technologies Ltd",
"001B89": "EMZA Visual Sense Ltd.",
"001B8A": "2M Electronic A/S",
"001B8B": "NEC AccessTechnica, Ltd.",
"001B8C": "JMicron Technology Corp.",
"001B8D": "Electronic Computer Systems, Inc.",
"001B8E": "Hulu Sweden AB",
"001B8F": "CISCO SYSTEMS, INC.",
"001B90": "CISCO SYSTEMS, INC.",
"001B91": "EFKON AG",
"001B92": "l-acoustics",
"001B93": "JC Decaux SA DNT",
"001B94": "T.E.M.A. S.p.A.",
"001B95": "VIDEO SYSTEMS SRL",
"001B96": "General Sensing",
"001B97": "Violin Technologies",
"001B98": "Samsung Electronics Co., Ltd.",
"001B99": "KS System GmbH",
"001B9A": "Apollo Fire Detectors Ltd",
"001B9B": "Hose-McCann Communications",
"001B9C": "SATEL sp. z o.o.",
"001B9D": "Novus Security Sp. z o.o.",
"001B9E": "ASKEY COMPUTER CORP",
"001B9F": "Calyptech Pty Ltd",
"001BA0": "Awox",
"001BA1": "\u00c5mic AB",
"001BA2": "IDS Imaging Development Systems GmbH",
"001BA3": "Flexit Group GmbH",
"001BA4": "S.A.E Afikim",
"001BA5": "MyungMin Systems, Inc.",
"001BA6": "intotech inc.",
"001BA7": "Lorica Solutions",
"001BA8": "UBI&MOBI,.Inc",
"001BA9": "BROTHER INDUSTRIES, LTD.",
"001BAA": "XenICs nv",
"001BAB": "Telchemy, Incorporated",
"001BAC": "Curtiss Wright Controls Embedded Computing",
"001BAD": "iControl Incorporated",
"001BAE": "Micro Control Systems, Inc",
"001BAF": "Nokia Danmark A/S",
"001BB0": "BHARAT ELECTRONICS",
"001BB1": "Wistron Neweb Corp.",
"001BB2": "Intellect International NV",
"001BB3": "Condalo GmbH",
"001BB4": "Airvod Limited",
"001BB5": "ZF Electronics GmbH",
"001BB6": "Bird Electronic Corp.",
"001BB7": "Alta Heights Technology Corp.",
"001BB8": "BLUEWAY ELECTRONIC CO;LTD",
"001BB9": "Elitegroup Computer System Co.",
"001BBA": "Nortel",
"001BBB": "RFTech Co.,Ltd",
"001BBC": "Silver Peak Systems, Inc.",
"001BBD": "FMC Kongsberg Subsea AS",
"001BBE": "ICOP Digital",
"001BBF": "SAGEM COMMUNICATION",
"001BC0": "Juniper Networks",
"001BC1": "HOLUX Technology, Inc.",
"001BC2": "Integrated Control Technology Limitied",
"001BC3": "Mobisolution Co.,Ltd",
"001BC4": "Ultratec, Inc.",
"001BC5": "IEEE Registration Authority",
"001BC6": "Strato Rechenzentrum AG",
"001BC7": "StarVedia Technology Inc.",
"001BC8": "MIURA CO.,LTD",
"001BC9": "FSN DISPLAY INC",
"001BCA": "Beijing Run Technology LTD. Company",
"001BCB": "PEMPEK SYSTEMS PTY LTD",
"001BCC": "KINGTEK CCTV ALLIANCE CO., LTD.",
"001BCD": "DAVISCOMMS (S) PTE LTD",
"001BCE": "Measurement Devices Ltd",
"001BCF": "Dataupia Corporation",
"001BD0": "IDENTEC SOLUTIONS",
"001BD1": "SOGESTMATIC",
"001BD2": "ULTRA-X ASIA PACIFIC Inc.",
"001BD3": "Panasonic Corp. AVC Company",
"001BD4": "CISCO SYSTEMS, INC.",
"001BD5": "CISCO SYSTEMS, INC.",
"001BD6": "Kelvin Hughes Ltd",
"001BD7": "Scientific Atlanta, A Cisco Company",
"001BD8": "DVTel LTD",
"001BD9": "Edgewater Computer Systems",
"001BDA": "UTStarcom Inc",
"001BDB": "Valeo VECS",
"001BDC": "Vencer Co., Ltd.",
"001BDD": "ARRIS Group, Inc.",
"001BDE": "Renkus-Heinz, Inc.",
"001BDF": "Iskra Sistemi d.d.",
"001BE0": "TELENOT ELECTRONIC GmbH",
"001BE1": "ViaLogy",
"001BE2": "AhnLab,Inc.",
"001BE3": "Health Hero Network, Inc.",
"001BE4": "TOWNET SRL",
"001BE5": "802automation Limited",
"001BE6": "VR AG",
"001BE7": "Postek Electronics Co., Ltd.",
"001BE8": "Ultratronik GmbH",
"001BE9": "Broadcom Corporation",
"001BEA": "Nintendo Co., Ltd.",
"001BEB": "DMP Electronics INC.",
"001BEC": "Netio Technologies Co., Ltd",
"001BED": "Brocade Communications Systems, Inc",
"001BEE": "Nokia Danmark A/S",
"001BEF": "Blossoms Digital Technology Co.,Ltd.",
"001BF0": "Value Platforms Limited",
"001BF1": "Nanjing SilverNet Software Co., Ltd.",
"001BF2": "KWORLD COMPUTER CO., LTD",
"001BF3": "TRANSRADIO SenderSysteme Berlin AG",
"001BF4": "KENWIN INDUSTRIAL(HK) LTD.",
"001BF5": "Tellink Sistemas de Telecomunicaci\u00f3n S.L.",
"001BF6": "CONWISE Technology Corporation Ltd.",
"001BF7": "Lund IP Products AB",
"001BF8": "Digitrax Inc.",
"001BF9": "Intellitect Water Ltd",
"001BFA": "G.i.N. mbH",
"001BFB": "Alps Electric Co., Ltd",
"001BFC": "ASUSTek COMPUTER INC.",
"001BFD": "Dignsys Inc.",
"001BFE": "Zavio Inc.",
"001BFF": "Millennia Media inc.",
"001C00": "Entry Point, LLC",
"001C01": "ABB Oy Drives",
"001C02": "Pano Logic",
"001C03": "Betty TV Technology AG",
"001C04": "Airgain, Inc.",
"001C05": "Nonin Medical Inc.",
"001C06": "Siemens Numerical Control Ltd., Nanjing",
"001C07": "Cwlinux Limited",
"001C08": "Echo360, Inc.",
"001C09": "SAE Electronic Co.,Ltd.",
"001C0A": "Shenzhen AEE Technology Co.,Ltd.",
"001C0B": "SmartAnt Telecom",
"001C0C": "TANITA Corporation",
"001C0D": "G-Technology, Inc.",
"001C0E": "CISCO SYSTEMS, INC.",
"001C0F": "CISCO SYSTEMS, INC.",
"001C10": "Cisco-Linksys, LLC",
"001C11": "ARRIS Group, Inc.",
"001C12": "ARRIS Group, Inc.",
"001C13": "OPTSYS TECHNOLOGY CO., LTD.",
"001C14": "VMware, Inc",
"001C15": "TXP Corporation",
"001C16": "ThyssenKrupp Elevator",
"001C17": "Nortel",
"001C18": "Sicert S.r.L.",
"001C19": "secunet Security Networks AG",
"001C1A": "Thomas Instrumentation, Inc",
"001C1B": "Hyperstone GmbH",
"001C1C": "Center Communication Systems GmbH",
"001C1D": "CHENZHOU GOSPELL DIGITAL TECHNOLOGY CO.,LTD",
"001C1E": "emtrion GmbH",
"001C1F": "Quest Retail Technology Pty Ltd",
"001C20": "CLB Benelux",
"001C21": "Nucsafe Inc.",
"001C22": "Aeris Elettronica s.r.l.",
"001C23": "Dell Inc",
"001C24": "Formosa Wireless Systems Corp.",
"001C25": "Hon Hai Precision Ind. Co.,Ltd.",
"001C26": "Hon Hai Precision Ind. Co.,Ltd.",
"001C27": "Sunell Electronics Co.",
"001C28": "Sphairon Technologies GmbH",
"001C29": "CORE DIGITAL ELECTRONICS CO., LTD",
"001C2A": "Envisacor Technologies Inc.",
"001C2B": "Alertme.com Limited",
"001C2C": "Synapse",
"001C2D": "FlexRadio Systems",
"001C2E": "HPN Supply Chain",
"001C2F": "Pfister GmbH",
"001C30": "Mode Lighting (UK ) Ltd.",
"001C31": "Mobile XP Technology Co., LTD",
"001C32": "Telian Corporation",
"001C33": "Sutron",
"001C34": "HUEY CHIAO INTERNATIONAL CO., LTD.",
"001C35": "Nokia Danmark A/S",
"001C36": "iNEWiT NV",
"001C37": "Callpod, Inc.",
"001C38": "Bio-Rad Laboratories, Inc.",
"001C39": "S Netsystems Inc.",
"001C3A": "Element Labs, Inc.",
"001C3B": "AmRoad Technology Inc.",
"001C3C": "Seon Design Inc.",
"001C3D": "WaveStorm",
"001C3E": "ECKey Corporation",
"001C3F": "International Police Technologies, Inc.",
"001C40": "VDG-Security bv",
"001C41": "scemtec Transponder Technology GmbH",
"001C42": "Parallels, Inc.",
"001C43": "Samsung Electronics Co.,Ltd",
"001C44": "Bosch Security Systems BV",
"001C45": "Chenbro Micom Co., Ltd.",
"001C46": "QTUM",
"001C47": "Hangzhou Hollysys Automation Co., Ltd",
"001C48": "WiDeFi, Inc.",
"001C49": "Zoltan Technology Inc.",
"001C4A": "AVM GmbH",
"001C4B": "Gener8, Inc.",
"001C4C": "Petrotest Instruments",
"001C4D": "Aplix IP Holdings Corporation",
"001C4E": "TASA International Limited",
"001C4F": "MACAB AB",
"001C50": "TCL Technoly Electronics(Huizhou)Co.,Ltd",
"001C51": "Celeno Communications",
"001C52": "VISIONEE SRL",
"001C53": "Synergy Lighting Controls",
"001C54": "Hillstone Networks Inc",
"001C55": "Shenzhen Kaifa Technology Co.",
"001C56": "Pado Systems, Inc.",
"001C57": "CISCO SYSTEMS, INC.",
"001C58": "CISCO SYSTEMS, INC.",
"001C59": "DEVON IT",
"001C5A": "Advanced Relay Corporation",
"001C5B": "Chubb Electronic Security Systems Ltd",
"001C5C": "Integrated Medical Systems, Inc.",
"001C5D": "Leica Microsystems",
"001C5E": "ASTON France",
"001C5F": "Winland Electronics, Inc.",
"001C60": "CSP Frontier Technologies,Inc.",
"001C61": "Galaxy Microsystems LImited",
"001C62": "LG Electronics Inc",
"001C63": "TRUEN",
"001C64": "Landis+Gyr",
"001C65": "JoeScan, Inc.",
"001C66": "UCAMP CO.,LTD",
"001C67": "Pumpkin Networks, Inc.",
"001C68": "Anhui Sun Create Electronics Co., Ltd",
"001C69": "Packet Vision Ltd",
"001C6A": "Weiss Engineering Ltd.",
"001C6B": "COVAX Co. Ltd",
"001C6C": "Jabil Circuit (Guangzhou) Limited",
"001C6D": "KYOHRITSU ELECTRONIC INDUSTRY CO., LTD.",
"001C6E": "Newbury Networks, Inc.",
"001C6F": "Emfit Ltd",
"001C70": "NOVACOMM LTDA",
"001C71": "Emergent Electronics",
"001C72": "Mayer & Cie GmbH & Co KG",
"001C73": "Arista Networks, Inc.",
"001C74": "Syswan Technologies Inc.",
"001C75": "Segnet Ltd.",
"001C76": "The Wandsworth Group Ltd",
"001C77": "Prodys",
"001C78": "WYPLAY SAS",
"001C79": "Cohesive Financial Technologies LLC",
"001C7A": "Perfectone Netware Company Ltd",
"001C7B": "Castlenet Technology Inc.",
"001C7C": "PERQ SYSTEMS CORPORATION",
"001C7D": "Excelpoint Manufacturing Pte Ltd",
"001C7E": "Toshiba",
"001C7F": "Check Point Software Technologies",
"001C80": "New Business Division/Rhea-Information CO., LTD.",
"001C81": "NextGen Venturi LTD",
"001C82": "Genew Technologies",
"001C83": "New Level Telecom Co., Ltd.",
"001C84": "STL Solution Co.,Ltd.",
"001C85": "Eunicorn",
"001C86": "Cranite Systems, Inc.",
"001C87": "Uriver Inc.",
"001C88": "TRANSYSTEM INC.",
"001C89": "Force Communications, Inc.",
"001C8A": "Cirrascale Corporation",
"001C8B": "MJ Innovations Ltd.",
"001C8C": "DIAL TECHNOLOGY LTD.",
"001C8D": "Mesa Imaging",
"001C8E": "Alcatel-Lucent IPD",
"001C8F": "Advanced Electronic Design, Inc.",
"001C90": "Empacket Corporation",
"001C91": "Gefen Inc.",
"001C92": "Tervela",
"001C93": "ExaDigm Inc",
"001C94": "LI-COR Biosciences",
"001C95": "Opticomm Corporation",
"001C96": "Linkwise Technology Pte Ltd",
"001C97": "Enzytek Technology Inc.,",
"001C98": "LUCKY TECHNOLOGY (HK) COMPANY LIMITED",
"001C99": "Shunra Software Ltd.",
"001C9A": "Nokia Danmark A/S",
"001C9B": "FEIG ELECTRONIC GmbH",
"001C9C": "Nortel",
"001C9D": "Liecthi AG",
"001C9E": "Dualtech IT AB",
"001C9F": "Razorstream, LLC",
"001CA0": "Production Resource Group, LLC",
"001CA1": "AKAMAI TECHNOLOGIES, INC.",
"001CA2": "ADB Broadband Italia",
"001CA3": "Terra",
"001CA4": "Sony Ericsson Mobile Communications",
"001CA5": "Zygo Corporation",
"001CA6": "Win4NET",
"001CA7": "International Quartz Limited",
"001CA8": "AirTies Wireless Networks",
"001CA9": "Audiomatica Srl",
"001CAA": "Bellon Pty Ltd",
"001CAB": "Meyer Sound Laboratories, Inc.",
"001CAC": "Qniq Technology Corp.",
"001CAD": "Wuhan Telecommunication Devices Co.,Ltd",
"001CAE": "WiChorus, Inc.",
"001CAF": "Plato Networks Inc.",
"001CB0": "CISCO SYSTEMS, INC.",
"001CB1": "CISCO SYSTEMS, INC.",
"001CB2": "BPT SPA",
"001CB3": "Apple",
"001CB4": "Iridium Satellite LLC",
"001CB5": "Neihua Network Technology Co.,LTD.(NHN)",
"001CB6": "Duzon CNT Co., Ltd.",
"001CB7": "USC DigiArk Corporation",
"001CB8": "CBC Co., Ltd",
"001CB9": "KWANG SUNG ELECTRONICS CO., LTD.",
"001CBA": "VerScient, Inc.",
"001CBB": "MusicianLink",
"001CBC": "CastGrabber, LLC",
"001CBD": "Ezze Mobile Tech., Inc.",
"001CBE": "Nintendo Co., Ltd.",
"001CBF": "Intel Corporate",
"001CC0": "Intel Corporate",
"001CC1": "ARRIS Group, Inc.",
"001CC2": "Part II Research, Inc.",
"001CC3": "Pace plc",
"001CC4": "Hewlett-Packard Company",
"001CC5": "3COM LTD",
"001CC6": "ProStor Systems",
"001CC7": "Rembrandt Technologies, LLC d/b/a REMSTREAM",
"001CC8": "INDUSTRONIC Industrie-Electronic GmbH & Co. KG",
"001CC9": "Kaise Electronic Technology Co., Ltd.",
"001CCA": "Shanghai Gaozhi Science & Technology Development Co.",
"001CCB": "Forth Corporation Public Company Limited",
"001CCC": "Research In Motion Limited",
"001CCD": "Alektrona Corporation",
"001CCE": "By Techdesign",
"001CCF": "LIMETEK",
"001CD0": "Circleone Co.,Ltd.",
"001CD1": "Waves Audio LTD",
"001CD2": "King Champion (Hong Kong) Limited",
"001CD3": "ZP Engineering SEL",
"001CD4": "Nokia Danmark A/S",
"001CD5": "ZeeVee, Inc.",
"001CD6": "Nokia Danmark A/S",
"001CD7": "Harman/Becker Automotive Systems GmbH",
"001CD8": "BlueAnt Wireless",
"001CD9": "GlobalTop Technology Inc.",
"001CDA": "Exegin Technologies Limited",
"001CDB": "CARPOINT CO.,LTD",
"001CDC": "Custom Computer Services, Inc.",
"001CDD": "COWBELL ENGINEERING CO., LTD.",
"001CDE": "Interactive Multimedia eXchange Inc.",
"001CDF": "Belkin International Inc.",
"001CE0": "DASAN TPS",
"001CE1": "INDRA SISTEMAS, S.A.",
"001CE2": "Attero Tech, LLC.",
"001CE3": "Optimedical Systems",
"001CE4": "EleSy JSC",
"001CE5": "MBS Electronic Systems GmbH",
"001CE6": "INNES",
"001CE7": "Rocon PLC Research Centre",
"001CE8": "Cummins Inc",
"001CE9": "Galaxy Technology Limited",
"001CEA": "Scientific-Atlanta, Inc",
"001CEB": "Nortel",
"001CEC": "Mobilesoft (Aust.) Pty Ltd",
"001CED": "ENVIRONNEMENT SA",
"001CEE": "SHARP Corporation",
"001CEF": "Primax Electronics LTD",
"001CF0": "D-Link Corporation",
"001CF1": "SUPoX Technology Co. , LTD.",
"001CF2": "Tenlon Technology Co.,Ltd.",
"001CF3": "EVS BROADCAST EQUIPMENT",
"001CF4": "Media Technology Systems Inc",
"001CF5": "Wiseblue Technology Limited",
"001CF6": "CISCO SYSTEMS, INC.",
"001CF7": "AudioScience",
"001CF8": "Parade Technologies, Ltd.",
"001CF9": "CISCO SYSTEMS, INC.",
"001CFA": "Alarm.com",
"001CFB": "ARRIS Group, Inc.",
"001CFC": "Suminet Communication Technologies (Shanghai) Co., Ltd.",
"001CFD": "Universal Electronics",
"001CFE": "Quartics Inc",
"001CFF": "Napera Networks Inc",
"001D00": "Brivo Systems, LLC",
"001D01": "Neptune Digital",
"001D02": "Cybertech Telecom Development",
"001D03": "Design Solutions Inc.",
"001D04": "Zipit Wireless, Inc.",
"001D05": "iLight",
"001D06": "HM Electronics, Inc.",
"001D07": "Shenzhen Sang Fei Consumer Communications Co.,Ltd",
"001D08": "JIANGSU YINHE ELECTRONICS CO., LTD",
"001D09": "Dell Inc",
"001D0A": "Davis Instruments, Inc.",
"001D0B": "Power Standards Lab",
"001D0C": "MobileCompia",
"001D0D": "Sony Computer Entertainment inc.",
"001D0E": "Agapha Technology co., Ltd.",
"001D0F": "TP-LINK Technologies Co., Ltd.",
"001D10": "LightHaus Logic, Inc.",
"001D11": "Analogue & Micro Ltd",
"001D12": "ROHM CO., LTD.",
"001D13": "NextGTV",
"001D14": "SPERADTONE INFORMATION TECHNOLOGY LIMITED",
"001D15": "Shenzhen Dolphin Electronic Co., Ltd",
"001D16": "SFR",
"001D17": "Digital Sky Corporation",
"001D18": "Power Innovation GmbH",
"001D19": "Arcadyan Technology Corporation",
"001D1A": "OvisLink S.A.",
"001D1B": "Sangean Electronics Inc.",
"001D1C": "Gennet s.a.",
"001D1D": "Inter-M Corporation",
"001D1E": "KYUSHU TEN CO.,LTD",
"001D1F": "Siauliu Tauro Televizoriai, JSC",
"001D20": "COMTREND CO.",
"001D21": "Alcad SL",
"001D22": "Foss Analytical A/S",
"001D23": "SENSUS",
"001D24": "Aclara Power-Line Systems Inc.",
"001D25": "Samsung Electronics Co.,Ltd",
"001D26": "Rockridgesound Technology Co.",
"001D27": "NAC-INTERCOM",
"001D28": "Sony Ericsson Mobile Communications AB",
"001D29": "Doro AB",
"001D2A": "SHENZHEN BUL-TECH CO.,LTD.",
"001D2B": "Wuhan Pont Technology CO. , LTD",
"001D2C": "Wavetrend Technologies (Pty) Limited",
"001D2D": "Pylone, Inc.",
"001D2E": "Ruckus Wireless",
"001D2F": "QuantumVision Corporation",
"001D30": "YX Wireless S.A.",
"001D31": "HIGHPRO INTERNATIONAL R&D CO,.LTD.",
"001D32": "Longkay Communication & Technology (Shanghai) Co. Ltd",
"001D33": "Maverick Systems Inc.",
"001D34": "SYRIS Technology Corp",
"001D35": "Viconics Electronics Inc.",
"001D36": "ELECTRONICS CORPORATION OF INDIA LIMITED",
"001D37": "Thales-Panda Transportation System",
"001D38": "Seagate Technology",
"001D39": "MOOHADIGITAL CO., LTD",
"001D3A": "mh acoustics LLC",
"001D3B": "Nokia Danmark A/S",
"001D3C": "Muscle Corporation",
"001D3D": "Avidyne Corporation",
"001D3E": "SAKA TECHNO SCIENCE CO.,LTD",
"001D3F": "Mitron Pty Ltd",
"001D40": " Intel \u2013 GE Care Innovations LLC",
"001D41": "Hardy Instruments",
"001D42": "Nortel",
"001D43": "Shenzhen G-link Digital Technology Co., Ltd.",
"001D44": "KROHNE",
"001D45": "CISCO SYSTEMS, INC.",
"001D46": "CISCO SYSTEMS, INC.",
"001D47": "Covote GmbH & Co KG",
"001D48": "Sensor-Technik Wiedemann GmbH",
"001D49": "Innovation Wireless Inc.",
"001D4A": "Carestream Health, Inc.",
"001D4B": "Grid Connect Inc.",
"001D4C": "Alcatel-Lucent",
"001D4D": "Adaptive Recognition Hungary, Inc",
"001D4E": "TCM Mobile LLC",
"001D4F": "Apple",
"001D50": "SPINETIX SA",
"001D51": "Babcock & Wilcox Power Generation Group, Inc",
"001D52": "Defzone B.V.",
"001D53": "S&O Electronics (Malaysia) Sdn. Bhd.",
"001D54": "Sunnic Technology & Merchandise INC.",
"001D55": "ZANTAZ, Inc",
"001D56": "Kramer Electronics Ltd.",
"001D57": "CAETEC Messtechnik",
"001D58": "CQ Inc",
"001D59": "Mitra Energy & Infrastructure",
"001D5A": "2Wire Inc.",
"001D5B": "Tecvan Inform\u00e1tica Ltda",
"001D5C": "Tom Communication Industrial Co.,Ltd.",
"001D5D": "Control Dynamics Pty. Ltd.",
"001D5E": "COMING MEDIA CORP.",
"001D5F": "OverSpeed SARL",
"001D60": "ASUSTek COMPUTER INC.",
"001D61": "BIJ Corporation",
"001D62": "InPhase Technologies",
"001D63": "Miele & Cie. KG",
"001D64": "Adam Communications Systems Int Ltd",
"001D65": "Microwave Radio Communications",
"001D66": "Hyundai Telecom",
"001D67": "AMEC",
"001D68": "Thomson Telecom Belgium",
"001D69": "Knorr-Bremse IT-Services GmbH",
"001D6A": "Alpha Networks Inc.",
"001D6B": "ARRIS Group, Inc.",
"001D6C": "ClariPhy Communications, Inc.",
"001D6D": "Confidant International LLC",
"001D6E": "Nokia Danmark A/S",
"001D6F": "Chainzone Technology Co., Ltd",
"001D70": "CISCO SYSTEMS, INC.",
"001D71": "CISCO SYSTEMS, INC.",
"001D72": "Wistron Corporation",
"001D73": "Buffalo Inc.",
"001D74": "Tianjin China-Silicon Microelectronics Co., Ltd.",
"001D75": "Radioscape PLC",
"001D76": "Eyeheight Ltd.",
"001D77": "NSGate",
"001D78": "Invengo Information Technology Co.,Ltd",
"001D79": "SIGNAMAX LLC",
"001D7A": "Wideband Semiconductor, Inc.",
"001D7B": "Ice Energy, Inc.",
"001D7C": "ABE Elettronica S.p.A.",
"001D7D": "GIGA-BYTE TECHNOLOGY CO.,LTD.",
"001D7E": "Cisco-Linksys, LLC",
"001D7F": "Tekron International Ltd",
"001D80": "Beijing Huahuan Eletronics Co.,Ltd",
"001D81": "GUANGZHOU GATEWAY ELECTRONICS CO., LTD",
"001D82": "GN A/S (GN Netcom A/S)",
"001D83": "Emitech Corporation",
"001D84": "Gateway, Inc.",
"001D85": "Call Direct Cellular Solutions",
"001D86": "Shinwa Industries(China) Ltd.",
"001D87": "VigTech Labs Sdn Bhd",
"001D88": "Clearwire",
"001D89": "VaultStor Corporation",
"001D8A": "TechTrex Inc",
"001D8B": "ADB Broadband Italia",
"001D8C": "La Crosse Technology LTD",
"001D8D": "Raytek GmbH",
"001D8E": "Alereon, Inc.",
"001D8F": "PureWave Networks",
"001D90": "EMCO Flow Systems",
"001D91": "Digitize, Inc",
"001D92": "MICRO-STAR INT'L CO.,LTD.",
"001D93": "Modacom",
"001D94": "Climax Technology Co., Ltd",
"001D95": "Flash, Inc.",
"001D96": "WatchGuard Video",
"001D97": "Alertus Technologies LLC",
"001D98": "Nokia Danmark A/S",
"001D99": "Cyan Optic, Inc.",
"001D9A": "GODEX INTERNATIONAL CO., LTD",
"001D9B": "Hokuyo Automatic Co., Ltd.",
"001D9C": "Rockwell Automation",
"001D9D": "ARTJOY INTERNATIONAL LIMITED",
"001D9E": "AXION TECHNOLOGIES",
"001D9F": "MATT R.P.Traczynscy Sp.J.",
"001DA0": "Heng Yu Electronic Manufacturing Company Limited",
"001DA1": "CISCO SYSTEMS, INC.",
"001DA2": "CISCO SYSTEMS, INC.",
"001DA3": "SabiOso",
"001DA4": "Hangzhou System Technology CO., LTD",
"001DA5": "WB Electronics",
"001DA6": "Media Numerics Limited",
"001DA7": "Seamless Internet",
"001DA8": "Takahata Electronics Co.,Ltd",
"001DA9": "Castles Technology, Co., LTD",
"001DAA": "DrayTek Corp.",
"001DAB": "SwissQual License AG",
"001DAC": "Gigamon Systems LLC",
"001DAD": "Sinotech Engineering Consultants, Inc. Geotechnical Enginee",
"001DAE": "CHANG TSENG TECHNOLOGY CO., LTD",
"001DAF": "Nortel",
"001DB0": "FuJian HengTong Information Technology Co.,Ltd",
"001DB1": "Crescendo Networks",
"001DB2": "HOKKAIDO ELECTRIC ENGINEERING CO.,LTD.",
"001DB3": "HPN Supply Chain",
"001DB4": "KUMHO ENG CO.,LTD",
"001DB5": "Juniper networks",
"001DB6": "BestComm Networks, Inc.",
"001DB7": "Tendril Networks, Inc.",
"001DB8": "Intoto Inc.",
"001DB9": "Wellspring Wireless",
"001DBA": "Sony Corporation",
"001DBB": "Dynamic System Electronics Corp.",
"001DBC": "Nintendo Co., Ltd.",
"001DBD": "Versamed Inc.",
"001DBE": "ARRIS Group, Inc.",
"001DBF": "Radiient Technologies, Inc.",
"001DC0": "Enphase Energy",
"001DC1": "Audinate Pty L",
"001DC2": "XORTEC OY",
"001DC3": "RIKOR TV, Ltd",
"001DC4": "AIOI Systems Co., Ltd.",
"001DC5": "Beijing Jiaxun Feihong Electricial Co., Ltd.",
"001DC6": "SNR Inc.",
"001DC7": "L-3 Communications Geneva Aerospace",
"001DC8": "Navionics Research Inc., dba SCADAmetrics",
"001DC9": "GainSpan Corp.",
"001DCA": "PAV Electronics Limited",
"001DCB": "Ex\u00e9ns Development Oy",
"001DCC": "Hetra Secure Solutions",
"001DCD": "ARRIS Group, Inc.",
"001DCE": "ARRIS Group, Inc.",
"001DCF": "ARRIS Group, Inc.",
"001DD0": "ARRIS Group, Inc.",
"001DD1": "ARRIS Group, Inc.",
"001DD2": "ARRIS Group, Inc.",
"001DD3": "ARRIS Group, Inc.",
"001DD4": "ARRIS Group, Inc.",
"001DD5": "ARRIS Group, Inc.",
"001DD6": "ARRIS Group, Inc.",
"001DD7": "Algolith",
"001DD8": "Microsoft Corporation",
"001DD9": "Hon Hai Precision Ind.Co.,Ltd.",
"001DDA": "Mikroelektronika spol. s r. o.",
"001DDB": "C-BEL Corporation",
"001DDC": "HangZhou DeChangLong Tech&Info Co.,Ltd",
"001DDD": "DAT H.K. LIMITED",
"001DDE": "Zhejiang Broadcast&Television Technology Co.,Ltd.",
"001DDF": "Sunitec Enterprise Co., Ltd.",
"001DE0": "Intel Corporate",
"001DE1": "Intel Corporate",
"001DE2": "Radionor Communications",
"001DE3": "Intuicom",
"001DE4": "Visioneered Image Systems",
"001DE5": "CISCO SYSTEMS, INC.",
"001DE6": "CISCO SYSTEMS, INC.",
"001DE7": "Marine Sonic Technology, Ltd.",
"001DE8": "Nikko Denki Tsushin Corporation(NDTC)",
"001DE9": "Nokia Danmark A/S",
"001DEA": "Commtest Instruments Ltd",
"001DEB": "DINEC International",
"001DEC": "Marusys",
"001DED": "Grid Net, Inc.",
"001DEE": "NEXTVISION SISTEMAS DIGITAIS DE TELEVIS\u00c3O LTDA.",
"001DEF": "TRIMM, INC.",
"001DF0": "Vidient Systems, Inc.",
"001DF1": "Intego Systems, Inc.",
"001DF2": "Netflix, Inc.",
"001DF3": "SBS Science & Technology Co., Ltd",
"001DF4": "Magellan Technology Pty Limited",
"001DF5": "Sunshine Co,LTD",
"001DF6": "Samsung Electronics Co.,Ltd",
"001DF7": "R. STAHL Schaltger\u00e4te GmbH",
"001DF8": "Webpro Vision Technology Corporation",
"001DF9": "Cybiotronics (Far East) Limited",
"001DFA": "Fujian LANDI Commercial Equipment Co.,Ltd",
"001DFB": "NETCLEUS Systems Corporation",
"001DFC": "KSIC",
"001DFD": "Nokia Danmark A/S",
"001DFE": "Palm, Inc",
"001DFF": "Network Critical Solutions Ltd",
"001E00": "Shantou Institute of Ultrasonic Instruments",
"001E01": "Renesas Technology Sales Co., Ltd.",
"001E02": "Sougou Keikaku Kougyou Co.,Ltd.",
"001E03": "LiComm Co., Ltd.",
"001E04": "Hanson Research Corporation",
"001E05": "Xseed Technologies & Computing",
"001E06": "WIBRAIN",
"001E07": "Winy Technology Co., Ltd.",
"001E08": "Centec Networks Inc",
"001E09": "ZEFATEK Co.,LTD",
"001E0A": "Syba Tech Limited",
"001E0B": "Hewlett-Packard Company",
"001E0C": "Sherwood Information Partners, Inc.",
"001E0D": "Micran Ltd.",
"001E0E": "MAXI VIEW HOLDINGS LIMITED",
"001E0F": "Briot International",
"001E10": "ShenZhen Huawei Communication Technologies Co.,Ltd.",
"001E11": "ELELUX INTERNATIONAL LTD",
"001E12": "Ecolab",
"001E13": "CISCO SYSTEMS, INC.",
"001E14": "CISCO SYSTEMS, INC.",
"001E15": "Beech Hill Electronics",
"001E16": "Keytronix",
"001E17": "STN BV",
"001E18": "Radio Activity srl",
"001E19": "GTRI",
"001E1A": "Best Source Taiwan Inc.",
"001E1B": "Digital Stream Technology, Inc.",
"001E1C": "SWS Australia Pty Limited",
"001E1D": "East Coast Datacom, Inc.",
"001E1E": "Honeywell Life Safety",
"001E1F": "Nortel",
"001E20": "Intertain Inc.",
"001E21": "Qisda Co.",
"001E22": "ARVOO Imaging Products BV",
"001E23": "Electronic Educational Devices, Inc",
"001E24": "Zhejiang Bell Technology Co.,ltd",
"001E25": "Intek Digital Inc",
"001E26": "Digifriends Co. Ltd",
"001E27": "SBN TECH Co.,Ltd.",
"001E28": "Lumexis Corporation",
"001E29": "Hypertherm Inc",
"001E2A": "Netgear Inc.",
"001E2B": "Radio Systems Design, Inc.",
"001E2C": "CyVerse Corporation",
"001E2D": "STIM",
"001E2E": "SIRTI S.p.A.",
"001E2F": "DiMoto Pty Ltd",
"001E30": "Shireen Inc",
"001E31": "INFOMARK CO.,LTD.",
"001E32": "Zensys",
"001E33": "Inventec Corporation",
"001E34": "CryptoMetrics",
"001E35": "Nintendo Co., Ltd.",
"001E36": "IPTE",
"001E37": "Universal Global Scientific Industrial Co., Ltd.",
"001E38": "Bluecard Software Technology Co., Ltd.",
"001E39": "Comsys Communication Ltd.",
"001E3A": "Nokia Danmark A/S",
"001E3B": "Nokia Danmark A/S",
"001E3C": "Lyngbox Media AB",
"001E3D": "Alps Electric Co., Ltd",
"001E3E": "KMW Inc.",
"001E3F": "TrellisWare Technologies, Inc.",
"001E40": "Shanghai DareGlobal Technologies Co.,Ltd.",
"001E41": "Microwave Communication & Component, Inc.",
"001E42": "Teltonika",
"001E43": "AISIN AW CO.,LTD.",
"001E44": "SANTEC",
"001E45": "Sony Ericsson Mobile Communications AB",
"001E46": "ARRIS Group, Inc.",
"001E47": "PT. Hariff Daya Tunggal Engineering",
"001E48": "Wi-Links",
"001E49": "CISCO SYSTEMS, INC.",
"001E4A": "CISCO SYSTEMS, INC.",
"001E4B": "City Theatrical",
"001E4C": "Hon Hai Precision Ind.Co., Ltd.",
"001E4D": "Welkin Sciences, LLC",
"001E4E": "DAKO EDV-Ingenieur- und Systemhaus GmbH",
"001E4F": "Dell Inc.",
"001E50": "BATTISTONI RESEARCH",
"001E51": "Converter Industry Srl",
"001E52": "Apple",
"001E53": "Further Tech Co., LTD",
"001E54": "TOYO ELECTRIC Corporation",
"001E55": "COWON SYSTEMS,Inc.",
"001E56": "Bally Wulff Entertainment GmbH",
"001E57": "ALCOMA, spol. s r.o.",
"001E58": "D-Link Corporation",
"001E59": "Silicon Turnkey Express, LLC",
"001E5A": "ARRIS Group, Inc.",
"001E5B": "Unitron Company, Inc.",
"001E5C": "RB GeneralEkonomik",
"001E5D": "Holosys d.o.o.",
"001E5E": "COmputime Ltd.",
"001E5F": "KwikByte, LLC",
"001E60": "Digital Lighting Systems, Inc",
"001E61": "ITEC GmbH",
"001E62": "Siemon",
"001E63": "Vibro-Meter SA",
"001E64": "Intel Corporate",
"001E65": "Intel Corporate",
"001E66": "RESOL Elektronische Regelungen GmbH",
"001E67": "Intel Corporate",
"001E68": "Quanta Computer",
"001E69": "Thomson Inc.",
"001E6A": "Beijing Bluexon Technology Co.,Ltd",
"001E6B": "Cisco SPVTG",
"001E6C": "Carbon Mountain LLC",
"001E6D": "IT R&D Center",
"001E6E": "Shenzhen First Mile Communications Ltd",
"001E6F": "Magna-Power Electronics, Inc.",
"001E70": "Cobham Defence Communications Ltd",
"001E71": "MIrcom Group of Companies",
"001E72": "PCS",
"001E73": "ZTE CORPORATION",
"001E74": "SAGEM COMMUNICATION",
"001E75": "LG Electronics",
"001E76": "Thermo Fisher Scientific",
"001E77": "Air2App",
"001E78": "Owitek Technology Ltd.,",
"001E79": "CISCO SYSTEMS, INC.",
"001E7A": "CISCO SYSTEMS, INC.",
"001E7B": "R.I.CO. S.r.l.",
"001E7C": "Taiwick Limited",
"001E7D": "Samsung Electronics Co.,Ltd",
"001E7E": "Nortel",
"001E7F": "CBM of America",
"001E80": "Last Mile Ltd.",
"001E81": "CNB Technology Inc.",
"001E82": "SanDisk Corporation",
"001E83": "LAN/MAN Standards Association (LMSC)",
"001E84": "Pika Technologies Inc.",
"001E85": "Lagotek Corporation",
"001E86": "MEL Co.,Ltd.",
"001E87": "Realease Limited",
"001E88": "ANDOR SYSTEM SUPPORT CO., LTD.",
"001E89": "CRFS Limited",
"001E8A": "eCopy, Inc",
"001E8B": "Infra Access Korea Co., Ltd.",
"001E8C": "ASUSTek COMPUTER INC.",
"001E8D": "ARRIS Group, Inc.",
"001E8E": "Hunkeler AG",
"001E8F": "CANON INC.",
"001E90": "Elitegroup Computer Systems Co",
"001E91": "KIMIN Electronic Co., Ltd.",
"001E92": "JEULIN S.A.",
"001E93": "CiriTech Systems Inc",
"001E94": "SUPERCOM TECHNOLOGY CORPORATION",
"001E95": "SIGMALINK",
"001E96": "Sepura Plc",
"001E97": "Medium Link System Technology CO., LTD,",
"001E98": "GreenLine Communications",
"001E99": "Vantanol Industrial Corporation",
"001E9A": "HAMILTON Bonaduz AG",
"001E9B": "San-Eisha, Ltd.",
"001E9C": "Fidustron INC",
"001E9D": "Recall Technologies, Inc.",
"001E9E": "ddm hopt + schuler Gmbh + Co. KG",
"001E9F": "Visioneering Systems, Inc.",
"001EA0": "XLN-t",
"001EA1": "Brunata a/s",
"001EA2": "Symx Systems, Inc.",
"001EA3": "Nokia Danmark A/S",
"001EA4": "Nokia Danmark A/S",
"001EA5": "ROBOTOUS, Inc.",
"001EA6": "Best IT World (India) Pvt. Ltd.",
"001EA7": "ActionTec Electronics, Inc",
"001EA8": "Datang Mobile Communications Equipment CO.,LTD",
"001EA9": "Nintendo Co., Ltd.",
"001EAA": "E-Senza Technologies GmbH",
"001EAB": "TeleWell Oy",
"001EAC": "Armadeus Systems",
"001EAD": "Wingtech Group Limited",
"001EAE": "Continental Automotive Systems",
"001EAF": "Ophir Optronics Ltd",
"001EB0": "ImesD Electronica S.L.",
"001EB1": "Cryptsoft Pty Ltd",
"001EB2": "LG innotek",
"001EB3": "Primex Wireless",
"001EB4": "UNIFAT TECHNOLOGY LTD.",
"001EB5": "Ever Sparkle Technologies Ltd",
"001EB6": "TAG Heuer SA",
"001EB7": "TBTech, Co., Ltd.",
"001EB8": "Fortis, Inc.",
"001EB9": "Sing Fai Technology Limited",
"001EBA": "High Density Devices AS",
"001EBB": "BLUELIGHT TECHNOLOGY INC.",
"001EBC": "WINTECH AUTOMATION CO.,LTD.",
"001EBD": "CISCO SYSTEMS, INC.",
"001EBE": "CISCO SYSTEMS, INC.",
"001EBF": "Haas Automation Inc.",
"001EC0": "Microchip Technology Inc.",
"001EC1": "3COM EUROPE LTD",
"001EC2": "Apple",
"001EC3": "Kozio, Inc.",
"001EC4": "Celio Corp",
"001EC5": "Middle Atlantic Products Inc",
"001EC6": "Obvius Holdings LLC",
"001EC7": "2Wire, Inc.",
"001EC8": "2Wire, Inc.",
"001EC9": "2Wire, Inc.",
"001ECA": "2Wire, Inc.",
"001ECB": "2Wire, Inc.",
"001ECC": "2Wire, Inc.",
"001ECD": "KYLAND Technology Co. LTD",
"001ECE": "2Wire, Inc.",
"001ECF": "2Wire, Inc.",
"001ED0": "2Wire, Inc.",
"001ED1": "2Wire, Inc.",
"001ED2": "2Wire, Inc.",
"001ED3": "2Wire, Inc.",
"001ED4": "2Wire, Inc.",
"001ED5": "2Wire, Inc.",
"001ED6": "2Wire, Inc.",
"001ED7": "2Wire, Inc.",
"001ED8": "2Wire, Inc.",
"001ED9": "2Wire, Inc.",
"001EDA": "2Wire, Inc.",
"001EDB": "2Wire, Inc.",
"001EDC": "2Wire, Inc.",
"001EDD": "2Wire, Inc.",
"001EDE": "2Wire, Inc.",
"001EDF": "2Wire, Inc.",
"001EE0": "Urmet Domus SpA",
"001EE1": "Samsung Electronics Co.,Ltd",
"001EE2": "Samsung Electronics Co.,Ltd",
"001EE3": "T&W Electronics (ShenZhen) Co.,Ltd",
"001EE4": "ACS Solutions France",
"001EE5": "Cisco-Linksys, LLC",
"001EE6": "Shenzhen Advanced Video Info-Tech Co., Ltd.",
"001EE7": "Epic Systems Inc",
"001EE8": "Mytek",
"001EE9": "Stoneridge Electronics AB",
"001EEA": "Sensor Switch, Inc.",
"001EEB": "Talk-A-Phone Co.",
"001EEC": "COMPAL INFORMATION (KUNSHAN) CO., LTD.",
"001EED": "Adventiq Ltd.",
"001EEE": "ETL Systems Ltd",
"001EEF": "Cantronic International Limited",
"001EF0": "Gigafin Networks",
"001EF1": "Servimat",
"001EF2": "Micro Motion Inc",
"001EF3": "From2",
"001EF4": "L-3 Communications Display Systems",
"001EF5": "Hitek Automated Inc.",
"001EF6": "CISCO SYSTEMS, INC.",
"001EF7": "CISCO SYSTEMS, INC.",
"001EF8": "Emfinity Inc.",
"001EF9": "Pascom Kommunikations systeme GmbH.",
"001EFA": "PROTEI Ltd.",
"001EFB": "Trio Motion Technology Ltd",
"001EFC": "JSC \"MASSA-K\"",
"001EFD": "Microbit 2.0 AB",
"001EFE": "LEVEL s.r.o.",
"001EFF": "Mueller-Elektronik GmbH & Co. KG",
"001F00": "Nokia Danmark A/S",
"001F01": "Nokia Danmark A/S",
"001F02": "Pixelmetrix Corporation Pte Ltd",
"001F03": "NUM AG",
"001F04": "Granch Ltd.",
"001F05": "iTAS Technology Corp.",
"001F06": "Integrated Dispatch Solutions",
"001F07": "AZTEQ Mobile",
"001F08": "RISCO LTD",
"001F09": "JASTEC CO., LTD.",
"001F0A": "Nortel",
"001F0B": "Federal State Unitary Enterprise Industrial Union\"Electropribor\"",
"001F0C": "Intelligent Digital Services GmbH",
"001F0D": "L3 Communications - Telemetry West",
"001F0E": "Japan Kyastem Co., Ltd",
"001F0F": "Select Engineered Systems",
"001F10": "TOLEDO DO BRASIL INDUSTRIA DE BALANCAS LTDA",
"001F11": "OPENMOKO, INC.",
"001F12": "Juniper Networks",
"001F13": "S.& A.S. Ltd.",
"001F14": "NexG",
"001F15": "Bioscrypt Inc",
"001F16": "Wistron Corporation",
"001F17": "IDX Company, Ltd.",
"001F18": "Hakusan.Mfg.Co,.Ltd",
"001F19": "BEN-RI ELECTRONICA S.A.",
"001F1A": "Prominvest",
"001F1B": "RoyalTek Company Ltd.",
"001F1C": "KOBISHI ELECTRIC Co.,Ltd.",
"001F1D": "Atlas Material Testing Technology LLC",
"001F1E": "Astec Technology Co., Ltd",
"001F1F": "Edimax Technology Co. Ltd.",
"001F20": "Logitech Europe SA",
"001F21": "Inner Mongolia Yin An Science & Technology Development Co.,L",
"001F22": "Source Photonics, Inc.",
"001F23": "Interacoustics",
"001F24": "DIGITVIEW TECHNOLOGY CO., LTD.",
"001F25": "MBS GmbH",
"001F26": "CISCO SYSTEMS, INC.",
"001F27": "CISCO SYSTEMS, INC.",
"001F28": "HPN Supply Chain",
"001F29": "Hewlett-Packard Company",
"001F2A": "ACCM",
"001F2B": "Orange Logic",
"001F2C": "Starbridge Networks",
"001F2D": "Electro-Optical Imaging, Inc.",
"001F2E": "Triangle Research Int'l Pte Ltd",
"001F2F": "Berker GmbH & Co. KG",
"001F30": "Travelping",
"001F31": "Radiocomp",
"001F32": "Nintendo Co., Ltd.",
"001F33": "Netgear Inc.",
"001F34": "Lung Hwa Electronics Co., Ltd.",
"001F35": "AIR802 LLC",
"001F36": "Bellwin Information Co. Ltd.,",
"001F37": "Genesis I&C",
"001F38": "POSITRON",
"001F39": "Construcciones y Auxiliar de Ferrocarriles, S.A.",
"001F3A": "Hon Hai Precision Ind.Co., Ltd.",
"001F3B": "Intel Corporate",
"001F3C": "Intel Corporate",
"001F3D": "Qbit GmbH",
"001F3E": "RP-Technik e.K.",
"001F3F": "AVM GmbH",
"001F40": "Speakercraft Inc.",
"001F41": "Ruckus Wireless",
"001F42": "Etherstack plc",
"001F43": "ENTES ELEKTRONIK",
"001F44": "GE Transportation Systems",
"001F45": "Enterasys",
"001F46": "Nortel",
"001F47": "MCS Logic Inc.",
"001F48": "Mojix Inc.",
"001F49": "Eurosat Distribution Ltd",
"001F4A": "Albentia Systems S.A.",
"001F4B": "Lineage Power",
"001F4C": "Roseman Engineering Ltd",
"001F4D": "Segnetics LLC",
"001F4E": "ConMed Linvatec",
"001F4F": "Thinkware Co. Ltd.",
"001F50": "Swissdis AG",
"001F51": "HD Communications Corp",
"001F52": "UVT Unternehmensberatung fur Verkehr und Technik GmbH",
"001F53": "GEMAC Gesellschaft f\u00fcr Mikroelektronikanwendung Chemnitz mbH",
"001F54": "Lorex Technology Inc.",
"001F55": "Honeywell Security (China) Co., Ltd.",
"001F56": "DIGITAL FORECAST",
"001F57": "Phonik Innovation Co.,LTD",
"001F58": "EMH Energiemesstechnik GmbH",
"001F59": "Kronback Tracers",
"001F5A": "Beckwith Electric Co.",
"001F5B": "Apple",
"001F5C": "Nokia Danmark A/S",
"001F5D": "Nokia Danmark A/S",
"001F5E": "Dyna Technology Co.,Ltd.",
"001F5F": "Blatand GmbH",
"001F60": "COMPASS SYSTEMS CORP.",
"001F61": "Talent Communication Networks Inc.",
"001F62": "JSC \"Stilsoft\"",
"001F63": "JSC Goodwin-Europa",
"001F64": "Beijing Autelan Technology Inc.",
"001F65": "KOREA ELECTRIC TERMINAL CO., LTD.",
"001F66": "PLANAR LLC",
"001F67": "Hitachi,Ltd.",
"001F68": "Martinsson Elektronik AB",
"001F69": "Pingood Technology Co., Ltd.",
"001F6A": "PacketFlux Technologies, Inc.",
"001F6B": "LG Electronics",
"001F6C": "CISCO SYSTEMS, INC.",
"001F6D": "CISCO SYSTEMS, INC.",
"001F6E": "Vtech Engineering Corporation",
"001F6F": "Fujian Sunnada Communication Co.,Ltd.",
"001F70": "Botik Technologies LTD",
"001F71": "xG Technology, Inc.",
"001F72": "QingDao Hiphone Technology Co,.Ltd",
"001F73": "Teraview Technology Co., Ltd.",
"001F74": "Eigen Development",
"001F75": "GiBahn Media",
"001F76": "AirLogic Systems Inc.",
"001F77": "HEOL DESIGN",
"001F78": "Blue Fox Porini Textile",
"001F79": "Lodam Electronics A/S",
"001F7A": "WiWide Inc.",
"001F7B": "TechNexion Ltd.",
"001F7C": "Witelcom AS",
"001F7D": "embedded wireless GmbH",
"001F7E": "ARRIS Group, Inc.",
"001F7F": "Phabrix Limited",
"001F80": "Lucas Holding bv",
"001F81": "Accel Semiconductor Corp",
"001F82": "Cal-Comp Electronics & Communications Co., Ltd",
"001F83": "Teleplan Technology Services Sdn Bhd",
"001F84": "Gigle Semiconductor",
"001F85": "Apriva ISS, LLC",
"001F86": "digEcor",
"001F87": "Skydigital Inc.",
"001F88": "FMS Force Measuring Systems AG",
"001F89": "Signalion GmbH",
"001F8A": "Ellion Digital Inc.",
"001F8B": "Cache IQ",
"001F8C": "CCS Inc.",
"001F8D": "Ingenieurbuero Stark GmbH und Ko. KG",
"001F8E": "Metris USA Inc.",
"001F8F": "Shanghai Bellmann Digital Source Co.,Ltd.",
"001F90": "Actiontec Electronics, Inc",
"001F91": "DBS Lodging Technologies, LLC",
"001F92": "VideoIQ, Inc.",
"001F93": "Xiotech Corporation",
"001F94": "Lascar Electronics Ltd",
"001F95": "SAGEM COMMUNICATION",
"001F96": "APROTECH CO.LTD",
"001F97": "BERTANA SRL",
"001F98": "DAIICHI-DENTSU LTD.",
"001F99": "SERONICS co.ltd",
"001F9A": "Nortel Networks",
"001F9B": "POSBRO",
"001F9C": "LEDCO",
"001F9D": "CISCO SYSTEMS, INC.",
"001F9E": "CISCO SYSTEMS, INC.",
"001F9F": "Thomson Telecom Belgium",
"001FA0": "A10 Networks",
"001FA1": "Gtran Inc",
"001FA2": "Datron World Communications, Inc.",
"001FA3": "T&W Electronics(Shenzhen)Co.,Ltd.",
"001FA4": "ShenZhen Gongjin Electronics Co.,Ltd",
"001FA5": "Blue-White Industries",
"001FA6": "Stilo srl",
"001FA7": "Sony Computer Entertainment Inc.",
"001FA8": "Smart Energy Instruments Inc.",
"001FA9": "Atlanta DTH, Inc.",
"001FAA": "Taseon, Inc.",
"001FAB": "I.S HIGH TECH.INC",
"001FAC": "Goodmill Systems Ltd",
"001FAD": "Brown Innovations, Inc",
"001FAE": "Blick South Africa (Pty) Ltd",
"001FAF": "NextIO, Inc.",
"001FB0": "TimeIPS, Inc.",
"001FB1": "Cybertech Inc.",
"001FB2": "Sontheim Industrie Elektronik GmbH",
"001FB3": "2Wire",
"001FB4": "SmartShare Systems",
"001FB5": "I/O Interconnect Inc.",
"001FB6": "Chi Lin Technology Co., Ltd.",
"001FB7": "WiMate Technologies Corp.",
"001FB8": "Universal Remote Control, Inc.",
"001FB9": "Paltronics",
"001FBA": "BoYoung Tech. & Marketing, Inc.",
"001FBB": "Xenatech Co.,LTD",
"001FBC": "EVGA Corporation",
"001FBD": "Kyocera Wireless Corp.",
"001FBE": "Shenzhen Mopnet Industrial Co.,Ltd",
"001FBF": "Fulhua Microelectronics Corp. Taiwan Branch",
"001FC0": "Control Express Finland Oy",
"001FC1": "Hanlong Technology Co.,LTD",
"001FC2": "Jow Tong Technology Co Ltd",
"001FC3": "SmartSynch, Inc",
"001FC4": "ARRIS Group, Inc.",
"001FC5": "Nintendo Co., Ltd.",
"001FC6": "ASUSTek COMPUTER INC.",
"001FC7": "Casio Hitachi Mobile Comunications Co., Ltd.",
"001FC8": "Up-Today Industrial Co., Ltd.",
"001FC9": "CISCO SYSTEMS, INC.",
"001FCA": "CISCO SYSTEMS, INC.",
"001FCB": "NIW Solutions",
"001FCC": "Samsung Electronics Co.,Ltd",
"001FCD": "Samsung Electronics",
"001FCE": "QTECH LLC",
"001FCF": "MSI Technology GmbH",
"001FD0": "GIGA-BYTE TECHNOLOGY CO.,LTD.",
"001FD1": "OPTEX CO.,LTD.",
"001FD2": "COMMTECH TECHNOLOGY MACAO COMMERCIAL OFFSHORE LTD.",
"001FD3": "RIVA Networks Inc.",
"001FD4": "4IPNET, INC.",
"001FD5": "MICRORISC s.r.o.",
"001FD6": "Shenzhen Allywll",
"001FD7": "TELERAD SA",
"001FD8": "A-TRUST COMPUTER CORPORATION",
"001FD9": "RSD Communications Ltd",
"001FDA": "Nortel Networks",
"001FDB": "Network Supply Corp.,",
"001FDC": "Mobile Safe Track Ltd",
"001FDD": "GDI LLC",
"001FDE": "Nokia Danmark A/S",
"001FDF": "Nokia Danmark A/S",
"001FE0": "EdgeVelocity Corp",
"001FE1": "Hon Hai Precision Ind. Co., Ltd.",
"001FE2": "Hon Hai Precision Ind. Co., Ltd.",
"001FE3": "LG Electronics",
"001FE4": "Sony Ericsson Mobile Communications",
"001FE5": "In-Circuit GmbH",
"001FE6": "Alphion Corporation",
"001FE7": "Simet",
"001FE8": "KURUSUGAWA Electronics Industry Inc,.",
"001FE9": "Printrex, Inc.",
"001FEA": "Applied Media Technologies Corporation",
"001FEB": "Trio Datacom Pty Ltd",
"001FEC": "Synapse \u00c9lectronique",
"001FED": "Tecan Systems Inc.",
"001FEE": "ubisys technologies GmbH",
"001FEF": "SHINSEI INDUSTRIES CO.,LTD",
"001FF0": "Audio Partnership",
"001FF1": "Paradox Hellas S.A.",
"001FF2": "VIA Technologies, Inc.",
"001FF3": "Apple",
"001FF4": "Power Monitors, Inc.",
"001FF5": "Kongsberg Defence & Aerospace",
"001FF6": "PS Audio International",
"001FF7": "Nakajima All Precision Co., Ltd.",
"001FF8": "Siemens AG, Sector Industry, Drive Technologies, Motion Control Systems",
"001FF9": "Advanced Knowledge Associates",
"001FFA": "Coretree, Co, Ltd",
"001FFB": "Green Packet Bhd",
"001FFC": "Riccius+Sohn GmbH",
"001FFD": "Indigo Mobile Technologies Corp.",
"001FFE": "HPN Supply Chain",
"001FFF": "Respironics, Inc.",
"002000": "LEXMARK INTERNATIONAL, INC.",
"002001": "DSP SOLUTIONS, INC.",
"002002": "SERITECH ENTERPRISE CO., LTD.",
"002003": "PIXEL POWER LTD.",
"002004": "YAMATAKE-HONEYWELL CO., LTD.",
"002005": "SIMPLE TECHNOLOGY",
"002006": "GARRETT COMMUNICATIONS, INC.",
"002007": "SFA, INC.",
"002008": "CABLE & COMPUTER TECHNOLOGY",
"002009": "PACKARD BELL ELEC., INC.",
"00200A": "SOURCE-COMM CORP.",
"00200B": "OCTAGON SYSTEMS CORP.",
"00200C": "ADASTRA SYSTEMS CORP.",
"00200D": "CARL ZEISS",
"00200E": "SATELLITE TECHNOLOGY MGMT, INC",
"00200F": "TANBAC CO., LTD.",
"002010": "JEOL SYSTEM TECHNOLOGY CO. LTD",
"002011": "CANOPUS CO., LTD.",
"002012": "CAMTRONICS MEDICAL SYSTEMS",
"002013": "DIVERSIFIED TECHNOLOGY, INC.",
"002014": "GLOBAL VIEW CO., LTD.",
"002015": "ACTIS COMPUTER SA",
"002016": "SHOWA ELECTRIC WIRE & CABLE CO",
"002017": "ORBOTECH",
"002018": "CIS TECHNOLOGY INC.",
"002019": "OHLER GmbH",
"00201A": "MRV Communications, Inc.",
"00201B": "NORTHERN TELECOM/NETWORK",
"00201C": "EXCEL, INC.",
"00201D": "KATANA PRODUCTS",
"00201E": "NETQUEST CORPORATION",
"00201F": "BEST POWER TECHNOLOGY, INC.",
"002020": "MEGATRON COMPUTER INDUSTRIES PTY, LTD.",
"002021": "ALGORITHMS SOFTWARE PVT. LTD.",
"002022": "NMS Communications",
"002023": "T.C. TECHNOLOGIES PTY. LTD",
"002024": "PACIFIC COMMUNICATION SCIENCES",
"002025": "CONTROL TECHNOLOGY, INC.",
"002026": "AMKLY SYSTEMS, INC.",
"002027": "MING FORTUNE INDUSTRY CO., LTD",
"002028": "WEST EGG SYSTEMS, INC.",
"002029": "TELEPROCESSING PRODUCTS, INC.",
"00202A": "N.V. DZINE",
"00202B": "ADVANCED TELECOMMUNICATIONS MODULES, LTD.",
"00202C": "WELLTRONIX CO., LTD.",
"00202D": "TAIYO CORPORATION",
"00202E": "DAYSTAR DIGITAL",
"00202F": "ZETA COMMUNICATIONS, LTD.",
"002030": "ANALOG & DIGITAL SYSTEMS",
"002031": "Tattile SRL",
"002032": "ALCATEL TAISEL",
"002033": "SYNAPSE TECHNOLOGIES, INC.",
"002034": "ROTEC INDUSTRIEAUTOMATION GMBH",
"002035": "IBM Corp",
"002036": "BMC SOFTWARE",
"002037": "SEAGATE TECHNOLOGY",
"002038": "VME MICROSYSTEMS INTERNATIONAL CORPORATION",
"002039": "SCINETS",
"00203A": "DIGITAL BI0METRICS INC.",
"00203B": "WISDM LTD.",
"00203C": "EUROTIME AB",
"00203D": "Honeywell ECC",
"00203E": "LogiCan Technologies, Inc.",
"00203F": "JUKI CORPORATION",
"002040": "ARRIS Group, Inc.",
"002041": "DATA NET",
"002042": "DATAMETRICS CORP.",
"002043": "NEURON COMPANY LIMITED",
"002044": "GENITECH PTY LTD",
"002045": "ION Networks, Inc.",
"002046": "CIPRICO, INC.",
"002047": "STEINBRECHER CORP.",
"002048": "Marconi Communications",
"002049": "COMTRON, INC.",
"00204A": "PRONET GMBH",
"00204B": "AUTOCOMPUTER CO., LTD.",
"00204C": "MITRON COMPUTER PTE LTD.",
"00204D": "INOVIS GMBH",
"00204E": "NETWORK SECURITY SYSTEMS, INC.",
"00204F": "DEUTSCHE AEROSPACE AG",
"002050": "KOREA COMPUTER INC.",
"002051": "Verilink Corporation",
"002052": "RAGULA SYSTEMS",
"002053": "HUNTSVILLE MICROSYSTEMS, INC.",
"002054": "Sycamore Networks",
"002055": "ALTECH CO., LTD.",
"002056": "NEOPRODUCTS",
"002057": "TITZE DATENTECHNIK GmbH",
"002058": "ALLIED SIGNAL INC.",
"002059": "MIRO COMPUTER PRODUCTS AG",
"00205A": "COMPUTER IDENTICS",
"00205B": "Kentrox, LLC",
"00205C": "InterNet Systems of Florida, Inc.",
"00205D": "NANOMATIC OY",
"00205E": "CASTLE ROCK, INC.",
"00205F": "GAMMADATA COMPUTER GMBH",
"002060": "ALCATEL ITALIA S.p.A.",
"002061": "GarrettCom, Inc.",
"002062": "SCORPION LOGIC, LTD.",
"002063": "WIPRO INFOTECH LTD.",
"002064": "PROTEC MICROSYSTEMS, INC.",
"002065": "SUPERNET NETWORKING INC.",
"002066": "GENERAL MAGIC, INC.",
"002067": "PRIVATE",
"002068": "ISDYNE",
"002069": "ISDN SYSTEMS CORPORATION",
"00206A": "OSAKA COMPUTER CORP.",
"00206B": "KONICA MINOLTA HOLDINGS, INC.",
"00206C": "EVERGREEN TECHNOLOGY CORP.",
"00206D": "DATA RACE, INC.",
"00206E": "XACT, INC.",
"00206F": "FLOWPOINT CORPORATION",
"002070": "HYNET, LTD.",
"002071": "IBR GMBH",
"002072": "WORKLINK INNOVATIONS",
"002073": "FUSION SYSTEMS CORPORATION",
"002074": "SUNGWOON SYSTEMS",
"002075": "MOTOROLA COMMUNICATION ISRAEL",
"002076": "REUDO CORPORATION",
"002077": "KARDIOS SYSTEMS CORP.",
"002078": "RUNTOP, INC.",
"002079": "MIKRON GMBH",
"00207A": "WiSE Communications, Inc.",
"00207B": "Intel Corporation",
"00207C": "AUTEC GmbH",
"00207D": "ADVANCED COMPUTER APPLICATIONS",
"00207E": "FINECOM Co., Ltd.",
"00207F": "KYOEI SANGYO CO., LTD.",
"002080": "SYNERGY (UK) LTD.",
"002081": "TITAN ELECTRONICS",
"002082": "ONEAC CORPORATION",
"002083": "PRESTICOM INCORPORATED",
"002084": "OCE PRINTING SYSTEMS, GMBH",
"002085": "EXIDE ELECTRONICS",
"002086": "MICROTECH ELECTRONICS LIMITED",
"002087": "MEMOTEC, INC.",
"002088": "GLOBAL VILLAGE COMMUNICATION",
"002089": "T3PLUS NETWORKING, INC.",
"00208A": "SONIX COMMUNICATIONS, LTD.",
"00208B": "LAPIS TECHNOLOGIES, INC.",
"00208C": "GALAXY NETWORKS, INC.",
"00208D": "CMD TECHNOLOGY",
"00208E": "CHEVIN SOFTWARE ENG. LTD.",
"00208F": "ECI TELECOM LTD.",
"002090": "ADVANCED COMPRESSION TECHNOLOGY, INC.",
"002091": "J125, NATIONAL SECURITY AGENCY",
"002092": "CHESS ENGINEERING B.V.",
"002093": "LANDINGS TECHNOLOGY CORP.",
"002094": "CUBIX CORPORATION",
"002095": "RIVA ELECTRONICS",
"002096": "Invensys",
"002097": "APPLIED SIGNAL TECHNOLOGY",
"002098": "HECTRONIC AB",
"002099": "BON ELECTRIC CO., LTD.",
"00209A": "THE 3DO COMPANY",
"00209B": "ERSAT ELECTRONIC GMBH",
"00209C": "PRIMARY ACCESS CORP.",
"00209D": "LIPPERT AUTOMATIONSTECHNIK",
"00209E": "BROWN'S OPERATING SYSTEM SERVICES, LTD.",
"00209F": "MERCURY COMPUTER SYSTEMS, INC.",
"0020A0": "OA LABORATORY CO., LTD.",
"0020A1": "DOVATRON",
"0020A2": "GALCOM NETWORKING LTD.",
"0020A3": "Harmonic, Inc",
"0020A4": "MULTIPOINT NETWORKS",
"0020A5": "API ENGINEERING",
"0020A6": "Proxim Wireless",
"0020A7": "PAIRGAIN TECHNOLOGIES, INC.",
"0020A8": "SAST TECHNOLOGY CORP.",
"0020A9": "WHITE HORSE INDUSTRIAL",
"0020AA": "Ericsson Television Limited",
"0020AB": "MICRO INDUSTRIES CORP.",
"0020AC": "INTERFLEX DATENSYSTEME GMBH",
"0020AD": "LINQ SYSTEMS",
"0020AE": "ORNET DATA COMMUNICATION TECH.",
"0020AF": "3COM CORPORATION",
"0020B0": "GATEWAY DEVICES, INC.",
"0020B1": "COMTECH RESEARCH INC.",
"0020B2": "GKD Gesellschaft Fur Kommunikation Und Datentechnik",
"0020B3": "Tattile SRL",
"0020B4": "TERMA ELEKTRONIK AS",
"0020B5": "YASKAWA ELECTRIC CORPORATION",
"0020B6": "AGILE NETWORKS, INC.",
"0020B7": "NAMAQUA COMPUTERWARE",
"0020B8": "PRIME OPTION, INC.",
"0020B9": "METRICOM, INC.",
"0020BA": "CENTER FOR HIGH PERFORMANCE",
"0020BB": "ZAX CORPORATION",
"0020BC": "Long Reach Networks Pty Ltd",
"0020BD": "NIOBRARA R & D CORPORATION",
"0020BE": "LAN ACCESS CORP.",
"0020BF": "AEHR TEST SYSTEMS",
"0020C0": "PULSE ELECTRONICS, INC.",
"0020C1": "SAXA, Inc.",
"0020C2": "TEXAS MEMORY SYSTEMS, INC.",
"0020C3": "COUNTER SOLUTIONS LTD.",
"0020C4": "INET,INC.",
"0020C5": "EAGLE TECHNOLOGY",
"0020C6": "NECTEC",
"0020C7": "AKAI Professional M.I. Corp.",
"0020C8": "LARSCOM INCORPORATED",
"0020C9": "VICTRON BV",
"0020CA": "DIGITAL OCEAN",
"0020CB": "PRETEC ELECTRONICS CORP.",
"0020CC": "DIGITAL SERVICES, LTD.",
"0020CD": "HYBRID NETWORKS, INC.",
"0020CE": "LOGICAL DESIGN GROUP, INC.",
"0020CF": "TEST & MEASUREMENT SYSTEMS INC",
"0020D0": "VERSALYNX CORPORATION",
"0020D1": "MICROCOMPUTER SYSTEMS (M) SDN.",
"0020D2": "RAD DATA COMMUNICATIONS, LTD.",
"0020D3": "OST (OUEST STANDARD TELEMATIQU",
"0020D4": "CABLETRON - ZEITTNET INC.",
"0020D5": "VIPA GMBH",
"0020D6": "BREEZECOM",
"0020D7": "JAPAN MINICOMPUTER SYSTEMS CO., Ltd.",
"0020D8": "Nortel Networks",
"0020D9": "PANASONIC TECHNOLOGIES, INC./MIECO-US",
"0020DA": "Alcatel North America ESD",
"0020DB": "XNET TECHNOLOGY, INC.",
"0020DC": "DENSITRON TAIWAN LTD.",
"0020DD": "Cybertec Pty Ltd",
"0020DE": "JAPAN DIGITAL LABORAT'Y CO.LTD",
"0020DF": "KYOSAN ELECTRIC MFG. CO., LTD.",
"0020E0": "Actiontec Electronics, Inc.",
"0020E1": "ALAMAR ELECTRONICS",
"0020E2": "INFORMATION RESOURCE ENGINEERING",
"0020E3": "MCD KENCOM CORPORATION",
"0020E4": "HSING TECH ENTERPRISE CO., LTD",
"0020E5": "APEX DATA, INC.",
"0020E6": "LIDKOPING MACHINE TOOLS AB",
"0020E7": "B&W NUCLEAR SERVICE COMPANY",
"0020E8": "DATATREK CORPORATION",
"0020E9": "DANTEL",
"0020EA": "EFFICIENT NETWORKS, INC.",
"0020EB": "CINCINNATI MICROWAVE, INC.",
"0020EC": "TECHWARE SYSTEMS CORP.",
"0020ED": "GIGA-BYTE TECHNOLOGY CO., LTD.",
"0020EE": "GTECH CORPORATION",
"0020EF": "USC CORPORATION",
"0020F0": "UNIVERSAL MICROELECTRONICS CO.",
"0020F1": "ALTOS INDIA LIMITED",
"0020F2": "Oracle Corporation",
"0020F3": "RAYNET CORPORATION",
"0020F4": "SPECTRIX CORPORATION",
"0020F5": "PANDATEL AG",
"0020F6": "NET TEK AND KARLNET, INC.",
"0020F7": "CYBERDATA CORPORATION",
"0020F8": "CARRERA COMPUTERS, INC.",
"0020F9": "PARALINK NETWORKS, INC.",
"0020FA": "GDE SYSTEMS, INC.",
"0020FB": "OCTEL COMMUNICATIONS CORP.",
"0020FC": "MATROX",
"0020FD": "ITV TECHNOLOGIES, INC.",
"0020FE": "TOPWARE INC. / GRAND COMPUTER",
"0020FF": "SYMMETRICAL TECHNOLOGIES",
"002100": "GemTek Technology Co., Ltd.",
"002101": "Aplicaciones Electronicas Quasar (AEQ)",
"002102": "UpdateLogic Inc.",
"002103": "GHI Electronics, LLC",
"002104": "Gigaset Communications GmbH",
"002105": "Alcatel-Lucent",
"002106": "RIM Testing Services",
"002107": "Seowonintech Co Ltd.",
"002108": "Nokia Danmark A/S",
"002109": "Nokia Danmark A/S",
"00210A": "byd:sign Corporation",
"00210B": "GEMINI TRAZE RFID PVT. LTD.",
"00210C": "Cymtec Systems, Inc.",
"00210D": "SAMSIN INNOTEC",
"00210E": "Orpak Systems L.T.D.",
"00210F": "Cernium Corp",
"002110": "Clearbox Systems",
"002111": "Uniphone Inc.",
"002112": "WISCOM SYSTEM CO.,LTD",
"002113": "Padtec S/A",
"002114": "Hylab Technology Inc.",
"002115": "PHYWE Systeme GmbH & Co. KG",
"002116": "Transcon Electronic Systems, spol. s r. o.",
"002117": "Tellord",
"002118": "Athena Tech, Inc.",
"002119": "Samsung Electro-Mechanics",
"00211A": "LInTech Corporation",
"00211B": "CISCO SYSTEMS, INC.",
"00211C": "CISCO SYSTEMS, INC.",
"00211D": "Dataline AB",
"00211E": "ARRIS Group, Inc.",
"00211F": "SHINSUNG DELTATECH CO.,LTD.",
"002120": "Sequel Technologies",
"002121": "VRmagic GmbH",
"002122": "Chip-pro Ltd.",
"002123": "Aerosat Avionics",
"002124": "Optos Plc",
"002125": "KUK JE TONG SHIN Co.,LTD",
"002126": "Shenzhen Torch Equipment Co., Ltd.",
"002127": "TP-LINK Technology Co., Ltd.",
"002128": "Oracle Corporation",
"002129": "Cisco-Linksys, LLC",
"00212A": "Audiovox Corporation",
"00212B": "MSA Auer",
"00212C": "SemIndia System Private Limited",
"00212D": "SCIMOLEX CORPORATION",
"00212E": "dresden-elektronik",
"00212F": "Phoebe Micro Inc.",
"002130": "Keico Hightech Inc.",
"002131": "Blynke Inc.",
"002132": "Masterclock, Inc.",
"002133": "Building B, Inc",
"002134": "Brandywine Communications",
"002135": "ALCATEL-LUCENT",
"002136": "ARRIS Group, Inc.",
"002137": "Bay Controls, LLC",
"002138": "Cepheid",
"002139": "Escherlogic Inc.",
"00213A": "Winchester Systems Inc.",
"00213B": "Berkshire Products, Inc",
"00213C": "AliphCom",
"00213D": "Cermetek Microelectronics, Inc.",
"00213E": "TomTom",
"00213F": "A-Team Technology Ltd.",
"002140": "EN Technologies Inc.",
"002141": "RADLIVE",
"002142": "Advanced Control Systems doo",
"002143": "ARRIS Group, Inc.",
"002144": "SS Telecoms",
"002145": "Semptian Technologies Ltd.",
"002146": "Sanmina-SCI",
"002147": "Nintendo Co., Ltd.",
"002148": "Kaco Solar Korea",
"002149": "China Daheng Group ,Inc.",
"00214A": "Pixel Velocity, Inc",
"00214B": "Shenzhen HAMP Science & Technology Co.,Ltd",
"00214C": "SAMSUNG ELECTRONICS CO., LTD.",
"00214D": "Guangzhou Skytone Transmission Technology Com. Ltd.",
"00214E": "GS Yuasa Power Supply Ltd.",
"00214F": "ALPS Electric Co., Ltd",
"002150": "EYEVIEW ELECTRONICS",
"002151": "Millinet Co., Ltd.",
"002152": "General Satellite Research & Development Limited",
"002153": "SeaMicro Inc.",
"002154": "D-TACQ Solutions Ltd",
"002155": "CISCO SYSTEMS, INC.",
"002156": "CISCO SYSTEMS, INC.",
"002157": "National Datacast, Inc.",
"002158": "Style Flying Technology Co.",
"002159": "Juniper Networks",
"00215A": "Hewlett-Packard Company",
"00215B": "Inotive",
"00215C": "Intel Corporate",
"00215D": "Intel Corporate",
"00215E": "IBM Corp",
"00215F": "IHSE GmbH",
"002160": "Hidea Solutions Co. Ltd.",
"002161": "Yournet Inc.",
"002162": "Nortel",
"002163": "ASKEY COMPUTER CORP",
"002164": "Special Design Bureau for Seismic Instrumentation",
"002165": "Presstek Inc.",
"002166": "NovAtel Inc.",
"002167": "HWA JIN T&I Corp.",
"002168": "iVeia, LLC",
"002169": "Prologix, LLC.",
"00216A": "Intel Corporate",
"00216B": "Intel Corporate",
"00216C": "ODVA",
"00216D": "Soltech Co., Ltd.",
"00216E": "Function ATI (Huizhou) Telecommunications Co., Ltd.",
"00216F": "SymCom, Inc.",
"002170": "Dell Inc",
"002171": "Wesung TNC Co., Ltd.",
"002172": "Seoultek Valley",
"002173": "Ion Torrent Systems, Inc.",
"002174": "AvaLAN Wireless",
"002175": "Pacific Satellite International Ltd.",
"002176": "YMax Telecom Ltd.",
"002177": "W. L. Gore & Associates",
"002178": "Matuschek Messtechnik GmbH",
"002179": "IOGEAR, Inc.",
"00217A": "Sejin Electron, Inc.",
"00217B": "Bastec AB",
"00217C": "2Wire",
"00217D": "PYXIS S.R.L.",
"00217E": "Telit Communication s.p.a",
"00217F": "Intraco Technology Pte Ltd",
"002180": "ARRIS Group, Inc.",
"002181": "Si2 Microsystems Limited",
"002182": "SandLinks Systems, Ltd.",
"002183": "VATECH HYDRO",
"002184": "POWERSOFT SRL",
"002185": "MICRO-STAR INT'L CO.,LTD.",
"002186": "Universal Global Scientific Industrial Co., Ltd",
"002187": "Imacs GmbH",
"002188": "EMC Corporation",
"002189": "AppTech, Inc.",
"00218A": "Electronic Design and Manufacturing Company",
"00218B": "Wescon Technology, Inc.",
"00218C": "TopControl GMBH",
"00218D": "AP Router Ind. Eletronica LTDA",
"00218E": "MEKICS CO., LTD.",
"00218F": "Avantgarde Acoustic Lautsprechersysteme GmbH",
"002190": "Goliath Solutions",
"002191": "D-Link Corporation",
"002192": "Baoding Galaxy Electronic Technology Co.,Ltd",
"002193": "Videofon MV",
"002194": "Ping Communication",
"002195": "GWD Media Limited",
"002196": "Telsey S.p.A.",
"002197": "ELITEGROUP COMPUTER SYSTEM",
"002198": "Thai Radio Co, LTD",
"002199": "Vacon Plc",
"00219A": "Cambridge Visual Networks Ltd",
"00219B": "Dell Inc",
"00219C": "Honeywld Technology Corp.",
"00219D": "Adesys BV",
"00219E": "Sony Ericsson Mobile Communications",
"00219F": "SATEL OY",
"0021A0": "CISCO SYSTEMS, INC.",
"0021A1": "CISCO SYSTEMS, INC.",
"0021A2": "EKE-Electronics Ltd.",
"0021A3": "Micromint",
"0021A4": "Dbii Networks",
"0021A5": "ERLPhase Power Technologies Ltd.",
"0021A6": "Videotec Spa",
"0021A7": "Hantle System Co., Ltd.",
"0021A8": "Telephonics Corporation",
"0021A9": "Mobilink Telecom Co.,Ltd",
"0021AA": "Nokia Danmark A/S",
"0021AB": "Nokia Danmark A/S",
"0021AC": "Infrared Integrated Systems Ltd",
"0021AD": "Nordic ID Oy",
"0021AE": "ALCATEL-LUCENT FRANCE - WTD",
"0021AF": "Radio Frequency Systems",
"0021B0": "Tyco Telecommunications",
"0021B1": "DIGITAL SOLUTIONS LTD",
"0021B2": "Fiberblaze A/S",
"0021B3": "Ross Controls",
"0021B4": "APRO MEDIA CO., LTD",
"0021B5": "Galvanic Ltd",
"0021B6": "Triacta Power Technologies Inc.",
"0021B7": "Lexmark International Inc.",
"0021B8": "Inphi Corporation",
"0021B9": "Universal Devices Inc.",
"0021BA": "Texas Instruments",
"0021BB": "Riken Keiki Co., Ltd.",
"0021BC": "ZALA COMPUTER",
"0021BD": "Nintendo Co., Ltd.",
"0021BE": "Cisco, Service Provider Video Technology Group",
"0021BF": "Hitachi High-Tech Control Systems Corporation",
"0021C0": "Mobile Appliance, Inc.",
"0021C1": "ABB Oy / Medium Voltage Products",
"0021C2": "GL Communications Inc",
"0021C3": "CORNELL Communications, Inc.",
"0021C4": "Consilium AB",
"0021C5": "3DSP Corp",
"0021C6": "CSJ Global, Inc.",
"0021C7": "Russound",
"0021C8": "LOHUIS Networks",
"0021C9": "Wavecom Asia Pacific Limited",
"0021CA": "ART System Co., Ltd.",
"0021CB": "SMS TECNOLOGIA ELETRONICA LTDA",
"0021CC": "Flextronics International",
"0021CD": "LiveTV",
"0021CE": "NTC-Metrotek",
"0021CF": "The Crypto Group",
"0021D0": "Global Display Solutions Spa",
"0021D1": "Samsung Electronics Co.,Ltd",
"0021D2": "Samsung Electronics Co.,Ltd",
"0021D3": "BOCOM SECURITY(ASIA PACIFIC) LIMITED",
"0021D4": "Vollmer Werke GmbH",
"0021D5": "X2E GmbH",
"0021D6": "LXI Consortium",
"0021D7": "CISCO SYSTEMS, INC.",
"0021D8": "CISCO SYSTEMS, INC.",
"0021D9": "SEKONIC CORPORATION",
"0021DA": "Automation Products Group Inc.",
"0021DB": "Santachi Video Technology (Shenzhen) Co., Ltd.",
"0021DC": "TECNOALARM S.r.l.",
"0021DD": "Northstar Systems Corp",
"0021DE": "Firepro Wireless",
"0021DF": "Martin Christ GmbH",
"0021E0": "CommAgility Ltd",
"0021E1": "Nortel Networks",
"0021E2": "Creative Electronic GmbH",
"0021E3": "SerialTek LLC",
"0021E4": "I-WIN",
"0021E5": "Display Solution AG",
"0021E6": "Starlight Video Limited",
"0021E7": "Informatics Services Corporation",
"0021E8": "Murata Manufacturing Co., Ltd.",
"0021E9": "Apple",
"0021EA": "Bystronic Laser AG",
"0021EB": "ESP SYSTEMS, LLC",
"0021EC": "Solutronic GmbH",
"0021ED": "Telegesis",
"0021EE": "Full Spectrum Inc.",
"0021EF": "Kapsys",
"0021F0": "EW3 Technologies LLC",
"0021F1": "Tutus Data AB",
"0021F2": "EASY3CALL Technology Limited",
"0021F3": "Si14 SpA",
"0021F4": "INRange Systems, Inc",
"0021F5": "Western Engravers Supply, Inc.",
"0021F6": "Oracle Corporation",
"0021F7": "HPN Supply Chain",
"0021F8": "Enseo, Inc.",
"0021F9": "WIRECOM Technologies",
"0021FA": "A4SP Technologies Ltd.",
"0021FB": "LG Electronics",
"0021FC": "Nokia Danmark A/S",
"0021FD": "DSTA S.L.",
"0021FE": "Nokia Danmark A/S",
"0021FF": "Cyfrowy Polsat SA",
"002200": "IBM Corp",
"002201": "Aksys Networks Inc",
"002202": "Excito Elektronik i Sk\u00e5ne AB",
"002203": "Glensound Electronics Ltd",
"002204": "KORATEK",
"002205": "WeLink Solutions, Inc.",
"002206": "Cyberdyne Inc.",
"002207": "Inteno Broadband Technology AB",
"002208": "Certicom Corp",
"002209": "Omron Healthcare Co., Ltd",
"00220A": "OnLive, Inc",
"00220B": "National Source Coding Center",
"00220C": "CISCO SYSTEMS, INC.",
"00220D": "CISCO SYSTEMS, INC.",
"00220E": "Indigo Security Co., Ltd.",
"00220F": "MoCA (Multimedia over Coax Alliance)",
"002210": "ARRIS Group, Inc.",
"002211": "Rohati Systems",
"002212": "CAI Networks, Inc.",
"002213": "PCI CORPORATION",
"002214": "RINNAI KOREA",
"002215": "ASUSTek COMPUTER INC.",
"002216": "SHIBAURA VENDING MACHINE CORPORATION",
"002217": "Neat Electronics",
"002218": "Verivue Inc.",
"002219": "Dell Inc",
"00221A": "Audio Precision",
"00221B": "Morega Systems",
"00221C": "PRIVATE",
"00221D": "Freegene Technology LTD",
"00221E": "Media Devices Co., Ltd.",
"00221F": "eSang Technologies Co., Ltd.",
"002220": "Mitac Technology Corp",
"002221": "ITOH DENKI CO,LTD.",
"002222": "Schaffner Deutschland GmbH",
"002223": "TimeKeeping Systems, Inc.",
"002224": "Good Will Instrument Co., Ltd.",
"002225": "Thales Avionics Ltd",
"002226": "Avaak, Inc.",
"002227": "uv-electronic GmbH",
"002228": "Breeze Innovations Ltd.",
"002229": "Compumedics Ltd",
"00222A": "SoundEar A/S",
"00222B": "Nucomm, Inc.",
"00222C": "Ceton Corp",
"00222D": "SMC Networks Inc.",
"00222E": "maintech GmbH",
"00222F": "Open Grid Computing, Inc.",
"002230": "FutureLogic Inc.",
"002231": "SMT&C Co., Ltd.",
"002232": "Design Design Technology Ltd",
"002233": "ADB Broadband Italia",
"002234": "Corventis Inc.",
"002235": "Strukton Systems bv",
"002236": "VECTOR SP. Z O.O.",
"002237": "Shinhint Group",
"002238": "LOGIPLUS",
"002239": "Indiana Life Sciences Incorporated",
"00223A": "Scientific Atlanta, Cisco SPVT Group",
"00223B": "Communication Networks, LLC",
"00223C": "RATIO Entwicklungen GmbH",
"00223D": "JumpGen Systems, LLC",
"00223E": "IRTrans GmbH",
"00223F": "Netgear Inc.",
"002240": "Universal Telecom S/A",
"002241": "Apple",
"002242": "Alacron Inc.",
"002243": "AzureWave Technologies, Inc.",
"002244": "Chengdu Linkon Communications Device Co., Ltd",
"002245": "Leine & Linde AB",
"002246": "Evoc Intelligent Technology Co.,Ltd.",
"002247": "DAC ENGINEERING CO., LTD.",
"002248": "Microsoft Corporation",
"002249": "HOME MULTIENERGY SL",
"00224A": "RAYLASE AG",
"00224B": "AIRTECH TECHNOLOGIES, INC.",
"00224C": "Nintendo Co., Ltd.",
"00224D": "MITAC INTERNATIONAL CORP.",
"00224E": "SEEnergy Corp.",
"00224F": "Byzoro Networks Ltd.",
"002250": "Point Six Wireless, LLC",
"002251": "Lumasense Technologies",
"002252": "ZOLL Lifecor Corporation",
"002253": "Entorian Technologies",
"002254": "Bigelow Aerospace",
"002255": "CISCO SYSTEMS, INC.",
"002256": "CISCO SYSTEMS, INC.",
"002257": "3Com Europe Ltd",
"002258": "Taiyo Yuden Co., Ltd.",
"002259": "Guangzhou New Postcom Equipment Co.,Ltd.",
"00225A": "Garde Security AB",
"00225B": "Teradici Corporation",
"00225C": "Multimedia & Communication Technology",
"00225D": "Digicable Network India Pvt. Ltd.",
"00225E": "Uwin Technologies Co.,LTD",
"00225F": "Liteon Technology Corporation",
"002260": "AFREEY Inc.",
"002261": "Frontier Silicon Ltd",
"002262": "BEP Marine",
"002263": "Koos Technical Services, Inc.",
"002264": "Hewlett-Packard Company",
"002265": "Nokia Danmark A/S",
"002266": "Nokia Danmark A/S",
"002267": "Nortel Networks",
"002268": "Hon Hai Precision Ind. Co., Ltd.",
"002269": "Hon Hai Precision Ind. Co., Ltd.",
"00226A": "Honeywell",
"00226B": "Cisco-Linksys, LLC",
"00226C": "LinkSprite Technologies, Inc.",
"00226D": "Shenzhen GIEC Electronics Co., Ltd.",
"00226E": "Gowell Electronic Limited",
"00226F": "3onedata Technology Co. Ltd.",
"002270": "ABK North America, LLC",
"002271": "J\u00e4ger Computergesteuerte Me\u00dftechnik GmbH.",
"002272": "American Micro-Fuel Device Corp.",
"002273": "Techway",
"002274": "FamilyPhone AB",
"002275": "Belkin International Inc.",
"002276": "Triple EYE B.V.",
"002277": "NEC Australia Pty Ltd",
"002278": "Shenzhen Tongfang Multimedia Technology Co.,Ltd.",
"002279": "Nippon Conlux Co., Ltd.",
"00227A": "Telecom Design",
"00227B": "Apogee Labs, Inc.",
"00227C": "Woori SMT Co.,ltd",
"00227D": "YE DATA INC.",
"00227E": "Chengdu 30Kaitian Communication Industry Co.Ltd",
"00227F": "Ruckus Wireless",
"002280": "A2B Electronics AB",
"002281": "Daintree Networks Pty",
"002282": "8086 Consultancy",
"002283": "Juniper Networks",
"002284": "DESAY A&V SCIENCE AND TECHNOLOGY CO.,LTD",
"002285": "NOMUS COMM SYSTEMS",
"002286": "ASTRON",
"002287": "Titan Wireless LLC",
"002288": "Sagrad, Inc.",
"002289": "Optosecurity Inc.",
"00228A": "Teratronik elektronische systeme gmbh",
"00228B": "Kensington Computer Products Group",
"00228C": "Photon Europe GmbH",
"00228D": "GBS Laboratories LLC",
"00228E": "TV-NUMERIC",
"00228F": "CNRS",
"002290": "CISCO SYSTEMS, INC.",
"002291": "CISCO SYSTEMS, INC.",
"002292": "Cinetal",
"002293": "ZTE Corporation",
"002294": "Kyocera Corporation",
"002295": "SGM Technology for lighting spa",
"002296": "LinoWave Corporation",
"002297": "XMOS Semiconductor",
"002298": "Sony Ericsson Mobile Communications",
"002299": "SeaMicro Inc.",
"00229A": "Lastar, Inc.",
"00229B": "AverLogic Technologies, Inc.",
"00229C": "Verismo Networks Inc",
"00229D": "PYUNG-HWA IND.CO.,LTD",
"00229E": "Social Aid Research Co., Ltd.",
"00229F": "Sensys Traffic AB",
"0022A0": "Delphi Corporation",
"0022A1": "Huawei Symantec Technologies Co.,Ltd.",
"0022A2": "Xtramus Technologies",
"0022A3": "California Eastern Laboratories",
"0022A4": "2Wire",
"0022A5": "Texas Instruments",
"0022A6": "Sony Computer Entertainment America",
"0022A7": "Tyco Electronics AMP GmbH",
"0022A8": "Ouman Oy",
"0022A9": "LG Electronics Inc",
"0022AA": "Nintendo Co., Ltd.",
"0022AB": "Shenzhen Turbosight Technology Ltd",
"0022AC": "Hangzhou Siyuan Tech. Co., Ltd",
"0022AD": "TELESIS TECHNOLOGIES, INC.",
"0022AE": "Mattel Inc.",
"0022AF": "Safety Vision",
"0022B0": "D-Link Corporation",
"0022B1": "Elbit Systems",
"0022B2": "4RF Communications Ltd",
"0022B3": "Sei S.p.A.",
"0022B4": "ARRIS Group, Inc.",
"0022B5": "NOVITA",
"0022B6": "Superflow Technologies Group",
"0022B7": "GSS Grundig SAT-Systems GmbH",
"0022B8": "Norcott",
"0022B9": "Analogix Seminconductor, Inc",
"0022BA": "HUTH Elektronik Systeme GmbH",
"0022BB": "beyerdynamic GmbH & Co. KG",
"0022BC": "JDSU France SAS",
"0022BD": "CISCO SYSTEMS, INC.",
"0022BE": "CISCO SYSTEMS, INC.",
"0022BF": "SieAmp Group of Companies",
"0022C0": "Shenzhen Forcelink Electronic Co, Ltd",
"0022C1": "Active Storage Inc.",
"0022C2": "Proview Eletr\u00f4nica do Brasil LTDA",
"0022C3": "Zeeport Technology Inc.",
"0022C4": "epro GmbH",
"0022C5": "INFORSON Co,Ltd.",
"0022C6": "Sutus Inc",
"0022C7": "SEGGER Microcontroller GmbH & Co. KG",
"0022C8": "Applied Instruments B.V.",
"0022C9": "Lenord, Bauer & Co GmbH",
"0022CA": "Anviz Biometric Tech. Co., Ltd.",
"0022CB": "IONODES Inc.",
"0022CC": "SciLog, Inc.",
"0022CD": "Ared Technology Co., Ltd.",
"0022CE": "Cisco, Service Provider Video Technology Group",
"0022CF": "PLANEX Communications INC",
"0022D0": "Polar Electro Oy",
"0022D1": "Albrecht Jung GmbH & Co. KG",
"0022D2": "All Earth Com\u00e9rcio de Eletr\u00f4nicos LTDA.",
"0022D3": "Hub-Tech",
"0022D4": "ComWorth Co., Ltd.",
"0022D5": "Eaton Corp. Electrical Group Data Center Solutions - Pulizzi",
"0022D6": "Cypak AB",
"0022D7": "Nintendo Co., Ltd.",
"0022D8": "Shenzhen GST Security and Safety Technology Limited",
"0022D9": "Fortex Industrial Ltd.",
"0022DA": "ANATEK, LLC",
"0022DB": "Translogic Corporation",
"0022DC": "Vigil Health Solutions Inc.",
"0022DD": "Protecta Electronics Ltd",
"0022DE": "OPPO Digital, Inc.",
"0022DF": "TAMUZ Monitors",
"0022E0": "Atlantic Software Technologies S.r.L.",
"0022E1": "ZORT Labs, LLC.",
"0022E2": "WABTEC Transit Division",
"0022E3": "Amerigon",
"0022E4": "APASS TECHNOLOGY CO., LTD.",
"0022E5": "Fisher-Rosemount Systems Inc.",
"0022E6": "Intelligent Data",
"0022E7": "WPS Parking Systems",
"0022E8": "Applition Co., Ltd.",
"0022E9": "ProVision Communications",
"0022EA": "Rustelcom Inc.",
"0022EB": "Data Respons A/S",
"0022EC": "IDEALBT TECHNOLOGY CORPORATION",
"0022ED": "TSI Power Corporation",
"0022EE": "Algo Communication Products Ltd",
"0022EF": "iWDL Technologies",
"0022F0": "3 Greens Aviation Limited",
"0022F1": "PRIVATE",
"0022F2": "SunPower Corp",
"0022F3": "SHARP Corporation",
"0022F4": "AMPAK Technology, Inc.",
"0022F5": "Advanced Realtime Tracking GmbH",
"0022F6": "Syracuse Research Corporation",
"0022F7": "Conceptronic",
"0022F8": "PIMA Electronic Systems Ltd.",
"0022F9": "Pollin Electronic GmbH",
"0022FA": "Intel Corporate",
"0022FB": "Intel Corporate",
"0022FC": "Nokia Danmark A/S",
"0022FD": "Nokia Danmark A/S",
"0022FE": "Microprocessor Designs Inc",
"0022FF": "iWDL Technologies",
"002300": "Cayee Computer Ltd.",
"002301": "Witron Technology Limited",
"002302": "Cobalt Digital, Inc.",
"002303": "LITE-ON IT Corporation",
"002304": "CISCO SYSTEMS, INC.",
"002305": "CISCO SYSTEMS, INC.",
"002306": "ALPS Electric Co., Ltd",
"002307": "FUTURE INNOVATION TECH CO.,LTD",
"002308": "Arcadyan Technology Corporation",
"002309": "Janam Technologies LLC",
"00230A": "ARBURG GmbH & Co KG",
"00230B": "ARRIS Group, Inc.",
"00230C": "CLOVER ELECTRONICS CO.,LTD.",
"00230D": "Nortel Networks",
"00230E": "Gorba AG",
"00230F": "Hirsch Electronics Corporation",
"002310": "LNC Technology Co., Ltd.",
"002311": "Gloscom Co., Ltd.",
"002312": "Apple",
"002313": "Qool Technologies Ltd.",
"002314": "Intel Corporate",
"002315": "Intel Corporate",
"002316": "KISAN ELECTRONICS CO",
"002317": "Lasercraft Inc",
"002318": "Toshiba",
"002319": "Sielox LLC",
"00231A": "ITF Co., Ltd.",
"00231B": "Danaher Motion - Kollmorgen",
"00231C": "Fourier Systems Ltd.",
"00231D": "Deltacom Electronics Ltd",
"00231E": "Cezzer Multimedia Technologies",
"00231F": "Guangda Electronic & Telecommunication Technology Development Co., Ltd.",
"002320": "Nicira Networks",
"002321": "Avitech International Corp",
"002322": "KISS Teknical Solutions, Inc.",
"002323": "Zylin AS",
"002324": "G-PRO COMPUTER",
"002325": "IOLAN Holding",
"002326": "Fujitsu Limited",
"002327": "Shouyo Electronics CO., LTD",
"002328": "ALCON TELECOMMUNICATIONS CO., LTD.",
"002329": "DDRdrive LLC",
"00232A": "eonas IT-Beratung und -Entwicklung GmbH",
"00232B": "IRD A/S",
"00232C": "Senticare",
"00232D": "SandForce",
"00232E": "Kedah Electronics Engineering, LLC",
"00232F": "Advanced Card Systems Ltd.",
"002330": "DIZIPIA, INC.",
"002331": "Nintendo Co., Ltd.",
"002332": "Apple",
"002333": "CISCO SYSTEMS, INC.",
"002334": "CISCO SYSTEMS, INC.",
"002335": "Linkflex Co.,Ltd",
"002336": "METEL s.r.o.",
"002337": "Global Star Solutions ULC",
"002338": "OJ-Electronics A/S",
"002339": "Samsung Electronics",
"00233A": "Samsung Electronics Co.,Ltd",
"00233B": "C-Matic Systems Ltd",
"00233C": "Alflex",
"00233D": "Novero holding B.V.",
"00233E": "Alcatel-Lucent-IPD",
"00233F": "Purechoice Inc",
"002340": "MiX Telematics",
"002341": "Siemens AB, Infrastructure & Cities, Building Technologies Division, IC BT SSP SP BA PR",
"002342": "Coffee Equipment Company",
"002343": "TEM AG",
"002344": "Objective Interface Systems, Inc.",
"002345": "Sony Ericsson Mobile Communications",
"002346": "Vestac",
"002347": "ProCurve Networking by HP",
"002348": "SAGEM COMMUNICATION",
"002349": "Helmholtz Centre Berlin for Material and Energy",
"00234A": "PRIVATE",
"00234B": "Inyuan Technology Inc.",
"00234C": "KTC AB",
"00234D": "Hon Hai Precision Ind. Co., Ltd.",
"00234E": "Hon Hai Precision Ind. Co., Ltd.",
"00234F": "Luminous Power Technologies Pvt. Ltd.",
"002350": "LynTec",
"002351": "2Wire",
"002352": "DATASENSOR S.p.A.",
"002353": "F E T Elettronica snc",
"002354": "ASUSTek COMPUTER INC.",
"002355": "Kinco Automation(Shanghai) Ltd.",
"002356": "Packet Forensics LLC",
"002357": "Pitronot Technologies and Engineering P.T.E. Ltd.",
"002358": "SYSTEL SA",
"002359": "Benchmark Electronics ( Thailand ) Public Company Limited",
"00235A": "COMPAL INFORMATION (KUNSHAN) CO., Ltd.",
"00235B": "Gulfstream",
"00235C": "Aprius, Inc.",
"00235D": "CISCO SYSTEMS, INC.",
"00235E": "CISCO SYSTEMS, INC.",
"00235F": "Silicon Micro Sensors GmbH",
"002360": "Lookit Technology Co., Ltd",
"002361": "Unigen Corporation",
"002362": "Goldline Controls",
"002363": "Zhuhai RaySharp Technology Co., Ltd.",
"002364": "Power Instruments Pte Ltd",
"002365": "ELKA-Elektronik GmbH",
"002366": "Beijing Siasun Electronic System Co.,Ltd.",
"002367": "UniControls a.s.",
"002368": "Motorola",
"002369": "Cisco-Linksys, LLC",
"00236A": "SmartRG Inc",
"00236B": "Xembedded, Inc.",
"00236C": "Apple",
"00236D": "ResMed Ltd",
"00236E": "Burster GmbH & Co KG",
"00236F": "DAQ System",
"002370": "Snell",
"002371": "SOAM Systel",
"002372": "MORE STAR INDUSTRIAL GROUP LIMITED",
"002373": "GridIron Systems, Inc.",
"002374": "ARRIS Group, Inc.",
"002375": "ARRIS Group, Inc.",
"002376": "HTC Corporation",
"002377": "Isotek Electronics Ltd",
"002378": "GN Netcom A/S",
"002379": "Union Business Machines Co. Ltd.",
"00237A": "RIM",
"00237B": "WHDI LLC",
"00237C": "NEOTION",
"00237D": "Hewlett-Packard Company",
"00237E": "ELSTER GMBH",
"00237F": "PLANTRONICS, INC.",
"002380": "Nanoteq",
"002381": "Lengda Technology(Xiamen) Co.,Ltd.",
"002382": "Lih Rong Electronic Enterprise Co., Ltd.",
"002383": "InMage Systems Inc",
"002384": "GGH Engineering s.r.l.",
"002385": "ANTIPODE",
"002386": "Tour & Andersson AB",
"002387": "ThinkFlood, Inc.",
"002388": "V.T. Telematica S.p.a.",
"002389": "HANGZHOU H3C Technologies Co., Ltd.",
"00238A": "Ciena Corporation",
"00238B": "Quanta Computer Inc.",
"00238C": "PRIVATE",
"00238D": "Techno Design Co., Ltd.",
"00238E": "Pirelli Tyre S.p.A.",
"00238F": "NIDEC COPAL CORPORATION",
"002390": "Algolware Corporation",
"002391": "Maxian",
"002392": "Proteus Industries Inc.",
"002393": "AJINEXTEK",
"002394": "Samjeon",
"002395": "ARRIS Group, Inc.",
"002396": "ANDES TECHNOLOGY CORPORATION",
"002397": "Westell Technologies Inc.",
"002398": "Sky Control",
"002399": "VD Division, Samsung Electronics Co.",
"00239A": "EasyData Hardware GmbH",
"00239B": "Elster Solutions, LLC",
"00239C": "Juniper Networks",
"00239D": "Mapower Electronics Co., Ltd",
"00239E": "Jiangsu Lemote Technology Corporation Limited",
"00239F": "Institut f\u00fcr Pr\u00fcftechnik",
"0023A0": "Hana CNS Co., LTD.",
"0023A1": "Trend Electronics Ltd",
"0023A2": "ARRIS Group, Inc.",
"0023A3": "ARRIS Group, Inc.",
"0023A4": "New Concepts Development Corp.",
"0023A5": "SageTV, LLC",
"0023A6": "E-Mon",
"0023A7": "Redpine Signals, Inc.",
"0023A8": "Marshall Electronics",
"0023A9": "Beijing Detianquan Electromechanical Equipment Co., Ltd",
"0023AA": "HFR, Inc.",
"0023AB": "CISCO SYSTEMS, INC.",
"0023AC": "CISCO SYSTEMS, INC.",
"0023AD": "Xmark Corporation",
"0023AE": "Dell Inc.",
"0023AF": "ARRIS Group, Inc.",
"0023B0": "COMXION Technology Inc.",
"0023B1": "Longcheer Technology (Singapore) Pte Ltd",
"0023B2": "Intelligent Mechatronic Systems Inc",
"0023B3": "Lyyn AB",
"0023B4": "Nokia Danmark A/S",
"0023B5": "ORTANA LTD",
"0023B6": "SECURITE COMMUNICATIONS / HONEYWELL",
"0023B7": "Q-Light Co., Ltd.",
"0023B8": "Sichuan Jiuzhou Electronic Technology Co.,Ltd",
"0023B9": "EADS Deutschland GmbH",
"0023BA": "Chroma",
"0023BB": "Schmitt Industries",
"0023BC": "EQ-SYS GmbH",
"0023BD": "Digital Ally, Inc.",
"0023BE": "Cisco SPVTG",
"0023BF": "Mainpine, Inc.",
"0023C0": "Broadway Networks",
"0023C1": "Securitas Direct AB",
"0023C2": "SAMSUNG Electronics. Co. LTD",
"0023C3": "LogMeIn, Inc.",
"0023C4": "Lux Lumen",
"0023C5": "Radiation Safety and Control Services Inc",
"0023C6": "SMC Corporation",
"0023C7": "AVSystem",
"0023C8": "TEAM-R",
"0023C9": "Sichuan Tianyi Information Science & Technology Stock CO.,LTD",
"0023CA": "Behind The Set, LLC",
"0023CB": "Shenzhen Full-join Technology Co.,Ltd",
"0023CC": "Nintendo Co., Ltd.",
"0023CD": "TP-LINK TECHNOLOGIES CO., LTD.",
"0023CE": "KITA DENSHI CORPORATION",
"0023CF": "CUMMINS-ALLISON CORP.",
"0023D0": "Uniloc USA Inc.",
"0023D1": "TRG",
"0023D2": "Inhand Electronics, Inc.",
"0023D3": "AirLink WiFi Networking Corp.",
"0023D4": "Texas Instruments",
"0023D5": "WAREMA electronic GmbH",
"0023D6": "Samsung Electronics Co.,LTD",
"0023D7": "Samsung Electronics",
"0023D8": "Ball-It Oy",
"0023D9": "Banner Engineering",
"0023DA": "Industrial Computer Source (Deutschland)GmbH",
"0023DB": "saxnet gmbh",
"0023DC": "Benein, Inc",
"0023DD": "ELGIN S.A.",
"0023DE": "Ansync Inc.",
"0023DF": "Apple",
"0023E0": "INO Therapeutics LLC",
"0023E1": "Cavena Image Products AB",
"0023E2": "SEA Signalisation",
"0023E3": "Microtronic AG",
"0023E4": "IPnect co. ltd.",
"0023E5": "IPaXiom Networks",
"0023E6": "Pirkus, Inc.",
"0023E7": "Hinke A/S",
"0023E8": "Demco Corp.",
"0023E9": "F5 Networks, Inc.",
"0023EA": "CISCO SYSTEMS, INC.",
"0023EB": "CISCO SYSTEMS, INC.",
"0023EC": "Algorithmix GmbH",
"0023ED": "ARRIS Group, Inc.",
"0023EE": "ARRIS Group, Inc.",
"0023EF": "Zuend Systemtechnik AG",
"0023F0": "Shanghai Jinghan Weighing Apparatus Co. Ltd.",
"0023F1": "Sony Ericsson Mobile Communications",
"0023F2": "TVLogic",
"0023F3": "Glocom, Inc.",
"0023F4": "Masternaut",
"0023F5": "WILO SE",
"0023F6": "Softwell Technology Co., Ltd.",
"0023F7": "PRIVATE",
"0023F8": "ZyXEL Communications Corporation",
"0023F9": "Double-Take Software, INC.",
"0023FA": "RG Nets, Inc.",
"0023FB": "IP Datatel, LLC.",
"0023FC": "Ultra Stereo Labs, Inc",
"0023FD": "AFT Atlas Fahrzeugtechnik GmbH",
"0023FE": "Biodevices, SA",
"0023FF": "Beijing HTTC Technology Ltd.",
"002400": "Nortel Networks",
"002401": "D-Link Corporation",
"002402": "Op-Tection GmbH",
"002403": "Nokia Danmark A/S",
"002404": "Nokia Danmark A/S",
"002405": "Dilog Nordic AB",
"002406": "Pointmobile",
"002407": "TELEM SAS",
"002408": "Pacific Biosciences",
"002409": "The Toro Company",
"00240A": "US Beverage Net",
"00240B": "Virtual Computer Inc.",
"00240C": "DELEC GmbH",
"00240D": "OnePath Networks LTD.",
"00240E": "Inventec Besta Co., Ltd.",
"00240F": "Ishii Tool & Engineering Corporation",
"002410": "NUETEQ Technology,Inc.",
"002411": "PharmaSmart LLC",
"002412": "Benign Technologies Co, Ltd.",
"002413": "CISCO SYSTEMS, INC.",
"002414": "CISCO SYSTEMS, INC.",
"002415": "Magnetic Autocontrol GmbH",
"002416": "Any Use",
"002417": "Thomson Telecom Belgium",
"002418": "Nextwave Semiconductor",
"002419": "PRIVATE",
"00241A": "Red Beetle Inc.",
"00241B": "iWOW Communications Pte Ltd",
"00241C": "FuGang Electronic (DG) Co.,Ltd",
"00241D": "GIGA-BYTE TECHNOLOGY CO.,LTD.",
"00241E": "Nintendo Co., Ltd.",
"00241F": "DCT-Delta GmbH",
"002420": "NetUP Inc.",
"002421": "MICRO-STAR INT'L CO., LTD.",
"002422": "Knapp Logistik Automation GmbH",
"002423": "AzureWave Technologies (Shanghai) Inc.",
"002424": "Axis Network Technology",
"002425": "Shenzhenshi chuangzhicheng Technology Co.,Ltd",
"002426": "NOHMI BOSAI LTD.",
"002427": "SSI COMPUTER CORP",
"002428": "EnergyICT",
"002429": "MK MASTER INC.",
"00242A": "Hittite Microwave Corporation",
"00242B": "Hon Hai Precision Ind.Co.,Ltd.",
"00242C": "Hon Hai Precision Ind. Co., Ltd.",
"00242E": "Datastrip Inc.",
"00242F": "Micron",
"002430": "Ruby Tech Corp.",
"002431": "Uni-v co.,ltd",
"002432": "Neostar Technology Co.,LTD",
"002433": "Alps Electric Co., Ltd",
"002434": "Lectrosonics, Inc.",
"002435": "WIDE CORPORATION",
"002436": "Apple",
"002437": "Motorola - BSG",
"002438": "Brocade Communications Systems, Inc",
"002439": "Digital Barriers Advanced Technologies",
"00243A": "Ludl Electronic Products",
"00243B": "CSSI (S) Pte Ltd",
"00243C": "S.A.A.A.",
"00243D": "Emerson Appliance Motors and Controls",
"00243F": "Storwize, Inc.",
"002440": "Halo Monitoring, Inc.",
"002441": "Wanzl Metallwarenfabrik GmbH",
"002442": "Axona Limited",
"002443": "Nortel Networks",
"002444": "Nintendo Co., Ltd.",
"002445": "CommScope Canada Inc.",
"002446": "MMB Research Inc.",
"002447": "Kaztek Systems",
"002448": "SpiderCloud Wireless, Inc",
"002449": "Shen Zhen Lite Star Electronics Technology Co., Ltd",
"00244A": "Voyant International",
"00244B": "PERCEPTRON INC",
"00244C": "Solartron Metrology Ltd",
"00244D": "Hokkaido Electronics Corporation",
"00244E": "RadChips, Inc.",
"00244F": "Asantron Technologies Ltd.",
"002450": "CISCO SYSTEMS, INC.",
"002451": "CISCO SYSTEMS, INC.",
"002452": "Silicon Software GmbH",
"002453": "Initra d.o.o.",
"002454": "Samsung Electronics CO., LTD",
"002455": "MuLogic BV",
"002456": "2Wire",
"002458": "PA Bastion CC",
"002459": "ABB STOTZ-KONTAKT GmbH",
"00245A": "Nanjing Panda Electronics Company Limited",
"00245B": "RAIDON TECHNOLOGY, INC.",
"00245C": "Design-Com Technologies Pty. Ltd.",
"00245D": "Terberg besturingstechniek B.V.",
"00245E": "Hivision Co.,ltd",
"00245F": "Vine Telecom CO.,Ltd.",
"002460": "Giaval Science Development Co. Ltd.",
"002461": "Shin Wang Tech.",
"002462": "Rayzone Corporation",
"002463": "Phybridge Inc",
"002464": "Bridge Technologies Co AS",
"002465": "Elentec",
"002466": "Unitron nv",
"002467": "AOC International (Europe) GmbH",
"002468": "Sumavision Technologies Co.,Ltd",
"002469": "Smart Doorphones",
"00246A": "Solid Year Co., Ltd.",
"00246B": "Covia, Inc.",
"00246C": "ARUBA NETWORKS, INC.",
"00246D": "Weinzierl Engineering GmbH",
"00246E": "Phihong USA Corp.",
"00246F": "Onda Communication spa",
"002470": "AUROTECH ultrasound AS.",
"002471": "Fusion MultiSystems dba Fusion-io",
"002472": "ReDriven Power Inc.",
"002473": "3Com Europe Ltd",
"002474": "Autronica Fire And Securirty",
"002475": "Compass System(Embedded Dept.)",
"002476": "TAP.tv",
"002477": "Tibbo Technology",
"002478": "Mag Tech Electronics Co Limited",
"002479": "Optec Displays, Inc.",
"00247A": "FU YI CHENG Technology Co., Ltd.",
"00247B": "Actiontec Electronics, Inc",
"00247C": "Nokia Danmark A/S",
"00247D": "Nokia Danmark A/S",
"00247E": "Universal Global Scientific Industrial Co., Ltd",
"00247F": "Nortel Networks",
"002480": "Meteocontrol GmbH",
"002481": "Hewlett-Packard Company",
"002482": "Ruckus Wireless",
"002483": "LG Electronics",
"002484": "Bang and Olufsen Medicom a/s",
"002485": "ConteXtream Ltd",
"002486": "DesignArt Networks",
"002487": "Blackboard Inc.",
"002488": "Centre For Development Of Telematics",
"002489": "Vodafone Omnitel N.V.",
"00248A": "Kaga Electronics Co., Ltd.",
"00248B": "HYBUS CO., LTD.",
"00248C": "ASUSTek COMPUTER INC.",
"00248D": "Sony Computer Entertainment Inc.",
"00248E": "Infoware ZRt.",
"00248F": "DO-MONIX",
"002490": "Samsung Electronics Co.,LTD",
"002491": "Samsung Electronics",
"002492": "Motorola, Broadband Solutions Group",
"002493": "ARRIS Group, Inc.",
"002494": "Shenzhen Baoxin Tech CO., Ltd.",
"002495": "ARRIS Group, Inc.",
"002496": "Ginzinger electronic systems",
"002497": "CISCO SYSTEMS, INC.",
"002498": "CISCO SYSTEMS, INC.",
"002499": "Aquila Technologies",
"00249A": "Beijing Zhongchuang Telecommunication Test Co., Ltd.",
"00249B": "Action Star Enterprise Co., Ltd.",
"00249C": "Bimeng Comunication System Co. Ltd",
"00249D": "NES Technology Inc.",
"00249E": "ADC-Elektronik GmbH",
"00249F": "RIM Testing Services",
"0024A0": "ARRIS Group, Inc.",
"0024A1": "ARRIS Group, Inc.",
"0024A2": "Hong Kong Middleware Technology Limited",
"0024A3": "Sonim Technologies Inc",
"0024A4": "Siklu Communication",
"0024A5": "Buffalo Inc.",
"0024A6": "TELESTAR DIGITAL GmbH",
"0024A7": "Advanced Video Communications Inc.",
"0024A8": "ProCurve Networking by HP",
"0024A9": "Ag Leader Technology",
"0024AA": "Dycor Technologies Ltd.",
"0024AB": "A7 Engineering, Inc.",
"0024AC": "Hangzhou DPtech Technologies Co., Ltd.",
"0024AD": "Adolf Thies Gmbh & Co. KG",
"0024AE": "Morpho",
"0024AF": "EchoStar Technologies",
"0024B0": "ESAB AB",
"0024B1": "Coulomb Technologies",
"0024B2": "Netgear",
"0024B3": "Graf-Syteco GmbH & Co. KG",
"0024B4": "ESCATRONIC GmbH",
"0024B5": "Nortel Networks",
"0024B6": "Seagate Technology",
"0024B7": "GridPoint, Inc.",
"0024B8": "free alliance sdn bhd",
"0024B9": "Wuhan Higheasy Electronic Technology Development Co.Ltd",
"0024BA": "Texas Instruments",
"0024BB": "CENTRAL Corporation",
"0024BC": "HuRob Co.,Ltd",
"0024BD": "Hainzl Industriesysteme GmbH",
"0024BE": "Sony Corporation",
"0024BF": "CIAT",
"0024C0": "NTI COMODO INC",
"0024C1": "ARRIS Group, Inc.",
"0024C2": "Asumo Co.,Ltd.",
"0024C3": "CISCO SYSTEMS, INC.",
"0024C4": "CISCO SYSTEMS, INC.",
"0024C5": "Meridian Audio Limited",
"0024C6": "Hager Electro SAS",
"0024C7": "Mobilarm Ltd",
"0024C8": "Broadband Solutions Group",
"0024C9": "Broadband Solutions Group",
"0024CA": "Tobii Technology AB",
"0024CB": "Autonet Mobile",
"0024CC": "Fascinations Toys and Gifts, Inc.",
"0024CD": "Willow Garage, Inc.",
"0024CE": "Exeltech Inc",
"0024CF": "Inscape Data Corporation",
"0024D0": "Shenzhen SOGOOD Industry CO.,LTD.",
"0024D1": "Thomson Inc.",
"0024D2": "Askey Computer",
"0024D3": "QUALICA Inc.",
"0024D4": "FREEBOX SA",
"0024D5": "Winward Industrial Limited",
"0024D6": "Intel Corporate",
"0024D7": "Intel Corporate",
"0024D8": "IlSung Precision",
"0024D9": "BICOM, Inc.",
"0024DA": "Innovar Systems Limited",
"0024DB": "Alcohol Monitoring Systems",
"0024DC": "Juniper Networks",
"0024DD": "Centrak, Inc.",
"0024DE": "GLOBAL Technology Inc.",
"0024DF": "Digitalbox Europe GmbH",
"0024E0": "DS Tech, LLC",
"0024E1": "Convey Computer Corp.",
"0024E2": "HASEGAWA ELECTRIC CO.,LTD.",
"0024E3": "CAO Group",
"0024E4": "Withings",
"0024E5": "Seer Technology, Inc",
"0024E6": "In Motion Technology Inc.",
"0024E7": "Plaster Networks",
"0024E8": "Dell Inc.",
"0024E9": "Samsung Electronics Co., Ltd., Storage System Division",
"0024EA": "iris-GmbH infrared & intelligent sensors",
"0024EB": "ClearPath Networks, Inc.",
"0024EC": "United Information Technology Co.,Ltd.",
"0024ED": "YT Elec. Co,.Ltd.",
"0024EE": "Wynmax Inc.",
"0024EF": "Sony Ericsson Mobile Communications",
"0024F0": "Seanodes",
"0024F1": "Shenzhen Fanhai Sanjiang Electronics Co., Ltd.",
"0024F2": "Uniphone Telecommunication Co., Ltd.",
"0024F3": "Nintendo Co., Ltd.",
"0024F4": "Kaminario Technologies Ltd.",
"0024F5": "NDS Surgical Imaging",
"0024F6": "MIYOSHI ELECTRONICS CORPORATION",
"0024F7": "CISCO SYSTEMS, INC.",
"0024F8": "Technical Solutions Company Ltd.",
"0024F9": "CISCO SYSTEMS, INC.",
"0024FA": "Hilger u. Kern GMBH",
"0024FB": "PRIVATE",
"0024FC": "QuoPin Co., Ltd.",
"0024FD": "Accedian Networks Inc",
"0024FE": "AVM GmbH",
"0024FF": "QLogic Corporation",
"002500": "Apple",
"002501": "JSC \"Supertel\"",
"002502": "NaturalPoint",
"002503": "IBM Corp",
"002504": "Valiant Communications Limited",
"002505": "eks Engel GmbH & Co. KG",
"002506": "A.I. ANTITACCHEGGIO ITALIA SRL",
"002507": "ASTAK Inc.",
"002508": "Maquet Cardiopulmonary AG",
"002509": "SHARETRONIC Group LTD",
"00250A": "Security Expert Co. Ltd",
"00250B": "CENTROFACTOR INC",
"00250C": "Enertrac",
"00250D": "GZT Telkom-Telmor sp. z o.o.",
"00250E": "gt german telematics gmbh",
"00250F": "On-Ramp Wireless, Inc.",
"002510": "Pico-Tesla Magnetic Therapies",
"002511": "ELITEGROUP COMPUTER SYSTEM CO., LTD.",
"002512": "ZTE Corporation",
"002513": "CXP DIGITAL BV",
"002514": "PC Worth Int'l Co., Ltd.",
"002515": "SFR",
"002516": "Integrated Design Tools, Inc.",
"002517": "Venntis, LLC",
"002518": "Power PLUS Communications AG",
"002519": "Viaas Inc",
"00251A": "Psiber Data Systems Inc.",
"00251B": "Philips CareServant",
"00251C": "EDT",
"00251D": "DSA Encore, LLC",
"00251E": "ROTEL TECHNOLOGIES",
"00251F": "ZYNUS VISION INC.",
"002520": "SMA Railway Technology GmbH",
"002521": "Logitek Electronic Systems, Inc.",
"002522": "ASRock Incorporation",
"002523": "OCP Inc.",
"002524": "Lightcomm Technology Co., Ltd",
"002525": "CTERA Networks Ltd.",
"002526": "Genuine Technologies Co., Ltd.",
"002527": "Bitrode Corp.",
"002528": "Daido Signal Co., Ltd.",
"002529": "COMELIT GROUP S.P.A",
"00252A": "Chengdu GeeYa Technology Co.,LTD",
"00252B": "Stirling Energy Systems",
"00252C": "Entourage Systems, Inc.",
"00252D": "Kiryung Electronics",
"00252E": "Cisco SPVTG",
"00252F": "Energy, Inc.",
"002530": "Aetas Systems Inc.",
"002531": "Cloud Engines, Inc.",
"002532": "Digital Recorders",
"002533": "WITTENSTEIN AG",
"002535": "Minimax GmbH & Co KG",
"002536": "Oki Electric Industry Co., Ltd.",
"002537": "Runcom Technologies Ltd.",
"002538": "Samsung Electronics Co., Ltd., Memory Division",
"002539": "IfTA GmbH",
"00253A": "CEVA, Ltd.",
"00253B": "din Dietmar Nocker Facilitymanagement GmbH",
"00253C": "2Wire",
"00253D": "DRS Consolidated Controls",
"00253E": "Sensus Metering Systems",
"002540": "Quasar Technologies, Inc.",
"002541": "Maquet Critical Care AB",
"002542": "Pittasoft",
"002543": "MONEYTECH",
"002544": "LoJack Corporation",
"002545": "CISCO SYSTEMS, INC.",
"002546": "CISCO SYSTEMS, INC.",
"002547": "Nokia Danmark A/S",
"002548": "Nokia Danmark A/S",
"002549": "Jeorich Tech. Co.,Ltd.",
"00254A": "RingCube Technologies, Inc.",
"00254B": "Apple",
"00254C": "Videon Central, Inc.",
"00254D": "Singapore Technologies Electronics Limited",
"00254E": "Vertex Wireless Co., Ltd.",
"00254F": "ELETTROLAB Srl",
"002550": "Riverbed Technology",
"002551": "SE-Elektronic GmbH",
"002552": "VXI CORPORATION",
"002553": "Pirelli Tyre S.p.A.",
"002554": "Pixel8 Networks",
"002555": "Visonic Technologies 1993 Ltd",
"002556": "Hon Hai Precision Ind. Co., Ltd.",
"002557": "Research In Motion",
"002558": "MPEDIA",
"002559": "Syphan Technologies Ltd",
"00255A": "Tantalus Systems Corp.",
"00255B": "CoachComm, LLC",
"00255C": "NEC Corporation",
"00255D": "Morningstar Corporation",
"00255E": "Shanghai Dare Technologies Co.,Ltd.",
"00255F": "SenTec AG",
"002560": "Ibridge Networks & Communications Ltd.",
"002561": "ProCurve Networking by HP",
"002562": "interbro Co. Ltd.",
"002563": "Luxtera Inc",
"002564": "Dell Inc.",
"002565": "Vizimax Inc.",
"002566": "Samsung Electronics Co.,Ltd",
"002567": "Samsung Electronics",
"002568": "Shenzhen Huawei Communication Technologies Co., Ltd",
"002569": "SAGEM COMMUNICATION",
"00256A": "inIT - Institut Industrial IT",
"00256B": "ATENIX E.E. s.r.l.",
"00256C": "\"Azimut\" Production Association JSC",
"00256D": "Broadband Forum",
"00256E": "Van Breda B.V.",
"00256F": "Dantherm Power",
"002570": "Eastern Communications Company Limited",
"002571": "Zhejiang Tianle Digital Electric Co.,Ltd",
"002572": "Nemo-Q International AB",
"002573": "ST Electronics (Info-Security) Pte Ltd",
"002574": "KUNIMI MEDIA DEVICE Co., Ltd.",
"002575": "FiberPlex Technologies, LLC",
"002576": "NELI TECHNOLOGIES",
"002577": "D-BOX Technologies",
"002578": "JSC \"Concern \"Sozvezdie\"",
"002579": "J & F Labs",
"00257A": "CAMCO Produktions- und Vertriebs-GmbH f\u00fcr Beschallungs- und Beleuchtungsanlagen",
"00257B": "STJ ELECTRONICS PVT LTD",
"00257C": "Huachentel Technology Development Co., Ltd",
"00257D": "PointRed Telecom Private Ltd.",
"00257E": "NEW POS Technology Limited",
"00257F": "CallTechSolution Co.,Ltd",
"002580": "Equipson S.A.",
"002581": "x-star networks Inc.",
"002582": "Maksat Technologies (P) Ltd",
"002583": "CISCO SYSTEMS, INC.",
"002584": "CISCO SYSTEMS, INC.",
"002585": "KOKUYO S&T Co., Ltd.",
"002586": "TP-LINK Technologies Co., Ltd.",
"002587": "Vitality, Inc.",
"002588": "Genie Industries, Inc.",
"002589": "Hills Industries Limited",
"00258A": "Pole/Zero Corporation",
"00258B": "Mellanox Technologies Ltd",
"00258C": "ESUS ELEKTRONIK SAN. VE DIS. TIC. LTD. STI.",
"00258D": "Haier",
"00258E": "The Weather Channel",
"00258F": "Trident Microsystems, Inc.",
"002590": "Super Micro Computer, Inc.",
"002591": "NEXTEK, Inc.",
"002592": "Guangzhou Shirui Electronic Co., Ltd",
"002593": "DatNet Informatikai Kft.",
"002594": "Eurodesign BG LTD",
"002595": "Northwest Signal Supply, Inc",
"002596": "GIGAVISION srl",
"002597": "Kalki Communication Technologies",
"002598": "Zhong Shan City Litai Electronic Industrial Co. Ltd",
"002599": "Hedon e.d. B.V.",
"00259A": "CEStronics GmbH",
"00259B": "Beijing PKUNITY Microsystems Technology Co., Ltd",
"00259C": "Cisco-Linksys, LLC",
"00259D": "PRIVATE",
"00259E": "Huawei Technologies Co., Ltd.",
"00259F": "TechnoDigital Technologies GmbH",
"0025A0": "Nintendo Co., Ltd.",
"0025A1": "Enalasys",
"0025A2": "Alta Definicion LINCEO S.L.",
"0025A3": "Trimax Wireless, Inc.",
"0025A4": "EuroDesign embedded technologies GmbH",
"0025A5": "Walnut Media Network",
"0025A6": "Central Network Solution Co., Ltd.",
"0025A7": "Comverge, Inc.",
"0025A8": "Kontron (BeiJing) Technology Co.,Ltd",
"0025A9": "Shanghai Embedway Information Technologies Co.,Ltd",
"0025AA": "Beijing Soul Technology Co.,Ltd.",
"0025AB": "AIO LCD PC BU / TPV",
"0025AC": "I-Tech corporation",
"0025AD": "Manufacturing Resources International",
"0025AE": "Microsoft Corporation",
"0025AF": "COMFILE Technology",
"0025B0": "Schmartz Inc",
"0025B1": "Maya-Creation Corporation",
"0025B2": "MBDA Deutschland GmbH",
"0025B3": "Hewlett-Packard Company",
"0025B4": "CISCO SYSTEMS, INC.",
"0025B5": "CISCO SYSTEMS, INC.",
"0025B6": "Telecom FM",
"0025B7": "Costar electronics, inc.,",
"0025B8": "Agile Communications, Inc.",
"0025B9": "Cypress Solutions Inc",
"0025BA": "Alcatel-Lucent IPD",
"0025BB": "INNERINT Co., Ltd.",
"0025BC": "Apple",
"0025BD": "Italdata Ingegneria dell'Idea S.p.A.",
"0025BE": "Tektrap Systems Inc.",
"0025BF": "Wireless Cables Inc.",
"0025C0": "ZillionTV Corporation",
"0025C1": "Nawoo Korea Corp.",
"0025C2": "RingBell Co.,Ltd.",
"0025C3": "Nortel Networks",
"0025C4": "Ruckus Wireless",
"0025C5": "Star Link Communication Pvt. Ltd.",
"0025C6": "kasercorp, ltd",
"0025C7": "altek Corporation",
"0025C8": "S-Access GmbH",
"0025C9": "SHENZHEN HUAPU DIGITAL CO., LTD",
"0025CA": "LS Research, LLC",
"0025CB": "Reiner SCT",
"0025CC": "Mobile Communications Korea Incorporated",
"0025CD": "Skylane Optics",
"0025CE": "InnerSpace",
"0025CF": "Nokia Danmark A/S",
"0025D0": "Nokia Danmark A/S",
"0025D1": "Eastern Asia Technology Limited",
"0025D2": "InpegVision Co., Ltd",
"0025D3": "AzureWave Technologies, Inc",
"0025D4": "Fortress Technologies",
"0025D5": "Robonica (Pty) Ltd",
"0025D6": "The Kroger Co.",
"0025D7": "CEDO",
"0025D8": "KOREA MAINTENANCE",
"0025D9": "DataFab Systems Inc.",
"0025DA": "Secura Key",
"0025DB": "ATI Electronics(Shenzhen) Co., LTD",
"0025DC": "Sumitomo Electric Networks, Inc",
"0025DD": "SUNNYTEK INFORMATION CO., LTD.",
"0025DE": "Probits Co., LTD.",
"0025DF": "PRIVATE",
"0025E0": "CeedTec Sdn Bhd",
"0025E1": "SHANGHAI SEEYOO ELECTRONIC & TECHNOLOGY CO., LTD",
"0025E2": "Everspring Industry Co., Ltd.",
"0025E3": "Hanshinit Inc.",
"0025E4": "OMNI-WiFi, LLC",
"0025E5": "LG Electronics Inc",
"0025E6": "Belgian Monitoring Systems bvba",
"0025E7": "Sony Ericsson Mobile Communications",
"0025E8": "Idaho Technology",
"0025E9": "i-mate Development, Inc.",
"0025EA": "Iphion BV",
"0025EB": "Reutech Radar Systems (PTY) Ltd",
"0025EC": "Humanware",
"0025ED": "NuVo Technologies LLC",
"0025EE": "Avtex Ltd",
"0025EF": "I-TEC Co., Ltd.",
"0025F0": "Suga Electronics Limited",
"0025F1": "ARRIS Group, Inc.",
"0025F2": "ARRIS Group, Inc.",
"0025F3": "Nordwestdeutsche Z\u00e4hlerrevision",
"0025F4": "KoCo Connector AG",
"0025F5": "DVS Korea, Co., Ltd",
"0025F6": "netTALK.com, Inc.",
"0025F7": "Ansaldo STS USA",
"0025F9": "GMK electronic design GmbH",
"0025FA": "J&M Analytik AG",
"0025FB": "Tunstall Healthcare A/S",
"0025FC": "ENDA ENDUSTRIYEL ELEKTRONIK LTD. STI.",
"0025FD": "OBR Centrum Techniki Morskiej S.A.",
"0025FE": "Pilot Electronics Corporation",
"0025FF": "CreNova Multimedia Co., Ltd",
"002600": "TEAC Australia Pty Ltd.",
"002601": "Cutera Inc",
"002602": "SMART Temps LLC",
"002603": "Shenzhen Wistar Technology Co., Ltd",
"002604": "Audio Processing Technology Ltd",
"002605": "CC Systems AB",
"002606": "RAUMFELD GmbH",
"002607": "Enabling Technology Pty Ltd",
"002608": "Apple",
"002609": "Phyllis Co., Ltd.",
"00260A": "CISCO SYSTEMS, INC.",
"00260B": "CISCO SYSTEMS, INC.",
"00260C": "Dataram",
"00260D": "Mercury Systems, Inc.",
"00260E": "Ablaze Systems, LLC",
"00260F": "Linn Products Ltd",
"002610": "Apacewave Technologies",
"002611": "Licera AB",
"002612": "Space Exploration Technologies",
"002613": "Engel Axil S.L.",
"002614": "KTNF",
"002615": "Teracom Limited",
"002616": "Rosemount Inc.",
"002617": "OEM Worldwide",
"002618": "ASUSTek COMPUTER INC.",
"002619": "FRC",
"00261A": "Femtocomm System Technology Corp.",
"00261B": "LAUREL BANK MACHINES CO., LTD.",
"00261C": "NEOVIA INC.",
"00261D": "COP SECURITY SYSTEM CORP.",
"00261E": "QINGBANG ELEC(SZ) CO., LTD",
"00261F": "SAE Magnetics (H.K.) Ltd.",
"002620": "ISGUS GmbH",
"002621": "InteliCloud Technology Inc.",
"002622": "COMPAL INFORMATION (KUNSHAN) CO., LTD.",
"002623": "JRD Communication Inc",
"002624": "Thomson Inc.",
"002625": "MediaSputnik",
"002626": "Geophysical Survey Systems, Inc.",
"002627": "Truesell",
"002628": "companytec automa\u00e7\u00e3o e controle ltda.",
"002629": "Juphoon System Software Inc.",
"00262A": "Proxense, LLC",
"00262B": "Wongs Electronics Co. Ltd.",
"00262C": "IKT Advanced Technologies s.r.o.",
"00262D": "Wistron Corporation",
"00262E": "Chengdu Jiuzhou Electronic Technology Inc",
"00262F": "HAMAMATSU TOA ELECTRONICS",
"002630": "ACOREL S.A.S",
"002631": "COMMTACT LTD",
"002632": "Instrumentation Technologies d.d.",
"002633": "MIR - Medical International Research",
"002634": "Infineta Systems, Inc",
"002635": "Bluetechnix GmbH",
"002636": "ARRIS Group, Inc.",
"002637": "Samsung Electro-Mechanics",
"002638": "Xia Men Joyatech Co., Ltd.",
"002639": "T.M. Electronics, Inc.",
"00263A": "Digitec Systems",
"00263B": "Onbnetech",
"00263C": "Bachmann Technology GmbH & Co. KG",
"00263D": "MIA Corporation",
"00263E": "Trapeze Networks",
"00263F": "LIOS Technology GmbH",
"002640": "Baustem Broadband Technologies, Ltd.",
"002641": "ARRIS Group, Inc.",
"002642": "ARRIS Group, Inc.",
"002643": "Alps Electric Co., Ltd",
"002644": "Thomson Telecom Belgium",
"002645": "Circontrol S.A.",
"002646": "SHENYANG TONGFANG MULTIMEDIA TECHNOLOGY COMPANY LIMITED",
"002647": "WFE TECHNOLOGY CORP.",
"002648": "Emitech Corp.",
"00264A": "Apple",
"00264C": "Shanghai DigiVision Technology Co., Ltd.",
"00264D": "Arcadyan Technology Corporation",
"00264E": "Rail & Road Protec GmbH",
"00264F": "Kr\u00fcger &Gothe GmbH",
"002650": "2Wire",
"002651": "CISCO SYSTEMS, INC.",
"002652": "CISCO SYSTEMS, INC.",
"002653": "DaySequerra Corporation",
"002654": "3Com Corporation",
"002655": "Hewlett-Packard Company",
"002656": "Sansonic Electronics USA",
"002657": "OOO NPP EKRA",
"002658": "T-Platforms (Cyprus) Limited",
"002659": "Nintendo Co., Ltd.",
"00265A": "D-Link Corporation",
"00265B": "Hitron Technologies. Inc",
"00265C": "Hon Hai Precision Ind. Co.,Ltd.",
"00265D": "Samsung Electronics",
"00265E": "Hon Hai Precision Ind. Co.,Ltd.",
"00265F": "Samsung Electronics Co.,Ltd",
"002660": "Logiways",
"002661": "Irumtek Co., Ltd.",
"002662": "Actiontec Electronics, Inc",
"002663": "Shenzhen Huitaiwei Tech. Ltd, co.",
"002664": "Core System Japan",
"002665": "ProtectedLogic Corporation",
"002666": "EFM Networks",
"002667": "CARECOM CO.,LTD.",
"002668": "Nokia Danmark A/S",
"002669": "Nokia Danmark A/S",
"00266A": "ESSENSIUM NV",
"00266B": "SHINE UNION ENTERPRISE LIMITED",
"00266C": "Inventec",
"00266D": "MobileAccess Networks",
"00266E": "Nissho-denki Co.,LTD.",
"00266F": "Coordiwise Technology Corp.",
"002670": "Cinch Connectors",
"002671": "AUTOVISION Co., Ltd",
"002672": "AAMP of America",
"002673": "RICOH COMPANY,LTD.",
"002674": "Electronic Solutions, Inc.",
"002675": "Aztech Electronics Pte Ltd",
"002676": "COMMidt AS",
"002677": "DEIF A/S",
"002678": "Logic Instrument SA",
"002679": "Euphonic Technologies, Inc.",
"00267A": "wuhan hongxin telecommunication technologies co.,ltd",
"00267B": "GSI Helmholtzzentrum f\u00fcr Schwerionenforschung GmbH",
"00267C": "Metz-Werke GmbH & Co KG",
"00267D": "A-Max Technology Macao Commercial Offshore Company Limited",
"00267E": "Parrot SA",
"00267F": "Zenterio AB",
"002680": "Lockie Innovation Pty Ltd",
"002681": "Interspiro AB",
"002682": "Gemtek Technology Co., Ltd.",
"002683": "Ajoho Enterprise Co., Ltd.",
"002684": "KISAN SYSTEM",
"002685": "Digital Innovation",
"002686": "Quantenna Communcations, Inc.",
"002687": "Corega K.K",
"002688": "Juniper Networks",
"002689": "General Dynamics Robotic Systems",
"00268A": "Terrier SC Ltd",
"00268B": "Guangzhou Escene Computer Technology Limited",
"00268C": "StarLeaf Ltd.",
"00268D": "CellTel S.p.A.",
"00268E": "Alta Solutions, Inc.",
"00268F": "MTA SpA",
"002690": "I DO IT",
"002691": "SAGEM COMMUNICATION",
"002692": "Mitsubishi Electric Co.",
"002693": "QVidium Technologies, Inc.",
"002694": "Senscient Ltd",
"002695": "ZT Group Int'l Inc",
"002696": "NOOLIX Co., Ltd",
"002697": "Cheetah Technologies, L.P.",
"002698": "CISCO SYSTEMS, INC.",
"002699": "CISCO SYSTEMS, INC.",
"00269A": "Carina System Co., Ltd.",
"00269B": "SOKRAT Ltd.",
"00269C": "ITUS JAPAN CO. LTD",
"00269D": "M2Mnet Co., Ltd.",
"00269E": "Quanta Computer Inc",
"00269F": "PRIVATE",
"0026A0": "moblic",
"0026A1": "Megger",
"0026A2": "Instrumentation Technology Systems",
"0026A3": "FQ Ingenieria Electronica S.A.",
"0026A4": "Novus Produtos Eletronicos Ltda",
"0026A5": "MICROROBOT.CO.,LTD",
"0026A6": "TRIXELL",
"0026A7": "CONNECT SRL",
"0026A8": "DAEHAP HYPER-TECH",
"0026A9": "Strong Technologies Pty Ltd",
"0026AA": "Kenmec Mechanical Engineering Co., Ltd.",
"0026AB": "SEIKO EPSON CORPORATION",
"0026AC": "Shanghai LUSTER Teraband photonic Co., Ltd.",
"0026AD": "Arada Systems, Inc.",
"0026AE": "Wireless Measurement Ltd",
"0026AF": "Duelco A/S",
"0026B0": "Apple",
"0026B1": "Navis Auto Motive Systems, Inc.",
"0026B2": "Setrix GmbH",
"0026B3": "Thales Communications Inc",
"0026B4": "Ford Motor Company",
"0026B5": "ICOMM Tele Ltd",
"0026B6": "Askey Computer",
"0026B7": "Kingston Technology Company, Inc.",
"0026B8": "Actiontec Electronics, Inc",
"0026B9": "Dell Inc",
"0026BA": "ARRIS Group, Inc.",
"0026BB": "Apple",
"0026BC": "General Jack Technology Ltd.",
"0026BD": "JTEC Card & Communication Co., Ltd.",
"0026BE": "Schoonderbeek Elektronica Systemen B.V.",
"0026BF": "ShenZhen Temobi Science&Tech Development Co.,Ltd",
"0026C0": "EnergyHub",
"0026C1": "ARTRAY CO., LTD.",
"0026C2": "SCDI Co. LTD",
"0026C3": "Insightek Corp.",
"0026C4": "Cadmos microsystems S.r.l.",
"0026C5": "Guangdong Gosun Telecommunications Co.,Ltd",
"0026C6": "Intel Corporate",
"0026C7": "Intel Corporate",
"0026C8": "System Sensor",
"0026C9": "Proventix Systems, Inc.",
"0026CA": "CISCO SYSTEMS, INC.",
"0026CB": "CISCO SYSTEMS, INC.",
"0026CC": "Nokia Danmark A/S",
"0026CD": "PurpleComm, Inc.",
"0026CE": "Kozumi USA Corp.",
"0026CF": "DEKA R&D",
"0026D0": "Semihalf",
"0026D1": "S Squared Innovations Inc.",
"0026D2": "Pcube Systems, Inc.",
"0026D3": "Zeno Information System",
"0026D4": "IRCA SpA",
"0026D5": "Ory Solucoes em Comercio de Informatica Ltda.",
"0026D6": "Ningbo Andy Optoelectronic Co., Ltd.",
"0026D7": "KM Electornic Technology Co., Ltd.",
"0026D8": "Magic Point Inc.",
"0026D9": "Pace plc",
"0026DA": "Universal Media Corporation /Slovakia/ s.r.o.",
"0026DB": "Ionics EMS Inc.",
"0026DC": "Optical Systems Design",
"0026DD": "Fival Science & Technology Co.,Ltd.",
"0026DE": "FDI MATELEC",
"0026DF": "TaiDoc Technology Corp.",
"0026E0": "ASITEQ",
"0026E1": "Stanford University, OpenFlow Group",
"0026E2": "LG Electronics",
"0026E3": "DTI",
"0026E4": "CANAL OVERSEAS",
"0026E5": "AEG Power Solutions",
"0026E6": "Visionhitech Co., Ltd.",
"0026E7": "Shanghai ONLAN Communication Tech. Co., Ltd.",
"0026E8": "Murata Manufacturing Co., Ltd.",
"0026E9": "SP Corp",
"0026EA": "Cheerchip Electronic Technology (ShangHai) Co., Ltd.",
"0026EB": "Advanced Spectrum Technology Co., Ltd.",
"0026EC": "Legrand Home Systems, Inc",
"0026ED": "zte corporation",
"0026EE": "TKM GmbH",
"0026EF": "Technology Advancement Group, Inc.",
"0026F0": "cTrixs International GmbH.",
"0026F1": "ProCurve Networking by HP",
"0026F2": "Netgear",
"0026F3": "SMC Networks",
"0026F4": "Nesslab",
"0026F5": "XRPLUS Inc.",
"0026F6": "Military Communication Institute",
"0026F7": "Infosys Technologies Ltd.",
"0026F8": "Golden Highway Industry Development Co., Ltd.",
"0026F9": "S.E.M. srl",
"0026FA": "BandRich Inc.",
"0026FB": "AirDio Wireless, Inc.",
"0026FC": "AcSiP Technology Corp.",
"0026FD": "Interactive Intelligence",
"0026FE": "MKD Technology Inc.",
"0026FF": "Research In Motion",
"002700": "Shenzhen Siglent Technology Co., Ltd.",
"002701": "INCOstartec GmbH",
"002702": "SolarEdge Technologies",
"002703": "Testech Electronics Pte Ltd",
"002704": "Accelerated Concepts, Inc",
"002705": "Sectronic",
"002706": "YOISYS",
"002707": "Lift Complex DS, JSC",
"002708": "Nordiag ASA",
"002709": "Nintendo Co., Ltd.",
"00270A": "IEE S.A.",
"00270B": "Adura Technologies",
"00270C": "CISCO SYSTEMS, INC.",
"00270D": "CISCO SYSTEMS, INC.",
"00270E": "Intel Corporate",
"00270F": "Envisionnovation Inc",
"002710": "Intel Corporate",
"002711": "LanPro Inc",
"002712": "MaxVision LLC",
"002713": "Universal Global Scientific Industrial Co., Ltd.",
"002714": "Grainmustards, Co,ltd.",
"002715": "Rebound Telecom. Co., Ltd",
"002716": "Adachi-Syokai Co., Ltd.",
"002717": "CE Digital(Zhenjiang)Co.,Ltd",
"002718": "Suzhou NEW SEAUNION Video Technology Co.,Ltd",
"002719": "TP-LINK TECHNOLOGIES CO., LTD.",
"00271A": "Geenovo Technology Ltd.",
"00271B": "Alec Sicherheitssysteme GmbH",
"00271C": "MERCURY CORPORATION",
"00271D": "Comba Telecom Systems (China) Ltd.",
"00271E": "Xagyl Communications",
"00271F": "MIPRO Electronics Co., Ltd",
"002720": "NEW-SOL COM",
"002721": "Shenzhen Baoan Fenda Industrial Co., Ltd",
"002722": "Ubiquiti Networks",
"0027F8": "Brocade Communications Systems, Inc.",
"002A6A": "CISCO SYSTEMS, INC.",
"002AAF": "LARsys-Automation GmbH",
"002D76": "TITECH GmbH",
"003000": "ALLWELL TECHNOLOGY CORP.",
"003001": "SMP",
"003002": "Expand Networks",
"003003": "Phasys Ltd.",
"003004": "LEADTEK RESEARCH INC.",
"003005": "Fujitsu Siemens Computers",
"003006": "SUPERPOWER COMPUTER",
"003007": "OPTI, INC.",
"003008": "AVIO DIGITAL, INC.",
"003009": "Tachion Networks, Inc.",
"00300A": "AZTECH Electronics Pte Ltd",
"00300B": "mPHASE Technologies, Inc.",
"00300C": "CONGRUENCY, LTD.",
"00300D": "MMC Technology, Inc.",
"00300E": "Klotz Digital AG",
"00300F": "IMT - Information Management T",
"003010": "VISIONETICS INTERNATIONAL",
"003011": "HMS Industrial Networks",
"003012": "DIGITAL ENGINEERING LTD.",
"003013": "NEC Corporation",
"003014": "DIVIO, INC.",
"003015": "CP CLARE CORP.",
"003016": "ISHIDA CO., LTD.",
"003017": "BlueArc UK Ltd",
"003018": "Jetway Information Co., Ltd.",
"003019": "CISCO SYSTEMS, INC.",
"00301A": "SMARTBRIDGES PTE. LTD.",
"00301B": "SHUTTLE, INC.",
"00301C": "ALTVATER AIRDATA SYSTEMS",
"00301D": "SKYSTREAM, INC.",
"00301E": "3COM Europe Ltd.",
"00301F": "OPTICAL NETWORKS, INC.",
"003020": "TSI, Inc..",
"003021": "HSING TECH. ENTERPRISE CO.,LTD",
"003022": "Fong Kai Industrial Co., Ltd.",
"003023": "COGENT COMPUTER SYSTEMS, INC.",
"003024": "CISCO SYSTEMS, INC.",
"003025": "CHECKOUT COMPUTER SYSTEMS, LTD",
"003026": "HeiTel Digital Video GmbH",
"003027": "KERBANGO, INC.",
"003028": "FASE Saldatura srl",
"003029": "OPICOM",
"00302A": "SOUTHERN INFORMATION",
"00302B": "INALP NETWORKS, INC.",
"00302C": "SYLANTRO SYSTEMS CORPORATION",
"00302D": "QUANTUM BRIDGE COMMUNICATIONS",
"00302E": "Hoft & Wessel AG",
"00302F": "GE Aviation System",
"003030": "HARMONIX CORPORATION",
"003031": "LIGHTWAVE COMMUNICATIONS, INC.",
"003032": "MagicRam, Inc.",
"003033": "ORIENT TELECOM CO., LTD.",
"003034": "SET ENGINEERING",
"003035": "Corning Incorporated",
"003036": "RMP ELEKTRONIKSYSTEME GMBH",
"003037": "Packard Bell Nec Services",
"003038": "XCP, INC.",
"003039": "SOFTBOOK PRESS",
"00303A": "MAATEL",
"00303B": "PowerCom Technology",
"00303C": "ONNTO CORP.",
"00303D": "IVA CORPORATION",
"00303E": "Radcom Ltd.",
"00303F": "TurboComm Tech Inc.",
"003040": "CISCO SYSTEMS, INC.",
"003041": "SAEJIN T & M CO., LTD.",
"003042": "DeTeWe-Deutsche Telephonwerke",
"003043": "IDREAM TECHNOLOGIES, PTE. LTD.",
"003044": "CradlePoint, Inc",
"003045": "Village Networks, Inc. (VNI)",
"003046": "Controlled Electronic Manageme",
"003047": "NISSEI ELECTRIC CO., LTD.",
"003048": "Supermicro Computer, Inc.",
"003049": "BRYANT TECHNOLOGY, LTD.",
"00304A": "Fraunhofer IPMS",
"00304B": "ORBACOM SYSTEMS, INC.",
"00304C": "APPIAN COMMUNICATIONS, INC.",
"00304D": "ESI",
"00304E": "BUSTEC PRODUCTION LTD.",
"00304F": "PLANET Technology Corporation",
"003050": "Versa Technology",
"003051": "ORBIT AVIONIC & COMMUNICATION",
"003052": "ELASTIC NETWORKS",
"003053": "Basler AG",
"003054": "CASTLENET TECHNOLOGY, INC.",
"003055": "Renesas Technology America, Inc.",
"003056": "Beck IPC GmbH",
"003057": "QTelNet, Inc.",
"003058": "API MOTION",
"003059": "KONTRON COMPACT COMPUTERS AG",
"00305A": "TELGEN CORPORATION",
"00305B": "Toko Inc.",
"00305C": "SMAR Laboratories Corp.",
"00305D": "DIGITRA SYSTEMS, INC.",
"00305E": "Abelko Innovation",
"00305F": "Hasselblad",
"003060": "Powerfile, Inc.",
"003061": "MobyTEL",
"003062": "IP Video Networks Inc",
"003063": "SANTERA SYSTEMS, INC.",
"003064": "ADLINK TECHNOLOGY, INC.",
"003065": "Apple",
"003066": "RFM",
"003067": "BIOSTAR MICROTECH INT'L CORP.",
"003068": "CYBERNETICS TECH. CO., LTD.",
"003069": "IMPACCT TECHNOLOGY CORP.",
"00306A": "PENTA MEDIA CO., LTD.",
"00306B": "CMOS SYSTEMS, INC.",
"00306C": "Hitex Holding GmbH",
"00306D": "LUCENT TECHNOLOGIES",
"00306E": "HEWLETT PACKARD",
"00306F": "SEYEON TECH. CO., LTD.",
"003070": "1Net Corporation",
"003071": "CISCO SYSTEMS, INC.",
"003072": "Intellibyte Inc.",
"003073": "International Microsystems, In",
"003074": "EQUIINET LTD.",
"003075": "ADTECH",
"003076": "Akamba Corporation",
"003077": "ONPREM NETWORKS",
"003078": "CISCO SYSTEMS, INC.",
"003079": "CQOS, INC.",
"00307A": "Advanced Technology & Systems",
"00307B": "CISCO SYSTEMS, INC.",
"00307C": "ADID SA",
"00307D": "GRE AMERICA, INC.",
"00307E": "Redflex Communication Systems",
"00307F": "IRLAN LTD.",
"003080": "CISCO SYSTEMS, INC.",
"003081": "ALTOS C&C",
"003082": "TAIHAN ELECTRIC WIRE CO., LTD.",
"003083": "Ivron Systems",
"003084": "ALLIED TELESYN INTERNAIONAL",
"003085": "CISCO SYSTEMS, INC.",
"003086": "Transistor Devices, Inc.",
"003087": "VEGA GRIESHABER KG",
"003088": "Ericsson",
"003089": "Spectrapoint Wireless, LLC",
"00308A": "NICOTRA SISTEMI S.P.A",
"00308B": "Brix Networks",
"00308C": "Quantum Corporation",
"00308D": "Pinnacle Systems, Inc.",
"00308E": "CROSS MATCH TECHNOLOGIES, INC.",
"00308F": "MICRILOR, Inc.",
"003090": "CYRA TECHNOLOGIES, INC.",
"003091": "TAIWAN FIRST LINE ELEC. CORP.",
"003092": "ModuNORM GmbH",
"003093": "Sonnet Technologies, Inc",
"003094": "CISCO SYSTEMS, INC.",
"003095": "Procomp Informatics, Ltd.",
"003096": "CISCO SYSTEMS, INC.",
"003097": "AB Regin",
"003098": "Global Converging Technologies",
"003099": "BOENIG UND KALLENBACH OHG",
"00309A": "ASTRO TERRA CORP.",
"00309B": "Smartware",
"00309C": "Timing Applications, Inc.",
"00309D": "Nimble Microsystems, Inc.",
"00309E": "WORKBIT CORPORATION.",
"00309F": "AMBER NETWORKS",
"0030A0": "TYCO SUBMARINE SYSTEMS, LTD.",
"0030A1": "WEBGATE Inc.",
"0030A2": "Lightner Engineering",
"0030A3": "CISCO SYSTEMS, INC.",
"0030A4": "Woodwind Communications System",
"0030A5": "ACTIVE POWER",
"0030A6": "VIANET TECHNOLOGIES, LTD.",
"0030A7": "SCHWEITZER ENGINEERING",
"0030A8": "OL'E COMMUNICATIONS, INC.",
"0030A9": "Netiverse, Inc.",
"0030AA": "AXUS MICROSYSTEMS, INC.",
"0030AB": "DELTA NETWORKS, INC.",
"0030AC": "Systeme Lauer GmbH & Co., Ltd.",
"0030AD": "SHANGHAI COMMUNICATION",
"0030AE": "Times N System, Inc.",
"0030AF": "Honeywell GmbH",
"0030B0": "Convergenet Technologies",
"0030B1": "TrunkNet",
"0030B2": "L-3 Sonoma EO",
"0030B3": "San Valley Systems, Inc.",
"0030B4": "INTERSIL CORP.",
"0030B5": "Tadiran Microwave Networks",
"0030B6": "CISCO SYSTEMS, INC.",
"0030B7": "Teletrol Systems, Inc.",
"0030B8": "RiverDelta Networks",
"0030B9": "ECTEL",
"0030BA": "AC&T SYSTEM CO., LTD.",
"0030BB": "CacheFlow, Inc.",
"0030BC": "Optronic AG",
"0030BD": "BELKIN COMPONENTS",
"0030BE": "City-Net Technology, Inc.",
"0030BF": "MULTIDATA GMBH",
"0030C0": "Lara Technology, Inc.",
"0030C1": "HEWLETT-PACKARD",
"0030C2": "COMONE",
"0030C3": "FLUECKIGER ELEKTRONIK AG",
"0030C4": "Canon Imaging Systems Inc.",
"0030C5": "CADENCE DESIGN SYSTEMS",
"0030C6": "CONTROL SOLUTIONS, INC.",
"0030C7": "Macromate Corp.",
"0030C8": "GAD LINE, LTD.",
"0030C9": "LuxN, N",
"0030CA": "Discovery Com",
"0030CB": "OMNI FLOW COMPUTERS, INC.",
"0030CC": "Tenor Networks, Inc.",
"0030CD": "CONEXANT SYSTEMS, INC.",
"0030CE": "Zaffire",
"0030CF": "TWO TECHNOLOGIES, INC.",
"0030D0": "Tellabs",
"0030D1": "INOVA CORPORATION",
"0030D2": "WIN TECHNOLOGIES, CO., LTD.",
"0030D3": "Agilent Technologies",
"0030D4": "AAE Systems, Inc.",
"0030D5": "DResearch GmbH",
"0030D6": "MSC VERTRIEBS GMBH",
"0030D7": "Innovative Systems, L.L.C.",
"0030D8": "SITEK",
"0030D9": "DATACORE SOFTWARE CORP.",
"0030DA": "COMTREND CO.",
"0030DB": "Mindready Solutions, Inc.",
"0030DC": "RIGHTECH CORPORATION",
"0030DD": "INDIGITA CORPORATION",
"0030DE": "WAGO Kontakttechnik GmbH",
"0030DF": "KB/TEL TELECOMUNICACIONES",
"0030E0": "OXFORD SEMICONDUCTOR LTD.",
"0030E1": "Network Equipment Technologies, Inc.",
"0030E2": "GARNET SYSTEMS CO., LTD.",
"0030E3": "SEDONA NETWORKS CORP.",
"0030E4": "CHIYODA SYSTEM RIKEN",
"0030E5": "Amper Datos S.A.",
"0030E6": "Draeger Medical Systems, Inc.",
"0030E7": "CNF MOBILE SOLUTIONS, INC.",
"0030E8": "ENSIM CORP.",
"0030E9": "GMA COMMUNICATION MANUFACT'G",
"0030EA": "TeraForce Technology Corporation",
"0030EB": "TURBONET COMMUNICATIONS, INC.",
"0030EC": "BORGARDT",
"0030ED": "Expert Magnetics Corp.",
"0030EE": "DSG Technology, Inc.",
"0030EF": "NEON TECHNOLOGY, INC.",
"0030F0": "Uniform Industrial Corp.",
"0030F1": "Accton Technology Corp.",
"0030F2": "CISCO SYSTEMS, INC.",
"0030F3": "At Work Computers",
"0030F4": "STARDOT TECHNOLOGIES",
"0030F5": "Wild Lab. Ltd.",
"0030F6": "SECURELOGIX CORPORATION",
"0030F7": "RAMIX INC.",
"0030F8": "Dynapro Systems, Inc.",
"0030F9": "Sollae Systems Co., Ltd.",
"0030FA": "TELICA, INC.",
"0030FB": "AZS Technology AG",
"0030FC": "Terawave Communications, Inc.",
"0030FD": "INTEGRATED SYSTEMS DESIGN",
"0030FE": "DSA GmbH",
"0030FF": "DATAFAB SYSTEMS, INC.",
"00336C": "SynapSense Corporation",
"0034F1": "Radicom Research, Inc.",
"003532": "Electro-Metrics Corporation",
"0036F8": "Conti Temic microelectronic GmbH",
"0036FE": "SuperVision",
"00376D": "Murata Manufacturing Co., Ltd.",
"003A98": "CISCO SYSTEMS, INC.",
"003A99": "CISCO SYSTEMS, INC.",
"003A9A": "CISCO SYSTEMS, INC.",
"003A9B": "CISCO SYSTEMS, INC.",
"003A9C": "CISCO SYSTEMS, INC.",
"003A9D": "NEC AccessTechnica, Ltd.",
"003AAF": "BlueBit Ltd.",
"003CC5": "WONWOO Engineering Co., Ltd",
"003D41": "Hatteland Computer AS",
"003EE1": "Apple",
"004000": "PCI COMPONENTES DA AMZONIA LTD",
"004001": "Zero One Technology Co. Ltd.",
"004002": "PERLE SYSTEMS LIMITED",
"004003": "Emerson Process Management Power & Water Solutions, Inc.",
"004004": "ICM CO. LTD.",
"004005": "ANI COMMUNICATIONS INC.",
"004006": "SAMPO TECHNOLOGY CORPORATION",
"004007": "TELMAT INFORMATIQUE",
"004008": "A PLUS INFO CORPORATION",
"004009": "TACHIBANA TECTRON CO., LTD.",
"00400A": "PIVOTAL TECHNOLOGIES, INC.",
"00400B": "CISCO SYSTEMS, INC.",
"00400C": "GENERAL MICRO SYSTEMS, INC.",
"00400D": "LANNET DATA COMMUNICATIONS,LTD",
"00400E": "MEMOTEC, INC.",
"00400F": "DATACOM TECHNOLOGIES",
"004010": "SONIC SYSTEMS, INC.",
"004011": "ANDOVER CONTROLS CORPORATION",
"004012": "WINDATA, INC.",
"004013": "NTT DATA COMM. SYSTEMS CORP.",
"004014": "COMSOFT GMBH",
"004015": "ASCOM INFRASYS AG",
"004016": "ADC - Global Connectivity Solutions Division",
"004017": "Silex Technology America",
"004018": "ADOBE SYSTEMS, INC.",
"004019": "AEON SYSTEMS, INC.",
"00401A": "FUJI ELECTRIC CO., LTD.",
"00401B": "PRINTER SYSTEMS CORP.",
"00401C": "AST RESEARCH, INC.",
"00401D": "INVISIBLE SOFTWARE, INC.",
"00401E": "ICC",
"00401F": "COLORGRAPH LTD",
"004020": "TE Connectivity Ltd.",
"004021": "RASTER GRAPHICS",
"004022": "KLEVER COMPUTERS, INC.",
"004023": "LOGIC CORPORATION",
"004024": "COMPAC INC.",
"004025": "MOLECULAR DYNAMICS",
"004026": "Buffalo Inc.",
"004027": "SMC MASSACHUSETTS, INC.",
"004028": "NETCOMM LIMITED",
"004029": "COMPEX",
"00402A": "CANOGA-PERKINS",
"00402B": "TRIGEM COMPUTER, INC.",
"00402C": "ISIS DISTRIBUTED SYSTEMS, INC.",
"00402D": "HARRIS ADACOM CORPORATION",
"00402E": "PRECISION SOFTWARE, INC.",
"00402F": "XLNT DESIGNS INC.",
"004030": "GK COMPUTER",
"004031": "KOKUSAI ELECTRIC CO., LTD",
"004032": "DIGITAL COMMUNICATIONS",
"004033": "ADDTRON TECHNOLOGY CO., LTD.",
"004034": "BUSTEK CORPORATION",
"004035": "OPCOM",
"004036": "TRIBE COMPUTER WORKS, INC.",
"004037": "SEA-ILAN, INC.",
"004038": "TALENT ELECTRIC INCORPORATED",
"004039": "OPTEC DAIICHI DENKO CO., LTD.",
"00403A": "IMPACT TECHNOLOGIES",
"00403B": "SYNERJET INTERNATIONAL CORP.",
"00403C": "FORKS, INC.",
"00403D": "Teradata Corporation",
"00403E": "RASTER OPS CORPORATION",
"00403F": "SSANGYONG COMPUTER SYSTEMS",
"004040": "RING ACCESS, INC.",
"004041": "FUJIKURA LTD.",
"004042": "N.A.T. GMBH",
"004043": "Nokia Siemens Networks GmbH & Co. KG.",
"004044": "QNIX COMPUTER CO., LTD.",
"004045": "TWINHEAD CORPORATION",
"004046": "UDC RESEARCH LIMITED",
"004047": "WIND RIVER SYSTEMS",
"004048": "SMD INFORMATICA S.A.",
"004049": "Roche Diagnostics International Ltd.",
"00404A": "WEST AUSTRALIAN DEPARTMENT",
"00404B": "MAPLE COMPUTER SYSTEMS",
"00404C": "HYPERTEC PTY LTD.",
"00404D": "TELECOMMUNICATIONS TECHNIQUES",
"00404E": "FLUENT, INC.",
"00404F": "SPACE & NAVAL WARFARE SYSTEMS",
"004050": "IRONICS, INCORPORATED",
"004051": "GRACILIS, INC.",
"004052": "STAR TECHNOLOGIES, INC.",
"004053": "AMPRO COMPUTERS",
"004054": "CONNECTION MACHINES SERVICES",
"004055": "METRONIX GMBH",
"004056": "MCM JAPAN LTD.",
"004057": "LOCKHEED - SANDERS",
"004058": "KRONOS, INC.",
"004059": "YOSHIDA KOGYO K. K.",
"00405A": "GOLDSTAR INFORMATION & COMM.",
"00405B": "FUNASSET LIMITED",
"00405C": "FUTURE SYSTEMS, INC.",
"00405D": "STAR-TEK, INC.",
"00405E": "NORTH HILLS ISRAEL",
"00405F": "AFE COMPUTERS LTD.",
"004060": "COMENDEC LTD",
"004061": "DATATECH ENTERPRISES CO., LTD.",
"004062": "E-SYSTEMS, INC./GARLAND DIV.",
"004063": "VIA TECHNOLOGIES, INC.",
"004064": "KLA INSTRUMENTS CORPORATION",
"004065": "GTE SPACENET",
"004066": "Hitachi Metals, Ltd.",
"004067": "OMNIBYTE CORPORATION",
"004068": "EXTENDED SYSTEMS",
"004069": "LEMCOM SYSTEMS, INC.",
"00406A": "KENTEK INFORMATION SYSTEMS,INC",
"00406B": "SYSGEN",
"00406C": "COPERNIQUE",
"00406D": "LANCO, INC.",
"00406E": "COROLLARY, INC.",
"00406F": "SYNC RESEARCH INC.",
"004070": "INTERWARE CO., LTD.",
"004071": "ATM COMPUTER GMBH",
"004072": "Applied Innovation Inc.",
"004073": "BASS ASSOCIATES",
"004074": "CABLE AND WIRELESS",
"004075": "Tattile SRL",
"004076": "Sun Conversion Technologies",
"004077": "MAXTON TECHNOLOGY CORPORATION",
"004078": "WEARNES AUTOMATION PTE LTD",
"004079": "JUKO MANUFACTURE COMPANY, LTD.",
"00407A": "SOCIETE D'EXPLOITATION DU CNIT",
"00407B": "SCIENTIFIC ATLANTA",
"00407C": "QUME CORPORATION",
"00407D": "EXTENSION TECHNOLOGY CORP.",
"00407E": "EVERGREEN SYSTEMS, INC.",
"00407F": "FLIR Systems",
"004080": "ATHENIX CORPORATION",
"004081": "MANNESMANN SCANGRAPHIC GMBH",
"004082": "LABORATORY EQUIPMENT CORP.",
"004083": "TDA INDUSTRIA DE PRODUTOS",
"004084": "HONEYWELL ACS",
"004085": "SAAB INSTRUMENTS AB",
"004086": "MICHELS & KLEBERHOFF COMPUTER",
"004087": "UBITREX CORPORATION",
"004088": "MOBIUS TECHNOLOGIES, INC.",
"004089": "MEIDENSHA CORPORATION",
"00408A": "TPS TELEPROCESSING SYS. GMBH",
"00408B": "RAYLAN CORPORATION",
"00408C": "AXIS COMMUNICATIONS AB",
"00408D": "THE GOODYEAR TIRE & RUBBER CO.",
"00408E": "Tattile SRL",
"00408F": "WM-DATA MINFO AB",
"004090": "ANSEL COMMUNICATIONS",
"004091": "PROCOMP INDUSTRIA ELETRONICA",
"004092": "ASP COMPUTER PRODUCTS, INC.",
"004093": "PAXDATA NETWORKS LTD.",
"004094": "SHOGRAPHICS, INC.",
"004095": "R.P.T. INTERGROUPS INT'L LTD.",
"004096": "Cisco Systems",
"004097": "DATEX DIVISION OF",
"004098": "DRESSLER GMBH & CO.",
"004099": "NEWGEN SYSTEMS CORP.",
"00409A": "NETWORK EXPRESS, INC.",
"00409B": "HAL COMPUTER SYSTEMS INC.",
"00409C": "TRANSWARE",
"00409D": "DIGIBOARD, INC.",
"00409E": "CONCURRENT TECHNOLOGIES LTD.",
"00409F": "Telco Systems, Inc.",
"0040A0": "GOLDSTAR CO., LTD.",
"0040A1": "ERGO COMPUTING",
"0040A2": "KINGSTAR TECHNOLOGY INC.",
"0040A3": "MICROUNITY SYSTEMS ENGINEERING",
"0040A4": "ROSE ELECTRONICS",
"0040A5": "CLINICOMP INTL.",
"0040A6": "Cray, Inc.",
"0040A7": "ITAUTEC PHILCO S.A.",
"0040A8": "IMF INTERNATIONAL LTD.",
"0040A9": "DATACOM INC.",
"0040AA": "Metso Automation",
"0040AB": "ROLAND DG CORPORATION",
"0040AC": "SUPER WORKSTATION, INC.",
"0040AD": "SMA REGELSYSTEME GMBH",
"0040AE": "DELTA CONTROLS, INC.",
"0040AF": "DIGITAL PRODUCTS, INC.",
"0040B0": "BYTEX CORPORATION, ENGINEERING",
"0040B1": "CODONICS INC.",
"0040B2": "SYSTEMFORSCHUNG",
"0040B3": "ParTech Inc.",
"0040B4": "NEXTCOM K.K.",
"0040B5": "VIDEO TECHNOLOGY COMPUTERS LTD",
"0040B6": "COMPUTERM CORPORATION",
"0040B7": "STEALTH COMPUTER SYSTEMS",
"0040B8": "IDEA ASSOCIATES",
"0040B9": "MACQ ELECTRONIQUE SA",
"0040BA": "ALLIANT COMPUTER SYSTEMS CORP.",
"0040BB": "GOLDSTAR CABLE CO., LTD.",
"0040BC": "ALGORITHMICS LTD.",
"0040BD": "STARLIGHT NETWORKS, INC.",
"0040BE": "BOEING DEFENSE & SPACE",
"0040BF": "CHANNEL SYSTEMS INTERN'L INC.",
"0040C0": "VISTA CONTROLS CORPORATION",
"0040C1": "BIZERBA-WERKE WILHEIM KRAUT",
"0040C2": "APPLIED COMPUTING DEVICES",
"0040C3": "FISCHER AND PORTER CO.",
"0040C4": "KINKEI SYSTEM CORPORATION",
"0040C5": "MICOM COMMUNICATIONS INC.",
"0040C6": "FIBERNET RESEARCH, INC.",
"0040C7": "RUBY TECH CORPORATION",
"0040C8": "MILAN TECHNOLOGY CORPORATION",
"0040C9": "NCUBE",
"0040CA": "FIRST INTERNAT'L COMPUTER, INC",
"0040CB": "LANWAN TECHNOLOGIES",
"0040CC": "SILCOM MANUF'G TECHNOLOGY INC.",
"0040CD": "TERA MICROSYSTEMS, INC.",
"0040CE": "NET-SOURCE, INC.",
"0040CF": "STRAWBERRY TREE, INC.",
"0040D0": "MITAC INTERNATIONAL CORP.",
"0040D1": "FUKUDA DENSHI CO., LTD.",
"0040D2": "PAGINE CORPORATION",
"0040D3": "KIMPSION INTERNATIONAL CORP.",
"0040D4": "GAGE TALKER CORP.",
"0040D5": "Sartorius Mechatronics T&H GmbH",
"0040D6": "LOCAMATION B.V.",
"0040D7": "STUDIO GEN INC.",
"0040D8": "OCEAN OFFICE AUTOMATION LTD.",
"0040D9": "AMERICAN MEGATRENDS INC.",
"0040DA": "TELSPEC LTD",
"0040DB": "ADVANCED TECHNICAL SOLUTIONS",
"0040DC": "TRITEC ELECTRONIC GMBH",
"0040DD": "HONG TECHNOLOGIES",
"0040DE": "Elsag Datamat spa",
"0040DF": "DIGALOG SYSTEMS, INC.",
"0040E0": "ATOMWIDE LTD.",
"0040E1": "MARNER INTERNATIONAL, INC.",
"0040E2": "MESA RIDGE TECHNOLOGIES, INC.",
"0040E3": "QUIN SYSTEMS LTD",
"0040E4": "E-M TECHNOLOGY, INC.",
"0040E5": "SYBUS CORPORATION",
"0040E6": "C.A.E.N.",
"0040E7": "ARNOS INSTRUMENTS & COMPUTER",
"0040E8": "CHARLES RIVER DATA SYSTEMS,INC",
"0040E9": "ACCORD SYSTEMS, INC.",
"0040EA": "PLAIN TREE SYSTEMS INC",
"0040EB": "MARTIN MARIETTA CORPORATION",
"0040EC": "MIKASA SYSTEM ENGINEERING",
"0040ED": "NETWORK CONTROLS INT'NATL INC.",
"0040EE": "OPTIMEM",
"0040EF": "HYPERCOM, INC.",
"0040F0": "MicroBrain,Inc.",
"0040F1": "CHUO ELECTRONICS CO., LTD.",
"0040F2": "JANICH & KLASS COMPUTERTECHNIK",
"0040F3": "NETCOR",
"0040F4": "CAMEO COMMUNICATIONS, INC.",
"0040F5": "OEM ENGINES",
"0040F6": "KATRON COMPUTERS INC.",
"0040F7": "Polaroid Corporation",
"0040F8": "SYSTEMHAUS DISCOM",
"0040F9": "COMBINET",
"0040FA": "MICROBOARDS, INC.",
"0040FB": "CASCADE COMMUNICATIONS CORP.",
"0040FC": "IBR COMPUTER TECHNIK GMBH",
"0040FD": "LXE",
"0040FE": "SYMPLEX COMMUNICATIONS",
"0040FF": "TELEBIT CORPORATION",
"0041B4": "Wuxi Zhongxing Optoelectronics Technology Co.,Ltd.",
"004252": "RLX Technologies",
"0043FF": "KETRON S.R.L.",
"004501": "Versus Technology, Inc.",
"00464B": "HUAWEI TECHNOLOGIES CO.,LTD",
"004D32": "Andon Health Co.,Ltd.",
"005000": "NEXO COMMUNICATIONS, INC.",
"005001": "YAMASHITA SYSTEMS CORP.",
"005002": "OMNISEC AG",
"005003": "Xrite Inc",
"005004": "3COM CORPORATION",
"005006": "TAC AB",
"005007": "SIEMENS TELECOMMUNICATION SYSTEMS LIMITED",
"005008": "TIVA MICROCOMPUTER CORP. (TMC)",
"005009": "PHILIPS BROADBAND NETWORKS",
"00500A": "IRIS TECHNOLOGIES, INC.",
"00500B": "CISCO SYSTEMS, INC.",
"00500C": "e-Tek Labs, Inc.",
"00500D": "SATORI ELECTORIC CO., LTD.",
"00500E": "CHROMATIS NETWORKS, INC.",
"00500F": "CISCO SYSTEMS, INC.",
"005010": "NovaNET Learning, Inc.",
"005012": "CBL - GMBH",
"005013": "Chaparral Network Storage",
"005014": "CISCO SYSTEMS, INC.",
"005015": "BRIGHT STAR ENGINEERING",
"005016": "SST/WOODHEAD INDUSTRIES",
"005017": "RSR S.R.L.",
"005018": "AMIT, Inc.",
"005019": "SPRING TIDE NETWORKS, INC.",
"00501A": "IQinVision",
"00501B": "ABL CANADA, INC.",
"00501C": "JATOM SYSTEMS, INC.",
"00501E": "Miranda Technologies, Inc.",
"00501F": "MRG SYSTEMS, LTD.",
"005020": "MEDIASTAR CO., LTD.",
"005021": "EIS INTERNATIONAL, INC.",
"005022": "ZONET TECHNOLOGY, INC.",
"005023": "PG DESIGN ELECTRONICS, INC.",
"005024": "NAVIC SYSTEMS, INC.",
"005026": "COSYSTEMS, INC.",
"005027": "GENICOM CORPORATION",
"005028": "AVAL COMMUNICATIONS",
"005029": "1394 PRINTER WORKING GROUP",
"00502A": "CISCO SYSTEMS, INC.",
"00502B": "GENRAD LTD.",
"00502C": "SOYO COMPUTER, INC.",
"00502D": "ACCEL, INC.",
"00502E": "CAMBEX CORPORATION",
"00502F": "TollBridge Technologies, Inc.",
"005030": "FUTURE PLUS SYSTEMS",
"005031": "AEROFLEX LABORATORIES, INC.",
"005032": "PICAZO COMMUNICATIONS, INC.",
"005033": "MAYAN NETWORKS",
"005036": "NETCAM, LTD.",
"005037": "KOGA ELECTRONICS CO.",
"005038": "DAIN TELECOM CO., LTD.",
"005039": "MARINER NETWORKS",
"00503A": "DATONG ELECTRONICS LTD.",
"00503B": "MEDIAFIRE CORPORATION",
"00503C": "TSINGHUA NOVEL ELECTRONICS",
"00503E": "CISCO SYSTEMS, INC.",
"00503F": "ANCHOR GAMES",
"005040": "Panasonic Electric Works Co., Ltd.",
"005041": "Coretronic Corporation",
"005042": "SCI MANUFACTURING SINGAPORE PTE, LTD.",
"005043": "MARVELL SEMICONDUCTOR, INC.",
"005044": "ASACA CORPORATION",
"005045": "RIOWORKS SOLUTIONS, INC.",
"005046": "MENICX INTERNATIONAL CO., LTD.",
"005047": "PRIVATE",
"005048": "INFOLIBRIA",
"005049": "Arbor Networks Inc",
"00504A": "ELTECO A.S.",
"00504B": "BARCONET N.V.",
"00504C": "Galil Motion Control",
"00504D": "Tokyo Electron Device Limited",
"00504E": "SIERRA MONITOR CORP.",
"00504F": "OLENCOM ELECTRONICS",
"005050": "CISCO SYSTEMS, INC.",
"005051": "IWATSU ELECTRIC CO., LTD.",
"005052": "TIARA NETWORKS, INC.",
"005053": "CISCO SYSTEMS, INC.",
"005054": "CISCO SYSTEMS, INC.",
"005055": "DOMS A/S",
"005056": "VMware, Inc.",
"005057": "BROADBAND ACCESS SYSTEMS",
"005058": "VegaStream Group Limted",
"005059": "iBAHN",
"00505A": "NETWORK ALCHEMY, INC.",
"00505B": "KAWASAKI LSI U.S.A., INC.",
"00505C": "TUNDO CORPORATION",
"00505E": "DIGITEK MICROLOGIC S.A.",
"00505F": "BRAND INNOVATORS",
"005060": "TANDBERG TELECOM AS",
"005062": "KOUWELL ELECTRONICS CORP. **",
"005063": "OY COMSEL SYSTEM AB",
"005064": "CAE ELECTRONICS",
"005065": "TDK-Lambda Corporation",
"005066": "AtecoM GmbH advanced telecomunication modules",
"005067": "AEROCOMM, INC.",
"005068": "ELECTRONIC INDUSTRIES ASSOCIATION",
"005069": "PixStream Incorporated",
"00506A": "EDEVA, INC.",
"00506B": "SPX-ATEG",
"00506C": "Beijer Electronics Products AB",
"00506D": "VIDEOJET SYSTEMS",
"00506E": "CORDER ENGINEERING CORPORATION",
"00506F": "G-CONNECT",
"005070": "CHAINTECH COMPUTER CO., LTD.",
"005071": "AIWA CO., LTD.",
"005072": "CORVIS CORPORATION",
"005073": "CISCO SYSTEMS, INC.",
"005074": "ADVANCED HI-TECH CORP.",
"005075": "KESTREL SOLUTIONS",
"005076": "IBM Corp",
"005077": "PROLIFIC TECHNOLOGY, INC.",
"005078": "MEGATON HOUSE, LTD.",
"005079": "PRIVATE",
"00507A": "XPEED, INC.",
"00507B": "MERLOT COMMUNICATIONS",
"00507C": "VIDEOCON AG",
"00507D": "IFP",
"00507E": "NEWER TECHNOLOGY",
"00507F": "DrayTek Corp.",
"005080": "CISCO SYSTEMS, INC.",
"005081": "MURATA MACHINERY, LTD.",
"005082": "FORESSON CORPORATION",
"005083": "GILBARCO, INC.",
"005084": "ATL PRODUCTS",
"005086": "TELKOM SA, LTD.",
"005087": "TERASAKI ELECTRIC CO., LTD.",
"005088": "AMANO CORPORATION",
"005089": "SAFETY MANAGEMENT SYSTEMS",
"00508B": "Hewlett-Packard Company",
"00508C": "RSI SYSTEMS",
"00508D": "ABIT COMPUTER CORPORATION",
"00508E": "OPTIMATION, INC.",
"00508F": "ASITA TECHNOLOGIES INT'L LTD.",
"005090": "DCTRI",
"005091": "NETACCESS, INC.",
"005092": "RIGAKU INDUSTRIAL CORPORATION",
"005093": "BOEING",
"005094": "PACE plc",
"005095": "PERACOM NETWORKS",
"005096": "SALIX TECHNOLOGIES, INC.",
"005097": "MMC-EMBEDDED COMPUTERTECHNIK GmbH",
"005098": "GLOBALOOP, LTD.",
"005099": "3COM EUROPE, LTD.",
"00509A": "TAG ELECTRONIC SYSTEMS",
"00509B": "SWITCHCORE AB",
"00509C": "BETA RESEARCH",
"00509D": "THE INDUSTREE B.V.",
"00509E": "Les Technologies SoftAcoustik Inc.",
"00509F": "HORIZON COMPUTER",
"0050A0": "DELTA COMPUTER SYSTEMS, INC.",
"0050A1": "CARLO GAVAZZI, INC.",
"0050A2": "CISCO SYSTEMS, INC.",
"0050A3": "TransMedia Communications, Inc.",
"0050A4": "IO TECH, INC.",
"0050A5": "CAPITOL BUSINESS SYSTEMS, LTD.",
"0050A6": "OPTRONICS",
"0050A7": "CISCO SYSTEMS, INC.",
"0050A8": "OpenCon Systems, Inc.",
"0050A9": "MOLDAT WIRELESS TECHNOLGIES",
"0050AA": "KONICA MINOLTA HOLDINGS, INC.",
"0050AB": "NALTEC, Inc.",
"0050AC": "MAPLE COMPUTER CORPORATION",
"0050AD": "CommUnique Wireless Corp.",
"0050AE": "FDK Co., Ltd",
"0050AF": "INTERGON, INC.",
"0050B0": "TECHNOLOGY ATLANTA CORPORATION",
"0050B1": "GIDDINGS & LEWIS",
"0050B2": "BRODEL GmbH",
"0050B3": "VOICEBOARD CORPORATION",
"0050B4": "SATCHWELL CONTROL SYSTEMS, LTD",
"0050B5": "FICHET-BAUCHE",
"0050B6": "GOOD WAY IND. CO., LTD.",
"0050B7": "BOSER TECHNOLOGY CO., LTD.",
"0050B8": "INOVA COMPUTERS GMBH & CO. KG",
"0050B9": "XITRON TECHNOLOGIES, INC.",
"0050BA": "D-LINK",
"0050BB": "CMS TECHNOLOGIES",
"0050BC": "HAMMER STORAGE SOLUTIONS",
"0050BD": "CISCO SYSTEMS, INC.",
"0050BE": "FAST MULTIMEDIA AG",
"0050BF": "Metalligence Technology Corp.",
"0050C0": "GATAN, INC.",
"0050C1": "GEMFLEX NETWORKS, LTD.",
"0050C2": "IEEE REGISTRATION AUTHORITY - Please see IAB public listing for more information.",
"0050C4": "IMD",
"0050C5": "ADS Technologies, Inc",
"0050C6": "LOOP TELECOMMUNICATION INTERNATIONAL, INC.",
"0050C8": "Addonics Technologies, Inc.",
"0050C9": "MASPRO DENKOH CORP.",
"0050CA": "NET TO NET TECHNOLOGIES",
"0050CB": "JETTER",
"0050CC": "XYRATEX",
"0050CD": "DIGIANSWER A/S",
"0050CE": "LG INTERNATIONAL CORP.",
"0050CF": "VANLINK COMMUNICATION TECHNOLOGY RESEARCH INSTITUTE",
"0050D0": "MINERVA SYSTEMS",
"0050D1": "CISCO SYSTEMS, INC.",
"0050D2": "CMC Electronics Inc",
"0050D3": "DIGITAL AUDIO PROCESSING PTY. LTD.",
"0050D4": "JOOHONG INFORMATION &",
"0050D5": "AD SYSTEMS CORP.",
"0050D6": "ATLAS COPCO TOOLS AB",
"0050D7": "TELSTRAT",
"0050D8": "UNICORN COMPUTER CORP.",
"0050D9": "ENGETRON-ENGENHARIA ELETRONICA IND. e COM. LTDA",
"0050DA": "3COM CORPORATION",
"0050DB": "CONTEMPORARY CONTROL",
"0050DC": "TAS TELEFONBAU A. SCHWABE GMBH & CO. KG",
"0050DD": "SERRA SOLDADURA, S.A.",
"0050DE": "SIGNUM SYSTEMS CORP.",
"0050DF": "AirFiber, Inc.",
"0050E1": "NS TECH ELECTRONICS SDN BHD",
"0050E2": "CISCO SYSTEMS, INC.",
"0050E3": "ARRIS Group, Inc.",
"0050E4": "Apple",
"0050E6": "HAKUSAN CORPORATION",
"0050E7": "PARADISE INNOVATIONS (ASIA)",
"0050E8": "NOMADIX INC.",
"0050EA": "XEL COMMUNICATIONS, INC.",
"0050EB": "ALPHA-TOP CORPORATION",
"0050EC": "OLICOM A/S",
"0050ED": "ANDA NETWORKS",
"0050EE": "TEK DIGITEL CORPORATION",
"0050EF": "SPE Systemhaus GmbH",
"0050F0": "CISCO SYSTEMS, INC.",
"0050F1": "Intel Corporation",
"0050F2": "MICROSOFT CORP.",
"0050F3": "GLOBAL NET INFORMATION CO., Ltd.",
"0050F4": "SIGMATEK GMBH & CO. KG",
"0050F6": "PAN-INTERNATIONAL INDUSTRIAL CORP.",
"0050F7": "VENTURE MANUFACTURING (SINGAPORE) LTD.",
"0050F8": "ENTREGA TECHNOLOGIES, INC.",
"0050F9": "SENSORMATIC ACD",
"0050FA": "OXTEL, LTD.",
"0050FB": "VSK ELECTRONICS",
"0050FC": "EDIMAX TECHNOLOGY CO., LTD.",
"0050FD": "VISIONCOMM CO., LTD.",
"0050FE": "PCTVnet ASA",
"0050FF": "HAKKO ELECTRONICS CO., LTD.",
"005218": "Wuxi Keboda Electron Co.Ltd",
"0054AF": "Continental Automotive Systems Inc.",
"005907": "LenovoEMC Products USA, LLC",
"005CB1": "Gospell DIGITAL TECHNOLOGY CO., LTD",
"005D03": "Xilinx, Inc",
"006000": "XYCOM INC.",
"006001": "InnoSys, Inc.",
"006002": "SCREEN SUBTITLING SYSTEMS, LTD",
"006003": "TERAOKA WEIGH SYSTEM PTE, LTD.",
"006004": "COMPUTADORES MODULARES SA",
"006005": "FEEDBACK DATA LTD.",
"006006": "SOTEC CO., LTD",
"006007": "ACRES GAMING, INC.",
"006008": "3COM CORPORATION",
"006009": "CISCO SYSTEMS, INC.",
"00600A": "SORD COMPUTER CORPORATION",
"00600B": "LOGWARE GmbH",
"00600C": "Eurotech Inc.",
"00600D": "Digital Logic GmbH",
"00600E": "WAVENET INTERNATIONAL, INC.",
"00600F": "WESTELL, INC.",
"006010": "NETWORK MACHINES, INC.",
"006011": "CRYSTAL SEMICONDUCTOR CORP.",
"006012": "POWER COMPUTING CORPORATION",
"006013": "NETSTAL MASCHINEN AG",
"006014": "EDEC CO., LTD.",
"006015": "NET2NET CORPORATION",
"006016": "CLARIION",
"006017": "TOKIMEC INC.",
"006018": "STELLAR ONE CORPORATION",
"006019": "Roche Diagnostics",
"00601A": "KEITHLEY INSTRUMENTS",
"00601B": "MESA ELECTRONICS",
"00601C": "TELXON CORPORATION",
"00601D": "LUCENT TECHNOLOGIES",
"00601E": "SOFTLAB, INC.",
"00601F": "STALLION TECHNOLOGIES",
"006020": "PIVOTAL NETWORKING, INC.",
"006021": "DSC CORPORATION",
"006022": "VICOM SYSTEMS, INC.",
"006023": "PERICOM SEMICONDUCTOR CORP.",
"006024": "GRADIENT TECHNOLOGIES, INC.",
"006025": "ACTIVE IMAGING PLC",
"006026": "VIKING Modular Solutions",
"006027": "Superior Modular Products",
"006028": "MACROVISION CORPORATION",
"006029": "CARY PERIPHERALS INC.",
"00602A": "SYMICRON COMPUTER COMMUNICATIONS, LTD.",
"00602B": "PEAK AUDIO",
"00602C": "LINX Data Terminals, Inc.",
"00602D": "ALERTON TECHNOLOGIES, INC.",
"00602E": "CYCLADES CORPORATION",
"00602F": "CISCO SYSTEMS, INC.",
"006030": "VILLAGE TRONIC ENTWICKLUNG",
"006031": "HRK SYSTEMS",
"006032": "I-CUBE, INC.",
"006033": "ACUITY IMAGING, INC.",
"006034": "ROBERT BOSCH GmbH",
"006035": "DALLAS SEMICONDUCTOR, INC.",
"006036": "AIT Austrian Institute of Technology GmbH",
"006037": "NXP Semiconductors",
"006038": "Nortel Networks",
"006039": "SanCom Technology, Inc.",
"00603A": "QUICK CONTROLS LTD.",
"00603B": "AMTEC spa",
"00603C": "HAGIWARA SYS-COM CO., LTD.",
"00603D": "3CX",
"00603E": "CISCO SYSTEMS, INC.",
"00603F": "PATAPSCO DESIGNS",
"006040": "NETRO CORP.",
"006041": "Yokogawa Electric Corporation",
"006042": "TKS (USA), INC.",
"006043": "iDirect, INC.",
"006044": "LITTON/POLY-SCIENTIFIC",
"006045": "PATHLIGHT TECHNOLOGIES",
"006046": "VMETRO, INC.",
"006047": "CISCO SYSTEMS, INC.",
"006048": "EMC CORPORATION",
"006049": "VINA TECHNOLOGIES",
"00604A": "SAIC IDEAS GROUP",
"00604B": "Safe-com GmbH & Co. KG",
"00604C": "SAGEM COMMUNICATION",
"00604D": "MMC NETWORKS, INC.",
"00604E": "CYCLE COMPUTER CORPORATION, INC.",
"00604F": "Tattile SRL",
"006050": "INTERNIX INC.",
"006051": "QUALITY SEMICONDUCTOR",
"006052": "PERIPHERALS ENTERPRISE CO., Ltd.",
"006053": "TOYODA MACHINE WORKS, LTD.",
"006054": "CONTROLWARE GMBH",
"006055": "CORNELL UNIVERSITY",
"006056": "NETWORK TOOLS, INC.",
"006057": "MURATA MANUFACTURING CO., LTD.",
"006058": "COPPER MOUNTAIN COMMUNICATIONS, INC.",
"006059": "TECHNICAL COMMUNICATIONS CORP.",
"00605A": "CELCORE, INC.",
"00605B": "IntraServer Technology, Inc.",
"00605C": "CISCO SYSTEMS, INC.",
"00605D": "SCANIVALVE CORP.",
"00605E": "LIBERTY TECHNOLOGY NETWORKING",
"00605F": "NIPPON UNISOFT CORPORATION",
"006060": "Data Innovations North America",
"006061": "WHISTLE COMMUNICATIONS CORP.",
"006062": "TELESYNC, INC.",
"006063": "PSION DACOM PLC.",
"006064": "NETCOMM LIMITED",
"006065": "BERNECKER & RAINER INDUSTRIE-ELEKTRONIC GmbH",
"006066": "LACROIX Trafic",
"006067": "ACER NETXUS INC.",
"006068": "Dialogic Corporation",
"006069": "Brocade Communications Systems, Inc.",
"00606A": "MITSUBISHI WIRELESS COMMUNICATIONS. INC.",
"00606B": "Synclayer Inc.",
"00606C": "ARESCOM",
"00606D": "DIGITAL EQUIPMENT CORP.",
"00606E": "DAVICOM SEMICONDUCTOR, INC.",
"00606F": "CLARION CORPORATION OF AMERICA",
"006070": "CISCO SYSTEMS, INC.",
"006071": "MIDAS LAB, INC.",
"006072": "VXL INSTRUMENTS, LIMITED",
"006073": "REDCREEK COMMUNICATIONS, INC.",
"006074": "QSC AUDIO PRODUCTS",
"006075": "PENTEK, INC.",
"006076": "SCHLUMBERGER TECHNOLOGIES RETAIL PETROLEUM SYSTEMS",
"006077": "PRISA NETWORKS",
"006078": "POWER MEASUREMENT LTD.",
"006079": "Mainstream Data, Inc.",
"00607A": "DVS GmbH",
"00607B": "FORE SYSTEMS, INC.",
"00607C": "WaveAccess, Ltd.",
"00607D": "SENTIENT NETWORKS INC.",
"00607E": "GIGALABS, INC.",
"00607F": "AURORA TECHNOLOGIES, INC.",
"006080": "MICROTRONIX DATACOM LTD.",
"006081": "TV/COM INTERNATIONAL",
"006082": "NOVALINK TECHNOLOGIES, INC.",
"006083": "CISCO SYSTEMS, INC.",
"006084": "DIGITAL VIDEO",
"006085": "Storage Concepts",
"006086": "LOGIC REPLACEMENT TECH. LTD.",
"006087": "KANSAI ELECTRIC CO., LTD.",
"006088": "WHITE MOUNTAIN DSP, INC.",
"006089": "XATA",
"00608A": "CITADEL COMPUTER",
"00608B": "ConferTech International",
"00608C": "3COM CORPORATION",
"00608D": "UNIPULSE CORP.",
"00608E": "HE ELECTRONICS, TECHNOLOGIE & SYSTEMTECHNIK GmbH",
"00608F": "TEKRAM TECHNOLOGY CO., LTD.",
"006090": "Artiza Networks Inc",
"006091": "FIRST PACIFIC NETWORKS, INC.",
"006092": "MICRO/SYS, INC.",
"006093": "VARIAN",
"006094": "IBM Corp",
"006095": "ACCU-TIME SYSTEMS, INC.",
"006096": "T.S. MICROTECH INC.",
"006097": "3COM CORPORATION",
"006098": "HT COMMUNICATIONS",
"006099": "SBE, Inc.",
"00609A": "NJK TECHNO CO.",
"00609B": "ASTRO-MED, INC.",
"00609C": "Perkin-Elmer Incorporated",
"00609D": "PMI FOOD EQUIPMENT GROUP",
"00609E": "ASC X3 - INFORMATION TECHNOLOGY STANDARDS SECRETARIATS",
"00609F": "PHAST CORPORATION",
"0060A0": "SWITCHED NETWORK TECHNOLOGIES, INC.",
"0060A1": "VPNet, Inc.",
"0060A2": "NIHON UNISYS LIMITED CO.",
"0060A3": "CONTINUUM TECHNOLOGY CORP.",
"0060A4": "GEW Technologies (PTY)Ltd",
"0060A5": "PERFORMANCE TELECOM CORP.",
"0060A6": "PARTICLE MEASURING SYSTEMS",
"0060A7": "MICROSENS GmbH & CO. KG",
"0060A8": "TIDOMAT AB",
"0060A9": "GESYTEC MbH",
"0060AA": "INTELLIGENT DEVICES INC. (IDI)",
"0060AB": "LARSCOM INCORPORATED",
"0060AC": "RESILIENCE CORPORATION",
"0060AD": "MegaChips Corporation",
"0060AE": "TRIO INFORMATION SYSTEMS AB",
"0060AF": "PACIFIC MICRO DATA, INC.",
"0060B0": "HEWLETT-PACKARD CO.",
"0060B1": "INPUT/OUTPUT, INC.",
"0060B2": "PROCESS CONTROL CORP.",
"0060B3": "Z-COM, INC.",
"0060B4": "GLENAYRE R&D INC.",
"0060B5": "KEBA GmbH",
"0060B6": "LAND COMPUTER CO., LTD.",
"0060B7": "CHANNELMATIC, INC.",
"0060B8": "CORELIS Inc.",
"0060B9": "NEC Infrontia Corporation",
"0060BA": "SAHARA NETWORKS, INC.",
"0060BB": "CABLETRON - NETLINK, INC.",
"0060BC": "KeunYoung Electronics & Communication Co., Ltd.",
"0060BD": "HUBBELL-PULSECOM",
"0060BE": "WEBTRONICS",
"0060BF": "MACRAIGOR SYSTEMS, INC.",
"0060C0": "Nera Networks AS",
"0060C1": "WaveSpan Corporation",
"0060C2": "MPL AG",
"0060C3": "NETVISION CORPORATION",
"0060C4": "SOLITON SYSTEMS K.K.",
"0060C5": "ANCOT CORP.",
"0060C6": "DCS AG",
"0060C7": "AMATI COMMUNICATIONS CORP.",
"0060C8": "KUKA WELDING SYSTEMS & ROBOTS",
"0060C9": "ControlNet, Inc.",
"0060CA": "HARMONIC SYSTEMS INCORPORATED",
"0060CB": "HITACHI ZOSEN CORPORATION",
"0060CC": "EMTRAK, INCORPORATED",
"0060CD": "VideoServer, Inc.",
"0060CE": "ACCLAIM COMMUNICATIONS",
"0060CF": "ALTEON NETWORKS, INC.",
"0060D0": "SNMP RESEARCH INCORPORATED",
"0060D1": "CASCADE COMMUNICATIONS",
"0060D2": "LUCENT TECHNOLOGIES TAIWAN TELECOMMUNICATIONS CO., LTD.",
"0060D3": "AT&T",
"0060D4": "ELDAT COMMUNICATION LTD.",
"0060D5": "MIYACHI TECHNOS CORP.",
"0060D6": "NovAtel Wireless Technologies Ltd.",
"0060D7": "ECOLE POLYTECHNIQUE FEDERALE DE LAUSANNE (EPFL)",
"0060D8": "ELMIC SYSTEMS, INC.",
"0060D9": "TRANSYS NETWORKS INC.",
"0060DA": "JBM ELECTRONICS CO.",
"0060DB": "NTP ELEKTRONIK A/S",
"0060DC": "Toyo Network Systems & System Integration Co. LTD",
"0060DD": "MYRICOM, INC.",
"0060DE": "Kayser-Threde GmbH",
"0060DF": "Brocade Communications Systems, Inc.",
"0060E0": "AXIOM TECHNOLOGY CO., LTD.",
"0060E1": "ORCKIT COMMUNICATIONS LTD.",
"0060E2": "QUEST ENGINEERING & DEVELOPMENT",
"0060E3": "ARBIN INSTRUMENTS",
"0060E4": "COMPUSERVE, INC.",
"0060E5": "FUJI AUTOMATION CO., LTD.",
"0060E6": "SHOMITI SYSTEMS INCORPORATED",
"0060E7": "RANDATA",
"0060E8": "HITACHI COMPUTER PRODUCTS (AMERICA), INC.",
"0060E9": "ATOP TECHNOLOGIES, INC.",
"0060EA": "StreamLogic",
"0060EB": "FOURTHTRACK SYSTEMS",
"0060EC": "HERMARY OPTO ELECTRONICS INC.",
"0060ED": "RICARDO TEST AUTOMATION LTD.",
"0060EE": "APOLLO",
"0060EF": "FLYTECH TECHNOLOGY CO., LTD.",
"0060F0": "JOHNSON & JOHNSON MEDICAL, INC",
"0060F1": "EXP COMPUTER, INC.",
"0060F2": "LASERGRAPHICS, INC.",
"0060F3": "Performance Analysis Broadband, Spirent plc",
"0060F4": "ADVANCED COMPUTER SOLUTIONS, Inc.",
"0060F5": "ICON WEST, INC.",
"0060F6": "NEXTEST COMMUNICATIONS PRODUCTS, INC.",
"0060F7": "DATAFUSION SYSTEMS",
"0060F8": "Loran International Technologies Inc.",
"0060F9": "DIAMOND LANE COMMUNICATIONS",
"0060FA": "EDUCATIONAL TECHNOLOGY RESOURCES, INC.",
"0060FB": "PACKETEER, INC.",
"0060FC": "CONSERVATION THROUGH INNOVATION LTD.",
"0060FD": "NetICs, Inc.",
"0060FE": "LYNX SYSTEM DEVELOPERS, INC.",
"0060FF": "QuVis, Inc.",
"006440": "CISCO SYSTEMS, INC.",
"0064A6": "Maquet CardioVascular",
"00664B": "Huawei Technologies Co., Ltd",
"006B9E": "VIZIO Inc",
"006BA0": "SHENZHEN UNIVERSAL INTELLISYS PTE LTD",
"006DFB": "Vutrix (UK) Ltd",
"0070B0": "M/A-COM INC. COMPANIES",
"0070B3": "DATA RECALL LTD.",
"0071CC": "Hon Hai Precision Ind. Co.,Ltd.",
"00738D": "Tinno Mobile Technology Corp",
"0073E0": "Samsung Electronics Co.,Ltd",
"007532": "INID BV",
"0075E1": "Ampt, LLC",
"00789E": "SAGEMCOM",
"007DFA": "Volkswagen Group of America",
"007F28": "Actiontec Electronics, Inc",
"008000": "MULTITECH SYSTEMS, INC.",
"008001": "PERIPHONICS CORPORATION",
"008002": "SATELCOM (UK) LTD",
"008003": "HYTEC ELECTRONICS LTD.",
"008004": "ANTLOW COMMUNICATIONS, LTD.",
"008005": "CACTUS COMPUTER INC.",
"008006": "COMPUADD CORPORATION",
"008007": "DLOG NC-SYSTEME",
"008008": "DYNATECH COMPUTER SYSTEMS",
"008009": "JUPITER SYSTEMS, INC.",
"00800A": "JAPAN COMPUTER CORP.",
"00800B": "CSK CORPORATION",
"00800C": "VIDECOM LIMITED",
"00800D": "VOSSWINKEL F.U.",
"00800E": "ATLANTIX CORPORATION",
"00800F": "STANDARD MICROSYSTEMS",
"008010": "COMMODORE INTERNATIONAL",
"008011": "DIGITAL SYSTEMS INT'L. INC.",
"008012": "INTEGRATED MEASUREMENT SYSTEMS",
"008013": "THOMAS-CONRAD CORPORATION",
"008014": "ESPRIT SYSTEMS",
"008015": "SEIKO SYSTEMS, INC.",
"008016": "WANDEL AND GOLTERMANN",
"008017": "PFU LIMITED",
"008018": "KOBE STEEL, LTD.",
"008019": "DAYNA COMMUNICATIONS, INC.",
"00801A": "BELL ATLANTIC",
"00801B": "KODIAK TECHNOLOGY",
"00801C": "NEWPORT SYSTEMS SOLUTIONS",
"00801D": "INTEGRATED INFERENCE MACHINES",
"00801E": "XINETRON, INC.",
"00801F": "KRUPP ATLAS ELECTRONIK GMBH",
"008020": "NETWORK PRODUCTS",
"008021": "Alcatel Canada Inc.",
"008022": "SCAN-OPTICS",
"008023": "INTEGRATED BUSINESS NETWORKS",
"008024": "KALPANA, INC.",
"008025": "STOLLMANN GMBH",
"008026": "NETWORK PRODUCTS CORPORATION",
"008027": "ADAPTIVE SYSTEMS, INC.",
"008028": "TRADPOST (HK) LTD",
"008029": "EAGLE TECHNOLOGY, INC.",
"00802A": "TEST SYSTEMS & SIMULATIONS INC",
"00802B": "INTEGRATED MARKETING CO",
"00802C": "THE SAGE GROUP PLC",
"00802D": "XYLOGICS INC",
"00802E": "CASTLE ROCK COMPUTING",
"00802F": "NATIONAL INSTRUMENTS CORP.",
"008030": "NEXUS ELECTRONICS",
"008031": "BASYS, CORP.",
"008032": "ACCESS CO., LTD.",
"008033": "EMS Aviation, Inc.",
"008034": "SMT GOUPIL",
"008035": "TECHNOLOGY WORKS, INC.",
"008036": "REFLEX MANUFACTURING SYSTEMS",
"008037": "Ericsson Group",
"008038": "DATA RESEARCH & APPLICATIONS",
"008039": "ALCATEL STC AUSTRALIA",
"00803A": "VARITYPER, INC.",
"00803B": "APT COMMUNICATIONS, INC.",
"00803C": "TVS ELECTRONICS LTD",
"00803D": "SURIGIKEN CO., LTD.",
"00803E": "SYNERNETICS",
"00803F": "TATUNG COMPANY",
"008040": "JOHN FLUKE MANUFACTURING CO.",
"008041": "VEB KOMBINAT ROBOTRON",
"008042": "Emerson Network Power",
"008043": "NETWORLD, INC.",
"008044": "SYSTECH COMPUTER CORP.",
"008045": "MATSUSHITA ELECTRIC IND. CO",
"008046": "Tattile SRL",
"008047": "IN-NET CORP.",
"008048": "COMPEX INCORPORATED",
"008049": "NISSIN ELECTRIC CO., LTD.",
"00804A": "PRO-LOG",
"00804B": "EAGLE TECHNOLOGIES PTY.LTD.",
"00804C": "CONTEC CO., LTD.",
"00804D": "CYCLONE MICROSYSTEMS, INC.",
"00804E": "APEX COMPUTER COMPANY",
"00804F": "DAIKIN INDUSTRIES, LTD.",
"008050": "ZIATECH CORPORATION",
"008051": "FIBERMUX",
"008052": "TECHNICALLY ELITE CONCEPTS",
"008053": "INTELLICOM, INC.",
"008054": "FRONTIER TECHNOLOGIES CORP.",
"008055": "FERMILAB",
"008056": "SPHINX ELEKTRONIK GMBH",
"008057": "ADSOFT, LTD.",
"008058": "PRINTER SYSTEMS CORPORATION",
"008059": "STANLEY ELECTRIC CO., LTD",
"00805A": "TULIP COMPUTERS INTERNAT'L B.V",
"00805B": "CONDOR SYSTEMS, INC.",
"00805C": "AGILIS CORPORATION",
"00805D": "CANSTAR",
"00805E": "LSI LOGIC CORPORATION",
"00805F": "Hewlett-Packard Company",
"008060": "NETWORK INTERFACE CORPORATION",
"008061": "LITTON SYSTEMS, INC.",
"008062": "INTERFACE CO.",
"008063": "Hirschmann Automation and Control GmbH",
"008064": "WYSE TECHNOLOGY LLC",
"008065": "CYBERGRAPHIC SYSTEMS PTY LTD.",
"008066": "ARCOM CONTROL SYSTEMS, LTD.",
"008067": "SQUARE D COMPANY",
"008068": "YAMATECH SCIENTIFIC LTD.",
"008069": "COMPUTONE SYSTEMS",
"00806A": "ERI (EMPAC RESEARCH INC.)",
"00806B": "SCHMID TELECOMMUNICATION",
"00806C": "CEGELEC PROJECTS LTD",
"00806D": "CENTURY SYSTEMS CORP.",
"00806E": "NIPPON STEEL CORPORATION",
"00806F": "ONELAN LTD.",
"008070": "COMPUTADORAS MICRON",
"008071": "SAI TECHNOLOGY",
"008072": "MICROPLEX SYSTEMS LTD.",
"008073": "DWB ASSOCIATES",
"008074": "FISHER CONTROLS",
"008075": "PARSYTEC GMBH",
"008076": "MCNC",
"008077": "BROTHER INDUSTRIES, LTD.",
"008078": "PRACTICAL PERIPHERALS, INC.",
"008079": "MICROBUS DESIGNS LTD.",
"00807A": "AITECH SYSTEMS LTD.",
"00807B": "ARTEL COMMUNICATIONS CORP.",
"00807C": "FIBERCOM, INC.",
"00807D": "EQUINOX SYSTEMS INC.",
"00807E": "SOUTHERN PACIFIC LTD.",
"00807F": "DY-4 INCORPORATED",
"008080": "DATAMEDIA CORPORATION",
"008081": "KENDALL SQUARE RESEARCH CORP.",
"008082": "PEP MODULAR COMPUTERS GMBH",
"008083": "AMDAHL",
"008084": "THE CLOUD INC.",
"008085": "H-THREE SYSTEMS CORPORATION",
"008086": "COMPUTER GENERATION INC.",
"008087": "OKI ELECTRIC INDUSTRY CO., LTD",
"008088": "VICTOR COMPANY OF JAPAN, LTD.",
"008089": "TECNETICS (PTY) LTD.",
"00808A": "SUMMIT MICROSYSTEMS CORP.",
"00808B": "DACOLL LIMITED",
"00808C": "NetScout Systems, Inc.",
"00808D": "WESTCOAST TECHNOLOGY B.V.",
"00808E": "RADSTONE TECHNOLOGY",
"00808F": "C. ITOH ELECTRONICS, INC.",
"008090": "MICROTEK INTERNATIONAL, INC.",
"008091": "TOKYO ELECTRIC CO.,LTD",
"008092": "Silex Technology, Inc.",
"008093": "XYRON CORPORATION",
"008094": "ALFA LAVAL AUTOMATION AB",
"008095": "BASIC MERTON HANDELSGES.M.B.H.",
"008096": "HUMAN DESIGNED SYSTEMS, INC.",
"008097": "CENTRALP AUTOMATISMES",
"008098": "TDK CORPORATION",
"008099": "Eaton Industries GmbH",
"00809A": "NOVUS NETWORKS LTD",
"00809B": "JUSTSYSTEM CORPORATION",
"00809C": "LUXCOM, INC.",
"00809D": "Commscraft Ltd.",
"00809E": "DATUS GMBH",
"00809F": "ALCATEL BUSINESS SYSTEMS",
"0080A0": "EDISA HEWLETT PACKARD S/A",
"0080A1": "MICROTEST, INC.",
"0080A2": "CREATIVE ELECTRONIC SYSTEMS",
"0080A3": "Lantronix",
"0080A4": "LIBERTY ELECTRONICS",
"0080A5": "SPEED INTERNATIONAL",
"0080A6": "REPUBLIC TECHNOLOGY, INC.",
"0080A7": "Honeywell International Inc",
"0080A8": "VITACOM CORPORATION",
"0080A9": "CLEARPOINT RESEARCH",
"0080AA": "MAXPEED",
"0080AB": "DUKANE NETWORK INTEGRATION",
"0080AC": "IMLOGIX, DIVISION OF GENESYS",
"0080AD": "CNET TECHNOLOGY, INC.",
"0080AE": "HUGHES NETWORK SYSTEMS",
"0080AF": "ALLUMER CO., LTD.",
"0080B0": "ADVANCED INFORMATION",
"0080B1": "SOFTCOM A/S",
"0080B2": "NETWORK EQUIPMENT TECHNOLOGIES",
"0080B3": "AVAL DATA CORPORATION",
"0080B4": "SOPHIA SYSTEMS",
"0080B5": "UNITED NETWORKS INC.",
"0080B6": "THEMIS COMPUTER",
"0080B7": "STELLAR COMPUTER",
"0080B8": "B.U.G. MORISEIKI, INCORPORATED",
"0080B9": "ARCHE TECHNOLIGIES INC.",
"0080BA": "SPECIALIX (ASIA) PTE, LTD",
"0080BB": "HUGHES LAN SYSTEMS",
"0080BC": "HITACHI ENGINEERING CO., LTD",
"0080BD": "THE FURUKAWA ELECTRIC CO., LTD",
"0080BE": "ARIES RESEARCH",
"0080BF": "TAKAOKA ELECTRIC MFG. CO. LTD.",
"0080C0": "PENRIL DATACOMM",
"0080C1": "LANEX CORPORATION",
"0080C2": "IEEE 802.1 COMMITTEE",
"0080C3": "BICC INFORMATION SYSTEMS & SVC",
"0080C4": "DOCUMENT TECHNOLOGIES, INC.",
"0080C5": "NOVELLCO DE MEXICO",
"0080C6": "NATIONAL DATACOMM CORPORATION",
"0080C7": "XIRCOM",
"0080C8": "D-LINK SYSTEMS, INC.",
"0080C9": "ALBERTA MICROELECTRONIC CENTRE",
"0080CA": "NETCOM RESEARCH INCORPORATED",
"0080CB": "FALCO DATA PRODUCTS",
"0080CC": "MICROWAVE BYPASS SYSTEMS",
"0080CD": "MICRONICS COMPUTER, INC.",
"0080CE": "BROADCAST TELEVISION SYSTEMS",
"0080CF": "EMBEDDED PERFORMANCE INC.",
"0080D0": "COMPUTER PERIPHERALS, INC.",
"0080D1": "KIMTRON CORPORATION",
"0080D2": "SHINNIHONDENKO CO., LTD.",
"0080D3": "SHIVA CORP.",
"0080D4": "CHASE RESEARCH LTD.",
"0080D5": "CADRE TECHNOLOGIES",
"0080D6": "NUVOTECH, INC.",
"0080D7": "Fantum Engineering",
"0080D8": "NETWORK PERIPHERALS INC.",
"0080D9": "EMK Elektronik GmbH & Co. KG",
"0080DA": "Bruel & Kjaer Sound & Vibration Measurement A/S",
"0080DB": "GRAPHON CORPORATION",
"0080DC": "PICKER INTERNATIONAL",
"0080DD": "GMX INC/GIMIX",
"0080DE": "GIPSI S.A.",
"0080DF": "ADC CODENOLL TECHNOLOGY CORP.",
"0080E0": "XTP SYSTEMS, INC.",
"0080E1": "STMICROELECTRONICS",
"0080E2": "T.D.I. CO., LTD.",
"0080E3": "CORAL NETWORK CORPORATION",
"0080E4": "NORTHWEST DIGITAL SYSTEMS, INC",
"0080E5": "NetApp, Inc",
"0080E6": "PEER NETWORKS, INC.",
"0080E7": "LYNWOOD SCIENTIFIC DEV. LTD.",
"0080E8": "CUMULUS CORPORATIION",
"0080E9": "Madge Ltd.",
"0080EA": "ADVA Optical Networking Ltd.",
"0080EB": "COMPCONTROL B.V.",
"0080EC": "SUPERCOMPUTING SOLUTIONS, INC.",
"0080ED": "IQ TECHNOLOGIES, INC.",
"0080EE": "THOMSON CSF",
"0080EF": "RATIONAL",
"0080F0": "Panasonic Communications Co., Ltd.",
"0080F1": "OPUS SYSTEMS",
"0080F2": "RAYCOM SYSTEMS INC",
"0080F3": "SUN ELECTRONICS CORP.",
"0080F4": "TELEMECANIQUE ELECTRIQUE",
"0080F5": "Quantel Ltd",
"0080F6": "SYNERGY MICROSYSTEMS",
"0080F7": "ZENITH ELECTRONICS",
"0080F8": "MIZAR, INC.",
"0080F9": "HEURIKON CORPORATION",
"0080FA": "RWT GMBH",
"0080FB": "BVM LIMITED",
"0080FC": "AVATAR CORPORATION",
"0080FD": "EXSCEED CORPRATION",
"0080FE": "AZURE TECHNOLOGIES, INC.",
"0080FF": "SOC. DE TELEINFORMATIQUE RTC",
"0086A0": "PRIVATE",
"008865": "Apple",
"008B43": "RFTECH",
"008C10": "Black Box Corp.",
"008C54": "ADB Broadband Italia",
"008CFA": "Inventec Corporation",
"008D4E": "CJSC NII STT",
"008DDA": "Link One Co., Ltd.",
"008EF2": "NETGEAR INC.,",
"009000": "DIAMOND MULTIMEDIA",
"009001": "NISHIMU ELECTRONICS INDUSTRIES CO., LTD.",
"009002": "ALLGON AB",
"009003": "APLIO",
"009004": "3COM EUROPE LTD.",
"009005": "PROTECH SYSTEMS CO., LTD.",
"009006": "HAMAMATSU PHOTONICS K.K.",
"009007": "DOMEX TECHNOLOGY CORP.",
"009008": "HanA Systems Inc.",
"009009": "I Controls, Inc.",
"00900A": "PROTON ELECTRONIC INDUSTRIAL CO., LTD.",
"00900B": "LANNER ELECTRONICS, INC.",
"00900C": "CISCO SYSTEMS, INC.",
"00900D": "Overland Storage Inc.",
"00900E": "HANDLINK TECHNOLOGIES, INC.",
"00900F": "KAWASAKI HEAVY INDUSTRIES, LTD",
"009010": "SIMULATION LABORATORIES, INC.",
"009011": "WAVTrace, Inc.",
"009012": "GLOBESPAN SEMICONDUCTOR, INC.",
"009013": "SAMSAN CORP.",
"009014": "ROTORK INSTRUMENTS, LTD.",
"009015": "CENTIGRAM COMMUNICATIONS CORP.",
"009016": "ZAC",
"009017": "Zypcom, Inc",
"009018": "ITO ELECTRIC INDUSTRY CO, LTD.",
"009019": "HERMES ELECTRONICS CO., LTD.",
"00901A": "UNISPHERE SOLUTIONS",
"00901B": "DIGITAL CONTROLS",
"00901C": "mps Software Gmbh",
"00901D": "PEC (NZ) LTD.",
"00901E": "Selesta Ingegneria S.p.A.",
"00901F": "ADTEC PRODUCTIONS, INC.",
"009020": "PHILIPS ANALYTICAL X-RAY B.V.",
"009021": "CISCO SYSTEMS, INC.",
"009022": "IVEX",
"009023": "ZILOG INC.",
"009024": "PIPELINKS, INC.",
"009025": "BAE Systems Australia (Electronic Systems) Pty Ltd",
"009026": "ADVANCED SWITCHING COMMUNICATIONS, INC.",
"009027": "INTEL CORPORATION",
"009028": "NIPPON SIGNAL CO., LTD.",
"009029": "CRYPTO AG",
"00902A": "COMMUNICATION DEVICES, INC.",
"00902B": "CISCO SYSTEMS, INC.",
"00902C": "DATA & CONTROL EQUIPMENT LTD.",
"00902D": "DATA ELECTRONICS (AUST.) PTY, LTD.",
"00902E": "NAMCO LIMITED",
"00902F": "NETCORE SYSTEMS, INC.",
"009030": "HONEYWELL-DATING",
"009031": "MYSTICOM, LTD.",
"009032": "PELCOMBE GROUP LTD.",
"009033": "INNOVAPHONE AG",
"009034": "IMAGIC, INC.",
"009035": "ALPHA TELECOM, INC.",
"009036": "ens, inc.",
"009037": "ACUCOMM, INC.",
"009038": "FOUNTAIN TECHNOLOGIES, INC.",
"009039": "SHASTA NETWORKS",
"00903A": "NIHON MEDIA TOOL INC.",
"00903B": "TriEMS Research Lab, Inc.",
"00903C": "ATLANTIC NETWORK SYSTEMS",
"00903D": "BIOPAC SYSTEMS, INC.",
"00903E": "N.V. PHILIPS INDUSTRIAL ACTIVITIES",
"00903F": "AZTEC RADIOMEDIA",
"009040": "Siemens Network Convergence LLC",
"009041": "APPLIED DIGITAL ACCESS",
"009042": "ECCS, Inc.",
"009043": "Tattile SRL",
"009044": "ASSURED DIGITAL, INC.",
"009045": "Marconi Communications",
"009046": "DEXDYNE, LTD.",
"009047": "GIGA FAST E. LTD.",
"009048": "ZEAL CORPORATION",
"009049": "ENTRIDIA CORPORATION",
"00904A": "CONCUR SYSTEM TECHNOLOGIES",
"00904B": "GemTek Technology Co., Ltd.",
"00904C": "EPIGRAM, INC.",
"00904D": "SPEC S.A.",
"00904E": "DELEM BV",
"00904F": "ABB POWER T&D COMPANY, INC.",
"009050": "TELESTE OY",
"009051": "ULTIMATE TECHNOLOGY CORP.",
"009052": "SELCOM ELETTRONICA S.R.L.",
"009053": "DAEWOO ELECTRONICS CO., LTD.",
"009054": "INNOVATIVE SEMICONDUCTORS, INC",
"009055": "PARKER HANNIFIN CORPORATION COMPUMOTOR DIVISION",
"009056": "TELESTREAM, INC.",
"009057": "AANetcom, Inc.",
"009058": "Ultra Electronics Ltd., Command and Control Systems",
"009059": "TELECOM DEVICE K.K.",
"00905A": "DEARBORN GROUP, INC.",
"00905B": "RAYMOND AND LAE ENGINEERING",
"00905C": "EDMI",
"00905D": "NETCOM SICHERHEITSTECHNIK GmbH",
"00905E": "RAULAND-BORG CORPORATION",
"00905F": "CISCO SYSTEMS, INC.",
"009060": "SYSTEM CREATE CORP.",
"009061": "PACIFIC RESEARCH & ENGINEERING CORPORATION",
"009062": "ICP VORTEX COMPUTERSYSTEME GmbH",
"009063": "COHERENT COMMUNICATIONS SYSTEMS CORPORATION",
"009064": "Thomson Inc.",
"009065": "FINISAR CORPORATION",
"009066": "Troika Networks, Inc.",
"009067": "WalkAbout Computers, Inc.",
"009068": "DVT CORP.",
"009069": "JUNIPER NETWORKS, INC.",
"00906A": "TURNSTONE SYSTEMS, INC.",
"00906B": "APPLIED RESOURCES, INC.",
"00906C": "Sartorius Hamburg GmbH",
"00906D": "CISCO SYSTEMS, INC.",
"00906E": "PRAXON, INC.",
"00906F": "CISCO SYSTEMS, INC.",
"009070": "NEO NETWORKS, INC.",
"009071": "Applied Innovation Inc.",
"009072": "SIMRAD AS",
"009073": "GAIO TECHNOLOGY",
"009074": "ARGON NETWORKS, INC.",
"009075": "NEC DO BRASIL S.A.",
"009076": "FMT AIRCRAFT GATE SUPPORT SYSTEMS AB",
"009077": "ADVANCED FIBRE COMMUNICATIONS",
"009078": "MER TELEMANAGEMENT SOLUTIONS, LTD.",
"009079": "ClearOne, Inc.",
"00907A": "Spectralink, Inc",
"00907B": "E-TECH, INC.",
"00907C": "DIGITALCAST, INC.",
"00907D": "Lake Communications",
"00907E": "VETRONIX CORP.",
"00907F": "WatchGuard Technologies, Inc.",
"009080": "NOT LIMITED, INC.",
"009081": "ALOHA NETWORKS, INC.",
"009082": "FORCE INSTITUTE",
"009083": "TURBO COMMUNICATION, INC.",
"009084": "ATECH SYSTEM",
"009085": "GOLDEN ENTERPRISES, INC.",
"009086": "CISCO SYSTEMS, INC.",
"009087": "ITIS",
"009088": "BAXALL SECURITY LTD.",
"009089": "SOFTCOM MICROSYSTEMS, INC.",
"00908A": "BAYLY COMMUNICATIONS, INC.",
"00908B": "Tattile SRL",
"00908C": "ETREND ELECTRONICS, INC.",
"00908D": "VICKERS ELECTRONICS SYSTEMS",
"00908E": "Nortel Networks Broadband Access",
"00908F": "AUDIO CODES LTD.",
"009090": "I-BUS",
"009091": "DigitalScape, Inc.",
"009092": "CISCO SYSTEMS, INC.",
"009093": "NANAO CORPORATION",
"009094": "OSPREY TECHNOLOGIES, INC.",
"009095": "UNIVERSAL AVIONICS",
"009096": "ASKEY COMPUTER CORP.",
"009097": "Sycamore Networks",
"009098": "SBC DESIGNS, INC.",
"009099": "ALLIED TELESIS, K.K.",
"00909A": "ONE WORLD SYSTEMS, INC.",
"00909B": "MARKEM-IMAJE",
"00909C": "ARRIS Group, Inc.",
"00909D": "NovaTech Process Solutions, LLC",
"00909E": "Critical IO, LLC",
"00909F": "DIGI-DATA CORPORATION",
"0090A0": "8X8 INC.",
"0090A1": "Flying Pig Systems/High End Systems Inc.",
"0090A2": "CYBERTAN TECHNOLOGY, INC.",
"0090A3": "Corecess Inc.",
"0090A4": "ALTIGA NETWORKS",
"0090A5": "SPECTRA LOGIC",
"0090A6": "CISCO SYSTEMS, INC.",
"0090A7": "CLIENTEC CORPORATION",
"0090A8": "NineTiles Networks, Ltd.",
"0090A9": "WESTERN DIGITAL",
"0090AA": "INDIGO ACTIVE VISION SYSTEMS LIMITED",
"0090AB": "CISCO SYSTEMS, INC.",
"0090AC": "OPTIVISION, INC.",
"0090AD": "ASPECT ELECTRONICS, INC.",
"0090AE": "ITALTEL S.p.A.",
"0090AF": "J. MORITA MFG. CORP.",
"0090B0": "VADEM",
"0090B1": "CISCO SYSTEMS, INC.",
"0090B2": "AVICI SYSTEMS INC.",
"0090B3": "AGRANAT SYSTEMS",
"0090B4": "WILLOWBROOK TECHNOLOGIES",
"0090B5": "NIKON CORPORATION",
"0090B6": "FIBEX SYSTEMS",
"0090B7": "DIGITAL LIGHTWAVE, INC.",
"0090B8": "ROHDE & SCHWARZ GMBH & CO. KG",
"0090B9": "BERAN INSTRUMENTS LTD.",
"0090BA": "VALID NETWORKS, INC.",
"0090BB": "TAINET COMMUNICATION SYSTEM Corp.",
"0090BC": "TELEMANN CO., LTD.",
"0090BD": "OMNIA COMMUNICATIONS, INC.",
"0090BE": "IBC/INTEGRATED BUSINESS COMPUTERS",
"0090BF": "CISCO SYSTEMS, INC.",
"0090C0": "K.J. LAW ENGINEERS, INC.",
"0090C1": "Peco II, Inc.",
"0090C2": "JK microsystems, Inc.",
"0090C3": "TOPIC SEMICONDUCTOR CORP.",
"0090C4": "JAVELIN SYSTEMS, INC.",
"0090C5": "INTERNET MAGIC, INC.",
"0090C6": "OPTIM SYSTEMS, INC.",
"0090C7": "ICOM INC.",
"0090C8": "WAVERIDER COMMUNICATIONS (CANADA) INC.",
"0090C9": "DPAC Technologies",
"0090CA": "ACCORD VIDEO TELECOMMUNICATIONS, LTD.",
"0090CB": "Wireless OnLine, Inc.",
"0090CC": "Planex Communications",
"0090CD": "ENT-EMPRESA NACIONAL DE TELECOMMUNICACOES, S.A.",
"0090CE": "TETRA GmbH",
"0090CF": "NORTEL",
"0090D0": "Thomson Telecom Belgium",
"0090D1": "LEICHU ENTERPRISE CO., LTD.",
"0090D2": "ARTEL VIDEO SYSTEMS",
"0090D3": "GIESECKE & DEVRIENT GmbH",
"0090D4": "BindView Development Corp.",
"0090D5": "EUPHONIX, INC.",
"0090D6": "CRYSTAL GROUP",
"0090D7": "NetBoost Corp.",
"0090D8": "WHITECROSS SYSTEMS",
"0090D9": "CISCO SYSTEMS, INC.",
"0090DA": "DYNARC, INC.",
"0090DB": "NEXT LEVEL COMMUNICATIONS",
"0090DC": "TECO INFORMATION SYSTEMS",
"0090DD": "MIHARU COMMUNICATIONS Inc",
"0090DE": "CARDKEY SYSTEMS, INC.",
"0090DF": "MITSUBISHI CHEMICAL AMERICA, INC.",
"0090E0": "SYSTRAN CORP.",
"0090E1": "TELENA S.P.A.",
"0090E2": "DISTRIBUTED PROCESSING TECHNOLOGY",
"0090E3": "AVEX ELECTRONICS INC.",
"0090E4": "NEC AMERICA, INC.",
"0090E5": "TEKNEMA, INC.",
"0090E6": "ALi Corporation",
"0090E7": "HORSCH ELEKTRONIK AG",
"0090E8": "MOXA TECHNOLOGIES CORP., LTD.",
"0090E9": "JANZ COMPUTER AG",
"0090EA": "ALPHA TECHNOLOGIES, INC.",
"0090EB": "SENTRY TELECOM SYSTEMS",
"0090EC": "PYRESCOM",
"0090ED": "CENTRAL SYSTEM RESEARCH CO., LTD.",
"0090EE": "PERSONAL COMMUNICATIONS TECHNOLOGIES",
"0090EF": "INTEGRIX, INC.",
"0090F0": "Harmonic Video Systems Ltd.",
"0090F1": "DOT HILL SYSTEMS CORPORATION",
"0090F2": "CISCO SYSTEMS, INC.",
"0090F3": "ASPECT COMMUNICATIONS",
"0090F4": "LIGHTNING INSTRUMENTATION",
"0090F5": "CLEVO CO.",
"0090F6": "ESCALATE NETWORKS, INC.",
"0090F7": "NBASE COMMUNICATIONS LTD.",
"0090F8": "MEDIATRIX TELECOM",
"0090F9": "LEITCH",
"0090FA": "Emulex Corporation",
"0090FB": "PORTWELL, INC.",
"0090FC": "NETWORK COMPUTING DEVICES",
"0090FD": "CopperCom, Inc.",
"0090FE": "ELECOM CO., LTD. (LANEED DIV.)",
"0090FF": "TELLUS TECHNOLOGY INC.",
"0091D6": "Crystal Group, Inc.",
"0091FA": "Synapse Product Development",
"0092FA": "SHENZHEN WISKY TECHNOLOGY CO.,LTD",
"009363": "Uni-Link Technology Co., Ltd.",
"009569": "LSD Science and Technology Co.,Ltd.",
"0097FF": "Heimann Sensor GmbH",
"009C02": "Hewlett-Packard Company",
"009D8E": "CARDIAC RECORDERS, INC.",
"00A000": "CENTILLION NETWORKS, INC.",
"00A001": "DRS Signal Solutions",
"00A002": "LEEDS & NORTHRUP AUSTRALIA PTY LTD",
"00A003": "Siemens Switzerland Ltd., I B T HVP",
"00A004": "NETPOWER, INC.",
"00A005": "DANIEL INSTRUMENTS, LTD.",
"00A006": "IMAGE DATA PROCESSING SYSTEM GROUP",
"00A007": "APEXX TECHNOLOGY, INC.",
"00A008": "NETCORP",
"00A009": "WHITETREE NETWORK",
"00A00A": "Airspan",
"00A00B": "COMPUTEX CO., LTD.",
"00A00C": "KINGMAX TECHNOLOGY, INC.",
"00A00D": "THE PANDA PROJECT",
"00A00E": "VISUAL NETWORKS, INC.",
"00A00F": "Broadband Technologies",
"00A010": "SYSLOGIC DATENTECHNIK AG",
"00A011": "MUTOH INDUSTRIES LTD.",
"00A012": "Telco Systems, Inc.",
"00A013": "TELTREND LTD.",
"00A014": "CSIR",
"00A015": "WYLE",
"00A016": "MICROPOLIS CORP.",
"00A017": "J B M CORPORATION",
"00A018": "CREATIVE CONTROLLERS, INC.",
"00A019": "NEBULA CONSULTANTS, INC.",
"00A01A": "BINAR ELEKTRONIK AB",
"00A01B": "PREMISYS COMMUNICATIONS, INC.",
"00A01C": "NASCENT NETWORKS CORPORATION",
"00A01D": "SIXNET",
"00A01E": "EST CORPORATION",
"00A01F": "TRICORD SYSTEMS, INC.",
"00A020": "CITICORP/TTI",
"00A021": "General Dynamics",
"00A022": "CENTRE FOR DEVELOPMENT OF ADVANCED COMPUTING",
"00A023": "APPLIED CREATIVE TECHNOLOGY, INC.",
"00A024": "3COM CORPORATION",
"00A025": "REDCOM LABS INC.",
"00A026": "TELDAT, S.A.",
"00A027": "FIREPOWER SYSTEMS, INC.",
"00A028": "CONNER PERIPHERALS",
"00A029": "COULTER CORPORATION",
"00A02A": "TRANCELL SYSTEMS",
"00A02B": "TRANSITIONS RESEARCH CORP.",
"00A02C": "interWAVE Communications",
"00A02D": "1394 Trade Association",
"00A02E": "BRAND COMMUNICATIONS, LTD.",
"00A02F": "PIRELLI CAVI",
"00A030": "CAPTOR NV/SA",
"00A031": "HAZELTINE CORPORATION, MS 1-17",
"00A032": "GES SINGAPORE PTE. LTD.",
"00A033": "imc MeBsysteme GmbH",
"00A034": "AXEL",
"00A035": "CYLINK CORPORATION",
"00A036": "APPLIED NETWORK TECHNOLOGY",
"00A037": "Mindray DS USA, Inc.",
"00A038": "EMAIL ELECTRONICS",
"00A039": "ROSS TECHNOLOGY, INC.",
"00A03A": "KUBOTEK CORPORATION",
"00A03B": "TOSHIN ELECTRIC CO., LTD.",
"00A03C": "EG&G NUCLEAR INSTRUMENTS",
"00A03D": "OPTO-22",
"00A03E": "ATM FORUM",
"00A03F": "COMPUTER SOCIETY MICROPROCESSOR & MICROPROCESSOR STANDARDS C",
"00A040": "Apple",
"00A041": "INFICON",
"00A042": "SPUR PRODUCTS CORP.",
"00A043": "AMERICAN TECHNOLOGY LABS, INC.",
"00A044": "NTT IT CO., LTD.",
"00A045": "PHOENIX CONTACT GMBH & CO.",
"00A046": "SCITEX CORP. LTD.",
"00A047": "INTEGRATED FITNESS CORP.",
"00A048": "QUESTECH, LTD.",
"00A049": "DIGITECH INDUSTRIES, INC.",
"00A04A": "NISSHIN ELECTRIC CO., LTD.",
"00A04B": "TFL LAN INC.",
"00A04C": "INNOVATIVE SYSTEMS & TECHNOLOGIES, INC.",
"00A04D": "EDA INSTRUMENTS, INC.",
"00A04E": "VOELKER TECHNOLOGIES, INC.",
"00A04F": "AMERITEC CORP.",
"00A050": "CYPRESS SEMICONDUCTOR",
"00A051": "ANGIA COMMUNICATIONS. INC.",
"00A052": "STANILITE ELECTRONICS PTY. LTD",
"00A053": "COMPACT DEVICES, INC.",
"00A054": "PRIVATE",
"00A055": "Data Device Corporation",
"00A056": "MICROPROSS",
"00A057": "LANCOM Systems GmbH",
"00A058": "GLORY, LTD.",
"00A059": "HAMILTON HALLMARK",
"00A05A": "KOFAX IMAGE PRODUCTS",
"00A05B": "MARQUIP, INC.",
"00A05C": "INVENTORY CONVERSION, INC./",
"00A05D": "CS COMPUTER SYSTEME GmbH",
"00A05E": "MYRIAD LOGIC INC.",
"00A05F": "BTG Electronics Design BV",
"00A060": "ACER PERIPHERALS, INC.",
"00A061": "PURITAN BENNETT",
"00A062": "AES PRODATA",
"00A063": "JRL SYSTEMS, INC.",
"00A064": "KVB/ANALECT",
"00A065": "Symantec Corporation",
"00A066": "ISA CO., LTD.",
"00A067": "NETWORK SERVICES GROUP",
"00A068": "BHP LIMITED",
"00A069": "Symmetricom, Inc.",
"00A06A": "Verilink Corporation",
"00A06B": "DMS DORSCH MIKROSYSTEM GMBH",
"00A06C": "SHINDENGEN ELECTRIC MFG. CO., LTD.",
"00A06D": "MANNESMANN TALLY CORPORATION",
"00A06E": "AUSTRON, INC.",
"00A06F": "THE APPCON GROUP, INC.",
"00A070": "COASTCOM",
"00A071": "VIDEO LOTTERY TECHNOLOGIES,INC",
"00A072": "OVATION SYSTEMS LTD.",
"00A073": "COM21, INC.",
"00A074": "PERCEPTION TECHNOLOGY",
"00A075": "MICRON TECHNOLOGY, INC.",
"00A076": "CARDWARE LAB, INC.",
"00A077": "FUJITSU NEXION, INC.",
"00A078": "Marconi Communications",
"00A079": "ALPS ELECTRIC (USA), INC.",
"00A07A": "ADVANCED PERIPHERALS TECHNOLOGIES, INC.",
"00A07B": "DAWN COMPUTER INCORPORATION",
"00A07C": "TONYANG NYLON CO., LTD.",
"00A07D": "SEEQ TECHNOLOGY, INC.",
"00A07E": "AVID TECHNOLOGY, INC.",
"00A07F": "GSM-SYNTEL, LTD.",
"00A080": "Tattile SRL",
"00A081": "ALCATEL DATA NETWORKS",
"00A082": "NKT ELEKTRONIK A/S",
"00A083": "ASIMMPHONY TURKEY",
"00A084": "Dataplex Pty Ltd",
"00A085": "PRIVATE",
"00A086": "AMBER WAVE SYSTEMS, INC.",
"00A087": "Microsemi Corporation",
"00A088": "ESSENTIAL COMMUNICATIONS",
"00A089": "XPOINT TECHNOLOGIES, INC.",
"00A08A": "BROOKTROUT TECHNOLOGY, INC.",
"00A08B": "ASTON ELECTRONIC DESIGNS LTD.",
"00A08C": "MultiMedia LANs, Inc.",
"00A08D": "JACOMO CORPORATION",
"00A08E": "Check Point Software Technologies",
"00A08F": "DESKNET SYSTEMS, INC.",
"00A090": "TimeStep Corporation",
"00A091": "APPLICOM INTERNATIONAL",
"00A092": "H. BOLLMANN MANUFACTURERS, LTD",
"00A093": "B/E AEROSPACE, Inc.",
"00A094": "COMSAT CORPORATION",
"00A095": "ACACIA NETWORKS, INC.",
"00A096": "MITSUMI ELECTRIC CO., LTD.",
"00A097": "JC INFORMATION SYSTEMS",
"00A098": "NetApp",
"00A099": "K-NET LTD.",
"00A09A": "NIHON KOHDEN AMERICA",
"00A09B": "QPSX COMMUNICATIONS, LTD.",
"00A09C": "Xyplex, Inc.",
"00A09D": "JOHNATHON FREEMAN TECHNOLOGIES",
"00A09E": "ICTV",
"00A09F": "COMMVISION CORP.",
"00A0A0": "COMPACT DATA, LTD.",
"00A0A1": "EPIC DATA INC.",
"00A0A2": "DIGICOM S.P.A.",
"00A0A3": "RELIABLE POWER METERS",
"00A0A4": "MICROS SYSTEMS, INC.",
"00A0A5": "TEKNOR MICROSYSTEME, INC.",
"00A0A6": "M.I. SYSTEMS, K.K.",
"00A0A7": "VORAX CORPORATION",
"00A0A8": "RENEX CORPORATION",
"00A0A9": "NAVTEL COMMUNICATIONS INC.",
"00A0AA": "SPACELABS MEDICAL",
"00A0AB": "NETCS INFORMATIONSTECHNIK GMBH",
"00A0AC": "GILAT SATELLITE NETWORKS, LTD.",
"00A0AD": "MARCONI SPA",
"00A0AE": "NUCOM SYSTEMS, INC.",
"00A0AF": "WMS INDUSTRIES",
"00A0B0": "I-O DATA DEVICE, INC.",
"00A0B1": "FIRST VIRTUAL CORPORATION",
"00A0B2": "SHIMA SEIKI",
"00A0B3": "ZYKRONIX",
"00A0B4": "TEXAS MICROSYSTEMS, INC.",
"00A0B5": "3H TECHNOLOGY",
"00A0B6": "SANRITZ AUTOMATION CO., LTD.",
"00A0B7": "CORDANT, INC.",
"00A0B8": "SYMBIOS LOGIC INC.",
"00A0B9": "EAGLE TECHNOLOGY, INC.",
"00A0BA": "PATTON ELECTRONICS CO.",
"00A0BB": "HILAN GMBH",
"00A0BC": "VIASAT, INCORPORATED",
"00A0BD": "I-TECH CORP.",
"00A0BE": "INTEGRATED CIRCUIT SYSTEMS, INC. COMMUNICATIONS GROUP",
"00A0BF": "WIRELESS DATA GROUP MOTOROLA",
"00A0C0": "DIGITAL LINK CORP.",
"00A0C1": "ORTIVUS MEDICAL AB",
"00A0C2": "R.A. SYSTEMS CO., LTD.",
"00A0C3": "UNICOMPUTER GMBH",
"00A0C4": "CRISTIE ELECTRONICS LTD.",
"00A0C5": "ZYXEL COMMUNICATION",
"00A0C6": "QUALCOMM INCORPORATED",
"00A0C7": "TADIRAN TELECOMMUNICATIONS",
"00A0C8": "ADTRAN INC.",
"00A0C9": "INTEL CORPORATION - HF1-06",
"00A0CA": "FUJITSU DENSO LTD.",
"00A0CB": "ARK TELECOMMUNICATIONS, INC.",
"00A0CC": "LITE-ON COMMUNICATIONS, INC.",
"00A0CD": "DR. JOHANNES HEIDENHAIN GmbH",
"00A0CE": "Ecessa",
"00A0CF": "SOTAS, INC.",
"00A0D0": "TEN X TECHNOLOGY, INC.",
"00A0D1": "INVENTEC CORPORATION",
"00A0D2": "ALLIED TELESIS INTERNATIONAL CORPORATION",
"00A0D3": "INSTEM COMPUTER SYSTEMS, LTD.",
"00A0D4": "RADIOLAN, INC.",
"00A0D5": "SIERRA WIRELESS INC.",
"00A0D6": "SBE, INC.",
"00A0D7": "KASTEN CHASE APPLIED RESEARCH",
"00A0D8": "SPECTRA - TEK",
"00A0D9": "CONVEX COMPUTER CORPORATION",
"00A0DA": "INTEGRATED SYSTEMS Technology, Inc.",
"00A0DB": "FISHER & PAYKEL PRODUCTION",
"00A0DC": "O.N. ELECTRONIC CO., LTD.",
"00A0DD": "AZONIX CORPORATION",
"00A0DE": "YAMAHA CORPORATION",
"00A0DF": "STS TECHNOLOGIES, INC.",
"00A0E0": "TENNYSON TECHNOLOGIES PTY LTD",
"00A0E1": "WESTPORT RESEARCH ASSOCIATES, INC.",
"00A0E2": "Keisokugiken Corporation",
"00A0E3": "XKL SYSTEMS CORP.",
"00A0E4": "OPTIQUEST",
"00A0E5": "NHC COMMUNICATIONS",
"00A0E6": "DIALOGIC CORPORATION",
"00A0E7": "CENTRAL DATA CORPORATION",
"00A0E8": "REUTERS HOLDINGS PLC",
"00A0E9": "ELECTRONIC RETAILING SYSTEMS INTERNATIONAL",
"00A0EA": "ETHERCOM CORP.",
"00A0EB": "Encore Networks, Inc.",
"00A0EC": "TRANSMITTON LTD.",
"00A0ED": "Brooks Automation, Inc.",
"00A0EE": "NASHOBA NETWORKS",
"00A0EF": "LUCIDATA LTD.",
"00A0F0": "TORONTO MICROELECTRONICS INC.",
"00A0F1": "MTI",
"00A0F2": "INFOTEK COMMUNICATIONS, INC.",
"00A0F3": "STAUBLI",
"00A0F4": "GE",
"00A0F5": "RADGUARD LTD.",
"00A0F6": "AutoGas Systems Inc.",
"00A0F7": "V.I COMPUTER CORP.",
"00A0F8": "SYMBOL TECHNOLOGIES, INC.",
"00A0F9": "BINTEC COMMUNICATIONS GMBH",
"00A0FA": "Marconi Communication GmbH",
"00A0FB": "TORAY ENGINEERING CO., LTD.",
"00A0FC": "IMAGE SCIENCES, INC.",
"00A0FD": "SCITEX DIGITAL PRINTING, INC.",
"00A0FE": "BOSTON TECHNOLOGY, INC.",
"00A0FF": "TELLABS OPERATIONS, INC.",
"00A1DE": "ShenZhen ShiHua Technology CO.,LTD",
"00A2DA": "INAT GmbH",
"00A2FF": "abatec group AG",
"00AA00": "INTEL CORPORATION",
"00AA01": "INTEL CORPORATION",
"00AA02": "INTEL CORPORATION",
"00AA3C": "OLIVETTI TELECOM SPA (OLTECO)",
"00AA70": "LG Electronics",
"00ACE0": "ARRIS Group, Inc.",
"00B009": "Grass Valley Group",
"00B017": "InfoGear Technology Corp.",
"00B019": "UTC CCS",
"00B01C": "Westport Technologies",
"00B01E": "Rantic Labs, Inc.",
"00B02A": "ORSYS GmbH",
"00B02D": "ViaGate Technologies, Inc.",
"00B033": "OAO \"Izhevskiy radiozavod\"",
"00B03B": "HiQ Networks",
"00B048": "Marconi Communications Inc.",
"00B04A": "CISCO SYSTEMS, INC.",
"00B052": "Atheros Communications",
"00B064": "CISCO SYSTEMS, INC.",
"00B069": "Honewell Oy",
"00B06D": "Jones Futurex Inc.",
"00B080": "Mannesmann Ipulsys B.V.",
"00B086": "LocSoft Limited",
"00B08E": "CISCO SYSTEMS, INC.",
"00B091": "Transmeta Corp.",
"00B094": "Alaris, Inc.",
"00B09A": "Morrow Technologies Corp.",
"00B09D": "Point Grey Research Inc.",
"00B0AC": "SIAE-Microelettronica S.p.A.",
"00B0AE": "Symmetricom",
"00B0B3": "Xstreamis PLC",
"00B0C2": "CISCO SYSTEMS, INC.",
"00B0C7": "Tellabs Operations, Inc.",
"00B0CE": "TECHNOLOGY RESCUE",
"00B0D0": "Dell Computer Corp.",
"00B0DB": "Nextcell, Inc.",
"00B0DF": "Starboard Storage Systems",
"00B0E7": "British Federal Ltd.",
"00B0EC": "EACEM",
"00B0EE": "Ajile Systems, Inc.",
"00B0F0": "CALY NETWORKS",
"00B0F5": "NetWorth Technologies, Inc.",
"00B338": "Kontron Design Manufacturing Services (M) Sdn. Bhd",
"00B342": "MacroSAN Technologies Co., Ltd.",
"00B56D": "David Electronics Co., LTD.",
"00B5D6": "Omnibit Inc.",
"00B78D": "Nanjing Shining Electric Automation Co., Ltd",
"00B9F6": "Shenzhen Super Rich Electronics Co.,Ltd",
"00BAC0": "Biometric Access Company",
"00BB01": "OCTOTHORPE CORP.",
"00BB3A": "PRIVATE",
"00BB8E": "HME Co., Ltd.",
"00BBF0": "UNGERMANN-BASS INC.",
"00BD27": "Exar Corp.",
"00BD3A": "Nokia Corporation",
"00BF15": "Genetec Inc.",
"00C000": "LANOPTICS, LTD.",
"00C001": "DIATEK PATIENT MANAGMENT",
"00C002": "SERCOMM CORPORATION",
"00C003": "GLOBALNET COMMUNICATIONS",
"00C004": "JAPAN BUSINESS COMPUTER CO.LTD",
"00C005": "LIVINGSTON ENTERPRISES, INC.",
"00C006": "NIPPON AVIONICS CO., LTD.",
"00C007": "PINNACLE DATA SYSTEMS, INC.",
"00C008": "SECO SRL",
"00C009": "KT TECHNOLOGY (S) PTE LTD",
"00C00A": "MICRO CRAFT",
"00C00B": "NORCONTROL A.S.",
"00C00C": "RELIA TECHNOLGIES",
"00C00D": "ADVANCED LOGIC RESEARCH, INC.",
"00C00E": "PSITECH, INC.",
"00C00F": "QUANTUM SOFTWARE SYSTEMS LTD.",
"00C010": "HIRAKAWA HEWTECH CORP.",
"00C011": "INTERACTIVE COMPUTING DEVICES",
"00C012": "NETSPAN CORPORATION",
"00C013": "NETRIX",
"00C014": "TELEMATICS CALABASAS INT'L,INC",
"00C015": "NEW MEDIA CORPORATION",
"00C016": "ELECTRONIC THEATRE CONTROLS",
"00C017": "Fluke Corporation",
"00C018": "LANART CORPORATION",
"00C019": "LEAP TECHNOLOGY, INC.",
"00C01A": "COROMETRICS MEDICAL SYSTEMS",
"00C01B": "SOCKET COMMUNICATIONS, INC.",
"00C01C": "INTERLINK COMMUNICATIONS LTD.",
"00C01D": "GRAND JUNCTION NETWORKS, INC.",
"00C01E": "LA FRANCAISE DES JEUX",
"00C01F": "S.E.R.C.E.L.",
"00C020": "ARCO ELECTRONIC, CONTROL LTD.",
"00C021": "NETEXPRESS",
"00C022": "LASERMASTER TECHNOLOGIES, INC.",
"00C023": "TUTANKHAMON ELECTRONICS",
"00C024": "EDEN SISTEMAS DE COMPUTACAO SA",
"00C025": "DATAPRODUCTS CORPORATION",
"00C026": "LANS TECHNOLOGY CO., LTD.",
"00C027": "CIPHER SYSTEMS, INC.",
"00C028": "JASCO CORPORATION",
"00C029": "Nexans Deutschland GmbH - ANS",
"00C02A": "OHKURA ELECTRIC CO., LTD.",
"00C02B": "GERLOFF GESELLSCHAFT FUR",
"00C02C": "CENTRUM COMMUNICATIONS, INC.",
"00C02D": "FUJI PHOTO FILM CO., LTD.",
"00C02E": "NETWIZ",
"00C02F": "OKUMA CORPORATION",
"00C030": "INTEGRATED ENGINEERING B. V.",
"00C031": "DESIGN RESEARCH SYSTEMS, INC.",
"00C032": "I-CUBED LIMITED",
"00C033": "TELEBIT COMMUNICATIONS APS",
"00C034": "TRANSACTION NETWORK",
"00C035": "QUINTAR COMPANY",
"00C036": "RAYTECH ELECTRONIC CORP.",
"00C037": "DYNATEM",
"00C038": "RASTER IMAGE PROCESSING SYSTEM",
"00C039": "Teridian Semiconductor Corporation",
"00C03A": "MEN-MIKRO ELEKTRONIK GMBH",
"00C03B": "MULTIACCESS COMPUTING CORP.",
"00C03C": "TOWER TECH S.R.L.",
"00C03D": "WIESEMANN & THEIS GMBH",
"00C03E": "FA. GEBR. HELLER GMBH",
"00C03F": "STORES AUTOMATED SYSTEMS, INC.",
"00C040": "ECCI",
"00C041": "DIGITAL TRANSMISSION SYSTEMS",
"00C042": "DATALUX CORP.",
"00C043": "STRATACOM",
"00C044": "EMCOM CORPORATION",
"00C045": "ISOLATION SYSTEMS, LTD.",
"00C046": "Blue Chip Technology Ltd",
"00C047": "UNIMICRO SYSTEMS, INC.",
"00C048": "BAY TECHNICAL ASSOCIATES",
"00C049": "U.S. ROBOTICS, INC.",
"00C04A": "GROUP 2000 AG",
"00C04B": "CREATIVE MICROSYSTEMS",
"00C04C": "DEPARTMENT OF FOREIGN AFFAIRS",
"00C04D": "MITEC, INC.",
"00C04E": "COMTROL CORPORATION",
"00C04F": "DELL COMPUTER CORPORATION",
"00C050": "TOYO DENKI SEIZO K.K.",
"00C051": "ADVANCED INTEGRATION RESEARCH",
"00C052": "BURR-BROWN",
"00C053": "Aspect Software Inc.",
"00C054": "NETWORK PERIPHERALS, LTD.",
"00C055": "MODULAR COMPUTING TECHNOLOGIES",
"00C056": "SOMELEC",
"00C057": "MYCO ELECTRONICS",
"00C058": "DATAEXPERT CORP.",
"00C059": "DENSO CORPORATION",
"00C05A": "SEMAPHORE COMMUNICATIONS CORP.",
"00C05B": "NETWORKS NORTHWEST, INC.",
"00C05C": "ELONEX PLC",
"00C05D": "L&N TECHNOLOGIES",
"00C05E": "VARI-LITE, INC.",
"00C05F": "FINE-PAL COMPANY LIMITED",
"00C060": "ID SCANDINAVIA AS",
"00C061": "SOLECTEK CORPORATION",
"00C062": "IMPULSE TECHNOLOGY",
"00C063": "MORNING STAR TECHNOLOGIES, INC",
"00C064": "GENERAL DATACOMM IND. INC.",
"00C065": "SCOPE COMMUNICATIONS, INC.",
"00C066": "DOCUPOINT, INC.",
"00C067": "UNITED BARCODE INDUSTRIES",
"00C068": "HME Clear-Com LTD.",
"00C069": "Axxcelera Broadband Wireless",
"00C06A": "ZAHNER-ELEKTRIK GMBH & CO. KG",
"00C06B": "OSI PLUS CORPORATION",
"00C06C": "SVEC COMPUTER CORP.",
"00C06D": "BOCA RESEARCH, INC.",
"00C06E": "HAFT TECHNOLOGY, INC.",
"00C06F": "KOMATSU LTD.",
"00C070": "SECTRA SECURE-TRANSMISSION AB",
"00C071": "AREANEX COMMUNICATIONS, INC.",
"00C072": "KNX LTD.",
"00C073": "XEDIA CORPORATION",
"00C074": "TOYODA AUTOMATIC LOOM",
"00C075": "XANTE CORPORATION",
"00C076": "I-DATA INTERNATIONAL A-S",
"00C077": "DAEWOO TELECOM LTD.",
"00C078": "COMPUTER SYSTEMS ENGINEERING",
"00C079": "FONSYS CO.,LTD.",
"00C07A": "PRIVA B.V.",
"00C07B": "ASCEND COMMUNICATIONS, INC.",
"00C07C": "HIGHTECH INFORMATION",
"00C07D": "RISC DEVELOPMENTS LTD.",
"00C07E": "KUBOTA CORPORATION ELECTRONIC",
"00C07F": "NUPON COMPUTING CORP.",
"00C080": "NETSTAR, INC.",
"00C081": "METRODATA LTD.",
"00C082": "MOORE PRODUCTS CO.",
"00C083": "TRACE MOUNTAIN PRODUCTS, INC.",
"00C084": "DATA LINK CORP. LTD.",
"00C085": "ELECTRONICS FOR IMAGING, INC.",
"00C086": "THE LYNK CORPORATION",
"00C087": "UUNET TECHNOLOGIES, INC.",
"00C088": "EKF ELEKTRONIK GMBH",
"00C089": "TELINDUS DISTRIBUTION",
"00C08A": "Lauterbach GmbH",
"00C08B": "RISQ MODULAR SYSTEMS, INC.",
"00C08C": "PERFORMANCE TECHNOLOGIES, INC.",
"00C08D": "TRONIX PRODUCT DEVELOPMENT",
"00C08E": "NETWORK INFORMATION TECHNOLOGY",
"00C08F": "Panasonic Electric Works Co., Ltd.",
"00C090": "PRAIM S.R.L.",
"00C091": "JABIL CIRCUIT, INC.",
"00C092": "MENNEN MEDICAL INC.",
"00C093": "ALTA RESEARCH CORP.",
"00C094": "VMX INC.",
"00C095": "ZNYX",
"00C096": "TAMURA CORPORATION",
"00C097": "ARCHIPEL SA",
"00C098": "CHUNTEX ELECTRONIC CO., LTD.",
"00C099": "YOSHIKI INDUSTRIAL CO.,LTD.",
"00C09A": "PHOTONICS CORPORATION",
"00C09B": "RELIANCE COMM/TEC, R-TEC",
"00C09C": "HIOKI E.E. CORPORATION",
"00C09D": "DISTRIBUTED SYSTEMS INT'L, INC",
"00C09E": "CACHE COMPUTERS, INC.",
"00C09F": "QUANTA COMPUTER, INC.",
"00C0A0": "ADVANCE MICRO RESEARCH, INC.",
"00C0A1": "TOKYO DENSHI SEKEI CO.",
"00C0A2": "INTERMEDIUM A/S",
"00C0A3": "DUAL ENTERPRISES CORPORATION",
"00C0A4": "UNIGRAF OY",
"00C0A5": "DICKENS DATA SYSTEMS",
"00C0A6": "EXICOM AUSTRALIA PTY. LTD",
"00C0A7": "SEEL LTD.",
"00C0A8": "GVC CORPORATION",
"00C0A9": "BARRON MCCANN LTD.",
"00C0AA": "SILICON VALLEY COMPUTER",
"00C0AB": "Telco Systems, Inc.",
"00C0AC": "GAMBIT COMPUTER COMMUNICATIONS",
"00C0AD": "MARBEN COMMUNICATION SYSTEMS",
"00C0AE": "TOWERCOM CO. INC. DBA PC HOUSE",
"00C0AF": "TEKLOGIX INC.",
"00C0B0": "GCC TECHNOLOGIES,INC.",
"00C0B1": "GENIUS NET CO.",
"00C0B2": "NORAND CORPORATION",
"00C0B3": "COMSTAT DATACOMM CORPORATION",
"00C0B4": "MYSON TECHNOLOGY, INC.",
"00C0B5": "CORPORATE NETWORK SYSTEMS,INC.",
"00C0B6": "Overland Storage, Inc.",
"00C0B7": "AMERICAN POWER CONVERSION CORP",
"00C0B8": "FRASER'S HILL LTD.",
"00C0B9": "FUNK SOFTWARE, INC.",
"00C0BA": "NETVANTAGE",
"00C0BB": "FORVAL CREATIVE, INC.",
"00C0BC": "TELECOM AUSTRALIA/CSSC",
"00C0BD": "INEX TECHNOLOGIES, INC.",
"00C0BE": "ALCATEL - SEL",
"00C0BF": "TECHNOLOGY CONCEPTS, LTD.",
"00C0C0": "SHORE MICROSYSTEMS, INC.",
"00C0C1": "QUAD/GRAPHICS, INC.",
"00C0C2": "INFINITE NETWORKS LTD.",
"00C0C3": "ACUSON COMPUTED SONOGRAPHY",
"00C0C4": "COMPUTER OPERATIONAL",
"00C0C5": "SID INFORMATICA",
"00C0C6": "PERSONAL MEDIA CORP.",
"00C0C7": "SPARKTRUM MICROSYSTEMS, INC.",
"00C0C8": "MICRO BYTE PTY. LTD.",
"00C0C9": "ELSAG BAILEY PROCESS",
"00C0CA": "ALFA, INC.",
"00C0CB": "CONTROL TECHNOLOGY CORPORATION",
"00C0CC": "TELESCIENCES CO SYSTEMS, INC.",
"00C0CD": "COMELTA, S.A.",
"00C0CE": "CEI SYSTEMS & ENGINEERING PTE",
"00C0CF": "IMATRAN VOIMA OY",
"00C0D0": "RATOC SYSTEM INC.",
"00C0D1": "COMTREE TECHNOLOGY CORPORATION",
"00C0D2": "SYNTELLECT, INC.",
"00C0D3": "OLYMPUS IMAGE SYSTEMS, INC.",
"00C0D4": "AXON NETWORKS, INC.",
"00C0D5": "Werbeagentur J\u00fcrgen Siebert",
"00C0D6": "J1 SYSTEMS, INC.",
"00C0D7": "TAIWAN TRADING CENTER DBA",
"00C0D8": "UNIVERSAL DATA SYSTEMS",
"00C0D9": "QUINTE NETWORK CONFIDENTIALITY",
"00C0DA": "NICE SYSTEMS LTD.",
"00C0DB": "IPC CORPORATION (PTE) LTD.",
"00C0DC": "EOS TECHNOLOGIES, INC.",
"00C0DD": "QLogic Corporation",
"00C0DE": "ZCOMM, INC.",
"00C0DF": "KYE Systems Corp.",
"00C0E0": "DSC COMMUNICATION CORP.",
"00C0E1": "SONIC SOLUTIONS",
"00C0E2": "CALCOMP, INC.",
"00C0E3": "OSITECH COMMUNICATIONS, INC.",
"00C0E4": "SIEMENS BUILDING",
"00C0E5": "GESPAC, S.A.",
"00C0E6": "Verilink Corporation",
"00C0E7": "FIBERDATA AB",
"00C0E8": "PLEXCOM, INC.",
"00C0E9": "OAK SOLUTIONS, LTD.",
"00C0EA": "ARRAY TECHNOLOGY LTD.",
"00C0EB": "SEH COMPUTERTECHNIK GMBH",
"00C0EC": "DAUPHIN TECHNOLOGY",
"00C0ED": "US ARMY ELECTRONIC",
"00C0EE": "KYOCERA CORPORATION",
"00C0EF": "ABIT CORPORATION",
"00C0F0": "KINGSTON TECHNOLOGY CORP.",
"00C0F1": "SHINKO ELECTRIC CO., LTD.",
"00C0F2": "TRANSITION NETWORKS",
"00C0F3": "NETWORK COMMUNICATIONS CORP.",
"00C0F4": "INTERLINK SYSTEM CO., LTD.",
"00C0F5": "METACOMP, INC.",
"00C0F6": "CELAN TECHNOLOGY INC.",
"00C0F7": "ENGAGE COMMUNICATION, INC.",
"00C0F8": "ABOUT COMPUTING INC.",
"00C0F9": "Emerson Network Power",
"00C0FA": "CANARY COMMUNICATIONS, INC.",
"00C0FB": "ADVANCED TECHNOLOGY LABS",
"00C0FC": "ELASTIC REALITY, INC.",
"00C0FD": "PROSUM",
"00C0FE": "APTEC COMPUTER SYSTEMS, INC.",
"00C0FF": "DOT HILL SYSTEMS CORPORATION",
"00C14F": "DDL Co,.ltd.",
"00C2C6": "Intel Corporate",
"00C5DB": "Datatech Sistemas Digitales Avanzados SL",
"00C610": "Apple",
"00CBBD": "Cambridge Broadband Networks Ltd.",
"00CD90": "MAS Elektronik AG",
"00CF1C": "COMMUNICATION MACHINERY CORP.",
"00D000": "FERRAN SCIENTIFIC, INC.",
"00D001": "VST TECHNOLOGIES, INC.",
"00D002": "DITECH CORPORATION",
"00D003": "COMDA ENTERPRISES CORP.",
"00D004": "PENTACOM LTD.",
"00D005": "ZHS ZEITMANAGEMENTSYSTEME",
"00D006": "CISCO SYSTEMS, INC.",
"00D007": "MIC ASSOCIATES, INC.",
"00D008": "MACTELL CORPORATION",
"00D009": "HSING TECH. ENTERPRISE CO. LTD",
"00D00A": "LANACCESS TELECOM S.A.",
"00D00B": "RHK TECHNOLOGY, INC.",
"00D00C": "SNIJDER MICRO SYSTEMS",
"00D00D": "MICROMERITICS INSTRUMENT",
"00D00E": "PLURIS, INC.",
"00D00F": "SPEECH DESIGN GMBH",
"00D010": "CONVERGENT NETWORKS, INC.",
"00D011": "PRISM VIDEO, INC.",
"00D012": "GATEWORKS CORP.",
"00D013": "PRIMEX AEROSPACE COMPANY",
"00D014": "ROOT, INC.",
"00D015": "UNIVEX MICROTECHNOLOGY CORP.",
"00D016": "SCM MICROSYSTEMS, INC.",
"00D017": "SYNTECH INFORMATION CO., LTD.",
"00D018": "QWES. COM, INC.",
"00D019": "DAINIPPON SCREEN CORPORATE",
"00D01A": "URMET TLC S.P.A.",
"00D01B": "MIMAKI ENGINEERING CO., LTD.",
"00D01C": "SBS TECHNOLOGIES,",
"00D01D": "FURUNO ELECTRIC CO., LTD.",
"00D01E": "PINGTEL CORP.",
"00D01F": "CTAM PTY. LTD.",
"00D020": "AIM SYSTEM, INC.",
"00D021": "REGENT ELECTRONICS CORP.",
"00D022": "INCREDIBLE TECHNOLOGIES, INC.",
"00D023": "INFORTREND TECHNOLOGY, INC.",
"00D024": "Cognex Corporation",
"00D025": "XROSSTECH, INC.",
"00D026": "HIRSCHMANN AUSTRIA GMBH",
"00D027": "APPLIED AUTOMATION, INC.",
"00D028": "Harmonic, Inc",
"00D029": "WAKEFERN FOOD CORPORATION",
"00D02A": "Voxent Systems Ltd.",
"00D02B": "JETCELL, INC.",
"00D02C": "CAMPBELL SCIENTIFIC, INC.",
"00D02D": "ADEMCO",
"00D02E": "COMMUNICATION AUTOMATION CORP.",
"00D02F": "VLSI TECHNOLOGY INC.",
"00D030": "Safetran Systems Corp",
"00D031": "INDUSTRIAL LOGIC CORPORATION",
"00D032": "YANO ELECTRIC CO., LTD.",
"00D033": "DALIAN DAXIAN NETWORK",
"00D034": "ORMEC SYSTEMS CORP.",
"00D035": "BEHAVIOR TECH. COMPUTER CORP.",
"00D036": "TECHNOLOGY ATLANTA CORP.",
"00D037": "Pace France",
"00D038": "FIVEMERE, LTD.",
"00D039": "UTILICOM, INC.",
"00D03A": "ZONEWORX, INC.",
"00D03B": "VISION PRODUCTS PTY. LTD.",
"00D03C": "Vieo, Inc.",
"00D03D": "GALILEO TECHNOLOGY, LTD.",
"00D03E": "ROCKETCHIPS, INC.",
"00D03F": "AMERICAN COMMUNICATION",
"00D040": "SYSMATE CO., LTD.",
"00D041": "AMIGO TECHNOLOGY CO., LTD.",
"00D042": "MAHLO GMBH & CO. UG",
"00D043": "ZONAL RETAIL DATA SYSTEMS",
"00D044": "ALIDIAN NETWORKS, INC.",
"00D045": "KVASER AB",
"00D046": "DOLBY LABORATORIES, INC.",
"00D047": "XN TECHNOLOGIES",
"00D048": "ECTON, INC.",
"00D049": "IMPRESSTEK CO., LTD.",
"00D04A": "PRESENCE TECHNOLOGY GMBH",
"00D04B": "LA CIE GROUP S.A.",
"00D04C": "EUROTEL TELECOM LTD.",
"00D04D": "DIV OF RESEARCH & STATISTICS",
"00D04E": "LOGIBAG",
"00D04F": "BITRONICS, INC.",
"00D050": "ISKRATEL",
"00D051": "O2 MICRO, INC.",
"00D052": "ASCEND COMMUNICATIONS, INC.",
"00D053": "CONNECTED SYSTEMS",
"00D054": "SAS INSTITUTE INC.",
"00D055": "KATHREIN-WERKE KG",
"00D056": "SOMAT CORPORATION",
"00D057": "ULTRAK, INC.",
"00D058": "CISCO SYSTEMS, INC.",
"00D059": "AMBIT MICROSYSTEMS CORP.",
"00D05A": "SYMBIONICS, LTD.",
"00D05B": "ACROLOOP MOTION CONTROL",
"00D05C": "TECHNOTREND SYSTEMTECHNIK GMBH",
"00D05D": "INTELLIWORXX, INC.",
"00D05E": "STRATABEAM TECHNOLOGY, INC.",
"00D05F": "VALCOM, INC.",
"00D060": "Panasonic Europe Ltd.",
"00D061": "TREMON ENTERPRISES CO., LTD.",
"00D062": "DIGIGRAM",
"00D063": "CISCO SYSTEMS, INC.",
"00D064": "MULTITEL",
"00D065": "TOKO ELECTRIC",
"00D066": "WINTRISS ENGINEERING CORP.",
"00D067": "CAMPIO COMMUNICATIONS",
"00D068": "IWILL CORPORATION",
"00D069": "TECHNOLOGIC SYSTEMS",
"00D06A": "LINKUP SYSTEMS CORPORATION",
"00D06B": "SR TELECOM INC.",
"00D06C": "SHAREWAVE, INC.",
"00D06D": "ACRISON, INC.",
"00D06E": "TRENDVIEW RECORDERS LTD.",
"00D06F": "KMC CONTROLS",
"00D070": "LONG WELL ELECTRONICS CORP.",
"00D071": "ECHELON CORP.",
"00D072": "BROADLOGIC",
"00D073": "ACN ADVANCED COMMUNICATIONS",
"00D074": "TAQUA SYSTEMS, INC.",
"00D075": "ALARIS MEDICAL SYSTEMS, INC.",
"00D076": "Bank of America",
"00D077": "LUCENT TECHNOLOGIES",
"00D078": "Eltex of Sweden AB",
"00D079": "CISCO SYSTEMS, INC.",
"00D07A": "AMAQUEST COMPUTER CORP.",
"00D07B": "COMCAM INTERNATIONAL INC",
"00D07C": "KOYO ELECTRONICS INC. CO.,LTD.",
"00D07D": "COSINE COMMUNICATIONS",
"00D07E": "KEYCORP LTD.",
"00D07F": "STRATEGY & TECHNOLOGY, LIMITED",
"00D080": "EXABYTE CORPORATION",
"00D081": "RTD Embedded Technologies, Inc.",
"00D082": "IOWAVE INC.",
"00D083": "INVERTEX, INC.",
"00D084": "NEXCOMM SYSTEMS, INC.",
"00D085": "OTIS ELEVATOR COMPANY",
"00D086": "FOVEON, INC.",
"00D087": "MICROFIRST INC.",
"00D088": "ARRIS Group, Inc.",
"00D089": "DYNACOLOR, INC.",
"00D08A": "PHOTRON USA",
"00D08B": "ADVA Optical Networking Ltd.",
"00D08C": "GENOA TECHNOLOGY, INC.",
"00D08D": "PHOENIX GROUP, INC.",
"00D08E": "NVISION INC.",
"00D08F": "ARDENT TECHNOLOGIES, INC.",
"00D090": "CISCO SYSTEMS, INC.",
"00D091": "SMARTSAN SYSTEMS, INC.",
"00D092": "GLENAYRE WESTERN MULTIPLEX",
"00D093": "TQ - COMPONENTS GMBH",
"00D094": "TIMELINE VISTA, INC.",
"00D095": "Alcatel-Lucent, Enterprise Business Group",
"00D096": "3COM EUROPE LTD.",
"00D097": "CISCO SYSTEMS, INC.",
"00D098": "Photon Dynamics Canada Inc.",
"00D099": "Elcard Wireless Systems Oy",
"00D09A": "FILANET CORPORATION",
"00D09B": "SPECTEL LTD.",
"00D09C": "KAPADIA COMMUNICATIONS",
"00D09D": "VERIS INDUSTRIES",
"00D09E": "2WIRE, INC.",
"00D09F": "NOVTEK TEST SYSTEMS",
"00D0A0": "MIPS DENMARK",
"00D0A1": "OSKAR VIERLING GMBH + CO. KG",
"00D0A2": "INTEGRATED DEVICE",
"00D0A3": "VOCAL DATA, INC.",
"00D0A4": "ALANTRO COMMUNICATIONS",
"00D0A5": "AMERICAN ARIUM",
"00D0A6": "LANBIRD TECHNOLOGY CO., LTD.",
"00D0A7": "TOKYO SOKKI KENKYUJO CO., LTD.",
"00D0A8": "NETWORK ENGINES, INC.",
"00D0A9": "SHINANO KENSHI CO., LTD.",
"00D0AA": "CHASE COMMUNICATIONS",
"00D0AB": "DELTAKABEL TELECOM CV",
"00D0AC": "GRAYSON WIRELESS",
"00D0AD": "TL INDUSTRIES",
"00D0AE": "ORESIS COMMUNICATIONS, INC.",
"00D0AF": "CUTLER-HAMMER, INC.",
"00D0B0": "BITSWITCH LTD.",
"00D0B1": "OMEGA ELECTRONICS SA",
"00D0B2": "XIOTECH CORPORATION",
"00D0B3": "DRS Technologies Canada Ltd",
"00D0B4": "KATSUJIMA CO., LTD.",
"00D0B5": "IPricot formerly DotCom",
"00D0B6": "CRESCENT NETWORKS, INC.",
"00D0B7": "INTEL CORPORATION",
"00D0B8": "Iomega Corporation",
"00D0B9": "MICROTEK INTERNATIONAL, INC.",
"00D0BA": "CISCO SYSTEMS, INC.",
"00D0BB": "CISCO SYSTEMS, INC.",
"00D0BC": "CISCO SYSTEMS, INC.",
"00D0BD": "Silicon Image GmbH",
"00D0BE": "EMUTEC INC.",
"00D0BF": "PIVOTAL TECHNOLOGIES",
"00D0C0": "CISCO SYSTEMS, INC.",
"00D0C1": "HARMONIC DATA SYSTEMS, LTD.",
"00D0C2": "BALTHAZAR TECHNOLOGY AB",
"00D0C3": "VIVID TECHNOLOGY PTE, LTD.",
"00D0C4": "TERATECH CORPORATION",
"00D0C5": "COMPUTATIONAL SYSTEMS, INC.",
"00D0C6": "THOMAS & BETTS CORP.",
"00D0C7": "PATHWAY, INC.",
"00D0C8": "Prevas A/S",
"00D0C9": "ADVANTECH CO., LTD.",
"00D0CA": "Intrinsyc Software International Inc.",
"00D0CB": "DASAN CO., LTD.",
"00D0CC": "TECHNOLOGIES LYRE INC.",
"00D0CD": "ATAN TECHNOLOGY INC.",
"00D0CE": "ASYST ELECTRONIC",
"00D0CF": "MORETON BAY",
"00D0D0": "ZHONGXING TELECOM LTD.",
"00D0D1": "Sycamore Networks",
"00D0D2": "EPILOG CORPORATION",
"00D0D3": "CISCO SYSTEMS, INC.",
"00D0D4": "V-BITS, INC.",
"00D0D5": "GRUNDIG AG",
"00D0D6": "AETHRA TELECOMUNICAZIONI",
"00D0D7": "B2C2, INC.",
"00D0D8": "3Com Corporation",
"00D0D9": "DEDICATED MICROCOMPUTERS",
"00D0DA": "TAICOM DATA SYSTEMS CO., LTD.",
"00D0DB": "MCQUAY INTERNATIONAL",
"00D0DC": "MODULAR MINING SYSTEMS, INC.",
"00D0DD": "SUNRISE TELECOM, INC.",
"00D0DE": "PHILIPS MULTIMEDIA NETWORK",
"00D0DF": "KUZUMI ELECTRONICS, INC.",
"00D0E0": "DOOIN ELECTRONICS CO.",
"00D0E1": "AVIONITEK ISRAEL INC.",
"00D0E2": "MRT MICRO, INC.",
"00D0E3": "ELE-CHEM ENGINEERING CO., LTD.",
"00D0E4": "CISCO SYSTEMS, INC.",
"00D0E5": "SOLIDUM SYSTEMS CORP.",
"00D0E6": "IBOND INC.",
"00D0E7": "VCON TELECOMMUNICATION LTD.",
"00D0E8": "MAC SYSTEM CO., LTD.",
"00D0E9": "Advantage Century Telecommunication Corp.",
"00D0EA": "NEXTONE COMMUNICATIONS, INC.",
"00D0EB": "LIGHTERA NETWORKS, INC.",
"00D0EC": "NAKAYO TELECOMMUNICATIONS, INC",
"00D0ED": "XIOX",
"00D0EE": "DICTAPHONE CORPORATION",
"00D0EF": "IGT",
"00D0F0": "CONVISION TECHNOLOGY GMBH",
"00D0F1": "SEGA ENTERPRISES, LTD.",
"00D0F2": "MONTEREY NETWORKS",
"00D0F3": "SOLARI DI UDINE SPA",
"00D0F4": "CARINTHIAN TECH INSTITUTE",
"00D0F5": "ORANGE MICRO, INC.",
"00D0F6": "Alcatel Canada",
"00D0F7": "NEXT NETS CORPORATION",
"00D0F8": "FUJIAN STAR TERMINAL",
"00D0F9": "ACUTE COMMUNICATIONS CORP.",
"00D0FA": "Thales e-Security Ltd.",
"00D0FB": "TEK MICROSYSTEMS, INCORPORATED",
"00D0FC": "GRANITE MICROSYSTEMS",
"00D0FD": "OPTIMA TELE.COM, INC.",
"00D0FE": "ASTRAL POINT",
"00D0FF": "CISCO SYSTEMS, INC.",
"00D11C": "ACETEL",
"00D38D": "Hotel Technology Next Generation",
"00D632": "GE Energy",
"00DB1E": "Albedo Telecom SL",
"00DB45": "THAMWAY CO.,LTD.",
"00DBDF": "Intel Corporate",
"00DD00": "UNGERMANN-BASS INC.",
"00DD01": "UNGERMANN-BASS INC.",
"00DD02": "UNGERMANN-BASS INC.",
"00DD03": "UNGERMANN-BASS INC.",
"00DD04": "UNGERMANN-BASS INC.",
"00DD05": "UNGERMANN-BASS INC.",
"00DD06": "UNGERMANN-BASS INC.",
"00DD07": "UNGERMANN-BASS INC.",
"00DD08": "UNGERMANN-BASS INC.",
"00DD09": "UNGERMANN-BASS INC.",
"00DD0A": "UNGERMANN-BASS INC.",
"00DD0B": "UNGERMANN-BASS INC.",
"00DD0C": "UNGERMANN-BASS INC.",
"00DD0D": "UNGERMANN-BASS INC.",
"00DD0E": "UNGERMANN-BASS INC.",
"00DD0F": "UNGERMANN-BASS INC.",
"00DEFB": "CISCO SYSTEMS, INC.",
"00E000": "Fujitsu Limited",
"00E001": "STRAND LIGHTING LIMITED",
"00E002": "CROSSROADS SYSTEMS, INC.",
"00E003": "NOKIA WIRELESS BUSINESS COMMUN",
"00E004": "PMC-SIERRA, INC.",
"00E005": "TECHNICAL CORP.",
"00E006": "SILICON INTEGRATED SYS. CORP.",
"00E007": "Avaya ECS Ltd",
"00E008": "AMAZING CONTROLS! INC.",
"00E009": "MARATHON TECHNOLOGIES CORP.",
"00E00A": "DIBA, INC.",
"00E00B": "ROOFTOP COMMUNICATIONS CORP.",
"00E00C": "MOTOROLA",
"00E00D": "RADIANT SYSTEMS",
"00E00E": "AVALON IMAGING SYSTEMS, INC.",
"00E00F": "SHANGHAI BAUD DATA",
"00E010": "HESS SB-AUTOMATENBAU GmbH",
"00E011": "Uniden Corporation",
"00E012": "PLUTO TECHNOLOGIES INTERNATIONAL INC.",
"00E013": "EASTERN ELECTRONIC CO., LTD.",
"00E014": "CISCO SYSTEMS, INC.",
"00E015": "HEIWA CORPORATION",
"00E016": "RAPID CITY COMMUNICATIONS",
"00E017": "EXXACT GmbH",
"00E018": "ASUSTEK COMPUTER INC.",
"00E019": "ING. GIORDANO ELETTRONICA",
"00E01A": "COMTEC SYSTEMS. CO., LTD.",
"00E01B": "SPHERE COMMUNICATIONS, INC.",
"00E01C": "Cradlepoint, Inc",
"00E01D": "WebTV NETWORKS, INC.",
"00E01E": "CISCO SYSTEMS, INC.",
"00E01F": "AVIDIA Systems, Inc.",
"00E020": "TECNOMEN OY",
"00E021": "FREEGATE CORP.",
"00E022": "Analog Devices Inc.",
"00E023": "TELRAD",
"00E024": "GADZOOX NETWORKS",
"00E025": "dit Co., Ltd.",
"00E026": "Redlake MASD LLC",
"00E027": "DUX, INC.",
"00E028": "APTIX CORPORATION",
"00E029": "STANDARD MICROSYSTEMS CORP.",
"00E02A": "TANDBERG TELEVISION AS",
"00E02B": "EXTREME NETWORKS",
"00E02C": "AST COMPUTER",
"00E02D": "InnoMediaLogic, Inc.",
"00E02E": "SPC ELECTRONICS CORPORATION",
"00E02F": "MCNS HOLDINGS, L.P.",
"00E030": "MELITA INTERNATIONAL CORP.",
"00E031": "HAGIWARA ELECTRIC CO., LTD.",
"00E032": "MISYS FINANCIAL SYSTEMS, LTD.",
"00E033": "E.E.P.D. GmbH",
"00E034": "CISCO SYSTEMS, INC.",
"00E035": "Emerson Network Power",
"00E036": "PIONEER CORPORATION",
"00E037": "CENTURY CORPORATION",
"00E038": "PROXIMA CORPORATION",
"00E039": "PARADYNE CORP.",
"00E03A": "CABLETRON SYSTEMS, INC.",
"00E03B": "PROMINET CORPORATION",
"00E03C": "AdvanSys",
"00E03D": "FOCON ELECTRONIC SYSTEMS A/S",
"00E03E": "ALFATECH, INC.",
"00E03F": "JATON CORPORATION",
"00E040": "DeskStation Technology, Inc.",
"00E041": "CSPI",
"00E042": "Pacom Systems Ltd.",
"00E043": "VitalCom",
"00E044": "LSICS CORPORATION",
"00E045": "TOUCHWAVE, INC.",
"00E046": "BENTLY NEVADA CORP.",
"00E047": "InFocus Corporation",
"00E048": "SDL COMMUNICATIONS, INC.",
"00E049": "MICROWI ELECTRONIC GmbH",
"00E04A": "ENHANCED MESSAGING SYSTEMS, INC",
"00E04B": "JUMP INDUSTRIELLE COMPUTERTECHNIK GmbH",
"00E04C": "REALTEK SEMICONDUCTOR CORP.",
"00E04D": "INTERNET INITIATIVE JAPAN, INC",
"00E04E": "SANYO DENKI CO., LTD.",
"00E04F": "CISCO SYSTEMS, INC.",
"00E050": "EXECUTONE INFORMATION SYSTEMS, INC.",
"00E051": "TALX CORPORATION",
"00E052": "Brocade Communications Systems, Inc",
"00E053": "CELLPORT LABS, INC.",
"00E054": "KODAI HITEC CO., LTD.",
"00E055": "INGENIERIA ELECTRONICA COMERCIAL INELCOM S.A.",
"00E056": "HOLONTECH CORPORATION",
"00E057": "HAN MICROTELECOM. CO., LTD.",
"00E058": "PHASE ONE DENMARK A/S",
"00E059": "CONTROLLED ENVIRONMENTS, LTD.",
"00E05A": "GALEA NETWORK SECURITY",
"00E05B": "WEST END SYSTEMS CORP.",
"00E05C": "MATSUSHITA KOTOBUKI ELECTRONICS INDUSTRIES, LTD.",
"00E05D": "UNITEC CO., LTD.",
"00E05E": "JAPAN AVIATION ELECTRONICS INDUSTRY, LTD.",
"00E05F": "e-Net, Inc.",
"00E060": "SHERWOOD",
"00E061": "EdgePoint Networks, Inc.",
"00E062": "HOST ENGINEERING",
"00E063": "CABLETRON - YAGO SYSTEMS, INC.",
"00E064": "SAMSUNG ELECTRONICS",
"00E065": "OPTICAL ACCESS INTERNATIONAL",
"00E066": "ProMax Systems, Inc.",
"00E067": "eac AUTOMATION-CONSULTING GmbH",
"00E068": "MERRIMAC SYSTEMS INC.",
"00E069": "JAYCOR",
"00E06A": "KAPSCH AG",
"00E06B": "W&G SPECIAL PRODUCTS",
"00E06C": "Ultra Electronics Limited (AEP Networks)",
"00E06D": "COMPUWARE CORPORATION",
"00E06E": "FAR SYSTEMS S.p.A.",
"00E06F": "ARRIS Group, Inc.",
"00E070": "DH TECHNOLOGY",
"00E071": "EPIS MICROCOMPUTER",
"00E072": "LYNK",
"00E073": "NATIONAL AMUSEMENT NETWORK, INC.",
"00E074": "TIERNAN COMMUNICATIONS, INC.",
"00E075": "Verilink Corporation",
"00E076": "DEVELOPMENT CONCEPTS, INC.",
"00E077": "WEBGEAR, INC.",
"00E078": "BERKELEY NETWORKS",
"00E079": "A.T.N.R.",
"00E07A": "MIKRODIDAKT AB",
"00E07B": "BAY NETWORKS",
"00E07C": "METTLER-TOLEDO, INC.",
"00E07D": "NETRONIX, INC.",
"00E07E": "WALT DISNEY IMAGINEERING",
"00E07F": "LOGISTISTEM s.r.l.",
"00E080": "CONTROL RESOURCES CORPORATION",
"00E081": "TYAN COMPUTER CORP.",
"00E082": "ANERMA",
"00E083": "JATO TECHNOLOGIES, INC.",
"00E084": "COMPULITE R&D",
"00E085": "GLOBAL MAINTECH, INC.",
"00E086": "Emerson Network Power, Avocent Division",
"00E087": "LeCroy - Networking Productions Division",
"00E088": "LTX-Credence CORPORATION",
"00E089": "ION Networks, Inc.",
"00E08A": "GEC AVERY, LTD.",
"00E08B": "QLogic Corp.",
"00E08C": "NEOPARADIGM LABS, INC.",
"00E08D": "PRESSURE SYSTEMS, INC.",
"00E08E": "UTSTARCOM",
"00E08F": "CISCO SYSTEMS, INC.",
"00E090": "BECKMAN LAB. AUTOMATION DIV.",
"00E091": "LG ELECTRONICS, INC.",
"00E092": "ADMTEK INCORPORATED",
"00E093": "ACKFIN NETWORKS",
"00E094": "OSAI SRL",
"00E095": "ADVANCED-VISION TECHNOLGIES CORP.",
"00E096": "SHIMADZU CORPORATION",
"00E097": "CARRIER ACCESS CORPORATION",
"00E098": "AboCom Systems, Inc.",
"00E099": "SAMSON AG",
"00E09A": "Positron Inc.",
"00E09B": "ENGAGE NETWORKS, INC.",
"00E09C": "MII",
"00E09D": "SARNOFF CORPORATION",
"00E09E": "QUANTUM CORPORATION",
"00E09F": "PIXEL VISION",
"00E0A0": "WILTRON CO.",
"00E0A1": "HIMA PAUL HILDEBRANDT GmbH Co. KG",
"00E0A2": "MICROSLATE INC.",
"00E0A3": "CISCO SYSTEMS, INC.",
"00E0A4": "ESAOTE S.p.A.",
"00E0A5": "ComCore Semiconductor, Inc.",
"00E0A6": "TELOGY NETWORKS, INC.",
"00E0A7": "IPC INFORMATION SYSTEMS, INC.",
"00E0A8": "SAT GmbH & Co.",
"00E0A9": "FUNAI ELECTRIC CO., LTD.",
"00E0AA": "ELECTROSONIC LTD.",
"00E0AB": "DIMAT S.A.",
"00E0AC": "MIDSCO, INC.",
"00E0AD": "EES TECHNOLOGY, LTD.",
"00E0AE": "XAQTI CORPORATION",
"00E0AF": "GENERAL DYNAMICS INFORMATION SYSTEMS",
"00E0B0": "CISCO SYSTEMS, INC.",
"00E0B1": "Alcatel-Lucent, Enterprise Business Group",
"00E0B2": "TELMAX COMMUNICATIONS CORP.",
"00E0B3": "EtherWAN Systems, Inc.",
"00E0B4": "TECHNO SCOPE CO., LTD.",
"00E0B5": "ARDENT COMMUNICATIONS CORP.",
"00E0B6": "Entrada Networks",
"00E0B7": "PI GROUP, LTD.",
"00E0B8": "GATEWAY 2000",
"00E0B9": "BYAS SYSTEMS",
"00E0BA": "BERGHOF AUTOMATIONSTECHNIK GmbH",
"00E0BB": "NBX CORPORATION",
"00E0BC": "SYMON COMMUNICATIONS, INC.",
"00E0BD": "INTERFACE SYSTEMS, INC.",
"00E0BE": "GENROCO INTERNATIONAL, INC.",
"00E0BF": "TORRENT NETWORKING TECHNOLOGIES CORP.",
"00E0C0": "SEIWA ELECTRIC MFG. CO., LTD.",
"00E0C1": "MEMOREX TELEX JAPAN, LTD.",
"00E0C2": "NECSY S.p.A.",
"00E0C3": "SAKAI SYSTEM DEVELOPMENT CORP.",
"00E0C4": "HORNER ELECTRIC, INC.",
"00E0C5": "BCOM ELECTRONICS INC.",
"00E0C6": "LINK2IT, L.L.C.",
"00E0C7": "EUROTECH SRL",
"00E0C8": "VIRTUAL ACCESS, LTD.",
"00E0C9": "AutomatedLogic Corporation",
"00E0CA": "BEST DATA PRODUCTS",
"00E0CB": "RESON, INC.",
"00E0CC": "HERO SYSTEMS, LTD.",
"00E0CD": "SAAB SENSIS CORPORATION",
"00E0CE": "ARN",
"00E0CF": "INTEGRATED DEVICE TECHNOLOGY, INC.",
"00E0D0": "NETSPEED, INC.",
"00E0D1": "TELSIS LIMITED",
"00E0D2": "VERSANET COMMUNICATIONS, INC.",
"00E0D3": "DATENTECHNIK GmbH",
"00E0D4": "EXCELLENT COMPUTER",
"00E0D5": "Emulex Corporation",
"00E0D6": "COMPUTER & COMMUNICATION RESEARCH LAB.",
"00E0D7": "SUNSHINE ELECTRONICS, INC.",
"00E0D8": "LANBit Computer, Inc.",
"00E0D9": "TAZMO CO., LTD.",
"00E0DA": "Alcatel North America ESD",
"00E0DB": "ViaVideo Communications, Inc.",
"00E0DC": "NEXWARE CORP.",
"00E0DD": "ZENITH ELECTRONICS CORPORATION",
"00E0DE": "DATAX NV",
"00E0DF": "KEYMILE GmbH",
"00E0E0": "SI ELECTRONICS, LTD.",
"00E0E1": "G2 NETWORKS, INC.",
"00E0E2": "INNOVA CORP.",
"00E0E3": "SK-ELEKTRONIK GmbH",
"00E0E4": "FANUC ROBOTICS NORTH AMERICA, Inc.",
"00E0E5": "CINCO NETWORKS, INC.",
"00E0E6": "INCAA DATACOM B.V.",
"00E0E7": "RAYTHEON E-SYSTEMS, INC.",
"00E0E8": "GRETACODER Data Systems AG",
"00E0E9": "DATA LABS, INC.",
"00E0EA": "INNOVAT COMMUNICATIONS, INC.",
"00E0EB": "DIGICOM SYSTEMS, INCORPORATED",
"00E0EC": "CELESTICA INC.",
"00E0ED": "SILICOM, LTD.",
"00E0EE": "MAREL HF",
"00E0EF": "DIONEX",
"00E0F0": "ABLER TECHNOLOGY, INC.",
"00E0F1": "THAT CORPORATION",
"00E0F2": "ARLOTTO COMNET, INC.",
"00E0F3": "WebSprint Communications, Inc.",
"00E0F4": "INSIDE Technology A/S",
"00E0F5": "TELES AG",
"00E0F6": "DECISION EUROPE",
"00E0F7": "CISCO SYSTEMS, INC.",
"00E0F8": "DICNA CONTROL AB",
"00E0F9": "CISCO SYSTEMS, INC.",
"00E0FA": "TRL TECHNOLOGY, LTD.",
"00E0FB": "LEIGHTRONIX, INC.",
"00E0FC": "HUAWEI TECHNOLOGIES CO., LTD.",
"00E0FD": "A-TREND TECHNOLOGY CO., LTD.",
"00E0FE": "CISCO SYSTEMS, INC.",
"00E0FF": "SECURITY DYNAMICS TECHNOLOGIES, Inc.",
"00E16D": "Cisco",
"00E175": "AK-Systems Ltd",
"00E3B2": "Samsung Electronics Co.,Ltd",
"00E666": "ARIMA Communications Corp.",
"00E6D3": "NIXDORF COMPUTER CORP.",
"00E8AB": "Meggitt Training Systems, Inc.",
"00EB2D": "Sony Mobile Communications AB",
"00EEBD": "HTC Corporation",
"00F051": "KWB Gmbh",
"00F403": "Orbis Systems Oy",
"00F4B9": "Apple",
"00F860": "PT. Panggung Electric Citrabuana",
"00FA3B": "CLOOS ELECTRONIC GMBH",
"00FC58": "WebSilicon Ltd.",
"00FC70": "Intrepid Control Systems, Inc.",
"00FD4C": "NEVATEC",
"020701": "RACAL-DATACOM",
"021C7C": "PERQ SYSTEMS CORPORATION",
"026086": "LOGIC REPLACEMENT TECH. LTD.",
"02608C": "3COM CORPORATION",
"027001": "RACAL-DATACOM",
"0270B0": "M/A-COM INC. COMPANIES",
"0270B3": "DATA RECALL LTD",
"029D8E": "CARDIAC RECORDERS INC.",
"02AA3C": "OLIVETTI TELECOMM SPA (OLTECO)",
"02BB01": "OCTOTHORPE CORP.",
"02C08C": "3COM CORPORATION",
"02CF1C": "COMMUNICATION MACHINERY CORP.",
"02E6D3": "NIXDORF COMPUTER CORPORATION",
"040A83": "Alcatel-Lucent",
"040AE0": "XMIT AG COMPUTER NETWORKS",
"040CCE": "Apple",
"040EC2": "ViewSonic Mobile China Limited",
"041552": "Apple",
"04180F": "Samsung Electronics Co.,Ltd",
"0418B6": "PRIVATE",
"0418D6": "Ubiquiti Networks",
"041A04": "WaveIP",
"041B94": "Host Mobility AB",
"041BBA": "Samsung Electronics Co.,Ltd",
"041D10": "Dream Ware Inc.",
"041E64": "Apple",
"04209A": "Panasonic AVC Networks Company",
"042234": "Wireless Standard Extensions",
"042605": "GFR Gesellschaft f\u00fcr Regelungstechnik und Energieeinsparung mbH",
"042665": "Apple",
"042BBB": "PicoCELA, Inc.",
"042F56": "ATOCS (Shenzhen) LTD",
"0432F4": "Partron",
"043604": "Gyeyoung I&T",
"043D98": "ChongQing QingJia Electronics CO.,LTD",
"0444A1": "TELECON GALICIA,S.A.",
"044665": "Murata Manufacturing Co., Ltd.",
"04489A": "Apple",
"044A50": "Ramaxel Technology (Shenzhen) limited company",
"044BFF": "GuangZhou Hedy Digital Technology Co., Ltd",
"044CEF": "Fujian Sanao Technology Co.,Ltd",
"044E06": "Ericsson AB",
"044F8B": "Adapteva, Inc.",
"044FAA": "Ruckus Wireless",
"045453": "Apple",
"0455CA": "BriView (Xiamen) Corp.",
"04572F": "Sertel Electronics UK Ltd",
"04586F": "Sichuan Whayer information industry Co.,LTD",
"045A95": "Nokia Corporation",
"045C06": "Zmodo Technology Corporation",
"045C8E": "gosund GROUP CO.,LTD",
"045D56": "camtron industrial inc.",
"045FA7": "Shenzhen Yichen Technology Development Co.,LTD",
"0462D7": "ALSTOM HYDRO FRANCE",
"0463E0": "Nome Oy",
"046D42": "Bryston Ltd.",
"046E49": "TaiYear Electronic Technology (Suzhou) Co., Ltd",
"0470BC": "Globalstar Inc.",
"0474A1": "Aligera Equipamentos Digitais Ltda",
"0475F5": "CSST",
"04766E": "ALPS Co,. Ltd.",
"047D7B": "Quanta Computer Inc.",
"0481AE": "Clack Corporation",
"04848A": "7INOVA TECHNOLOGY LIMITED",
"04888C": "Eifelwerk Butler Systeme GmbH",
"0488E2": "Beats Electronics LLC",
"048A15": "Avaya, Inc",
"048B42": "Skspruce Technology Limited",
"048C03": "ThinPAD Technology (Shenzhen)CO.,LTD",
"048D38": "Netcore Technology Inc.",
"0494A1": "CATCH THE WIND INC",
"0498F3": "ALPS Electric Co,. Ltd.",
"049C62": "BMT Medical Technology s.r.o.",
"049F06": "Smobile Co., Ltd.",
"049F81": "Netscout Systems, Inc.",
"04A151": "NETGEAR INC.,",
"04A3F3": "Emicon",
"04A82A": "Nokia Corporation",
"04B3B6": "Seamap (UK) Ltd",
"04B466": "BSP Co., Ltd.",
"04BFA8": "ISB Corporation",
"04C05B": "Tigo Energy",
"04C06F": "Shenzhen Huawei Communication Technologies Co., Ltd",
"04C1B9": "Fiberhome Telecommunication Tech.Co.,Ltd.",
"04C5A4": "CISCO SYSTEMS, INC.",
"04C880": "Samtec Inc",
"04C991": "Phistek INC.",
"04CB1D": "Traka plc",
"04CE14": "Wilocity LTD.",
"04CF25": "MANYCOLORS, INC.",
"04D437": "ZNV",
"04D783": "Y&H E&C Co.,LTD.",
"04DAD2": "Cisco",
"04DB56": "Apple, Inc.",
"04DB8A": "Suntech International Ltd.",
"04DD4C": "Velocytech",
"04DF69": "Car Connectivity Consortium",
"04E0C4": "TRIUMPH-ADLER AG",
"04E1C8": "IMS Solu\u00e7\u00f5es em Energia Ltda.",
"04E2F8": "AEP Ticketing solutions srl",
"04E451": "Texas Instruments",
"04E536": "Apple",
"04E548": "Cohda Wireless Pty Ltd",
"04E662": "Acroname Inc.",
"04E676": "AMPAK Technology Inc.",
"04E9E5": "PJRC.COM, LLC",
"04EE91": "x-fabric GmbH",
"04F021": "Compex Systems Pte Ltd",
"04F13E": "Apple",
"04F17D": "Tarana Wireless",
"04F4BC": "Xena Networks",
"04F7E4": "Apple",
"04F8C2": "Flaircomm Microelectronics, Inc.",
"04F938": "HUAWEI TECHNOLOGIES CO.,LTD",
"04FE31": "Samsung Electronics Co.,Ltd",
"04FE7F": "CISCO SYSTEMS, INC.",
"04FF51": "NOVAMEDIA INNOVISION SP. Z O.O.",
"080001": "COMPUTERVISION CORPORATION",
"080002": "BRIDGE COMMUNICATIONS INC.",
"080003": "ADVANCED COMPUTER COMM.",
"080004": "CROMEMCO INCORPORATED",
"080005": "SYMBOLICS INC.",
"080006": "SIEMENS AG",
"080007": "Apple",
"080008": "BOLT BERANEK AND NEWMAN INC.",
"080009": "HEWLETT PACKARD",
"08000A": "NESTAR SYSTEMS INCORPORATED",
"08000B": "UNISYS CORPORATION",
"08000C": "MIKLYN DEVELOPMENT CO.",
"08000D": "INTERNATIONAL COMPUTERS LTD.",
"08000E": "NCR CORPORATION",
"08000F": "MITEL CORPORATION",
"080011": "TEKTRONIX INC.",
"080012": "BELL ATLANTIC INTEGRATED SYST.",
"080013": "EXXON",
"080014": "EXCELAN",
"080015": "STC BUSINESS SYSTEMS",
"080016": "BARRISTER INFO SYS CORP",
"080017": "NATIONAL SEMICONDUCTOR",
"080018": "PIRELLI FOCOM NETWORKS",
"080019": "GENERAL ELECTRIC CORPORATION",
"08001A": "TIARA/ 10NET",
"08001B": "EMC Corporation",
"08001C": "KDD-KOKUSAI DEBNSIN DENWA CO.",
"08001D": "ABLE COMMUNICATIONS INC.",
"08001E": "APOLLO COMPUTER INC.",
"08001F": "SHARP CORPORATION",
"080020": "Oracle Corporation",
"080021": "3M COMPANY",
"080022": "NBI INC.",
"080023": "Panasonic Communications Co., Ltd.",
"080024": "10NET COMMUNICATIONS/DCA",
"080025": "CONTROL DATA",
"080026": "NORSK DATA A.S.",
"080027": "CADMUS COMPUTER SYSTEMS",
"080028": "Texas Instruments",
"080029": "MEGATEK CORPORATION",
"08002A": "MOSAIC TECHNOLOGIES INC.",
"08002B": "DIGITAL EQUIPMENT CORPORATION",
"08002C": "BRITTON LEE INC.",
"08002D": "LAN-TEC INC.",
"08002E": "METAPHOR COMPUTER SYSTEMS",
"08002F": "PRIME COMPUTER INC.",
"080030": "ROYAL MELBOURNE INST OF TECH",
"080031": "LITTLE MACHINES INC.",
"080032": "TIGAN INCORPORATED",
"080033": "BAUSCH & LOMB",
"080034": "FILENET CORPORATION",
"080035": "MICROFIVE CORPORATION",
"080036": "INTERGRAPH CORPORATION",
"080037": "FUJI-XEROX CO. LTD.",
"080038": "BULL S.A.S.",
"080039": "SPIDER SYSTEMS LIMITED",
"08003A": "ORCATECH INC.",
"08003B": "TORUS SYSTEMS LIMITED",
"08003C": "SCHLUMBERGER WELL SERVICES",
"08003D": "CADNETIX CORPORATIONS",
"08003E": "CODEX CORPORATION",
"08003F": "FRED KOSCHARA ENTERPRISES",
"080040": "FERRANTI COMPUTER SYS. LIMITED",
"080041": "RACAL-MILGO INFORMATION SYS..",
"080042": "JAPAN MACNICS CORP.",
"080043": "PIXEL COMPUTER INC.",
"080044": "DAVID SYSTEMS INC.",
"080045": "CONCURRENT COMPUTER CORP.",
"080046": "Sony Corporation",
"080047": "SEQUENT COMPUTER SYSTEMS INC.",
"080048": "EUROTHERM GAUGING SYSTEMS",
"080049": "UNIVATION",
"08004A": "BANYAN SYSTEMS INC.",
"08004B": "PLANNING RESEARCH CORP.",
"08004C": "HYDRA COMPUTER SYSTEMS INC.",
"08004D": "CORVUS SYSTEMS INC.",
"08004E": "3COM EUROPE LTD.",
"08004F": "CYGNET SYSTEMS",
"080050": "DAISY SYSTEMS CORP.",
"080051": "EXPERDATA",
"080052": "INSYSTEC",
"080053": "MIDDLE EAST TECH. UNIVERSITY",
"080055": "STANFORD TELECOMM. INC.",
"080056": "STANFORD LINEAR ACCEL. CENTER",
"080057": "EVANS & SUTHERLAND",
"080058": "SYSTEMS CONCEPTS",
"080059": "A/S MYCRON",
"08005A": "IBM Corp",
"08005B": "VTA TECHNOLOGIES INC.",
"08005C": "FOUR PHASE SYSTEMS",
"08005D": "GOULD INC.",
"08005E": "COUNTERPOINT COMPUTER INC.",
"08005F": "SABER TECHNOLOGY CORP.",
"080060": "INDUSTRIAL NETWORKING INC.",
"080061": "JAROGATE LTD.",
"080062": "GENERAL DYNAMICS",
"080063": "PLESSEY",
"080064": "Sitasys AG",
"080065": "GENRAD INC.",
"080066": "AGFA CORPORATION",
"080067": "COMDESIGN",
"080068": "RIDGE COMPUTERS",
"080069": "SILICON GRAPHICS INC.",
"08006A": "ATT BELL LABORATORIES",
"08006B": "ACCEL TECHNOLOGIES INC.",
"08006C": "SUNTEK TECHNOLOGY INT'L",
"08006D": "WHITECHAPEL COMPUTER WORKS",
"08006E": "MASSCOMP",
"08006F": "PHILIPS APELDOORN B.V.",
"080070": "MITSUBISHI ELECTRIC CORP.",
"080071": "MATRA (DSIE)",
"080072": "XEROX CORP UNIV GRANT PROGRAM",
"080073": "TECMAR INC.",
"080074": "CASIO COMPUTER CO. LTD.",
"080075": "DANSK DATA ELECTRONIK",
"080076": "PC LAN TECHNOLOGIES",
"080077": "TSL COMMUNICATIONS LTD.",
"080078": "ACCELL CORPORATION",
"080079": "THE DROID WORKS",
"08007A": "INDATA",
"08007B": "SANYO ELECTRIC CO. LTD.",
"08007C": "VITALINK COMMUNICATIONS CORP.",
"08007E": "AMALGAMATED WIRELESS(AUS) LTD",
"08007F": "CARNEGIE-MELLON UNIVERSITY",
"080080": "AES DATA INC.",
"080081": "ASTECH INC.",
"080082": "VERITAS SOFTWARE",
"080083": "Seiko Instruments Inc.",
"080084": "TOMEN ELECTRONICS CORP.",
"080085": "ELXSI",
"080086": "KONICA MINOLTA HOLDINGS, INC.",
"080087": "XYPLEX",
"080088": "Brocade Communications Systems, Inc.",
"080089": "KINETICS",
"08008A": "PerfTech, Inc.",
"08008B": "PYRAMID TECHNOLOGY CORP.",
"08008C": "NETWORK RESEARCH CORPORATION",
"08008D": "XYVISION INC.",
"08008E": "TANDEM COMPUTERS",
"08008F": "CHIPCOM CORPORATION",
"080090": "SONOMA SYSTEMS",
"080371": "KRG CORPORATE",
"0805CD": "DongGuang EnMai Electronic Product Co.Ltd.",
"0808C2": "Samsung Electronics",
"0808EA": "AMSC",
"080C0B": "SysMik GmbH Dresden",
"080CC9": "Mission Technology Group, dba Magma",
"080D84": "GECO, Inc.",
"080EA8": "Velex s.r.l.",
"080FFA": "KSP INC.",
"081196": "Intel Corporate",
"081443": "UNIBRAIN S.A.",
"081651": "Shenzhen Sea Star Technology Co.,Ltd",
"081735": "CISCO SYSTEMS, INC.",
"0817F4": "IBM Corp",
"08181A": "zte corporation",
"08184C": "A. S. Thomas, Inc.",
"0819A6": "HUAWEI TECHNOLOGIES CO.,LTD",
"081DFB": "Shanghai Mexon Communication Technology Co.,Ltd",
"081F3F": "WondaLink Inc.",
"081FF3": "CISCO SYSTEMS, INC.",
"082522": "ADVANSEE",
"082719": "APS systems/electronic AG",
"082AD0": "SRD Innovations Inc.",
"082E5F": "Hewlett Packard",
"083571": "CASwell INC.",
"08373D": "Samsung Electronics Co.,Ltd",
"08379C": "Topaz Co. LTD.",
"0838A5": "Funkwerk plettac electronic GmbH",
"083AB8": "Shinoda Plasma Co., Ltd.",
"083E0C": "ARRIS Group, Inc.",
"083E8E": "Hon Hai Precision Ind.Co.Ltd",
"083F3E": "WSH GmbH",
"083F76": "Intellian Technologies, Inc.",
"084027": "Gridstore Inc.",
"08482C": "Raycore Taiwan Co., LTD.",
"084929": "CYBATI",
"084E1C": "H2A Systems, LLC",
"084EBF": "Broad Net Mux Corporation",
"08512E": "Orion Diagnostica Oy",
"085240": "EbV Elektronikbau- und Vertriebs GmbH",
"085700": "TP-LINK TECHNOLOGIES CO.,LTD.",
"085AE0": "Recovision Technology Co., Ltd.",
"085B0E": "Fortinet, Inc.",
"085DDD": "Mercury Corporation",
"08606E": "ASUSTek COMPUTER INC.",
"086361": "Huawei Technologies Co., Ltd",
"0868D0": "Japan System Design",
"0868EA": "EITO ELECTRONICS CO., LTD.",
"086DF2": "Shenzhen MIMOWAVE Technology Co.,Ltd",
"087045": "Apple",
"0874F6": "Winterhalter Gastronom GmbH",
"087572": "Obelux Oy",
"087618": "ViE Technologies Sdn. Bhd.",
"087695": "Auto Industrial Co., Ltd.",
"0876FF": "Thomson Telecom Belgium",
"087999": "AIM GmbH",
"087A4C": "Huawei Technologies Co., Ltd",
"087BAA": "SVYAZKOMPLEKTSERVICE, LLC",
"087CBE": "Quintic Corp.",
"087D21": "Altasec technology corporation",
"088039": "Cisco SPVTG",
"0881F4": "Juniper Networks",
"08863B": "Belkin International, Inc.",
"088DC8": "Ryowa Electronics Co.,Ltd",
"088E4F": "SF Software Solutions",
"088F2C": "Hills Sound Vision & Lighting",
"0896D7": "AVM GmbH",
"089758": "Shenzhen Strong Rising Electronics Co.,Ltd DongGuan Subsidiary",
"089E01": "QUANTA COMPUTER INC.",
"089F97": "LEROY AUTOMATION",
"08A12B": "ShenZhen EZL Technology Co., Ltd",
"08A95A": "Azurewave",
"08ACA5": "Benu Video, Inc.",
"08AF78": "Totus Solutions, Inc.",
"08B2A3": "Cynny Italia S.r.L.",
"08B4CF": "Abicom International",
"08B738": "Lite-On Technogy Corp.",
"08B7EC": "Wireless Seismic",
"08BBCC": "AK-NORD EDV VERTRIEBSGES. mbH",
"08BD43": "NETGEAR INC.,",
"08BE09": "Astrol Electronic AG",
"08CA45": "Toyou Feiji Electronics Co., Ltd.",
"08CC68": "Cisco",
"08D09F": "CISCO SYSTEMS, INC.",
"08D29A": "Proformatique",
"08D40C": "Intel Corporate",
"08D42B": "Samsung Electronics",
"08D5C0": "Seers Technology Co., Ltd",
"08D833": "Shenzhen RF Technology Co,.Ltd",
"08DF1F": "Bose Corporation",
"08E5DA": "NANJING FUJITSU COMPUTER PRODUCTS CO.,LTD.",
"08E672": "JEBSEE ELECTRONICS CO.,LTD.",
"08EA44": "Aerohive Networks, Inc.",
"08EB74": "Humax",
"08EBED": "World Elite Technology Co.,LTD",
"08EDB9": "Hon Hai Precision Ind. Co.,Ltd.",
"08EF3B": "MCS Logic Inc.",
"08F1B7": "Towerstream Corpration",
"08F2F4": "Net One Partners Co.,Ltd.",
"08F6F8": "GET Engineering",
"08F728": "GLOBO Multimedia Sp. z o.o. Sp.k.",
"08FAE0": "Fohhn Audio AG",
"08FC52": "OpenXS BV",
"08FC88": "Samsung Electronics Co.,Ltd",
"08FD0E": "Samsung Electronics Co.,Ltd",
"0C0400": "Jantar d.o.o.",
"0C0535": "Juniper Systems",
"0C1105": "Ringslink (Xiamen) Network Communication Technologies Co., Ltd",
"0C1262": "zte corporation",
"0C130B": "Uniqoteq Ltd.",
"0C1420": "Samsung Electronics Co.,Ltd",
"0C15C5": "SDTEC Co., Ltd.",
"0C17F1": "TELECSYS",
"0C191F": "Inform Electronik",
"0C1DC2": "SeAH Networks",
"0C2026": "noax Technologies AG",
"0C2724": "Cisco",
"0C2755": "Valuable Techologies Limited",
"0C2A69": "electric imp, incorporated",
"0C2AE7": "Beijing General Research Institute of Mining and Metallurgy",
"0C2D89": "QiiQ Communications Inc.",
"0C3021": "Apple",
"0C37DC": "Huawei Technologies Co., Ltd",
"0C3956": "Observator instruments",
"0C3C65": "Dome Imaging Inc",
"0C3E9F": "Apple, Inc",
"0C469D": "MS Sedco",
"0C473D": "Hitron Technologies. Inc",
"0C4C39": "Mitrastar Technology",
"0C4DE9": "Apple",
"0C4F5A": "ASA-RT s.r.l.",
"0C51F7": "CHAUVIN ARNOUX",
"0C54A5": "PEGATRON CORPORATION",
"0C5521": "Axiros GmbH",
"0C565C": "HyBroad Vision (Hong Kong) Technology Co Ltd",
"0C57EB": "Mueller Systems",
"0C5A19": "Axtion Sdn Bhd",
"0C5CD8": "DOLI Elektronik GmbH",
"0C6076": "Hon Hai Precision Ind. Co.,Ltd.",
"0C63FC": "Nanjing Signway Technology Co., Ltd",
"0C6803": "Cisco",
"0C6E4F": "PrimeVOLT Co., Ltd.",
"0C715D": "Samsung Electronics Co.,Ltd",
"0C722C": "TP-LINK TECHNOLOGIES CO.,LTD.",
"0C74C2": "Apple",
"0C7523": "BEIJING GEHUA CATV NETWORK CO.,LTD",
"0C771A": "Apple",
"0C7D7C": "Kexiang Information Technology Co, Ltd.",
"0C8112": "PRIVATE",
"0C8230": "SHENZHEN MAGNUS TECHNOLOGIES CO.,LTD",
"0C8268": "TP-LINK TECHNOLOGIES CO.,LTD.",
"0C826A": "Wuhan Huagong Genuine Optics Technology Co., Ltd",
"0C8411": "A.O. Smith Water Products",
"0C8484": "Zenovia Electronics Inc.",
"0C84DC": "Hon Hai Precision Ind. Co.,Ltd.",
"0C8525": "CISCO SYSTEMS, INC.",
"0C8910": "Samsung Electronics Co.,LTD",
"0C8BFD": "Intel Corporate",
"0C8CDC": "Suunto Oy",
"0C8D98": "TOP EIGHT IND CORP",
"0C924E": "Rice Lake Weighing Systems",
"0C9301": "PT. Prasimax Inovasi Teknologi",
"0C93FB": "BNS Solutions",
"0C96BF": "Huawei Technologies Co., Ltd",
"0C9B13": "Shanghai Magic Mobile Telecommunication Co.Ltd.",
"0C9D56": "Consort Controls Ltd",
"0C9E91": "Sankosha Corporation",
"0CA138": "Blinq Wireless Inc.",
"0CA2F4": "Chameleon Technology (UK) Limited",
"0CA402": "Alcatel Lucent IPD",
"0CA42A": "OB Telecom Electronic Technology Co., Ltd",
"0CA694": "Sunitec Enterprise Co.,Ltd",
"0CAF5A": "GENUS POWER INFRASTRUCTURES LIMITED",
"0CB4EF": "Digience Co.,Ltd.",
"0CBD51": "TCT Mobile Limited",
"0CBF15": "Genetec",
"0CC0C0": "MAGNETI MARELLI SISTEMAS ELECTRONICOS MEXICO",
"0CC3A7": "Meritec",
"0CC47A": "Super Micro Computer, Inc.",
"0CC47E": "EUCAST Co., Ltd.",
"0CC655": "Wuxi YSTen Technology Co.,Ltd.",
"0CC66A": "Nokia Corporation",
"0CC6AC": "DAGS",
"0CC81F": "Summer Infant, Inc.",
"0CC9C6": "Samwin Hong Kong Limited",
"0CCB8D": "ASCO Numatics GmbH",
"0CCDD3": "EASTRIVER TECHNOLOGY CO., LTD.",
"0CCDFB": "EDIC Systems Inc.",
"0CD292": "Intel Corporate",
"0CD2B5": "Binatone Telecommunication Pvt. Ltd",
"0CD502": "Westell",
"0CD696": "Amimon Ltd",
"0CD7C2": "Axium Technologies, Inc.",
"0CD996": "CISCO SYSTEMS, INC.",
"0CD9C1": "Johnson Controls-ASG",
"0CDA41": "Hangzhou H3C Technologies Co., Limited",
"0CDCCC": "Inala Technologies",
"0CDDEF": "Nokia Corporation",
"0CDFA4": "Samsung Electronics Co.,Ltd",
"0CE0E4": "Plantronics, Inc",
"0CE5D3": "DH electronics GmbH",
"0CE709": "Fox Crypto B.V.",
"0CE82F": "Bonfiglioli Vectron GmbH",
"0CE936": "ELIMOS srl",
"0CEEE6": "Hon Hai Precision Ind. Co.,Ltd.",
"0CEF7C": "AnaCom Inc",
"0CF019": "Malgn Technology Co., Ltd.",
"0CF0B4": "Globalsat International Technology Ltd",
"0CF361": "Java Information",
"0CF3EE": "EM Microelectronic",
"0CF405": "Beijing Signalway Technologies Co.,Ltd",
"0CF893": "ARRIS Group, Inc.",
"0CFC83": "Airoha Technology Corp.,",
"100000": "PRIVATE",
"10005A": "IBM Corp",
"1000E8": "NATIONAL SEMICONDUCTOR",
"1000FD": "LaonPeople",
"1001CA": "Ashley Butterworth",
"1005CA": "Cisco",
"10090C": "Janome Sewing Machine Co., Ltd.",
"100BA9": "Intel Corporate",
"100C24": "pomdevices, LLC",
"100D2F": "Online Security Pty. Ltd.",
"100D32": "Embedian, Inc.",
"100D7F": "NETGEAR INC.,",
"100E2B": "NEC CASIO Mobile Communications",
"100E7E": "Juniper networks",
"100F18": "Fu Gang Electronic(KunShan)CO.,LTD",
"1010B6": "McCain Inc",
"101212": "Vivo International Corporation Pty Ltd",
"101248": "ITG, Inc.",
"1013EE": "Justec International Technology INC.",
"10189E": "Elmo Motion Control",
"101B54": "HUAWEI TECHNOLOGIES CO.,LTD",
"101C0C": "Apple",
"101D51": "ON-Q LLC dba ON-Q Mesh Networks",
"101DC0": "Samsung Electronics Co.,Ltd",
"101F74": "Hewlett-Packard Company",
"102279": "ZeroDesktop, Inc.",
"1027BE": "TVIP",
"102831": "Morion Inc.",
"102D96": "Looxcie Inc.",
"102EAF": "Texas Instruments",
"103047": "Samsung Electronics Co.,Ltd",
"103378": "FLECTRON Co., LTD",
"103711": "Simlink AS",
"103B59": "Samsung Electronics Co.,Ltd",
"103DEA": "HFC Technology (Beijing) Ltd. Co.",
"1040F3": "Apple",
"104369": "Soundmax Electronic Limited",
"10445A": "Shaanxi Hitech Electronic Co., LTD",
"1045BE": "Norphonic AS",
"1045F8": "LNT-Automation GmbH",
"104780": "HUAWEI TECHNOLOGIES CO.,LTD",
"1048B1": "Beijing Duokan Technology Limited",
"104D77": "Innovative Computer Engineering",
"1056CA": "Peplink International Ltd.",
"105C3B": "Perma-Pipe, Inc.",
"105CBF": "DuroByte Inc",
"105F06": "Actiontec Electronics, Inc",
"105F49": "Cisco SPVTG",
"10604B": "Hewlett Packard",
"1062C9": "Adatis GmbH & Co. KG",
"1064E2": "ADFweb.com s.r.l.",
"1065A3": "Core Brands LLC",
"1065CF": "IQSIM",
"106682": "NEC AccessTechnica, Ltd.",
"10683F": "LG Electronics",
"106F3F": "Buffalo Inc.",
"106FEF": "Ad-Sol Nissin Corp",
"1071F9": "Cloud Telecomputers, LLC",
"10768A": "EoCell",
"1077B1": "Samsung Electronics Co.,LTD",
"1078CE": "Hanvit SI, Inc.",
"1078D2": "ELITEGROUP COMPUTER SYSTEM CO., LTD.",
"107A86": "U&U ENGINEERING INC.",
"107BEF": "ZyXEL Communications Corp",
"1083D2": "Microseven Systems, LLC",
"10880F": "Daruma Telecomunica\u00e7\u00f5es e Inform\u00e1tica S.A.",
"1088CE": "Fiberhome Telecommunication Tech.Co.,Ltd.",
"108CCF": "CISCO SYSTEMS, INC.",
"109266": "Samsung Electronics Co.,Ltd",
"1093E9": "Apple",
"109AB9": "Tosibox Oy",
"109ADD": "Apple",
"109FA9": "Actiontec Electronics, Inc",
"10A13B": "FUJIKURA RUBBER LTD.",
"10A5D0": "Murata Manufacturing Co.,Ltd.",
"10A743": "SK Mtek Limited",
"10A932": "Beijing Cyber Cloud Technology Co. ,Ltd.",
"10AE60": "PRIVATE",
"10B26B": "base Co.,Ltd.",
"10B713": "PRIVATE",
"10B7F6": "Plastoform Industries Ltd.",
"10B9FE": "Lika srl",
"10BAA5": "GANA I&C CO., LTD",
"10BD18": "CISCO SYSTEMS, INC.",
"10BF48": "ASUSTEK COMPUTER INC.",
"10C2BA": "UTT Co., Ltd.",
"10C586": "BIO SOUND LAB CO., LTD.",
"10C61F": "Huawei Technologies Co., Ltd",
"10C6FC": "Garmin International",
"10C73F": "Midas Klark Teknik Ltd",
"10CA81": "PRECIA",
"10CCDB": "AXIMUM PRODUITS ELECTRONIQUES",
"10D1DC": "INSTAR Deutschland GmbH",
"10D542": "Samsung Electronics Co.,Ltd",
"10DDB1": "Apple",
"10DDF4": "Maxway Electronics CO.,LTD",
"10DEE4": "automationNEXT GmbH",
"10E2D5": "Qi Hardware Inc.",
"10E3C7": "Seohwa Telecom",
"10E4AF": "APR, LLC",
"10E6AE": "Source Technologies, LLC",
"10E8EE": "PhaseSpace",
"10EA59": "Cisco SPVTG",
"10EED9": "Canoga Perkins Corporation",
"10F311": "Cisco",
"10F3DB": "Gridco Systems, Inc.",
"10F49A": "T3 Innovation",
"10F96F": "LG Electronics",
"10F9EE": "Nokia Corporation",
"10FBF0": "KangSheng LTD.",
"10FC54": "Shany Electronic Co., Ltd.",
"10FEED": "TP-LINK TECHNOLOGIES CO., LTD.",
"1100AA": "PRIVATE",
"140708": "PRIVATE",
"1407E0": "Abrantix AG",
"140C76": "FREEBOX SAS",
"140D4F": "Flextronics International",
"14109F": "Apple",
"141330": "Anakreon UK LLP",
"14144B": "FUJIAN STAR-NET COMMUNICATION CO.,LTD",
"141A51": "Treetech Sistemas Digitais",
"141BBD": "Volex Inc.",
"141BF0": "Intellimedia Systems Ltd",
"1423D7": "EUTRONIX CO., LTD.",
"142BD2": "Armtel Ltd.",
"142BD6": "Guangdong Appscomm Co.,Ltd",
"142D27": "Hon Hai Precision Ind. Co.,Ltd.",
"142D8B": "Incipio Technologies, Inc",
"142DF5": "Amphitech",
"14307A": "Avermetrics",
"1430C6": "Motorola Mobility LLC",
"14358B": "Mediabridge Products, LLC.",
"1435B3": "Future Designs, Inc.",
"143605": "Nokia Corporation",
"14373B": "PROCOM Systems",
"143AEA": "Dynapower Company LLC",
"143DF2": "Beijing Shidai Hongyuan Network Communication Co.,Ltd",
"143E60": "Alcatel-Lucent",
"1441E2": "Monaco Enterprises, Inc.",
"144319": "Creative&Link Technology Limited",
"1446E4": "AVISTEL",
"144978": "Digital Control Incorporated",
"1449E0": "Samsung Electro Mechanics co.,LTD.",
"144C1A": "Max Communication GmbH",
"145412": "Entis Co., Ltd.",
"145645": "Savitech Corp.",
"145A05": "Apple",
"145BD1": "ARRIS Group, Inc.",
"146080": "zte corporation",
"146308": "JABIL CIRCUIT (SHANGHAI) LTD.",
"146A0B": "Cypress Electronics Limited",
"146E0A": "PRIVATE",
"147373": "TUBITAK UEKAE",
"147411": "RIM",
"147DB3": "JOA TELECOM.CO.,LTD",
"147DC5": "Murata Manufacturing Co., Ltd.",
"14825B": "Hefei Radio Communication Technology Co., Ltd",
"148692": "TP-LINK TECHNOLOGIES CO.,LTD.",
"1489FD": "Samsung Electronics",
"148A70": "ADS GmbH",
"148FC6": "Apple",
"149090": "KongTop industrial(shen zhen)CO.,LTD",
"149448": "BLU CASTLE S.A.",
"1499E2": "Apple, Inc",
"149FE8": "Lenovo Mobile Communication Technology Ltd.",
"14A62C": "S.M. Dezac S.A.",
"14A86B": "ShenZhen Telacom Science&Technology Co., Ltd",
"14A9E3": "MST CORPORATION",
"14ABF0": "ARRIS Group, Inc.",
"14B126": "Industrial Software Co",
"14B1C8": "InfiniWing, Inc.",
"14B484": "Samsung Electronics Co.,Ltd",
"14B73D": "ARCHEAN Technologies",
"14B968": "Huawei Technologies Co., Ltd",
"14C089": "DUNE HD LTD",
"14C126": "Nokia Corporation",
"14C21D": "Sabtech Industries",
"14CC20": "TP-LINK TECHNOLOGIES CO.,LTD",
"14CF8D": "OHSUNG ELECTRONICS CO., LTD.",
"14CF92": "TP-LINK TECHNOLOGIES CO., LTD.",
"14CFE2": "ARRIS Group, Inc.",
"14D4FE": "Pace plc",
"14D64D": "D-Link International",
"14D76E": "CONCH ELECTRONIC Co.,Ltd",
"14DAE9": "ASUSTek COMPUTER INC.",
"14DB85": "S NET MEDIA",
"14E4EC": "mLogic LLC",
"14E6E4": "TP-LINK TECHNOLOGIES CO., LTD.",
"14EB33": "BSMediasoft Co., Ltd.",
"14EDA5": "Wa\u0308chter GmbH Sicherheitssysteme",
"14EE9D": "AirNav Systems LLC",
"14F0C5": "Xtremio Ltd.",
"14F28E": "ShenYang ZhongKe-Allwin Technology Co.LTD",
"14F42A": "Samsung Electronics",
"14FEAF": "SAGITTAR LIMITED",
"14FEB5": "Dell Inc",
"18002D": "Sony Mobile Communications AB",
"1800DB": "Fitbit Inc.",
"1801E3": "Elektrobit Wireless Communications Ltd",
"180373": "Dell Inc",
"1803FA": "IBT Interfaces",
"180675": "DILAX Intelcom GmbH",
"180B52": "Nanotron Technologies GmbH",
"180C14": "iSonea Limited",
"180C77": "Westinghouse Electric Company, LLC",
"180CAC": "CANON INC.",
"18104E": "CEDINT-UPM",
"181420": "TEB SAS",
"181456": "Nokia Corporation",
"181714": "DAEWOOIS",
"181725": "Cameo Communications, Inc.",
"18193F": "Tamtron Oy",
"181BEB": "Actiontec Electronics, Inc",
"181EB0": "Samsung Electronics Co.,Ltd",
"182012": "Aztech Associates Inc.",
"182032": "Apple",
"1820A6": "Sage Co., Ltd.",
"18227E": "Samsung Electronics Co.,Ltd",
"182666": "Samsung Electronics Co.,Ltd",
"182861": "AirTies Wireless Networks",
"182A7B": "Nintendo Co., Ltd.",
"182B05": "8D Technologies",
"182C91": "Concept Development, Inc.",
"1832A2": "LAON TECHNOLOGY CO., LTD.",
"18339D": "CISCO SYSTEMS, INC.",
"183451": "Apple",
"1836FC": "Elecsys International Corporation",
"183825": "Wuhan Lingjiu High-tech Co.,Ltd.",
"183919": "Unicoi Systems",
"183BD2": "BYD Precision Manufacture Company Ltd.",
"183DA2": "Intel Corporate",
"183F47": "Samsung Electronics Co.,Ltd",
"18421D": "PRIVATE",
"18422F": "Alcatel Lucent",
"184462": "Riava Networks, Inc.",
"184617": "Samsung Electronics",
"1848D8": "Fastback Networks",
"184A6F": "Alcatel-Lucent Shanghai Bell Co., Ltd",
"184E94": "MESSOA TECHNOLOGIES INC.",
"185253": "Pixord Corporation",
"1853E0": "Hanyang Digitech Co.Ltd",
"18550F": "Cisco SPVTG",
"185933": "Cisco SPVTG",
"185AE8": "Zenotech.Co.,Ltd",
"18622C": "SAGEMCOM SAS",
"186472": "Aruba Networks",
"1866E3": "Veros Systems, Inc.",
"18673F": "Hanover Displays Limited",
"186751": "KOMEG Industrielle Messtechnik GmbH",
"1867B0": "Samsung Electronics Co.,LTD",
"186D99": "Adanis Inc.",
"1879A2": "GMJ ELECTRIC LIMITED",
"187A93": "AMICCOM Electronics Corporation",
"187C81": "Valeo Vision Systems",
"187ED5": "shenzhen kaism technology Co. Ltd",
"1880CE": "Barberry Solutions Ltd",
"1880F5": "Alcatel-Lucent Shanghai Bell Co., Ltd",
"188331": "Samsung Electronics Co.,Ltd",
"188410": "CoreTrust Inc.",
"18863A": "DIGITAL ART SYSTEM",
"1886AC": "Nokia Danmark A/S",
"188796": "HTC Corporation",
"188857": "Beijing Jinhong Xi-Dian Information Technology Corp.",
"1889DF": "CerebrEX Inc.",
"188ED5": "TP Vision Belgium N.V. - innovation site Brugge",
"18922C": "Virtual Instruments",
"1897FF": "TechFaith Wireless Technology Limited",
"189A67": "CSE-Servelec Limited",
"189C5D": "Cisco",
"189EFC": "Apple",
"18A905": "Hewlett-Packard Company",
"18A99B": "Dell Inc",
"18AA45": "Fon Technology",
"18ABF5": "Ultra Electronics - Electrics",
"18AD4D": "Polostar Technology Corporation",
"18AEBB": "Siemens Convergence Creators GmbH&Co.KG",
"18AF61": "Apple, Inc",
"18AF8F": "Apple",
"18AF9F": "DIGITRONIC Automationsanlagen GmbH",
"18B209": "Torrey Pines Logic, Inc",
"18B3BA": "Netlogic AB",
"18B430": "Nest Labs Inc.",
"18B591": "I-Storm",
"18B79E": "Invoxia",
"18C086": "Broadcom Corporation",
"18C451": "Tucson Embedded Systems",
"18C8E7": "Shenzhen Hualistone Technology Co.,Ltd",
"18CC23": "Philio Technology Corporation",
"18D071": "DASAN SMC, Inc.",
"18D5B6": "SMG Holdings LLC",
"18D66A": "Inmarsat",
"18D6CF": "Kurth Electronic GmbH",
"18D949": "Qvis Labs, LLC",
"18DC56": "Yulong Computer Telecommunication Scientific(shenzhen)Co.,Lt",
"18E288": "STT Condigi",
"18E2C2": "Samsung Electronics",
"18E728": "Cisco",
"18E7F4": "Apple",
"18E80F": "Viking Electronics Inc.",
"18E8DD": "MODULETEK",
"18EF63": "CISCO SYSTEMS, INC.",
"18F46A": "Hon Hai Precision Ind. Co.,Ltd.",
"18F650": "Multimedia Pacific Limited",
"18F87A": "i3 International Inc.",
"18FA6F": "ISC applied systems corp",
"18FB7B": "Dell Inc",
"18FC9F": "Changhe Electronics Co., Ltd.",
"18FE34": "Espressif Inc.",
"18FF2E": "Shenzhen Rui Ying Da Technology Co., Ltd",
"1C0656": "IDY Corporation",
"1C08C1": "Lg Innotek",
"1C0B52": "EPICOM S.A",
"1C0FCF": "Sypro Optics GmbH",
"1C11E1": "Wartsila Finland Oy",
"1C129D": "IEEE PES PSRC/SUB",
"1C1448": "ARRIS Group, Inc.",
"1C17D3": "CISCO SYSTEMS, INC.",
"1C184A": "ShenZhen RicherLink Technologies Co.,LTD",
"1C19DE": "eyevis GmbH",
"1C1AC0": "Apple",
"1C1B68": "ARRIS Group, Inc.",
"1C1CFD": "Dalian Hi-Think Computer Technology, Corp",
"1C1D67": "Shenzhen Huawei Communication Technologies Co., Ltd",
"1C1D86": "Cisco",
"1C334D": "ITS Telecom",
"1C3477": "Innovation Wireless",
"1C35F1": "NEW Lift Neue Elektronische Wege Steuerungsbau GmbH",
"1C37BF": "Cloudium Systems Ltd.",
"1C3A4F": "AccuSpec Electronics, LLC",
"1C3DE7": "Sigma Koki Co.,Ltd.",
"1C3E84": "Hon Hai Precision Ind. Co.,Ltd.",
"1C4158": "Gemalto M2M GmbH",
"1C43EC": "JAPAN CIRCUIT CO.,LTD",
"1C4593": "Texas Instruments",
"1C48F9": "GN Netcom A/S",
"1C4AF7": "AMON INC",
"1C4BB9": "SMG ENTERPRISE, LLC",
"1C4BD6": "AzureWave",
"1C51B5": "Techaya LTD",
"1C52D6": "FLAT DISPLAY TECHNOLOGY CORPORATION",
"1C5A3E": "Samsung Eletronics Co., Ltd (Visual Display Divison)",
"1C5A6B": "Philips Electronics Nederland BV",
"1C5C55": "PRIMA Cinema, Inc",
"1C5C60": "Shenzhen Belzon Technology Co.,LTD.",
"1C5FFF": "Beijing Ereneben Information Technology Co.,Ltd Shenzhen Branch",
"1C62B8": "Samsung Electronics Co.,Ltd",
"1C63B7": "OpenProducts 237 AB",
"1C659D": "Liteon Technology Corporation",
"1C666D": "Hon Hai Precision Ind.Co.Ltd",
"1C66AA": "Samsung Electronics",
"1C69A5": "Research In Motion",
"1C6BCA": "Mitsunami Co., Ltd.",
"1C6F65": "GIGA-BYTE TECHNOLOGY CO.,LTD.",
"1C7508": "COMPAL INFORMATION (KUNSHAN) CO., LTD.",
"1C76CA": "Terasic Technologies Inc.",
"1C7839": "Shenzhen Tencent Computer System Co., Ltd.",
"1C7B21": "Sony Mobile Communications AB",
"1C7C11": "EID",
"1C7C45": "Vitek Industrial Video Products, Inc.",
"1C7CC7": "Coriant GmbH",
"1C7EE5": "D-Link International",
"1C83B0": "Linked IP GmbH",
"1C8464": "FORMOSA WIRELESS COMMUNICATION CORP.",
"1C86AD": "MCT CO., LTD.",
"1C8E8E": "DB Communication & Systems Co., ltd.",
"1C8F8A": "Phase Motion Control SpA",
"1C9179": "Integrated System Technologies Ltd",
"1C9492": "RUAG Schweiz AG",
"1C955D": "I-LAX ELECTRONICS INC.",
"1C959F": "Veethree Electronics And Marine LLC",
"1C973D": "PRICOM Design",
"1C994C": "Murata Manufactuaring Co.,Ltd.",
"1CA770": "SHENZHEN CHUANGWEI-RGB ELECTRONICS CO.,LT",
"1CAA07": "CISCO SYSTEMS, INC.",
"1CAB01": "Innovolt",
"1CABA7": "Apple",
"1CAF05": "Samsung Electronics Co.,Ltd",
"1CAFF7": "D-LINK INTERNATIONAL PTE LIMITED",
"1CB094": "HTC Corporation",
"1CB17F": "NEC AccessTechnica, Ltd.",
"1CB243": "TDC A/S",
"1CBA8C": "Texas Instruments",
"1CBBA8": "OJSC \"Ufimskiy Zavod \"Promsvyaz\"",
"1CBD0E": "Amplified Engineering Pty Ltd",
"1CBDB9": "D-LINK INTERNATIONAL PTE LIMITED",
"1CC11A": "Wavetronix",
"1CC1DE": "Hewlett-Packard Company",
"1CC316": "MileSight Technology Co., Ltd.",
"1CC63C": "Arcadyan Technology Corporation",
"1CD40C": "Kriwan Industrie-Elektronik GmbH",
"1CDF0F": "CISCO SYSTEMS, INC.",
"1CE165": "Marshal Corporation",
"1CE192": "Qisda Corporation",
"1CE2CC": "Texas Instruments",
"1CE62B": "Apple",
"1CE6C7": "Cisco",
"1CEEE8": "Ilshin Elecom",
"1CF061": "SCAPS GmbH",
"1CF4CA": "PRIVATE",
"1CF5E7": "Turtle Industry Co., Ltd.",
"1CFA68": "TP-LINK TECHNOLOGIES CO.,LTD.",
"1CFCBB": "Realfiction ApS",
"1CFEA7": "IDentytech Solutins Ltd.",
"20014F": "Linea Research Ltd",
"2002AF": "Murata Manufactuaring Co.,Ltd.",
"200505": "RADMAX COMMUNICATION PRIVATE LIMITED",
"2005E8": "OOO InProMedia",
"2008ED": "Huawei Technologies Co., Ltd",
"200A5E": "Xiangshan Giant Eagle Technology Developing co.,LTD",
"200BC7": "HUAWEI TECHNOLOGIES CO.,LTD",
"200CC8": "NETGEAR INC.,",
"200E95": "IEC \u2013 TC9 WG43",
"20107A": "Gemtek Technology Co., Ltd.",
"201257": "Most Lucky Trading Ltd",
"2013E0": "Samsung Electronics Co.,Ltd",
"2016D8": "Liteon Technology Corporation",
"20180E": "Shenzhen Sunchip Technology Co., Ltd",
"201A06": "COMPAL INFORMATION (KUNSHAN) CO., LTD.",
"201D03": "Elatec GmbH",
"2021A5": "LG Electronics Inc",
"202564": "PEGATRON CORPORATION",
"202598": "Teleview",
"202BC1": "Shenzhen Huawei Communication Technologies Co., Ltd",
"202CB7": "Kong Yue Electronics & Information Industry (Xinhui) Ltd.",
"203706": "CISCO SYSTEMS, INC.",
"2037BC": "Kuipers Electronic Engineering BV",
"203A07": "Cisco",
"204005": "feno GmbH",
"20415A": "Smarteh d.o.o.",
"20443A": "Schneider Electric Asia Pacific Ltd",
"2046A1": "VECOW Co., Ltd",
"2046F9": "Advanced Network Devices (dba:AND)",
"204AAA": "Hanscan Spain S.A.",
"204C6D": "Hugo Brennenstuhl Gmbh & Co. KG.",
"204E6B": "Axxana(israel) ltd",
"204E7F": "NETGEAR",
"2053CA": "Risk Technology Ltd",
"205476": "Sony Mobile Communications AB",
"205721": "Salix Technology CO., Ltd.",
"2059A0": "Paragon Technologies Inc.",
"205A00": "Coval",
"205B2A": "PRIVATE",
"205B5E": "Shenzhen Wonhe Technology Co., Ltd",
"206432": "SAMSUNG ELECTRO MECHANICS CO.,LTD.",
"2067B1": "Pluto inc.",
"20689D": "Liteon Technology Corporation",
"206A8A": "Wistron InfoComm Manufacturing(Kunshan)Co.,Ltd.",
"206AFF": "Atlas Elektronik UK Limited",
"206E9C": "Samsung Electronics Co.,Ltd",
"206FEC": "Braemac CA LLC",
"207355": "ARRIS Group, Inc.",
"2074CF": "Shenzhen Voxtech Co.,Ltd",
"207600": "Actiontec Electronics, Inc",
"207C8F": "Quanta Microsystems,Inc.",
"207D74": "Apple",
"20858C": "Assa",
"2087AC": "AES motomation",
"208984": "COMPAL INFORMATION (KUNSHAN) CO., LTD",
"208986": "zte corporation",
"20918A": "PROFALUX",
"2091D9": "I'M SPA",
"209AE9": "Volacomm Co., Ltd",
"209BA5": "JIAXING GLEAD Electronics Co.,Ltd",
"20A2E7": "Lee-Dickens Ltd",
"20A787": "Bointec Taiwan Corporation Limited",
"20AA25": "IP-NET LLC",
"20AA4B": "Cisco-Linksys, LLC",
"20B0F7": "Enclustra GmbH",
"20B399": "Enterasys",
"20B5C6": "Mimosa Networks",
"20B7C0": "Omicron electronics GmbH",
"20BBC0": "Cisco",
"20BBC6": "Jabil Circuit Hungary Ltd.",
"20BFDB": "DVL",
"20C1AF": "i Wit Digital Co., Limited",
"20C60D": "Shanghai annijie Information technology Co.,LTD",
"20C6EB": "Panasonic Corporation AVC Networks Company",
"20C8B3": "SHENZHEN BUL-TECH CO.,LTD.",
"20C9D0": "Apple",
"20CD39": "Texas Instruments, Inc",
"20CEC4": "Peraso Technologies",
"20CF30": "ASUSTek COMPUTER INC.",
"20D21F": "Wincal Technology Corp.",
"20D390": "Samsung Electronics Co.,Ltd",
"20D5AB": "Korea Infocom Co.,Ltd.",
"20D5BF": "Samsung Eletronics Co., Ltd",
"20D607": "Nokia Corporation",
"20D906": "Iota, Inc.",
"20DC93": "Cheetah Hi-Tech, Inc.",
"20DCE6": "TP-LINK TECHNOLOGIES CO., LTD.",
"20DF3F": "Nanjing SAC Power Grid Automation Co., Ltd.",
"20E52A": "NETGEAR INC.,",
"20E564": "ARRIS Group, Inc.",
"20E791": "Siemens Healthcare Diagnostics, Inc",
"20EAC7": "SHENZHEN RIOPINE ELECTRONICS CO., LTD",
"20EEC6": "Elefirst Science & Tech Co ., ltd",
"20F002": "MTData Developments Pty. Ltd.",
"20F3A3": "Huawei Technologies Co., Ltd",
"20F85E": "Delta Electronics",
"20FABB": "Cambridge Executive Limited",
"20FDF1": "3COM EUROPE LTD",
"20FECD": "System In Frontier Inc.",
"20FEDB": "M2M Solution S.A.S.",
"2401C7": "Cisco",
"24050F": "MTN Electronic Co. Ltd",
"240917": "Devlin Electronics Limited",
"240A11": "TCT Mobile Limited",
"240A64": "AzureWaveTechnologies,Inc",
"240B2A": "Viettel Group",
"240BB1": "KOSTAL Industrie Elektrik GmbH",
"241064": "Shenzhen Ecsino Tecnical Co. Ltd",
"241125": "Hutek Co., Ltd.",
"241148": "Entropix, LLC",
"2411D0": "Chongqing Ehs Science and Technology Development Co.,Ltd.",
"241A8C": "Squarehead Technology AS",
"241B13": "Shanghai Nutshell Electronic Co., Ltd.",
"241F2C": "Calsys, Inc.",
"2421AB": "Sony Ericsson Mobile Communications",
"242642": "SHARP Corporation.",
"242FFA": "Toshiba Global Commerce Solutions",
"24336C": "PRIVATE",
"24374C": "Cisco SPVTG",
"2437EF": "EMC Electronic Media Communication SA",
"243C20": "Dynamode Group",
"244597": "GEMUE Gebr. Mueller Apparatebau",
"24470E": "PentronicAB",
"24497B": "Innovative Converged Devices Inc",
"245FDF": "KYOCERA Corporation",
"246278": "sysmocom - systems for mobile communications GmbH",
"2464EF": "CYG SUNRI CO.,LTD.",
"246511": "AVM GmbH",
"24694A": "Jasmine Systems Inc.",
"2469A5": "Huawei Technologies Co., Ltd",
"246AAB": "IT-IS International",
"24767D": "Cisco SPVTG",
"247703": "Intel Corporate",
"248000": "Westcontrol AS",
"2481AA": "KSH International Co., Ltd.",
"24828A": "Prowave Technologies Ltd.",
"2486F4": "Ctek, Inc.",
"248707": "SEnergy Corporation",
"2493CA": "Voxtronic Technology Computer-Systeme GmbH",
"249442": "OPEN ROAD SOLUTIONS , INC.",
"249504": "SFR",
"24A2E1": "Apple, Inc",
"24A42C": "KOUKAAM a.s.",
"24A43C": "Ubiquiti Networks, INC",
"24A495": "Thales Canada Inc.",
"24A87D": "Panasonic Automotive Systems Asia Pacific(Thailand)Co.,Ltd.",
"24A937": "PURE Storage",
"24AB81": "Apple",
"24AF4A": "Alcatel-Lucent-IPD",
"24AF54": "NEXGEN Mediatech Inc.",
"24B657": "CISCO SYSTEMS, INC.",
"24B6B8": "FRIEM SPA",
"24B6FD": "Dell Inc",
"24B88C": "Crenus Co.,Ltd.",
"24B8D2": "Opzoon Technology Co.,Ltd.",
"24BA30": "Technical Consumer Products, Inc.",
"24BBC1": "Absolute Analysis",
"24BC82": "Dali Wireless, Inc.",
"24BE05": "Hewlett Packard",
"24BF74": "PRIVATE",
"24C0B3": "RSF",
"24C696": "Samsung Electronics Co.,Ltd",
"24C848": "mywerk system GmbH",
"24C86E": "Chaney Instrument Co.",
"24C9A1": "Ruckus Wireless",
"24C9DE": "Genoray",
"24CBE7": "MYK, Inc.",
"24CF21": "Shenzhen State Micro Technology Co., Ltd",
"24D2CC": "SmartDrive Systems Inc.",
"24D921": "Avaya, Inc",
"24DAB6": "Sistemas de Gesti\u00f3n Energ\u00e9tica S.A. de C.V",
"24DBAC": "Shenzhen Huawei Communication Technologies Co., Ltd",
"24DBAD": "ShopperTrak RCT Corporation",
"24DBED": "Samsung Electronics Co.,Ltd",
"24DEC6": "Aruba Networks",
"24E271": "Qingdao Hisense Communications Co.,Ltd",
"24E6BA": "JSC Zavod im. Kozitsky",
"24E9B3": "Cisco",
"24EA40": "Systeme Helmholz GmbH",
"24EB65": "SAET I.S. S.r.l.",
"24EC99": "Askey Computer Corp",
"24ECD6": "CSG Science & Technology Co.,Ltd.Hefei",
"24EE3A": "Chengdu Yingji Electronic Hi-tech Co Ltd",
"24F0FF": "GHT Co., Ltd.",
"24F2DD": "Radiant Zemax LLC",
"24F5AA": "Samsung Electronics Co.,LTD",
"24FD52": "Liteon Technology Corporation",
"2804E0": "FERMAX ELECTRONICA S.A.U.",
"28061E": "NINGBO GLOBAL USEFUL ELECTRIC CO.,LTD",
"28068D": "ITL, LLC",
"280B5C": "Apple",
"280CB8": "Mikrosay Yazilim ve Elektronik A.S.",
"280DFC": "Sony Computer Entertainment Inc.",
"28107B": "D-Link International",
"281471": "Lantis co., LTD.",
"28162E": "2Wire",
"2817CE": "Omnisense Ltd",
"281878": "Microsoft Corporation",
"2818FD": "Aditya Infotech Ltd.",
"282246": "Beijing Sinoix Communication Co., LTD",
"2826A6": "PBR electronics GmbH",
"28285D": "ZyXEL Communications Corporation",
"2829D9": "GlobalBeiMing technology (Beijing)Co. Ltd",
"282CB2": "TP-LINK TECHNOLOGIES CO.,LTD.",
"283152": "HUAWEI TECHNOLOGIES CO.,LTD",
"2832C5": "Humax.co.,ltd",
"283410": "Enigma Diagnostics Limited",
"2834A2": "Cisco",
"283737": "Apple",
"2838CF": "Gen2wave",
"2839E7": "Preceno Technology Pte.Ltd.",
"283B96": "Cool Control LTD",
"283CE4": "Huawei Technologies Co., Ltd",
"28401A": "C8 MediSensors, Inc.",
"284121": "OptiSense Network, LLC",
"284430": "GenesisTechnical Systems (UK) Ltd",
"2847AA": "Nokia Corporation",
"284846": "GridCentric Inc.",
"284C53": "Intune Networks",
"284D92": "Luminator",
"284ED7": "OutSmart Power Systems, Inc.",
"284FCE": "Liaoning Wontel Science and Technology Development Co.,Ltd.",
"285132": "Shenzhen Prayfly Technology Co.,Ltd",
"285767": "Echostar Technologies Corp",
"285FDB": "Shenzhen Huawei Communication Technologies Co., Ltd",
"286046": "Lantech Communications Global, Inc.",
"286094": "CAPELEC",
"286336": "Siemens AG - Industrial Automation - EWA",
"28656B": "Keystone Microtech Corporation",
"286AB8": "Apple",
"286ABA": "Apple",
"286D97": "SAMJIN Co., Ltd.",
"286ED4": "HUAWEI TECHNOLOGIES CO.,LTD",
"287184": "Spire Payments",
"2872C5": "Smartmatic Corp",
"2872F0": "ATHENA",
"287994": "Realplay Digital Technology(Shenzhen) Co.,Ltd",
"288023": "Hewlett Packard",
"28852D": "Touch Networks",
"288915": "CashGuard Sverige AB",
"288A1C": "Juniper networks",
"2891D0": "Stage Tec Entwicklungsgesellschaft f\u00fcr professionelle Audiotechnik mbH",
"28924A": "Hewlett Packard",
"2893FE": "CISCO SYSTEMS, INC.",
"28940F": "CISCO SYSTEMS, INC.",
"2894AF": "Samhwa Telecom",
"28987B": "Samsung Electronics Co.,Ltd",
"289A4B": "SteelSeries ApS",
"289AFA": "TCT Mobile Limited",
"289EDF": "Danfoss Turbocor Compressors, Inc",
"28A186": "enblink",
"28A192": "GERP Solution",
"28A1EB": "ETEK TECHNOLOGY (SHENZHEN) CO.,LTD",
"28A241": "exlar corp",
"28A574": "Miller Electric Mfg. Co.",
"28AF0A": "Sirius XM Radio Inc",
"28B0CC": "Xenya d.o.o.",
"28B2BD": "Intel Corporate",
"28B3AB": "Genmark Automation",
"28BA18": "NextNav, LLC",
"28BAB5": "Samsung Electronics Co.,Ltd",
"28BB59": "RNET Technologies, Inc.",
"28BE9B": "Technicolor USA Inc.",
"28C0DA": "Juniper Networks",
"28C671": "Yota Devices OY",
"28C68E": "NETGEAR INC.,",
"28C718": "Altierre",
"28C7CE": "Cisco",
"28C825": "DellKing Industrial Co., Ltd",
"28C914": "Taimag Corporation",
"28CBEB": "One",
"28CC01": "Samsung Electronics Co.,Ltd",
"28CCFF": "Corporacion Empresarial Altra SL",
"28CD1C": "Espotel Oy",
"28CD4C": "Individual Computers GmbH",
"28CD9C": "Shenzhen Dynamax Software Development Co.,Ltd.",
"28CFDA": "Apple",
"28CFE9": "Apple",
"28D1AF": "Nokia Corporation",
"28D244": "LCFC(HeFei) Electronics Technology Co., Ltd.",
"28D576": "Premier Wireless, Inc.",
"28D93E": "Telecor Inc.",
"28D997": "Yuduan Mobile Co., Ltd.",
"28DB81": "Shanghai Guao Electronic Technology Co., Ltd",
"28DEF6": "bioMerieux Inc.",
"28E02C": "Apple",
"28E14C": "Apple, Inc.",
"28E297": "Shanghai InfoTM Microelectronics Co.,Ltd.",
"28E347": "Liteon Technology Corporation",
"28E608": "Tokheim",
"28E794": "Microtime Computer Inc.",
"28E7CF": "Apple",
"28ED58": "JAG Jakob AG",
"28EE2C": "Frontline Test Equipment",
"28EF01": "PRIVATE",
"28F358": "2C - Trifonov & Co",
"28F532": "ADD-Engineering BV",
"28F606": "Syes srl",
"28FBD3": "Ragentek Technology Group",
"28FC51": "The Electric Controller and Manufacturing Co., LLC",
"28FCF6": "Shenzhen Xin KingBrand enterprises Co.,Ltd",
"2C002C": "UNOWHY",
"2C0033": "EControls, LLC",
"2C00F7": "XOS",
"2C0623": "Win Leader Inc.",
"2C073C": "DEVLINE LIMITED",
"2C10C1": "Nintendo Co., Ltd.",
"2C18AE": "Trend Electronics Co., Ltd.",
"2C1984": "IDN Telecom, Inc.",
"2C1EEA": "AERODEV",
"2C2172": "Juniper Networks",
"2C245F": "Babolat VS",
"2C26C5": "zte corporation",
"2C27D7": "Hewlett-Packard Company",
"2C282D": "BBK COMMUNICATIAO TECHNOLOGY CO.,LTD.",
"2C2D48": "bct electronic GesmbH",
"2C3068": "Pantech Co.,Ltd",
"2C3427": "ERCO & GENER",
"2C3557": "ELLIY Power CO..Ltd",
"2C36A0": "Capisco Limited",
"2C36F8": "CISCO SYSTEMS, INC.",
"2C3731": "ShenZhen Yifang Digital Technology Co.,LTD",
"2C3996": "SAGEMCOM",
"2C39C1": "Ciena Corporation",
"2C3A28": "Fagor Electr\u00f3nica",
"2C3BFD": "Netstor Technology Co., Ltd.",
"2C3ECF": "Cisco",
"2C3F38": "CISCO SYSTEMS, INC.",
"2C3F3E": "Alge-Timing GmbH",
"2C4138": "Hewlett-Packard Company",
"2C4401": "Samsung Electronics Co.,Ltd",
"2C441B": "Spectrum Medical Limited",
"2C44FD": "Hewlett Packard",
"2C534A": "Shenzhen Winyao Electronic Limited",
"2C542D": "CISCO SYSTEMS, INC.",
"2C553C": "Gainspeed, Inc.",
"2C59E5": "Hewlett Packard",
"2C5A05": "Nokia Corporation",
"2C5AA3": "PROMATE ELECTRONIC CO.LTD",
"2C5BE1": "Centripetal Networks, Inc",
"2C5D93": "Ruckus Wireless",
"2C5FF3": "Pertronic Industries",
"2C625A": "Finest Security Systems Co., Ltd",
"2C6289": "Regenersis (Glenrothes) Ltd",
"2C67FB": "ShenZhen Zhengjili Electronics Co., LTD",
"2C69BA": "RF Controls, LLC",
"2C6BF5": "Juniper networks",
"2C7155": "HiveMotion",
"2C72C3": "Soundmatters",
"2C750F": "Shanghai Dongzhou-Lawton Communication Technology Co. Ltd.",
"2C768A": "Hewlett-Packard Company",
"2C7B5A": "Milper Ltd",
"2C7B84": "OOO Petr Telegin",
"2C7ECF": "Onzo Ltd",
"2C8065": "HARTING Inc. of North America",
"2C8158": "Hon Hai Precision Ind. Co.,Ltd",
"2C8A72": "HTC Corporation",
"2C8BF2": "Hitachi Metals America Ltd",
"2C9127": "Eintechno Corporation",
"2C922C": "Kishu Giken Kogyou Company Ltd,.",
"2C9464": "Cincoze Co., Ltd.",
"2C957F": "zte corporation",
"2C9717": "I.C.Y. B.V.",
"2C9AA4": "NGI SpA",
"2C9E5F": "ARRIS Group, Inc.",
"2C9EFC": "CANON INC.",
"2CA157": "acromate, Inc.",
"2CA780": "True Technologies Inc.",
"2CA835": "RIM",
"2CAB25": "Shenzhen Gongjin Electronics Co.,Ltd",
"2CB05D": "NETGEAR",
"2CB0DF": "Soliton Technologies Pvt Ltd",
"2CB43A": "Apple",
"2CB693": "Radware",
"2CB69D": "RED Digital Cinema",
"2CBE08": "Apple",
"2CBE97": "Ingenieurbuero Bickele und Buehler GmbH",
"2CC260": "Ravello Systems",
"2CCC15": "Nokia Corporation",
"2CCD27": "Precor Inc",
"2CCD43": "Summit Technology Group",
"2CCD69": "Aqavi.com",
"2CD05A": "Liteon Technology Corporation",
"2CD1DA": "Sanjole, Inc.",
"2CD2E7": "Nokia Corporation",
"2CD444": "Fujitsu Limited",
"2CDD0C": "Discovergy GmbH",
"2CE2A8": "DeviceDesign",
"2CE412": "SAGEMCOM SAS",
"2CE6CC": "Ruckus Wireless",
"2CE871": "Alert Metalguard ApS",
"2CEDEB": "Alpheus Digital Company Limited",
"2CEE26": "Petroleum Geo-Services",
"2CF203": "EMKO ELEKTRONIK SAN VE TIC AS",
"2CF4C5": "Avaya, Inc",
"30055C": "Brother industries, LTD.",
"300B9C": "Delta Mobile Systems, Inc.",
"300D2A": "Zhejiang Wellcom Technology Co.,Ltd.",
"300ED5": "Hon Hai Precision Ind.Co.Ltd",
"3010E4": "Apple, Inc.",
"30142D": "Piciorgros GmbH",
"30144A": "Wistron Neweb Corp.",
"301518": "Ubiquitous Communication Co. ltd.",
"30168D": "ProLon",
"3017C8": "Sony Ericsson Mobile Communications AB",
"3018CF": "DEOS control systems GmbH",
"301966": "Samsung Electronics Co.,Ltd",
"301A28": "Mako Networks Ltd",
"30215B": "Shenzhen Ostar Display Electronic Co.,Ltd",
"302DE8": "JDA, LLC (JDA Systems)",
"303294": "W-IE-NE-R Plein & Baus GmbH",
"3032D4": "Hanilstm Co., Ltd.",
"3037A6": "CISCO SYSTEMS, INC.",
"303855": "Nokia Corporation",
"303926": "Sony Ericsson Mobile Communications AB",
"303955": "Shenzhen Jinhengjia Electronic Co., Ltd.",
"3039F2": "ADB Broadband Italia",
"303A64": "Intel Corporate",
"303D08": "GLINTT TES S.A.",
"303EAD": "Sonavox Canada Inc",
"304174": "ALTEC LANSING LLC",
"304449": "PLATH GmbH",
"30469A": "NETGEAR",
"30493B": "Nanjing Z-Com Wireless Co.,Ltd",
"304C7E": "Panasonic Electric Works Automation Controls Techno Co.,Ltd.",
"304EC3": "Tianjin Techua Technology Co., Ltd.",
"3051F8": "BYK-Gardner GmbH",
"30525A": "NST Co., LTD",
"3055ED": "Trex Network LLC",
"3057AC": "IRLAB LTD.",
"3059B7": "Microsoft",
"305D38": "Beissbarth",
"306023": "ARRIS Group, Inc.",
"306112": "PAV GmbH",
"306118": "Paradom Inc.",
"3065EC": "Wistron (ChongQing)",
"30688C": "Reach Technology Inc.",
"30694B": "RIM",
"306CBE": "Skymotion Technology (HK) Limited",
"306E5C": "Validus Technologies",
"3071B2": "Hangzhou Prevail Optoelectronic Equipment Co.,LTD.",
"30766F": "LG Electronics",
"30786B": "TIANJIN Golden Pentagon Electronics Co., Ltd.",
"3078C2": "Innowireless, Co. Ltd.",
"307C30": "RIM",
"307ECB": "SFR",
"3085A9": "Asustek Computer Inc",
"308730": "Shenzhen Huawei Communication Technologies Co., Ltd",
"308999": "Guangdong East Power Co.,",
"308CFB": "Dropcam",
"3090AB": "Apple",
"30918F": "Technicolor",
"3092F6": "SHANGHAI SUNMON COMMUNICATION TECHNOGY CO.,LTD",
"309BAD": "BBK Electronics Corp., Ltd.,",
"30A8DB": "Sony Mobile Communications AB",
"30AABD": "Shanghai Reallytek Information Technology Co.,Ltd",
"30AE7B": "Deqing Dusun Electron CO., LTD",
"30AEF6": "Radio Mobile Access",
"30B216": "Hytec Geraetebau GmbH",
"30B3A2": "Shenzhen Heguang Measurement & Control Technology Co.,Ltd",
"30B5C2": "TP-LINK TECHNOLOGIES CO.,LTD.",
"30C750": "MIC Technology Group",
"30C7AE": "Samsung Electronics Co.,Ltd",
"30C82A": "Wi-Next s.r.l.",
"30CDA7": "Samsung Electronics ITS, Printer division",
"30D17E": "HUAWEI TECHNOLOGIES CO.,LTD",
"30D357": "Logosol, Inc.",
"30D46A": "Autosales Incorporated",
"30D6C9": "Samsung Electronics Co.,Ltd",
"30DE86": "Cedac Software S.r.l.",
"30E48E": "Vodafone UK",
"30E4DB": "CISCO SYSTEMS, INC.",
"30EB25": "INTEK DIGITAL",
"30EFD1": "Alstom Strongwish (Shenzhen) Co., Ltd.",
"30F31D": "zte corporation",
"30F33A": "+plugg srl",
"30F42F": "ESP",
"30F70D": "Cisco Systems",
"30F7C5": "Apple",
"30F7D7": "Thread Technology Co., Ltd",
"30F9ED": "Sony Corporation",
"30FD11": "MACROTECH (USA) INC.",
"3407FB": "Ericsson AB",
"340804": "D-Link Corporation",
"340AFF": "Qingdao Hisense Communications Co.,Ltd",
"3413A8": "Mediplan Limited",
"3413E8": "Intel Corporate",
"34159E": "Apple",
"3417EB": "Dell Inc",
"341A4C": "SHENZHEN WEIBU ELECTRONICS CO.,LTD.",
"341B22": "Grandbeing Technology Co., Ltd",
"342109": "Jensen Scandinavia AS",
"342387": "Hon Hai Precision Ind. Co.,Ltd.",
"3423BA": "Samsung Electro Mechanics co.,LTD.",
"34255D": "Shenzhen Loadcom Technology Co.,Ltd",
"3429EA": "MCD ELECTRONICS SP. Z O.O.",
"342F6E": "Anywire corporation",
"343111": "Samsung Electronics Co.,Ltd",
"3440B5": "IBM",
"34466F": "HiTEM Engineering",
"344B3D": "Fiberhome Telecommunication Tech.Co.,Ltd.",
"344B50": "ZTE Corporation",
"344F3F": "IO-Power Technology Co., Ltd.",
"344F5C": "R&M AG",
"344F69": "EKINOPS SAS",
"3451AA": "JID GLOBAL",
"3451C9": "Apple",
"345B11": "EVI HEAT AB",
"345C40": "Cargt Holdings LLC",
"345D10": "Wytek",
"346178": "The Boeing Company",
"34684A": "Teraworks Co., Ltd.",
"346BD3": "Huawei Technologies Co., Ltd",
"346E8A": "Ecosense",
"346F92": "White Rodgers Division",
"3475C7": "Avaya, Inc",
"3476C5": "I-O DATA DEVICE, INC.",
"347877": "O-NET Communications(Shenzhen) Limited",
"347E39": "Nokia Danmark A/S",
"348137": "UNICARD SA",
"3481C4": "AVM GmbH",
"3482DE": "Kayo Technology, Inc.",
"348302": "iFORCOM Co., Ltd",
"348446": "Ericsson AB",
"34862A": "Heinz Lackmann GmbH & Co KG",
"34885D": "Logitech Far East",
"348AAE": "SAGEMCOM SAS",
"3495DB": "Logitec Corporation",
"3497FB": "ADVANCED RF TECHNOLOGIES INC",
"34996F": "VPI Engineering",
"3499D7": "Universal Flow Monitors, Inc.",
"349A0D": "ZBD Displays Ltd",
"349D90": "Heinzmann GmbH & CO. KG",
"34A183": "AWare, Inc",
"34A3BF": "Terewave. Inc.",
"34A55D": "TECHNOSOFT INTERNATIONAL SRL",
"34A5E1": "Sensorist ApS",
"34A68C": "Shine Profit Development Limited",
"34A709": "Trevil srl",
"34A7BA": "Fischer International Systems Corporation",
"34A843": "KYOCERA Display Corporation",
"34A84E": "Cisco",
"34AA8B": "Samsung Electronics Co.,Ltd",
"34AA99": "Alcatel-Lucent",
"34AAEE": "Mikrovisatos Servisas UAB",
"34ADE4": "Shanghai Chint Power Systems Co., Ltd.",
"34AF2C": "Nintendo Co., Ltd.",
"34B1F7": "Texas Instruments",
"34B571": "PLDS",
"34BA51": "Se-Kure Controls, Inc.",
"34BA9A": "Asiatelco Technologies Co.",
"34BB1F": "Research In Motion",
"34BCA6": "Beijing Ding Qing Technology, Ltd.",
"34BDC8": "Cisco Systems",
"34BDF9": "Shanghai WDK Industrial Co.,Ltd.",
"34BDFA": "Cisco SPVTG",
"34BE00": "Samsung Electronics Co.,Ltd",
"34BF90": "Fiberhome Telecommunication Tech.Co.,Ltd.",
"34C059": "Apple",
"34C3AC": "Samsung Electronics",
"34C69A": "Enecsys Ltd",
"34C731": "ALPS Co,. Ltd.",
"34C803": "Nokia Corporation",
"34C99D": "EIDOLON COMMUNICATIONS TECHNOLOGY CO. LTD.",
"34CD6D": "CommSky Technologies",
"34CDBE": "Huawei Technologies Co., Ltd",
"34CE94": "Parsec (Pty) Ltd",
"34D09B": "MobilMAX Technology Inc.",
"34D2C4": "RENA GmbH Print Systeme",
"34D7B4": "Tributary Systems, Inc.",
"34DBFD": "Cisco",
"34DE1A": "Intel Corporate",
"34DE34": "zte corporation",
"34DF2A": "Fujikon Industrial Co.,Limited",
"34E0CF": "zte corporation",
"34E0D7": "DONGGUAN QISHENG ELECTRONICS INDUSTRIAL CO., LTD",
"34E2FD": "Apple",
"34E42A": "Automatic Bar Controls Inc.",
"34EF44": "2Wire",
"34EF8B": "NTT Communications Corporation",
"34F39B": "WizLAN Ltd.",
"34F62D": "SHARP Corporation",
"34F968": "ATEK Products, LLC",
"34FA40": "Guangzhou Robustel Technologies Co., Limited",
"34FC6F": "ALCEA",
"380197": "Toshiba Samsung Storage Technolgoy Korea Corporation",
"3806B4": "A.D.C. GmbH",
"380A0A": "Sky-City Communication and Electronics Limited Company",
"380A94": "Samsung Electronics Co.,Ltd",
"380B40": "Samsung Electronics Co.,Ltd",
"380DD4": "Primax Electronics LTD.",
"380F4A": "Apple",
"380FE4": "Dedicated Network Partners Oy",
"3816D1": "Samsung Electronics Co.,Ltd",
"381766": "PROMZAKAZ LTD.",
"38192F": "Nokia Corporation",
"381C4A": "SIMCom Wireless Solutions Co.,Ltd.",
"38229D": "Pirelli Tyre S.p.A.",
"3822D6": "H3C Technologies Co., Limited",
"3826CD": "ANDTEK",
"3828EA": "Fujian Netcom Technology Co., LTD",
"382DD1": "Samsung Electronics Co.,Ltd",
"3831AC": "WEG",
"383F10": "DBL Technology Ltd.",
"384233": "Wildeboer Bauteile GmbH",
"3842A6": "Ingenieurbuero Stahlkopf",
"384369": "Patrol Products Consortium LLC",
"38458C": "MyCloud Technology corporation",
"384608": "ZTE Corporation",
"38484C": "Apple",
"384FF0": "Azurewave Technologies, Inc.",
"38521A": "Alcatel-Lucent 7705",
"38580C": "Panaccess Systems GmbH",
"3859F8": "MindMade sp. z o.o.",
"3859F9": "Hon Hai Precision Ind. Co.,Ltd.",
"385AA8": "Beijing Zhongdun Security Technology Development Co.",
"385FC3": "Yu Jeong System, Co.Ltd",
"386077": "PEGATRON CORPORATION",
"3863F6": "3NOD MULTIMEDIA(SHENZHEN)CO.,LTD",
"386645": "OOSIC Technology CO.,Ltd",
"386793": "Asia Optical Co., Inc.",
"386BBB": "ARRIS Group, Inc.",
"386C9B": "Ivy Biomedical",
"386E21": "Wasion Group Ltd.",
"3872C0": "COMTREND",
"387B47": "AKELA, Inc.",
"388345": "TP-LINK TECHNOLOGIES CO., LTD.",
"3889DC": "Opticon Sensors Europe B.V.",
"388AB7": "ITC Networks",
"388EE7": "Fanhattan LLC",
"3891FB": "Xenox Holding BV",
"389592": "Beijing Tendyron Corporation",
"389F83": "OTN Systems N.V.",
"38A53C": "Veenstra Instruments",
"38A5B6": "SHENZHEN MEGMEET ELECTRICAL CO.,LTD",
"38A851": "Moog, Ing",
"38A86B": "Orga BV",
"38A95F": "Actifio Inc",
"38AA3C": "SAMSUNG ELECTRO-MECHANICS",
"38B12D": "Sonotronic Nagel GmbH",
"38B5BD": "E.G.O. Elektro-Ger",
"38B74D": "Fijowave Limited",
"38BB23": "OzVision America LLC",
"38BB3C": "Avaya, Inc",
"38BC1A": "Meizu technology co.,ltd",
"38BF2F": "Espec Corp.",
"38BF33": "NEC CASIO Mobile Communications",
"38C096": "ALPS ELECTRIC CO.,LTD.",
"38C7BA": "CS Services Co.,Ltd.",
"38C85C": "Cisco SPVTG",
"38C9A9": "SMART High Reliability Solutions, Inc.",
"38CA97": "Contour Design LLC",
"38D135": "EasyIO Corporation Sdn. Bhd.",
"38DBBB": "Sunbow Telecom Co., Ltd.",
"38DE60": "Mohlenhoff GmbH",
"38E08E": "Mitsubishi Electric Corporation",
"38E595": "Shenzhen Gongjin Electronics Co.,Ltd",
"38E7D8": "HTC Corporation",
"38E8DF": "b gmbh medien + datenbanken",
"38E98C": "Reco S.p.A.",
"38EAA7": "Hewlett Packard",
"38EC11": "Novatek Microelectronics Corp.",
"38ECE4": "Samsung Electronics",
"38EE9D": "Anedo Ltd.",
"38F098": "Vapor Stone Rail Systems",
"38F597": "home2net GmbH",
"38F708": "National Resource Management, Inc.",
"38F8B7": "V2COM PARTICIPACOES S.A.",
"38FEC5": "Ellips B.V.",
"3C02B1": "Creation Technologies LP",
"3C04BF": "PRAVIS SYSTEMS Co.Ltd.,",
"3C05AB": "Product Creation Studio",
"3C0754": "Apple",
"3C0771": "Sony Corporation",
"3C081E": "Beijing Yupont Electric Power Technology Co.,Ltd",
"3C08F6": "Cisco",
"3C096D": "Powerhouse Dynamics",
"3C0C48": "Servergy, Inc.",
"3C0E23": "Cisco",
"3C0FC1": "KBC Networks",
"3C1040": "daesung network",
"3C106F": "ALBAHITH TECHNOLOGIES",
"3C15C2": "Apple",
"3C15EA": "TESCOM CO., LTD.",
"3C18A0": "Luxshare Precision Industry Co.,Ltd.",
"3C1915": "GFI Chrono Time",
"3C197D": "Ericsson AB",
"3C1A57": "Cardiopulmonary Corp",
"3C1A79": "Huayuan Technology CO.,LTD",
"3C1CBE": "JADAK LLC",
"3C25D7": "Nokia Corporation",
"3C26D5": "Sotera Wireless",
"3C2763": "SLE quality engineering GmbH & Co. KG",
"3C2DB7": "Texas Instruments",
"3C2F3A": "SFORZATO Corp.",
"3C300C": "Dewar Electronics Pty Ltd",
"3C363D": "Nokia Corporation",
"3C36E4": "Arris Group, Inc.",
"3C3888": "ConnectQuest, llc",
"3C39C3": "JW Electronics Co., Ltd.",
"3C3A73": "Avaya, Inc",
"3C404F": "Guangdong Pisen Electronics Co. Ltd.",
"3C438E": "ARRIS Group, Inc.",
"3C4A92": "Hewlett-Packard Company",
"3C4C69": "Infinity System S.L.",
"3C4E47": "Etronic A/S",
"3C57BD": "Kessler Crane Inc.",
"3C57D5": "FiveCo",
"3C5A37": "Samsung Electronics",
"3C5F01": "Synerchip Co., Ltd.",
"3C6104": "Juniper Networks",
"3C6200": "Samsung electronics CO., LTD",
"3C6278": "SHENZHEN JETNET TECHNOLOGY CO.,LTD.",
"3C672C": "Sciovid Inc.",
"3C6A7D": "Niigata Power Systems Co., Ltd.",
"3C6E63": "Mitron OY",
"3C6F45": "Fiberpro Inc.",
"3C6FF7": "EnTek Systems, Inc.",
"3C7059": "MakerBot Industries",
"3C7437": "RIM",
"3C754A": "ARRIS Group, Inc.",
"3C77E6": "Hon Hai Precision Ind. Co.,Ltd.",
"3C7DB1": "Texas Instruments",
"3C81D8": "SAGEMCOM SAS",
"3C83B5": "Advance Vision Electronics Co. Ltd.",
"3C86A8": "Sangshin elecom .co,, LTD",
"3C89A6": "KAPELSE",
"3C8AB0": "Juniper Networks",
"3C8AE5": "Tensun Information Technology(Hangzhou) Co.,LTD",
"3C8BFE": "Samsung Electronics",
"3C9157": "Hangzhou Yulong Conmunication Co.,Ltd",
"3C9174": "ALONG COMMUNICATION TECHNOLOGY",
"3C94D5": "Juniper Networks",
"3C970E": "Wistron InfoComm(Kunshan)Co.,Ltd.",
"3C977E": "IPS Technology Limited",
"3C98BF": "Quest Controls, Inc.",
"3C99F7": "Lansentechnology AB",
"3C9F81": "Shenzhen CATIC Bit Communications Technology Co.,Ltd",
"3CA10D": "Samsung Electronics Co.,Ltd",
"3CA315": "Bless Information & Communications Co., Ltd",
"3CA72B": "MRV Communications (Networks) LTD",
"3CA9F4": "Intel Corporate",
"3CAB8E": "Apple",
"3CB15B": "Avaya, Inc",
"3CB17F": "Wattwatchers Pty Ld",
"3CB87A": "PRIVATE",
"3CB9A6": "Belden Deutschland GmbH",
"3CBDD8": "LG ELECTRONICS INC",
"3CC0C6": "d&b audiotechnik GmbH",
"3CC12C": "AES Corporation",
"3CC1F6": "Melange Systems Pvt. Ltd.",
"3CC243": "Nokia Corporation",
"3CC99E": "Huiyang Technology Co., Ltd",
"3CCA87": "Iders Incorporated",
"3CCD5A": "Technische Alternative GmbH",
"3CCD93": "LG ELECTRONICS INC",
"3CCE73": "CISCO SYSTEMS, INC.",
"3CD0F8": "Apple",
"3CD16E": "Telepower Communication Co., Ltd",
"3CD4D6": "WirelessWERX, Inc",
"3CD7DA": "SK Mtek microelectronics(shenzhen)limited",
"3CD92B": "Hewlett-Packard Company",
"3CDF1E": "CISCO SYSTEMS, INC.",
"3CDFBD": "Huawei Technologies Co., Ltd",
"3CE072": "Apple",
"3CE5A6": "Hangzhou H3C Technologies Co., Ltd.",
"3CE5B4": "KIDASEN INDUSTRIA E COMERCIO DE ANTENAS LTDA",
"3CE624": "LG Display",
"3CEA4F": "2Wire",
"3CEAFB": "NSE AG",
"3CF392": "Virtualtek. Co. Ltd",
"3CF52C": "DSPECIALISTS GmbH",
"3CF72A": "Nokia Corporation",
"3CF748": "Shenzhen Linsn Technology Development Co.,Ltd",
"3CF808": "HUAWEI TECHNOLOGIES CO.,LTD",
"3CFB96": "Emcraft Systems LLC",
"400107": "Arista Corp",
"4001C6": "3COM EUROPE LTD",
"40040C": "A&T",
"4007C0": "Railtec Systems GmbH",
"400E67": "Tremol Ltd.",
"400E85": "Samsung Electro Mechanics co.,LTD.",
"4012E4": "Compass-EOS",
"4013D9": "Global ES",
"401597": "Protect America, Inc.",
"40167E": "ASUSTek COMPUTER INC.",
"40169F": "TP-LINK TECHNOLOGIES CO., LTD.",
"4016FA": "EKM Metering",
"4018B1": "Aerohive Networks Inc.",
"4018D7": "Wyle Telemetry and Data Systems",
"401D59": "Biometric Associates, LP",
"4022ED": "Digital Projection Ltd",
"4025C2": "Intel Corporate",
"40270B": "Mobileeco Co., Ltd",
"402BA1": "Sony Ericsson Mobile Communications AB",
"402CF4": "Universal Global Scientific Industrial Co., Ltd.",
"403004": "Apple",
"403067": "Conlog (Pty) Ltd",
"40336C": "Godrej & Boyce Mfg. co. ltd",
"4037AD": "Macro Image Technology, Inc.",
"403CFC": "Apple",
"404022": "ZIV",
"40406B": "Icomera",
"4045DA": "Spreadtrum Communications (Shanghai) Co., Ltd.",
"404A03": "ZyXEL Communications Corporation",
"404A18": "Addrek Smart Solutions",
"404D8E": "Shenzhen Huawei Communication Technologies Co., Ltd",
"4050E0": "Milton Security Group LLC",
"40516C": "Grandex International Corporation",
"40520D": "Pico Technology",
"405539": "CISCO SYSTEMS, INC.",
"40560C": "In Home Displays Ltd",
"405A9B": "ANOVO",
"405FBE": "RIM",
"405FC2": "Texas Instruments",
"40605A": "Hawkeye Tech Co. Ltd",
"406186": "MICRO-STAR INT'L CO.,LTD",
"40618E": "Stella-Green Co",
"40667A": "mediola - connected living AG",
"406826": "Thales UK Limited",
"406AAB": "RIM",
"406C8F": "Apple",
"406F2A": "Research In Motion",
"407009": "ARRIS Group, Inc.",
"40704A": "Power Idea Technology Limited",
"407074": "Life Technology (China) Co., Ltd",
"407496": "aFUN TECHNOLOGY INC.",
"407875": "IMBEL - Industria de Material Belico do Brasil",
"407A80": "Nokia Corporation",
"407B1B": "Mettle Networks Inc.",
"4083DE": "Motorola",
"408493": "Clavister AB",
"4088E0": "Beijing Ereneben Information Technology Limited Shenzhen Branch",
"408A9A": "TITENG CO., Ltd.",
"408B07": "Actiontec Electronics, Inc",
"408BF6": "Shenzhen TCL New Technology Co; Ltd.",
"409558": "Aisino Corporation",
"4097D1": "BK Electronics cc",
"40984C": "Casacom Solutions AG",
"40984E": "Texas Instruments",
"40987B": "Aisino Corporation",
"409FC7": "BAEKCHUN I&C Co., Ltd.",
"40A6A4": "PassivSystems Ltd",
"40A6D9": "Apple",
"40A8F0": "Hewlett Packard",
"40AC8D": "Data Management, Inc.",
"40B0FA": "LG Electronics",
"40B2C8": "Nortel Networks",
"40B395": "Apple",
"40B3CD": "Chiyoda Electronics Co.,Ltd.",
"40B3FC": "Logital Co. Limited",
"40B4F0": "Juniper Networks",
"40B6B1": "SUNGSAM CO,.Ltd",
"40B7F3": "ARRIS Group, Inc.",
"40BA61": "Arima Communications Corp.",
"40BC73": "Cronoplast S.L.",
"40BC8B": "itelio GmbH",
"40BD9E": "Physio-Control, Inc",
"40BF17": "Digistar Telecom. SA",
"40C245": "Shenzhen Hexicom Technology Co., Ltd.",
"40C4D6": "ChongQing Camyu Technology Development Co.,Ltd.",
"40C7C9": "Naviit Inc.",
"40CBA8": "Huawei Technologies Co., Ltd",
"40CD3A": "Z3 Technology",
"40D32D": "Apple",
"40D40E": "Biodata Ltd",
"40D559": "MICRO S.E.R.I.",
"40D855": "IEEE REGISTRATION AUTHORITY",
"40E730": "DEY Storage Systems, Inc.",
"40E793": "Shenzhen Siviton Technology Co.,Ltd",
"40ECF8": "Siemens AG",
"40EF4C": "Fihonest communication co.,Ltd",
"40F02F": "Liteon Technology Corporation",
"40F14C": "ISE Europe SPRL",
"40F201": "SAGEMCOM",
"40F2E9": "IBM",
"40F308": "Murata Manufactuaring Co.,Ltd.",
"40F407": "Nintendo Co., Ltd.",
"40F4EC": "CISCO SYSTEMS, INC.",
"40F52E": "Leica Microsystems (Schweiz) AG",
"40FC89": "ARRIS Group, Inc.",
"4403A7": "Cisco",
"440CFD": "NetMan Co., Ltd.",
"4411C2": "Telegartner Karl Gartner GmbH",
"441319": "WKK TECHNOLOGY LTD.",
"44184F": "Fitview",
"4419B6": "Hangzhou Hikvision Digital Technology Co.,Ltd.",
"441E91": "ARVIDA Intelligent Electronics Technology Co.,Ltd.",
"441EA1": "Hewlett-Packard Company",
"4423AA": "Farmage Co., Ltd.",
"4425BB": "Bamboo Entertainment Corporation",
"442938": "NietZsche enterprise Co.Ltd.",
"442A60": "Apple",
"442AFF": "E3 Technology, Inc.",
"442B03": "CISCO SYSTEMS, INC.",
"443192": "Hewlett Packard",
"44322A": "Avaya, Inc",
"4432C8": "Technicolor USA Inc.",
"44334C": "Shenzhen Bilian electronic CO.,LTD",
"44348F": "MXT INDUSTRIAL LTDA",
"443719": "2 Save Energy Ltd",
"44376F": "Young Electric Sign Co",
"4437E6": "Hon Hai Precision Ind.Co.Ltd",
"443839": "Cumulus Networks, inc",
"4439C4": "Universal Global Scientific Industrial Co.,Ltd",
"443C9C": "Pintsch Tiefenbach GmbH",
"443D21": "Nuvolt",
"443EB2": "DEOTRON Co., LTD.",
"444891": "HDMI Licensing, LLC",
"444A65": "Silverflare Ltd.",
"444C0C": "Apple",
"444E1A": "Samsung Electronics Co.,Ltd",
"444F5E": "Pan Studios Co.,Ltd.",
"4451DB": "Raytheon BBN Technologies",
"4454C0": "Thompson Aerospace",
"44568D": "PNC Technologies Co., Ltd.",
"4456B7": "Spawn Labs, Inc",
"445829": "Cisco SPVTG",
"44599F": "Criticare Systems, Inc",
"445EF3": "Tonalite Holding B.V.",
"445F7A": "Shihlin Electric & Engineering Corp.",
"446132": "ecobee inc",
"44619C": "FONsystem co. ltd.",
"446755": "Orbit Irrigation",
"4468AB": "JUIN COMPANY, LIMITED",
"446C24": "Reallin Electronic Co.,Ltd",
"446D57": "Liteon Technology Corporation",
"44700B": "IFFU",
"447098": "MING HONG TECHNOLOGY (SHEN ZHEN) LIMITED",
"447BC4": "DualShine Technology(SZ)Co.,Ltd",
"447C7F": "Innolight Technology Corporation",
"447DA5": "VTION INFORMATION TECHNOLOGY (FUJIAN) CO.,LTD",
"447E76": "Trek Technology (S) Pte Ltd",
"447E95": "Alpha and Omega, Inc",
"448312": "Star-Net",
"448500": "Intel Corporate",
"4486C1": "Siemens Low Voltage & Products",
"4487FC": "ELITEGROUP COMPUTER SYSTEM CO., LTD.",
"448A5B": "Micro-Star INT'L CO., LTD.",
"448C52": "KTIS CO., Ltd",
"448E12": "DT Research, Inc.",
"448E81": "VIG",
"4491DB": "Shanghai Huaqin Telecom Technology Co.,Ltd",
"4494FC": "NETGEAR INC.,",
"4495FA": "Qingdao Santong Digital Technology Co.Ltd",
"449B78": "The Now Factory",
"449CB5": "Alcomp, Inc",
"44A42D": "TCT Mobile Limited",
"44A689": "PROMAX ELECTRONICA SA",
"44A7CF": "Murata Manufacturing Co., Ltd.",
"44A8C2": "SEWOO TECH CO., LTD",
"44AA27": "udworks Co., Ltd.",
"44AAE8": "Nanotec Electronic GmbH & Co. KG",
"44ADD9": "Cisco",
"44B382": "Kuang-chi Institute of Advanced Technology",
"44C15C": "Texas Instruments",
"44C233": "Guangzhou Comet Technology Development Co.Ltd",
"44C306": "SIFROM Inc.",
"44C39B": "OOO RUBEZH NPO",
"44C4A9": "Opticom Communication, LLC",
"44C56F": "NGN Easy Satfinder (Tianjin) Electronic Co., Ltd",
"44C9A2": "Greenwald Industries",
"44D15E": "Shanghai Kingto Information Technology Ltd",
"44D2CA": "Anvia TV Oy",
"44D3CA": "CISCO SYSTEMS, INC.",
"44D4E0": "Sony Mobile Communications AB",
"44D63D": "Talari Networks",
"44D832": "Azurewave Technologies, Inc.",
"44D884": "Apple",
"44DC91": "PLANEX COMMUNICATIONS INC.",
"44DCCB": "SEMINDIA SYSTEMS PVT LTD",
"44E08E": "Cisco SPVTG",
"44E49A": "OMNITRONICS PTY LTD",
"44E4D9": "CISCO SYSTEMS, INC.",
"44E8A5": "Myreka Technologies Sdn. Bhd.",
"44ED57": "Longicorn, inc.",
"44EE30": "Budelmann Elektronik GmbH",
"44F459": "Samsung Electronics",
"44F849": "Union Pacific Railroad",
"44FB42": "Apple",
"48022A": "B-Link Electronic Limited",
"480362": "DESAY ELECTRONICS(HUIZHOU)CO.,LTD",
"481249": "Luxcom Technologies Inc.",
"4813F3": "BBK Electronics Corp., Ltd.",
"48174C": "MicroPower technologies",
"481842": "Shanghai Winaas Co. Equipment Co. Ltd.",
"481A84": "Pointer Telocation Ltd",
"481BD2": "Intron Scientific co., ltd.",
"4826E8": "Tek-Air Systems, Inc.",
"48282F": "ZTE Corporation",
"482CEA": "Motorola Inc Business Light Radios",
"4833DD": "ZENNIO AVANCE Y TECNOLOGIA, S.L.",
"48343D": "IEP GmbH",
"483D32": "Syscor Controls & Automation",
"484487": "Cisco SPVTG",
"4844F7": "Samsung Electronics Co., LTD",
"4846F1": "Uros Oy",
"4846FB": "HUAWEI TECHNOLOGIES CO.,LTD",
"4851B7": "Intel Corporate",
"485261": "SOREEL",
"485929": "LG Electronics",
"485A3F": "WISOL",
"485AB6": "Hon Hai Precision Ind. Co.,Ltd.",
"485B39": "ASUSTek COMPUTER INC.",
"485D60": "Azurewave Technologies, Inc.",
"4860BC": "Apple",
"4861A3": "Concern \"Axion\" JSC",
"486276": "HUAWEI TECHNOLOGIES CO.,LTD",
"486B91": "Fleetwood Group Inc.",
"486E73": "Pica8, Inc.",
"486FD2": "StorSimple Inc",
"487119": "SGB GROUP LTD.",
"48746E": "Apple",
"487604": "PRIVATE",
"488244": "Life Fitness / Div. of Brunswick",
"488E42": "DIGALOG GmbH",
"489153": "Weinmann Ger\u00e4te f\u00fcr Medizin GmbH + Co. KG",
"4891F6": "Shenzhen Reach software technology CO.,LTD",
"489BE2": "SCI Innovations Ltd",
"489D24": "Research In Motion",
"48A22D": "Shenzhen Huaxuchang Telecom Technology Co.,Ltd",
"48A2B7": "Kodofon JSC",
"48A6D2": "GJsun Optical Science and Tech Co.,Ltd.",
"48AA5D": "Store Electronic Systems",
"48B253": "Marketaxess Corporation",
"48B5A7": "Glory Horse Industries Ltd.",
"48B8DE": "HOMEWINS TECHNOLOGY CO.,LTD.",
"48B977": "PulseOn Oy",
"48B9C2": "Teletics Inc.",
"48BE2D": "Symanitron",
"48C1AC": "PLANTRONICS, INC.",
"48C862": "Simo Wireless,Inc.",
"48C8B6": "SysTec GmbH",
"48CB6E": "Cello Electronics (UK) Ltd",
"48D0CF": "Universal Electronics, Inc.",
"48D18E": "Metis Communication Co.,Ltd",
"48D224": "Liteon Technology Corporation",
"48D54C": "Jeda Networks",
"48D705": "Apple",
"48D7FF": "BLANKOM Antennentechnik GmbH",
"48D855": "IEEE REGISTRATION AUTHORITY",
"48D8FE": "ClarIDy Solutions, Inc.",
"48DCFB": "Nokia Corporation",
"48DF1C": "Wuhan NEC Fibre Optic Communications industry Co. Ltd",
"48E1AF": "Vity",
"48EA63": "Zhejiang Uniview Technologies Co., Ltd.",
"48EB30": "ETERNA TECHNOLOGY, INC.",
"48ED80": "daesung eltec",
"48EE07": "Silver Palm Technologies LLC",
"48EE86": "UTStarcom (China) Co.,Ltd",
"48F230": "Ubizcore Co.,LTD",
"48F317": "PRIVATE",
"48F47D": "TechVision Holding Internation Limited",
"48F7F1": "Alcatel-Lucent",
"48F8B3": "Cisco-Linksys, LLC",
"48F8E1": "Alcatel Lucent WT",
"48F925": "Maestronic",
"48FCB8": "Woodstream Corporation",
"48FEEA": "HOMA B.V.",
"4C0082": "Cisco",
"4C022E": "CMR KOREA CO., LTD",
"4C0289": "LEX COMPUTECH CO., LTD",
"4C068A": "Basler Electric Company",
"4C07C9": "COMPUTER OFFICE Co.,Ltd.",
"4C09B4": "zte corporation",
"4C0B3A": "TCT Mobile Limited",
"4C0DEE": "JABIL CIRCUIT (SHANGHAI) LTD.",
"4C0F6E": "Hon Hai Precision Ind. Co.,Ltd.",
"4C0FC7": "Earda Electronics Co.,Ltd",
"4C11BF": "ZHEJIANG DAHUA TECHNOLOGY CO.,LTD.",
"4C1480": "NOREGON SYSTEMS, INC",
"4C14A3": "TCL Technoly Electronics (Huizhou) Co., Ltd.",
"4C17EB": "SAGEMCOM",
"4C1A3A": "PRIMA Research And Production Enterprise Ltd.",
"4C1A95": "Novakon Co., Ltd.",
"4C1FCC": "HUAWEI TECHNOLOGIES CO.,LTD",
"4C21D0": "Sony Mobile Communications AB",
"4C2258": "cozybit, Inc.",
"4C2578": "Nokia Corporation",
"4C2C80": "Beijing Skyway Technologies Co.,Ltd",
"4C2F9D": "ICM Controls",
"4C3089": "Thales Transportation Systems GmbH",
"4C322D": "TELEDATA NETWORKS",
"4C32D9": "M Rutty Holdings Pty. Ltd.",
"4C3909": "HPL Electric & Power Private Limited",
"4C3910": "Newtek Electronics co., Ltd.",
"4C3B74": "VOGTEC(H.K.) Co., Ltd",
"4C3C16": "Samsung Electronics Co.,Ltd",
"4C4B68": "Mobile Device, Inc.",
"4C4E35": "Cisco",
"4C5427": "Linepro Sp. z o.o.",
"4C5499": "Shenzhen Huawei Communication Technologies Co., Ltd",
"4C5585": "Hamilton Systems",
"4C55B8": "Turkcell Teknoloji",
"4C55CC": "ACKme Networks Pty Ltd",
"4C5DCD": "Oy Finnish Electric Vehicle Technologies Ltd",
"4C5E0C": "Routerboard.com",
"4C5FD2": "Alcatel-Lucent",
"4C60D5": "airPointe of New Hampshire",
"4C60DE": "NETGEAR",
"4C6255": "SANMINA-SCI SYSTEM DE MEXICO S.A. DE C.V.",
"4C63EB": "Application Solutions (Electronics and Vision) Ltd",
"4C64D9": "Guangdong Leawin Group Co., Ltd",
"4C72B9": "Pegatron Corporation",
"4C7367": "Genius Bytes Software Solutions GmbH",
"4C73A5": "KOVE",
"4C774F": "Embedded Wireless Labs",
"4C7897": "Arrowhead Alarm Products Ltd",
"4C79BA": "Intel Corporate",
"4C7F62": "Nokia Corporation",
"4C804F": "Armstrong Monitoring Corp",
"4C8093": "Intel Corporate",
"4C82CF": "Echostar Technologies",
"4C8B30": "Actiontec Electronics, Inc",
"4C8B55": "Grupo Digicon",
"4C8BEF": "Huawei Technologies Co., Ltd",
"4C8D79": "Apple",
"4C8FA5": "Jastec",
"4C9614": "Juniper Networks",
"4C98EF": "Zeo",
"4C9E80": "KYOKKO ELECTRIC Co., Ltd.",
"4C9EE4": "Hanyang Navicom Co.,Ltd.",
"4CA56D": "Samsung Electronics Co.,Ltd",
"4CA74B": "Alcatel Lucent",
"4CAA16": "AzureWave Technologies (Shanghai) Inc.",
"4CAB33": "KST technology",
"4CAC0A": "ZTE Corporation",
"4CB16C": "HUAWEI TECHNOLOGIES CO.,LTD",
"4CB199": "Apple",
"4CB4EA": "HRD (S) PTE., LTD.",
"4CB81C": "SAM Electronics GmbH",
"4CB9C8": "CONET CO., LTD.",
"4CBAA3": "Bison Electronics Inc.",
"4CBCA5": "Samsung Electronics Co.,Ltd",
"4CC452": "Shang Hai Tyd. Electon Technology Ltd.",
"4CC602": "Radios, Inc.",
"4CC94F": "Alcatel-Lucent",
"4CCA53": "Skyera, Inc.",
"4CCBF5": "zte corporation",
"4CCC34": "Motorola Solutions Inc.",
"4CD637": "Qsono Electronics Co., Ltd",
"4CD7B6": "Helmer Scientific",
"4CD9C4": "Magneti Marelli Automotive Electronics (Guangzhou) Co. Ltd",
"4CDF3D": "TEAM ENGINEERS ADVANCE TECHNOLOGIES INDIA PVT LTD",
"4CE1BB": "Zhuhai HiFocus Technology Co., Ltd.",
"4CE676": "Buffalo Inc.",
"4CEB42": "Intel Corporate",
"4CEDDE": "Askey Computer Corp",
"4CF02E": "Vifa Denmark A/S",
"4CF45B": "Blue Clover Devices",
"4CF737": "SamJi Electronics Co., Ltd",
"50008C": "Hong Kong Telecommunications (HKT) Limited",
"5001BB": "Samsung Electronics",
"50053D": "CyWee Group Ltd",
"500604": "Cisco",
"500B32": "Foxda Technology Industrial(ShenZhen)Co.,LTD",
"500E6D": "TrafficCast International",
"5011EB": "SilverNet Ltd",
"5017FF": "Cisco",
"501AC5": "Microsoft",
"501CBF": "Cisco",
"50206B": "Emerson Climate Technologies Transportation Solutions",
"502267": "PixeLINK",
"50252B": "Nethra Imaging Incorporated",
"502690": "Fujitsu Limited",
"5027C7": "TECHNART Co.,Ltd",
"502A7E": "Smart electronic GmbH",
"502A8B": "Telekom Research and Development Sdn Bhd",
"502D1D": "Nokia Corporation",
"502DA2": "Intel Corporate",
"502DF4": "Phytec Messtechnik GmbH",
"502E5C": "HTC Corporation",
"502ECE": "Asahi Electronics Co.,Ltd",
"503275": "Samsung Electronics Co.,Ltd",
"503955": "Cisco SPVTG",
"503CC4": "Lenovo Mobile Communication Technology Ltd.",
"503DE5": "CISCO SYSTEMS, INC.",
"503F56": "Syncmold Enterprise Corp",
"50465D": "ASUSTek COMPUTER INC.",
"5048EB": "BEIJING HAIHEJINSHENG NETWORK TECHNOLOGY CO. LTD.",
"504A5E": "Masimo Corporation",
"504A6E": "NETGEAR INC.,",
"504F94": "Loxone Electronics GmbH",
"505663": "Texas Instruments",
"5056A8": "Jolla Ltd",
"5056BF": "Samsung Electronics Co.,LTD",
"5057A8": "CISCO SYSTEMS, INC.",
"505800": "WyTec International, Inc.",
"505AC6": "GUANGDONG SUPER TELECOM CO.,LTD.",
"506028": "Xirrus Inc.",
"506184": "Avaya, Inc",
"5061D6": "Indu-Sol GmbH",
"506313": "Hon Hai Precision Ind. Co.,Ltd.",
"506441": "Greenlee",
"5067F0": "ZyXEL Communications Corporation",
"506F9A": "Wi-Fi Alliance",
"5070E5": "He Shan World Fair Electronics Technology Limited",
"50724D": "BEG Brueck Electronic GmbH",
"507691": "Tekpea, Inc.",
"5076A6": "Ecil Informatica Ind. Com. Ltda",
"50795B": "Interexport Telecomunicaciones S.A.",
"507D02": "BIODIT",
"507E5D": "Arcadyan Technology Corporation",
"508569": "Samsung Electronics Co.,LTD",
"5087B8": "Nuvyyo Inc",
"508A42": "Uptmate Technology Co., LTD",
"508ACB": "SHENZHEN MAXMADE TECHNOLOGY CO., LTD.",
"508C77": "DIRMEIER Schanktechnik GmbH &Co KG",
"508D6F": "CHAHOO Limited",
"50934F": "Gradual Tecnologia Ltda.",
"509772": "Westinghouse Digital",
"509871": "Inventum Technologies Private Limited",
"509F27": "Huawei Technologies Co., Ltd",
"50A054": "Actineon",
"50A0BF": "Alba Fiber Systems Inc.",
"50A4C8": "Samsung Electronics Co.,Ltd",
"50A6E3": "David Clark Company",
"50A715": "Aboundi, Inc.",
"50A733": "Ruckus Wireless",
"50ABBF": "Hoseo Telecom",
"50AF73": "Shenzhen Bitland Information Technology Co., Ltd.",
"50B695": "Micropoint Biotechnologies,Inc.",
"50B7C3": "Samsung Electronics CO., LTD",
"50B888": "wi2be Tecnologia S/A",
"50B8A2": "ImTech Technologies LLC,",
"50C006": "Carmanah Signs",
"50C271": "SECURETECH INC",
"50C58D": "Juniper Networks",
"50C7BF": "TP-LINK TECHNOLOGIES CO.,LTD.",
"50C971": "GN Netcom A/S",
"50C9A0": "SKIPPER Electronics AS",
"50CCF8": "Samsung Electro Mechanics",
"50CD32": "NanJing Chaoran Science & Technology Co.,Ltd.",
"50CE75": "Measy Electronics Ltd",
"50D274": "Steffes Corporation",
"50D6D7": "Takahata Precision",
"50E0C7": "TurControlSystme AG",
"50E14A": "PRIVATE",
"50E549": "GIGA-BYTE TECHNOLOGY CO.,LTD.",
"50EAD6": "Apple",
"50EB1A": "Brocade Communications Systems, Inc.",
"50ED78": "Changzhou Yongse Infotech Co.,Ltd",
"50ED94": "Egatel SL",
"50F003": "Open Stack, Inc.",
"50F520": "Samsung Electronics Co.,Ltd",
"50F61A": "Kunshan JADE Technologies co., Ltd.",
"50FAAB": "L-tek d.o.o.",
"50FC30": "Treehouse Labs",
"50FC9F": "Samsung Electronics Co.,Ltd",
"5403F5": "EBN Technology Corp.",
"540496": "Gigawave LTD",
"5404A6": "ASUSTek COMPUTER INC.",
"540536": "Vivago Oy",
"54055F": "Alcatel Lucent",
"54112F": "Sulzer Pump Solutions Finland Oy",
"54115F": "Atamo Pty Ltd",
"541B5D": "Techno-Innov",
"541DFB": "Freestyle Energy Ltd",
"541FD5": "Advantage Electronics",
"542018": "Tely Labs",
"542160": "Resolution Products",
"5422F8": "zte corporation",
"542696": "Apple",
"54271E": "AzureWave Technonloies, Inc.",
"542A9C": "LSY Defense, LLC.",
"542AA2": "Alpha Networks Inc.",
"542CEA": "PROTECTRON",
"542F89": "Euclid Laboratories, Inc.",
"543131": "Raster Vision Ltd",
"543530": "Hon Hai Precision Ind. Co.,Ltd.",
"5435DF": "Symeo GmbH",
"543968": "Edgewater Networks Inc",
"5439DF": "HUAWEI TECHNOLOGIES CO.,LTD",
"543D37": "Ruckus Wireless",
"544249": "Sony Corporation",
"544408": "Nokia Corporation",
"54466B": "Shenzhen CZTIC Electronic Technology Co., Ltd",
"544A00": "Cisco",
"544A05": "wenglor sensoric gmbh",
"544A16": "Texas Instruments",
"5453ED": "Sony Corporation",
"545414": "Digital RF Corea, Inc",
"545EBD": "NL Technologies",
"545FA9": "Teracom Limited",
"5461EA": "Zaplox AB",
"54724F": "Apple",
"547398": "Toyo Electronics Corporation",
"5474E6": "Webtech Wireless",
"5475D0": "CISCO SYSTEMS, INC.",
"54781A": "Cisco",
"547975": "Nokia Corporation",
"547F54": "INGENICO",
"547FA8": "TELCO systems, s.r.o.",
"547FEE": "CISCO SYSTEMS, INC.",
"5481AD": "Eagle Research Corporation",
"54847B": "Digital Devices GmbH",
"54880E": "Samsung Electro Mechanics co., LTD.",
"548922": "Zelfy Inc",
"548998": "HUAWEI TECHNOLOGIES CO.,LTD",
"5492BE": "Samsung Electronics Co.,Ltd",
"549359": "SHENZHEN TWOWING TECHNOLOGIES CO.,LTD.",
"549478": "Silvershore Technology Partners",
"549A16": "Uzushio Electric Co.,Ltd.",
"549B12": "Samsung Electronics",
"549D85": "EnerAccess inc",
"54A04F": "t-mac Technologies Ltd",
"54A31B": "Shenzhen Linkworld Technology Co,.LTD",
"54A51B": "Shenzhen Huawei Communication Technologies Co., Ltd",
"54A54B": "NSC Communications Siberia Ltd",
"54A619": "Alcatel-Lucent Shanghai Bell Co., Ltd",
"54A9D4": "Minibar Systems",
"54AE27": "Apple",
"54B620": "SUHDOL E&C Co.Ltd.",
"54B753": "Hunan Fenghui Yinjia Science And Technology Co.,Ltd",
"54BEF7": "PEGATRON CORPORATION",
"54C80F": "TP-LINK TECHNOLOGIES CO.,LTD.",
"54CDA7": "Fujian Shenzhou Electronic Co.,Ltd",
"54CDEE": "ShenZhen Apexis Electronic Co.,Ltd",
"54D0ED": "AXIM Communications",
"54D163": "MAX-TECH,INC",
"54D1B0": "Universal Laser Systems, Inc",
"54D46F": "Cisco SPVTG",
"54DF63": "Intrakey technologies GmbH",
"54E032": "Juniper Networks",
"54E2E0": "Pace plc",
"54E3B0": "JVL Industri Elektronik",
"54E43A": "Apple, Inc.",
"54E63F": "ShenZhen LingKeWeiEr Technology Co., Ltd.",
"54E6FC": "TP-LINK TECHNOLOGIES CO., LTD.",
"54EAA8": "Apple, Inc.",
"54EE75": "Wistron InfoComm(Kunshan)Co.,Ltd.",
"54EF92": "Shenzhen Elink Technology Co., LTD",
"54F5B6": "ORIENTAL PACIFIC INTERNATIONAL LIMITED",
"54F666": "Berthold Technologies GmbH and Co.KG",
"54FB58": "WISEWARE, Lda",
"54FDBF": "Scheidt & Bachmann GmbH",
"580528": "LABRIS NETWORKS",
"580556": "Elettronica GF S.r.L.",
"5808FA": "Fiber Optic & telecommunication INC.",
"580943": "PRIVATE",
"5809E5": "Kivic Inc.",
"580A20": "Cisco",
"581243": "AcSiP Technology Corp.",
"581626": "Avaya, Inc",
"58170C": "Sony Ericsson Mobile Communications AB",
"581CBD": "Affinegy",
"581D91": "Advanced Mobile Telecom co.,ltd.",
"581F67": "Open-m technology limited",
"581FAA": "Apple",
"581FEF": "Tuttnaer LTD",
"58238C": "Technicolor CH USA",
"582EFE": "Lighting Science Group",
"582F42": "Universal Electric Corporation",
"58343B": "Glovast Technology Ltd.",
"5835D9": "CISCO SYSTEMS, INC.",
"583CC6": "Omneality Ltd.",
"5842E4": "Sigma International General Medical Apparatus, LLC.",
"58468F": "Koncar Electronics and Informatics",
"5846E1": "Baxter Healthcare",
"5848C0": "COFLEC",
"58493B": "Palo Alto Networks",
"5849BA": "Chitai Electronic Corp.",
"584C19": "Chongqing Guohong Technology Development Company Limited",
"584CEE": "Digital One Technologies, Limited",
"585076": "Linear Equipamentos Eletronicos SA",
"5850AB": "TLS Corporation",
"5850E6": "Best Buy Corporation",
"5855CA": "Apple",
"5856E8": "ARRIS Group, Inc.",
"58570D": "Danfoss Solar Inverters",
"58639A": "TPL SYSTEMES",
"5865E6": "INFOMARK CO., LTD.",
"5866BA": "Hangzhou H3C Technologies Co., Limited",
"58671A": "BARNES&NOBLE.COM",
"58677F": "Clare Controls Inc.",
"58696C": "Fujian Ruijie Networks co, ltd",
"5869F9": "Fusion Transactive Ltd.",
"586D8F": "Cisco-Linksys, LLC",
"586ED6": "PRIVATE",
"587521": "CJSC RTSoft",
"587675": "Beijing ECHO Technologies Co.,Ltd",
"587A4D": "Stonesoft Corporation",
"587E61": "Hisense Electric Co., Ltd",
"587FC8": "S2M",
"5884E4": "IP500 Alliance e.V.",
"58874C": "LITE-ON CLEAN ENERGY TECHNOLOGY CORP.",
"5887E2": "Shenzhen Coship Electronics Co., Ltd.",
"588D09": "CISCO SYSTEMS, INC.",
"5891CF": "Intel Corporate",
"58920D": "Kinetic Avionics Limited",
"589396": "Ruckus Wireless",
"58946B": "Intel Corporate",
"5894CF": "Vertex Standard LMR, Inc.",
"58971E": "Cisco",
"589835": "Technicolor",
"58986F": "Revolution Display",
"589CFC": "FreeBSD Foundation",
"58A2B5": "LG Electronics",
"58A76F": "iD corporation",
"58B035": "Apple",
"58B0D4": "ZuniData Systems Inc.",
"58B961": "SOLEM Electronique",
"58B9E1": "Crystalfontz America, Inc.",
"58BC27": "CISCO SYSTEMS, INC.",
"58BDA3": "Nintendo Co., Ltd.",
"58BDF9": "Sigrand",
"58BFEA": "CISCO SYSTEMS, INC.",
"58C232": "NEC Corporation",
"58C38B": "Samsung Electronics",
"58CF4B": "Lufkin Industries",
"58D071": "BW Broadcast",
"58D08F": "IEEE 1904.1 Working Group",
"58D6D3": "Dairy Cheq Inc",
"58DB8D": "Fast Co., Ltd.",
"58E02C": "Micro Technic A/S",
"58E326": "Compass Technologies Inc.",
"58E476": "CENTRON COMMUNICATIONS TECHNOLOGIES FUJIAN CO.,LTD",
"58E636": "EVRsafe Technologies",
"58E747": "Deltanet AG",
"58E808": "AUTONICS CORPORATION",
"58EB14": "Proteus Digital Health",
"58ECE1": "Newport Corporation",
"58EECE": "Icon Time Systems",
"58F387": "HCCP",
"58F67B": "Xia Men UnionCore Technology LTD.",
"58F6BF": "Kyoto University",
"58F98E": "SECUDOS GmbH",
"58FD20": "Bravida Sakerhet AB",
"5C026A": "Applied Vision Corporation",
"5C076F": "Thought Creator",
"5C0A5B": "SAMSUNG ELECTRO-MECHANICS CO., LTD.",
"5C0CBB": "CELIZION Inc.",
"5C0E8B": "Motorola",
"5C1193": "Seal One AG",
"5C1437": "Thyssenkrupp Aufzugswerke GmbH",
"5C15E1": "AIDC TECHNOLOGY (S) PTE LTD",
"5C16C7": "Big Switch Networks",
"5C1737": "I-View Now, LLC.",
"5C17D3": "LGE",
"5C18B5": "Talon Communications",
"5C20D0": "Asoni Communication Co., Ltd.",
"5C22C4": "DAE EUN ELETRONICS CO., LTD",
"5C2479": "Baltech AG",
"5C254C": "Avire Global Pte Ltd",
"5C260A": "Dell Inc.",
"5C2AEF": "Open Access Pty Ltd",
"5C2E59": "Samsung Electronics Co.,Ltd",
"5C313E": "Texas Instruments",
"5C3327": "Spazio Italia srl",
"5C335C": "Swissphone Telecom AG",
"5C338E": "Alpha Networkc Inc.",
"5C353B": "Compal Broadband Networks Inc.",
"5C35DA": "There Corporation Oy",
"5C36B8": "TCL King Electrical Appliances (Huizhou) Ltd.",
"5C38E0": "Shanghai Super Electronics Technology Co.,LTD",
"5C3C27": "Samsung Electronics Co.,Ltd",
"5C4058": "Jefferson Audio Video Systems, Inc.",
"5C43D2": "HAZEMEYER",
"5C4A26": "Enguity Technology Corp",
"5C4CA9": "Shenzhen Huawei Communication Technologies Co., Ltd",
"5C5015": "CISCO SYSTEMS, INC.",
"5C514F": "Intel Corporate",
"5C56ED": "3pleplay Electronics Private Limited",
"5C571A": "ARRIS Group, Inc.",
"5C57C8": "Nokia Corporation",
"5C5948": "Apple",
"5C5BC2": "YIK Corporation",
"5C5EAB": "Juniper Networks",
"5C63BF": "TP-LINK TECHNOLOGIES CO., LTD.",
"5C6984": "NUVICO",
"5C6A7D": "KENTKART EGE ELEKTRONIK SAN. VE TIC. LTD. STI.",
"5C6B32": "Texas Instruments",
"5C6D20": "Hon Hai Precision Ind. Co.,Ltd.",
"5C6F4F": "S.A. SISTEL",
"5C7757": "Haivision Network Video",
"5C7D5E": "Huawei Technologies Co., Ltd",
"5C8486": "Brightsource Industries Israel LTD",
"5C864A": "Secret Labs LLC",
"5C8778": "Cybertelbridge co.,ltd",
"5C89D4": "Beijing Banner Electric Co.,Ltd",
"5C8D4E": "Apple",
"5C95AE": "Apple",
"5C969D": "Apple",
"5C9AD8": "Fujitsu Limited",
"5CA39D": "SAMSUNG ELECTRO-MECHANICS CO., LTD.",
"5CA3EB": "Lokel s.r.o.",
"5CA48A": "Cisco",
"5CAC4C": "Hon Hai Precision Ind. Co.,Ltd.",
"5CB524": "Sony Ericsson Mobile Communications AB",
"5CBD9E": "HONGKONG MIRACLE EAGLE TECHNOLOGY(GROUP) LIMITED",
"5CC213": "Fr. Sauter AG",
"5CC5D4": "Intel Corporate",
"5CC6D0": "Skyworth Digital technology(shenzhen)co.ltd.",
"5CC9D3": "PALLADIUM ENERGY ELETRONICA DA AMAZONIA LTDA",
"5CCA32": "Theben AG",
"5CCEAD": "CDYNE Corporation",
"5CD135": "Xtreme Power Systems",
"5CD2E4": "Intel Corporate",
"5CD41B": "UCZOON Technology Co., LTD",
"5CD4AB": "Zektor",
"5CD61F": "Qardio, Inc",
"5CD998": "D-Link Corporation",
"5CDAD4": "Murata Manufacturing Co., Ltd.",
"5CDD70": "Hangzhou H3C Technologies Co., Limited",
"5CE0CA": "FeiTian United (Beijing) System Technology Co., Ltd.",
"5CE0F6": "NIC.br- Nucleo de Informacao e Coordenacao do Ponto BR",
"5CE223": "Delphin Technology AG",
"5CE286": "Nortel Networks",
"5CE2F4": "AcSiP Technology Corp.",
"5CE7BF": "New Singularity International Technical Development Co.,Ltd",
"5CE8EB": "Samsung Electronics",
"5CEB4E": "R. STAHL HMI Systems GmbH",
"5CEE79": "Global Digitech Co LTD",
"5CF207": "Speco Technologies",
"5CF370": "CC&C Technologies, Inc",
"5CF3FC": "IBM Corp",
"5CF4AB": "ZyXEL Communications Corp",
"5CF50D": "Institute of microelectronic applications",
"5CF6DC": "Samsung Electronics Co.,LTD",
"5CF8A1": "Murata Manufactuaring Co.,Ltd.",
"5CF938": "Apple, Inc",
"5CF9DD": "Dell Inc",
"5CFF35": "Wistron Corporation",
"5CFFFF": "Shenzhen Kezhonglong Optoelectronic Technology Co., Ltd",
"6002B4": "Wistron NeWeb Corp.",
"600308": "Apple",
"600347": "Billion Electric Co. Ltd.",
"600F77": "SilverPlus, Inc",
"601199": "Siama Systems Inc",
"601283": "Soluciones Tecnologicas para la Salud y el Bienestar SA",
"6015C7": "IdaTech",
"60190C": "RRAMAC",
"601929": "VOLTRONIC POWER TECHNOLOGY(SHENZHEN) CORP.",
"601D0F": "Midnite Solar",
"601E02": "EltexAlatau",
"602103": "STCUBE.INC",
"6021C0": "Murata Manufactuaring Co.,Ltd.",
"6024C1": "Jiangsu Zhongxun Electronic Technology Co., Ltd",
"602A54": "CardioTek B.V.",
"602AD0": "Cisco SPVTG",
"6032F0": "Mplus technology",
"60334B": "Apple",
"603553": "Buwon Technology",
"6036DD": "Intel Corporate",
"60380E": "Alps Electric Co.,",
"60391F": "ABB Ltd",
"603FC5": "COX CO., LTD",
"6044F5": "Easy Digital Ltd.",
"60455E": "Liptel s.r.o.",
"6045BD": "Microsoft",
"604616": "XIAMEN VANN INTELLIGENT CO., LTD",
"6047D4": "FORICS Electronic Technology Co., Ltd.",
"604A1C": "SUYIN Corporation",
"6052D0": "FACTS Engineering",
"605464": "Eyedro Green Solutions Inc.",
"605718": "Intel Corporate",
"60601F": "SZ DJI TECHNOLOGY CO.,LTD",
"6063FD": "Transcend Communication Beijing Co.,Ltd.",
"6064A1": "RADiflow Ltd.",
"606720": "Intel Corporate",
"606944": "Apple, Inc",
"60699B": "isepos GmbH",
"606BBD": "Samsung Electronics Co., LTD",
"606C66": "Intel Corporate",
"60735C": "Cisco",
"60748D": "Atmaca Elektronik",
"607688": "Velodyne",
"60812B": "Custom Control Concepts",
"6083B2": "GkWare e.K.",
"60843B": "Soladigm, Inc.",
"608645": "Avery Weigh-Tronix, LLC",
"60893C": "Thermo Fisher Scientific P.O.A.",
"6089B1": "Key Digital Systems",
"6089B7": "KAEL M\u00dcHEND\u0130SL\u0130K ELEKTRON\u0130K T\u0130CARET SANAY\u0130 L\u0130M\u0130TED \u015e\u0130RKET\u0130",
"608C2B": "Hanson Technology",
"608D17": "Sentrus Government Systems Division, Inc",
"608F5C": "Samsung Electronics Co.,Ltd",
"609084": "DSSD Inc",
"609217": "Apple",
"609620": "PRIVATE",
"609AA4": "GVI SECURITY INC.",
"609E64": "Vivonic GmbH",
"609F9D": "CloudSwitch",
"60A10A": "Samsung Electronics Co.,Ltd",
"60A44C": "ASUSTek COMPUTER INC.",
"60A8FE": "Nokia Solutions and Networks",
"60A9B0": "Merchandising Technologies, Inc",
"60B185": "ATH system",
"60B3C4": "Elber Srl",
"60B606": "Phorus",
"60B617": "Fiberhome Telecommunication Tech.Co.,Ltd.",
"60B933": "Deutron Electronics Corp.",
"60B982": "RO.VE.R. Laboratories S.p.A.",
"60BB0C": "Beijing HuaqinWorld Technology Co,Ltd",
"60BC4C": "EWM Hightec Welding GmbH",
"60BD91": "Move Innovation",
"60BEB5": "Motorola Mobility LLC",
"60C1CB": "Fujian Great Power PLC Equipment Co.,Ltd",
"60C397": "2Wire Inc",
"60C547": "Apple",
"60C5A8": "Beijing LT Honway Technology Co.,Ltd",
"60C980": "Trymus",
"60CBFB": "AirScape Inc.",
"60CDC5": "Taiwan Carol Electronics., Ltd",
"60D0A9": "Samsung Electronics Co.,Ltd",
"60D1AA": "Vishal Telecommunications Pvt Ltd",
"60D2B9": "REALAND BIO CO., LTD.",
"60D30A": "Quatius Limited",
"60D819": "Hon Hai Precision Ind. Co.,Ltd.",
"60D9C7": "Apple",
"60DA23": "Estech Co.,Ltd",
"60DB2A": "HNS",
"60DE44": "HUAWEI TECHNOLOGIES CO.,LTD",
"60E00E": "SHINSEI ELECTRONICS CO LTD",
"60E327": "TP-LINK TECHNOLOGIES CO.,LTD.",
"60E956": "Ayla Networks, Inc",
"60EB69": "Quanta computer Inc.",
"60F13D": "JABLOCOM s.r.o.",
"60F281": "TRANWO TECHNOLOGY CO., LTD.",
"60F2EF": "VisionVera International Co., Ltd.",
"60F3DA": "Logic Way GmbH",
"60F494": "Hon Hai Precision Ind. Co.,Ltd.",
"60F59C": "CRU-Dataport",
"60F673": "TERUMO CORPORATION",
"60FACD": "Apple",
"60FB42": "Apple",
"60FE1E": "China Palms Telecom.Ltd",
"60FE20": "2 Wire",
"60FEC5": "Apple",
"60FEF9": "Thomas & Betts",
"60FFDD": "C.E. ELECTRONICS, INC",
"6400F1": "CISCO SYSTEMS, INC.",
"6405BE": "NEW LIGHT LED",
"64094C": "Beijing Superbee Wireless Technology Co.,Ltd",
"640B4A": "Digital Telecom Technology Limited",
"640E36": "TAZTAG",
"640E94": "Pluribus Networks, Inc.",
"640F28": "2wire",
"641084": "HEXIUM Technical Development Co., Ltd.",
"641225": "Cisco",
"64168D": "CISCO SYSTEMS, INC.",
"6416F0": "Shehzhen Huawei Communication Technologies Co., Ltd.",
"641A22": "Heliospectra/Woodhill Investments",
"641C67": "DIGIBRAS INDUSTRIA DO BRASILS/A",
"641E81": "Dowslake Microsystems",
"64200C": "Apple",
"642184": "Nippon Denki Kagaku Co.,LTD",
"642216": "Shandong Taixin Electronic co.,Ltd",
"642400": "Xorcom Ltd.",
"642737": "Hon Hai Precision Ind. Co.,Ltd.",
"642DB7": "SEUNGIL ELECTRONICS",
"643150": "Hewlett-Packard Company",
"64317E": "Dexin Corporation",
"643409": "BITwave Pte Ltd",
"643F5F": "Exablaze",
"644214": "Swisscom Energy Solutions AG",
"644346": "GuangDong Quick Network Computer CO.,LTD",
"644BC3": "Shanghai WOASiS Telecommunications Ltd., Co.",
"644BF0": "CalDigit, Inc",
"644D70": "dSPACE GmbH",
"644F74": "LENUS Co., Ltd.",
"644FB0": "Hyunjin.com",
"64517E": "LONG BEN (DONGGUAN) ELECTRONIC TECHNOLOGY CO.,LTD.",
"645299": "The Chamberlain Group, Inc",
"64535D": "Frauscher Sensortechnik",
"645422": "Equinox Payments",
"645563": "Intelight Inc.",
"64557F": "NSFOCUS Information Technology Co., Ltd.",
"6455B1": "ARRIS Group, Inc.",
"645601": "TP-LINK TECHNOLOGIES CO.,LTD",
"645A04": "Chicony Electronics Co., Ltd.",
"645DD7": "Shenzhen Lifesense Medical Electronics Co., Ltd.",
"645EBE": "Yahoo! JAPAN",
"645FFF": "Nicolet Neuro",
"646223": "Cellient Co., Ltd.",
"6465C0": "Nuvon, Inc",
"6466B3": "TP-LINK TECHNOLOGIES CO., LTD.",
"646707": "Beijing Omnific Technology, Ltd.",
"64680C": "COMTREND",
"6469BC": "Hytera Communications Co .,ltd",
"646CB2": "Samsung Electronics Co.,Ltd",
"646E6C": "Radio Datacom LLC",
"646EEA": "Iskratel d.o.o.",
"647002": "TP-LINK TECHNOLOGIES CO., LTD.",
"6472D8": "GooWi Technology Co.,Limited",
"6473E2": "Arbiter Systems, Inc.",
"647657": "Innovative Security Designs",
"6476BA": "Apple",
"647791": "Samsung Electronics Co.,Ltd",
"647BD4": "Texas Instruments",
"647C34": "Ubee Interactive Corp.",
"647D81": "YOKOTA INDUSTRIAL CO,.LTD",
"647FDA": "TEKTELIC Communications Inc.",
"64808B": "VG Controls, Inc.",
"648099": "Intel Corporate",
"648125": "Alphatron Marine BV",
"648788": "Juniper Networks",
"6487D7": "Pirelli Tyre S.p.A.",
"6488FF": "Sichuan Changhong Electric Ltd.",
"648D9E": "IVT Electronic Co.,Ltd",
"64995D": "LGE",
"649968": "Elentec",
"6499A0": "AG Elektronik AB",
"649B24": "V Technology Co., Ltd.",
"649C81": "Qualcomm iSkoot, Inc.",
"649C8E": "Texas Instruments",
"649EF3": "CISCO SYSTEMS, INC.",
"649FF7": "Kone OYj",
"64A0E7": "CISCO SYSTEMS, INC.",
"64A232": "OOO Samlight",
"64A341": "Wonderlan (Beijing) Technology Co., Ltd.",
"64A3CB": "Apple",
"64A769": "HTC Corporation",
"64A7DD": "Avaya, Inc",
"64A837": "Juni Korea Co., Ltd",
"64AE0C": "CISCO SYSTEMS, INC.",
"64AE88": "Polytec GmbH",
"64B310": "Samsung Electronics Co.,Ltd",
"64B370": "PowerComm Solutons LLC",
"64B64A": "ViVOtech, Inc.",
"64B9E8": "Apple",
"64BABD": "SDJ Technologies, Inc.",
"64BC11": "CombiQ AB",
"64C5AA": "South African Broadcasting Corporation",
"64C667": "Barnes&Noble",
"64C6AF": "AXERRA Networks Ltd",
"64C944": "LARK Technologies, Inc",
"64D02D": "Next Generation Integration (NGI)",
"64D1A3": "Sitecom Europe BV",
"64D241": "Keith & Koep GmbH",
"64D4BD": "ALPS ELECTRIC CO.,LTD.",
"64D4DA": "Intel Corporate",
"64D814": "CISCO SYSTEMS, INC.",
"64D912": "Solidica, Inc.",
"64D989": "CISCO SYSTEMS, INC.",
"64DB18": "OpenPattern",
"64DC01": "Static Systems Group PLC",
"64DE1C": "Kingnetic Pte Ltd",
"64E161": "DEP Corp.",
"64E599": "EFM Networks",
"64E625": "Woxu Wireless Co., Ltd",
"64E682": "Apple",
"64E84F": "Serialway Communication Technology Co. Ltd",
"64E892": "Morio Denki Co., Ltd.",
"64E8E6": "global moisture management system",
"64E950": "Cisco",
"64EAC5": "SiboTech Automation Co., Ltd.",
"64EB8C": "Seiko Epson Corporation",
"64ED57": "ARRIS Group, Inc.",
"64ED62": "WOORI SYSTEMS Co., Ltd",
"64F242": "Gerdes Aktiengesellschaft",
"64F50E": "Kinion Technology Company Limited",
"64F970": "Kenade Electronics Technology Co.,LTD.",
"64F987": "Avvasi Inc.",
"64FC8C": "Zonar Systems",
"680571": "Samsung Electronics Co.,Ltd",
"6805CA": "Intel Corporate",
"680927": "Apple",
"680AD7": "Yancheng Kecheng Optoelectronic Technology Co., Ltd",
"68122D": "Special Instrument Development Co., Ltd.",
"681590": "SAGEMCOM SAS",
"6815D3": "Zaklady Elektroniki i Mechaniki Precyzyjnej R&G S.A.",
"681605": "Systems And Electronic Development FZCO",
"681729": "Intel Corporate",
"68193F": "Digital Airways",
"681AB2": "zte corporation",
"681CA2": "Rosewill Inc.",
"681D64": "Sunwave Communications Co., Ltd",
"681E8B": "InfoSight Corporation",
"681FD8": "Advanced Telemetry",
"68234B": "Nihon Dengyo Kousaku",
"6828BA": "Dejai",
"682DDC": "Wuhan Changjiang Electro-Communication Equipment CO.,LTD",
"683B1E": "Countwise LTD",
"683EEC": "ERECA",
"684352": "Bhuu Limited",
"684898": "Samsung Electronics Co.,Ltd",
"684B88": "Galtronics Telemetry Inc.",
"684CA8": "Shenzhen Herotel Tech. Co., Ltd.",
"6851B7": "PowerCloud Systems, Inc.",
"6854ED": "Alcatel-Lucent - Nuage",
"6854F5": "enLighted Inc",
"68597F": "Alcatel Lucent",
"685B35": "Apple",
"685B36": "POWERTECH INDUSTRIAL CO., LTD.",
"685D43": "Intel Corporate",
"685E6B": "PowerRay Co., Ltd.",
"686359": "Advanced Digital Broadcast SA",
"68692E": "Zycoo Co.,Ltd",
"6869F2": "ComAp s.r.o.",
"686E23": "Wi3 Inc.",
"686E48": "Prophet Electronic Technology Corp.,Ltd",
"687251": "Ubiquiti Networks",
"68764F": "Sony Mobile Communications AB",
"687848": "Westunitis Co., Ltd.",
"68784C": "Nortel Networks",
"687924": "ELS-GmbH & Co. KG",
"6879ED": "SHARP Corporation",
"687CC8": "Measurement Systems S. de R.L.",
"687CD5": "Y Soft Corporation, a.s.",
"687F74": "Cisco-Linksys, LLC",
"68831A": "Pandora Mobility Corporation",
"688470": "eSSys Co.,Ltd",
"688540": "IGI Mobile, Inc.",
"68856A": "OuterLink Corporation",
"6886A7": "Cisco",
"6886E7": "Orbotix, Inc.",
"68876B": "INQ Mobile Limited",
"688AB5": "EDP Servicos",
"689234": "Ruckus Wireless",
"689423": "Hon Hai Precision Ind. Co.,Ltd.",
"68967B": "Apple",
"68974B": "Shenzhen Costar Electronics Co. Ltd.",
"6897E8": "Society of Motion Picture & Television Engineers",
"689C5E": "AcSiP Technology Corp.",
"689C70": "Apple",
"68A1B7": "Honghao Mingchuan Technology (Beijing) CO.,Ltd.",
"68A3C4": "Liteon Technology Corporation",
"68A40E": "BSH Bosch and Siemens Home Appliances GmbH",
"68A86D": "Apple",
"68AAD2": "DATECS LTD.,",
"68AB8A": "RF IDeas",
"68AF13": "Futura Mobility",
"68B094": "INESA ELECTRON CO.,LTD",
"68B43A": "WaterFurnace International, Inc.",
"68B599": "Hewlett-Packard Company",
"68B6FC": "Hitron Technologies. Inc",
"68B8D9": "Act KDE, Inc.",
"68BC0C": "CISCO SYSTEMS, INC.",
"68BDAB": "CISCO SYSTEMS, INC.",
"68CA00": "Octopus Systems Limited",
"68CC9C": "Mine Site Technologies",
"68CD0F": "U Tek Company Limited",
"68CE4E": "L-3 Communications Infrared Products",
"68D1FD": "Shenzhen Trimax Technology Co.,Ltd",
"68D247": "Portalis LC",
"68D925": "ProSys Development Services",
"68DB67": "Nantong Coship Electronics Co., Ltd",
"68DB96": "OPWILL Technologies CO .,LTD",
"68DCE8": "PacketStorm Communications",
"68DFDD": "Xiaomi inc.",
"68E166": "PRIVATE",
"68E41F": "Unglaube Identech GmbH",
"68EBAE": "Samsung Electronics Co.,Ltd",
"68EBC5": "Angstrem Telecom",
"68EC62": "YODO Technology Corp. Ltd.",
"68ED43": "Research In Motion",
"68EE96": "Cisco SPVTG",
"68EFBD": "CISCO SYSTEMS, INC.",
"68F125": "Data Controls Inc.",
"68F895": "Redflow Limited",
"68FB95": "Generalplus Technology Inc.",
"68FCB3": "Next Level Security Systems, Inc.",
"6C0460": "RBH Access Technologies Inc.",
"6C09D6": "Digiquest Electronics LTD",
"6C0E0D": "Sony Ericsson Mobile Communications AB",
"6C0F6A": "JDC Tech Co., Ltd.",
"6C14F7": "Erhardt+Leimer GmbH",
"6C15F9": "Nautronix Limited",
"6C1811": "Decatur Electronics",
"6C198F": "D-Link International",
"6C2056": "Cisco",
"6C22AB": "Ainsworth Game Technology",
"6C23B9": "Sony Ericsson Mobile Communications AB",
"6C2995": "Intel Corporate",
"6C2C06": "OOO NPP Systemotechnika-NN",
"6C2E33": "Accelink Technologies Co.,Ltd.",
"6C2E85": "SAGEMCOM",
"6C2F2C": "Samsung Electronics Co.,Ltd",
"6C32DE": "Indieon Technologies Pvt. Ltd.",
"6C33A9": "Magicjack LP",
"6C391D": "Beijing ZhongHuaHun Network Information center",
"6C3A84": "Shenzhen Aero-Startech. Co.Ltd",
"6C3BE5": "Hewlett Packard",
"6C3C53": "SoundHawk Corp",
"6C3E6D": "Apple",
"6C3E9C": "KE Knestel Elektronik GmbH",
"6C40C6": "Nimbus Data Systems, Inc.",
"6C416A": "Cisco",
"6C4B7F": "Vossloh-Schwabe Deutschland GmbH",
"6C504D": "CISCO SYSTEMS, INC.",
"6C5779": "Aclima, Inc.",
"6C5A34": "Shenzhen Haitianxiong Electronic Co., Ltd.",
"6C5AB5": "TCL Technoly Electronics (Huizhou) Co., Ltd.",
"6C5CDE": "SunReports, Inc.",
"6C5D63": "ShenZhen Rapoo Technology Co., Ltd.",
"6C5E7A": "Ubiquitous Internet Telecom Co., Ltd",
"6C5F1C": "Lenovo Mobile Communication Technology Ltd.",
"6C6126": "Rinicom Holdings",
"6C626D": "Micro-Star INT'L CO., LTD",
"6C641A": "Penguin Computing",
"6C6F18": "Stereotaxis, Inc.",
"6C7039": "Novar GmbH",
"6C709F": "Apple",
"6C71D9": "AzureWave Technologies, Inc",
"6C81FE": "Mitsuba Corporation",
"6C8336": "Samsung Electronics Co.,Ltd",
"6C8366": "Nanjing SAC Power Grid Automation Co., Ltd.",
"6C8686": "Technonia",
"6C8814": "Intel Corporate",
"6C8B2F": "zte corporation",
"6C8CDB": "Otus Technologies Ltd",
"6C8D65": "Wireless Glue Networks, Inc.",
"6C90B1": "SanLogic Inc",
"6C92BF": "Inspur Electronic Information Industry Co.,Ltd.",
"6C98EB": "Xyne GmbH",
"6C9989": "Cisco",
"6C9AC9": "Valentine Research, Inc.",
"6C9B02": "Nokia Corporation",
"6C9CE9": "Nimble Storage",
"6C9CED": "CISCO SYSTEMS, INC.",
"6CA682": "EDAM information & communications",
"6CA780": "Nokia Corporation",
"6CA906": "Telefield Ltd",
"6CA96F": "TransPacket AS",
"6CAAB3": "Ruckus Wireless",
"6CAB4D": "Digital Payment Technologies",
"6CAC60": "Venetex Corp",
"6CAD3F": "Hubbell Building Automation, Inc.",
"6CADEF": "KZ Broadband Technologies, Ltd.",
"6CADF8": "Azurewave Technologies, Inc.",
"6CAE8B": "IBM Corporation",
"6CB0CE": "NETGEAR",
"6CB311": "Shenzhen Lianrui Electronics Co.,Ltd",
"6CB350": "Anhui comhigher tech co.,ltd",
"6CB7F4": "Samsung Electronics Co.,Ltd",
"6CBEE9": "Alcatel-Lucent-IPD",
"6CC1D2": "ARRIS Group, Inc.",
"6CC26B": "Apple",
"6CD032": "LG Electronics",
"6CD146": "Smartek d.o.o.",
"6CD1B0": "WING SING ELECTRONICS HONG KONG LIMITED",
"6CD68A": "LG Electronics Inc",
"6CDC6A": "Promethean Limited",
"6CE0B0": "SOUND4",
"6CE4CE": "Villiger Security Solutions AG",
"6CE873": "TP-LINK TECHNOLOGIES CO., LTD.",
"6CE907": "Nokia Corporation",
"6CE983": "Gastron Co., LTD.",
"6CECA1": "SHENZHEN CLOU ELECTRONICS CO. LTD.",
"6CECEB": "Texas Instruments",
"6CF049": "GIGA-BYTE TECHNOLOGY CO.,LTD.",
"6CF373": "Samsung Electronics Co.,Ltd",
"6CF37F": "Aruba Networks",
"6CF97C": "Nanoptix Inc.",
"6CFA58": "Avaya, Inc",
"6CFAA7": "AMPAK Technology Inc.",
"6CFDB9": "Proware Technologies Co Ltd.",
"6CFFBE": "MPB Communications Inc.",
"700258": "01DB-METRAVIB",
"700514": "LG Electronics",
"700BC0": "Dewav Technology Company",
"700FEC": "Poindus Systems Corp.",
"70105C": "Cisco",
"701124": "Apple",
"701404": "Limited Liability Company",
"70188B": "Hon Hai Precision Ind. Co.,Ltd.",
"701A04": "Liteon Tech Corp.",
"701AED": "ADVAS CO., LTD.",
"701D7F": "Comtech Technology Co., Ltd.",
"702393": "fos4X GmbH",
"702526": "Alcatel-Lucent",
"702559": "CyberTAN Technology, Inc.",
"702B1D": "E-Domus International Limited",
"702C1F": "Wisol",
"702F4B": "PolyVision Inc.",
"702F97": "Aava Mobile Oy",
"703018": "Avaya, Inc",
"70305D": "Ubiquoss Inc",
"70305E": "Nanjing Zhongke Menglian Information Technology Co.,LTD",
"703187": "ACX GmbH",
"7032D5": "Athena Wireless Communications Inc",
"703811": "Invensys Rail",
"7038B4": "Low Tech Solutions",
"7038EE": "Avaya, Inc",
"703AD8": "Shenzhen Afoundry Electronic Co., Ltd",
"703C39": "SEAWING Kft",
"7041B7": "Edwards Lifesciences LLC",
"704642": "CHYNG HONG ELECTRONIC CO., LTD.",
"704AAE": "Xstream Flow (Pty) Ltd",
"704AE4": "Rinstrum Pty Ltd",
"704CED": "TMRG, Inc.",
"704E01": "KWANGWON TECH CO., LTD.",
"7052C5": "Avaya, Inc.",
"70533F": "Alfa Instrumentos Eletronicos Ltda.",
"7054D2": "PEGATRON CORPORATION",
"7054F5": "HUAWEI TECHNOLOGIES CO.,LTD",
"705681": "Apple",
"705812": "Panasonic AVC Networks Company",
"705957": "Medallion Instrumentation Systems",
"705986": "OOO TTV",
"705AB6": "COMPAL INFORMATION (KUNSHAN) CO., LTD.",
"705CAD": "Konami Gaming Inc",
"705EAA": "Action Target, Inc.",
"7060DE": "LaVision GmbH",
"706173": "Calantec GmbH",
"7062B8": "D-Link International",
"706417": "ORBIS TECNOLOGIA ELECTRICA S.A.",
"706582": "Suzhou Hanming Technologies Co., Ltd.",
"706F81": "PRIVATE",
"70704C": "Purple Communications, Inc",
"7071B3": "Brain Corporation",
"7071BC": "PEGATRON CORPORATION",
"70720D": "Lenovo Mobile Communication Technology Ltd.",
"70723C": "Huawei Technologies Co., Ltd",
"7072CF": "EdgeCore Networks",
"7073CB": "Apple",
"707630": "Pace plc.",
"7076DD": "Oxyguard International A/S",
"7076F0": "LevelOne Communications (India) Private Limited",
"707BE8": "HUAWEI TECHNOLOGIES CO.,LTD",
"707C18": "ADATA Technology Co., Ltd",
"707E43": "ARRIS Group, Inc.",
"707EDE": "NASTEC LTD.",
"708105": "CISCO SYSTEMS, INC.",
"70820E": "as electronics GmbH",
"70828E": "OleumTech Corporation",
"7085C6": "Pace plc.",
"708B78": "citygrow technology co., ltd",
"708D09": "Nokia Corporation",
"709383": "Intelligent Optical Network High Tech CO.,LTD.",
"7093F8": "Space Monkey, Inc.",
"709756": "Happyelectronics Co.,Ltd",
"709A0B": "Italian Institute of Technology",
"709BA5": "Shenzhen Y&D Electronics Co.,LTD.",
"709BFC": "Bryton Inc.",
"709E29": "Sony Computer Entertainment Inc.",
"709E86": "X6D Limited",
"70A191": "Trendsetter Medical, LLC",
"70A41C": "Advanced Wireless Dynamics S.L.",
"70A66A": "Prox Dynamics AS",
"70A8E3": "HUAWEI TECHNOLOGIES CO.,LTD",
"70AAB2": "Research In Motion",
"70B035": "Shenzhen Zowee Technology Co., Ltd",
"70B08C": "Shenou Communication Equipment Co.,Ltd",
"70B14E": "Pace plc",
"70B265": "Hiltron s.r.l.",
"70B3D5": "IEEE REGISTRATION AUTHORITY - Please see OUI36 public listing for more information.",
"70B599": "Embedded Technologies s.r.o.",
"70B921": "FiberHome Telecommunication Technologies CO.,LTD",
"70C6AC": "Bosch Automotive Aftermarket",
"70CA9B": "CISCO SYSTEMS, INC.",
"70CD60": "Apple",
"70D4F2": "RIM",
"70D57E": "Scalar Corporation",
"70D5E7": "Wellcore Corporation",
"70D6B6": "Metrum Technologies",
"70D880": "Upos System sp. z o.o.",
"70DDA1": "Tellabs",
"70DEE2": "Apple",
"70E027": "HONGYU COMMUNICATION TECHNOLOGY LIMITED",
"70E139": "3view Ltd",
"70E24C": "SAE IT-systems GmbH & Co. KG",
"70E284": "Wistron InfoComm(Zhongshan) Corporation",
"70E843": "Beijing C&W Optical Communication Technology Co.,Ltd.",
"70EE50": "Netatmo",
"70F176": "Data Modul AG",
"70F1A1": "Liteon Technology Corporation",
"70F1E5": "Xetawave LLC",
"70F395": "Universal Global Scientific Industrial Co., Ltd.",
"70F927": "Samsung Electronics",
"70F96D": "Hangzhou H3C Technologies Co., Limited",
"70FF76": "Texas Instruments",
"740ABC": "JSJS Designs (Europe) Limited",
"740EDB": "Optowiz Co., Ltd",
"741489": "SRT Wireless",
"7415E2": "Tri-Sen Systems Corporation",
"7419F8": "IEEE REGISTRATION AUTHORITY - Please see MAM public listing for more information.",
"741E93": "Fiberhome Telecommunication Tech.Co.,Ltd.",
"74258A": "Hangzhou H3C Technologies Co., Limited",
"7426AC": "Cisco",
"74273C": "ChangYang Technology (Nanjing) Co., LTD",
"7427EA": "Elitegroup Computer Systems Co., Ltd.",
"742B0F": "Infinidat Ltd.",
"742B62": "Fujitsu Limited",
"742D0A": "Norfolk Elektronik AG",
"742F68": "Azurewave Technologies, Inc.",
"743170": "Arcadyan Technology Corporation",
"743256": "NT-ware Systemprg GmbH",
"74372F": "Tongfang Shenzhen Cloudcomputing Technology Co.,Ltd",
"743889": "ANNAX Anzeigesysteme GmbH",
"743ECB": "Gentrice tech",
"744401": "NETGEAR",
"74458A": "Samsung Electronics Co.,Ltd",
"7446A0": "Hewlett Packard",
"744BE9": "EXPLORER HYPERTECH CO.,LTD",
"744D79": "Arrive Systems Inc.",
"745327": "COMMSEN CO., LIMITED",
"745612": "ARRIS Group, Inc.",
"745798": "TRUMPF Laser GmbH + Co. KG",
"745E1C": "PIONEER CORPORATION",
"745F00": "Samsung Semiconductor Inc.",
"745FAE": "TSL PPL",
"7463DF": "VTS GmbH",
"7465D1": "Atlinks",
"746630": "T:mi Ytti",
"746A89": "Rezolt Corporation",
"746A8F": "VS Vision Systems GmbH",
"746B82": "MOVEK",
"746F3D": "Contec GmbH",
"7472F2": "Chipsip Technology Co., Ltd.",
"747818": "ServiceAssure",
"747B7A": "ETH Inc.",
"747DB6": "Aliwei Communications, Inc",
"747E1A": "Red Embedded Design Limited",
"747E2D": "Beijing Thomson CITIC Digital Technology Co. LTD.",
"74867A": "Dell Inc",
"74882A": "HUAWEI TECHNOLOGIES CO.,LTD",
"74888B": "ADB Broadband Italia",
"748E08": "Bestek Corp.",
"748EF8": "Brocade Communications Systems, Inc.",
"748F1B": "MasterImage 3D",
"749050": "Renesas Electronics Corporation",
"74911A": "Ruckus Wireless",
"7493A4": "Zebra Technologies Corp.",
"74943D": "AgJunction",
"749975": "IBM Corporation",
"749C52": "Huizhou Desay SV Automotive Co., Ltd.",
"749DDC": "2Wire",
"74A4A7": "QRS Music Technologies, Inc.",
"74A4B5": "Powerleader Science and Technology Co. Ltd.",
"74A722": "LG Electronics",
"74ADB7": "China Mobile Group Device Co.,Ltd.",
"74AE76": "iNovo Broadband, Inc.",
"74B00C": "Network Video Technologies, Inc",
"74B9EB": "Fujian JinQianMao Electronic Technology Co.,Ltd",
"74BE08": "ATEK Products, LLC",
"74BFA1": "HYUNTECK",
"74C621": "Zhejiang Hite Renewable Energy Co.,LTD",
"74C99A": "Ericsson AB",
"74CA25": "Calxeda, Inc.",
"74CD0C": "Smith Myers Communications Ltd.",
"74CE56": "Packet Force Technology Limited Company",
"74D02B": "ASUSTek COMPUTER INC.",
"74D0DC": "ERICSSON AB",
"74D435": "GIGA-BYTE TECHNOLOGY CO.,LTD.",
"74D675": "WYMA Tecnologia",
"74D850": "Evrisko Systems",
"74DA38": "Edimax Technology Co. Ltd.",
"74DE2B": "Liteon Technology Corporation",
"74E06E": "Ergophone GmbH",
"74E1B6": "Apple",
"74E2F5": "Apple",
"74E424": "APISTE CORPORATION",
"74E50B": "Intel Corporate",
"74E537": "RADSPIN",
"74E543": "Liteon Technology Corporation",
"74E7C6": "ARRIS Group, Inc.",
"74EA3A": "TP-LINK Technologies Co.,Ltd.",
"74ECF1": "Acumen",
"74F06D": "AzureWave Technologies, Inc.",
"74F07D": "BnCOM Co.,Ltd",
"74F102": "Beijing HCHCOM Technology Co., Ltd",
"74F612": "ARRIS Group, Inc.",
"74F726": "Neuron Robotics",
"74F85D": "Berkeley Nucleonics Corp",
"74FDA0": "Compupal (Group) Corporation",
"74FE48": "ADVANTECH CO., LTD.",
"74FF7D": "Wren Sound Systems, LLC",
"78028F": "Adaptive Spectrum and Signal Alignment (ASSIA), Inc.",
"780738": "Z.U.K. Elzab S.A.",
"781185": "NBS Payment Solutions Inc.",
"7812B8": "ORANTEK LIMITED",
"781881": "AzureWave Technologies, Inc.",
"78192E": "NASCENT Technology",
"7819F7": "Juniper Networks",
"781C5A": "SHARP Corporation",
"781DBA": "HUAWEI TECHNOLOGIES CO.,LTD",
"781DFD": "Jabil Inc",
"781FDB": "Samsung Electronics Co.,Ltd",
"78223D": "Affirmed Networks",
"782544": "Omnima Limited",
"7825AD": "SAMSUNG ELECTRONICS CO., LTD.",
"782BCB": "Dell Inc",
"782EEF": "Nokia Corporation",
"78303B": "Stephen Technologies Co.,Limited",
"7830E1": "UltraClenz, LLC",
"7831C1": "Apple",
"78324F": "Millennium Group, Inc.",
"783A84": "Apple",
"783CE3": "Kai-EE",
"783D5B": "TELNET Redes Inteligentes S.A.",
"783E53": "BSkyB Ltd",
"783F15": "EasySYNC Ltd.",
"784405": "FUJITU(HONG KONG) ELECTRONIC Co.,LTD.",
"784476": "Zioncom technology co.,ltd",
"7845C4": "Dell Inc",
"7846C4": "DAEHAP HYPER-TECH",
"78471D": "Samsung Electronics Co.,Ltd",
"784859": "Hewlett Packard",
"78491D": "The Will-Burt Company",
"784B08": "f.robotics acquisitions ltd",
"784B87": "Murata Manufacturing Co.,Ltd.",
"78510C": "LiveU Ltd.",
"78521A": "Samsung Electronics Co.,Ltd",
"785262": "Shenzhen Hojy Software Co., Ltd.",
"78542E": "D-Link International",
"785517": "SankyuElectronics",
"785712": "Mobile Integration Workgroup",
"78593E": "RAFI GmbH & Co.KG",
"78595E": "Samsung Electronics Co.,Ltd",
"785968": "Hon Hai Precision Ind.Co.,Ltd.",
"785C72": "Hioso Technology Co., Ltd.",
"78617C": "MITSUMI ELECTRIC CO.,LTD",
"7866AE": "ZTEC Instruments, Inc.",
"786A89": "Huawei Technologies Co., Ltd",
"786C1C": "Apple",
"787F62": "GiK mbH",
"78818F": "Server Racks Australia Pty Ltd",
"78843C": "Sony Corporation",
"7884EE": "INDRA ESPACIO S.A.",
"788973": "CMC",
"788C54": "Eltek Technologies LTD",
"788DF7": "Hitron Technologies. Inc",
"78923E": "Nokia Corporation",
"78929C": "Intel Corporate",
"789684": "ARRIS Group, Inc.",
"7898FD": "Q9 Networks Inc.",
"78995C": "Nationz Technologies Inc",
"789966": "Musilab Electronics (DongGuan)Co.,Ltd.",
"78998F": "MEDILINE ITALIA SRL",
"789ED0": "Samsung Electronics",
"789F4C": "HOERBIGER Elektronik GmbH",
"789F87": "Siemens AG I IA PP PRM",
"78A051": "iiNet Labs Pty Ltd",
"78A106": "TP-LINK TECHNOLOGIES CO.,LTD.",
"78A183": "Advidia",
"78A2A0": "Nintendo Co., Ltd.",
"78A3E4": "Apple",
"78A504": "Texas Instruments",
"78A5DD": "Shenzhen Smarteye Digital Electronics Co., Ltd",
"78A683": "Precidata",
"78A6BD": "DAEYEON Control&Instrument Co,.Ltd",
"78A714": "Amphenol",
"78A873": "Samsung Electronics Co.,Ltd",
"78AB60": "ABB Australia",
"78ABBB": "Samsung Electronics Co.,LTD",
"78ACC0": "Hewlett-Packard Company",
"78AE0C": "Far South Networks",
"78B3CE": "Elo touch solutions",
"78B5D2": "Ever Treasure Industrial Limited",
"78B6C1": "AOBO Telecom Co.,Ltd",
"78B81A": "INTER SALES A/S",
"78BAD0": "Shinybow Technology Co. Ltd.",
"78BEB6": "Enhanced Vision",
"78BEBD": "STULZ GmbH",
"78C40E": "H&D Wireless",
"78C4AB": "Shenzhen Runsil Technology Co.,Ltd",
"78C5E5": "Texas Instruments",
"78C6BB": "Innovasic, Inc.",
"78CA04": "Nokia Corporation",
"78CA39": "Apple",
"78CA5E": "ELNO",
"78CB33": "DHC Software Co.,Ltd",
"78CD8E": "SMC Networks Inc",
"78D004": "Neousys Technology Inc.",
"78D129": "Vicos",
"78D34F": "Pace-O-Matic, Inc.",
"78D38D": "HONGKONG YUNLINK TECHNOLOGY LIMITED",
"78D5B5": "NAVIELEKTRO KY",
"78D66F": "Aristocrat Technologies Australia Pty. Ltd.",
"78D6F0": "Samsung Electro Mechanics",
"78D752": "HUAWEI TECHNOLOGIES CO.,LTD",
"78D99F": "NuCom HK Ltd.",
"78DA6E": "Cisco",
"78DAB3": "GBO Technology",
"78DD08": "Hon Hai Precision Ind. Co.,Ltd.",
"78DDD6": "c-scape",
"78DEE4": "Texas Instruments",
"78E3B5": "Hewlett-Packard Company",
"78E400": "Hon Hai Precision Ind. Co.,Ltd.",
"78E7D1": "Hewlett-Packard Company",
"78E8B6": "zte corporation",
"78EC22": "Shanghai Qihui Telecom Technology Co., LTD",
"78EC74": "Kyland-USA",
"78EF4C": "Unetconvergence Co., Ltd.",
"78F5E5": "BEGA Gantenbrink-Leuchten KG",
"78F5FD": "Huawei Technologies Co., Ltd",
"78F7BE": "Samsung Electronics Co.,Ltd",
"78F7D0": "Silverbrook Research",
"78FD94": "Apple",
"78FE3D": "Juniper Networks",
"78FE41": "Socus networks",
"78FEE2": "Shanghai Diveo Technology Co., Ltd",
"78FF57": "Intel Corporate",
"7C0187": "Curtis Instruments, Inc.",
"7C02BC": "Hansung Electronics Co. LTD",
"7C034C": "SAGEMCOM",
"7C03D8": "SAGEMCOM SAS",
"7C0507": "PEGATRON CORPORATION",
"7C051E": "RAFAEL LTD.",
"7C0623": "Ultra Electronics, CIS",
"7C08D9": "Shanghai B-Star Technology Co",
"7C092B": "Bekey A/S",
"7C0A50": "J-MEX Inc.",
"7C11BE": "Apple",
"7C1476": "Damall Technologies SAS",
"7C160D": "Saia-Burgess Controls AG",
"7C1A03": "8Locations Co., Ltd.",
"7C1AFC": "Dalian Co-Edifice Video Technology Co., Ltd",
"7C1E52": "Microsoft",
"7C1EB3": "2N TELEKOMUNIKACE a.s.",
"7C2048": "KoamTac",
"7C2064": "Alcatel Lucent IPD",
"7C2CF3": "Secure Electrans Ltd",
"7C2E0D": "Blackmagic Design",
"7C2F80": "Gigaset Communications GmbH",
"7C336E": "MEG Electronics Inc.",
"7C386C": "Real Time Logic",
"7C3920": "SSOMA SECURITY",
"7C3BD5": "Imago Group",
"7C3E9D": "PATECH",
"7C438F": "E-Band Communications Corp.",
"7C444C": "Entertainment Solutions, S.L.",
"7C49B9": "Plexus Manufacturing Sdn Bhd",
"7C4A82": "Portsmith LLC",
"7C4AA8": "MindTree Wireless PVT Ltd",
"7C4B78": "Red Sun Synthesis Pte Ltd",
"7C4C58": "Scale Computing, Inc.",
"7C4CA5": "BSkyB Ltd",
"7C4FB5": "Arcadyan Technology Corporation",
"7C55E7": "YSI, Inc.",
"7C6097": "HUAWEI TECHNOLOGIES CO.,LTD",
"7C6193": "HTC Corporation",
"7C669D": "Texas Instruments",
"7C69F6": "Cisco",
"7C6AB3": "IBC TECHNOLOGIES INC.",
"7C6ADB": "SafeTone Technology Co.,Ltd",
"7C6B33": "Tenyu Tech Co. Ltd.",
"7C6B52": "Tigaro Wireless",
"7C6C39": "PIXSYS SRL",
"7C6C8F": "AMS NEVE LTD",
"7C6D62": "Apple",
"7C6DF8": "Apple",
"7C6F06": "Caterpillar Trimble Control Technologies",
"7C6FF8": "ShenZhen ACTO Digital Video Technology Co.,Ltd.",
"7C70BC": "IEEE REGISTRATION AUTHORITY - Please see MAM public listing for more information.",
"7C72E4": "Unikey Technologies",
"7C7673": "ENMAS GmbH",
"7C7A91": "Intel Corporate",
"7C7BE4": "Z'SEDAI KENKYUSHO CORPORATION",
"7C7D41": "Jinmuyu Electronics Co., Ltd.",
"7C822D": "Nortec",
"7C8306": "Glen Dimplex Nordic as",
"7C8D91": "Shanghai Hongzhuo Information Technology co.,LTD",
"7C8EE4": "Texas Instruments",
"7C94B2": "Philips Healthcare PCCI",
"7C95F3": "Cisco",
"7C9763": "Openmatics s.r.o.",
"7C9A9B": "VSE valencia smart energy",
"7CA15D": "GN ReSound A/S",
"7CA29B": "D.SignT GmbH & Co. KG",
"7CA61D": "MHL, LLC",
"7CACB2": "Bosch Software Innovations GmbH",
"7CAD74": "Cisco",
"7CB03E": "OSRAM GmbH",
"7CB21B": "Cisco SPVTG",
"7CB232": "TCL King High Frequency EI,Co.,LTD",
"7CB542": "ACES Technology",
"7CB733": "ASKEY COMPUTER CORP",
"7CB77B": "Paradigm Electronics Inc",
"7CBB6F": "Cosco Electronics Co., Ltd.",
"7CBD06": "AE REFUsol",
"7CBF88": "Mobilicom LTD",
"7CBFB1": "ARRIS Group, Inc.",
"7CC3A1": "Apple",
"7CC4EF": "Devialet",
"7CC537": "Apple",
"7CC8AB": "Acro Associates, Inc.",
"7CC8D0": "TIANJIN YAAN TECHNOLOGY CO., LTD.",
"7CC8D7": "Damalisk",
"7CCB0D": "Antaira Technologies, LLC",
"7CCD11": "MS-Magnet",
"7CCD3C": "Guangzhou Juzing Technology Co., Ltd",
"7CCFCF": "Shanghai SEARI Intelligent System Co., Ltd",
"7CD1C3": "Apple",
"7CD30A": "INVENTEC Corporation",
"7CD762": "Freestyle Technology Pty Ltd",
"7CD844": "Enmotus Inc",
"7CD9FE": "New Cosmos Electric Co., Ltd.",
"7CDA84": "Dongnian Networks Inc.",
"7CDD11": "Chongqing MAS SCI&TECH.Co.,Ltd",
"7CDD20": "IOXOS Technologies S.A.",
"7CDD90": "Shenzhen Ogemray Technology Co., Ltd.",
"7CE044": "NEON Inc",
"7CE1FF": "Computer Performance, Inc. DBA Digital Loggers, Inc.",
"7CE4AA": "PRIVATE",
"7CE56B": "ESEN Optoelectronics Technology Co.,Ltd.",
"7CE9D3": "Hon Hai Precision Ind. Co.,Ltd.",
"7CEBEA": "ASCT",
"7CED8D": "MICROSOFT",
"7CEF18": "Creative Product Design Pty. Ltd.",
"7CEF8A": "Inhon International Ltd.",
"7CF05F": "Apple",
"7CF098": "Bee Beans Technologies, Inc.",
"7CF0BA": "Linkwell Telesystems Pvt Ltd",
"7CF429": "NUUO Inc.",
"7CFADF": "Apple",
"7CFE28": "Salutron Inc.",
"7CFE4E": "Shenzhen Safe vision Technology Co.,LTD",
"7CFF62": "Huizhou Super Electron Technology Co.,Ltd.",
"80000B": "Intel Corporate",
"800010": "ATT BELL LABORATORIES",
"80006E": "Apple",
"8005DF": "Montage Technology Group Limited",
"8007A2": "Esson Technology Inc.",
"800A06": "COMTEC co.,ltd",
"800E24": "ForgetBox",
"801440": "Sunlit System Technology Corp",
"8014A8": "Guangzhou V-SOLUTION Electronic Technology Co., Ltd.",
"8016B7": "Brunel University",
"80177D": "Nortel Networks",
"8018A7": "Samsung Eletronics Co., Ltd",
"801934": "Intel Corporate",
"801DAA": "Avaya Inc",
"801F02": "Edimax Technology Co. Ltd.",
"8020AF": "Trade FIDES, a.s.",
"802275": "Beijing Beny Wave Technology Co Ltd",
"802AFA": "Germaneers GmbH",
"802DE1": "Solarbridge Technologies",
"802E14": "azeti Networks AG",
"802FDE": "Zurich Instruments AG",
"803457": "OT Systems Limited",
"8038FD": "LeapFrog Enterprises, Inc.",
"8039E5": "PATLITE CORPORATION",
"803B9A": "ghe-ces electronic ag",
"803F5D": "Winstars Technology Ltd",
"803FD6": "bytes at work AG",
"80414E": "BBK Electronics Corp., Ltd.,",
"80427C": "Adolf Tedsen GmbH & Co. KG",
"804731": "Packet Design, Inc.",
"8048A5": "SICHUAN TIANYI COMHEART TELECOM CO.,LTD",
"804971": "Apple",
"804B20": "Ventilation Control",
"804F58": "ThinkEco, Inc.",
"80501B": "Nokia Corporation",
"8056F2": "Hon Hai Precision Ind. Co.,Ltd.",
"805719": "Samsung Electronics Co.,Ltd",
"8058C5": "NovaTec Kommunikationstechnik GmbH",
"8059FD": "Noviga",
"806007": "RIM",
"80618F": "Shenzhen sangfei consumer communications co.,ltd",
"806459": "Nimbus Inc.",
"8065E9": "BenQ Corporation",
"806629": "Prescope Technologies CO.,LTD.",
"806C8B": "KAESER KOMPRESSOREN AG",
"806CBC": "NET New Electronic Technology GmbH",
"80711F": "Juniper Networks",
"807693": "Newag SA",
"8079AE": "ShanDong Tecsunrise Co.,Ltd",
"807A7F": "ABB Genway Xiamen Electrical Equipment CO., LTD",
"807B1E": "Corsair Components",
"807D1B": "Neosystem Co. Ltd.",
"807DE3": "Chongqing Sichuan Instrument Microcircuit Co.LTD.",
"8081A5": "TONGQING COMMUNICATION EQUIPMENT (SHENZHEN) Co.,Ltd",
"808287": "ATCOM Technology Co.Ltd.",
"808698": "Netronics Technologies Inc.",
"8086F2": "Intel Corporate",
"808B5C": "Shenzhen Runhuicheng Technology Co., Ltd",
"80912A": "Lih Rong electronic Enterprise Co., Ltd.",
"8091C0": "AgileMesh, Inc.",
"80929F": "Apple",
"809393": "Xapt GmbH",
"80946C": "TOKYO RADAR CORPORATION",
"8096B1": "ARRIS Group, Inc.",
"80971B": "Altenergy Power System,Inc.",
"809B20": "Intel Corporate",
"80A1D7": "Shanghai DareGlobal Technologies Co.,Ltd",
"80AAA4": "USAG",
"80B219": "ELEKTRON TECHNOLOGY UK LIMITED",
"80B289": "Forworld Electronics Ltd.",
"80B32A": "Alstom Grid",
"80B686": "Huawei Technologies Co., Ltd",
"80B95C": "ELFTECH Co., Ltd.",
"80BAAC": "TeleAdapt Ltd",
"80BAE6": "Neets",
"80BBEB": "Satmap Systems Ltd",
"80C16E": "Hewlett Packard",
"80C63F": "Remec Broadband Wireless , LLC",
"80C6AB": "Technicolor USA Inc.",
"80C6CA": "Endian s.r.l.",
"80C862": "Openpeak, Inc",
"80CEB1": "Theissen Training Systems GmbH",
"80CF41": "Lenovo Mobile Communication Technology Ltd.",
"80D019": "Embed, Inc",
"80D18B": "Hangzhou I'converge Technology Co.,Ltd",
"80D433": "LzLabs GmbH",
"80D733": "QSR Automations, Inc.",
"80DB31": "Power Quotient International Co., Ltd.",
"80EA96": "Apple",
"80EE73": "Shuttle Inc.",
"80F25E": "Kyynel",
"80F593": "IRCO Sistemas de Telecomunicaci\u00f3n S.A.",
"80F62E": "Hangzhou H3C Technologies Co., Limited",
"80FA5B": "CLEVO CO.",
"80FB06": "HUAWEI TECHNOLOGIES CO.,LTD",
"80FFA8": "UNIDIS",
"8400D2": "Sony Ericsson Mobile Communications AB",
"8401A7": "Greyware Automation Products, Inc",
"840B2D": "SAMSUNG ELECTRO-MECHANICS CO., LTD",
"840F45": "Shanghai GMT Digital Technologies Co., Ltd",
"841715": "GP Electronics (HK) Ltd.",
"841766": "Weifang GoerTek Electronics Co., Ltd",
"841888": "Juniper Networks",
"841B38": "Shenzhen Excelsecu Data Technology Co.,Ltd",
"841B5E": "NETGEAR",
"841E26": "KERNEL-I Co.,LTD",
"842141": "Shenzhen Ginwave Technologies Ltd.",
"84248D": "Motorola Solutions Inc",
"84253F": "Silex Technology, Inc",
"8425A4": "Tariox Limited",
"8425DB": "Samsung Electronics Co.,Ltd",
"842615": "ADB Broadband Italia",
"84262B": "Alcatel-Lucent",
"8427CE": "Corporation of the Presiding Bishop of The Church of Jesus Christ of Latter-day Saints",
"842914": "EMPORIA TELECOM Produktions- und VertriebsgesmbH & Co KG",
"842999": "Apple",
"842B2B": "Dell Inc.",
"842B50": "Huria Co.,Ltd.",
"842BBC": "Modelleisenbahn GmbH",
"842F75": "Innokas Group",
"8430E5": "SkyHawke Technologies, LLC",
"843497": "Hewlett Packard",
"843611": "hyungseul publishing networks",
"843835": "Apple",
"843838": "Samsung Electro Mechanics co., LTD.",
"843A4B": "Intel Corporate",
"843F4E": "Tri-Tech Manufacturing, Inc.",
"844823": "WOXTER TECHNOLOGY Co. Ltd",
"844915": "vArmour Networks, Inc.",
"844BF5": "Hon Hai Precision Ind. Co.,Ltd.",
"844F03": "Ablelink Electronics Ltd",
"845181": "Samsung Electronics Co.,Ltd",
"84569C": "Coho Data, Inc.,",
"845787": "DVR C&C Co., Ltd.",
"845C93": "Chabrier Services",
"845DD7": "Shenzhen Netcom Electronics Co.,Ltd",
"846223": "Shenzhen Coship Electronics Co., Ltd.",
"8462A6": "EuroCB (Phils), Inc.",
"846AED": "Wireless Tsukamoto.,co.LTD",
"846EB1": "Park Assist LLC",
"847207": "I&C Technology",
"84742A": "zte corporation",
"847616": "Addat S.r.o.",
"8478AC": "Cisco",
"847A88": "HTC Corporation",
"847E40": "Texas Instruments",
"8482F4": "Beijing Huasun Unicreate Technology Co., Ltd",
"848336": "Newrun",
"848371": "Avaya, Inc",
"848433": "Paradox Engineering SA",
"848506": "Apple",
"848D84": "Rajant Corporation",
"848DC7": "Cisco SPVTG",
"848E0C": "Apple",
"848E96": "Embertec Pty Ltd",
"848F69": "Dell Inc.",
"849000": "Arnold & Richter Cine Technik",
"84948C": "Hitron Technologies. Inc",
"8496D8": "Pace plc",
"8497B8": "Memjet Inc.",
"849CA6": "Arcadyan Technology Corporation",
"849DC5": "Centera Photonics Inc.",
"84A6C8": "Intel Corporate",
"84A783": "Alcatel Lucent",
"84A8E4": "Shenzhen Huawei Communication Technologies Co., Ltd",
"84A991": "Cyber Trans Japan Co.,Ltd.",
"84ACA4": "Beijing Novel Super Digital TV Technology Co., Ltd",
"84AF1F": "Beat System Service Co,. Ltd.",
"84B153": "Apple",
"84B59C": "Juniper networks",
"84C2E4": "Jiangsu Qinheng Co., Ltd.",
"84C727": "Gnodal Ltd",
"84C7A9": "C3PO S.A.",
"84C8B1": "Incognito Software Inc.",
"84C9B2": "D-Link International",
"84D32A": "IEEE 1905.1",
"84D9C8": "Unipattern Co.,",
"84DB2F": "Sierra Wireless Inc",
"84DD20": "Texas Instruments",
"84DE3D": "Crystal Vision Ltd",
"84DF0C": "NET2GRID BV",
"84E4D9": "Shenzhen NEED technology Ltd.",
"84E629": "Bluwan SA",
"84E714": "Liang Herng Enterprise,Co.Ltd.",
"84EA99": "Vieworks",
"84ED33": "BBMC Co.,Ltd",
"84F493": "OMS spol. s.r.o.",
"84F64C": "Cross Point BV",
"84FCFE": "Apple",
"84FE9E": "RTC Industries, Inc.",
"880355": "Arcadyan Technology Corp.",
"880905": "MTMCommunications",
"880F10": "Huami Information Technology Co.,Ltd.",
"880FB6": "Jabil Circuits India Pvt Ltd,-EHTP unit",
"881036": "Panodic(ShenZhen) Electronics Limted",
"88124E": "Qualcomm Atheros",
"88142B": "Protonic Holland",
"881544": "Meraki, Inc.",
"8818AE": "Tamron Co., Ltd",
"881FA1": "Apple",
"882012": "LMI Technologies",
"8821E3": "Nebusens, S.L.",
"882364": "Watchnet DVR Inc",
"8823FE": "TTTech Computertechnik AG",
"88252C": "Arcadyan Technology Corporation",
"882E5A": "storONE",
"88308A": "Murata Manufactuaring Co.,Ltd.",
"88329B": "Samsung Electro Mechanics co.,LTD.",
"883314": "Texas Instruments",
"88354C": "Transics",
"883612": "SRC Computers, LLC",
"8841C1": "ORBISAT DA AMAZONIA IND E AEROL SA",
"8841FC": "AirTies Wireless Netowrks",
"8843E1": "CISCO SYSTEMS, INC.",
"8844F6": "Nokia Corporation",
"88462A": "Telechips Inc.",
"884B39": "Siemens AG, Healthcare Sector",
"8851FB": "Hewlett Packard",
"88532E": "Intel Corporate",
"885395": "Apple",
"8853D4": "Huawei Technologies Co., Ltd",
"88576D": "XTA Electronics Ltd",
"885A92": "Cisco",
"885BDD": "Aerohive Networks Inc.",
"885C47": "Alcatel Lucent",
"88615A": "Siano Mobile Silicon Ltd.",
"8863DF": "Apple",
"88685C": "Shenzhen ChuangDao & Perpetual Eternal Technology Co.,Ltd",
"886B76": "CHINA HOPEFUL GROUP HOPEFUL ELECTRIC CO.,LTD",
"887398": "K2E Tekpoint",
"887556": "Cisco",
"88789C": "Game Technologies SA",
"888603": "HUAWEI TECHNOLOGIES CO.,LTD",
"8886A0": "Simton Technologies, Ltd.",
"888717": "CANON INC.",
"8887DD": "DarbeeVision Inc.",
"888914": "All Components Incorporated",
"888964": "GSI Electronics Inc.",
"888B5D": "Storage Appliance Corporation",
"888C19": "Brady Corp Asia Pacific Ltd",
"889166": "Viewcooper Corp.",
"8891DD": "Racktivity",
"889471": "Brocade Communications Systems, Inc.",
"8894F9": "Gemicom Technology, Inc.",
"8895B9": "Unified Packet Systems Crop",
"889676": "TTC MARCONI s.r.o.",
"8897DF": "Entrypass Corporation Sdn. Bhd.",
"889821": "TERAON",
"889B39": "Samsung Electronics Co.,Ltd",
"889CA6": "BTB Korea INC",
"889FFA": "Hon Hai Precision Ind. Co.,Ltd.",
"88A3CC": "Amatis Controls",
"88A5BD": "QPCOM INC.",
"88A73C": "Ragentek Technology Group",
"88ACC1": "Generiton Co., Ltd.",
"88AE1D": "COMPAL INFORMATION(KUNSHAN)CO.,LTD",
"88B168": "Delta Control GmbH",
"88B1E1": "AirTight Networks, Inc.",
"88B627": "Gembird Europe BV",
"88BA7F": "Qfiednet Co., Ltd.",
"88BFD5": "Simple Audio Ltd",
"88C36E": "Beijing Ereneben lnformation Technology Limited",
"88C626": "Logitech - Ultimate Ears",
"88C663": "Apple",
"88CB87": "Apple",
"88D7BC": "DEP Company",
"88D962": "Canopus Systems US LLC",
"88DC96": "SENAO Networks, Inc.",
"88DD79": "Voltaire",
"88E0A0": "Shenzhen VisionSTOR Technologies Co., Ltd",
"88E0F3": "Juniper Networks",
"88E3AB": "Huawei Technologies Co., Ltd",
"88E712": "Whirlpool Corporation",
"88E7A6": "iKnowledge Integration Corp.",
"88E8F8": "YONG TAI ELECTRONIC (DONGGUAN) LTD.",
"88E917": "Tamaggo",
"88ED1C": "Cudo Communication Co., Ltd.",
"88F031": "Cisco",
"88F077": "CISCO SYSTEMS, INC.",
"88F488": "cellon communications technology(shenzhen)Co.,Ltd.",
"88F490": "Jetmobile Pte Ltd",
"88F7C7": "Technicolor USA Inc.",
"88FD15": "LINEEYE CO., LTD",
"88FED6": "ShangHai WangYong Software Co., Ltd.",
"8C006D": "Apple",
"8C04FF": "Technicolor USA Inc.",
"8C078C": "FLOW DATA INC",
"8C088B": "Remote Solution",
"8C09F4": "ARRIS Group, Inc.",
"8C0C90": "Ruckus Wireless",
"8C0CA3": "Amper",
"8C0EE3": "GUANGDONG OPPO MOBILE TELECOMMUNICATIONS CORP.,LTD.",
"8C11CB": "ABUS Security-Center GmbH & Co. KG",
"8C1F94": "RF Surgical System Inc.",
"8C210A": "TP-LINK TECHNOLOGIES CO., LTD.",
"8C271D": "QuantHouse",
"8C278A": "Vocollect Inc",
"8C2937": "Apple",
"8C2DAA": "Apple",
"8C2F39": "IBA Dosimetry GmbH",
"8C3330": "EmFirst Co., Ltd.",
"8C3AE3": "LG Electronics",
"8C3C07": "Skiva Technologies, Inc.",
"8C3C4A": "NAKAYO TELECOMMUNICATIONS,INC.",
"8C41F2": "RDA Technologies Ltd.",
"8C4435": "Shanghai BroadMobi Communication Technology Co., Ltd.",
"8C4AEE": "GIGA TMS INC",
"8C4B59": "3D Imaging & Simulations Corp",
"8C4CDC": "PLANEX COMMUNICATIONS INC.",
"8C4DB9": "Unmonday Ltd",
"8C4DEA": "Cerio Corporation",
"8C5105": "Shenzhen ireadygo Information Technology CO.,LTD.",
"8C53F7": "A&D ENGINEERING CO., LTD.",
"8C541D": "LGE",
"8C569D": "Imaging Solutions Group",
"8C56C5": "Nintendo Co., Ltd.",
"8C57FD": "LVX Western",
"8C5877": "Apple",
"8C598B": "C Technologies AB",
"8C5AF0": "Exeltech Solar Products",
"8C5CA1": "d-broad,INC",
"8C5FDF": "Beijing Railway Signal Factory",
"8C604F": "CISCO SYSTEMS, INC.",
"8C640B": "Beyond Devices d.o.o.",
"8C6422": "Sony Ericsson Mobile Communications AB",
"8C6878": "Nortek-AS",
"8C6AE4": "Viogem Limited",
"8C705A": "Intel Corporate",
"8C71F8": "Samsung Electronics Co.,Ltd",
"8C736E": "Fujitsu Limited",
"8C76C1": "Goden Tech Limited",
"8C7712": "Samsung Electronics Co.,Ltd",
"8C7716": "LONGCHEER TELECOMMUNICATION LIMITED",
"8C7B9D": "Apple",
"8C7C92": "Apple",
"8C7CB5": "Hon Hai Precision Ind. Co.,Ltd.",
"8C7CFF": "Brocade Communications Systems, Inc.",
"8C7EB3": "Lytro, Inc.",
"8C82A8": "Insigma Technology Co.,Ltd",
"8C8401": "PRIVATE",
"8C89A5": "Micro-Star INT'L CO., LTD",
"8C8A6E": "ESTUN AUTOMATION TECHNOLOY CO., LTD",
"8C8E76": "taskit GmbH",
"8C90D3": "Alcatel Lucent",
"8C9236": "Aus.Linx Technology Co., Ltd.",
"8C94CF": "Encell Technology, Inc.",
"8CA048": "Beijing NeTopChip Technology Co.,LTD",
"8CA982": "Intel Corporate",
"8CAE4C": "Plugable Technologies",
"8CAE89": "Y-cam Solutions Ltd",
"8CB64F": "CISCO SYSTEMS, INC.",
"8CB7F7": "Shenzhen UniStrong Science & Technology Co., Ltd",
"8CB82C": "IPitomy Communications",
"8CB864": "AcSiP Technology Corp.",
"8CBEBE": "Xiaomi Technology Co.,Ltd",
"8CBF9D": "Shanghai Xinyou Information Technology Ltd. Co.",
"8CC121": "Panasonic Corporation AVC Networks Company",
"8CC5E1": "ShenZhen Konka Telecommunication Technology Co.,Ltd",
"8CC7AA": "Radinet Communications Inc.",
"8CC7D0": "zhejiang ebang communication co.,ltd",
"8CC8CD": "Samsung Electronics Co., LTD",
"8CCDA2": "ACTP, Inc.",
"8CCDE8": "Nintendo Co., Ltd.",
"8CCF5C": "BEFEGA GmbH",
"8CD17B": "CG Mobile",
"8CD3A2": "VisSim AS",
"8CD628": "Ikor Metering",
"8CDB25": "ESG Solutions",
"8CDD8D": "Wifly-City System Inc.",
"8CDE52": "ISSC Technologies Corp.",
"8CDE99": "Comlab Inc.",
"8CE081": "zte corporation",
"8CE748": "PRIVATE",
"8CE7B3": "Sonardyne International Ltd",
"8CEEC6": "Precepscion Pty. Ltd.",
"8CF945": "Power Automation pte Ltd",
"8CF9C9": "MESADA Technology Co.,Ltd.",
"8CFABA": "Apple",
"8CFDF0": "QUALCOMM Incorporated",
"90004E": "Hon Hai Precision Ind. Co.,Ltd.",
"90013B": "SAGEMCOM",
"90028A": "Shenzhen Shidean Legrand Electronic Products Co.,Ltd",
"9002A9": "ZHEJIANG DAHUA TECHNOLOGY CO.,LTD",
"9003B7": "PARROT",
"900917": "Far-sighted mobile",
"900A3A": "PSG Plastic Service GmbH",
"900D66": "Digimore Electronics Co., Ltd",
"900DCB": "ARRIS Group, Inc.",
"90185E": "Apex Tool Group GmbH & Co OHG",
"90187C": "Samsung Electro Mechanics co., LTD.",
"9018AE": "Shanghai Meridian Technologies, Co. Ltd.",
"901900": "SCS SA",
"901ACA": "ARRIS Group, Inc.",
"901B0E": "Fujitsu Technology Solutions GmbH",
"901D27": "zte corporation",
"901EDD": "GREAT COMPUTER CORPORATION",
"902083": "General Engine Management Systems Ltd.",
"902155": "HTC Corporation",
"9027E4": "Apple",
"902B34": "GIGA-BYTE TECHNOLOGY CO.,LTD.",
"902E87": "LabJack",
"9031CD": "Onyx Healthcare Inc.",
"90342B": "Gatekeeper Systems, Inc.",
"9034FC": "Hon Hai Precision Ind. Co.,Ltd.",
"90356E": "Vodafone Omnitel N.V.",
"9038DF": "Changzhou Tiannengbo System Co. Ltd.",
"903AA0": "Alcatel-Lucent",
"903CAE": "Yunnan KSEC Digital Technology Co.,Ltd.",
"903D5A": "Shenzhen Wision Technology Holding Limited",
"903D6B": "Zicon Technology Corp.",
"903EAB": "ARRIS Group, Inc.",
"9046B7": "Vadaro Pte Ltd",
"904716": "RORZE CORPORATION",
"90489A": "Hon Hai Precision Ind. Co.,Ltd.",
"9049FA": "Intel Corporation",
"904CE5": "Hon Hai Precision Ind. Co.,Ltd.",
"904E2B": "Huawei Technologies Co., Ltd",
"90507B": "Advanced PANMOBIL Systems GmbH & Co. KG",
"90513F": "Elettronica Santerno",
"905446": "TES ELECTRONIC SOLUTIONS",
"9055AE": "Ericsson, EAB/RWI/K",
"905682": "Lenbrook Industries Limited",
"905692": "Autotalks Ltd.",
"9059AF": "Texas Instruments",
"905F2E": "TCT Mobile Limited",
"905F8D": "modas GmbH",
"90610C": "Fida International (S) Pte Ltd",
"906717": "Alphion India Private Limited",
"9067B5": "Alcatel-Lucent",
"9067F3": "Alcatel Lucent",
"906DC8": "DLG Automa\u00e7\u00e3o Industrial Ltda",
"906EBB": "Hon Hai Precision Ind. Co.,Ltd.",
"907025": "Garea Microsys Co.,Ltd.",
"907240": "Apple",
"907990": "Benchmark Electronics Romania SRL",
"907A0A": "Gebr. Bode GmbH & Co KG",
"907A28": "Beijing Morncloud Information And Technology Co. Ltd.",
"907AF1": "SNUPI Technologies",
"907EBA": "UTEK TECHNOLOGY (SHENZHEN) CO.,LTD",
"907F61": "Chicony Electronics Co., Ltd.",
"908260": "IEEE 1904.1 Working Group",
"90837A": "General Electric Water & Process Technologies",
"90840D": "Apple",
"9088A2": "IONICS TECHNOLOGY ME LTDA",
"908C44": "H.K ZONGMU TECHNOLOGY CO., LTD.",
"908C63": "GZ Weedong Networks Technology Co. , Ltd",
"908D1D": "GH Technologies",
"908FCF": "UNO System Co., Ltd",
"90903C": "TRISON TECHNOLOGY CORPORATION",
"909060": "RSI VIDEO TECHNOLOGIES",
"9092B4": "Diehl BGT Defence GmbH & Co. KG",
"9094E4": "D-Link International",
"909864": "Impex-Sat GmbH&Co KG",
"909916": "ELVEES NeoTek OJSC",
"909DE0": "Newland Design + Assoc. Inc.",
"909F43": "Accutron Instruments Inc.",
"90A2DA": "GHEO SA",
"90A4DE": "Wistron Neweb Corp.",
"90A783": "JSW PACIFIC CORPORATION",
"90A7C1": "Pakedge Device and Software Inc.",
"90AC3F": "BrightSign LLC",
"90AE1B": "TP-LINK TECHNOLOGIES CO.,LTD.",
"90B11C": "Dell Inc.",
"90B134": "ARRIS Group, Inc.",
"90B21F": "Apple",
"90B8D0": "Joyent, Inc.",
"90B931": "Apple, Inc",
"90B97D": "Johnson Outdoors Marine Electronics d/b/a Minnkota",
"90C115": "Sony Ericsson Mobile Communications AB",
"90C792": "ARRIS Group, Inc.",
"90CC24": "Synaptics, Inc",
"90CF15": "Nokia Corporation",
"90CF6F": "Dlogixs Co Ltd",
"90CF7D": "Qingdao Hisense Electric Co.,Ltd.",
"90D11B": "Palomar Medical Technologies",
"90D74F": "Bookeen",
"90D7EB": "Texas Instruments",
"90D852": "Comtec Co., Ltd.",
"90D92C": "HUG-WITSCHI AG",
"90DA4E": "AVANU",
"90DB46": "E-LEAD ELECTRONIC CO., LTD",
"90DFB7": "s.m.s smart microwave sensors GmbH",
"90E0F0": "IEEE 1722a Working Group",
"90E2BA": "Intel Corporate",
"90E6BA": "ASUSTek COMPUTER INC.",
"90EA60": "SPI Lasers Ltd",
"90F1AA": "Samsung Electronics Co.,LTD",
"90F1B0": "Hangzhou Anheng Info&Tech CO.,LTD",
"90F278": "Radius Gateway",
"90F3B7": "Kirisun Communications Co., Ltd.",
"90F4C1": "Rand McNally",
"90F652": "TP-LINK TECHNOLOGIES CO., LTD.",
"90F72F": "Phillips Machine & Welding Co., Inc.",
"90FB5B": "Avaya, Inc",
"90FBA6": "Hon Hai Precision Ind.Co.Ltd",
"90FF79": "Metro Ethernet Forum",
"940070": "Nokia Corporation",
"940149": "AutoHotBox",
"9401C2": "Samsung Electronics Co.,Ltd",
"940B2D": "NetView Technologies(Shenzhen) Co., Ltd",
"940BD5": "Himax Technologies, Inc",
"940C6D": "TP-LINK Technologies Co.,Ltd.",
"94103E": "Belkin International Inc.",
"9411DA": "ITF Fr\u00f6schl GmbH",
"941673": "Point Core SARL",
"941D1C": "TLab West Systems AB",
"942053": "Nokia Corporation",
"942197": "Stalmart Technology Limited",
"94236E": "Shenzhen Junlan Electronic Ltd",
"942E17": "Schneider Electric Canada Inc",
"942E63": "Fins\u00e9cur",
"94319B": "Alphatronics BV",
"9433DD": "Taco Electronic Solutions, Inc.",
"94350A": "Samsung Electronics Co.,Ltd",
"9436E0": "Sichuan Bihong Broadcast & Television New Technologies Co.,Ltd",
"9439E5": "Hon Hai Precision Ind. Co.,Ltd.",
"943AF0": "Nokia Corporation",
"943BB1": "KAONMEDIA",
"9440A2": "Anywave Communication Technologies, Inc.",
"944444": "LG Innotek",
"944452": "Belkin International Inc.",
"944696": "BaudTec Corporation",
"944A09": "BitWise Controls",
"945047": "Rechnerbetriebsgruppe",
"945103": "Samsung Electronics",
"9451BF": "Hyundai ESG",
"94592D": "EKE Building Technology Systems Ltd",
"945B7E": "TRILOBIT LTDA.",
"946124": "Pason Systems",
"9463D1": "Samsung Electronics Co.,Ltd",
"9471AC": "TCT Mobile Limited",
"94756E": "QinetiQ North America",
"947C3E": "Polewall Norge AS",
"9481A4": "Azuray Technologies",
"94857A": "Evantage Industries Corp",
"9486D4": "Surveillance Pro Corporation",
"94877C": "ARRIS Group, Inc.",
"948854": "Texas Instruments",
"948B03": "EAGET Innovation and Technology Co., Ltd.",
"948D50": "Beamex Oy Ab",
"948FEE": "Hughes Telematics, Inc.",
"949426": "Apple",
"9498A2": "Shanghai LISTEN TECH.LTD",
"949BFD": "Trans New Technology, Inc.",
"949C55": "Alta Data Technologies",
"949F3F": "Optek Digital Technology company limited",
"949FB4": "ChengDu JiaFaAnTai Technology Co.,Ltd",
"94A7BC": "BodyMedia, Inc.",
"94AAB8": "Joview(Beijing) Technology Co. Ltd.",
"94ACCA": "trivum technologies GmbH",
"94AE61": "Alcatel Lucent",
"94B8C5": "RuggedCom Inc.",
"94B9B4": "Aptos Technology",
"94BA31": "Visiontec da Amaz\u00f4nia Ltda.",
"94BA56": "Shenzhen Coship Electronics Co., Ltd.",
"94BF1E": "eflow Inc. / Smart Device Planning and Development Division",
"94C014": "Sorter Sp. j. Konrad Grzeszczyk MichaA, Ziomek",
"94C150": "2Wire Inc",
"94C3E4": "SCA Schucker Gmbh & Co KG",
"94C4E9": "PowerLayer Microsystems HongKong Limited",
"94C6EB": "NOVA electronics, Inc.",
"94C7AF": "Raylios Technology",
"94C962": "Teseq AG",
"94CA0F": "Honeywell Analytics",
"94CCB9": "ARRIS Group, Inc.",
"94CDAC": "Creowave Oy",
"94CE2C": "Sony Mobile Communications AB",
"94D019": "Cydle Corp.",
"94D723": "Shanghai DareGlobal Technologies Co., Ltd",
"94D771": "Samsung Electronics Co.,Ltd",
"94D93C": "ENELPS",
"94DB49": "SITCORP",
"94DBC9": "Azurewave",
"94DD3F": "A+V Link Technologies, Corp.",
"94DE0E": "SmartOptics AS",
"94DE80": "GIGA-BYTE TECHNOLOGY CO.,LTD.",
"94DF4E": "Wistron InfoComm(Kunshan)Co.,Ltd.",
"94DF58": "IJ Electron CO.,Ltd.",
"94E0D0": "HealthStream Taiwan Inc.",
"94E226": "D. ORtiz Consulting, LLC",
"94E711": "Xirka Dama Persada PT",
"94E848": "FYLDE MICRO LTD",
"94E98C": "Alcatel-Lucent",
"94EB2C": "Google Inc.",
"94EBCD": "Research In Motion Limited",
"94F692": "Geminico co.,Ltd.",
"94F720": "Tianjin Deviser Electronics Instrument Co., Ltd",
"94FAE8": "Shenzhen Eycom Technology Co., Ltd",
"94FBB2": "Shenzhen Gongjin Electronics Co.,Ltd",
"94FD1D": "WhereWhen Corp",
"94FD2E": "Shanghai Uniscope Technologies Co.,Ltd",
"94FEF4": "SAGEMCOM",
"980284": "Theobroma Systems GmbH",
"9803A0": "ABB n.v. Power Quality Products",
"9803D8": "Apple",
"980C82": "Samsung Electro Mechanics",
"980D2E": "HTC Corporation",
"980EE4": "PRIVATE",
"981094": "Shenzhen Vsun communication technology Co.,ltd",
"98208E": "Definium Technologies",
"98262A": "Applied Research Associates, Inc",
"98291D": "Jaguar de Mexico, SA de CV",
"98293F": "Fujian Start Computer Equipment Co.,Ltd",
"982CBE": "2Wire",
"982D56": "Resolution Audio",
"983000": "Beijing KEMACOM Technologies Co., Ltd.",
"983071": "DAIKYUNG VASCOM",
"98349D": "Krauss Maffei Technologies GmbH",
"983571": "Sub10 Systems Ltd",
"9835B8": "Assembled Products Corporation",
"983713": "PT.Navicom Indonesia",
"983B16": "AMPAK Technology Inc",
"983F9F": "China SSJ (Suzhou) Network Technology Inc.",
"984246": "SOL INDUSTRY PTE., LTD",
"9843DA": "INTERTECH",
"98473C": "SHANGHAI SUNMON COMMUNICATION TECHNOGY CO.,LTD",
"984A47": "CHG Hospital Beds",
"984B4A": "ARRIS Group, Inc.",
"984BE1": "Hewlett-Packard Company",
"984C04": "Zhangzhou Keneng Electrical Equipment Co Ltd",
"984CD3": "Mantis Deposition",
"984E97": "Starlight Marketing (H. K.) Ltd.",
"984FEE": "Intel Corporate",
"9852B1": "Samsung Electronics",
"9857D3": "HON HAI-CCPBG PRECISION IND.CO.,LTD.",
"98588A": "SYSGRATION Ltd.",
"985945": "Texas Instruments",
"985C93": "SBG Systems SAS",
"985D46": "PeopleNet Communication",
"985E1B": "ConversDigital Co., Ltd.",
"986022": "EMW Co., Ltd.",
"9866EA": "Industrial Control Communications, Inc.",
"986B3D": "ARRIS Group, Inc.",
"986CF5": "zte corporation",
"986DC8": "TOSHIBA MITSUBISHI-ELECTRIC INDUSTRIAL SYSTEMS CORPORATION",
"9873C4": "Sage Electronic Engineering LLC",
"9876B6": "Adafruit",
"987770": "Pep Digital Technology (Guangzhou) Co., Ltd",
"988217": "Disruptive Ltd",
"9886B1": "Flyaudio corporation (China)",
"9889ED": "Anadem Information Inc.",
"988B5D": "SAGEM COMMUNICATION",
"988BAD": "Corintech Ltd.",
"988E34": "ZHEJIANG BOXSAM ELECTRONIC CO.,LTD",
"988E4A": "NOXUS(BEIJING) TECHNOLOGY CO.,LTD",
"988EDD": "TE Connectivity Limerick",
"989080": "Linkpower Network System Inc Ltd.",
"9893CC": "LG Electronics Inc.",
"989449": "Skyworth Wireless Technology Ltd.",
"98A7B0": "MCST ZAO",
"98AAD7": "BLUE WAVE NETWORKING CO LTD",
"98B039": "Alcatel-Lucent",
"98B8E3": "Apple",
"98BC57": "SVA TECHNOLOGIES CO.LTD",
"98BC99": "Edeltech Co.,Ltd.",
"98BE94": "IBM",
"98C0EB": "Global Regency Ltd",
"98C845": "PacketAccess",
"98CDB4": "Virident Systems, Inc.",
"98D331": "Shenzhen Bolutek Technology Co.,Ltd.",
"98D686": "Chyi Lee industry Co., ltd.",
"98D6BB": "Apple",
"98D6F7": "LG Electronics",
"98D88C": "Nortel Networks",
"98DA92": "Vuzix Corporation",
"98DCD9": "UNITEC Co., Ltd.",
"98E165": "Accutome",
"98E79A": "Foxconn(NanJing) Communication Co.,Ltd.",
"98EC65": "Cosesy ApS",
"98F0AB": "Apple",
"98F170": "Murata Manufacturing Co., Ltd.",
"98F537": "zte corporation",
"98F8C1": "IDT Technology Limited",
"98F8DB": "Marini Impianti Industriali s.r.l.",
"98FB12": "Grand Electronics (HK) Ltd",
"98FC11": "Cisco-Linksys, LLC",
"98FE03": "Ericsson - North America",
"98FE94": "Apple",
"98FF6A": "OTEC(Shanghai)Technology Co.,Ltd.",
"98FFD0": "Lenovo Mobile Communication Technology Ltd.",
"9C0111": "Shenzhen Newabel Electronic Co., Ltd.",
"9C0298": "Samsung Electronics Co.,Ltd",
"9C039E": "Beijing Winchannel Software Technology Co., Ltd",
"9C0473": "Tecmobile (International) Ltd.",
"9C04EB": "Apple",
"9C066E": "Hytera Communications Corporation Limited",
"9C0DAC": "Tymphany HK Limited",
"9C1465": "Edata Elektronik San. ve Tic. A.\u015e.",
"9C1874": "Nokia Danmark A/S",
"9C1C12": "Aruba Networks",
"9C1FDD": "Accupix Inc.",
"9C207B": "Apple",
"9C216A": "TP-LINK TECHNOLOGIES CO.,LTD.",
"9C220E": "TASCAN Service GmbH",
"9C2840": "Discovery Technology,LTD..",
"9C28BF": "Continental Automotive Czech Republic s.r.o.",
"9C28EF": "HUAWEI TECHNOLOGIES CO.,LTD",
"9C2A70": "Hon Hai Precision Ind. Co.,Ltd.",
"9C3178": "Foshan Huadian Intelligent Communications Teachnologies Co.,Ltd",
"9C31B6": "Kulite Semiconductor Products Inc",
"9C3AAF": "Samsung Electronics Co.,Ltd",
"9C3EAA": "EnvyLogic Co.,Ltd.",
"9C417C": "Hame Technology Co., Limited",
"9C443D": "CHENGDU XUGUANG TECHNOLOGY CO, LTD",
"9C44A6": "SwiftTest, Inc.",
"9C4563": "DIMEP Sistemas",
"9C4A7B": "Nokia Corporation",
"9C4CAE": "Mesa Labs",
"9C4E20": "CISCO SYSTEMS, INC.",
"9C4E36": "Intel Corporate",
"9C4E8E": "ALT Systems Ltd",
"9C4EBF": "BoxCast",
"9C53CD": "ENGICAM s.r.l.",
"9C541C": "Shenzhen My-power Technology Co.,Ltd",
"9C54CA": "Zhengzhou VCOM Science and Technology Co.,Ltd",
"9C55B4": "I.S.E. S.r.l.",
"9C5711": "Feitian Xunda(Beijing) Aeronautical Information Technology Co., Ltd.",
"9C5B96": "NMR Corporation",
"9C5C8D": "FIREMAX IND\u00daSTRIA E COM\u00c9RCIO DE PRODUTOS ELETR\u00d4NICOS LTDA",
"9C5D12": "Aerohive Networks Inc",
"9C5D95": "VTC Electronics Corp.",
"9C5E73": "Calibre UK Ltd",
"9C611D": "Omni-ID USA, Inc.",
"9C645E": "Harman Consumer Group",
"9C65B0": "Samsung Electronics Co.,Ltd",
"9C65F9": "AcSiP Technology Corp.",
"9C6650": "Glodio Technolies Co.,Ltd Tianjin Branch",
"9C6ABE": "QEES ApS.",
"9C7514": "Wildix srl",
"9C77AA": "NADASNV",
"9C79AC": "Suntec Software(Shanghai) Co., Ltd.",
"9C7BD2": "NEOLAB Convergence",
"9C807D": "SYSCABLE Korea Inc.",
"9C80DF": "Arcadyan Technology Corporation",
"9C86DA": "Phoenix Geophysics Ltd.",
"9C8888": "Simac Techniek NV",
"9C8BF1": "The Warehouse Limited",
"9C8D1A": "INTEG process group inc",
"9C8E99": "Hewlett-Packard Company",
"9C8EDC": "Teracom Limited",
"9C934E": "Xerox Corporation",
"9C93E4": "PRIVATE",
"9C95F8": "SmartDoor Systems, LLC",
"9C9726": "Technicolor",
"9C9811": "Guangzhou Sunrise Electronics Development Co., Ltd",
"9C9C1D": "Starkey Labs Inc.",
"9CA10A": "SCLE SFE",
"9CA134": "Nike, Inc.",
"9CA3BA": "SAKURA Internet Inc.",
"9CA577": "Osorno Enterprises Inc.",
"9CA9E4": "zte corporation",
"9CADEF": "Obihai Technology, Inc.",
"9CAFCA": "CISCO SYSTEMS, INC.",
"9CB008": "Ubiquitous Computing Technology Corporation",
"9CB206": "PROCENTEC",
"9CB654": "Hewlett Packard",
"9CB70D": "Liteon Technology Corporation",
"9CB793": "Creatcomm Technology Inc.",
"9CBB98": "Shen Zhen RND Electronic Co.,LTD",
"9CBD9D": "SkyDisk, Inc.",
"9CC077": "PrintCounts, LLC",
"9CC0D2": "Conductix-Wampfler AG",
"9CC7A6": "AVM GmbH",
"9CC7D1": "SHARP Corporation",
"9CCAD9": "Nokia Corporation",
"9CCD82": "CHENG UEI PRECISION INDUSTRY CO.,LTD",
"9CD21E": "Hon Hai Precision Ind. Co.,Ltd.",
"9CD24B": "zte corporation",
"9CD36D": "NETGEAR INC.,",
"9CD643": "D-Link International",
"9CDF03": "Harman/Becker Automotive Systems GmbH",
"9CE10E": "NCTech Ltd",
"9CE1D6": "Junger Audio-Studiotechnik GmbH",
"9CE635": "Nintendo Co., Ltd.",
"9CE6E7": "Samsung Electronics Co.,Ltd",
"9CE7BD": "Winduskorea co., Ltd",
"9CEBE8": "BizLink (Kunshan) Co.,Ltd",
"9CF61A": "UTC Fire and Security",
"9CF67D": "Ricardo Prague, s.r.o.",
"9CF8DB": "shenzhen eyunmei technology co,.ltd",
"9CF938": "AREVA NP GmbH",
"9CFBF1": "MESOMATIC GmbH & Co.KG",
"9CFFBE": "OTSL Inc.",
"A002DC": "Amazon Technologies Inc.",
"A00363": "Robert Bosch Healthcare GmbH",
"A00798": "Samsung Electronics",
"A007B6": "Advanced Technical Support, Inc.",
"A00ABF": "Wieson Technologies Co., Ltd.",
"A00BBA": "SAMSUNG ELECTRO-MECHANICS",
"A00CA1": "SKTB SKiT",
"A01290": "Avaya, Inc",
"A012DB": "TABUCHI ELECTRIC CO.,LTD",
"A0133B": "Copyright \u00a9 HiTi Digital, Inc.",
"A0143D": "PARROT SA",
"A0165C": "Triteka LTD",
"A01859": "Shenzhen Yidashi Electronics Co Ltd",
"A01917": "Bertel S.p.a.",
"A01C05": "NIMAX TELECOM CO.,LTD.",
"A01D48": "Hewlett Packard",
"A02195": "Samsung Electronics Digital Imaging",
"A021B7": "NETGEAR",
"A0231B": "TeleComp R&D Corp.",
"A02BB8": "Hewlett Packard",
"A02EF3": "United Integrated Services Co., Led.",
"A0369F": "Intel Corporate",
"A036F0": "Comprehensive Power",
"A036FA": "Ettus Research LLC",
"A03A75": "PSS Belgium N.V.",
"A03B1B": "Inspire Tech",
"A04025": "Actioncable, Inc.",
"A04041": "SAMWONFA Co.,Ltd.",
"A041A7": "NL Ministry of Defense",
"A0423F": "Tyan Computer Corp",
"A0481C": "Hewlett Packard",
"A04CC1": "Helixtech Corp.",
"A04E04": "Nokia Corporation",
"A051C6": "Avaya, Inc",
"A055DE": "Pace plc",
"A0593A": "V.D.S. Video Display Systems srl",
"A05AA4": "Grand Products Nevada, Inc.",
"A05B21": "ENVINET GmbH",
"A05DC1": "TMCT Co., LTD.",
"A05DE7": "DIRECTV, Inc.",
"A05E6B": "MELPER Co., Ltd.",
"A06518": "VNPT TECHNOLOGY",
"A067BE": "Sicon s.r.l.",
"A06986": "Wellav Technologies Ltd",
"A06A00": "Verilink Corporation",
"A06CEC": "RIM",
"A06D09": "Intelcan Technosystems Inc.",
"A06E50": "Nanotek Elektronik Sistemler Ltd. Sti.",
"A071A9": "Nokia Corporation",
"A07332": "Cashmaster International Limited",
"A073FC": "Rancore Technologies Private Limited",
"A07591": "Samsung Electronics Co.,Ltd",
"A07771": "Vialis BV",
"A078BA": "Pantech Co., Ltd.",
"A0821F": "Samsung Electronics Co.,Ltd",
"A082C7": "P.T.I Co.,LTD",
"A0861D": "Chengdu Fuhuaxin Technology co.,Ltd",
"A086EC": "SAEHAN HITEC Co., Ltd",
"A08869": "Intel Corporate",
"A088B4": "Intel Corporate",
"A089E4": "Skyworth Digital Technology(Shenzhen) Co.,Ltd",
"A08A87": "HuiZhou KaiYue Electronic Co.,Ltd",
"A08C15": "Gerhard D. Wempe KG",
"A08C9B": "Xtreme Technologies Corp",
"A090DE": "VEEDIMS,LLC",
"A09805": "OpenVox Communication Co Ltd",
"A098ED": "Shandong Intelligent Optical Communication Development Co., Ltd.",
"A09A5A": "Time Domain",
"A09BBD": "Total Aviation Solutions Pty Ltd",
"A0A130": "DLI Taiwan Branch office",
"A0A23C": "GPMS",
"A0A763": "Polytron Vertrieb GmbH",
"A0A8CD": "Intel Corporate",
"A0AAFD": "EraThink Technologies Corp.",
"A0B100": "ShenZhen Cando Electronics Co.,Ltd",
"A0B3CC": "Hewlett Packard",
"A0B5DA": "HongKong THTF Co., Ltd",
"A0B662": "Acutvista Innovation Co., Ltd.",
"A0B9ED": "Skytap",
"A0BAB8": "Pixon Imaging",
"A0BF50": "S.C. ADD-PRODUCTION S.R.L.",
"A0BFA5": "CORESYS",
"A0C3DE": "Triton Electronic Systems Ltd.",
"A0C6EC": "ShenZhen ANYK Technology Co.,LTD",
"A0CEC8": "CE LINK LIMITED",
"A0CF5B": "CISCO SYSTEMS, INC.",
"A0D12A": "AXPRO Technology Inc.",
"A0D3C1": "Hewlett Packard",
"A0DA92": "Nanjing Glarun Atten Technology Co. Ltd.",
"A0DC04": "Becker-Antriebe GmbH",
"A0DD97": "PolarLink Technologies, Ltd",
"A0DDE5": "SHARP Corporation",
"A0DE05": "JSC \"Irbis-T\"",
"A0E201": "AVTrace Ltd.(China)",
"A0E25A": "Amicus SK, s.r.o.",
"A0E295": "DAT System Co.,Ltd",
"A0E453": "Sony Mobile Communications AB",
"A0E534": "Stratec Biomedical AG",
"A0E5E9": "enimai Inc",
"A0E9DB": "Ningbo FreeWings Technologies Co.,Ltd",
"A0EB76": "AirCUVE Inc.",
"A0EC80": "zte corporation",
"A0EDCD": "Apple",
"A0EF84": "Seine Image Int'l Co., Ltd",
"A0F217": "GE Medical System(China) Co., Ltd.",
"A0F3C1": "TP-LINK TECHNOLOGIES CO., LTD.",
"A0F3E4": "Alcatel Lucent IPD",
"A0F419": "Nokia Corporation",
"A0F450": "HTC Corporation",
"A0F459": "FN-LINK TECHNOLOGY LIMITED",
"A0FC6E": "Telegrafia a.s.",
"A0FE91": "AVAT Automation GmbH",
"A40130": "ABIsystems Co., LTD",
"A4059E": "STA Infinity LLP",
"A409CB": "Alfred Kaercher GmbH & Co KG",
"A40BED": "Carry Technology Co.,Ltd",
"A40CC3": "CISCO SYSTEMS, INC.",
"A4134E": "Luxul",
"A41566": "Wei Fang Goertek Electronics Co.,Ltd",
"A41731": "Hon Hai Precision Ind. Co.,Ltd.",
"A41875": "CISCO SYSTEMS, INC.",
"A41BC0": "Fastec Imaging Corporation",
"A41F72": "Dell Inc.",
"A4218A": "Nortel Networks",
"A42305": "Open Networking Laboratory",
"A424B3": "FlatFrog Laboratories AB",
"A42940": "Shenzhen YOUHUA Technology Co., Ltd",
"A429B7": "bluesky",
"A42C08": "Masterwork Automodules",
"A433D1": "Fibrlink Communications Co.,Ltd.",
"A438FC": "Plastic Logic",
"A43A69": "Vers Inc",
"A43BFA": "IEEE REGISTRATION AUTHORITY - Please see MAM public listing for more information.",
"A43D78": "GUANGDONG OPPO MOBILE TELECOMMUNICATIONS CORP.,LTD",
"A4466B": "EOC Technology",
"A446FA": "AmTRAN Video Corporation",
"A44B15": "Sun Cupid Technology (HK) LTD",
"A44C11": "CISCO SYSTEMS, INC.",
"A44E2D": "Adaptive Wireless Solutions, LLC",
"A44E31": "Intel Corporate",
"A45055": "busware.de",
"A4526F": "ADB Broadband Italia",
"A4561B": "MCOT Corporation",
"A45630": "CISCO SYSTEMS, INC.",
"A45A1C": "smart-electronic GmbH",
"A45C27": "Nintendo Co., Ltd.",
"A45D36": "Hewlett Packard",
"A46032": "MRV Communications (Networks) LTD",
"A46706": "Apple",
"A46CC1": "LTi REEnergy GmbH",
"A46E79": "DFT System Co.Ltd",
"A47733": "Google",
"A47760": "Nokia Corporation",
"A479E4": "KLINFO Corp",
"A47AA4": "ARRIS Group, Inc.",
"A47ACF": "VIBICOM COMMUNICATIONS INC.",
"A47C14": "ChargeStorm AB",
"A47C1F": "Cobham plc",
"A47E39": "zte corporation",
"A481EE": "Nokia Corporation",
"A4856B": "Q Electronics Ltd",
"A4895B": "ARK INFOSOLUTIONS PVT LTD",
"A49005": "CHINA GREATWALL COMPUTER SHENZHEN CO.,LTD",
"A4934C": "CISCO SYSTEMS, INC.",
"A497BB": "Hitachi Industrial Equipment Systems Co.,Ltd",
"A49947": "Huawei Technologies Co., Ltd",
"A49981": "FuJian Elite Power Tech CO.,LTD.",
"A49A58": "Samsung Electronics Co.,Ltd",
"A49B13": "Burroughs Payment Systems, Inc.",
"A49EDB": "AutoCrib, Inc.",
"A49F85": "Lyve Minds, Inc",
"A49F89": "Shanghai Rui Rui Communication Technology Co.Ltd.",
"A4A24A": "Cisco SPVTG",
"A4A80F": "Shenzhen Coship Electronics Co., Ltd.",
"A4AD00": "Ragsdale Technology",
"A4ADB8": "Vitec Group, Camera Dynamics Ltd",
"A4AE9A": "Maestro Wireless Solutions ltd.",
"A4B121": "Arantia 2010 S.L.",
"A4B197": "Apple",
"A4B1E9": "Technicolor",
"A4B1EE": "H. ZANDER GmbH & Co. KG",
"A4B2A7": "Adaxys Solutions AG",
"A4B36A": "JSC SDO Chromatec",
"A4B818": "PENTA Gesellschaft f\u00fcr elektronische Industriedatenverarbeitung mbH",
"A4B980": "Parking BOXX Inc.",
"A4BADB": "Dell Inc.",
"A4BBAF": "Lime Instruments",
"A4BE61": "EutroVision System, Inc.",
"A4C0C7": "ShenZhen Hitom Communication Technology Co..LTD",
"A4C0E1": "Nintendo Co., Ltd.",
"A4C2AB": "Hangzhou LEAD-IT Information & Technology Co.,Ltd",
"A4C361": "Apple",
"A4C7DE": "Cambridge Industries(Group) Co.,Ltd.",
"A4D094": "Erwin Peters Systemtechnik GmbH",
"A4D18F": "Shenzhen Skyee Optical Fiber Communication Technology Ltd.",
"A4D1D1": "ECOtality North America",
"A4D1D2": "Apple",
"A4D3B5": "GLITEL Stropkov, s.r.o.",
"A4D856": "Qualcomm Labs Inc.",
"A4DA3F": "Bionics Corp.",
"A4DB2E": "Kingspan Environmental Ltd",
"A4DB30": "Liteon Technology Corporation",
"A4DE50": "Total Walther GmbH",
"A4E0E6": "FILIZOLA S.A. PESAGEM E AUTOMACAO",
"A4E32E": "Silicon & Software Systems Ltd.",
"A4E391": "DENY FONTAINE",
"A4E4B8": "BlackBerry Limited",
"A4E731": "Nokia Corporation",
"A4E7E4": "Connex GmbH",
"A4E991": "SISTEMAS AUDIOVISUALES ITELSIS S.L.",
"A4E9A3": "Honest Technology Co., Ltd",
"A4EBD3": "Samsung Electronics Co.,Ltd",
"A4ED4E": "ARRIS Group, Inc.",
"A4EE57": "SEIKO EPSON CORPORATION",
"A4EF52": "Telewave Co., Ltd.",
"A4F3C1": "Open Source Robotics Foundation, Inc.",
"A4F522": "CHOFU SEISAKUSHO CO.,LTD",
"A4F7D0": "LAN Accessories Co., Ltd.",
"A4FB8D": "Hangzhou Dunchong Technology Co.Ltd",
"A4FCCE": "Security Expert Ltd.",
"A80180": "IMAGO Technologies GmbH",
"A80600": "Samsung Electronics Co.,Ltd",
"A80C0D": "Cisco",
"A8154D": "TP-LINK TECHNOLOGIES CO.,LTD.",
"A816B2": "LG Electronics",
"A81758": "Elektronik System i Ume\u00e5 AB",
"A81B18": "XTS CORP",
"A81FAF": "KRYPTON POLSKA",
"A82066": "Apple",
"A824EB": "ZAO NPO Introtest",
"A826D9": "HTC Corporation",
"A8294C": "Precision Optical Transceivers, Inc.",
"A82BD6": "Shina System Co., Ltd",
"A830AD": "Wei Fang Goertek Electronics Co.,Ltd",
"A83944": "Actiontec Electronics, Inc",
"A84041": "Dragino Technology Co., Limited",
"A84481": "Nokia Corporation",
"A845E9": "Firich Enterprises CO., LTD.",
"A849A5": "Lisantech Co., Ltd.",
"A854B2": "Wistron Neweb Corp.",
"A8556A": "Pocketnet Technology Inc.",
"A8574E": "TP-LINK TECHNOLOGIES CO.,LTD.",
"A85BB0": "Shenzhen Dehoo Technology Co.,Ltd",
"A85BF3": "Audivo GmbH",
"A861AA": "Cloudview Limited",
"A862A2": "JIWUMEDIA CO., LTD.",
"A863DF": "DISPLAIRE CORPORATION",
"A863F2": "Texas Instruments",
"A865B2": "DONGGUAN YISHANG ELECTRONIC TECHNOLOGY CO., LIMITED",
"A86A6F": "RIM",
"A870A5": "UniComm Inc.",
"A875D6": "FreeTek International Co., Ltd.",
"A875E2": "Aventura Technologies, Inc.",
"A8776F": "Zonoff",
"A87B39": "Nokia Corporation",
"A87E33": "Nokia Danmark A/S",
"A881F1": "BMEYE B.V.",
"A886DD": "Apple, Inc.",
"A88792": "Broadband Antenna Tracking Systems",
"A887ED": "ARC Wireless LLC",
"A88808": "Apple",
"A88CEE": "MicroMade Galka i Drozdz sp.j.",
"A88D7B": "SunDroid Global limited.",
"A8922C": "LG Electronics",
"A893E6": "JIANGXI JINGGANGSHAN CKING COMMUNICATION TECHNOLOGY CO.,LTD",
"A895B0": "Aker Subsea Ltd",
"A8968A": "Apple",
"A897DC": "IBM",
"A898C6": "Shinbo Co., Ltd.",
"A8995C": "aizo ag",
"A89B10": "inMotion Ltd.",
"A8A668": "zte corporation",
"A8AD3D": "Alcatel-Lucent Shanghai Bell Co., Ltd",
"A8B0AE": "LEONI",
"A8B1D4": "CISCO SYSTEMS, INC.",
"A8B9B3": "ESSYS",
"A8BBCF": "Apple",
"A8BD1A": "Honey Bee (Hong Kong) Limited",
"A8BD3A": "UNIONMAN TECHNOLOGY CO.,LTD",
"A8C222": "TM-Research Inc.",
"A8CB95": "EAST BEST CO., LTD.",
"A8CCC5": "Saab AB (publ)",
"A8CE90": "CVC",
"A8D0E5": "Juniper Networks",
"A8D236": "Lightware Visual Engineering",
"A8D3C8": "Wachendorff Elektronik GmbH & Co. KG",
"A8E018": "Nokia Corporation",
"A8E3EE": "Sony Computer Entertainment Inc.",
"A8E539": "Moimstone Co.,Ltd",
"A8EF26": "Tritonwave",
"A8F274": "Samsung Electronics",
"A8F470": "Fujian Newland Communication Science Technologies Co.,Ltd.",
"A8F7E0": "PLANET Technology Corporation",
"A8F94B": "Eltex Enterprise Ltd.",
"A8FAD8": "Apple",
"A8FB70": "WiseSec L.t.d",
"A8FCB7": "Consolidated Resource Imaging",
"AA0000": "DIGITAL EQUIPMENT CORPORATION",
"AA0001": "DIGITAL EQUIPMENT CORPORATION",
"AA0002": "DIGITAL EQUIPMENT CORPORATION",
"AA0003": "DIGITAL EQUIPMENT CORPORATION",
"AA0004": "DIGITAL EQUIPMENT CORPORATION",
"AC0142": "Uriel Technologies SIA",
"AC02CA": "HI Solutions, Inc.",
"AC02CF": "RW Tecnologia Industria e Comercio Ltda",
"AC02EF": "Comsis",
"AC0613": "Senselogix Ltd",
"AC0A61": "Labor S.r.L.",
"AC0DFE": "Ekon GmbH - myGEKKO",
"AC1461": "ATAW Co., Ltd.",
"AC14D2": "wi-daq, inc.",
"AC162D": "Hewlett Packard",
"AC1702": "Fibar Group sp. z o.o.",
"AC1826": "SEIKO EPSON CORPORATION",
"AC199F": "SUNGROW POWER SUPPLY CO.,LTD.",
"AC20AA": "DMATEK Co., Ltd.",
"AC220B": "ASUSTek COMPUTER INC.",
"AC2DA3": "TXTR GmbH",
"AC2FA8": "Humannix Co.,Ltd.",
"AC319D": "Shenzhen TG-NET Botone Technology Co.,Ltd.",
"AC34CB": "Shanhai GBCOM Communication Technology Co. Ltd",
"AC3613": "Samsung Electronics Co.,Ltd",
"AC3C0B": "Apple",
"AC3CB4": "Nilan A/S",
"AC3D05": "Instorescreen Aisa",
"AC3D75": "HANGZHOU ZHIWAY TECHNOLOGIES CO.,LTD.",
"AC3FA4": "TAIYO YUDEN CO.,LTD",
"AC40EA": "C&T Solution Inc.",
"AC4122": "Eclipse Electronic Systems Inc.",
"AC44F2": "Revolabs Inc",
"AC4723": "Genelec",
"AC4AFE": "Hisense Broadband Multimedia Technology Co.,Ltd.",
"AC4BC8": "Juniper Networks",
"AC4E91": "HUAWEI TECHNOLOGIES CO.,LTD",
"AC4FFC": "SVS-VISTEK GmbH",
"AC5036": "Pi-Coral Inc",
"AC5135": "MPI TECH",
"AC51EE": "Cambridge Communication Systems Ltd",
"AC54EC": "IEEE P1823 Standards Working Group",
"AC583B": "Human Assembler, Inc.",
"AC5D10": "Pace Americas",
"AC5E8C": "Utillink",
"AC6123": "Drivven, Inc.",
"AC6706": "Ruckus Wireless",
"AC6BAC": "Jenny Science AG",
"AC6E1A": "Shenzhen Gongjin Electronics Co.,Ltd",
"AC6F4F": "Enspert Inc",
"AC6FBB": "TATUNG Technology Inc.",
"AC6FD9": "Valueplus Inc.",
"AC7236": "Lexking Technology Co., Ltd.",
"AC7289": "Intel Corporate",
"AC7A42": "iConnectivity",
"AC7BA1": "Intel Corporate",
"AC7F3E": "Apple",
"AC80D6": "Hexatronic AB",
"AC8112": "Gemtek Technology Co., Ltd.",
"AC81F3": "Nokia Corporation",
"AC8317": "Shenzhen Furtunetel Communication Co., Ltd",
"AC83F0": "ImmediaTV Corporation",
"AC853D": "HUAWEI TECHNOLOGIES CO.,LTD",
"AC8674": "Open Mesh, Inc.",
"AC867E": "Create New Technology (HK) Limited Company",
"AC8ACD": "ROGER D.Wensker, G.Wensker sp.j.",
"AC8D14": "Smartrove Inc",
"AC932F": "Nokia Corporation",
"AC9403": "Envision Peripherals Inc",
"AC9A96": "Lantiq Deutschland GmbH",
"AC9B84": "Smak Tecnologia e Automacao",
"AC9CE4": "Alcatel-Lucent Shanghai Bell Co., Ltd",
"ACA016": "CISCO SYSTEMS, INC.",
"ACA22C": "Baycity Technologies Ltd",
"ACA31E": "Aruba Networks",
"ACA430": "Peerless AV",
"ACA919": "TrekStor GmbH",
"ACA9A0": "Audioengine, Ltd.",
"ACAB8D": "Lyngso Marine A/S",
"ACB313": "ARRIS Group, Inc.",
"ACB859": "Uniband Electronic Corp,",
"ACBD0B": "IMAC CO.,LTD",
"ACBE75": "Ufine Technologies Co.,Ltd.",
"ACBEB6": "Visualedge Technology Co., Ltd.",
"ACC2EC": "CLT INT'L IND. CORP.",
"ACC595": "Graphite Systems",
"ACC698": "Kohzu Precision Co., Ltd.",
"ACC935": "Ness Corporation",
"ACCA54": "Telldus Technologies AB",
"ACCA8E": "ODA Technologies",
"ACCABA": "Midokura Co., Ltd.",
"ACCB09": "Hefcom Metering (Pty) Ltd",
"ACCC8E": "Axis Communications AB",
"ACCE8F": "HWA YAO TECHNOLOGIES CO., LTD",
"ACCF23": "Hi-flying electronics technology Co.,Ltd",
"ACCF5C": "Apple",
"ACD180": "Crexendo Business Solutions, Inc.",
"ACD364": "ABB SPA, ABB SACE DIV.",
"ACD657": "Shaanxi Guolian Digital TV Technology Co., Ltd.",
"ACD9D6": "tci GmbH",
"ACDBDA": "Shenzhen Geniatech Inc, Ltd",
"ACDE48": "PRIVATE",
"ACE069": "ISAAC Instruments",
"ACE215": "Huawei Technologies Co., Ltd",
"ACE348": "MadgeTech, Inc",
"ACE42E": "SK hynix",
"ACE64B": "Shenzhen Baojia Battery Technology Co., Ltd.",
"ACE87B": "Huawei Technologies Co., Ltd",
"ACE87E": "Bytemark Computer Consulting Ltd",
"ACE97F": "IoT Tech Limited",
"ACE9AA": "Hay Systems Ltd",
"ACEA6A": "GENIX INFOCOMM CO., LTD.",
"ACEE3B": "6harmonics Inc",
"ACF0B2": "Becker Electronics Taiwan Ltd.",
"ACF1DF": "D-Link International",
"ACF2C5": "Cisco",
"ACF7F3": "XIAOMI CORPORATION",
"ACF97E": "ELESYS INC.",
"ACFDEC": "Apple, Inc",
"B000B4": "Cisco",
"B00594": "Liteon Technology Corporation",
"B01203": "Dynamics Hong Kong Limited",
"B01266": "Futaba-Kikaku",
"B01408": "LIGHTSPEED INTERNATIONAL CO.",
"B01743": "EDISON GLOBAL CIRCUITS LLC",
"B01B7C": "Ontrol A.S.",
"B01C91": "Elim Co",
"B024F3": "Progeny Systems",
"B025AA": "PRIVATE",
"B03495": "Apple",
"B0358D": "Nokia Corporation",
"B03829": "Siliconware Precision Industries Co., Ltd.",
"B03850": "Nanjing CAS-ZDC IOT SYSTEM CO.,LTD",
"B0435D": "NuLEDs, Inc.",
"B04545": "YACOUB Automation GmbH",
"B046FC": "MitraStar Technology Corp.",
"B0487A": "TP-LINK TECHNOLOGIES CO., LTD.",
"B04C05": "Fresenius Medical Care Deutschland GmbH",
"B050BC": "SHENZHEN BASICOM ELECTRONIC CO.,LTD.",
"B0518E": "Holl technology CO.Ltd.",
"B05706": "Vallox Oy",
"B058C4": "Broadcast Microwave Services, Inc",
"B05B1F": "THERMO FISHER SCIENTIFIC S.P.A.",
"B05CE5": "Nokia Corporation",
"B061C7": "Ericsson-LG Enterprise",
"B06563": "Shanghai Railway Communication Factory",
"B065BD": "Apple",
"B068B6": "Hangzhou OYE Technology Co. Ltd",
"B06971": "DEI Sales, Inc.",
"B06CBF": "3ality Digital Systems GmbH",
"B0750C": "QA Cafe",
"B0754D": "Alcatel-Lucent",
"B075D5": "ZTE Corporation",
"B077AC": "ARRIS Group, Inc.",
"B07908": "Cummings Engineering",
"B0793C": "Revolv Inc",
"B07994": "Motorola Mobility LLC",
"B07D62": "Dipl.-Ing. H. Horstmann GmbH",
"B0808C": "Laser Light Engines",
"B081D8": "I-sys Corp",
"B0869E": "Chloride S.r.L",
"B08807": "Strata Worldwide",
"B08991": "LGE",
"B08E1A": "URadio Systems Co., Ltd",
"B09074": "Fulan Electronics Limited",
"B09134": "Taleo",
"B0973A": "E-Fuel Corporation",
"B0989F": "LG CNS",
"B09928": "Fujitsu Limited",
"B09AE2": "STEMMER IMAGING GmbH",
"B09BD4": "GNH Software India Private Limited",
"B09FBA": "Apple",
"B0A10A": "Pivotal Systems Corporation",
"B0A37E": "Qingdao Haier Electronics Co.,Ltd",
"B0A72A": "Ensemble Designs, Inc.",
"B0A737": "Roku, Inc.",
"B0A86E": "Juniper Networks",
"B0AA36": "GUANGDONG OPPO MOBILE TELECOMMUNICATIONS CORP.,LTD.",
"B0ACFA": "Fujitsu Limited",
"B0ADAA": "Avaya, Inc",
"B0B2DC": "Zyxel Communications Corporation",
"B0B32B": "Slican Sp. z o.o.",
"B0B448": "Texas Instruments",
"B0B8D5": "Nanjing Nengrui Auto Equipment CO.,Ltd",
"B0BD6D": "Echostreams Innovative Solutions",
"B0BDA1": "ZAKLAD ELEKTRONICZNY SIMS",
"B0BF99": "WIZITDONGDO",
"B0C4E7": "Samsung Electronics",
"B0C554": "D-Link International",
"B0C69A": "Juniper Networks",
"B0C745": "Buffalo Inc.",
"B0C83F": "Jiangsu Cynray IOT Co., Ltd.",
"B0C8AD": "People Power Company",
"B0C95B": "Beijing Symtech CO.,LTD",
"B0CE18": "Zhejiang shenghui lighting co.,Ltd",
"B0CF4D": "MI-Zone Technology Ireland",
"B0D09C": "Samsung Electronics Co.,Ltd",
"B0D2F5": "Vello Systems, Inc.",
"B0D59D": "Shenzhen Zowee Technology Co., Ltd",
"B0D7C5": "STP KFT",
"B0DA00": "CERA ELECTRONIQUE",
"B0DF3A": "Samsung Electronics Co.,Ltd",
"B0E39D": "CAT SYSTEM CO.,LTD.",
"B0E50E": "NRG SYSTEMS INC",
"B0E754": "2Wire",
"B0E892": "SEIKO EPSON CORPORATION",
"B0E97E": "Advanced Micro Peripherals",
"B0EC71": "Samsung Electronics Co.,Ltd",
"B0EC8F": "GMX SAS",
"B0EE45": "AzureWave Technologies, Inc.",
"B0F1BC": "Dhemax Ingenieros Ltda",
"B0FAEB": "Cisco",
"B0FEBD": "PRIVATE",
"B4009C": "CableWorld Ltd.",
"B40142": "GCI Science & Technology Co.,LTD",
"B40418": "Smartchip Integrated Inc.",
"B407F9": "SAMSUNG ELECTRO-MECHANICS",
"B40832": "TC Communications",
"B40B7A": "Brusa Elektronik AG",
"B40C25": "Palo Alto Networks",
"B40E96": "HERAN",
"B40EDC": "LG-Ericsson Co.,Ltd.",
"B41489": "CISCO SYSTEMS, INC.",
"B41513": "HUAWEI TECHNOLOGIES CO.,LTD",
"B418D1": "Apple",
"B41DEF": "Internet Laboratories, Inc.",
"B4211D": "Beijing GuangXin Technology Co., Ltd",
"B4218A": "Dog Hunter LLC",
"B424E7": "Codetek Technology Co.,Ltd",
"B428F1": "E-Prime Co., Ltd.",
"B42A39": "ORBIT MERRET, spol. s r. o.",
"B42C92": "Zhejiang Weirong Electronic Co., Ltd",
"B42CBE": "Direct Payment Solutions Limited",
"B431B8": "Aviwest",
"B4346C": "MATSUNICHI DIGITAL TECHNOLOGY (HONG KONG) LIMITED",
"B43564": "Fujian Tian Cheng Electron Science & Technical Development Co.,Ltd.",
"B435F7": "Zhejiang Pearmain Electronics Co.ltd.",
"B43741": "Consert, Inc.",
"B439D6": "ProCurve Networking by HP",
"B43A28": "Samsung Electronics Co.,Ltd",
"B43DB2": "Degreane Horizon",
"B43E3B": "Viableware, Inc",
"B4417A": "ShenZhen Gongjin Electronics Co.,Ltd",
"B4430D": "Broadlink Pty Ltd",
"B4475E": "Avaya, Inc",
"B44CC2": "NR ELECTRIC CO., LTD",
"B451F9": "NB Software",
"B45253": "Seagate Technology",
"B4527D": "Sony Mobile Communications AB",
"B4527E": "Sony Mobile Communications AB",
"B45570": "Borea",
"B45861": "CRemote, LLC",
"B45CA4": "Thing-talk Wireless Communication Technologies Corporation Limited",
"B461FF": "Lumigon A/S",
"B46238": "Exablox",
"B46293": "Samsung Electronics Co.,Ltd",
"B462AD": "raytest GmbH",
"B46698": "Zealabs srl",
"B467E9": "Qingdao GoerTek Technology Co., Ltd.",
"B4749F": "askey computer corp",
"B4750E": "Belkin International Inc.",
"B47F5E": "Foresight Manufacture (S) Pte Ltd",
"B48255": "Research Products Corporation",
"B4827B": "AKG Acoustics GmbH",
"B482C5": "Relay2, Inc.",
"B482FE": "Askey Computer Corp",
"B48547": "Amptown System Company GmbH",
"B48910": "Coster T.E. S.P.A.",
"B4944E": "WeTelecom Co., Ltd.",
"B49842": "zte corporation",
"B4994C": "Texas Instruments",
"B499BA": "Hewlett-Packard Company",
"B49DB4": "Axion Technologies Inc.",
"B49EAC": "Imagik Int'l Corp",
"B49EE6": "SHENZHEN TECHNOLOGY CO LTD",
"B4A4B5": "Zen Eye Co.,Ltd",
"B4A4E3": "CISCO SYSTEMS, INC.",
"B4A5A9": "MODI GmbH",
"B4A82B": "Histar Digital Electronics Co., Ltd.",
"B4A95A": "Avaya, Inc",
"B4AA4D": "Ensequence, Inc.",
"B4AB2C": "MtM Technology Corporation",
"B4B017": "Avaya, Inc",
"B4B362": "ZTE Corporation",
"B4B52F": "Hewlett Packard",
"B4B542": "Hubbell Power Systems, Inc.",
"B4B5AF": "Minsung Electronics",
"B4B676": "Intel Corporate",
"B4B88D": "Thuh Company",
"B4C44E": "VXL eTech Pvt Ltd",
"B4C799": "Motorola Solutions Inc.",
"B4C810": "UMPI Elettronica",
"B4CCE9": "PROSYST",
"B4CFDB": "Shenzhen Jiuzhou Electric Co.,LTD",
"B4D8A9": "BetterBots",
"B4D8DE": "iota Computing, Inc.",
"B4DD15": "ControlThings Oy Ab",
"B4DF3B": "Chromlech",
"B4DFFA": "Litemax Electronics Inc.",
"B4E0CD": "Fusion-io, Inc",
"B4E1EB": "PRIVATE",
"B4E9B0": "Cisco",
"B4ED19": "Pie Digital, Inc.",
"B4ED54": "Wohler Technologies",
"B4EEB4": "ASKEY COMPUTER CORP",
"B4EED4": "Texas Instruments",
"B4F0AB": "Apple",
"B4F2E8": "Pace plc",
"B4F323": "PETATEL INC.",
"B4FC75": "SEMA Electronics(HK) CO.,LTD",
"B4FE8C": "Centro Sicurezza Italia SpA",
"B80305": "Intel Corporate",
"B80415": "Bayan Audio",
"B80B9D": "ROPEX Industrie-Elektronik GmbH",
"B81413": "Keen High Holding(HK) Ltd.",
"B81619": "ARRIS Group, Inc.",
"B817C2": "Apple",
"B81999": "Nesys",
"B820E7": "Guangzhou Horizontal Information & Network Integration Co. Ltd",
"B82410": "Magneti Marelli Slovakia s.r.o.",
"B8241A": "SWEDA INFORMATICA LTDA",
"B8266C": "ANOV France",
"B826D4": "Furukawa Industrial S.A. Produtos El\u00e9tricos",
"B827EB": "Raspberry Pi Foundation",
"B8288B": "Parker Hannifin",
"B829F7": "Blaster Tech",
"B82A72": "Dell Inc",
"B82ADC": "EFR Europ\u00e4ische Funk-Rundsteuerung GmbH",
"B82CA0": "Honeywell HomMed",
"B830A8": "Road-Track Telematics Development",
"B836D8": "Videoswitch",
"B83861": "Cisco",
"B838CA": "Kyokko Tsushin System CO.,LTD",
"B83A7B": "Worldplay (Canada) Inc.",
"B83D4E": "Shenzhen Cultraview Digital Technology Co.,Ltd Shanghai Branch",
"B83E59": "Roku, Inc",
"B8415F": "ASP AG",
"B843E4": "Vlatacom",
"B847C6": "SanJet Technology Corp.",
"B85510": "Zioncom Electronics (Shenzhen) Ltd.",
"B85810": "NUMERA, INC.",
"B85AF7": "Ouya, Inc",
"B85AFE": "Handaer Communication Technology (Beijing) Co., Ltd",
"B85E7B": "Samsung Electronics Co.,Ltd",
"B86091": "Onnet Technologies and Innovations LLC",
"B8616F": "Accton Wireless Broadband(AWB), Corp.",
"B8621F": "CISCO SYSTEMS, INC.",
"B863BC": "ROBOTIS, Co, Ltd",
"B86491": "CK Telecom Ltd",
"B8653B": "Bolymin, Inc.",
"B86B23": "Toshiba",
"B86CE8": "Samsung Electronics Co.,Ltd",
"B870F4": "COMPAL INFORMATION (KUNSHAN) CO., LTD.",
"B87424": "Viessmann Elektronik GmbH",
"B87447": "Convergence Technologies",
"B875C0": "PayPal, Inc.",
"B8763F": "Hon Hai Precision Ind. Co.,Ltd.",
"B877C3": "Decagon Devices, Inc.",
"B8782E": "Apple",
"B8797E": "Secure Meters (UK) Limited",
"B87AC9": "Siemens Ltd.",
"B87CF2": "Aerohive Networks Inc.",
"B8871E": "Good Mind Industries Co., Ltd.",
"B887A8": "Step Ahead Innovations Inc.",
"B888E3": "COMPAL INFORMATION (KUNSHAN) CO., LTD",
"B889CA": "ILJIN ELECTRIC Co., Ltd.",
"B88A60": "Intel Corporate",
"B88D12": "Apple",
"B88E3A": "Infinite Technologies JLT",
"B88F14": "Analytica GmbH",
"B8921D": "BG T&A",
"B894D2": "Retail Innovation HTT AB",
"B89674": "AllDSP GmbH & Co. KG",
"B8975A": "BIOSTAR Microtech Int'l Corp.",
"B898B0": "Atlona Inc.",
"B898F7": "Gionee Communication Equipment Co,Ltd.ShenZhen",
"B89AED": "OceanServer Technology, Inc",
"B89BC9": "SMC Networks Inc",
"B8A386": "D-Link International",
"B8A3E0": "BenRui Technology Co.,Ltd",
"B8A8AF": "Logic S.p.A.",
"B8AC6F": "Dell Inc",
"B8AE6E": "Nintendo Co., Ltd.",
"B8AF67": "Hewlett-Packard Company",
"B8B1C7": "BT&COM CO.,LTD",
"B8B42E": "Gionee Communication Equipment Co,Ltd.ShenZhen",
"B8B7D7": "2GIG Technologies",
"B8B94E": "Shenzhen iBaby Labs, Inc.",
"B8BA68": "Xi'an Jizhong Digital Communication Co.,Ltd",
"B8BA72": "Cynove",
"B8BB6D": "ENERES Co.,Ltd.",
"B8BEBF": "CISCO SYSTEMS, INC.",
"B8BF83": "Intel Corporate",
"B8C1A2": "Dragon Path Technologies Co., Limited",
"B8C46F": "PRIMMCON INDUSTRIES INC",
"B8C68E": "Samsung Electronics Co.,Ltd",
"B8C716": "Fiberhome Telecommunication Technologies Co.,LTD",
"B8C75D": "Apple",
"B8C855": "Shanghai GBCOM Communication Technology Co.,Ltd.",
"B8CA3A": "Dell Inc",
"B8CD93": "Penetek, Inc",
"B8CDA7": "Maxeler Technologies Ltd.",
"B8D06F": "GUANGZHOU HKUST FOK YING TUNG RESEARCH INSTITUTE",
"B8D49D": "M Seven System Ltd.",
"B8D9CE": "Samsung Electronics",
"B8DAF1": "Strahlenschutz- Entwicklungs- und Ausruestungsgesellschaft mbH",
"B8DAF7": "Advanced Photonics, Inc.",
"B8DC87": "IAI Corporation",
"B8DF6B": "SpotCam Co., Ltd.",
"B8E589": "Payter BV",
"B8E625": "2Wire",
"B8E779": "9Solutions Oy",
"B8E856": "Apple",
"B8E937": "Sonos, Inc.",
"B8EE65": "Liteon Technology Corporation",
"B8EE79": "YWire Technologies, Inc.",
"B8F4D0": "Herrmann Ultraschalltechnik GmbH & Co. Kg",
"B8F5E7": "WayTools, LLC",
"B8F6B1": "Apple",
"B8F732": "Aryaka Networks Inc",
"B8F828": "Changshu Gaoshida Optoelectronic Technology Co. Ltd.",
"B8F934": "Sony Ericsson Mobile Communications AB",
"B8FD32": "Zhejiang ROICX Microelectronics",
"B8FF61": "Apple",
"B8FF6F": "Shanghai Typrotech Technology Co.Ltd",
"B8FFFE": "Texas Instruments",
"BC0200": "Stewart Audio",
"BC0543": "AVM GmbH",
"BC0DA5": "Texas Instruments",
"BC0F2B": "FORTUNE TECHGROUP CO.,LTD",
"BC125E": "Beijing WisVideo INC.",
"BC1401": "Hitron Technologies. Inc",
"BC14EF": "ITON Technology Limited",
"BC15A6": "Taiwan Jantek Electronics,Ltd.",
"BC1665": "Cisco",
"BC1A67": "YF Technology Co., Ltd",
"BC20A4": "Samsung Electronics",
"BC20BA": "Inspur (Shandong) Electronic Information Co., Ltd",
"BC25F0": "3D Display Technologies Co., Ltd.",
"BC261D": "HONG KONG TECON TECHNOLOGY",
"BC2846": "NextBIT Computing Pvt. Ltd.",
"BC28D6": "Rowley Associates Limited",
"BC2B6B": "Beijing Haier IC Design Co.,Ltd",
"BC2BD7": "Revogi Innovation Co., Ltd.",
"BC2C55": "Bear Flag Design, Inc.",
"BC2D98": "ThinGlobal LLC",
"BC305B": "Dell Inc.",
"BC307D": "Wistron Neweb Corp.",
"BC3400": "IEEE REGISTRATION AUTHORITY - Please see MAM public listing for more information.",
"BC35E5": "Hydro Systems Company",
"BC38D2": "Pandachip Limited",
"BC39A6": "CSUN System Technology Co.,LTD",
"BC3BAF": "Apple",
"BC3E13": "Accordance Systems Inc.",
"BC4100": "Codaco Electronic s.r.o.",
"BC4377": "Hang Zhou Huite Technology Co.,ltd.",
"BC4486": "Samsung Electronics Co.,Ltd",
"BC4760": "Samsung Electronics Co.,Ltd",
"BC4B79": "SensingTek",
"BC4E3C": "CORE STAFF CO., LTD.",
"BC51FE": "Swann Communications Pty Ltd",
"BC52B7": "Apple",
"BC5FF4": "ASRock Incorporation",
"BC629F": "Telenet Systems P. Ltd.",
"BC6778": "Apple",
"BC6784": "Environics Oy",
"BC6A16": "tdvine",
"BC6A29": "Texas Instruments",
"BC6E76": "Green Energy Options Ltd",
"BC71C1": "XTrillion, Inc.",
"BC72B1": "Samsung Electronics Co.,Ltd",
"BC764E": "Rackspace US, Inc.",
"BC7670": "Shenzhen Huawei Communication Technologies Co., Ltd",
"BC7737": "Intel Corporate",
"BC779F": "SBM Co., Ltd.",
"BC79AD": "Samsung Electronics Co.,Ltd",
"BC7DD1": "Radio Data Comms",
"BC811F": "Ingate Systems",
"BC8199": "BASIC Co.,Ltd.",
"BC83A7": "SHENZHEN CHUANGWEI-RGB ELECTRONICS CO.,LT",
"BC851F": "Samsung Electronics",
"BC8556": "Hon Hai Precision Ind. Co.,Ltd.",
"BC8893": "VILLBAU Ltd.",
"BC8B55": "NPP ELIKS America Inc. DBA T&M Atlantic",
"BC8CCD": "Samsung Electro Mechanics co.,LTD.",
"BC8D0E": "Alcatel-Lucent",
"BC926B": "Apple",
"BC9680": "Shenzhen Gongjin Electronics Co.,Ltd",
"BC9889": "Fiberhome Telecommunication Tech.Co.,Ltd.",
"BC99BC": "FonSee Technology Inc.",
"BC9DA5": "DASCOM Europe GmbH",
"BCA4E1": "Nabto",
"BCA9D6": "Cyber-Rain, Inc.",
"BCAEC5": "ASUSTek COMPUTER INC.",
"BCB181": "SHARP CORPORATION",
"BCB1F3": "Samsung Electronics",
"BCB852": "Cybera, Inc.",
"BCBAE1": "AREC Inc.",
"BCBBC9": "Kellendonk Elektronik GmbH",
"BCC168": "DinBox Sverige AB",
"BCC23A": "Thomson Video Networks",
"BCC61A": "SPECTRA EMBEDDED SYSTEMS",
"BCC6DB": "Nokia Corporation",
"BCC810": "Cisco SPVTG",
"BCCAB5": "ARRIS Group, Inc.",
"BCCD45": "VOISMART",
"BCCFCC": "HTC Corporation",
"BCD177": "TP-LINK TECHNOLOGIES CO.,LTD.",
"BCD5B6": "d2d technologies",
"BCD940": "ASR Co,.Ltd.",
"BCE09D": "Eoslink",
"BCE59F": "WATERWORLD Technology Co.,LTD",
"BCEA2B": "CityCom GmbH",
"BCEE7B": "ASUSTek COMPUTER INC.",
"BCF2AF": "devolo AG",
"BCF5AC": "LG Electronics",
"BCF61C": "Geomodeling Wuxi Technology Co. Ltd.",
"BCF685": "D-Link International",
"BCFE8C": "Altronic, LLC",
"BCFFAC": "TOPCON CORPORATION",
"C00D7E": "Additech, Inc.",
"C011A6": "Fort-Telecom ltd.",
"C01242": "Alpha Security Products",
"C0143D": "Hon Hai Precision Ind. Co.,Ltd.",
"C01885": "Hon Hai Precision Ind. Co.,Ltd.",
"C01E9B": "Pixavi AS",
"C02250": "PRIVATE",
"C02506": "AVM GmbH",
"C0255C": "Cisco",
"C027B9": "Beijing National Railway Research & Design Institute of Signal & Communication Co., Ltd.",
"C02973": "Audyssey Laboratories Inc.",
"C029F3": "XySystem",
"C02BFC": "iNES. applied informatics GmbH",
"C02C7A": "Shen Zhen Horn audio Co., Ltd.",
"C034B4": "Gigastone Corporation",
"C03580": "A&R TECH",
"C035BD": "Velocytech Aps",
"C038F9": "Nokia Danmark A/S",
"C03B8F": "Minicom Digital Signage",
"C03D46": "Shanghai Mochui Network Technology Co., Ltd",
"C03E0F": "BSkyB Ltd",
"C03F0E": "NETGEAR",
"C03F2A": "Biscotti, Inc.",
"C03FD5": "Elitegroup Computer Systems Co., LTD",
"C041F6": "LG Electronics Inc",
"C04301": "Epec Oy",
"C044E3": "Shenzhen Sinkna Electronics Co., LTD",
"C046A1": "Telvent",
"C0493D": "MAITRISE TECHNOLOGIQUE",
"C04A00": "TP-LINK TECHNOLOGIES CO.,LTD.",
"C04DF7": "SERELEC",
"C056E3": "Hangzhou Hikvision Digital Technology Co.,Ltd.",
"C057BC": "Avaya, Inc",
"C058A7": "Pico Systems Co., Ltd.",
"C05E6F": "V. Stonkaus firma \"Kodinis Raktas\"",
"C05E79": "SHENZHEN HUAXUN ARK TECHNOLOGIES CO.,LTD",
"C06118": "TP-LINK TECHNOLOGIES CO.,LTD.",
"C0626B": "CISCO SYSTEMS, INC.",
"C06394": "Apple",
"C064C6": "Nokia Corporation",
"C06599": "Samsung Electronics Co.,Ltd",
"C067AF": "Cisco",
"C06C0F": "Dobbs Stanford",
"C06C6D": "MagneMotion, Inc.",
"C07BBC": "Cisco",
"C07E40": "SHENZHEN XDK COMMUNICATION EQUIPMENT CO.,LTD",
"C08170": "Effigis GeoSolutions",
"C0830A": "2Wire",
"C0847A": "Apple",
"C0885B": "SnD Tech Co., Ltd.",
"C08ADE": "Ruckus Wireless",
"C08B6F": "S I Sistemas Inteligentes Eletr\u00f4nicos Ltda",
"C08C60": "Cisco",
"C09132": "Patriot Memory",
"C09134": "ProCurve Networking by HP",
"C098E5": "University of Michigan",
"C09C92": "COBY",
"C09D26": "Topicon HK Lmd.",
"C09F42": "Apple",
"C0A0BB": "D-Link International",
"C0A0C7": "FAIRFIELD INDUSTRIES",
"C0A0DE": "Multi Touch Oy",
"C0A0E2": "Eden Innovations",
"C0A26D": "Abbott Point of Care",
"C0A364": "3D Systems Massachusetts",
"C0A39E": "EarthCam, Inc.",
"C0AA68": "OSASI Technos Inc.",
"C0AC54": "SAGEMCOM",
"C0B339": "Comigo Ltd.",
"C0B357": "Yoshiki Electronics Industry Ltd.",
"C0B8B1": "BitBox Ltd",
"C0BAE6": "Application Solutions (Electronics and Vision) Ltd",
"C0BD42": "ZPA Smart Energy a.s.",
"C0C1C0": "Cisco-Linksys, LLC",
"C0C3B6": "Automatic Systems",
"C0C520": "Ruckus Wireless",
"C0C569": "SHANGHAI LYNUC CNC TECHNOLOGY CO.,LTD",
"C0C687": "Cisco SPVTG",
"C0C946": "MITSUYA LABORATORIES INC.",
"C0CB38": "Hon Hai Precision Ind. Co.,Ltd.",
"C0CFA3": "Creative Electronics & Software, Inc.",
"C0D044": "SAGEMCOM",
"C0D962": "Askey Computer Corp.",
"C0DA74": "Hangzhou Sunyard Technology Co., Ltd.",
"C0DF77": "Conrad Electronic SE",
"C0E422": "Texas Instruments",
"C0E54E": "DENX Computer Systems GmbH",
"C0EAE4": "Sonicwall",
"C0F1C4": "Pacidal Corporation Ltd.",
"C0F79D": "Powercode",
"C0F8DA": "Hon Hai Precision Ind. Co.,Ltd.",
"C0F991": "GME Standard Communications P/L",
"C40142": "MaxMedia Technology Limited",
"C4017C": "Ruckus Wireless",
"C401B1": "SeekTech INC",
"C40415": "NETGEAR INC.,",
"C40528": "Huawei Technologies Co., Ltd",
"C4084A": "Alcatel-Lucent",
"C40938": "Fujian Star-net Communication Co., Ltd",
"C40ACB": "CISCO SYSTEMS, INC.",
"C40E45": "ACK Networks,Inc.",
"C40F09": "Hermes electronic GmbH",
"C4108A": "Ruckus Wireless",
"C4143C": "Cisco",
"C416FA": "Prysm Inc",
"C417FE": "Hon Hai Precision Ind. Co.,Ltd.",
"C4198B": "Dominion Voting Systems Corporation",
"C419EC": "Qualisys AB",
"C41ECE": "HMI Sources Ltd.",
"C421C8": "KYOCERA Corporation",
"C4237A": "WhizNets Inc.",
"C4242E": "Galvanic Applied Sciences Inc",
"C42628": "Airo Wireless",
"C42795": "Technicolor USA Inc.",
"C4291D": "KLEMSAN ELEKTRIK ELEKTRONIK SAN.VE TIC.AS.",
"C42C03": "Apple",
"C4346B": "Hewlett Packard",
"C436DA": "Rusteletech Ltd.",
"C438D3": "TAGATEC CO.,LTD",
"C4393A": "SMC Networks Inc",
"C43A9F": "Siconix Inc.",
"C43C3C": "CYBELEC SA",
"C43DC7": "NETGEAR",
"C4438F": "LG Electronics",
"C44567": "SAMBON PRECISON and ELECTRONICS",
"C445EC": "Shanghai Yali Electron Co.,LTD",
"C44619": "Hon Hai Precision Ind. Co.,Ltd.",
"C44838": "Satcom Direct, Inc.",
"C44AD0": "FIREFLIES SYSTEMS",
"C44B44": "Omniprint Inc.",
"C44E1F": "BlueN",
"C44EAC": "Shenzhen Shiningworth Technology Co., Ltd.",
"C45006": "Samsung Electronics Co.,Ltd",
"C45444": "QUANTA COMPUTER INC.",
"C455A6": "Cadac Holdings Ltd",
"C455C2": "Bach-Simpson",
"C45600": "Galleon Embedded Computing",
"C458C2": "Shenzhen TATFOOK Technology Co., Ltd.",
"C45976": "Fugoo Coorporation",
"C45DD8": "HDMI Forum",
"C46044": "Everex Electronics Limited",
"C4626B": "ZPT Vigantice",
"C462EA": "Samsung Electronics Co.,Ltd",
"C46354": "U-Raku, Inc.",
"C46413": "CISCO SYSTEMS, INC.",
"C467B5": "Libratone A/S",
"C46AB7": "Xiaomi Technology,Inc.",
"C46BB4": "myIDkey",
"C46DF1": "DataGravity",
"C46E1F": "TP-LINK TECHNOLOGIES CO.,LTD",
"C47130": "Fon Technology S.L.",
"C471FE": "CISCO SYSTEMS, INC.",
"C4731E": "Samsung Eletronics Co., Ltd",
"C47B2F": "Beijing JoinHope Image Technology Ltd.",
"C47BA3": "NAVIS Inc.",
"C47D4F": "CISCO SYSTEMS, INC.",
"C47DCC": "Motorola Solutions Inc.",
"C47DFE": "A.N. Solutions GmbH",
"C47F51": "Inventek Systems",
"C4823F": "Fujian Newland Auto-ID Tech. Co,.Ltd.",
"C4824E": "Changzhou Uchip Electronics Co., LTD.",
"C48508": "Intel Corporate",
"C488E5": "Samsung Electronics Co.,Ltd",
"C4913A": "Shenzhen Sanland Electronic Co., ltd.",
"C49300": "8Devices",
"C49313": "100fio networks technology llc",
"C49380": "Speedytel technology",
"C495A2": "SHENZHEN WEIJIU INDUSTRY AND TRADE DEVELOPMENT CO., LTD",
"C49805": "Minieum Networks, Inc",
"C4A81D": " D-Link International",
"C4AAA1": "SUMMIT DEVELOPMENT, spol.s r.o.",
"C4AD21": "MEDIAEDGE Corporation",
"C4B512": "General Electric Digital Energy",
"C4BA99": "I+ME Actia Informatik und Mikro-Elektronik GmbH",
"C4C0AE": "MIDORI ELECTRONIC CO., LTD.",
"C4C19F": "National Oilwell Varco Instrumentation, Monitoring, and Optimization (NOV IMO)",
"C4C755": "Beijing HuaqinWorld Technology Co.,Ltd",
"C4C919": "Energy Imports Ltd",
"C4C9EC": "D&D GROUP sp. z o.o.",
"C4CAD9": "Hangzhou H3C Technologies Co., Limited",
"C4CD45": "Beijing Boomsense Technology CO.,LTD.",
"C4D489": "JiangSu Joyque Information Industry Co.,Ltd",
"C4D655": "Tercel technology co.,ltd",
"C4D987": "Intel Corporate",
"C4DA26": "NOBLEX SA",
"C4E032": "IEEE 1904.1 Working Group",
"C4E17C": "U2S co.",
"C4E7BE": "SCSpro Co.,Ltd",
"C4E92F": "AB Sciex",
"C4E984": "TP-LINK TECHNOLOGIES CO.,LTD.",
"C4EBE3": "RRCN SAS",
"C4EDBA": "Texas Instruments",
"C4EEAE": "VSS Monitoring",
"C4EEF5": "Oclaro, Inc.",
"C4F464": "Spica international",
"C4F57C": "Brocade Communications Systems, Inc.",
"C4FCE4": "DishTV NZ Ltd",
"C80258": "ITW GSE ApS",
"C802A6": "Beijing Newmine Technology",
"C80718": "TDSi",
"C80AA9": "Quanta Computer Inc.",
"C80E77": "Le Shi Zhi Xin Electronic Technology (Tianjin) Co.,Ltd",
"C80E95": "OmniLync Inc.",
"C81479": "Samsung Electronics Co.,Ltd",
"C816BD": "HISENSE ELECTRIC CO.,LTD.",
"C819F7": "Samsung Electronics Co.,Ltd",
"C81AFE": "DLOGIC GmbH",
"C81E8E": "ADV Security (S) Pte Ltd",
"C81F66": "Dell Inc",
"C8208E": "Storagedata",
"C8292A": "Barun Electronics",
"C82A14": "Apple",
"C82E94": "Halfa Enterprise Co., Ltd.",
"C83168": "eZEX corporation",
"C83232": "Hunting Innova",
"C8334B": "Apple",
"C835B8": "Ericsson, EAB/RWI/K",
"C83A35": "Tenda Technology Co., Ltd.",
"C83B45": "JRI-Maxant",
"C83D97": "Nokia Corporation",
"C83E99": "Texas Instruments",
"C83EA7": "KUNBUS GmbH",
"C84529": "IMK Networks Co.,Ltd",
"C84544": "Shanghai Enlogic Electric Technology Co., Ltd.",
"C848F5": "MEDISON Xray Co., Ltd",
"C84C75": "CISCO SYSTEMS, INC.",
"C85645": "Intermas France",
"C85663": "Sunflex Europe GmbH",
"C86000": "ASUSTek COMPUTER INC.",
"C864C7": "zte corporation",
"C86C1E": "Display Systems Ltd",
"C86C87": "Zyxel Communications Corp",
"C86CB6": "Optcom Co., Ltd.",
"C86F1D": "Apple",
"C87248": "Aplicom Oy",
"C87B5B": "zte corporation",
"C87CBC": "Valink Co., Ltd.",
"C87D77": "Shenzhen Kingtech Communication Equipment Co.,Ltd",
"C87E75": "Samsung Electronics Co.,Ltd",
"C88439": "Sunrise Technologies",
"C88447": "Beautiful Enterprise Co., Ltd",
"C8873B": "Net Optics",
"C88A83": "Dongguan HuaHong Electronics Co.,Ltd",
"C88B47": "Nolangroup S.P.A con Socio Unico",
"C8903E": "Pakton Technologies",
"C89346": "MXCHIP Company Limited",
"C89383": "Embedded Automation, Inc.",
"C894D2": "Jiangsu Datang Electronic Products Co., Ltd",
"C8979F": "Nokia Corporation",
"C89C1D": "CISCO SYSTEMS, INC.",
"C89CDC": "ELITEGROUP COMPUTER SYSTEM CO., LTD.",
"C89F1D": "SHENZHEN COMMUNICATION TECHNOLOGIES CO.,LTD",
"C89F42": "VDII Innovation AB",
"C8A030": "Texas Instruments",
"C8A1B6": "Shenzhen Longway Technologies Co., Ltd",
"C8A1BA": "Neul Ltd",
"C8A620": "Nebula, Inc",
"C8A70A": "Verizon Business",
"C8A729": "SYStronics Co., Ltd.",
"C8AA21": "ARRIS Group, Inc.",
"C8AACC": "PRIVATE",
"C8AE9C": "Shanghai TYD Elecronic Technology Co. Ltd",
"C8AF40": "marco Systemanalyse und Entwicklung GmbH",
"C8B373": "Cisco-Linksys, LLC",
"C8B5B7": "Apple",
"C8BA94": "Samsung Electro Mechanics co., LTD.",
"C8BBD3": "Embrane",
"C8BCC8": "Apple",
"C8BE19": "D-Link International",
"C8C126": "ZPM Industria e Comercio Ltda",
"C8C13C": "RuggedTek Hangzhou Co., Ltd",
"C8C791": "Zero1.tv GmbH",
"C8CBB8": "Hewlett Packard",
"C8CD72": "SAGEMCOM",
"C8D10B": "Nokia Corporation",
"C8D15E": "Huawei Technologies Co., Ltd",
"C8D1D1": "AGAiT Technology Corporation",
"C8D2C1": "Jetlun (Shenzhen) Corporation",
"C8D3A3": "D-Link International",
"C8D429": "Muehlbauer AG",
"C8D590": "FLIGHT DATA SYSTEMS",
"C8D5FE": "Shenzhen Zowee Technology Co., Ltd",
"C8D719": "Cisco Consumer Products, LLC",
"C8DDC9": "Lenovo Mobile Communication Technology Ltd.",
"C8DE51": "Integra Networks, Inc.",
"C8DF7C": "Nokia Corporation",
"C8E0EB": "Apple",
"C8E1A7": "Vertu Corporation Limited",
"C8E42F": "Technical Research Design and Development",
"C8EE08": "TANGTOP TECHNOLOGY CO.,LTD",
"C8EE75": "Pishion International Co. Ltd",
"C8EEA6": "Shenzhen SHX Technology Co., Ltd",
"C8EF2E": "Beijing Gefei Tech. Co., Ltd",
"C8F36B": "Yamato Scale Co.,Ltd.",
"C8F386": "Shenzhen Xiaoniao Technology Co.,Ltd",
"C8F406": "Avaya, Inc",
"C8F650": "Apple",
"C8F68D": "S.E.TECHNOLOGIES LIMITED",
"C8F704": "Building Block Video",
"C8F733": "Intel Corporate",
"C8F981": "Seneca s.r.l.",
"C8F9F9": "CISCO SYSTEMS, INC.",
"C8FB26": "Cisco SPVTG",
"C8FE30": "Bejing DAYO Mobile Communication Technology Ltd.",
"C8FF77": "Dyson Limited",
"CC0080": "BETTINI SRL",
"CC047C": "G-WAY Microwave",
"CC04B4": "Select Comfort",
"CC051B": "Samsung Electronics Co.,Ltd",
"CC07AB": "Samsung Electronics Co.,Ltd",
"CC07E4": "Lenovo Mobile Communication Technology Ltd.",
"CC08E0": "Apple",
"CC09C8": "IMAQLIQ LTD",
"CC0CDA": "Miljovakt AS",
"CC0DEC": "Cisco SPVTG",
"CC14A6": "Yichun MyEnergy Domain, Inc",
"CC187B": "Manzanita Systems, Inc.",
"CC1AFA": "zte corporation",
"CC1EFF": "Metrological Group BV",
"CC2218": "InnoDigital Co., Ltd.",
"CC262D": "Verifi, LLC",
"CC2A80": "Micro-Biz intelligence solutions Co.,Ltd",
"CC2D8C": "LG ELECTRONICS INC",
"CC33BB": "SAGEMCOM SAS",
"CC3429": "TP-LINK TECHNOLOGIES CO.,LTD.",
"CC34D7": "GEWISS S.P.A.",
"CC3540": "Technicolor USA Inc.",
"CC398C": "Shiningtek",
"CC3A61": "SAMSUNG ELECTRO MECHANICS CO., LTD.",
"CC3C3F": "SA.S.S. Datentechnik AG",
"CC3E5F": "Hewlett Packard",
"CC43E3": "Trump s.a.",
"CC4703": "Intercon Systems Co., Ltd.",
"CC4AE1": "Fourtec -Fourier Technologies",
"CC4BFB": "Hellberg Safety AB",
"CC4E24": "Brocade Communications Systems, Inc.",
"CC501C": "KVH Industries, Inc.",
"CC5076": "Ocom Communications, Inc.",
"CC52AF": "Universal Global Scientific Industrial Co., Ltd.",
"CC53B5": "HUAWEI TECHNOLOGIES CO.,LTD",
"CC5459": "OnTime Networks AS",
"CC55AD": "RIM",
"CC593E": "TOUMAZ LTD",
"CC5C75": "Weightech Com. Imp. Exp. Equip. Pesagem Ltda",
"CC5D4E": "ZyXEL Communications Corporation",
"CC5D57": "Information System Research Institute,Inc.",
"CC60BB": "Empower RF Systems",
"CC65AD": "ARRIS Group, Inc.",
"CC69B0": "Global Traffic Technologies, LLC",
"CC6B98": "Minetec Wireless Technologies",
"CC6BF1": "Sound Masking Inc.",
"CC6DA0": "Roku, Inc.",
"CC6DEF": "TJK Tietolaite Oy",
"CC720F": "Viscount Systems Inc.",
"CC7498": "Filmetrics Inc.",
"CC7669": "SEETECH",
"CC785F": "Apple",
"CC7A30": "CMAX Wireless Co., Ltd.",
"CC7B35": "zte corporation",
"CC7D37": "ARRIS Group, Inc.",
"CC7EE7": "Panasonic AVC Networks Company",
"CC856C": "SHENZHEN MDK DIGITAL TECHNOLOGY CO.,LTD",
"CC89FD": "Nokia Corporation",
"CC8CE3": "Texas Instruments",
"CC9093": "Hansong Tehnologies",
"CC912B": "TE Connectivity Touch Solutions",
"CC944A": "Pfeiffer Vacuum GmbH",
"CC95D7": "VIZIO, Inc",
"CC96A0": "Shenzhen Huawei Communication Technologies Co., Ltd",
"CC9E00": "Nintendo Co., Ltd.",
"CC9F35": "Transbit Sp. z o.o.",
"CCA0E5": "DZG Metering GmbH",
"CCA374": "Guangdong Guanglian Electronic Technology Co.Ltd",
"CCA462": "ARRIS Group, Inc.",
"CCA614": "AIFA TECHNOLOGY CORP.",
"CCAF78": "Hon Hai Precision Ind. Co.,Ltd.",
"CCB255": "D-Link International",
"CCB3F8": "FUJITSU ISOTEC LIMITED",
"CCB55A": "Fraunhofer ITWM",
"CCB691": "NECMagnusCommunications",
"CCB888": "AnB Securite s.a.",
"CCB8F1": "EAGLE KINGDOM TECHNOLOGIES LIMITED",
"CCBD35": "Steinel GmbH",
"CCBE71": "OptiLogix BV",
"CCC104": "Applied Technical Systems",
"CCC3EA": "Motorola Mobility LLC",
"CCC50A": "SHENZHEN DAJIAHAO TECHNOLOGY CO.,LTD",
"CCC62B": "Tri-Systems Corporation",
"CCC8D7": "CIAS Elettronica srl",
"CCCC4E": "Sun Fountainhead USA. Corp",
"CCCC81": "HUAWEI TECHNOLOGIES CO.,LTD",
"CCCD64": "SM-Electronic GmbH",
"CCCE40": "Janteq Corp",
"CCD29B": "Shenzhen Bopengfa Elec&Technology CO.,Ltd",
"CCD539": "Cisco",
"CCD811": "Aiconn Technology Corporation",
"CCD9E9": "SCR Engineers Ltd.",
"CCE1D5": "Buffalo Inc.",
"CCE798": "My Social Stuff",
"CCE7DF": "American Magnetics, Inc.",
"CCE8AC": "SOYEA Technology Co.,Ltd.",
"CCEA1C": "DCONWORKS Co., Ltd",
"CCEED9": "Deto Mechatronic GmbH",
"CCEF48": "CISCO SYSTEMS, INC.",
"CCF3A5": "Chi Mei Communication Systems, Inc",
"CCF407": "EUKREA ELECTROMATIQUE SARL",
"CCF67A": "Ayecka Communication Systems LTD",
"CCF841": "Lumewave",
"CCF8F0": "Xi'an HISU Multimedia Technology Co.,Ltd.",
"CCF954": "Avaya, Inc",
"CCF9E8": "Samsung Electronics Co.,Ltd",
"CCFA00": "LG Electronics",
"CCFB65": "Nintendo Co., Ltd.",
"CCFC6D": "RIZ TRANSMITTERS",
"CCFCB1": "Wireless Technology, Inc.",
"CCFE3C": "Samsung Electronics",
"D00790": "Texas Instruments",
"D00EA4": "Porsche Cars North America",
"D0131E": "Sunrex Technology Corp",
"D0154A": "zte corporation",
"D0176A": "Samsung Electronics Co.,Ltd",
"D01AA7": "UniPrint",
"D01CBB": "Beijing Ctimes Digital Technology Co., Ltd.",
"D022BE": "Samsung Electro Mechanics co.,LTD.",
"D023DB": "Apple",
"D02788": "Hon Hai Precision Ind.Co.Ltd",
"D02C45": "littleBits Electronics, Inc.",
"D02DB3": "Huawei Technologies Co., Ltd",
"D03110": "Ingenic Semiconductor Co.,Ltd",
"D03761": "Texas Instruments",
"D03972": "Texas Instruments",
"D039B3": "ARRIS Group, Inc.",
"D046DC": "Southwest Research Institute",
"D04CC1": "SINTRONES Technology Corp.",
"D05099": "ASRock Incorporation",
"D05162": "Sony Mobile Communications AB",
"D052A8": "Physical Graph Corporation",
"D0542D": "Cambridge Industries(Group) Co.,Ltd.",
"D0574C": "CISCO SYSTEMS, INC.",
"D05785": "Pantech Co., Ltd.",
"D057A1": "Werma Signaltechnik GmbH & Co. KG",
"D05875": "Active Control Technology Inc.",
"D059C3": "CeraMicro Technology Corporation",
"D05A0F": "I-BT DIGITAL CO.,LTD",
"D05AF1": "Shenzhen Pulier Tech CO.,Ltd",
"D05FB8": "Texas Instruments",
"D05FCE": "Hitachi Data Systems",
"D0634D": "Meiko Maschinenbau GmbH & Co. KG",
"D063B4": "SolidRun Ltd.",
"D0667B": "Samsung Electronics Co., LTD",
"D067E5": "Dell Inc",
"D0699E": "LUMINEX Lighting Control Equipment",
"D069D0": "Verto Medical Solutions, LLC",
"D072DC": "Cisco",
"D0737F": "Mini-Circuits",
"D0738E": "DONG OH PRECISION CO., LTD.",
"D073D5": "LIFI LABS MANAGEMENT PTY LTD",
"D075BE": "Reno A&E",
"D07650": "IEEE REGISTRATION AUTHORITY - Please see MAM public listing for more information.",
"D07AB5": "Huawei Technologies Co., Ltd",
"D07DE5": "Forward Pay Systems, Inc.",
"D07E28": "Hewlett Packard",
"D07E35": "Intel Corporate",
"D08999": "APCON, Inc.",
"D08A55": "Skullcandy",
"D08B7E": "Passif Semiconductor",
"D08CB5": "Texas Instruments",
"D08CFF": "UPWIS AB",
"D093F8": "Stonestreet One LLC",
"D095C7": "Pantech Co., Ltd.",
"D09B05": "Emtronix",
"D09C30": "Foster Electric Company, Limited",
"D09D0A": "LINKCOM",
"D0A311": "Neuberger Geb\u00e4udeautomation GmbH",
"D0AEEC": "Alpha Networks Inc.",
"D0AFB6": "Linktop Technology Co., LTD",
"D0B33F": "SHENZHEN TINNO MOBILE TECHNOLOGY CO.,LTD.",
"D0B498": "Robert Bosch LLC Automotive Electronics",
"D0B523": "Bestcare Cloucal Corp.",
"D0B53D": "SEPRO ROBOTIQUE",
"D0BB80": "SHL Telemedicine International Ltd.",
"D0BD01": "DS International",
"D0BE2C": "CNSLink Co., Ltd.",
"D0C1B1": "Samsung Electronics Co.,Ltd",
"D0C282": "CISCO SYSTEMS, INC.",
"D0C42F": "Tamagawa Seiki Co.,Ltd.",
"D0C789": "Cisco",
"D0C7C0": "TP-LINK TECHNOLOGIES CO.,LTD.",
"D0CDE1": "Scientech Electronics",
"D0CF5E": "Energy Micro AS",
"D0D0FD": "CISCO SYSTEMS, INC.",
"D0D212": "K2NET Co.,Ltd.",
"D0D286": "Beckman Coulter K.K.",
"D0D3FC": "Mios, Ltd.",
"D0D412": "ADB Broadband Italia",
"D0D471": "MVTECH co., Ltd",
"D0D6CC": "Wintop",
"D0DB32": "Nokia Corporation",
"D0DF9A": "Liteon Technology Corporation",
"D0DFB2": "Genie Networks Limited",
"D0DFC7": "Samsung Electronics Co.,Ltd",
"D0E140": "Apple, Inc",
"D0E347": "Yoga",
"D0E40B": "Wearable Inc.",
"D0E54D": "Pace plc",
"D0E782": "Azurewave Technologies, Inc.",
"D0EB03": "Zhehua technology limited",
"D0EB9E": "Seowoo Inc.",
"D0F0DB": "Ericsson",
"D0F27F": "SteadyServ Technoligies, LLC",
"D0F73B": "Helmut Mauell GmbH",
"D0FF50": "Texas Instruments, Inc",
"D4000D": "Phoenix Broadband Technologies, LLC.",
"D40057": "MC Technologies GmbH",
"D40129": "Broadcom Corporation",
"D4016D": "TP-LINK TECHNOLOGIES CO.,LTD.",
"D4024A": "Delphian Systems LLC",
"D40BB9": "Solid Semecs bv.",
"D40FB2": "Applied Micro Electronics AME bv",
"D41090": "iNFORM Systems AG",
"D410CF": "Huanshun Network Science and Technology Co., Ltd.",
"D411D6": "ShotSpotter, Inc.",
"D41296": "Anobit Technologies Ltd.",
"D412BB": "Quadrant Components Inc. Ltd",
"D4136F": "Asia Pacific Brands",
"D41C1C": "RCF S.P.A.",
"D41E35": "TOHO Electronics INC.",
"D41F0C": "TVI Vision Oy",
"D4206D": "HTC Corporation",
"D42122": "Sercomm Corporation",
"D4223F": "Lenovo Mobile Communication Technology Ltd.",
"D4224E": "Alcatel Lucent",
"D42751": "Infopia Co., Ltd",
"D428B2": "ioBridge, Inc.",
"D429EA": "Zimory GmbH",
"D42C3D": "Sky Light Digital Limited",
"D42F23": "Akenori PTE Ltd",
"D4319D": "Sinwatec",
"D43A65": "IGRS Engineering Lab Ltd.",
"D43AE9": "DONGGUAN ipt INDUSTRIAL CO., LTD",
"D43D67": "Carma Industries Inc.",
"D43D7E": "Micro-Star Int'l Co, Ltd",
"D443A8": "Changzhou Haojie Electric Co., Ltd.",
"D44B5E": "TAIYO YUDEN CO., LTD.",
"D44C24": "Vuppalamritha Magnetic Components LTD",
"D44C9C": "Shenzhen YOOBAO Technology Co.Ltd",
"D44CA7": "Informtekhnika & Communication, LLC",
"D44F80": "Kemper Digital GmbH",
"D4507A": "CEIVA Logic, Inc",
"D45251": "IBT Ingenieurbureau Broennimann Thun",
"D45297": "nSTREAMS Technologies, Inc.",
"D453AF": "VIGO System S.A.",
"D45AB2": "Galleon Systems",
"D45C70": "Wireless Gigabit Alliance",
"D45D42": "Nokia Corporation",
"D464F7": "CHENGDU USEE DIGITAL TECHNOLOGY CO., LTD",
"D466A8": "Riedo Networks GmbH",
"D46761": "SAHAB TECHNOLOGY",
"D467E7": "Fiberhome Telecommunication Tech.Co.,Ltd.",
"D46867": "Neoventus Design Group",
"D46A91": "Snap AV",
"D46AA8": "HUAWEI TECHNOLOGIES CO.,LTD",
"D46CBF": "Goodrich ISR",
"D46CDA": "CSM GmbH",
"D46E5C": "Huawei Technologies Co., Ltd",
"D46F42": "WAXESS USA Inc",
"D479C3": "Cameronet GmbH & Co. KG",
"D47B75": "HARTING Electronics GmbH",
"D481CA": "iDevices, LLC",
"D4823E": "Argosy Technologies, Ltd.",
"D48564": "Hewlett-Packard Company",
"D487D8": "Samsung Electronics",
"D48890": "Samsung Electronics Co.,Ltd",
"D48CB5": "CISCO SYSTEMS, INC.",
"D48FAA": "Sogecam Industrial, S.A.",
"D491AF": "Electroacustica General Iberica, S.A.",
"D49398": "Nokia Corporation",
"D493A0": "Fidelix Oy",
"D4945A": "COSMO CO., LTD",
"D494A1": "Texas Instruments",
"D49524": "Clover Network, Inc.",
"D496DF": "SUNGJIN C&T CO.,LTD",
"D4970B": "XIAOMI CORPORATION",
"D49A20": "Apple",
"D49C28": "JayBird Gear LLC",
"D49C8E": "University of FUKUI",
"D49E6D": "Wuhan Zhongyuan Huadian Science & Technology Co.,",
"D4A02A": "CISCO SYSTEMS, INC.",
"D4A425": "SMAX Technology Co., Ltd.",
"D4A499": "InView Technology Corporation",
"D4A928": "GreenWave Reality Inc",
"D4AAFF": "MICRO WORLD",
"D4AC4E": "BODi rS, LLC",
"D4AD2D": "Fiberhome Telecommunication Tech.Co.,Ltd.",
"D4AE52": "Dell Inc",
"D4B110": "HUAWEI TECHNOLOGIES CO.,LTD",
"D4B43E": "Messcomp Datentechnik GmbH",
"D4BED9": "Dell Inc",
"D4BF2D": "SE Controls Asia Pacific Ltd",
"D4BF7F": "UPVEL",
"D4C1FC": "Nokia Corporation",
"D4C766": "Acentic GmbH",
"D4C9EF": "Hewlett Packard",
"D4CA6D": "Routerboard.com",
"D4CA6E": "u-blox AG",
"D4CBAF": "Nokia Corporation",
"D4CEB8": "Enatel LTD",
"D4CFF9": "Shenzhen Sen5 Technology Co., Ltd.",
"D4D184": "ADB Broadband Italia",
"D4D249": "Power Ethernet",
"D4D50D": "Southwest Microwave, Inc",
"D4D748": "CISCO SYSTEMS, INC.",
"D4D898": "Korea CNO Tech Co., Ltd",
"D4D919": "GoPro",
"D4DF57": "Alpinion Medical Systems",
"D4E08E": "ValueHD Corporation",
"D4E32C": "S. Siedle & Sohne",
"D4E33F": "Alcatel-Lucent",
"D4E8B2": "Samsung Electronics",
"D4EA0E": "Avaya, Inc",
"D4EC0C": "Harley-Davidson Motor Company",
"D4EE07": "HIWIFI Co., Ltd.",
"D4F027": "Navetas Energy Management",
"D4F0B4": "Napco Security Technologies",
"D4F143": "IPROAD.,Inc",
"D4F63F": "IEA S.R.L.",
"D8004D": "Apple",
"D8052E": "Skyviia Corporation",
"D806D1": "Honeywell Fire System (Shanghai) Co,. Ltd.",
"D808F5": "Arcadia Networks Co. Ltd.",
"D809C3": "Cercacor Labs",
"D80DE3": "FXI TECHNOLOGIES AS",
"D8150D": "TP-LINK TECHNOLOGIES CO.,LTD.",
"D8160A": "Nippon Electro-Sensory Devices",
"D8182B": "Conti Temic Microelectronic GmbH",
"D819CE": "Telesquare",
"D81BFE": "TWINLINX CORPORATION",
"D81C14": "Compacta International, Ltd.",
"D81EDE": "B&W Group Ltd",
"D824BD": "CISCO SYSTEMS, INC.",
"D826B9": "Guangdong Coagent Electronics S &T Co., Ltd.",
"D8270C": "MaxTronic International Co., Ltd.",
"D828C9": "General Electric Consumer and Industrial",
"D82916": "Ascent Communication Technology",
"D82986": "Best Wish Technology LTD",
"D82A15": "Leitner SpA",
"D82A7E": "Nokia Corporation",
"D82D9B": "Shenzhen G.Credit Communication Technology Co., Ltd",
"D82DE1": "Tricascade Inc.",
"D83062": "Apple",
"D831CF": "Samsung Electronics Co.,Ltd",
"D8337F": "Office FA.com Co.,Ltd.",
"D842AC": "Shanghai Feixun Communication Co.,Ltd.",
"D84606": "Silicon Valley Global Marketing",
"D8490B": "HUAWEI TECHNOLOGIES CO.,LTD",
"D8492F": "CANON INC.",
"D84B2A": "Cognitas Technologies, Inc.",
"D850E6": "ASUSTek COMPUTER INC.",
"D8543A": "Texas Instruments",
"D857EF": "Samsung Electronics",
"D858D7": "CZ.NIC, z.s.p.o.",
"D85D4C": "TP-LINK Technologies Co.,Ltd.",
"D85D84": "CAx soft GmbH",
"D85DFB": "PRIVATE",
"D86194": "Objetivos y Sevicios de Valor An\u0303adido",
"D862DB": "Eno Inc.",
"D86595": "Toy's Myth Inc.",
"D866C6": "Shenzhen Daystar Technology Co.,ltd",
"D867D9": "CISCO SYSTEMS, INC.",
"D86960": "Steinsvik",
"D86BF7": "Nintendo Co., Ltd.",
"D86CE9": "SAGEMCOM SAS",
"D87157": "Lenovo Mobile Communication Technology Ltd.",
"D87533": "Nokia Corporation",
"D8760A": "Escort, Inc.",
"D878E5": "KUHN SA",
"D87988": "Hon Hai Precision Ind. Co., Ltd.",
"D87CDD": "SANIX INCORPORATED",
"D87EB1": "x.o.ware, inc.",
"D881CE": "AHN INC.",
"D88A3B": "UNIT-EM",
"D890E8": "Samsung Electronics Co.,Ltd",
"D8952F": "Texas Instruments",
"D89685": "GoPro",
"D89695": "Apple",
"D8973B": "Emerson Network Power Embedded Power",
"D89760": "C2 Development, Inc.",
"D8977C": "Grey Innovation",
"D89D67": "Hewlett Packard",
"D89DB9": "eMegatech International Corp.",
"D89E3F": "Apple",
"D8A25E": "Apple",
"D8AE90": "Itibia Technologies",
"D8AF3B": "Hangzhou Bigbright Integrated communications system Co.,Ltd",
"D8AFF1": "Panasonic Appliances Company",
"D8B02E": "Guangzhou Zonerich Business Machine Co., Ltd",
"D8B04C": "Jinan USR IOT Technology Co., Ltd.",
"D8B12A": "Panasonic Mobile Communications Co., Ltd.",
"D8B377": "HTC Corporation",
"D8B6C1": "NetworkAccountant, Inc.",
"D8B6D6": "Blu Tether Limited",
"D8B8F6": "Nantworks",
"D8B90E": "Triple Domain Vision Co.,Ltd.",
"D8BF4C": "Victory Concept Electronics Limited",
"D8C068": "Netgenetech.co.,ltd.",
"D8C3FB": "DETRACOM",
"D8C691": "Hichan Technology Corp.",
"D8C7C8": "Aruba Networks",
"D8C99D": "EA DISPLAY LIMITED",
"D8CF9C": "Apple",
"D8D1CB": "Apple",
"D8D27C": "JEMA ENERGY, SA",
"D8D385": "Hewlett-Packard Company",
"D8D43C": "Sony Corporation",
"D8D5B9": "Rainforest Automation, Inc.",
"D8D67E": "GSK CNC EQUIPMENT CO.,LTD",
"D8DA52": "APATOR S.A.",
"D8DCE9": "Kunshan Erlab ductless filtration system Co.,Ltd",
"D8DD5F": "BALMUDA Inc.",
"D8DDFD": "Texas Instruments",
"D8DECE": "ISUNG CO.,LTD",
"D8DF0D": "beroNet GmbH",
"D8E3AE": "CIRTEC MEDICAL SYSTEMS",
"D8E56D": "TCT Mobile Limited",
"D8E72B": "OnPATH Technologies",
"D8E743": "Wush, Inc",
"D8E952": "KEOPSYS",
"D8EB97": "TRENDnet, Inc.",
"D8EE78": "Moog Protokraft",
"D8F0F2": "Zeebo Inc",
"D8FC93": "Intel Corporate",
"D8FE8F": "IDFone Co., Ltd.",
"D8FEE3": "D-Link International",
"DC0265": "Meditech Kft",
"DC028E": "zte corporation",
"DC052F": "National Products Inc.",
"DC0575": "SIEMENS ENERGY AUTOMATION",
"DC05ED": "Nabtesco Corporation",
"DC07C1": "HangZhou QiYang Technology Co.,Ltd.",
"DC0B1A": "ADB Broadband Italia",
"DC0EA1": "COMPAL INFORMATION (KUNSHAN) CO., LTD",
"DC16A2": "Medtronic Diabetes",
"DC175A": "Hitachi High-Technologies Corporation",
"DC1792": "Captivate Network",
"DC1D9F": "U & B tech",
"DC1DD4": "Microstep-MIS spol. s r.o.",
"DC1EA3": "Accensus LLC",
"DC2008": "ASD Electronics Ltd",
"DC2A14": "Shanghai Longjing Technology Co.",
"DC2B61": "Apple",
"DC2B66": "InfoBLOCK S.A. de C.V.",
"DC2BCA": "Zera GmbH",
"DC2C26": "Iton Technology Limited",
"DC2E6A": "HCT. Co., Ltd.",
"DC309C": "Heyrex Limited",
"DC3350": "TechSAT GmbH",
"DC37D2": "Hunan HKT Electronic Technology Co., Ltd",
"DC38E1": "Juniper networks",
"DC3A5E": "Roku, Inc",
"DC3C2E": "Manufacturing System Insights, Inc.",
"DC3C84": "Ticom Geomatics, Inc.",
"DC3E51": "Solberg & Andersen AS",
"DC3EF8": "Nokia Corporation",
"DC4517": "ARRIS Group, Inc.",
"DC49C9": "CASCO SIGNAL LTD",
"DC4EDE": "SHINYEI TECHNOLOGY CO., LTD.",
"DC5726": "Power-One",
"DC5E36": "Paterson Technology",
"DC647C": "C.R.S. iiMotion GmbH",
"DC663A": "Apacer Technology Inc.",
"DC6F00": "Livescribe, Inc.",
"DC6F08": "Bay Storage Technology",
"DC7014": "PRIVATE",
"DC7144": "Samsung Electro Mechanics",
"DC7B94": "CISCO SYSTEMS, INC.",
"DC825B": "JANUS, spol. s r.o.",
"DC85DE": "Azurewave Technologies., inc.",
"DC86D8": "Apple, Inc",
"DC9B1E": "Intercom, Inc.",
"DC9B9C": "Apple",
"DC9C52": "Sapphire Technology Limited.",
"DC9FA4": "Nokia Corporation",
"DC9FDB": "Ubiquiti Networks, Inc.",
"DCA5F4": "Cisco",
"DCA6BD": "Beijing Lanbo Technology Co., Ltd.",
"DCA7D9": "Compressor Controls Corp",
"DCA8CF": "New Spin Golf, LLC.",
"DCA971": "Intel Corporate",
"DCA989": "MACANDC",
"DCAD9E": "GreenPriz",
"DCAE04": "CELOXICA Ltd",
"DCB058": "Burkert Werke GmbH",
"DCB4C4": "Microsoft XCG",
"DCBF90": "HUIZHOU QIAOXING TELECOMMUNICATION INDUSTRY CO.,LTD.",
"DCC0DB": "Shenzhen Kaiboer Technology Co., Ltd.",
"DCC101": "SOLiD Technologies, Inc.",
"DCC422": "Systembase Limited",
"DCC793": "Nokia Corporation",
"DCCBA8": "Explora Technologies Inc",
"DCCE41": "FE GLOBAL HONG KONG LIMITED",
"DCCEBC": "Shenzhen JSR Technology Co.,Ltd.",
"DCCF94": "Beijing Rongcheng Hutong Technology Co., Ltd.",
"DCD0F7": "Bentek Systems Ltd.",
"DCD2FC": "HUAWEI TECHNOLOGIES CO.,LTD",
"DCD321": "HUMAX co.,tld",
"DCD52A": "Sunny Heart Limited",
"DCD87F": "Shenzhen JoinCyber Telecom Equipment Ltd",
"DCDECA": "Akyllor",
"DCE2AC": "Lumens Digital Optics Inc.",
"DCE578": "Experimental Factory of Scientific Engineering and Special Design Department",
"DCE71C": "AUG Elektronik GmbH",
"DCF05D": "Letta Teknoloji",
"DCF110": "Nokia Corporation",
"DCF755": "SITRONIK",
"DCF858": "Lorent Networks, Inc.",
"DCFAD5": "STRONG Ges.m.b.H.",
"DCFB02": "Buffalo Inc.",
"E005C5": "TP-LINK Technologies Co.,Ltd.",
"E006E6": "Hon Hai Precision Ind. Co.,Ltd.",
"E00B28": "Inovonics",
"E00C7F": "Nintendo Co., Ltd.",
"E0143E": "Modoosis Inc.",
"E01877": "Fujitsu Limited",
"E01C41": "Aerohive Networks Inc.",
"E01CEE": "Bravo Tech, Inc.",
"E01D3B": "Cambridge Industries(Group) Co.,Ltd",
"E01E07": "Anite Telecoms US. Inc",
"E01F0A": "Xslent Energy Technologies. LLC",
"E0247F": "HUAWEI TECHNOLOGIES CO.,LTD",
"E02538": "Titan Pet Products",
"E02630": "Intrigue Technologies, Inc.",
"E02636": "Nortel Networks",
"E0271A": "TTC Next-generation Home Network System WG",
"E02A82": "Universal Global Scientific Industrial Co., Ltd.",
"E02F6D": "Cisco",
"E03005": "Alcatel-Lucent Shanghai Bell Co., Ltd",
"E031D0": "SZ Telstar CO., LTD",
"E036E3": "Stage One International Co., Ltd.",
"E039D7": "Plexxi, Inc.",
"E03C5B": "SHENZHEN JIAXINJIE ELECTRON CO.,LTD",
"E03E4A": "Cavanagh Group International",
"E03E7D": "data-complex GmbH",
"E03F49": "ASUSTek COMPUTER INC.",
"E0469A": "NETGEAR",
"E05597": "Emergent Vision Technologies Inc.",
"E056F4": "AxesNetwork Solutions inc.",
"E0589E": "Laerdal Medical",
"E05B70": "Innovid, Co., Ltd.",
"E05DA6": "Detlef Fink Elektronik & Softwareentwicklung",
"E05FB9": "CISCO SYSTEMS, INC.",
"E061B2": "HANGZHOU ZENOINTEL TECHNOLOGY CO., LTD",
"E06290": "Jinan Jovision Science & Technology Co., Ltd.",
"E063E5": "Sony Mobile Communications AB",
"E064BB": "DigiView S.r.l.",
"E06678": "Apple",
"E067B3": "C-Data Technology Co., Ltd",
"E06995": "PEGATRON CORPORATION",
"E0750A": "ALPS ERECTORIC CO.,LTD.",
"E0757D": "Motorola Mobility LLC",
"E07C62": "Whistle Labs, Inc.",
"E07F53": "TECHBOARD SRL",
"E07F88": "EVIDENCE Network SIA",
"E08177": "GreenBytes, Inc.",
"E087B1": "Nata-Info Ltd.",
"E08A7E": "Exponent",
"E08FEC": "REPOTEC CO., LTD.",
"E09153": "XAVi Technologies Corp.",
"E091F5": "NETGEAR",
"E09467": "Intel Corporate",
"E09579": "ORTHOsoft inc, d/b/a Zimmer CAS",
"E097F2": "Atomax Inc.",
"E09D31": "Intel Corporate",
"E09DB8": "PLANEX COMMUNICATIONS INC.",
"E0A198": "NOJA Power Switchgear Pty Ltd",
"E0A1D7": "SFR",
"E0A30F": "Pevco",
"E0A670": "Nokia Corporation",
"E0AAB0": "GENERAL VISION ELECTRONICS CO. LTD.",
"E0ABFE": "Orb Networks, Inc.",
"E0AE5E": "ALPS Co,. Ltd.",
"E0AEB2": "Bender GmbH & Co.KG",
"E0AEED": "LOENK",
"E0AF4B": "Pluribus Networks, Inc.",
"E0B2F1": "FN-LINK TECHNOLOGY LIMITED",
"E0B7B1": "Pace plc",
"E0B9A5": "Azurewave",
"E0B9BA": "Apple",
"E0BC43": "C2 Microsystems, Inc.",
"E0C286": "Aisai Communication Technology Co., Ltd.",
"E0C2B7": "Masimo Corporation",
"E0C3F3": "ZTE Corporation",
"E0C6B3": "MilDef AB",
"E0C79D": "Texas Instruments",
"E0C86A": "SHENZHEN TW-SCIE Co., Ltd",
"E0C922": "Jireh Energy Tech., Ltd.",
"E0C97A": "Apple",
"E0CA4D": "Shenzhen Unistar Communication Co.,LTD",
"E0CA94": "Askey Computer",
"E0CB1D": "PRIVATE",
"E0CB4E": "ASUSTek COMPUTER INC.",
"E0CBEE": "Samsung Electronics Co.,Ltd",
"E0CEC3": "ASKEY COMPUTER CORP",
"E0CF2D": "Gemintek Corporation",
"E0D10A": "Katoudenkikougyousyo co ltd",
"E0D1E6": "Aliph dba Jawbone",
"E0D31A": "EQUES Technology Co., Limited",
"E0D7BA": "Texas Instruments",
"E0D9A2": "Hippih aps",
"E0DADC": "JVC KENWOOD Corporation",
"E0DB55": "Dell Inc",
"E0DB88": "Open Standard Digital-IF Interface for SATCOM Systems",
"E0DCA0": "Siemens Electrical Apparatus Ltd., Suzhou Chengdu Branch",
"E0E631": "SNB TECHNOLOGIES LIMITED",
"E0E751": "Nintendo Co., Ltd.",
"E0E8E8": "Olive Telecommunication Pvt. Ltd",
"E0ED1A": "vastriver Technology Co., Ltd",
"E0EDC7": "Shenzhen Friendcom Technology Development Co., Ltd",
"E0EE1B": "Panasonic Automotive Systems Company of America",
"E0EF25": "Lintes Technology Co., Ltd.",
"E0F211": "Digitalwatt",
"E0F379": "Vaddio",
"E0F5C6": "Apple",
"E0F5CA": "CHENG UEI PRECISION INDUSTRY CO.,LTD.",
"E0F847": "Apple",
"E0F9BE": "Cloudena Corp.",
"E0FAEC": "Platan sp. z o.o. sp. k.",
"E40439": "TomTom Software Ltd",
"E4115B": "Hewlett Packard",
"E41218": "ShenZhen Rapoo Technology Co., Ltd.",
"E4121D": "Samsung Electronics Co.,Ltd",
"E41289": "topsystem Systemhaus GmbH",
"E41C4B": "V2 TECHNOLOGY, INC.",
"E41F13": "IBM Corp",
"E425E7": "Apple",
"E425E9": "Color-Chip",
"E42771": "Smartlabs",
"E42AD3": "Magneti Marelli S.p.A. Powertrain",
"E42C56": "Lilee Systems, Ltd.",
"E42D02": "TCT Mobile Limited",
"E42F26": "Fiberhome Telecommunication Tech.Co.,Ltd.",
"E42FF6": "Unicore communication Inc.",
"E432CB": "Samsung Electronics Co.,Ltd",
"E43593": "Hangzhou GoTo technology Co.Ltd",
"E435FB": "Sabre Technology (Hull) Ltd",
"E437D7": "HENRI DEPAEPE S.A.S.",
"E438F2": "Advantage Controls",
"E43FA2": "Wuxi DSP Technologies Inc.",
"E440E2": "Samsung Electronics Co.,Ltd",
"E441E6": "Ottec Technology GmbH",
"E446BD": "C&C TECHNIC TAIWAN CO., LTD.",
"E448C7": "Cisco SPVTG",
"E44C6C": "Shenzhen Guo Wei Electronic Co,. Ltd.",
"E44E18": "Gardasoft VisionLimited",
"E44F29": "MA Lighting Technology GmbH",
"E44F5F": "EDS Elektronik Destek San.Tic.Ltd.Sti",
"E455EA": "Dedicated Computing",
"E45614": "Suttle Apparatus",
"E457A8": "Stuart Manufacturing, Inc.",
"E45D95": "AVM GmbH",
"E46449": "ARRIS Group, Inc.",
"E467BA": "Danish Interpretation Systems A/S",
"E468A3": "HUAWEI TECHNOLOGIES CO.,LTD",
"E46C21": "messMa GmbH",
"E47185": "Securifi Ltd",
"E4751E": "Getinge Sterilization AB",
"E47723": "zte corporation",
"E4776B": "AARTESYS AG",
"E477D4": "Minrray Industry Co.,Ltd",
"E47CF9": "Samsung Electronics Co., LTD",
"E47D5A": "Beijing Hanbang Technology Corp.",
"E48184": "Alcatel-Lucent",
"E481B3": "Shenzhen ACT Industrial Co.,Ltd.",
"E48399": "ARRIS Group, Inc.",
"E48AD5": "RF WINDOW CO., LTD.",
"E48B7F": "Apple",
"E49069": "Rockwell Automation",
"E492E7": "Gridlink Tech. Co.,Ltd.",
"E492FB": "Samsung Electronics Co.,Ltd",
"E496AE": "ALTOGRAPHICS Inc.",
"E497F0": "Shanghai VLC Technologies Ltd. Co.",
"E498D6": "Apple, Inc",
"E4A5EF": "TRON LINK ELECTRONICS CO., LTD.",
"E4A7FD": "Cellco Partnership",
"E4AB46": "UAB Selteka",
"E4AD7D": "SCL Elements",
"E4AFA1": "HES-SO",
"E4B021": "Samsung Electronics Co.,Ltd",
"E4C146": "Objetivos y Servicios de Valor A",
"E4C63D": "Apple, Inc.",
"E4C6E6": "Mophie, LLC",
"E4C722": "Cisco",
"E4C806": "Ceiec Electric Technology Inc.",
"E4CE8F": "Apple",
"E4D332": "TP-LINK TECHNOLOGIES CO.,LTD.",
"E4D3F1": "Cisco",
"E4D53D": "Hon Hai Precision Ind. Co.,Ltd.",
"E4D71D": "Oraya Therapeutics",
"E4DD79": "En-Vision America, Inc.",
"E4E0C5": "Samsung Electronics Co., LTD",
"E4E409": "LEIFHEIT AG",
"E4EC10": "Nokia Corporation",
"E4EEFD": "MR&D Manufacturing",
"E4F365": "Time-O-Matic, Inc.",
"E4F3E3": "Shanghai iComhome Co.,Ltd.",
"E4F4C6": "NETGEAR",
"E4F7A1": "Datafox GmbH",
"E4FA1D": "PAD Peripheral Advanced Design Inc.",
"E4FFDD": "ELECTRON INDIA",
"E8039A": "Samsung Electronics CO., LTD",
"E8040B": "Apple",
"E80410": "PRIVATE",
"E80462": "CISCO SYSTEMS, INC.",
"E804F3": "Throughtek Co., Ltd.",
"E8056D": "Nortel Networks",
"E80688": "Apple",
"E8088B": "Huawei Technologies Co., Ltd",
"E80B13": "Akib Systems Taiwan, INC",
"E80C38": "DAEYOUNG INFORMATION SYSTEM CO., LTD",
"E80C75": "Syncbak, Inc.",
"E8102E": "Really Simple Software, Inc",
"E81132": "Samsung Electronics CO., LTD",
"E81324": "GuangZhou Bonsoninfo System CO.,LTD",
"E817FC": "NIFTY Corporation",
"E82877": "TMY Co., Ltd.",
"E828D5": "Cots Technology",
"E82AEA": "Intel Corporate",
"E82E24": "Out of the Fog Research LLC",
"E83935": "Hewlett Packard",
"E839DF": "Askey Computer",
"E83A97": "OCZ Technology Group",
"E83EB6": "RIM",
"E83EFB": "GEODESIC LTD.",
"E83EFC": "ARRIS Group, Inc.",
"E84040": "CISCO SYSTEMS, INC.",
"E840F2": "PEGATRON CORPORATION",
"E843B6": "QNAP Systems, Inc.",
"E8481F": "Advanced Automotive Antennas",
"E84E06": "EDUP INTERNATIONAL (HK) CO., LTD",
"E84E84": "Samsung Electronics Co.,Ltd",
"E84ECE": "Nintendo Co., Ltd.",
"E8516E": "TSMART Inc.",
"E8519D": "Yeonhab Precision Co.,LTD",
"E85484": "NEO INFORMATION SYSTEMS CO., LTD.",
"E856D6": "NCTech Ltd",
"E85AA7": "LLC Emzior",
"E85B5B": "LG ELECTRONICS INC",
"E85BF0": "Imaging Diagnostics",
"E85E53": "Infratec Datentechnik GmbH",
"E8611F": "Dawning Information Industry Co.,Ltd",
"E8617E": "Liteon Technology Corporation",
"E86183": "Black Diamond Advanced Technology, LLC",
"E86CDA": "Supercomputers and Neurocomputers Research Center",
"E86D52": "ARRIS Group, Inc.",
"E86D54": "Digit Mobile Inc",
"E86D6E": "Control & Display Systems Ltd t/a CDSRail",
"E8718D": "Elsys Equipamentos Eletronicos Ltda",
"E8757F": "FIRS Technologies(Shenzhen) Co., Ltd",
"E878A1": "BEOVIEW INTERCOM DOO",
"E87AF3": "S5 Tech S.r.l.",
"E880D8": "GNTEK Electronics Co.,Ltd.",
"E8892C": "ARRIS Group, Inc.",
"E88D28": "Apple",
"E88DF5": "ZNYX Networks, Inc.",
"E89218": "Arcontia International AB",
"E892A4": "LG Electronics",
"E8944C": "Cogent Healthcare Systems Ltd",
"E894F6": "TP-LINK TECHNOLOGIES CO.,LTD.",
"E8995A": "PiiGAB, Processinformation i Goteborg AB",
"E899C4": "HTC Corporation",
"E89A8F": "Quanta Computer Inc.",
"E89AFF": "Fujian Landi Commercial Equipment Co.,Ltd",
"E89D87": "Toshiba",
"E8A364": "Signal Path International / Peachtree Audio",
"E8A4C1": "Deep Sea Electronics PLC",
"E8ABFA": "Shenzhen Reecam Tech.Ltd.",
"E8B1FC": "Intel Corporate",
"E8B4AE": "Shenzhen C&D Electronics Co.,Ltd",
"E8B748": "CISCO SYSTEMS, INC.",
"E8BA70": "CISCO SYSTEMS, INC.",
"E8BB3D": "Sino Prime-Tech Limited",
"E8BBA8": "GUANGDONG OPPO MOBILE TELECOMMUNICATIONS CORP.,LTD.",
"E8BE81": "SAGEMCOM",
"E8C229": "H-Displays (MSC) Bhd",
"E8C320": "Austco Communication Systems Pty Ltd",
"E8CBA1": "Nokia Corporation",
"E8CC32": "Micronet LTD",
"E8CD2D": "Huawei Technologies Co., Ltd",
"E8CE06": "SkyHawke Technologies, LLC.",
"E8D0FA": "MKS Instruments Deutschland GmbH",
"E8D483": "ULTIMATE Europe Transportation Equipment GmbH",
"E8D4E0": "Beijing BenyWave Technology Co., Ltd.",
"E8DA96": "Zhuhai Tianrui Electrical Power Tech. Co., Ltd.",
"E8DAAA": "VideoHome Technology Corp.",
"E8DE27": "TP-LINK TECHNOLOGIES CO.,LTD.",
"E8DFF2": "PRF Co., Ltd.",
"E8E08F": "GRAVOTECH MARKING SAS",
"E8E0B7": "Toshiba",
"E8E1E2": "Energotest",
"E8E5D6": "Samsung Electronics Co.,Ltd",
"E8E732": "Alcatel-Lucent",
"E8E770": "Warp9 Tech Design, Inc.",
"E8E776": "Shenzhen Kootion Technology Co., Ltd",
"E8E875": "iS5 Communications Inc.",
"E8EA6A": "StarTech.com",
"E8EADA": "Denkovi Assembly Electroncs LTD",
"E8EDF3": "Cisco",
"E8EF89": "OPMEX Tech.",
"E8F1B0": "SAGEMCOM SAS",
"E8F226": "MILLSON CUSTOM SOLUTIONS INC.",
"E8F928": "RFTECH SRL",
"E8FC60": "ELCOM Innovations Private Limited",
"E8FCAF": "NETGEAR INC.,",
"EC0ED6": "ITECH INSTRUMENTS SAS",
"EC1120": "FloDesign Wind Turbine Corporation",
"EC14F6": "BioControl AS",
"EC172F": "TP-LINK TECHNOLOGIES CO., LTD.",
"EC1766": "Research Centre Module",
"EC1A59": "Belkin International Inc.",
"EC219F": "VidaBox LLC",
"EC2257": "JiangSu NanJing University Electronic Information Technology Co.,Ltd",
"EC233D": "Huawei Technologies Co., Ltd",
"EC2368": "IntelliVoice Co.,Ltd.",
"EC2AF0": "Ypsomed AG",
"EC2C49": "University of Tokyo",
"EC2E4E": "HITACHI-LG DATA STORAGE INC",
"EC3091": "CISCO SYSTEMS, INC.",
"EC3586": "Apple",
"EC3BF0": "NovelSat",
"EC3E09": "PERFORMANCE DESIGNED PRODUCTS, LLC",
"EC3F05": "Institute 706, The Second Academy China Aerospace Science & Industry Corp",
"EC42F0": "ADL Embedded Solutions, Inc.",
"EC43E6": "AWCER Ltd.",
"EC43F6": "ZyXEL Communications Corporation",
"EC4476": "CISCO SYSTEMS, INC.",
"EC4644": "TTK SAS",
"EC4670": "Meinberg Funkuhren GmbH & Co. KG",
"EC473C": "Redwire, LLC",
"EC4993": "Qihan Technology Co., Ltd",
"EC4C4D": "ZAO NPK RoTeK",
"EC542E": "Shanghai XiMei Electronic Technology Co. Ltd",
"EC55F9": "Hon Hai Precision Ind. Co.,Ltd.",
"EC5C69": "MITSUBISHI HEAVY INDUSTRIES MECHATRONICS SYSTEMS,LTD.",
"EC6264": "Global411 Internet Services, LLC",
"EC63E5": "ePBoard Design LLC",
"EC66D1": "B&W Group LTD",
"EC6C9F": "Chengdu Volans Technology CO.,LTD",
"EC71DB": "Shenzhen Baichuan Digital Technology Co., Ltd.",
"EC7C74": "Justone Technologies Co., Ltd.",
"EC7D9D": "MEI",
"EC836C": "RM Tech Co., Ltd.",
"EC852F": "Apple",
"EC888F": "TP-LINK TECHNOLOGIES CO., LTD.",
"EC89F5": "Lenovo Mobile Communication Technology Ltd.",
"EC8A4C": "zte corporation",
"EC8EAD": "DLX",
"EC9233": "Eddyfi NDT Inc",
"EC9327": "MEMMERT GmbH + Co. KG",
"EC9681": "2276427 Ontario Inc",
"EC986C": "Lufft Mess- und Regeltechnik GmbH",
"EC98C1": "Beijing Risbo Network Technology Co.,Ltd",
"EC9A74": "Hewlett Packard",
"EC9B5B": "Nokia Corporation",
"EC9ECD": "Emerson Network Power and Embedded Computing",
"ECA29B": "Kemppi Oy",
"ECA86B": "ELITEGROUP COMPUTER SYSTEMS CO., LTD.",
"ECB106": "Acuro Networks, Inc",
"ECB541": "SHINANO E and E Co.Ltd.",
"ECBBAE": "Digivoice Tecnologia em Eletronica Ltda",
"ECBD09": "FUSION Electronics Ltd",
"ECC38A": "Accuenergy (CANADA) Inc",
"ECC882": "CISCO SYSTEMS, INC.",
"ECCD6D": "Allied Telesis, Inc.",
"ECD00E": "MiraeRecognition Co., Ltd.",
"ECD040": "GEA Farm Technologies GmbH",
"ECD19A": "Zhuhai Liming Industries Co., Ltd",
"ECD925": "RAMI",
"ECD950": "IRT SA",
"ECDE3D": "Lamprey Networks, Inc.",
"ECE09B": "Samsung electronics CO., LTD",
"ECE1A9": "Cisco",
"ECE512": "tado GmbH",
"ECE555": "Hirschmann Automation",
"ECE744": "Omntec mfg. inc",
"ECE90B": "SISTEMA SOLUCOES ELETRONICAS LTDA - EASYTECH",
"ECE915": "STI Ltd",
"ECE9F8": "Guang Zhou TRI-SUN Electronics Technology Co., Ltd",
"ECEA03": "DARFON LIGHTING CORP",
"ECF00E": "Abocom",
"ECF236": "NEOMONTANA ELECTRONICS",
"ECF35B": "Nokia Corporation",
"ECF4BB": "Dell Inc",
"ECF72B": "HD DIGITAL TECH CO., LTD.",
"ECFAAA": "The IMS Company",
"ECFC55": "A. Eberle GmbH & Co. KG",
"ECFE7E": "BlueRadios, Inc.",
"F0007F": "Janz - Contadores de Energia, SA",
"F0022B": "Chrontel",
"F00248": "SmarteBuilding",
"F00786": "Shandong Bittel Electronics Co., Ltd",
"F008F1": "Samsung Electronics Co.,Ltd",
"F013C3": "SHENZHEN FENDA TECHNOLOGY CO., LTD",
"F015A0": "KyungDong One Co., Ltd.",
"F01C13": "LG Electronics",
"F01FAF": "Dell Inc",
"F0219D": "Cal-Comp Electronics & Communications Company Ltd.",
"F02329": "SHOWA DENKI CO.,LTD.",
"F02405": "OPUS High Technology Corporation",
"F02408": "Talaris (Sweden) AB",
"F02572": "CISCO SYSTEMS, INC.",
"F025B7": "Samsung Electro Mechanics co., LTD.",
"F0264C": "Dr. Sigrist AG",
"F02765": "Murata Manufactuaring Co.,Ltd.",
"F02929": "Cisco",
"F02A61": "Waldo Networks, Inc.",
"F02FD8": "Bi2-Vision",
"F0321A": "Mita-Teknik A/S",
"F037A1": "Huike Electronics (SHENZHEN) CO., LTD.",
"F03A4B": "Bloombase, Inc.",
"F03A55": "Omega Elektronik AS",
"F03FF8": "R L Drake",
"F04335": "DVN(Shanghai)Ltd.",
"F04A2B": "PYRAMID Computer GmbH",
"F04B6A": "Scientific Production Association Siberian Arsenal, Ltd.",
"F04BF2": "JTECH Communications, Inc.",
"F04DA2": "Dell Inc.",
"F04F7C": "PRIVATE",
"F05849": "CareView Communications",
"F05A09": "Samsung Electronics Co.,Ltd",
"F05D89": "Dycon Limited",
"F05DC8": "Duracell Powermat",
"F05F5A": "Getriebebau NORD GmbH and Co. KG",
"F06130": "Advantage Pharmacy Services, LLC",
"F0620D": "Shenzhen Egreat Tech Corp.,Ltd",
"F06281": "ProCurve Networking by HP",
"F065DD": "Primax Electronics Ltd.",
"F06853": "Integrated Corporation",
"F06BCA": "Samsung Electronics Co.,Ltd",
"F0728C": "Samsung Electronics Co.,Ltd",
"F073AE": "PEAK-System Technik",
"F07765": "Sourcefire, Inc",
"F077D0": "Xcellen",
"F07BCB": "Hon Hai Precision Ind. Co.,Ltd.",
"F07D68": "D-Link Corporation",
"F07F0C": "Leopold Kostal GmbH &Co. KG",
"F081AF": "IRZ AUTOMATION TECHNOLOGIES LTD",
"F08261": "SAGEMCOM",
"F0842F": "ADB Broadband Italia",
"F084C9": "zte corporation",
"F08A28": "JIANGSU HENGSION ELECTRONIC S and T CO.,LTD",
"F08BFE": "COSTEL.,CO.LTD",
"F08CFB": "Fiberhome Telecommunication Tech.Co.,Ltd.",
"F08EDB": "VeloCloud Networks",
"F0921C": "Hewlett Packard",
"F0933A": "NxtConect",
"F093C5": "Garland Technology",
"F09CBB": "RaonThink Inc.",
"F09CE9": "Aerohive Networks Inc",
"F0A225": "PRIVATE",
"F0A764": "GST Co., Ltd.",
"F0ACA4": "HBC-radiomatic",
"F0AD4E": "Globalscale Technologies, Inc.",
"F0AE51": "Xi3 Corp",
"F0B479": "Apple",
"F0B6EB": "Poslab Technology Co., Ltd.",
"F0BCC8": "MaxID (Pty) Ltd",
"F0BDF1": "Sipod Inc.",
"F0BF97": "Sony Corporation",
"F0C1F1": "Apple, Inc.",
"F0C24C": "Zhejiang FeiYue Digital Technology Co., Ltd",
"F0C27C": "Mianyang Netop Telecom Equipment Co.,Ltd.",
"F0C88C": "LeddarTech Inc.",
"F0CBA1": "Apple",
"F0D14F": "LINEAR LLC",
"F0D1A9": "Apple",
"F0D3A7": "CobaltRay Co., Ltd",
"F0D3E7": "Sensometrix SA",
"F0D767": "Axema Passagekontroll AB",
"F0DA7C": "RLH INDUSTRIES,INC.",
"F0DB30": "Yottabyte",
"F0DBF8": "Apple",
"F0DCE2": "Apple",
"F0DE71": "Shanghai EDO Technologies Co.,Ltd.",
"F0DEB9": "ShangHai Y&Y Electronics Co., Ltd",
"F0DEF1": "Wistron InfoComm (Kunshan)Co",
"F0E5C3": "Dr\u00e4gerwerk AG & Co. KG aA",
"F0E77E": "Samsung Electronics Co.,Ltd",
"F0EBD0": "Shanghai Feixun Communication Co.,Ltd.",
"F0EC39": "Essec",
"F0ED1E": "Bilkon Bilgisayar Kontrollu Cih. Im.Ltd.",
"F0EEBB": "VIPAR GmbH",
"F0F002": "Hon Hai Precision Ind. Co.,Ltd.",
"F0F260": "Mobitec AB",
"F0F5AE": "Adaptrum Inc.",
"F0F61C": "Apple",
"F0F644": "Whitesky Science & Technology Co.,Ltd.",
"F0F669": "Motion Analysis Corporation",
"F0F755": "CISCO SYSTEMS, INC.",
"F0F7B3": "Phorm",
"F0F842": "KEEBOX, Inc.",
"F0F9F7": "IES GmbH & Co. KG",
"F0FDA0": "Acurix Networks LP",
"F40321": "BeNeXt B.V.",
"F4044C": "ValenceTech Limited",
"F4068D": "devolo AG",
"F406A5": "Hangzhou Bianfeng Networking Technology Co., Ltd.",
"F40B93": "Research In Motion",
"F40F9B": "WAVELINK",
"F415FD": "Shanghai Pateo Electronic Equipment Manufacturing Co., Ltd.",
"F41BA1": "Apple",
"F41E26": "Simon-Kaloi Engineering",
"F41F0B": "YAMABISHI Corporation",
"F41FC2": "Cisco",
"F42012": "Cuciniale GmbH",
"F42896": "SPECTO PAINEIS ELETRONICOS LTDA",
"F436E1": "Abilis Systems SARL",
"F437B7": "Apple",
"F43814": "Shanghai Howell Electronic Co.,Ltd",
"F43D80": "FAG Industrial Services GmbH",
"F43E61": "Shenzhen Gongjin Electronics Co., Ltd",
"F43E9D": "Benu Networks, Inc.",
"F44227": "S & S Research Inc.",
"F44450": "BND Co., Ltd.",
"F445ED": "Portable Innovation Technology Ltd.",
"F4472A": "Nanjing Rousing Sci. and Tech. Industrial Co., Ltd",
"F44848": "Amscreen Group Ltd",
"F44EFD": "Actions Semiconductor Co.,Ltd.(Cayman Islands)",
"F450EB": "Telechips Inc",
"F45214": "Mellanox Technologies, Inc.",
"F45433": "Rockwell Automation",
"F45595": "HENGBAO Corporation LTD.",
"F4559C": "Huawei Technologies Co., Ltd",
"F455E0": "Niceway CNC Technology Co.,Ltd.Hunan Province",
"F45842": "Boxx TV Ltd",
"F45F69": "Matsufu Electronics distribution Company",
"F45FD4": "Cisco SPVTG",
"F45FF7": "DQ Technology Inc.",
"F4600D": "Panoptic Technology, Inc",
"F46349": "Diffon Corporation",
"F46ABC": "Adonit Corp. Ltd.",
"F46D04": "ASUSTek COMPUTER INC.",
"F46DE2": "zte corporation",
"F473CA": "Conversion Sound Inc.",
"F47626": "Viltechmeda UAB",
"F47A4E": "Woojeon&Handan",
"F47ACC": "SolidFire, Inc.",
"F47B5E": "Samsung Eletronics Co., Ltd",
"F47F35": "CISCO SYSTEMS, INC.",
"F48139": "CANON INC.",
"F48771": "Infoblox",
"F48E09": "Nokia Corporation",
"F490CA": "Tensorcom",
"F490EA": "Deciso B.V.",
"F49461": "NexGen Storage",
"F49466": "CountMax, ltd",
"F499AC": "WEBER Schraubautomaten GmbH",
"F49F54": "Samsung Electronics",
"F4A294": "EAGLE WORLD DEVELOPMENT CO., LIMITED",
"F4A52A": "Hawa Technologies Inc",
"F4ACC1": "CISCO SYSTEMS, INC.",
"F4B164": "Lightning Telecommunications Technology Co. Ltd",
"F4B381": "WindowMaster A/S",
"F4B52F": "Juniper networks",
"F4B549": "Yeastar Technology Co., Ltd.",
"F4B6E5": "TerraSem Co.,Ltd",
"F4B72A": "TIME INTERCONNECT LTD",
"F4B7E2": "Hon Hai Precision Ind. Co.,Ltd.",
"F4BD7C": "Chengdu jinshi communication Co., LTD",
"F4C447": "Coagent International Enterprise Limited",
"F4C6D7": "blackned GmbH",
"F4C714": "Shenzhen Huawei Communication Technologies Co., Ltd",
"F4C795": "WEY Elektronik AG",
"F4CAE5": "FREEBOX SA",
"F4CD90": "Vispiron Rotec GmbH",
"F4CE46": "Hewlett-Packard Company",
"F4CFE2": "Cisco",
"F4D9FB": "Samsung Electronics CO., LTD",
"F4DC4D": "Beijing CCD Digital Technology Co., Ltd",
"F4DCDA": "Zhuhai Jiahe Communication Technology Co., limited",
"F4DCF9": "Huawei Technologies Co., Ltd",
"F4E142": "Delta Elektronika BV",
"F4E6D7": "Solar Power Technologies, Inc.",
"F4EA67": "CISCO SYSTEMS, INC.",
"F4EC38": "TP-LINK TECHNOLOGIES CO., LTD.",
"F4F15A": "Apple",
"F4F5A5": "Nokia corporation",
"F4F5E8": "Google",
"F4F951": "Apple",
"F4FC32": "Texas Instruments",
"F80113": "Huawei Technologies Co., Ltd",
"F80332": "Khomp",
"F8051C": "DRS Imaging and Targeting Solutions",
"F80BBE": "ARRIS Group, Inc.",
"F80BD0": "Datang Telecom communication terminal (Tianjin) Co., Ltd.",
"F80CF3": "LG Electronics",
"F80D43": "Hon Hai Precision Ind. Co., Ltd.",
"F80DEA": "ZyCast Technology Inc.",
"F80F41": "Wistron InfoComm(ZhongShan) Corporation",
"F80F84": "Natural Security SAS",
"F81037": "Atopia Systems, LP",
"F81547": "Avaya, Inc",
"F81654": "Intel Corporate",
"F81A67": "TP-LINK TECHNOLOGIES CO., LTD.",
"F81CE5": "Telefonbau Behnke GmbH",
"F81D93": "Longdhua(Beijing) Controls Technology Co.,Ltd",
"F81EDF": "Apple",
"F82285": "Cypress Technology CO., LTD.",
"F82793": "Apple, Inc",
"F82BC8": "Jiangsu Switter Co., Ltd",
"F82EDB": "RTW GmbH & Co. KG",
"F82F5B": "eGauge Systems LLC",
"F82FA8": "Hon Hai Precision Ind. Co.,Ltd.",
"F83094": "Alcatel-Lucent Telecom Limited",
"F8313E": "endeavour GmbH",
"F83376": "Good Mind Innovation Co., Ltd.",
"F83553": "Magenta Research Ltd.",
"F835DD": "Gemtek Technology Co., Ltd.",
"F83D4E": "Softlink Automation System Co., Ltd",
"F83DFF": "Huawei Technologies Co., Ltd",
"F842FB": "Yasuda Joho Co.,ltd.",
"F845AD": "Konka Group Co., Ltd.",
"F8462D": "SYNTEC Incorporation",
"F8472D": "X2gen Digital Corp. Ltd",
"F84897": "Hitachi, Ltd.",
"F84A73": "EUMTECH CO., LTD",
"F84A7F": "Innometriks Inc",
"F84ABF": "HUAWEI TECHNOLOGIES CO.,LTD",
"F84F57": "Cisco",
"F85063": "Verathon",
"F8516D": "Denwa Technology Corp.",
"F852DF": "VNL Europe AB",
"F854AF": "ECI Telecom Ltd.",
"F8572E": "Core Brands, LLC",
"F85BC9": "M-Cube Spa",
"F85C45": "IC Nexus Co. Ltd.",
"F85F2A": "Nokia Corporation",
"F862AA": "xn systems",
"F86601": "Suzhou Chi-tek information technology Co., Ltd",
"F866D1": "Hon Hai Precision Ind. Co., Ltd.",
"F866F2": "CISCO SYSTEMS, INC.",
"F86971": "Seibu Electric Co.,",
"F86ECF": "Arcx Inc",
"F871FE": "The Goldman Sachs Group, Inc.",
"F872EA": "Cisco",
"F8769B": "Neopis Co., Ltd.",
"F87B62": "FASTWEL INTERNATIONAL CO., LTD. Taiwan Branch",
"F87B7A": "ARRIS Group, Inc.",
"F87B8C": "Amped Wireless",
"F8811A": "OVERKIZ",
"F884F2": "Samsung Electronics Co.,Ltd",
"F88C1C": "KAISHUN ELECTRONIC TECHNOLOGY CO., LTD. BEIJING",
"F88DEF": "Tenebraex",
"F88E85": "COMTREND CORPORATION",
"F88FCA": "Google Fiber, Inc",
"F8912A": "GLP German Light Products GmbH",
"F893F3": "VOLANS",
"F89550": "Proton Products Chengdu Ltd",
"F897CF": "DAESHIN-INFORMATION TECHNOLOGY CO., LTD.",
"F89955": "Fortress Technology Inc",
"F89D0D": "Control Technology Inc.",
"F89FB8": "YAZAKI Energy System Corporation",
"F8A03D": "Dinstar Technologies Co., Ltd.",
"F8A2B4": "RHEWA-WAAGENFABRIK August Freudewald GmbH &Co. KG",
"F8A45F": "Beijing Xiaomi communications co.,ltd",
"F8A963": "COMPAL INFORMATION (KUNSHAN) CO., LTD.",
"F8A9D0": "LG Electronics",
"F8A9DE": "PUISSANCE PLUS",
"F8AA8A": "Axview Technology (Shenzhen) Co.,Ltd",
"F8AC6D": "Deltenna Ltd",
"F8B156": "Dell Inc",
"F8B599": "Guangzhou CHNAVS Digital Technology Co.,Ltd",
"F8BC12": "Dell Inc",
"F8C001": "Juniper Networks",
"F8C091": "Highgates Technology",
"F8C678": "Carefusion",
"F8D0AC": "Sony Computer Entertainment Inc.",
"F8D0BD": "Samsung Electronics Co.,Ltd",
"F8D111": "TP-LINK TECHNOLOGIES CO., LTD.",
"F8D3A9": "AXAN Networks",
"F8D462": "Pumatronix Equipamentos Eletronicos Ltda.",
"F8D756": "Simm Tronic Limited",
"F8D7BF": "REV Ritter GmbH",
"F8DADF": "EcoTech, Inc.",
"F8DAE2": "Beta LaserMike",
"F8DAF4": "Taishan Online Technology Co., Ltd.",
"F8DB4C": "PNY Technologies, INC.",
"F8DB7F": "HTC Corporation",
"F8DB88": "Dell Inc",
"F8DC7A": "Variscite LTD",
"F8DFA8": "ZTE Corporation",
"F8E079": "Motorola Mobility LLC",
"F8E4FB": "Actiontec Electronics, Inc",
"F8E7B5": "\u00b5Tech Tecnologia LTDA",
"F8E968": "Egker Kft.",
"F8EA0A": "Dipl.-Math. Michael Rauch",
"F8EDA5": "ARRIS Group, Inc.",
"F8F005": "Newport Media Inc.",
"F8F014": "RackWare Inc.",
"F8F082": "Orion Networks International, Inc",
"F8F1B6": "Motorola Mobility LLC",
"F8F25A": "G-Lab GmbH",
"F8F7D3": "International Communications Corporation",
"F8F7FF": "SYN-TECH SYSTEMS INC",
"F8FB2F": "Santur Corporation",
"F8FE5C": "Reciprocal Labs Corp",
"F8FEA8": "Technico Japan Corporation",
"F8FF5F": "Shenzhen Communication Technology Co.,Ltd",
"FC0012": "Toshiba Samsung Storage Technolgoy Korea Corporation",
"FC019E": "VIEVU",
"FC01CD": "FUNDACION TEKNIKER",
"FC0647": "Cortland Research, LLC",
"FC07A0": "LRE Medical GmbH",
"FC0877": "Prentke Romich Company",
"FC09D8": "ACTEON Group",
"FC09F6": "GUANGDONG TONZE ELECTRIC CO.,LTD",
"FC0A81": "Motorola Solutions Inc.",
"FC0FE6": "Sony Computer Entertainment Inc.",
"FC10BD": "Control Sistematizado S.A.",
"FC1186": "Logic3 plc",
"FC1349": "Global Apps Corp.",
"FC15B4": "Hewlett Packard",
"FC1607": "Taian Technology(Wuxi) Co.,Ltd.",
"FC1794": "InterCreative Co., Ltd",
"FC19D0": "Cloud Vision Networks Technology Co.,Ltd.",
"FC1BFF": "V-ZUG AG",
"FC1D59": "I Smart Cities HK Ltd",
"FC1E16": "IPEVO corp",
"FC1F19": "SAMSUNG ELECTRO-MECHANICS CO., LTD.",
"FC1FC0": "EURECAM",
"FC229C": "Han Kyung I Net Co.,Ltd.",
"FC2325": "EosTek (Shenzhen) Co., Ltd.",
"FC253F": "Apple",
"FC27A2": "TRANS ELECTRIC CO., LTD.",
"FC2A54": "Connected Data, Inc.",
"FC2E2D": "Lorom Industrial Co.LTD.",
"FC2F40": "Calxeda, Inc.",
"FC3598": "Favite Inc.",
"FC35E6": "Visteon corp",
"FC3FAB": "Henan Lanxin Technology Co., Ltd",
"FC4463": "Universal Audio, Inc",
"FC4499": "Swarco LEA d.o.o.",
"FC455F": "JIANGXI SHANSHUI OPTOELECTRONIC TECHNOLOGY CO.,LTD",
"FC48EF": "HUAWEI TECHNOLOGIES CO.,LTD",
"FC4AE9": "Castlenet Technology Inc.",
"FC4B1C": "INTERSENSOR S.R.L.",
"FC4BBC": "Sunplus Technology Co., Ltd.",
"FC4DD4": "Universal Global Scientific Industrial Co., Ltd.",
"FC5090": "SIMEX Sp. z o.o.",
"FC52CE": "Control iD",
"FC58FA": "Shen Zhen Shi Xin Zhong Xin Technology Co.,Ltd.",
"FC5B24": "Weibel Scientific A/S",
"FC5B26": "MikroBits",
"FC6018": "Zhejiang Kangtai Electric Co., Ltd.",
"FC6198": "NEC Personal Products, Ltd",
"FC626E": "Beijing MDC Telecom",
"FC683E": "Directed Perception, Inc",
"FC6C31": "LXinstruments GmbH",
"FC7516": "D-Link International",
"FC75E6": "Handreamnet",
"FC7CE7": "FCI USA LLC",
"FC8329": "Trei technics",
"FC8399": "Avaya, Inc",
"FC8B97": "Shenzhen Gongjin Electronics Co.,Ltd",
"FC8E7E": "Pace plc",
"FC8FC4": "Intelligent Technology Inc.",
"FC923B": "Nokia Corporation",
"FC946C": "UBIVELOX",
"FC94E3": "Technicolor USA Inc.",
"FC9947": "Cisco",
"FC9FAE": "Fidus Systems Inc",
"FCA13E": "Samsung Electronics",
"FCA841": "Avaya, Inc",
"FCA9B0": "MIARTECH (SHANGHAI),INC.",
"FCAD0F": "QTS NETWORKS",
"FCAF6A": "Conemtech AB",
"FCB0C4": "Shanghai DareGlobal Technologies Co., Ltd",
"FCBBA1": "Shenzhen Minicreate Technology Co.,Ltd",
"FCC23D": "Atmel Corporation",
"FCC2DE": "Murata Manufacturing Co., Ltd.",
"FCC734": "Samsung Electronics Co.,Ltd",
"FCC897": "ZTE Corporation",
"FCCCE4": "Ascon Ltd.",
"FCCF62": "IBM Corp",
"FCD4F2": "The Coca Cola Company",
"FCD4F6": "Messana Air.Ray Conditioning s.r.l.",
"FCD5D9": "Shenzhen SDMC Technology Co., Ltd.",
"FCD6BD": "Robert Bosch GmbH",
"FCD817": "Beijing Hesun Technologies Co.Ltd.",
"FCDB96": "ENERVALLEY CO., LTD",
"FCDD55": "Shenzhen WeWins wireless Co.,Ltd",
"FCE186": "A3M Co., LTD",
"FCE192": "Sichuan Jinwangtong Electronic Science&Technology Co,.Ltd",
"FCE1D9": "Stable Imaging Solutions LLC",
"FCE23F": "CLAY PAKY SPA",
"FCE557": "Nokia Corporation",
"FCE892": "Hangzhou Lancable Technology Co.,Ltd",
"FCEDB9": "Arrayent",
"FCF152": "Sony Corporation",
"FCF1CD": "OPTEX-FA CO.,LTD.",
"FCF528": "ZyXEL Communications Corporation",
"FCF647": "Fiberhome Telecommunication Tech.Co.,Ltd.",
"FCF8AE": "Intel Corporate",
"FCF8B7": "TRONTEQ Electronic",
"FCFAF7": "Shanghai Baud Data Communication Co.,Ltd.",
"FCFBFB": "CISCO SYSTEMS, INC.",
"FCFE77": "Hitachi Reftechno, Inc."
}
|
ninuxorg/netengine
|
netengine/resources/manufacturers.py
|
Python
|
mit
| 761,686
|
[
"ASE",
"Amber",
"BWA",
"CRYSTAL",
"Galaxy",
"Jaguar",
"NEURON",
"Octopus"
] |
2d8c0915ad204a3ee4297f6382ba177ff27dbfec43bf0ac714eaf0aef24e18ac
|
# Public names exported by this word-list module (used for random name generation).
__all__ = ["ADJECTIVES", "ANIMALS"]
# Adjectives for random name generation, in (roughly) alphabetical order.
# Fixes relative to the previous revision:
#   - 'kinesthtic' corrected to 'kinesthetic' (typo)
#   - duplicate 'transparent' removed (was listed twice)
#   - 'hyena' removed (a noun, not an adjective)
ADJECTIVES = [
    'adorable',
    'acrobatic',
    'adaptable',
    'adventurous',
    'agile',
    'alert',
    'amiable',
    'amused',
    'arboreal',
    'ardent',
    'artful',
    'astonishing',
    'astute',
    'attentive',
    'authentic',
    'avid',
    'beardless',
    'benevolent',
    'bicameral',
    'big',
    'bionic',
    'blissful',
    'bodacious',
    'brave',
    'bright',
    'brilliant',
    'bubbly',
    'careful',
    'cautious',
    'cavernous',
    'charming',
    'cheeky',
    'cheerful',
    'circumspect',
    'cognizant',
    'cool',
    'collaborative',
    'collectible',
    'colorful',
    'colossal',
    'comfortable',
    'communicative',
    'compact',
    'compassionate',
    'constant',
    'contemplative',
    'contiguous',
    'convincing',
    'convivial',
    'cooperative',
    'courageous',
    'cordial',
    'cosmic',
    'creative',
    'cryptic',
    'crystalline',
    'cunning',
    'curious',
    'dancing',
    'daring',
    'dauntless',
    'dashing',
    'dazzling',
    'defiant',
    'delightful',
    'determined',
    'dexterous',
    'diminutive',
    'discerning',
    'distinctive',
    'dreaming',
    'dynamic',
    'eager',
    'earnest',
    'easygoing',
    'ebullient',
    'eccentric',
    'ecstatic',
    'effluvial',
    'effervescent',
    'elaborate',
    'elated',
    'elegant',
    'eloquent',
    'elusive',
    'enchanting',
    'encouraging',
    'energetic',
    'energized',
    'enormous',
    'enthusiastic',
    'erudite',
    'essential',
    'ethereal',
    'excited',
    'exhilarated',
    'extraordinary',
    'exotic',
    'exuberant',
    'fantastic',
    'fearless',
    'feisty',
    'fierce',
    'fiery',
    'flourishing',
    'flying',
    'focused',
    'fortunate',
    'friendly',
    'frolicking',
    'gargantuan',
    'gesticulating',
    'gigantic',
    'glamorous',
    'gleaming',
    'gleeful',
    'glorious',
    'gorgeous',
    'graceful',
    'grateful',
    'gregarious',
    'happy',
    'harmonious',
    'hatless',
    'healthy',
    'helpful',
    'heroic',
    'hydraulic',
    'idealistic',
    'illustrious',
    'illustrative',
    'imaginative',
    'immense',
    'impartial',
    'imperturbable',
    'improbable',
    'incredible',
    'inimitable',
    'influential',
    'inquisitive',
    'insightful',
    'inspired',
    'intrepid',
    'intricate',
    'intuitive',
    'invaluable',
    'inventive',
    'iterative',
    'jaunty',
    'jolly',
    'joyful',
    'joyous',
    'jubilant',
    'jumping',
    'katabatic',
    'keen',
    'kempt',
    'kept',
    'kind',
    'kindled',
    'kinematic',
    'kinesthetic',
    'kingly',
    'kitschy',
    'knightly',
    'knowing',
    'kooky',
    'kyphotic',
    'large',
    'laughing',
    'legendary',
    'lenient',
    'lively',
    'loquacious',
    'lucky',
    'luminescent',
    'magnetic',
    'magnificent',
    'majestic',
    'marvelous',
    'masked',
    'massive',
    'mechanical',
    'mercurial',
    'meritorious',
    'merry',
    'methodical',
    'meticulous',
    'mighty',
    'miniature',
    'mirthful',
    'mischievous',
    'modest',
    'momentous',
    'motionless',
    'multicolored',
    'murmuring',
    'musical',
    'mustachioed',
    'mysterious',
    'nascent',
    'neighborly',
    'noble',
    'nomadic',
    'nonchalant',
    'noncommittal',
    'observant',
    'omnidirectional',
    'omnipotent',
    'omnipresent',
    'omnivorous',
    'optimal',
    'optimistic',
    'otherworldly',
    'outgoing',
    'outspoken',
    'panoramic',
    'peaceful',
    'perceptive',
    'perpetual',
    'perplexing',
    'perspicacious',
    'philosophical',
    'picturesque',
    'playful',
    'pleasant',
    'poised',
    'practical',
    'precious',
    'precise',
    'precocious',
    'prestigious',
    'primeval',
    'primordial',
    'prismatic',
    'proactive',
    'proficient',
    'prototypical',
    'prudent',
    'purposeful',
    'qualified',
    'quizzical',
    'quotable',
    'radiant',
    'reassured',
    'reclusive',
    'recursive',
    'recurring',
    'reflective',
    'rejoicing',
    'relaxed',
    'relieved',
    'remarkable',
    'renowned',
    'resilient',
    'resolute',
    'resourceful',
    'rigorous',
    'roaring',
    'robust',
    'salient',
    'salubrious',
    'sanguine',
    'sapient',
    'sarcastic',
    'satisfied',
    'scholarly',
    'scintillating',
    'scrupulous',
    'selective',
    'shiny',
    'sincere',
    'singing',
    'sleek',
    'sleepy',
    'slippery',
    'smiling',
    'smooth',
    'solid',
    'sophisticated',
    'sparkling',
    'spectacular',
    'splendid',
    'spotless',
    'squeaky',
    'stately',
    'steady',
    'strategic',
    'striped',
    'stunning',
    'stupendous',
    'stylish',
    'substantial',
    'sufficient',
    'swimming',
    'symbolic',
    'symmetrical',
    'taciturn',
    'terrestrial',
    'tessellated',
    'thankful',
    'theoretical',
    'thoughtful',
    'thriving',
    'timeless',
    'timely',
    'topographical',
    'transparent',
    'tranquil',
    'tricky',
    'ubiquitous',
    'uncanny',
    'unclouded',
    'undisputed',
    'unexpected',
    'unfathomable',
    'unflappable',
    'unique',
    'universal',
    'unofficial',
    'unseen',
    'unthinkable',
    'uproarious',
    'variegated',
    'versatile',
    'victorious',
    'vigilant',
    'vigorous',
    'virtuous',
    'vivacious',
    'vivid',
    'vociferous',
    'wacky',
    'wandering',
    'watchful',
    'whimsical',
    'windswept',
    'witty',
    'wonderful',
    'wondrous',
    'yakkety',
    'yawning',
    'yeatsian',
    'yelling',
    'yellow',
    'yellowing',
    'young',
    'youthful',
    'yummy',
    'zany',
    'zaftig',
    'zealous',
    'zestful',
    'zesty',
    'zippy',
    'zoonatic',  # NOTE(review): not a standard word ('zoonotic'?) — possibly intentional coinage; confirm before changing
    'zygomorphous',
    'zymoid',
    'zymolytic',
]
# Animal names plus collective nouns ("a murder of crows").  Duplicate
# entries from the original list have been removed so every name occurs
# exactly once (duplicates skewed random selection toward those words);
# the comments of merged duplicates were combined onto the first entry.
ANIMALS = [
    'aardvark',
    'albatross',
    'alligator',
    'alpaca',
    'angelfish',
    'anteater',
    'antelope',
    'ape',
    'armadillo',
    'badger',
    'barracuda',
    'bat',
    'beagle',
    'bear',
    'beaver',
    'bird',
    'bittern',
    'bobolink',
    'brontosaurus',
    'boa-constrictor',
    'buffalo',
    'bulldog',
    'bumblebee',
    'butterfly',
    'buzzard',
    'camel',
    'caribou',
    'cassowary',
    'cat',
    'catfish',
    'caterpillar',
    'cauldron',  # collective noun for bats
    'centipede',
    'cete',
    'chameleon',
    'cheetah',
    'chinchilla',
    'chipmunk',
    'cobra',
    'coelacanth',
    'condor',
    'coot',
    'coral-snake',
    'cormorant',
    'crab',
    'crane',
    'crocodile',
    'dalmatian',
    'deer',
    'dolphin',
    'donkey',
    'dotterel',
    'dove',
    'dragonfish',
    'dragonfly',
    'duck',
    'eagle',
    'eel',
    'elephant',
    'elk',
    'falcon',
    'ferret',
    'finch',
    'firefly',
    'fish',
    'flamingo',
    'fox',
    'frog',
    'gang',  # also the collective noun for buffalo
    'gazelle',
    'gecko',
    'gerbil',
    'giraffe',
    'gnu',
    'goldfish',
    'goose',
    'gorilla',
    'grasshopper',
    'greyhound',
    'grouse',
    'gull',
    'hamster',
    'hare',
    'hawk',
    'hatchetfish',
    'hedgehog',
    'heron',
    'herring',
    'hornet',
    'horse',
    'hummingbird',
    'ibex',
    'ibis',
    'iguana',
    'jackal',
    'jaguar',
    'jay',
    'jellyfish',
    'kangaroo',
    'kestrel',
    'kingfisher',
    'koala',
    'koi',
    'lapwing',
    'lark',
    'lemur',
    'leopard',
    'lion',
    'lionfish',
    'llama',
    'lobster',
    'loris',
    'magpie',
    'mallard',
    'mandrill',
    'manta-ray',
    'mantis',
    'marlin',
    'mastiff',
    'mollusk',
    'mongoose',
    'moose',
    'mouse',
    'mule',
    'narwhal',
    'nautilus',
    'newt',
    'nightingale',
    'obstinacy',  # also the collective noun for buffalo
    'octopus',
    'okapi',
    'opossum',
    'orca',
    'osprey',
    'ostrich',
    'otter',
    'owl',
    'panda',
    'panther',
    'parrot',
    'partridge',
    'pelican',
    'penguin',
    'pheasant',
    'pig',
    'pigeon',
    'platypus',
    'polar-bear',
    'porcupine',
    'porpoise',
    'python',
    'quail',
    'rabbit',
    'raccoon',
    'ram',
    'raven',
    'reindeer',
    'rhinoceros',
    'roadrunner',
    'rook',
    'salamander',
    'salmon',
    'sandpiper',
    'scorpion',
    'sea-cucumber',
    'sea-lion',
    'sea-snake',
    'sea-turtle',
    'seahorse',
    'seal',
    'shark',
    'sheep',
    'shrewdness',  # also the collective noun for apes
    'sloth',
    'sleuth',
    'snowy-owl',
    'songbird',
    'sparrow',
    'spider',
    'squid',
    'squirrel',
    'starfish',
    'starling',
    'stegosaurus',
    'stingray',
    'stork',
    'swan',
    'tapir',
    'tiger',
    'toucan',
    'triceratops',
    'turtle',
    'vampire-bat',
    'velociraptor',
    'wallaby',
    'water-buffalo',
    'walrus',
    'whale',
    'wolf',
    'wolverine',
    'wombat',
    'wrasse',
    'wren',
    'xantus',
    'xenarthra',
    'xenops',
    'xerus',
    'yabby',
    'yak',
    'yellow-eyed-penguin',
    'yellow-ground-squirrel',
    'yellow-mongoose',
    'yuma-myotis',
    'zebra',
    'zebra-finch',
    'zebrafish',
    'zebra-shark',
    'zebroid',
    'zebu',
    'zonkey',
    'zorilla',
    'zorro',
    'zorse',
    # Collective nouns
    'clowder',  # cats
    'pounce',  # cats
    'kindle',  # kittens
    'litter',  # kittens, dogs
    'intrigue',  # kittens
    'cowardice',  # dogs
    'pace',  # donkeys
    'parade',  # elephants
    'business',  # ferrets
    'leash',  # foxes
    'skulk',  # foxes
    'earth',  # foxes
    'tower',  # giraffes
    'tribe',  # goats
    'trip',  # goats, dotterel
    'band',  # gorillas
    'bloat',  # hippos
    'thunder',  # hippos
    'cackle',  # hyenas
    'shadow',  # jaguar
    'troop',  # kangaroo, monkeys
    'mob',  # kangaroo
    'conspiracy',  # lemurs
    'leap',  # leopards
    'pride',  # lions
    'sawt',  # lions
    'richness',  # martens
    'labor',  # moles
    'barrel',  # monkeys
    'pack',  # mules
    'span',  # mules
    'barren',  # mules
    'romp',  # otter
    'drift',  # pigs
    'drove',  # pigs
    'sounder',  # pigs
    'team',  # pigs, ducks
    'passel',  # pigs
    'prickle',  # porcupines
    'pod',  # porpoises, whales
    'school',  # porpoises, whales
    'herd',  # porpoises
    'turmoil',  # porpoises
    'colony',  # rabbits
    'warren',  # rabbits
    'nest',  # rabbits
    'down',  # rabbits
    'husk',  # rabbits
    'gam',  # whales
    'route',  # wolves
    'woute',  # wolves -- NOTE(review): possibly a typo for 'rout'; left as-is
    'sedge',  # bitterns, heron
    'wake',  # buzzard
    'chain',  # bobolink
    'cover',  # coot
    'gulp',  # cormorants, magpies
    'murder',  # crows, magpies
    'horde',  # crows
    'dule',  # doves
    'pitying',  # doves
    'brace',  # ducks
    'flock',  # ducks
    'raft',  # ducks
    'paddling',  # ducks
    'badling',  # ducks
    'convocation',  # eagles
    'charm',  # finches, magpies
    'flamingos',  # stand -- NOTE(review): entry/comment look swapped; confirm
    'gaggle',  # geese
    'skein',  # geese
    'cast',  # hawk
    'kettle',  # hawk
    'boil',  # hawk
    'siege',  # heron
    'party',  # jays
    'scold',  # jays
    'deceit',  # lapwings
    'exaltation',  # larks
    'sord',  # mallards
    'tiding',  # magpies
    'watch',  # nightingales
    'parliament',  # owls
    'pandemonium',  # parrots
    'company',  # parrots
    'covey',  # partridge
    'ostentation',  # peacocks
    'muster',  # peacocks, penguins
    'parcel',  # penguins
    'rookery',  # penguins
]
|
2ps/djenga
|
djenga/animal_pairs/dictionary.py
|
Python
|
bsd-3-clause
| 12,228
|
[
"Elk",
"Firefly",
"GULP",
"Jaguar",
"MOOSE",
"ORCA",
"Octopus"
] |
c40ea27e33ae16886a3c7748045e36b647a70a3ba8b51ff1a591846d4878dd2a
|
#!/usr/bin/env python
# emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*-
# vi: set ft=python sts=4 ts=4 sw=4 et:
"""A quick and dirty example of using Mayavi to overlay anatomy and activation.
"""
#-----------------------------------------------------------------------------
# Imports
#-----------------------------------------------------------------------------
from __future__ import print_function # Python 2/3 compatibility
import numpy as np
try:
from mayavi import mlab
except ImportError:
try:
from enthought.mayavi import mlab
except ImportError:
raise RuntimeError('Need mayavi for this module')
from fiac_util import load_image_fiac
#-----------------------------------------------------------------------------
# Globals
#-----------------------------------------------------------------------------
# Group-level analysis mask and average anatomical image, loaded once at
# import time (load_image_fiac performs file I/O on the FIAC data layout).
MASK = load_image_fiac('group', 'mask.nii')
AVGANAT = load_image_fiac('group', 'avganat.nii')
#-----------------------------------------------------------------------------
# Functions
#-----------------------------------------------------------------------------
def view_thresholdedT(design, contrast, threshold, inequality=np.greater):
    """
    A mayavi isosurface view of thresholded t-statistics

    Parameters
    ----------
    design : {'block', 'event'}
        FIAC design whose group-level t map to load.
    contrast : str
        Name of the contrast directory under the design.
    threshold : float
        Cutoff applied to the t map via *inequality*.
    inequality : {np.greater, np.less}, optional
        Comparison used for thresholding (default ``np.greater``).

    Returns
    -------
    avganat_iso, tval_iso
        The mayavi surfaces for the anatomy and the thresholded t map.
    """
    maska = np.asarray(MASK)
    tmap = np.array(load_image_fiac('group', design, contrast, 't.nii'))
    test = inequality(tmap, threshold)
    # Zero everywhere except voxels that survive the threshold.
    tval = np.zeros(tmap.shape)
    tval[test] = tmap[test]
    # XXX make the array axes agree with mayavi2
    avganata = np.array(AVGANAT)
    avganat_iso = mlab.contour3d(avganata * maska, opacity=0.3, contours=[3600],
                                 color=(0.8, 0.8, 0.8))
    avganat_iso.actor.property.backface_culling = True
    avganat_iso.actor.property.ambient = 0.3
    # Use the ndarray mask here too; the original multiplied by the raw MASK
    # image object, inconsistent with the anatomy overlay above.
    tval_iso = mlab.contour3d(tval * maska, color=(0.8, 0.3, 0.3),
                              contours=[threshold])
    return avganat_iso, tval_iso
#-----------------------------------------------------------------------------
# Script entry point
#-----------------------------------------------------------------------------
if __name__ == '__main__':
    # Quick demo configuration for an interactive look at one contrast.
    demo_design = 'block'
    demo_contrast = 'sentence_0'
    demo_threshold = 0.3
    print('Starting thresholded view with:')
    print('Design=', demo_design, 'contrast=', demo_contrast,
          'threshold=', demo_threshold)
    view_thresholdedT(demo_design, demo_contrast, demo_threshold)
|
bthirion/nipy
|
examples/fiac/view_contrasts_3d.py
|
Python
|
bsd-3-clause
| 2,584
|
[
"Mayavi"
] |
600cfdcd0f7d3ba13d019f07528c8ce22f2aab096a6f4db9f06dbc9e60c09632
|
# (C) British Crown Copyright 2016, Met Office.
"""
The `ASoP1_spectral` package contains methods for analysing precipitation
intensity at any given time resolution, as part of the Analysing Scale of Precipitation
v1.0 package described in Klingaman et al. (2016).
ASoP (v1.0): A set of methods for analyzing scales of precipitation in general
circulation models. By N.P. Klingaman, G.M. Martin and A.F. Moise. Geoscientific Model
Development, submitted.
Testing is based on the reproduction of standard output netCDF data files and PNG figure
files using an artificial input rainfall dataset.
"""
|
gillmmartin/ASoP1-Spectral
|
ASoP1_Spectral/tests/__init__.py
|
Python
|
apache-2.0
| 612
|
[
"NetCDF"
] |
7cd4de208af0bc6ad8503f1a5f7c325dd9aec87bf5b89dcbe39b562da600322b
|
from paraview.simple import *

# Earlier snapshot of the same run, kept for reference:
#   /scratch/snx3000tds/piccinal/sphflow/rotating_square_patch_3D/015/RES/square200.h5.xmf
xdmf_file = '/scratch/snx3000tds/piccinal/sphflow/rotating_square_patch_3D/015/RES/square278.h5.xmf'

# Load the SPH particle data and select the arrays/grid to read.
reader = XDMFReader(FileNames=[xdmf_file])
reader.PointArrayStatus = ['DeltaX', 'ID', 'Interface', 'Kind', 'Marker', 'P', 'Velocity', 'Volume']
reader.GridStatus = ['Particles']

# Render the particles as raw points and write a screenshot to disk.
representation = Show()
representation.SetRepresentationType("Points")
ResetCamera()
SaveScreenshot("foo.png")
|
jgphpc/linux
|
paraview/dom/pvbatch/0.py
|
Python
|
gpl-2.0
| 505
|
[
"ParaView"
] |
c5ac7f9fe64c793a3d893bbe0d3a4833c5d4aac0c8e9716534ac62d74b8e96fc
|
#!/usr/bin/python
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
# Module documentation; fixed four options that used the invalid key
# 'require:' instead of 'required:', and the 'stickness' typo.
DOCUMENTATION = """
---
module: ec2_elb_lb
description:
  - Returns information about the load balancer.
  - Will be marked changed when called only if state is changed.
short_description: Creates or destroys Amazon ELB.
version_added: "1.5"
author:
  - "Jim Dalton (@jsdalton)"
  - "Rick Mendes (@rickmendes)"
options:
  state:
    description:
      - Create or destroy the ELB
    choices: ["present", "absent"]
    required: true
  name:
    description:
      - The name of the ELB
    required: true
  listeners:
    description:
      - List of ports/protocols for this ELB to listen on (see example)
    required: false
  purge_listeners:
    description:
      - Purge existing listeners on ELB that are not found in listeners
    required: false
    default: true
  zones:
    description:
      - List of availability zones to enable on this ELB
    required: false
  purge_zones:
    description:
      - Purge existing availability zones on ELB that are not found in zones
    required: false
    default: false
  security_group_ids:
    description:
      - A list of security groups to apply to the elb
    required: false
    default: None
    version_added: "1.6"
  security_group_names:
    description:
      - A list of security group names to apply to the elb
    required: false
    default: None
    version_added: "2.0"
  health_check:
    description:
      - An associative array of health check configuration settings (see example)
    required: false
    default: None
  access_logs:
    description:
      - An associative array of access logs configuration settings (see example)
    required: false
    default: None
    version_added: "2.0"
  subnets:
    description:
      - A list of VPC subnets to use when creating ELB. Zones should be empty if using this.
    required: false
    default: None
    aliases: []
    version_added: "1.7"
  purge_subnets:
    description:
      - Purge existing subnet on ELB that are not found in subnets
    required: false
    default: false
    version_added: "1.7"
  scheme:
    description:
      - The scheme to use when creating the ELB. For a private VPC-visible ELB use 'internal'.
    required: false
    default: 'internet-facing'
    version_added: "1.7"
  validate_certs:
    description:
      - When set to "no", SSL certificates will not be validated for boto versions >= 2.6.0.
    required: false
    default: "yes"
    choices: ["yes", "no"]
    aliases: []
    version_added: "1.5"
  connection_draining_timeout:
    description:
      - Wait a specified timeout allowing connections to drain before terminating an instance
    required: false
    aliases: []
    version_added: "1.8"
  idle_timeout:
    description:
      - ELB connections from clients and to servers are timed out after this amount of time
    required: false
    version_added: "2.0"
  cross_az_load_balancing:
    description:
      - Distribute load across all configured Availability Zones
    required: false
    default: "no"
    choices: ["yes", "no"]
    aliases: []
    version_added: "1.8"
  stickiness:
    description:
      - An associative array of stickiness policy settings. Policy will be applied to all listeners ( see example )
    required: false
    version_added: "2.0"
  wait:
    description:
      - When specified, Ansible will check the status of the load balancer to ensure it has been successfully
        removed from AWS.
    required: false
    default: no
    choices: ["yes", "no"]
    version_added: "2.1"
  wait_timeout:
    description:
      - Used in conjunction with wait. Number of seconds to wait for the elb to be terminated.
        A maximum of 600 seconds (10 minutes) is allowed.
    required: false
    default: 60
    version_added: "2.1"
extends_documentation_fragment:
  - aws
  - ec2
"""
# Usage examples; fixed three listener stanzas that used the invalid key
# 'protocols:' and split one listener mapping across three YAML list items
# (which the module could not consume), plus a 'balanacer' typo.
EXAMPLES = """
# Note: None of these examples set aws_access_key, aws_secret_key, or region.
# It is assumed that their matching environment variables are set.

# Basic provisioning example (non-VPC)
- local_action:
    module: ec2_elb_lb
    name: "test-please-delete"
    state: present
    zones:
      - us-east-1a
      - us-east-1d
    listeners:
      - protocol: http # options are http, https, ssl, tcp
        load_balancer_port: 80
        instance_port: 80
      - protocol: https
        load_balancer_port: 443
        instance_protocol: http # optional, defaults to value of protocol setting
        instance_port: 80
        # ssl certificate required for https or ssl
        ssl_certificate_id: "arn:aws:iam::123456789012:server-certificate/company/servercerts/ProdServerCert"

# Internal ELB example
- local_action:
    module: ec2_elb_lb
    name: "test-vpc"
    scheme: internal
    state: present
    subnets:
      - subnet-abcd1234
      - subnet-1a2b3c4d
    listeners:
      - protocol: http # options are http, https, ssl, tcp
        load_balancer_port: 80
        instance_port: 80

# Configure a health check and the access logs
- local_action:
    module: ec2_elb_lb
    name: "test-please-delete"
    state: present
    zones:
      - us-east-1d
    listeners:
      - protocol: http
        load_balancer_port: 80
        instance_port: 80
    health_check:
      ping_protocol: http # options are http, https, ssl, tcp
      ping_port: 80
      ping_path: "/index.html" # not required for tcp or ssl
      response_timeout: 5 # seconds
      interval: 30 # seconds
      unhealthy_threshold: 2
      healthy_threshold: 10
    access_logs:
      interval: 5 # minutes (defaults to 60)
      s3_location: "my-bucket" # This value is required if access_logs is set
      s3_prefix: "logs"

# Ensure ELB is gone
- local_action:
    module: ec2_elb_lb
    name: "test-please-delete"
    state: absent

# Ensure ELB is gone and wait for check (for default timeout)
- local_action:
    module: ec2_elb_lb
    name: "test-please-delete"
    state: absent
    wait: yes

# Ensure ELB is gone and wait for check with timeout value
- local_action:
    module: ec2_elb_lb
    name: "test-please-delete"
    state: absent
    wait: yes
    wait_timeout: 600

# Normally, this module will purge any listeners that exist on the ELB
# but aren't specified in the listeners parameter. If purge_listeners is
# false it leaves them alone
- local_action:
    module: ec2_elb_lb
    name: "test-please-delete"
    state: present
    zones:
      - us-east-1a
      - us-east-1d
    listeners:
      - protocol: http
        load_balancer_port: 80
        instance_port: 80
    purge_listeners: no

# Normally, this module will leave availability zones that are enabled
# on the ELB alone. If purge_zones is true, then any extraneous zones
# will be removed
- local_action:
    module: ec2_elb_lb
    name: "test-please-delete"
    state: present
    zones:
      - us-east-1a
      - us-east-1d
    listeners:
      - protocol: http
        load_balancer_port: 80
        instance_port: 80
    purge_zones: yes

# Creates a ELB and assigns a list of subnets to it.
- local_action:
    module: ec2_elb_lb
    state: present
    name: 'New ELB'
    security_group_ids: 'sg-123456, sg-67890'
    region: us-west-2
    subnets: 'subnet-123456,subnet-67890'
    purge_subnets: yes
    listeners:
      - protocol: http
        load_balancer_port: 80
        instance_port: 80

# Create an ELB with connection draining, increased idle timeout and cross availability
# zone load balancing
- local_action:
    module: ec2_elb_lb
    name: "New ELB"
    state: present
    connection_draining_timeout: 60
    idle_timeout: 300
    cross_az_load_balancing: "yes"
    region: us-east-1
    zones:
      - us-east-1a
      - us-east-1d
    listeners:
      - protocol: http
        load_balancer_port: 80
        instance_port: 80

# Create an ELB with load balancer stickiness enabled
- local_action:
    module: ec2_elb_lb
    name: "New ELB"
    state: present
    region: us-east-1
    zones:
      - us-east-1a
      - us-east-1d
    listeners:
      - protocol: http
        load_balancer_port: 80
        instance_port: 80
    stickiness:
      type: loadbalancer
      enabled: yes
      expiration: 300

# Create an ELB with application stickiness enabled
- local_action:
    module: ec2_elb_lb
    name: "New ELB"
    state: present
    region: us-east-1
    zones:
      - us-east-1a
      - us-east-1d
    listeners:
      - protocol: http
        load_balancer_port: 80
        instance_port: 80
    stickiness:
      type: application
      enabled: yes
      cookie: SESSIONID
"""
# boto is an optional import: the module reports a clean error to Ansible
# (via HAS_BOTO) instead of crashing with an ImportError at load time.
try:
    import boto
    import boto.ec2.elb
    import boto.ec2.elb.attributes
    from boto.ec2.elb.healthcheck import HealthCheck
    from boto.regioninfo import RegionInfo
    HAS_BOTO = True
except ImportError:
    HAS_BOTO = False
class ElbManager(object):
"""Handles ELB creation and destruction"""
    def __init__(self, module, name, listeners=None, purge_listeners=None,
                 zones=None, purge_zones=None, security_group_ids=None,
                 health_check=None, subnets=None, purge_subnets=None,
                 scheme="internet-facing", connection_draining_timeout=None,
                 idle_timeout=None,
                 cross_az_load_balancing=None, access_logs=None,
                 stickiness=None, wait=None, wait_timeout=None, region=None, **aws_connect_params):
        """Record the requested ELB configuration and connect to AWS.

        ``module`` is the AnsibleModule (used for fail_json); the other
        arguments mirror the module options documented in DOCUMENTATION.
        Opens the ELB and EC2 connections immediately and looks up any
        existing load balancer called *name*.
        """
        self.module = module
        self.name = name
        self.listeners = listeners
        self.purge_listeners = purge_listeners
        self.zones = zones
        self.purge_zones = purge_zones
        self.security_group_ids = security_group_ids
        self.health_check = health_check
        self.subnets = subnets
        self.purge_subnets = purge_subnets
        self.scheme = scheme
        self.connection_draining_timeout = connection_draining_timeout
        self.idle_timeout = idle_timeout
        self.cross_az_load_balancing = cross_az_load_balancing
        self.access_logs = access_logs
        self.stickiness = stickiness
        self.wait = wait
        self.wait_timeout = wait_timeout
        self.aws_connect_params = aws_connect_params
        self.region = region
        # 'gone' until _get_elb() finds a matching load balancer.
        self.changed = False
        self.status = 'gone'
        self.elb_conn = self._get_elb_connection()
        self.elb = self._get_elb()
        self.ec2_conn = self._get_ec2_connection()
    def ensure_ok(self):
        """Create the ELB if absent, otherwise reconcile every managed
        setting (zones, security groups, listeners, subnets, health check,
        ELB attributes and stickiness) with the requested state."""
        if not self.elb:
            # Zones and listeners will be added at creation
            self._create_elb()
        else:
            self._set_zones()
            self._set_security_groups()
            self._set_elb_listeners()
            self._set_subnets()
        self._set_health_check()
        # boto has introduced support for some ELB attributes in
        # different versions, so we check first before trying to
        # set them to avoid errors
        if self._check_attribute_support('connection_draining'):
            self._set_connection_draining_timeout()
        if self._check_attribute_support('connecting_settings'):
            self._set_idle_timeout()
        if self._check_attribute_support('cross_zone_load_balancing'):
            self._set_cross_az_load_balancing()
        if self._check_attribute_support('access_log'):
            self._set_access_log()
        # add sticky options
        self.select_stickiness_policy()
    def ensure_gone(self):
        """Destroy the ELB and, when wait is set, block until AWS has fully
        removed both the load balancer and its network interfaces."""
        if self.elb:
            self._delete_elb()
            if self.wait:
                elb_removed = self._wait_for_elb_removed()
                # Unfortunately even though the ELB itself is removed quickly
                # the interfaces take longer so reliant security groups cannot
                # be deleted until the interface has registered as removed.
                elb_interface_removed = self._wait_for_elb_interface_removed()
                if not (elb_removed and elb_interface_removed):
                    self.module.fail_json(msg='Timed out waiting for removal of load balancer.')
def get_info(self):
try:
check_elb = self.elb_conn.get_all_load_balancers(self.name)[0]
except:
check_elb = None
if not check_elb:
info = {
'name': self.name,
'status': self.status,
'region': self.region
}
else:
try:
lb_cookie_policy = check_elb.policies.lb_cookie_stickiness_policies[0].__dict__['policy_name']
except:
lb_cookie_policy = None
try:
app_cookie_policy = check_elb.policies.app_cookie_stickiness_policies[0].__dict__['policy_name']
except:
app_cookie_policy = None
info = {
'name': check_elb.name,
'dns_name': check_elb.dns_name,
'zones': check_elb.availability_zones,
'security_group_ids': check_elb.security_groups,
'status': self.status,
'subnets': self.subnets,
'scheme': check_elb.scheme,
'hosted_zone_name': check_elb.canonical_hosted_zone_name,
'hosted_zone_id': check_elb.canonical_hosted_zone_name_id,
'lb_cookie_policy': lb_cookie_policy,
'app_cookie_policy': app_cookie_policy,
'instances': [instance.id for instance in check_elb.instances],
'out_of_service_count': 0,
'in_service_count': 0,
'unknown_instance_state_count': 0,
'region': self.region
}
# status of instances behind the ELB
if info['instances']:
info['instance_health'] = [ dict(
instance_id = instance_state.instance_id,
reason_code = instance_state.reason_code,
state = instance_state.state
) for instance_state in self.elb_conn.describe_instance_health(self.name)]
else:
info['instance_health'] = []
# instance state counts: InService or OutOfService
if info['instance_health']:
for instance_state in info['instance_health']:
if instance_state['state'] == "InService":
info['in_service_count'] += 1
elif instance_state['state'] == "OutOfService":
info['out_of_service_count'] += 1
else:
info['unknown_instance_state_count'] += 1
if check_elb.health_check:
info['health_check'] = {
'target': check_elb.health_check.target,
'interval': check_elb.health_check.interval,
'timeout': check_elb.health_check.timeout,
'healthy_threshold': check_elb.health_check.healthy_threshold,
'unhealthy_threshold': check_elb.health_check.unhealthy_threshold,
}
if check_elb.listeners:
info['listeners'] = [self._api_listener_as_tuple(l)
for l in check_elb.listeners]
elif self.status == 'created':
# When creating a new ELB, listeners don't show in the
# immediately returned result, so just include the
# ones that were added
info['listeners'] = [self._listener_as_tuple(l)
for l in self.listeners]
else:
info['listeners'] = []
if self._check_attribute_support('connection_draining'):
info['connection_draining_timeout'] = self.elb_conn.get_lb_attribute(self.name, 'ConnectionDraining').timeout
if self._check_attribute_support('connecting_settings'):
info['idle_timeout'] = self.elb_conn.get_lb_attribute(self.name, 'ConnectingSettings').idle_timeout
if self._check_attribute_support('cross_zone_load_balancing'):
is_cross_az_lb_enabled = self.elb_conn.get_lb_attribute(self.name, 'CrossZoneLoadBalancing')
if is_cross_az_lb_enabled:
info['cross_az_load_balancing'] = 'yes'
else:
info['cross_az_load_balancing'] = 'no'
# return stickiness info?
return info
def _wait_for_elb_removed(self):
polling_increment_secs = 15
max_retries = (self.wait_timeout / polling_increment_secs)
status_achieved = False
for x in range(0, max_retries):
try:
result = self.elb_conn.get_all_lb_attributes(self.name)
except (boto.exception.BotoServerError, StandardError), e:
if "LoadBalancerNotFound" in e.code:
status_achieved = True
break
else:
time.sleep(polling_increment_secs)
return status_achieved
def _wait_for_elb_interface_removed(self):
polling_increment_secs = 15
max_retries = (self.wait_timeout / polling_increment_secs)
status_achieved = False
elb_interfaces = self.ec2_conn.get_all_network_interfaces(
filters={'attachment.instance-owner-id': 'amazon-elb',
'description': 'ELB {0}'.format(self.name) })
for x in range(0, max_retries):
for interface in elb_interfaces:
try:
result = self.ec2_conn.get_all_network_interfaces(interface.id)
if result == []:
status_achieved = True
break
else:
time.sleep(polling_increment_secs)
except (boto.exception.BotoServerError, StandardError), e:
if 'InvalidNetworkInterfaceID' in e.code:
status_achieved = True
break
else:
self.module.fail_json(msg=str(e))
return status_achieved
def _get_elb(self):
elbs = self.elb_conn.get_all_load_balancers()
for elb in elbs:
if self.name == elb.name:
self.status = 'ok'
return elb
def _get_elb_connection(self):
try:
return connect_to_aws(boto.ec2.elb, self.region,
**self.aws_connect_params)
except (boto.exception.NoAuthHandlerFound, AnsibleAWSError), e:
self.module.fail_json(msg=str(e))
def _get_ec2_connection(self):
try:
return connect_to_aws(boto.ec2, self.region,
**self.aws_connect_params)
except (boto.exception.NoAuthHandlerFound, StandardError), e:
self.module.fail_json(msg=str(e))
def _delete_elb(self):
# True if succeeds, exception raised if not
result = self.elb_conn.delete_load_balancer(name=self.name)
if result:
self.changed = True
self.status = 'deleted'
def _create_elb(self):
listeners = [self._listener_as_tuple(l) for l in self.listeners]
self.elb = self.elb_conn.create_load_balancer(name=self.name,
zones=self.zones,
security_groups=self.security_group_ids,
complex_listeners=listeners,
subnets=self.subnets,
scheme=self.scheme)
if self.elb:
self.changed = True
self.status = 'created'
def _create_elb_listeners(self, listeners):
"""Takes a list of listener tuples and creates them"""
# True if succeeds, exception raised if not
self.changed = self.elb_conn.create_load_balancer_listeners(self.name,
complex_listeners=listeners)
def _delete_elb_listeners(self, listeners):
"""Takes a list of listener tuples and deletes them from the elb"""
ports = [l[0] for l in listeners]
# True if succeeds, exception raised if not
self.changed = self.elb_conn.delete_load_balancer_listeners(self.name,
ports)
    def _set_elb_listeners(self):
        """
        Creates listeners specified by self.listeners; overwrites existing
        listeners on these ports; removes extraneous listeners

        Works in three buckets: listeners to add, to remove, and to keep.
        A port collision with different settings means remove-then-add,
        since ELB allows only one listener per incoming port.
        """
        listeners_to_add = []
        listeners_to_remove = []
        listeners_to_keep = []
        # Check for any listeners we need to create or overwrite
        for listener in self.listeners:
            listener_as_tuple = self._listener_as_tuple(listener)
            # First we loop through existing listeners to see if one is
            # already specified for this port
            existing_listener_found = None
            for existing_listener in self.elb.listeners:
                # Since ELB allows only one listener on each incoming port, a
                # single match on the incoming port is all we're looking for
                if existing_listener[0] == int(listener['load_balancer_port']):
                    existing_listener_found = self._api_listener_as_tuple(existing_listener)
                    break
            if existing_listener_found:
                # Does it match exactly?
                if listener_as_tuple != existing_listener_found:
                    # The ports are the same but something else is different,
                    # so we'll remove the existing one and add the new one
                    listeners_to_remove.append(existing_listener_found)
                    listeners_to_add.append(listener_as_tuple)
                else:
                    # We already have this listener, so we're going to keep it
                    listeners_to_keep.append(existing_listener_found)
            else:
                # We didn't find an existing listener, so just add the new one
                listeners_to_add.append(listener_as_tuple)
        # Check for any extraneous listeners we need to remove, if desired
        if self.purge_listeners:
            for existing_listener in self.elb.listeners:
                existing_listener_tuple = self._api_listener_as_tuple(existing_listener)
                if existing_listener_tuple in listeners_to_remove:
                    # Already queued for removal
                    continue
                if existing_listener_tuple in listeners_to_keep:
                    # Keep this one around
                    continue
                # Since we're not already removing it and we don't need to keep
                # it, let's get rid of it
                listeners_to_remove.append(existing_listener_tuple)
        if listeners_to_remove:
            self._delete_elb_listeners(listeners_to_remove)
        if listeners_to_add:
            self._create_elb_listeners(listeners_to_add)
def _api_listener_as_tuple(self, listener):
"""Adds ssl_certificate_id to ELB API tuple if present"""
base_tuple = listener.get_complex_tuple()
if listener.ssl_certificate_id and len(base_tuple) < 5:
return base_tuple + (listener.ssl_certificate_id,)
return base_tuple
def _listener_as_tuple(self, listener):
"""Formats listener as a 4- or 5-tuples, in the order specified by the
ELB API"""
# N.B. string manipulations on protocols below (str(), upper()) is to
# ensure format matches output from ELB API
listener_list = [
int(listener['load_balancer_port']),
int(listener['instance_port']),
str(listener['protocol'].upper()),
]
# Instance protocol is not required by ELB API; it defaults to match
# load balancer protocol. We'll mimic that behavior here
if 'instance_protocol' in listener:
listener_list.append(str(listener['instance_protocol'].upper()))
else:
listener_list.append(str(listener['protocol'].upper()))
if 'ssl_certificate_id' in listener:
listener_list.append(str(listener['ssl_certificate_id']))
return tuple(listener_list)
def _enable_zones(self, zones):
try:
self.elb.enable_zones(zones)
except boto.exception.BotoServerError, e:
if "Invalid Availability Zone" in e.error_message:
self.module.fail_json(msg=e.error_message)
else:
self.module.fail_json(msg="an unknown server error occurred, please try again later")
self.changed = True
def _disable_zones(self, zones):
try:
self.elb.disable_zones(zones)
except boto.exception.BotoServerError, e:
if "Invalid Availability Zone" in e.error_message:
self.module.fail_json(msg=e.error_message)
else:
self.module.fail_json(msg="an unknown server error occurred, please try again later")
self.changed = True
    def _attach_subnets(self, subnets):
        # Attach the given VPC subnets to the ELB and flag the change.
        self.elb_conn.attach_lb_to_subnets(self.name, subnets)
        self.changed = True
    def _detach_subnets(self, subnets):
        # Detach the given VPC subnets from the ELB and flag the change.
        self.elb_conn.detach_lb_from_subnets(self.name, subnets)
        self.changed = True
def _set_subnets(self):
"""Determine which subnets need to be attached or detached on the ELB"""
if self.subnets:
if self.purge_subnets:
subnets_to_detach = list(set(self.elb.subnets) - set(self.subnets))
subnets_to_attach = list(set(self.subnets) - set(self.elb.subnets))
else:
subnets_to_detach = None
subnets_to_attach = list(set(self.subnets) - set(self.elb.subnets))
if subnets_to_attach:
self._attach_subnets(subnets_to_attach)
if subnets_to_detach:
self._detach_subnets(subnets_to_detach)
def _set_zones(self):
"""Determine which zones need to be enabled or disabled on the ELB"""
if self.zones:
if self.purge_zones:
zones_to_disable = list(set(self.elb.availability_zones) -
set(self.zones))
zones_to_enable = list(set(self.zones) -
set(self.elb.availability_zones))
else:
zones_to_disable = None
zones_to_enable = list(set(self.zones) -
set(self.elb.availability_zones))
if zones_to_enable:
self._enable_zones(zones_to_enable)
# N.B. This must come second, in case it would have removed all zones
if zones_to_disable:
self._disable_zones(zones_to_disable)
def _set_security_groups(self):
if self.security_group_ids != None and set(self.elb.security_groups) != set(self.security_group_ids):
self.elb_conn.apply_security_groups_to_lb(self.name, self.security_group_ids)
self.changed = True
def _set_health_check(self):
"""Set health check values on ELB as needed"""
if self.health_check:
# This just makes it easier to compare each of the attributes
# and look for changes. Keys are attributes of the current
# health_check; values are desired values of new health_check
health_check_config = {
"target": self._get_health_check_target(),
"timeout": self.health_check['response_timeout'],
"interval": self.health_check['interval'],
"unhealthy_threshold": self.health_check['unhealthy_threshold'],
"healthy_threshold": self.health_check['healthy_threshold'],
}
update_health_check = False
# The health_check attribute is *not* set on newly created
# ELBs! So we have to create our own.
if not self.elb.health_check:
self.elb.health_check = HealthCheck()
for attr, desired_value in health_check_config.iteritems():
if getattr(self.elb.health_check, attr) != desired_value:
setattr(self.elb.health_check, attr, desired_value)
update_health_check = True
if update_health_check:
self.elb.configure_health_check(self.elb.health_check)
self.changed = True
    def _check_attribute_support(self, attr):
        # Whether the installed boto release knows this ELB attribute;
        # support for individual attributes landed in different boto versions.
        return hasattr(boto.ec2.elb.attributes.LbAttributes(), attr)
def _set_cross_az_load_balancing(self):
attributes = self.elb.get_attributes()
if self.cross_az_load_balancing:
if not attributes.cross_zone_load_balancing.enabled:
self.changed = True
attributes.cross_zone_load_balancing.enabled = True
else:
if attributes.cross_zone_load_balancing.enabled:
self.changed = True
attributes.cross_zone_load_balancing.enabled = False
self.elb_conn.modify_lb_attribute(self.name, 'CrossZoneLoadBalancing',
attributes.cross_zone_load_balancing.enabled)
def _set_access_log(self):
attributes = self.elb.get_attributes()
if self.access_logs:
if 's3_location' not in self.access_logs:
self.module.fail_json(msg='s3_location information required')
access_logs_config = {
"enabled": True,
"s3_bucket_name": self.access_logs['s3_location'],
"s3_bucket_prefix": self.access_logs.get('s3_prefix', ''),
"emit_interval": self.access_logs.get('interval', 60),
}
update_access_logs_config = False
for attr, desired_value in access_logs_config.iteritems():
if getattr(attributes.access_log, attr) != desired_value:
setattr(attributes.access_log, attr, desired_value)
update_access_logs_config = True
if update_access_logs_config:
self.elb_conn.modify_lb_attribute(self.name, 'AccessLog', attributes.access_log)
self.changed = True
elif attributes.access_log.enabled:
attributes.access_log.enabled = False
self.changed = True
self.elb_conn.modify_lb_attribute(self.name, 'AccessLog', attributes.access_log)
    def _set_connection_draining_timeout(self):
        """Enable connection draining with the requested timeout, or disable
        draining entirely when no timeout was requested."""
        attributes = self.elb.get_attributes()
        if self.connection_draining_timeout is not None:
            # Only report a change when draining was off or the timeout
            # differs; the value is pushed to AWS either way.
            if not attributes.connection_draining.enabled or \
                    attributes.connection_draining.timeout != self.connection_draining_timeout:
                self.changed = True
            attributes.connection_draining.enabled = True
            attributes.connection_draining.timeout = self.connection_draining_timeout
            self.elb_conn.modify_lb_attribute(self.name, 'ConnectionDraining', attributes.connection_draining)
        else:
            # No timeout requested: turn draining off if it is currently on.
            if attributes.connection_draining.enabled:
                self.changed = True
                attributes.connection_draining.enabled = False
                self.elb_conn.modify_lb_attribute(self.name, 'ConnectionDraining', attributes.connection_draining)
    def _set_idle_timeout(self):
        """Sync the ConnectingSettings idle timeout with the request."""
        attributes = self.elb.get_attributes()
        if self.idle_timeout is not None:
            # Flag a change only when the value actually differs; the modify
            # call is issued either way (a same-value modify is a no-op).
            if attributes.connecting_settings.idle_timeout != self.idle_timeout:
                self.changed = True
            attributes.connecting_settings.idle_timeout = self.idle_timeout
            self.elb_conn.modify_lb_attribute(self.name, 'ConnectingSettings', attributes.connecting_settings)
    def _policy_name(self, policy_type):
        # Derive a deterministic policy name from this module's file name.
        # Underscores are replaced with dashes (presumably because ELB
        # policy names disallow them - TODO confirm), yielding names like
        # 'ec2-elb-lb.py-LBCookieStickinessPolicyType'.
        return __file__.split('/')[-1].replace('_', '-') + '-' + policy_type
    def _create_policy(self, policy_param, policy_meth, policy):
        # Dispatch to the boto connection method named by policy_meth,
        # e.g. 'create_lb_cookie_stickiness_policy'.
        getattr(self.elb_conn, policy_meth )(policy_param, self.elb.name, policy)
    def _delete_policy(self, elb_name, policy):
        # Remove a named policy from the given load balancer.
        self.elb_conn.delete_lb_policy(elb_name, policy)
    def _update_policy(self, policy_param, policy_meth, policy_attr, policy):
        # ELB policies cannot be modified in place: delete then re-create.
        # NOTE(review): policy_attr is unused here - confirm before removing
        # it, callers pass it positionally.
        self._delete_policy(self.elb.name, policy)
        self._create_policy(policy_param, policy_meth, policy)
def _set_listener_policy(self, listeners_dict, policy=[]):
for listener_port in listeners_dict:
if listeners_dict[listener_port].startswith('HTTP'):
self.elb_conn.set_lb_policies_of_listener(self.elb.name, listener_port, policy)
    def _set_stickiness_policy(self, elb_info, listeners_dict, policy, **policy_attrs):
        """Create or refresh a stickiness policy and attach it to listeners.

        If a policy with the expected name already exists it is replaced
        only when its parameter (expiration period or cookie name) differs
        from the requested value; otherwise a new policy is created (the
        for/else below runs the else branch when no name matched).
        """
        for p in getattr(elb_info.policies, policy_attrs['attr']):
            if str(p.__dict__['policy_name']) == str(policy[0]):
                # Compare as strings; a param_value of None is treated as 0.
                if str(p.__dict__[policy_attrs['dict_key']]) != str(policy_attrs['param_value'] or 0):
                    self._set_listener_policy(listeners_dict)
                    self._update_policy(policy_attrs['param_value'], policy_attrs['method'], policy_attrs['attr'], policy[0])
                    self.changed = True
                break
        else:
            self._create_policy(policy_attrs['param_value'], policy_attrs['method'], policy[0])
            self.changed = True
        self._set_listener_policy(listeners_dict, policy)
def select_stickiness_policy(self):
if self.stickiness:
if 'cookie' in self.stickiness and 'expiration' in self.stickiness:
self.module.fail_json(msg='\'cookie\' and \'expiration\' can not be set at the same time')
elb_info = self.elb_conn.get_all_load_balancers(self.elb.name)[0]
d = {}
for listener in elb_info.listeners:
d[listener[0]] = listener[2]
listeners_dict = d
if self.stickiness['type'] == 'loadbalancer':
policy = []
policy_type = 'LBCookieStickinessPolicyType'
if self.module.boolean(self.stickiness['enabled']) == True:
if 'expiration' not in self.stickiness:
self.module.fail_json(msg='expiration must be set when type is loadbalancer')
expiration = self.stickiness['expiration'] if self.stickiness['expiration'] is not 0 else None
policy_attrs = {
'type': policy_type,
'attr': 'lb_cookie_stickiness_policies',
'method': 'create_lb_cookie_stickiness_policy',
'dict_key': 'cookie_expiration_period',
'param_value': expiration
}
policy.append(self._policy_name(policy_attrs['type']))
self._set_stickiness_policy(elb_info, listeners_dict, policy, **policy_attrs)
elif self.module.boolean(self.stickiness['enabled']) == False:
if len(elb_info.policies.lb_cookie_stickiness_policies):
if elb_info.policies.lb_cookie_stickiness_policies[0].policy_name == self._policy_name(policy_type):
self.changed = True
else:
self.changed = False
self._set_listener_policy(listeners_dict)
self._delete_policy(self.elb.name, self._policy_name(policy_type))
elif self.stickiness['type'] == 'application':
policy = []
policy_type = 'AppCookieStickinessPolicyType'
if self.module.boolean(self.stickiness['enabled']) == True:
if 'cookie' not in self.stickiness:
self.module.fail_json(msg='cookie must be set when type is application')
policy_attrs = {
'type': policy_type,
'attr': 'app_cookie_stickiness_policies',
'method': 'create_app_cookie_stickiness_policy',
'dict_key': 'cookie_name',
'param_value': self.stickiness['cookie']
}
policy.append(self._policy_name(policy_attrs['type']))
self._set_stickiness_policy(elb_info, listeners_dict, policy, **policy_attrs)
elif self.module.boolean(self.stickiness['enabled']) == False:
if len(elb_info.policies.app_cookie_stickiness_policies):
if elb_info.policies.app_cookie_stickiness_policies[0].policy_name == self._policy_name(policy_type):
self.changed = True
self._set_listener_policy(listeners_dict)
self._delete_policy(self.elb.name, self._policy_name(policy_type))
else:
self._set_listener_policy(listeners_dict)
def _get_health_check_target(self):
"""Compose target string from healthcheck parameters"""
protocol = self.health_check['ping_protocol'].upper()
path = ""
if protocol in ['HTTP', 'HTTPS'] and 'ping_path' in self.health_check:
path = self.health_check['ping_path']
return "%s:%s%s" % (protocol, self.health_check['ping_port'], path)
def main():
    """Module entry point: parse arguments and converge the ELB state."""
    argument_spec = ec2_argument_spec()
    argument_spec.update(dict(
        state={'required': True, 'choices': ['present', 'absent']},
        name={'required': True},
        listeners={'default': None, 'required': False, 'type': 'list'},
        purge_listeners={'default': True, 'required': False, 'type': 'bool'},
        zones={'default': None, 'required': False, 'type': 'list'},
        purge_zones={'default': False, 'required': False, 'type': 'bool'},
        security_group_ids={'default': None, 'required': False, 'type': 'list'},
        security_group_names={'default': None, 'required': False, 'type': 'list'},
        health_check={'default': None, 'required': False, 'type': 'dict'},
        subnets={'default': None, 'required': False, 'type': 'list'},
        purge_subnets={'default': False, 'required': False, 'type': 'bool'},
        scheme={'default': 'internet-facing', 'required': False},
        connection_draining_timeout={'default': None, 'required': False},
        idle_timeout={'default': None, 'required': False},
        cross_az_load_balancing={'default': None, 'required': False},
        stickiness={'default': None, 'required': False, 'type': 'dict'},
        access_logs={'default': None, 'required': False, 'type': 'dict'},
        wait={'default': False, 'type': 'bool', 'required': False},
        wait_timeout={'default': 60, 'type': 'int', 'required': False}
    )
    )
    module = AnsibleModule(
        argument_spec=argument_spec,
        mutually_exclusive=[['security_group_ids', 'security_group_names']]
    )
    if not HAS_BOTO:
        module.fail_json(msg='boto required for this module')
    region, ec2_url, aws_connect_params = get_aws_connection_info(module)
    if not region:
        module.fail_json(msg="Region must be specified as a parameter, in EC2_REGION or AWS_REGION environment variables or in boto configuration file")
    name = module.params['name']
    state = module.params['state']
    listeners = module.params['listeners']
    purge_listeners = module.params['purge_listeners']
    zones = module.params['zones']
    purge_zones = module.params['purge_zones']
    security_group_ids = module.params['security_group_ids']
    security_group_names = module.params['security_group_names']
    health_check = module.params['health_check']
    access_logs = module.params['access_logs']
    subnets = module.params['subnets']
    purge_subnets = module.params['purge_subnets']
    scheme = module.params['scheme']
    connection_draining_timeout = module.params['connection_draining_timeout']
    idle_timeout = module.params['idle_timeout']
    cross_az_load_balancing = module.params['cross_az_load_balancing']
    stickiness = module.params['stickiness']
    wait = module.params['wait']
    wait_timeout = module.params['wait_timeout']
    if state == 'present' and not listeners:
        module.fail_json(msg="At least one port is required for ELB creation")
    if state == 'present' and not (zones or subnets):
        module.fail_json(msg="At least one availability zone or subnet is required for ELB creation")
    if wait_timeout > 600:
        module.fail_json(msg='wait_timeout maximum is 600 seconds')
    if security_group_names:
        # Resolve security group names to IDs before creating the manager.
        security_group_ids = []
        try:
            ec2 = ec2_connect(module)
            grp_details = ec2.get_all_security_groups()
            for group_name in security_group_names:
                if isinstance(group_name, basestring):
                    group_name = [group_name]
                group_id = [str(grp.id) for grp in grp_details if str(grp.name) in group_name]
                security_group_ids.extend(group_id)
        # 'except X as e' works on Python 2.6+ and Python 3; the old
        # comma form is a SyntaxError under Python 3.
        except boto.exception.NoAuthHandlerFound as e:
            module.fail_json(msg=str(e))
    elb_man = ElbManager(module, name, listeners, purge_listeners, zones,
                         purge_zones, security_group_ids, health_check,
                         subnets, purge_subnets, scheme,
                         connection_draining_timeout, idle_timeout,
                         cross_az_load_balancing,
                         access_logs, stickiness, wait, wait_timeout,
                         region=region, **aws_connect_params)
    # check for unsupported attributes for this version of boto
    if cross_az_load_balancing and not elb_man._check_attribute_support('cross_zone_load_balancing'):
        module.fail_json(msg="You must install boto >= 2.18.0 to use the cross_az_load_balancing attribute")
    if connection_draining_timeout and not elb_man._check_attribute_support('connection_draining'):
        module.fail_json(msg="You must install boto >= 2.28.0 to use the connection_draining_timeout attribute")
    if idle_timeout and not elb_man._check_attribute_support('connecting_settings'):
        module.fail_json(msg="You must install boto >= 2.33.0 to use the idle_timeout attribute")
    if state == 'present':
        elb_man.ensure_ok()
    elif state == 'absent':
        elb_man.ensure_gone()
    ansible_facts = {'ec2_elb': 'info'}
    ec2_facts_result = dict(changed=elb_man.changed,
                            elb=elb_man.get_info(),
                            ansible_facts=ansible_facts)
    module.exit_json(**ec2_facts_result)
# import module snippets
from ansible.module_utils.basic import *
from ansible.module_utils.ec2 import *
# Run the module entry point only when executed directly by Ansible.
if __name__ == '__main__':
    main()
|
alxnov/ansible-modules-core
|
cloud/amazon/ec2_elb_lb.py
|
Python
|
gpl-3.0
| 43,768
|
[
"Dalton"
] |
739b6f25d8e08ca3826a8e666e802afa52e46c44cae10cb9b3f01f88f48bbfb0
|
# -*- coding: utf-8 -*-
#
# hpc_benchmark.py
#
# This file is part of NEST.
#
# Copyright (C) 2004 The NEST Initiative
#
# NEST is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 2 of the License, or
# (at your option) any later version.
#
# NEST is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with NEST. If not, see <http://www.gnu.org/licenses/>.
"""
Random balanced network HPC benchmark
-------------------------------------
This script produces a balanced random network of `scale*11250` neurons in
which the excitatory-excitatory neurons exhibit STDP with
multiplicative depression and power-law potentiation. A mutual
equilibrium is obtained between the activity dynamics (low rate in
asynchronous irregular regime) and the synaptic weight distribution
(unimodal). The number of incoming connections per neuron is fixed
and independent of network size (indegree=11250).
This is the standard network investigated in [1]_, [2]_, [3]_.
A note on scaling
~~~~~~~~~~~~~~~~~
This benchmark was originally developed for very large-scale simulations on
supercomputers with more than 1 million neurons in the network and
11.250 incoming synapses per neuron. For such large networks, synaptic input
to a single neuron will be little correlated across inputs and network
activity will remain stable over long periods of time.
The original network size corresponds to a scale parameter of 100 or more.
In order to make it possible to test this benchmark script on desktop
computers, the scale parameter is set to 1 below, while the number of
11.250 incoming synapses per neuron is retained. In this limit, correlations
in input to neurons are large and will lead to increasing synaptic weights.
Over time, network dynamics will therefore become unstable and all neurons
in the network will fire in synchrony, leading to extremely slow simulation
speeds.
Therefore, the presimulation time is reduced to 50 ms below and the
simulation time to 250 ms, while we usually use 100 ms presimulation and
1000 ms simulation time.
For meaningful use of this benchmark, you should use a scale > 10 and check
that the firing rate reported at the end of the benchmark is below 10 spikes
per second.
References
~~~~~~~~~~
.. [1] Morrison A, Aertsen A, Diesmann M (2007). Spike-timing-dependent
plasticity in balanced random networks. Neural Comput 19(6):1437-67
.. [2] Helias et al (2012). Supercomputers ready for use as discovery machines
for neuroscience. Front. Neuroinform. 6:26
.. [3] Kunkel et al (2014). Spiking network simulation code for petascale
computers. Front. Neuroinform. 8:78
"""
import numpy as np
import os
import sys
import time
import scipy.special as sp
import nest
import nest.raster_plot
# Severity levels passed to nest.message() for informational / error output.
M_INFO = 10
M_ERROR = 30

###############################################################################
# Parameter section
# Define all relevant parameters: changes should be made here

params = {
    'nvp': 1,               # total number of virtual processes
    'scale': 1.,            # scaling factor of the network size
                            # total network size = scale*11250 neurons
    'simtime': 250.,        # total simulation time in ms
    'presimtime': 50.,      # simulation time until reaching equilibrium
    'dt': 0.1,              # simulation step
    'record_spikes': True,  # switch to record spikes of excitatory
                            # neurons to file
    'path_name': '.',       # path where all files will have to be written
    'log_file': 'log',      # naming scheme for the log files
}
def convert_synapse_weight(tau_m, tau_syn, C_m):
    """Return the factor converting a PSP amplitude in mV into pA.

    Specific to the leaky integrate-and-fire neuron model with
    alpha-shaped postsynaptic currents: the factor is the inverse of the
    peak membrane potential evoked by a unit-amplitude alpha current in
    a neuron at rest.
    """
    # Time of the PSP maximum, obtained from the transcendental peak
    # condition via the Lambert W_{-1} function.
    ratio = tau_m / tau_syn
    rate_diff = 1.0 / tau_syn - 1.0 / tau_m
    t_peak = 1.0 / rate_diff * (-lambertwm1(-np.exp(-1.0 / ratio) / ratio).real - 1.0 / ratio)
    # Peak membrane potential for a unit synaptic weight.
    v_peak = np.exp(1.0) / (tau_syn * C_m * rate_diff) * (
        (np.exp(-t_peak / tau_m) - np.exp(-t_peak / tau_syn)) /
        rate_diff - t_peak * np.exp(-t_peak / tau_syn))
    return 1. / v_peak
###############################################################################
# For compatibility with earlier benchmarks, we require a rise time of
# ``t_rise = 1.700759 ms`` and we choose ``tau_syn`` to achieve this for given
# ``tau_m``. This requires numerical inversion of the expression for ``t_rise``
# in ``convert_synapse_weight``. We computed this value once and hard-code
# it here.
tau_syn = 0.32582722403722841  # synaptic time constant (ms)

brunel_params = {
    'NE': int(9000 * params['scale']),  # number of excitatory neurons
    'NI': int(2250 * params['scale']),  # number of inhibitory neurons
    'Nrec': 1000,  # number of neurons to record spikes from
    'model_params': {  # Set variables for iaf_psc_alpha
        'E_L': 0.0,  # Resting membrane potential(mV)
        'C_m': 250.0,  # Capacity of the membrane(pF)
        'tau_m': 10.0,  # Membrane time constant(ms)
        't_ref': 0.5,  # Duration of refractory period(ms)
        'V_th': 20.0,  # Threshold(mV)
        'V_reset': 0.0,  # Reset Potential(mV)
        # time const. postsynaptic excitatory currents(ms)
        'tau_syn_ex': tau_syn,
        # time const. postsynaptic inhibitory currents(ms)
        'tau_syn_in': tau_syn,
        'tau_minus': 30.0,  # time constant for STDP(depression)
        # V can be randomly initialized see below
        'V_m': 5.7  # mean value of membrane potential
    },
    ####################################################################
    # Note that Kunkel et al. (2014) report different values. The values
    # in the paper were used for the benchmarks on K, the values given
    # here were used for the benchmark on JUQUEEN.
    'randomize_Vm': True,
    'mean_potential': 5.7,   # mean of the initial membrane potential (mV)
    'sigma_potential': 7.2,  # std dev of the initial membrane potential (mV)
    'delay': 1.5,  # synaptic delay, all connections(ms)
    # synaptic weight
    'JE': 0.14,  # peak of EPSP
    'sigma_w': 3.47,  # standard dev. of E->E synapses(pA)
    'g': -5.0,  # relative weight of inhibitory synapses
    'stdp_params': {
        'delay': 1.5,  # delay of the plastic E->E synapses (ms)
        'alpha': 0.0513,  # STDP asymmetry parameter
        'lambda': 0.1,  # STDP step size
        'mu': 0.4,  # STDP weight dependence exponent(potentiation)
        'tau_plus': 15.0,  # time constant for potentiation
    },
    'eta': 1.685,  # scaling of external stimulus
    'filestem': params['path_name']  # directory for spike recorder output
}
###############################################################################
# Function Section
def build_network(logger):
    """Build the complete benchmark network.

    Sets global kernel parameters, creates the excitatory and inhibitory
    populations plus the Poisson stimulus, connects all projections
    (E->E via plastic STDP synapses, all others static) and optionally
    attaches a spike recorder to the first ``Nrec`` local excitatory
    neurons. Timing and memory figures are written through *logger*.

    Parameters
    ----------
    logger : Logger
        Open Logger instance used to record build time and memory usage.

    Returns
    -------
    The spike recorder NodeCollection when spike recording is enabled,
    otherwise None.
    """
    tic = time.time()  # start timer on construction
    # unpack a few variables for convenience
    NE = brunel_params['NE']
    NI = brunel_params['NI']
    model_params = brunel_params['model_params']
    stdp_params = brunel_params['stdp_params']
    # set global kernel parameters
    nest.SetKernelStatus({
        'total_num_virtual_procs': params['nvp'],
        'resolution': params['dt'],
        'overwrite_files': True})
    nest.message(M_INFO, 'build_network', 'Creating excitatory population.')
    E_neurons = nest.Create('iaf_psc_alpha', NE, params=model_params)
    nest.message(M_INFO, 'build_network', 'Creating inhibitory population.')
    I_neurons = nest.Create('iaf_psc_alpha', NI, params=model_params)
    if brunel_params['randomize_Vm']:
        # fixed typo in the log message ("Randomzing")
        nest.message(M_INFO, 'build_network',
                     'Randomizing membrane potentials.')
        random_vm = nest.random.normal(brunel_params['mean_potential'],
                                       brunel_params['sigma_potential'])
        nest.GetLocalNodeCollection(E_neurons).V_m = random_vm
        nest.GetLocalNodeCollection(I_neurons).V_m = random_vm
    # number of incoming excitatory connections
    CE = int(1. * NE / params['scale'])
    # number of incoming inhibitory connections
    CI = int(1. * NI / params['scale'])
    nest.message(M_INFO, 'build_network',
                 'Creating excitatory stimulus generator.')
    # Convert synapse weight from mV to pA
    conversion_factor = convert_synapse_weight(
        model_params['tau_m'], model_params['tau_syn_ex'], model_params['C_m'])
    JE_pA = conversion_factor * brunel_params['JE']
    # External rate needed to drive a neuron to threshold, scaled by eta.
    nu_thresh = model_params['V_th'] / (
        CE * model_params['tau_m'] / model_params['C_m'] *
        JE_pA * np.exp(1.) * tau_syn)
    nu_ext = nu_thresh * brunel_params['eta']
    E_stimulus = nest.Create('poisson_generator', 1, {
        'rate': nu_ext * CE * 1000.})
    nest.message(M_INFO, 'build_network',
                 'Creating excitatory spike recorder.')
    if params['record_spikes']:
        recorder_label = os.path.join(
            brunel_params['filestem'],
            'alpha_' + str(stdp_params['alpha']) + '_spikes')
        E_recorder = nest.Create('spike_recorder', params={
            'record_to': 'ascii',
            'label': recorder_label
        })
    BuildNodeTime = time.time() - tic
    logger.log(str(BuildNodeTime) + ' # build_time_nodes')
    logger.log(str(memory_thisjob()) + ' # virt_mem_after_nodes')
    tic = time.time()
    nest.SetDefaults('static_synapse_hpc', {'delay': brunel_params['delay']})
    nest.CopyModel('static_synapse_hpc', 'syn_ex',
                   {'weight': JE_pA})
    nest.CopyModel('static_synapse_hpc', 'syn_in',
                   {'weight': brunel_params['g'] * JE_pA})
    stdp_params['weight'] = JE_pA
    nest.SetDefaults('stdp_pl_synapse_hom_hpc', stdp_params)
    nest.message(M_INFO, 'build_network', 'Connecting stimulus generators.')
    # Connect Poisson generator to neuron
    nest.Connect(E_stimulus, E_neurons, {'rule': 'all_to_all'},
                 {'synapse_model': 'syn_ex'})
    nest.Connect(E_stimulus, I_neurons, {'rule': 'all_to_all'},
                 {'synapse_model': 'syn_ex'})
    nest.message(M_INFO, 'build_network',
                 'Connecting excitatory -> excitatory population.')
    nest.Connect(E_neurons, E_neurons,
                 {'rule': 'fixed_indegree', 'indegree': CE,
                  'allow_autapses': False, 'allow_multapses': True},
                 {'synapse_model': 'stdp_pl_synapse_hom_hpc'})
    nest.message(M_INFO, 'build_network',
                 'Connecting inhibitory -> excitatory population.')
    nest.Connect(I_neurons, E_neurons,
                 {'rule': 'fixed_indegree', 'indegree': CI,
                  'allow_autapses': False, 'allow_multapses': True},
                 {'synapse_model': 'syn_in'})
    nest.message(M_INFO, 'build_network',
                 'Connecting excitatory -> inhibitory population.')
    nest.Connect(E_neurons, I_neurons,
                 {'rule': 'fixed_indegree', 'indegree': CE,
                  'allow_autapses': False, 'allow_multapses': True},
                 {'synapse_model': 'syn_ex'})
    nest.message(M_INFO, 'build_network',
                 'Connecting inhibitory -> inhibitory population.')
    nest.Connect(I_neurons, I_neurons,
                 {'rule': 'fixed_indegree', 'indegree': CI,
                  'allow_autapses': False, 'allow_multapses': True},
                 {'synapse_model': 'syn_in'})
    if params['record_spikes']:
        if params['nvp'] != 1:
            local_neurons = nest.GetLocalNodeCollection(E_neurons)
            # GetLocalNodeCollection returns a stepped composite NodeCollection, which
            # cannot be sliced. In order to allow slicing it later on, we're creating a
            # new regular NodeCollection from the plain node IDs.
            local_neurons = nest.NodeCollection(local_neurons.tolist())
        else:
            local_neurons = E_neurons
        if len(local_neurons) < brunel_params['Nrec']:
            nest.message(
                M_ERROR, 'build_network',
                """Spikes can only be recorded from local neurons, but the
                number of local neurons is smaller than the number of neurons
                spikes should be recorded from. Aborting the simulation!""")
            exit(1)
        nest.message(M_INFO, 'build_network', 'Connecting spike recorders.')
        nest.Connect(local_neurons[:brunel_params['Nrec']], E_recorder,
                     'all_to_all', 'static_synapse_hpc')
    # read out time used for building
    BuildEdgeTime = time.time() - tic
    logger.log(str(BuildEdgeTime) + ' # build_edge_time')
    logger.log(str(memory_thisjob()) + ' # virt_mem_after_edges')
    return E_recorder if params['record_spikes'] else None
def run_simulation():
    """Performs a simulation, including network construction"""
    # open log file
    with Logger(params['log_file']) as logger:
        nest.ResetKernel()
        nest.set_verbosity(M_INFO)
        logger.log(str(memory_thisjob()) + ' # virt_mem_0')
        sr = build_network(logger)
        tic = time.time()
        # pre-simulation: let the network settle toward equilibrium
        nest.Simulate(params['presimtime'])
        PreparationTime = time.time() - tic
        logger.log(str(memory_thisjob()) + ' # virt_mem_after_presim')
        logger.log(str(PreparationTime) + ' # presim_time')
        tic = time.time()
        # the actual timed benchmark run
        nest.Simulate(params['simtime'])
        SimCPUTime = time.time() - tic
        logger.log(str(memory_thisjob()) + ' # virt_mem_after_sim')
        logger.log(str(SimCPUTime) + ' # sim_time')
        if params['record_spikes']:
            logger.log(str(compute_rate(sr)) + ' # average rate')
        print(nest.GetKernelStatus())
def compute_rate(sr):
    """Estimate the average firing rate from locally recorded spikes.

    The estimate is based on the number of events seen by the spike
    recorder *sr*, the number of recorded neurons and the simulated
    time (ms -> Hz conversion via the factor 1e3). Since devices are
    also counted locally, the true rate is usually underestimated.
    """
    spike_count = sr.n_events
    recorded_neurons = brunel_params['Nrec']
    duration_ms = params['simtime']
    return 1. * spike_count / (recorded_neurons * duration_ms) * 1e3
def memory_thisjob():
    """Wrapper to obtain current memory usage"""
    # Calls into the SLI interpreter: 'memory_thisjob' pushes the current
    # memory usage of this process onto the SLI stack, spp() pops it back
    # into Python.
    nest.ll_api.sr('memory_thisjob')
    return nest.ll_api.spp()
def lambertwm1(x):
    """Evaluate the Lambert W function on the W_{-1} branch for x < 0.

    Mimics GSL's ``gsl_sf_lambert_Wm1`` using scipy: for non-negative
    arguments, where the W_{-1} branch is undefined, the principal
    branch (k=0) is evaluated instead.
    """
    branch = -1 if x < 0 else 0
    return sp.lambertw(x, k=branch).real
class Logger(object):
    """Logger context manager used to properly log memory and timing
    information from network simulations.

    Each MPI rank below ``max_rank_log`` writes to its own numbered
    ``<file_name>_<rank>.dat`` file; ranks below ``max_rank_cout``
    additionally echo every line to stdout and stderr.
    """
    def __init__(self, file_name):
        # copy output to cout for ranks 0..max_rank_cout-1
        self.max_rank_cout = 5
        # write to log files for ranks 0..max_rank_log-1
        self.max_rank_log = 30
        self.line_counter = 0
        self.file_name = file_name
    def __enter__(self):
        if nest.Rank() < self.max_rank_log:
            # convert rank to string, prepend 0 if necessary to make
            # numbers equally wide for all ranks
            rank = '{:0' + str(len(str(self.max_rank_log))) + '}'
            fn = '{fn}_{rank}.dat'.format(
                fn=self.file_name, rank=rank.format(nest.Rank()))
            self.f = open(fn, 'w')
        return self
    def log(self, value):
        """Write one '<line counter> <rank> <value>' line to the log."""
        if nest.Rank() < self.max_rank_log:
            line = '{lc} {rank} {value} \n'.format(
                lc=self.line_counter, rank=nest.Rank(), value=value)
            self.f.write(line)
            self.line_counter += 1
        if nest.Rank() < self.max_rank_cout:
            print(str(nest.Rank()) + ' ' + value + '\n', file=sys.stdout)
            print(str(nest.Rank()) + ' ' + value + '\n', file=sys.stderr)
    def __exit__(self, exc_type, exc_val, traceback):
        # self.f only exists on ranks that actually opened a file
        if nest.Rank() < self.max_rank_log:
            self.f.close()
# Run the benchmark only when executed as a script, not on import.
if __name__ == '__main__':
    run_simulation()
|
lekshmideepu/nest-simulator
|
pynest/examples/hpc_benchmark.py
|
Python
|
gpl-2.0
| 16,266
|
[
"NEURON"
] |
c6a581e03f985fd827e106e06ee8380f727748fe2bf357c6141ef2c5fa5c264b
|
#pylint: disable=missing-docstring
#* This file is part of the MOOSE framework
#* https://www.mooseframework.org
#*
#* All rights reserved, see COPYRIGHT for full restrictions
#* https://github.com/idaholab/moose/blob/master/COPYRIGHT
#*
#* Licensed under LGPL 2.1, please see LICENSE for details
#* https://www.gnu.org/licenses/lgpl-2.1.html
import copy
import vtk
import mooseutils
from .ChiggerSourceBase import ChiggerSourceBase
class ChiggerFilterSourceBase(ChiggerSourceBase):
    """
    A base class for creating "source" objects (in VTK something that needs a vtkActor) that
    require additional input into the mapper and are capable of accepting filters.

    This class adds two main items:
    1. A getVTKSource method is provided; this method should provide a VTK object that will be
       connected to the mapper or chain of filters (see 2).
    2. Defines a method for adding filters and controlling the types and order in which they are
       applied, see ExodusSource for example.

    Inputs:
        vtkactor_type: The VTK actor type to build, must be an instance of VTKACTOR_TYPE
        vtkmapper_type: The VTK mapper type to build, must be an instance of VTKMAPPER_TYPE
        **kwargs: The key, value options for this object.
    """
    # The base class actor/mapper to which this object's ownership is restricted
    VTKACTOR_TYPE = vtk.vtkProp
    VTKMAPPER_TYPE = vtk.vtkAbstractMapper

    # The list of filter types allowed, in the order they should be connected
    FILTER_TYPES = []

    @staticmethod
    def getOptions():
        """Return the base options augmented with the 'filters' list."""
        opt = ChiggerSourceBase.getOptions()
        opt.add('filters', [], "A list of Filter objects to apply to this mapper.")
        return opt

    def __init__(self, *args, **kwargs):
        super(ChiggerFilterSourceBase, self).__init__(*args, **kwargs)
        self._filters = []           # filters currently connected to the pipeline
        self._required_filters = []  # filters subclasses always apply

    def getVTKSource(self):
        """
        Return the "source" vtk object. (abstract)

        Derived classes must override this method. The VTK object returned from this function
        will be connected to the first filter, if any exist, or the vtkAbstractMapper object.
        See the 'update' method for this class for how the connections are made.
        """
        # Fixed typo in the exception text: "filers" -> "filters".
        raise mooseutils.MooseException('The {}."getSource()" method must be overridden by your '
                                       'mapper object and return the source vtk object to connect '
                                       'to the filters and mapper.'.format(self.__class__.__name__))

    def getFilters(self):
        """
        Return the list of filter objects.
        """
        return self._filters

    def needsUpdate(self):
        """
        Return True if this object or any of its filters needs to be updated.
        """
        changed = [super(ChiggerFilterSourceBase, self).needsUpdate()]
        for f in self._filters:
            changed.append(f.needsUpdate())
        return any(changed)

    def update(self, **kwargs):
        """
        Updates the object by connecting the VTK objects. (override)

        Inputs:
            see ChiggerSourceBase
        """
        super(ChiggerFilterSourceBase, self).update(**kwargs)
        self.__connectFilters()

        # Initialize and update filters
        for f in self._filters:
            if f.needsInitialize():
                f.initializeFilter(self)
            if f.needsUpdate():
                f.update()

    def __connectFilters(self):
        """
        Helper function for connecting filters to the vtkMapper object.
        """
        def debug(src, fltr):
            """
            Inline function for debug messages.
            """
            mooseutils.mooseDebug('{} --> {}'.format(type(src).__name__, type(fltr).__name__),
                                  color='GREEN')

        # Create a list of filters to apply to the VTK pipeline, this is done by
        # combining the required filters with the 'filters' options. This combined list
        # is then sorted based on the list provided in FILTER_TYPES.
        filters = []
        filters_in = copy.copy(self._required_filters)  # shallow copy (don't modify required list)
        if self.isOptionValid('filters'):
            filters_in += self.getOption('filters')
        for f in filters_in:
            for i, order in enumerate(self.FILTER_TYPES):
                if isinstance(f, order):
                    filters.append((f, i))
        self._filters = [f[0] for f in sorted(filters, key=lambda x: x[1])]

        # Connect the filters, if any exist: source -> filter chain -> mapper
        if self._filters:
            debug(self.getVTKSource(), self._filters[0].getVTKFilter())
            self._filters[0].getVTKFilter().SetInputConnection(self.getVTKSource().GetOutputPort())
            for i in range(1, len(self._filters)):
                debug(self._filters[i-1].getVTKFilter(), self._filters[i].getVTKFilter())
                f = self._filters[i-1].getVTKFilter().GetOutputPort()
                self._filters[i].getVTKFilter().SetInputConnection(f)
            if self._vtkmapper:
                debug(self._filters[-1].getVTKFilter(), self._vtkmapper)
                self._vtkmapper.SetInputConnection(self._filters[-1].getVTKFilter().GetOutputPort())
        elif self._vtkmapper:
            # No filters: connect the source directly to the mapper.
            debug(self.getVTKSource(), self._vtkmapper)
            self._vtkmapper.SetInputConnection(self.getVTKSource().GetOutputPort())
|
harterj/moose
|
python/chigger/base/ChiggerFilterSourceBase.py
|
Python
|
lgpl-2.1
| 5,420
|
[
"MOOSE",
"VTK"
] |
7d84f0bd94682fca115f94539f2ec9f4b59e60949772ea16507bbe7c9e13d606
|
import re
# Captures the VEP CSQ sub-field format from the INFO metaheader
# Description, e.g. "Format: Allele|Consequence|..."
_csq_format_re = re.compile(r'.*Format:\s*((\S+\|)*\S+)')
# for capturing VEP CSQ format in Description field of metaheader
_ann_format_re = re.compile(
    r".*Functional annotations:\s*'(([^\|]+\|)*[^\|]+)'")
# for capturing SnpEff ANN format in Description field of metaheader
# INFO field names commonly used for VEP-style / SnpEff-style annotations
_common_csq_fields = ['CSQ', 'BCSQ', 'CQ', 'vep']
_common_ann_fields = ['ANN', 'EFF']
# Keys that must be supplied when adding each kind of header field
_required_keys = {'info': ['number', 'type', 'description'],
                  'format': ['number', 'type', 'description'],
                  'filter': ['description'],
                  'alt': ['description']}
# Maps our lower-case field-type names to pysam VariantHeader attributes
_field2pysam = {'info': 'info',
                'format': 'formats',
                'filter': 'filters',
                'alt': 'alts'}
class VcfHeader(object):
    ''' Header class storing metadata and sample information for a vcf '''

    __slots__ = ['vcfreader', 'header', '__csq_label', '__csq_fields',
                 '__ann_label', '__ann_fields']

    def __init__(self, vcfreader):
        self.vcfreader = vcfreader
        self.header = self.vcfreader.variant_file.header
        # lazily-populated caches used by the csq_*/ann_* properties
        self.__csq_fields = None
        self.__csq_label = None
        self.__ann_fields = None
        self.__ann_label = None

    @property
    def formats(self):
        '''pysam header metadata for FORMAT fields.'''
        return self.header.formats

    @property
    def info(self):
        '''pysam header metadata for INFO fields.'''
        return self.header.info

    @property
    def filters(self):
        '''pysam header metadata for FILTER fields.'''
        return self.header.filters

    @property
    def samples(self):
        '''Sample names from the VCF header.'''
        return self.header.samples

    @property
    def csq_label(self):
        '''
        String labelling the INFO field label of VEP consequence
        annotations. Will raise a KeyError if access is attempted
        but no VEP CSQ-like field (CSQ/BCSQ/CQ/vep) is present in
        the header.
        '''
        if self.__csq_label is None:
            # Accessing csq_fields also detects and caches the label.
            self.csq_fields
        return self.__csq_label

    @csq_label.setter
    def csq_label(self, c):
        self.__csq_label = c

    @property
    def csq_fields(self):
        '''
        A list of CSQ field names in the order they are represented
        in CSQ INFO field entries. Set to None on initialization.
        Will raise a KeyError if access is attempted but no VEP
        CSQ, ANN, BCSQ or CQ field is present in the header.
        '''
        if self.__csq_fields is None:
            if self.__csq_label is None:
                # Auto-detect which of the common CSQ field names is used.
                csq = None
                for x in _common_csq_fields:
                    if x in self.info:
                        csq = x
                        break
                if csq is None:
                    raise KeyError("No common CSQ fields found in INFO " +
                                   "header - unable to retrieve consequence " +
                                   "fields.")
                self.csq_label = csq
            else:
                csq = self.__csq_label
            csq_header = self.info[csq]
            match = _csq_format_re.match(csq_header.description)
            if match:
                self.__csq_fields = match.group(1).split('|')
            else:
                raise KeyError("Could not parse {} Format in ".format(csq)
                               + "header. Unable to retrieve consequence "
                               + "annotations.")
        return self.__csq_fields

    @csq_fields.setter
    def csq_fields(self, csq):
        self.__csq_fields = csq

    @property
    def ann_label(self):
        '''
        String labelling the INFO field label of SnpEff consequence
        annotations. Will raise a KeyError if access is attempted
        but no ANN or EFF field is present in the header.
        '''
        if self.__ann_label is None:
            # Accessing ann_fields also detects and caches the label.
            self.ann_fields
        return self.__ann_label

    @ann_label.setter
    def ann_label(self, lbl):
        self.__ann_label = lbl

    @property
    def ann_fields(self):
        '''
        A list of SnpEff ANN field names in the order they are
        represented in ANN/EFF INFO field entries. Set to None on
        initialization. Will raise a KeyError if access is attempted but no
        SnpEff ANN, or EFF field is present in the header.
        '''
        if self.__ann_fields is None:
            if self.__ann_label is None:
                ann = None
                for x in _common_ann_fields:
                    if x in self.info:
                        ann = x
                        break
                if ann is None:
                    raise KeyError("No common ANN fields found in INFO " +
                                   "header - unable to retrieve SnpEff " +
                                   "consequence fields.")
                self.ann_label = ann
            else:
                ann = self.__ann_label
            ann_header = self.info[ann]
            match = _ann_format_re.match(ann_header.description)
            if match:
                # SnpEff separates sub-fields with ' | ' in the Description
                self.__ann_fields = match.group(1).split(' | ')
            else:
                raise KeyError("Could not parse {} Format in ".format(ann)
                               + "header. Unable to retrieve consequence "
                               + "annotations.")
        return self.__ann_fields

    @ann_fields.setter
    def ann_fields(self, ann):
        self.__ann_fields = ann

    def add_header_field(self, name, string=None, field_type=None,
                         dictionary=None):
        '''
        Add a header field with given name and optional field type,
        and dictionary of properties.

        Args:
            name: name of field to add

            string: string to add to field. Ignored if 'dictionary'
                    is provided.

            field_type:
                    type of field - e.g. if INFO/FILTER/FORMAT
                    field. Required if providing a dictionary.

            dictionary:
                    a dict of keys to values for the given field.
                    If 'field_type' is specified, this arg must be
                    provided and must contain all the essential keys
                    for that field type. For example, an 'INFO'
                    field must have 'Number', 'Type', and
                    'Description' keys.
        '''
        add_order = ['number', 'type', 'description']
        h_vals = []
        if dictionary is None and string is None:
            raise ValueError("Either dict or string argument is required")
        if field_type is not None and field_type in _required_keys:
            if dictionary is None:
                raise ValueError("Header type {} requires a dict.".format(
                    field_type))
        if dictionary:
            if not field_type:
                raise ValueError("field_type is required for use with " +
                                 "dictionary")
            # Work on a lower-cased copy so the caller's dict is not
            # mutated (previously the original dict was updated in place,
            # leaving lower-case duplicates in the caller's object).
            dictionary = dict((k.lower(), v) for k, v in dictionary.items())
            field_type = field_type.lower()
            field_header = getattr(self.header, _field2pysam[field_type])
            if name in field_header:
                # TODO check header same? pysam segfaults if we remove and
                # add INFO field with same name
                return
            if field_type in _required_keys:
                for k in add_order:
                    if k in _required_keys[field_type]:
                        try:
                            h_vals.append(dictionary[k])
                        except KeyError:
                            raise ValueError("Header type '" + field_type +
                                             "' requires '" + k + "' field")
                    else:
                        h_vals.append(None)
                h_vals.insert(0, name)
                getattr(self.header, _field2pysam[field_type]).add(*h_vals)
            else:
                raise ValueError("Field type {} not recognised".format(
                    field_type))
        else:
            self.header.add_meta(key=name, value=string)
|
gantzgraf/vape
|
vase/vcf_header.py
|
Python
|
gpl-3.0
| 8,057
|
[
"pysam"
] |
5bbfac9062c98aef61db9230d44439b98de51f9114289161e3b84696f12cb35b
|
"""
===================================
Simple 1D Kernel Density Estimation
===================================
This example uses the :class:`sklearn.neighbors.KernelDensity` class to
demonstrate the principles of Kernel Density Estimation in one dimension.
The first plot shows one of the problems with using histograms to visualize
the density of points in 1D. Intuitively, a histogram can be thought of as a
scheme in which a unit "block" is stacked above each point on a regular grid.
As the top two panels show, however, the choice of gridding for these blocks
can lead to wildly divergent ideas about the underlying shape of the density
distribution. If we instead center each block on the point it represents, we
get the estimate shown in the bottom left panel. This is a kernel density
estimation with a "top hat" kernel. This idea can be generalized to other
kernel shapes: the bottom-right panel of the first figure shows a Gaussian
kernel density estimate over the same distribution.
Scikit-learn implements efficient kernel density estimation using either
a Ball Tree or KD Tree structure, through the
:class:`sklearn.neighbors.KernelDensity` estimator. The available kernels
are shown in the second figure of this example.
The third figure compares kernel density estimates for a distribution of 100
samples in 1 dimension. Though this example uses 1D distributions, kernel
density estimation is easily and efficiently extensible to higher dimensions
as well.
"""
# Author: Jake Vanderplas <jakevdp@cs.washington.edu>
#
import numpy as np
import matplotlib.pyplot as plt
from scipy.stats import norm
from sklearn.neighbors import KernelDensity
#----------------------------------------------------------------------
# Plot the progression of histograms to kernels
np.random.seed(1)
N = 20
X = np.concatenate((np.random.normal(0, 1, int(0.3 * N)),
np.random.normal(5, 1, int(0.7 * N))))[:, np.newaxis]
X_plot = np.linspace(-5, 10, 1000)[:, np.newaxis]
bins = np.linspace(-5, 10, 10)
fig, ax = plt.subplots(2, 2, sharex=True, sharey=True)
fig.subplots_adjust(hspace=0.05, wspace=0.05)
# histogram 1
ax[0, 0].hist(X[:, 0], bins=bins, fc='#AAAAFF', normed=True)
ax[0, 0].text(-3.5, 0.31, "Histogram")
# histogram 2
ax[0, 1].hist(X[:, 0], bins=bins + 0.75, fc='#AAAAFF', normed=True)
ax[0, 1].text(-3.5, 0.31, "Histogram, bins shifted")
# tophat KDE
kde = KernelDensity(kernel='tophat', bandwidth=0.75).fit(X)
log_dens = kde.score_samples(X_plot)
ax[1, 0].fill(X_plot[:, 0], np.exp(log_dens), fc='#AAAAFF')
ax[1, 0].text(-3.5, 0.31, "Tophat Kernel Density")
# Gaussian KDE
kde = KernelDensity(kernel='gaussian', bandwidth=0.75).fit(X)
log_dens = kde.score_samples(X_plot)
ax[1, 1].fill(X_plot[:, 0], np.exp(log_dens), fc='#AAAAFF')
ax[1, 1].text(-3.5, 0.31, "Gaussian Kernel Density")
for axi in ax.ravel():
axi.plot(X[:, 0], np.full(X.shape[0], -0.01), '+k')
axi.set_xlim(-4, 9)
axi.set_ylim(-0.02, 0.34)
for axi in ax[:, 0]:
axi.set_ylabel('Normalized Density')
for axi in ax[1, :]:
axi.set_xlabel('x')
#----------------------------------------------------------------------
# Plot all available kernels
X_plot = np.linspace(-6, 6, 1000)[:, None]
X_src = np.zeros((1, 1))
fig, ax = plt.subplots(2, 3, sharex=True, sharey=True)
fig.subplots_adjust(left=0.05, right=0.95, hspace=0.05, wspace=0.05)
def format_func(x, loc):
if x == 0:
return '0'
elif x == 1:
return 'h'
elif x == -1:
return '-h'
else:
return '%ih' % x
for i, kernel in enumerate(['gaussian', 'tophat', 'epanechnikov',
'exponential', 'linear', 'cosine']):
axi = ax.ravel()[i]
log_dens = KernelDensity(kernel=kernel).fit(X_src).score_samples(X_plot)
axi.fill(X_plot[:, 0], np.exp(log_dens), '-k', fc='#AAAAFF')
axi.text(-2.6, 0.95, kernel)
axi.xaxis.set_major_formatter(plt.FuncFormatter(format_func))
axi.xaxis.set_major_locator(plt.MultipleLocator(1))
axi.yaxis.set_major_locator(plt.NullLocator())
axi.set_ylim(0, 1.05)
axi.set_xlim(-2.9, 2.9)
ax[0, 1].set_title('Available Kernels')
#----------------------------------------------------------------------
# Plot a 1D density example
N = 100
np.random.seed(1)
X = np.concatenate((np.random.normal(0, 1, int(0.3 * N)),
np.random.normal(5, 1, int(0.7 * N))))[:, np.newaxis]
X_plot = np.linspace(-5, 10, 1000)[:, np.newaxis]
true_dens = (0.3 * norm(0, 1).pdf(X_plot[:, 0])
+ 0.7 * norm(5, 1).pdf(X_plot[:, 0]))
fig, ax = plt.subplots()
ax.fill(X_plot[:, 0], true_dens, fc='black', alpha=0.2,
label='input distribution')
for kernel in ['gaussian', 'tophat', 'epanechnikov']:
kde = KernelDensity(kernel=kernel, bandwidth=0.5).fit(X)
log_dens = kde.score_samples(X_plot)
ax.plot(X_plot[:, 0], np.exp(log_dens), '-',
label="kernel = '{0}'".format(kernel))
ax.text(6, 0.38, "N={0} points".format(N))
ax.legend(loc='upper left')
ax.plot(X[:, 0], -0.005 - 0.01 * np.random.random(X.shape[0]), '+k')
ax.set_xlim(-4, 9)
ax.set_ylim(-0.02, 0.4)
plt.show()
|
vortex-ape/scikit-learn
|
examples/neighbors/plot_kde_1d.py
|
Python
|
bsd-3-clause
| 5,119
|
[
"Gaussian"
] |
9369f4c85f289a71f00493e7b26cd108bcb05ad99cca65aaf51bbddf10300233
|
import numpy as np
from SimPEG import Mesh
from SimPEG import Problem
from SimPEG import Survey
from SimPEG import DataMisfit
from SimPEG import Directives
from SimPEG import Optimization
from SimPEG import Regularization
from SimPEG import InvProblem
from SimPEG import Inversion
import matplotlib.pyplot as plt
import matplotlib.gridspec as gridspec
# from pymatsolver import Pardiso
import matplotlib
from ipywidgets import (
interact, FloatSlider, ToggleButtons, IntSlider, FloatText, IntText, SelectMultiple
)
import ipywidgets as widgets
class LinearInversionApp(object):
    """Interactive (ipywidgets) demo of 1D linear Tikhonov inversion with
    SimPEG: builds a kernel-based sensitivity matrix G, a two-feature model
    (a boxcar plus a Gaussian bump), simulates noisy data, and runs and
    plots the inversion."""
    # Parameters for sensitivity matrix, G
    N=None
    M=None
    j_start=None
    j_end=None
    p=None
    q=None
    seed=None
    # Parameters for Model
    m_background= None
    m1=None
    m2=None
    m1_center=None
    dm1 =None
    m2_center=None
    dm2 =None
    sigma =None
    m_min =None
    m_max =None
    data=None
    save=None
    def __init__(self):
        super(LinearInversionApp, self).__init__()
    @property
    def G(self):
        # Sensitivity (forward operator) matrix; built by set_G().
        return self._G
    @property
    def jk(self):
        # Kernel wavenumbers j_k used to generate each row of G.
        return self._jk
    @property
    def mesh(self):
        # 1D TensorMesh the model lives on; built by set_G().
        return self._mesh
    def set_G(
        self,
        N=20,
        M=100,
        p=-0.25,
        q=0.25,
        j1=1,
        jn=60,
    ):
        """
        Build the sensitivity matrix G from exponentially-decaying
        cosine kernels on an M-cell unit mesh.

        Parameters
        ----------
        N: # of data (rows of G)
        M: # of model parameters (mesh cells)
        p: exponential decay rate of the kernels (negative => decay)
        q: frequency factor of the cosine part
        j1, jn: range of kernel wavenumbers, linearly spaced over N rows
        """
        self.N=N
        self.M=M
        self._mesh=Mesh.TensorMesh([M])
        jk=np.linspace(j1, jn, N)
        self._G=np.zeros((N, self.mesh.nC), dtype=float, order='C')
        def g(k):
            # k-th kernel: decaying oscillatory function of cell centers
            return (
                np.exp(p*jk[k]*self.mesh.vectorCCx) *
                np.cos(np.pi*q*jk[k]*self.mesh.vectorCCx)
            )
        for i in range(N):
            # row i: kernel weighted by cell widths (midpoint quadrature)
            self._G[i, :] = g(i) * self.mesh.hx
        self._jk = jk
    def plot_G(
        self,
        N=20,
        M=100,
        p=-0.25,
        q=0.25,
        j1=1,
        jn=60,
        scale='log',
        fixed=False,
        ymin=-0.001,
        ymax=0.011
    ):
        """Rebuild G with the given parameters, then plot its rows and its
        singular-value spectrum side by side."""
        self.set_G(
            N=N,
            M=M,
            p=p,
            q=q,
            j1=j1,
            jn=jn,
        )
        _, s, _ = np.linalg.svd(self.G, full_matrices=False)
        matplotlib.rcParams['font.size']=14
        fig=plt.figure(figsize=(10, 4))
        gs1 = gridspec.GridSpec(1, 4)
        ax1 = plt.subplot(gs1[0, :3])
        ax2 = plt.subplot(gs1[0, 3:])
        ax1.plot(self.mesh.vectorCCx, self.G.T)
        if fixed:
            ax1.set_ylim(ymin, ymax)
        ax1.set_xlabel("x")
        ax1.set_ylabel("g(x)")
        ax2.plot(np.arange(self.N)+1, s, 'ro')
        ax2.set_xlabel("")
        ax2.set_title("singular values", fontsize=12)
        ax2.set_xscale(scale)
        ax2.set_yscale(scale)
        ax2.xaxis.set_major_locator(plt.NullLocator())
        ax2.xaxis.set_minor_locator(plt.NullLocator())
        ax2.xaxis.set_major_formatter(plt.NullFormatter())
        ax2.xaxis.set_minor_formatter(plt.NullFormatter())
        plt.tight_layout()
        plt.show()
    def set_model(
        self,
        m_background=0.,
        m1=1.,
        m2=-1.,
        m1_center=0.2,
        dm1=0.2,
        m2_center=0.5,
        sigma_2=1.,
    ):
        """Return a model vector: background + boxcar of amplitude m1
        (width dm1 around m1_center) + Gaussian bump of amplitude m2."""
        m=np.zeros(self.mesh.nC) + m_background
        m1_inds=np.logical_and(self.mesh.vectorCCx > m1_center-dm1/2., self.mesh.vectorCCx < m1_center+dm1/2.)
        m[m1_inds]=m1
        def gaussian(x,x0,sigma):
            # unit-amplitude Gaussian centered at x0 with width sigma
            return np.exp(-np.power((x - x0)/sigma, 2.)/2.)
        m += gaussian(self.mesh.vectorCCx, m2_center, sigma_2) * m2
        return m
    def plot_model(
        self,
        m_background=0.,
        m1=1.,
        m1_center=0.2,
        dm1=0.2,
        m2=-1.,
        m2_center=0.5,
        sigma_2=1.,
        option="model",
        add_noise=True,
        percentage =10,
        floor=1e-1,
    ):
        """Build the model, simulate (optionally noisy) data, cache both on
        self, and plot any of the kernel rows / model / data panels chosen
        in 'option'."""
        m=self.set_model(
            m_background=m_background,
            m1=m1,
            m2=m2,
            m1_center=m1_center,
            dm1=dm1,
            m2_center=m2_center,
            sigma_2=sigma_2,
        )
        # fixed seed so the simulated noise is reproducible across calls
        np.random.seed(1)
        if add_noise:
            survey, _=self.get_problem_survey()
            data=survey.dpred(m)
            noise=abs(data)*percentage * 0.01 *np.random.randn(self.N) + np.random.randn(self.N)*floor
        else:
            survey, _=self.get_problem_survey()
            data=survey.dpred(m)
            noise=np.zeros(self.N, float)
        data += noise
        self.data=data.copy()
        self.m=m.copy()
        # percentage-plus-floor uncertainty model, reused by run_inversion
        self.uncertainty=abs(self.data) * percentage* 0.01 + floor
        self.percentage = percentage
        self.floor = floor
        option_bools = [False, False, False]
        for item in option:
            if item == 'kernel':
                option_bools[0] = True
            elif item == 'model':
                option_bools[1] = True
            elif item == 'data':
                option_bools[2] = True
        fig, axes = plt.subplots(1, 3, figsize=(12*1.2, 3*1.2))
        for i, ax in enumerate(axes):
            if option_bools[i]:
                if i == 0:
                    ax.plot(self.mesh.vectorCCx, self.G.T)
                    ax.set_title('Rows of matrix G')
                    ax.set_xlabel("x")
                    ax.set_ylabel("g(x)")
                elif i == 1:
                    ax.plot(self.mesh.vectorCCx, m)
                    ax.set_ylim([-2.5, 2.5])
                    ax.set_title('Model')
                    ax.set_xlabel("x")
                    ax.set_ylabel("m(x)")
                    # NOTE(review): this second set_ylabel overwrites
                    # "m(x)" with "$d_j$" on the model panel -- looks like
                    # a copy/paste leftover; confirm.
                    ax.set_ylabel("$d_j$")
                elif i == 2:
                    if add_noise:
                        # this is just for visualization of uncertainty
                        ax.errorbar(
                            x=self.jk, y=self.data,
                            yerr=self.uncertainty,
                            color='k', lw=1
                        )
                        ax.plot(self.jk, self.data, 'ko')
                    else:
                        ax.plot(self.jk, self.data, 'ko-')
                    ax.set_title('Data')
                    ax.set_xlabel("$k_j$")
        for i, ax in enumerate(axes):
            if not option_bools[i]:
                ax.axis('off')
        # ax.xaxis.set_minor_locator(plt.NullLocator())
        # ax.xaxis.set_major_formatter(plt.NullFormatter())
        # ax.xaxis.set_minor_formatter(plt.NullFormatter())
        # ax.yaxis.set_major_locator(plt.NullLocator())
        # ax.yaxis.set_minor_locator(plt.NullLocator())
        # ax.yaxis.set_major_formatter(plt.NullFormatter())
        # ax.yaxis.set_minor_formatter(plt.NullFormatter())
        plt.tight_layout()
    def get_problem_survey(self):
        # Pair a SimPEG LinearProblem (forward operator G) with a
        # LinearSurvey and return both.
        prob=Problem.LinearProblem(self.mesh, G=self.G)
        survey=Survey.LinearSurvey()
        survey.pair(prob)
        return survey, prob
    def run_inversion(
        self,
        maxIter=60,
        m0=0.,
        mref=0.,
        percentage=5,
        floor=0.1,
        chifact=1,
        beta0_ratio=1.,
        coolingFactor=1,
        coolingRate=1,
        alpha_s=1.,
        alpha_x=1.,
        use_target=False
    ):
        """Run a Tikhonov inversion on the cached data.

        Returns (models per iteration, predicted data per iteration,
        SaveOutputEveryIteration directive with the convergence history).
        """
        survey, prob=self.get_problem_survey()
        survey.eps=percentage
        survey.std=floor
        survey.dobs=self.data.copy()
        self.uncertainty = percentage*abs(survey.dobs)*0.01 + floor
        # constant starting and reference models
        m0=np.ones(self.M) * m0
        mref=np.ones(self.M) * mref
        reg=Regularization.Tikhonov(
            self.mesh,
            alpha_s=alpha_s,
            alpha_x=alpha_x,
            mref=mref
        )
        dmis=DataMisfit.l2_DataMisfit(survey)
        dmis.W=1./self.uncertainty
        opt=Optimization.InexactGaussNewton(
            maxIter=maxIter,
            maxIterCG=20
        )
        # remember each iterate so the full model history can be recalled
        opt.remember('xc')
        opt.tolG=1e-10
        opt.eps=1e-10
        invProb=InvProblem.BaseInvProblem(dmis, reg, opt)
        save=Directives.SaveOutputEveryIteration()
        beta_schedule=Directives.BetaSchedule(
            coolingFactor=coolingFactor,
            coolingRate=coolingRate
        )
        target=Directives.TargetMisfit(chifact=chifact)
        if use_target:
            directives=[
                Directives.BetaEstimate_ByEig(beta0_ratio=beta0_ratio),
                beta_schedule,
                target,
                save
            ]
        else:
            directives=[
                Directives.BetaEstimate_ByEig(beta0_ratio=beta0_ratio),
                beta_schedule,
                save
            ]
        inv=Inversion.BaseInversion(invProb, directiveList=directives)
        mopt=inv.run(m0)
        model = opt.recall('xc')
        model.append(mopt)
        pred = []
        for m in model:
            pred.append(survey.dpred(m))
        return model, pred, save
    def plot_inversion(
        self,
        maxIter=60,
        m0=0.,
        mref=0.,
        percentage=5,
        floor=0.1,
        chifact=1,
        beta0_ratio=1.,
        coolingFactor=1,
        coolingRate=1,
        alpha_s=1.,
        alpha_x=1.,
        use_target=False,
        run=True,
        option ='model',
        i_iteration=1,
    ):
        """(Re)run the inversion when run=True and plot true vs recovered
        model, observed vs predicted data, and either the misfit or the
        Tikhonov convergence curve for iteration 'i_iteration'."""
        if run:
            self.model, self.pred, self.save=self.run_inversion(
                maxIter=maxIter,
                m0=m0,
                mref=mref,
                percentage=percentage,
                floor=floor,
                chifact=chifact,
                beta0_ratio=beta0_ratio,
                coolingFactor=coolingFactor,
                coolingRate=coolingRate,
                alpha_s=alpha_s,
                alpha_x=alpha_x,
                use_target=use_target,
            )
        if len(self.model) == 2:
            # only initial + final model: no convergence panel to show
            fig, axes=plt.subplots(1, 2, figsize=(14*1.2 *2/3, 3*1.2))
            i_plot = -1
        else:
            self.save.load_results()
            if self.save.i_target is None:
                i_plot = -1
            else:
                i_plot = self.save.i_target + 1
            fig, axes=plt.subplots(1, 3, figsize=(14*1.2, 3*1.2))
        axes[0].plot(self.mesh.vectorCCx, self.m)
        if run:
            axes[0].plot(self.mesh.vectorCCx, self.model[i_plot])
        axes[0].set_ylim([-2.5, 2.5])
        axes[1].errorbar(
            x=self.jk, y=self.data,
            yerr=self.uncertainty,
            color='k', lw=1
        )
        axes[1].plot(self.jk, self.data, 'ko')
        if run:
            axes[1].plot(self.jk, self.pred[i_plot], 'bx')
            axes[1].legend(("Observed", "Predicted"))
        axes[0].legend(("True", "Pred"))
        axes[0].set_title('Model')
        axes[0].set_xlabel("x")
        axes[0].set_ylabel("m(x)")
        axes[1].set_title('Data')
        axes[1].set_xlabel("$k_j$")
        axes[1].set_ylabel("$d_j$")
        if len(self.model) > 2:
            max_iteration = len(self.model)-1
            if i_iteration > max_iteration:
                print ((">> Warning: input iteration (%i) is greater than maximum iteration (%i)") % (i_iteration, len(self.model)-1))
                i_iteration = max_iteration
            if option == 'misfit':
                if not run:
                    axes[0].plot(self.mesh.vectorCCx, self.model[i_iteration])
                    axes[1].plot(self.jk, self.pred[i_iteration], 'bx')
                    # axes[0].legend(("True", "Pred", ("%ith")%(i_iteration)))
                    # axes[1].legend(("Observed", "Predicted", ("%ith")%(i_iteration)))
                    axes[1].legend(("Observed", "Predicted"))
                if i_iteration == 0:
                    i_iteration = 1
                axes[2].plot(np.arange(len(self.save.phi_d))[i_iteration-1]+1, self.save.phi_d[i_iteration-1]*2, 'go', ms=10)
                ax_1 = axes[2].twinx()
                axes[2].semilogy(np.arange(len(self.save.phi_d))+1, self.save.phi_d*2, 'k-', lw=2)
                if self.save.i_target is not None:
                    axes[2].plot(np.arange(len(self.save.phi_d))[self.save.i_target]+1, self.save.phi_d[self.save.i_target]*2, 'k*', ms=10)
                    axes[2].plot(np.r_[axes[2].get_xlim()[0], axes[2].get_xlim()[1]], np.ones(2)*self.save.target_misfit*2, 'k:')
                ax_1.semilogy(np.arange(len(self.save.phi_d))+1, self.save.phi_m, 'r', lw=2)
                axes[2].set_xlabel("Iteration")
                axes[2].set_ylabel("$\phi_d$")
                ax_1.set_ylabel("$\phi_m$", color='r')
                for tl in ax_1.get_yticklabels():
                    tl.set_color('r')
                axes[2].set_title('Misfit curves')
            elif option == 'tikhonov':
                if not run:
                    axes[0].plot(self.mesh.vectorCCx, self.model[i_iteration])
                    axes[1].plot(self.jk, self.pred[i_iteration], 'bx')
                    # axes[0].legend(("True", "Pred", ("%ith")%(i_iteration)))
                    # axes[1].legend(("Observed", "Predicted", ("%ith")%(i_iteration)))
                    axes[0].legend(("True", "Pred"))
                    axes[1].legend(("Observed", "Predicted"))
                if i_iteration == 0:
                    i_iteration = 1
                axes[2].plot(self.save.phi_m[i_iteration-1], self.save.phi_d[i_iteration-1]*2, 'go', ms=10)
                axes[2].plot(self.save.phi_m, self.save.phi_d*2, 'k-', lw=2)
                axes[2].set_xlim(np.hstack(self.save.phi_m).min(), np.hstack(self.save.phi_m).max())
                axes[2].set_xlabel("$\phi_m$", fontsize=14)
                axes[2].set_ylabel("$\phi_d$", fontsize=14)
                if self.save.i_target is not None:
                    axes[2].plot(self.save.phi_m[self.save.i_target], self.save.phi_d[self.save.i_target]*2., 'k*', ms=10)
                axes[2].set_title('Tikhonov curve')
        plt.tight_layout()
    def interact_plot_G(self):
        # Wire plot_G up to interactive widget controls.
        Q=interact(
            self.plot_G,
            N=IntSlider(min=1, max=100, step=1, value=20, continuous_update=False),
            M=IntSlider(min=1, max=100, step=1, value=100, continuous_update=False),
            p =FloatSlider(min=-1, max=0, step=0.05, value=-0.15, continuous_update=False),
            q=FloatSlider(min=0, max=1, step=0.05, value=0.25, continuous_update=False),
            j1 =FloatText(value=1.),
            jn=FloatText(value=19.),
            scale=ToggleButtons(
                options=["linear", "log"], value="log"
            ),
            fixed=False,
            ymin=FloatText(value=-0.005),
            ymax=FloatText(value=0.011),
        )
        return Q
    def interact_plot_model(self):
        # Wire plot_model up to interactive widget controls.
        Q=interact(
            self.plot_model,
            m_background=FloatSlider(
                min=-2, max=2, step=0.05, value=0., continuous_update=False, description="m$_{background}$",
            ),
            m1=FloatSlider(
                min=-2, max=2, step=0.05, value=1., continuous_update=False, description="m1",
            ),
            m2=FloatSlider(
                min=-2, max=2, step=0.05, value=2., continuous_update=False, description="m2",
            ),
            m1_center=FloatSlider(
                min=-2, max=2, step=0.05, value=0.2, continuous_update=False, description="m1$_{center}$",
            ),
            dm1 =FloatSlider(
                min=0, max=0.5, step=0.05, value=0.2, continuous_update=False, description="m1$_{width}$",
            ),
            m2_center=FloatSlider(
                min=-2, max=2, step=0.05, value=0.75, continuous_update=False, description="m2$_{center}$",
            ),
            sigma_2=FloatSlider(
                min=0.01, max=0.1, step=0.01, value=0.07, continuous_update=False, description="m2$_{sigma}$",
            ),
            option=SelectMultiple(
                options=["kernel", "model", "data"],
                value=["model"],
                description='option'
            ),
            percentage=FloatText(value=5),
            floor=FloatText(value=0.02),
        )
        return Q
    def interact_plot_inversion(self, maxIter=30):
        # Wire plot_inversion up to interactive widget controls.
        # NOTE(review): plot_inversion's parameter is 'use_target', not
        # 'target'; confirm this kwarg reaches the intended argument.
        Q = interact(
            self.plot_inversion,
            maxIter=IntText(value=maxIter),
            m0=FloatSlider(min=-2, max=2, step=0.05, value=0., continuous_update=False),
            mref=FloatSlider(min=-2, max=2, step=0.05, value=0., continuous_update=False),
            percentage=FloatText(value=self.percentage),
            floor=FloatText(value=self.floor),
            chifact=FloatText(value=1.),
            beta0_ratio=FloatText(value=100),
            coolingFactor=FloatSlider(min=0.1, max=10, step=1, value=2, continuous_update=False),
            coolingRate=IntSlider(min=1, max=10, step=1, value=1, continuous_update=False),
            alpha_s=FloatText(value=1e-10),
            alpha_x=FloatText(value=0),
            run = True,
            target = False,
            option=ToggleButtons(
                options=["misfit", "tikhonov"], value="misfit"
            ),
            i_iteration=IntSlider(min=0, max=maxIter, step=1, value=0, continuous_update=False)
        )
|
geoscixyz/em_examples
|
em_examples/LinearInversion.py
|
Python
|
mit
| 17,361
|
[
"Gaussian"
] |
858fbd86cd116b5eecd3d4750f101aa2882dbae7eec55e63e49642eddc9bdf81
|
import math
import numpy
class ElasticNet:
    """Elastic-net heuristic for the travelling salesman problem.

    Maintains a closed ring of "neurons" that is iteratively pulled
    towards the cities (distance force) while being kept short and
    smooth (length force), in the manner of Durbin & Willshaw's elastic
    net method.
    """

    def __init__(self, cities, param):
        """
        Parameters
        ----------
        cities : (num_cities, 2) ndarray of city coordinates.
        param : dict with keys 'init_k', 'num_neurons_factor', 'radius',
            'epsilon', 'max_num_iter', 'k_update_period', 'k_alpha',
            'alpha' and 'beta'.
        """
        self._cities = cities
        self._param = param
        self._num_iter = 0
        self._k = self._param['init_k']
        self._num_neurons = int(self._param['num_neurons_factor']
                                * self._cities.shape[0])
        self._init_neurons()

    def iteration(self):
        """
        Perform one iteration of the algorithm.
        Return True if the algorithm should keep running, False once a
        stop criterion (convergence or max iterations) has been met.
        """
        self._num_iter += 1
        self._update_k()
        self._update_weights()
        self._update_neurons()
        return not self._stop_criteria()

    @property
    def dist2(self):
        """Squared city-to-neuron distances from the last iteration."""
        return self._dist2

    @property
    def neurons(self):
        """Current (num_neurons, 2) array of neuron positions."""
        return self._neurons

    @property
    def num_iter(self):
        """Number of iterations performed so far."""
        return self._num_iter

    @property
    def worst_dist(self):
        """Largest city-to-nearest-neuron distance from the last iteration."""
        return self._worst_dist

    def _dist_force(self):
        """
        Compute the force that minimize the distance between the
        cities and the neurons.
        """
        # Vectorized form of dot(weights[:, j], delta[:, j]) per neuron j.
        return numpy.einsum('ij,ijk->jk', self._weights, self._delta)

    def _init_neurons(self):
        """
        Initialize the neurons in a circle at the center of the
        cities.
        """
        theta = numpy.linspace(0, 2 * math.pi, self._num_neurons, False)
        centroid = self._cities.mean(axis=0)
        self._neurons = numpy.column_stack((numpy.cos(theta),
                                            numpy.sin(theta)))
        self._neurons *= self._param['radius']
        self._neurons += centroid

    def _length_force(self):
        """Compute the force that minimize the length of the elastic."""
        # Discrete Laplacian of the ring; np.roll wraps around because
        # the neurons form a closed loop.
        return (numpy.roll(self._neurons, -1, axis=0)
                - 2 * self._neurons
                + numpy.roll(self._neurons, 1, axis=0))

    def _stop_criteria(self):
        """Return True if the algorithm has finished, False otherwise."""
        return (self._worst_dist < self._param['epsilon']
                or self._num_iter >= self._param['max_num_iter'])

    def _update_k(self):
        # Anneal the kernel width every k_update_period iterations,
        # never letting it drop below 0.01.
        if (self._num_iter % self._param['k_update_period']) == 0:
            self._k = max(0.01, self._param['k_alpha'] * self._k)

    def _update_neurons(self):
        """Move the neurons under the combined distance and length forces."""
        self._neurons += (self._param['alpha'] * self._dist_force()
                          + self._param['beta'] * self._k
                          * self._length_force())

    def _update_weights(self):
        """Compute w_ij, i = 1, 2, ..., |Cities|; j = 1, 2, ...., |Neurons|"""
        self._delta = self._cities[:,numpy.newaxis] - self._neurons
        # self._delta[i, j] == (dx, dy) between city i and neuron j
        self._dist2 = (self._delta ** 2).sum(axis=2)
        # worst_dist: the city whose nearest neuron is farthest away
        self._worst_dist = numpy.sqrt(self._dist2.min(axis=1).max())
        self._weights = numpy.exp(-self._dist2 / (2 * (self._k ** 2)))
        # normalize so each city's weights over the neurons sum to 1
        self._weights /= self._weights.sum(axis=1)[:,numpy.newaxis]
|
larose/ena
|
elastic_net.py
|
Python
|
bsd-2-clause
| 4,067
|
[
"NEURON"
] |
f137edffc1292d50c64d199a6a58b966798df513ac16eca8c29604a6fed8820a
|
from .properties import Align, Bool, DataSpec, Enum, HasProps, Size, Any, Color
from .mixins import FillProps, LineProps, TextProps
from .enums import Units, AngleUnits, Direction
from .plot_object import Viewable
from six import add_metaclass, iteritems
# Size is a way to preserve a data-space-related metric all the way until
# render time, when the screen dimensions are known
# Align may be "center", "min", "max", or "jitter(func)" where func is name
# of a random distribution to draw random samples from. Defaults to uniform
# but gaussian could certainly be useful.
@add_metaclass(Viewable)
class BaseGlyph(HasProps):
    """ Common base for every glyph (mark/geom) type in Bokeh. """
    # Subclasses set __view_model__, which becomes the "type" entry of
    # the generated glyphspec dict.
    # Attributes shared by every glyph:
    visible = Bool
    margin = Size # the amount of desired space around this glyph
    halign = Align # when there is horizontal wiggle room (e.g. categorical)
    valign = Align # when there is vertical wiggle room
    radius_units = Enum(Units)
    length_units = Enum(Units)
    angle_units = Enum(AngleUnits)
    start_angle_units = Enum(AngleUnits)
    end_angle_units = Enum(AngleUnits)
    def to_glyphspec(self):
        """ Build and return the dict describing this glyph, suitable for
        inclusion in a Glyph definition.
        """
        glyphspec = self.changed_properties_with_values()
        glyphspec["type"] = self.__view_model__
        # DataSpec properties store a dict-ified form on the object;
        # serialize each one into the spec.
        for attrname, dataspec in iteritems(self.dataspecs_with_refs()):
            glyphspec[attrname] = dataspec.to_dict(self)
        return glyphspec
class Marker(BaseGlyph, FillProps, LineProps):
    """ Base class for glyphs which are just simple markers placed at (x,y)
    locations.
    """
    x = DataSpec
    y = DataSpec
    # marker size in screen (pixel) units; defaults to 4, never negative
    size = DataSpec(units="screen", min_value=0, default=4)
class Asterisk(Marker):
    # asterisk ("*") marker
    __view_model__ = "asterisk"
class Circle(Marker):
    __view_model__ = "circle"
    # data-space alternative to the inherited screen-space 'size'
    radius = DataSpec(units="data", min_value=0)
    def to_glyphspec(self):
        """ Build the glyphspec dict, keeping exactly one of the two
        mutually exclusive sizing properties ('size' or 'radius').
        """
        spec = super(Circle, self).to_glyphspec()
        radius_set = "radius" in self._changed_vars
        size_set = "size" in self._changed_vars
        # 'radius' wins only when it was set and 'size' was not; in every
        # other case (neither set, or 'size' set) the screen-space 'size'
        # is the one that applies.
        if radius_set and not size_set:
            del spec["size"]
        else:
            del spec["radius"]
        return spec
class CircleCross(Marker):
    # circle with a superimposed '+' cross
    __view_model__ = "circle_cross"
class CircleX(Marker):
    # circle with a superimposed 'x'
    __view_model__ = "circle_x"
class Cross(Marker):
    # '+' cross marker
    __view_model__ = "cross"
class Diamond(Marker):
    # diamond marker
    __view_model__ = "diamond"
class DiamondCross(Marker):
    # diamond with a superimposed '+' cross
    __view_model__ = "diamond_cross"
class InvertedTriangle(Marker):
    # downward-pointing triangle marker
    __view_model__ = "inverted_triangle"
class Square(Marker):
    # square marker, optionally rotated by 'angle'
    __view_model__ = "square"
    angle = DataSpec
class SquareCross(Marker):
    # square with a superimposed '+' cross
    __view_model__ = "square_cross"
class SquareX(Marker):
    # square with a superimposed 'x'
    __view_model__ = "square_x"
class Triangle(Marker):
    # upward-pointing triangle marker
    __view_model__ = "triangle"
class Xmarker(Marker):
    # 'x' marker
    __view_model__ = "x"
class AnnularWedge(BaseGlyph, FillProps, LineProps):
    # filled wedge between two radii, swept between two angles
    __view_model__ = 'annular_wedge'
    x = DataSpec
    y = DataSpec
    inner_radius = DataSpec(min_value=0)
    outer_radius = DataSpec(min_value=0)
    start_angle = DataSpec
    end_angle = DataSpec
    direction = Enum(Direction)
class Annulus(BaseGlyph, FillProps, LineProps):
    # ring between an inner and an outer radius
    __view_model__ = 'annulus'
    x = DataSpec
    y = DataSpec
    inner_radius = DataSpec(min_value=0)
    outer_radius = DataSpec(min_value=0)
class Arc(BaseGlyph, LineProps):
    # circular arc (stroke only, no fill)
    __view_model__ = 'arc'
    x = DataSpec
    y = DataSpec
    radius = DataSpec(min_value=0)
    start_angle = DataSpec
    end_angle = DataSpec
    direction = Enum(Direction)
class Bezier(BaseGlyph, LineProps):
    # cubic Bezier from (x0, y0) to (x1, y1) with two control points
    __view_model__ = 'bezier'
    x0 = DataSpec
    y0 = DataSpec
    x1 = DataSpec
    y1 = DataSpec
    cx0 = DataSpec
    cy0 = DataSpec
    cx1 = DataSpec
    cy1 = DataSpec
class Gear(BaseGlyph, LineProps, FillProps):
    # involute gear shape; per-property units noted inline
    __view_model__ = 'gear'
    x = DataSpec # Float (mm, data)
    y = DataSpec # Float (mm, data)
    angle = DataSpec(default=0) # Float (rad)
    module = DataSpec # Float (mm, data)
    teeth = DataSpec # Int
    pressure_angle = DataSpec(default=20) # Angle (deg)
    shaft_size = DataSpec(default=0.3) # Percent
    internal = DataSpec(default=False) # Bool
class Image(BaseGlyph):
    # colormapped scalar image anchored at (x, y), data-space extent (dw, dh)
    __view_model__ = 'image'
    image = DataSpec
    x = DataSpec
    y = DataSpec
    dw = DataSpec
    dh = DataSpec
    dilate = Bool(False)
    #TODO: Consider converting palette in to a first-class object, then wrap the color list and reserve values into it instead of here
    #Reserve represents a color/value outside of the normal range. Commonly used to setup a 'background' color for the image
    palette = DataSpec
    #TODO: Using 'False' to indicate no reserve value is not great. A flag field or sentinel is probably better, but that can be worked out when/if palette becomes its own object
    #The actual type of reserve_val is an instance of whatever is held in the image array, so the exact type will depend on the type of values in the dataspec of the image field.
    reserve_val = Any(default=False)
    reserve_color = DataSpec(default=0xffffff) #TODO: Why doesn't type Color work here?? (Came through as 'undefined' on the JS side)
    #TODO: What is the color code for transparent???
class ImageURL(BaseGlyph):
    # image loaded from a URL, positioned by 'anchor' relative to (x, y)
    __view_model__ = 'image_url'
    url = DataSpec
    x = DataSpec
    y = DataSpec
    w = DataSpec
    h = DataSpec
    angle = DataSpec
    dilate = Bool(False)
    anchor = Enum("top_left", "top_center", "top_right", "right_center", "bottom_right",
                  "bottom_center", "bottom_left", "left_center", "center")
class ImageRGBA(BaseGlyph):
    # image of raw RGBA values anchored at (x, y), extent (dw, dh)
    __view_model__ = 'image_rgba'
    image = DataSpec
    x = DataSpec
    y = DataSpec
    dw = DataSpec
    dh = DataSpec
    dilate = Bool(False)
class Line(BaseGlyph, LineProps):
    # single polyline through the points (x, y)
    __view_model__ = "line"
    x = DataSpec
    y = DataSpec
class MultiLine(BaseGlyph, LineProps):
    # several disjoint polylines; xs/ys are lists of coordinate lists
    __view_model__ = 'multi_line'
    xs = DataSpec
    ys = DataSpec
class Oval(BaseGlyph, FillProps, LineProps):
    # ellipse centered at (x, y), optionally rotated by 'angle'
    __view_model__ = 'oval'
    x = DataSpec
    y = DataSpec
    width = DataSpec
    height = DataSpec
    angle = DataSpec
class Patch(BaseGlyph, FillProps, LineProps):
    # single filled polygon through the points (x, y)
    __view_model__ = 'patch'
    x = DataSpec
    y = DataSpec
class Patches(BaseGlyph, LineProps, FillProps):
    # several filled polygons; xs/ys are lists of coordinate lists
    __view_model__ = 'patches'
    xs = DataSpec
    ys = DataSpec
class Quad(BaseGlyph, FillProps, LineProps):
    # axis-aligned rectangle given by its four edges
    __view_model__ = "quad"
    left = DataSpec
    right = DataSpec
    bottom = DataSpec
    top = DataSpec
class Quadratic(BaseGlyph, LineProps):
    # quadratic Bezier curve with a single control point (cx, cy)
    __view_model__ = 'quadratic'
    x0 = DataSpec
    y0 = DataSpec
    x1 = DataSpec
    y1 = DataSpec
    cx = DataSpec
    cy = DataSpec
class Ray(BaseGlyph, LineProps):
    # line segment starting at (x, y) with given angle and length
    __view_model__ = "ray"
    x = DataSpec
    y = DataSpec
    angle = DataSpec
    length = DataSpec
class Rect(BaseGlyph, FillProps, LineProps):
    # rectangle centered at (x, y) with width/height, optionally rotated
    __view_model__ = "rect"
    x = DataSpec
    y = DataSpec
    width = DataSpec
    height = DataSpec
    angle = DataSpec
    dilate = Bool(False)
class Segment(BaseGlyph, LineProps):
    # straight line segment from (x0, y0) to (x1, y1)
    __view_model__ = 'segment'
    x0 = DataSpec
    y0 = DataSpec
    x1 = DataSpec
    y1 = DataSpec
class Text(BaseGlyph, TextProps):
    # text drawn at (x, y), optionally rotated by 'angle'
    __view_model__ = "text"
    x = DataSpec
    y = DataSpec
    text = DataSpec
    angle = DataSpec
class Wedge(BaseGlyph, FillProps, LineProps):
    # filled circular wedge (pie slice) swept between two angles
    __view_model__ = 'wedge'
    x = DataSpec
    y = DataSpec
    radius = DataSpec(min_value=0)
    start_angle = DataSpec
    end_angle = DataSpec
    direction = Enum(Direction)
|
jakevdp/bokeh
|
bokeh/glyphs.py
|
Python
|
bsd-3-clause
| 8,148
|
[
"Gaussian"
] |
151f658645cc43d634b523092dee664c5b557bd3c5749db1aab2a227374c50a6
|
#!/usr/bin/env python
# This script checks and can optionally update MOOSE source files.
# You should always run this script without the "-u" option
# first to make sure there is a clean dry run of the files that should
# be updated
import os, string
from optparse import OptionParser
global_ignores = ['contrib', '.svn']
copyright_header = \
"""/****************************************************************/
/* DO NOT MODIFY THIS HEADER */
/* MOOSE - Multiphysics Object Oriented Simulation Environment */
/* */
/* (c) 2010 Battelle Energy Alliance, LLC */
/* ALL RIGHTS RESERVED */
/* */
/* Prepared by Battelle Energy Alliance, LLC */
/* Under Contract No. DE-AC07-05ID14517 */
/* With the U. S. Department of Energy */
/* */
/* See COPYRIGHT for full restrictions */
/****************************************************************/
"""
global_options = {}
def fixupHeader():
    """Walk the source tree and check/update the header of each C/C++ file.

    Traverses the parent of the current working directory, pruning any
    directory named in global_ignores, and runs checkAndUpdate() on every
    file whose extension is '.C' or '.h'.
    """
    for dirpath, dirnames, filenames in os.walk(os.getcwd() + "/../"):
        # Don't traverse into ignored directories (pruning dirnames in
        # place is how os.walk supports this).
        for ignore in global_ignores:
            if ignore in dirnames:
                dirnames.remove(ignore)
        # 'fname' avoids shadowing the builtin 'file'
        for fname in filenames:
            suffix = os.path.splitext(fname)
            if suffix[-1] == '.C' or suffix[-1] == '.h':
                checkAndUpdate(os.path.join(dirpath, fname))
def checkAndUpdate(filename):
    """Check one file for the exact copyright header; optionally fix it.

    In report mode (the default) prints the name of any file whose
    contents do not contain the header verbatim. With --update, rewrites
    the file with the header prepended, going through a temp file plus
    rename so the replacement is atomic.
    """
    with open(filename) as f:
        text = f.read()
    # Check (exact match only): an outdated or reformatted header counts
    # as missing.
    if copyright_header not in text:
        if not global_options.update:  # Report only
            print(filename + ' does not contain an up to date header')
            if global_options.verbose:
                # print the first 10 lines or so of the file (single
                # concatenated string keeps the original output format)
                print('>' * 40 + ' \n ' +
                      '\n'.join((text.split('\n', 10))[:10]) + ' ' + '\n' * 5)
        else:
            # Update: prepend the header, then atomically swap in place
            with open(filename + '~tmp', 'w') as f:
                f.write(copyright_header)
                f.write(text)
            os.rename(filename + '~tmp', filename)
if __name__ == '__main__':
    # Command-line options:
    #   -u/--update  : actually modify files (default is a dry-run report)
    #   -v/--verbose : in report mode, show the first lines of each offender
    parser = OptionParser()
    parser.add_option("-u", "--update", action="store_true", dest="update", default=False)
    parser.add_option("-v", "--verbose", action="store_true", dest="verbose", default=False)
    (global_options, args) = parser.parse_args()
    fixupHeader()
|
zzyfisherman/moose
|
framework/scripts/fixup_headers.py
|
Python
|
lgpl-2.1
| 2,599
|
[
"MOOSE"
] |
906a9402b943390b83b3a1f570b5694f2cc738bc642549a495ed666aa7ae98f3
|
import numpy as np
import bayesianoracle as bo
import bayesianoracle.plot as boplotter
# Import function information
from function_data import *
import os
# NOTE(review): this runs the bare filename as a shell command, which only
# works if function_data.py is executable with a proper shebang; presumably
# "python function_data.py" (or relying solely on the import above) was
# intended -- confirm.
os.system("function_data.py")
def plot_kernels(bmao, X, x0, kernel_ranges, colors):
""" Auxillary plotting function
Parameters
----------
bmao : Bayesian model averaging optimization process
X : The values that have been previously traversed
x0 : location at which we want to evaluate the kernel
kernel_ranges : (list of scalars) the desired kernel_widths to be plotted
"""
import matplotlib.pyplot as plt
from matplotlib.collections import LineCollection
from matplotlib import colors as cl
from matplotlib import gridspec
boplt = boplotter.Plotter1D(x_range=x_range, y_range=y_range, num_points=num_points*10)
boplt.set_bma(bmao.bma)
### Plot the data and the models
fig = plt.figure(figsize=(8, 6), dpi=dpi)
ax = plt.gca()
legend_elements = []
legend_texts = []
for i in xrange(len(kernel_ranges)):
# Plot the kernels
kernel_line = boplt.plot_kernel(ax, x0, kernel_ranges[i], color=colors[i], ylabel=r'$k_{\gamma}\left(x,x^\prime\right)$')
# Plot data
data_scat = boplt.plot_kernel_at_data(ax, x0, kernel_ranges[i], color=colors[i], bool_color_cycled=True, xlabel=r'$x$')
"""
# Add the information for legends
legend_elements.append(kernel_line)
legend_texts.append(r"$K_\gamma(x)$ for $\gamma = "+str(kernel_ranges[i])+"$")
# Get the data element in the legend
legend_elements.append(data_scat)
legend_texts.append(r"$K_\theta(x_i)$ for $\theta = "+str(kernel_ranges[i])+"$")
"""
legend_elements.append((kernel_line, data_scat))
legend_texts.append(r"$\gamma = " + str(kernel_ranges[i])+'$')
#boplt.plot_data_locations(ax, color='black', alpha=0.3, linestyle='--', zorder=1)
# Create the x0 line
boplt.draw_vertical_line(ax, x0, r'$x^\prime$', color='#FF9900')
# Reverse the legend texts and elements
legend_elements = reversed(legend_elements)
legend_texts = reversed(legend_texts)
# Create the legend
legend = plt.legend(legend_elements,
legend_texts,
loc='center right', bbox_to_anchor=(1.05, 0.5), ncol=1, fancybox=True, shadow=False, scatterpoints=1)
# Change the sizes of the scatter dots in legend
"""
for i in xrange(len(kernel_ranges)):
legend.legendHandles[2*i+1]._sizes = [30]
"""
plt.setp(legend.get_texts(), fontsize=12)
plt.savefig("KernelDisplay_figures/Kernels.png", dpi=dpi)
plt.close(fig)
bmao = bo.optimizer.QuadraticBMAOptimizer(ndim = 1,
init_kernel_range=0.25,
n_int=50,
precision_beta = 1000.0,
constraints = [constr1, constr2],
bounding_box = bounding_box,
bool_compact = True,
kernel_type='Gaussian')
# Center of the kernel
x0 = np.array([-0.5])
# Initialize x_next
X = None
y_hist = np.array([])
for k in xrange(X_complete.shape[1]):
# Get next
x_next = X_complete[:,k]
x = x_next
if k == 0:
X = np.array([x_next])
else:
X = np.hstack([X, np.array([x_next])])
# Get y, grad, hess from precomputed lists
f = f_complete[k]
grad = grad_complete[k]
Hess = Hess_complete[k]
y_hist = np.append(y_hist, f)
# Add the observations to the bmao
bmao.add_observation(x, f, grad, Hess)
kernel_ranges = [2.0, 0.25, 0.1]
colors = ['#FFBBBB', '#FF7777', '#FF0000']
#colors = ['#00FF99', '#66FFCC', '#99FFCC']
#colors = ['#FFD494','#FFB870','#FF9900']
colors = ['#DDDDDD','#AAAAAA','#666666']
#colors = ['#707070','#333333','#000000']
# Plot the Kernels
plot_kernels(bmao, X, x0, kernel_ranges, colors)
|
altaetran/bayesianoracle
|
tests/quadraticBayesianAveraging/paper_examples/KernelDisplay.py
|
Python
|
apache-2.0
| 4,106
|
[
"Gaussian"
] |
8b9d543f1ac7c1811088fe6311ad6c6d13d67b199bd79082bc6176aea11911ea
|
"""
:mod: ReqClient
.. module: ReqClient
:synopsis: implementation of client for RequestDB using DISET framework
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import six
import os
import time
import random
import json
import datetime
# # from DIRAC
from DIRAC import gLogger, S_OK, S_ERROR
from DIRAC.Core.DISET.RPCClient import RPCClient
from DIRAC.Core.Utilities.List import randomize, fromChar
from DIRAC.Core.Utilities.JEncode import strToIntDict
from DIRAC.Core.Utilities.DEncode import ignoreEncodeWarning
from DIRAC.ConfigurationSystem.Client import PathFinder
from DIRAC.Core.Base.Client import Client, createClient
from DIRAC.RequestManagementSystem.Client.Request import Request
from DIRAC.RequestManagementSystem.private.RequestValidator import RequestValidator
from DIRAC.WorkloadManagementSystem.Client import JobStatus
from DIRAC.WorkloadManagementSystem.Client import JobMinorStatus
from DIRAC.WorkloadManagementSystem.Client.JobMonitoringClient import JobMonitoringClient
from DIRAC.WorkloadManagementSystem.Client.JobStateUpdateClient import JobStateUpdateClient
@createClient('RequestManagement/ReqManager')
class ReqClient(Client):
"""ReqClient is a class manipulating and operation on Requests.
:param ~RPCClient.RPCClient requestManager: RPC client to RequestManager
:param dict requestProxiesDict: RPC client to ReqestProxy
:param ~DIRAC.RequestManagementSystem.private.RequestValidator.RequestValidator requestValidator: RequestValidator instance
"""
__requestProxiesDict = {}
__requestValidator = None
def __init__(self, url=None, **kwargs):
"""c'tor
:param self: self reference
:param url: url of the ReqManager
:param kwargs: forwarded to the Base Client class
"""
super(ReqClient, self).__init__(**kwargs)
self.serverURL = 'RequestManagement/ReqManager' if not url else url
self.log = gLogger.getSubLogger("RequestManagement/ReqClient/pid_%s" % (os.getpid()))
def requestProxies(self, timeout=120):
""" get request proxies dict """
if not self.__requestProxiesDict:
self.__requestProxiesDict = {}
proxiesURLs = fromChar(PathFinder.getServiceURL("RequestManagement/ReqProxyURLs"))
if not proxiesURLs:
self.log.warn("CS option RequestManagement/ReqProxyURLs is not set!")
for proxyURL in proxiesURLs:
self.log.debug("creating RequestProxy for url = %s" % proxyURL)
self.__requestProxiesDict[proxyURL] = RPCClient(proxyURL, timeout=timeout)
return self.__requestProxiesDict
def requestValidator(self):
""" get request validator """
if not self.__requestValidator:
self.__requestValidator = RequestValidator()
return self.__requestValidator
def putRequest(self, request, useFailoverProxy=True, retryMainService=0):
"""Put request to RequestManager
:param self: self reference
:param ~Request.Request request: Request instance
:param bool useFailoverProxy: if False, will not attempt to forward the request to ReqProxies
:param int retryMainService: Amount of time we retry on the main ReqHandler in case of failures
:return: S_OK/S_ERROR
"""
errorsDict = {"OK": False}
valid = self.requestValidator().validate(request)
if not valid["OK"]:
self.log.error("putRequest: request not valid", "%s" % valid["Message"])
return valid
# # dump to json
requestJSON = request.toJSON()
if not requestJSON["OK"]:
return requestJSON
requestJSON = requestJSON["Value"]
retryMainService += 1
while retryMainService:
retryMainService -= 1
setRequestMgr = self._getRPC().putRequest(requestJSON)
if setRequestMgr["OK"]:
return setRequestMgr
errorsDict["RequestManager"] = setRequestMgr["Message"]
# sleep a bit
time.sleep(random.randint(1, 5))
self.log.warn("putRequest: unable to set request '%s' at RequestManager" %
request.RequestName, setRequestMgr["Message"])
proxies = self.requestProxies() if useFailoverProxy else {}
for proxyURL in randomize(proxies.keys()):
proxyClient = proxies[proxyURL]
self.log.debug("putRequest: trying RequestProxy at %s" % proxyURL)
setRequestProxy = proxyClient.putRequest(requestJSON)
if setRequestProxy["OK"]:
if setRequestProxy["Value"]["set"]:
self.log.info("putRequest: request '%s' successfully set using RequestProxy %s" % (request.RequestName,
proxyURL))
elif setRequestProxy["Value"]["saved"]:
self.log.info("putRequest: request '%s' successfully forwarded to RequestProxy %s" % (request.RequestName,
proxyURL))
return setRequestProxy
else:
self.log.warn("putRequest: unable to set request using RequestProxy %s: %s" % (proxyURL,
setRequestProxy["Message"]))
errorsDict["RequestProxy(%s)" % proxyURL] = setRequestProxy["Message"]
# # if we're here neither requestManager nor requestProxy were successful
self.log.error("putRequest: unable to set request", "'%s'" % request.RequestName)
errorsDict["Message"] = "ReqClient.putRequest: unable to set request '%s'" % request.RequestName
return errorsDict
def getRequest(self, requestID=0):
"""Get request from RequestDB
:param self: self reference
:param int requestID: ID of the request. If 0, choice is made for you
:return: S_OK( Request instance ) or S_OK() or S_ERROR
"""
self.log.debug("getRequest: attempting to get request.")
getRequest = self._getRPC().getRequest(requestID)
if not getRequest["OK"]:
self.log.error("getRequest: unable to get request", "'%s' %s" % (requestID, getRequest["Message"]))
return getRequest
if not getRequest["Value"]:
return getRequest
return S_OK(Request(getRequest["Value"]))
@ignoreEncodeWarning
def getBulkRequests(self, numberOfRequest=10, assigned=True):
""" get bulk requests from RequestDB
:param self: self reference
:param str numberOfRequest: size of the bulk (default 10)
:return: S_OK( Successful : { requestID, RequestInstance }, Failed : message ) or S_ERROR
"""
self.log.debug("getRequests: attempting to get request.")
getRequests = self._getRPC().getBulkRequests(numberOfRequest, assigned)
if not getRequests["OK"]:
self.log.error("getRequests: unable to get '%s' requests: %s" % (numberOfRequest, getRequests["Message"]))
return getRequests
# No Request returned
if not getRequests["Value"]:
return getRequests
# No successful Request
if not getRequests["Value"]["Successful"]:
return getRequests
jsonReq = getRequests["Value"]["Successful"]
# Do not forget to cast back str keys to int
reqInstances = {int(rId): Request(jsonReq[rId]) for rId in jsonReq}
failed = strToIntDict(getRequests["Value"]["Failed"])
return S_OK({"Successful": reqInstances, "Failed": failed})
def peekRequest(self, requestID):
""" peek request """
self.log.debug("peekRequest: attempting to get request.")
peekRequest = self._getRPC().peekRequest(int(requestID))
if not peekRequest["OK"]:
self.log.error("peekRequest: unable to peek request", "request: '%s' %s" % (requestID, peekRequest["Message"]))
return peekRequest
if not peekRequest["Value"]:
return peekRequest
return S_OK(Request(peekRequest["Value"]))
def deleteRequest(self, requestID):
""" delete request given it's ID
:param self: self reference
:param str requestID: request ID
"""
requestID = int(requestID)
self.log.debug("deleteRequest: attempt to delete '%s' request" % requestID)
deleteRequest = self._getRPC().deleteRequest(requestID)
if not deleteRequest["OK"]:
self.log.error("deleteRequest: unable to delete request",
"'%s' request: %s" % (requestID, deleteRequest["Message"]))
return deleteRequest
def getRequestIDsList(self, statusList=None, limit=None, since=None, until=None, getJobID=False):
""" get at most :limit: request ids with statuses in :statusList: """
statusList = statusList if statusList else list(Request.FINAL_STATES)
limit = limit if limit else 100
since = since.strftime('%Y-%m-%d') if since else ""
until = until.strftime('%Y-%m-%d') if until else ""
return self._getRPC().getRequestIDsList(statusList, limit, since, until, getJobID)
def getScheduledRequest(self, operationID):
""" get scheduled request given its scheduled OperationID """
self.log.debug("getScheduledRequest: attempt to get scheduled request...")
scheduled = self._getRPC().getScheduledRequest(operationID)
if not scheduled["OK"]:
self.log.error("getScheduledRequest failed", scheduled["Message"])
return scheduled
if scheduled["Value"]:
return S_OK(Request(scheduled["Value"]))
return scheduled
def getDBSummary(self):
""" Get the summary of requests in the RequestDBs. """
self.log.debug("getDBSummary: attempting to get RequestDB summary.")
dbSummary = self._getRPC().getDBSummary()
if not dbSummary["OK"]:
self.log.error("getDBSummary: unable to get RequestDB summary", dbSummary["Message"])
return dbSummary
def getDigest(self, requestID):
""" Get the request digest given a request ID.
:param self: self reference
:param str requestID: request id
"""
self.log.debug("getDigest: attempting to get digest for '%s' request." % requestID)
digest = self._getRPC().getDigest(int(requestID))
if not digest["OK"]:
self.log.error("getDigest: unable to get digest for request",
"request: '%s' %s" % (requestID, digest["Message"]))
return digest
def getRequestStatus(self, requestID):
""" Get the request status given a request id.
:param self: self reference
:param int requestID: id of the request
"""
if isinstance(requestID, six.string_types):
requestID = int(requestID)
self.log.debug("getRequestStatus: attempting to get status for '%d' request." % requestID)
requestStatus = self._getRPC().getRequestStatus(requestID)
if not requestStatus["OK"]:
self.log.error("getRequestStatus: unable to get status for request",
": '%d' %s" % (requestID, requestStatus["Message"]))
return requestStatus
# def getRequestName( self, requestID ):
# """ get request name for a given requestID """
# return self._getRPC().getRequestName( requestID )
def getRequestInfo(self, requestID):
""" The the request info given a request id.
:param self: self reference
:param int requestID: request nid
"""
self.log.debug("getRequestInfo: attempting to get info for '%s' request." % requestID)
requestInfo = self._getRPC().getRequestInfo(int(requestID))
if not requestInfo["OK"]:
self.log.error("getRequestInfo: unable to get status for request",
"request: '%s' %s" % (requestID, requestInfo["Message"]))
return requestInfo
def getRequestFileStatus(self, requestID, lfns):
""" Get file status for request given a request id.
:param self: self reference
:param int requestID: request id
:param lfns: list of LFNs
:type lfns: python:list
"""
self.log.debug("getRequestFileStatus: attempting to get file statuses for '%s' request." % requestID)
fileStatus = self._getRPC().getRequestFileStatus(int(requestID), lfns)
if not fileStatus["OK"]:
self.log.verbose("getRequestFileStatus: unable to get file status for request",
"request: '%s' %s" % (requestID, fileStatus["Message"]))
return fileStatus
def finalizeRequest(self, requestID, jobID, useCertificates=True):
""" check request status and perform finalization if necessary
update the request status and the corresponding job parameter
:param self: self reference
:param str requestID: request id
:param int jobID: job id
"""
stateServer = JobStateUpdateClient(useCertificates=useCertificates)
# Checking if to update the job status - we should fail here, so it will be re-tried later
# Checking the state, first
res = self.getRequestStatus(requestID)
if not res['OK']:
self.log.error("finalizeRequest: failed to get request",
"request: %s status: %s" % (requestID, res["Message"]))
return res
if res["Value"] != "Done":
return S_ERROR("The request %s isn't 'Done' but '%s', this should never happen, why are we here?" %
(requestID, res['Value']))
# The request is 'Done', let's update the job status. If we fail, we should re-try later
monitorServer = JobMonitoringClient(useCertificates=useCertificates)
res = monitorServer.getJobSummary(int(jobID))
if not res["OK"]:
self.log.error("finalizeRequest: Failed to get job status", "JobID: %d" % jobID)
return res
elif not res['Value']:
self.log.info("finalizeRequest: job %d does not exist (anymore): finalizing" % jobID)
return S_OK()
else:
jobStatus = res["Value"]['Status']
jobMinorStatus = res["Value"]["MinorStatus"]
jobAppStatus = ''
newJobStatus = ''
if jobStatus == JobStatus.STALLED:
# If job is stalled, find the previous status from the logging info
res = monitorServer.getJobLoggingInfo(int(jobID))
if not res['OK']:
self.log.error("finalizeRequest: Failed to get job logging info", "JobID: %d" % jobID)
return res
# Check the last status was Stalled and get the one before
if len(res['Value']) >= 2 and res['Value'][-1][0] == JobStatus.STALLED:
jobStatus, jobMinorStatus, jobAppStatus = res['Value'][-2][:3]
newJobStatus = jobStatus
# update the job pending request digest in any case since it is modified
self.log.info("finalizeRequest: Updating request digest for job %d" % jobID)
digest = self.getDigest(requestID)
if digest["OK"]:
digest = digest["Value"]
self.log.verbose(digest)
res = stateServer.setJobParameter(jobID, "PendingRequest", digest)
if not res["OK"]:
self.log.info("finalizeRequest: Failed to set job %d parameter: %s" % (jobID, res["Message"]))
return res
else:
self.log.error("finalizeRequest: Failed to get request digest for %s: %s" % (requestID,
digest["Message"]))
if jobStatus == JobStatus.COMPLETED:
# What to do? Depends on what we have in the minorStatus
if jobMinorStatus == JobMinorStatus.PENDING_REQUESTS:
newJobStatus = JobStatus.DONE
elif jobMinorStatus == JobMinorStatus.APP_ERRORS:
newJobStatus = JobStatus.FAILED
else:
self.log.error("finalizeRequest: Unexpected jobMinorStatus",
"(got %s)" % jobMinorStatus)
return S_ERROR("Unexpected jobMinorStatus")
if newJobStatus:
self.log.info("finalizeRequest: Updating job status for %d to %s/Requests done" % (jobID, newJobStatus))
else:
self.log.info(
"finalizeRequest: Updating job minor status",
"for %d to 'Requests done' (current status is %s)" % (jobID, jobStatus))
stateUpdate = stateServer.setJobStatus(jobID, newJobStatus, "Requests done", 'RMS')
if jobAppStatus and stateUpdate['OK']:
stateUpdate = stateServer.setJobApplicationStatus(jobID, jobAppStatus, 'RMS')
if not stateUpdate["OK"]:
self.log.error("finalizeRequest: Failed to set job status",
"JobID: %d, error: %s" % (jobID, stateUpdate['Message']))
return stateUpdate
return S_OK(newJobStatus)
@ignoreEncodeWarning
def getRequestIDsForJobs(self, jobIDs):
""" get the request ids for the supplied jobIDs.
:param self: self reference
:param jobIDs: list of job IDs (integers)
:type jobIDs: python:list
:return: S_ERROR or S_OK( "Successful": { jobID1: reqID1, jobID2: requID2, ... },
"Failed" : { jobIDn: errMsg, jobIDm: errMsg, ...} )
"""
self.log.verbose("getRequestIDsForJobs: attempt to get request(s) for jobs",
"(n=%d)" % len(jobIDs))
res = self._getRPC().getRequestIDsForJobs(jobIDs)
if not res["OK"]:
self.log.error("getRequestIDsForJobs: unable to get request(s) for jobs",
"%s: %s" % (jobIDs, res["Message"]))
return res
# Cast the JobIDs back to int
successful = strToIntDict(res['Value']['Successful'])
failed = strToIntDict(res['Value']['Failed'])
return S_OK({'Successful': successful, 'Failed': failed})
@ignoreEncodeWarning
def readRequestsForJobs(self, jobIDs):
""" read requests for jobs
:param jobIDs: list with jobIDs
:type jobIDs: python:list
:return: S_OK( { "Successful" : { jobID1 : Request, ... },
"Failed" : { jobIDn : "Fail reason" } } )
"""
readReqsForJobs = self._getRPC().readRequestsForJobs(jobIDs)
if not readReqsForJobs["OK"]:
return readReqsForJobs
ret = readReqsForJobs["Value"]
# # create Requests out of JSONs for successful reads
# Do not forget to cast back str keys to int
successful = {int(jobID): Request(jsonReq) for jobID, jsonReq in ret['Successful'].items()}
failed = strToIntDict(ret['Failed'])
return S_OK({'Successful': successful, 'Failed': failed})
def resetFailedRequest(self, requestID, allR=False):
""" Reset a failed request to "Waiting" status
"""
# # we can safely only peek the request as it is Failed and therefore not owned by an agent
res = self.peekRequest(requestID)
if not res['OK']:
return res
req = res['Value']
if allR or recoverableRequest(req):
# Only reset requests that can be recovered
if req.Status != 'Failed':
gLogger.notice("Reset NotBefore time, was %s" % str(req.NotBefore))
else:
for i, op in enumerate(req):
op.Error = ''
if op.Status == 'Failed':
printOperation((i, op), onlyFailed=True)
for fi in op:
if fi.Status == 'Failed':
fi.Attempt = 1
fi.Error = ''
fi.Status = 'Waiting'
if op.Status == 'Failed':
op.Status = 'Waiting'
# Reset also NotBefore
req.NotBefore = datetime.datetime.utcnow().replace(microsecond=0)
return self.putRequest(req)
return S_OK("Not reset")
# ============= Some useful functions to be shared ===========
output = ''
def prettyPrint(mainItem, key='', offset=0):
global output
if key:
key += ': '
blanks = offset * ' '
if mainItem and isinstance(mainItem, dict):
output += "%s%s%s\n" % (blanks, key, '{') if blanks or key else ''
for key in sorted(mainItem):
prettyPrint(mainItem[key], key=key, offset=offset)
output += "%s%s\n" % (blanks, '}') if blanks else ''
elif mainItem and isinstance(mainItem, list) or isinstance(mainItem, tuple):
output += "%s%s%s\n" % (blanks, key, '[' if isinstance(mainItem, list) else '(')
for item in mainItem:
prettyPrint(item, offset=offset + 2)
output += "%s%s\n" % (blanks, ']' if isinstance(mainItem, list) else ')')
elif isinstance(mainItem, six.string_types):
if '\n' in mainItem:
prettyPrint(mainItem.strip('\n').split('\n'), offset=offset)
else:
output += "%s%s'%s'\n" % (blanks, key, mainItem)
else:
output += "%s%s%s\n" % (blanks, key, str(mainItem))
output = output.replace('[\n%s{' % blanks, '[{').replace('}\n%s]' % blanks, '}]') \
.replace('(\n%s{' % blanks, '({').replace('}\n%s)' % blanks, '})') \
.replace('(\n%s(' % blanks, '((').replace(')\n%s)' % blanks, '))') \
.replace('(\n%s[' % blanks, '[').replace(']\n%s)' % blanks, ']')
def printFTSJobs(request):
""" Prints the FTSJobs associated to a request
:param request: Request object
"""
try:
if request.RequestID:
# We try first the new FTS3 system
from DIRAC.DataManagementSystem.Client.FTS3Client import FTS3Client
fts3Client = FTS3Client()
res = fts3Client.ping()
if res['OK']:
associatedFTS3Jobs = []
for op in request:
res = fts3Client.getOperationsFromRMSOpID(op.OperationID)
if res['OK']:
for fts3Op in res['Value']:
associatedFTS3Jobs.extend(fts3Op.ftsJobs)
if associatedFTS3Jobs:
# Display the direct url and the status
gLogger.always(
'\n\nFTS3 jobs associated: \n%s' %
'\n'.join(
'%s/fts3/ftsmon/#/job/%s (%s)' %
(job.ftsServer.replace(':8446', ':8449'), # Submission port is 8446, web port is 8449
job.ftsGUID,
job.status) for job in associatedFTS3Jobs))
return
# AttributeError can be thrown because the deserialization will not have
# happened correctly on the new fts3 (CC7 typically), and the error is not
# properly propagated
except AttributeError as err:
gLogger.debug("Could not instantiate FtsClient because of Exception", repr(err))
def printRequest(request, status=None, full=False, verbose=True, terse=False):
global output
if full:
output = ''
prettyPrint(json.loads(request.toJSON()['Value']))
gLogger.always(output)
else:
if not status:
status = request.Status
gLogger.always("Request name='%s' ID=%s Status='%s'%s%s%s" %
(request.RequestName,
request.RequestID if hasattr(request, 'RequestID') else '(not set yet)',
request.Status, " ('%s' in DB)" % status if status != request.Status else '',
(" Error='%s'" % request.Error) if request.Error and request.Error.strip() else "",
(" Job=%s" % request.JobID) if request.JobID else ""))
gLogger.always("Created %s, Updated %s%s" % (request.CreationTime,
request.LastUpdate,
(", NotBefore %s" % request.NotBefore) if request.NotBefore else ""))
if request.OwnerDN:
gLogger.always("Owner: '%s', Group: %s" % (request.OwnerDN, request.OwnerGroup))
for indexOperation in enumerate(request):
op = indexOperation[1]
if not terse or op.Status == 'Failed':
printOperation(indexOperation, verbose, onlyFailed=terse)
printFTSJobs(request)
def printOperation(indexOperation, verbose=True, onlyFailed=False):
global output
i, op = indexOperation
prStr = ''
if op.SourceSE:
prStr += 'SourceSE: %s' % op.SourceSE
if op.TargetSE:
prStr += (' - ' if prStr else '') + 'TargetSE: %s' % op.TargetSE
if prStr:
prStr += ' - '
prStr += 'Created %s, Updated %s' % (op.CreationTime, op.LastUpdate)
if op.Type == 'ForwardDISET' and op.Arguments:
from DIRAC.Core.Utilities import DEncode
decode, _length = DEncode.decode(op.Arguments)
if verbose:
output = ''
prettyPrint(decode, offset=10)
prStr += '\n Arguments:\n' + output.strip('\n')
else:
prStr += '\n Service: %s' % decode[0][0]
gLogger.always(" [%s] Operation Type='%s' ID=%s Order=%s Status='%s'%s%s" %
(i, op.Type,
op.OperationID if hasattr(op, 'OperationID') else '(not set yet)',
op.Order, op.Status,
(" Error='%s'" % op.Error) if op.Error and op.Error.strip() else "",
(" Catalog=%s" % op.Catalog) if op.Catalog else ""))
if prStr:
gLogger.always(" %s" % prStr)
for indexFile in enumerate(op):
if not onlyFailed or indexFile[1].Status == 'Failed':
printFile(indexFile)
def printFile(indexFile):
ind, fi = indexFile
gLogger.always(" [%02d] ID=%s LFN='%s' Status='%s'%s%s%s" %
(ind + 1, fi.FileID if hasattr(fi, 'FileID') else '(not set yet)', fi.LFN, fi.Status,
(" Checksum='%s'" % fi.Checksum) if fi.Checksum or
(fi.Error and 'checksum' in fi.Error.lower()) else "",
(" Error='%s'" % fi.Error) if fi.Error and fi.Error.strip() else "",
(" Attempts=%d" % fi.Attempt) if fi.Attempt > 1 else "")
)
def recoverableRequest(request):
excludedErrors = ('File does not exist', 'No such file or directory',
'sourceSURL equals to targetSURL',
'Max attempts limit reached', 'Max attempts reached')
operationErrorsOK = ('is banned for', 'Failed to perform exists from any catalog')
for op in request:
if op.Status == 'Failed' and (not op.Error or not [errStr for errStr in operationErrorsOK if errStr in op.Error]):
for fi in op:
if fi.Status == 'Failed':
if [errStr for errStr in excludedErrors if errStr in fi.Error]:
return False
return True
return True
|
yujikato/DIRAC
|
src/DIRAC/RequestManagementSystem/Client/ReqClient.py
|
Python
|
gpl-3.0
| 25,398
|
[
"DIRAC"
] |
79fd0ad9f12b3c33651cd53a5e4c28b0a972a528404ca5995a043e1f929c240b
|
from flask import Flask, render_template, redirect, session, request
import random, datetime
app = Flask(__name__)
app.secret_key = "Felipe"
@app.route('/')
def index():
if not 'gold' in session:
session['gold'] = 0
if not 'activities' in session:
session['activities'] = []
return render_template('index.html', goldcount = session['gold'])
@app.route('/process_money', methods=['POST'])
def process():
places = {
'farm':random.randint(5,10),
'cave':random.randint(5,10),
'house':random.randint(2,5),
'casino':random.randint(-50,50)
}
if request.form['place'] in places:
result = places[request.form['place']]
session['gold']=session['gold']+ result
myStr = "{} {} golds from the {} ({})".format(('lost','Earned')[result > 0], abs(result), request.form['place'], datetime.datetime.now())
session['activities'].append(myStr)
return redirect('/')
@app.route('/reset')
def reset():
session.clear()
return redirect('/')
app.run(debug = True)
|
jiobert/python
|
Velez_Felipe/assignments/Ninja/ninja_gold.py
|
Python
|
mit
| 974
|
[
"CASINO"
] |
0fd5fe8ba6b685a7ea6a4fabdc2b8b36a0f1351535699e601c6099eae94b8099
|
import mdpow.equil
S = mdpow.equil.WaterSimulation(molecule="BNZ")
S.topology("benzene.itp")
S.solvate(struct="benzene.pdb")
S.energy_minimize()
S.MD_relaxed(runtime=5) # should be at least 1e3 ps for production not just 5 ps
# run simulation externally or use MDrunner
# (see docs for using mpi etc)
import gromacs
r = gromacs.run.MDrunner(dirname=S.dirs['MD_relaxed'], deffnm="md", c="md.pdb", cpi=True, append=True, v=True)
r.run() # runs mdrun in the python shell
S.MD(runtime=10, qscript=['local.sh']) # should be at least 10e3 ps for production, not just 10 ps
# run simulation
r = gromacs.run.MDrunner(dirname=S.dirs['MD_NPT'], deffnm="md", c="md.pdb", cpi=True, append=True, v=True)
r.run() # runs mdrun in the python shell
import mdpow.fep
gwat = mdpow.fep.Ghyd(simulation=S, runtime=10)
gwat.setup()
# run multiple simulations on cluster
O = mdpow.equil.OctanolSimulation(molecule="BNZ")
O.topology("benzene.itp")
O.solvate(struct="benzene.pdb")
O.energy_minimize()
O.MD_relaxed(runtime=0.5)
|
Becksteinlab/MDPOW
|
doc/examples/benzene/session.py
|
Python
|
gpl-3.0
| 1,018
|
[
"Gromacs"
] |
63d5c9c16e38407c92fd919e443e07c9661a2425eb003c470d8f75c12014392c
|
"""Forest of trees-based ensemble methods
Those methods include random forests and extremely randomized trees.
The module structure is the following:
- The ``BaseForest`` base class implements a common ``fit`` method for all
the estimators in the module. The ``fit`` method of the base ``Forest``
class calls the ``fit`` method of each sub-estimator on random samples
(with replacement, a.k.a. bootstrap) of the training set.
The init of the sub-estimator is further delegated to the
``BaseEnsemble`` constructor.
- The ``ForestClassifier`` and ``ForestRegressor`` base classes further
implement the prediction logic by computing an average of the predicted
outcomes of the sub-estimators.
- The ``RandomForestClassifier`` and ``RandomForestRegressor`` derived
classes provide the user with concrete implementations of
the forest ensemble method using classical, deterministic
``DecisionTreeClassifier`` and ``DecisionTreeRegressor`` as
sub-estimator implementations.
- The ``ExtraTreesClassifier`` and ``ExtraTreesRegressor`` derived
classes provide the user with concrete implementations of the
forest ensemble method using the extremely randomized trees
``ExtraTreeClassifier`` and ``ExtraTreeRegressor`` as
sub-estimator implementations.
Single and multi-output problems are both handled.
"""
# Authors: Gilles Louppe <g.louppe@gmail.com>
# Brian Holt <bdholt1@gmail.com>
# Joly Arnaud <arnaud.v.joly@gmail.com>
# Fares Hedayati <fares.hedayati@gmail.com>
#
# License: BSD 3 clause
from __future__ import division
import warnings
from warnings import warn
from abc import ABCMeta, abstractmethod
import numpy as np
from scipy.sparse import issparse
from scipy.sparse import hstack as sparse_hstack
from ..base import ClassifierMixin, RegressorMixin
from ..externals.joblib import Parallel, delayed
from ..externals import six
from ..feature_selection.from_model import _LearntSelectorMixin
from ..metrics import r2_score
from ..preprocessing import OneHotEncoder
from ..tree import (DecisionTreeClassifier, DecisionTreeRegressor,
ExtraTreeClassifier, ExtraTreeRegressor)
from ..tree._tree import DTYPE, DOUBLE
from ..utils import check_random_state, check_array, compute_sample_weight
from ..exceptions import DataConversionWarning, NotFittedError
from .base import BaseEnsemble, _partition_estimators
from ..utils.fixes import bincount, parallel_helper
from ..utils.multiclass import check_classification_targets
__all__ = ["RandomForestClassifier",
"RandomForestRegressor",
"ExtraTreesClassifier",
"ExtraTreesRegressor",
"RandomTreesEmbedding"]
MAX_INT = np.iinfo(np.int32).max
def _generate_sample_indices(random_state, n_samples):
"""Private function used to _parallel_build_trees function."""
random_instance = check_random_state(random_state)
sample_indices = random_instance.randint(0, n_samples, n_samples)
return sample_indices
def _generate_unsampled_indices(random_state, n_samples):
"""Private function used to forest._set_oob_score function."""
sample_indices = _generate_sample_indices(random_state, n_samples)
sample_counts = bincount(sample_indices, minlength=n_samples)
unsampled_mask = sample_counts == 0
indices_range = np.arange(n_samples)
unsampled_indices = indices_range[unsampled_mask]
return unsampled_indices
def _parallel_build_trees(tree, forest, X, y, sample_weight, tree_idx, n_trees,
verbose=0, class_weight=None):
"""Private function used to fit a single tree in parallel."""
if verbose > 1:
print("building tree %d of %d" % (tree_idx + 1, n_trees))
if forest.bootstrap:
n_samples = X.shape[0]
if sample_weight is None:
curr_sample_weight = np.ones((n_samples,), dtype=np.float64)
else:
curr_sample_weight = sample_weight.copy()
indices = _generate_sample_indices(tree.random_state, n_samples)
sample_counts = bincount(indices, minlength=n_samples)
curr_sample_weight *= sample_counts
if class_weight == 'subsample':
with warnings.catch_warnings():
warnings.simplefilter('ignore', DeprecationWarning)
curr_sample_weight *= compute_sample_weight('auto', y, indices)
elif class_weight == 'balanced_subsample':
curr_sample_weight *= compute_sample_weight('balanced', y, indices)
tree.fit(X, y, sample_weight=curr_sample_weight, check_input=False)
else:
tree.fit(X, y, sample_weight=sample_weight, check_input=False)
return tree
class BaseForest(six.with_metaclass(ABCMeta, BaseEnsemble,
                                    _LearntSelectorMixin)):
    """Base class for forests of trees.

    Warning: This class should not be used directly. Use derived classes
    instead.
    """
    @abstractmethod
    def __init__(self,
                 base_estimator,
                 n_estimators=10,
                 estimator_params=tuple(),
                 bootstrap=False,
                 oob_score=False,
                 n_jobs=1,
                 random_state=None,
                 verbose=0,
                 warm_start=False,
                 class_weight=None):
        super(BaseForest, self).__init__(
            base_estimator=base_estimator,
            n_estimators=n_estimators,
            estimator_params=estimator_params)
        # Stored unmodified per scikit-learn convention; validation
        # happens in fit().
        self.bootstrap = bootstrap
        self.oob_score = oob_score
        self.n_jobs = n_jobs
        self.random_state = random_state
        self.verbose = verbose
        self.warm_start = warm_start
        self.class_weight = class_weight

    def apply(self, X):
        """Apply trees in the forest to X, return leaf indices.

        Parameters
        ----------
        X : array-like or sparse matrix, shape = [n_samples, n_features]
            The input samples. Internally, its dtype will be converted to
            ``dtype=np.float32``. If a sparse matrix is provided, it will be
            converted into a sparse ``csr_matrix``.

        Returns
        -------
        X_leaves : array_like, shape = [n_samples, n_estimators]
            For each datapoint x in X and for each tree in the forest,
            return the index of the leaf x ends up in.
        """
        X = self._validate_X_predict(X)
        # Threading backend: the underlying Cython tree code releases
        # the GIL, so threads are cheaper than processes here.
        results = Parallel(n_jobs=self.n_jobs, verbose=self.verbose,
                           backend="threading")(
            delayed(parallel_helper)(tree, 'apply', X, check_input=False)
            for tree in self.estimators_)
        # Transpose so rows correspond to samples, columns to trees.
        return np.array(results).T

    def decision_path(self, X):
        """Return the decision path in the forest.

        Parameters
        ----------
        X : array-like or sparse matrix, shape = [n_samples, n_features]
            The input samples. Internally, its dtype will be converted to
            ``dtype=np.float32``. If a sparse matrix is provided, it will be
            converted into a sparse ``csr_matrix``.

        Returns
        -------
        indicator : sparse csr array, shape = [n_samples, n_nodes]
            Return a node indicator matrix where non zero elements
            indicates that the samples goes through the nodes.
        n_nodes_ptr : array of size (n_estimators + 1, )
            The columns from indicator[n_nodes_ptr[i]:n_nodes_ptr[i+1]]
            gives the indicator value for the i-th estimator.
        """
        X = self._validate_X_predict(X)
        indicators = Parallel(n_jobs=self.n_jobs, verbose=self.verbose,
                              backend="threading")(
            delayed(parallel_helper)(tree, 'decision_path', X,
                                     check_input=False)
            for tree in self.estimators_)
        # Cumulative node counts give the column offset of each tree's
        # indicator sub-matrix in the horizontally stacked result.
        n_nodes = [0]
        n_nodes.extend([i.shape[1] for i in indicators])
        n_nodes_ptr = np.array(n_nodes).cumsum()
        return sparse_hstack(indicators).tocsr(), n_nodes_ptr

    def fit(self, X, y, sample_weight=None):
        """Build a forest of trees from the training set (X, y).

        Parameters
        ----------
        X : array-like or sparse matrix of shape = [n_samples, n_features]
            The training input samples. Internally, its dtype will be
            converted to ``dtype=np.float32``. If a sparse matrix is
            provided, it will be converted into a sparse ``csc_matrix``.
        y : array-like, shape = [n_samples] or [n_samples, n_outputs]
            The target values (class labels in classification, real numbers
            in regression).
        sample_weight : array-like, shape = [n_samples] or None
            Sample weights. If None, then samples are equally weighted.
            Splits that would create child nodes with net zero or negative
            weight are ignored while searching for a split in each node. In
            the case of classification, splits are also ignored if they
            would result in any single class carrying a negative weight in
            either child node.

        Returns
        -------
        self : object
            Returns self.
        """
        # Validate or convert input data
        X = check_array(X, accept_sparse="csc", dtype=DTYPE)
        y = check_array(y, accept_sparse='csc', ensure_2d=False, dtype=None)
        if issparse(X):
            # Pre-sort indices to avoid that each individual tree of the
            # ensemble sorts the indices.
            X.sort_indices()
        # Remap output
        n_samples, self.n_features_ = X.shape
        y = np.atleast_1d(y)
        if y.ndim == 2 and y.shape[1] == 1:
            warn("A column-vector y was passed when a 1d array was"
                 " expected. Please change the shape of y to "
                 "(n_samples,), for example using ravel().",
                 DataConversionWarning, stacklevel=2)
        if y.ndim == 1:
            # reshape is necessary to preserve the data contiguity against vs
            # [:, np.newaxis] that does not.
            y = np.reshape(y, (-1, 1))
        self.n_outputs_ = y.shape[1]
        y, expanded_class_weight = self._validate_y_class_weight(y)
        if getattr(y, "dtype", None) != DOUBLE or not y.flags.contiguous:
            y = np.ascontiguousarray(y, dtype=DOUBLE)
        if expanded_class_weight is not None:
            # Fold class weights into the per-sample weights.
            if sample_weight is not None:
                sample_weight = sample_weight * expanded_class_weight
            else:
                sample_weight = expanded_class_weight
        # Check parameters
        self._validate_estimator()
        if not self.bootstrap and self.oob_score:
            raise ValueError("Out of bag estimation only available"
                             " if bootstrap=True")
        random_state = check_random_state(self.random_state)
        if not self.warm_start:
            # Free allocated memory, if any
            self.estimators_ = []
        n_more_estimators = self.n_estimators - len(self.estimators_)
        if n_more_estimators < 0:
            raise ValueError('n_estimators=%d must be larger or equal to '
                             'len(estimators_)=%d when warm_start==True'
                             % (self.n_estimators, len(self.estimators_)))
        elif n_more_estimators == 0:
            warn("Warm-start fitting without increasing n_estimators does not "
                 "fit new trees.")
        else:
            if self.warm_start and len(self.estimators_) > 0:
                # We draw from the random state to get the random state we
                # would have got if we hadn't used a warm_start.
                random_state.randint(MAX_INT, size=len(self.estimators_))
            # Seed each new tree with a fresh int32 so results are
            # reproducible for a given random_state.
            trees = []
            for i in range(n_more_estimators):
                tree = self._make_estimator(append=False)
                tree.set_params(random_state=random_state.randint(MAX_INT))
                trees.append(tree)
            # Parallel loop: we use the threading backend as the Cython code
            # for fitting the trees is internally releasing the Python GIL
            # making threading always more efficient than multiprocessing in
            # that case.
            trees = Parallel(n_jobs=self.n_jobs, verbose=self.verbose,
                             backend="threading")(
                delayed(_parallel_build_trees)(
                    t, self, X, y, sample_weight, i, len(trees),
                    verbose=self.verbose, class_weight=self.class_weight)
                for i, t in enumerate(trees))
            # Collect newly grown trees
            self.estimators_.extend(trees)
        if self.oob_score:
            self._set_oob_score(X, y)
        # Decapsulate classes_ attributes
        if hasattr(self, "classes_") and self.n_outputs_ == 1:
            self.n_classes_ = self.n_classes_[0]
            self.classes_ = self.classes_[0]
        return self

    @abstractmethod
    def _set_oob_score(self, X, y):
        """Calculate out of bag predictions and score."""

    def _validate_y_class_weight(self, y):
        # Default implementation: no remapping, no class weights.
        # ForestClassifier overrides this.
        return y, None

    def _validate_X_predict(self, X):
        """Validate X whenever one tries to predict, apply, predict_proba"""
        if self.estimators_ is None or len(self.estimators_) == 0:
            raise NotFittedError("Estimator not fitted, "
                                 "call `fit` before exploiting the model.")
        # Delegate to the first fitted tree; all trees share the same
        # input validation.
        return self.estimators_[0]._validate_X_predict(X, check_input=True)

    @property
    def feature_importances_(self):
        """Return the feature importances (the higher, the more important the
        feature).

        Returns
        -------
        feature_importances_ : array, shape = [n_features]
        """
        if self.estimators_ is None or len(self.estimators_) == 0:
            raise NotFittedError("Estimator not fitted, "
                                 "call `fit` before `feature_importances_`.")
        all_importances = Parallel(n_jobs=self.n_jobs,
                                   backend="threading")(
            delayed(getattr)(tree, 'feature_importances_')
            for tree in self.estimators_)
        # Average the per-tree importances.
        return sum(all_importances) / len(self.estimators_)
class ForestClassifier(six.with_metaclass(ABCMeta, BaseForest,
                                          ClassifierMixin)):
    """Base class for forest of trees-based classifiers.

    Warning: This class should not be used directly. Use derived classes
    instead.
    """
    @abstractmethod
    def __init__(self,
                 base_estimator,
                 n_estimators=10,
                 estimator_params=tuple(),
                 bootstrap=False,
                 oob_score=False,
                 n_jobs=1,
                 random_state=None,
                 verbose=0,
                 warm_start=False,
                 class_weight=None):
        super(ForestClassifier, self).__init__(
            base_estimator,
            n_estimators=n_estimators,
            estimator_params=estimator_params,
            bootstrap=bootstrap,
            oob_score=oob_score,
            n_jobs=n_jobs,
            random_state=random_state,
            verbose=verbose,
            warm_start=warm_start,
            class_weight=class_weight)

    def _set_oob_score(self, X, y):
        """Compute out-of-bag score"""
        X = check_array(X, dtype=DTYPE, accept_sparse='csr')
        n_classes_ = self.n_classes_
        n_samples = y.shape[0]
        oob_decision_function = []
        oob_score = 0.0
        predictions = []
        # One (n_samples, n_classes) probability accumulator per output.
        for k in range(self.n_outputs_):
            predictions.append(np.zeros((n_samples, n_classes_[k])))
        for estimator in self.estimators_:
            # Each tree only votes on the samples it did NOT see during
            # its bootstrap fit.
            unsampled_indices = _generate_unsampled_indices(
                estimator.random_state, n_samples)
            p_estimator = estimator.predict_proba(X[unsampled_indices, :],
                                                  check_input=False)
            if self.n_outputs_ == 1:
                p_estimator = [p_estimator]
            for k in range(self.n_outputs_):
                predictions[k][unsampled_indices, :] += p_estimator[k]
        for k in range(self.n_outputs_):
            # A row summing to zero means no tree ever left that sample
            # out of its bootstrap sample.
            if (predictions[k].sum(axis=1) == 0).any():
                warn("Some inputs do not have OOB scores. "
                     "This probably means too few trees were used "
                     "to compute any reliable oob estimates.")
            # Normalize accumulated votes to probabilities (rows with a
            # zero sum produce NaN here, as documented for
            # oob_decision_function_).
            decision = (predictions[k] /
                        predictions[k].sum(axis=1)[:, np.newaxis])
            oob_decision_function.append(decision)
            oob_score += np.mean(y[:, k] ==
                                 np.argmax(predictions[k], axis=1), axis=0)
        if self.n_outputs_ == 1:
            self.oob_decision_function_ = oob_decision_function[0]
        else:
            self.oob_decision_function_ = oob_decision_function
        # Mean accuracy over outputs.
        self.oob_score_ = oob_score / self.n_outputs_

    def _validate_y_class_weight(self, y):
        # Encode class labels as integer indices per output and expand
        # class_weight (if any) into per-sample weights.
        check_classification_targets(y)
        y = np.copy(y)
        expanded_class_weight = None
        if self.class_weight is not None:
            # Keep the original labels: compute_sample_weight needs them.
            y_original = np.copy(y)
        self.classes_ = []
        self.n_classes_ = []
        # NOTE(review): np.int is deprecated in modern NumPy; presumably
        # this fork targets an older NumPy — confirm before upgrading.
        y_store_unique_indices = np.zeros(y.shape, dtype=np.int)
        for k in range(self.n_outputs_):
            classes_k, y_store_unique_indices[:, k] = np.unique(y[:, k], return_inverse=True)
            self.classes_.append(classes_k)
            self.n_classes_.append(classes_k.shape[0])
        y = y_store_unique_indices
        if self.class_weight is not None:
            valid_presets = ('auto', 'balanced', 'subsample', 'balanced_subsample')
            if isinstance(self.class_weight, six.string_types):
                if self.class_weight not in valid_presets:
                    raise ValueError('Valid presets for class_weight include '
                                     '"balanced" and "balanced_subsample". Given "%s".'
                                     % self.class_weight)
                if self.class_weight == "subsample":
                    warn("class_weight='subsample' is deprecated in 0.17 and"
                         "will be removed in 0.19. It was replaced by "
                         "class_weight='balanced_subsample' using the balanced"
                         "strategy.", DeprecationWarning)
                if self.warm_start:
                    warn('class_weight presets "balanced" or "balanced_subsample" are '
                         'not recommended for warm_start if the fitted data '
                         'differs from the full dataset. In order to use '
                         '"balanced" weights, use compute_class_weight("balanced", '
                         'classes, y). In place of y you can use a large '
                         'enough sample of the full training set target to '
                         'properly estimate the class frequency '
                         'distributions. Pass the resulting weights as the '
                         'class_weight parameter.')
            # The *_subsample presets are applied per-bootstrap inside
            # _parallel_build_trees; only expand weights here when they
            # apply to the full dataset (or bootstrap is off).
            if (self.class_weight not in ['subsample', 'balanced_subsample'] or
                    not self.bootstrap):
                if self.class_weight == 'subsample':
                    class_weight = 'auto'
                elif self.class_weight == "balanced_subsample":
                    class_weight = "balanced"
                else:
                    class_weight = self.class_weight
                with warnings.catch_warnings():
                    if class_weight == "auto":
                        # 'auto' is deprecated; silence its warning.
                        warnings.simplefilter('ignore', DeprecationWarning)
                    expanded_class_weight = compute_sample_weight(class_weight,
                                                                  y_original)
        return y, expanded_class_weight

    def predict(self, X):
        """Predict class for X.

        The predicted class of an input sample is a vote by the trees in
        the forest, weighted by their probability estimates. That is,
        the predicted class is the one with highest mean probability
        estimate across the trees.

        Parameters
        ----------
        X : array-like or sparse matrix of shape = [n_samples, n_features]
            The input samples. Internally, its dtype will be converted to
            ``dtype=np.float32``. If a sparse matrix is provided, it will be
            converted into a sparse ``csr_matrix``.

        Returns
        -------
        y : array of shape = [n_samples] or [n_samples, n_outputs]
            The predicted classes.
        """
        proba = self.predict_proba(X)
        if self.n_outputs_ == 1:
            # Map argmax indices back to the original class labels.
            return self.classes_.take(np.argmax(proba, axis=1), axis=0)
        else:
            n_samples = proba[0].shape[0]
            predictions = np.zeros((n_samples, self.n_outputs_))
            for k in range(self.n_outputs_):
                predictions[:, k] = self.classes_[k].take(np.argmax(proba[k],
                                                                    axis=1),
                                                          axis=0)
            return predictions

    def predict_proba(self, X):
        """Predict class probabilities for X.

        The predicted class probabilities of an input sample are computed as
        the mean predicted class probabilities of the trees in the forest.
        The class probability of a single tree is the fraction of samples of
        the same class in a leaf.

        Parameters
        ----------
        X : array-like or sparse matrix of shape = [n_samples, n_features]
            The input samples. Internally, its dtype will be converted to
            ``dtype=np.float32``. If a sparse matrix is provided, it will be
            converted into a sparse ``csr_matrix``.

        Returns
        -------
        p : array of shape = [n_samples, n_classes], or a list of n_outputs
            such arrays if n_outputs > 1.
            The class probabilities of the input samples. The order of the
            classes corresponds to that in the attribute `classes_`.
        """
        # Check data
        X = self._validate_X_predict(X)
        # Assign chunk of trees to jobs
        n_jobs, _, _ = _partition_estimators(self.n_estimators, self.n_jobs)
        # Parallel loop
        all_proba = Parallel(n_jobs=n_jobs, verbose=self.verbose,
                             backend="threading")(
            delayed(parallel_helper)(e, 'predict_proba', X,
                                     check_input=False)
            for e in self.estimators_)
        # Reduce: sum per-tree probabilities, then average.
        proba = all_proba[0]
        if self.n_outputs_ == 1:
            for j in range(1, len(all_proba)):
                proba += all_proba[j]
            proba /= len(self.estimators_)
        else:
            for j in range(1, len(all_proba)):
                for k in range(self.n_outputs_):
                    proba[k] += all_proba[j][k]
            # NOTE(review): this branch divides by self.n_estimators while
            # the single-output branch uses len(self.estimators_); the two
            # agree after a completed fit — confirm if estimators_ can ever
            # diverge from n_estimators here.
            for k in range(self.n_outputs_):
                proba[k] /= self.n_estimators
        return proba

    def predict_log_proba(self, X):
        """Predict class log-probabilities for X.

        The predicted class log-probabilities of an input sample is computed
        as the log of the mean predicted class probabilities of the trees in
        the forest.

        Parameters
        ----------
        X : array-like or sparse matrix of shape = [n_samples, n_features]
            The input samples. Internally, its dtype will be converted to
            ``dtype=np.float32``. If a sparse matrix is provided, it will be
            converted into a sparse ``csr_matrix``.

        Returns
        -------
        p : array of shape = [n_samples, n_classes], or a list of n_outputs
            such arrays if n_outputs > 1.
            The class probabilities of the input samples. The order of the
            classes corresponds to that in the attribute `classes_`.
        """
        proba = self.predict_proba(X)
        if self.n_outputs_ == 1:
            return np.log(proba)
        else:
            for k in range(self.n_outputs_):
                proba[k] = np.log(proba[k])
            return proba
class ForestRegressor(six.with_metaclass(ABCMeta, BaseForest, RegressorMixin)):
    """Base class for forest of trees-based regressors.

    Warning: This class should not be used directly. Use derived classes
    instead.
    """
    @abstractmethod
    def __init__(self,
                 base_estimator,
                 n_estimators=10,
                 estimator_params=tuple(),
                 bootstrap=False,
                 oob_score=False,
                 n_jobs=1,
                 random_state=None,
                 verbose=0,
                 warm_start=False):
        super(ForestRegressor, self).__init__(
            base_estimator,
            n_estimators=n_estimators,
            estimator_params=estimator_params,
            bootstrap=bootstrap,
            oob_score=oob_score,
            n_jobs=n_jobs,
            random_state=random_state,
            verbose=verbose,
            warm_start=warm_start)

    def predict(self, X):
        """Predict regression target for X.

        The predicted regression target of an input sample is computed as
        the mean predicted regression targets of the trees in the forest.

        Parameters
        ----------
        X : array-like or sparse matrix of shape = [n_samples, n_features]
            The input samples. Internally, its dtype will be converted to
            ``dtype=np.float32``. If a sparse matrix is provided, it will be
            converted into a sparse ``csr_matrix``.

        Returns
        -------
        y : array of shape = [n_samples] or [n_samples, n_outputs]
            The predicted values.
        """
        # Check data
        X = self._validate_X_predict(X)
        # Assign chunk of trees to jobs
        n_jobs, _, _ = _partition_estimators(self.n_estimators, self.n_jobs)
        # Parallel loop
        all_y_hat = Parallel(n_jobs=n_jobs, verbose=self.verbose,
                             backend="threading")(
            delayed(parallel_helper)(e, 'predict', X, check_input=False)
            for e in self.estimators_)
        # Reduce: average the per-tree predictions.
        y_hat = sum(all_y_hat) / len(self.estimators_)
        return y_hat

    def _set_oob_score(self, X, y):
        """Compute out-of-bag scores"""
        X = check_array(X, dtype=DTYPE, accept_sparse='csr')
        n_samples = y.shape[0]
        predictions = np.zeros((n_samples, self.n_outputs_))
        # Tracks how many trees contributed an OOB prediction per sample.
        n_predictions = np.zeros((n_samples, self.n_outputs_))
        for estimator in self.estimators_:
            # Each tree only predicts the samples it did NOT see during
            # its bootstrap fit.
            unsampled_indices = _generate_unsampled_indices(
                estimator.random_state, n_samples)
            p_estimator = estimator.predict(
                X[unsampled_indices, :], check_input=False)
            if self.n_outputs_ == 1:
                p_estimator = p_estimator[:, np.newaxis]
            predictions[unsampled_indices, :] += p_estimator
            n_predictions[unsampled_indices, :] += 1
        if (n_predictions == 0).any():
            warn("Some inputs do not have OOB scores. "
                 "This probably means too few trees were used "
                 "to compute any reliable oob estimates.")
            # Avoid division by zero; such rows keep a prediction of 0.
            n_predictions[n_predictions == 0] = 1
        predictions /= n_predictions
        self.oob_prediction_ = predictions
        if self.n_outputs_ == 1:
            self.oob_prediction_ = \
                self.oob_prediction_.reshape((n_samples, ))
        # Mean R^2 over outputs.
        self.oob_score_ = 0.0
        for k in range(self.n_outputs_):
            self.oob_score_ += r2_score(y[:, k],
                                        predictions[:, k])
        self.oob_score_ /= self.n_outputs_
class RandomForestClassifier(ForestClassifier):
    """A random forest classifier.

    A random forest is a meta estimator that fits a number of decision tree
    classifiers on various sub-samples of the dataset and use averaging to
    improve the predictive accuracy and control over-fitting.
    The sub-sample size is always the same as the original
    input sample size but the samples are drawn with replacement if
    `bootstrap=True` (default).

    Read more in the :ref:`User Guide <forest>`.

    Parameters
    ----------
    n_estimators : integer, optional (default=10)
        The number of trees in the forest.
    criterion : string, optional (default="gini")
        The function to measure the quality of a split. Supported criteria are
        "gini" for the Gini impurity and "entropy" for the information gain.
        Note: this parameter is tree-specific.
    max_features : int, float, string or None, optional (default="auto")
        The number of features to consider when looking for the best split:
        - If int, then consider `max_features` features at each split.
        - If float, then `max_features` is a percentage and
          `int(max_features * n_features)` features are considered at each
          split.
        - If "auto", then `max_features=sqrt(n_features)`.
        - If "sqrt", then `max_features=sqrt(n_features)` (same as "auto").
        - If "log2", then `max_features=log2(n_features)`.
        - If None, then `max_features=n_features`.
        Note: the search for a split does not stop until at least one
        valid partition of the node samples is found, even if it requires to
        effectively inspect more than ``max_features`` features.
    max_depth : integer or None, optional (default=None)
        The maximum depth of the tree. If None, then nodes are expanded until
        all leaves are pure or until all leaves contain less than
        min_samples_split samples.
    min_samples_split : int, float, optional (default=2)
        The minimum number of samples required to split an internal node:
        - If int, then consider `min_samples_split` as the minimum number.
        - If float, then `min_samples_split` is a percentage and
          `ceil(min_samples_split * n_samples)` are the minimum
          number of samples for each split.
    min_samples_leaf : int, float, optional (default=1)
        The minimum number of samples required to be at a leaf node:
        - If int, then consider `min_samples_leaf` as the minimum number.
        - If float, then `min_samples_leaf` is a percentage and
          `ceil(min_samples_leaf * n_samples)` are the minimum
          number of samples for each node.
    min_weight_fraction_leaf : float, optional (default=0.)
        The minimum weighted fraction of the input samples required to be at a
        leaf node.
    max_leaf_nodes : int or None, optional (default=None)
        Grow trees with ``max_leaf_nodes`` in best-first fashion.
        Best nodes are defined as relative reduction in impurity.
        If None then unlimited number of leaf nodes.
    min_impurity_split : float, optional (default=1e-7)
        Threshold for early stopping in tree growth. A node will split
        if its impurity is above the threshold, otherwise it is a leaf.
        .. versionadded:: 0.18
    bootstrap : boolean, optional (default=True)
        Whether bootstrap samples are used when building trees.
    oob_score : bool (default=False)
        Whether to use out-of-bag samples to estimate
        the generalization accuracy.
    n_jobs : integer, optional (default=1)
        The number of jobs to run in parallel for both `fit` and `predict`.
        If -1, then the number of jobs is set to the number of cores.
    random_state : int, RandomState instance or None, optional (default=None)
        If int, random_state is the seed used by the random number generator;
        If RandomState instance, random_state is the random number generator;
        If None, the random number generator is the RandomState instance used
        by `np.random`.
    verbose : int, optional (default=0)
        Controls the verbosity of the tree building process.
    warm_start : bool, optional (default=False)
        When set to ``True``, reuse the solution of the previous call to fit
        and add more estimators to the ensemble, otherwise, just fit a whole
        new forest.
    class_weight : dict, list of dicts, "balanced",
        "balanced_subsample" or None, optional (default=None)
        Weights associated with classes in the form ``{class_label: weight}``.
        If not given, all classes are supposed to have weight one. For
        multi-output problems, a list of dicts can be provided in the same
        order as the columns of y.
        The "balanced" mode uses the values of y to automatically adjust
        weights inversely proportional to class frequencies in the input data
        as ``n_samples / (n_classes * np.bincount(y))``
        The "balanced_subsample" mode is the same as "balanced" except that
        weights are computed based on the bootstrap sample for every tree
        grown.
        For multi-output, the weights of each column of y will be multiplied.
        Note that these weights will be multiplied with sample_weight (passed
        through the fit method) if sample_weight is specified.
    increasing : list of ints, optional (default=None)
        Indices of features to have a monotonically increasing effect.
    decreasing : list of ints, optional (default=None)
        Indices of features to have a monotonically decreasing effect.

    Attributes
    ----------
    estimators_ : list of DecisionTreeClassifier
        The collection of fitted sub-estimators.
    classes_ : array of shape = [n_classes] or a list of such arrays
        The classes labels (single output problem), or a list of arrays of
        class labels (multi-output problem).
    n_classes_ : int or list
        The number of classes (single output problem), or a list containing the
        number of classes for each output (multi-output problem).
    n_features_ : int
        The number of features when ``fit`` is performed.
    n_outputs_ : int
        The number of outputs when ``fit`` is performed.
    feature_importances_ : array of shape = [n_features]
        The feature importances (the higher, the more important the feature).
    oob_score_ : float
        Score of the training dataset obtained using an out-of-bag estimate.
    oob_decision_function_ : array of shape = [n_samples, n_classes]
        Decision function computed with out-of-bag estimate on the training
        set. If n_estimators is small it might be possible that a data point
        was never left out during the bootstrap. In this case,
        `oob_decision_function_` might contain NaN.

    References
    ----------
    .. [1] L. Breiman, "Random Forests", Machine Learning, 45(1), 5-32, 2001.

    See also
    --------
    DecisionTreeClassifier, ExtraTreesClassifier
    """
    def __init__(self,
                 n_estimators=10,
                 criterion="gini",
                 max_depth=None,
                 min_samples_split=2,
                 min_samples_leaf=1,
                 min_weight_fraction_leaf=0.,
                 max_features="auto",
                 max_leaf_nodes=None,
                 min_impurity_split=1e-7,
                 bootstrap=True,
                 oob_score=False,
                 n_jobs=1,
                 random_state=None,
                 verbose=0,
                 warm_start=False,
                 class_weight=None,
                 increasing=None,
                 decreasing=None):
        super(RandomForestClassifier, self).__init__(
            base_estimator=DecisionTreeClassifier(),
            n_estimators=n_estimators,
            # BUGFIX: "increasing" and "decreasing" must be listed here so
            # BaseEnsemble._make_estimator forwards them to each tree;
            # previously they were accepted and documented but silently
            # ignored (RandomForestRegressor already forwards them).
            estimator_params=("criterion", "max_depth", "min_samples_split",
                              "min_samples_leaf", "min_weight_fraction_leaf",
                              "max_features", "max_leaf_nodes",
                              "min_impurity_split", "random_state",
                              "increasing", "decreasing"),
            bootstrap=bootstrap,
            oob_score=oob_score,
            n_jobs=n_jobs,
            random_state=random_state,
            verbose=verbose,
            warm_start=warm_start,
            class_weight=class_weight)
        # Tree-specific hyperparameters, stored unmodified (scikit-learn
        # convention); forwarded to each tree via estimator_params.
        self.criterion = criterion
        self.max_depth = max_depth
        self.min_samples_split = min_samples_split
        self.min_samples_leaf = min_samples_leaf
        self.min_weight_fraction_leaf = min_weight_fraction_leaf
        self.max_features = max_features
        self.max_leaf_nodes = max_leaf_nodes
        self.min_impurity_split = min_impurity_split
        self.increasing = increasing
        self.decreasing = decreasing
class RandomForestRegressor(ForestRegressor):
    """A random forest regressor.

    A random forest is a meta estimator that fits a number of classifying
    decision trees on various sub-samples of the dataset and use averaging
    to improve the predictive accuracy and control over-fitting.
    The sub-sample size is always the same as the original
    input sample size but the samples are drawn with replacement if
    `bootstrap=True` (default).

    Read more in the :ref:`User Guide <forest>`.

    Parameters
    ----------
    n_estimators : integer, optional (default=10)
        The number of trees in the forest.
    criterion : string, optional (default="mse")
        The function to measure the quality of a split. Supported criteria
        are "mse" for the mean squared error, which is equal to variance
        reduction as feature selection criterion, and "mae" for the mean
        absolute error.
        .. versionadded:: 0.18
           Mean Absolute Error (MAE) criterion.
    max_features : int, float, string or None, optional (default="auto")
        The number of features to consider when looking for the best split:
        - If int, then consider `max_features` features at each split.
        - If float, then `max_features` is a percentage and
          `int(max_features * n_features)` features are considered at each
          split.
        - If "auto", then `max_features=n_features`.
        - If "sqrt", then `max_features=sqrt(n_features)`.
        - If "log2", then `max_features=log2(n_features)`.
        - If None, then `max_features=n_features`.
        Note: the search for a split does not stop until at least one
        valid partition of the node samples is found, even if it requires to
        effectively inspect more than ``max_features`` features.
    max_depth : integer or None, optional (default=None)
        The maximum depth of the tree. If None, then nodes are expanded until
        all leaves are pure or until all leaves contain less than
        min_samples_split samples.
    min_samples_split : int, float, optional (default=2)
        The minimum number of samples required to split an internal node:
        - If int, then consider `min_samples_split` as the minimum number.
        - If float, then `min_samples_split` is a percentage and
          `ceil(min_samples_split * n_samples)` are the minimum
          number of samples for each split.
    min_samples_leaf : int, float, optional (default=1)
        The minimum number of samples required to be at a leaf node:
        - If int, then consider `min_samples_leaf` as the minimum number.
        - If float, then `min_samples_leaf` is a percentage and
          `ceil(min_samples_leaf * n_samples)` are the minimum
          number of samples for each node.
    min_weight_fraction_leaf : float, optional (default=0.)
        The minimum weighted fraction of the input samples required to be at a
        leaf node.
    max_leaf_nodes : int or None, optional (default=None)
        Grow trees with ``max_leaf_nodes`` in best-first fashion.
        Best nodes are defined as relative reduction in impurity.
        If None then unlimited number of leaf nodes.
    min_impurity_split : float, optional (default=1e-7)
        Threshold for early stopping in tree growth. A node will split
        if its impurity is above the threshold, otherwise it is a leaf.
        .. versionadded:: 0.18
    bootstrap : boolean, optional (default=True)
        Whether bootstrap samples are used when building trees.
    oob_score : bool, optional (default=False)
        whether to use out-of-bag samples to estimate
        the R^2 on unseen data.
    n_jobs : integer, optional (default=1)
        The number of jobs to run in parallel for both `fit` and `predict`.
        If -1, then the number of jobs is set to the number of cores.
    random_state : int, RandomState instance or None, optional (default=None)
        If int, random_state is the seed used by the random number generator;
        If RandomState instance, random_state is the random number generator;
        If None, the random number generator is the RandomState instance used
        by `np.random`.
    verbose : int, optional (default=0)
        Controls the verbosity of the tree building process.
    warm_start : bool, optional (default=False)
        When set to ``True``, reuse the solution of the previous call to fit
        and add more estimators to the ensemble, otherwise, just fit a whole
        new forest.
    increasing : list of ints, optional (default=None)
        Indices of features to have a monotonically increasing effect.
    decreasing : list of ints, optional (default=None)
        Indices of features to have a monotonically decreasing effect.

    Attributes
    ----------
    estimators_ : list of DecisionTreeRegressor
        The collection of fitted sub-estimators.
    feature_importances_ : array of shape = [n_features]
        The feature importances (the higher, the more important the feature).
    n_features_ : int
        The number of features when ``fit`` is performed.
    n_outputs_ : int
        The number of outputs when ``fit`` is performed.
    oob_score_ : float
        Score of the training dataset obtained using an out-of-bag estimate.
    oob_prediction_ : array of shape = [n_samples]
        Prediction computed with out-of-bag estimate on the training set.

    References
    ----------
    .. [1] L. Breiman, "Random Forests", Machine Learning, 45(1), 5-32, 2001.

    See also
    --------
    DecisionTreeRegressor, ExtraTreesRegressor
    """
    def __init__(self,
                 n_estimators=10,
                 criterion="mse",
                 max_depth=None,
                 min_samples_split=2,
                 min_samples_leaf=1,
                 min_weight_fraction_leaf=0.,
                 max_features="auto",
                 max_leaf_nodes=None,
                 min_impurity_split=1e-7,
                 bootstrap=True,
                 oob_score=False,
                 n_jobs=1,
                 random_state=None,
                 verbose=0,
                 warm_start=False,
                 increasing=None,
                 decreasing=None):
        super(RandomForestRegressor, self).__init__(
            base_estimator=DecisionTreeRegressor(),
            n_estimators=n_estimators,
            # Parameters listed here are copied onto each tree by
            # BaseEnsemble._make_estimator, including the monotonicity
            # constraints "increasing"/"decreasing" added by this fork.
            estimator_params=("criterion", "max_depth", "min_samples_split",
                              "min_samples_leaf", "min_weight_fraction_leaf",
                              "max_features", "max_leaf_nodes", "min_impurity_split",
                              "random_state", "increasing", "decreasing"),
            bootstrap=bootstrap,
            oob_score=oob_score,
            n_jobs=n_jobs,
            random_state=random_state,
            verbose=verbose,
            warm_start=warm_start)
        # Tree-specific hyperparameters, stored unmodified (scikit-learn
        # convention); forwarded to each tree via estimator_params.
        self.criterion = criterion
        self.max_depth = max_depth
        self.min_samples_split = min_samples_split
        self.min_samples_leaf = min_samples_leaf
        self.min_weight_fraction_leaf = min_weight_fraction_leaf
        self.max_features = max_features
        self.max_leaf_nodes = max_leaf_nodes
        self.min_impurity_split = min_impurity_split
        self.increasing = increasing
        self.decreasing = decreasing
class ExtraTreesClassifier(ForestClassifier):
    """An extra-trees classifier.

    A meta estimator that fits ``n_estimators`` randomized decision trees
    (a.k.a. extra-trees) on various sub-samples of the dataset and uses
    averaging to improve the predictive accuracy and control over-fitting.

    Read more in the :ref:`User Guide <forest>`.

    Parameters
    ----------
    n_estimators : integer, optional (default=10)
        The number of trees in the forest.
    criterion : string, optional (default="gini")
        Split quality measure: "gini" (Gini impurity) or "entropy"
        (information gain).
    max_features : int, float, string or None, optional (default="auto")
        Number of features to consider when looking for the best split.
        An int is used as-is; a float is a percentage,
        ``int(max_features * n_features)``; "auto" and "sqrt" mean
        ``sqrt(n_features)``, "log2" means ``log2(n_features)`` and None
        means ``n_features``.  The search never stops before at least one
        valid partition of the node samples is found, even if that
        requires inspecting more than ``max_features`` features.
    max_depth : integer or None, optional (default=None)
        Maximum depth of the trees; if None, nodes are expanded until all
        leaves are pure or contain fewer than ``min_samples_split``
        samples.
    min_samples_split : int, float, optional (default=2)
        Minimum number of samples required to split an internal node; a
        float is a percentage, ``ceil(min_samples_split * n_samples)``.
    min_samples_leaf : int, float, optional (default=1)
        Minimum number of samples required at a leaf node; a float is a
        percentage, ``ceil(min_samples_leaf * n_samples)``.
    min_weight_fraction_leaf : float, optional (default=0.)
        Minimum weighted fraction of the input samples required at a
        leaf node.
    max_leaf_nodes : int or None, optional (default=None)
        Grow trees with at most ``max_leaf_nodes`` in best-first fashion
        (best = relative impurity reduction); None means unlimited.
    min_impurity_split : float, optional (default=1e-7)
        Early-stopping threshold: a node splits only if its impurity is
        above this value, otherwise it becomes a leaf.

        .. versionadded:: 0.18
    bootstrap : boolean, optional (default=False)
        Whether bootstrap samples are used when building trees.
    oob_score : bool, optional (default=False)
        Whether to use out-of-bag samples to estimate the generalization
        accuracy.
    n_jobs : integer, optional (default=1)
        Number of parallel jobs for both `fit` and `predict`; -1 uses the
        number of cores.
    random_state : int, RandomState instance or None, optional (default=None)
        Seed (int), generator (RandomState) or None to use the global
        ``np.random`` state.
    verbose : int, optional (default=0)
        Controls the verbosity of the tree building process.
    warm_start : bool, optional (default=False)
        When True, reuse the solution of the previous call to fit and add
        more estimators to the ensemble instead of fitting a new forest.
    class_weight : dict, list of dicts, "balanced", "balanced_subsample" or None, optional (default=None)
        Weights associated with classes as ``{class_label: weight}`` (a
        list of dicts, one per output column, for multi-output).
        "balanced" adjusts weights inversely proportional to class
        frequencies, ``n_samples / (n_classes * np.bincount(y))``;
        "balanced_subsample" recomputes those weights on the bootstrap
        sample of every tree.  For multi-output, the weights of each
        column of y are multiplied.  These weights are multiplied with
        ``sample_weight`` if it is passed to ``fit``.
    increasing : list of ints, optional (default=None)
        Indices of features to have a monotonically increasing effect.
    decreasing : list of ints, optional (default=None)
        Indices of features to have a monotonically decreasing effect.

    Attributes
    ----------
    estimators_ : list of DecisionTreeClassifier
        The collection of fitted sub-estimators.
    classes_ : array of shape = [n_classes] or a list of such arrays
        The class labels (one array per output for multi-output).
    n_classes_ : int or list
        The number of classes (per output for multi-output problems).
    feature_importances_ : array of shape = [n_features]
        The feature importances (the higher, the more important).
    n_features_ : int
        The number of features when ``fit`` is performed.
    n_outputs_ : int
        The number of outputs when ``fit`` is performed.
    oob_score_ : float
        Score of the training dataset obtained using an out-of-bag
        estimate.
    oob_decision_function_ : array of shape = [n_samples, n_classes]
        Decision function computed with out-of-bag estimates on the
        training set; may contain NaN if ``n_estimators`` is small and a
        data point was never left out of a bootstrap.

    References
    ----------
    .. [1] P. Geurts, D. Ernst., and L. Wehenkel, "Extremely randomized
           trees", Machine Learning, 63(1), 3-42, 2006.

    See also
    --------
    sklearn.tree.ExtraTreeClassifier : Base classifier for this ensemble.
    RandomForestClassifier : Ensemble classifier based on trees with
        optimal splits.
    """

    def __init__(self,
                 n_estimators=10,
                 criterion="gini",
                 max_depth=None,
                 min_samples_split=2,
                 min_samples_leaf=1,
                 min_weight_fraction_leaf=0.,
                 max_features="auto",
                 max_leaf_nodes=None,
                 min_impurity_split=1e-7,
                 bootstrap=False,
                 oob_score=False,
                 n_jobs=1,
                 random_state=None,
                 verbose=0,
                 warm_start=False,
                 class_weight=None,
                 increasing=None,
                 decreasing=None):
        # Hyper-parameters that are forwarded verbatim to every
        # ExtraTreeClassifier in the ensemble.
        per_tree_params = ("criterion", "max_depth", "min_samples_split",
                           "min_samples_leaf", "min_weight_fraction_leaf",
                           "max_features", "max_leaf_nodes",
                           "min_impurity_split", "random_state",
                           "increasing", "decreasing")
        super(ExtraTreesClassifier, self).__init__(
            base_estimator=ExtraTreeClassifier(),
            n_estimators=n_estimators,
            estimator_params=per_tree_params,
            bootstrap=bootstrap,
            oob_score=oob_score,
            n_jobs=n_jobs,
            random_state=random_state,
            verbose=verbose,
            warm_start=warm_start,
            class_weight=class_weight)
        # Stored unmodified so get_params()/set_params()/clone() work.
        self.criterion = criterion
        self.max_depth = max_depth
        self.min_samples_split = min_samples_split
        self.min_samples_leaf = min_samples_leaf
        self.min_weight_fraction_leaf = min_weight_fraction_leaf
        self.max_features = max_features
        self.max_leaf_nodes = max_leaf_nodes
        self.min_impurity_split = min_impurity_split
        self.increasing = increasing
        self.decreasing = decreasing
class ExtraTreesRegressor(ForestRegressor):
    """An extra-trees regressor.

    A meta estimator that fits ``n_estimators`` randomized decision trees
    (a.k.a. extra-trees) on various sub-samples of the dataset and uses
    averaging to improve the predictive accuracy and control over-fitting.

    Read more in the :ref:`User Guide <forest>`.

    Parameters
    ----------
    n_estimators : integer, optional (default=10)
        The number of trees in the forest.
    criterion : string, optional (default="mse")
        Split quality measure: "mse" (mean squared error, equal to
        variance reduction as feature selection criterion) or "mae"
        (mean absolute error).

        .. versionadded:: 0.18
           Mean Absolute Error (MAE) criterion.
    max_features : int, float, string or None, optional (default="auto")
        Number of features to consider when looking for the best split.
        An int is used as-is; a float is a percentage,
        ``int(max_features * n_features)``; "auto" and None mean
        ``n_features``, "sqrt" means ``sqrt(n_features)`` and "log2"
        means ``log2(n_features)``.  The search never stops before at
        least one valid partition of the node samples is found, even if
        that requires inspecting more than ``max_features`` features.
    max_depth : integer or None, optional (default=None)
        Maximum depth of the trees; if None, nodes are expanded until all
        leaves are pure or contain fewer than ``min_samples_split``
        samples.
    min_samples_split : int, float, optional (default=2)
        Minimum number of samples required to split an internal node; a
        float is a percentage, ``ceil(min_samples_split * n_samples)``.
    min_samples_leaf : int, float, optional (default=1)
        Minimum number of samples required at a leaf node; a float is a
        percentage, ``ceil(min_samples_leaf * n_samples)``.
    min_weight_fraction_leaf : float, optional (default=0.)
        Minimum weighted fraction of the input samples required at a
        leaf node.
    max_leaf_nodes : int or None, optional (default=None)
        Grow trees with at most ``max_leaf_nodes`` in best-first fashion
        (best = relative impurity reduction); None means unlimited.
    min_impurity_split : float, optional (default=1e-7)
        Early-stopping threshold: a node splits only if its impurity is
        above this value, otherwise it becomes a leaf.

        .. versionadded:: 0.18
    bootstrap : boolean, optional (default=False)
        Whether bootstrap samples are used when building trees.
    oob_score : bool, optional (default=False)
        Whether to use out-of-bag samples to estimate the R^2 on unseen
        data.
    n_jobs : integer, optional (default=1)
        Number of parallel jobs for both `fit` and `predict`; -1 uses the
        number of cores.
    random_state : int, RandomState instance or None, optional (default=None)
        Seed (int), generator (RandomState) or None to use the global
        ``np.random`` state.
    verbose : int, optional (default=0)
        Controls the verbosity of the tree building process.
    warm_start : bool, optional (default=False)
        When True, reuse the solution of the previous call to fit and add
        more estimators to the ensemble instead of fitting a new forest.
    increasing : list of ints, optional (default=None)
        Indices of features to have a monotonically increasing effect.
    decreasing : list of ints, optional (default=None)
        Indices of features to have a monotonically decreasing effect.

    Attributes
    ----------
    estimators_ : list of DecisionTreeRegressor
        The collection of fitted sub-estimators.
    feature_importances_ : array of shape = [n_features]
        The feature importances (the higher, the more important).
    n_features_ : int
        The number of features.
    n_outputs_ : int
        The number of outputs.
    oob_score_ : float
        Score of the training dataset obtained using an out-of-bag
        estimate.
    oob_prediction_ : array of shape = [n_samples]
        Prediction computed with out-of-bag estimate on the training set.

    References
    ----------
    .. [1] P. Geurts, D. Ernst., and L. Wehenkel, "Extremely randomized
           trees", Machine Learning, 63(1), 3-42, 2006.

    See also
    --------
    sklearn.tree.ExtraTreeRegressor: Base estimator for this ensemble.
    RandomForestRegressor: Ensemble regressor using trees with optimal
        splits.
    """

    def __init__(self,
                 n_estimators=10,
                 criterion="mse",
                 max_depth=None,
                 min_samples_split=2,
                 min_samples_leaf=1,
                 min_weight_fraction_leaf=0.,
                 max_features="auto",
                 max_leaf_nodes=None,
                 min_impurity_split=1e-7,
                 bootstrap=False,
                 oob_score=False,
                 n_jobs=1,
                 random_state=None,
                 verbose=0,
                 warm_start=False,
                 increasing=None,
                 decreasing=None):
        # Hyper-parameters forwarded verbatim to every ExtraTreeRegressor.
        per_tree_params = ("criterion", "max_depth", "min_samples_split",
                           "min_samples_leaf", "min_weight_fraction_leaf",
                           "max_features", "max_leaf_nodes",
                           "min_impurity_split", "random_state",
                           "increasing", "decreasing")
        super(ExtraTreesRegressor, self).__init__(
            base_estimator=ExtraTreeRegressor(),
            n_estimators=n_estimators,
            estimator_params=per_tree_params,
            bootstrap=bootstrap,
            oob_score=oob_score,
            n_jobs=n_jobs,
            random_state=random_state,
            verbose=verbose,
            warm_start=warm_start)
        # Stored unmodified so get_params()/set_params()/clone() work.
        self.criterion = criterion
        self.max_depth = max_depth
        self.min_samples_split = min_samples_split
        self.min_samples_leaf = min_samples_leaf
        self.min_weight_fraction_leaf = min_weight_fraction_leaf
        self.max_features = max_features
        self.max_leaf_nodes = max_leaf_nodes
        self.min_impurity_split = min_impurity_split
        self.increasing = increasing
        self.decreasing = decreasing
class RandomTreesEmbedding(BaseForest):
    """An ensemble of totally random trees.

    An unsupervised transformation of a dataset to a high-dimensional
    sparse representation: each sample is coded by the leaf of each tree
    it is sorted into, and the leaves are one-hot encoded, so the binary
    code has exactly one active bit per tree.

    The dimensionality of the resulting representation is
    ``n_out <= n_estimators * max_leaf_nodes``; when
    ``max_leaf_nodes == None`` the number of leaf nodes is at most
    ``n_estimators * 2 ** max_depth``.

    Read more in the :ref:`User Guide <random_trees_embedding>`.

    Parameters
    ----------
    n_estimators : integer, optional (default=10)
        Number of trees in the forest.
    max_depth : integer, optional (default=5)
        Maximum depth of each tree; if None, nodes are expanded until all
        leaves are pure or contain fewer than ``min_samples_split``
        samples.
    min_samples_split : int, float, optional (default=2)
        Minimum number of samples required to split an internal node; a
        float is a percentage, ``ceil(min_samples_split * n_samples)``.
    min_samples_leaf : int, float, optional (default=1)
        Minimum number of samples required at a leaf node; a float is a
        percentage, ``ceil(min_samples_leaf * n_samples)``.
    min_weight_fraction_leaf : float, optional (default=0.)
        Minimum weighted fraction of the input samples required at a
        leaf node.
    max_leaf_nodes : int or None, optional (default=None)
        Grow trees with at most ``max_leaf_nodes`` in best-first fashion
        (best = relative impurity reduction); None means unlimited.
    min_impurity_split : float, optional (default=1e-7)
        Early-stopping threshold: a node splits only if its impurity is
        above this value, otherwise it becomes a leaf.

        .. versionadded:: 0.18
    sparse_output : bool, optional (default=True)
        Whether to return a sparse CSR matrix (the default) or a dense
        array compatible with dense pipeline operators.
    n_jobs : integer, optional (default=1)
        Number of parallel jobs for both `fit` and `predict`; -1 uses the
        number of cores.
    random_state : int, RandomState instance or None, optional (default=None)
        Seed (int), generator (RandomState) or None to use the global
        ``np.random`` state.
    verbose : int, optional (default=0)
        Controls the verbosity of the tree building process.
    warm_start : bool, optional (default=False)
        When True, reuse the solution of the previous call to fit and add
        more estimators to the ensemble instead of fitting a new forest.
    increasing : list of ints, optional (default=None)
        Indices of features to have a monotonically increasing effect.
    decreasing : list of ints, optional (default=None)
        Indices of features to have a monotonically decreasing effect.

    Attributes
    ----------
    estimators_ : list of DecisionTreeClassifier
        The collection of fitted sub-estimators.

    References
    ----------
    .. [1] P. Geurts, D. Ernst., and L. Wehenkel, "Extremely randomized
           trees", Machine Learning, 63(1), 3-42, 2006.
    .. [2] Moosmann, F. and Triggs, B. and Jurie, F. "Fast discriminative
           visual codebooks using randomized clustering forests"
           NIPS 2007
    """

    def __init__(self,
                 n_estimators=10,
                 max_depth=5,
                 min_samples_split=2,
                 min_samples_leaf=1,
                 min_weight_fraction_leaf=0.,
                 max_leaf_nodes=None,
                 min_impurity_split=1e-7,
                 sparse_output=True,
                 n_jobs=1,
                 random_state=None,
                 verbose=0,
                 warm_start=False,
                 increasing=None,
                 decreasing=None):
        # The same parameter list as the other forests is forwarded to the
        # sub-trees, but criterion and max_features are fixed below.
        per_tree_params = ("criterion", "max_depth", "min_samples_split",
                           "min_samples_leaf", "min_weight_fraction_leaf",
                           "max_features", "max_leaf_nodes",
                           "min_impurity_split", "random_state",
                           "increasing", "decreasing")
        super(RandomTreesEmbedding, self).__init__(
            base_estimator=ExtraTreeRegressor(),
            n_estimators=n_estimators,
            estimator_params=per_tree_params,
            bootstrap=False,
            oob_score=False,
            n_jobs=n_jobs,
            random_state=random_state,
            verbose=verbose,
            warm_start=warm_start)
        # Fixed choices: the trees regress random targets with a single
        # random feature per split, making them "totally random".
        self.criterion = 'mse'
        self.max_features = 1
        self.max_depth = max_depth
        self.min_samples_split = min_samples_split
        self.min_samples_leaf = min_samples_leaf
        self.min_weight_fraction_leaf = min_weight_fraction_leaf
        self.max_leaf_nodes = max_leaf_nodes
        self.min_impurity_split = min_impurity_split
        self.sparse_output = sparse_output
        self.increasing = increasing
        self.decreasing = decreasing

    def _set_oob_score(self, X, y):
        # The embedding has no meaningful prediction to score against.
        raise NotImplementedError("OOB score not supported by tree embedding")

    def fit(self, X, y=None, sample_weight=None):
        """Fit estimator.

        Parameters
        ----------
        X : array-like or sparse matrix, shape=(n_samples, n_features)
            The input samples. Use ``dtype=np.float32`` for maximum
            efficiency. Sparse matrices are also supported, use sparse
            ``csc_matrix`` for maximum efficiency.

        Returns
        -------
        self : object
            Returns self.
        """
        # All of the work happens in fit_transform; the transformed
        # output is simply discarded here.
        self.fit_transform(X, y, sample_weight=sample_weight)
        return self

    def fit_transform(self, X, y=None, sample_weight=None):
        """Fit estimator and transform dataset.

        Parameters
        ----------
        X : array-like or sparse matrix, shape=(n_samples, n_features)
            Input data used to build forests. Use ``dtype=np.float32``
            for maximum efficiency.

        Returns
        -------
        X_transformed : sparse matrix, shape=(n_samples, n_out)
            Transformed dataset.
        """
        # ensure_2d=False because there are actually unit tests checking
        # we fail for 1d input (the failure happens later).
        X = check_array(X, accept_sparse=['csc'], ensure_2d=False)
        if issparse(X):
            # Pre-sort indices once so each individual tree of the
            # ensemble does not have to sort them itself.
            X.sort_indices()
        # The targets are uniform noise: only the (random) structure of
        # the fitted trees matters for the embedding.
        rng = check_random_state(self.random_state)
        targets = rng.uniform(size=X.shape[0])
        super(RandomTreesEmbedding, self).fit(X, targets,
                                              sample_weight=sample_weight)
        self.one_hot_encoder_ = OneHotEncoder(sparse=self.sparse_output)
        return self.one_hot_encoder_.fit_transform(self.apply(X))

    def transform(self, X):
        """Transform dataset.

        Parameters
        ----------
        X : array-like or sparse matrix, shape=(n_samples, n_features)
            Input data to be transformed. Use ``dtype=np.float32`` for
            maximum efficiency. Sparse matrices are also supported, use
            sparse ``csr_matrix`` for maximum efficiency.

        Returns
        -------
        X_transformed : sparse matrix, shape=(n_samples, n_out)
            Transformed dataset.
        """
        # Leaf indices from the fitted forest, one-hot encoded with the
        # encoder fitted in fit_transform.
        return self.one_hot_encoder_.transform(self.apply(X))
|
PatrickOReilly/scikit-learn
|
sklearn/ensemble/forest.py
|
Python
|
bsd-3-clause
| 68,627
|
[
"Brian"
] |
0746fbc6b8650a90fef7240b8ab15b164d71534f033889c9f64af5f7f84ec244
|
#
# Copyright (C) 2013-2018 The ESPResSo project
#
# This file is part of ESPResSo.
#
# ESPResSo is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# ESPResSo is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
import unittest as ut
import espressomd
import espressomd.interactions
class Non_bonded_interactionsTests(ut.TestCase):
    # One shared simulation box for the whole TestCase; the tests only set
    # and read non-bonded interaction parameters on it.
    system = espressomd.System(box_l=[20.0, 20.0, 20.0])

    def intersMatch(self, inType, outType, inParams, outParams):
        """Check, if the interaction type set and gotten back as well as the
        bond parameters set and gotten back match. Only check keys present in
        ``inParams``.
        """
        # Returns a bool (printing the reason on mismatch) instead of
        # asserting, so callers can attach their own failure message.
        if inType != outType:
            print("Type mismatch:", inType, outType)
            return False
        for k in list(inParams.keys()):
            if k not in outParams:
                print(k, "missing from returned parameters")
                return False
            if outParams[k] != inParams[k]:
                print("Mismatch in parameter ", k, inParams[k], outParams[k])
                return False
        return True

    def parameterKeys(self, interObject):
        """
        Check :meth:`~espressomd.interactions.NonBondedInteraction.valid_keys`
        and :meth:`~espressomd.interactions.NonBondedInteraction.required_keys`
        return sets, and that
        :meth:`~espressomd.interactions.NonBondedInteraction.default_params`
        returns a dictionary with the correct keys.

        Parameters
        ----------
        interObject: instance of a class derived from :class:`espressomd.interactions.NonBondedInteraction`
            Object of the interaction to test, e.g.
            :class:`~espressomd.interactions.LennardJonesInteraction`
        """
        classname = interObject.__class__.__name__
        valid_keys = interObject.valid_keys()
        required_keys = interObject.required_keys()
        default_keys = set(interObject.default_params().keys())
        self.assertIsInstance(valid_keys, set,
                              "{}.valid_keys() must return a set".format(
                                  classname))
        self.assertIsInstance(required_keys, set,
                              "{}.required_keys() must return a set".format(
                                  classname))
        # Defaults must be valid keys ...
        self.assertTrue(default_keys.issubset(valid_keys),
                        "{}.default_params() has unknown parameters: {}".format(
                            classname, default_keys.difference(valid_keys)))
        # ... must not overlap the required (no-default) keys ...
        self.assertTrue(default_keys.isdisjoint(required_keys),
                        "{}.default_params() has extra parameters: {}".format(
                            classname, default_keys.intersection(required_keys)))
        # ... and together with the required keys must cover all valid keys.
        self.assertSetEqual(default_keys, valid_keys - required_keys,
                            "{}.default_params() should have keys: {}, got: {}".format(
                                classname, valid_keys - required_keys, default_keys))

    # NOTE: this is executed during class-body creation (it takes no self);
    # it builds and returns a bound-method-compatible test function that is
    # assigned to a ``test_*`` class attribute below.
    def generateTestForNon_bonded_interaction(
            _partType1, _partType2, _interClass, _params, _interName):
        """Generates test cases for checking interaction parameters set and
        gotten back from the espresso core actually match those in the Python
        classes. Only keys which are present in ``_params`` are checked.

        Parameters
        ----------
        _partType1, _partType2: :obj:`int`
            Particle type ids to check on
        _interClass: class derived from :class:`espressomd.interactions.NonBondedInteraction`
            Class of the interaction to test, e.g.
            :class:`~espressomd.interactions.LennardJonesInteraction`
        _params: :obj:`dict`
            Interaction parameters, e.g. ``{"k": 1., "r_0": 0}``
        _interName: :obj:`str`
            Name of the interaction property to set (e.g. ``"lennard_jones"``)
        """
        # Re-bind the arguments so the closure below captures stable names.
        partType1 = _partType1
        partType2 = _partType2
        interClass = _interClass
        params = _params
        interName = _interName

        def func(self):
            # This code is run at the execution of the generated function.
            # It will use the state of the variables in the outer function,
            # which was there, when the outer function was called
            # Set parameters
            getattr(self.system.non_bonded_inter[partType1, partType2],
                    interName).set_params(**params)
            # Read them out again
            outInter = getattr(
                self.system.non_bonded_inter[partType1, partType2], interName)
            outParams = outInter.get_params()
            self.assertTrue(
                self.intersMatch(
                    interClass,
                    type(outInter),
                    params,
                    outParams),
                interClass(**params).type_name() +
                ": value set and value gotten back differ for particle types " +
                str(partType1) +
                " and " +
                str(partType2) +
                ": " +
                params.__str__() +
                " vs. " +
                outParams.__str__())
            self.parameterKeys(outInter)
        return func

    # Generated set/get round-trip tests, gated on the features compiled
    # into the espressomd core.
    if espressomd.has_features(["LENNARD_JONES"]):
        test_lj1 = generateTestForNon_bonded_interaction(
            0, 0, espressomd.interactions.LennardJonesInteraction,
            {"epsilon": 1., "sigma": 2., "cutoff": 3.,
             "shift": 4., "offset": 5., "min": 7.},
            "lennard_jones")
        test_lj2 = generateTestForNon_bonded_interaction(
            0, 0, espressomd.interactions.LennardJonesInteraction,
            {"epsilon": 1.3, "sigma": 2.2, "cutoff": 3.4,
             "shift": 4.1, "offset": 5.1, "min": 7.1},
            "lennard_jones")
        test_lj3 = generateTestForNon_bonded_interaction(
            0, 0, espressomd.interactions.LennardJonesInteraction,
            {"epsilon": 1.3, "sigma": 2.2, "cutoff": 3.4,
             "shift": 4.1, "offset": 5.1, "min": 7.1},
            "lennard_jones")

    if espressomd.has_features(["LENNARD_JONES_GENERIC"]):
        test_ljgen1 = generateTestForNon_bonded_interaction(
            0, 0, espressomd.interactions.GenericLennardJonesInteraction,
            {"epsilon": 1., "sigma": 2., "cutoff": 3., "shift": 4.,
             "offset": 5., "e1": 7, "e2": 8, "b1": 9., "b2": 10.},
            "generic_lennard_jones")
        test_ljgen2 = generateTestForNon_bonded_interaction(
            0, 0, espressomd.interactions.GenericLennardJonesInteraction,
            {"epsilon": 1.1, "sigma": 2.1, "cutoff": 3.1, "shift": 4.1,
             "offset": 5.1, "e1": 71, "e2": 81, "b1": 9.1, "b2": 10.1},
            "generic_lennard_jones")
        test_ljgen3 = generateTestForNon_bonded_interaction(
            0, 0, espressomd.interactions.GenericLennardJonesInteraction,
            {"epsilon": 1.2, "sigma": 2.2, "cutoff": 3.2, "shift": 4.2,
             "offset": 5.2, "e1": 72, "e2": 82, "b1": 9.2, "b2": 10.2},
            "generic_lennard_jones")

    if espressomd.has_features(["GAY_BERNE"]):
        test_gb = generateTestForNon_bonded_interaction(
            0, 0, espressomd.interactions.GayBerneInteraction,
            {"eps": 1.0, "sig": 1.0, "cut": 4.0, "k1": 3.0,
             "k2": 5.0, "mu": 2.0, "nu": 1.0},
            "gay_berne")
# Run the test suite when this file is executed directly.
if __name__ == "__main__":
    ut.main()
|
mkuron/espresso
|
testsuite/python/interactions_non-bonded_interface.py
|
Python
|
gpl-3.0
| 7,883
|
[
"ESPResSo"
] |
fdf3d55bdc497d7c2133a26d2f84f2c17c182d82f3a313a2084298571f001ca2
|
# Copyright 2013-2020 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
import pytest
import spack.util.spack_yaml as syaml
@pytest.fixture()
def minimal_configuration():
    """Return the smallest environment configuration (as a dict) that can
    drive container generation: three specs plus a docker container
    section.
    """
    base_section = {
        'image': 'ubuntu:18.04',
        'spack': 'develop'
    }
    container_section = {
        'format': 'docker',
        'base': base_section
    }
    return {
        'spack': {
            'specs': ['gromacs', 'mpich', 'fftw precision=float'],
            'container': container_section
        }
    }
@pytest.fixture()
def config_dumper(tmpdir):
    """Function that dumps an environment config in a temporary folder."""
    def dumper(configuration):
        # Serialize with spack's YAML flavor and write it where an
        # environment would expect it.
        yaml_text = syaml.dump(configuration, default_flow_style=False)
        spack_yaml = tmpdir / 'spack.yaml'
        spack_yaml.write(yaml_text)
        return str(tmpdir)
    return dumper
@pytest.fixture()
def container_config_dir(minimal_configuration, config_dumper):
    # Convenience fixture: a directory that already contains the minimal
    # configuration dumped as spack.yaml.
    return config_dumper(minimal_configuration)
|
rspavel/spack
|
lib/spack/spack/test/container/conftest.py
|
Python
|
lgpl-2.1
| 1,160
|
[
"Gromacs"
] |
18b12fa6fa14141476852655cecec27af8955932f960c6d2448e002f8ad049e3
|
import sys
def set_help(input_write,dataname):
""" When using the help flag. It assists in writing all your files for you"""
print "\nIf you dont know parameters for these inputs, please refer to https://github.com/sipjca/cmlparser_py/blob/master/lammps_params.md\n"
print "Input the whole string you intend on using for each lammps parameter, without the type"
print "For example when prompted 'Enter Lammps units: ', enter real for the real style"
print "For a full example refer to the above link\n"
units = raw_input("Enter Lammps units: ")
atom_style = raw_input("Enter Lammps atom_style: ")
boundary = raw_input("Enter Lammps boundary: ")
bond_style = raw_input("Enter Lammps bond_style: ")
dielectric = raw_input("Enter Lammps dielectric: ")
pair_style = raw_input("Enter Lammps pair_style: ")
angle_style = raw_input("Enter Lammps angle_style: ")
special_bonds = raw_input("Enter Lammps special_bonds: ")
improper_style = raw_input("Enter Lammps improper_style: ")
kspace_style = raw_input("Enter Lammps kspace_style: ")
thermo_style = raw_input("Enter Lammps thermo_style: ")
dump1 = raw_input("Enter Lammps dump: ")
neighbor = raw_input("Enter Lammps neighbor: ")
neigh_modify = raw_input("Enter Lammps neigh_modify: ")
fix1 = raw_input("Enter fix 1: ")
fix2 = raw_input("Enter fix 2: ")
velocity = raw_input("Enter velocity: ")
timestep = raw_input("Enter timestep: ")
thermo = raw_input("Enter thermo: ")
run = raw_input("Enter run time: ")
restart1 = raw_input("Enter write_restart: ")
replicate = raw_input("Enter how many times to replicate: ")
fix3 = raw_input("Enter fix1 after replication: ")
fix4 = raw_input("Enter fix2 after replication: ")
velocity2 = raw_input("Enter velocity post replication: ")
dump2 = raw_input("Enter Lammps dump for replicate: ")
run2 = raw_input("Enter run time after replication: ")
restart2 = raw_input("Enter restart after replication: ")
#ADD IN HELP FOR HELP
print "units %s " % units
print "atom_style %s " % atom_style
print "boundary %s " % boundary
print "bond_style %s " % bond_style
print "dielectric %s " % dielectric
print "pair_style %s " % pair_style
print "angle_style %s " % angle_style
print "special_bonds %s " % special_bonds
print "improper_style %s " % improper_style
print "kspace_style %s " % kspace_style
print "thermo_style %s " % thermo_style
print "dump 1 %s " % dump1
print "neighbor %s " % neighbor
print "neigh_modify %s " % neigh_modify
print "fix1 %s " % fix1
print "fix2 %s " % fix2
print "velocity %s " % velocity
print "timestep %s " % timestep
print "thermo %s " % thermo
print "run %s " % run
print "write_restart %s " % restart1
print "replicate %s " % replicate
print "fix1 %s " % fix3
print "fix2 %s " % fix4
print "velocity %s " % velocity2
print "dump 2 %s " % dump2
print "run %s " % run2
print "write_restart %s " % restart2
correct = raw_input("Your file will be printed out almost exactly as above. Is this correct? (y/n)")
if correct == "n":
set()
else: #write output
lammps = open(input_write,"w")
sys.stdout = lammps
print "# created by CMLParser\n"
print "units %s " % units
print "atom_style %s " % atom_style
print "boundary %s " % boundary
print "bond_style %s " % bond_style
print "dielectric %s " % dielectric
print "pair_style %s " % pair_style
print "angle_style %s " % angle_style
print "dihedral_style opls"
print "special_bonds %s " % special_bonds
print "improper_style %s " % improper_style
print "kspace_style %s " % kspace_style
print "read_data %s" % dataname
print "thermo_style %s " % thermo_style
print "dump %s " % dump1
print "neighbor %s " % neighbor
print "neigh_modify %s " % neigh_modify
print "fix1 %s " % fix1
print "fix2 %s " % fix2
print "velocity %s " % velocity
print "timestep %s " % timestep
print "thermo %s " % thermo
print "run %s " % run
print "unfix 1"
print "unfix 2"
print "write_restart %s " % restart1
print "replicate %s " % replicate
print "undump 1"
print "fix1 %s " % fix3
print "fix2 %s " % fix4
print "velocity %s " % velocity2
print "dump 2 %s " % dump2
print "run %s " % run2
print "write_restart %s " % restart2
print "unfix 1"
print "unfix 2"
lammps.close()
def change_data_from_filein(file, dataname):
    """Replace the 'read_data' line of a LAMMPS input file with a new data file name.

    file     -- path to the LAMMPS input script to rewrite in place
    dataname -- name of the data file to substitute into the read_data command
    If no read_data line exists, the file is rewritten unchanged.
    """
    with open(file, 'r') as fileread:
        read = fileread.readlines()
    # FIX: the original compared the whole list of lines to the line index
    # ("if read == line"), which was never true, so the file was never updated;
    # it also raised NameError when no read_data line existed.
    line = None  # index of the last line starting with "read_data", if any
    for i in range(len(read)):
        split = read[i].split()
        if split and split[0] == "read_data":
            line = i
    with open(file, 'w') as filewrite:
        for i in range(len(read)):
            if i == line:
                # trailing newline added so the next command is not merged onto this line
                filewrite.write('read_data %s\n' % dataname)
            else:
                filewrite.write(read[i])
|
sipjca/cmlparser_py
|
setparams.py
|
Python
|
apache-2.0
| 5,206
|
[
"LAMMPS"
] |
bf427ff98396789e069da3d063192438a8ebc9bfc9470667073e97301d7b08c9
|
# import shutil # not used
from collections import Counter
import string, re
import os, os.path, sys, subprocess, inspect
import pickle
import yaml
#import getopt # not used
from optparse import OptionParser
from Bio.Blast.Applications import NcbiblastnCommandline
from Bio.Blast import NCBIXML
from Bio import SeqIO
# from Bio.SeqFeature import SeqFeature, FeatureLocation # not used
from Bio.SeqRecord import SeqRecord
from Bio.Alphabet import generic_dna
import Bio.Seq
# import Bio.SeqIO # already imported SeqIO in line 10
# import Bio.SeqRecord # already imported SeqRecord in line 12
import fileinput
from lxml import etree
from time import clock, time
import glob
from math import *
import traceback
from itertools import groupby
from operator import itemgetter
import numpy
# Make the project's local "modules" directory importable: resolve it relative to
# the directory containing this script (found via inspect, so it works regardless
# of the current working directory) and insert it near the front of sys.path.
module_folder_paths = ["modules"]
for module_folder_path in module_folder_paths:
    module_folder = os.path.realpath(os.path.abspath(os.path.join(os.path.split(inspect.getfile( inspect.currentframe() ))[0],module_folder_path)))
    if module_folder not in sys.path:
        sys.path.insert(1, module_folder)
import log_writer
from utility_functions import *
"""
Function
Main function -> Calls prepare, get_profiles, get_filesets, align_and_get_scores and write_scores functions
The option of the method
files[str]: The path to where the fastq file located
output_directory[str]: The path to where output file located
profile_file_directory[str]: The path to where reference.seq, and the EMM variant sequences (*.fas) files are located
bowtie[str]: The path to Bowtie2 command
samtools[str]: The path to SAMtools command
ids[str]: Unique identifier number
version[str]: version number
"""
def findST(files, output_directory, profile_file_directory, bowtie, samtools, ids, log_directory, verbose, version = ""):
    """Run the full EMM typing pipeline for one read set and return its top hits.

    files: pair of fastq paths; output_directory: where results and tmp/ go;
    profile_file_directory: location of reference.seq and *.fas variant files;
    bowtie/samtools: paths to the external commands; ids: sample identifier;
    log_directory: where the emm_typing stderr/stdout logs are written.
    (Indentation reconstructed during review - original structure was mangled.)
    """
    # start run time
    start = clock()
    if not os.path.exists(output_directory + '/tmp'):
        #make tmp directory in output_directory
        os.makedirs(output_directory + '/tmp')
    workingDir = output_directory + '/tmp'
    #create EMM_log file
    log = open(output_directory + "/EMM_log.txt", "w")  # NOTE(review): handle is never explicitly closed
    #set stderr.log and stdout.log files.
    #stderr_log_output from try_and_except function and logger function are appended into ids+ ".stderr.log file within output dir
    stderr_log_output = log_directory + "/" + 'emm_typing'+ ".stderr"
    stdout_log_output = log_directory + "/" + 'emm_typing'+ ".stdout"
    logger = log_writer.setup_logger(stdout_log_output, stderr_log_output)
    #Extract flanking regions of 100bp upstream and downstream of each MLST locus by blast against a reference genome
    try_and_except(stderr_log_output, prep_SRST,profile_file_directory, output_directory, logger)
    #Concatenate flanking regions extracted by prep SRST function to correspondent locus variants sequence in fasta format.
    #Newly concatenated sequence are then indexed by Bowtie2
    lociHeader = try_and_except(stderr_log_output, prepare,output_directory + "/summary.txt", workingDir, bowtie,logger)
    #Calls two functions: Align and Score functions.
    #1. Align function: map each read set to reference sequence and creates SAM file, converts the SAM file to BAM file,Sort and index BAM file and Generate pileup
    #2. Score function:designate the correct allele and calculate coverage statistics for each locus
    top_hits = try_and_except(stderr_log_output ,align_and_get_scores,workingDir, files, bowtie, samtools,log,logger,ids, verbose) ################### SP
    #files with the following extension:'.fasta', '.pkl', '.sam', '.tmp', '.bt2','.out','.unmapped','.unmap','fai' are removed from tmp and output files
    # NOTE(review): the loop variable below shadows the `files` parameter (safe here since
    # the parameter is no longer used afterwards); 'PHE221509-all.bam' looks like a
    # hard-coded sample name - confirm whether it should be ids + '-all.bam'.
    for root, dirs, files in os.walk(output_directory):
        for currentFile in files:
            exts=('.fasta', '.pkl', '.sam', '.tmp', '.bt2','.out','.unmapped','.unmap','fai' )
            if any(currentFile.lower().endswith(ext) for ext in exts):
                os.remove(os.path.join(root, currentFile))
            elif currentFile in ['my_blast_tmp.xml', 'summary.txt', 'PHE221509-all.bam']:
                os.remove(os.path.join(root, currentFile))
    return top_hits
"""
Function
- Extract flanking regions of 100bp upstream and downstream of EMM by blast against a reference genome. BLAST uses the first variant sequence as a query.
NB Need to make sure BLAST, EMBOSS and Biopython are in the path.
- Create summary.txt file (a tab-delimited text file display the path to the variant sequences and flanking sequences)
The option of the method
output_directory[str]: The path to where the summary.txt file will be created
profile_file_directory[str]: The path to the reference.seq and the EMM variant sequences (*.fas) files
logger[str]: The path to where the stderr and stdout logged
Return value
return summary.txt file
"""
def prep_SRST(profile_file_directory, output_directory, logger):
    """Extract 100bp flanking regions around each EMM locus from the reference genome.

    For every *.fas variant file, the first sequence is used as a BLAST bait against
    <profile_file_directory>/reference; 100bp flanks of the best hit are pulled out
    with EMBOSS seqret and written to <locus>_flanks.fasta. A tab-delimited
    summary.txt (locus name, variants fasta, flanks fasta) is written to
    output_directory.
    (Indentation reconstructed during review - original structure was mangled.)
    """
    reference_fasta_file = profile_file_directory + "/reference.seq"
    refseq_record = SeqIO.read(reference_fasta_file, "fasta", generic_dna)
    locus_files = glob.glob(profile_file_directory + "/*.fas")
    locus_files = sorted(locus_files)
    summary_file_handle = open(output_directory + "/summary.txt", "w")
    for seq in locus_files:
        (seqDir,seqFileName) = os.path.split(seq)
        (seqBaseName,ext) = os.path.splitext(seqFileName)
        bait = seqBaseName + "_bait.fasta"
        # extract first sequence to use as bait
        log_writer.info_header(logger, "create bait file")
        process = subprocess.Popen(['seqret',seq,'-firstonly','-auto','-out',output_directory+ '/' + bait], stderr=subprocess.PIPE, stdout=subprocess.PIPE)
        process.wait()
        log_writer.log_process(logger, process, log_error_to = "info")
        # BLAST the bait against the reference genome; XML (outfmt=5) result parsed below
        cline = NcbiblastnCommandline(query=output_directory+ '/' + bait, db=profile_file_directory + "/reference",evalue=0.001, out=output_directory + "/my_blast_tmp.xml", outfmt=5)
        stdout_log_output, stderr_log_output = cline()
        result_handle = open(output_directory + "/my_blast_tmp.xml")
        blast_record = NCBIXML.read(result_handle)
        query_length = blast_record.query_letters
        for alignment in blast_record.alignments:
            hsp = alignment.hsps[0] # only consider top hit
            if hsp.align_length/float(query_length) > 0.5:
                # extend the subject coordinates so they cover the full query length,
                # accounting for hits on the reverse strand (sbjct_start > sbjct_end)
                if hsp.sbjct_start > hsp.sbjct_end:
                    subject_start = hsp.sbjct_start + (hsp.query_start - 1)
                else:
                    subject_start = hsp.sbjct_start - (hsp.query_start - 1)
                if hsp.sbjct_start > hsp.sbjct_end:
                    subject_end = hsp.sbjct_end - (query_length - hsp.query_end)
                else:
                    subject_end = hsp.sbjct_end + (query_length - hsp.query_end)
                revcomp = 1 # hit is in forward strand
                if hsp.sbjct_start > hsp.sbjct_end:
                    revcomp = -1
                # 100bp windows immediately on either side of the hit
                left_coords = [min(subject_start,subject_end)-100,min(subject_start,subject_end)-1]
                right_coords = [max(subject_start,subject_end)+1,max(subject_start,subject_end)+100]
                left_cmd = ["seqret ",reference_fasta_file," -sbegin ",str(left_coords[0])," -send ",str(left_coords[1])," -osformat fasta -auto -out " + output_directory + "/tmp_left_flank.fasta"]
                os.system(''.join(left_cmd)) # extract left flank using emboss
                right_cmd = ["seqret ",reference_fasta_file," -sbegin ",str(right_coords[0])," -send ",str(right_coords[1])," -osformat fasta -auto -out " + output_directory + "/tmp_right_flank.fasta"]
                os.system(''.join(right_cmd)) # extract right flank using emboss
                left_record = SeqIO.read(output_directory + "/tmp_left_flank.fasta", "fasta")
                if revcomp < 0:
                    left_record.id = "down"
                    left_record.seq = left_record.seq.reverse_complement() # reverse the sequence
                else:
                    left_record.id = "up"
                right_record = SeqIO.read(output_directory + "/tmp_right_flank.fasta", "fasta")
                if revcomp < 0:
                    right_record.id = "up"
                    right_record.seq = right_record.seq.reverse_complement() # reverse the sequence
                else:
                    right_record.id = "down"
                right_record.description = ""
                left_record.description = ""
                out_handle = open(output_directory + "/" + seqBaseName + "_flanks.fasta", "w")
                out_handle.write(right_record.format("fasta"))
                out_handle.write(left_record.format("fasta"))
                out_handle.close()
        # generate file list for srst
        summary_file_handle.write('\t'.join([seqBaseName,seq,output_directory + "/" + seqBaseName + "_flanks.fasta"]) + "\n")
    summary_file_handle.close()
"""
Function
1. Concatenate flanking regions extracted by prep SRST function to EMM variant sequences in fasta format. Newly concatenated sequence are then indexed by Bowtie2
2. Then extract and store as pickled object:
a. locus- variant names (loci.pkl)
b. start and end position of EMM variant sequences (without the flanking sequences)(ranges.pkl)
c. EMM variants sequence (refSeqs.pkl)
The option of the method
specFn[str]: A tab-delimited text file display the path to the flanking and EMM sequences(summary.txt)
workDir[str] The path to where refSeqs.pkl, ranges.pkl and loci.pkl will be created
bowtie[str]: The command used to index the reference sequence
logger[str]: The path to where the stderr and stdout logged
return
return[list] loci name
"""
def prepare(specFn, workDir, bowtie,logger):
    """Build the combined reference (up flank + variant + down flank) and index it.

    specFn: summary.txt lines of "locus <tab> variants fasta <tab> flanks fasta";
    workDir: where reference.fa and the pickles (ranges.pkl, loci.pkl, refSeqs.pkl)
    are written; bowtie: path to bowtie2 ("-build" is appended to get the indexer).
    Returns the list of locus names.
    (Indentation reconstructed during review - original structure was mangled.)
    """
    (specDir,summaryFileName) = os.path.split(specFn)
    spc = [] # (locus name, variants fasta, flanking sequences)
    for l in open(specFn):
        spc.append(l.split())
    refFn = os.path.join(workDir, "reference.fa")
    rf = open(refFn, "w") # file handle for reference sequence fasta file
    ranges = {}
    loci = [] # array of locus names
    refSeqs = {} # reference sequences (key = id, value = seq object)
    for (loc, variantsFn, flanksFn) in spc:
        loci.append(loc)
        fs = {} # flanking sequences at this locus (key = id, value = seq object)
        f = open(flanksFn)
        for r in SeqIO.parse(f, "fasta"):
            fs[r.id] = r.seq
        f = open(variantsFn) ################### SP
        for r in SeqIO.parse(f, "fasta"):
            s = Bio.Seq.MutableSeq('', generic_dna)
            s += fs['up'] # add upstream seq, allele seq, downstream seq
            s += r.seq
            s += fs['down']
            SeqIO.write([SeqRecord(s, id=r.id)], rf, "fasta") # add to reference fasta file
            ranges[r.id] = (len(fs['up']), len(fs['up']) + len(r.seq)) # get range of allele sequence
            refSeqs[r.id] = s # store this reference sequence
    rf.close()
    rangesFn = os.path.join(workDir, "ranges.pkl") #start and end position of locus variant sequences (without the flanking sequences)
    f = open(rangesFn, 'w')
    pickle.dump(ranges, f)
    f.close()
    lociFn = os.path.join(workDir, "loci.pkl")
    f = open(lociFn, 'w')
    pickle.dump(loci, f)
    f.close()
    refSeqsFn = os.path.join(workDir, "refSeqs.pkl") #Locus variants sequence
    f = open(refSeqsFn, 'w')
    pickle.dump(refSeqs, f)
    f.close()
    bowtie2_index = bowtie + "-build"
    log_writer.info_header(logger, "bowtie_indexed")
    process = subprocess.Popen([bowtie2_index, refFn, refFn], stderr=subprocess.PIPE, stdout=subprocess.PIPE) # generate index of reference fasta for mapping
    process.wait()
    log_writer.log_process(logger, process, log_error_to = "info")
    # NOTE(review): this removes summary.txt from the current working directory,
    # not from the output directory where prep_SRST created it - confirm intent
    os.system("rm -f summary.txt")
    return loci
"""
Function
Calls two functions: Align and Score functions.
Align function:
(a) Calls Bowtie2 (with very sensitive options). Bowtie2 maps each read set to the reference sequence and creates a SAM file
(b) Converts the SAM file to BAM file
(c) Sort and index BAM file
(d) Generate pileup file
Score function:
(a) From the pileup file read Depthofcoverage and calculate probability score based on Depthofcoverage
(b) Designate the correct allele based on probability score and degree of variability of the read from the locus variant (by identifying present or absence of SNPs/INDELs).
(c) Calculate coverage statistics for each locus (max_percenatge non consensus bases, Minimum total depth, Maximum total depth, Minimum consensus depth,
Maximum consesnsus depth, mean consensus depth and stdDev of consensus depth)
The option of the method
fileSets[dict]: Keys are fastq filenames and value correspond to path to fastq file
bowtie[str]: The path to Bowtie2 command
samtools[str]: The path to SAMtools command
log[str]: The path to where the EMM_log.txt will be created
logger[str]: The path to where the stderr and stdout logged
Return
Return score[]probability and coverage statistics score value for each allele
"""
def align_and_get_scores(workingDir, files, bowtie, samtools, log,logger,ids, verbose): ################### SP
    """Map the fastq pair and score every allele; return score()'s result.

    workingDir: tmp directory holding reference.fa and pickles; files: pair of
    fastq paths; log: open EMM_log.txt handle. Returns whatever score() returns,
    or a ("Failed", ...) tuple when the fastq pair cannot be found (mirrors
    score()'s own failure convention).
    """
    out = sys.stdout
    nameSep = "-"
    paired = True
    pair = files
    # FIX: `s` was previously unbound when the fastq pair was missing, so the
    # final `return s` raised UnboundLocalError instead of reporting the failure.
    s = ("Failed", "fastq read pair not found")
    if os.path.exists(pair[0]) and os.path.exists(pair[1]):
        align(workingDir, paired, pair, sys.stderr, bowtie, samtools,logger,ids)
        s = score(pair, workingDir, paired, out, log, nameSep, bowtie, samtools,logger,ids, log, verbose)
        out.flush()
        log.flush()
    else:
        log_writer.info_header(logger, "the paired reads are not labelled as as sampleid.R*.fastq*")
    return s
"""
Function
Prints EMM value and metrics for each variant with coverage > 90%
The option for method:
hits[dict] = {allele: [identity, coverage, meanDepth, minDepth, snps, indels, mixed, filteredCoverage], ...}
log[str]: Location to where the EMM_log.txt file will be created
"""
def write_log(hits,log):
    """Write a results-summary table to the EMM log for alleles with coverage >= 90%.

    hits: {allele: [identity, coverage, meanDepth, minDepth, snps, indels, mixed, filteredCoverage]}
    log:  open writable handle for EMM_log.txt
    (Indentation reconstructed during review - original structure was mangled.)
    """
    # metrics[allele] = [identity, coverage, meanDepth, minDepth, snps, indels, mixed, filteredCoverage]
    if len(hits) > 0:
        print >> log
        print >> log, '=' * 70
        print >> log, "Results Summary"
        print >> log, '=' * 70
        print >> log
        print >> log, "Allele\tidentity\tcoverage\tmeanDepth\tminDepth\tsnps\tindels\tmixed\tfilteredCoverage"
        # sort alleles by identity (best first), then keep only those with coverage >= 90%
        sorted_hits = sorted(hits.items(), key=lambda x: x[1][0], reverse=True)
        filtered_hits = [f for f in sorted_hits if f[1][1] >= 90]
        for hit in filtered_hits:
            # snps (index 4) and mixed (index 6) are collections, so their lengths are reported
            print >> log, hit[0]+'\t'+str(hit[1][0])+'\t'+str(hit[1][1])+'\t'+str(hit[1][2])+'\t'+str(hit[1][3])+'\t'+str(len(hit[1][4]))+'\t'+str(hit[1][5])+'\t'+str(len(hit[1][6]))+'\t'+str(hit[1][7])
"""
Function
Calls bamify and pileupReads function
The option for method
workingDir[str]: The path to where SAM, BAM, Pileup will be created
paired[bool]: true= paired end reads
files[list]: The path to the fastq file location
logFile[str]: The path to the EMM_log.txt file location
bowtie[str]: The path to Bowtie2 command
samtools[str]: The path to SAMtools command
logger[str]: The path to where the stderr and stdout logged
ids[str]: unique identifier number
return
return pileup file
"""
def align(workDir, paired, files, logFile, bowtie, samtools,logger,ids):
    """Map one read set against the combined reference and generate a pileup.

    Delegates the mapping/BAM creation to bamify() and the samtools pileup to
    pileupReads(); the pileup is written to <workDir>/all.pileup.
    """
    reference_path = os.path.join(workDir, "reference.fa")
    sorted_bam = bamify(workDir, 'all', files, reference_path, True, logFile, bowtie, samtools, logger, ids)
    pileup_handle = open(os.path.join(workDir, 'all.pileup'), 'w')
    pileupReads(workDir, sorted_bam, reference_path, pileup_handle, logFile, samtools, logger)
"""
Function
(a) Map each read set to each of the possible EMM variants by calling Bowtie2 (with very sensitive options) and create SAM and tmp file
(b) Convert the sam to tmp file by unsetting the secondary alignment bit score
(c) Convert the tmp to BAM file
(d) Sort BAM
The option for method:
workDir[str]: The path to where the SAM, BAM and sorted BAM files will be created
pref[str]: pref = "all"
files[list]: The path to the fastq file location
refFn[str]: The path to the reference file location
expand[bool]: True or false value
logFile[str]: The path to the EMM_log.txt file location
bowtie[str]: The path to Bowtie2 command
samtools[str]: The path to SAMtools command
logger[str]: The path to where the stderr and stdout logged
ids[str]: unique identifier number
returns[string]: out: sorted BAM file
"""
def bamify(workDir, pref, files, refFn, expand, logFile, bowtie, samtools, logger, ids):
    """Map a read pair with Bowtie2 and produce a coordinate-sorted BAM.

    When expand is True the raw SAM is first rewritten to clear the secondary
    alignment bit (so samtools pileup counts those reads); otherwise the SAM is
    used as-is. Returns the path to the sorted BAM (<ids>-<pref>-all.bam).
    """
    out0 = os.path.join(workDir, ids + '-' + pref + '-all')  # sort prefix; samtools sort appends ".bam"
    out = os.path.join(workDir, ids + '-' + pref + '-all.bam')
    tmp = os.path.join(workDir, ids + '-' + pref + '.tmp')  # temporary sam output
    sam = os.path.join(workDir, ids + '-' + pref + '.sam')
    bam = os.path.join(workDir, ids + '-' + pref + '.bam')
    if expand:
        log_writer.info_header(logger, "Creating tmp file")
        # -k 99999: report up to 99999 alignments per read; -D 20 -R 3 -N 0 -L 20 -i S,1,0.50 == --very-sensitive
        process = subprocess.Popen([bowtie, '--fr', '--no-unal', '--minins', '300', '--maxins', '1100', '-x', refFn, '-1', files[0], '-2', files[1], '-S', tmp, '-k', '99999', '-D', '20', '-R', '3', '-N', '0', '-L', '20', '-i', 'S,1,0.50'], stderr=subprocess.PIPE, stdout=subprocess.PIPE)
        process.wait()
        log_writer.log_process(logger, process, log_error_to = "info")
        log_writer.info_header(logger, "remove_secondary_mapping_bit")
        # FIX: removed the redundant handles previously opened around this call
        # (i = open(tmp); o = open(sam, 'w')) - remove_secondary_mapping_bit opens
        # both paths itself, so those handles only truncated `sam` early and leaked.
        remove_secondary_mapping_bit(tmp, sam)
    else:  # expand = False: used by getNovelAllele; write the SAM directly
        log_writer.info_header(logger, "Creating sam file")
        process = subprocess.Popen([bowtie, '--fr', '--no-unal', '--minins', '300', '--maxins', '1100', '-x', refFn, '-1', files[0], '-2', files[1],'-S', sam, '-k', '99999', '-D', '20', '-R', '3', '-N', '0', '-L', '20', '-i', 'S,1,0.50'], stderr=subprocess.PIPE, stdout=subprocess.PIPE)
        process.wait()
        log_writer.log_process(logger, process, log_error_to = "info")
    log_writer.info_header(logger, "Converting sam to bam")
    process = subprocess.Popen([samtools, 'view', '-bhS', '-o', bam, sam], stderr=subprocess.PIPE, stdout=subprocess.PIPE)  # convert to bam
    process.wait()
    log_writer.log_process(logger, process, log_error_to = "info")
    log_writer.info_header(logger, "Sorting bam")
    process = subprocess.Popen([samtools, 'sort', bam, out0], stderr=subprocess.PIPE, stdout=subprocess.PIPE)  # sort bam
    process.wait()
    log_writer.log_process(logger, process, log_error_to = "info")
    return out
"""
Function:
Takes a SAM file and deducts 256 from the second column(FLAG) that unset the secondary alignment bit score
NB: reads with bit(250) set are not reported when using Samtools pileup
The option for method:
sam[string]: SAM file
sam_parsed[string]: parsed SAM file
Return
returns[string]: Parsed SAM file
"""
def remove_secondary_mapping_bit(sam, sam_parsed):
    """Copy a SAM file, clearing the secondary-alignment bit (0x100) in the FLAG field.

    Reads with the secondary bit set are otherwise skipped by samtools pileup.
    sam        -- path to the input SAM file
    sam_parsed -- path where the rewritten SAM is written
    """
    # FIX: the original tested `flag > 256` and subtracted 256 unconditionally,
    # which corrupted flags >= 512 without the 0x100 bit set (e.g. 2048 -> 1792)
    # and missed a flag of exactly 256. Test and clear the bit explicitly instead.
    with open(sam) as sam_file, open(sam_parsed, "w") as sam_parsed_file:
        for line in sam_file:
            if line.startswith('@'):
                # header lines pass through unchanged
                sam_parsed_file.write(line)
                continue
            details = line.rstrip('\n').split("\t")
            flag = int(details[1])
            if flag & 256:  # secondary alignment bit set
                details[1] = str(flag & ~256)
            sam_parsed_file.write('\t'.join(details) + '\n')
"""
Function
Generate pileup file by using SAMtools mpileup command.
NB: use -B -A -f option to optimises coverage
--A flag count anomalous read
The option for method:
workDir[str]: The path to where pileup file will be created
bam[str]: The path to the BAM file location
refFn[str]: The path to the reference file location
pileupFile[str]: The path to pileup file location
logFile[str]: The path to the EMM_log.txt file location
samtools[str]: The path to SAMtools command
logger[str]: The path to where the stderr and stdout logged
return
returns: pileup file
"""
def pileupReads(workDir, bam, refFn, pileupFile, logFile, samtools,logger):
    """Index the BAM and stream a samtools mpileup into pileupFile.

    -B disables BAQ computation, -A counts anomalous read pairs and -f supplies
    the indexed reference; together they maximise reported coverage. The pileup
    handle is closed before returning.
    """
    log_writer.info_header(logger, "index bam file")
    index_proc = subprocess.Popen([samtools, 'index', bam], stderr=subprocess.PIPE, stdout=subprocess.PIPE)
    index_proc.wait()
    log_writer.log_process(logger, index_proc, log_error_to = "info")
    log_writer.info_header(logger, "Generate pileup file")
    mpileup_proc = subprocess.Popen([samtools, 'mpileup', '-B', '-A', '-f', refFn, bam], stderr=subprocess.PIPE, stdout=subprocess.PIPE)
    # stream mpileup's stdout straight into the pileup file
    for record in mpileup_proc.stdout:
        pileupFile.write(record)
    mpileup_proc.wait()
    log_writer.log_process(logger, mpileup_proc, log_error_to = "info")
    pileupFile.close()
"""
Function
Parse through the pileup file and count the number of observed bases
The option for method
pileupFile[str]: The path to pileup file location
refSeq[dict]: reference sequence for each allele
ranges[dict]: start and end position of EMM variant sequences (without the flanking sequences)
Return
hash_alignment[dict]: hash_alignment[allele]: [pos, ref, orig_depth, filtered_match, filtered_mismatch, filtered_depth, total_indels, alt_bps, insertions_to_report] for each position
"""
def read_pileup(pileupFile, refSeq, ranges):
    """Parse a samtools pileup file into per-position base counts for every allele.

    pileupFile: path to the pileup file; refSeq: {allele: reference sequence};
    ranges: {allele: (start, end)} of the allele proper, excluding the flanks.
    Returns {allele: [[pos, ref, orig_depth, filtered_match, filtered_mismatch,
    filtered_depth, total_indels, alt_bps, insertions_to_report], ...]}.
    (Indentation reconstructed during review - original structure was mangled.)
    """
    # read pileup file
    with open(pileupFile) as pileup:
        hash_alignment = {}
        # Split all lines in the pileup by whitespace
        pileup_split = ( x.split() for x in pileup )
        # Group the split lines based on the first field (allele)
        for allele, lines in groupby(pileup_split, itemgetter(0)):
            hash_alignment[allele] = []
            for fields in lines:
                alt_bps = {}
                locus = fields[0]
                nuc_num = int(fields[1])# Actual position in ref allele
                nuc_ref = fields[2]
                orig_depth = int(fields[3])
                # ignore positions that fall within the flanking sequence
                if nuc_num <= ranges[locus][0] or nuc_num > ranges[locus][1]:
                    continue
                elif nuc_num == ranges[locus][0]+1:  # first base of the allele proper
                    ref_pos = nuc_num
                else:
                    try:
                        ref_pos += 1
                    except UnboundLocalError:
                        # first in-range position seen for this allele
                        ref_pos = nuc_num
                    if nuc_num != ref_pos:
                        for i in range(ref_pos, nuc_num-1): # if bps not covered, pad with zero-depth rows
                            hash_alignment[allele].append([i+1, refSeq[allele][i], 0, 0, 0, 0, 'None', alt_bps, []])
                        ref_pos = nuc_num # filter reads based on Phred scores - cutoff Q20 and calculate matches and mismatches
                if orig_depth != 0:
                    orig_match, orig_mismatch, nuc_match,nuc_mismatch, total_indels, alt_bps, report_insertions= pileup_extract_information(nuc_ref, fields[4], fields[5])
                    # an insertion at the very last bp of the allele would be a result of a
                    # mismatch in the flanking region and not a mutation in the allele
                    if nuc_num == ranges[allele][1] and report_insertions != []:
                        report_insertions = total_indels = []
                    elif report_insertions != []:
                        # drop insertions that merely reproduce the downstream reference sequence
                        for ins in report_insertions:
                            ins_length = int(re.search('[0-9]+', ins).group())
                            ins_seq = re.search('[A-Z]+', ins).group()
                            if (ins_seq + str(refSeq[allele][nuc_num:ranges[allele][1]]))[:len(str(refSeq[allele][nuc_num:ranges[allele][1]]))] == str(refSeq[allele][nuc_num:ranges[allele][1]]):
                                try:
                                    # list.remove returns None, hence the TypeError guard below
                                    report_insertions = report_insertions.remove(ins) if len(report_insertions)>1 else []
                                except TypeError:
                                    pass
                    if report_insertions == None: report_insertions = []
                    nuc_depth = nuc_match + nuc_mismatch
                    # sanity check: filtered counts must be consistent with the raw depth
                    if orig_match+orig_mismatch != orig_depth or nuc_depth > orig_depth:
                        print "Attention required!"
                        print "Line: {0}".format(fields)
                    elif nuc_num > ref_pos:
                        for i in range(ref_pos, nuc_num):  # pad uncovered positions
                            hash_alignment[allele].append([i+1, refSeq[allele][i], 0, 0, 0, 0, 'None', alt_bps, []])
                        hash_alignment[allele].append([nuc_num, nuc_ref, orig_depth, nuc_match, nuc_mismatch, nuc_depth, total_indels, alt_bps, report_insertions])
                        ref_pos = nuc_num
                    else:
                        # Hash for later processing in R
                        hash_alignment[allele].append([nuc_num, nuc_ref, orig_depth, nuc_match, nuc_mismatch, nuc_depth, total_indels, alt_bps, report_insertions])
                else:
                    # zero-depth position: record an empty row and advance the cursor
                    hash_alignment[allele].append([nuc_num, nuc_ref, 0, 0, 0, 0, 'None', alt_bps, []])
                    ref_pos += 1
    return hash_alignment
"""
Function
Parse through a single line of the pileup file and count the number of observed bases
The option for method
ref_bp[str]: Reference base: 2nd column within the pileup file
align_bps[str]: reads bases: 4th column within the pileup file
qualities[str]: base qualities: 5th column within the pileup file
Return
filtered_match[int]: number of reads that match the reference with base quality > 20
filtered_mismatch[int]: number of reads that do not match the reference with base quality > 20
filtered_depth[int]: number of reads with base quality > 20
total_indels[list]: list of tuples (indel1, number of bases with indel1)
alt_bps[dict]: number of observed bases in this position
report_insertions[list]: insertion that occur in positions with depth > 4 and in more than half the reads
"""
def pileup_extract_information(ref_bp, align_bps, qualities):
    """Count matches/mismatches at one pileup position with a Q20 base-quality filter.

    ref_bp: reference base (pileup column 3); align_bps: read-base string
    (column 5); qualities: base-quality string (column 6).
    Returns (match, mismatch, filtered_match, filtered_mismatch, indels_freq,
    alt_bps, report_insertions).
    (Indentation reconstructed during review - original structure was mangled.)
    """
    match = 0
    mismatch = 0
    probabilities = {}
    # remove all indels of format -1a and +1a since they do not have corresponding
    # qualities and they refer to the following bp. For example a deletion "cat" at
    # positions 242-4 first appears at pos 241 (e.g. "-3cat"), with the actual
    # deletions appearing as * in the respective positions.
    indels = re.findall(r'[\+\-]\d{1,2}[acgtnACGTN]*', align_bps)
    # rectify cases where the deletion is followed by a mismatch base, e.g. "-3catG"
    Indels = list(set(indels))
    for index, i in enumerate(Indels):
        if len(re.search(r'[a-zA-Z]+', i).group()) != int(re.search(r'\d+', i).group()):
            dif = len(re.search(r'[a-zA-Z]+', i).group()) - int(re.search(r'\d+', i).group())
            indels = [i[:-dif] if x == i else x for x in indels]
    for e in list(set(indels)):
        align_bps = (align_bps.replace(e,''))
    indels = [x.upper() for x in indels]
    indels_freq = Counter(indels).most_common() if indels != [] else 'None'
    # find all matches of format ^~. or .$ and remove extra symbols leaving only . and ,
    match1 = re.findall(r'\^[0-9a-zA-Z\!\ "#\$%&\'()\*\+,\.\-\/:;<>\?@\[\]\\\^_`\{\}\|~]{1}[,\.]{1}', align_bps)
    for e in list(set(match1)):
        align_bps = align_bps.replace(e, e[-1:])
    match2 = re.findall(r'[,\.]{1}\$', align_bps)
    for e in list(set(match2)):
        align_bps = align_bps.replace(e, e[:-1])
    # look for possible mismatches and remove extra symbols
    if set(','.join(align_bps)) != set([",","."]):
        mm1= re.findall(r'\^[0-9a-zA-Z\!\ "#\$%&\'()\*\+,\.\-\/:;<>\?@\[\]\\\^_`\{\}\|~]{1}[acgtnACGTN]{1}', align_bps)
        for e in list(set(mm1)):
            align_bps = align_bps.replace(e, e[-1:])
        mm2 = re.findall(r'[acgtnACGTN]{1}\$', align_bps)
        for e in list(set(mm2)):
            align_bps = align_bps.replace(e, e[:-1])
    # calculate matches and mismatches (unfiltered)
    match = align_bps.count('.') + align_bps.count(',')
    mismatch = sum([align_bps.upper().count(x) for x in ('*', 'A', 'C', 'G', 'T', 'N')])
    # filter positions based on Q score - cutoff 20
    alt_bps = {}
    filtered_mismatch = 0
    filtered_match = 0
    match_filtered = []
    # calculate filtered match and mismatch positions
    if mismatch > 0:
        mismatch_pos = {}
        for x in (r'\*{1}', r'A{1}', r'C{1}', 'G{1}', 'T{1}', 'N{1}'):
            pt = re.compile(x)
            alt = re.search(r'[\*\+ACGTN]{1}', x).group()[0]
            try:
                pt.search(align_bps.upper()).group()
            except AttributeError:
                continue
            else:
                mismatch_pos[alt] = [m.start() for m in pt.finditer(align_bps.upper())]
        mismatch_filtered = {}
        for bp in mismatch_pos.keys():
            if bp != 'N':
                mismatch_filtered[bp] = []
                for m in mismatch_pos[bp]:
                    try:
                        Q=ord(qualities[m])-33
                    except IndexError:
                        # NOTE(review): on IndexError Q keeps its previous value (or is
                        # unbound on the very first iteration) - confirm intended behaviour
                        pass
                    if Q > 20: mismatch_filtered[bp].append(m)
    match_pos = [m.start() for m in re.finditer(r'[,\.]{1}', align_bps)]
    for m in match_pos:
        try:
            Q= ord(qualities[m])-33
        except IndexError:
            # NOTE(review): same stale-Q caveat as above
            pass
        if Q > 20: match_filtered.append(m)
    for bp in ('A', 'C', 'G', 'T', 'N', '*'):
        if bp != ref_bp:
            alt_bps[bp] = len(mismatch_filtered[bp]) if mismatch > 0 and bp in mismatch_filtered.keys() else 0
            filtered_mismatch += alt_bps[bp]
        else:
            alt_bps[bp] = len(match_filtered)
            filtered_match = alt_bps[bp]
    insertions = [x for x in filter(lambda x:x[0]=="+", indels)]
    report_insertions = []
    for ins in list(set(insertions)):
        if filtered_match+filtered_mismatch > 4 and insertions.count(ins) >= int(len(qualities)*0.40): # accepts an insertion if it occurs in more than 40% of the reads
            report_insertions.append(ins)
    if report_insertions == [] and len(insertions) > len(qualities)*0.80:
        report_insertions = list(set(insertions))
    return match, mismatch, filtered_match, filtered_mismatch, indels_freq, alt_bps, report_insertions
"""
Function
Score function:
(a) Parse through the pileup file to capture DepthofCoverage
(b) Calculate probability score for all the EMM variants based on DepthofCoverage
(c) Calculate coverage statistics for all EMM variants (max_percentage non
consensus bases, Minimum total depth, Maximum total depth, Minimum consensus depth, Maximum consesnsus depth, mean consensus depth and stdDev of consensus depth)
(d)For each locus display: locus names, variant number(allele number), number of snps different between the readset and locus,
minimum probability score , locus variant name, probability score value for each locus variant position (the probability score for each of the three bases other than the majority consensus base),
list of snps (SNP position, Reference base) and coverage statistics for each locus variants
(e) Then filter the correct allele based on probability score and degree of variability of the read from the locus variant (by identifying present or absence of SNPs/INDELs).
- If number of snps different between the readset and locus variant is zero and probability score greater than -10, the locus variant is assigned
(f) Calculate percentage coverage (check if the reads are mapped to all locus variant position)
The option for method:
files[dict]: Keys are fastq filenames and value correspond to path to fastq file
workDir[str]: output_directory -> The path to where logfile files will be created
sig[int]: -10 CUTOFF
paired[bool]: assigned to False
insertSize[Nonetype]: insertSize assigned as None
outFile[str]: print result output
logFile[str]: The path to where the EMM_log.txt will be created
nameSep[str]: nameSep assigned as "-"
verboseFiles[bool] False
bowtie[str]: The path to Bowtie2 command
samtools[str]: The path to SAMtools command
logger[str]: The path to where the stderr and stdout logged
ids[str]: unique identifier number
return
log file
"""
def score(files, workDir, paired, outFile, logFile, nameSep, bowtie, samtools,logger,ids, log, verbose): # removed insertSize since it wasn't used in the function ################### SP
    """Score the pileup against every EMM locus variant and select top hits.

    Reads ``all.pileup`` plus the pickled allele ranges/sequences from
    ``workDir``, computes per-allele metrics
    ``[identity, coverage, meanDepth, minDepth, snps, indels, mixed,
    filteredCoverage]``, logs them via ``write_log``, and returns a dict with
    the best 'validated' (emm1-124) and 'nonValidated' hits -- or a
    ("Failed", reason) tuple when nothing mapped at all.
    """
    pileFn = os.path.join(workDir, 'all.pileup')
    rangesFn = os.path.join(workDir, "ranges.pkl")  # start and end position of locus variant sequences (without the flanking sequences)
    refSeqsFn = os.path.join(workDir, "refSeqs.pkl")  # locus variant sequences
    ranges = pickle.load(open(rangesFn))
    refSeqs = pickle.load(open(refSeqsFn))
    # PILEUP FILE SIZE = ZERO: no reads mapped to emm references
    if os.path.getsize(pileFn) == 0 : ################### SP
        print >> logFile, "No reads mapped to any of the EMM reference sequences. Suggestion: check sequencing yield" ################### SP
        print >> logFile ################### SP
        return("Failed","No mapping to EMM references")
    validatedTypes = ['emm'+str(f) for f in range(1, 125)]
    hash_alignment = read_pileup(pileFn, refSeqs, ranges)
    top_hits = {}
    metrics = {}
    for allele in hash_alignment.keys():
        if hash_alignment[allele] == []: continue
        flanking = ranges[allele][0] + (len(refSeqs[allele])- ranges[allele][1])
        matched_bps = len([f for f in hash_alignment[allele] if f[5]>4 and f[3]/float(f[5]) >= 0.8 and f[8] == []])
        unmatched_bps = [f for f in hash_alignment[allele] if f[5]<5 or f[3]/float(f[5]) < 0.8 or f[8] != [] or (f[6]!='None' and [m for m in f[6] if m[0].startswith('-') and m[1]/float(f[2]) > 0.5])]
        try:
            coverage = round(len(hash_alignment[allele])/float(len(refSeqs[allele])-flanking)*100, 4)
            identity = round(matched_bps/float(len(hash_alignment[allele]))*100, 4)
            filteredCoverage = round(len([f for f in hash_alignment[allele] if f[5]>4])/float(len(refSeqs[allele])-flanking)*100, 4)
        except ZeroDivisionError:
            # BUGFIX: previously only identity was reset here, leaving coverage
            # and filteredCoverage unbound (first iteration) or stale from the
            # previous allele, so the metrics assignment below could raise
            # NameError or record wrong values.
            coverage = 0
            identity = 0
            filteredCoverage = 0
        # select snps if mismatches more than 80% of the filtered depth (Q>20)
        snps = [(f[0],f[1], f[7]) for f in hash_alignment[allele] if f[5] > 4 and f[4]/float(f[5]) >= 0.8]
        # BUGFIX: initialise per allele so the TypeError path below cannot leave
        # `indels` unbound or carrying the previous allele's events.
        indels = []
        try:
            indels = [(f[0], [m for m in f[6] if (m[0] in f[8]) or (m[1]/float(f[2]) > 0.5 and f[0]!=ranges[allele][1])]) for f in unmatched_bps if f[8] != [] or f[6]!= 'None'] # an insertion at the very last bp of the allele would be a result of a mismatch in the flanking region and not a mutation in the allele
        except TypeError:
            pass
        indels = [f for f in indels if f[1] != []]
        mixed = [(f[0],f[1], f[7]) for f in hash_alignment[allele] if f[5] > 4 and f[3]/float(f[5]) < 0.8 and f[4]/float(f[5]) < 0.8]
        # positions swallowed by a deletion event, so they are not re-counted
        # as individual SNP deletions below
        posDelEvents = []
        for f in indels:
            if f[1][0][0].startswith('-'):
                posDelEvents += range(f[0]+1, f[0]+1+int(re.search(r'\d+', f[1][0][0]).group()))
        # separate snps and deletions
        deletions = [f for f in snps if Counter(f[2]).most_common()[0][0] == '*' and f[0] not in posDelEvents]
        snps = [f for f in snps if Counter(f[2]).most_common()[0][0] != '*']
        depths = [f[5] for f in hash_alignment[allele]]
        meanDepth = round(numpy.mean(depths), 2) if depths != [] else 0
        minDepth = min(depths) if depths != [] else 0
        if indels != [] and [f for f in indels if f[1][0][0].startswith('+')]: # only do this for insertions
            lenIndel = int(re.search('\d+', indels[0][1][0][0]).group())
            matched_bps -= lenIndel - 1
            identity = round(matched_bps/float(len(hash_alignment[allele])+lenIndel)*100, 4)
        metrics[allele] = [identity, coverage, meanDepth, minDepth, snps, indels, mixed, filteredCoverage]
    write_log(metrics, log)
    # keep alleles with identity >= 90% and filtered coverage == 100%
    filtCovId = filter(lambda x: x[1][0]>=90.0 and x[1][-1]==100.0, metrics.items())
    if verbose:
        with open(workDir+'/summary.yml', 'w') as out_fp:
            out_fp.write(yaml.dump(metrics, default_flow_style=True))
    if filtCovId == []:
        # nothing passed the filter: report the best scoring allele as Failed
        validatedMetrics = [(f, metrics[f]) for f in metrics.keys() if f.split('.')[0] in validatedTypes]
        nonValidatedMetrics = [(f, metrics[f]) for f in metrics.keys() if f.split('.')[0] not in validatedTypes]
        top_hit_failedValidated = sorted(validatedMetrics, key=lambda x:x[1][0], reverse=True)
        top_hits['validated'] = ('Failed:'+top_hit_failedValidated[0][0], top_hit_failedValidated[0][1]) if validatedMetrics != [] else (None, ['n/a', 'n/a', 'n/a', 'n/a', [], [], [], 'n/a'])
        top_hit_failedNonValidated = sorted(nonValidatedMetrics, key=lambda x:x[1][0], reverse=True)
        top_hits['nonValidated'] = ('Failed:'+top_hit_failedNonValidated[0][0], top_hit_failedNonValidated[0][1]) if nonValidatedMetrics != [] else (None, ['n/a', 'n/a', 'n/a', 'n/a', [], [], [], 'n/a'])
    else:
        validated = [f for f in filtCovId if f[0].split('.')[0] in validatedTypes]
        nonValidated = [f for f in filtCovId if f[0].split('.')[0] not in validatedTypes]
        sortedValidated = sorted(validated, key=lambda x:x[1][0], reverse=True)
        sortedNonValidated = sorted(nonValidated, key=lambda x:x[1][0], reverse=True)
        if sortedValidated == []:
            validatedMetrics = [(f, metrics[f]) for f in metrics.keys() if f.split('.')[0] in validatedTypes]
            sortedMetrics = sorted(validatedMetrics, key=lambda x:x[1][-1], reverse=True)
            if sortedMetrics and sortedMetrics[0][1][0] > 95 and sortedMetrics[0][1][3] < 5:
                # '**' marks a hit rescued despite incomplete filtered coverage
                top_hits['validated'] = (sortedMetrics[0][0]+'**',sortedMetrics[0][1])
            else:
                top_hits['validated'] = (None, ['n/a', 'n/a', 'n/a', 'n/a', [], [], [], 'n/a'])
        elif len(set([f[1][0] for f in sortedValidated[:2]])) == 1:
            # identity tie between the two best hits: report all tied hits
            hits = [f for f in sortedValidated if f[1][0] == sortedValidated[0][1][0]]
            top_hits['validated'] = sortedValidated[0] if len(hits) == 1 else hits
        else:
            top_hits['validated'] = sortedValidated[0]
        if sortedNonValidated == []:
            top_hits['nonValidated'] = (None, ['n/a', 'n/a', 'n/a', 'n/a', [], [], [], 'n/a'])
        elif len(set([f[1][0] for f in sortedNonValidated[:2]])) == 1:
            hits = [f for f in sortedNonValidated if f[1][0] == sortedNonValidated[0][1][0]]
            top_hits['nonValidated'] = sortedNonValidated[0] if len(hits) == 1 else hits
        else:
            top_hits['nonValidated'] = sortedNonValidated[0]
    return top_hits
"""
Extracts the following values from scores data structure and writes data to results.xml in the format below:
<ngs_sample id="PHE221920">
<script value="emm typing tool" version="1-0"/>
<results>
<result type="EMM_validated" value="89.0">
<result_data type="percentage_identity" value="100.00"/>
<result_data type="percentage_coverage" value="100.00"/>
<result_data type="mean_consensus_depth" value="109.09"/>
<result_data type="minimum_consensus_depth" value="26"/>
</result>
<result type="EMM_Nonvalidated" value="232.0">
<result_data type="percentage_identity" value="100.00"/>
<result_data type="percentage_coverage" value="100.00"/>
<result_data type="mean_consensus_depth" value="109.09"/>
<result_data type="minimum_consensus_depth" value="26"/>
</result>
</results>
</ngs_sample>
- Id : NGS sample id (sample identifier)
- Version : software version number
- EMM validated : assigned validated variant (emm1-124)
- EMM_Nonvalidated: assigned non-validated variant (emm125+)
- mean consensus depth - the minimum average consensus depth
- number_of_reads_mapped: the number of reads mapped across allele length
- percentage_coverage: percentage coverage across allele length
- percentage_identity: percentage identity across allele length
- minimum consensus depth- the minimum consensus depth values
The option for method:
output_directory[str]: The path to the result.xml file
xml_values: EMM value, emm score, QC mean consensus depth, QC max percentage non consensus base value, QC percentage coverage and QC minimum consensus depth
ids : NGS sample id (sample identifier)
Workflow_name: streptococcus-pyogenes-typing
Version : version number
return
print results to "id".results.xml file
"""
def create_xml_file(top_hits,output_directory,ids,version): ################### SP
    """Write <ids>.results.xml summarising the EMM typing top hits.

    Decides the 'Final_EMM_type' value from the validated/non-validated top
    hits (or passes through a ("Failed", reason) tuple from score()), then
    emits one <result> element per hit category with identity, coverage,
    depth, snp, indel and mixed-position data.
    """
    #print scores
    xml_log_file = open(output_directory + "/" + ids + ".results.xml", "w") # open a file and write xml result
    root = etree.Element("ngs_sample", id = ids)
    script = etree.SubElement(root, "script", value="emm typing tool", version = version)
    results = etree.SubElement(root, 'results')
    if type(top_hits) == tuple:
        # score() returned ("Failed", reason): record it and emit no metrics
        result = etree.SubElement(results, "result", type='Final_EMM_type', value = ':'.join(top_hits)) # Failed:No mapping to EMM references
    else:
        # --- determine the final EMM type from the top hits ---
        # A hit is a (name, metrics) tuple; a list of such tuples means ties.
        if type(top_hits['validated']) == tuple and top_hits['validated'][1][0] == 100:
            # single validated hit with 100% identity
            finalMtype = top_hits['validated'][0]
        elif type(top_hits['validated']) != tuple and top_hits['validated'][0][1][0] == 100:
            # several tied validated hits with 100% identity
            if len(set([f[0].split('.')[0] for f in top_hits['validated']])) == 1:
                finalMtype = top_hits['validated'][0][0].split('.')[0] + ": mixed subtypes"
            else:
                finalMtype = "Mixed sample: " + '/'.join([f[0] for f in top_hits['validated']])
        elif top_hits['validated'][0] == None and ((type(top_hits['nonValidated']) == tuple and top_hits['nonValidated'][1][0] == 100) or (type(top_hits['nonValidated']) != tuple and top_hits['nonValidated'][0][1][0] == 100)):
            # no validated hit; fall back to a perfect non-validated hit
            finalMtype = top_hits['nonValidated'][0] if type(top_hits['nonValidated']) == tuple else '/'.join([f[0] for f in top_hits['nonValidated']])
        elif (type(top_hits['validated']) != tuple and top_hits['validated'][0][1][0] < 100) or (type(top_hits['validated']) == tuple and top_hits['validated'][1][0]<100 and top_hits['validated'][1][-1] == 100):
            # validated hit(s) with full filtered coverage but < 100% identity
            if (type(top_hits['validated']) != tuple and len(set([f[0].split('.')[0] for f in top_hits['validated']])) == 1) or (type(top_hits['validated']) == tuple):
                top_hit = top_hits['validated'][0] if type(top_hits['validated']) != tuple else top_hits['validated']
                mutPos = 0 # investigate region 30-120 to determine whether the type can be reported or the sample should be investigated further before new type is reported
                # count mutated positions (snps/indels/mixed) in the typing region
                for i,f in enumerate(top_hit[1][4:-1]):
                    for m in f:
                        if m[0] in range(131, 222):
                            if i == 1: # if indels, add the full indel length
                                for indel in m[1]:
                                    mutPos += int(re.search('\d+', indel[0]).group())
                            else:
                                mutPos+=1
                newID = (90-mutPos)/float(90) * 100
                if newID >= 92:
                    finalMtype = top_hit[0].split('.')[0]
                else:
                    if type(top_hits['nonValidated']) == tuple and (type(top_hits['nonValidated']) == tuple and top_hits['nonValidated'][1][0] == 100) or (type(top_hits['nonValidated']) != tuple and top_hits['nonValidated'][0][1][0] == 100):
                        finalMtype = top_hits['nonValidated'][0]
                    else:
                        finalMtype = 'Investigate new type'
            elif (type(top_hits['validated']) != tuple and len(set([f[0].split('.')[0] for f in top_hits['validated']])) != 1):
                finalMtype = 'Not determined'
            elif [f for f in top_hits.values() if f[0].startswith('Failed')]:
                finalMtype = 'Failed'
            else:
                finalMtype =[]
                for top_hit in top_hits['validated']:
                    mutPos = 0 # investigate region 30-120 to determine whether the type can be reported or the sample should be investigated further before new type is reported
                    for i,f in enumerate(top_hit[1][4:-1]):
                        for m in f:
                            if m[0] in range(131, 222):
                                if i == 1: # if indels
                                    for indel in m[1]:
                                        mutPos += int(re.search('\d+', indel[0]).group())
                                else:
                                    mutPos+=1
                    newID = (90-mutPos)/float(90) * 100
                    if newID >= 92:
                        if top_hit[0].split('.')[0] not in finalMtype: finalMtype.append(top_hit[0].split('.')[0])
                finalMtype='/'.join(finalMtype)
                # NOTE(review): after the join above finalMtype is a string, so
                # this comparison with [] can never be True ('' != []); the
                # fallback below looks unreachable -- confirm intended check.
                if finalMtype==[]:
                    if type(top_hits['nonValidated']) == tuple and (type(top_hits['nonValidated']) == tuple and top_hits['nonValidated'][1][0] == 100) or (type(top_hits['nonValidated']) != tuple and top_hits['nonValidated'][0][1][0] == 100):
                        finalMtype = top_hits['nonValidated'][0]
                    else:
                        finalMtype = 'Investigate new type'
        elif top_hits['validated'][0] == None and (type(top_hits['nonValidated']) == tuple and top_hits['nonValidated'][1][0] < 100) or (type(top_hits['nonValidated']) != tuple and top_hits['nonValidated'][0][1][0] < 100):
            # no validated hit and non-validated hit(s) below 100% identity
            if type(top_hits['nonValidated']) != tuple and len(set([f[0].split('.')[0] for f in top_hits['nonValidated']])) != 1:
                finalMtype = 'Not determined' # if two different types are reported with the same coverage and coverage < 100 --> de novo assembly to resolve
            else:
                top_hit = top_hits['nonValidated'] if type(top_hits['nonValidated']) == tuple else top_hits['nonValidated'][0]
                mutPos = 0 # investigate region 30-120 to determine whether the type can be reported or the sample should be investigated further before new type is reported
                for i,f in enumerate(top_hit[1][4:-1]):
                    for m in f:
                        if m[0] in range(131, 232):
                            if i == 1: # if indels
                                for indel in m[1]:
                                    mutPos += int(re.search('\d+', indel[0]).group())
                            else:
                                mutPos+=1
                newID = (90-mutPos)/float(90) * 100
                if newID >= 92:
                    finalMtype = top_hit[0].split('.')[0]
                else:
                    finalMtype = 'Investigate new type'
        elif top_hits["validated"] == tuple and top_hits['validated'][0].find('**') != -1:
            # NOTE(review): this compares the value against the type object
            # `tuple` (always False); `type(top_hits["validated"]) == tuple`
            # was probably intended -- confirm before relying on this branch.
            finalMtype = 'Investigate mapping issues'
        else:
            finalMtype = 'Not determined'
        comment = etree.Comment('(START) EMM Typing Results (START)')
        results.append(comment)
        result = etree.SubElement(results, "result", type='Final_EMM_type', value = finalMtype)
        # --- one <result> element per hit category, with its metric data ---
        for key in top_hits.keys():
            emm_type = 'EMM_'+key
            EMM = str(top_hits[key][0]) if type(top_hits[key]) == tuple else '/'.join([f[0] for f in top_hits[key]])
            result = etree.SubElement(results, "result", type=emm_type, value = EMM)
            pctID = top_hits[key][1][0] if type(top_hits[key]) == tuple else top_hits[key][0][1][0]
            pct_coverage = top_hits[key][1][7] if type(top_hits[key]) == tuple else top_hits[key][0][1][7]
            mean_depth = str(top_hits[key][1][2]) if type(top_hits[key]) == tuple else '/'.join([str(f[1][2]) for f in top_hits[key]])
            min_depth = str(top_hits[key][1][3]) if type(top_hits[key]) == tuple else '/'.join([str(f[1][3]) for f in top_hits[key]])
            # count snps excluding deletion positions (majority base '*')
            snps = str(len([f for f in top_hits[key][1][4] if Counter(f[2]).most_common()[0][0] != '*'])) if type(top_hits[key]) == tuple else '/'.join([str(len([f for f in hit[1][4] if Counter(f[2]).most_common()[0][0] != '*'])) for hit in top_hits[key]])
            if type(top_hits[key]) == tuple:
                indels = re.search('\d+', top_hits[key][1][5][0][1][0][0]).group() if top_hits[key][1][5] != [] else '0'
            else:
                indels = []
                for hit in top_hits[key]:
                    if hit[1][5] != []:
                        indels.append(re.search('\d+', hit[1][5][0][1][0][0]).group())
                    else:
                        indels.append('0')
                indels = '/'.join(indels)
            mixed = str(len(top_hits[key][1][6])) if type(top_hits[key]) == tuple else '/'.join([str(len(hit[1][6])) for hit in top_hits[key]])
            etree.SubElement(result, "result_data", type="percentage_identity", value = str(pctID))
            etree.SubElement(result, "result_data", type="percentage_coverage", value=str(pct_coverage))
            etree.SubElement(result, "result_data", type="mean_consensus_depth", value=mean_depth)
            etree.SubElement(result, "result_data", type="minimum_consensus_depth", value=min_depth)
            etree.SubElement(result, "result_data", type="snps", value=snps)
            etree.SubElement(result, "result_data", type="indels", value = indels)
            etree.SubElement(result, "result_data", type="mixed", value = mixed)
            print ids, emm_type, EMM, "indels:", indels, "SNPS:", snps, "mixed:", mixed
    print >> xml_log_file, etree.tostring(root, pretty_print=True)
|
phe-bioinformatics/emm-typing-tool
|
modules/EMM_determiner_functions.py
|
Python
|
gpl-3.0
| 54,076
|
[
"BLAST",
"Biopython",
"Bowtie"
] |
1bb5132479b50fee43c776bf7c72d15a7e9a6ec01afb8cbbd567a05aa6b00bac
|
"""
Tests for models.py.
"""
import json
import os
from django.conf import settings
from django.test import TestCase
from utils.import_util import import_reference_genome_from_local_file
from utils.import_util import copy_dataset_to_entity_data_dir
from main.models import AlignmentGroup
from main.models import Chromosome
from main.models import Dataset
from main.models import ExperimentSample
from main.models import ExperimentSampleToAlignment
from main.models import Project
from main.models import ReferenceGenome
from main.models import User
from main.models import Variant
from main.models import VariantCallerCommonData
from main.model_utils import clean_filesystem_location
from main.model_utils import get_dataset_with_type
from main.testing_util import create_common_entities
from utils import uppercase_underscore
import subprocess
TEST_USERNAME = 'testuser'
TEST_PASSWORD = 'password'
TEST_EMAIL = 'test@example.com'
TEST_PROJECT_NAME = 'testModels_project'
TEST_REF_GENOME_NAME = 'mg1655_partial'
TEST_REF_GENOME_PATH = os.path.join(settings.PWD,
'test_data/full_vcf_test_set/mg1655_tolC_through_zupT.gb')
class TestModels(TestCase):
    """Regression tests around model deletion."""

    def setUp(self):
        """Build the shared fixture entities once per test."""
        entities = create_common_entities()
        self.user = entities['user']

    def test_delete(self):
        """Deleting a model and its associated data must not raise.

        Written in response to an error being thrown when deleting data:
        https://github.com/churchlab/genome-designer-v2/issues/219
        """
        # TODO: Add more models until we started reproducing issue #219
        # when we try to delete.
        self.user.delete()
class TestAlignmentGroup(TestCase):
    """Tests for the AlignmentGroup model."""

    def test_get_samples(self):
        """get_samples() must return every sample related to the group."""
        owner = User.objects.create_user(TEST_USERNAME, password=TEST_PASSWORD,
                email=TEST_EMAIL)
        self.test_project = Project.objects.create(
                title=TEST_PROJECT_NAME,
                owner=owner.get_profile())
        self.test_ref_genome = ReferenceGenome.objects.create(
                project=self.test_project,
                label='blah')
        group = AlignmentGroup.objects.create(
                label='Alignment 1',
                reference_genome=self.test_ref_genome,
                aligner=AlignmentGroup.ALIGNER.BWA)
        # Attach ten samples to the alignment group.
        for idx in range(10):
            member = ExperimentSample.objects.create(
                    uid=str(idx),
                    project=self.test_project,
                    label='some label')
            ExperimentSampleToAlignment.objects.create(
                    alignment_group=group,
                    experiment_sample=member)
        # Every sample created above must come back from get_samples().
        observed_uids = set(s.uid for s in group.get_samples())
        self.assertEqual(observed_uids, set(str(x) for x in range(10)))
class TestDataset(TestCase):
    """Tests for the Dataset model: reverse relations, compression handling,
    and internal string naming.
    """

    def _create_user_and_project(self):
        """Create the shared test user/project fixture.

        Stores the Project on self.test_project and returns the User.
        (Extracted from three tests that duplicated this setup.)
        """
        user = User.objects.create_user(TEST_USERNAME, password=TEST_PASSWORD,
                email=TEST_EMAIL)
        self.test_project = Project.objects.create(
                title=TEST_PROJECT_NAME,
                owner=user.get_profile())
        return user

    def test_get_related_model_set(self):
        """get_related_model_set() finds the models referencing the Dataset."""
        self._create_user_and_project()
        self.test_ref_genome = ReferenceGenome.objects.create(
                project=self.test_project,
                label='blah')
        alignment_group = AlignmentGroup.objects.create(
                label='Alignment 1',
                reference_genome=self.test_ref_genome,
                aligner=AlignmentGroup.ALIGNER.BWA)
        dataset = Dataset.objects.create(
                label='the label', type=Dataset.TYPE.VCF_FREEBAYES)
        alignment_group.dataset_set.add(dataset)
        alignment_group_set = dataset.get_related_model_set()
        self.assertTrue(alignment_group in alignment_group_set.all())

    def test_dataset_compression_piping(self):
        """Make sure data set compression behaves correctly."""
        dataset = Dataset.objects.create(
                label='test_dataset',
                type=Dataset.TYPE.FASTQ1)
        GZIPPED_FASTQ_FILEPATH = os.path.join(settings.PWD, 'test_data',
                'compressed_fastq', 'sample0.simLibrary.1.fq.gz')
        dataset.filesystem_location = clean_filesystem_location(
                GZIPPED_FASTQ_FILEPATH)
        # Use unittest assertions instead of bare asserts so the checks
        # survive running under `python -O` (which strips asserts).
        self.assertTrue(dataset.is_compressed())
        process = subprocess.Popen(
                ('head '+dataset.wrap_if_compressed()+' | wc -l'),
                shell=True, executable=settings.BASH_PATH, stdout=subprocess.PIPE,
                stderr=subprocess.PIPE)
        wc_output, errmsg = process.communicate()
        rc = process.returncode
        self.assertEqual(rc, 0,
                "Compression process returned non-zero exit status: %s" % (
                        errmsg))
        self.assertEqual(int(wc_output), 10,
                "Compression failed: %s" % (errmsg))

    def test_compress_dataset(self):
        """Compressing a dataset adds a linked compressed Dataset entry."""
        self._create_user_and_project()
        self.test_ref_genome = import_reference_genome_from_local_file(
                self.test_project,
                TEST_REF_GENOME_NAME,
                TEST_REF_GENOME_PATH,
                'genbank')
        dataset = get_dataset_with_type(self.test_ref_genome,
                type=Dataset.TYPE.REFERENCE_GENOME_GENBANK)
        # All the magic happens here.
        compressed_dataset = dataset.make_compressed('.gz')
        # Grab the new compressed dataset through the ref genome to
        # make sure that it got added.
        compressed_dataset_through_ref_genome = get_dataset_with_type(
                entity=self.test_ref_genome,
                type=Dataset.TYPE.REFERENCE_GENOME_GENBANK,
                compressed=True)
        self.assertEqual(compressed_dataset,
                compressed_dataset_through_ref_genome)

    def test_dataset_strings(self):
        """internal_string() combines the entity uid and the dataset type."""
        self._create_user_and_project()
        self.test_ref_genome = import_reference_genome_from_local_file(
                self.test_project,
                TEST_REF_GENOME_NAME,
                TEST_REF_GENOME_PATH,
                'genbank')
        dataset = get_dataset_with_type(self.test_ref_genome,
                type=Dataset.TYPE.REFERENCE_GENOME_GENBANK)
        # assertEqual, not the deprecated assertEquals alias.
        self.assertEqual(
                dataset.internal_string(self.test_ref_genome),
                (str(self.test_ref_genome.uid) +
                        '_' + uppercase_underscore(Dataset.TYPE.REFERENCE_GENOME_GENBANK)))
class TestModelsStatic(TestCase):
    """Tests for static methods.
    """

    def test_clean_filesystem_location(self):
        """clean_filesystem_location() strips everything up to MEDIA_ROOT."""
        FAKE_ABS_ROOT = '/root/of/all/evil'
        EXPECTED_CLEAN_URL = 'projects/blah'
        dirty_full_url = os.path.join(
                FAKE_ABS_ROOT, settings.MEDIA_ROOT, EXPECTED_CLEAN_URL)
        self.assertEqual(
                EXPECTED_CLEAN_URL, clean_filesystem_location(dirty_full_url))
class TestVariantCallerCommonData(TestCase):
    """Tests for VariantCallerCommonData's JSON-typed `data` field."""

    def test_json_data_field(self):
        """Tests the data field which uses the Postgresql 9.3 json type.

        Covers storing a dict, a JSON string, a blank value, and assignment
        after the initial create. Uses assertEqual throughout (assertEquals
        is a deprecated unittest alias).
        """
        user = User.objects.create_user(TEST_USERNAME, password=TEST_PASSWORD,
                email=TEST_EMAIL)
        test_project = Project.objects.create(
                title=TEST_PROJECT_NAME,
                owner=user.get_profile())
        reference_genome = ReferenceGenome.objects.create(
                project=test_project,
                label='ref1')
        chromosome = Chromosome.objects.create(
                reference_genome=reference_genome,
                label='Chromosome',
                num_bases=9001)
        variant = Variant.objects.create(
                reference_genome=reference_genome,
                type='UNKNOWN',
                chromosome=chromosome,
                position=100,
                ref_value='A'
        )
        alignment_group = AlignmentGroup.objects.create(
                label='Alignment 1',
                reference_genome=reference_genome,
                aligner=AlignmentGroup.ALIGNER.BWA)
        raw_data_dict = {
            'key1': 'val1',
            'key2': 'val2',
        }
        # Test storing as dictionary.
        vccd = VariantCallerCommonData.objects.create(
                variant=variant,
                source_dataset_id=1,
                alignment_group=alignment_group,
                data=raw_data_dict
        )
        vccd_lookup = VariantCallerCommonData.objects.get(
                id=vccd.id)
        self.assertEqual(raw_data_dict, vccd_lookup.data)
        # Test storing as string.
        vccd = VariantCallerCommonData.objects.create(
                variant=variant,
                source_dataset_id=1,
                alignment_group=alignment_group,
                data=json.dumps(raw_data_dict)
        )
        vccd_lookup = VariantCallerCommonData.objects.get(
                id=vccd.id)
        self.assertEqual(raw_data_dict, vccd_lookup.data)
        # Test blank value.
        vccd = VariantCallerCommonData.objects.create(
                variant=variant,
                source_dataset_id=1,
                alignment_group=alignment_group,
        )
        self.assertEqual(0, len(vccd.data))
        # Test assigning after initial create.
        vccd = VariantCallerCommonData.objects.create(
                variant=variant,
                source_dataset_id=1,
                alignment_group=alignment_group,
        )
        vccd.data = json.dumps(raw_data_dict)
        vccd.save()
        vccd_lookup = VariantCallerCommonData.objects.get(
                id=vccd.id)
        self.assertEqual(raw_data_dict, vccd_lookup.data)
class TestExperimentSample(TestCase):
    """Tests for ExperimentSample: data dir lifecycle and lineage links."""

    def setUp(self):
        """Override.
        """
        self.common_entities = create_common_entities()
        self.ref_genome = self.common_entities['reference_genome']

    def test_data_dir_create_and_delete(self):
        """Make sure data directory gets deleted.
        """
        es = ExperimentSample.objects.create(
                project=self.ref_genome.project, label='test_es')
        es_data_dir = es.get_model_data_dir()
        self.assertTrue(os.path.exists(es_data_dir))
        es.delete()
        self.assertFalse(os.path.exists(es_data_dir))

    def test_add_child(self):
        """
        Make sure parent/child relationships work.
        """
        sample_1 = self.common_entities['sample_1']
        sample_2 = self.common_entities['sample_2']
        # Use unittest assertions instead of bare asserts, which are
        # stripped (and thus never run) under `python -O`.
        self.assertEqual(len(sample_1.get_children()), 0)
        self.assertEqual(len(sample_1.get_parents()), 0)
        sample_1.add_child(sample_2)
        self.assertEqual(len(sample_1.get_children()), 1)
        self.assertEqual(len(sample_2.get_parents()), 1)
        self.assertEqual(sample_1.get_children()[0].uid, sample_2.uid)
class TestChromosome(TestCase):
    """Tests Chromosome bookkeeping when a multi-chromosome FASTA is added."""

    def test_multiple_chromosome_dataset_import(self):
        """Adding a 3-chromosome FASTA dataset updates counts and labels."""
        user = User.objects.create_user(
                TEST_USERNAME, password=TEST_PASSWORD, email=TEST_EMAIL)
        project = Project.objects.create(
                title=TEST_PROJECT_NAME, owner=user.get_profile())
        test_yeast_genome = ReferenceGenome.objects.create(
                project=project,
                label='superbrewer2000')
        test_dataset_path = os.path.join(settings.PWD, 'test_data/yeast_chrom_jkl.fasta')
        dataset_path = copy_dataset_to_entity_data_dir(test_yeast_genome, test_dataset_path)
        test_chroms_dataset = Dataset.objects.create(
                label='jkl_chroms',
                type=Dataset.TYPE.REFERENCE_GENOME_FASTA,
                filesystem_location=clean_filesystem_location(dataset_path))
        test_yeast_genome.dataset_set.add(test_chroms_dataset)
        # Use unittest assertions instead of bare asserts, which are
        # stripped (and thus never run) under `python -O`.
        # Assert correct number of chromosomes.
        self.assertEqual(test_yeast_genome.num_chromosomes, 3)
        # Assert correct number of bases.
        self.assertEqual(test_yeast_genome.num_bases,
                sum(chrom.num_bases for chrom in
                        Chromosome.objects.filter(reference_genome=test_yeast_genome)))
        # Assert correct chromosome labels.
        expected_chrom_names = [
                'gi|448092123|ref|NC_020215.1|',
                'gi|448096713|ref|NC_020216.1|',
                'gi|448100869|ref|NC_020217.1|']
        self.assertEqual(
                [chrom.label for chrom in Chromosome.objects.filter(
                        reference_genome=test_yeast_genome)],
                expected_chrom_names)
|
woodymit/millstone_accidental_source
|
genome_designer/main/tests/test_models.py
|
Python
|
mit
| 12,943
|
[
"BWA"
] |
753e1221b2c2036d776b25dc41a468d2baafbd42d24c9b756321f8957f6de039
|
"""
Copyright (C) 2017 Roberto Bruttomesso <roberto.bruttomesso@gmail.com>
This file is distributed under the terms of the 3-clause BSD License.
A copy of the license can be found in the root directory or at
https://opensource.org/licenses/BSD-3-Clause.
Author: Roberto Bruttomesso <roberto.bruttomesso@gmail.com>
Date: 27/03/2017
This module implements a simple visitable decorator
"""
class Visitable(object):
    """Mixin granting an object the ability to be visited.

    Subclasses take part in the visitor pattern: calling ``accept`` hands
    the instance to the visitor's ``visit`` method.
    """

    def accept(self, visitor):
        """Dispatch to *visitor* and return whatever its ``visit`` yields."""
        return visitor.visit(self)
|
formalmethods/intrepyd
|
intrepyd/visitable.py
|
Python
|
bsd-3-clause
| 669
|
[
"VisIt"
] |
8720f113fb62b7a8a5024f2b5f8803a20cc3d4ff96aa68006e6752f40d6e3745
|
import dataclasses
from dataclasses import dataclass
from typing import Any
import typing
import sys
import numpy as np
import h5py
import Pyro5.api
# metadata support
from .mupifobject import MupifObject
from . import units, pyroutil, dumpable
from . import dataid
from .heavydata import HeavyDataBase, HeavyDataBase_ModeChoice
from .pyrofile import PyroFile
import types
import json
import tempfile
import logging
import os
import pydantic
import subprocess
import shutil
log = logging.getLogger(__name__)
'''
The *heavydata* module defines classes for semantic access to potentially large (that is, larger than RAM) structured data, both locally and over the network. The data structure is defined using *schemas* in JSON, where each schema defines table-like structure with rows of data, each row possibly further referencing another table with a different schema. The data is internally stored in an HDF5 file, which includes the schemas, making the file self-describing. The JSON schema is thus only required when the data is being created, not for opening already existing data.
JSON schema specification
--------------------------
The schema is defined as dictionary (nesting is possible). The top-level dictionary of each schema
#. **must** include ``_schema`` (which **must** define ``name`` and ``version``),
#. **must** include ``_datasetName`` entries,
#. **may** include other, regular entries, as described below; names **must not** be reserved names.
Reserved names are those starting with ``_`` (underscore) plus ``dtype``, ``lookup`` and ``path``.
Regular entries
""""""""""""""""
Regular entries are one of the following.
#. Computed attribute (identified via the ``lookup`` keyword); computed attribute **must** define ``lookup``, which is a lookup table (key-value dictionary), **dtype** (datatype being returned from lookup) and **key** (descriptor of data attribute used for lookup); it **may** define ``unit``.
#. Data attribute (identified via the ``dtype`` key, but not having ``lookup``); it **must** define ``dtype`` and **may** define ``unit``, ``shape``.
#. subschema reference (identified by the ``path`` keyword); it **must** define ``path`` and ``schema``. ``path`` must contain the substring ``{ROW}`` (is replaced by row number to which the nested data structure belongs) and end with ``/`` (forward slash).
#. dictionary possibly including other regular entries (directory, creating hierarchy).
Data types
^^^^^^^^^^^^
``dtype`` specifies datatype for the entry value using the ``numpy.dtype`` notation (see `Data type objects <https://numpy.org/doc/stable/reference/arrays.dtypes.html>`__), for example ``f8`` for 8-byte (64-bit) floating-point number.
Strings are stored as utf-8 encoded byte arrays (thus their storage length might be larger than number of characters). use ``"dtype":"a"`` for variable-length strings (``a`` implies ``"shape":"variable"``), and ``"dtype":"a10"`` for string of maximum 10 bytes.
``shape`` is a tuple specifying fixed shape: e.g. ``"shape":(3)`` is rank-1 3-vector, ``"shape":(3,3)`` is rank-2 3×3 matrix and so on. The special value of ``"shape":"variable"`` denotes dynamic 1d array of given ``dtype``.
.. note:: Variable-length data (both strings and numerical arrays) are handled in a special way by the HDF5 storage; each (non-empty) entry has about 30b overhead, plus necessitates allocations. If your data can always fit into a fixed-size array (such as string of maximum 20 bytes, ``a20``), prefer that for both performance and storage reasons.
All data are initialized to the default when constructed, which is:
* ``NaN`` (not-a-number) for floating-point types (scalars and arrays),
* ``0`` (zero) for integer types (scalars and arrays),
* empty array for dynamic arrays,
* empty string for both static-sized (``"dtype":"a10"``) and dynamic-sized (``"dtype":"a"``) strings.
Assignments of incompatible data (which cannot be converted to the underlying storage type), including mismatched shape of arrays, will raise exception.
Units
^^^^^^
Entries specifying ``unit`` (which is any string `astropy.units.Unit <https://docs.astropy.org/en/stable/api/astropy.units.Unit.html>`__ can grok) **must** be assigned with quantities including compatible units; the value will be converted to the schema unit before being set. The field will be returned as a Quantity (including the unit) when read back.
Subschema
"""""""""""
Subschema entries associate full (nested hierarchical) data structure with each table line. The entry **must** specify ``schema`` name (which must be present in the *schemaRegistry* argument of :obj:`HeavyStruct.openData`) and ``path``. Path defines where the nested data is stored within the HDF5 file and **must** contain ``{ROW}`` (as string, including the curly braces) and end with ``/``.
Accessing data
----------------
Data are accessed using *Contexts*, special classes abstracting away the underlying storage. They define getters (and setters) for each data level (they are called simply ``get``/``set`` followed by capitalized entry name). Rows are selected using the usual indexing operator ``[index]``, though a whole column can be returned when index is not specified.
Top contexts (on the level of the schema) define a few special methods:
* ``resize`` which will change the number of rows; new rows will be always set to the default values. When passing the argument ``reset=True`` to ``resize``, all rows will be default-initialized.
* ``inject`` will replace the current context's data with data from another context (recursively); the routine will take care to resize structures as necessary. Schema names must be matching, and differences in schema versions will be reported as warning (it will be possible to user-define transformation for converting between different schema versions). The data exchange happens using serialized format which can be obtained and consumed using ``to_dump()`` and ``from_dump(…)`` methods.
'''
sampleSchemas_json = '''
[
{
"_schema": {
"name": "org.mupif.sample.atom",
"version": "1.0"
},
"_datasetName": "atoms",
"identity": {
"element": {
"dtype": "a2"
},
"atomicNumber": {
"dtype": "l",
"key": "identity.element",
"lookup": {
"H": 1,
"C": 6,
"N": 7,
"Na": 11,
"Cl": 17,
"Fe": 26
}
},
"atomicMass": {
"dtype": "f",
"key": "identity.element",
"unit": "Dalton",
"lookup": {
"H": 1.0079,
"C": 12.0107,
"N": 14.0067,
"Na": 22.9897,
"Cl": 35.453,
"Fe": 55.845
}
}
},
"properties": {
"physical": {
"partialCharge": {
"neutral": {
"dtype": "d",
"unit": "e"
},
"anion": {
"dtype": "d",
"unit": "e"
},
"cation": {
"dtype": "d",
"unit": "e"
}
},
"polarizability": {
"neutral": {
"dtype": "d",
"unit": "AA^2 s^4 kg^-1"
},
"anion": {
"dtype": "d",
"unit": "AA^2 s^4 kg^-1"
},
"cation": {
"dtype": "d",
"unit": "AA^2 s^4 kg^-1"
}
}
},
"topology": {
"parent": {
"dtype": "l"
},
"type": {
"dtype": "a",
"shape": "variable"
},
"name": {
"dtype": "a",
"shape": "variable"
},
"position": {
"dtype": "d",
"shape": [
3
],
"unit": "AA"
},
"velocity": {
"dtype": "d",
"shape": [
3
],
"unit": "AA/ps"
},
"structure": {
"dtype": "l",
"shape": "variable"
}
}
}
},
{
"_schema": {
"name": "org.mupif.sample.molecule",
"version": "1.0"
},
"_datasetName": "molecules",
"identity": {
"chemicalName": {
"dtype": "a",
"shape": "variable"
},
"molecularWeight": {
"dtype": "d",
"unit": "Dalton"
}
},
"properties": {
"electrical": {
"HOMO": {
"dtype": "d",
"unit": "eV"
},
"LUMO": {
"dtype": "d",
"unit": "eV"
},
"siteEnergy": {
"orbital": {
"dtype": "d",
"unit": "eV"
},
"electrostatic": {
"dtype": "d",
"unit": "eV"
},
"polarization": {
"dtype": "d",
"unit": "eV"
}
},
"transferIntegrals": {
"dtype": "d",
"shape": "variable"
},
"reorganizationEnergyInternal": {
"anion": {
"dtype": "d",
"unit": "eV"
},
"cation": {
"dtype": "d",
"unit": "eV"
}
}
},
"physical": {
"polarizability": {
"neutral": {
"dtype": "d",
"shape": [
3,
3
],
"unit": "AA^2 s^4 kg^-1"
},
"anion": {
"dtype": "d",
"shape": [
3,
3
],
"unit": "AA^2 s^4 kg^-1"
},
"cation": {
"dtype": "d",
"shape": [
3,
3
],
"unit": "AA^2 s^4 kg^-1"
}
}
},
"chemical": {}
},
"topology": {
"parent": {
"dtype": "l",
"unit": "none"
},
"centerOfMass": {
"dtype": "d",
"shape": [
3
],
"unit": "AA"
},
"symmetryAxis": {
"dtype": "d",
"shape": [
3
],
"unit": "AA"
},
"structureNeighbors": {
"dtype": "l",
"shape": "variable"
}
},
"implementation": {
"forceFieldType": {
"dtype": "a",
"shape": "variable"
}
},
"atoms": {
"path": "molecule/{ROW}/",
"schema": "org.mupif.sample.atom"
}
},
{
"_schema": {
"name": "org.mupif.sample.grain",
"version": "1.0"
},
"_datasetName": "grains",
"identity": {
"material": {
"dtype": "a",
"shape": "variable"
}
},
"properties": {
"eletrical": {
"freeElectrons": {
"dtype": "l",
"unit": "none"
},
"freeHoles": {
"dtype": "l",
"unit": "none"
}
},
"physical": {
"reorganizationEnergyExternal": {
"dtype": "d",
"unit": "eV"
}
},
"chemical": {},
"symmetry": { "choice":["none","axial","periodic","translational","rotational"] }
},
"topology": {
"parent": {
"dtype": "l"
},
"cellSize": {
"dtype": "d",
"shape": [
3
],
"unit": "m"
}
},
"implementation": {
"boundaryCondition": {
"dtype": "a"
}
},
"molecules": {
"path": "grain/{ROW}/",
"schema": "org.mupif.sample.molecule"
}
}
]
'''
def _cookSchema(desc, prefix='', schemaName='', fakeModule='', datasetName=''):
__doc0__ = '''
Transform dictionary-structured data schema into context access types.
The access types are created using the "type" builtin and only stored
in closures of the functions returning them. The top-level context is
returned from this function to the user.
get/set methods (and others) are not created on the fly but are instead
put into those context types. This is substantially more efficient than
hijacking __getattr__ and __setattr__.
Closures in Python are somewhat unintuitive, since e.g. loop does not
create a new scope (thus variable reference would later have the value
in the last iteration step). Therefore local variables are captured via
local function defaults, which makes some of the code less readable.
'''
    @dataclass
    class CookedSchemaFragment:
        'Internal data used when cookSchema is called recursively'
        dtypes: list # accumulates numpy dtypes for compound datatype
        defaults: dict # default values, nan for floats and 0 for integers
        subpaths: dict # accumulates nested paths (for deletion when resizing), as (path,schema) tuple, keyed by FQ
        units: dict # accumulates units for normal values types (for dict export), keyed by FQ
        T: Any = None # nested context type
        doc: typing.List[str] = dataclasses.field(default_factory=list) # accumulates documentation (as markdown nested list)
        def append(self, other):
            'Merge *other* fragment into this one; note that other.T (nested context type) is intentionally not merged.'
            self.dtypes += other.dtypes
            self.defaults.update(other.defaults)
            self.doc += other.doc
            self.subpaths.update(other.subpaths)
            self.units.update(other.units)
    def dtypeUnitDefaultDoc(v):
        'Parse dictionary *v* (part of the schema) and return (dtype,unit,default,doc) tuple'
        # 'shape' is optional; JSON lists are converted to tuples so numpy accepts them
        shape = v['shape'] if 'shape' in v else ()
        if isinstance(shape, list):
            shape = tuple(shape)
        ddoc = {}  # accumulates human-readable fragments for generated documentation
        if shape:
            ddoc['shape'] = f'[{"×".join([str(s) for s in shape])}]'
        unit = units.Unit(v['unit']) if 'unit' in v else None
        dtype = v['dtype']
        default = None
        if dtype == 'a':
            # bare 'a' denotes a variable-length utf-8 string
            dtype = h5py.string_dtype(encoding='utf-8')
            shape = None
            ddoc['dtype'] = 'string (utf-8 encoded)'
            ddoc['shape'] = 'dynamic'
        elif shape == 'variable':
            # dynamic 1d array of the given base dtype
            ddoc['dtype'] = f'`[{np.dtype(dtype).name},…]`'
            dtype = h5py.vlen_dtype(np.dtype(dtype))
            shape = None
            ddoc['shape'] = 'dynamic'
        else:
            # fixed-shape (possibly scalar or subarray) dtype; only this branch assigns numeric defaults
            dtype = np.dtype((dtype, shape))
            # log.warning(f'{fq}: defaults for non-scalar quantities (dtype.subdtype) not yet supported.')
            basedtype = (dtype if (not hasattr(dtype, 'subdtype') or dtype.subdtype is None) else dtype.subdtype[0])
            # basedtype=dtype # workaround
            if basedtype.kind == 'f':
                default = np.nan
            elif basedtype.kind in 'iu':
                default = 0
            ddoc['dtype'] = f'`{basedtype.name}`'
            if unit:
                ddoc['unit'] = f"`{str(unit)}`"
            if 'lookup' in v:
                # computed (lookup) attributes are read-only and carry no stored default
                ddoc['read-only'] = f'table look-up by `{v["key"]}`'
                default = None
            if default is not None:
                ddoc['default'] = f"`{str(default)}`"
        return dtype, unit, default, ', '.join(f'{k}: {v}' for k, v in ddoc.items())
    def capitalize(k):
        'Turn the first letter into uppercase'
        # unlike str.capitalize(), the remaining characters are left untouched;
        # raises IndexError on an empty string (assumes schema keys are non-empty)
        return k[0].upper()+k[1:]
ret = CookedSchemaFragment(dtypes=[], defaults={}, subpaths={}, units={})
meth = {} # accumulate attribute access methods
docLevel = (0 if not schemaName else prefix.count('.')+1)
# top-level only
if not schemaName:
schemaName = desc['_schema']['name']
schemaVersion = desc['_schema']['version']
datasetName = desc['_datasetName']
assert len(prefix) == 0
T_name = 'Context_'+schemaName.replace('.', '_')
import hashlib
h = hashlib.blake2b(digest_size=6)
h.update(json.dumps(desc).encode('utf-8'))
fakeModule = types.ModuleType('_mupif_heavydata_'+h.hexdigest(), 'Synthetically generated module for mupif.HeavyStruct schemas')
# this somehow breaks imports, so better to avoid it until understood
# if fakeModule.__name__ in sys.modules: return getattr(sys.modules[fakeModule.__name__],T_name)
# sys.modules[fakeModule.__name__]=fakeModule
ret.doc += [f'**schema {schemaName}**', '']
else:
T_name = 'Context_'+schemaName+'_'+prefix.replace('.', '_')
for key, val in desc.items():
# fully-qualified name: for messages and compound field name in h5py
fq = (f"{prefix}.{key}" if prefix else key)
docHead = docLevel*3*' '+f'* `{key}`'
# special keys start with underscore, so far only _schema is used
if key.startswith('_'):
if key == '_schema':
continue
elif key == '_datasetName':
continue
else:
raise ValueError(f"Unrecognized special key '{key}' in prefix '{prefix}'.")
if not isinstance(val, dict):
raise TypeError("{fq}: value is not a dictionary.")
# attribute defined via lookup, not stored
if 'lookup' in val:
dtype, unit, default, doc = dtypeUnitDefaultDoc(val)
ret.doc += [docHead+f': `get{capitalize(key)}()`: '+doc]
lKey, lDict = val['key'], val['lookup']
if isinstance(lKey, bytes):
lKey = lKey.decode('utf8')
# bind local values via default args (closure)
def inherentGetter(self, *, fq=fq, dtype=dtype, unit=unit, lKey=lKey, lDict=lDict):
_T_assertDataset(self, f"when looking up '{fq}' based on '{lKey}'.")
def _lookup(row):
k=self.ctx.dataset[lKey, row]
if isinstance(k, bytes):
k = k.decode('utf8')
try:
val = np.array(lDict[k], dtype=dtype)[()] # [()] unpacks rank-0 scalar
except KeyError:
raise KeyError(f"{fq}: key '{k}' ({lKey}) not found in the lookup table with keys {list(lDict.keys())}") from None
return val
# fake broadcasting
if self.row is None:
val = np.array([_lookup(r) for r in range(self.ctx.dataset.shape[0])])
else:
val = _lookup(self.row)
if unit:
return units.Quantity(value=val, unit=unit)
else:
return val
meth['get'+capitalize(key)] = inherentGetter
elif 'choice' in val:
choices=val['choice']
num=0
str2num,num2str={},{}
if not isinstance(choices,(list,tuple)): raise TypeError(f"'{fq}': choice must be a sequence")
for ilabel,label in enumerate(choices):
if not isinstance(label,str): raise ValueError(f"'{fq}': choice items must be strings (item {ilabel} is a {type(label).__name__})")
if label in str2num: raise ValueError(f"'{fq}': duplicate choice label '{label}'")
num2str[num],str2num[label]=label,num
num+=1
mn,mx=min(num2str.keys()),max(num2str.keys())
# min_scalar_type: signed when mn<0, otherwise unsigned; min_scalar_type should ensure appropriate range
dtype=(np.min_scalar_type(mx) if mn>=0 else mn.min_scalar_type(-max(abs(mn),abs(mx))))
ret.dtypes+=[(fq,dtype)]
ret.doc+=[docHead+f': `get{capitalize(key)}()`, `set{capitalize(key)}(…)`: named enumeration stored as {dtype.name}: '+', '.join([f'{k} (v)' for k,v in str2num.items()])]
ret.defaults[fq]=list(num2str.keys())[0] # first key is the default
ret.units[fq]=None
def getter(self,*,fq=fq):
if self.row is not None: return num2str[self.ctx.dataset[fq,self.row]]
else: return [num2str[self.ctx.dataset[fq,r]] for r in range(self.ctx.dataset.shape[0])]
def setter(self,val,*,fq=fq):
if self.row is not None: self.ctx.dataset[fq,self.row]=str2num[val]
else: self.ctx.dataset[fq]=np.full(self.ctx.dataset.shape[0],str2num[val])
meth['get'+capitalize(key)]=getter
meth['set'+capitalize(key)]=setter
# normal data attribute
elif 'dtype' in val:
dtype,unit,default,doc=dtypeUnitDefaultDoc(val)
basedtype=(b[0] if (b:=getattr(dtype,'subdtype',None)) else dtype)
ret.dtypes+=[(fq,dtype)] # add to the compound type
ret.doc+=[docHead+f': `get{capitalize(key)}()`, `set{capitalize(key)}(…)`: '+doc]
ret.units[fq]=unit
if default is not None: ret.defaults[fq]=default # add to the defaults
def getter(self,*,fq=fq,unit=unit):
_T_assertDataset(self,f"when getting the value of '{fq}'")
if self.row is not None: value=self.ctx.dataset[fq,self.row]
else: value=self.ctx.dataset[fq]
if isinstance(value,bytes): value=value.decode('utf-8')
if unit is None: return value
return units.Quantity(value=value,unit=unit)
def _cookValue(val,*,unit,dtype,basedtype):
'Unit conversion, type conversion before assignment'
if unit: val=(units.Quantity(val).to(unit)).value
if isinstance(val,str): val=val.encode('utf-8')
#sys.stderr.write(f"{fq}: {basedtype}\n")
ret=np.array(val).astype(basedtype,casting='safe',copy=False)
# for object (variable-length) types, convertibility was checked but the result is discarded
if basedtype.kind=='O': return val
#sys.stderr.write(f"{fq}: cook {val} → {ret}\n")
return ret
def setter_direct(self,val,*,fq=fq,unit=unit,dtype=dtype,basedtype=basedtype):
_T_assertDataset(self,f"when setting the value of '{fq}'")
#_T_assertWritable(self,f"when setting the value of '{fq}'")
val=_cookValue(val,unit=unit,dtype=dtype,basedtype=basedtype)
# sys.stderr.write(f'{fq}: direct setting {val}\n')
if self.row is None: self.ctx.dataset[fq]=val
else: self.ctx.dataset[self.row,fq]=val
def setter_wholeRow(self,val,*,fq=fq,unit=unit,dtype=dtype,basedtype=basedtype):
_T_assertDataset(self,f"when setting the value of '{fq}'")
#_T_assertWritable(self,f"when setting the value of '{fq}'")
val=_cookValue(val,unit=unit,dtype=dtype,basedtype=basedtype)
#sys.stderr.write(f'{fq}: wholeRow setting {repr(val)}\n')
# workaround for bugs in h5py: for variable-length fields, and dim>1 subarrays:
# direct assignment does not work; must read the whole row, modify, write it back
# see https://stackoverflow.com/q/67192725/761090 and https://stackoverflow.com/q/67451714/761090
# kind=='O' covers h5py.vlen_dtype and strings (h5py.string_dtype) with variable length
if self.row is None: raise NotImplementedError('Broadcasting to variable-length fields or multidimensional subarrays not yet implemented.')
rowdata=self.ctx.dataset[self.row]
rowdata[self.ctx.dataset.dtype.names.index(fq)]=val
self.ctx.dataset[self.row]=rowdata
meth['get'+capitalize(key)]=getter
meth['set'+capitalize(key)]=(setter_wholeRow if (dtype.kind=='O' or dtype.ndim>1) else setter_direct)
elif 'schema' in val:
schema,path=val['schema'],val.get('path','{NAME}/{ROW}/')
path=path.replace('{NAME}',key)
if '{ROW}' not in path: raise ValueError(f"'{fq}': schema ref path '{path}' does not contain '{{ROW}}'.")
if not path.endswith('/'): raise ValueError(f"'{fq}': schema ref path '{path}' does not end with '/'.")
ret.subpaths[fq]=(path,schema)
# path=path[:-1] # remove trailing slash
def subschemaGetter(self,row=None,*,fq=fq,path=path,schema=schema):
rr=[self.row is None,row is None]
if sum(rr)==2: raise AttributeError(f"'{fq}': row index not set (or given as arg), unable to follow schema ref.")
if sum(rr)==0: raise AttributeError(f"'{fq}': row given both as index ({self.row}) and arg ({row}).")
if row is None: row=self.row
#_T_assertDataset(self,f"when accessing subschema '{path}'.")
#self.ctx.dataset[self.row] # catch invalid row index, data unused
#print(f"{fq}: getting {path}")
path=path.replace('{ROW}',str(row))
subgrp=self.ctx.h5group.require_group(path)
SchemaT=self.ctx.schemaRegistry[schema]
ret=SchemaT(top=HeavyStruct.TopContext(h5group=subgrp,schemaRegistry=self.ctx.schemaRegistry,pyroIds=self.ctx.pyroIds),row=None)
# print(f"{fq}: schema is {SchemaT}, returning: {ret}.")
return _registeredWithDaemon(self,ret)
ret.doc+=[docHead+f': `get{capitalize(key)}()`: nested data at `{path}`, schema `{schema}`.']
meth['get'+capitalize(key)]=subschemaGetter
else:
# recurse
ret.doc+=[docHead+f': `get{capitalize(key)}()`',''] # empty line for nesting in restructured text
cooked=_cookSchema(val,prefix=fq,schemaName=schemaName,fakeModule=fakeModule,datasetName=datasetName)
ret.append(cooked)
def nestedGetter(self,*,T=cooked.T):
#print('nestedGetter',T)
ret=T(other=self)
return _registeredWithDaemon(self,ret)
meth['get'+capitalize(key)]=nestedGetter # lambda self, T=cooked.T: T(self)
    def _registeredWithDaemon(context,obj):
        'Register *obj* with the Pyro daemon of *context* (if the context is published) and record its id for later unregistration; returns *obj* unchanged.'
        if not hasattr(context,'_pyroDaemon'): return obj
        context._pyroDaemon.register(obj)
        context.ctx.pyroIds.append(obj._pyroId)
        return obj
    def T_init(self,*,top=None,other=None,row=None):
        '''
        The constructor is a bit hairy, as the new context either:
        (1) nests inside TopContext (think of dataset);
        (2) nests inside an already nested context (think of sub-dataset);
        (3) adds row information, not changing location (row in (sub)dataset)
        (4) nests & adds row, such as in getMolecules(0) which is a shorthand for getMolecules()[0]
        '''
        if top is not None:
            # case (1): fresh context sitting directly on the storage
            assert isinstance(top,HeavyStruct.TopContext)
            self.ctx,self.row=top,row
        elif other is not None:
            # cases (2)-(4): share the storage context of *other*, possibly adding the row
            assert not isinstance(other,HeavyStruct.TopContext)
            # print(f'other.row={other.row}, row={row}')
            if (other.row is not None) and (row is not None): raise IndexError(f'Context already indexed, with row={row}.')
            self.ctx,self.row=other.ctx,(other.row if row is None else row)
            # print(f"[LEAF] {self}, other={other}")
        else: raise ValueError('One of *top* or *other* must be given.')
    def T_str(self):
        'Context string representation'
        # the Pyro id is appended only when the context was published via a daemon
        return F"<{self.__class__.__name__}, row={self.row}, ctx={self.ctx}{', _pyroId='+self._pyroId if hasattr(self,'_pyroDaemon') else ''}>"
def T_getitem(self,row):
'Indexing access; checks index validity and returns new context with the row set'
_T_assertDataset(self,msg=f'when trying to index row {row}')
if(row<0 or row>=self.ctx.dataset.shape[0]): raise IndexError(f"{fq}: row index {row} out of range 0…{self.ctx.dataset.shape[0]}.")
# self.ctx.dataset[row] # this would raise ValueError but iteration protocol needs IndexError
# print(f'Item #{row}: returning {self.__class__(self,row=row)}')
ret=self.__class__(other=self,row=row)
return _registeredWithDaemon(self,ret)
return ret
def T_len(self):
'Return sequence length'
if not _T_hasDataset(self): return 0
_T_assertDataset(self,msg=f'querying dataset length')
if self.row is not None: return IndexError('Row index already set, not behaving as sequence.')
return self.ctx.dataset.shape[0]
    def _T_hasDataset(self): return self.ctx.dataset or (self.__class__.datasetName in self.ctx.h5group)  # already-open handle, or dataset present in the HDF5 group
    def _T_assertDataset(self,msg=''):
        'checks that the backing dataset is present/open. Raises exception otherwise.'
        # lazily open the dataset when it exists in the group but was not opened yet
        if self.ctx.dataset is None:
            if self.__class__.datasetName in self.ctx.h5group: self.ctx.dataset=self.ctx.h5group[self.__class__.datasetName]
            else: raise RuntimeError(f'Dataset not yet initialized, use resize first{" ("+msg+")" if msg else ""}: {self.ctx.h5group.name}/{self.__class__.datasetName}.')
    def _T_assertWritable(self,msg):
        # h5py reports mode 'r+' for any file open for writing (including files created with 'w')
        if self.ctx.h5group.file.mode!='r+': raise RuntimeError(f'Underlying HDF5 file was not open for writing ({msg}).')
    def T_resize(self,size,reset=False,*,ret=ret):
        'Resizes the backing dataset; this will, as necessary, create a new dataset, or grow/shrink size of an existing dataset. New records are always default-initialized.'
        def _initrows(ds,rowmin,rowmax):
            'default-initialize contiguous range of rows rowmin…rowmax (inclusive)'
            defrow=ds[rowmin] # use first row as storage, assign all defaults into it, then copy over all other rows
            for fq,val in ret.defaults.items(): defrow[fq]=val
            ds[rowmin+1:rowmax+1]=defrow
        assert size>=0
        _T_assertWritable(self,msg=f'when resizing to {size}.')
        if reset: self.resize(size=0) # shrink to zero first so all rows come back default-initialized
        if self.ctx.dataset is None:
            dsname=self.__class__.datasetName
            if dsname not in self.ctx.h5group: # create new dataset, initialize, return
                if size==0: return # request to reset but nothing is here
                self.ctx.dataset=self.ctx.h5group.create_dataset(dsname,shape=(size,),maxshape=(None,),dtype=ret.dtypes,compression='gzip')
                _initrows(ds=self.ctx.dataset,rowmin=0,rowmax=size-1)
                return
            else: # open existing dataset
                self.ctx.dataset=self.ctx.h5group[dsname]
        size0=self.ctx.dataset.shape[0]
        if size==size0: return
        self.ctx.dataset.resize((size,)) # this changes size of the underlying HDF5 data
        # default-initialize added rows
        if size0<size: _initrows(ds=self.ctx.dataset,rowmin=size0,rowmax=size-1)
        else:
            # remove stale subpaths
            # sys.stderr.write(f'Removing stale subpaths {str(ret.subpaths)}, {size0} → {size}…\n')
            for fq,(subpath,schema) in ret.subpaths.items():
                for r in range(size,size0):
                    p=subpath.replace('{ROW}',str(r))
                    # sys.stderr.write(f'Resizing {self.ctx.dataset}, {prevSize} → {size}: deleting {p}\n')
                    if p in self.ctx.h5group: del self.ctx.h5group[p]
                    else: pass # sys.stderr.write(f'{self.ctx.h5group}: does not contain {p}, not deleted')
    def T_inject(self,other):
        'Replace this context data with a deep copy of data from *other* (same schema), via the dump/from_dump round-trip.'
        self.from_dump(other.to_dump())
    def T_to_dump(self,*,ret=ret):
        'Serialize into plain Python structures: one dict per row (a list of dicts when no row is selected); value fields become (raw value, unit) tuples, nested schemas recurse.'
        _T_assertDataset(self,msg=f'when dumping')
        def _onerow(row):
            # schema identification travels with the dump so that from_dump can check compatibility
            d={'_schema':{"name":schemaName,"version":schemaVersion}}
            for fq,unit in ret.units.items(): # value fields: raw stored value plus the schema unit
                d[fq]=(self.ctx.dataset[row,fq],unit)
            for fq,(subpath,schema) in ret.subpaths.items():
                SchemaT=self.ctx.schemaRegistry[schema]
                subpath=subpath.replace('{ROW}',str(row))
                if subpath not in self.ctx.h5group: continue # nested data never created for this row
                subgrp=self.ctx.h5group[subpath]
                subcontext=SchemaT(top=HeavyStruct.TopContext(h5group=subgrp,schemaRegistry=self.ctx.schemaRegistry,pyroIds=[]),row=None)
                d[fq]=subcontext.to_dump()
            return d
        if self.row is not None: return _onerow(self.row)
        else: return [_onerow(r) for r in range(self.ctx.dataset.shape[0])]
def T_from_dump(self,dump,*,ret=ret):
_T_assertWritable(self,msg=f'when applying dump')
def _onerow(row,di):
rowdata=self.ctx.dataset[row]
s2n,s2v=di['_schema']['name'],di['_schema']['version']
if s2n!=self.schemaName: raise ValueError(f'Schema mismatch: source {s2n}, target {self.schemaName}')
if s2v!=self.schemaVersion: log.warning('Schema {s2n} version mismatch: source {s2v}, target {self.schemaVersion}')
for fq,valUnit in di.items():
if fq=='_schema': continue
if fq in ret.units: # value field
rowdata[fq]=valUnit[0] if (valUnit[1] is None) else units.Quantity(value=valUnit[0],unit=valUnit[1]).to(ret.units[fq]).value
elif fq in ret.subpaths: # subpath
assert isinstance(valUnit,list)
subpath,schema=ret.subpaths[fq]
SchemaT=self.ctx.schemaRegistry[schema]
subpath=subpath.replace('{ROW}',str(row))
subgrp=self.ctx.h5group.require_group(subpath)
subcontext=SchemaT(top=HeavyStruct.TopContext(h5group=subgrp,schemaRegistry=self.ctx.schemaRegistry,pyroIds=[]),row=None)
subcontext.from_dump(valUnit)
else:
raise ValueError(f'Key {fq} not in target schema {self.schemaName}, in {self.ctx.h5group}.')
# key not in target schema
self.ctx.dataset[row]=rowdata
if self.row is not None:
assert isinstance(dump,dict)
_T_assertDataset(self,msg=f'when applying dump with row={self.row}')
_onerow(self.row,dump)
else:
assert isinstance(dump,list)
self.resize(len(dump),reset=True)
_T_assertDataset(self,msg=f'when applying dump')
for row,di in enumerate(dump): _onerow(row,di)
    def T_iter(self):
        'Iteration protocol: yield a row-indexed context for every row of the backing dataset.'
        _T_assertDataset(self,msg=f'when iterating')
        for row in range(self.ctx.dataset.shape[0]): yield self[row]
meth['__init__']=T_init
meth['__str__']=meth['__repr__']=T_str
meth['__getitem__']=T_getitem
meth['__len__']=T_len
meth['row']=None
meth['ctx']=None
# __del__ note: it would be nice to use context destructor to unregister contexts from Pyro
# (those which registered automatically). Since the daemon is holding one reference, however,
# the dtor will never be called, unfortunately
# those are defined only for the "root" context
if not prefix:
meth['resize']=T_resize
meth['to_dump']=T_to_dump
meth['from_dump']=T_from_dump
meth['inject']=T_inject
ret.dtypes=np.dtype(ret.dtypes)
T_bases=()
else:
T_bases=() # only top-level has metadata
T=type(T_name,T_bases,meth)
T.__module__=fakeModule.__name__ ## make the (T.__module__,T.__name__) tuple used in serialization unique
T.datasetName=datasetName
T=Pyro5.api.expose(T)
setattr(fakeModule,T_name,T)
if not prefix:
T.schemaName=schemaName # schema knows its own name, for convenience of creating schema registry
T.schemaVersion=schemaVersion
T.__doc__='\n'.join(ret.doc)+'\n'
return T
else:
ret.T=T
return ret
def makeSchemaRegistry(dd):
    '''
    Compile schema registry from dictionary representation; use ``json.loads`` to convert JSON schema to its dictionary representation.
    '''
    registry = {}
    for schemaDict in dd:
        ContextT = _cookSchema(schemaDict)
        registry[ContextT.schemaName] = ContextT
    return registry
def _make_grains(h5name):
    'Create the HDF5 file *h5name* and fill it with randomly generated grain/molecule/atom data (demo/benchmark for the sample schemas).'
    import time, random
    from mupif.units import U as u
    t0=time.time()
    atomCounter=0
    # precompiled schemas
    schemaRegistry=makeSchemaRegistry(json.loads(sampleSchemas_json))
    with h5py.File(h5name,'w') as h5:
        grp=h5.require_group('test')
        schemaT=schemaRegistry['org.mupif.sample.grain']
        # store both the schemas and the top-level schema name so the file is self-describing
        grp.attrs['schemas']=sampleSchemas_json
        grp.attrs['schema']=schemaT.schemaName
        grains=schemaT(top=HeavyStruct.TopContext(h5group=grp,schemaRegistry=schemaRegistry,pyroIds=[]))
        log.debug(f"{grains}")
        grains.resize(size=2)
        log.info(f"There is {len(grains)} grains.")
        for ig,g in enumerate(grains):
            #g=grains[ig]
            log.debug(f'grain {ig} {g}')
            g.getMolecules().resize(size=random.randint(5,20))
            log.debug(f"Grain #{ig} has {len(g.getMolecules())} molecules")
            for m in g.getMolecules():
                #for im in range(len(g.getMolecules())):
                #m=g.getMolecules()[im]
                # print('molecule: ',m)
                m.getIdentity().setMolecularWeight(random.randint(1,10)*u.yg)
                m.getAtoms().resize(size=random.randint(30,60))
                for a in m.getAtoms():
                    #for ia in range(len(m.getAtoms())):
                    #a=m.getAtoms()[ia]
                    a.getIdentity().setElement(random.choice(['H','N','Cl','Na','Fe']))
                    a.getProperties().getTopology().setPosition((1,2,3)*u.nm)
                    a.getProperties().getTopology().setVelocity((24,5,77)*u.m/u.s)
                    # not yet, see https://stackoverflow.com/q/67192725/761090
                    struct=np.array([random.randint(1,20) for i in range(random.randint(5,20))],dtype='l')
                    a.getProperties().getTopology().setStructure(struct)
                    atomCounter+=1
    t1=time.time()
    log.info(f'{atomCounter} atoms created in {t1-t0:g} sec ({atomCounter/(t1-t0):g}/sec).')
def _read_grains(h5name):
    'Traverse all grain/molecule/atom data in *h5name*, reading every field (demo/benchmark counterpart of _make_grains).'
    import time
    # note how this does NOT need any schemas defined, they are all pulled from the HDF5
    t0 = time.time()
    atomCounter = 0
    with h5py.File(h5name, 'r') as h5:
        grp = h5['test']
        schemaRegistry = makeSchemaRegistry(json.loads(grp.attrs['schemas']))
        grains = schemaRegistry[grp.attrs['schema']](top=HeavyStruct.TopContext(h5group=grp, schemaRegistry=schemaRegistry, pyroIds=[]))
        for g in grains:
            # print(g)
            log.info(f'Grain #{g.row} has {len(g.getMolecules())} molecules.')
            for m in g.getMolecules():
                m.getIdentity().getMolecularWeight()
                for a in m.getAtoms():
                    # values are read and discarded — this only measures access speed
                    a.getIdentity().getElement()
                    a.getProperties().getTopology().getPosition()
                    a.getProperties().getTopology().getVelocity()
                    a.getProperties().getTopology().getStructure()
                    atomCounter += 1
    t1 = time.time()
    log.info(f'{atomCounter} atoms read in {t1-t0:g} sec ({atomCounter/(t1-t0):g}/sec).')
def HeavyDataHandle(*args, **kwargs):
    'Deprecated alias of :obj:`HeavyStruct`; emits a DeprecationWarning and forwards all arguments.'
    import warnings
    # stacklevel=2 makes the warning point at the caller rather than at this shim
    warnings.warn("HeavyDataHandle class was renamed to HeavyStruct, update your code.", DeprecationWarning, stacklevel=2)
    return HeavyStruct(*args, **kwargs)
@Pyro5.api.expose
class HeavyStruct(HeavyDataBase):
h5group: str='/'
schemaName: typing.Optional[str] = None
schemasJson: typing.Optional[str] = None
id: dataid.DataID = dataid.DataID.ID_None
# __doc__ is a computed property which will add documentation for the sample JSON schemas
__doc0__ = '''
*mode* specifies how the underlying HDF5 file (:obj:`h5path`) is to be opened:
* ``readonly`` only allows reading;
    * ``readwrite`` allows reading and writing;
* ``create`` creates new HDF5 file, raising an exception if the file exists already; if :obj:`h5path` is empty, a temporary file will be created automatically;
* ``overwrite`` create new HDF5 file, allowing overwriting an existing file;
* ``create-memory`` create HDF5 file in RAM only; if :obj:`h5path` is non-empty, it will be written out when data is closed via :obj:`closeData` (and discarded otherwise);
*schemaName* and *schemasJson* must be provided when creating new data (``overwrite``, ``create``, ``create-memory``) and are ignored otherwise.
This class can be used as context manager, in which case the :obj:`openData` and :obj:`closeData` will be called automatically.
'''
# from https://stackoverflow.com/a/3203659/761090
class _classproperty(object):
def __init__(self, getter): self.getter = getter
def __get__(self, instance, owner): return self.getter(owner)
@_classproperty
def __doc__(cls):
ret = cls.__doc0__
reg = makeSchemaRegistry(json.loads(sampleSchemas_json))
for key, val in reg.items():
ret += '\n\n'+val.__doc__.replace('`', '``')
return ret
# this is not useful over Pyro (the Proxy defines its own context manager) but handy for local testing
def __enter__(self): return self.openData(mode=self.mode)
def __exit__(self, exc_type, exc_value, traceback): self.closeData()
@dataclass
@Pyro5.api.expose
class TopContext:
'This class is for internal use only. It is the return type of :obj:`HeavyStruct.openData` and others.'
h5group: Any
pyroIds: list
schemaRegistry: dict
dataset: Any = None
def __str__(self):
return f'{self.__module__}.{self.__class__.__name__}(h5group={str(self.h5group)},dataset={str(self.dataset)},schemaRegistry=<<{",".join(self.schemaRegistry.keys())}>>)'
def __init__(self, **kw):
super().__init__(**kw)
@pydantic.validate_arguments
def openData(self,mode=typing.Optional[HeavyDataBase_ModeChoice]):
'''
Return top context for the underlying HDF5 data. The context is automatically published through Pyro5 daemon, if the :obj:`HeavyStruct` instance is also published (this is true recursively, for all subcontexts). The contexts are unregistered when :obj:`HeavyStruct.closeData` is called (directly or via context manager).
'''
self.openStorage(mode=mode)
extant=(self.h5group in self._h5obj and 'schema' in self._h5obj[self.h5group].attrs)
if extant:
# for modes readonly, readwrite
grp = self._h5obj[self.h5group]
schemaRegistry = makeSchemaRegistry(json.loads(grp.attrs['schemas']))
top=schemaRegistry[grp.attrs['schema']](top=HeavyStruct.TopContext(h5group=grp, schemaRegistry=schemaRegistry, pyroIds=self.pyroIds))
self.updateMetadata(json.loads(grp.attrs['metadata']))
return self._returnProxy(top)
else:
if not self.schemaName or not self.schemasJson:
raise ValueError(f'Both *schema* and *schemaJson* must be given (opening {self.h5path} in mode {self.mode})')
# modes: overwrite, create, create-memory
grp = self._h5obj.require_group(self.h5group)
grp.attrs['schemas'] = self.schemasJson
grp.attrs['schema'] = self.schemaName
grp.attrs['metadata'] = json.dumps(self.getAllMetadata())
schemaRegistry = makeSchemaRegistry(json.loads(self.schemasJson))
top = schemaRegistry[grp.attrs['schema']](top=HeavyStruct.TopContext(h5group=grp, schemaRegistry=schemaRegistry, pyroIds=self.pyroIds))
return self._returnProxy(top)
'''
future ideas:
* Create all context classes as Ctx_<md5 of the JSON schema> so that the name is unique.\
* Register classes to Pyro when the schema is read
* Register classes to remote Pyro when the heavy file is transferred?
'''
# uses relative imports, therefore run stand-alone as:
#
#    PYTHONPATH=.. python3 -m mupif.heavydata
#
if __name__ == '__main__':
    # quick self-test / demo: build a sample HDF5 file, read it back, then
    # exercise dump/restore of a nested context
    import json
    import pprint
    print(HeavyStruct.__doc__)
    # print(json.dumps(json.loads(sampleSchemas_json),indent=2))
    _make_grains('/tmp/grains.h5')
    _read_grains('/tmp/grains.h5')
    # this won't work through Pyro yet
    pp = HeavyStruct(h5path='/tmp/grains.h5', h5group='test')
    for key, val in pp.getSchemaRegistry(compile=True).items():
        print(val.__doc__.replace('`', '``'))
    grains = pp.openData('readonly')
    print(pp.openData(mode='readonly')[0].getMolecules())
    print(grains.getMolecules(0).getAtoms(5).getIdentity().getElement())
    print(grains[0].getMolecules()[5].getAtoms().getIdentity().getElement())
    # FIX: the second, duplicate "import pprint" that used to be here was removed
    mol5dump = grains[0].getMolecules()[5].to_dump()
    pp.closeData()
    grains = pp.openData('readwrite')
    # restore molecule #5's dump into molecule #4 and check both serialize identically
    grains[0].getMolecules()[4].from_dump(mol5dump)
    mol4dump = grains[0].getMolecules()[4].to_dump()
    # pprint.pprint(mol4dump)
    # pprint.pprint(mol4dump,stream=open('/tmp/m4.txt','w'))
    # pprint.pprint(mol5dump,stream=open('/tmp/m5.txt','w'))
    print(str(mol4dump) == str(mol5dump))
    pp.closeData()
|
mupif/mupif
|
mupif/heavystruct.py
|
Python
|
lgpl-3.0
| 46,818
|
[
"Dalton"
] |
36b54db37fc75bd91818e76413980b42e889a24f4e992990cab81c7c95195aa1
|
import logging
import random
import numpy as np
from ray.rllib.agents import with_common_config
from ray.rllib.agents.dreamer.dreamer_torch_policy import DreamerTorchPolicy
from ray.rllib.agents.trainer_template import build_trainer
from ray.rllib.execution.common import STEPS_SAMPLED_COUNTER, \
LEARNER_INFO, _get_shared_metrics
from ray.rllib.policy.sample_batch import DEFAULT_POLICY_ID, SampleBatch
from ray.rllib.evaluation.metrics import collect_metrics
from ray.rllib.agents.dreamer.dreamer_model import DreamerModel
from ray.rllib.execution.rollout_ops import ParallelRollouts
from ray.rllib.utils.typing import SampleBatchType
logger = logging.getLogger(__name__)
# yapf: disable
# __sphinx_doc_begin__
# Dreamer hyperparameters layered on top of RLlib's common trainer config.
DEFAULT_CONFIG = with_common_config({
    # PlaNET Model LR
    "td_model_lr": 6e-4,
    # Actor LR
    "actor_lr": 8e-5,
    # Critic LR
    "critic_lr": 8e-5,
    # Grad Clipping
    "grad_clip": 100.0,
    # Discount
    "discount": 0.99,
    # Lambda
    "lambda": 0.95,
    # Clipping is done inherently via policy tanh.
    "clip_actions": False,
    # Training iterations per data collection from real env
    "dreamer_train_iters": 100,
    # Horizon for Environment (1000 for Mujoco/DMC)
    "horizon": 1000,
    # Number of episodes to sample for Loss Calculation
    "batch_size": 50,
    # Length of each episode to sample for Loss Calculation
    "batch_length": 50,
    # Imagination Horizon for Training Actor and Critic
    "imagine_horizon": 15,
    # Free Nats
    "free_nats": 3.0,
    # KL Coeff for the Model Loss
    "kl_coeff": 1.0,
    # Distributed Dreamer not implemented yet
    "num_workers": 0,
    # Prefill Timesteps
    "prefill_timesteps": 5000,
    # This should be kept at 1 to preserve sample efficiency
    "num_envs_per_worker": 1,
    # Exploration Gaussian
    "explore_noise": 0.3,
    # Batch mode
    "batch_mode": "complete_episodes",
    # Custom Model
    "dreamer_model": {
        "custom_model": DreamerModel,
        # RSSM/PlaNET parameters
        "deter_size": 200,
        "stoch_size": 30,
        # CNN Decoder Encoder
        "depth_size": 32,
        # General Network Parameters
        "hidden_size": 400,
        # Action STD
        "action_init_std": 5.0,
    },
    "env_config": {
        # Repeats action send by policy for frame_skip times in env
        "frame_skip": 2,
    }
})
# __sphinx_doc_end__
# yapf: enable
class EpisodicBuffer(object):
    def __init__(self, max_length: int = 1000, length: int = 50):
        """Data structure that stores episodes and samples chunks
        of size length from episodes

        Args:
            max_length: Maximum episodes it can store
            length: Episode chunking length in sample()
        """
        # Stores all episodes into a list: List[SampleBatchType]
        self.episodes = []
        self.max_length = max_length
        self.timesteps = 0
        self.length = length

    def add(self, batch: SampleBatchType):
        """Splits a SampleBatch into episodes and adds episodes
        to the episode buffer

        Args:
            batch: SampleBatch to be added
        """
        self.timesteps += batch.count
        episodes = batch.split_by_episode()

        for i, e in enumerate(episodes):
            episodes[i] = self.preprocess_episode(e)
        self.episodes.extend(episodes)

        if len(self.episodes) > self.max_length:
            delta = len(self.episodes) - self.max_length
            # Drop oldest episodes
            self.episodes = self.episodes[delta:]

    def preprocess_episode(self, episode: SampleBatchType):
        """Batch format should be in the form of (s_t, a_(t-1), r_(t-1))
        When t=0, the resetted obs is paired with action and reward of 0.

        Args:
            episode: SampleBatch representing an episode
        """
        obs = episode["obs"]
        new_obs = episode["new_obs"]
        action = episode["actions"]
        reward = episode["rewards"]

        act_shape = action.shape
        # zero action/reward are paired with the reset observation at t=0
        act_reset = np.array([0.0] * act_shape[-1])[None]
        rew_reset = np.array(0.0)[None]
        # final observation closes the (s, a, r) alignment
        obs_end = np.array(new_obs[act_shape[0] - 1])[None]

        batch_obs = np.concatenate([obs, obs_end], axis=0)
        batch_action = np.concatenate([act_reset, action], axis=0)
        batch_rew = np.concatenate([rew_reset, reward], axis=0)

        new_batch = {
            "obs": batch_obs,
            "rewards": batch_rew,
            "actions": batch_action
        }
        return SampleBatch(new_batch)

    def sample(self, batch_size: int):
        """Samples [batch_size, length] from the list of episodes

        Args:
            batch_size: batch_size to be sampled

        Raises:
            ValueError: if no stored episode is at least ``self.length``
                timesteps long (the original code would spin forever here).
        """
        # BUG FIX: bail out instead of looping forever when no stored episode
        # is long enough to yield a chunk of ``self.length`` timesteps (this
        # also covers an entirely empty buffer).
        if not any(e.count >= self.length for e in self.episodes):
            raise ValueError(
                "EpisodicBuffer.sample: no stored episode has at least "
                "{} timesteps".format(self.length))
        episodes_buffer = []
        while len(episodes_buffer) < batch_size:
            rand_index = random.randint(0, len(self.episodes) - 1)
            episode = self.episodes[rand_index]
            if episode.count < self.length:
                continue
            available = episode.count - self.length
            index = random.randint(0, available)
            episodes_buffer.append(episode.slice(index, index + self.length))

        # stack per-key arrays into [batch_size, length, ...] tensors
        batch = {}
        for k in episodes_buffer[0].keys():
            batch[k] = np.stack([e[k] for e in episodes_buffer], axis=0)

        return SampleBatch(batch)
def total_sampled_timesteps(worker):
    """Return the global timestep counter of the worker's default policy."""
    default_policy = worker.policy_map[DEFAULT_POLICY_ID]
    return default_policy.global_timestep
class DreamerIteration:
    """Runs ``dreamer_train_iters`` learner updates per rollout, attaches a
    GIF on the last iteration, updates shared metrics and refills the episode
    buffer with the freshly collected samples."""

    def __init__(self, worker, episode_buffer, dreamer_train_iters, batch_size,
                 act_repeat):
        self.worker = worker
        self.episode_buffer = episode_buffer
        self.dreamer_train_iters = dreamer_train_iters
        self.repeat = act_repeat
        self.batch_size = batch_size

    def __call__(self, samples):
        # Dreamer Training Loop
        for n in range(self.dreamer_train_iters):
            # FIX: was a bare debug ``print(n)``; use the module logger instead
            logger.debug("Dreamer train iteration %d/%d", n,
                         self.dreamer_train_iters)
            batch = self.episode_buffer.sample(self.batch_size)
            if n == self.dreamer_train_iters - 1:
                batch["log_gif"] = True
            fetches = self.worker.learn_on_batch(batch)

        # Custom Logging
        policy_fetches = self.policy_stats(fetches)
        if "log_gif" in policy_fetches:
            gif = policy_fetches["log_gif"]
            policy_fetches["log_gif"] = self.postprocess_gif(gif)

        # Metrics Calculation
        metrics = _get_shared_metrics()
        metrics.info[LEARNER_INFO] = fetches
        # counter is in environment steps: sampled steps times action repeat
        metrics.counters[STEPS_SAMPLED_COUNTER] = self.episode_buffer.timesteps
        metrics.counters[STEPS_SAMPLED_COUNTER] *= self.repeat
        res = collect_metrics(local_worker=self.worker)

        res["info"] = metrics.info
        res["info"].update(metrics.counters)
        res["timesteps_total"] = metrics.counters[STEPS_SAMPLED_COUNTER]

        self.episode_buffer.add(samples)
        return res

    def postprocess_gif(self, gif: np.ndarray):
        """Scale [0, 1] float frames to uint8 and tile the batch along width:
        (B, T, C, H, W) -> (1, T, C, H, B*W)."""
        gif = np.clip(255 * gif, 0, 255).astype(np.uint8)
        B, T, C, H, W = gif.shape
        frames = gif.transpose((1, 2, 3, 0, 4)).reshape((1, T, C, H, B * W))
        return frames

    def policy_stats(self, fetches):
        # learner stats of the single (default) policy
        return fetches[DEFAULT_POLICY_ID]["learner_stats"]
def execution_plan(workers, config):
    """Build the Dreamer execution plan: prefill the episodic replay buffer
    with uniform exploration, then apply DreamerIteration to each rollout."""
    # Special Replay Buffer for Dreamer agent
    episode_buffer = EpisodicBuffer(length=config["batch_length"])

    local_worker = workers.local_worker()

    # Prefill episode buffer with initial exploration (uniform sampling)
    while total_sampled_timesteps(local_worker) < config["prefill_timesteps"]:
        samples = local_worker.sample()
        episode_buffer.add(samples)

    iteration = DreamerIteration(
        local_worker,
        episode_buffer,
        config["dreamer_train_iters"],
        config["batch_size"],
        config["action_repeat"])
    return ParallelRollouts(workers).for_each(iteration)
def get_policy_class(config):
    """Dreamer is torch-only; the torch policy is returned unconditionally."""
    return DreamerTorchPolicy
def validate_config(config):
    """Check Dreamer-specific constraints and derive dependent settings.

    Mutates *config* in place: sets ``action_repeat`` from the env's
    ``frame_skip`` and rescales ``horizon`` accordingly.
    """
    action_repeat = config["env_config"]["frame_skip"]
    config["action_repeat"] = action_repeat
    if config["num_gpus"] > 1:
        raise ValueError("`num_gpus` > 1 not yet supported for Dreamer!")
    if config["framework"] != "torch":
        raise ValueError("Dreamer not supported in Tensorflow yet!")
    if config["batch_mode"] != "complete_episodes":
        raise ValueError("truncate_episodes not supported")
    if config["num_workers"] != 0:
        raise ValueError("Distributed Dreamer not supported yet!")
    if config["clip_actions"]:
        raise ValueError("Clipping is done inherently via policy tanh!")
    if action_repeat > 1:
        # horizon is counted in env steps; with frame skipping each policy
        # step covers action_repeat env steps
        config["horizon"] = config["horizon"] / action_repeat
# Trainer entry point: wires the default config, the torch policy and the
# custom execution plan into RLlib's trainer template.
DREAMERTrainer = build_trainer(
    name="Dreamer",
    default_config=DEFAULT_CONFIG,
    default_policy=DreamerTorchPolicy,
    get_policy_class=get_policy_class,
    execution_plan=execution_plan,
    validate_config=validate_config)
|
pcmoritz/ray-1
|
rllib/agents/dreamer/dreamer.py
|
Python
|
apache-2.0
| 9,028
|
[
"Gaussian"
] |
044ccba1331ef60590669098c1308a624e9a0413206f831f4804c187ccef5c8e
|
#!/usr/bin/env python
# =============================================================================
# Initialization
# =============================================================================
from active_worker.task import task
from task_types import TaskTypes as tt
import numpy as np
import h5py
num_surrs = 10
@task
def crosscorrelogram_task(inputdata, number_of_jobs, job_id):
    '''
    Task Manifest Version: 1
    Full Name: crosscorrelogram_task
    Caption: cross-correlogram
    Author: Elephant-Developers
    Description: |
        This task calculates all pair-wise cross-correlograms between all
        combinations of spike trains in the input file. Significance of
        the correlation is evaluated based on spike-dither surrogates.
    Categories:
        - FDAT
    Compatible_queues: ['cscs_viz']
    Accepts:
        inputdata: application/unknown
        number_of_jobs: long
        job_id: long
    Returns:
        res: application/unknown
    '''
    # NOTE(review): the docstring above is a machine-parsed task manifest, do
    # not edit it. This function uses a Python 2 print statement below, so the
    # module is Python-2-only as written.
    import quantities as pq
    import neo
    import elephant

    # sanity check: job_id must index into the job partition below
    if job_id > number_of_jobs:
        print "Input data is invalid, exiting"
        return

    # =========================================================================
    # Load data
    # =========================================================================

    # stage the input file
    original_path = crosscorrelogram_task.task.uri.get_file(inputdata)
    session = neo.NeoHdf5IO(filename=original_path)
    block = session.read_block()

    # select spike trains
    sts = block.filter(use_st=True)
    # print("Number of spike trains: " + str(len(sts)))

    # =========================================================================
    # Cross-correlograms
    # =========================================================================
    # correlogram window: +/- max_lag_bins bins of width lag_res
    max_lag_bins = 200
    lag_res = 1 * pq.ms
    max_lag = max_lag_bins * lag_res
    smoothing = 10 * pq.ms

    num_neurons = len(sts)

    # per-pair results, each dict keyed by the global pair index calc_i
    cc = {}
    cc['unit_i'] = {}
    cc['unit_j'] = {}
    cc['times_ms'] = {}
    cc['original'] = {}
    cc['surr'] = {}
    cc['original_measure'] = {}
    cc['surr_measure'] = {}
    cc['pvalue'] = {}

    # create all combinations of tasks
    num_total_pairs = 0
    all_combos_unit_i = []
    all_combos_unit_j = []
    for ni in range(num_neurons):
        for nj in range(ni, num_neurons):
            all_combos_unit_i.append(ni)
            all_combos_unit_j.append(nj)
            num_total_pairs += 1

    # calculate indices in cc['unit_i'] list which to calculate for each task
    idx = np.linspace(0, num_total_pairs, number_of_jobs + 1, dtype=int)
    task_starts_idx = idx[:-1]
    task_stop_idx = idx[1:]

    # Loop over all pairs of neurons assigned to this job
    for calc_i in range(task_starts_idx[job_id], task_stop_idx[job_id]):
        # save neuron i,j index
        ni = all_combos_unit_i[calc_i]
        nj = all_combos_unit_j[calc_i]
        cc['unit_i'][calc_i] = ni
        cc['unit_j'][calc_i] = nj

        print("Cross-correlating %i and %i" % (ni, nj))

        # original CCH
        cco = elephant.spikecorr.cch(
            sts[ni], sts[nj], w=lag_res, lag=max_lag, smooth=smoothing)
        cc['original'][calc_i] = cco.magnitude
        cc['times_ms'][calc_i] = cco.times.rescale(pq.ms).magnitude

        # extract measure around zero lag
        ind = np.argmin(np.abs(cco.times))
        ccom = cch_measure(cco, ind)
        cc['original_measure'][calc_i] = ccom

        # spike-dither surrogates for the significance estimate
        surr_i = elephant.surrogates.spike_dithering(
            sts[ni], dither=50. * pq.ms, n=num_surrs)
        surr_j = elephant.surrogates.spike_dithering(
            sts[nj], dither=50. * pq.ms, n=num_surrs)

        ccs = []
        ccsm = []
        # cross-correlogram of each surrogate pair
        for surrogate in range(num_surrs):
            scc = elephant.spikecorr.cch(
                surr_i[surrogate], surr_j[surrogate],
                w=lag_res, lag=max_lag, smooth=smoothing)
            ccs.append(scc.magnitude)
            ccsm.append(cch_measure(scc, ind))
        cc['surr'][calc_i] = np.array(ccs)
        cc['surr_measure'][calc_i] = np.sort(ccsm)
        # unnormalized p-value: number of surrogate measures >= original measure
        cc['pvalue'][calc_i] = np.count_nonzero(np.array(ccsm) >= ccom)

    # save result to hdf5
    outputname = 'cc_result'+str(number_of_jobs)+'_'+str(job_id)+'.h5'
    export_hdf5(cc, outputname)
    return crosscorrelogram_task.task.uri.save_file(mime_type='\
application/unknown',
                                                    src_path=outputname,
                                                    dst_path=outputname)

    # write parameters to disk
    # import h5py_wrapper.wrapper
    # h5py_wrapper.wrapper.add_to_h5(
    #     'correlation_output_' + filename + '_' + str(job_id) + '.h5',
    #     cc, write_mode='w', overwrite_dataset=True)
def cch_measure(cch, ind):
    """Sum the cross-correlogram magnitude in a +/-5 bin window around *ind*."""
    window = cch[ind - 5:ind + 5]
    return np.sum(window.magnitude)
def export_hdf5(cc, outputname):
    """Write every entry of the result dict *cc* into the HDF5 file *outputname*.

    Returns the (already closed) h5py File object, matching the original
    behaviour of the caller's contract.
    """
    h5file = h5py.File(outputname, 'w')
    # one writer per key of cc, invoked in the original order
    writers = (get_hdf5_surr_measure, get_hdf5_original_measure,
               get_hdf5_pvalue, get_hdf5_unit_i, get_hdf5_unit_j,
               get_hdf5_times_ms, get_hdf5_surr, get_hdf5_original)
    for write in writers:
        write(cc, h5file)
    h5file.close()
    return h5file
def get_hdf5_surr_measure(cc, file):
    """Write cc['surr_measure'] (sorted surrogate measures per pair) to
    /cc_group/surr_measure with shape (n_pairs, num_surrs)."""
    # BUG FIX: the dict is keyed by the global pair index calc_i, which starts
    # at task_starts_idx[job_id] (not 0); index rows by sorted key order
    # instead of assuming keys 0..n-1.
    keys = sorted(cc['surr_measure'])
    n_rows = len(keys)
    dataset_surr_measure = file.create_dataset("/cc_group/surr_measure",
                                               (n_rows, num_surrs),
                                               dtype=h5py.h5t.NATIVE_FLOAT)
    data = np.zeros((n_rows, num_surrs))
    for row, key in enumerate(keys):
        values = cc['surr_measure'][key]
        for j in range(len(values)):
            data[row][j] = values[j]
    dataset_surr_measure[...] = data
def get_hdf5_original_measure(cc, file):
    """Write cc['original_measure'] (one scalar per pair) to
    /cc_group/original_measure with shape (n_pairs, 1)."""
    # BUG FIX: ``.items()[i][1]`` was Python-2-only and relied on arbitrary
    # dict ordering; iterate keys in sorted (== pair-index) order instead.
    keys = sorted(cc['original_measure'])
    n_rows = len(keys)
    dataset_orig_measure = file.create_dataset("/cc_group/original_measure",
                                               (n_rows, 1),
                                               dtype=h5py.h5t.NATIVE_FLOAT)
    data = np.zeros((n_rows, 1))
    for row, key in enumerate(keys):
        data[row] = cc['original_measure'][key]
    dataset_orig_measure[...] = data
def get_hdf5_pvalue(cc, file):
    """Write cc['pvalue'] (surrogate exceedance count per pair) to
    /cc_group/pvalue with shape (n_pairs, 1)."""
    # BUG FIX: ``.items()[i][1]`` was Python-2-only and relied on arbitrary
    # dict ordering; iterate keys in sorted (== pair-index) order instead.
    keys = sorted(cc['pvalue'])
    n_rows = len(keys)
    dataset_pvalue = file.create_dataset("/cc_group/pvalue",
                                         (n_rows, 1),
                                         dtype=h5py.h5t.NATIVE_FLOAT)
    data = np.zeros((n_rows, 1))
    for row, key in enumerate(keys):
        data[row] = cc['pvalue'][key]
    dataset_pvalue[...] = data
def get_hdf5_unit_i(cc, file):
    """Write cc['unit_i'] (first neuron index per pair) to /cc_group/unit_i
    with shape (n_pairs, 1)."""
    # BUG FIX: ``.items()[i][1]`` was Python-2-only and relied on arbitrary
    # dict ordering; iterate keys in sorted (== pair-index) order instead.
    keys = sorted(cc['unit_i'])
    n_rows = len(keys)
    dataset_unit_i = file.create_dataset("/cc_group/unit_i",
                                         (n_rows, 1),
                                         dtype=h5py.h5t.NATIVE_FLOAT)
    data = np.zeros((n_rows, 1))
    for row, key in enumerate(keys):
        data[row] = cc['unit_i'][key]
    dataset_unit_i[...] = data
def get_hdf5_unit_j(cc, file):
    """Write cc['unit_j'] (second neuron index per pair) to /cc_group/unit_j
    with shape (n_pairs, 1)."""
    # BUG FIX: ``.items()[i][1]`` was Python-2-only and relied on arbitrary
    # dict ordering; iterate keys in sorted (== pair-index) order instead.
    keys = sorted(cc['unit_j'])
    n_rows = len(keys)
    dataset_unit_j = file.create_dataset("/cc_group/unit_j",
                                         (n_rows, 1),
                                         dtype=h5py.h5t.NATIVE_FLOAT)
    data = np.zeros((n_rows, 1))
    for row, key in enumerate(keys):
        data[row] = cc['unit_j'][key]
    dataset_unit_j[...] = data
def get_hdf5_times_ms(cc, file):
    """Write cc['times_ms'] (lag axis per pair, in ms) to /cc_group/times_ms."""
    # BUG FIX: rows were accessed as cc['times_ms'][i] for i in 0..n-1, but
    # the dict is keyed by the global pair index calc_i (KeyError for any
    # job_id > 0); iterate sorted keys instead.
    keys = sorted(cc['times_ms'])
    n_rows = len(keys)
    n_cols = cc['times_ms'][keys[0]].size if keys else 0
    dataset_times_ms = file.create_dataset("/cc_group/times_ms",
                                           (n_rows, n_cols),
                                           dtype=h5py.h5t.NATIVE_FLOAT)
    data = np.zeros((n_rows, n_cols))
    for row, key in enumerate(keys):
        values = cc['times_ms'][key]
        for j in range(values.size):
            data[row][j] = values.item(j)
    dataset_times_ms[...] = data
def get_hdf5_surr(cc, file):
    """Write cc['surr'] (surrogate correlograms per pair, flattened) to
    /cc_group/surr."""
    # BUG FIX: rows were accessed as cc['surr'][i] for i in 0..n-1, but the
    # dict is keyed by the global pair index calc_i (KeyError for any
    # job_id > 0); iterate sorted keys instead.
    keys = sorted(cc['surr'])
    n_rows = len(keys)
    n_cols = cc['surr'][keys[0]].size if keys else 0
    dataset_surr = file.create_dataset("/cc_group/surr",
                                       (n_rows, n_cols),
                                       dtype=h5py.h5t.NATIVE_FLOAT)
    data = np.zeros((n_rows, n_cols))
    for row, key in enumerate(keys):
        values = cc['surr'][key]
        # .item(j) flattens the (num_surrs, n_lags) array row-major,
        # matching the original layout
        for j in range(values.size):
            data[row][j] = values.item(j)
    dataset_surr[...] = data
def get_hdf5_original(cc, file):
    """Write cc['original'] (original correlogram per pair) to
    /cc_group/original."""
    # BUG FIX: rows were accessed as cc['original'][i] for i in 0..n-1, but
    # the dict is keyed by the global pair index calc_i (KeyError for any
    # job_id > 0); iterate sorted keys instead.
    keys = sorted(cc['original'])
    n_rows = len(keys)
    n_cols = cc['original'][keys[0]].size if keys else 0
    dataset_orig = file.create_dataset("/cc_group/original",
                                       (n_rows, n_cols),
                                       dtype=h5py.h5t.NATIVE_FLOAT)
    data = np.zeros((n_rows, n_cols))
    for row, key in enumerate(keys):
        values = cc['original'][key]
        for j in range(values.size):
            data[row][j] = values.item(j)
    dataset_orig[...] = data
if __name__ == '__main__':
    # this number relates to the "-t" parameter:
    # -t 0-X => number_of_jobs=X+1
    # INPUT-second parameter
    # number_of_jobs is (0, 200]
    number_of_jobs = 1

    # INPUT-third parameter
    # job parameter: a number between 0 and number_of_jobs-1;
    # taken from the PBS array id when running under PBS, 0 otherwise
    import os
    PBS_value = os.getenv('PBS_ARRAYID')
    job_id = int(PBS_value) if PBS_value is not None else 0

    # INPUT-first parameter
    inputdata = tt.URI('application/unknown', 'data/experiment.h5')

    crosscorrelogram_task(inputdata, number_of_jobs, job_id)
|
jakobj/UP-Tasks
|
Elephant/crosscorrelogram_task/crosscorrelogram_task.py
|
Python
|
gpl-2.0
| 10,090
|
[
"NEURON"
] |
487cce2d8f47c92705c1f7c1de2e98ef5965835709d5601703694edb58b06b50
|
#!/usr/bin/env python
# -*- Mode: python; tab-width: 4; indent-tabs-mode:nil; coding:utf-8 -*-
# vim: tabstop=4 expandtab shiftwidth=4 softtabstop=4 fileencoding=utf-8
#
# MDAnalysis --- http://www.mdanalysis.org
# Copyright (c) 2006-2016 The MDAnalysis Development Team and contributors
# (see the file AUTHORS for the full list of names)
#
# Released under the GNU Public Licence, v2 or any higher version
#
# Please cite your use of MDAnalysis in published work:
#
# R. J. Gowers, M. Linke, J. Barnoud, T. J. E. Reddy, M. N. Melo, S. L. Seyler,
# D. L. Dotson, J. Domanski, S. Buchoux, I. M. Kenney, and O. Beckstein.
# MDAnalysis: A Python package for the rapid analysis of molecular dynamics
# simulations. In S. Benthall and S. Rostrup editors, Proceedings of the 15th
# Python in Science Conference, pages 102-109, Austin, TX, 2016. SciPy.
#
# N. Michaud-Agrawal, E. J. Denning, T. B. Woolf, and O. Beckstein.
# MDAnalysis: A Toolkit for the Analysis of Molecular Dynamics Simulations.
# J. Comput. Chem. 32 (2011), 2319--2327, doi:10.1002/jcc.21787
#
"""Setuptools-based setup script for MDAnalysis.
A working installation of NumPy <http://numpy.scipy.org> is required.
For a basic installation just type the command::
python setup.py install
For more in-depth instructions, see the installation section at the
MDAnalysis Wiki:
https://github.com/MDAnalysis/mdanalysis/wiki/INSTALL
Also free to ask on the MDAnalysis mailing list for help:
http://groups.google.com/group/mdnalysis-discussion
(Note that the group really is called `mdnalysis-discussion' because
Google groups forbids any name that contains the string `anal'.)
"""
from __future__ import print_function
from setuptools import setup, Extension, find_packages
from distutils.ccompiler import new_compiler
import codecs
import os
import sys
import shutil
import tempfile
import warnings
# Make sure I have the right Python version.
if sys.version_info[:2] < (2, 7):
    print('MDAnalysis requires Python 2.7 or better. Python {0:d}.{1:d} detected'.format(*
          sys.version_info[:2]))
    print('Please upgrade your version of Python.')
    sys.exit(-1)

# Py2/Py3 compatibility: configparser was renamed in Python 3, and text files
# need an explicit encoding argument there.
if sys.version_info[0] < 3:
    import ConfigParser as configparser
    open_kwargs = {}
else:
    import configparser
    open_kwargs = {'encoding': 'utf-8'}

# Handle cython modules
try:
    # build_ext is only available when Cython is installed; otherwise fall
    # back to the pre-generated .c files (see extensions() below).
    from Cython.Distutils import build_ext
    cython_found = True
    cmdclass = {'build_ext': build_ext}
except ImportError:
    cython_found = False
    cmdclass = {}

# NOTE: keep in sync with MDAnalysis.__version__ in version.py
RELEASE = "0.16.1-dev0"

is_release = 'dev' not in RELEASE
if cython_found:
    # cython has to be >=0.16 to support cython.parallel
    import Cython
    from Cython.Build import cythonize
    from distutils.version import LooseVersion

    required_version = "0.16"

    if not LooseVersion(Cython.__version__) >= LooseVersion(required_version):
        # We don't necessarily die here. Maybe we already have
        # the cythonized '.c' files.
        print("Cython version {0} was found but won't be used: version {1} "
              "or greater is required because it offers a handy "
              "parallelization module".format(
                  Cython.__version__, required_version))
        cython_found = False
    # drop the helper names so they do not leak into module scope
    del Cython
    del LooseVersion
else:
    # Development (non-release) builds must regenerate the .c files, so a
    # missing Cython is fatal there.
    if not is_release:
        print("*** package: Cython not found ***")
        print("MDAnalysis requires cython for development builds")
        sys.exit(1)
class Config(object):
    """Config wrapper class to get build options

    This class looks for options in the environment variables and the
    'setup.cfg' file. The order how we look for an option is.

    1. Environment Variable
    2. set in 'setup.cfg'
    3. given default

    Environment variables should start with 'MDA_' and be all uppercase.
    Values passed to environment variables are checked (case-insensitively)
    for specific strings with boolean meaning: 'True' or '1' will cause `True`
    to be returned. '0' or 'False' cause `False` to be returned.
    """

    def __init__(self, fname='setup.cfg'):
        # only parse the config file when it exists; otherwise lookups fall
        # back to environment variables and the supplied default
        if os.path.exists(fname):
            self.config = configparser.SafeConfigParser()
            self.config.read(fname)

    def get(self, option_name, default=None):
        """Return the value of *option_name*; env var wins over setup.cfg."""
        environ_name = 'MDA_' + option_name.upper()
        if environ_name in os.environ:
            raw = os.environ[environ_name]
            folded = raw.upper()
            if folded in ('1', 'TRUE'):
                return True
            if folded in ('0', 'FALSE'):
                return False
            return raw
        try:
            return self.config.get('options', option_name)
        except configparser.NoOptionError:
            return default
class MDAExtension(Extension, object):
    """Derived class to cleanly handle setup-time (numpy) dependencies.

    Entries of ``include_dirs`` may be callables (e.g. the function returning
    numpy's include directory); they are only invoked the first time the
    attribute is read, i.e. after pip has had a chance to install numpy.
    """

    def __init__(self, *args, **kwargs):
        self._mda_include_dirs = []
        super(MDAExtension, self).__init__(*args, **kwargs)

    @property
    def include_dirs(self):
        # resolve lazily and cache the result of the first access
        if not self._mda_include_dirs:
            for entry in self._mda_include_dir_args:
                try:
                    value = entry()  # the numpy callable
                except TypeError:
                    value = entry    # a plain path string
                self._mda_include_dirs.append(value)
        return self._mda_include_dirs

    @include_dirs.setter
    def include_dirs(self, val):
        # defer evaluation: just remember the raw arguments
        self._mda_include_dir_args = val
def get_numpy_include():
    """Return numpy's include directory, safe to call at setup time.

    Works across numpy versions; exits the process with an explanatory
    message when numpy is not installed at all.
    """
    # setuptools forgets to unset numpy's setup flag and we get a crippled
    # version of it unless we do it ourselves.
    try:
        # Python 3 renamed ``__builtin__`` to ``builtins``; import whichever
        # exists under the Python 3 name (avoids a dependency on ``six``).
        import __builtin__ as builtins
    except ImportError:
        import builtins
    builtins.__NUMPY_SETUP__ = False
    try:
        import numpy as np
    except ImportError:
        print('*** package "numpy" not found ***')
        print('MDAnalysis requires a version of NumPy (>=1.10.4), even for setup.')
        print('Please get it from http://numpy.scipy.org/ or install it through '
              'your package manager.')
        sys.exit(-1)
    return np.get_include()
def hasfunction(cc, funcname, include=None, extra_postargs=None):
    """Return True if compiler *cc* can compile and link a call to *funcname*.

    *include* is an optional header spec (e.g. ``'<omp.h>'``); *extra_postargs*
    are extra compiler flags (e.g. ``['-fopenmp']``).
    """
    # From http://stackoverflow.com/questions/
    # 7018879/disabling-output-when-compiling-with-distutils
    tmpdir = tempfile.mkdtemp(prefix='hasfunction-')
    devnull = oldstderr = None
    try:
        try:
            # emit a minimal C program whose main() just calls funcname
            fname = os.path.join(tmpdir, 'funcname.c')
            with open(fname, 'w') as f:
                if include is not None:
                    f.write('#include {0!s}\n'.format(include))
                f.write('int main(void) {\n')
                f.write('    {0!s};\n'.format(funcname))
                f.write('}\n')
            # Redirect stderr to /dev/null to hide any error messages
            # from the compiler.
            # This will have to be changed if we ever have to check
            # for a function on Windows.
            devnull = open('/dev/null', 'w')
            oldstderr = os.dup(sys.stderr.fileno())
            os.dup2(devnull.fileno(), sys.stderr.fileno())
            objects = cc.compile([fname], output_dir=tmpdir,
                                 extra_postargs=extra_postargs)
            cc.link_executable(objects, os.path.join(tmpdir, "a.out"))
        except Exception:
            # any compile/link failure means the function is unavailable
            return False
        return True
    finally:
        # restore stderr and clean up the scratch directory in all cases
        if oldstderr is not None:
            os.dup2(oldstderr, sys.stderr.fileno())
        if devnull is not None:
            devnull.close()
        shutil.rmtree(tmpdir)
def detect_openmp():
    """Does this compiler support OpenMP parallelization?"""
    print("Attempting to autodetect OpenMP support... ", end="")
    compiler = new_compiler()
    compiler.add_library('gomp')
    # probe by compiling/linking a call into the OpenMP runtime
    hasopenmp = hasfunction(compiler, 'omp_get_num_threads()',
                            include='<omp.h>',
                            extra_postargs=['-fopenmp'])
    if hasopenmp:
        print("Compiler supports OpenMP")
    else:
        print("Did not detect OpenMP support.")
    return hasopenmp
def extensions(config):
    """Assemble the list of C/Cython extension modules to build.

    *config* is the Config wrapper above. Returns ``(extensions,
    cython_generated)`` where the second list names .c files produced by
    cythonize (useful for cleanup).
    """
    # dev installs must build their own cythonized files.
    use_cython = config.get('use_cython', default=not is_release)
    use_openmp = config.get('use_openmp', default=True)

    if config.get('debug_cflags', default=False):
        extra_compile_args = '\
            -std=c99 -pedantic -Wall -Wcast-align -Wcast-qual -Wpointer-arith \
            -Wchar-subscripts -Winline -Wnested-externs -Wbad-function-cast \
            -Wunreachable-code -Werror'
        define_macros = [('DEBUG', '1')]
    else:
        extra_compile_args = ''
        define_macros = []

    # Needed for large-file seeking under 32bit systems (for xtc/trr indexing
    # and access).
    largefile_macros = [
        ('_LARGEFILE_SOURCE', None),
        ('_LARGEFILE64_SOURCE', None),
        ('_FILE_OFFSET_BITS', '64')
    ]

    has_openmp = detect_openmp()
    if use_openmp and not has_openmp:
        print('No openmp compatible compiler found default to serial build.')
    # OpenMP flags/libs/macros only when both requested and available
    parallel_args = ['-fopenmp'] if has_openmp and use_openmp else []
    parallel_libraries = ['gomp'] if has_openmp and use_openmp else []
    parallel_macros = [('PARALLEL', None)] if has_openmp and use_openmp else []

    if use_cython:
        print('Will attempt to use Cython.')
        if not cython_found:
            print("Couldn't find a Cython installation. "
                  "Not recompiling cython extensions.")
            use_cython = False
    else:
        print('Will not attempt to use Cython.')

    # build from .pyx when cythonizing, from pre-generated .c otherwise
    source_suffix = '.pyx' if use_cython else '.c'

    # The callable is passed so that it is only evaluated at install time.
    include_dirs = [get_numpy_include]

    dcd = MDAExtension('coordinates._dcdmodule',
                       ['MDAnalysis/coordinates/src/dcd.c'],
                       include_dirs=include_dirs + ['MDAnalysis/coordinates/include'],
                       define_macros=define_macros,
                       extra_compile_args=extra_compile_args)
    dcd_time = MDAExtension('coordinates.dcdtimeseries',
                            ['MDAnalysis/coordinates/dcdtimeseries' + source_suffix],
                            include_dirs=include_dirs + ['MDAnalysis/coordinates/include'],
                            define_macros=define_macros,
                            extra_compile_args=extra_compile_args)
    distances = MDAExtension('lib.c_distances',
                             ['MDAnalysis/lib/c_distances' + source_suffix],
                             include_dirs=include_dirs + ['MDAnalysis/lib/include'],
                             libraries=['m'],
                             define_macros=define_macros,
                             extra_compile_args=extra_compile_args)
    distances_omp = MDAExtension('lib.c_distances_openmp',
                                 ['MDAnalysis/lib/c_distances_openmp' + source_suffix],
                                 include_dirs=include_dirs + ['MDAnalysis/lib/include'],
                                 libraries=['m'] + parallel_libraries,
                                 define_macros=define_macros + parallel_macros,
                                 extra_compile_args=parallel_args,
                                 extra_link_args=parallel_args)
    qcprot = MDAExtension('lib.qcprot',
                          ['MDAnalysis/lib/qcprot' + source_suffix],
                          include_dirs=include_dirs,
                          extra_compile_args=["-O3", "-ffast-math"])
    transformation = MDAExtension('lib._transformations',
                                  ['MDAnalysis/lib/src/transformations/transformations.c'],
                                  libraries=['m'],
                                  define_macros=define_macros,
                                  include_dirs=include_dirs,
                                  extra_compile_args=extra_compile_args)
    libmdaxdr = MDAExtension('lib.formats.libmdaxdr',
                             sources=['MDAnalysis/lib/formats/libmdaxdr' + source_suffix,
                                      'MDAnalysis/lib/formats/src/xdrfile.c',
                                      'MDAnalysis/lib/formats/src/xdrfile_xtc.c',
                                      'MDAnalysis/lib/formats/src/xdrfile_trr.c',
                                      'MDAnalysis/lib/formats/src/trr_seek.c',
                                      'MDAnalysis/lib/formats/src/xtc_seek.c',
                                      ],
                             include_dirs=include_dirs + ['MDAnalysis/lib/formats/include',
                                                          'MDAnalysis/lib/formats'],
                             define_macros=largefile_macros)
    util = MDAExtension('lib.formats.cython_util',
                        sources=['MDAnalysis/lib/formats/cython_util' + source_suffix],
                        include_dirs=include_dirs)
    encore_utils = MDAExtension('analysis.encore.cutils',
                                sources = ['MDAnalysis/analysis/encore/cutils' + source_suffix],
                                include_dirs = include_dirs,
                                extra_compile_args = ["-O3", "-ffast-math"])
    ap_clustering = MDAExtension('analysis.encore.clustering.affinityprop',
                                 sources = ['MDAnalysis/analysis/encore/clustering/affinityprop' + source_suffix, 'MDAnalysis/analysis/encore/clustering/src/ap.c'],
                                 include_dirs = include_dirs+['MDAnalysis/analysis/encore/clustering/include'],
                                 libraries=["m"],
                                 extra_compile_args=["-O3", "-ffast-math","-std=c99"])
    spe_dimred = MDAExtension('analysis.encore.dimensionality_reduction.stochasticproxembed',
                              sources = ['MDAnalysis/analysis/encore/dimensionality_reduction/stochasticproxembed' + source_suffix, 'MDAnalysis/analysis/encore/dimensionality_reduction/src/spe.c'],
                              include_dirs = include_dirs+['MDAnalysis/analysis/encore/dimensionality_reduction/include'],
                              libraries=["m"],
                              extra_compile_args=["-O3", "-ffast-math","-std=c99"])
    pre_exts = [dcd, dcd_time, distances, distances_omp, qcprot,
                transformation, libmdaxdr, util, encore_utils,
                ap_clustering, spe_dimred]

    cython_generated = []
    if use_cython:
        extensions = cythonize(pre_exts)
        # record the .c files cythonize produced (absent from the .pyx inputs)
        for pre_ext, post_ext in zip(pre_exts, extensions):
            for source in post_ext.sources:
                if source not in pre_ext.sources:
                    cython_generated.append(source)
    else:
        #Let's check early for missing .c files
        extensions = pre_exts
        for ext in extensions:
            for source in ext.sources:
                if not (os.path.isfile(source) and
                        os.access(source, os.R_OK)):
                    raise IOError("Source file '{}' not found. This might be "
                                  "caused by a missing Cython install, or a "
                                  "failed/disabled Cython build.".format(source))
    return extensions, cython_generated
def dynamic_author_list():
    """Generate __authors__ from AUTHORS

    This function generates authors.py that contains the list of the
    authors from the AUTHORS file. This avoids having that list maintained in
    several places. Note that AUTHORS is sorted chronologically while we want
    __authors__ in authors.py to be sorted alphabetically.

    The authors are written in AUTHORS as bullet points under the
    "Chronological list of authors" title.

    Raises
    ------
    IOError
        If the "Chronological list of authors" title is missing, or if the
        line following it is not a reStructuredText title underline.
    """
    authors = []
    with codecs.open('AUTHORS', encoding='utf-8') as infile:
        # An author is a bullet point under the title "Chronological list of
        # authors". We first want move the cursor down to the title of
        # interest.
        for line_no, line in enumerate(infile, start=1):
            # line[:-1] strips the trailing newline before comparing.
            if line[:-1] == "Chronological list of authors":
                break
        else:
            # for/else: this branch runs only when the loop was NOT broken.
            # If we did not break, it means we did not find the authors.
            raise IOError('EOF before the list of authors')
        # Skip the next line as it is the title underlining
        line = next(infile)
        line_no += 1
        if line[:4] != '----':
            raise IOError('Unexpected content on line {0}, '
                          'should be a string of "-".'.format(line_no))
        # Add each bullet point as an author until the next title underlining
        for line in infile:
            if line[:4] in ('----', '====', '~~~~'):
                # The previous line was a title, hopefully it did not start as
                # a bullet point so it got ignored. Since we hit a title, we
                # are done reading the list of authors.
                break
            elif line.strip()[:2] == '- ':
                # This is a bullet point, so it should be an author name.
                name = line.strip()[2:].strip()
                authors.append(name)
    # So far, the list of authors is sorted chronologically. We want it
    # sorted alphabetically of the last name.
    authors.sort(key=lambda name: name.split()[-1])
    # Move Naveen and Elizabeth first, and Oliver last.
    authors.remove('Naveen Michaud-Agrawal')
    authors.remove('Elizabeth J. Denning')
    authors.remove('Oliver Beckstein')
    authors = (['Naveen Michaud-Agrawal', 'Elizabeth J. Denning']
               + authors + ['Oliver Beckstein'])
    # Write the authors.py file.
    out_path = 'MDAnalysis/authors.py'
    with codecs.open(out_path, 'w', encoding='utf-8') as outfile:
        # Write the header
        header = '''\
#-*- coding:utf-8 -*-
# This file is generated from the AUTHORS file during the installation process.
# Do not edit it as your changes will be overwritten.
'''
        print(header, file=outfile)
        # Write the list of authors as a python list
        template = u'__authors__ = [\n{}\n]'
        author_string = u',\n'.join(u' u"{}"'.format(name)
                                    for name in authors)
        print(template.format(author_string), file=outfile)
if __name__ == '__main__':
    # Regenerate MDAnalysis/authors.py from the AUTHORS file; failure is not
    # fatal (e.g. when AUTHORS is absent in an sdist), we just warn.
    try:
        dynamic_author_list()
    except (OSError, IOError):
        warnings.warn('Cannot write the list of authors.')

    # Long description for PyPI comes from SUMMARY.txt.
    with open("SUMMARY.txt") as summary:
        LONG_DESCRIPTION = summary.read()
    CLASSIFIERS = [
        'Development Status :: 4 - Beta',
        'Environment :: Console',
        'Intended Audience :: Science/Research',
        'License :: OSI Approved :: GNU General Public License (GPL)',
        'Operating System :: POSIX',
        'Operating System :: MacOS :: MacOS X',
        'Programming Language :: Python',
        'Programming Language :: C',
        'Topic :: Scientific/Engineering :: Bio-Informatics',
        'Topic :: Scientific/Engineering :: Chemistry',
        'Topic :: Software Development :: Libraries :: Python Modules',
    ]

    # Build the list of C/Cython extension modules (see extensions() above).
    config = Config()
    exts, cythonfiles = extensions(config)

    setup(name='MDAnalysis',
          version=RELEASE,
          description='An object-oriented toolkit to analyze molecular dynamics '
          'trajectories generated by CHARMM, Gromacs, NAMD, LAMMPS, or Amber.',
          long_description=LONG_DESCRIPTION,
          author='Naveen Michaud-Agrawal',
          author_email='naveen.michaudagrawal@gmail.com',
          maintainer='Richard Gowers',
          maintainer_email='mdnalysis-discussion@googlegroups.com',
          url='http://www.mdanalysis.org',
          download_url='https://github.com/MDAnalysis/mdanalysis/releases',
          provides=['MDAnalysis'],
          license='GPL 2',
          packages=find_packages(),
          package_dir={'MDAnalysis': 'MDAnalysis'},
          ext_package='MDAnalysis',
          ext_modules=exts,
          classifiers=CLASSIFIERS,
          cmdclass=cmdclass,
          requires=['numpy (>=1.10.4)', 'biopython', 'mmtf (>=1.0.0)',
                    'networkx (>=1.0)', 'GridDataFormats (>=0.3.2)', 'joblib'],
          # all standard requirements are available through PyPi and
          # typically can be installed without difficulties through setuptools
          setup_requires=[
              'numpy>=1.9.3',
          ],
          install_requires=[
              'numpy>=1.10.4',
              'biopython>=1.59',
              'networkx>=1.0',
              'GridDataFormats>=0.3.2',
              'six>=1.4.0',
              'mmtf-python>=1.0.0',
              'joblib',
          ],
          # extras can be difficult to install through setuptools and/or
          # you might prefer to use the version available through your
          # packaging system
          extras_require={
              'AMBER': ['netCDF4>=1.0'],  # for AMBER netcdf, also needs HDF5
                                          # and netcdf-4
              'analysis': [
                  'matplotlib>=1.5.1',
                  'scipy',
                  'seaborn',  # for annotated heat map and nearest neighbor
                              # plotting in PSA
                  'sklearn',  # For clustering and dimensionality reduction
                              # functionality in encore
              ],
          },
          test_suite="MDAnalysisTests",
          tests_require=[
              'nose>=1.3.7',
              'MDAnalysisTests=={0}'.format(RELEASE),  # same as this release!
          ],
          zip_safe=False,  # as a zipped egg the *.so files are not found (at
                           # least in Ubuntu/Linux)
          )

    # Releases keep their cythonized stuff for shipping.
    if not config.get('keep_cythonized', default=is_release):
        for cythonized in cythonfiles:
            try:
                os.unlink(cythonized)
            except OSError as err:
                print("Warning: failed to delete cythonized file {0}: {1}. "
                      "Moving on.".format(cythonized, err.strerror))
|
kain88-de/mdanalysis
|
package/setup.py
|
Python
|
gpl-2.0
| 22,850
|
[
"Amber",
"Biopython",
"CHARMM",
"Gromacs",
"LAMMPS",
"MDAnalysis",
"NAMD",
"NetCDF"
] |
cd129645f77b4a8ef5ae58eef59bfe9164f3e45ec59de671c6f227be47021d8f
|
from functools import wraps
import os
import random
import re
import pytest
from flask.testing import FlaskClient
from flask import session
os.environ['PYDRILL_CONFIG'] = os.path.join(os.path.dirname(__file__), 'pydrill.cfg')
from pydrill import app, db, redis_store
from pydrill import models
from pydrill.jinja_env import get_score_text
from pydrill.utils import User, TEAM_APPLE, TEAM_HN, TEAM_LINUX
# Question ids used throughout the tests, ordered by difficulty.
EASY_Q = 'average'
MEDIUM_Q = 'static-decorator'
HARD_Q = 'mro'

# WSGI environ keys for the request headers faked by the test clients.
REFERER = 'HTTP_REFERER'
USER_AGENT = 'HTTP_USER_AGENT'

# Realistic browser user-agent strings: one Mac, one Linux.
MAC_USER_AGENT = ('Mozilla/5.0 (Macintosh; Intel Mac OS X 10_10_1) AppleWebKit/537.36 '
                  '(KHTML, like Gecko) Chrome/41.0.2227.1 Safari/537.36')
LINUX_USER_AGENT = ('Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 '
                    '(KHTML, like Gecko) Chrome/41.0.2227.0 Safari/537.36')
# TODO: switch to app.test_client when Flask 1.0 is ready
def new_test_client(environ_base, *args, **kwargs):
"""Copy-pasted from Flask.test_client because we need to pass environ_base in .get() and .post()
which test_client can do only from version 1.0 which is not production ready yet.
"""
return Client(environ_base, app, app.response_class, *args, **kwargs)
class Client(FlaskClient):
    """Flask test client that injects a fixed WSGI environ base into every
    request, plus small helpers for driving the quiz endpoints.
    """

    def __init__(self, environ_base, *args, **kwargs):
        # Name-mangled so subclasses cannot clash with it accidentally.
        self.__environ_base = environ_base
        super(Client, self).__init__(*args, **kwargs)

    def get(self, *args, **kwargs):
        parent_get = super(Client, self).get
        return parent_get(*args, environ_base=self.__environ_base, **kwargs)

    def post(self, *args, **kwargs):
        parent_post = super(Client, self).post
        return parent_post(*args, environ_base=self.__environ_base, **kwargs)

    def checked_get(self, path):
        # GET that must succeed.
        response = self.get(path)
        assert response.status_code == 200
        return response

    def checked_post(self, path):
        # POST that must redirect.
        response = self.post(path)
        assert response.status_code == 302
        return response

    def ask_me(self, question_id):
        return self.checked_get(make_path('ask', question_id))

    def ask_me_without_seed(self, question_id):
        # Unchecked on purpose: callers assert on the redirect themselves.
        return self.get(make_path_without_seed('ask', question_id))

    def explain_to_me(self, question_id):
        question = models.Question.query.get(question_id)
        chosen = get_any_answer(question)
        return self.checked_get(make_path('explain', question_id, chosen.id))

    def answer(self, question_id, is_correct=None):
        if is_correct is None:
            is_correct = random_boolean()
        # Visit the question first, as a real user would.
        self.ask_me(question_id)
        question = models.Question.query.get(question_id)
        chosen = get_answer(question, is_correct)
        return self.checked_post(make_path('answer', question_id, chosen.id))

    def answer_correct(self, question_id):
        return self.answer(question_id, is_correct=True)

    def answer_wrong(self, question_id):
        return self.answer(question_id, is_correct=False)

    def score(self):
        return self.checked_get('/score/')
def make_path(*path_parts):
    """Build a URL path from *path_parts* with a random seed (1-100) appended."""
    seed = random.randint(1, 100)
    return make_path_without_seed(*(path_parts + (seed,)))
def make_path_without_seed(*path_parts):
    """Join *path_parts* into a URL path with leading and trailing slashes.

    E.g. ('ask', 'average', 42) -> '/ask/average/42/'.

    Uses a list comprehension instead of ``map`` so the helper works on both
    Python 2 and Python 3: on Python 3 ``map`` returns an iterator, which
    cannot be concatenated to a list and would raise TypeError here.
    """
    return '/'.join([''] + [str(part) for part in path_parts] + [''])
@pytest.fixture(autouse=True)
def flush_redis_db():
    # Every test starts with an empty redis database.
    redis_store.flushdb()
# this fixture runs once
@pytest.fixture(autouse=True, scope='session')
def create_sql_db():
    """Recreate the SQL schema and load the three test questions.

    Session-scoped, so it runs only once for the whole test run.
    """
    db.drop_all()
    db.create_all()
    # Question definitions live as YAML files in <repo root>/questions/.
    questions_dir = os.path.join(os.path.dirname(os.path.dirname(__file__)), 'questions')
    for question in [EASY_Q, MEDIUM_Q, HARD_Q]:
        models.read_question(os.path.join(questions_dir, question + '.yml'))
    db.session.commit()
@pytest.fixture(autouse=True)
def run_app_in_testing_mode():
    # TESTING makes Flask propagate exceptions instead of rendering 500 pages.
    app.config['TESTING'] = True
def client_fixture(fn):
    """Turn *fn* (a zero-argument callable returning a test client) into a
    pytest yield-fixture that enters the client's context manager.
    """
    @pytest.yield_fixture
    @wraps(fn)  # keeps fn's name, so the fixture name matches
    def fixture():
        client_cm = fn()
        with client_cm as client:
            yield client
    return fixture
@client_fixture
def steve():
    # Mac browser; the referer is deliberately not a recognisable URL.
    return new_test_client({USER_AGENT: MAC_USER_AGENT,
                            REFERER: 'parse this'})
@client_fixture
def paul():
    # Linux browser arriving from Hacker News (ends up in the Linux and HN
    # teams, see test_user_teams).
    return new_test_client({USER_AGENT: LINUX_USER_AGENT,
                            REFERER: 'https://news.ycombinator.com/item?id=test'})
@client_fixture
def tim():
    # Mac browser with no referer at all.
    return new_test_client({USER_AGENT: MAC_USER_AGENT})
def get_user():
    # Rebuild a User object from the Flask session left by the last request.
    return User(**session['user'])
def test_user_id(paul):
    """A user id looks like a uuid4 and is stable across visits."""
    paul.ask_me(EASY_Q)
    first_visit_user = get_user()
    assert len(first_visit_user.id) == 36  # length of str(uuid4) is 36
    paul.ask_me(EASY_Q)
    # The id assigned on the first visit is kept afterwards.
    assert get_user().id == first_visit_user.id
def test_new_user_score(paul):
    # A fresh user starts with a score of zero.
    paul.ask_me(EASY_Q)
    assert get_user().score == 0
def test_user_teams(paul):
    # Linux user agent + HN referer put paul into both teams.
    paul.ask_me(EASY_Q)
    assert_same_items(get_user().teams, [TEAM_LINUX, TEAM_HN])
def test_questions():
    # The session fixture loads exactly EASY_Q, MEDIUM_Q and HARD_Q.
    assert models.Question.query.count() == 3
def test_only_first_answer_can_increase_score(steve):
    # A correct answer given after a wrong one must not score.
    steve.answer_wrong(EASY_Q)
    steve.answer_correct(EASY_Q)
    assert_team_score(TEAM_APPLE, score_sum=0)
def test_cant_increase_score_twice(steve):
    steve.answer_correct(EASY_Q)
    assert_team_score(TEAM_APPLE, score_sum=1)
    # Answering the same question correctly again must not score a second time.
    steve.answer_correct(EASY_Q)
    assert_team_score(TEAM_APPLE, score_sum=1)
def matches_any_ask_path(*question_ids):
    """Regex matching the /ask/<question id>/<seed>/ path of any given id."""
    return '|'.join(r'/ask/{}/(\d+)/$'.format(qid) for qid in question_ids)
def test_answer_redirects(steve):
    # After answering, the user is redirected to a different question.
    rv = steve.answer(EASY_Q)
    assert redirects_to_question(rv, MEDIUM_Q) or redirects_to_question(rv, HARD_Q)
    # With EASY_Q and MEDIUM_Q answered, HARD_Q is the only unanswered target.
    rv = steve.answer(MEDIUM_Q)
    assert redirects_to_question(rv, HARD_Q)
def redirects_to_question(rv, question_id):
    """Truthy match object if *rv* redirects to *question_id*'s ask page."""
    return re.search(matches_any_ask_path(question_id), rv.location)
def random_boolean():
    """Return True or False, chosen uniformly at random."""
    outcomes = [True, False]
    return random.choice(outcomes)
def assert_same_items(xs, ys):
    """Assert that *xs* and *ys* contain the same items, ignoring order."""
    left, right = sorted(xs), sorted(ys)
    assert left == right
def get_answer(question, is_correct):
    # First answer of *question* with the requested correctness flag.
    return question.answers.filter_by(is_correct=is_correct).first()
def get_any_answer(question):
    """Pick a random answer of *question*, correct or not."""
    return random.choice(list(question.answers))
def get_correct_answer(question):
    # Convenience wrapper: the correct answer of *question*.
    return get_answer(question, is_correct=True)
def test_ask_without_seed(paul):
    # A seedless ask URL redirects to the same question with a seed appended.
    rv = paul.ask_me_without_seed(EASY_Q)
    assert rv.status_code == 302
    assert redirects_to_question(rv, EASY_Q)
def test_team_scores(steve, paul, tim):
    # User counts and score sums are tracked per team.
    steve.answer_correct(EASY_Q)
    assert_team_score(TEAM_APPLE, num_users=1, score_sum=1)
    tim.answer_wrong(EASY_Q)
    assert_team_score(TEAM_APPLE, num_users=2, score_sum=1)
    paul.answer_correct(EASY_Q)
    assert_team_score(TEAM_APPLE, num_users=2, score_sum=1)  # paul is not in Apple team
    assert_team_score(TEAM_LINUX, num_users=1, score_sum=1)
    assert_team_score(TEAM_HN, num_users=1, score_sum=1)
    # The medium question adds 2 points to the team sum.
    steve.answer_correct(MEDIUM_Q)
    assert_team_score(TEAM_APPLE, num_users=2, score_sum=3)
def assert_team_score(team, **expected):
    """Check selected fields of the 'team:<name>' hash stored in redis.

    Only the keys named in *expected* are compared; values are cast to int
    because redis returns them as strings.
    """
    team_score = redis_store.hgetall('team:{}'.format(team))
    # .items() instead of the Python-2-only dict.viewitems(): iteration
    # behaviour is identical here and the helper also runs on Python 3.
    score = {key: int(value) for key, value in team_score.items()}
    for key, value in expected.items():
        assert score[key] == value
def test_never_ask_the_same_question_twice_in_a_row(steve):
    # we need to answer every question, because otherwise
    # steve.answer(EASY_Q) will always redirect to the unanswered question.
    # We want to test that even if every question is answered,
    # then we don't ask the same question twice in row anyway.
    for question in models.Question.query.all():
        steve.answer(question.id)
    rv = steve.answer(EASY_Q)
    assert not redirects_to_question(rv, EASY_Q)
@pytest.mark.parametrize('rank, num_users, expected_text', [
    (1, 1, 'top 1%'),
    (1, 2, 'top 1%'),
    (2, 2, 'bottom 50%'),
    (1, 3, 'top 1%'),
    (2, 3, 'top 50%'),
    (3, 3, 'bottom 33%'),
])
def test_get_score_text(rank, num_users, expected_text):
    # get_score_text renders "top X%" / "bottom X%" from rank and user count.
    assert get_score_text(rank, num_users) == expected_text
def test_ask_question_rendering(steve):
    # The question page renders syntax-highlighted code.
    rv = steve.ask_me(EASY_Q)
    # checking that '... / 2' is highlighted
    assert '<span class="o">/</span> <span class="mi">2</span>' in rv.data
def test_explain_question_rendering(steve):
    # The explanation page contains the question's explanatory snippet.
    rv = steve.explain_to_me(EASY_Q)
    assert '__future__' in rv.data  # 'from __future__ import division' part
def test_score_rendering(steve):
    # The score page shows the user's score and team.
    steve.answer_correct(EASY_Q)
    rv = steve.score()
    assert_has_score(rv, 1)
    assert '{} is your team'.format(TEAM_APPLE) in rv.data
def test_score_top_text(steve, paul):
    steve.answer_correct(EASY_Q)
    assert "You're in the top 1%" in steve.score().data
    # paul now ranks above steve, pushing steve into the bottom half.
    paul.answer_correct(MEDIUM_Q)
    assert "You're in the top 1%" in paul.score().data
    assert "You're in the bottom 50%" in steve.score().data
def test_score_during_ask(steve):
    # The current score is already shown on the ask page itself.
    rv = steve.ask_me(EASY_Q)
    assert_has_score(rv, 0)
# TODO: don't test markup with string comparisons, use css selectors
def assert_has_score(rv, expected_score):
    """Assert the rendered page shows *expected_score* in the score markup."""
    # Compare against the lower-cased page so markup case changes don't matter.
    needle = 'score: <strong>{:d}</strong>'.format(expected_score)
    assert needle in rv.data.lower()
|
alexandershov/pydrill
|
tests/test_pydrill.py
|
Python
|
mit
| 9,111
|
[
"VisIt"
] |
ccc4d23540f0c085a11f987b90e8afdb83f98b009dcdb64d2e6c53fb6cdb1ec1
|
# Please do not change the configuration file quisk_conf_defaults.py.
# Instead copy one of the other quisk_conf_*.py files to your own
# .quisk_conf.py and make changes there. For a normal sound card
# configuration, copy quisk_conf_model.py to your .quisk_conf.py.
#
# PLEASE DO **NOT** COPY THIS FILE AND USE IT AS A START FOR YOUR CONFIGURATION FILE!
# YOUR CONFIGURATION FILE SHOULD ONLY HAVE LINES THAT DIFFER FROM THIS FILE. QUISK
# IMPORTS THIS FILE FIRST, AND THEN YOUR CONFIG FILE OVERWRITES A FEW ITEMS SUCH AS
# SOUND CARD NAMES.
#
# Quisk imports quisk_conf_defaults to set its configuration.
# If you have a configuration file, it then overwrites the defaults
# with your parameters. Your configuration file must be named
# ~/.quisk_conf.py, where "~" means your home directory. Or
# you may specify a different name with the -c or --config command
# line option. Try --help. Check the config screen to make sure that
# the correct configuration file is in use.
#
# The Quisk receiver can use a high quality sound card for capture and playback,
# or it can use the SDR-IQ by RfSpace for capture and a lower quality
# sound card for playback. Quisk can also be used as a panadapter.
# Quisk can control some rigs. See quisk_hardware_*.py. If you have a rig
# to control, copy one of the quisk_hardware_*.py files to your own file named
# quisk_hardware.py, and edit that file. If there is no quisk_hardware.py, then
# quisk_hardware_model.py is used instead.
import sys
import wx
# Import the default Hardware module. You can import a different module in
# your .quisk_conf.py.
import quisk_hardware_model as quisk_hardware
# Module for additional widgets (advanced usage). See n2adr/quisk_widgets.py for an example.
# import n2adr.quisk_widgets as quisk_widgets
quisk_widgets = None
# Select the default screen when Quisk starts:
default_screen = 'Graph'
#default_screen = 'WFall'
#default_screen = 'Config'
# The width of the graph data as a fraction of the total screen size. This
# will be adjusted by Quisk to accommodate preferred FFT sizes. It can
# not be changed once Quisk starts. It can not be made too small because
# of the space needed for all the buttons.
graph_width = 0.8
# The graph_width parameter controls the width of Quisk unless a larger width is forced.
# If the Quisk screen is too wide or the buttons are too crowded, perhaps due to a low screen
# resolution, you can reduce the font sizes. Thanks to Christof, DJ4CM.
button_font_size = 10
# button_font_size = 9
# button_font_size = 8
default_font_size = 12
status_font_size = 14
config_font_size = 14
graph_font_size = 10
favorites_font_size = 14
# This controls the speed of the graph peak hold. Lower numbers give a longer time constant.
graph_peak_hold_1 = 0.25
graph_peak_hold_2 = 0.10
# Select the default mode when Quisk starts (overruled by persistent_state):
# default_mode = 'FM'
default_mode = 'USB'
# Select the way the waterfall screen scrolls:
# waterfall_scroll_mode = 0 # scroll at a constant rate.
waterfall_scroll_mode = 1 # scroll faster at the top so that a new signal appears sooner.
# Select the initial size in pixels (minimum 1) of the graph at the top of the waterfall.
waterfall_graph_size = 80
# These are the initial values for the Y-scale and Y-zero sliders for each screen.
# The sliders go from zero to 160.
graph_y_scale = 100
graph_y_zero = 0
waterfall_y_scale = 80 # Initial value; new values are saved for each band
waterfall_y_zero = 40 # Initial value; new values are saved for each band
waterfall_graph_y_scale = 100
waterfall_graph_y_zero = 60
scope_y_scale = 80
scope_y_zero = 0 # Currently doesn't do anything
filter_y_scale = 90
filter_y_zero = 0
# Quisk can save its current state in a file on exit, and restore it when you restart.
# State includes band, frequency and mode, but not every item of state (not screen).
# The file is .quisk_init.pkl in the same directory as your config file. If this file
# becomes corrupted, just delete it and it will be reconstructed.
#persistent_state = False
persistent_state = True
# The quisk config screen has a "favorites" tab where you can enter the frequencies and modes of
# stations. The data is stored in this file; default quisk_favorites.txt in the directory
# where your config file is located.
favorites_file_path = ''
# These control the typeface used in fonts. The objective is to choose an available font that
# offers good support for the Unicode characters used on buttons and windows.
if sys.platform == "win32":
quisk_typeface = 'Lucida Sans Unicode' # Typeface for quisk buttons and text
#quisk_typeface = 'Arial Unicode MS'
else:
quisk_typeface = '' # Use a default typeface
# This controls whether the "U" symbols or the "T" symbols are used on buttons and windows.
# You can change the "U" and "T" symbols to anything you want; either Unicode or text.
use_unicode_symbols = True # Use the symbols starting with "U"
#use_unicode_symbols = False # Use the symbols starting with "T"
# These are the Unicode symbols used in the station window. Thanks to Christof, DJ4CM.
Usym_stat_fav = unichr(0x2605) # Symbol for favorites, a star
Usym_stat_mem = unichr(0x24C2) # Symbol for memory stations, an "M" in a circle
#Usym_stat_dx = unichr(0x2691) # Symbol for DX Cluster stations, a flag
Usym_stat_dx = unichr(0x25B2) # Symbol for DX Cluster stations, a Delta
# These are the text symbols used in the station window.
Tsym_stat_fav = 'F'
Tsym_stat_mem = 'M'
Tsym_stat_dx = 'Dx'
# These are the Unicode symbols to display on buttons. Thanks to Christof, DJ4CM.
Ubtn_text_range_dn = unichr(0x2190) # Down band, left arrow
Ubtn_text_range_up = unichr(0x2192) # Up band, right arrow
Ubtn_text_play = unichr(0x25BA) # Play button
Ubtn_text_rec = unichr(0x25CF) # Record button, a filled dot
Ubtn_text_file_rec = "File " + unichr(0x25CF) # Record to file
Ubtn_text_fav_add = unichr(0x2605) + unichr(0x2191) # Add to favorites
Ubtn_text_fav_recall = unichr(0x2605) + unichr(0x2193) # Jump to favorites screen
Ubtn_text_mem_add = unichr(0x24C2) + unichr(0x2191) # Add to memory
Ubtn_text_mem_next = unichr(0x24C2) + unichr(0x27B2) # Next memory
Ubtn_text_mem_del = unichr(0x24C2) + unichr(0x2613) # Delete from memory
# These are the text symbols to display on buttons.
Tbtn_text_range_dn = "Dn"
Tbtn_text_range_up = "Up"
Tbtn_text_play = "Play"
Tbtn_text_rec = "Rec"
Tbtn_text_file_rec = "File Rec"
Tbtn_text_fav_add = ">Fav"
Tbtn_text_fav_recall = "Fav"
Tbtn_text_mem_add = "Save"
Tbtn_text_mem_next = "Next"
Tbtn_text_mem_del = "Del"
# Station info display configuration, thanks to DJ4CM. This displays a window of station names
# below the graph frequency (X axis).
station_display_lines = 1 # number of station info display lines below the graph X axis
#station_display_lines = 0
#station_display_lines = 3
# DX cluster telnet login data, thanks to DJ4CM. Must have station_display_lines > 0.
dxClHost = '' # host name for telnet server, null string to disable
#dxClHost = 'example.host.net'
dxClPort = 7373 # port number for telnet
user_call_sign = 'CALL' # your radio station call sign; probably needed for telnet login
dxClPassword = None # telnet password
dxClExpireTime = 20 # Time in minutes until DX Cluster entries will be removed
# This converts from dB to S-units for the S-meter (it is in S-units).
correct_smeter = 15.5
# This is the fraction of spectrum to display from zero to one. It is needed if
# the passband edges are not valid. Use 0.85 for the SDR-IQ.
display_fraction = 1.00
# Quisk has record and playback buttons to save radio sound. If there is no more room for
# sound, the old sound is discarded and the most recent sound is retained. This controls
# the maximum time of sound storage in minutes for this recorded audio, and also the record
# time for the Tx Audio test screen.
#
# If you want to transmit recorded sound, then mic_sample_rate must equal playback_rate and both must be 48000.
max_record_minutes = 1.00
# Quisk can save recorded sound and samples to files. There is a button on the Config/Config screen
# to set the file names. You can set the initial names with these variables:
file_name_audio = ""
#file_name_audio = "/home/jim/tmp/qaudio.wav"
file_name_samples = ""
#file_name_samples = "C:/tmp/qsamples.wav"
# Thanks to Steve Murphy, KB8RWQ for the patch adding additional color control.
# Thanks to Christof, DJ4CM for the patch adding additional color control.
# Define colors used by all widgets in wxPython colour format:
color_bg = 'light steel blue' # Lower screen background
color_bg_txt = 'black' # Lower screen text color
color_graph = 'lemonchiffon1' # Graph background
color_config2 = 'lemonchiffon3' # color in tab row of config screen
color_gl = 'grey' # Lines on the graph
color_graphticks = 'black' # Graph ticks
color_graphline = '#005500' # graph data line color
color_graphlabels = '#555555' # graph label color
color_btn = 'steelblue2' # button color
color_check_btn = 'yellow2' # color of a check button when it is checked
color_cycle_btn = 'goldenrod3' # color of a cycle button when it is checked
color_adjust_btn = 'orange3' # color of an adjustable button when it is checked
color_test = 'hot pink' # color of a button used for test (turn off for tx)
color_freq = 'lightcyan1' # background color of frequency and s-meter
color_freq_txt = 'black' # text color of frequency display
color_entry = color_freq # frequency entry box
color_entry_txt = 'black' # text color of entry box
color_enable = 'black' # text color for an enabled button
color_disable = 'white' # text color for a disabled button
color_bandwidth = 'lemonchiffon2' # color for bandwidth display; thanks to WB4JFI
#color_bandwidth = 'lemonchiffon3'
color_txline = 'red' # vertical line color for tx in graph
color_rxline = 'green' # vertical line color for rx in graph
color_notebook_txt = 'black' # text of notebook labels
# This is a dark color scheme designed by Steve Murphy, KB8RWQ.
#color_bg = '#111111'
#color_bg_txt = 'white'
#color_graph = '#111111'
#color_config2 = color_bg
#color_gl = '#555555'
#color_graphticks = '#DDDDDD'
#color_graphline = '#00AA00'
#color_graphlabels = '#FFFFFF'
#color_btn = '#666666'
#color_check_btn = '#996699'
#color_cycle_btn = '#666699'
#color_adjust_btn = '#669999'
#color_test = 'hot pink'
#color_freq = '#333333'
#color_freq_txt = 'white'
#color_entry = color_freq
#color_entry_txt = color_freq_txt
#color_enable = 'white'
#color_disable = 'black'
#color_bandwidth = '#333333'
#color_txline = 'red'
#color_rxline = 'green'
#color_notebook_txt = 'white'
filter_display = 1 # Display the filter bandwidth on the graph screen; 0 or 1; thanks to WB4JFI
# Quisk can operate in Split mode and can receive both the Tx and Rx frequency signals. This option
# controls where the sound goes. You may need to try 1 or 2 depending on your wiring.
#split_rxtx = 1 # Play both signals in stereo with the higher frequency on the real channel.
split_rxtx = 2 # Play both signals in stereo with the lower frequency on the real channel.
#split_rxtx = 3 # Play the receive signal on both channels.
#split_rxtx = 4 # Play the transmit signal on both channels.
# These are the palettes for the waterfall. The one used is named waterfallPalette,
# so to use a different one, overwrite this name in your .quisk_conf.py.
waterfallPalette = (
( 0, 0, 0, 0),
( 36, 85, 0, 255),
( 73, 153, 0, 255),
(109, 255, 0, 128),
(146, 255, 119, 0),
(182, 85, 255, 100),
(219, 255, 255, 0),
(255, 255, 255, 255)
)
digipanWaterfallPalette = (
( 0, 0, 0, 0),
( 32, 0, 0, 62),
( 64, 0, 0, 126),
( 96, 145, 142, 96),
(128, 181, 184, 48),
(160, 223, 226, 105),
(192, 254, 254, 4),
(255, 255, 58, 0)
)
# On Linux, Quisk can access your sound card through ALSA, PortAudio or PulseAudio.
# On Windows, Quisk uses DirectX for sound card access.
# In PortAudio, soundcards have an index number 0, 1, 2, ... and a name.
# The name can be something like "HDA NVidia: AD198x Analog (hw:0,0)" or
# "surround41". In Quisk, all PortAudio device names start with "portaudio".
# A device name like "portaudio#6" directly specifies the index. A name like
# "portaudio:text" means to search for "text" in all available devices. And
# there is a default device "portaudiodefault". So these portaudio names are useful:
#name_of_sound_capt = "portaudio:(hw:0,0)" # First sound card
#name_of_sound_capt = "portaudio:(hw:1,0)" # Second sound card, etc.
#name_of_sound_capt = "portaudio#1" # Directly specified index
#name_of_sound_capt = "portaudiodefault" # May give poor performance on capture
# In ALSA, soundcards have these names. The "hw" devices are the raw
# hardware devices, and should be used for soundcard capture.
#name_of_sound_capt = "hw:0" # First sound card
#name_of_sound_capt = "hw:1" # Second sound card, etc.
#name_of_sound_capt = "plughw"
#name_of_sound_capt = "plughw:1"
#name_of_sound_capt = "default"
# It is usually best to use ALSA names because they provide minimum latency. But
# you may need to use PulseAudio to connect to other programs such as wsjt-x.
# Pulseaudio support was added by Philip G. Lee. Many thanks!
# For PulseAudio devices, use the name "pulse:name" and connect the streams
# to your hardware devices using a PulseAudio control program. The name "pulse"
# alone refers to the "default" device. The PulseAudio names are quite long;
# for example "alsa_output.pci-0000_00_1b.0.analog-stereo". Look on the screen
# Config/Sound to see the device names. There is a description, a PulseAudio name,
# and for ALSA devices, the ALSA name. An example is:
#
# CM106 Like Sound Device Analog Stereo
# alsa_output.usb-0d8c_USB_Sound_Device-00-Device.analog-stereo
# USB Sound Device USB Audio (hw:1,0)
#
# Instead of the long PulseAudio name, you can enter a substring of any of
# these three strings.
#
# Use the default pulse device for radio sound:
#name_of_sound_play = "pulse"
# Use a PulseAudio name for radio sound:
#name_of_sound_play = "pulse:alsa_output.usb-0d8c_USB_Sound_Device-00-Device.analog-stereo"
# Abbreviate the PulseAudio name:
#name_of_sound_play = "pulse:alsa_output.usb"
# Another abbreviation:
#name_of_sound_play = "pulse:CM106"
# This controls whether the PulseAudio devices are shown in the device list.
show_pulse_audio_devices = True
# Normally you would capture and play on the same soundcard to avoid problems with the
# two clocks running at slightly different rates. But you can define name_of_sound_play
# to play back on a different device. Define this as the empty string "" to turn off
# play (for a panadapter).
#
# For the SDR-IQ the soundcard is not used for capture; it only plays back audio.
# Quisk has a custom decimation scheme for each sample rate. The allowable sample rates
# are the four SDR-IQ rates plus 24, 48, 96, 192, 240, 384, 480, and 960 ksps. Other rates
# can be added.
# Configuration for soundcard capture and playback
use_sdriq = 0 # Get ADC samples from SDR-IQ is not used
use_rx_udp = 0 # Get ADC samples from UDP is not used
sample_rate = 48000 # ADC hardware sample rate in Hertz
if sys.platform == "win32":
name_of_sound_capt = "Primary"
else:
name_of_sound_capt = "hw:0" # Name of soundcard capture hardware device.
name_of_sound_play = name_of_sound_capt # Use the same device for play back
#name_of_sound_play = "" # Panadapter: Do not play
channel_i = 0 # Soundcard index of in-phase channel: 0, 1, 2, ...
channel_q = 1 # Soundcard index of quadrature channel: 0, 1, 2, ...
# Thanks to Franco Spinelli for this fix:
# The H101 hardware using the PCM2904 chip has a one-sample delay between
# channels, which must be fixed in software. If you have this problem,
# change channel_delay to either channel_i or channel_q. Use -1 for no delay.
channel_delay = -1
# This is for mic playback (SoftRock transmit):
tx_channel_delay = -1
# If you use a soundcard with Ethernet control of the VFO, set these parameters:
rx_ip = "" # Receiver IP address for VFO control
# If you use an SDR-IQ for capture, see the sample config file quisk_conf_sdriq.py.
# For the N2ADR 2010 transceiver described in QEX, and for the improved version HiQSDR,
# see the sample config file in the hiqsdr package directory, and set these:
# tx_level sets the transmit level 0 to 255 for each band. The None band is the default.
# The config screen has a slider 0 to 100% so you can reduce the transmit power. The sliders
# only appear if your hardware defines the method SetTxLevel(). The hardware only supports a
# power adjustment range of 20 dB, so zero is still a small amount of power.
tx_level = {None:120, '60':110} # Adjust your power for each band
# Digital modes reduce power by the percentage on the config screen.
# The maximum value of the slider is digital_tx_level.
digital_tx_level = 20 # The slider on the config screen is 20% maximum.
#
# If you use the HiQSDR hardware, set these:
# The HiQSDR_BandDict sets the preselect (4 bits) on the X1 connector.
HiQSDR_BandDict = {'160':1, '80':2, '40':3, '30':4, '20':5, '15':6, '17':7,
'12':8, '10':9, '6':10, '500k':11, '137k':12 }
# For the original N2ADR hardware set this:
# use_rx_udp = 1
# For the newer HiQSDR hardware set this:
# use_rx_udp = 2
# For FPGA firmware version 1.4 and newer, the hardware is set to the rx_udp_ip you enter here.
# For older firmware, the IP address is programmed into the FPGA, and you must enter that address as rx_udp_ip.
rx_udp_ip = "192.168.2.196" # Sample source IP address
rx_udp_ip_netmask = '255.255.255.0' # The netmask for the network of rx_udp_ip
rx_udp_port = 0xBC77 # Sample source UDP port
rx_udp_clock = 122880000 # ADC sample rate in Hertz
sndp_active = True # Enable setting the hardware IP to rx_udp_ip
# Vendor and product ID's for the SoftRock
usb_vendor_id = 0x16c0
usb_product_id = 0x05dc
# I2C-address of the SI570 in the softrock; Thanks to Joachim Schneider, DB6QS
si570_i2c_address = 0x55
#si570_i2c_address = 0x70
# Thanks to Ethan Blanton, KB8OJH, for this patch for the Si570 (many SoftRock's):
# If you are using a DG8SAQ interface to set a Si570 clock directly, set
# this to True. Complex controllers which have their own internal
# crystal calibration do not require this.
si570_direct_control = False
# This is the Si570 startup frequency in Hz. 114.285MHz is the typical
# value from the data sheet; you can use 'usbsoftrock calibrate' to find
# the value for your device.
si570_xtal_freq = 114285000
# This is the received radio sound playback rate. The default will
# be 48 kHz for the SDR-IQ and UDP port samples, and sample_rate for sound
# card capture. Set it yourself for other rates or hardware.
# The playback_rate must be 24000, 48000, 96000 or 192000.
# The preferred rate is 48000 for use with digital modes and transmit of recorded audio.
# playback_rate = 48000
# If you use quisk_hardware_fixed.py, this is the fixed VFO frequency in Hertz
fixed_vfo_freq = 7056000
# Softrock hardware must be polled to get the key up/down state when using CW mode.
# This is the time between polls in milliseconds. Use zero to turn off the poll if you
# only use SSB, or if your SoftRock does not have a key jack and USB control.
key_poll_msec = 0
#key_poll_msec = 5
# Softrock transmit hardware uses semi break-in for CW operation. This is the time in
# seconds before changing back to receive.
key_hang_time = 0.7
# This determines what happens when you tune by dragging the mouse. The correct
# choice depends on how your hardware performs tuning. You may want to use a
# custom hardware file with a custom ChangeFrequency() method too.
mouse_tune_method = 0 # The Quisk tune frequency changes and the VFO frequency is unchanged.
#mouse_tune_method = 1 # The Quisk tune frequency is unchanged and the VFO changes.
# configurable mouse wheel thanks to DG7MGY
mouse_wheelmod = 50 # Round frequency when using mouse wheel (50 Hz)
# If freq_spacing is not zero, frequencies are rounded to the freq_base plus the
# freq_spacing; frequency = freq_base + N * freq_spacing. This is useful at
# VHF and higher when Quisk is used with a transverter.
freq_spacing = 0
freq_base = 0
# This is the CW tone frequency in Hertz
cwTone = 600
# These options are used by the digital modes that send audio to an external
# program, and receive audio to transmit. Set Fldigi to USB, XML-RPC control.
digital_xmlrpc_url = "http://localhost:7362" # URL for control by XML-RPC
#digital_xmlrpc_url = "" # Do not poll socket for XML-RPC control
# Input audio from an external program for use with digital modes. The input must be
# stereo at 48000 sps, and you must set mic_sample_rate to 48000 also.
digital_input_name = "" # device name for transmit audio
# digital_input_name = 'hw:Loopback,0'
# Output audio to an external program for use with digital modes. The output is
# stereo at the same sample rate as the radio sound playback.
digital_output_name = "" # device name for received audio
# digital_output_name = digital_input_name
digital_output_level = 0.7 # This is the volume control 0.0 to 1.0 for digital playback to fldigi, etc.
# This option sends the raw I/Q samples to another program using a loopback device (Linux) or
# a Virtual Audio Cable (Windows). The sample rate is the same as the hardware sample rate.
# Read the samples from the loopback device with another program.
sample_playback_name = ""
#sample_playback_name = "hw:Loopback,0"
# You can control Quisk from Hamlib. Set the Hamlib rig to 2 and the device for rig 2 to
# localhost:4575, or other hamlib_port as used by Quisk.
hamlib_port = 4575 # Standard port for Quisk control. Set the port in Hamlib to 4575 too.
#hamlib_port = 4532 # Default port for rig 2. Use this if you can not set the Hamlib port.
#hamlib_port = 0 # Turn off Hamlib control.
# If you use the microphone feature, the mic_channel_I and Q are the two capture
# microphone channels. Quisk uses a monophonic mic, so audio is taken from the I
# channel, and the Q channel is (currently) ignored. It is OK to set the same
# channel number for both, and this is necessary for a USB mono mic. The mic sample rate
# should be 48000 to enable digital modes and the sound recorder to work, but 8000 can be used.
# Mic samples can be sent to an Ethernet device (use tx_ip and name_of_mic_play = "")
# or to a sound card (use name_of_mic_play="hw:1" or other device).
#
# If mic samples are sent to a sound card for Tx, the samples are tuned to the audio
# transmit frequency, and are set to zero unless the key is down. You must set both
# microphone_name and name_of_mic_play even for CW. For softrock hardware, you usually
# capture radio samples and play Tx audio on one soundcard; and capture the mic and play radio
# sound on the other sound card at 48000 sps. For example:
# name_of_sound_capt = "hw:0" # high quality sound card at 48, 96, or 192 ksps
# name_of_sound_play = "hw:1" # lower quality sound card at 48 ksps
# microphone_name = name_of_sound_play
# name_of_mic_play = name_of_sound_capt
# Microphone capture:
microphone_name = "" # Name of microphone capture device (or "hw:1")
mic_sample_rate = 48000 # Microphone capture sample rate in Hertz, should be 48000, can be 8000
mic_channel_I = 0 # Soundcard index of mic capture audio channel
mic_channel_Q = 0 # Soundcard index of ignored capture channel
# Microphone samples sent to soundcard:
name_of_mic_play = "" # Name of play device if mic I/Q is sent to a sound card
mic_playback_rate = 48000 # Playback rate must be a multiple 1, 2, ... of mic_sample_rate
mic_play_chan_I = 0 # Soundcard index of mic I play channel
mic_play_chan_Q = 1 # Soundcard index of mic Q play channel
mic_out_volume = 0.7 # Microphone output volume (after all processing) as a fraction 0.0 to 0.7
# Microphone samples sent to UDP:
tx_ip = "" # Transmit IP address for mic sent to UDP (or "192.168.2.195")
tx_audio_port = 0 # UDP port for mic samples (or 0x553B)
# These microphone parameters are obsolete, and have no effect. Use the Config/Tx Audio screen instead.
# mic_clip, mic_preemphasis, mic_avg_gain, mic_max_gain
# If your mixing scheme inverts the RF spectrum, set this option to un-invert it
invertSpectrum = 0
# This is a list of mixer settings. It only works for Linux; it has no effect in Windows.
# Use "amixer -c 1 contents" to get a list of mixer controls and their numid's for
# card 1 (or "-c 0" for card 0). Then make a list of (device_name, numid, value)
# for each control you need to set. For a decimal fraction, use a Python float; for example,
# use "1.0", not the integer "1".
#mixer_settings = [
# ("hw:1", 2, 0.80), # numid of microphone volume control, volume 0.0 to 1.0;
# ("hw:1", 1, 1) # numid of capture on/off control, turn on with 1;
# ]
# If you want Quisk to add a button to generate a 2-tone IMD test signal,
# set this to 1. This feature requires the microphone to work.
add_imd_button = 0
# If you want Quisk to add a full duplex button (transmit and receive at the
# same time), set this to 1.
add_fdx_button = 0
# If you want to write your own I/Q filter and demodulation module, set
# this to the name of the button to add, and change extdemod.c.
# add_extern_demod = "WFM"
add_extern_demod = ""
# These are the suppressed carrier frequencies for 60 meters
freq60 = (5330500, 5346500, 5357000, 5371500, 5403500)
# These are the filter bandwidths for each mode. Quisk has built-in optimized filters
# for these values, but you can change them if you want.
FilterBwCW = (200, 400, 600, 1000, 1500, 3000)
FilterBwSSB = (2000, 2200, 2500, 2800, 3000, 3300)
FilterBwAM = (4000, 5000, 6000, 8000, 10000, 9000)
FilterBwFM = (8000, 10000, 12000, 15000, 17000, 20000)
FilterBwIMD = FilterBwSSB
FilterBwDGT= (1600, 3200, 4800, 10000, 20000, 20000)
FilterBwEXT = (8000, 10000, 12000, 15000, 17000, 20000)
# This is the data used to draw colored lines on the frequency X axis to
# indicate CW and Phone sub-bands. You can make it anything you want.
# These are the colors used for sub-bands:
CW = '#FF4444' # General class CW
eCW = '#FF8888' # Extra class CW
Phone = '#4444FF' # General class phone
ePhone = '#8888FF' # Extra class phone
# ARRL band plan special frequencies
Data = '#FF9900'
DxData = '#CC6600'
RTTY = '#FF9900'
SSTV = '#FFFF00'
AM = '#00FF00'
Packet = '#00FFFF'
Beacons = '#66FF66'
Satellite = '#22AA88'
Repeater = '#AA00FF' # Repeater outputs
RepInput = '#AA88FF' # Repeater inputs
Simplex = '#00FF44'
Special = 'hot pink'
Other = '#888888'
# Colors start at the indicated frequency and continue until the
# next frequency. The special color "None" turns off color.
#
# To change BandPlan in your config file, first remove any frequencies in the range
# you want to change; then add your frequencies; and then sort the list. Or you could just
# replace the whole list.
BandPlan = [
# Test display of colors
#[ 0, CW], [ 50000, eCW], [ 100000, Phone], [ 150000, ePhone], [ 200000, Data], [ 250000, DxData], [ 300000, RTTY], [ 350000, SSTV],
#[ 400000, AM], [ 450000, Packet], [ 500000, Beacons], [ 550000, Satellite], [ 600000, Repeater], [ 650000, RepInput], [ 700000, Simplex],
#[ 750000, Other], [ 800000, Special], [ 850000, None],
# 160 meters
[ 1800000, Data],
[ 1809000, Other],
[ 1811000, CW],
[ 1843000, Phone],
[ 1908000, Other],
[ 1912000, Phone],
[ 1995000, Other],
[ 2000000, None],
# 80 meters
[ 3500000, eCW],
[ 3525000, CW],
[ 3570000, Data],
[ 3589000, DxData],
[ 3591000, Data],
[ 3600000, ePhone],
[ 3790000, Other],
[ 3800000, Phone],
[ 3844000, SSTV],
[ 3846000, Phone],
[ 3880000, AM],
[ 3890000, Phone],
[ 4000000, None],
# 60 meters
[ freq60[0], Phone],
[ freq60[0] + 2800, None],
[ freq60[1], Phone],
[ freq60[1] + 2800, None],
[ freq60[2], Phone],
[ freq60[2] + 2800, None],
[ freq60[3], Phone],
[ freq60[3] + 2800, None],
[ freq60[4], Phone],
[ freq60[4] + 2800, None],
# 40 meters
[ 7000000, eCW],
[ 7025000, CW],
[ 7039000, DxData],
[ 7041000, CW],
[ 7080000, Data],
[ 7125000, ePhone],
[ 7170000, SSTV],
[ 7172000, ePhone],
[ 7175000, Phone],
[ 7285000, AM],
[ 7295000, Phone],
[ 7300000, None],
# 30 meters
[10100000, CW],
[10130000, RTTY],
[10140000, Packet],
[10150000, None],
# 20 meters
[14000000, eCW],
[14025000, CW],
[14070000, RTTY],
[14095000, Packet],
[14099500, Other],
[14100500, Packet],
[14112000, CW],
[14150000, ePhone],
[14225000, Phone],
[14229000, SSTV],
[14231000, Phone],
[14281000, AM],
[14291000, Phone],
[14350000, None],
# 17 meters
[18068000, CW],
[18100000, RTTY],
[18105000, Packet],
[18110000, Phone],
[18168000, None],
# 15 meters
[21000000, eCW],
[21025000, CW],
[21070000, RTTY],
[21110000, CW],
[21200000, ePhone],
[21275000, Phone],
[21339000, SSTV],
[21341000, Phone],
[21450000, None],
# 12 meters
[24890000, CW],
[24920000, RTTY],
[24925000, Packet],
[24930000, Phone],
[24990000, None],
# 10 meters
[28000000, CW],
[28070000, RTTY],
[28150000, CW],
[28200000, Beacons],
[28300000, Phone],
[28679000, SSTV],
[28681000, Phone],
[29000000, AM],
[29200000, Phone],
[29300000, Satellite],
[29520000, Repeater],
[29590000, Simplex],
[29610000, Repeater],
[29700000, None],
# 6 meters
[50000000, Beacons],
[50100000, Phone],
[54000000, None],
# 2 meters
[144000000, CW],
[144200000, Phone],
[144275000, Beacons],
[144300000, Satellite],
[144380000, Special],
[144400000, Satellite],
[144500000, RepInput],
[144900000, Other],
[145100000, Repeater],
[145500000, Other],
[145800000, Satellite],
[146010000, RepInput],
[146400000, Simplex],
[146510000, Special], # Simplex calling frequency
[146530000, Simplex],
[146610000, Repeater],
[147420000, Simplex],
[147600000, RepInput],
[148000000, None],
# 1.25 meters
[222000000, Phone],
[222250000, RepInput],
[223400000, Simplex],
[223520000, Data],
[223640000, Repeater],
[225000000, None],
#70 centimeters
[420000000, SSTV],
[432000000, Satellite],
[432070000, Phone],
[432300000, Beacons],
[432400000, Phone],
[433000000, Repeater],
[435000000, Satellite],
[438000000, Repeater],
[445900000, Simplex],
[445990000, Special], # Simplex calling frequency
[446010000, Simplex],
[446100000, Repeater],
[450000000, None],
# 33 centimeters
[902000000, Other],
[928000000, None],
# 23 centimeters
[1240000000, Other],
[1300000000, None],
]
# For each band, this dictionary gives the lower and upper band edges. Frequencies
# outside these limits will not be remembered as the last frequency in the band.
# For each band, this dictionary gives the lower and upper band edges in Hertz.
# Frequencies outside these limits will not be remembered as the last frequency
# in the band.
BandEdge = {
	'160':( 1800000, 2000000), '80' :( 3500000, 4000000),
	'60' :( 5300000, 5430000), '40' :( 7000000, 7300000),
	'30' :(10100000, 10150000), '20' :(14000000, 14350000),
	'17' :(18068000, 18168000), '15' :(21000000, 21450000),
	'12' :(24890000, 24990000), '10' :(28000000, 29700000),
	'6'    :(  50000000,   54000000),
	'2'    :( 144000000,  148000000),
	'1.25' :( 222000000,  225000000),
	'70cm' :( 420000000,  450000000),
	'33cm' :( 902000000,  928000000),
	'23cm' :(1240000000, 1300000000),
	}
# For each band, this dictionary gives the initial center frequency, tuning
# frequency as an offset from the center frequency, and the mode.  This is
# no longer too useful because the persistent_state feature saves and then
# overwrites these values anyway.
bandState = {'Audio':(0, 0, 'LSB'),
	'160':( 1890000, -10000, 'LSB'), '80' :( 3660000, -10000, 'LSB'),
	'60' :( 5370000,   1500, 'USB'), '40' :( 7180000,  -5000, 'LSB'), '30':(10120000, -10000, 'CWL'),
	'Time':( 5000000, 0, 'AM')}
# Fill in a default state for the higher bands: center the band, round the
# center to the nearest multiple of 10 kHz, and start in USB 10 kHz up.
for band, (f1, f2) in BandEdge.items():
  if f1 > 13500000:
    f = (f1 + f2) // 2		# band center; floor division keeps the frequency an integer
    f = (f + 5000) // 10000	# round to the nearest 10 kHz (Python 3 "/" would yield a float)
    f *= 10000
    bandState[band] = (f, 10000, 'USB')
# For the Time band, this is the center frequency, tuning frequency and mode:
bandTime = [
( 2500000-10000, 10000, 'AM'),
( 3330000-10000, 10000, 'AM'),
( 5000000-10000, 10000, 'AM'),
( 7335000-10000, 10000, 'AM'),
(10000000-10000, 10000, 'AM'),
(14670000-10000, 10000, 'AM'),
(15000000-10000, 10000, 'AM'),
(20000000-10000, 10000, 'AM'),
]
# This is the list of band buttons that Quisk displays, and it should have
# a length of 14 or less. Empty buttons can have a null string "" label.
# Note that the 60 meter band and the Time band have buttons that support
# multiple presses.
bandLabels = ['Audio', '160', '80', ('60',) * 5, '40', '30', '20', '17',
'15', '12', '10', ('Time',) * len(bandTime)]
# If you use a transverter, you need to tune your hardware to a frequency lower than
# the frequency displayed by Quisk. For example, if you have a 2 meter transverter,
# you may need to tune your hardware from 28 to 30 MHz to receive 144 to 146 MHz.
# Enter the transverter offset in Hertz in this dictionary. For this to work, your
# hardware must support it. Currently, the HiQSDR, SDR-IQ and SoftRock are supported.
bandTransverterOffset = {
# '2': 144000000 - 28000000
}
# If you get your I/Q samples from a sound card, you will need to correct the
# amplitude and phase for inaccuracies in the analog hardware. The data is
# entered using the controls from the "Rx Phase" button on the config screen.
# The corrections are saved by the persistent_state feature.
#
# The available range of the amplitude and phase controls for receive:
rx_max_amplitude_correct = 0.2 # Correction relative to 1.000000 (ideally 0.0000)
rx_max_phase_correct = 10.0 # Correction in degrees of phase (ideally 0.0000)
#
# The bandAmplPhase dictionary gives the amplitude and phase corrections for
# sound card data. The format is a dictionary with key "band", giving a dictionary
# with key "rx" or "tx", giving a list of tuples (VFO, tune, amplitude, phase).
#
# If you use Quisk as a panadapter, the corrections will not depend on the band.
# In that case create a band "panadapter" in your config file, and all corrections
# will be read/written to that band.
bandAmplPhase = {} # Empty dictionary to start
#bandAmplPhase = {'panadapter':{}} # Create "panadapter" band for all corrections
# The program polls the soundcard or SDR-IQ for data every data_poll_usec microseconds.
# A lower time reduces latency; a higher time is less taxing on the hardware.
if sys.platform == "win32":
data_poll_usec = 20000 # poll time in microseconds
else:
data_poll_usec = 5000 # poll time in microseconds
# The fft_size is the width of the data on the screen (about 800 to
# 1200 pixels) times the fft_size_multiplier. Multiple FFTs are averaged
# together to achieve your graph refresh rate. If fft_size_multiplier is
# too small you will get many fft errors. You can specify fft_size_multiplier,
# or enter a large number (use 9999) to maximize it, or enter zero to let
# quisk calculate it for you. Look for fft_size_multiplier in quisk.py.
# Your fft_size_multiplier should have many small factors. Avoid 7 and 13, and
# use 8 or 12 instead.
#
# If your hardware can change the decimation, there are further complications.
# The FFT size is fixed, and only the average count can change to adjust the
# refresh rate.
fft_size_multiplier = 0
# The graph_refresh is the frequency at which the graph is updated,
# and should be about 5 to 10 Hertz. Higher rates require more processor power.
graph_refresh = 7 # update the graph at this rate in Hertz
# latency_millisecs determines how many samples are in the soundcard play buffer.
# A larger number makes it less likely that you will run out of samples to play,
# but increases latency. It is OK to suffer a certain number of play buffer
# underruns in order to get lower latency.
latency_millisecs = 150 # latency time in milliseconds
# Select the method to test the state of the key; see is_key_down.c
key_method = "" # No keying, or internal method
# key_method = "/dev/parport0" # Use the named parallel port
# key_method = "/dev/ttyS0" # Use the named serial port
# key_method = "192.168.1.44" # Use UDP from this address
# If your hardware file defines the method OnButtonPTT(self, event), then Quisk will
# display a PTT button you can press. The method must switch your hardware to
# transmit somehow, for example, by setting a serial port pin to high.
#
# You can define two hot keys that when pressed simultaneously, will push the PTT button.
# If you want only one hot key, set hot_key_ptt2 to None. Use any of the wx.WXK_* key codes,
# or the ord() of the letter. Do not choose hot keys that interfere with other features
# on your system; for example, system menus or the frequency entry box.
hot_key_ptt1 = None
hot_key_ptt2 = None
# hot_key_ptt1 = wx.WXK_CONTROL
# hot_key_ptt1 = wx.WXK_SHIFT
# hot_key_ptt1 = wx.WXK_ALT
# hot_key_ptt1 = wx.WXK_F5
# hot_key_ptt2 = ord(' ')
# hot_key_ptt2 = ord('A')
# If you want Quisk to generate a sidetone, include self.use_sidetone = 1
# in the __init__ method of your hardware file.
#
# If you are using keying, key-down throws away the current capture buffer
# and starts a sidetone with a rise time of 5 milliseconds. For
# key-up, the sidetone is ended with a fall time of 5 milliseconds, then
# a silent period starts, then normal audio starts with a rise time of
# 5 milliseconds. The length of the silent period is given by keyupDelay,
# but will be at least the time necessary to collect enough samples to
# refill the filters.  A larger keyupDelay may be needed to accommodate
# antenna switching or other requirement of your hardware.
keyupDelay = 23 # extra milliseconds silence on key up
# For FM transmit, this is the modulation index.
modulation_index = 1.67
# These are the tuning parameters for the AGC. There is a button to turn AGC on or off,
# but AGC still limits the peak amplitude to avoid clipping even if it is off.
# Right click the AGC button to show the adjustment slider. If the slider is at maximum,
# all signals will have the same (maximum) amplitude. For lower values, weak signals
# will be somewhat less loud than strong signals; that is, some variation in signal
# amplitude remains.
# The AGC parameters are not used for FM, and the AGC button becomes a squelch button
# with a squelch slider control.
# This controls the maximum AGC gain and thus the scale of the AGC slider control. If
# it is too high, all signals reach the same amplitude at much less than 100% slider.
# If it is too low, then all signals fail to have the same amplitude even at 100%. But
# the value is not critical, because you can adjust the slider a bit more.
agc_max_gain = 15000.0
# agc_off_gain is obsolete and has no effect. Turn off AGC and adjust the slider instead.
# This is the AGC release time in seconds. It must be greater than zero. It is the time
# constant for gain recovery after a strong signal disappears.
agc_release_time = 1.0
|
shenki/quisk
|
quisk_conf_defaults.py
|
Python
|
gpl-2.0
| 39,603
|
[
"CRYSTAL"
] |
3eb5dba55135fd65e5efd66f3db66e2684aa347774a412d80f5c31a8bdc62021
|
# -*- coding: utf-8 -*-
import numpy as np
from numpy.linalg import matrix_rank
from joblib import Parallel, delayed
from scipy.stats import multivariate_normal as _N
from scipy.optimize import minimize
from fitr.stats import lme
from fitr.stats import bic
from fitr.inference import OptimizationResult
def l_bfgs_b(f,
             i,
             data,
             nparams,
             jac,
             minstarts=2,
             maxstarts=10,
             maxstarts_without_improvement=3,
             init_sd=2):
    """ Minimizes the negative log-probability of data with respect to some parameters under function `f` using the L-BFGS-B algorithm.
    This function is specified for use with parallel CPU resources.
    Arguments:
        f: (Negative!) Log likelihood function
        i: `int`. Subject being optimized (slices first dimension of `data`)
        data: Object subscriptable along first dimension to indicate subject being optimized
        nparams: `int`. Number of parameters in the model
        jac: `bool`. Set to `True` if `f` returns a Jacobian as the second element of the returned values
        minstarts: `int`. Minimum number of restarts with new initial values
        maxstarts: `int`. Maximum number of restarts with new initial values
        maxstarts_without_improvement: `int`. Maximum number of consecutive restarts without improvement in objective function value
        init_sd: Standard deviation for Gaussian initial values
    Returns:
        i: `int`. Subject being optimized (slices first dimension of `data`)
        xmin: `ndarray((nparams,))`. Parameter values at optimum
        fmin: Scalar log-probability at the optimum (negated minimum of `f`)
        fevals: `int`. Number of function evaluations
        niters: `int`. Number of iterations
        lme_: Scalar log-model evidence at optimum
        bic_: Scalar Bayesian Information Criterion at optimum
        hess_inv: `ndarray((nparams, nparams))`. Inverse Hessian at optimum
    """
    nlog_prob = lambda x: f(x, data[i])
    best_obj = np.inf  # lowest value of the minimized objective seen so far
    fmin = np.inf      # log-probability at the best optimum (returned value)
    fevals = 0
    niters = 0
    nstarts = 0
    nstarts_without_improvement = 0
    done = False
    succeeded = False
    while not done:
        xinit = np.random.normal(0, init_sd, size=nparams)
        res = minimize(nlog_prob, xinit, jac=jac, method='L-BFGS-B')
        nstarts += 1
        fevals += res.nfev
        niters += res.nit
        # Record the best successful start.
        # NOTE(fix): the original compared `res.fun` (a negative log-probability)
        # against the already-negated `fmin`, so better later starts could be
        # silently discarded; the comparison now uses the un-negated objective.
        if res.success is True and res.fun < best_obj:
            best_obj = res.fun
            fmin = -res.fun  # log-probability at the optimum
            xmin = res.x
            hess_inv = res.hess_inv.todense()
            lme_ = lme(fmin, nparams, hess_inv)
            bic_ = bic(fmin, nparams, data[i].shape[1])
            succeeded = True
            nstarts_without_improvement = 0  # an improvement resets the counter
        else:
            nstarts_without_improvement += 1
        # Convergence test: stop when out of restarts, or after enough
        # consecutive restarts without improvement (never before `minstarts`,
        # which the original accepted but ignored).
        if nstarts >= maxstarts:
            done = True
        elif (nstarts >= minstarts and
              nstarts_without_improvement >= maxstarts_without_improvement):
            done = True
        if done:
            print('Subject %s Fit | %s Starts | Fevals %s | lp_= %s' %(i, nstarts, fevals, fmin))
    if succeeded is False:
        # No start converged: return NaN placeholders with the right shapes.
        print('Subject %s failed to converge after %s iterations (%s fx evals)' %(i, niters, fevals))
        fmin = np.nan
        xmin = np.array([np.nan]*xinit.size)
        hess_inv = np.array([[np.nan]*xinit.size]*xinit.size)
        lme_ = np.nan
        bic_ = np.nan
    return i, xmin, fmin, fevals, niters, lme_, bic_, hess_inv
def second_order_optimizer(f,
                           i,
                           data,
                           nparams,
                           jac,
                           hess,
                           minstarts=2,
                           maxstarts=10,
                           maxstarts_without_improvement=3,
                           init_sd=2,
                           method='trust-exact'):
    """ Minimizes the negative log-probability of data with respect to some parameters under function `f` using a second-order (trust-region) scipy method.
    This function is specified for use with parallel CPU resources.
    Arguments:
        f: (Negative!) Log likelihood function.
        i: `int`. Subject being optimized (slices first dimension of `data`)
        data: Object subscriptable along first dimension to indicate subject being optimized
        nparams: `int`. Number of parameters in the model
        jac: `bool`. Set to `True` if `f` returns a Jacobian as the second element of the returned values
        hess: `bool`. Set to `True` if third output value of `f` is the Hessian matrix
        minstarts: `int`. Minimum number of restarts with new initial values
        maxstarts: `int`. Maximum number of restarts with new initial values
        maxstarts_without_improvement: `int`. Maximum number of consecutive restarts without improvement in objective function value
        init_sd: Standard deviation for Gaussian initial values
        method: `str`. A scipy trust-region method ('trust-exact', 'trust-ncg', 'trust-krylov', 'dogleg')
    Returns:
        i: `int`. Subject being optimized (slices first dimension of `data`)
        xmin: `ndarray((nparams,))`. Parameter values at optimum
        fmin: Scalar log-probability at the optimum (negated minimum of `f`)
        fevals: `int`. Number of function evaluations
        niters: `int`. Number of iterations
        lme_: Scalar log-model evidence at optimum
        bic_: Scalar Bayesian Information Criterion at optimum
        hess_inv: `ndarray((nparams, nparams))`. Pseudo-inverse of the Hessian at optimum
    """
    # `f` returns (value, jacobian, hessian): split it so scipy receives the
    # objective/jacobian pair and the Hessian callback separately.
    nlog_prob = lambda x: f(x, data[i])[:-1]
    hessian = lambda x: f(x, data[i])[-1]
    best_obj = np.inf  # lowest value of the minimized objective seen so far
    fmin = np.inf      # log-probability at the best optimum (returned value)
    fevals = 0
    niters = 0
    nstarts = 0
    nstarts_without_improvement = 0
    done = False
    succeeded = False
    while not done:
        # Screen several random candidates and start from the one with the
        # lowest objective value.
        xinit = None
        lbest = np.inf
        for _ in range(15):
            xtest = np.random.normal(0, init_sd, size=nparams)
            ll, _grad = nlog_prob(xtest)
            if ll <= lbest:
                lbest = ll
                xinit = xtest
        res = minimize(nlog_prob,
                       xinit,
                       jac=jac,
                       hess=hessian,
                       method=method)
        nstarts += 1
        fevals += res.nfev
        niters += res.nit
        # Record the best successful start.
        # NOTE(fix): the original compared `res.fun` (a negative log-probability)
        # against the already-negated `fmin`, so better later starts could be
        # silently discarded; the comparison now uses the un-negated objective.
        if res.success is True and res.fun < best_obj:
            best_obj = res.fun
            fmin = -res.fun  # log-probability at the optimum
            xmin = res.x
            hess_inv = np.linalg.pinv(res.hess)
            lme_ = lme(fmin, nparams, hess_inv)
            bic_ = bic(fmin, nparams, data[i].shape[1])
            succeeded = True
            nstarts_without_improvement = 0  # an improvement resets the counter
        else:
            nstarts_without_improvement += 1
        # Convergence test: stop when out of restarts, or after enough
        # consecutive restarts without improvement (never before `minstarts`,
        # which the original accepted but ignored).
        if nstarts >= maxstarts:
            done = True
        elif (nstarts >= minstarts and
              nstarts_without_improvement >= maxstarts_without_improvement):
            done = True
        if done:
            print('Subject %s Fit | %s Starts | Fevals %s | lp_= %s' %(i, nstarts, fevals, fmin))
    if succeeded is False:
        # No start converged: return NaN placeholders with the right shapes.
        print('Subject %s failed to converge after %s iterations (%s fx evals)' %(i, niters, fevals))
        fmin = np.nan
        xmin = np.array([np.nan]*xinit.size)
        hess_inv = np.array([[np.nan]*xinit.size]*xinit.size)
        lme_ = np.nan
        bic_ = np.nan
    return i, xmin, fmin, fevals, niters, lme_, bic_, hess_inv
def mlepar(f,
           data,
           nparams,
           minstarts=2,
           maxstarts=10,
           maxstarts_without_improvement=3,
           init_sd=2,
           njobs=-1,
           jac=None,
           hess=None,
           method='L-BFGS-B'):
    """ Computes maximum likelihood estimates using parallel CPU resources.
    Dispatches one optimization job per subject to `l_bfgs_b` (first-order) or
    `second_order_optimizer` (trust-region methods), then collects the
    per-subject results into a `fitr.inference.OptimizationResult`.
    Arguments:
        f: Likelihood function
        data: A subscriptable object whose first dimension indexes subjects
        nparams: `int` number of parameters to be estimated
        minstarts: `int`. Minimum number of restarts with new initial values
        maxstarts: `int`. Maximum number of restarts with new initial values
        maxstarts_without_improvement: `int`. Maximum number of restarts without improvement in objective function value
        init_sd: Standard deviation for Gaussian initial values
        njobs: `int`. Number of parallel jobs (-1 uses all cores)
        jac: `bool`. Set to `True` if `f` returns a Jacobian as the second element of the returned values
        hess: `bool`. Set to `True` if third output value of `f` is the Hessian matrix
        method: `str`. 'L-BFGS-B' or one of the scipy trust-region methods.
    Returns:
        `fitr.inference.OptimizationResult`
    Raises:
        ValueError: If `method` is not a supported optimizer.
    Todo:
        - [ ] Raise errors when user selects inappropriate optimization function given values for `jac` and `hess`
    """
    nsubjects = len(data)
    if method == 'L-BFGS-B':
        # One argument tuple per subject, unpacked into l_bfgs_b below.
        plist = [(f, i, data, nparams, jac, minstarts, maxstarts,
                  maxstarts_without_improvement, init_sd)
                 for i in range(nsubjects)]
        y = Parallel(n_jobs=njobs)(delayed(l_bfgs_b)(*args) for args in plist)
    elif method in ('trust-exact', 'trust-ncg', 'trust-krylov', 'dogleg'):
        plist = [(f, i, data, nparams, jac, hess, minstarts, maxstarts,
                  maxstarts_without_improvement, init_sd, method)
                 for i in range(nsubjects)]
        y = Parallel(n_jobs=njobs)(delayed(second_order_optimizer)(*args) for args in plist)
    else:
        # NOTE(fix): an unsupported method previously fell through and raised
        # an opaque NameError on `y`; fail fast with a clear message instead.
        raise ValueError('Unsupported optimization method: %s' % method)
    res = OptimizationResult(nsubjects, nparams)
    for item in y:
        sid = item[0]
        res.subject_id[sid] = sid
        res.xmin[sid,:] = item[1]
        res.fmin[sid] = item[2]
        res.fevals[sid] = item[3]
        res.niters[sid] = item[4]
        res.lme[sid] = item[5]
        res.bic[sid] = item[6]
        # Standard errors from the diagonal of the inverse Hessian.
        res.err[sid,:] = np.sqrt(np.diag(item[7]))
        res.hess_inv[sid,:,:] = item[7]
    return res
|
ComputationalPsychiatry/fitr
|
fitr/inference/mle_parallel.py
|
Python
|
gpl-3.0
| 10,027
|
[
"Gaussian"
] |
5bfcc9601f60a22cabe857c555b20faf90651c191e102d68e2581c9546b4ae27
|
# coding: utf-8
# # Pollen
# This is the time of year when even thinking about flowers makes my eyes water.
# While it doesn't completely stop me from enjoying the outdoors, I find forecasts useful for planning and setting expectations.
#
# There are pollen tracking services that I find useful, but have shortcomings in their coverage in time or granularity. Some don't measure pollen on the weekends, when I'm most likely to be out, and some just give an aggregated count, ignoring the more relevant tree pollen count I'm interested in. This gap left me wondering how far I could get predicting pollen levels from the weather on my own, and looked like an opportunity to apply my recent interest in deep learning models, and RNNs in particular.
# In[ ]:
from imports import *
# %mkdir cache
import joblib; mem = joblib.Memory(cachedir='cache')
get_ipython().magic('matplotlib inline')
# In[ ]:
from util.pollen_utils import pscale
import util.utils; reload(util.utils); from util.utils import (
check_one2one, yrmths, flatten_multindex, ends_with,
BatchArray, ravel, repackage_hidden, mse,
replace_with_dummies, filter_dtypes, log_,
join_pollen_weather, read
)
date = lambda xs: dt.datetime(*xs)
# ## The training data
# For the forecasting model to be useful for me, it needs to be trained on data that will be available when I need to make prediction.
#
# If I want to know tomorrow's pollen levels and I have a summary of today's weather available, then it will be enough to train the model on pollen counts using weather data from the previous day. The problem is simpler if I simply want an estimate of today's count based on today's weather (say it's the weekend, when the regular source is unavailable), in which case the input and output of the training data are aligned to the same day. Another variation would be to train on historical _forecasts_ of the data, since this would give longer range estimates and would allow for more flexibility in use cases.
#
# For this first iteration of the model I decided to train on daily weather summaries from Dark Sky as the inputs, and same-day pollen counts as the output, due to the ease in accessing the data. This is suboptimal for many use cases, but can at least be a start for a ballpark estimate of expected model performance. (The code for pulling this data is in the data-fetcher.ipynb notebook)
#
# In addition to the weather data, I used different date fields (for example, month number and day of the year) and the previous day's pollen count, which turns out to be super correlated with today's pollen count (almost 90%).
# In[ ]:
# Load the cached Dark Sky daily weather summaries and derive calendar features.
dailydf = feather.read_dataframe('cache/dark_day.fth')
dailydf = (
    dailydf.sort_values('Time', ascending=True).reset_index(drop=1)
    # 'Time' is a UNIX timestamp; convert it once to a datetime column.
    .assign(Dt=lambda x: pd.to_datetime(x.Time, unit='s'))
    .assign(
        Day=lambda x: x.Dt.dt.day,  # day of the month
        Doy=lambda x: x.Dt.dt.dayofyear,  # day of the year
        M=lambda x: x.Dt.dt.month,
        Y=lambda x: x.Dt.dt.year,
        Day_int=lambda x: (x['Dt'] - x['Dt'].min()).dt.days,  # days since first record
    )
    .drop('Ozone', axis=1)  # This is a new field, I guess
)
# Here's a sample of the daily weather data:
# In[ ]:
dailydf[:3]
# # Nulls
# I've yet to find a perfectly clean dataset that's ready to use out of the box, and this is no exception. While better than some weather data sources I tried, it still has some columns with nulls that we'll have to deal with. Some are straightforward, like `Precip_type` and `Precip_accumulation`:
# In[ ]:
# A null Precip_type means no precipitation fell that day; label it explicitly.
# ('Precip_type != Precip_type' is true only for NaN values.)
dailydf.loc[dailydf.eval('Precip_type != Precip_type'), 'Precip_type'] = 'none'
# Likewise, a missing accumulation is treated as zero accumulation.
dailydf['Precip_accumulation'] = dailydf.Precip_accumulation.fillna(0)
# For the field that records the time of day with the maximum precipitation, I just filled in the missing values (for days when it didn't rain) with the minimum time of the day:
#
# In[ ]:
def fill_pimt_null(s, timecol):
    """Replace null precipitation-max times with the day's minimum time.

    The column is null on days with no precipitation; lacking a better
    choice, each missing entry is set to the corresponding value of
    `timecol` (the minimum time of that day). Returns an int series.
    """
    return s.where(s.notnull(), timecol).astype(int)
# Midnight of each record's day as a UNIX timestamp.
# NOTE(review): strftime('%s') is a platform-specific (glibc) extension and
# uses the local timezone; replace(hour=0) leaves minutes/seconds untouched —
# fine if Dt is already midnight-aligned, TODO confirm.
dailydf['Min_time'] = dailydf.Dt.map(lambda t: int(t.replace(hour=0).strftime('%s')))
dailydf.Precip_intensity_max_time = fill_pimt_null(dailydf.Precip_intensity_max_time, dailydf.Min_time)
# The nulls in the cloud cover were a bit trickier to tease out. Throwing it through a decision tree didn't reveal any obvious rules for when nulls would occur, so I just built a quick random forest model to use the other features to determine what should go in the missing rows for `Cloud_cover`. The predictions from this imputation model on held out data shows a pretty good correlation with the actual values:
# In[ ]:
from IPython.display import Image
Image('plots/cloud_cover_model_perf.png', height=400, width=400)
# so I went ahead and stuck with it. The nulls don't appear to be randomly distributed, however (they seemed to be correlated with snow and visibility), so it may be worthwhile to look for a better way to deal with them some other time.
# In[ ]:
from sklearn.ensemble import RandomForestRegressor
def fill_cloud_cover_null(cc, X):
    """Impute missing Cloud_cover values with a random-forest model.

    No obvious rule explained where the nulls occur, so a
    RandomForestRegressor trained on the remaining rows of the numeric
    feature frame `X` predicts values for the missing entries.
    Returns the series unchanged when nothing is missing.
    """
    missing = cc != cc  # NaN is the only value not equal to itself
    if not missing.any():
        return cc
    forest = RandomForestRegressor(n_estimators=30, oob_score=True)
    forest.fit(X[~missing], cc[~missing])
    imputed = cc.copy()
    imputed.loc[missing] = forest.predict(X[missing])
    return imputed
# Numeric columns (excluding the target itself) used as predictors
# for the Cloud_cover imputation model.
_feats = [k for k, d in dailydf.dtypes.items()
if (d == float or d == int) and (k != 'Cloud_cover')
]
dailydf['Cloud_cover'] = fill_cloud_cover_null(dailydf.Cloud_cover, dailydf[_feats])
# ### Check times
# In addition to replacing the text categorical variables with dummy values, I also adjusted the time attributes (sunrise, sunset, maximum temperature &c) so that they reflected time of the day rather than UNIX time.
# In[ ]:
# One-hot encode the text categoricals; the result should be null-free.
ddf = replace_with_dummies(dailydf, 'Icon Precip_type'.split())
assert (ddf == ddf).all().all(), "Don't want nulls here"
# In[ ]:
# Check that within a day the difference between the maximum
# and minimum times is not greater than the
# number of seconds in a day
times = lfilter(lambda x: x.endswith('ime'), ddf)  # columns ending in 'ime' hold UNIX times
minmax = DataFrame({
    'Min': ddf[times].min(axis=1),
    'Max': ddf[times].max(axis=1),
}).assign(Diff=lambda x: x.Max.sub(x.Min).div(60 * 60 * 24))
assert 0 <= minmax.Diff.max() <= 1, "All times within a day should be no more than 24 hrs apart"
minmax.Diff.max() # should be no more than 1
# In[ ]:
assert (ddf[times].min(axis=1) == ddf.Min_time).all(), 'By definition'
# In[ ]:
# Convert absolute UNIX times to hours since that day's midnight;
# new columns get an 's' suffix (e.g. 'Sunrise_time' -> 'Sunrise_times').
unix_time_to_day_hrs = lambda s, min_time: (s - min_time) / 3600
for t in set(times) - {'Min_time'}:
    c = t + 's'
    ddf[c] = unix_time_to_day_hrs(ddf[t], ddf.Min_time)
# Some cols now have a single value. Drop them.
# In[ ]:
slen = lambda x: len(set(x))  # number of distinct values in a column
nunique = ddf.apply(slen)
ddf = ddf[nunique[nunique > 1].index].copy()
# ### Pollen
# From personal experience, the symptoms have a nonlinear response rate to the pollen count that is mirrored in the [NAB](http://www.aaaai.org/global/nab-pollen-counts/reading-the-charts) classification. Here's the scale:
# In[ ]:
pscale
# Since the symptoms increase with each order of magnitude change in the count, rather than by a constant, this tells me the target would be well modeled by the log of the pollen count. The standard MSE would penalize a prediction that's off by 50 the same, regardless of whether the actual count is in the low or high range, but modeling the log of the count would improve the metric.
#
# The plot below shows what I mean. The raw plot on the left shows a more drastically varying scale, which would be trickier to learn with a simple MSE loss, compared to the more evenly scaled log plot on the right.
# In[ ]:
# Side-by-side plot of the NAB severity cutoffs on a linear vs log10 scale,
# motivating modeling log(pollen count) rather than the raw count.
_, [ax1, ax2] = plt.subplots(1, 2, figsize=(10, 4))
cutoffs = np.array([1, 15, 90, 1500])
ax1.set_title('Linear scale')
ax1.plot(cutoffs)
ax2.set_title('Log scale')
ax2.plot(np.log10(cutoffs));
# ### Weather & pollen become one
# This is where the weather and pollen tables are joined, the target variable is logified, features are standardized, and data is wrapped in torch variables. You can see `utils.join_pollen_weather` if such things do not bore you.
#
# I'm pretty sure that not every variable is helpful and that many convey redundant information, but since I haven't seen a straightforward way to do feature selection with deep networks, I'll have to save that part for another day.
# In[ ]:
# Join the pollen counts to the processed weather features; returns both the
# joined frame and torch-wrapped / raw design matrices (see utils).
poldf = feather.read_dataframe('cache/pollen.fth')
xdf, xt, yt, rx, rxdf, ry = join_pollen_weather(
    poldf, ddf, time_cols=times, ycol='Logcnt'
)
# In[ ]:
print('|X|:', xt.size())
print('|y|:', yt.size())
print("Pollen count's 1st lag auto-correlation: {:.2%}"
      .format(xdf.Logcnt.corr(xdf.Logcnt.shift())))
# Sanity check that it's ordered ascending by date and not null
assert xdf.Dt.is_monotonic_increasing
assert xdf.Time.is_monotonic_increasing
assert (xdf.Doy > xdf.Doy.shift(1)).mean() > .98, (
    "Day of year int should increase once a year")
assert not xdf.isnull().any().any()
# Sanity check that I didn't accidentally include a transformation of target variable in the predictors:
# In[ ]:
corrs = (rxdf.corrwith(ry).to_frame().rename(columns={0: 'Corr'})
         .assign(Abs=lambda x: x.Corr.abs())
         .sort_values('Abs', ascending=0).Corr)
assert corrs.abs().max() < .9
corrs[:5]
# ## RNN
# While a standard model that works on tabular data could work (think linear regression, KNN, GBMs) their standard usage doesn't take into account the sequential structure of the data, and ignores useful information. There are more classical models that take the sequential structure into account, like HMMs and Kalman filters,
# but since I'm going through a phase of aiming deep learning solutions at problems, a recurrent neural network is the favored choice. While different in many ways from an HMM, it does share the element of hidden variables that track state over time, even if they don't convey the kind of useful probabilistic information that an HMM's hidden states would.
#
# There are roughly a gajillion deep learning frameworks around these days, but I went with [Pytorch](http://pytorch.org/) because it looked fun.
# In[ ]:
import torch as T
from torch.autograd import Variable
from torch import optim
from torch import nn
# Extract the python scalar from a 0-dim loss Variable.
# NOTE(review): `.data[0]` is old-PyTorch style; newer versions use `.item()`.
tofloat = lambda x: x.data[0]
# Variable -> flat numpy array.
unvar = lambda x: x.data.numpy().ravel()
# And here is the main model. It basically uses an RNN with GRUs (a simplified version of the standard LSTM cell, named after a data science twitter celebrity), with the output units leading to a dense layer after a dropout layer
# In[ ]:
class Rnn(nn.Module):
    """GRU-based recurrent regressor.

    A (possibly multi-layer) GRU runs over the input sequence; its outputs
    pass through dropout and a single linear unit to yield one scalar
    prediction per time step.

    Parameters
    ----------
    P : int
        Number of input features per time step.
    nhidden : int
        Hidden units per GRU layer.
    num_layers : int
        Number of stacked GRU layers.
    dropout : float
        Dropout probability, applied both between GRU layers and on the
        GRU outputs before the decoder.
    """
    def __init__(self, P=3, nhidden=21, num_layers=1, dropout=0):
        super().__init__()
        self.P, self.nhidden, self.num_layers, self.dropout = (
            P, nhidden, num_layers, dropout
        )
        self.rnn = nn.GRU(P, nhidden, num_layers, batch_first=True, dropout=dropout)
        self.Dropout = nn.Dropout(p=dropout)
        self.decoder = nn.Linear(nhidden, 1)
        self.init_weights()
        self.zero_grad()
    def __dir__(self):
        # Expose submodule names to introspection/tab-completion.
        return super().__dir__() + list(self._modules)
    def forward(self, input, hidden=None, outputh=False):
        """Run the sequence through GRU -> dropout -> linear decoder.

        Falls back to the stored `self.hidden` state when `hidden` is not
        given; the updated hidden state is detached and stashed for the
        next call. With outputh=True also returns the raw hidden output.
        """
        if hidden is None:
            hidden = self.hidden
        out1, hout = self.rnn(input, hidden)
        out1d = self.Dropout(out1)
        out2 = self.decoder(ravel(out1d))
        self.hidden = repackage_hidden(hout) # don't waste time tracking the grad
        if outputh:
            return out2, hout
        return out2
    def init_weights(self):
        # Xavier-style initialization for GRU and decoder weights; decoder
        # bias starts at zero. (Removed an unused `initrange` local.)
        for p in self.rnn.parameters():
            xavier_init(p.data)
        self.decoder.bias.data.fill_(0)
        xavier_init(self.decoder.weight.data)
    def init_hidden(self, bsz):
        "Return a zeroed hidden-state Variable for batch size `bsz` (an LSTM would need 2)."
        weight = next(self.rnn.parameters()).data
        mkvar = lambda: Variable(weight.new(self.num_layers, bsz, self.nhidden).zero_())
        return mkvar()
    def set_hidden(self, bsz):
        # Reset the stored hidden state for a new batch size.
        h = self.init_hidden(bsz)
        self.hidden = h
def xavier_init(t):
    """In-place normal initialization with std scaled by the largest dim.

    (This seemed to be the recommended distribution for weight
    initialization.) Mutates and returns `t`.
    """
    fan = max(t.size())
    return t.normal_(std=fan ** -0.5)
criterion = nn.MSELoss()
# The training routine is pretty standard and self explanatory:
# In[ ]:
def train_epoch(barray, model=None, hidden=None, optimizer=None, eval=False, batch_size=None):
    """Run one pass of `model` over the batches in `barray`.

    With eval=False (training), backprops the MSE loss per batch, clips
    gradients, steps `optimizer`, and returns (mean loss, predictions).
    With eval=True, only collects predictions and returns them.

    Note: `eval` shadows the builtin but is kept for interface
    compatibility with existing callers.
    """
    batch_size = batch_size or barray.batch_size
    assert batch_size or hidden
    hidden = model.init_hidden(batch_size) if hidden is None else hidden
    res = []
    ss, n = 0, 0  # running sum of squared error and sample count
    for bix in barray.batch_ix_iter(batch_size=batch_size):
        x, y = barray[bix]
        # Fix: guard the optimizer — eval-mode calls without an optimizer
        # previously crashed on `optimizer.zero_grad()`.
        if optimizer is not None:
            optimizer.zero_grad()
        output = model(x, hidden)
        res.append(output.data.squeeze())
        if eval:
            continue
        loss = criterion(output, y.view(-1, 1))
        loss.backward()
        T.nn.utils.clip_grad_norm(model.parameters(), 3)
        optimizer.step()
        ss += tofloat(loss) * len(output) # keep track of ss
        n += len(output)
    res = T.stack(res).view(-1).numpy()
    if eval:
        return res
    tot_loss = ss / n
    return tot_loss, res
# When I want to make a prediction on the validation set, I first run the model over a few preceding examples to update the hidden weights, since the validation set is relatively small (that is, with `warmup=True` in `val_pred`):
# In[ ]:
def val_pred(model, warmup=True):
    """Predict over one batch of the global validation array `baval`.

    With warmup=True the hidden state is reset and the warm-up batch
    (index 0) is used; with warmup=False the held-out batch (index 1) is
    predicted, reusing whatever hidden state the model currently carries.

    Returns (predictions, actuals) as Series indexed by date.
    """
    if warmup:
        model.set_hidden(1)
    ix = int(not warmup)
    Dt = baval.Dt[ix]
    xs, ysv = baval[[ix]]
    ys = Series(unvar(ysv), index=Dt)
    yspred = model(xs)
    yspred_s = Series(unvar(yspred), index=Dt)
    return yspred_s, ys
# And here are the functions to run the training routines and log the progress
# In[ ]:
# %mkdir /tmp/res/
VALFN = '/tmp/res/val.txt'  # per-report validation MSE log
TRNFN = '/tmp/res/trn.txt'  # per-report training MSE log
def report_hook(model, res, vals=None):
    """Log training and validation MSE to stdout and the log files.

    `res` holds this epoch's training predictions. The validation score
    is appended to `vals` when a list is supplied so the caller can track
    the best epoch.
    """
    print()
    val_pred(model, warmup=True)  # warm up the hidden state first
    yspred, ys = val_pred(model, warmup=False)
    val_acc = mse(yspred, ys)
    # Fix: calling with the default `vals=None` used to crash on .append.
    if vals is not None:
        vals.append(val_acc)
    trn_acc = mse(ba.train_samples_y, res)
    with open(VALFN, 'a') as f:
        f.write('{:}\n'.format(val_acc))
    with open(TRNFN, 'a') as f:
        f.write('{:}\n'.format(trn_acc))
    print('{:,.3f}; val: {:,.4f}'.format(trn_acc, val_acc), end='; ')
def train_epochs(model, optimizer=None, rng=(500, ), print_every=10, report_hook=None, report_kw=None):
    """Train `model` for the epochs given by range(*rng).

    Truncates the on-disk score logs, then calls `train_epoch` on the
    global `ba` once per epoch, invoking `report_hook` every
    `print_every` epochs. Returns (last epoch's predictions, best
    validation score recorded by the hook).

    Fix: `report_kw` previously defaulted to a mutable `{}`; it is unused
    here, but the None default removes the shared-state hazard.
    """
    # Truncate the log files from any previous run.
    with open(VALFN, 'w') as f: pass
    with open(TRNFN, 'w') as f: pass
    vals = []
    for i in range(*rng):
        _, res = train_epoch(ba, model=model, hidden=None, optimizer=optimizer)
        print('.', end='')
        if i % print_every == 0:
            if report_hook:
                report_hook(model, res, vals=vals)
    return res, min(vals)
# ## Training and Model parameters
#
# A lot of standard tips for RNN settings didn't seem to apply to this problem, since approximately 100% of the use cases I see are for NLP tasks. For one, I have a lot less data, so the batch size remainders matter more. Using a batch size of 32 and sequence length of 25, I use all the full batches for the training set (4000 samples), and the remaining samples (~400) for the validation set.
#
# I also found that fewer hidden units worked pretty well. For some settings, 128 units was overkill, and completely overfit the data, though using just 8 served as a decent form of regularization. I settled on a higher number of units for reasons explained later, but dropout helped prevent overfitting.
#
# I used [skopt's](https://scikit-optimize.github.io/) Gaussian process optimizer to find a good set of hyperparameters.
# In[ ]:
# training batches
seq_len = 25
bcz = 32
ba = BatchArray(x=xt, y=yt, seq_len=seq_len, batch_size=bcz)
# validation batches: the rows that don't fit a full training batch
l = ba.num_leftover_rows
baval = BatchArray(x=xt[-2 * l:], y=yt[-2 * l:], seq_len=l, batch_size=1)
assert (xdf.index == rxdf.index).all(), 'Dropped some nulls?'
# Batch 0 is a warm-up window; batch 1 is the actual held-out window.
baval.Dt = [xdf.Dt.iloc[-2*l:-l], xdf.Dt.iloc[-l:]]
print('Training size: {}\nValidation size: {}'.format(ba.num_truncated_rows, l))
# In[ ]:
# Model hyperparameters (chosen with skopt's GP optimizer; see text above).
nhidden = 128
num_layers = 2
model = Rnn(P=rx.shape[-1], nhidden=nhidden, num_layers=num_layers, dropout=.05)
model.set_hidden(bcz)
optimizer = optim.Adam(model.parameters(), lr = 0.001)
model
# With these settings, the loss plot below shows that the validation score doesn't improve much after about 20 epochs
# In[ ]:
Image('plots/valid.png')
# In[ ]:
# Train for 25 epochs and time it.
st = time.perf_counter()
res, mvals = train_epochs(model=model, optimizer=optimizer, rng=(25, ), print_every=10, report_hook=report_hook)
tt = time.perf_counter() - st
print('\n\nTime: {:.2f}'.format(tt))
print('Acc: {:.2f}; Val: {:.3f}'.format(mse(res, ba.train_samples_y), mvals))
# ## Results...with uncertainty
#
# Although they use probabilistic activations within the cells, RNNs aren't usually chosen for quantifying the uncertainty of their predictions. Being a Bayesian, this makes me want to wash my hands, but it also makes RNNs a lot less useful for forecasting than other methods when you have no idea what the model's confidence is in its predictions.
#
# A [fascinating blog post by Yarin Gal](http://mlg.eng.cam.ac.uk/yarin/blog_3d801aa532c1ce.html), however, draws a connection between deep networks using dropout and Gaussian processes, with a simple formula to provide probabilistic uncertainty estimates for fully connected layers with dropout. While I'm not sure (in fact, quite doubtful) that the exact equations transfer to RNNs, I was curious about the results using dropout to simulate variation in the prediction.
# In[ ]:
# Split the validation data into a warm-up batch and the scored batch.
(x_warm, y_warm) = baval[0]
(x_val, y_val) = baval[1]
y_val = y_val.data.numpy().ravel()
x_warm = x_warm.unsqueeze(0)  # add a batch dimension
x_val = x_val.unsqueeze(0)
# In[ ]:
def eval_val(model, x_val):
    """One stochastic (dropout-active) prediction over the validation batch.

    Runs the warm-up batch first to set the hidden state; note `x_warm`
    is read from the enclosing scope.
    """
    model(x_warm)
    val_pred = model(x_val).data #.numpy().ravel()
    return val_pred
model.set_hidden(1)
# Here I get simulated results on the held out data
# In[ ]:
# 100 stochastic forward passes over the validation window (dropout on),
# concatenated into one (time x samples) array.
get_ipython().run_cell_magic('time', '', '# ressv = np.array([eval_val(model, x_val) for _ in range(100)])\nressv = T.cat([eval_val(model, x_val) for _ in range(100)], 1).numpy()')
# and calculate the mean and variance
# In[ ]:
mu = ressv.mean(axis=1)
var = ressv.var(axis=1)
# Precision term from Yarin Gal's MC-dropout formulation;
# NOTE(review): lparam=50 looks hand-tuned, and the formula's transfer from
# dense layers to RNNs is assumed rather than established — confirm.
lparam = 50
tau = lparam**2 * (1 - model.dropout) / (2 * l * .9)
var += tau**-1
# In[ ]:
# Plot actuals vs mean prediction with a +/- variance band.
plt.figure(figsize=(16, 10))
dates = xdf.Dt[-l:].values
datify = lambda x: Series(x, index=dates)
datify(y_val).plot()
datify(mu).plot()
plt.legend(['Y', 'Pred'])
lo = datify(mu - var)
hi = datify(mu + var)
plt.fill_between(dates, lo, hi, alpha=.35, edgecolor='none')
# ### Errors
#
# To my eye, it looks like the predictions track the held out values pretty nicely, though they look like they have something of a smoothing effect when the actual values jump around a lot. Looking at the residuals show that the error is higher when the pollen count jumps a lot from day to day:
# In[ ]:
# Residuals vs the day-over-day change in the actual count: errors are
# larger when the count jumps a lot between days.
resid = y_val - mu
diffy = y_val[1:] - y_val[:-1]
# diffpred = mu[1:] - mu[:-1]
plt.scatter(diffy, resid[1:], alpha=.3)
plt.xlabel('Daily difference')
plt.ylabel('Residual')
plt.text(-1, 1, 'Corr coef: {:.1%}'.format(np.corrcoef(diffy, resid[1:])[0][1]));
# Since the pollen counts are so highly correlated with the count of the previous day, I thought that weekends or other days with missing counts could be associated with the big jumps, which tend to have the highest error. I even included the number of skipped days in the model as `Day_diff`, but this seems to have almost no relation to the errors (see the left plot).
#
# My other thought was that the amount of uncertainty in the simulated results should also increase around these big changes, and therefore around the residuals. Sadly, the model didn't learn this, and the variance looks completely uncorrelated with the residuals (on the right).
# In[ ]:
# Left: residuals grouped by Day_diff (days skipped between counts).
# Right: residuals vs the MC-dropout variance — ideally correlated, but not here.
_, [ax1, ax2] = plt.subplots(1, 2, figsize=(16, 6))
sns.swarmplot(data=rxdf[-l:].assign(Resid=resid), x='Day_diff', y='Resid', ax=ax1)
# uncert_diff = (m9 - m10)[1:]
uncert_diff = var[1:]
ax2.scatter(uncert_diff, resid[1:], alpha=.3)
plt.xlabel('Daily difference')
plt.ylabel('Residual')
plt.text(.35, 1, 'Corr coef: {:.1%}'.format(np.corrcoef(uncert_diff, resid[1:])[0][1]));
# Overall I'm pretty happy with the RNN's ability to capture the dynamics of the weather and make pollen count predictions. Some improvements I would be interested in making would be to give the model a better idea of when to be less confident. Because of the suspected feature information redundancy, it would also be worthwhile to look into applying feature selection to the model. But all in all this looks like a good start to making a useful personalized pollen forecasting system.
|
d10genes/pollen
|
pollen2.py
|
Python
|
mit
| 21,073
|
[
"Gaussian"
] |
181be1499b46ca92cf7b9ba7c9af39a46627dfa7a01d938b580528df0f636bb2
|
from datetime import datetime, date
from django.contrib.auth.models import User
from django.forms import (
BooleanField,
CharField,
CheckboxInput,
ChoiceField,
EmailField,
FileField,
Form,
ModelForm,
Select,
SelectMultiple,
Textarea,
ValidationError,
)
from datetimewidget.widgets import DateWidget
from crispy_forms.helper import FormHelper
from crispy_forms.layout import Layout, Fieldset, ButtonHolder, Submit, HTML
from crispy_forms.bootstrap import PrependedText
from .models import *
# Current year, used to center the selectable year range for date widgets.
TODAY_YEAR = datetime.now().year
# Three years back through three years ahead of today.
SELECT_DATE_WIDGE_YEARS = [TODAY_YEAR + delta for delta in range(-3, 4)]
class GarlicForm(ModelForm):
    """Base form: persists user input client-side (Garlic.js) and adds
    staff-only switches to suppress notification emails."""

    not_send_email_field = BooleanField(
        label="Suppress email notification for this update to claimant?",
        initial=False,
        required=False,
        widget=CheckboxInput
    )
    not_copy_email_field = BooleanField(
        label="Suppress copy of email to staff?",
        initial=True,
        required=False,
        widget=CheckboxInput
    )

    def __init__(self, *args, **kwargs):
        # Staff callers may opt out of the notification email for this update.
        self.is_staff = kwargs.pop("is_staff", False)
        super(GarlicForm, self).__init__(*args, **kwargs)
        # crispy-forms helper; data_persist="garlic" keeps input across reloads.
        helper = FormHelper()
        helper.attrs = {'data_persist': "garlic"}
        self.helper = helper
class ClaimantForm(GarlicForm):
    """Form for a Claimant's personal, professional and social details.

    Fix: the GitHub profile prefix previously read 'https://gihub.com/'
    (typo); corrected to 'https://github.com/'.
    """
    class Meta:
        model = Claimant
        fields = [
            'forenames',
            'surname',
            'email',
            'phone',
            'gender',
            'home_country',
            'home_city',
            'career_stage_when_apply',
            'affiliation',
            'work_description',
            'institutional_website',
            'website',
            'website_feed',
            'orcid',
            'google_scholar',
            'github',
            'gitlab',
            'bitbucket',
            'twitter',
            'linkedin',
            'facebook',
        ]
    required_css_class = 'form-field-required'
    def __init__(self, *args, **kwargs):
        super(ClaimantForm, self).__init__(*args, **kwargs)
        # crispy layout: grouped sections with URL prefixes on the
        # social-network handles so users enter only the handle.
        self.helper.layout = Layout(
            Fieldset(
                '',
                HTML('<h2>Personal details</h2>'),
                'forenames',
                'surname',
                'email',
                'phone',
                'gender',
                'home_country',
                'home_city',
                HTML('<h2>Professional details</h2>'),
                'career_stage_when_apply',
                'affiliation',
                'work_description',
                HTML('<h2>Social Networks</h2>'),
                'institutional_website',
                'website',
                'website_feed',
                PrependedText(
                    'orcid',
                    'https://orcid.org/'
                ),
                PrependedText(
                    'google_scholar',
                    'https://scholar.google.co.uk/citations?user='
                ),
                PrependedText(
                    'github',
                    'https://github.com/'  # fixed: was 'https://gihub.com/'
                ),
                PrependedText(
                    'gitlab',
                    'https://gitlab.com/'
                ),
                PrependedText(
                    'bitbucket',
                    'https://bitbucket.org/'
                ),
                PrependedText(
                    'linkedin',
                    'https://www.linkedin.com/in/'
                ),
                PrependedText(
                    'twitter',
                    'https://twitter.com/'
                ),
                PrependedText(
                    'facebook',
                    'https://facebook.com/'
                ),
                ButtonHolder(
                    Submit('submit', 'Add')
                )
            )
        )
class FellowForm(GarlicForm):
    """Form for a Fellow's full profile, including photos, funding and
    website-facing information.

    Fix: the GitHub profile prefix previously read 'https://gihub.com/'
    (typo); corrected to 'https://github.com/'.
    """
    class Meta:
        model = Claimant
        fields = [
            'forenames',
            'surname',
            'email',
            'phone',
            'gender',
            'home_country',
            'home_city',
            'photo',
            'photo_work_description',
            'career_stage_when_apply',
            'job_title_when_apply',
            'research_area',
            'research_area_code',
            'affiliation',
            'department',
            'group',
            'funding',
            'funding_notes',
            'interests',
            'work_description',
            'institutional_website',
            'website',
            'website_feed',
            'orcid',
            'google_scholar',
            'github',
            'gitlab',
            'bitbucket',
            'twitter',
            'linkedin',
            'facebook',
        ]
        labels = {
            'home_country': "Country",
            'home_city': "City",
            'photo': "Photo (Thumbnail)",
            'photo_work_description': "Photo (Main)",
            'career_stage_when_apply': "Career Stage",
            'job_title_when_apply': "Job Title",
            'research_area_code': "Research Classification",
            'affiliation': "Home institution",
            'department': "Department",
            'group': "Group within Department",
            'funding': "Primary funding body/charity/organisation",
            'funding_notes': "Any additional funders",
            'work_description': "Short Biography",
        }
    required_css_class = 'form-field-required'
    def __init__(self, *args, **kwargs):
        super(FellowForm, self).__init__(*args, **kwargs)
        # crispy layout: grouped sections with URL prefixes on the
        # social-network handles so users enter only the handle.
        self.helper.layout = Layout(
            Fieldset(
                '',
                HTML('<h2>Personal details</h2>'),
                'forenames',
                'surname',
                'email',
                'phone',
                'gender',
                'home_country',
                'home_city',
                'photo_work_description',
                'photo',
                HTML('<h2>Professional details</h2>'),
                'career_stage_when_apply',
                'job_title_when_apply',
                'research_area',
                'research_area_code',
                'affiliation',
                'department',
                'group',
                'funding',
                'funding_notes',
                HTML('<h2>Information for the website</h2>'),
                'interests',
                'work_description',
                HTML('<h2>Social Networks</h2>'),
                'institutional_website',
                'website',
                'website_feed',
                PrependedText(
                    'orcid',
                    'https://orcid.org/'
                ),
                PrependedText(
                    'google_scholar',
                    'https://scholar.google.co.uk/citations?user='
                ),
                PrependedText(
                    'github',
                    'https://github.com/'  # fixed: was 'https://gihub.com/'
                ),
                PrependedText(
                    'gitlab',
                    'https://gitlab.com/'
                ),
                PrependedText(
                    'bitbucket',
                    'https://bitbucket.org/'
                ),
                PrependedText(
                    'linkedin',
                    'https://www.linkedin.com/in/'
                ),
                PrependedText(
                    'twitter',
                    'https://twitter.com/'
                ),
                PrependedText(
                    'facebook',
                    'https://facebook.com/'
                ),
                ButtonHolder(
                    Submit('submit', 'Add')
                )
            )
        )
class FundForm(GarlicForm):
    """Funding (expense) request form for logged-in claimants.

    Admin-managed fields are excluded; start/end dates are validated to
    be in the future and ordered; the total-budget box is a read-only
    display updated client-side by update_budget().
    """
    class Meta:
        model = Fund
        exclude = [ # pylint: disable=modelform-uses-exclude
            "success_reported",
            "status",
            "ad_status",
            "budget_approved",
            "required_blog_posts",
            "grant_heading",
            "grant",
            "notes_from_admin",
            "added",
            "approved",
            "updated",
            "approval_chain",
        ]
        labels = {
            'claimant': 'Requester name',
            'mandatory': 'Is this related with Fellows face to face selection meeting, Fellows inaugural meeting or Collaborations Workshop?',
            'title': 'Event title',
            'url': 'Event webpage link',
            'country': 'Country in which event is taking place',
            'city': 'City in which the event is taking place',
            'start_date': 'Start date of event',
            'end_date': 'End date of event',
            'budget_request_travel': "Travel costs (e.g. airfare or ground transportation)",
            'budget_request_attendance_fees': "Attendance fees (e.g. workshop / event registration costs)",
            'budget_request_subsistence_cost': "Subsistence costs (e.g. accommodation and meals)",
            'budget_request_venue_hire': "Venue hire",
            'budget_request_catering': "Catering",
            'budget_request_others': "Other costs",
            'success_targeted': "Successful outputs and outcomes",
            'can_be_included_in_calendar': "Can we include your participation in this event into the private Fellows calendar?",
            'can_be_advertise_before': "Can we public promote your involvement in this event before it takes place?",
            'can_be_advertise_after': "Can we public promote your involvement in this event after it takes place?"
        }
        widgets = {
            'claimant': Select(attrs={"class": "select-single-item"}),
            'category': Select(attrs={"class": "select-single-item"}),
            'focus': Select(attrs={"class": "select-single-item"}),
            'country': Select(attrs={"class": "select-single-item"}),
            'start_date': DateWidget(
                usel10n=True,
                bootstrap_version=3
            ),
            'end_date': DateWidget(
                usel10n=True,
                bootstrap_version=3
            ),
        }
    required_css_class = 'form-field-required'
    # Read-only display of the summed budget; filled in by JS, not saved.
    total_budget = CharField(required=False)
    def clean_start_date(self):
        """Reject start dates that are not strictly in the future."""
        if 'start_date' in self.cleaned_data:
            date_from_today = self.cleaned_data['start_date'] - date.today()
            if date_from_today.days <= 0:
                raise ValidationError('"Start date of event" must be in the future.')
        return self.cleaned_data['start_date']
    def clean_end_date(self):
        """Reject end dates in the past or before the start date."""
        if 'end_date' in self.cleaned_data:
            date_from_today = self.cleaned_data['end_date'] - date.today()
            if date_from_today.days <= 0:
                raise ValidationError('"End date of event" must be in the future.')
        if 'start_date' in self.cleaned_data and 'end_date' in self.cleaned_data:
            duration = self.cleaned_data['end_date'] - self.cleaned_data['start_date']
            if duration.days < 0:
                raise ValidationError('"End date of event" must be after "Start date of event".')
        return self.cleaned_data['end_date']
    def __init__(self, *args, **kwargs):
        super(FundForm, self).__init__(*args, **kwargs)
        self.helper.layout = Layout(
            Fieldset(
                '',
                HTML('<p>To apply for expenses for eligible events, please fill in this form at least one month before the start date of the event you wish to attend or organise.</p><h2>Requester details</h2>'),
                'claimant',
                HTML('<h2>Funding request details</h2>'),
                'category',
                'focus',
                'mandatory',
                'title',
                'url',
                'country',
                'city',
                'start_date',
                'end_date',
                HTML('<h2>Costs</h2><p>Please provide an estimate of your costs below. All values should be entered in GBP. See the terms and conditions for details (<a href="{{ terms_and_conditions_url }}">{{ terms_and_conditions_url }}</a>)</p><p>Please fill in all cost sections that are relevant to your event type.</p>'),
                PrependedText(
                    'budget_request_travel',
                    '£',
                    onblur="update_budget()",
                    min=0.00,
                    step=0.01
                ),
                PrependedText(
                    'budget_request_attendance_fees',
                    '£',
                    onblur="update_budget()",
                    min=0.00,
                    step=0.01
                ),
                PrependedText(
                    'budget_request_subsistence_cost',
                    '£',
                    onblur="update_budget()",
                    min=0.00,
                    step=0.01
                ),
                PrependedText(
                    'budget_request_venue_hire',
                    '£',
                    onblur="update_budget()",
                    min=0.00,
                    step=0.01
                ),
                PrependedText(
                    'budget_request_catering',
                    '£',
                    onblur="update_budget()",
                    min=0.00,
                    step=0.01
                ),
                PrependedText(
                    'budget_request_others',
                    '£',
                    onblur="update_budget()",
                    min=0.00,
                    step=0.01
                ),
                PrependedText(
                    'total_budget',
                    '£',
                    disabled=True,
                    value=0.00
                ),
                HTML('<h2>Justification for attending or organising the event</h2><p>When filling in the questions below please consider the following points:</p><ul><li>For attending conferences/workshops: will the conference focus on a significant field, will you meet significant researchers, will there be a focus on research software?</li><li>For organising workshops: how will the event help your domain, how will the event help the Institute, how will the event help you.</li><li>For policy related work: how might participation or organisation help the policy goals of the Institute, such as improving software and improved research (this can include people and tools perspectives).</li><li>For other: please state reasons - note it maybe good to discuss matter with the Institute Community Lead before filling the form to make sure the rationale is aligned to the Institute and to your own objectives.</li></ul>'),
                'justification',
                HTML('<p>Please specify what outputs (what maybe be produced) and outcomes (what change it could lead to) are likely to be produced from your participation in this event. These can include learning goals being met, collaborations, reports etc.</p>'),
                'success_targeted',
                'additional_info',
                HTML('<h2>Details of people being sponsored from your Fellowship funds</h2><p>If you are sponsoring others to take part in this event from your Fellowship funds please give their names and email addresses below, if you do not know their names at this stage please state whether there is sponsorship of others needed in this request. In either case please provide some justification.</p>'),
                'extra_sponsored',
                HTML('<h2>Publicity</h2>'),
                'can_be_included_in_calendar',
                'can_be_advertise_before',
                'can_be_advertise_after',
                # NOTE(review): for non-staff users this inserts None into the
                # Layout — presumably crispy-forms tolerates it, but confirm.
                'not_send_email_field' if self.is_staff else None,
                ButtonHolder(
                    Submit('submit', '{{ title }}')
                )
            )
        )
        # Force user to select one category
        self.fields['category'].widget.choices.insert(0, ('', '---------'))
        self.fields['category'].initial = ''
        # Force user to select one focus
        self.fields['focus'].widget.choices.insert(0, ('', '---------'))
        self.fields['focus'].initial = ''
class FundPublicForm(GarlicForm):
    """Public (not-logged-in) variant of the funding request form.

    Since there is no authenticated Claimant, the requester's contact
    details are collected as extra form fields; claimant-specific and
    publicity fields are excluded. Same date validation and client-side
    budget total as FundForm.
    """
    forenames = CharField(
        max_length=MAX_CHAR_LENGTH,
        required=True
    )
    surname = CharField(
        max_length=MAX_CHAR_LENGTH,
        required=True
    )
    email = EmailField(
        required=True
    )
    phone = CharField(
        max_length=MAX_CHAR_LENGTH,
        required=True,
        help_text="The number that we can contact you."
    )
    #gender = CharField(
    #    choices=GENDERS,
    #    max_length=1,
    #    default="R"
    #)
    #home_country = CountryField(
    #    required=True,
    #    default='GB' # Default for United Kingdom
    #)
    home_city = CharField(
        required=True,
        max_length=MAX_CHAR_LENGTH
    )
    affiliation = CharField( # Home institution
        max_length=MAX_CHAR_LENGTH,
        required=True,
    )
    department = CharField( # Department within home institution
        max_length=MAX_CHAR_LENGTH,
        required=True
    )
    class Meta:
        model = Fund
        exclude = [ # pylint: disable=modelform-uses-exclude
            'claimant',
            'mandatory',
            'additional_info',
            'extra_sponsored',
            'can_be_included_in_calendar',
            'can_be_advertise_before',
            'can_be_advertise_after',
            "status",
            "ad_status",
            "budget_approved",
            "required_blog_posts",
            "grant_heading",
            "grant",
            "notes_from_admin",
            "added",
            "approved",
            "updated",
            "approval_chain",
        ]
        labels = {
            'mandatory': 'Is this related with Fellows face to face selection meeting, Fellows inaugural meeting or Collaborations Workshop?',
            'title': 'Event title',
            'url': 'Event webpage link',
            'country': 'Country in which event is taking place',
            'city': 'City in which the event is taking place',
            'start_date': 'Start date of event',
            'end_date': 'End date of event',
            'budget_request_travel': "Travel costs (e.g. airfare or ground transportation)",
            'budget_request_attendance_fees': "Attendance fees (e.g. workshop / event registration costs)",
            'budget_request_subsistence_cost': "Subsistence costs (e.g. accommodation and meals)",
            'budget_request_venue_hire': "Venue hire",
            'budget_request_catering': "Catering",
            'budget_request_others': "Other costs",
            'success_targeted': "Successful outputs and outcomes",
            'can_be_included_in_calendar': "Can we include your participation in this event into the private Fellows calendar?",
            'can_be_advertise_before': "Can we public promote your involvement in this event before it takes place?",
            'can_be_advertise_after': "Can we public promote your involvement in this event after it takes place?"
        }
        widgets = {
            'claimant': Select(attrs={"class": "select-single-item"}),
            'category': Select(attrs={"class": "select-single-item"}),
            'focus': Select(attrs={"class": "select-single-item"}),
            'country': Select(attrs={"class": "select-single-item"}),
            'start_date': DateWidget(
                usel10n=True,
                bootstrap_version=3
            ),
            'end_date': DateWidget(
                usel10n=True,
                bootstrap_version=3
            ),
        }
    required_css_class = 'form-field-required'
    # Read-only display of the summed budget; filled in by JS, not saved.
    total_budget = CharField(required=False)
    def clean_start_date(self):
        """Reject start dates that are not strictly in the future."""
        if 'start_date' in self.cleaned_data:
            date_from_today = self.cleaned_data['start_date'] - date.today()
            if date_from_today.days <= 0:
                raise ValidationError('"Start date of event" must be in the future.')
        return self.cleaned_data['start_date']
    def clean_end_date(self):
        """Reject end dates in the past or before the start date."""
        if 'end_date' in self.cleaned_data:
            date_from_today = self.cleaned_data['end_date'] - date.today()
            if date_from_today.days <= 0:
                raise ValidationError('"End date of event" must be in the future.')
        if 'start_date' in self.cleaned_data and 'end_date' in self.cleaned_data:
            duration = self.cleaned_data['end_date'] - self.cleaned_data['start_date']
            if duration.days < 0:
                raise ValidationError('"End date of event" must be after "Start date of event".')
        return self.cleaned_data['end_date']
    def __init__(self, *args, **kwargs):
        super(FundPublicForm, self).__init__(*args, **kwargs)
        self.helper.layout = Layout(
            Fieldset(
                '',
                HTML('<h2>Your details</h2>'),
                'forenames',
                'surname',
                #'gender',
                'email',
                'phone',
                'home_city',
                #'home_country',
                'affiliation',
                'department',
                HTML('<h2>Funding request details</h2>'),
                'category',
                'focus',
                'title',
                'url',
                'country',
                'city',
                'start_date',
                'end_date',
                HTML('<h2>Costs</h2><p>Please provide an estimate of your costs below. All values should be entered in GBP. See the terms and conditions for details (<a href="{{ terms_and_conditions_url }}">{{ terms_and_conditions_url }}</a>)</p><p>Please fill in all cost sections that are relevant to your event type.</p>'),
                PrependedText(
                    'budget_request_travel',
                    '£',
                    onblur="update_budget()",
                    min=0.00,
                    step=0.01
                ),
                PrependedText(
                    'budget_request_attendance_fees',
                    '£',
                    onblur="update_budget()",
                    min=0.00,
                    step=0.01
                ),
                PrependedText(
                    'budget_request_subsistence_cost',
                    '£',
                    onblur="update_budget()",
                    min=0.00,
                    step=0.01
                ),
                PrependedText(
                    'budget_request_venue_hire',
                    '£',
                    onblur="update_budget()",
                    min=0.00,
                    step=0.01
                ),
                PrependedText(
                    'budget_request_catering',
                    '£',
                    onblur="update_budget()",
                    min=0.00,
                    step=0.01
                ),
                PrependedText(
                    'budget_request_others',
                    '£',
                    onblur="update_budget()",
                    min=0.00,
                    step=0.01
                ),
                PrependedText(
                    'total_budget',
                    '£',
                    disabled=True,
                    value=0.00
                ),
                HTML('<h2>Justification for attending or organising the event</h2><p>When filling in the questions below please consider the following points:</p><ul><li>For attending conferences/workshops: will the conference focus on a significant field, will you meet significant researchers, will there be a focus on research software?</li><li>For organising workshops: how will the event help your domain, how will the event help the Institute, how will the event help you.</li><li>For policy related work: how might participation or organisation help the policy goals of the Institute, such as improving software and improved research (this can include people and tools perspectives).</li><li>For other: please state reasons - note it maybe good to discuss matter with the Institute Community Lead before filling the form to make sure the rationale is aligned to the Institute and to your own objectives.</li></ul>'),
                'justification',
                'success_targeted',
                # NOTE(review): for non-staff users this inserts None into the
                # Layout — presumably crispy-forms tolerates it, but confirm.
                'not_send_email_field' if self.is_staff else None,
                ButtonHolder(
                    Submit('submit', '{{ title }}')
                )
            )
        )
        # Force user to select one category
        self.fields['category'].widget.choices.insert(0, ('', '---------'))
        self.fields['category'].initial = ''
        # Force user to select one focus
        self.fields['focus'].widget.choices.insert(0, ('', '---------'))
        self.fields['focus'].initial = ''
class FundGDPRForm(GarlicForm):
    """Form collecting a claimant's GDPR publicity consents for a Fund.

    Exposes only the three boolean consent flags; the crispy-forms layout
    groups them under a "GDPR > Publicity" heading.
    """

    class Meta:
        model = Fund
        fields = [
            'can_be_included_in_calendar',
            'can_be_advertise_before',
            'can_be_advertise_after',
        ]

        labels = {
            'can_be_included_in_calendar': "Can we include your participation in this event into the Fellows calendar?",
            # Fixed grammar in the user-facing labels: "public promote" -> "publicly promote".
            'can_be_advertise_before': "Can we publicly promote your involvement in this event before it takes place?",
            'can_be_advertise_after': "Can we publicly promote your involvement in this event after it takes place?"
        }

    required_css_class = 'form-field-required'

    def __init__(self, *args, **kwargs):
        super(FundGDPRForm, self).__init__(*args, **kwargs)

        # Consents are rendered in publicity-first order, then the calendar flag.
        self.helper.layout = Layout(
            Fieldset(
                '',
                HTML('<h2>GDPR</h2>'),
                HTML('<h3>Publicity</h3>'),
                'can_be_advertise_before',
                'can_be_advertise_after',
                'can_be_included_in_calendar',
                ButtonHolder(
                    Submit('submit', '{{ title }}')
                )
            )
        )
class FundReviewForm(GarlicForm):
    """Staff-facing review form for a funding request (Fund).

    Exposes the administrative decision fields (status, categorisation,
    grant bookkeeping, approved budget, admin notes) plus a free-text
    ``email`` field. NOTE(review): the ``email`` text is presumably included
    in the notification sent to the claimant on save — confirm in the view.
    """
    class Meta:
        model = Fund
        fields = [
            "status",
            #"ad_status",  # TODO uncomment in the future
            "category",
            "focus",
            "mandatory",
            "grant_heading",
            "grant",
            "activity",
            "required_blog_posts",
            "budget_approved",
            "notes_from_admin",
        ]

        labels = {
            "mandatory": "Is this a mandatory event?",
            "grant_heading": "Default Grant Heading",
            "grant": "Default Grant",
            "activity": "Activities tag",
            'budget_approved': 'Total budget approved',
        }

    required_css_class = 'form-field-required'

    # Extra non-model field: free-text message typed by the reviewer.
    email = CharField(widget=Textarea, required=False)

    def __init__(self, *args, **kwargs):
        super(FundReviewForm, self).__init__(*args, **kwargs)

        self.helper.layout = Layout(
            Fieldset(
                '',
                "status",
                "category",
                "focus",
                "mandatory",
                "grant",
                "grant_heading",
                "activity",
                HTML("""Visit <a href="/pages/guide/activities-tag/">Activities Tag Taxonomy</a> for a description about the tags."""),
                "required_blog_posts",
                # Currency input: '£' prefix, non-negative, two decimal places,
                # normalised to 2 d.p. on blur.
                PrependedText(
                    "budget_approved",
                    '£',
                    min=0.00,
                    step=0.01,
                    onblur="this.value = parseFloat(this.value).toFixed(2);"
                ),
                "notes_from_admin",
                "email",
                # Staff-only toggles; None entries are skipped by crispy-forms.
                'not_send_email_field' if self.is_staff else None,
                'not_copy_email_field' if self.is_staff else None,
            )
        )

        self.helper.add_input(Submit('submit', 'Submit'))
class FundImportForm(Form):
    """Plain (non-model) form for bulk-importing funding requests from a CSV.

    The only field is the uploaded ``csv`` file; the expected column layout
    is documented inline in the rendered HTML.
    """
    required_css_class = 'form-field-required'

    # Uploaded CSV file with one funding request per row.
    csv = FileField()

    def __init__(self, *args, **kwargs):
        super(FundImportForm, self).__init__(*args, **kwargs)

        # Plain Form has no GarlicForm helper, so build one here with the
        # same garlic.js persistence attribute used elsewhere.
        self.helper = FormHelper()
        self.helper.attrs = {
            'data_persist': "garlic",
        }
        self.helper.layout = Layout(
            Fieldset(
                '',
                HTML("""Your CSV <strong>must</strong> have the following columns:
<ul>
<li>Forename(s)</li>
<li>Surname</li>
<li>Event type</li>
<li>Event title</li>
<li>Event website</li>
<li>Event Country</li>
<li>Event City</li>
<li>Start date</li>
<li>End date</li>
<li>Travel costs</li>
<li>Conference/Workshop attendance fees</li>
<li>Subsistence costs</li>
<li>Venue hire</li>
<li>Catering</li>
<li>Travel and subsistence cost for those being paid to attend your organised event</li>
<li>Other costs</li>
<li>How is the event relevant to the work of the Software Sustainability Institute?</li>
<li>Any other information relevant to this application?</li>
<li>Estimate</li>
<li>Submitted</li>
<li>Revised estimate</li>
<li>Approved</li>
</ul>
<p class="text-danger">You will not have access to debug information!</p>"""),
                'csv',
                ButtonHolder(
                    Submit('submit', '{{ title }}')
                )
            )
        )
class ExpenseForm(GarlicForm):
    """Claimant-facing form to submit an expense claim against an approved Fund.

    The ``fund`` choices are restricted either to the single fund passed via
    ``initial`` or to all funds in an approved status.
    """

    class Meta:
        model = Expense
        fields = [
            'fund',
            'claim',
            'amount_claimed',
            'justification_for_extra',
            'invoice',
            'final',
            'advance_booking',
            'recipient_fullname',
            'recipient_email',
            'recipient_affiliation',
            'recipient_group',
            'recipient_connection',
        ]

        labels = {
            'fund': 'Choose approved funding request',
            'claim': 'PDF copy of claim and receipt(s)',
            'justification_for_extra': "If the claim is greater by 20% than the amount requested please provide justification",
            'invoice': "Do you need to claim this expense via an invoice from your institution or company?",
            'final': "Is this the final expense claim associated with this funding request?",
            'recipient_fullname': "Full name",
            'recipient_email': "E-mail",
            'recipient_affiliation': "Affiliation",
            'recipient_group': "Group",
            # Fixed grammar: "Reason for submit" -> "Reason for submitting".
            'recipient_connection': "Reason for submitting the recipient claim",
        }

        widgets = {
            'fund': Select(attrs={"class": "select-single-item"}),
        }

    required_css_class = 'form-field-required'

    def __init__(self, *args, **kwargs):
        super(ExpenseForm, self).__init__(*args, **kwargs)

        self.helper.layout = Layout(
            Fieldset(
                '',
                'fund',
                # Fixed broken markup: both snippets opened with a stray
                # closing </p> tag instead of an opening <p>.
                HTML("<p>If your funding request isn't on the drop down menu above please email <a href='mailto:{{ config.FELLOWS_MANAGEMENT_EMAIL }}'>us</a>.</p>"),
                HTML("<p><a href='{{ terms_and_conditions_url }}'>Fellowship Programme's terms and conditions</a> applies to your request. Please follow the guidelines at <a href='{{ terms_and_conditions_url }}#how-to-apply-for-and-claim-expenses'>How to apply for, and claim, expenses</a> section of <a href='{{ terms_and_conditions_url }}'>Fellowship Programme's terms and conditions.</a></p>"),
                'claim',
                # Currency input: '£' prefix, non-negative, normalised to 2 d.p.
                PrependedText(
                    'amount_claimed',
                    '£',
                    min=0.00,
                    step=0.01,
                    onblur="this.value = parseFloat(this.value).toFixed(2);"
                ),
                HTML("{% if fund %}<p class='text-warning'>Note that you only have <strong>£{{ fund.expenses_claimed_left }}</strong> left.</p>{% endif %}"),
                'justification_for_extra',
                'invoice',
                'final',
                # Staff-only field; None entries are skipped by crispy-forms.
                'advance_booking' if self.is_staff else None,
                HTML("<h2>Recipient</h2><p>Only fill this part if you are claiming this expense on behalf of someone.</p>"),
                'recipient_fullname',
                'recipient_email',
                'recipient_affiliation',
                'recipient_group',
                'recipient_connection',
                'not_send_email_field' if self.is_staff else None,
                ButtonHolder(
                    Submit('submit', '{{ title }}')
                )
            )
        )

        # Narrow the selectable funds: a preselected fund wins; otherwise any
        # fund in an approved status may be claimed against.
        if "initial" in kwargs and "fund" in kwargs["initial"]:
            self.fields['fund'].queryset = Fund.objects.filter(id=kwargs["initial"]["fund"].id)
        else:
            self.fields['fund'].queryset = Fund.objects.filter(status__in=FUND_STATUS_APPROVED_SET)
class ExpenseShortlistedForm(GarlicForm):
    """Reduced expense-claim form (no invoice/recipient section).

    Same fund/claim/amount/justification subset as ExpenseForm, with funds
    restricted to approved ones.
    """

    class Meta:
        model = Expense
        fields = [
            'fund',
            'claim',
            'amount_claimed',
            'justification_for_extra',
        ]

        labels = {
            'fund': 'Choose approved funding request',
            'claim': 'PDF copy of claim and receipt(s)',
            'justification_for_extra': "If the claim is greater by 20% than the amount requested please provide justification",
        }

        widgets = {
            'fund': Select(attrs={"class": "select-single-item"}),
        }

    required_css_class = 'form-field-required'

    def __init__(self, *args, **kwargs):
        super(ExpenseShortlistedForm, self).__init__(*args, **kwargs)

        self.helper.layout = Layout(
            Fieldset(
                '',
                'fund',
                # Fixed broken markup: snippet opened with a stray closing
                # </p> tag instead of an opening <p>.
                HTML("<p>If your funding request isn't on the drop down menu above please email <a href='mailto:{{ config.FELLOWS_MANAGEMENT_EMAIL }}'>us</a>.</p>"),
                'claim',
                # Currency input: '£' prefix, non-negative, normalised to 2 d.p.
                PrependedText(
                    'amount_claimed',
                    '£',
                    min=0.00,
                    step=0.01,
                    onblur="this.value = parseFloat(this.value).toFixed(2);"
                ),
                'justification_for_extra',
                'not_send_email_field' if self.is_staff else None,
                ButtonHolder(
                    Submit('submit', '{{ title }}')
                )
            )
        )

        # Only funds in an approved status can be claimed against.
        self.fields['fund'].queryset = Fund.objects.filter(status__in=FUND_STATUS_APPROVED_SET)
class ExpenseReviewForm(GarlicForm):
    """Staff-facing review form for an expense claim.

    Covers the administrative lifecycle fields (status, authorisation and
    finance dates, authorised amount, grant bookkeeping, admin notes) plus a
    free-text ``email`` field. NOTE(review): ``email`` is presumably sent to
    the claimant on save — confirm in the view.
    """
    class Meta:
        model = Expense
        fields = [
            'status',
            'final',
            'asked_for_authorization_date',
            'send_to_finance_date',
            'amount_authorized_for_payment',
            'grant_heading',
            'grant',
            'notes_from_admin',
        ]

        # Localised Bootstrap 3 date pickers for the two workflow dates.
        widgets = {
            'asked_for_authorization_date': DateWidget(
                usel10n=True,
                bootstrap_version=3
            ),
            'send_to_finance_date': DateWidget(
                usel10n=True,
                bootstrap_version=3
            ),
        }

    required_css_class = 'form-field-required'

    # Extra non-model field: free-text message typed by the reviewer.
    email = CharField(widget=Textarea, required=False)

    def __init__(self, *args, **kwargs):
        super(ExpenseReviewForm, self).__init__(*args, **kwargs)

        self.helper.layout = Layout(
            Fieldset(
                '',
                'status',
                'final',
                'asked_for_authorization_date',
                'send_to_finance_date',
                # Currency input: '£' prefix, non-negative, normalised to 2 d.p.
                PrependedText(
                    'amount_authorized_for_payment',
                    '£',
                    min=0.00,
                    step=0.01,
                    onblur="this.value = parseFloat(this.value).toFixed(2);"
                ),
                'grant',
                'grant_heading',
                'notes_from_admin',
                'email',
                # Staff-only toggles; None entries are skipped by crispy-forms.
                'not_send_email_field' if self.is_staff else None,
                'not_copy_email_field' if self.is_staff else None,
                ButtonHolder(
                    Submit('submit', 'Update')
                )
            )
        )
class BlogForm(GarlicForm):
    """Form to submit a blog post draft associated with a funding request."""

    # Extra non-model field shown alongside the draft URL.
    success_reported = CharField(
        widget=Textarea,
        required=False,
        initial="",
        label="What outputs were produced and which outcomes were achieved by your participation in the event."
    )

    class Meta:
        model = Blog
        fields = [
            'fund',
            'coauthor',
            'draft_url',
            'final',
            'notes_from_author',
        ]

        labels = {
            'fund': 'Open approved funding request',
            'coauthor': 'Co-author',
            'draft_url': 'URL of blog post draft',
            'final': "Is this the final blog post draft associated with this funding request?",
            'notes_from_author': "Notes"
        }

        widgets = {
            'fund': Select(attrs={"class": "select-single-item"}),
            'coauthor': SelectMultiple(attrs={"class": "select-many-item"}),
        }

    required_css_class = 'form-field-required'

    # workaround for "no such table: lowfat_claimant"
    # Evaluated at import time; the bare except keeps migrations/collectstatic
    # working before the table exists.
    try:
        author_choices = [(this_claimant.id, this_claimant) for this_claimant in Claimant.objects.all()]
    except:  # pylint: disable=bare-except
        author_choices = []

    # Staff-only: pick the main author on someone's behalf.
    author = ChoiceField(
        widget=Select(attrs={"class": "select-single-item"}),
        required=False,
        choices=author_choices,
        label='Main author of draft'
    )

    def __init__(self, *args, user=None, **kwargs):
        super(BlogForm, self).__init__(*args, **kwargs)

        self.helper.layout = Layout(
            Fieldset(
                '',
                'fund',
                'final',
                # None entries are skipped by crispy-forms.
                'author' if self.is_staff else None,
                'coauthor',
                HTML("<p>We prefer to receive links to <a href='https://www.google.co.uk/docs/about/'>Google Docs</a> (tips <a href='/pages/guide/google-docs/'>here</a>), <a href='https://products.office.com/en-gb/office-365-home'>Microsoft Office 365 document</a> or any other online live collaborative document platform you like to use. Posts published somewhere already, e.g. your personal blog, are welcome as well.</p>"),
                'draft_url',
                'success_reported',
                'notes_from_author',
                'not_send_email_field' if self.is_staff else None,
                ButtonHolder(
                    Submit('submit', '{{ title }}')
                )
            )
        )

        if "initial" in kwargs and "fund" in kwargs["initial"]:
            self.fields['fund'].queryset = Fund.objects.filter(id=kwargs["initial"]["fund"].id)
        else:
            self.fields['fund'].queryset = Fund.objects.filter(status__in=FUND_STATUS_APPROVED_SET)

        # NOTE(review): this branch re-applies the broad approved-funds
        # queryset (overriding the single-fund narrowing above) and never
        # uses `user` in the filter — looks like it was meant to restrict
        # funds to the given user; confirm intended behaviour.
        if user:
            self.fields['fund'].queryset = Fund.objects.filter(status__in=FUND_STATUS_APPROVED_SET)

        if self.is_staff:
            # Force staff to select one author
            self.fields['author'].widget.choices.insert(0, ('', '---------'))
            self.fields['author'].initial = ''
class BlogReviewForm(GarlicForm):
    """Staff-facing review form for a submitted blog post draft."""

    class Meta:
        model = Blog
        # All model fields except the author/fund provenance and timestamps.
        exclude = [  # pylint: disable=modelform-uses-exclude
            "fund",
            "author",
            "coauthor",
            "notes_from_author",
            "added",
            "updated",
        ]

    required_css_class = 'form-field-required'

    # Extra non-model field: free-text message typed by the reviewer.
    email = CharField(widget=Textarea, required=False)

    def __init__(self, *args, **kwargs):
        super(BlogReviewForm, self).__init__(*args, **kwargs)

        self.helper.layout = Layout(
            Fieldset(
                '',
                'draft_url',
                'final',
                'status',
                'reviewer',
                'notes_from_admin',
                'published_url',
                'title',
                'tweet_url',
                'email',
                # Staff-only toggles; None entries are skipped by crispy-forms.
                'not_send_email_field' if self.is_staff else None,
                'not_copy_email_field' if self.is_staff else None,
                ButtonHolder(
                    Submit('submit', 'Update')
                )
            )
        )

        # Only staff users can be assigned as reviewers.
        self.fields['reviewer'].queryset = User.objects.filter(is_staff=True)
|
softwaresaved/fat
|
lowfat/forms.py
|
Python
|
bsd-3-clause
| 40,592
|
[
"VisIt"
] |
6ad8f285381852d21f1966492b1c28ad941e8f81651fb43f3f0ce072bb51ceb2
|
########################################################################
# File: Operation.py
# Date: 2012/07/24 12:12:05
########################################################################
"""
:mod: Operation
.. module: Operation
:synopsis: Operation implementation
Operation implementation
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
# Disable invalid names warning
# pylint: disable=invalid-name
__RCSID__ = "$Id$"
import datetime
import json
import six
# # from DIRAC
from DIRAC import S_OK, S_ERROR
from DIRAC.RequestManagementSystem.Client.File import File
from DIRAC.RequestManagementSystem.private.JSONUtils import RMSEncoder
########################################################################
class Operation(object):
    """
    :param int OperationID: OperationID as read from DB backend
    :param int RequestID: parent RequestID
    :param str Status: execution status
    :param str Type: operation to perform
    :param str Arguments: additional arguments
    :param str SourceSE: source SE name
    :param str TargetSE: target SE names as comma separated list
    :param str Catalog: catalog to use as comma separated list
    :param str Error: error string if any
    :param Request.Request parent: parent Request instance

    It is managed by SQLAlchemy, so the RequestID, OperationID should never be set by hand
    (except when constructed from JSON of course...)
    In principle, the _parent attribute could be totally managed by SQLAlchemy. However, it is
    set only when inserted into the DB, this is why I manually set it in the Request _notify
    """

    # # max files in a single operation
    MAX_FILES = 10000

    # # all states
    ALL_STATES = ("Queued", "Waiting", "Scheduled", "Assigned", "Failed", "Done", "Canceled")

    # # final states
    FINAL_STATES = ("Failed", "Done", "Canceled")

    # # valid attributes (the set serialized by _getJSONData)
    ATTRIBUTE_NAMES = ['OperationID', 'RequestID', "Type", "Status", "Arguments",
                       "Order", "SourceSE", "TargetSE", "Catalog", "Error",
                       "CreationTime", "SubmitTime", "LastUpdate"]

    # Format used to (de)serialize the datetime attributes.
    _datetimeFormat = '%Y-%m-%d %H:%M:%S'

    def __init__(self, fromDict=None):
        """ c'tor

        :param self: self reference
        :param dict fromDict: attributes dictionary (or its JSON string form)
        """
        self._parent = None

        # All three timestamps start as "now" (UTC, second precision).
        now = datetime.datetime.utcnow().replace(microsecond=0)
        self._SubmitTime = now
        self._LastUpdate = now
        self._CreationTime = now

        self._Status = "Queued"
        self._Order = 0
        self.__files__ = []

        self.TargetSE = None
        self.SourceSE = None
        # Assign the backing attribute directly: the Arguments property
        # setter rejects non-bytes values such as None.
        self._Arguments = None
        self.Error = None
        self.Type = None
        self._Catalog = None

        # Accept either a JSON string or a dict; anything else means "empty".
        if isinstance(fromDict, six.string_types):
            fromDict = json.loads(fromDict)
        elif not isinstance(fromDict, dict):
            fromDict = {}

        # Sub-files are rebuilt first so that _notify (triggered by addFile)
        # sees them before the Status attribute is applied below.
        if "Files" in fromDict:
            for fileDict in fromDict.get("Files", []):
                self.addFile(File(fileDict))

            del fromDict["Files"]

        for key, value in fromDict.items():
            # The JSON module forces the use of UTF-8, which is not properly
            # taken into account in DIRAC.
            # One would need to replace all the '== str' with 'in six.string_types'
            # This is converting `unicode` to `str` and doesn't make sense in Python 3
            if six.PY2 and isinstance(value, six.string_types):
                value = value.encode()
            # Falsy values (None, '', 0) are skipped and keep their defaults.
            if value:
                setattr(self, key, value)

    # # protected methods for parent only
    def _notify(self):
        """ notify self about file status change

        Derives the Operation status from its files' statuses, in priority
        order: all-Failed -> Failed; any Scheduled -> Scheduled;
        any Waiting -> Queued; any Failed -> Failed; otherwise -> Done
        (and the Error string is cleared).
        """
        fStatus = set(self.fileStatusList())
        if fStatus == set(['Failed']):
            # All files Failed -> Failed
            newStatus = 'Failed'
        elif 'Scheduled' in fStatus:
            newStatus = 'Scheduled'
        elif "Waiting" in fStatus:
            newStatus = 'Queued'
        elif 'Failed' in fStatus:
            newStatus = 'Failed'
        else:
            self.Error = ''
            newStatus = 'Done'

        # If the status moved to Failed or Done, update the lastUpdate time
        if newStatus in ('Failed', 'Done', 'Scheduled'):
            if self._Status != newStatus:
                self._LastUpdate = datetime.datetime.utcnow().replace(microsecond=0)
        self._Status = newStatus

        # Propagate the change up to the parent Request, if attached.
        if self._parent:
            self._parent._notify()

    def _setQueued(self, caller):
        """ don't touch (only the parent Request may force Queued) """
        if caller == self._parent:
            self._Status = "Queued"

    def _setWaiting(self, caller):
        """ don't touch as well (only the parent Request may force Waiting) """
        if caller == self._parent:
            self._Status = "Waiting"

    # # Files arithmetics
    def __contains__(self, opFile):
        """ in operator """
        return opFile in self.__files__

    def __iadd__(self, opFile):
        """ += operator (alias for addFile) """
        if len(self) >= Operation.MAX_FILES:
            raise RuntimeError("too many Files in a single Operation")
        self.addFile(opFile)
        return self

    def addFile(self, opFile):
        """ add :opFile: to operation

        :raises RuntimeError: when the MAX_FILES limit is reached

        .. warning::
            You cannot add a File object that has already been added to another operation. They must be different objects
        """
        if len(self) >= Operation.MAX_FILES:
            raise RuntimeError("too many Files in a single Operation")
        if opFile not in self:
            self.__files__.append(opFile)
            opFile._parent = self
        # Re-derive the Operation status from the (new) file set.
        self._notify()

    # # helpers for looping
    def __iter__(self):
        """ files iterator """
        return self.__files__.__iter__()

    def __getitem__(self, i):
        """ [] op for opFiles """
        return self.__files__.__getitem__(i)

    def __delitem__(self, i):
        """ remove file from op, only if OperationID is NOT set """
        self.__files__.__delitem__(i)
        self._notify()

    def __setitem__(self, i, opFile):
        """ overwrite opFile """
        self.__files__.__setitem__(i, opFile)
        opFile._parent = self
        self._notify()

    def fileStatusList(self):
        """ get list of files statuses """
        return [subFile.Status for subFile in self]

    def __bool__(self):
        """ for comparisons

        Always truthy, even with zero files (so `if op:` doesn't call __len__).
        """
        return True

    # For Python 2 compatibility
    __nonzero__ = __bool__

    def __len__(self):
        """ nb of subFiles """
        return len(self.__files__)

    @property
    def sourceSEList(self):
        """ helper property returning source SEs as a list"""
        return self.SourceSE.split(",") if self.SourceSE else ['']

    @property
    def targetSEList(self):
        """ helper property returning target SEs as a list"""
        return self.TargetSE.split(",") if self.TargetSE else ['']

    @property
    def Arguments(self):
        # Raw bytes blob of extra arguments (may be None if never set).
        return self._Arguments

    @Arguments.setter
    def Arguments(self, value):
        # Text is encoded to bytes; anything else non-bytes is rejected.
        if isinstance(value, six.text_type):
            value = value.encode()
        if not isinstance(value, bytes):
            raise TypeError("Arguments should be bytes!")
        self._Arguments = value

    @property
    def Catalog(self):
        """ catalog prop """
        return self._Catalog

    @Catalog.setter
    def Catalog(self, value):
        """ catalog setter

        Accepts a comma-separated string or a list; stores a deduplicated,
        stripped, comma-joined string (max 255 chars).
        """
        if not isinstance(value, six.string_types + (list,)):
            raise TypeError("wrong type for value")

        if isinstance(value, six.string_types):
            value = value.split(',')

        # Deduplicate and drop empty entries; note the resulting order is
        # unspecified (set iteration order).
        value = ",".join(list(set([str(item).strip() for item in value if str(item).strip()])))

        if len(value) > 255:
            raise ValueError("Catalog list too long")
        self._Catalog = value if value else ""

    @property
    def catalogList(self):
        """ helper property returning catalogs as list """
        return self._Catalog.split(",") if self._Catalog else []

    @property
    def Status(self):
        """ Status prop """
        return self._Status

    @Status.setter
    def Status(self, value):
        """ Status setter

        With files attached, the requested value is ignored and the status is
        re-derived from the files via _notify; otherwise the value is applied
        directly (with LastUpdate bumped on transitions to Failed/Done).
        """
        if value not in Operation.ALL_STATES:
            raise ValueError("unknown Status '%s'" % str(value))

        if self.__files__:
            self._notify()
        else:
            # If the status moved to Failed or Done, update the lastUpdate time
            if value in ('Failed', 'Done'):
                if self._Status != value:
                    self._LastUpdate = datetime.datetime.utcnow().replace(microsecond=0)
            self._Status = value
            if self._parent:
                self._parent._notify()
        if self._Status == 'Done':
            self.Error = ''

    @property
    def Order(self):
        """ order prop (position of this operation within its parent Request) """
        if self._parent:
            self._Order = self._parent.indexOf(self) if self._parent else -1
        return self._Order

    @Order.setter
    def Order(self, value):
        """ order prop """
        self._Order = value

    @property
    def CreationTime(self):
        """ operation creation time prop """
        return self._CreationTime

    @CreationTime.setter
    def CreationTime(self, value=None):
        """ creation time setter (datetime or '%Y-%m-%d %H:%M:%S' string) """
        if not isinstance(value, (datetime.datetime,) + six.string_types):
            raise TypeError("CreationTime should be a datetime.datetime!")
        if isinstance(value, six.string_types):
            # Fractional seconds, if present, are dropped before parsing.
            value = datetime.datetime.strptime(value.split(".")[0], self._datetimeFormat)
        self._CreationTime = value

    @property
    def SubmitTime(self):
        """ subrequest's submit time prop """
        return self._SubmitTime

    @SubmitTime.setter
    def SubmitTime(self, value=None):
        """ submit time setter (datetime or '%Y-%m-%d %H:%M:%S' string) """
        if not isinstance(value, (datetime.datetime,) + six.string_types):
            raise TypeError("SubmitTime should be a datetime.datetime!")
        if isinstance(value, six.string_types):
            value = datetime.datetime.strptime(value.split(".")[0], self._datetimeFormat)
        self._SubmitTime = value

    @property
    def LastUpdate(self):
        """ last update prop """
        return self._LastUpdate

    @LastUpdate.setter
    def LastUpdate(self, value=None):
        """ last update setter; also propagates the value to the parent Request """
        if not isinstance(value, (datetime.datetime,) + six.string_types):
            raise TypeError("LastUpdate should be a datetime.datetime!")
        if isinstance(value, six.string_types):
            value = datetime.datetime.strptime(value.split(".")[0], self._datetimeFormat)
        self._LastUpdate = value
        if self._parent:
            self._parent.LastUpdate = value

    def __str__(self):
        """ str operator

        NOTE(review): toJSON returns an S_OK/S_ERROR structure; on
        serialization failure this raises KeyError('Value') — confirm callers
        tolerate that.
        """
        return self.toJSON()['Value']

    def toJSON(self):
        """ Returns the JSON description string of the Operation """
        try:
            jsonStr = json.dumps(self, cls=RMSEncoder)
            return S_OK(jsonStr)
        except Exception as e:
            return S_ERROR(str(e))

    def _getJSONData(self):
        """ Returns the data that have to be serialized by JSON """
        jsonData = {}

        for attrName in Operation.ATTRIBUTE_NAMES:

            # RequestID and OperationID might not be set since they are managed by SQLAlchemy
            if not hasattr(self, attrName):
                continue

            value = getattr(self, attrName)

            if isinstance(value, datetime.datetime):
                # We convert date time to a string
                jsonData[attrName] = value.strftime(self._datetimeFormat)  # pylint: disable=no-member
            else:
                jsonData[attrName] = value

        jsonData['Files'] = self.__files__

        return jsonData
|
yujikato/DIRAC
|
src/DIRAC/RequestManagementSystem/Client/Operation.py
|
Python
|
gpl-3.0
| 10,783
|
[
"DIRAC"
] |
9f34204433c62d5696027f0b122590aea904425cdccee9157018a00f2ca11e7d
|
from math import sqrt
import numpy as np
from ase.atoms import Atoms
from ase.units import Bohr, Hartree
from ase.dft.stm import STM
from ase.io.cube import write_cube
from ase.io.plt import write_plt
import gpaw.mpi as mpi
from gpaw.mpi import MASTER
from gpaw.io.plt import read_plt
from gpaw.grid_descriptor import GridDescriptor
class SimpleStm:
    """Simple STM object to simulate STM pictures.

    The simulation uses either a single pseudo-wavefunction (PWF)
    or the PWFs inside the given bias range.

    NOTE: this module is written for Python 2 (``print >>``, ``long``).
    """
    def __init__(self, atoms):
        self.file = None
        self.is_wf = False
        self.bias = None
        self.ldos = None
        self.heights = None

        if isinstance(atoms, str):
            # File name given: read a previously written 3D density.
            self.read_3D(atoms)
            self.calc = None
        else:
            if isinstance(atoms, Atoms):
                self.calc = atoms.get_calculator()
            else:
                self.calc = atoms
            self.calc.converge_wave_functions()

            self.gd = self.calc.wfs.gd

        # Non-periodic directions do not store the first grid point.
        self.offset_c = [int(not a) for a in self.gd.pbc_c]

    def calculate_ldos(self, bias):
        """bias is the n, k, s list/tuple or an energy bias/window."""
        if self.calc is None:
            return

        self.bias = bias
        self.ldos = self.gd.zeros()

        if hasattr(bias, '__len__') and len(bias) == 3:
            self.is_wf = True
            n, k, s = bias
            # only a single wf requested
            u = self.calc.get_myu(k, s)
            if u is not None:
                self.add_wf_to_ldos(n, u, weight=1)
        else:
            # energy bias
            # Bug fix: is_wf must be False here, otherwise write() tries to
            # format the (float) bias as three integers.
            self.is_wf = False
            try:
                if self.calc.occupations.fixmagmom is True:
                    efermi_s = self.calc.get_fermi_levels()
                else:
                    efermi_s = np.array([self.calc.get_fermi_level()] * 2)
            except:
                # Best-effort fallback for calculators without a Fermi level.
                efermi_s = np.array([self.calc.get_homo_lumo().mean()] * 2)

            if isinstance(bias, (int, long, float)):
                # bias given
                if bias > 0:
                    # positive bias = negative tip
                    # -> probe unoccupied states
                    emin_s = efermi_s
                    emax_s = efermi_s + bias
                    occupied = False
                else:
                    # negative bias = positive tip
                    # -> probe occupied states
                    emin_s = efermi_s + bias
                    emax_s = efermi_s
                    occupied = True
            else:
                # emin and emax given
                emin, emax = bias
                if abs(emin) > abs(emax):
                    occupied = True
                else:
                    occupied = False
                # Bug fix: the original referenced an undefined name
                # ``efermi``; the window is relative to the per-spin Fermi
                # levels ``efermi_s``.
                emin_s = emin + efermi_s
                emax_s = emax + efermi_s

            emin_s /= Hartree
            emax_s /= Hartree

            for u in range(len(self.calc.wfs.kpt_u)):
                kpt = self.calc.wfs.kpt_u[u]
                emin = emin_s[kpt.s]
                emax = emax_s[kpt.s]
                for n, eps in enumerate(kpt.eps_n):
                    if eps > emin and eps < emax:
                        if occupied:
                            weight = kpt.f_n[n]
                        else:
                            weight = kpt.weight - kpt.f_n[n]
                        self.add_wf_to_ldos(n, u, weight)

    def add_wf_to_ldos(self, n, u, weight=None):
        """Add the wf with given kpoint and spin to the ldos"""
        kpt = self.calc.wfs.kpt_u[u]
        psi = kpt.psit_nG[n]
        w = weight
        if w is None:
            w = kpt.weight
        self.ldos += w * (psi * np.conj(psi)).real

    def write_3D(self, bias, file, filetype=None):
        """Write the density as a 3D file.

        Units: [e/A^3]"""
        self.calculate_ldos(bias)
        self.calc.wfs.kpt_comm.sum(self.ldos)
        ldos = self.gd.collect(self.ldos)

        # Only the master rank writes the file.
        if mpi.rank != MASTER:
            return

        if filetype is None:
            # estimate file type from name ending
            filetype = file.split('.')[-1]
        filetype.lower()

        if filetype == 'cube':
            write_cube(file, self.calc.get_atoms(), ldos / Bohr**3)
        elif filetype == 'plt':
            write_plt(file, self.calc.get_atoms(), ldos / Bohr**3)
        else:
            raise NotImplementedError('unknown file type "' + filetype + '"')

    def read_3D(self, file, filetype=None):
        """Read the density from a 3D file"""
        if filetype is None:
            # estimate file type from name ending
            filetype = file.split('.')[-1]
        filetype.lower()

        if filetype == 'plt':
            cell, grid = read_plt(file)[:2]

            # Even grid sizes are treated as periodic; odd sizes become
            # non-periodic with one extra (implicit) grid point.
            pbc_c = [True, True, True]
            N_c = np.array(grid.shape)
            for c in range(3):
                if N_c[c] % 2 == 1:
                    pbc_c[c] = False
                    N_c[c] += 1
            self.gd = GridDescriptor(N_c, cell.diagonal() / Bohr, pbc_c)
            self.offset_c = [int(not a) for a in self.gd.pbc_c]
        else:
            raise NotImplementedError('unknown file type "' + filetype + '"')

        self.file = file
        self.ldos = np.array(grid * Bohr**3, np.float)

    def current_to_density(self, current):
        """The connection between density n and current I

        n [e/Angstrom^3] = 0.0002 sqrt(I [nA])

        as given in Hofer et al., RevModPhys 75 (2003) 1287
        """
        return 0.0002 * sqrt(current)

    def density_to_current(self, density):
        """Inverse relation of current_to_density (same reference)."""
        return 5000. * density**2

    def scan_const_current(self, current, bias=None,
                           interpolate=False, hmax=None):
        """Get the height image for constant current I [nA].

        hmax is the maximal height to consider
        """
        return self.scan_const_density(self.current_to_density(current),
                                       bias, interpolate, hmax)

    def scan_const_density(self, density, bias, interpolate=False, hmax=None):
        """Get the height image for constant density [e/Angstrom^3]."""
        self.calculate_ldos(bias)

        self.density = density

        gd = self.gd
        h_c = [np.linalg.norm(gd.h_cv[c]) for c in range(3)]
        nx, ny = (gd.N_c - self.offset_c)[:2]

        # each cpu will have the full array, but works on its
        # own part only
        heights = np.zeros((nx, ny)) - 1

        if hmax is None:
            hmax = h_c[2] * self.ldos.shape[2] + h_c[2] / 2.
        else:
            hmax /= Bohr
        ihmax = min(gd.end_c[2] - 1, int(hmax / h_c[2]))

        for i in range(gd.beg_c[0], gd.end_c[0]):
            ii = i - gd.beg_c[0]
            for j in range(gd.beg_c[1], gd.end_c[1]):
                jj = j - gd.beg_c[1]
                zline = self.ldos[ii, jj]

                # check from above until you find the required density
                for k in range(ihmax, gd.beg_c[2] - 1, -1):
                    kk = k - gd.beg_c[2]
                    if zline[kk] > density:
                        heights[i - self.offset_c[0],
                                j - self.offset_c[1]] = k
                        break

        # collect the results
        gd.comm.max(heights)

        if interpolate:
            # collect the full grid to enable interpolation
            fullgrid = gd.collect(self.ldos)
            kmax = self.ldos.shape[2] - 1
            for i in range(gd.beg_c[0], gd.end_c[0]):
                ii = i - gd.beg_c[0]
                i -= self.offset_c[0]
                for j in range(gd.beg_c[1], gd.end_c[1]):
                    jj = j - gd.beg_c[1]
                    j -= self.offset_c[1]
                    if heights[i, j] > 0:
                        if heights[i, j] < kmax:
                            # Linear interpolation between the two grid
                            # points bracketing the requested density.
                            c1 = fullgrid[i, j, int(heights[i, j])]
                            c2 = fullgrid[i, j, int(heights[i, j]) + 1]
                            k = heights[i, j] + (density - c1) / (c2 - c1)
                        else:
                            k = kmax
                        # Bug fix: the interpolated value was computed but
                        # never stored, so interpolate=True had no effect.
                        heights[i, j] = k

        self.heights = np.where(heights > 0,
                                (heights + self.offset_c[2]) * h_c[2], -1)

        return heights

    def write(self, file=None):
        """Write STM data to a file in gnuplot readable tyle."""
        if mpi.rank != MASTER:
            return

        xvals, yvals, heights = self.pylab_contour()
        nx, ny = heights.shape[:2]

        if file is None:
            # Bug fix: the original unpacked the undefined local name
            # ``bias``; the stored bias is self.bias (n, k, s).
            n, k, s = self.bias
            fname = 'stm_n%dk%ds%d.dat' % (n, k, s)
        else:
            fname = file

        f = open(fname, 'w')
        try:
            import datetime
            print >> f, '#', datetime.datetime.now().ctime()
        except:
            # Best effort only: the header timestamp is not essential.
            pass
        print >> f, '# Simulated STM picture'
        if hasattr(self, 'file'):
            print >> f, '# density read from', self.file
        else:
            if self.is_wf:
                print >> f, '# pseudo-wf n=%d k=%d s=%d' % tuple(self.bias)
            else:
                print >> f, '# bias=', self.bias, '[eV]'
        print >> f, '#'
        print >> f, '# density=', self.density, '[e/Angstrom^3]',
        print >> f, '(current=', self.density_to_current(self.density), '[nA])'
        print >> f, '# x[Angs.]   y[Angs.]     h[Angs.] (-1 is not found)'
        for i in range(nx):
            for j in range(ny):
                if heights[i, j] == -1:
                    height = -1
                else:
                    height = heights[i, j] * Bohr
                print >> f, '%10g %10g %12g' % (yvals[j], xvals[i], height)
            print >> f
        f.close()

    def pylab_contour(self):
        """Return the countour to be plotted using pylab."""
        nx, ny = self.heights.shape[:2]
        h_c = np.array([np.linalg.norm(self.gd.h_cv[c])
                        for c in range(3)]) * Bohr

        # the lowest point is not stored for non-periodic BCs
        xvals = [(i + self.offset_c[0]) * h_c[0] for i in range(nx)]
        yvals = [(i + self.offset_c[1]) * h_c[1] for i in range(ny)]

        heights = self.heights * Bohr
        # pylab interprets heights[y_i][x_i]
        return np.array(xvals), np.array(yvals), heights.swapaxes(0, 1)
|
robwarm/gpaw-symm
|
gpaw/analyse/simple_stm.py
|
Python
|
gpl-3.0
| 10,576
|
[
"ASE",
"GPAW"
] |
7f1d5458fcead3c00ad153ee90efc235c82e9acec66756c4727eea5cbbca79d6
|
# -*- coding: utf-8 -*-
"""
Acceptance tests for Video.
"""
from nose.plugins.attrib import attr
from unittest import skipIf, skip
from ..helpers import UniqueCourseTest, is_youtube_available, YouTubeStubConfig
from ...pages.lms.video.video import VideoPage
from ...pages.lms.tab_nav import TabNavPage
from ...pages.lms.course_nav import CourseNavPage
from ...pages.lms.auto_auth import AutoAuthPage
from ...pages.lms.course_info import CourseInfoPage
from ...fixtures.course import CourseFixture, XBlockFixtureDesc
from ..helpers import skip_if_browser
from flaky import flaky
# Port of the local stub server that serves the test video files.
VIDEO_SOURCE_PORT = 8777

# Valid HTML5 sources in the three formats the player supports.
HTML5_SOURCES = [
    'http://localhost:{0}/gizmo.mp4'.format(VIDEO_SOURCE_PORT),
    'http://localhost:{0}/gizmo.webm'.format(VIDEO_SOURCE_PORT),
    'http://localhost:{0}/gizmo.ogv'.format(VIDEO_SOURCE_PORT),
]

# Deliberately unsupported extension, used for error-handling tests.
HTML5_SOURCES_INCORRECT = [
    'http://localhost:{0}/gizmo.mp99'.format(VIDEO_SOURCE_PORT),
]
@attr('shard_4')
@skipIf(is_youtube_available() is False, 'YouTube is not available!')
class VideoBaseTest(UniqueCourseTest):
"""
Base class for tests of the Video Player
Sets up the course and provides helper functions for the Video tests.
"""
def setUp(self):
    """
    Initialization of pages and course fixture for video tests
    """
    super(VideoBaseTest, self).setUp()

    self.video = VideoPage(self.browser)
    self.tab_nav = TabNavPage(self.browser)
    self.course_nav = CourseNavPage(self.browser)
    self.course_info_page = CourseInfoPage(self.browser, self.course_id)
    self.auth_page = AutoAuthPage(self.browser, course_id=self.course_id)

    self.course_fixture = CourseFixture(
        self.course_info['org'], self.course_info['number'],
        self.course_info['run'], self.course_info['display_name']
    )

    # Per-test configuration knobs; subclasses/tests set these before
    # calling navigate_to_video*().
    self.metadata = None
    self.assets = []
    self.verticals = None
    self.youtube_configuration = {}
    self.user_info = {}

    # reset youtube stub server
    self.addCleanup(YouTubeStubConfig.reset)
def navigate_to_video(self):
    """ Prepare the course and get to the video and render it """
    self._install_course_fixture()
    self._navigate_to_courseware_video_and_render()
def navigate_to_video_no_render(self):
    """
    Prepare the course and get to the video unit,
    but do not wait for the player to render, because
    there has been an error.
    """
    self._install_course_fixture()
    self._navigate_to_courseware_video_no_render()
def _install_course_fixture(self):
    """ Install the course fixture that has been defined """
    if self.assets:
        self.course_fixture.add_asset(self.assets)

    # One chapter containing one sequential holding all verticals.
    chapter_sequential = XBlockFixtureDesc('sequential', 'Test Section')
    chapter_sequential.add_children(*self._add_course_verticals())
    chapter = XBlockFixtureDesc('chapter', 'Test Chapter').add_children(chapter_sequential)
    self.course_fixture.add_children(chapter)
    self.course_fixture.install()

    # Apply any per-test YouTube stub configuration after install.
    if len(self.youtube_configuration) > 0:
        YouTubeStubConfig.configure(self.youtube_configuration)
def _add_course_verticals(self):
    """
    Create XBlockFixtureDesc verticals
    :return: a list of XBlockFixtureDesc
    """
    vertical_specs = self.verticals

    # Video tests require at least one vertical with a single video.
    if not vertical_specs:
        vertical_specs = [[{'display_name': 'Video', 'metadata': self.metadata}]]

    return [
        self._create_single_vertical(spec, index)
        for index, spec in enumerate(vertical_specs)
    ]
def _create_single_vertical(self, vertical, vertical_index):
    """
    Create a single course vertical of type XBlockFixtureDesc with category `vertical`.
    A single course vertical can contain single or multiple video modules.

    :param vertical: vertical data list
    :param vertical_index: index for the vertical display name
    :return: XBlockFixtureDesc
    """
    vertical_fixture = XBlockFixtureDesc('vertical', 'Test Vertical-{0}'.format(vertical_index))
    for video in vertical:
        video_fixture = XBlockFixtureDesc(
            'video', video['display_name'], metadata=video.get('metadata')
        )
        vertical_fixture.add_children(video_fixture)
    return vertical_fixture
def _navigate_to_courseware_video(self):
    """ Register for the course and navigate to the video unit """
    self.auth_page.visit()
    # Remember the auto-auth user details so tests can assert against them.
    self.user_info = self.auth_page.user_info
    self.course_info_page.visit()
    self.tab_nav.go_to_tab('Courseware')
def _navigate_to_courseware_video_and_render(self):
    """ Navigate to the video unit and wait for the video player to render """
    self._navigate_to_courseware_video()
    self.video.wait_for_video_player_render()
def _navigate_to_courseware_video_no_render(self):
    """ Navigate to the video unit; wait for the video Xmodule but not for rendering """
    self._navigate_to_courseware_video()
    self.video.wait_for_video_class()
def metadata_for_mode(self, player_mode, additional_data=None):
    """
    Create a dictionary for video player configuration according to `player_mode`.

    An unrecognised mode (e.g. plain 'youtube') yields only whatever is in
    `additional_data`, since YouTube is the player's default behavior.

    :param player_mode (str): Video player mode
    :param additional_data (dict): Optional additional metadata.
    :return: dict
    """
    # Blanking every youtube id forces the player into pure HTML5 playback.
    blank_youtube_ids = {
        'youtube_id_1_0': '',
        'youtube_id_0_75': '',
        'youtube_id_1_25': '',
        'youtube_id_1_5': '',
    }

    metadata = {}
    if player_mode == 'html5':
        metadata.update(blank_youtube_ids)
        metadata['html5_sources'] = HTML5_SOURCES
    elif player_mode == 'youtube_html5':
        metadata['html5_sources'] = HTML5_SOURCES
    elif player_mode == 'youtube_html5_unsupported_video':
        metadata['html5_sources'] = HTML5_SOURCES_INCORRECT
    elif player_mode == 'html5_unsupported_video':
        metadata.update(blank_youtube_ids)
        metadata['html5_sources'] = HTML5_SOURCES_INCORRECT

    if additional_data:
        metadata.update(additional_data)

    return metadata
def go_to_sequential_position(self, position):
    """
    Navigate to the sequential unit at the given `position` (tests pass
    1-based positions) and wait for the video player to render.
    """
    self.course_nav.go_to_sequential_position(position)
    self.video.wait_for_video_player_render()
class YouTubeVideoTest(VideoBaseTest):
    """ Test YouTube Video Player """

    def setUp(self):
        super(YouTubeVideoTest, self).setUp()

    def test_youtube_video_rendering_wo_html5_sources(self):
        """
        Scenario: Video component is rendered in the LMS in Youtube mode without HTML5 sources
        Given the course has a Video component in "Youtube" mode
        Then the video has rendered in "Youtube" mode
        """
        self.navigate_to_video()

        # Verify that video has rendered in "Youtube" mode
        self.assertTrue(self.video.is_video_rendered('youtube'))

    def test_cc_button_wo_english_transcript(self):
        """
        Scenario: CC button works correctly w/o english transcript in Youtube mode
        Given the course has a Video component in "Youtube" mode
        And I have defined a non-english transcript for the video
        And I have uploaded a non-english transcript file to assets
        Then I see the correct text in the captions
        """
        data = {'transcripts': {'zh': 'chinese_transcripts.srt'}}
        self.metadata = self.metadata_for_mode('youtube', data)
        self.assets.append('chinese_transcripts.srt')
        self.navigate_to_video()
        self.video.show_captions()

        # Verify that we see "好 各位同学" text in the captions
        unicode_text = "好 各位同学".decode('utf-8')
        self.assertIn(unicode_text, self.video.captions_text)

    def test_cc_button_transcripts_and_sub_fields_empty(self):
        """
        Scenario: CC button works correctly if transcripts and sub fields are empty,
            but transcript file exists in assets (Youtube mode of Video component)
        Given the course has a Video component in "Youtube" mode
        And I have uploaded a .srt.sjson file to assets
        Then I see the correct english text in the captions
        """
        self._install_course_fixture()
        self.course_fixture.add_asset(['subs_3_yD_cEKoCk.srt.sjson'])
        self.course_fixture._upload_assets()
        self._navigate_to_courseware_video_and_render()
        self.video.show_captions()

        # Verify that we see "Welcome to edX." text in the captions
        self.assertIn('Welcome to edX.', self.video.captions_text)

    def test_cc_button_hidden_no_translations(self):
        """
        Scenario: CC button is hidden if no translations
        Given the course has a Video component in "Youtube" mode
        Then the "CC" button is hidden
        """
        self.navigate_to_video()
        self.assertFalse(self.video.is_button_shown('CC'))

    def test_fullscreen_video_alignment_with_transcript_hidden(self):
        """
        Scenario: Video is aligned with transcript hidden in fullscreen mode
        Given the course has a Video component in "Youtube" mode
        When I view the video at fullscreen
        Then the video with the transcript hidden is aligned correctly
        """
        self.navigate_to_video()

        # click video button "fullscreen"
        self.video.click_player_button('fullscreen')

        # check if video aligned correctly without enabled transcript
        self.assertTrue(self.video.is_aligned(False))

    def test_download_button_wo_english_transcript(self):
        """
        Scenario: Download button works correctly w/o english transcript in YouTube mode
        Given the course has a Video component in "Youtube" mode
        And I have defined a downloadable non-english transcript for the video
        And I have uploaded a non-english transcript file to assets
        Then I can download the transcript in "srt" format
        """
        data = {'download_track': True, 'transcripts': {'zh': 'chinese_transcripts.srt'}}
        self.metadata = self.metadata_for_mode('youtube', additional_data=data)
        self.assets.append('chinese_transcripts.srt')

        # go to video
        self.navigate_to_video()

        # check if we can download transcript in "srt" format that has text "好 各位同学"
        unicode_text = "好 各位同学".decode('utf-8')
        self.assertTrue(self.video.downloaded_transcript_contains_text('srt', unicode_text))

    def test_download_button_two_transcript_languages(self):
        """
        Scenario: Download button works correctly for multiple transcript languages
        Given the course has a Video component in "Youtube" mode
        And I have defined a downloadable non-english transcript for the video
        And I have defined english subtitles for the video
        Then I see the correct english text in the captions
        And the english transcript downloads correctly
        And I see the correct non-english text in the captions
        And the non-english transcript downloads correctly
        """
        self.assets.extend(['chinese_transcripts.srt', 'subs_3_yD_cEKoCk.srt.sjson'])
        data = {'download_track': True, 'transcripts': {'zh': 'chinese_transcripts.srt'}, 'sub': '3_yD_cEKoCk'}
        self.metadata = self.metadata_for_mode('youtube', additional_data=data)

        # go to video
        self.navigate_to_video()

        # check if "Welcome to edX." text in the captions
        self.assertIn('Welcome to edX.', self.video.captions_text)

        # check if we can download transcript in "srt" format that has text "Welcome to edX."
        self.assertTrue(self.video.downloaded_transcript_contains_text('srt', 'Welcome to edX.'))

        # select language with code "zh"
        self.assertTrue(self.video.select_language('zh'))

        # check if we see "好 各位同学" text in the captions
        unicode_text = "好 各位同学".decode('utf-8')
        self.assertIn(unicode_text, self.video.captions_text)

        # check if we can download transcript in "srt" format that has text "好 各位同学"
        unicode_text = "好 各位同学".decode('utf-8')
        self.assertTrue(self.video.downloaded_transcript_contains_text('srt', unicode_text))

    def test_fullscreen_video_alignment_on_transcript_toggle(self):
        """
        Scenario: Video is aligned correctly on transcript toggle in fullscreen mode
        Given the course has a Video component in "Youtube" mode
        And I have uploaded a .srt.sjson file to assets
        And I have defined subtitles for the video
        When I view the video at fullscreen
        Then the video with the transcript enabled is aligned correctly
        And the video with the transcript hidden is aligned correctly
        """
        self.assets.append('subs_3_yD_cEKoCk.srt.sjson')
        data = {'sub': '3_yD_cEKoCk'}
        self.metadata = self.metadata_for_mode('youtube', additional_data=data)

        # go to video
        self.navigate_to_video()

        # make sure captions are opened
        self.video.show_captions()

        # click video button "fullscreen"
        self.video.click_player_button('fullscreen')

        # check if video aligned correctly with enabled transcript
        self.assertTrue(self.video.is_aligned(True))

        # click video button "CC"
        self.video.click_player_button('CC')

        # check if video aligned correctly without enabled transcript
        self.assertTrue(self.video.is_aligned(False))

    def test_video_rendering_with_default_response_time(self):
        """
        Scenario: Video is rendered in Youtube mode when the YouTube Server responds quickly
        Given the YouTube server response time less than 1.5 seconds
        And the course has a Video component in "Youtube_HTML5" mode
        Then the video has rendered in "Youtube" mode
        """
        # configure youtube server
        self.youtube_configuration['time_to_response'] = 0.4
        self.metadata = self.metadata_for_mode('youtube_html5')

        self.navigate_to_video()

        self.assertTrue(self.video.is_video_rendered('youtube'))

    def test_video_rendering_wo_default_response_time(self):
        """
        Scenario: Video is rendered in HTML5 when the YouTube Server responds slowly
        Given the YouTube server response time is greater than 1.5 seconds
        And the course has a Video component in "Youtube_HTML5" mode
        Then the video has rendered in "HTML5" mode
        """
        # configure youtube server
        self.youtube_configuration['time_to_response'] = 2.0
        self.metadata = self.metadata_for_mode('youtube_html5')

        self.navigate_to_video()

        self.assertTrue(self.video.is_video_rendered('html5'))

    def test_video_with_youtube_blocked(self):
        """
        Scenario: Video is rendered in HTML5 mode when the YouTube API is blocked
        Given the YouTube server response time is greater than 1.5 seconds
        And the YouTube API is blocked
        And the course has a Video component in "Youtube_HTML5" mode
        Then the video has rendered in "HTML5" mode
        """
        # configure youtube server
        self.youtube_configuration.update({
            'time_to_response': 2.0,
            'youtube_api_blocked': True,
        })
        self.metadata = self.metadata_for_mode('youtube_html5')

        self.navigate_to_video()

        self.assertTrue(self.video.is_video_rendered('html5'))

    def test_download_transcript_button_works_correctly(self):
        """
        Scenario: Download Transcript button works correctly
        Given the course has Video components A and B in "Youtube" mode
        And Video component C in "HTML5" mode
        And I have defined downloadable transcripts for the videos
        Then I can download a transcript for Video A in "srt" format
        And I can download a transcript for Video A in "txt" format
        And I can download a transcript for Video B in "txt" format
        And the Download Transcript menu does not exist for Video C
        """
        data_a = {'sub': '3_yD_cEKoCk', 'download_track': True}
        youtube_a_metadata = self.metadata_for_mode('youtube', additional_data=data_a)
        self.assets.append('subs_3_yD_cEKoCk.srt.sjson')

        data_b = {'youtube_id_1_0': 'b7xgknqkQk8', 'sub': 'b7xgknqkQk8', 'download_track': True}
        youtube_b_metadata = self.metadata_for_mode('youtube', additional_data=data_b)
        self.assets.append('subs_b7xgknqkQk8.srt.sjson')

        data_c = {'track': 'http://example.org/', 'download_track': True}
        html5_c_metadata = self.metadata_for_mode('html5', additional_data=data_c)

        self.verticals = [
            [{'display_name': 'A', 'metadata': youtube_a_metadata}],
            [{'display_name': 'B', 'metadata': youtube_b_metadata}],
            [{'display_name': 'C', 'metadata': html5_c_metadata}]
        ]

        # open the section with videos (open video "A")
        self.navigate_to_video()

        # check if we can download transcript in "srt" format that has text "00:00:00,260"
        self.assertTrue(self.video.downloaded_transcript_contains_text('srt', '00:00:00,260'))

        # select the transcript format "txt"
        self.assertTrue(self.video.select_transcript_format('txt'))

        # check if we can download transcript in "txt" format that has text "Welcome to edX."
        self.assertTrue(self.video.downloaded_transcript_contains_text('txt', 'Welcome to edX.'))

        # open video "B"
        self.course_nav.go_to_sequential('B')

        # check if we can download transcript in "txt" format that has text "Equal transcripts"
        self.assertTrue(self.video.downloaded_transcript_contains_text('txt', 'Equal transcripts'))

        # open video "C"
        self.course_nav.go_to_sequential('C')

        # menu "download_transcript" doesn't exist
        self.assertFalse(self.video.is_menu_present('download_transcript'))

    def _verify_caption_text(self, text):
        """ Wait (up to 5s) until the captions contain `text`. """
        self.video._wait_for(
            lambda: (text in self.video.captions_text),
            u'Captions contain "{}" text'.format(text),
            timeout=5
        )

    def test_video_language_menu_working(self):
        """
        Scenario: Language menu works correctly in Video component
        Given the course has a Video component in "Youtube" mode
        And I have defined multiple language transcripts for the videos
        And I make sure captions are closed
        And I see video menu "language" with correct items
        And I select language with code "zh"
        Then I see "好 各位同学" text in the captions
        And I select language with code "en"
        Then I see "Welcome to edX." text in the captions
        """
        self.assets.extend(['chinese_transcripts.srt', 'subs_3_yD_cEKoCk.srt.sjson'])
        data = {'transcripts': {"zh": "chinese_transcripts.srt"}, 'sub': '3_yD_cEKoCk'}
        self.metadata = self.metadata_for_mode('youtube', additional_data=data)

        # go to video
        self.navigate_to_video()

        self.video.hide_captions()

        correct_languages = {'en': 'English', 'zh': 'Chinese'}
        self.assertEqual(self.video.caption_languages, correct_languages)

        self.video.select_language('zh')

        unicode_text = "好 各位同学".decode('utf-8')
        self._verify_caption_text(unicode_text)

        self.video.select_language('en')
        self._verify_caption_text('Welcome to edX.')

    def test_multiple_videos_in_sequentials_load_and_work(self):
        """
        Scenario: Multiple videos in sequentials all load and work, switching between sequentials
        Given it has videos "A,B" in "Youtube" mode in position "1" of sequential
        And videos "E,F" in "Youtube" mode in position "2" of sequential
        """
        self.verticals = [
            [{'display_name': 'A'}, {'display_name': 'B'}], [{'display_name': 'C'}, {'display_name': 'D'}]
        ]

        tab1_video_names = ['A', 'B']
        tab2_video_names = ['C', 'D']

        def execute_video_steps(video_names):
            """
            Play then pause each of the named videos, verifying state.
            """
            for video_name in video_names:
                self.video.use_video(video_name)
                self.video.click_player_button('play')
                self.assertIn(self.video.state, ['playing', 'buffering'])
                self.video.click_player_button('pause')

        # go to video
        self.navigate_to_video()

        execute_video_steps(tab1_video_names)

        # go to second sequential position
        self.go_to_sequential_position(2)
        execute_video_steps(tab2_video_names)

        # go back to first sequential position
        # we are again playing tab 1 videos to ensure that switching didn't break some video functionality.
        self.go_to_sequential_position(1)
        execute_video_steps(tab1_video_names)

    def test_video_component_stores_speed_correctly_for_multiple_videos(self):
        """
        Scenario: Video component stores speed correctly when each video is in separate sequential
        Given I have a video "A" in "Youtube" mode in position "1" of sequential
        And a video "B" in "Youtube" mode in position "2" of sequential
        And a video "C" in "HTML5" mode in position "3" of sequential
        """
        self.verticals = [
            [{'display_name': 'A'}], [{'display_name': 'B'}],
            [{'display_name': 'C', 'metadata': self.metadata_for_mode('html5')}]
        ]

        self.navigate_to_video()

        # select the "2.0" speed on video "A"
        self.course_nav.go_to_sequential('A')
        self.video.speed = '2.0'

        # select the "0.50" speed on video "B"
        self.course_nav.go_to_sequential('B')
        self.video.speed = '0.50'

        # open video "C"
        self.course_nav.go_to_sequential('C')

        # check if video "C" should start playing at speed "0.75"
        self.assertEqual(self.video.speed, '0.75x')

        # open video "A"
        self.course_nav.go_to_sequential('A')

        # check if video "A" should start playing at speed "2.0"
        self.assertEqual(self.video.speed, '2.0x')

        # reload the page
        self.video.reload_page()

        # open video "A"
        self.course_nav.go_to_sequential('A')

        # check if video "A" should start playing at speed "2.0"
        self.assertEqual(self.video.speed, '2.0x')

        # select the "1.0" speed on video "A"
        self.video.speed = '1.0'

        # open video "B"
        self.course_nav.go_to_sequential('B')

        # check if video "B" should start playing at speed "0.50"
        self.assertEqual(self.video.speed, '0.50x')

        # open video "C"
        self.course_nav.go_to_sequential('C')

        # check if video "C" should start playing at speed "1.0"
        self.assertEqual(self.video.speed, '1.0x')

    def test_video_has_correct_transcript(self):
        """
        Scenario: Youtube video has correct transcript if fields for other speeds are filled
        Given it has a video in "Youtube" mode
        And I have uploaded multiple transcripts
        And I make sure captions are opened
        Then I see "Welcome to edX." text in the captions
        And I select the "1.50" speed
        And I reload the page with video
        Then I see "Welcome to edX." text in the captions
        And I see duration "1:56"
        """
        self.assets.extend(['subs_3_yD_cEKoCk.srt.sjson', 'subs_b7xgknqkQk8.srt.sjson'])
        data = {'sub': '3_yD_cEKoCk', 'youtube_id_1_5': 'b7xgknqkQk8'}
        self.metadata = self.metadata_for_mode('youtube', additional_data=data)

        # go to video
        self.navigate_to_video()

        self.video.show_captions()

        self.assertIn('Welcome to edX.', self.video.captions_text)

        self.video.speed = '1.50'

        self.video.reload_page()

        self.assertIn('Welcome to edX.', self.video.captions_text)

        # Bug fix: this was `assertTrue(self.video.duration, '1.56')`, which only
        # checks truthiness (the second argument to assertTrue is the failure
        # message). Assert the duration stated in the scenario above instead.
        self.assertEqual(self.video.duration, '1:56')

    def test_video_position_stored_correctly_wo_seek(self):
        """
        Scenario: Video component stores position correctly when page is reloaded
        Given the course has a Video component in "Youtube" mode
        Then the video has rendered in "Youtube" mode
        And I click video button "play"
        Then I wait until video reaches at position "0.05"
        And I click video button "pause"
        And I reload the page with video
        And I click video button "play"
        And I click video button "pause"
        Then video slider should be Equal or Greater than "0:05"
        """
        self.navigate_to_video()

        self.video.click_player_button('play')

        self.video.wait_for_position('0:05')

        self.video.click_player_button('pause')

        self.video.reload_page()

        self.video.click_player_button('play')
        self.video.click_player_button('pause')

        self.assertGreaterEqual(self.video.seconds, 5)

    @skip("Intermittently fails 03 June 2014")
    def test_video_position_stored_correctly_with_seek(self):
        """
        Scenario: Video component stores position correctly when page is reloaded
        Given the course has a Video component in "Youtube" mode
        Then the video has rendered in "Youtube" mode
        And I click video button "play"
        And I click video button "pause"
        Then I seek video to "0:10" position
        And I click video button "play"
        And I click video button "pause"
        And I reload the page with video
        Then video slider should be Equal or Greater than "0:10"
        """
        self.navigate_to_video()

        self.video.click_player_button('play')

        self.video.seek('0:10')

        self.video.click_player_button('pause')

        self.video.reload_page()

        self.video.click_player_button('play')
        self.video.click_player_button('pause')

        self.assertGreaterEqual(self.video.seconds, 10)

    def test_simplified_and_traditional_chinese_transcripts(self):
        """
        Scenario: Simplified and Traditional Chinese transcripts work as expected in Youtube mode

        Given the course has a Video component in "Youtube" mode
        And I have defined a Simplified Chinese transcript for the video
        And I have defined a Traditional Chinese transcript for the video
        Then I see the correct subtitle language options in cc menu
        Then I see the correct text in the captions for Simplified and Traditional Chinese transcripts
        And I can download the transcripts for Simplified and Traditional Chinese
        And video subtitle menu has 'zh_HANS', 'zh_HANT' translations for 'Simplified Chinese'
        and 'Traditional Chinese' respectively
        """
        data = {
            'download_track': True,
            'transcripts': {'zh_HANS': 'simplified_chinese.srt', 'zh_HANT': 'traditional_chinese.srt'}
        }
        self.metadata = self.metadata_for_mode('youtube', data)
        self.assets.extend(['simplified_chinese.srt', 'traditional_chinese.srt'])
        self.navigate_to_video()

        langs = {'zh_HANS': '在线学习是革', 'zh_HANT': '在線學習是革'}
        for lang_code, text in langs.items():
            self.assertTrue(self.video.select_language(lang_code))
            unicode_text = text.decode('utf-8')
            self.assertIn(unicode_text, self.video.captions_text)
            self.assertTrue(self.video.downloaded_transcript_contains_text('srt', unicode_text))

        self.assertEqual(self.video.caption_languages, {'zh_HANS': 'Simplified Chinese', 'zh_HANT': 'Traditional Chinese'})

    def test_video_bumper_render(self):
        """
        Scenario: Multiple videos with bumper in sequentials all load and work, switching between sequentials
        Given it has videos "A,B" in "Youtube" and "HTML5" modes in position "1" of sequential
        And video "C" in "Youtube" mode in position "2" of sequential

        When I open sequential position "1"
        Then I see video "B" has a poster
        When I click on it
        Then I see video bumper is playing
        When I skip the bumper
        Then I see the main video
        When I click on video "A"
        Then the main video starts playing

        When I open sequential position "2"
        And click on the poster
        Then the main video starts playing
        Then I see that the main video starts playing once I go back to position "2" of sequential

        When I reload the page
        Then I see that the main video starts playing when I click on the poster
        """
        additional_data = {
            u'video_bumper': {
                u'value': {
                    "transcripts": {},
                    "video_id": "video_001"
                }
            }
        }

        self.verticals = [
            [{'display_name': 'A'}, {'display_name': 'B', 'metadata': self.metadata_for_mode('html5')}],
            [{'display_name': 'C'}]
        ]

        tab1_video_names = ['A', 'B']
        tab2_video_names = ['C']

        def execute_video_steps(video_names):
            """
            Click through the poster for each named video and verify playback.
            """
            for video_name in video_names:
                self.video.use_video(video_name)
                self.assertTrue(self.video.is_poster_shown)
                self.video.click_on_poster()
                self.video.wait_for_video_player_render(autoplay=True)
                self.assertIn(self.video.state, ['playing', 'buffering', 'finished'])

        self.course_fixture.add_advanced_settings(additional_data)

        self.navigate_to_video_no_render()

        self.video.use_video('B')
        self.assertTrue(self.video.is_poster_shown)
        self.video.click_on_poster()
        self.video.wait_for_video_bumper_render()
        self.assertIn(self.video.state, ['playing', 'buffering', 'finished'])
        self.video.click_player_button('skip_bumper')

        # no autoplay here, maybe video is too small, so pause is not switched
        self.video.wait_for_video_player_render()
        self.assertIn(self.video.state, ['playing', 'buffering', 'finished'])

        self.video.use_video('A')
        execute_video_steps(['A'])

        # go to second sequential position
        self.course_nav.go_to_sequential_position(2)

        execute_video_steps(tab2_video_names)

        # go back to first sequential position
        # we are again playing tab 1 videos to ensure that switching didn't break some video functionality.
        self.course_nav.go_to_sequential_position(1)
        execute_video_steps(tab1_video_names)

        self.video.browser.refresh()
        execute_video_steps(tab1_video_names)
class YouTubeHtml5VideoTest(VideoBaseTest):
    """ Test YouTube HTML5 Video Player """

    def setUp(self):
        super(YouTubeHtml5VideoTest, self).setUp()

    @flaky  # TODO fix this, see TNL-1642
    def test_youtube_video_rendering_with_unsupported_sources(self):
        """
        Scenario: Video component is rendered in the LMS in Youtube mode
            with HTML5 sources that aren't supported by the browser
        Given the course has a Video component in "Youtube_HTML5_Unsupported_Video" mode
        Then the video has rendered in "Youtube" mode
        """
        self.metadata = self.metadata_for_mode('youtube_html5_unsupported_video')
        self.navigate_to_video()

        # Verify that the video has rendered in "Youtube" mode
        self.assertTrue(self.video.is_video_rendered('youtube'))
class Html5VideoTest(VideoBaseTest):
    """ Test HTML5 Video Player """

    def setUp(self):
        super(Html5VideoTest, self).setUp()

    def test_autoplay_disabled_for_video_component(self):
        """
        Scenario: Autoplay is disabled by default for a Video component
        Given the course has a Video component in "HTML5" mode
        When I view the Video component
        Then it does not have autoplay enabled
        """
        self.metadata = self.metadata_for_mode('html5')
        self.navigate_to_video()

        # Verify that the video has autoplay mode disabled
        self.assertFalse(self.video.is_autoplay_enabled)

    def test_html5_video_rendering_with_unsupported_sources(self):
        """
        Scenario: LMS displays an error message for HTML5 sources that are not supported by browser
        Given the course has a Video component in "HTML5_Unsupported_Video" mode
        When I view the Video component
        Then an error message is shown
        And the error message has the correct text
        """
        self.metadata = self.metadata_for_mode('html5_unsupported_video')
        self.navigate_to_video_no_render()

        # Verify that error message is shown
        self.assertTrue(self.video.is_error_message_shown)

        # Verify that error message has correct text
        correct_error_message_text = 'No playable video sources found.'
        self.assertIn(correct_error_message_text, self.video.error_message_text)

        # Verify that spinner is not shown
        self.assertFalse(self.video.is_spinner_shown)

    def test_download_button_wo_english_transcript(self):
        """
        Scenario: Download button works correctly w/o english transcript in HTML5 mode
        Given the course has a Video component in "HTML5" mode
        And I have defined a downloadable non-english transcript for the video
        And I have uploaded a non-english transcript file to assets
        Then I see the correct non-english text in the captions
        And the non-english transcript downloads correctly
        """
        data = {'download_track': True, 'transcripts': {'zh': 'chinese_transcripts.srt'}}
        self.metadata = self.metadata_for_mode('html5', additional_data=data)
        self.assets.append('chinese_transcripts.srt')

        # go to video
        self.navigate_to_video()

        # check if we see "好 各位同学" text in the captions
        unicode_text = "好 各位同学".decode('utf-8')
        self.assertIn(unicode_text, self.video.captions_text)

        # check if we can download transcript in "srt" format that has text "好 各位同学"
        unicode_text = "好 各位同学".decode('utf-8')
        self.assertTrue(self.video.downloaded_transcript_contains_text('srt', unicode_text))

    def test_download_button_two_transcript_languages(self):
        """
        Scenario: Download button works correctly for multiple transcript languages in HTML5 mode
        Given the course has a Video component in "HTML5" mode
        And I have defined a downloadable non-english transcript for the video
        And I have defined english subtitles for the video
        Then I see the correct english text in the captions
        And the english transcript downloads correctly
        And I see the correct non-english text in the captions
        And the non-english transcript downloads correctly
        """
        self.assets.extend(['chinese_transcripts.srt', 'subs_3_yD_cEKoCk.srt.sjson'])
        data = {'download_track': True, 'transcripts': {'zh': 'chinese_transcripts.srt'}, 'sub': '3_yD_cEKoCk'}
        self.metadata = self.metadata_for_mode('html5', additional_data=data)

        # go to video
        self.navigate_to_video()

        # check if "Welcome to edX." text in the captions
        self.assertIn('Welcome to edX.', self.video.captions_text)

        # check if we can download transcript in "srt" format that has text "Welcome to edX."
        self.assertTrue(self.video.downloaded_transcript_contains_text('srt', 'Welcome to edX.'))

        # select language with code "zh"
        self.assertTrue(self.video.select_language('zh'))

        # check if we see "好 各位同学" text in the captions
        unicode_text = "好 各位同学".decode('utf-8')
        self.assertIn(unicode_text, self.video.captions_text)

        # Then I can download transcript in "srt" format that has text "好 各位同学"
        unicode_text = "好 各位同学".decode('utf-8')
        self.assertTrue(self.video.downloaded_transcript_contains_text('srt', unicode_text))

    def test_full_screen_video_alignment_with_transcript_visible(self):
        """
        Scenario: Video is aligned correctly with transcript enabled in fullscreen mode
        Given the course has a Video component in "HTML5" mode
        And I have uploaded a .srt.sjson file to assets
        And I have defined subtitles for the video
        When I show the captions
        And I view the video at fullscreen
        Then the video with the transcript enabled is aligned correctly
        """
        self.assets.append('subs_3_yD_cEKoCk.srt.sjson')
        data = {'sub': '3_yD_cEKoCk'}
        self.metadata = self.metadata_for_mode('html5', additional_data=data)

        # go to video
        self.navigate_to_video()

        # make sure captions are opened
        self.video.show_captions()

        # click video button "fullscreen"
        self.video.click_player_button('fullscreen')

        # check if video aligned correctly with enabled transcript
        self.assertTrue(self.video.is_aligned(True))

    def test_cc_button_with_english_transcript(self):
        """
        Scenario: CC button works correctly with only english transcript in HTML5 mode
        Given the course has a Video component in "HTML5" mode
        And I have defined english subtitles for the video
        And I have uploaded an english transcript file to assets
        Then I see the correct text in the captions
        """
        self.assets.append('subs_3_yD_cEKoCk.srt.sjson')
        data = {'sub': '3_yD_cEKoCk'}
        self.metadata = self.metadata_for_mode('html5', additional_data=data)

        # go to video
        self.navigate_to_video()

        # make sure captions are opened
        self.video.show_captions()

        # check if we see "Welcome to edX." text in the captions
        self.assertIn("Welcome to edX.", self.video.captions_text)

    def test_cc_button_wo_english_transcript(self):
        """
        Scenario: CC button works correctly w/o english transcript in HTML5 mode
        Given the course has a Video component in "HTML5" mode
        And I have defined a non-english transcript for the video
        And I have uploaded a non-english transcript file to assets
        Then I see the correct text in the captions
        """
        self.assets.append('chinese_transcripts.srt')
        data = {'transcripts': {'zh': 'chinese_transcripts.srt'}}
        self.metadata = self.metadata_for_mode('html5', additional_data=data)

        # go to video
        self.navigate_to_video()

        # make sure captions are opened
        self.video.show_captions()

        # check if we see "好 各位同学" text in the captions
        unicode_text = "好 各位同学".decode('utf-8')
        self.assertIn(unicode_text, self.video.captions_text)

    def test_video_rendering(self):
        """
        Scenario: Video component is fully rendered in the LMS in HTML5 mode
        Given the course has a Video component in "HTML5" mode
        Then the video has rendered in "HTML5" mode
        And video sources are correct
        """
        self.metadata = self.metadata_for_mode('html5')

        self.navigate_to_video()

        self.assertTrue(self.video.is_video_rendered('html5'))

        # every source reported by the player must come from the fixture list
        self.assertTrue(all([source in HTML5_SOURCES for source in self.video.sources]))
class YouTubeQualityTest(VideoBaseTest):
    """ Test YouTube Video Quality Button """

    def setUp(self):
        super(YouTubeQualityTest, self).setUp()

    @skip_if_browser('firefox')
    def test_quality_button_visibility(self):
        """
        Scenario: Quality button appears on play.

        Given the course has a Video component in "Youtube" mode
        Then I see video button "quality" is hidden
        And I click video button "play"
        Then I see video button "quality" is visible
        """
        self.navigate_to_video()

        self.assertFalse(self.video.is_quality_button_visible)

        self.video.click_player_button('play')

        self.assertTrue(self.video.is_quality_button_visible)

    @skip_if_browser('firefox')
    def test_quality_button_works_correctly(self):
        """
        Scenario: Quality button works correctly.

        Given the course has a Video component in "Youtube" mode
        And I click video button "play"
        And I see video button "quality" is inactive
        And I click video button "quality"
        Then I see video button "quality" is active
        """
        self.navigate_to_video()

        self.video.click_player_button('play')

        self.assertFalse(self.video.is_quality_button_active)

        self.video.click_player_button('quality')

        self.assertTrue(self.video.is_quality_button_active)
|
zerobatu/edx-platform
|
common/test/acceptance/tests/video/test_video_module.py
|
Python
|
agpl-3.0
| 41,492
|
[
"VisIt"
] |
76740d0cf1572d39ae951d5ebcca3f57d3a4d630faf577bc7f184ccc0b7eef52
|
"""Test that compute_delta_cchalf returns required values"""
from __future__ import annotations
import json
from unittest import mock
from dxtbx.model import Crystal, Experiment, Scan
from dxtbx.model.experiment_list import ExperimentList
from libtbx import phil
from dials.algorithms.scaling.model.model import KBScalingModel
from dials.algorithms.scaling.scale_and_filter import AnalysisResults, log_cycle_results
from dials.algorithms.statistics.cc_half_algorithm import CCHalfFromDials, DeltaCCHalf
from dials.array_family import flex
from dials.util.options import ArgumentParser
def generate_test_reflections(n=2):
    """Build one combined reflection table holding *n* 10-reflection datasets."""
    combined = flex.reflection_table()
    for dataset_id in range(n):
        table = flex.reflection_table()
        table["id"] = flex.int(10, dataset_id)
        table["xyzobs.px.value"] = flex.vec3_double(
            [(0, 0, frame + 0.5) for frame in range(10)]
        )
        table.experiment_identifiers()[dataset_id] = str(dataset_id)
        # Every reflection is flagged as both integrated and scaled.
        table.set_flags(flex.bool(10, True), table.flags.integrated)
        table.set_flags(flex.bool(10, True), table.flags.scaled)
        combined.extend(table)
    return combined
def generated_params():
    """Generate a param phil scope with the KB scaling model selected."""
    scope = phil.parse(
        """
      include scope dials.algorithms.scaling.scaling_options.phil_scope
      include scope dials.algorithms.scaling.model.model.model_phil_scope
      include scope dials.algorithms.scaling.scaling_refiner.scaling_refinery_phil_scope
  """,
        process_includes=True,
    )
    arg_parser = ArgumentParser(phil=scope, check_format=False)
    # No command-line args: take the phil defaults, silently.
    params, _ = arg_parser.parse_args(args=[], quick_parse=True, show_diff_phil=False)
    params.model = "KB"
    return params
def get_scaling_model():
    """Make a KB Scaling model instance"""
    params = generated_params()
    return KBScalingModel.from_data(params, [], [])
def generate_test_experiments(n=2):
    """Make a test experiment list of *n* experiments sharing one crystal model."""
    crystal_dict = {
        "__id__": "crystal",
        "real_space_a": [1.0, 0.0, 0.0],
        "real_space_b": [0.0, 1.0, 0.0],
        "real_space_c": [0.0, 0.0, 2.0],
        "space_group_hall_symbol": " C 2y",
    }
    elist = ExperimentList()
    for idx in range(n):
        # Each experiment spans images 1-10 with a 1 degree oscillation.
        elist.append(
            Experiment(
                crystal=Crystal.from_dict(crystal_dict),
                scan=Scan(image_range=[1, 10], oscillation=[0.0, 1.0]),
                scaling_model=get_scaling_model(),
                identifier=str(idx),
            )
        )
    return elist
def test_scale_and_filter_results_logging():
    """Test ScaleAndFilter.log_cycle_results method."""
    results = AnalysisResults()
    # Stand-ins for the scaling and filtering scripts; only the attributes
    # read by log_cycle_results are configured.
    scaling_script = mock.Mock()
    scaling_script.merging_statistics_result = "stats_results"
    scaling_script.scaled_miller_array.size.return_value = 1000
    filter_script = mock.Mock()
    filter_script.results_summary = {
        "dataset_removal": {
            "mode": "image_group",
            "image_ranges_removed": [[(6, 10), 0]],
            "experiments_fully_removed": [],
            "experiment_ids_fully_removed": [],
            "n_reflections_removed": 50,
        },
        "mean_cc_half": 80.0,
        "per_dataset_delta_cc_half_values": {
            "delta_cc_half_values": [-0.1, 0.1, -0.2, 0.2]
        },
    }

    def _parse_side_effect(*args):
        # Identity parse: pass merging stats through unchanged so the raw
        # "stats_results" string can be asserted on below.
        return args[0]

    with mock.patch.object(
        results, "_parse_merging_stats", side_effect=_parse_side_effect
    ):
        res = log_cycle_results(results, scaling_script, filter_script)
    # test things have been logged correctly
    cycle_results = res.get_cycle_results()
    assert len(cycle_results) == 1
    assert cycle_results[0]["cumul_percent_removed"] == 100 * 50.0 / 1000.0
    assert cycle_results[0]["n_removed"] == 50
    assert cycle_results[0]["image_ranges_removed"] == [[(6, 10), 0]]
    assert cycle_results[0]["removed_datasets"] == []
    assert cycle_results[0]["delta_cc_half_values"] == [-0.1, 0.1, -0.2, 0.2]
    assert res.get_merging_stats()[0] == "stats_results"
    assert res.initial_n_reflections == 1000

    # add another cycle of results; the removal percentage must accumulate.
    with mock.patch.object(
        results, "_parse_merging_stats", side_effect=_parse_side_effect
    ):
        res = log_cycle_results(res, scaling_script, filter_script)
    cycle_results = res.get_cycle_results()
    assert len(cycle_results) == 2
    assert cycle_results[1]["cumul_percent_removed"] == 100 * 2 * 50.0 / 1000.0
    assert cycle_results[1]["n_removed"] == 50
    assert cycle_results[1]["image_ranges_removed"] == [[(6, 10), 0]]
    assert cycle_results[1]["removed_datasets"] == []
    assert cycle_results[0]["delta_cc_half_values"] == [-0.1, 0.1, -0.2, 0.2]
    assert res.get_merging_stats()[1] == "stats_results"
    assert res.initial_n_reflections == 1000
def test_compute_delta_cchalf_returned_results():
    """Test that delta cchalf return necessary values for scale_and_filter."""
    # Check for correct recording of
    # results_summary['per_dataset_delta_cc_half_values']['delta_cc_half_values']
    summary = {}
    delta_cc = {0: -4, 1: 2, 2: -3, 3: -5, 4: 1}
    sorted_data, sorted_ccs = DeltaCCHalf.sort_deltacchalf_values(delta_cc, summary)
    # Datasets must come back ordered by ascending delta-cc-half.
    expected_data_order = [3, 0, 2, 4, 1]
    expected_cc_order = [-5, -4, -3, 1, 2]
    assert list(sorted_data) == expected_data_order
    assert list(sorted_ccs) == expected_cc_order
    assert (
        summary["per_dataset_delta_cc_half_values"]["delta_cc_half_values"]
        == expected_cc_order
    )

    # Check for correct recording for dataset mode
    exp = generate_test_experiments(2)
    refls = generate_test_reflections(2)
    ids_to_remove = [0]
    results_summary = {"dataset_removal": {}}
    _ = CCHalfFromDials.remove_datasets_below_cutoff(
        exp, refls, ids_to_remove, results_summary
    )
    assert "experiments_fully_removed" in results_summary["dataset_removal"]
    assert "n_reflections_removed" in results_summary["dataset_removal"]
    # Dataset "0" holds 10 reflections (see generate_test_reflections).
    assert results_summary["dataset_removal"]["experiments_fully_removed"] == ["0"]
    assert results_summary["dataset_removal"]["n_reflections_removed"] == 10

    # Check for correct recording for image group mode.
    exp = generate_test_experiments(2)
    refls = generate_test_reflections(2)
    ids_to_remove = [0, 1]
    # Each experiment is split into two image groups of 5 frames each.
    image_group_to_expid_and_range = {
        0: ("0", (1, 5)),
        1: ("0", (6, 10)),
        2: ("1", (1, 5)),
        3: ("1", (6, 10)),
    }
    expids_to_image_groups = {"0": [0, 1], "1": [2, 3]}
    results_summary = {"dataset_removal": {}}
    _ = CCHalfFromDials.remove_image_ranges_below_cutoff(
        exp,
        refls,
        ids_to_remove,
        image_group_to_expid_and_range,
        expids_to_image_groups,
        results_summary,
    )
    assert "experiments_fully_removed" in results_summary["dataset_removal"]
    assert "n_reflections_removed" in results_summary["dataset_removal"]
    assert "image_ranges_removed" in results_summary["dataset_removal"]
    # Both groups of experiment "0" were removed, so it is fully removed.
    assert results_summary["dataset_removal"]["experiments_fully_removed"] == ["0"]
    assert results_summary["dataset_removal"]["n_reflections_removed"] == 10
    assert [(6, 10), 0] in results_summary["dataset_removal"]["image_ranges_removed"]
    assert [(1, 5), 0] in results_summary["dataset_removal"]["image_ranges_removed"]
    assert len(results_summary["dataset_removal"]["image_ranges_removed"]) == 2
def test_analysis_results_to_from_dict():
    """Round-trip an AnalysisResults summary through from_dict/to_dict."""
    d = {
        "termination_reason": "made up",
        "initial_expids_and_image_ranges": [["foo", [1, 42]], ["bar", [1, 10]]],
        "expids_and_image_ranges": [["foo", [1, 42]]],
        "cycle_results": {"1": {"some stat": -424242}},
        "initial_n_reflections": 424242,
        "final_stats": "some final stats",
    }
    results = AnalysisResults.from_dict(d)
    # The cycle_results dict output by AnalysisResults has integer keys but after
    # conversion to json has str keys. AnalysisResults.from_dict expects str keys,
    # hence do the comparison after converting to/from json
    assert json.loads(json.dumps(results.to_dict())) == d
|
dials/dials
|
tests/algorithms/scaling/test_scale_and_filter.py
|
Python
|
bsd-3-clause
| 8,168
|
[
"CRYSTAL"
] |
1edaf40c11ec874ea8bebf395a6522ebfc1f891d7c6e2b300937eb761ea15b5d
|
#!/usr/bin/python
"""Run the dBoost Intel-sensor outlier-detection experiment sweep.

Each entry in ``experiments`` is ``[threshold, model, *model_params]``:
the first value feeds ``--statistical``, the second selects the model
flag (``--gaussian`` or ``--mixture``) and the remaining values are that
model's parameters. Each run's stdout is redirected to ../results/.
"""
import os
import itertools  # NOTE(review): kept from the original script; unused here.

experiments = [
    [1, "gaussian", 1.5],
    [0, "mixture", 1, 0.005],
    [0.7, "mixture", 1, 0.1],
    [0.7, "mixture", 1, 0.075],
    [0.7, "mixture", 2, 0.05],
    [0.7, "mixture", 2, 0.075],
]


def build_command(e):
    """Return the shell command string for one experiment spec ``e``.

    Raises ValueError for an unknown model name. (The original used
    ``assert(False)`` here, which is silently stripped under ``python -O``.)
    """
    if e[1] == "gaussian":
        f = "sensors_dirty_stat{}_{}{}.out".format(*e)
        return ("python dboost-stdin.py --minimal -F ' ' "
                "../datasets/real/intel/sensors-1000-dirty.txt "
                "--statistical {} --{} {} -d fracpart -d unix2date_float "
                "> ../results/{}").format(*(e + [f]))
    elif e[1] == "mixture":
        f = "sensors_dirty_stat{}_{}{}_{}.out".format(*e)
        return ("python dboost-stdin.py --minimal -F ' ' "
                "../datasets/real/intel/sensors-1000-dirty.txt "
                "--statistical {} --{} {} {} -d fracpart -d unix2date_float "
                "> ../results/{}").format(*(e + [f]))
    raise ValueError("unknown model type: {!r}".format(e[1]))


def main():
    """Run every experiment from inside the dboost source directory."""
    os.chdir("../dboost")
    for e in experiments:
        cmd = build_command(e)
        print(cmd)
        os.system(cmd)


if __name__ == "__main__":
    main()
|
cpitclaudel/dBoost
|
scripts/run_sensor_experiments.py
|
Python
|
gpl-3.0
| 888
|
[
"Gaussian"
] |
39c881e62185bc2751f31fe0642299e1a484d1bec9fbcb82044130fdc97d71e2
|
import numpy as N
from image import *
import mylogger
from copy import deepcopy as cp
from . import has_pl
if has_pl:
import matplotlib.pyplot as pl
import scipy
import scipy.signal as S
import _cbdsm
import functions as func
import _pytesselate as _pytess
import shapelets as sh
from scipy.optimize import leastsq
import nat
from math import *
import statusbar
from const import fwsig
import multi_proc as mp
import itertools
class Op_psf_vary(Op):
"""Computes variation of psf across the image """
    def __call__(self, img):
        """Estimate the PSF variation across the image and use it to correct
        deconvolved source/Gaussian sizes.

        Runs only when ``img.opts.psf_vary_do`` is set. Either a constant
        user-supplied PSF (``opts.psf_fwhm``) is used, or the variation is
        estimated by stacking unresolved sources per Voronoi tile and
        interpolating the fitted PSF parameters (or shapelet coefficients)
        across the image.
        NOTE(review): indentation reconstructed from a whitespace-stripped
        source; confirm nesting against the upstream PyBDSM psf_vary module.
        """
        if img.opts.psf_vary_do:
            mylog = mylogger.logging.getLogger("PyBDSM."+img.log+"Psf_Vary")
            mylogger.userinfo(mylog, '\nEstimating PSF variations')
            opts = img.opts
            dir = img.basedir + '/misc/'
            plot = False # debug figures
            image = img.ch0_arr

            try:
                from astropy.io import fits as pyfits
                old_pyfits = False
            except ImportError, err:
                # Fall back to standalone pyfits; versions older than 2.2
                # are unusable here and the whole module is skipped.
                from distutils.version import StrictVersion
                import pyfits
                if StrictVersion(pyfits.__version__) < StrictVersion('2.2'):
                    old_pyfits = True
                else:
                    old_pyfits = False

            if old_pyfits:
                mylog.warning('PyFITS version is too old: psf_vary module skipped')
                return

            if opts.psf_fwhm is not None:
                # User has specified a constant PSF to use, so skip PSF fitting/etc.
                psf_maj = opts.psf_fwhm[0] # FWHM in deg
                psf_min = opts.psf_fwhm[1] # FWHM in deg
                psf_pa = opts.psf_fwhm[2] # PA in deg
                mylogger.userinfo(mylog, 'Using constant PSF (major, minor, pos angle)',
                                  '(%.5e, %.5e, %s) degrees' % (psf_maj, psf_maj,
                                                                round(psf_pa, 1)))
            else:
                # Use did not specify a constant PSF to use, so estimate it
                over = 2
                generators = opts.psf_generators; nsig = opts.psf_nsig; kappa2 = opts.psf_kappa2
                snrtop = opts.psf_snrtop; snrbot = opts.psf_snrbot; snrcutstack = opts.psf_snrcutstack
                gencode = opts.psf_gencode; primarygen = opts.psf_primarygen; itess_method = opts.psf_itess_method
                tess_sc = opts.psf_tess_sc; tess_fuzzy= opts.psf_tess_fuzzy
                bright_snr_cut = opts.psf_high_snr
                s_only = opts.psf_stype_only
                if opts.psf_snrcut < 5.0:
                    mylogger.userinfo(mylog, "Value of psf_snrcut too low; increasing to 5")
                    snrcut = 5.0
                else:
                    snrcut = opts.psf_snrcut
                img.psf_snrcut = snrcut
                if opts.psf_high_snr is not None:
                    if opts.psf_high_snr < 10.0:
                        mylogger.userinfo(mylog, "Value of psf_high_snr too low; increasing to 10")
                        high_snrcut = 10.0
                    else:
                        high_snrcut = opts.psf_high_snr
                else:
                    high_snrcut = opts.psf_high_snr
                img.psf_high_snr = high_snrcut

                # Map the integer tesselation-method option to a weight function name.
                wtfns=['unity', 'roundness', 'log10', 'sqrtlog10']
                if 0 <= itess_method < 4: tess_method=wtfns[itess_method]
                else: tess_method='unity'

                ### now put all relevant gaussian parameters into a list
                ngaus = img.ngaus
                nsrc = img.nsrc
                num = N.zeros(nsrc, dtype=N.int32)
                peak = N.zeros(nsrc)
                xc = N.zeros(nsrc)
                yc = N.zeros(nsrc)
                bmaj = N.zeros(nsrc)
                bmin = N.zeros(nsrc)
                bpa = N.zeros(nsrc)
                code = N.array(['']*nsrc);
                rms = N.zeros(nsrc)
                src_id_list = []
                for i, src in enumerate(img.sources):
                    src_max = 0.0
                    for gmax in src.gaussians:
                        # Take only brightest Gaussian per source
                        if gmax.peak_flux > src_max:
                            src_max = gmax.peak_flux
                            g = gmax
                    num[i] = i
                    peak[i] = g.peak_flux
                    xc[i] = g.centre_pix[0]
                    yc[i] = g.centre_pix[1]
                    bmaj[i] = g.size_pix[0]
                    bmin[i] = g.size_pix[1]
                    bpa[i] = g.size_pix[2]
                    code[i] = img.sources[g.source_id].code
                    rms[i] = img.islands[g.island_id].rms
                gauls = (num, peak, xc, yc, bmaj, bmin, bpa, code, rms)
                tr_gauls = self.trans_gaul(gauls)

                # takes gaussians with code=S and snr > snrcut.
                if s_only:
                    tr = [n for n in tr_gauls if n[1]/n[8]>snrcut and n[7] == 'S']
                else:
                    tr = [n for n in tr_gauls if n[1]/n[8]>snrcut]
                g_gauls = self.trans_gaul(tr)

                # computes statistics of fitted sizes. Same as psfvary_fullstat.f in fBDSM.
                bmaj_a, bmaj_r, bmaj_ca, bmaj_cr, ni = _cbdsm.bstat(bmaj, None, nsig)
                bmin_a, bmin_r, bmin_ca, bmin_cr, ni = _cbdsm.bstat(bmin, None, nsig)
                bpa_a, bpa_r, bpa_ca, bpa_cr, ni = _cbdsm.bstat(bpa, None, nsig)

                # get subset of sources deemed to be unresolved. Same as size_ksclip_wenss.f in fBDSM.
                flag_unresolved = self.get_unresolved(g_gauls, img.beam, nsig, kappa2, over, img.psf_high_snr, plot)
                if len(flag_unresolved) == 0:
                    mylog.warning('Insufficient number of sources to determine PSF variation.\nTry changing the PSF options or specify a (constant) PSF with the "psf_fwhm" option')
                    return

                # see how much the SNR-weighted sizes of unresolved sources differ from the synthesized beam.
                wtsize_beam_snr = self.av_psf(g_gauls, img.beam, flag_unresolved)

                # filter out resolved sources
                tr_gaul = self.trans_gaul(g_gauls)
                tr = [n for i, n in enumerate(tr_gaul) if flag_unresolved[i]]
                g_gauls = self.trans_gaul(tr)
                mylogger.userinfo(mylog, 'Number of unresolved sources', str(len(g_gauls[0])))

                # get a list of voronoi generators. vorogenS has values (and not None) if generators='field'.
                vorogenP, vorogenS = self.get_voronoi_generators(g_gauls, generators, gencode, snrcut, snrtop, snrbot, snrcutstack)
                mylogger.userinfo(mylog, 'Number of generators for PSF variation', str(len(vorogenP[0])))
                if len(vorogenP[0]) < 3:
                    mylog.warning('Insufficient number of generators')
                    return

                mylogger.userinfo(mylog, 'Tesselating image')
                # group generators into tiles
                tile_prop = self.edit_vorogenlist(vorogenP, frac=0.9)

                # tesselate the image
                volrank, vorowts = self.tesselate(vorogenP, vorogenS, tile_prop, tess_method, tess_sc, tess_fuzzy, \
                                                  generators, gencode, image.shape)
                if opts.output_all:
                    func.write_image_to_file(img.use_io, img.imagename + '.volrank.fits', volrank, img, dir)

                tile_list, tile_coord, tile_snr = tile_prop
                ntile = len(tile_list)
                bar = statusbar.StatusBar('Determining PSF variation ............... : ', 0, ntile)
                mylogger.userinfo(mylog, 'Number of tiles for PSF variation', str(ntile))

                # For each tile, calculate the weighted averaged psf image. Also for all the sources in the image.
                cdelt = list(img.wcs_obj.acdelt[0:2])
                factor=3.
                psfimages, psfcoords, totpsfimage, psfratio, psfratio_aper = self.psf_in_tile(image, img.beam, g_gauls, \
                           cdelt, factor, snrcutstack, volrank, tile_prop, plot, img)
                npsf = len(psfimages)

            if opts.psf_use_shap:
                if opts.psf_fwhm is None:
                    # use totpsfimage to get beta, centre and nmax for shapelet decomposition. Use nmax=5 or 6
                    mask=N.zeros(totpsfimage.shape, dtype=bool)
                    (m1, m2, m3)=func.moment(totpsfimage, mask)
                    betainit=sqrt(m3[0]*m3[1])*2.0 * 1.4
                    tshape = totpsfimage.shape
                    cen = N.array(N.unravel_index(N.argmax(totpsfimage), tshape))+[1,1]
                    cen = tuple(cen)
                    nmax = 12
                    basis = 'cartesian'
                    betarange = [0.5,sqrt(betainit*max(tshape))]
                    beta, error = sh.shape_varybeta(totpsfimage, mask, basis, betainit, cen, nmax, betarange, plot)
                    if error == 1: print ' Unable to find minimum in beta'

                    # decompose all the psf images using the beta from above
                    nmax=12; psf_cf=[]
                    for i in range(npsf):
                        psfim = psfimages[i]
                        cf = sh.decompose_shapelets(psfim, mask, basis, beta, cen, nmax, mode='')
                        psf_cf.append(cf)
                        if img.opts.quiet == False:
                            bar.increment()
                    bar.stop()

                    # transpose the psf image list
                    xt, yt = N.transpose(tile_coord)
                    tr_psf_cf = N.transpose(N.array(psf_cf))

                    # interpolate the coefficients across the image. Ok, interpolate in scipy for
                    # irregular grids is crap. doesnt even pass through some of the points.
                    # for now, fit polynomial.
                    compress = 100.0
                    x, y = N.transpose(psfcoords)
                    if len(x) < 3:
                        mylog.warning('Insufficient number of tiles to do interpolation of PSF variation')
                        return

                    psf_coeff_interp, xgrid, ygrid = self.interp_shapcoefs(nmax, tr_psf_cf, psfcoords, image.shape, \
                             compress, plot)

                    psfshape = psfimages[0].shape
                    skip = 5
                    aa = self.create_psf_grid(psf_coeff_interp, image.shape, xgrid, ygrid, skip, nmax, psfshape, \
                         basis, beta, cen, totpsfimage, plot)
                    img.psf_images = aa
            else:
                if opts.psf_fwhm is None:
                    if ntile < 4:
                        mylog.warning('Insufficient number of tiles to do interpolation of PSF variation')
                        return
                    else:
                        # Fit stacked PSFs with Gaussians and measure aperture fluxes
                        bm_pix = N.array([img.pixel_beam()[0]*fwsig, img.pixel_beam()[1]*fwsig, img.pixel_beam()[2]])
                        psf_maj = N.zeros(npsf)
                        psf_min = N.zeros(npsf)
                        psf_pa = N.zeros(npsf)
                        if img.opts.quiet == False:
                            bar.start()
                        for i in range(ntile):
                            psfim = psfimages[i]
                            mask = N.zeros(psfim.shape, dtype=bool)
                            x_ax, y_ax = N.indices(psfim.shape)
                            maxv = N.max(psfim)
                            # Initial guess: peak at image centre, beam-sized axes.
                            p_ini = [maxv, (psfim.shape[0]-1)/2.0*1.1, (psfim.shape[1]-1)/2.0*1.1, bm_pix[0]/fwsig*1.3,
                                     bm_pix[1]/fwsig*1.1, bm_pix[2]*2]
                            para, ierr = func.fit_gaus2d(psfim, p_ini, x_ax, y_ax, mask)
                            ### first extent is major
                            if para[3] < para[4]:
                                para[3:5] = para[4:2:-1]
                                para[5] += 90
                            ### clip position angle
                            para[5] = divmod(para[5], 180)[1]

                            psf_maj[i] = para[3]
                            psf_min[i] = para[4]
                            posang = para[5]
                            while posang >= 180.0:
                                posang -= 180.0
                            psf_pa[i] = posang

                            if img.opts.quiet == False:
                                bar.increment()
                        bar.stop()

                        # Interpolate Gaussian parameters
                        if img.aperture is None:
                            psf_maps = [psf_maj, psf_min, psf_pa, psfratio]
                        else:
                            psf_maps = [psf_maj, psf_min, psf_pa, psfratio, psfratio_aper]
                        nimgs = len(psf_maps)
                        bar = statusbar.StatusBar('Interpolating PSF images ................ : ', 0, nimgs)
                        if img.opts.quiet == False:
                            bar.start()
                        map_list = mp.parallel_map(func.eval_func_tuple,
                                    itertools.izip(itertools.repeat(self.interp_prop),
                                    psf_maps, itertools.repeat(psfcoords),
                                    itertools.repeat(image.shape)), numcores=opts.ncores,
                                    bar=bar)
                        if img.aperture is None:
                            psf_maj_int, psf_min_int, psf_pa_int, psf_ratio_int = map_list
                        else:
                            psf_maj_int, psf_min_int, psf_pa_int, psf_ratio_int, psf_ratio_aper_int = map_list

                        # Smooth if desired
                        if img.opts.psf_smooth is not None:
                            sm_scale = img.opts.psf_smooth / img.pix2beam([1.0, 1.0, 0.0])[0] / 3600.0 # pixels
                            if img.opts.aperture is None:
                                psf_maps = [psf_maj_int, psf_min_int, psf_pa_int, psf_ratio_int]
                            else:
                                psf_maps = [psf_maj_int, psf_min_int, psf_pa_int, psf_ratio_int, psf_ratio_aper_int]
                            nimgs = len(psf_maps)
                            bar = statusbar.StatusBar('Smoothing PSF images .................... : ', 0, nimgs)
                            if img.opts.quiet == False:
                                bar.start()
                            map_list = mp.parallel_map(func.eval_func_tuple,
                                        itertools.izip(itertools.repeat(self.blur_image),
                                        psf_maps, itertools.repeat(sm_scale)), numcores=opts.ncores,
                                        bar=bar)
                            if img.aperture is None:
                                psf_maj_int, psf_min_int, psf_pa_int, psf_ratio_int = map_list
                            else:
                                psf_maj_int, psf_min_int, psf_pa_int, psf_ratio_int, psf_ratio_aper_int = map_list

                        # Make sure all smoothed, interpolated images are ndarrays
                        psf_maj_int = N.array(psf_maj_int)
                        psf_min_int = N.array(psf_min_int)
                        psf_pa_int = N.array(psf_pa_int)
                        psf_ratio_int = N.array(psf_ratio_int)
                        if img.aperture is None:
                            psf_ratio_aper_int = N.zeros(psf_maj_int.shape, dtype=N.float32)
                        else:
                            psf_ratio_aper_int = N.array(psf_ratio_aper_int, dtype=N.float32)

                        # Blank with NaNs if needed
                        mask = img.mask_arr
                        if isinstance(mask, N.ndarray):
                            pix_masked = N.where(mask == True)
                            psf_maj_int[pix_masked] = N.nan
                            psf_min_int[pix_masked] = N.nan
                            psf_pa_int[pix_masked] = N.nan
                            psf_ratio_int[pix_masked] = N.nan
                            psf_ratio_aper_int[pix_masked] = N.nan

                        # Store interpolated images. The major and minor axis images are
                        # the sigma in units of arcsec, the PA image in units of degrees east of
                        # north, the ratio images in units of 1/beam.
                        img.psf_vary_maj_arr = psf_maj_int * img.pix2beam([1.0, 1.0, 0.0])[0] * 3600.0 # sigma in arcsec
                        img.psf_vary_min_arr = psf_min_int * img.pix2beam([1.0, 1.0, 0.0])[0] * 3600.0 # sigma in arcsec
                        img.psf_vary_pa_arr = psf_pa_int
                        img.psf_vary_ratio_arr = psf_ratio_int # in 1/beam
                        img.psf_vary_ratio_aper_arr = psf_ratio_aper_int # in 1/beam

                        if opts.output_all:
                            func.write_image_to_file(img.use_io, img.imagename + '.psf_vary_maj.fits', img.psf_vary_maj_arr*fwsig, img, dir)
                            func.write_image_to_file(img.use_io, img.imagename + '.psf_vary_min.fits', img.psf_vary_min_arr*fwsig, img, dir)
                            func.write_image_to_file(img.use_io, img.imagename + '.psf_vary_pa.fits', img.psf_vary_pa_arr, img, dir)
                            func.write_image_to_file(img.use_io, img.imagename + '.psf_vary_ratio.fits', img.psf_vary_ratio_arr, img, dir)
                            func.write_image_to_file(img.use_io, img.imagename + '.psf_vary_ratio_aper.fits', img.psf_vary_ratio_aper_arr, img, dir)

                # Loop through source and Gaussian lists and deconvolve the sizes using appropriate beam
                bar2 = statusbar.StatusBar('Correcting deconvolved source sizes ..... : ', 0, img.nsrc)
                if img.opts.quiet == False:
                    bar2.start()
                for src in img.sources:
                    src_pos = img.sky2pix(src.posn_sky_centroid)
                    src_pos_int = (int(src_pos[0]), int(src_pos[1]))
                    gaus_c = img.gaus2pix(src.size_sky, src.posn_sky_centroid)
                    if opts.psf_fwhm is None:
                        # Look up the interpolated PSF at the source position.
                        gaus_bm = [psf_maj_int[src_pos_int]*fwsig, psf_min_int[src_pos_int]*fwsig, psf_pa_int[src_pos_int]]
                    else:
                        # Use user-specified constant PSF instead
                        gaus_bm = img.beam2pix(opts.psf_fwhm)
                    gaus_dc, err = func.deconv2(gaus_bm, gaus_c)
                    src.deconv_size_sky = img.pix2gaus(gaus_dc, src_pos)
                    src.deconv_size_skyE = [0.0, 0.0, 0.0]
                    for g in src.gaussians:
                        gaus_c = img.gaus2pix(g.size_sky, src.posn_sky_centroid)
                        gaus_dc, err = func.deconv2(gaus_bm, gaus_c)
                        g.deconv_size_sky = img.pix2gaus(gaus_dc, g.centre_pix)
                        g.deconv_size_skyE = [0.0, 0.0, 0.0]
                        if img.opts.quiet == False:
                            bar2.spin()
                    if img.opts.quiet == False:
                        bar2.increment()
                bar2.stop()
            img.completed_Ops.append('psf_vary')
##################################################################################################
def trans_gaul(self, q):
" transposes a tuple of .gaul values "
y=[]
for i in range(len(q[0])):
elem=[]
for j in range(len(q)):
elem.append(q[j][i])
y.append(elem)
return y
##################################################################################################
    def bindata(self, over, num): #ptpbin,nbin,ptplastbin, same as get_bins in fBDSM.
        """Choose a binning scheme for `num` points with overlap factor `over`.

        Returns (ptpbin, nbin, ptplastbin): points per bin (as float), number
        of bins, and the number of points in the last full bin.
        NOTE(review): Python 2 code — `num/5` etc. rely on integer division
        for integer `num`.
        """
        if num <= 100: ptpbin=num/5
        if num > 100: ptpbin=num/10
        if num > 1000: ptpbin=num/20
        # force an even number of points per bin
        if ptpbin % 2 == 1: ptpbin=ptpbin+1
        if num < 10: ptpbin=num
        ptpbin = float(ptpbin) # cast to float to avoid integer division errors
        nbin=int((num-ptpbin)/(ptpbin/over)+1)
        ptplastbin=int((num-1)-(nbin-1)*ptpbin/over)
        nbin=nbin+1
        return ptpbin, nbin, ptplastbin
##################################################################################################
    def bin_and_stats_ny(self, x,y,over,ptpbin,nbin,ptplastbin,nsig):
        """Bin y against x (overlapping bins) and return per-bin statistics
        with iterative nsig median clipping.

        Returns (xval, meany, stdy, mediany) arrays of length nbin.
        Same as calcmedianclip2vec.f (code=YYN) in fBDSM.
        """
        import math
        n1=N.array(range(nbin))+1 # bin number
        n2=N.array([ptpbin]*nbin); n2[nbin-2]=ptplastbin; n2[nbin-1]=ptpbin/over
        n3=N.array([ptpbin]*nbin, dtype=float); n3[nbin-1]=float(over)*(len(x)-ptpbin/2)/(nbin-1)
        xval=N.zeros(nbin)
        meany=N.zeros(nbin); stdy=N.zeros(nbin); mediany=N.zeros(nbin)
        for i in range(nbin):
            lb=round(1+(n1[i]-1)*n3[i]/over+(1-1))-1 # -1 for python indexing
            ub=round(1+(n1[i]-1)*n3[i]/over+(n2[i]-1))-1 # -1 for python indexing
            x1=x[lb:ub+1]; y1=y[lb:ub+1]
            # do calcmedianclip2vec.f for code=YYN
            if len(x1) > 0 and len(y1) > 0:
                # Iterate the clip at most 6 times or until no points are rejected.
                nout=100; niter=0
                while nout>0 and niter<6:
                    med1=N.median(y1[:])
                    med2=10.**(N.median(N.log10(x1[:])))
                    medstd=0 # calcmedianstd.f
                    for j in y1: medstd += (j-med1)*(j-med1)
                    medstd=math.sqrt(medstd/len(y1)) #
                    av1=N.mean(y1); std1=func.std(y1)
                    av2=N.mean(x1); std2=func.std(x1)
                    # get_medianclip_vec2
                    z=N.transpose([x1, y1])
                    z1=N.transpose([n for n in z if abs(n[1]-med1)<=nsig*medstd])
                    nout=len(x1)-len(z1[0])
                    x1=z1[0]; y1=z1[1];
                    niter+=1
                xval[i]=med2;
                meany[i]=av1; stdy[i]=std1; mediany[i]=med1
        # Clamp the relative scatter of the last (partial) bin to that of the
        # previous bin.
        if stdy[nbin-1]/mediany[nbin-1] > stdy[nbin-2]/mediany[nbin-2]:
            stdy[nbin-1]=stdy[nbin-2]/mediany[nbin-2]*mediany[nbin-1]
        return xval, meany, stdy, mediany
##################################################################################################
def LM_fit(self, x, y, err, funct, order=0):
if funct == func.poly:
p0=N.array([y[N.argmax(x)]] + [0]*order)
if funct == func.wenss_fit:
p0=N.array([y[N.argmax(x)]] + [1.])
res=lambda p, x, y, err: (y-funct(p, x))/err
(p, flag)=leastsq(res, p0, args=(x, y, err))
return p
##################################################################################################
    def fit_bins_func(self, x,y,over,ptpbin,nbin,ptplastbin,nsig): # sub_size_ksclip
        """Bin (x, y), then fit the relative scatter and the median trend.

        Returns (s_c, s_dm): the wenss-fit coefficients of stdy/medy and the
        polynomial coefficients of medy vs log10(x).
        """
        import math
        (xval,meany,stdy,medy)=self.bin_and_stats_ny(x,y,over,ptpbin,nbin,ptplastbin,nsig)
        yfit=stdy/medy
        err=N.array([1.]*nbin)
        # The last two (partial) bins get inflated errors.
        if ptplastbin > 0:
            err[nbin-2]=err[0]*math.sqrt(1.0*ptpbin/ptplastbin)
            err[nbin-1]=err[0]*math.sqrt(1.0*ptpbin*over/ptplastbin)

        # Find the first index where yfit starts decreasing over 4 bins.
        i=0
        while i<nbin-4 and (N.all(N.sort(yfit[i:i+4])[::-1] == yfit[i:i+4]) == False):
            i+=1
        if i==nbin-4: sind=0
        else: sind=i-1
        if sind < 1:
            sind = 0
        if sind > 0.25*nbin:
            sind=int(round(0.25*nbin))-1
        s_c=self.LM_fit(xval[sind:],yfit[sind:],err[sind:], func.wenss_fit)

        err[:]=1.
        s_cm=self.LM_fit(N.log10(xval),medy,err,func.poly, order=1)
        if len(xval) >= 3:
            s_dm=self.LM_fit(N.log10(xval),medy,err,func.poly, order=2)
        else:
            s_dm = (N.array([s_cm[0], s_cm[1], 0.0]), 0)
        # For small bins fall back to the linear fit padded with a zero term.
        if ptpbin<75: s_dm=N.append(s_cm[:], [0.])
        return s_c, s_dm
##################################################################################################
    def get_unresolved(self, g_gauls, beam, nsig, kappa2, over, bright_snr_cut=20.0, plot=False):
        """Gets subset of unresolved sources.

        Also flags as unresolved all sources with SNRs above
        bright_cut_snr, since fitting below is unreliable for bright
        sources.

        Returns a boolean flag array ordered like the input sources
        (True ==> unresolved), or [] when fewer than 10 sources are given.
        """
        num=len(g_gauls[0])
        if num < 10:
            # Too few sources to do fitting
            return []
        # Sizes normalised by the synthesized beam (beam is in deg -> arcsec).
        b1=N.asarray(g_gauls[4])/(beam[0]*3600.)
        b2=N.asarray(g_gauls[5])/(beam[1]*3600.)
        s1=N.asarray(g_gauls[1])/N.array(g_gauls[8])
        snr=N.array(s1)
        index=snr.argsort()
        snr=snr[index]
        nmaj=N.array(b1)[index]
        nmin=N.array(b2)[index]

#        if plot: pl.figure()
        f_sclip=N.zeros((2,num), dtype=bool)
        for idx, nbeam in enumerate([nmaj, nmin]):
            xarr=N.copy(snr)
            yarr=N.copy(nbeam)
            # Iterative kappa-sigma clip against the fitted size-vs-SNR trend.
            niter=0; nout=num; noutold=nout*2
            while niter<10 and nout >0.75*num:
                (ptpbin, nbin, ptplastbin)=self.bindata(over,nout) # get_bins in fBDSM
                (s_c,s_dm) = self.fit_bins_func(xarr,yarr,over,ptpbin,nbin,ptplastbin,nsig) # size_ksclip_wenss in fBDSM
                noutold = len(xarr)
                z = N.transpose([xarr, yarr, s_dm[0]+s_dm[1]*N.log10(xarr)+s_dm[2]*(N.log10(xarr)**2.), \
                    N.sqrt(s_c[0]*s_c[0]+s_c[1]*s_c[1]/(xarr*xarr)) ])
                z1 = N.transpose([n for n in z if abs(n[1]-n[2])/(n[2]*n[3])<kappa2]) # sub_size_wenss_getnum in fBDSM
                if len(z1) == 0:
                    break
                nout = len(z1[0])
                niter += 1
                xarr = z1[0]; yarr = z1[1]; # end of sub_size_wenss_getnum
                if noutold == nout: break

            # flag in the 'unresolved' sources. returns flag array, True ==> unresolved
            logsnr=N.log10(snr)
            dumr = N.sqrt(s_c[0]*s_c[0]+s_c[1]*s_c[1]/(snr*snr))
            med = s_dm[0]+s_dm[1]*logsnr+s_dm[2]*(logsnr*logsnr)
            f_sclip[idx] = N.abs((nbeam-med)/(med*dumr)) < N.array([kappa2]*num)
        # Unresolved only if both the major and minor axes pass the clip.
        f_s = f_sclip[0]*f_sclip[1]

        # Add bright sources
        if bright_snr_cut is not None:
            if bright_snr_cut < 20.0:
                bright_snr_cut = 20.0
            bright_srcs = N.where(snr >= bright_snr_cut)
            if len(bright_srcs[0]) > 0:
                f_s[bright_srcs] = True

        # now make plots
#        if plot:
#          bb=[b1, b2]
#          pl.subplot(211+idx)
#          pl.semilogx(s1, bb[idx], 'og')
#          f0=f_sclip[idx][index.argsort()]
#          sf=[n for i, n in enumerate(s1) if f0[i]]
#          b1f=[n for i, n in enumerate(bb[idx]) if f0[i]]
#          pl.semilogx(sf, b1f, 'or')
#          pl.semilogx(snr,med,'-')
#          pl.semilogx(snr,med+med*dumr*(N.array([kappa2]*num)),'-')
#          pl.semilogx(snr,med-med*dumr*(N.array([kappa2]*num)),'-')
#          pl.title(' axis ' + str(idx))
#
        # Undo the SNR sort so flags align with the caller's source order.
        return f_s[index.argsort()]
##################################################################################################
def av_psf(self, g_gauls, beam, flag):
""" calculate how much the SNR-weighted sizes of unresolved sources differs from the
synthesized beam. Same as av_psf.f in fBDSM."""
from math import sqrt
bmaj = N.asarray(g_gauls[4])
bmin = N.asarray(g_gauls[5])
bpa = N.asarray(g_gauls[6])
wt = N.asarray(g_gauls[1])/N.asarray(g_gauls[8])
flagwt = wt*flag
sumwt = N.sum(flagwt)
w1 = N.sum(flagwt*flagwt)
wtavbm = N.array([N.sum(bmaj*flagwt), N.sum(bmin*flagwt), N.sum(bpa*flagwt)])/sumwt
dumrar = N.array([N.sum(bmaj*bmaj*flagwt), N.sum(bmin*bmin*flagwt), N.sum(bpa*bpa*flagwt)])
dd = sumwt*sumwt-w1
wtstdbm = N.sqrt((dumrar - wtavbm*wtavbm*sumwt)*sumwt/dd)
avpa = N.sum(bpa*flagwt-180.0*flagwt*N.array(bpa >= 90))/sumwt
stdpa = N.sum(bpa*flagwt+(180.0*180.0-360.0*bpa)*flagwt*N.array(bpa >= 90))
stdpa = sqrt(abs((stdpa-avpa*avpa*sumwt)*sumwt/dd))
if stdpa < wtstdbm[2]:
wtstdbm[2] = stdpa
wtavbm[2] = avpa
return (wtavbm - N.array([beam[0]*3600.0, beam[1]*3600.0, beam[2]]))/wtstdbm
##################################################################################################
    def get_voronoi_generators(self, g_gauls, generators, gencode, snrcut, snrtop, snrbot, snrcutstack):
        """This gets the list of all voronoi generators. It is either the centres of the brightest
        sources, or is imported from metadata (in future).

        Returns (vorogenP, vorogenS); vorogenS is currently always None.
        """
        from math import sqrt

        num=len(g_gauls[0])
        snr=N.asarray(g_gauls[1])/N.asarray(g_gauls[8])

        # Sort by SNR; keep both ascending (snr_incr) and descending (snr) views.
        index=snr.argsort()
        snr_incr = snr[index]
        snr = snr_incr[::-1]
        x = N.asarray(g_gauls[2])[index]
        y = N.asarray(g_gauls[3])[index]

        cutoff = 0
        if generators == 'calibrators' or generators == 'field':
            if gencode != 'file':
                gencode = 'list'
            if gencode == 'list':
                # Take the brightest snrtop fraction of sources as generators.
                cutoff = int(round(num*(snrtop)))
                if cutoff > len(snr):
                    cutoff = len(snr)
                # Make sure we don't fall below snrcutstack (SNR cut for stacking of PSFs), since
                # it makes no sense to make tiles with generators that fall below this cut.
                if snr[cutoff-1] < snrcutstack:
                    cutoff = num - snr_incr.searchsorted(snrcutstack)

        if generators == 'calibrators':
            if gencode == 'file':
                raise NotImplementedError, "gencode=file not yet implemented."

        # x/y were sorted ascending in SNR; reverse so they match the
        # descending snr list before taking the top `cutoff` entries.
        x1 = x.tolist()
        y1 = y.tolist()
        x1.reverse()
        y1.reverse()
        snr1 = snr.tolist()
        vorogenP = N.asarray([x1[0:cutoff], y1[0:cutoff], snr1[0:cutoff]])

        # vorogenS is placeholder for a future secondary generator list.
        vorogenS = None

        return vorogenP, vorogenS
##################################################################################################
    def edit_vorogenlist(self, vorogenP, frac):
        """ Edit primary voronoi generator list. Each tile has a tile centre and can
        have more than one generator to be averaged. tile_list is a list of arrays, indexed
        by the tile number and each array is an array of numbers in the ngen list which are
        the generators in that tile. xtile, ytile and snrtile are arrays of length number_of_tiles
        and have x,y,snr of each tile. Group together generators
        if closer than a fraction of dist to third closest.

        NOTE(review): Python 2 code — N.array(map(...)) relies on map
        returning a list.
        """
        xgen, ygen, snrgen = vorogenP
        flag = N.zeros(len(xgen))
        coord=N.array([xgen,ygen]).transpose()
        tile_list = []
        tile_coord = []; tile_snr = []
        for i in range(len(xgen)):
            # Distances from generator i to every generator (including itself).
            dist = N.array(map(lambda t: func.dist_2pt(coord[i], t), coord))
            indi = N.argsort(dist)
            sortdist = dist[indi]
            if sortdist[1] < frac * sortdist[2]: # first is the element itself
                if flag[indi[1]] + flag[i] == 0: # not already deleted from other pair
                    # Merge the close pair into one SNR-weighted tile centre.
                    tile_list.append([i, indi[1]])
                    tile_coord.append((coord[i]*snrgen[i]+coord[indi[1]]*snrgen[indi[1]])/(snrgen[i]+snrgen[indi[1]]))
                    tile_snr.append(snrgen[i]+snrgen[indi[1]])
                    flag[i] = 1
                    flag[indi[1]] = 1
            else:
                if len(dist) > 3:
                    if sortdist[1]+sortdist[2] < 2.0*frac*sortdist[3]: # for 3 close-by sources
                        in1=indi[1]
                        in2=indi[2]
                        if flag[in1]+flag[in2]+flag[i] == 0: # not already deleted from others
                            # Merge the close triple into one SNR-weighted tile.
                            tile_list.append([i, in1, in2])
                            tile_coord.append((coord[i]*snrgen[i]+coord[in1]*snrgen[in1]+coord[in2]*snrgen[in2]) \
                                        /(snrgen[i]+snrgen[in1]+snrgen[in2]))
                            tile_snr.append(snrgen[i]+snrgen[in1]+snrgen[in2])
                            flag[i] = 1
                            flag[in1] = 1
                            flag[in2] = 1
                else:
                    # Too few generators to group: singleton tile.
                    tile_list.append([i])
                    tile_coord.append(coord[i])
                    tile_snr.append(snrgen[i])

        # Assign any leftover generators
        for i in range(len(xgen)):
            if flag[i] == 0:
                tile_list.append([i])
                tile_coord.append(coord[i])
                tile_snr.append(snrgen[i])

        return tile_list, tile_coord, tile_snr
##################################################################################################
def tess_simple(self, vorogenP, wts, tess_sc, tess_fuzzy, shape):
""" Simple tesselation """
xgen, ygen, snrgen = vorogenP
volrank = _pytess.pytess_simple(shape[0], shape[1], xgen, ygen, snrgen, \
wts, tess_fuzzy, tess_sc)
return volrank
##################################################################################################
def tess_roundness(self, vorogenP, tess_sc, tess_fuzzy, shape):
""" Tesselation, modified to make the tiles more round. """
xgen, ygen, snrgen = vorogenP
volrank = _pytess.pytess_roundness(shape[0], shape[1], xgen, ygen, snrgen, \
tess_fuzzy, tess_sc)
return volrank
##################################################################################################
    def pixintile(self, tilecoord, pixel, tess_method, wts, tess_sc, tess_fuzzy):
        """ This has routines to find out which tile a given pixel belongs to.

        Returns the tile index for `pixel`; the 'roundness' method and
        non-'s' tess_sc values are not implemented.
        """
        if tess_method == 'roundness':
            #tilenum = pytess_roundness(tilecoord, pixel, wts, tess_sc, tess_fuzzy)
            print " Not yet implemented !!!! "
            return 0
        else:
            xgen, ygen = tilecoord
            xgen = N.asarray(xgen)
            ygen = N.asarray(ygen)
            ngen = len(xgen)
            i,j = pixel
            # Weight-scaled Euclidean distance from the pixel to each generator.
            dist = N.sqrt((i-xgen)*(i-xgen)+(j-ygen)*(j-ygen))/wts
            minind = dist.argmin()

            if tess_sc == 's':
                tilenum=minind
            else:
                # NOTE(review): tilenum is undefined on this path — the
                # following return would raise NameError if ever reached.
                print " Not yet implemented !!!! "
            return tilenum
##################################################################################################
def tesselate(self, vorogenP, vorogenS, tile_prop, tess_method, tess_sc, tess_fuzzy, generators, gencode, shape):
""" Various ways of tesselating. If generators='calibrator', no need to tesselate, just get
modified list based on very nearby sources. If generators='field' then tesselate. The image
is tesselated based on tile_prop. """
wtfn={'unity' : lambda x : N.ones(len(x)), \
'log10' : N.log10, \
'sqrtlog10' : lambda x : N.sqrt(N.log10(x)), \
'roundness' : N.array}
tile_list, tile_coord, tile_snr = tile_prop
xt = self.trans_gaul(tile_coord)[0]
yt = self.trans_gaul(tile_coord)[1]
vorogenT = xt, yt, tile_snr
wt_fn = wtfn[tess_method]
wts = wt_fn(tile_snr)
if tess_method == 'roundness':
volrank = self.tess_roundness(vorogenT, tess_sc, tess_fuzzy, shape)
else:
volrank = self.tess_simple(vorogenT, wts, tess_sc, tess_fuzzy, shape)
return volrank, wts
##################################################################################################
    def edit_tile(self, ltnum, g_gauls, flag_unresolved, snrcutstack, volrank, tile_prop, tess_sc, \
        tess_fuzzy, wts, tess_method, plot):
        """ Looks at tiles with no (or one) unresolved source inside it and deletes it and recomputes
        the tiling. For now, does not recompute since we wont use the rank for those pixels anyway.

        Returns (ngenpertile, tile_prop, r2t) where r2t maps each original
        tile index to the surviving (merged) tile index it belongs to.
        """
        if ltnum > 1: raise NotImplementedError, "NOT YET IMPLEMENTED FOR LTNUM>1"
        tile_list, tile_coord, tile_snr = tile_prop
        tr_gaul = self.trans_gaul(g_gauls)
        # Keep only unresolved gaussians above the stacking SNR cut.
        tr=[n for i, n in enumerate(tr_gaul) if flag_unresolved[i] and n[1]/n[8] >= snrcutstack]
        ntile = len(tile_list)
        ngenpertile=N.zeros(ntile)
        # Count usable generators falling inside each tile (volrank holds
        # tile number + 1 at each pixel).
        for itile in range(ntile):
            tile_gauls = [n for n in tr if volrank[int(round(n[2])),int(round(n[3]))]-1 \
                == itile]
            ngenpertile[itile]=len(tile_gauls)
        new_n = N.sum(ngenpertile >= ltnum)
        # prepare list of good tiles to pass to pixintile
        goodtiles = N.array(N.where(ngenpertile >= ltnum)[0])
        new_n = len(goodtiles)
        tile_coord_n = [n for i,n in enumerate(tile_coord) if i in goodtiles]
        wts_n = [n for i,n in enumerate(wts) if i in goodtiles]
        # r2t: each bad tile is reassigned to the nearest good tile centre.
        r2t = N.zeros(ntile, dtype=int)
        entry = -1
        for itile in range(ntile):
            if ngenpertile[itile] >= ltnum:
                r2t[itile] = itile
            else:
                pixel = tile_coord[itile]
                tilenum = self.pixintile(self.trans_gaul(tile_coord_n), pixel, tess_method, wts_n, tess_sc, tess_fuzzy)
                r2t[itile] = tilenum
        # Compact r2t so surviving tile indices are contiguous 0..new_n-1.
        for itile in range(new_n):
            num = N.sum(r2t == itile)
            if num == 0:
                minarr = -999
                while minarr != itile:
                    arr = N.where(r2t > itile)[0]
                    minarr = r2t[arr].min()-1
                    for i in arr: r2t[i]=r2t[i]-1
        # Merge member lists, SNR-weighted centres and summed SNRs of the
        # tiles collapsed into each surviving tile.
        n_tile_list = []; n_tile_coord = []; n_tile_snr = []
        for itile in range(new_n):
            ind = N.where(r2t == itile)[0]; ind1 = []
            for i in ind: ind1 = ind1 + tile_list[i]
            n_tile_list.append(ind1)
            snrs = N.array([tile_snr[i] for i in ind])
            coords = N.array([tile_coord[i] for i in ind])
            n_tile_snr.append(N.sum(snrs))
            n_tile_coord.append(N.sum([snrs[i]*coords[i] for i in range(len(snrs))], 0)/N.sum(snrs))
        # Recount generators per surviving tile through the r2t mapping.
        ngenpertile=N.zeros(new_n)
        for itile in range(new_n):
            tile_gauls = [n for n in tr if r2t[volrank[int(round(n[2])),int(round(n[3]))]-1] \
                == itile]
            ngenpertile[itile]=len(tile_gauls)
        tile_prop = n_tile_list, n_tile_coord, n_tile_snr
        return ngenpertile, tile_prop, r2t
##################################################################################################
    def stackpsf(self, image, beam, g_gauls, wts, cdelt, factor):
        """ Stacks all the images of sources in the gaussian list gauls from image, out to
        a factor times the beam size. Currently the mask is for the whole image but need to
        modify it for masks for each gaussian. These gaussians are supposed to be relatively
        isolated unresolved sources. Cut out an image a big bigger than facXbeam and imageshift
        to nearest half pixel and then add.
        Does not handle masks etc well at all. Masks for image for blanks, masks for \
        islands, etc.

        Returns the SNR-weighted, peak-normalised stacked psf image.
        """
        gxcens_pix = g_gauls[2]
        gycens_pix = g_gauls[3]
        peak = g_gauls[1]
        # Output stacked-psf image size: factor times the fwhm in pixels.
        psfimsize = int(round(max(beam[0], beam[1])/max(cdelt[0], cdelt[1]) * factor))    # fac X fwhm; fac ~ 2
        psfimage = N.zeros((psfimsize, psfimsize), dtype=N.float32)
        # Cutout is taken a bit larger than the psf image so sub-pixel
        # shifting does not run off the edge.
        cs2=cutoutsize2 = int(round(psfimsize*(1. + 2./factor)/2.)) # size/2. factor => to avoid edge effects etc
        cc = cutoutcen_ind=[cs2, cs2]
        cpsf=cen_psf_ind = N.array([int(round(psfimsize))/2]*2)
        wt=0.
        num=len(gxcens_pix)
        for isrc in range(num):   #  MASK !!!!!!!!!!!
            wt += wts[isrc]
            gcp=N.array([gxcens_pix[isrc], gycens_pix[isrc]])
            gcen_ind=gcp-1
            rc=rcen_ind = N.asarray(N.round(gcen_ind), dtype=int)
            # Fractional-pixel shift needed to centre the source exactly.
            shift=cc-(gcen_ind-(rc-cs2))
            cutimage = image[rc[0]-cs2:rc[0]+cs2,rc[1]-cs2:rc[1]+cs2]
            if len(cutimage.shape) == 3: cutimage=cutimage[:,:,0]
            if 0 not in cutimage.shape:
                if sum(sum(N.isnan(cutimage))) == 0:
                    # Shift, normalise by the source peak and weight by SNR.
                    im_shift = func.imageshift(cutimage, shift)
                    im_shift = im_shift/peak[isrc]*wts[isrc]
                    subim_shift = im_shift[cc[0]-cpsf[0]:cc[0]-cpsf[0]+psfimsize,cc[1]-cpsf[1]:cc[1]-cpsf[1]+psfimsize]
                    if subim_shift.shape == psfimage.shape:
                        # Check shapes, as they can differ if source is near edge of image.
                        # If they do differ, don't use that source (may be distorted).
                        psfimage += subim_shift
        psfimage = psfimage/wt
        return psfimage
##################################################################################################
    def psf_in_tile(self, image, beam, g_gauls, cdelt, factor, snrcutstack, volrank, \
        tile_prop, plot, img):
        """ For each tile given by tile_prop, make a list of all gaussians in the constituent tesselations
        and pass it to stackpsf with a weight for each gaussian, to calculate the average psf per tile.
        Should define weights inside a tile to include closure errors.

        Returns (psfimages, psfcoords, totpsfimage, psfratio, psfratio_aper).
        """
        mylog = mylogger.logging.getLogger("PyBDSM."+img.log+"Psf_Vary")
        tile_list, tile_coord, tile_snr = tile_prop
        tr_gaul = self.trans_gaul(g_gauls)
        tr=[n for i, n in enumerate(tr_gaul)]# if n[1]/n[8] >= snrcutstack]
        ntile = len(tile_list)
        psfimages = []
        psfcoords = []
        psfratio = [] # ratio of peak flux to total flux
        psfratio_aper = [] # ratio of peak flux to aperture flux
        srcpertile = N.zeros(ntile)
        snrpertile = N.zeros(ntile)
        xt, yt = N.transpose(tile_coord)
        if plot:
            pl.figure(None)
            colours=['b','g','r','c','m','y','k']*(len(xt)/7+1)
            pl.axis([0.0, image.shape[0], 0.0, image.shape[1]])
            pl.title('Tesselated image with tile centres and unresolved sources')
            for i in range(ntile):
                pl.plot([xt[i]], [yt[i]], 'D'+colours[i])
                pl.text(xt[i], yt[i], str(i))
        for itile in range(ntile):
            # volrank holds tile number + 1 at each pixel.
            tile_gauls = [n for n in tr if volrank[int(round(n[2])),int(round(n[3]))]-1 \
                == itile]
            t_gauls = self.trans_gaul(tile_gauls)
            srcpertile[itile] = len(tile_gauls)
            if plot:
                pl.plot(t_gauls[2], t_gauls[3], 'x'+'k', mew=1.3)#colours[itile])
                for i, ig in enumerate(t_gauls[2]):
                    xx=[xt[itile], ig]
                    yy=[yt[itile], t_gauls[3][i]]
                    pl.plot(xx,yy,'-'+colours[itile])
            wts = N.asarray(t_gauls[1])/N.asarray(t_gauls[8])   # wt is SNR
            snrpertile[itile] = sum(wts)
            mylog.info('PSF tile #%i (center = %i, %i): %i unresolved sources, SNR = %.1f' %
                       (itile, xt[itile], yt[itile], srcpertile[itile], snrpertile[itile]))
            a = self.stackpsf(image, beam, t_gauls, wts, cdelt, factor)
            psfimages.append(a)
            # SNR-weighted mean position of the stacked sources.
            psfcoords.append([sum(N.asarray(t_gauls[2])*wts)/sum(wts), sum(N.asarray(t_gauls[3])*wts)/sum(wts)])
            # Find peak/total flux ratio for sources in tile. If an aperture is given,
            # use the aperture flux as well.
            # t_gauls[0] is source_id
            src_ratio = []
            src_wts = []
            src_ratio_aper = []
            src_wts_aper = []
            for gt in tile_gauls:
                src = img.sources[gt[0]]
                if img.aperture is not None:
                    src_ratio_aper.append(src.peak_flux_max / src.aperture_flux)
                    src_wts_aper.append(src.total_flux / src.aperture_fluxE)
                src_ratio.append(src.peak_flux_max / src.total_flux)
                src_wts.append(src.total_flux / src.total_fluxE)
            if img.aperture is not None:
                psfratio_aper.append(sum(N.asarray(src_ratio_aper)*src_wts_aper)/sum(src_wts_aper))
            else:
                psfratio_aper.append(0.0)
            psfratio.append(sum(N.asarray(src_ratio)*src_wts)/sum(src_wts))
        # Global average psf: SNR-weighted mean of the per-tile psfs.
        totpsfimage = psfimages[0]*snrpertile[0]
        for itile in range(1,ntile):
            totpsfimage += psfimages[itile]*snrpertile[itile]
        totpsfimage = totpsfimage/sum(snrpertile)
        if plot:
            pl.imshow(N.transpose(volrank), origin='lower', interpolation='nearest'); pl.colorbar()
        if plot:
            pl.figure(None)
            pl.clf()
            ax = pl.subplot(1,1,1)
            pax = ax.get_position()
            start = N.array((pax.xmin, pax.ymin))
            stop = N.array((pax.xmax, pax.ymax))
            plaxis = pl.axis([0, image.shape[0], 0, image.shape[1]])
            pl.title('Stacked psf for each tile')
            for itile in range(ntile):
                im=psfimages[itile]
                sz=0.07
                spt = int(round(snrpertile[itile]*10))/10.
                titl='n='+str(int(round(srcpertile[itile])))+'; SNR='+str(spt)
                posn=[psfcoords[itile][0], psfcoords[itile][1]]
                normposn=N.array(stop-start, dtype=float)/N.array(image.shape[0:2])*posn+start
                a=pl.axes([normposn[0]-sz/2., normposn[1]-sz/2., sz, sz])
                pl.contour(im,15)
                pl.title(titl, fontsize='small')
                pl.setp(a, xticks=[], yticks=[])
            pl.show()
        return psfimages, psfcoords, totpsfimage, psfratio, psfratio_aper
##################################################################################################
def interp_shapcoefs(self, nmax, tr_psf_cf, psfcoords, imshape, compress, plot):
"""Interpolate using natgrid.
Check to see if variation is significant.
"""
x, y = N.transpose(psfcoords)
index = [(i,j) for i in range(nmax+1) for j in range(nmax+1-i)]
xi=x
yi=y
xo=N.arange(0.0,round(imshape[0]), round(compress))
yo=N.arange(0.0,round(imshape[1]), round(compress))
rgrid=nat.Natgrid(xi,yi,xo,yo)
p={}
for coord in index:
z = N.array(tr_psf_cf[coord]) # else natgrid cant deal with noncontiguous memory
p[coord] = rgrid.rgrd(z)
# if plot:
# for i,coord in enumerate(index):
# if i % 36 == 0:
# pl.figure(None)
# pl.clf()
# title = 'Interpolated shapelet coefficients'
# if i>0: title = title+' (cont)'
# pl.suptitle(title)
# pl.subplot(6,6,(i%36)+1)
# pl.title(str(coord))
# pl.plot(xi/compress, yi/compress, 'xk')
# pl.imshow(p[coord], interpolation='nearest')
# pl.colorbar()
return p, xo, yo
##################################################################################################
def interp_prop(self, prop, psfcoords, imshape, compress=1):
"""Interpolate using natgrid.
Should check to see if variation is significant.
"""
x, y = N.transpose(psfcoords)
xi=x
yi=y
xo=N.arange(0.0,round(imshape[0]), round(compress))
yo=N.arange(0.0,round(imshape[1]), round(compress))
rgrid=nat.Natgrid(xi,yi,xo,yo)
prop_int = rgrid.rgrd(prop)
return prop_int
##################################################################################################
def create_psf_grid(self, psf_coeff_interp, imshape, xgrid, ygrid, skip, nmax, psfshape, basis, beta,
cen, totpsfimage, plot):
""" Creates a image with the gridded interpolated psfs. xgrid and ygrid are 1d numpy arrays
with the x and y coordinates of the grids. """
# if plot:
# plnum=N.zeros(2)
# for i in range(2):
# dum=pl.figure(None)
# plnum[i]=dum.number
# pl.clf()
# if i == 0: pl.suptitle('Gridded psfs')
# if i == 1: pl.suptitle('Gridded residual psfs')
# ax = pl.subplot(1,1,1)
# plaxis = pl.axis([0, imshape[0], 0, imshape[1]])
# pax = ax.get_position()
# start = N.array((pax.xmin, pax.ymin))
# stop = N.array((pax.xmax, pax.ymax))
# sz=0.07
mask=N.zeros(psfshape, dtype=bool) # right now doesnt matter
xg=xgrid[::skip+1]
yg=ygrid[::skip+1]
index = [(i,j) for i in range(0,len(xgrid),skip+1) for j in range(0,len(ygrid),skip+1)]
xy = [(i,j) for i in xgrid[::skip+1] for j in ygrid[::skip+1]]
blah=[]
for i, coord in enumerate(index):
maxpsfshape = [0, 0]
for k in psf_coeff_interp:
if k[0]+1 > maxpsfshape[0]:
maxpsfshape[0] = k[0]+1
if k[1]+1 > maxpsfshape[1]:
maxpsfshape[1] = k[1]+1
cf = N.zeros(maxpsfshape)
for k in psf_coeff_interp:
cf[k]=psf_coeff_interp[k][coord]
cf = N.transpose(cf)
psfgridim = sh.reconstruct_shapelets(psfshape, mask, basis, beta, cen, nmax, cf)
blah.append(psfgridim)
# if plot:
# for j in range(2):
# pl.figure(plnum[j])
# posn = [xy[i][0], xy[i][1]]
# normposn =N.array(stop-start, dtype=float)/N.array(imshape[0:2])*posn+start
# a=pl.axes([normposn[0]-sz/2., normposn[1]-sz/2., sz, sz])
# if j == 0: pl.contour(psfgridim,15)
# if j == 1: pl.contour(psfgridim-totpsfimage,15)
# pl.setp(a, xticks=[], yticks=[])
# pl.colorbar()
# if plot:
# pl.figure(plnum[0])
# pl.figure(plnum[1])
#
return blah
##################################################################################################
def blur_image(self, im, n, ny=None) :
""" blurs the image by convolving with a gaussian kernel of typical
size n. The optional keyword argument ny allows for a different
size in the y direction.
"""
from scipy.ndimage import gaussian_filter
sx = n
if ny is not None:
sy = ny
else:
sy = n
improc = gaussian_filter(im, [sy, sx])
return improc
|
jjdmol/LOFAR
|
CEP/PyBDSM/src/python/psf_vary.py
|
Python
|
gpl-3.0
| 48,910
|
[
"Gaussian"
] |
4abe0d40eb88ff081f22b182bea7e249f2315a4762eaf67196556ce244b9ff84
|
#
# Gramps - a GTK+/GNOME based genealogy program
#
# Copyright (C) 2002-2007 Donald N. Allingham
# Copyright (C) 2007-2008 Brian G. Matherly
# Copyright (C) 2008 Jerome Rapinat
# Copyright (C) 2008 Benny Malengier
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
#
# $Id$
#-------------------------------------------------------------------------
#
# Standard Python modules
#
#-------------------------------------------------------------------------
from ....const import GRAMPS_LOCALE as glocale
_ = glocale.get_translation().gettext
#-------------------------------------------------------------------------
#
# GRAMPS modules
#
#-------------------------------------------------------------------------
from .._hasnotebase import HasNoteBase
#-------------------------------------------------------------------------
# "Events having notes"
#-------------------------------------------------------------------------
class HasNote(HasNoteBase):
    """Rule that matches events having a given number of notes.

    All matching logic lives in HasNoteBase; this subclass only supplies
    the translated rule name and description shown in the filter editor.
    """
    name = _('Events having <count> notes')
    description = _("Matches events having a certain number of notes")
|
Forage/Gramps
|
gramps/gen/filters/rules/event/_hasnote.py
|
Python
|
gpl-2.0
| 1,764
|
[
"Brian"
] |
377319d86fb6f725efc69658e3a4b795f7771b8960b38051b0070590b626082a
|
"""
===============================================
Visualizing the energy-sensor-weather structure
===============================================
This example employs several unsupervised learning techniques to extract
the energy data structure from variations in Building Automation System (BAS)
and historical weather data.
The fundamental timelet for analysis is 15 min, referred to as Q.
** currently use H (Hour) as the fundamental timelet, need to change later **
Learning a graph structure
--------------------------
We use sparse inverse covariance estimation to find which quotes are
correlated conditionally on the others. Specifically, sparse inverse
covariance gives us a graph, that is, a list of connections. For each
symbol, the symbols that it is connected to are those useful to explain
its fluctuations.
Clustering
----------
We use clustering to group together quotes that behave similarly. Here,
amongst the :ref:`various clustering techniques <clustering>` available
in the scikit-learn, we use :ref:`affinity_propagation` as it does
not enforce equal-size clusters, and it can choose automatically the
number of clusters from the data.
Note that this gives us a different indication than the graph, as the
graph reflects conditional relations between variables, while the
clustering reflects marginal properties: variables clustered together can
be considered as having a similar impact at the level of the full stock
market.
Embedding in 2D space
---------------------
For visualization purposes, we need to lay out the different symbols on a
2D canvas. For this we use :ref:`manifold` techniques to retrieve 2D
embedding.
Visualization
-------------
The output of the 3 models are combined in a 2D graph where nodes
represents the stocks and edges the:
- cluster labels are used to define the color of the nodes
- the sparse covariance model is used to display the strength of the edges
- the 2D embedding is used to position the nodes in the plan
This example has a fair amount of visualization-related code, as
visualization is crucial here to display the graph. One of the challenge
is to position the labels minimizing overlap. For this we use an
heuristic based on the direction of the nearest neighbor along each
axis.
"""
#print(__doc__)
# Author: Deokwoo Jung <deokwoo.jung@gmail.com>
from __future__ import division # To forace float point division
import os
import sys
import numpy as np
import pylab as pl
from scipy import stats
import matplotlib.pyplot as plt
#from datetime import datetime
import datetime as dt
from dateutil import tz
import shlex, subprocess
import mytool as mt
import time
import retrieve_weather as rw
import itertools
import mpl_toolkits.mplot3d.axes3d as p3
import calendar
from sklearn import cluster, covariance, manifold # Machine Learning Packeage
###############################################################################
# Constant global variables
###############################################################################
# in seconds
# NOTE(review): MONTH uses 31 days, so month arithmetic is approximate.
MIN=60; HOUR=60*MIN; DAY=HOUR*24; MONTH=DAY*31
# Column indices into the time matrix: Minute, Hour, Weekday, Day, Month
MIN_IDX=0;HR_IDX=1; WD_IDX=2; MD_IDX=3 ;MN_IDX=4
# Define the period for analysis - year, month, day,hour
# Note: The sample data in the currently downloaded files are from 1 Apr 2013 to
# 30 Nov 2013.
ANS_START_T=dt.datetime(2013,7,1,0)
ANS_END_T=dt.datetime(2013,7,5,0)
#ANS_END_T=dt.datetime(2013,8,30,0)
# Interval of timelet, currently set to 1 Hour
TIMELET_INV=dt.timedelta(hours=1)
# UTC time of weather data
from_zone = tz.gettz('UTC')
# VTT local time
to_zone = tz.gettz('Europe/Helsinki')
# Multi-dimensional lists of hash tables
# Build the list of timelet slot start times covering the analysis period.
time_slots=[]
start=ANS_START_T
while start < ANS_END_T:
    #print start
    time_slots.append(start)
    start = start + TIMELET_INV
# Data dictionary
# All sensor and weather data is processed and structured into
# a consistent single data format -- Dictionary
data_dict={}
# This is the list of non-digit symbolic weather data
# The symbolic weather data is such as Conditions (e.g Cloudy or Clear)
# and Events (e.g. Rain or Fog ...)
# That symbolic data is replaced with an integer state representation whose
# pairs are stored in a hash table using Dictionary.
# If no data is given, key value is set to 0.
Conditions_dict={};Conditions_val=[];key_val_c=0
Events_dict={};Events_val=[]; key_val_e=0
Is_CSV=bool(0)
Start_t=time.time()
argv_len=len(sys.argv)
print 'arg length:',argv_len
###############################################################################
# Function
###############################################################################
def daterange(start, stop, step=dt.timedelta(days=1), inclusive=False):
    """ Yield datetimes from start towards stop in increments of step.

    inclusive=False to behave like range by default; with inclusive=True,
    stop itself is yielded when the iteration lands on it exactly.
    Works for sub-day steps too: the old implementation compared step.days,
    so e.g. timedelta(hours=1) silently yielded nothing.
    """
    seconds = step.total_seconds()
    if seconds > 0:
        while start < stop:
            yield start
            # not +=! don't modify object passed in if it's mutable
            # since this function is not restricted to
            # only types from datetime module
            start = start + step
    elif seconds < 0:
        while start > stop:
            yield start
            start = start + step
    if inclusive and start == stop:
        yield start
###############################################################################
# Retrive weather data from internet for the specified periods
# prefix_order=TS (default) [Time][Sensor]
# prefix_order=ST [Sensor][Time]
###############################################################################
"""
def get_weather(t_start, t_end, perfix_order='TS'):
print 'getting weater data '
print 'start time:', t_start, ' ~ end time:',t_end
data_days=[]
for date in daterange(t_start, t_end, inclusive=True):
#print date.strftime("%Y-%m-%d")
temp=date.strftime("%Y,%m,%d").rsplit(',')
data_day=rw.retrieve_data('VTT', int(temp[0]), int(temp[1]), int(temp[2]), view='d')
data_day=data_day.split('\n')
if perfix_order=='TS':
# order by [Sensor][Time]
# Paring the strings of daily weather data
day_sample_parse=[]
for hour_sample in data_day:
#print hour_sample
day_sample_parse.append(hour_sample.split(','))
data_days.append(day_sample_parse)
else:
# order by [Time][Sensor]
# Paring the strings of daily weather data
#f=open('weather_data.txt','w')
day_sample_parse=[]
for h_idx,hour_sample in enumerate(data_day):
#print hour_sample
if h_idx==0:
sensor_name_list=hour_sample.split(',')
# f.write(str(sensor_name_list)+'\n')
else:
hour_samples=hour_sample.split(',')
#print hour_samples
#f.write(str(hour_samples)+'\n')
for sample_idx,each_sample in enumerate(hour_samples):
sensor_name=sensor_name_list[sample_idx]
if sensor_name in data_dict:
data_dict[sensor_name].append(each_sample)
else:
data_dict.update({sensor_name:[each_sample]})
if perfix_order=='TS':
return data_days
else:
return sensor_name_list
#f.close()
"""
###############################################################################
# Plotting tool
###############################################################################
def plotting_data(plot_list,opt='val'):
    """ Plot per-hour sample counts (figure 1) and hourly mean values
    (figure 2) for each sensor in plot_list, reading from the global
    data_dict / time_slots. Returns the time matrix (one row per timelet,
    columns indexed by the *_IDX constants). """
    # times is seconds, but it might not correct for months with 30 days.
    #times_in_secs=(time_val[:,[HR_IDX,MD_IDX,MN_IDX]]*[HOUR,DAY,MONTH]).sum(axis=1)
    # Minute,Hour, Weekday, Day, Month - total 5 time fields
    time_mat=np.zeros([len(time_slots),5])
    for i, time_sample in enumerate(time_slots):
        time_mat[i,HR_IDX]=time_sample.hour
        time_mat[i,WD_IDX]=time_sample.weekday()
        time_mat[i,MD_IDX]=time_sample.day
        time_mat[i,MN_IDX]=time_sample.month
    monthDict={1:'Jan', 2:'Feb', 3:'Mar', 4:'Apr', 5:'May', 6:'Jun', 7:'Jul', 8:'Aug', 9:'Sep', 10:'Oct', 11:'Nov', 12:'Dec'}
    weekDict={0:'Mon', 1:'Tue', 2:'Wed', 3:'Thur', 4:'Fri', 5:'Sat', 6:'Sun'}
    # Month indicator: indices where the month changes, for axis labelling.
    time_mn_diff=np.diff(time_mat[:,MN_IDX])
    m_label_idx=time_mn_diff.nonzero()[0]
    m_label_str=[]
    for m_num in time_mat[m_label_idx,MN_IDX]:
        m_label_str.append(monthDict[m_num])
    # Weekday change indices, likewise for labelling.
    time_wk_diff=np.diff(time_mat[:,WD_IDX])
    w_label_idx=time_wk_diff.nonzero()[0]
    w_label_str=[]
    for w_num in time_mat[w_label_idx,WD_IDX]:
        w_label_str.append(weekDict[int(w_num)])
    for k,sensor in enumerate(plot_list):
        #print k, sensor
        num_samples=[]
        mean_samples=[]
        for i,(t,samples) in enumerate(zip(time_slots,data_dict[sensor])):
            #print i,str(t),len(samples)
            num_samples.append(len(samples))
            # Mean value with masking
            mean_samples.append(np.mean(samples))
            #mean_samples.append(np.mean(np.ma.masked_invalid(samples))
        #sensor_samples.append(num_samples)
        plt.figure(1)
        plt.subplot(len(plot_list),1,k+1)
        plt.plot(time_slots,num_samples)
        plt.title(sensor,fontsize=8)
        plt.xticks(fontsize=8)
        plt.yticks(fontsize=8)
        plt.ylabel('# Samples/Hour',fontsize=8)
        if k<len(plot_list)-1:
            # hide x axis on all but the bottom subplot
            frame1 = plt.gca()
            frame1.axes.get_xaxis().set_visible(False)
            #frame1.axes.get_yaxis().set_visible(False)
        plt.figure(2)
        plt.subplot(len(plot_list),1,k+1)
        plt.plot(time_slots,mean_samples)
        plt.title(sensor,fontsize=8)
        plt.xticks(fontsize=8)
        plt.yticks(fontsize=8)
        plt.ylabel('Avg Val/Hour',fontsize=8)
        if k<len(plot_list)-1:
            frame1 = plt.gca()
            frame1.axes.get_xaxis().set_visible(False)
            #frame1.axes.get_yaxis().set_visible(False)
    #plt.xticks(w_label_idx.tolist(),w_label_str,fontsize=8)
    #plt.text(m_label_idx, np.max(num_samples)*0.8, m_label_str, fontsize=12)
    print ' End of Plotting'
    return time_mat
###############################################################################
# Parsing sensor data
###############################################################################
def get_val(filename):
    """ Read one sensor file and return (sensor_val, time_val).

    With the global Is_CSV set, the file is parsed as 'timestamp,value'
    CSV and each time_val entry is [Hour, Weekday, Day, Month]; otherwise
    the file is a pickled binary read via mt.loadObjectBinary, whose
    "value"/"ts" fields are returned as-is.
    """
    if Is_CSV==True:
        sensor_val=[]
        time_val=[]
        # 'with' guarantees the file is closed even if a line fails to
        # parse (the old explicit close() leaked the handle on errors).
        with open(filename,"r") as openfile:
            for line in openfile:
                tmp=line.rstrip().rsplit(",")
                sensor_val.append(float(tmp[1]))
                temp=dt.datetime.strptime(tmp[0],"%Y-%m-%d %H:%M:%S")
                temp=temp.timetuple()
                # Hour, Weekday, Day, Month
                time_val.append([temp[3],temp[6],temp[2],temp[1]])
        #print 'list of input csv files: '
    else:
        data = mt.loadObjectBinary(filename)
        sensor_val = data["value"]
        time_val = data["ts"]
        #print 'list of input bin files: '
    return sensor_val,time_val
def get_val_timelet(filename,t_slots):
    """ Read one sensor file and bin its samples into the timelet slots.

    Returns (sensor_read, time_val): sensor_read[i] is the list of raw
    values whose reconstructed timestamp equals t_slots[i]; samples that
    do not land exactly on a slot are dropped.
    NOTE(review): the CSV branch builds 4-field [hour, weekday, day, month]
    entries, which do not line up with the 5-field *_IDX constants used in
    the binning loop below (MN_IDX=4 would be out of range) — the binary
    path presumably stores 5-field timestamps; verify the CSV path.
    """
    print ' get_val_timelet'
    if Is_CSV==True:
        openfile=open(filename,"r")
        sensor_val=[]
        time_val=[];
        for line in openfile:
            tmp=line.rstrip().rsplit(",")
            sensor_val.append(float(tmp[1]))
            temp=dt.datetime.strptime(tmp[0],"%Y-%m-%d %H:%M:%S")
            temp=temp.timetuple()
            # Hour, Weekday, Day, Month
            time_val.append([temp[3],temp[6],temp[2],temp[1]])
        openfile.close()
        #print 'list of input csv files: '
    else:
        data = mt.loadObjectBinary(filename)
        sensor_val = data["value"]
        time_val = data["ts"]
    # Create the list of lists, one (initially empty) bucket per timelet.
    sensor_read=[[] for i in range(len(t_slots))]
    for t_sample, v_sample in zip(time_val,sensor_val):
        #import pdb; pdb.set_trace()
        # If data in 2013 is only available after April; otherwise it is 2014 data
        if t_sample[MN_IDX]>3:
            temp_dt=dt.datetime(2013,t_sample[MN_IDX],t_sample[MD_IDX],t_sample[HR_IDX])
        else:
            temp_dt=dt.datetime(2014,t_sample[MN_IDX],t_sample[MD_IDX],t_sample[HR_IDX])
        #print temp_dt
        try:
            idx=t_slots.index(temp_dt)
            sensor_read[idx].append(v_sample)
        except ValueError:
            # sample does not fall on any timelet slot; skip it
            idx=-1
    return sensor_read, time_val
###############################################################################
# Parsing sensor data
# Data samples are regularized for specified times with timelet
###############################################################################
def symbol_to_state(symbol_list):
    """ Replace symbolic readings with small integer state labels.

    Each distinct non-empty symbol gets the next integer starting from 1,
    in order of first appearance; the empty symbol always maps to 0.
    Returns (converted lists of lists, symbol -> state dictionary).
    """
    #list(itertools.chain(*list_of_lists))
    symbol_dict = {}
    symbol_val = []
    next_state = 1
    print('start')
    for key_set in symbol_list:
        states = []
        for key in key_set:
            if key in symbol_dict:
                states.append(symbol_dict[key])
            elif len(key) == 0:
                # missing reading: always state 0
                symbol_dict[key] = 0
                states.append(0)
            else:
                symbol_dict[key] = next_state
                states.append(next_state)
                next_state = next_state + 1
        symbol_val.append(states)
    return symbol_val, symbol_dict
def get_weather(t_start, t_end, perfix_order='TS'):
    """ Retrieve daily weather data for [t_start, t_end] from the web.

    perfix_order='TS' returns the raw parsed per-day tables; any other
    value appends samples into the global data_dict keyed by weather
    field name and returns the list of field names.
    NOTE(review): largely superseded by get_weather_timelet below, which
    additionally bins samples into timelet slots.
    """
    print 'getting weater data new '
    print 'start time:', t_start, ' ~ end time:',t_end
    data_days=[]
    # Date iteration given start time and end-time
    for date in daterange(t_start, t_end, inclusive=True):
        print date.strftime("%Y-%m-%d")
        temp=date.strftime("%Y,%m,%d").rsplit(',')
        data_day=rw.retrieve_data('VTT', int(temp[0]), int(temp[1]), int(temp[2]), view='d')
        data_day=data_day.split('\n')
        if perfix_order=='TS':
            # order by [Sensor][Time]
            # Parsing the strings of daily weather data
            day_sample_parse=[]
            for hour_sample in data_day:
                #print hour_sample
                day_sample_parse.append(hour_sample.split(','))
            data_days.append(day_sample_parse)
        else:
            # order by [Time][Sensor]
            # Parsing the strings of daily weather data
            #f=open('weather_data.txt','w')
            day_sample_parse=[]
            for h_idx,hour_sample in enumerate(data_day):
                #print hour_sample
                if h_idx==0:
                    # the first row holds the field names
                    sensor_name_list=hour_sample.split(',')
                    # f.write(str(sensor_name_list)+'\n')
                else:
                    hour_samples=hour_sample.split(',')
                    #print hour_samples
                    #f.write(str(hour_samples)+'\n')
                    for sample_idx,each_sample in enumerate(hour_samples):
                        sensor_name=sensor_name_list[sample_idx]
                        if sensor_name in data_dict:
                            data_dict[sensor_name].append(each_sample)
                        else:
                            data_dict.update({sensor_name:[each_sample]})
    if perfix_order=='TS':
        return data_days
    else:
        return sensor_name_list
    #f.close()
def get_weather_timelet(t_slots):
    """ Retrieve weather data for the span of t_slots and bin it per slot.

    Each weather field gets an entry in the global data_dict mapping the
    field name to a list (one element per timelet slot) of raw samples;
    numeric strings are converted to float. Returns the list of weather
    field names.
    """
    print 'getting weater data new '
    t_start=t_slots[0]
    t_end=t_slots[-1]
    print 'start time:', t_start, ' ~ end time:',t_end
    # Date iteration given start time and end-time
    # Iterate for each day for all weather data types
    for date_idx,date in enumerate(daterange(t_start, t_end, inclusive=True)):
        print date.strftime("%Y-%m-%d")
        temp=date.strftime("%Y,%m,%d").rsplit(',')
        data_day=rw.retrieve_data('VTT', int(temp[0]), int(temp[1]), int(temp[2]), view='d')
        # split the data into t
        data_day=data_day.split('\n')
        # Iterate for each time index(h_idx) of a day for all weather data types
        for h_idx,hour_sample in enumerate(data_day):
            hour_samples=hour_sample.split(',')
            # Initialize weather data lists of dictionary
            # The first row is always the list of weather data types
            if (h_idx==0) and (date_idx==0):
                sensor_name_list=hour_sample.split(',')
                for sample_idx,each_sample in enumerate(hour_samples):
                    sensor_name=sensor_name_list[sample_idx]
                    sensor_read=[[] for i in range(len(t_slots))]
                    data_dict.update({sensor_name:sensor_read})
            elif h_idx>0:
                # 'DateUTC' is the one
                sample_DateUTC=hour_samples[sensor_name_list.index('DateUTC')]
                # convert the UTC time to VTT local time.
                utc_dt=dt.datetime.strptime(sample_DateUTC, "%Y-%m-%d %H:%M:%S")
                vtt_dt_aware = utc_dt.replace(tzinfo=from_zone).astimezone(to_zone)
                # convert to offset-naive from offset-aware datetimes
                # (truncates to whole hours: timetuple()[:4] is y,m,d,h)
                vtt_dt=dt.datetime(*(vtt_dt_aware.timetuple()[:4]))
                # time slot index a given weather sample time
                try:
                    vtt_dt_idx=t_slots.index(vtt_dt)
                    for sample_idx,each_sample in enumerate(hour_samples):
                        # convert string type to float time if possible
                        try:
                            each_sample=float(each_sample)
                        except ValueError:
                            each_sample=each_sample
                        sensor_name=sensor_name_list[sample_idx]
                        #import pdb; pdb.set_trace()
                        if sensor_name in data_dict:
                            if each_sample!='N/A' and each_sample!=[]:
                                data_dict[sensor_name][vtt_dt_idx].append(each_sample)
                        else:
                            raise NameError('Inconsistency in the list of weather data')
                except ValueError:
                    # sample time not on a timelet slot; discard it
                    vtt_dt_idx=-1
            else:
                # hour_sample is list of weather field names, discard
                hour_sample=[]
    return sensor_name_list
def data_dict_purge(purge_list):
    """ Remove the given keys from the global data_dict.

    dict.pop(key, None) is already a no-op for missing keys, so the old
    'key in data_dict.keys()' membership scan was redundant (and on
    Python 2, keys() builds a whole list per lookup).
    """
    for key in purge_list:
        print('purge ' + str(key))
        data_dict.pop(key, None)
#data_dict_purge(weather_list)
###############################################################################
# Reading sensor data from CSV or BIN files - use linux commands
###############################################################################
input_csvs=[]
num_csvs=[]
# With no command-line arguments the active-power sensor files are discovered
# in the current directory; otherwise the file list comes from argv.
if argv_len==1:
    if Is_CSV==True:
        temp = subprocess.check_output("ls *.csv |grep _ACTIVE_POWER_", shell=True)
    else:
        temp = subprocess.check_output("ls *.bin |grep _ACTIVE_POWER_", shell=True)
    input_csvs =shlex.split(temp)
    plt.ion()
    print 'argv 1'
elif argv_len>1:
    input_csvs=sys.argv[1:]
    print 'getting args'
else:
    input_csvs=[]
    print '...'
num_csvs=len(input_csvs)
num_col_subplot=np.ceil(np.sqrt(num_csvs))
###############################################################################
# Analysis script starts here ....
# List of sensors from BMS
print 'mapping sensor list into hasing table using dictionary'
sensor_list=input_csvs
# List of sensors from Weather data
# getting weather files
# Weather parameter list
#['TimeEEST', 'TemperatureC', 'Dew PointC', 'Humidity',
# 'Sea Level PressurehPa', 'VisibilityKm', 'Wind Direction',
# 'Wind SpeedKm/h', 'Gust SpeedKm/h', 'Precipitationmm',
# 'Events', 'Conditions', 'WindDirDegrees', 'DateUTC']
# Note: We select 'TemperatureC', 'Dew PointC', 'Humidity',
# 'Events', 'Conditions' for the main weather parameter
#weather_list=get_weather(ANS_START_T, ANS_END_T,'ST')
# Checking length of weather sample data
print "lenth of dictionary"
for key in data_dict.keys():
    print 'len of ', key, len(data_dict[key])
# data dictionary that maps all types of sensor readings into a single hash table
###############################################################################
# Read out all sensor files in the file list
time_set_temp=[]
for i,argv in enumerate(sensor_list):
    print 'index ',i+1,': ', argv
    # sensor value is read by time and binned into the timelet slots
    start__dictproc_t=time.time()
    dict_sensor_val, dict_time_val=get_val_timelet(argv,time_slots)
    data_dict.update({argv:dict_sensor_val})
    end__dictproc_t=time.time()
    print argv,'- dict.proc time is ', end__dictproc_t-start__dictproc_t
print 'Check sample density over time slots'
time_mat=plotting_data(sensor_list[0:2])
"""
weather_list -that is pretty much fixed from database
(*) is the data to be used for our analysis
0 TimeEEST
1 TemperatureC (*)
2 Dew PointC (*)
3 Humidity (*)
4 Sea Level PressurehPa
5 VisibilityKm
6 Wind Direction
7 Wind SpeedKm/h
8 Gust SpeedKm/h
9 Precipitationmm
10 Events (*)
11 Conditions (*)
12 WindDirDegrees
13 DateUTC
"""
weather_list=get_weather_timelet(time_slots)
# Convert symbols to Integer representaion
data_dict['Conditions'],Conditions_dict=symbol_to_state(data_dict['Conditions'])
data_dict['Events'],Events_dict=symbol_to_state(data_dict['Events'])
# Weather data to be used
weather_list_used = [weather_list[i] for i in [1,2,3,10,11]]
# All (sensor + weather) data to be used
data_used=weather_list_used + sensor_list
def verify_data_format(key_list):
    # Scan data_dict entries for the given keys and report every sample that
    # is an empty list or not an int/float (e.g. a leftover 'N/A' string).
    # Returns a list of [key, timelet_index, sample_index] triples for each
    # offending sample; an empty return list means the data is clean.
    # Reads module-level ``data_dict`` and ``time_slots``.
    # Verify there is no [] or N/A in the list
    print 'Checking any inconsisent data format.....'
    print '---------------------------------'
    list_of_wrong_data_format=[]
    for key in key_list:
        print 'checking ', key, '...'
        for i,samples in enumerate(data_dict[key]):
            for j,each_sample in enumerate(samples):
                if each_sample==[]:
                    list_of_wrong_data_format.append([key,i,j])
                    print each_sample, 'at', time_slots[j], 'in', key
                elif (isinstance(each_sample,int)==False and isinstance(each_sample,float)==False):
                    list_of_wrong_data_format.append([key,i,j])
                    print each_sample, 'at', time_slots[j], 'in', key
    print '---------------------------------'
    if len(list_of_wrong_data_format)==0:
        print ' no inconsistent data format'
    return list_of_wrong_data_format
# Verify there is no [] or N/A in the list
list_of_wrong_data_format=verify_data_format(data_used)
if len(list_of_wrong_data_format)!=0:
    raise NameError('Inconsistent data format in the list of data_used')
# Weighted averge to impute missing value
# Imputing missing data -using weighted mean value
# Extract hour / weekday / day-of-month / month columns from the time matrix
# produced by plotting_data above (index constants defined elsewhere).
hr_set=time_mat[:,HR_IDX].astype(int)
wd_set=time_mat[:,WD_IDX].astype(int)
day_set=time_mat[:,MD_IDX].astype(int)
mn_set=time_mat[:,MN_IDX].astype(int)
# Cumulative day counts at each month boundary of 2013 (hard-coded year);
# daycount_set maps (day, month) to a day-of-year ordinal.
# NOTE(review): np.r_[1:12] covers months 1..11 only — confirm December is
# intentionally excluded from the cumulative sum.
cumnum_days_mn=np.r_[0,np.array([calendar.monthrange(2013, i)[1] for i in np.r_[1:12]]).cumsum()]
daycount_set=[ int(day+cumnum_days_mn[mn-1]) for i,(day,mn) in enumerate(zip(day_set,mn_set))]
# X.shape (1258, 7)
# type(X) <type 'numpy.ndarray'>
# type(X) <type 'numpy.ndarray'>
# Build the sample matrix X: one row per time slot, one column per data
# stream.  Integer-valued streams are aggregated by mode, float streams by
# mean; empty slots are marked with +inf for the imputation pass below.
num_of_data=len(data_used)
num_of_samples=len(time_slots)
X=np.zeros([num_of_samples,num_of_data])
INT_type_cols=[]
FLOAT_type_cols=[]
for j,key in enumerate(data_used):
    for i,sample in enumerate(data_dict[key]):
        if len(sample)==0:
            X[i,j]=np.infty
        elif isinstance(sample[0],int):
            X[i,j]=int(stats.mode(sample)[0])
            if i==0: INT_type_cols.append(j)
        elif isinstance(sample[0],float):
            X[i,j]=np.mean(sample)
            if i==0: FLOAT_type_cols.append(j)
        else:
            raise NameError('Sample type must either INT or FLOAT type')
# If no data availalbe, then imputes the data by weighted mean
print 'Before imputation'
for i,key in enumerate(data_used):
    print key
    print [k for k in np.nonzero(X[:,i]==np.infty)[0]]
# If no data availalbe, then imputes the data by weighted mean
# For each missing slot, average the finite samples taken at the same hour of
# day, weighted by the inverse day-distance to the missing slot.
# NOTE(review): under Python 2, 1/np.abs(int - int) is integer division, so
# wght is 0 unless the day gap is exactly 1 — likely unintended; 1.0/... would
# give the intended inverse-distance weight.  Also, if no other finite sample
# shares the hour, whgt_bottom_sum stays 0 and the division below raises.
for i,key in enumerate(data_used):
    for inf_idx in np.nonzero(X[:,i]==np.infty)[0]:
        whgt_bottom_sum=0;whgt_top_sum=0
        for h_idx in np.nonzero(hr_set==hr_set[inf_idx])[0]:
            #import pdb; pdb.set_trace()
            sample_temp=X[h_idx,i]
            if (sample_temp<np.infty and h_idx!=inf_idx):
                wght=1/np.abs(daycount_set[h_idx]-daycount_set[inf_idx])
                whgt_bottom_sum=whgt_bottom_sum+wght
                whgt_top_sum=whgt_top_sum+wght*sample_temp
        new_sample=whgt_top_sum/whgt_bottom_sum
        X[inf_idx,i]=new_sample
# If no data availalbe, then imputes the data by weighted mean
print 'After imputation'
for i,key in enumerate(data_used):
    print key
    print [k for k in np.nonzero(X[:,i]==np.infty)[0]]
# If no data availalbe, then imputes the data by weighted mean
# Split the matrix into integer-typed and float-typed column groups.
X_INT=X[:,INT_type_cols]
X_FLOAT=X[:,FLOAT_type_cols]
###############################################################################
# Learn a graphical structure from the correlations
edge_model = covariance.GraphLassoCV()
# standardize the time series: using correlations rather than covariance
# is more efficient for structure recovery
edge_model.fit(X_FLOAT)
# Using mode if interger type, using mean if real type
"""
vak1_power_sys_sum=[]
vak1_power_p1_sum=[]
vak1_power_p2_sum=[]
vak1_power_p3_sum=[]
for i,(psys,p1,p2,p3) in enumerate(zip(vak1_power_sys,vak1_power_p1,vak1_power_p2,vak1_power_p3)):
    vak1_power_sys_sum.append(sum(psys))
    vak1_power_p1_sum.append(sum(p1))
    vak1_power_p2_sum.append(sum(p2))
    vak1_power_p3_sum.append(sum(p3))
plt.subplot(2,1,1)
plt.plot(vak1_power_sys_sum)
plt.plot(np.array(vak1_power_p1_sum)+np.array(vak1_power_p2_sum)+np.array(vak1_power_p3_sum),'-s')
plt.subplot(2,1,2)
plt.plot(vak1_power_p1_sum,'-*')
plt.plot(vak1_power_p2_sum,'-s')
plt.plot(vak1_power_p3_sum,'-o')
"""
# Using the following weather data for variables
#
# Regularized the weather data into a single time referece
# For symbolic data, use mode, and for real number data, use average
# Gaussian Process (GP) model and interploation for power consumption data
#Conditions_dict,Events_dict
"""
3D plotting
fig=pl.figure()
ax = p3.Axes3D(fig)
ax.scatter(gw2_power_p1_sum.T, gw2_power_p2_sum, gw2_power_p3_sum, c=colors)
ax.set_xlabel('P1')
ax.set_ylabel('P2')
ax.set_zlabel('P3')
fig.add_axes(ax)
"""
if argv_len>1:
    print 'end of program'
plt.show()
|
TinyOS-Camp/DDEA-DEV
|
Development/plot_csv.py
|
Python
|
gpl-2.0
| 27,192
|
[
"Gaussian"
] |
26728dcf6988faf4196c34ef18d3eb40375a6efc93ec9d3495115c34ddba919a
|
"""
"""
from abc import ABCMeta, abstractmethod
# Sentinel returned by ManagerInterface.return_code when a job's exit code
# cannot be determined.
PULSAR_UNKNOWN_RETURN_CODE = '__unknown__'
class ManagerInterface(object):
    """
    Defines the interface to various job managers.
    """
    # NOTE(review): ``__metaclass__`` only takes effect under Python 2; on
    # Python 3 this class would not actually be abstract unless declared as
    # ``class ManagerInterface(metaclass=ABCMeta)``.
    __metaclass__ = ABCMeta
    @abstractmethod
    def setup_job(self, input_job_id, tool_id, tool_version):
        """
        Setup a job directory for specified input (galaxy) job id, tool id,
        and tool version.
        """
    @abstractmethod
    def clean(self, job_id):
        """
        Delete job directory and clean up resources associated with job with
        id `job_id`.
        """
    # NOTE(review): the mutable defaults ({} and []) are harmless on this
    # abstract declaration, but implementations copying this signature should
    # take care never to mutate them in place.
    @abstractmethod
    def launch(self, job_id, command_line, submit_params={}, dependencies_description=None, env=[]):
        """
        Called to indicate that the client is ready for this job with specified
        job id and command line to be executed (i.e. run or queue this job
        depending on implementation).
        """
    @abstractmethod
    def get_status(self, job_id):
        """
        Return status of job as string, currently supported statuses include
        'cancelled', 'running', 'queued', and 'complete'.
        """
    @abstractmethod
    def return_code(self, job_id):
        """
        Return integer indicating return code of specified execution or
        PULSAR_UNKNOWN_RETURN_CODE.
        """
    @abstractmethod
    def stdout_contents(self, job_id):
        """
        After completion, return contents of stdout associated with specified
        job.
        """
    @abstractmethod
    def stderr_contents(self, job_id):
        """
        After completion, return contents of stderr associated with specified
        job.
        """
    @abstractmethod
    def kill(self, job_id):
        """
        End or cancel execution of the specified job.
        """
    @abstractmethod
    def job_directory(self, job_id):
        """ Return a JobDirectory abstraction describing the state of the
        job working directory.
        """
class ManagerProxy(object):
    """
    Base class for proxies that wrap a job manager: every manager operation
    is forwarded to the wrapped instance, so subclasses only override the
    pieces of behavior they need to change.
    """
    def __init__(self, manager):
        # Wrapped manager; subclasses may consult it directly.
        self._proxied_manager = manager
    def setup_job(self, *args, **kwargs):
        """Forward ``setup_job`` to the wrapped manager."""
        manager = self._proxied_manager
        return manager.setup_job(*args, **kwargs)
    def clean(self, *args, **kwargs):
        """Forward ``clean`` to the wrapped manager."""
        manager = self._proxied_manager
        return manager.clean(*args, **kwargs)
    def launch(self, *args, **kwargs):
        """Forward ``launch`` to the wrapped manager."""
        manager = self._proxied_manager
        return manager.launch(*args, **kwargs)
    def get_status(self, *args, **kwargs):
        """Forward ``get_status`` to the wrapped manager."""
        manager = self._proxied_manager
        return manager.get_status(*args, **kwargs)
    def return_code(self, *args, **kwargs):
        """Forward ``return_code`` to the wrapped manager."""
        manager = self._proxied_manager
        return manager.return_code(*args, **kwargs)
    def stdout_contents(self, *args, **kwargs):
        """Forward ``stdout_contents`` to the wrapped manager."""
        manager = self._proxied_manager
        return manager.stdout_contents(*args, **kwargs)
    def stderr_contents(self, *args, **kwargs):
        """Forward ``stderr_contents`` to the wrapped manager."""
        manager = self._proxied_manager
        return manager.stderr_contents(*args, **kwargs)
    def kill(self, *args, **kwargs):
        """Forward ``kill`` to the wrapped manager."""
        manager = self._proxied_manager
        return manager.kill(*args, **kwargs)
    def shutdown(self, timeout=None):
        """ Optional. Forward ``shutdown`` only if the wrapped manager
        implements it; managers without a shutdown hook are ignored.
        """
        if not hasattr(self._proxied_manager, 'shutdown'):
            return
        self._proxied_manager.shutdown(timeout)
    def job_directory(self, *args, **kwargs):
        """Forward ``job_directory`` to the wrapped manager."""
        manager = self._proxied_manager
        return manager.job_directory(*args, **kwargs)
    def system_properties(self):
        """Forward ``system_properties`` to the wrapped manager."""
        return self._proxied_manager.system_properties()
|
jmchilton/pulsar
|
pulsar/managers/__init__.py
|
Python
|
apache-2.0
| 3,493
|
[
"Galaxy"
] |
50cbe84f70f54fa08a8b16655b47d894ca9c09f39a8d1e4efffc4043882f2f70
|
# This python script contains functions and classes used in the
# two synthetic galaxy model notebooks in this directory.
import astropy.io.ascii as asciitable
from scipy import interpolate
import numpy as np
def LoadData(galaxy_datafile, HIrad, ddensdR):
    # Load a galaxy's rotation curve and gas density profile and extrapolate
    # both out to the HI radius limit ``HIrad``; returns the tuple
    # (radius, rotation velocity, density) as three 1-D numpy arrays.
    #
    # Read the galaxy description file which contains rotation velocity and
    # density information in a comma-delimited file where each row contains
    # radius (in kpc), a rotational velocity (in km/s), and a density
    # (in atoms/cm^3).
    #
    raw_data = asciitable.read(galaxy_datafile)
    # Restructure the data into a 3xN array containing radius, rot. vel.,
    # and gas density in "columns"
    galaxy_data = np.hstack((raw_data['radius'].reshape(-1, 1),
                             raw_data['rot_vel'].reshape(-1, 1),
                             raw_data['density'].reshape(-1, 1)))
    #
    # Interpolate any extension to the rotation curve as flat,
    # but with dropping density out to HI radius limit.
    #
    # NOTE(review): 25 appears to be an assumed outer radius in kpc (the step
    # is a quarter of the gap to 25 kpc) — confirm.  If the last tabulated
    # radius is >= 25 but < HIrad, the step is <= 0 and this loop will not
    # terminate.
    extrapol_step = (25 - galaxy_data[-1][0])/4
    while (galaxy_data[-1][0] < HIrad):
        new_rad = galaxy_data[-1][0]+extrapol_step
        # Flat rotation curve beyond the data; density declines at ddensdR
        # per radial step, floored at zero.
        new_vel = galaxy_data[-1][1]
        if (galaxy_data[-1][2] > ddensdR*extrapol_step):
            new_dens = galaxy_data[-1][2] - ddensdR*extrapol_step
        else:
            new_dens = 0.0
        new_row = np.array([new_rad, new_vel, new_dens])
        galaxy_data = np.vstack((galaxy_data, new_row))
    # Save raw values
    rad_raw = np.copy(galaxy_data[:, 0])
    rotvel_raw = np.copy(galaxy_data[:, 1])
    density_raw = np.copy(galaxy_data[:, 2])
    return (rad_raw, rotvel_raw, density_raw)
def spline_curves(rad, vel, dens, dr):
    """Smooth rotation-curve and density samples onto an even radial grid.

    Fits exact-interpolation cubic splines to the rotational velocity and
    gas density versus radius, then evaluates both on a uniform grid with
    spacing ~``dr``.  Values are forced non-negative (the spline can wiggle
    slightly below zero near the origin and at large radii) and rounded to
    0.1 (velocity) / 0.001 (density).  Returns (rad_sp, rotvel_sp,
    density_sp).
    """
    # Spline representations (s=0 -> pass exactly through the data points).
    vel_tck = interpolate.splrep(rad, vel, s=0)
    dens_tck = interpolate.splrep(rad, dens, s=0)
    # Evenly spaced radii out to the last tabulated radius.
    n_points = int(rad[-1] / dr)
    rad_sp = np.linspace(0, rad[-1], n_points)
    # Round first (matching the source data precision), then clamp the sign.
    rotvel_sp = np.abs(interpolate.splev(rad_sp, vel_tck, der=0).round(1))
    density_sp = np.abs(interpolate.splev(rad_sp, dens_tck, der=0).round(3))
    return (rad_sp, rotvel_sp, density_sp)
def RotCoord(x, y):
    """Convert Cartesian coordinates (x, y) to polar (r, theta).

    Operates element-wise, so scalars and whole numpy arrays both work.
    """
    radius = np.sqrt(x*x + y*y)
    angle = np.arctan2(y, x)
    return (radius, angle)
class nf(float):
    # Float subclass whose repr drops a trailing ".0" so whole numbers print
    # as integers ("3" rather than "3.0") while other values keep one decimal
    # place ("3.5").  Useful for e.g. matplotlib contour labels.  Based on
    # http://matplotlib.sourceforge.net/examples/pylab_examples/contour_label_demo.html
    def __repr__(self):
        # Format to one decimal; if that decimal digit is zero, re-format
        # with no decimals.  (The local was renamed from ``str`` to avoid
        # shadowing the builtin.)
        text = '%.1f' % (self.__float__(),)
        if text[-1] == '0':
            return '%.0f' % self.__float__()
        else:
            return '%.1f' % self.__float__()
|
JuanCab/synthetic_HI_models
|
galaxyparam.py
|
Python
|
gpl-3.0
| 3,144
|
[
"Galaxy"
] |
8fa8acec8aa55b652378d520eb8c2c09c0fddeab760f63e541cbcaf98d3c80bd
|
import os
import sys
import json
from datamanagerpkg import ProtonCommunication_data_manager
from datamanagerpkg import GalaxyCommunication_data_manager
from sequencer.models import Experiments, GalaxyUsers
from sequencer.models import GalaxyJobs, ExperimentRawData
##########################
#URL SEQUENCER
##########################
from GlobalVariables import sequencer_base_url
from GlobalVariables import sequencer_user
from GlobalVariables import sequencer_password
from GlobalVariables import sequencer_severName
from GlobalVariables import sequencer_ExperimentLimit
from GlobalVariables import toolsInformation
##########################
#URL GALAXY
##########################
from GlobalVariables import galaxy_base_url
from GlobalVariables import apiKey
##########################
#NAs DIr folder
##########################
from GlobalVariables import nasInput
from GlobalVariables import CNVfolderName
from GlobalVariables import plasmaFolderName
from GlobalVariables import nasResults
from GlobalVariables import workflowPath
##########################
#SMTP folder
##########################
from GlobalVariables import smtpServerAphp
from GlobalVariables import smtpPortServer
from GlobalVariables import fromAddrOfficial
from sequencer.views import getDataPath
from datamanagerpkg import ProtonCommunication_data_manager
from datamanagerpkg import GalaxyCommunication_data_manager
from pprint import pprint
#open a workflow
# Parse the exported Galaxy workflow (.ga is JSON) and mirror its steps into
# the local Django models (Workflows / WorkflowsTools / Supportedfiles).
with open('/nas_Dir/workflow/Galaxy-Workflow-Plasma_mutation.ga') as data_file:
    data = json.load(data_file)
pprint(data)
#now I have the key in order
# Step keys are strings in the JSON; sort them numerically to preserve order.
stepkey=data['steps'].keys()
stepkey = [int(x) for x in stepkey]
stepkey.sort()
#create a workflow object
#~ u'annotation': u'plasma workflow to generates all the data',
#~ u'name': u'Plasma_mutation',
from sequencer.models import Workflows,WorkflowsTools,Supportedfiles
# Get-or-create the Workflows row named after the workflow.
tryexp = None
try:
    tryexp = Workflows.objects.get(name=str(data['name']))
except Workflows.DoesNotExist:
    tryexp = None
if (tryexp == None):
    workflow_local=Workflows(name=str(data['name']),description=str(data['name']))
    workflow_local.save()
workflow_local = Workflows.objects.get(name=str(data['name']))
for step in stepkey:
    if data['steps'][str(step)]['tool_id']!=None:
        #create a tool
        # NOTE(review): the existence lookup result ``tryexp`` is never
        # consulted below (the guard is commented out), so a new
        # WorkflowsTools row is created on every run — confirm intended.
        print("grou1")
        try:
            tryexp = WorkflowsTools.objects.get(primary_name=str(data['steps'][str(step)]['tool_id']+"_"+data['steps'][str(step)]['tool_version']+".json"))
        except WorkflowsTools.DoesNotExist:
            tryexp = None
        #~ if tryexp == None:
        print("grou2")
        newtool=WorkflowsTools(primary_name=str(data['steps'][str(step)]['tool_id']+"_"+data['steps'][str(step)]['tool_version']+".json"),
        name=str(data['steps'][str(step)]['tool_id']),
        version=str(data['steps'][str(step)]['tool_version']))
        newtool.save()
        print("grou3")
        workflow_local.tools_list.add(newtool)
        print("grou4")
        workflow_local.save()
        print( data['steps'][str(step)]['tool_id']+"_"+data['steps'][str(step)]['tool_version']+".json")
        #create a tool
        # Load the per-tool description JSON shipped alongside the workflow.
        with open(toolsInformation+data['steps'][str(step)]['tool_id']+"_"+data['steps'][str(step)]['tool_version']+".json") as data_file_tool:
            tool = json.load(data_file_tool)
        #~ print(tool['function'][0])
        print("#######################input")
        #~ print(tool['function'][0]['input'])
        for dataInput in tool['function'][0]['input'] :
            try:
                tryexp = Supportedfiles.objects.get(dataDescription=str(dataInput['dataDescription']))
            except Supportedfiles.DoesNotExist:
                tryexp = None
            newfile=Supportedfiles(dataHandle=str(dataInput['dataHandle']),
            dataDescription=str(dataInput['dataDescription']),
            dataFormatEdamOntology=str(dataInput['dataFormat'][0]['uri']))
            newfile.save()
            newtool.inputlist.add(newfile)
            newtool.save()
        #~ print("#######################dataInpty")
        print("#######################output")
        # NOTE(review): this second loop is labelled "output" and populates
        # ``outputlist`` but still iterates ...['input'] — looks like a
        # copy-paste slip; confirm whether ['output'] was intended.
        for dataInput in tool['function'][0]['input'] :
            try:
                tryexp = Supportedfiles.objects.get(dataDescription=str(dataInput['dataDescription']))
            except Supportedfiles.DoesNotExist:
                tryexp = None
            #~ if tryexp == None:
            newfile=Supportedfiles(dataHandle=str(dataInput['dataHandle']),
            dataDescription=str(dataInput['dataDescription']),
            dataFormatEdamOntology=str(dataInput['dataFormat'][0]['uri']) )
            newfile.save()
            newtool.outputlist.add(newfile)
            newtool.save()
|
CARPEM/GalaxyDocker
|
data-manager-hegp/analysisManager/analysismanager/addPlasmaMutation_Information.py
|
Python
|
mit
| 4,872
|
[
"Galaxy"
] |
6a3f07bcc7b2b508e7311eb63c95a313e4ba71175535b37c8d9ba23c32803da3
|
"""
A simple VTK widget for PyQt v4, the Qt v4 bindings for Python.
See http://www.trolltech.com for Qt documentation, and
http://www.riverbankcomputing.co.uk for PyQt.
This class is based on the vtkGenericRenderWindowInteractor and is
therefore fairly powerful. It should also play nicely with the
vtk3DWidget code.
Created by Prabhu Ramachandran, May 2002
Based on David Gobbi's QVTKRenderWidget.py
Changes by Gerard Vermeulen Feb. 2003
Win32 support.
Changes by Gerard Vermeulen, May 2003
Bug fixes and better integration with the Qt framework.
Changes by Phil Thompson, Nov. 2006
Ported to PyQt v4.
Added support for wheel events.
Changes by Phil Thompson, Oct. 2007
Bug fixes.
Changes by Phil Thompson, Mar. 2008
Added cursor support.
Changes by Rodrigo Mologni, Sep. 2013 (Credit to Daniele Esposti)
Bug fix to PySide: Converts PyCObject to void pointer.
"""
try:
from PyQt4 import QtCore, QtGui
except ImportError:
try:
from PySide import QtCore, QtGui
except ImportError:
raise ImportError("Cannot load either PyQt or PySide")
import vtk
class QVTKRenderWindowInteractor(QtGui.QWidget):
    """ A QVTKRenderWindowInteractor for Python and Qt. Uses a
    vtkGenericRenderWindowInteractor to handle the interactions. Use
    GetRenderWindow() to get the vtkRenderWindow. Create with the
    keyword stereo=1 in order to generate a stereo-capable window.
    The user interface is summarized in vtkInteractorStyle.h:
    - Keypress j / Keypress t: toggle between joystick (position
    sensitive) and trackball (motion sensitive) styles. In joystick
    style, motion occurs continuously as long as a mouse button is
    pressed. In trackball style, motion occurs when the mouse button
    is pressed and the mouse pointer moves.
    - Keypress c / Keypress o: toggle between camera and object
    (actor) modes. In camera mode, mouse events affect the camera
    position and focal point. In object mode, mouse events affect
    the actor that is under the mouse pointer.
    - Button 1: rotate the camera around its focal point (if camera
    mode) or rotate the actor around its origin (if actor mode). The
    rotation is in the direction defined from the center of the
    renderer's viewport towards the mouse position. In joystick mode,
    the magnitude of the rotation is determined by the distance the
    mouse is from the center of the render window.
    - Button 2: pan the camera (if camera mode) or translate the actor
    (if object mode). In joystick mode, the direction of pan or
    translation is from the center of the viewport towards the mouse
    position. In trackball mode, the direction of motion is the
    direction the mouse moves. (Note: with 2-button mice, pan is
    defined as <Shift>-Button 1.)
    - Button 3: zoom the camera (if camera mode) or scale the actor
    (if object mode). Zoom in/increase scale if the mouse position is
    in the top half of the viewport; zoom out/decrease scale if the
    mouse position is in the bottom half. In joystick mode, the amount
    of zoom is controlled by the distance of the mouse pointer from
    the horizontal centerline of the window.
    - Keypress 3: toggle the render window into and out of stereo
    mode. By default, red-blue stereo pairs are created. Some systems
    support Crystal Eyes LCD stereo glasses; you have to invoke
    SetStereoTypeToCrystalEyes() on the rendering window. Note: to
    use stereo you also need to pass a stereo=1 keyword argument to
    the constructor.
    - Keypress e: exit the application.
    - Keypress f: fly to the picked point
    - Keypress p: perform a pick operation. The render window interactor
    has an internal instance of vtkCellPicker that it uses to pick.
    - Keypress r: reset the camera view along the current view
    direction. Centers the actors and moves the camera so that all actors
    are visible.
    - Keypress s: modify the representation of all actors so that they
    are surfaces.
    - Keypress u: invoke the user-defined function. Typically, this
    keypress will bring up an interactor that you can type commands in.
    - Keypress w: modify the representation of all actors so that they
    are wireframe.
    """
    # Map between VTK and Qt cursors.
    _CURSOR_MAP = {
        0: QtCore.Qt.ArrowCursor, # VTK_CURSOR_DEFAULT
        1: QtCore.Qt.ArrowCursor, # VTK_CURSOR_ARROW
        2: QtCore.Qt.SizeBDiagCursor, # VTK_CURSOR_SIZENE
        3: QtCore.Qt.SizeFDiagCursor, # VTK_CURSOR_SIZENWSE
        4: QtCore.Qt.SizeBDiagCursor, # VTK_CURSOR_SIZESW
        5: QtCore.Qt.SizeFDiagCursor, # VTK_CURSOR_SIZESE
        6: QtCore.Qt.SizeVerCursor, # VTK_CURSOR_SIZENS
        7: QtCore.Qt.SizeHorCursor, # VTK_CURSOR_SIZEWE
        8: QtCore.Qt.SizeAllCursor, # VTK_CURSOR_SIZEALL
        9: QtCore.Qt.PointingHandCursor, # VTK_CURSOR_HAND
        10: QtCore.Qt.CrossCursor, # VTK_CURSOR_CROSSHAIR
    }
    # NOTE(review): this module is Python 2 / PyQt4-era code — ``has_key``,
    # ``raise X, msg`` and SIGNAL()-style connections are all py2/Qt4 only.
    def __init__(self, parent=None, wflags=QtCore.Qt.WindowFlags(), **kw):
        # the current button
        self._ActiveButton = QtCore.Qt.NoButton
        # private attributes
        self.__saveX = 0
        self.__saveY = 0
        self.__saveModifiers = QtCore.Qt.NoModifier
        self.__saveButtons = QtCore.Qt.NoButton
        # do special handling of some keywords:
        # stereo, rw
        stereo = 0
        if kw.has_key('stereo'):
            if kw['stereo']:
                stereo = 1
        rw = None
        if kw.has_key('rw'):
            rw = kw['rw']
        # create qt-level widget
        QtGui.QWidget.__init__(self, parent, wflags|QtCore.Qt.MSWindowsOwnDC)
        if rw: # user-supplied render window
            self._RenderWindow = rw
        else:
            self._RenderWindow = vtk.vtkRenderWindow()
        # Hand the native window handle to VTK.  Under PySide the handle is
        # a PyCObject and must be converted to a raw pointer first.
        WId = self.winId()
        if type(WId).__name__ == 'PyCObject':
            from ctypes import pythonapi, c_void_p, py_object
            pythonapi.PyCObject_AsVoidPtr.restype = c_void_p
            pythonapi.PyCObject_AsVoidPtr.argtypes = [py_object]
            WId = pythonapi.PyCObject_AsVoidPtr(WId)
        self._RenderWindow.SetWindowInfo(str(int(WId)))
        if stereo: # stereo mode
            self._RenderWindow.StereoCapableWindowOn()
            self._RenderWindow.SetStereoTypeToCrystalEyes()
        if kw.has_key('iren'):
            self._Iren = kw['iren']
        else:
            self._Iren = vtk.vtkGenericRenderWindowInteractor()
            self._Iren.SetRenderWindow(self._RenderWindow)
        # do all the necessary qt setup
        self.setAttribute(QtCore.Qt.WA_OpaquePaintEvent)
        self.setAttribute(QtCore.Qt.WA_PaintOnScreen)
        self.setMouseTracking(True) # get all mouse events
        self.setFocusPolicy(QtCore.Qt.WheelFocus)
        self.setSizePolicy(QtGui.QSizePolicy(QtGui.QSizePolicy.Expanding, QtGui.QSizePolicy.Expanding))
        # Bridge VTK's timer requests onto a Qt timer.
        self._Timer = QtCore.QTimer(self)
        self.connect(self._Timer, QtCore.SIGNAL('timeout()'), self.TimerEvent)
        self._Iren.AddObserver('CreateTimerEvent', self.CreateTimer)
        self._Iren.AddObserver('DestroyTimerEvent', self.DestroyTimer)
        self._Iren.GetRenderWindow().AddObserver('CursorChangedEvent',
                                                 self.CursorChangedEvent)
        #Create a hidden child widget and connect its destroyed signal to its
        #parent ``Finalize`` slot. The hidden children will be destroyed before
        #its parent thus allowing cleanup of VTK elements.
        self._hidden = QtGui.QWidget(self)
        self._hidden.hide()
        self.connect(self._hidden, QtCore.SIGNAL('destroyed()'), self.Finalize)
    def __getattr__(self, attr):
        """Makes the object behave like a vtkGenericRenderWindowInteractor"""
        if attr == '__vtk__':
            return lambda t=self._Iren: t
        elif hasattr(self._Iren, attr):
            return getattr(self._Iren, attr)
        else:
            raise AttributeError, self.__class__.__name__ + \
                  " has no attribute named " + attr
    def Finalize(self):
        '''
        Call internal cleanup method on VTK objects
        '''
        self._RenderWindow.Finalize()
    def CreateTimer(self, obj, evt):
        # VTK asked for a timer: fire TimerEvent every 10 ms.
        self._Timer.start(10)
    def DestroyTimer(self, obj, evt):
        self._Timer.stop()
        return 1
    def TimerEvent(self):
        self._Iren.TimerEvent()
    def CursorChangedEvent(self, obj, evt):
        """Called when the CursorChangedEvent fires on the render window."""
        # This indirection is needed since when the event fires, the current
        # cursor is not yet set so we defer this by which time the current
        # cursor should have been set.
        QtCore.QTimer.singleShot(0, self.ShowCursor)
    def HideCursor(self):
        """Hides the cursor."""
        self.setCursor(QtCore.Qt.BlankCursor)
    def ShowCursor(self):
        """Shows the cursor."""
        vtk_cursor = self._Iren.GetRenderWindow().GetCurrentCursor()
        qt_cursor = self._CURSOR_MAP.get(vtk_cursor, QtCore.Qt.ArrowCursor)
        self.setCursor(qt_cursor)
    def closeEvent(self, evt):
        self.Finalize()
    def sizeHint(self):
        return QtCore.QSize(400, 400)
    def paintEngine(self):
        # No Qt paint engine: VTK draws directly into the native window.
        return None
    def paintEvent(self, ev):
        self._Iren.Render()
    def resizeEvent(self, ev):
        w = self.width()
        h = self.height()
        # NOTE(review): calls the unbound vtkRenderWindow.SetSize directly,
        # presumably to bypass any SetSize override on the supplied render
        # window — confirm.
        vtk.vtkRenderWindow.SetSize(self._RenderWindow, w, h)
        self._Iren.SetSize(w, h)
        self._Iren.ConfigureEvent()
        self.update()
    def _GetCtrlShift(self, ev):
        # Extract ctrl/shift state from the event when available, otherwise
        # fall back to the modifiers recorded at the last mouse move.
        ctrl = shift = False
        if hasattr(ev, 'modifiers'):
            if ev.modifiers() & QtCore.Qt.ShiftModifier:
                shift = True
            if ev.modifiers() & QtCore.Qt.ControlModifier:
                ctrl = True
        else:
            if self.__saveModifiers & QtCore.Qt.ShiftModifier:
                shift = True
            if self.__saveModifiers & QtCore.Qt.ControlModifier:
                ctrl = True
        return ctrl, shift
    def enterEvent(self, ev):
        ctrl, shift = self._GetCtrlShift(ev)
        self._Iren.SetEventInformationFlipY(self.__saveX, self.__saveY,
                                            ctrl, shift, chr(0), 0, None)
        self._Iren.EnterEvent()
    def leaveEvent(self, ev):
        ctrl, shift = self._GetCtrlShift(ev)
        self._Iren.SetEventInformationFlipY(self.__saveX, self.__saveY,
                                            ctrl, shift, chr(0), 0, None)
        self._Iren.LeaveEvent()
    def mousePressEvent(self, ev):
        ctrl, shift = self._GetCtrlShift(ev)
        repeat = 0
        if ev.type() == QtCore.QEvent.MouseButtonDblClick:
            repeat = 1
        self._Iren.SetEventInformationFlipY(ev.x(), ev.y(),
                                            ctrl, shift, chr(0), repeat, None)
        self._ActiveButton = ev.button()
        if self._ActiveButton == QtCore.Qt.LeftButton:
            self._Iren.LeftButtonPressEvent()
        elif self._ActiveButton == QtCore.Qt.RightButton:
            self._Iren.RightButtonPressEvent()
        elif self._ActiveButton == QtCore.Qt.MidButton:
            self._Iren.MiddleButtonPressEvent()
    def mouseReleaseEvent(self, ev):
        ctrl, shift = self._GetCtrlShift(ev)
        self._Iren.SetEventInformationFlipY(ev.x(), ev.y(),
                                            ctrl, shift, chr(0), 0, None)
        if self._ActiveButton == QtCore.Qt.LeftButton:
            self._Iren.LeftButtonReleaseEvent()
        elif self._ActiveButton == QtCore.Qt.RightButton:
            self._Iren.RightButtonReleaseEvent()
        elif self._ActiveButton == QtCore.Qt.MidButton:
            self._Iren.MiddleButtonReleaseEvent()
    def mouseMoveEvent(self, ev):
        # Remember position/modifiers so enter/leave/key events (which lack
        # them) can replay the last known state.
        self.__saveModifiers = ev.modifiers()
        self.__saveButtons = ev.buttons()
        self.__saveX = ev.x()
        self.__saveY = ev.y()
        ctrl, shift = self._GetCtrlShift(ev)
        self._Iren.SetEventInformationFlipY(ev.x(), ev.y(),
                                            ctrl, shift, chr(0), 0, None)
        self._Iren.MouseMoveEvent()
    def keyPressEvent(self, ev):
        ctrl, shift = self._GetCtrlShift(ev)
        if ev.key() < 256:
            key = str(ev.text())
        else:
            key = chr(0)
        self._Iren.SetEventInformationFlipY(self.__saveX, self.__saveY,
                                            ctrl, shift, key, 0, None)
        self._Iren.KeyPressEvent()
        self._Iren.CharEvent()
    def keyReleaseEvent(self, ev):
        ctrl, shift = self._GetCtrlShift(ev)
        if ev.key() < 256:
            key = chr(ev.key())
        else:
            key = chr(0)
        self._Iren.SetEventInformationFlipY(self.__saveX, self.__saveY,
                                            ctrl, shift, key, 0, None)
        self._Iren.KeyReleaseEvent()
    def wheelEvent(self, ev):
        if ev.delta() >= 0:
            self._Iren.MouseWheelForwardEvent()
        else:
            self._Iren.MouseWheelBackwardEvent()
    def GetRenderWindow(self):
        return self._RenderWindow
    def Render(self):
        self.update()
def QVTKRenderWidgetConeExample():
    """A simple example that uses the QVTKRenderWindowInteractor class."""
    # every QT app needs an app
    app = QtGui.QApplication(['QVTKRenderWindowInteractor'])
    # create the widget
    widget = QVTKRenderWindowInteractor()
    widget.Initialize()
    widget.Start()
    # if you dont want the 'q' key to exit comment this.
    widget.AddObserver("ExitEvent", lambda o, e, a=app: a.quit())
    ren = vtk.vtkRenderer()
    widget.GetRenderWindow().AddRenderer(ren)
    # Standard VTK pipeline: cone source -> mapper -> actor -> renderer.
    cone = vtk.vtkConeSource()
    cone.SetResolution(8)
    coneMapper = vtk.vtkPolyDataMapper()
    coneMapper.SetInputConnection(cone.GetOutputPort())
    coneActor = vtk.vtkActor()
    coneActor.SetMapper(coneMapper)
    ren.AddActor(coneActor)
    # show the widget
    widget.show()
    # start event processing
    app.exec_()
# Run the cone demonstration when executed as a script.
if __name__ == "__main__":
    QVTKRenderWidgetConeExample()
|
timkrentz/SunTracker
|
IMU/VTK-6.2.0/Wrapping/Python/vtk/qt4/QVTKRenderWindowInteractor.py
|
Python
|
mit
| 14,630
|
[
"CRYSTAL",
"VTK"
] |
57cee05725deb35e7fd3e9780074a9916d112e211549ba0a090c5014a6ac5dfc
|
"""Generators for geometric graphs.
"""
from bisect import bisect_left
from itertools import accumulate, combinations, product
from math import sqrt
import math
try:
from scipy.spatial import cKDTree as KDTree
except ImportError:
_is_scipy_available = False
else:
_is_scipy_available = True
import networkx as nx
from networkx.utils import nodes_or_number, py_random_state
# Public generators exported by this module.
__all__ = [
    "geographical_threshold_graph",
    "waxman_graph",
    "navigable_small_world_graph",
    "random_geometric_graph",
    "soft_random_geometric_graph",
    "thresholded_random_geometric_graph",
]
def euclidean(x, y):
    """Return the Euclidean (L2) distance between the vectors ``x`` and ``y``.

    Each argument may be any iterable of numbers; both must have the
    same length.
    """
    squared_diffs = [(a - b) ** 2 for a, b in zip(x, y)]
    return sqrt(sum(squared_diffs))
def _fast_edges(G, radius, p):
    """Yield node pairs of `G` whose positions are within `radius` of each
    other under the Minkowski metric `p`, using a scipy KDTree.

    Requires scipy to be installed.
    """
    positions = nx.get_node_attributes(G, "pos")
    nodes, coords = list(zip(*positions.items()))
    tree = KDTree(coords)  # KDTree needs a concrete sequence, not a generator.
    index_pairs = tree.query_pairs(radius, p)
    return ((nodes[i], nodes[j]) for i, j in index_pairs)
def _slow_edges(G, radius, p):
"""Returns edge list of node pairs within `radius` of each other
using Minkowski distance metric `p`
Works without scipy, but in `O(n^2)` time.
"""
# TODO This can be parallelized.
edges = []
for (u, pu), (v, pv) in combinations(G.nodes(data="pos"), 2):
if sum(abs(a - b) ** p for a, b in zip(pu, pv)) <= radius ** p:
edges.append((u, v))
return edges
@py_random_state(5)
@nodes_or_number(0)
def random_geometric_graph(n, radius, dim=2, pos=None, p=2, seed=None):
    """Returns a random geometric graph in the unit cube of dimensions `dim`.

    The model places `n` nodes uniformly at random in the unit cube and
    joins two nodes by an edge whenever the Minkowski `p`-distance between
    them is at most `radius`.  A KDTree is used when SciPy is available,
    reducing the time complexity from $O(n^2)$ to $O(n)$.

    Parameters
    ----------
    n : int or iterable
        Number of nodes or iterable of nodes
    radius : float
        Distance threshold value
    dim : int, optional
        Dimension of graph
    pos : dict, optional
        A dictionary keyed by node with node positions as values; if not
        given, positions are drawn uniformly at random in the unit cube.
    p : float, optional
        Which Minkowski distance metric to use; must satisfy
        ``1 <= p <= infinity``.  Defaults to the Euclidean metric, p = 2.
        Not to be confused with the `p` of an Erdős-Rényi random graph,
        which represents probability.
    seed : integer, random_state, or None (default)
        Indicator of random number generation state.
        See :ref:`Randomness<randomness>`.

    Returns
    -------
    Graph
        A random geometric graph, undirected and without self-loops.
        Each node has a node attribute ``'pos'`` storing its position in
        Euclidean space.

    Examples
    --------
    >>> G = nx.random_geometric_graph(20, 0.1)

    References
    ----------
    .. [1] Penrose, Mathew, *Random Geometric Graphs*,
           Oxford Studies in Probability, 5, 2003.
    """
    n_name, nodes = n
    graph = nx.Graph()
    graph.add_nodes_from(nodes)
    # Without explicit positions, draw uniform coordinates in [0, 1)^dim
    # from the supplied random state.
    if pos is None:
        pos = {node: [seed.random() for _ in range(dim)] for node in nodes}
    nx.set_node_attributes(graph, pos, "pos")
    # KDTree-backed search when scipy is present; O(n^2) scan otherwise.
    edge_builder = _fast_edges if _is_scipy_available else _slow_edges
    graph.add_edges_from(edge_builder(graph, radius, p))
    return graph
@py_random_state(6)
@nodes_or_number(0)
def soft_random_geometric_graph(
    n, radius, dim=2, pos=None, p=2, p_dist=None, seed=None
):
    r"""Returns a soft random geometric graph in the unit cube.

    The model places `n` nodes uniformly at random in the unit cube of
    dimension `dim`.  Two nodes whose `p`-Minkowski distance ``dist`` is
    at most `radius` are joined by an edge with probability
    ``p_dist(dist)``; pairs farther apart than `radius` are never joined.

    When SciPy is available, the candidate pairs within `radius` are found
    with a KDTree, reducing the time complexity from :math:`O(n^2)` to
    :math:`O(n)`.

    Parameters
    ----------
    n : int or iterable
        Number of nodes or iterable of nodes.
    radius : float
        Distance threshold value.
    dim : int, optional
        Dimension of the graph.
    pos : dict, optional
        A dictionary keyed by node with node positions as values.
    p : float, optional
        Which Minkowski distance metric to use; must satisfy
        ``1 <= p <= infinity``.  Defaults to ``p = 2``, the Euclidean
        metric.  Not to be confused with the `p` of an Erdős-Rényi random
        graph, which is a probability.
    p_dist : function, optional
        A probability density function mapping a distance value to a
        connection probability in ``[0, 1]``.  The ``.pdf`` method of any
        ``scipy.stats`` distribution may be passed here.  If not supplied,
        an exponential distribution with rate parameter :math:`\lambda=1`
        is used.
    seed : integer, random_state, or None (default)
        Indicator of random number generation state.
        See :ref:`Randomness<randomness>`.

    Returns
    -------
    Graph
        A soft random geometric graph, undirected and without self-loops.
        Each node carries a ``'pos'`` attribute holding its position in
        Euclidean space, either as supplied through the ``pos`` keyword
        argument or as generated by this function.

    Examples
    --------
    Default graph::

        G = nx.soft_random_geometric_graph(50, 0.2)

    Custom positions and connection probability::

        >>> import random
        >>> import math
        >>> n = 100
        >>> pos = {i: (random.gauss(0, 2), random.gauss(0, 2)) for i in range(n)}
        >>> p_dist = lambda dist: math.exp(-dist)
        >>> G = nx.soft_random_geometric_graph(n, 0.2, pos=pos, p_dist=p_dist)

    References
    ----------
    .. [1] Penrose, Mathew D. "Connectivity of soft random geometric graphs."
       The Annals of Applied Probability 26.2 (2016): 986-1028.
    .. [2] scipy.stats -
       https://docs.scipy.org/doc/scipy/reference/tutorial/stats.html
    """
    _, node_list = n
    G = nx.Graph()
    G.name = f"soft_random_geometric_graph({n}, {radius}, {dim})"
    G.add_nodes_from(node_list)
    # Positions default to uniformly random vectors in the unit cube.
    if pos is None:
        pos = {node: [seed.random() for _ in range(dim)] for node in node_list}
    nx.set_node_attributes(G, pos, "pos")
    # Default connection probability decays exponentially with distance
    # (rate parameter lambda = 1).
    if p_dist is None:

        def p_dist(dist):
            return math.exp(-dist)

    inv_p = 1 / p

    def keep_edge(pair):
        u, v = pair
        dist = sum(abs(a - b) ** p for a, b in zip(pos[u], pos[v])) ** inv_p
        # The radius test is redundant for candidates produced by the KDTree
        # path but required for the brute-force path.  The short-circuit also
        # keeps the RNG draw conditional on the distance check, exactly as in
        # an if/else formulation.
        return dist <= radius and seed.random() < p_dist(dist)

    candidates = (
        _fast_edges(G, radius, p) if _is_scipy_available else combinations(G, 2)
    )
    G.add_edges_from(pair for pair in candidates if keep_edge(pair))
    return G
@py_random_state(7)
@nodes_or_number(0)
def geographical_threshold_graph(
    n, theta, dim=2, pos=None, weight=None, metric=None, p_dist=None, seed=None
):
    r"""Returns a geographical threshold graph.

    The model places $n$ nodes uniformly at random in a rectangular domain
    and assigns each node $u$ a weight $w_u$.  Nodes $u$ and $v$ are joined
    by an edge if

    .. math::

       (w_u + w_v)h(r) \ge \theta

    where `r` is the distance between `u` and `v`, $h(r)$ is a probability
    of connection as a function of `r` (the `p_dist` parameter), and
    :math:`\theta` is the threshold parameter.

    Parameters
    ----------
    n : int or iterable
        Number of nodes or iterable of nodes.
    theta : float
        Threshold value.
    dim : int, optional
        Dimension of the graph.
    pos : dict
        Node positions as a dictionary of tuples keyed by node.
    weight : dict
        Node weights as a dictionary of numbers keyed by node.
    metric : function
        A metric on vectors of numbers (represented as lists or tuples),
        taking two such vectors and returning a number, and satisfying the
        four axioms of a `metric`_: non-negativity, identity of
        indiscernibles, symmetry, and the triangle inequality.  Defaults to
        the Euclidean distance metric.

        .. _metric: https://en.wikipedia.org/wiki/Metric_%28mathematics%29
    p_dist : function, optional
        A probability density function mapping the metric value `r` to a
        connection probability in ``[0, 1]``.  The ``.pdf`` method of any
        ``scipy.stats`` distribution may be passed here [2].  If not
        supplied, the decay function :math:`r^{-2}` is used.
    seed : integer, random_state, or None (default)
        Indicator of random number generation state.
        See :ref:`Randomness<randomness>`.

    Returns
    -------
    Graph
        A random geographic threshold graph, undirected and without
        self-loops.  Each node carries ``pos`` and ``weight`` attributes
        holding its position and weight, either as supplied or as generated
        by this function.

    Examples
    --------
    Specify an alternate distance metric using the ``metric`` keyword
    argument, e.g. the taxicab metric instead of the default Euclidean
    metric::

        >>> dist = lambda x, y: sum(abs(a - b) for a, b in zip(x, y))
        >>> G = nx.geographical_threshold_graph(10, 0.1, metric=dist)

    Notes
    -----
    Unspecified weights are drawn from the exponential distribution with
    rate parameter $\lambda=1$; unspecified positions are drawn uniformly
    at random.  To use a different weight distribution::

        >>> import random
        >>> n = 20
        >>> w = {i: random.expovariate(5.0) for i in range(n)}
        >>> G = nx.geographical_threshold_graph(20, 50, weight=w)

    References
    ----------
    .. [1] Masuda, N., Miwa, H., Konno, N.:
       Geographical threshold graphs with small-world and scale-free
       properties.
       Physical Review E 71, 036108 (2005)
    .. [2] Milan Bradonjić, Aric Hagberg and Allon G. Percus,
       Giant component and connectivity in geographical threshold graphs,
       in Algorithms and Models for the Web-Graph (WAW 2007),
       Antony Bonato and Fan Chung (Eds), pp. 209--216, 2007
    """
    _, node_list = n
    G = nx.Graph()
    G.add_nodes_from(node_list)
    # Missing weights are drawn from an Exp(1) distribution.
    if weight is None:
        weight = {node: seed.expovariate(1) for node in G}
    # Missing positions are drawn uniformly from the unit cube.
    if pos is None:
        pos = {node: [seed.random() for _ in range(dim)] for node in node_list}
    # Euclidean distance is the default metric.
    if metric is None:
        metric = euclidean
    nx.set_node_attributes(G, weight, "weight")
    nx.set_node_attributes(G, pos, "pos")
    # Default connection function: inverse-square decay with distance.
    if p_dist is None:

        def p_dist(r):
            return r ** -2

    def crosses_threshold(pair):
        # Join u and v when their combined weight, attenuated by distance,
        # reaches the threshold theta.
        u, v = pair
        combined_weight = weight[u] + weight[v]
        return combined_weight * p_dist(metric(pos[u], pos[v])) >= theta

    G.add_edges_from(
        pair for pair in combinations(G, 2) if crosses_threshold(pair)
    )
    return G
@py_random_state(6)
@nodes_or_number(0)
def waxman_graph(
    n, beta=0.4, alpha=0.1, L=None, domain=(0, 0, 1, 1), metric=None, seed=None
):
    r"""Returns a Waxman random graph.

    The Waxman model places `n` nodes uniformly at random in a rectangular
    domain and joins each pair of nodes at distance `d` with probability

    .. math::
       p = \beta \exp(-d / \alpha L).

    Both Waxman models are implemented, selected by the `L` keyword:

    * Waxman-1: if `L` is not specified, it is the maximum distance between
      any pair of nodes.
    * Waxman-2: if `L` is specified, the distance for each pair is drawn
      uniformly at random from the interval `[0, L]`.

    Parameters
    ----------
    n : int or iterable
        Number of nodes or iterable of nodes.
    beta : float
        Model parameter.
    alpha : float
        Model parameter.
    L : float, optional
        Maximum distance between nodes.  If not specified, the actual
        distance is calculated.
    domain : four-tuple of numbers, optional
        Domain size, given as a tuple of the form
        `(x_min, y_min, x_max, y_max)`.
    metric : function
        A metric on vectors of numbers (represented as lists or tuples),
        taking two such vectors and returning a number, and satisfying the
        four axioms of a `metric`_: non-negativity, identity of
        indiscernibles, symmetry, and the triangle inequality.  Defaults to
        the Euclidean distance metric.

        .. _metric: https://en.wikipedia.org/wiki/Metric_%28mathematics%29
    seed : integer, random_state, or None (default)
        Indicator of random number generation state.
        See :ref:`Randomness<randomness>`.

    Returns
    -------
    Graph
        A random Waxman graph, undirected and without self-loops.  Each
        node carries a ``'pos'`` attribute holding its position as
        generated by this function.

    Examples
    --------
    Use the taxicab metric instead of the default Euclidean metric::

        >>> dist = lambda x, y: sum(abs(a - b) for a, b in zip(x, y))
        >>> G = nx.waxman_graph(10, 0.5, 0.1, metric=dist)

    Notes
    -----
    Starting in NetworkX 2.0 the parameters `alpha` and `beta` align with
    their usual roles in the probability distribution; earlier versions had
    them reversed, and their positions in the calling sequence were swapped
    accordingly to minimize backward incompatibility.

    References
    ----------
    .. [1] B. M. Waxman, *Routing of multipoint connections*.
       IEEE J. Select. Areas Commun. 6(9),(1988) 1617--1622.
    """
    _, node_list = n
    G = nx.Graph()
    G.add_nodes_from(node_list)
    xmin, ymin, xmax, ymax = domain
    # Place every node uniformly at random inside the rectangular domain.
    pos = {node: (seed.uniform(xmin, xmax), seed.uniform(ymin, ymax)) for node in G}
    nx.set_node_attributes(G, pos, "pos")
    # Euclidean distance is the default metric.
    if metric is None:
        metric = euclidean
    if L is None:
        # Waxman-1: L is the largest distance realized by any node pair,
        # and each pair's distance is its actual metric distance.
        L = max(metric(x, y) for x, y in combinations(pos.values(), 2))

        def dist(u, v):
            return metric(pos[u], pos[v])

    else:
        # Waxman-2: each candidate pair gets an independent distance drawn
        # uniformly from [0, L].
        def dist(u, v):
            return seed.random() * L

    scale = alpha * L

    def join_decided(pair):
        # NOTE: seed.random() is evaluated before dist(*pair), preserving
        # the RNG consumption order of the original formulation.
        return seed.random() < beta * math.exp(-dist(*pair) / scale)

    G.add_edges_from(pair for pair in combinations(G, 2) if join_decided(pair))
    return G
@py_random_state(5)
def navigable_small_world_graph(n, p=1, q=1, r=2, dim=2, seed=None):
    r"""Returns a navigable small-world graph.

    A navigable small-world graph is a directed grid with additional
    long-range connections that are chosen randomly.

      [...] we begin with a set of nodes [...] that are identified with the
      set of lattice points in an $n \times n$ square,
      $\{(i, j): i \in \{1, 2, \ldots, n\}, j \in \{1, 2, \ldots, n\}\}$,
      and we define the *lattice distance* between two nodes $(i, j)$ and
      $(k, l)$ to be the number of "lattice steps" separating them:
      $d((i, j), (k, l)) = |k - i| + |l - j|$.

      For a universal constant $p >= 1$, the node $u$ has a directed edge to
      every other node within lattice distance $p$---these are its *local
      contacts*. For universal constants $q >= 0$ and $r >= 0$ we also
      construct directed edges from $u$ to $q$ other nodes (the *long-range
      contacts*) using independent random trials; the $i$th directed edge
      from $u$ has endpoint $v$ with probability proportional to
      $[d(u,v)]^{-r}$.

      -- [1]_

    Parameters
    ----------
    n : int
        The length of one side of the lattice; the number of nodes in
        the graph is therefore $n^2$.
    p : int
        The diameter of short range connections. Each node is joined with
        every other node within this lattice distance.
    q : int
        The number of long-range connections for each node.
    r : float
        Exponent for decaying probability of connections. The probability
        of connecting to a node at lattice distance $d$ is $1/d^r$.
    dim : int
        Dimension of grid.
    seed : integer, random_state, or None (default)
        Indicator of random number generation state.
        See :ref:`Randomness<randomness>`.

    Raises
    ------
    NetworkXException
        If ``p < 1``, ``q < 0``, or ``r < 0``.

    References
    ----------
    .. [1] J. Kleinberg. The small-world phenomenon: An algorithmic
       perspective. Proc. 32nd ACM Symposium on Theory of Computing, 2000.
    """
    if p < 1:
        raise nx.NetworkXException("p must be >= 1")
    if q < 0:
        raise nx.NetworkXException("q must be >= 0")
    if r < 0:
        # BUG FIX: the message previously said "r must be >= 1" although the
        # guard (and the documented constraint) only requires r >= 0.
        raise nx.NetworkXException("r must be >= 0")
    G = nx.DiGraph()
    nodes = list(product(range(n), repeat=dim))
    for p1 in nodes:
        # probs[0] = 0 so that bisect_left on the running CDF maps a uniform
        # draw in [0, cdf[-1]] to the index of a node other than p1.
        probs = [0]
        for p2 in nodes:
            if p1 == p2:
                continue
            # Lattice (taxicab) distance between p1 and p2.
            d = sum(abs(b - a) for a, b in zip(p1, p2))
            if d <= p:
                # Local contact: deterministic edge within distance p.
                G.add_edge(p1, p2)
            probs.append(d ** -r)
        cdf = list(accumulate(probs))
        # q long-range contacts, each sampled with probability
        # proportional to d^-r via inverse-CDF sampling.
        for _ in range(q):
            target = nodes[bisect_left(cdf, seed.uniform(0, cdf[-1]))]
            G.add_edge(p1, target)
    return G
@py_random_state(7)
@nodes_or_number(0)
def thresholded_random_geometric_graph(
    n, radius, theta, dim=2, pos=None, weight=None, p=2, seed=None
):
    r"""Returns a thresholded random geometric graph in the unit cube.

    The model places `n` nodes uniformly at random in the unit cube of
    dimension `dim` and assigns each node `u` a weight :math:`w_u`.  Nodes
    `u` and `v` are joined by an edge if their `p`-Minkowski distance is at
    most `radius` and :math:`w_u + w_v \ge \theta`, the threshold parameter.

    When SciPy is available, the candidate pairs within `radius` are found
    with a KDTree, reducing the time complexity from :math:`O(n^2)` to
    :math:`O(n)`.

    Parameters
    ----------
    n : int or iterable
        Number of nodes or iterable of nodes.
    radius : float
        Distance threshold value.
    theta : float
        Threshold value.
    dim : int, optional
        Dimension of the graph.
    pos : dict, optional
        A dictionary keyed by node with node positions as values.
    weight : dict, optional
        Node weights as a dictionary of numbers keyed by node.
    p : float, optional
        Which Minkowski distance metric to use; must satisfy
        ``1 <= p <= infinity``.  Defaults to ``p = 2``, the Euclidean
        metric.  Not to be confused with the `p` of an Erdős-Rényi random
        graph, which is a probability.
    seed : integer, random_state, or None (default)
        Indicator of random number generation state.
        See :ref:`Randomness<randomness>`.

    Returns
    -------
    Graph
        A thresholded random geographic graph, undirected and without
        self-loops.  Each node carries a ``'pos'`` attribute holding its
        position and a ``'weight'`` attribute holding its weight, either as
        supplied or as generated by this function.

    Examples
    --------
    Default graph::

        G = nx.thresholded_random_geometric_graph(50, 0.2, 0.1)

    Custom positions and weights::

        >>> import random
        >>> import math
        >>> n = 50
        >>> pos = {i: (random.gauss(0, 2), random.gauss(0, 2)) for i in range(n)}
        >>> w = {i: random.expovariate(5.0) for i in range(n)}
        >>> G = nx.thresholded_random_geometric_graph(n, 0.2, 0.1, 2, pos, w)

    Notes
    -----
    Unspecified weights are drawn from the exponential distribution with
    rate parameter :math:`\lambda=1`; unspecified positions are drawn
    uniformly at random from the unit cube.

    References
    ----------
    .. [1] http://cole-maclean.github.io/blog/files/thesis.pdf
    """
    _, node_list = n
    G = nx.Graph()
    G.name = f"thresholded_random_geometric_graph({n}, {radius}, {theta}, {dim})"
    G.add_nodes_from(node_list)
    # Missing weights are drawn from an Exp(1) distribution.
    if weight is None:
        weight = {node: seed.expovariate(1) for node in G}
    # Missing positions are drawn uniformly from the unit cube.
    if pos is None:
        pos = {node: [seed.random() for _ in range(dim)] for node in node_list}
    nx.set_node_attributes(G, weight, "weight")
    nx.set_node_attributes(G, pos, "pos")
    inv_p = 1 / p

    def joinable(pair):
        # Pure predicate (no RNG): the combined-weight and distance checks
        # may be evaluated in either order without changing the result.
        u, v = pair
        if weight[u] + weight[v] < theta:
            return False
        dist = sum(abs(a - b) ** p for a, b in zip(pos[u], pos[v])) ** inv_p
        # The radius test is redundant for candidates produced by the KDTree
        # path but required for the brute-force path.
        return dist <= radius

    candidates = (
        _fast_edges(G, radius, p) if _is_scipy_available else combinations(G, 2)
    )
    G.add_edges_from(pair for pair in candidates if joinable(pair))
    return G
|
SpaceGroupUCL/qgisSpaceSyntaxToolkit
|
esstoolkit/external/networkx/generators/geometric.py
|
Python
|
gpl-3.0
| 28,960
|
[
"Gaussian"
] |
a517d8a2d0d3867c5876eff37e568d44c852589933bb5637b4c9449ccd85b1a2
|
#!/usr/bin/env python
import sys, os
from argparse import ArgumentParser, FileType
def get_data(small=False):
    """Download the reference data, aligner indexes, and read sets used by
    the HISAT2 evaluation, skipping anything already present on disk.

    Creates (if needed) and populates the ``data``, ``indexes``, and
    ``reads`` directories under the current working directory by shelling
    out to ``wget`` (and ``tar`` for archives).

    Parameters
    ----------
    small : bool
        When True, fetch only the small (chromosome 22) test set: skip the
        whole-genome files, the whole-genome indexes, the real read sets,
        and the 20M-read simulations.
    """
    data_root = "http://www.ccb.jhu.edu/software/hisat2/downloads/evaluation"

    # --- Reference genome, SNPs, and gene annotations --------------------
    if not os.path.exists("data"):
        os.mkdir("data")
    os.chdir("data")
    genome_files = ["genome.fa", "genome.fa.fai", "genome.gtf",
                    "snpCommon.txt", "genome.snp", "genome.ss", "genome.exon"]
    small_genome_files = ["22.fa", "22.fa.fai", "22.gtf", "22.snp", "22.ss", "22.exon",
                          "22_20-21M.fa", "22_20-21M.fa.fai", "22_20-21M.gtf",
                          "22_20-21M.snp", "22_20-21M.ss", "22_20-21M.exon"]
    files = []
    if not small:
        files += genome_files
    files += small_genome_files
    for fname in files:  # renamed from `file` to avoid shadowing the builtin
        if os.path.exists(fname):
            continue
        wget_cmd = "wget %s/data/%s" % (data_root, fname)
        # BUG FIX: `print >> sys.stderr, cmd` is Python-2-only syntax and a
        # SyntaxError under Python 3; sys.stderr.write works on both.
        sys.stderr.write(wget_cmd + "\n")
        os.system(wget_cmd)
    os.chdir("..")

    # --- Prebuilt aligner indexes ----------------------------------------
    if not os.path.exists("indexes"):
        os.mkdir("indexes")
    os.chdir("indexes")
    aligners = ["HISAT2", "HISAT", "Bowtie", "STAR", "GSNAP"]
    for genome in ["genome", "22", "22_20-21M"]:
        if small and genome == "genome":
            continue
        for aligner in aligners:
            if genome == "genome":
                aligner_dir = aligner
            else:
                aligner_dir = aligner + "_" + genome
            if os.path.exists(aligner_dir):
                continue
            cmd = "wget %s/indexes/%s.tar.gz; tar xvzf %s.tar.gz; rm %s.tar.gz" % \
                (data_root, aligner_dir, aligner_dir, aligner_dir)
            sys.stderr.write(cmd + "\n")
            os.system(cmd)
    os.chdir("..")

    # --- Simulated and real read sets ------------------------------------
    if not os.path.exists("reads"):
        os.mkdir("reads")
    os.chdir("reads")
    for read_type in ["simulation", "real"]:  # renamed from `type` (builtin)
        if small and read_type == "real":
            continue
        if not os.path.exists(read_type):
            os.mkdir(read_type)
        os.chdir(read_type)
        if read_type == "simulation":
            files = ["1M_DNA_reads_22",
                     "1M_DNA_mismatch_reads_22",
                     "1M_DNA_snp_reads_22",
                     "1M_DNA_mismatch_snp_reads_22",
                     "1M_RNA_reads_22",
                     "1M_RNA_constant_reads_22",
                     "1M_RNA_mismatch_reads_22",
                     "1M_RNA_snp_reads_22",
                     "1M_RNA_mismatch_snp_reads_22",
                     "1M_RNA_reads_22_20-21M",
                     "20M_DNA_reads_genome",
                     "20M_DNA_snp_reads_genome",
                     "20M_RNA_reads_genome",
                     "20M_RNA_snp_reads_genome"]
        else:
            files = ["108M_RNA_wgEncodeCshlLongRnaSeq",
                     "62M_RNA_SRR353653",
                     "80M_DNA_SRR345300",
                     "5M_DNA_NA12878D"]
        for fname in files:
            # The 20M-read simulations are part of the large set only.
            if small and fname.find("20M") != -1:
                continue
            if os.path.exists(fname):
                continue
            cmd = "wget %s/reads/%s/%s.tar.gz; tar xvzf %s.tar.gz; rm %s.tar.gz" % \
                (data_root, read_type, fname, fname, fname)
            sys.stderr.write(cmd + "\n")
            os.system(cmd)
        os.chdir("..")
    os.chdir("..")
if __name__ == "__main__":
    # Command-line front end: a single optional flag selecting the small
    # (chromosome 22 only) test set.
    cli = ArgumentParser(
        description='Get reference genome, annotations, and indexes')
    cli.add_argument('-s', '--small',
                     dest='small',
                     action='store_true',
                     default=False,
                     help='small testset')
    options = cli.parse_args()
    get_data(options.small)
|
infphilo/hisat2
|
evaluation/get_data.py
|
Python
|
gpl-3.0
| 3,781
|
[
"Bowtie"
] |
1e0e8df3017445271da4de1e4e63a9abca55765d227daf4fca0b8ec39afaf627
|
#!/usr/bin/env python
#
# AST.py
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License version 2 as
# published by the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston,
# MA 02111-1307 USA
import logging
import PyV8
import traceback
log = logging.getLogger("Thug")
class AST(object):
    """Visitor over a PyV8-compiled JavaScript AST.

    Walks the script's syntax tree collecting assigned/declared variable
    names (``self.names``) and positions where the analysis should break
    (``self.breakpoints`` as ``(kind, pos)`` tuples), and flags oversized
    literals/eval arguments as potential shellcode for the Thug logging
    subsystem.
    """

    # Breakpoint kinds stored as the first element of each
    # ``self.breakpoints`` tuple.
    (AssignBreakPoint,
     LoopBreakPoint) = range(0, 2)

    # Assignment-style operators that mark a statement as an assignment
    # (plain assignment, compound assignments, and variable initialization).
    AssignOps = [PyV8.AST.Op.ASSIGN,
                 PyV8.AST.Op.ASSIGN_ADD,
                 PyV8.AST.Op.ASSIGN_BIT_AND,
                 PyV8.AST.Op.ASSIGN_BIT_OR,
                 PyV8.AST.Op.ASSIGN_BIT_XOR,
                 PyV8.AST.Op.ASSIGN_DIV,
                 PyV8.AST.Op.ASSIGN_MOD,
                 PyV8.AST.Op.ASSIGN_MUL,
                 PyV8.AST.Op.ASSIGN_SAR,
                 PyV8.AST.Op.ASSIGN_SHL,
                 PyV8.AST.Op.ASSIGN_SHR,
                 PyV8.AST.Op.ASSIGN_SUB,
                 PyV8.AST.Op.INIT_VAR]

    def __init__(self, window, script):
        """Compile `script` and immediately walk its AST.

        `window` is the DOM window object used to resolve global
        declarations (see onVariableDeclaration/onFunctionDeclaration).
        """
        self.names = set()
        self.inLoop = False
        self.inBlock = True
        self.exitingLoop = 0
        self.assignStatement = False
        self.breakpoints = set()
        self.window = window
        self.walk(script)
        self.debug(self.breakpoints)
        self.debug(self.names)

    def debug(self, msg):
        """Emit `msg` through the Thug logger when AST debugging is on."""
        if log.ThugOpts.ast_debug:
            log.debug(msg)

    def checkExitingLoop(self, pos):
        """Record a LoopBreakPoint at `pos` for each pending loop exit."""
        if self.exitingLoop > 0:
            self.debug("\tExiting Loop: %d" % (self.exitingLoop, ))
            self.exitingLoop -= 1
            self.breakpoints.add((self.LoopBreakPoint, pos))

    def walk(self, script):
        """Compile `script` with PyV8 and visit the resulting AST.

        On a UnicodeDecodeError the script's encoding is detected and the
        compile retried; any other failure is deliberately swallowed
        (best-effort analysis must not abort the page processing).
        """
        self.block_no = 1
        try:
            PyV8.JSEngine().compile(script).visit(self)
        except UnicodeDecodeError:
            enc = log.Encoding.detect(script, safe = True)
            if enc is None:
                return
            PyV8.JSEngine().compile(script.decode(enc['encoding'])).visit(self)
        except: #pylint:disable=bare-except
            pass

    def onProgram(self, prog):
        """Visit the program root: declarations first, then statements."""
        self.json = prog.toJSON()
        self.ast = prog.toAST()
        self.debug(self.json)
        self.debug("[*] Program")
        self.debug("\tProgram startPos: %d" % (prog.startPos, ))
        self.debug("\tProgram endPos: %d" % (prog.endPos, ))
        for decl in prog.scope.declarations:
            decl.visit(self)
        for stmt in prog.body:
            stmt.visit(self)

    def _enterBlock(self):
        self.inBlock = True

    def _exitBlock(self):
        self.inBlock = False

    def onBlock(self, block):
        """Visit a `{...}` block, tracking the in-block flag and a counter."""
        self.debug("[*] Entering Block #%d" % (self.block_no, ))
        self._enterBlock()
        for stmt in block.statements:
            stmt.visit(self)
        self._exitBlock()
        self.debug("[*] Exiting Block #%d" % (self.block_no, ))
        self.block_no += 1

    def onExpressionStatement(self, stmt):
        """Visit an expression statement.

        If the nested visit flagged it as an assignment (see onAssignment),
        record an AssignBreakPoint at the expression position.
        """
        self.debug("[*] Expression Statement")
        self.debug("\tStatement: %s" % (stmt, ))
        self.debug("\tStatement type: %s" % (stmt.type, ))
        self.debug("\tStatement position: %s" % (stmt.expression.pos, ))
        self.checkExitingLoop(stmt.expression.pos)
        stmt.expression.visit(self)
        if self.assignStatement:
            if self.inBlock:
                # FIXME
                # AstCallRuntime has no 'pos' attribute
                try:
                    pos = stmt.expression.pos
                except: #pylint:disable=bare-except
                    traceback.print_exc()
                    return
            else:
                pos = stmt.expression.pos
            self.breakpoints.add((self.AssignBreakPoint, pos))
            self.assignStatement = False

    def onVariableDeclaration(self, decl):
        """Visit a var declaration; touch globals on the window and
        remember `var`-mode names."""
        var = decl.proxy
        self.debug("[*] Variable Declaration Statement")
        self.debug("\tVariable name: %s" % (var.name, ))
        if decl.scope.isGlobal:
            # NOTE(review): getattr for side effect only — presumably forces
            # the property into existence on the DOM window; confirm.
            getattr(self.window, var.name, None)
        if decl.mode == PyV8.AST.VarMode.var:
            self.names.add(var.name)

    def onFunctionDeclaration(self, decl):
        """Visit a function declaration and recurse into nested functions."""
        f = decl.proxy
        self.debug("[*] Function Declaration Statement")
        self.debug("\tFunction name: %s" % (f.name, ))
        if decl.scope.isGlobal:
            getattr(self.window, f.name, None)
        for d in decl.scope.declarations:
            if not getattr(d, 'function', None):
                continue
            d.function.visit(self)
            #for stmt in d.function.body:
            #    stmt.visit(self)

    def onAssignment(self, expr):
        """Visit an assignment; outside loops, flag it for the enclosing
        expression statement and record the target name."""
        self.debug("[*] Assignment Statement")
        self.debug("\tAssignment op: %s" % (expr.op, ))
        self.debug("\tAssignment pos: %s" % (expr.pos, ))
        self.debug("\tAssignment target: %s" % (expr.target, ))
        self.debug("\tAssignment value: %s" % (expr.value, ))
        if not self.inLoop:
            if expr.op in self.AssignOps:
                self.assignStatement = True
                self.names.add(str(expr.target))
        expr.target.visit(self)
        expr.value.visit(self)

    def onIfStatement(self, stmt):
        """Visit an if statement: condition, then both branches if present."""
        self.debug("[*] If Statement")
        self.debug("\tIf condition: %s" % (stmt.condition, ))
        self.debug("\tIf position: %s" % (stmt.pos, ))
        stmt.condition.visit(self)
        if stmt.hasThenStatement:
            stmt.thenStatement.visit(self)
        if stmt.hasElseStatement:
            stmt.elseStatement.visit(self)

    def enterLoop(self):
        self.inLoop = True

    def exitLoop(self):
        # Increment the pending-exit counter consumed by checkExitingLoop.
        self.inLoop = False
        self.exitingLoop += 1

    def onForStatement(self, stmt):
        """Visit a for statement (init/next/condition/body) inside the loop
        tracking window."""
        self.debug("[*] For Statement")
        self.debug("\tInit condition: %s" % (stmt.init, ))
        self.debug("\tNext condition: %s" % (stmt.nextStmt, ))
        self.debug("\tEnd condition: %s" % (stmt.condition, ))
        self.debug("\tFor position: %s" % (stmt.pos))
        self.checkExitingLoop(stmt.pos)
        self.enterLoop()
        if stmt.init:
            stmt.init.visit(self)
        if stmt.nextStmt:
            stmt.nextStmt.visit(self)
        if stmt.condition:
            stmt.condition.visit(self)
        if stmt.body:
            stmt.body.visit(self)
        self.exitLoop()

    def onWhileStatement(self, stmt):
        """Visit a while statement inside the loop tracking window."""
        self.debug("[*] While Statement")
        self.debug("\tWhile position: %s" % (stmt.pos,))
        self.checkExitingLoop(stmt.pos)
        self.enterLoop()
        stmt.condition.visit(self)
        stmt.body.visit(self)
        self.exitLoop()

    def onDoWhileStatement(self, stmt):
        """Visit a do-while statement inside the loop tracking window."""
        self.debug("[*] Do-While Statement")
        self.debug("\tDo-While position: %s" % (stmt.pos,))
        self.checkExitingLoop(stmt.pos)
        self.enterLoop()
        stmt.condition.visit(self)
        stmt.body.visit(self)
        self.exitLoop()

    def onForInStatement(self, stmt):
        """Visit a for-in statement inside the loop tracking window."""
        self.debug("[*] For-In Statement")
        self.debug("\tFor-In position: %s" % (stmt.pos,))
        self.checkExitingLoop(stmt.pos)
        self.enterLoop()
        stmt.enumerable.visit(self)
        stmt.body.visit(self)
        self.exitLoop()

    def handle_eval(self, args):
        """Warn when an eval() argument is suspiciously long (> 64 chars)."""
        for arg in args:
            if len(str(arg)) > 64:
                log.warning("[AST]: Eval argument length > 64")

    def onCall(self, expr):
        """Visit a call expression; dispatch to a handle_<name> hook
        (e.g. handle_eval) when one exists for the callee."""
        self.debug("[*] Call")
        self.debug("\tCall position: %s" % (expr.pos, ))
        self.debug("\tCall expr: %s" % (expr.expression, ))
        self.debug("\tCall arguments")
        for arg in expr.args:
            arg.visit(self)
        handle = getattr(self, "handle_%s" % (expr.expression, ), None)
        if handle:
            handle(expr.args)
        expr.expression.visit(self)

    def onCallNew(self, expr):
        """Visit a `new` expression; same handle_<name> dispatch as onCall."""
        self.debug("[*] CallNew")
        self.debug("\tCall position: %s" % (expr.pos, ))
        self.debug("\tCall expr: %s" % (expr.expression, ))
        handle = getattr(self, "handle_%s" % (expr.expression, ), None)
        if handle:
            handle(expr.args)
        for arg in expr.args:
            arg.visit(self)

    def onCallRuntime(self, expr):
        """Visit a V8 runtime call, recursing into its arguments."""
        self.debug("[*] CallRuntime")
        self.debug("\tCall name: %s" % (expr.name, ))
        for arg in expr.args:
            arg.visit(self)

    def onFunctionLiteral(self, litr):
        """Visit a function literal: its declarations, then its body."""
        self.debug("\tFunction Literal: %s" % (litr.name, ))
        for decl in litr.scope.declarations:
            decl.visit(self)
        for e in litr.body:
            e.visit(self)

    def onLiteral(self, litr):
        """Visit a literal; long literals (> 256 chars) are logged as
        potential shellcode."""
        if len(str(litr)) > 256:
            log.ThugLogging.shellcodes.add(str(litr).lstrip('"').rstrip('"'))
        self.debug("\tLiteral: %s" % (litr, ))

    def onReturnStatement(self, stmt):
        """Visit a return statement's expression."""
        self.debug("[*] Return Statement")
        self.debug("\tReturn position: %s" % (stmt.pos, ))
        stmt.expression.visit(self)

    def onCompareOperation(self, stmt):
        """Visit a comparison, recursing into both operands."""
        self.debug("[*] Compare Operation")
        self.debug("\tCompare Left: %s" % (stmt.left, ))
        self.debug("\tCompare Operation: %s" % (stmt.op, ))
        self.debug("\tCompare Right: %s" % (stmt.right, ))
        stmt.left.visit(self)
        stmt.right.visit(self)

    def onCountOperation(self, stmt):
        """Visit an increment/decrement operation."""
        self.debug("[*] Count Operation: %s" % (stmt.op, ))
        stmt.expression.visit(self)

    def onVariableProxy(self, expr):
        """Visit a variable reference (leaf node; debug logging only)."""
        self.debug("\tVariable: %s" % (expr, ))
|
qistoph/thug
|
src/AST/AST.py
|
Python
|
gpl-2.0
| 10,198
|
[
"VisIt"
] |
aded855bd0880bc47cd3fbccd664ef7636498a13473d80924f92eecb0c348786
|
#!/usr/bin/python
import os, subprocess
import textwrap, argparse
if __name__ == '__main__':
    # Counter is only needed by this script body, so import it locally.
    from collections import Counter

    def _line_multiset(path):
        """Return a Counter (multiset) of the lines in *path*.

        The file handle is closed deterministically; the original code leaked
        one handle per line count via ``sum(1 for line in open(...))``.
        """
        with open(path) as handle:
            return Counter(handle)

    argument_parser = argparse.ArgumentParser(
        prog='stability_multiple_files.py',
        formatter_class=argparse.RawDescriptionHelpFormatter,
        description=textwrap.dedent('''\
        create a graphical functions from input data, multiple files
        --------------------------------
        example of use $python3 %(prog)s
        '''))
    argument_parser.add_argument('--inputdir', dest='inputdir', required=True, type=str, help='input directory (required)')
    argument_parser.add_argument('--language', dest='lng', action='append', required=False, type=str, help='language (default all, optional)')
    args = argument_parser.parse_args()
    inputdir = args.inputdir
    lng = args.lng

    # Collect the first-level directories and the set of language files
    # (<lang>.tab) found inside them.
    list_directories = []
    lang_files = []
    for d1 in os.listdir(inputdir):
        list_directories.append(d1)
        for d2 in os.listdir(os.path.join(inputdir, d1)):
            lng_file = d2.replace('.tab', '')
            if lng_file not in lang_files:
                lang_files.append(lng_file)
    print(lang_files)

    # If no language was requested, use all of them; otherwise verify every
    # requested language is present in the input files.
    if lng is None:
        languages = lang_files
    else:
        for l in lng:
            if l not in lang_files:
                print("Language must be present in files, options:" + str(lang_files) + "\n")
                exit(1)
        languages = lng

    with open(os.path.join(inputdir, '..', 'stability_senses_2.csv'), "w") as out_file_2:
        # Header: two columns per unordered directory pair.
        visit = []
        for elem_1 in list_directories:
            for elem_2 in list_directories:
                if elem_1 != elem_2 and elem_1 not in visit and elem_2 not in visit:
                    out_file_2.write(elem_1 + " vs " + elem_2 + " / " + elem_1 + " , ")
                    out_file_2.write(elem_1 + " vs " + elem_2 + " / " + elem_2 + " , ")
            if elem_1 not in visit:
                visit.append(elem_1)
        out_file_2.write("\n")

        # Body: for every language and every unordered directory pair, write
        # the fraction of shared lines relative to each directory's file.
        for lang in languages:
            visit = []
            with open(os.path.join(inputdir, '..', 'stability_senses_' + lang + '.tab'), "w") as out_file:
                for elem_1 in list_directories:
                    for elem_2 in list_directories:
                        if elem_1 != elem_2 and elem_1 not in visit and elem_2 not in visit:
                            counts_1 = _line_multiset(os.path.join(inputdir, elem_1, lang + '.tab'))
                            counts_2 = _line_multiset(os.path.join(inputdir, elem_2, lang + '.tab'))
                            num_lines_senses_1 = sum(counts_1.values())
                            num_lines_senses_2 = sum(counts_2.values())
                            # Multiset intersection is what the previous
                            # `sort | comm -1 -2 | wc | gawk` shell pipeline
                            # computed, without shell=True string commands or
                            # the stray tmp1.txt/tmp2.txt files it left behind.
                            # Lines are compared with their trailing newline,
                            # matching comm on newline-terminated files.
                            equals = sum((counts_1 & counts_2).values())
                            out_file.write(str(float(equals) / num_lines_senses_1) + " ")
                            out_file.write(elem_1 + "_vs_" + elem_2 + "\n")
                            out_file_2.write(str(float(equals) / num_lines_senses_1) + " , ")
                            out_file_2.write(str(float(equals) / num_lines_senses_2) + " , ")
                    if elem_1 not in visit:
                        visit.append(elem_1)
            out_file_2.write("\n")
#subprocess.check_output("sort -r "+inputdir+"/../stability_senses.tab > "+inputdir+"/../stability_senses_ord.tab", shell=True)
|
daniparera/MCR
|
Senses/analisys/others/stability_multiple_files.py
|
Python
|
gpl-2.0
| 3,732
|
[
"VisIt"
] |
20509b749f594c268086890d73c32126f7487e3a89add5aefed3692a37aa5066
|
#!/usr/bin/env python
#
# E-Mail post-processing script for NZBGet
#
# Copyright (C) 2013-2017 Andrey Prygunkov <hugbug@users.sourceforge.net>
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
### NZBGET POST-PROCESSING SCRIPT ###
# Send E-Mail notification.
#
# This script sends E-Mail notification when the job is done.
#
# NOTE: This script requires Python to be installed on your system.
##############################################################################
### OPTIONS ###
# When to send the message (Always, OnFailure).
#SendMail=Always
# Email address you want this email to be sent from.
#From="NZBGet" <myaccount@gmail.com>
# Email address you want this email to be sent to.
#
# Multiple addresses can be separated with comma.
#To=myaccount@gmail.com
# SMTP server host.
#Server=smtp.gmail.com
# SMTP server port (1-65535).
#Port=25
# Secure communication using TLS/SSL (yes, no, force).
# no - plain text communication (insecure);
# yes - switch to secure session using StartTLS command;
# force - start secure session on encrypted socket.
#Encryption=yes
# SMTP server user name, if required.
#Username=myaccount
# SMTP server password, if required.
#Password=mypass
# To check connection parameters click the button.
#ConnectionTest@Send Test E-Mail
# Append statistics to the message (yes, no).
#Statistics=yes
# Append list of files to the message (yes, no).
#
# Add the list of downloaded files (the content of destination directory).
#FileList=yes
# Append broken-log to the message (yes, no).
#
# Add the content of file _brokenlog.txt. This file contains the list of damaged
# files and the result of par-check/repair. For successful downloads the broken-log
# is usually deleted by cleanup-script and therefore is not sent.
#BrokenLog=yes
# Append nzb log to the message (Always, Never, OnFailure).
#
# Add the download and post-processing log of active job.
#NzbLog=OnFailure
### NZBGET POST-PROCESSING SCRIPT ###
##############################################################################
import os
import sys
import datetime
import smtplib
from email.mime.text import MIMEText
from email.header import Header
try: # python 2
from urllib2 import quote
from xmlrpclib import ServerProxy
except ImportError: # python 3
from urllib.parse import quote
from xmlrpc.client import ServerProxy
# Exit codes used by NZBGet
POSTPROCESS_SUCCESS=93
POSTPROCESS_ERROR=94
POSTPROCESS_NONE=95
# Check if the script is called from nzbget 15.0 or later
# (the NZBOP_NZBLOG option only exists from 15.0 on, so its absence means an
# older nzbget or a manual invocation).
if not 'NZBOP_NZBLOG' in os.environ:
    print('*** NZBGet post-processing script ***')
    print('This script is supposed to be called from nzbget (15.0 or later).')
    sys.exit(POSTPROCESS_ERROR)
print('[DETAIL] Script successfully started')
sys.stdout.flush()
# All of these script options must have been provided by nzbget as env vars.
required_options = ('NZBPO_FROM', 'NZBPO_TO', 'NZBPO_SERVER', 'NZBPO_PORT', 'NZBPO_ENCRYPTION', 'NZBPO_USERNAME', 'NZBPO_PASSWORD')
for optname in required_options:
    if (not optname in os.environ):
        # optname[6:] strips the 'NZBPO_' prefix for the user-facing name.
        print('[ERROR] Option %s is missing in configuration file. Please check script settings' % optname[6:])
        sys.exit(POSTPROCESS_ERROR)
# Check if the script is executed from settings page with a custom command
command = os.environ.get('NZBCP_COMMAND')
test_mode = command == 'ConnectionTest'
if command != None and not test_mode:
    print('[ERROR] Invalid command ' + command)
    sys.exit(POSTPROCESS_ERROR)
# In test mode there is no real download, so pretend it succeeded.
status = os.environ.get('NZBPP_STATUS') if not test_mode else 'SUCCESS/ALL'
total_status = os.environ.get('NZBPP_TOTALSTATUS') if not test_mode else 'SUCCESS'
# If any script fails the status of the item in the history is "WARNING/SCRIPT".
# This status however is not passed to pp-scripts in the env var "NZBPP_STATUS"
# because most scripts are independent of each other and should work even
# if a previous script has failed. But not in the case of E-Mail script,
# which should take the status of the previous scripts into account as well.
if total_status == 'SUCCESS' and os.environ.get('NZBPP_SCRIPTSTATUS') == 'FAILURE':
    total_status = 'WARNING'
    status = 'WARNING/SCRIPT'
success = total_status == 'SUCCESS'
if success and os.environ.get('NZBPO_SENDMAIL') == 'OnFailure' and not test_mode:
    print('[INFO] Skipping sending of message for successful download')
    sys.exit(POSTPROCESS_NONE)
# Compose the subject line and the start of the message body.
if success:
    subject = 'Success for "%s"' % (os.environ.get('NZBPP_NZBNAME', 'Test download'))
    text = 'Download of "%s" has successfully completed.' % (os.environ.get('NZBPP_NZBNAME', 'Test download'))
else:
    subject = 'Failure for "%s"' % (os.environ['NZBPP_NZBNAME'])
    text = 'Download of "%s" has failed.' % (os.environ['NZBPP_NZBNAME'])
text += '\nStatus: %s' % status
# A ServerProxy is only needed when statistics or the nzb log will be added.
if (os.environ.get('NZBPO_STATISTICS') == 'yes' or \
    os.environ.get('NZBPO_NZBLOG') == 'Always' or \
    (os.environ.get('NZBPO_NZBLOG') == 'OnFailure' and not success)) and \
    not test_mode:
    # To get statistics or the post-processing log we connect to NZBGet via XML-RPC.
    # For more info visit http://nzbget.net/api
    # First we need to know connection info: host, port and password of NZBGet server.
    # NZBGet passes all configuration options to post-processing script as
    # environment variables.
    host = os.environ['NZBOP_CONTROLIP'];
    port = os.environ['NZBOP_CONTROLPORT'];
    username = os.environ['NZBOP_CONTROLUSERNAME'];
    password = os.environ['NZBOP_CONTROLPASSWORD'];
    # '0.0.0.0' means "listen on all interfaces"; connect via loopback instead.
    if host == '0.0.0.0': host = '127.0.0.1'
    # Build a URL for XML-RPC requests
    rpcUrl = 'http://%s:%s@%s:%s/xmlrpc' % (quote(username), quote(password), host, port);
    # Create remote server object
    server = ServerProxy(rpcUrl)
if os.environ.get('NZBPO_STATISTICS') == 'yes' and not test_mode:
    # Find correct nzb in method listgroups
    groups = server.listgroups(0)
    nzbID = int(os.environ['NZBPP_NZBID'])
    for nzbGroup in groups:
        if nzbGroup['NZBID'] == nzbID:
            break
    text += '\n\nStatistics:';
    # add download size
    DownloadedSize = float(nzbGroup['DownloadedSizeMB'])
    unit = ' MB'
    if DownloadedSize > 1024:
        DownloadedSize = DownloadedSize / 1024 # GB
        unit = ' GB'
    text += '\nDownloaded size: %.2f' % (DownloadedSize) + unit
    # add average download speed
    DownloadedSizeMB = float(nzbGroup['DownloadedSizeMB'])
    DownloadTimeSec = float(nzbGroup['DownloadTimeSec'])
    if DownloadTimeSec > 0: # check x/0 errors
        avespeed = (DownloadedSizeMB/DownloadTimeSec) # MB/s
        unit = ' MB/s'
        if avespeed < 1:
            avespeed = avespeed * 1024 # KB/s
            unit = ' KB/s'
        text += '\nAverage download speed: %.2f' % (avespeed) + unit
def format_time_sec(sec):
Hour = sec/3600
Min = (sec - (sec/3600)*3600)/60
Sec = (sec - (sec/3600)*3600)%60
return '%d:%02d:%02d' % (Hour,Min,Sec)
    # add times
    text += '\nTotal time: ' + format_time_sec(int(nzbGroup['DownloadTimeSec']) + int(nzbGroup['PostTotalTimeSec']))
    text += '\nDownload time: ' + format_time_sec(int(nzbGroup['DownloadTimeSec']))
    # ParTimeSec includes repair time, so subtract it to get verification only.
    text += '\nVerification time: ' + format_time_sec(int(nzbGroup['ParTimeSec']) - int(nzbGroup['RepairTimeSec']))
    text += '\nRepair time: ' + format_time_sec(int(nzbGroup['RepairTimeSec']))
    text += '\nUnpack time: ' + format_time_sec(int(nzbGroup['UnpackTimeSec']))
# add list of downloaded files
files = False
if os.environ.get('NZBPO_FILELIST') == 'yes' and not test_mode:
    text += '\n\nFiles:'
    for dirname, dirnames, filenames in os.walk(os.environ['NZBPP_DIRECTORY']):
        for filename in filenames:
            # Paths are made relative to the destination directory.
            text += '\n' + os.path.join(dirname, filename)[len(os.environ['NZBPP_DIRECTORY']) + 1:]
            files = True
    if not files:
        text += '\n<no files found in the destination directory (moved by a script?)>'
# add _brokenlog.txt (if exists)
if os.environ.get('NZBPO_BROKENLOG') == 'yes' and not test_mode:
    brokenlog = '%s/_brokenlog.txt' % os.environ['NZBPP_DIRECTORY']
    if os.path.exists(brokenlog):
        text += '\n\nBrokenlog:\n' + open(brokenlog, 'r').read().strip()
# add post-processing log
if (os.environ.get('NZBPO_NZBLOG') == 'Always' or \
    (os.environ.get('NZBPO_NZBLOG') == 'OnFailure' and not success)) and \
    not test_mode:
    # To get the item log we connect to NZBGet via XML-RPC and call
    # method "loadlog", which returns the log for a given nzb item.
    # For more info visit http://nzbget.net/api
    # Call remote method 'loadlog'
    nzbid = int(os.environ['NZBPP_NZBID'])
    log = server.loadlog(nzbid, 0, 10000)
    # Now iterate through entries and save them to message text
    if len(log) > 0:
        text += '\n\nNzb-log:';
        for entry in log:
            text += '\n%s\t%s\t%s' % (entry['Kind'], datetime.datetime.fromtimestamp(int(entry['Time'])), entry['Text'])
# Create message
print('[DETAIL] Creating Email')
# UTF-8 throughout so non-ASCII nzb names survive in subject and body.
msg = MIMEText(text.encode('utf-8'), 'plain', 'utf-8')
msg['Subject'] = Header(subject, 'utf-8')
msg['From'] = os.environ['NZBPO_FROM']
msg['To'] = os.environ['NZBPO_TO']
msg['Date'] = datetime.datetime.utcnow().strftime("%a, %d %b %Y %H:%M:%S +0000")
msg['X-Application'] = 'NZBGet'
# Send message
print('[DETAIL] Sending E-Mail')
sys.stdout.flush()
try:
    # Encryption option: 'force' = SSL socket, 'yes' = StartTLS, 'no' = plain.
    if os.environ['NZBPO_ENCRYPTION'] == 'force':
        smtp = smtplib.SMTP_SSL(os.environ['NZBPO_SERVER'], os.environ['NZBPO_PORT'])
    else:
        smtp = smtplib.SMTP(os.environ['NZBPO_SERVER'], os.environ['NZBPO_PORT'])
    if os.environ['NZBPO_ENCRYPTION'] == 'yes':
        smtp.starttls()
    # Login is optional; skip it when no credentials were configured.
    if os.environ['NZBPO_USERNAME'] != '' and os.environ['NZBPO_PASSWORD'] != '':
        smtp.login(os.environ['NZBPO_USERNAME'], os.environ['NZBPO_PASSWORD'])
    # 'To' may hold several comma-separated recipients.
    smtp.sendmail(os.environ['NZBPO_FROM'], os.environ['NZBPO_TO'].split(','), msg.as_string())
    smtp.quit()
except Exception as err:
    print('[ERROR] %s' % err)
    sys.exit(POSTPROCESS_ERROR)
# All OK, returning exit status 'POSTPROCESS_SUCCESS' (int <93>) to let NZBGet know
# that our script has successfully completed.
sys.exit(POSTPROCESS_SUCCESS)
|
nzbget/nzbget
|
scripts/EMail.py
|
Python
|
gpl-2.0
| 10,475
|
[
"VisIt"
] |
14b9192cfb4b13d715866c3ae31cbc55052da2ea0272ebd6aeb8d741ca58ca5b
|
# class generated by DeVIDE::createDeVIDEModuleFromVTKObject
from module_kits.vtk_kit.mixins import SimpleVTKClassModuleBase
import vtk
class vtkMedicalImageReader2(SimpleVTKClassModuleBase):
    """DeVIDE wrapper module around VTK's vtkMedicalImageReader2.

    Auto-generated by DeVIDE::createDeVIDEModuleFromVTKObject (see header
    comment); all behaviour is delegated to SimpleVTKClassModuleBase.
    """
    def __init__(self, module_manager):
        # The positional tuples appear to be input/output type specs: no
        # inputs, one 'vtkImageData' output — TODO confirm against
        # SimpleVTKClassModuleBase. replaceDoc=True presumably copies the
        # VTK class documentation onto the module.
        SimpleVTKClassModuleBase.__init__(
            self, module_manager,
            vtk.vtkMedicalImageReader2(), 'Processing.',
            (), ('vtkImageData',),
            replaceDoc=True,
            inputFunctions=None, outputFunctions=None)
|
nagyistoce/devide
|
modules/vtk_basic/vtkMedicalImageReader2.py
|
Python
|
bsd-3-clause
| 486
|
[
"VTK"
] |
d88b6762995d9a702587597572829c04c588a824268cf0514d3676a5d1698396
|
#!/usr/bin/env python
#
# Copyright (c) 2016 Matt Davis, <mdavis@ansible.com>
# Chris Houseknecht, <house@redhat.com>
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
#
'''
Azure External Inventory Script
===============================
Generates dynamic inventory by making API requests to the Azure Resource
Manager using the Azure Python SDK. For instructions on installing the
Azure Python SDK see http://azure-sdk-for-python.readthedocs.org/
Authentication
--------------
The order of precedence is command line arguments, environment variables,
and finally the [default] profile found in ~/.azure/credentials.
If using a credentials file, it should be an ini formatted file with one or
more sections, which we refer to as profiles. The script looks for a
[default] section, if a profile is not specified either on the command line
or with an environment variable. The keys in a profile will match the
list of command line arguments below.
For command line arguments and environment variables specify a profile found
in your ~/.azure/credentials file, or a service principal or Active Directory
user.
Command line arguments:
- profile
- client_id
- secret
- subscription_id
- tenant
- ad_user
- password
Environment variables:
- AZURE_PROFILE
- AZURE_CLIENT_ID
- AZURE_SECRET
- AZURE_SUBSCRIPTION_ID
- AZURE_TENANT
- AZURE_AD_USER
- AZURE_PASSWORD
Run for Specific Host
-----------------------
When run for a specific host using the --host option, a resource group is
required. For a specific host, this script returns the following variables:
{
"ansible_host": "XXX.XXX.XXX.XXX",
"computer_name": "computer_name2",
"fqdn": null,
"id": "/subscriptions/subscription-id/resourceGroups/galaxy-production/providers/Microsoft.Compute/virtualMachines/object-name",
"image": {
"offer": "CentOS",
"publisher": "OpenLogic",
"sku": "7.1",
"version": "latest"
},
"location": "westus",
"mac_address": "00-00-5E-00-53-FE",
"name": "object-name",
"network_interface": "interface-name",
"network_interface_id": "/subscriptions/subscription-id/resourceGroups/galaxy-production/providers/Microsoft.Network/networkInterfaces/object-name1",
"network_security_group": null,
"network_security_group_id": null,
"os_disk": {
"name": "object-name",
"operating_system_type": "Linux"
},
"plan": null,
"powerstate": "running",
"private_ip": "172.26.3.6",
"private_ip_alloc_method": "Static",
"provisioning_state": "Succeeded",
"public_ip": "XXX.XXX.XXX.XXX",
"public_ip_alloc_method": "Static",
"public_ip_id": "/subscriptions/subscription-id/resourceGroups/galaxy-production/providers/Microsoft.Network/publicIPAddresses/object-name",
"public_ip_name": "object-name",
"resource_group": "galaxy-production",
"security_group": "object-name",
"security_group_id": "/subscriptions/subscription-id/resourceGroups/galaxy-production/providers/Microsoft.Network/networkSecurityGroups/object-name",
"tags": {
"db": "database"
},
"type": "Microsoft.Compute/virtualMachines",
"virtual_machine_size": "Standard_DS4"
}
Groups
------
When run in --list mode, instances are grouped by the following categories:
- azure
- location
- resource_group
- security_group
- tag key
- tag key_value
Control groups using azure_rm.ini or set environment variables:
AZURE_GROUP_BY_RESOURCE_GROUP=yes
AZURE_GROUP_BY_LOCATION=yes
AZURE_GROUP_BY_SECURITY_GROUP=yes
AZURE_GROUP_BY_TAG=yes
Select hosts within specific resource groups by assigning a comma separated list to:
AZURE_RESOURCE_GROUPS=resource_group_a,resource_group_b
Select hosts for specific tag key by assigning a comma separated list of tag keys to:
AZURE_TAGS=key1,key2,key3
Select hosts for specific locations:
AZURE_LOCATIONS=eastus,westus,eastus2
Or, select hosts for specific tag key:value pairs by assigning a comma separated list key:value pairs to:
AZURE_TAGS=key1:value1,key2:value2
If you don't need the powerstate, you can improve performance by turning off powerstate fetching:
AZURE_INCLUDE_POWERSTATE=no
azure_rm.ini
------------
As mentioned above, you can control execution using environment variables or a .ini file. A sample
azure_rm.ini is included. The name of the .ini file is the basename of the inventory script (in this case
'azure_rm') with a .ini extension. It also assumes the .ini file is alongside the script. To specify
a different path for the .ini file, define the AZURE_INI_PATH environment variable:
export AZURE_INI_PATH=/path/to/custom.ini
Powerstate:
-----------
The powerstate attribute indicates whether or not a host is running. If the value is 'running', the machine is
up. If the value is anything other than 'running', the machine is down, and will be unreachable.
Examples:
---------
Execute /bin/uname on all instances in the galaxy-qa resource group
$ ansible -i azure_rm.py galaxy-qa -m shell -a "/bin/uname -a"
Use the inventory script to print instance specific information
$ contrib/inventory/azure_rm.py --host my_instance_host_name --pretty
Use with a playbook
$ ansible-playbook -i contrib/inventory/azure_rm.py my_playbook.yml --limit galaxy-qa
Insecure Platform Warning
-------------------------
If you receive InsecurePlatformWarning from urllib3, install the
requests security packages:
pip install requests[security]
author:
- Chris Houseknecht (@chouseknecht)
- Matt Davis (@nitzmahone)
Company: Ansible by Red Hat
Version: 1.0.0
'''
import argparse
import ConfigParser
import json
import os
import re
import sys
from distutils.version import LooseVersion
from os.path import expanduser
HAS_AZURE = True
HAS_AZURE_EXC = None
try:
from msrestazure.azure_exceptions import CloudError
from azure.mgmt.compute import __version__ as azure_compute_version
from azure.common import AzureMissingResourceHttpError, AzureHttpError
from azure.common.credentials import ServicePrincipalCredentials, UserPassCredentials
from azure.mgmt.network.network_management_client import NetworkManagementClient
from azure.mgmt.resource.resources.resource_management_client import ResourceManagementClient
from azure.mgmt.compute.compute_management_client import ComputeManagementClient
except ImportError as exc:
HAS_AZURE_EXC = exc
HAS_AZURE = False
# Credential attribute names -> the environment variables they may be read
# from (also the key names accepted in a ~/.azure/credentials profile).
AZURE_CREDENTIAL_ENV_MAPPING = dict(
    profile='AZURE_PROFILE',
    subscription_id='AZURE_SUBSCRIPTION_ID',
    client_id='AZURE_CLIENT_ID',
    secret='AZURE_SECRET',
    tenant='AZURE_TENANT',
    ad_user='AZURE_AD_USER',
    password='AZURE_PASSWORD'
)
# Inventory behaviour settings -> the environment variables that override them
# (also configurable via azure_rm.ini, per the module docstring).
AZURE_CONFIG_SETTINGS = dict(
    resource_groups='AZURE_RESOURCE_GROUPS',
    tags='AZURE_TAGS',
    locations='AZURE_LOCATIONS',
    include_powerstate='AZURE_INCLUDE_POWERSTATE',
    group_by_resource_group='AZURE_GROUP_BY_RESOURCE_GROUP',
    group_by_location='AZURE_GROUP_BY_LOCATION',
    group_by_security_group='AZURE_GROUP_BY_SECURITY_GROUP',
    group_by_tag='AZURE_GROUP_BY_TAG'
)
# Minimum supported version of the azure.mgmt.compute package.
AZURE_MIN_VERSION = "0.30.0rc5"
def azure_id_to_dict(id):
    """Map each path segment of an Azure resource id to the segment after it.

    Note the pairs overlap: '/a/b/c' -> {'a': 'b', 'b': 'c'}, so both
    'resourceGroups' and other provider path components are usable as keys.
    """
    pieces = re.sub(r'^\/', '', id).split('/')
    return {pieces[i]: pieces[i + 1] for i in range(len(pieces) - 1)}
class AzureRM(object):
    """Resolves Azure credentials and lazily builds the management clients.

    Credential precedence: command line arguments -> environment variables ->
    the [default] profile in ~/.azure/credentials.
    """

    def __init__(self, args):
        """Authenticate using *args* (an argparse namespace).

        Raises Exception (via self.fail) when credentials are missing or
        incomplete.
        """
        self._args = args
        self._compute_client = None
        self._resource_client = None
        self._network_client = None
        self.debug = False
        if args.debug:
            self.debug = True
        self.credentials = self._get_credentials(args)
        if not self.credentials:
            self.fail("Failed to get credentials. Either pass as parameters, set environment variables, "
                      "or define a profile in ~/.azure/credentials.")
        if self.credentials.get('subscription_id', None) is None:
            self.fail("Credentials did not include a subscription_id value.")
        self.log("setting subscription_id")
        self.subscription_id = self.credentials['subscription_id']
        # Prefer a service principal (client_id/secret/tenant); fall back to
        # an Active Directory user/password pair.
        if self.credentials.get('client_id') is not None and \
           self.credentials.get('secret') is not None and \
           self.credentials.get('tenant') is not None:
            self.azure_credentials = ServicePrincipalCredentials(client_id=self.credentials['client_id'],
                                                                 secret=self.credentials['secret'],
                                                                 tenant=self.credentials['tenant'])
        elif self.credentials.get('ad_user') is not None and self.credentials.get('password') is not None:
            self.azure_credentials = UserPassCredentials(self.credentials['ad_user'], self.credentials['password'])
        else:
            self.fail("Failed to authenticate with provided credentials. Some attributes were missing. "
                      "Credentials must include client_id, secret and tenant or ad_user and password.")

    def log(self, msg):
        """Print *msg* when --debug was given; otherwise do nothing."""
        if self.debug:
            print (msg + u'\n')

    def fail(self, msg):
        """Abort with *msg*; callers catch this and exit."""
        raise Exception(msg)

    def _get_profile(self, profile="default"):
        """Read credentials for *profile* from ~/.azure/credentials.

        Returns a credentials dict, or None when the profile does not provide
        a usable client_id or ad_user.
        """
        path = expanduser("~")
        path += "/.azure/credentials"
        try:
            config = ConfigParser.ConfigParser()
            config.read(path)
        except Exception as exc:
            self.fail("Failed to access {0}. Check that the file exists and you have read "
                      "access. {1}".format(path, str(exc)))
        credentials = dict()
        for key in AZURE_CREDENTIAL_ENV_MAPPING:
            try:
                credentials[key] = config.get(profile, key, raw=True)
            except (ConfigParser.NoSectionError, ConfigParser.NoOptionError):
                # Key not present in this profile -- leave it unset. (The
                # previous bare `except:` also swallowed KeyboardInterrupt
                # and SystemExit.)
                pass
        if credentials.get('client_id') is not None or credentials.get('ad_user') is not None:
            return credentials
        return None

    def _get_env_credentials(self):
        """Build credentials from the AZURE_* environment variables.

        An AZURE_PROFILE variable redirects to the named credentials-file
        profile. Returns None when nothing usable is set.
        """
        env_credentials = dict()
        for attribute, env_variable in AZURE_CREDENTIAL_ENV_MAPPING.items():
            env_credentials[attribute] = os.environ.get(env_variable, None)
        if env_credentials['profile'] is not None:
            credentials = self._get_profile(env_credentials['profile'])
            return credentials
        if env_credentials['client_id'] is not None or env_credentials['ad_user'] is not None:
            return env_credentials
        return None

    def _get_credentials(self, params):
        # Get authentication credentials.
        # Precedence: cmd line parameters-> environment variables-> default profile in ~/.azure/credentials.
        self.log('Getting credentials')
        arg_credentials = dict()
        for attribute, env_variable in AZURE_CREDENTIAL_ENV_MAPPING.items():
            arg_credentials[attribute] = getattr(params, attribute)
        # try module params
        if arg_credentials['profile'] is not None:
            self.log('Retrieving credentials with profile parameter.')
            credentials = self._get_profile(arg_credentials['profile'])
            return credentials
        if arg_credentials['client_id'] is not None:
            self.log('Received credentials from parameters.')
            return arg_credentials
        # try environment
        env_credentials = self._get_env_credentials()
        if env_credentials:
            self.log('Received credentials from env.')
            return env_credentials
        # try default profile from ~./azure/credentials
        default_credentials = self._get_profile()
        if default_credentials:
            self.log('Retrieved default profile credentials from ~/.azure/credentials.')
            return default_credentials
        return None

    def _register(self, key):
        """One-time registration of resource provider *key* for the subscription."""
        try:
            # We have to perform the one-time registration here. Otherwise, we receive an error the first
            # time we attempt to use the requested client.
            resource_client = self.rm_client
            resource_client.providers.register(key)
        except Exception as exc:
            self.fail("One-time registration of {0} failed - {1}".format(key, str(exc)))

    @property
    def network_client(self):
        """Lazily created NetworkManagementClient (registers Microsoft.Network once)."""
        self.log('Getting network client')
        if not self._network_client:
            self._network_client = NetworkManagementClient(self.azure_credentials, self.subscription_id)
            self._register('Microsoft.Network')
        return self._network_client

    @property
    def rm_client(self):
        """Lazily created ResourceManagementClient."""
        self.log('Getting resource manager client')
        if not self._resource_client:
            self._resource_client = ResourceManagementClient(self.azure_credentials, self.subscription_id)
        return self._resource_client

    @property
    def compute_client(self):
        """Lazily created ComputeManagementClient (registers Microsoft.Compute once)."""
        self.log('Getting compute client')
        if not self._compute_client:
            self._compute_client = ComputeManagementClient(self.azure_credentials, self.subscription_id)
            self._register('Microsoft.Compute')
        return self._compute_client
class AzureInventory(object):
def __init__(self):
self._args = self._parse_cli_args()
try:
rm = AzureRM(self._args)
except Exception as e:
sys.exit("{0}".format(str(e)))
self._compute_client = rm.compute_client
self._network_client = rm.network_client
self._resource_client = rm.rm_client
self._security_groups = None
self.resource_groups = []
self.tags = None
self.locations = None
self.replace_dash_in_groups = False
self.group_by_resource_group = True
self.group_by_location = True
self.group_by_security_group = True
self.group_by_tag = True
self.include_powerstate = True
self._inventory = dict(
_meta=dict(
hostvars=dict()
),
azure=[]
)
self._get_settings()
if self._args.resource_groups:
self.resource_groups = self._args.resource_groups.split(',')
if self._args.tags:
self.tags = self._args.tags.split(',')
if self._args.locations:
self.locations = self._args.locations.split(',')
if self._args.no_powerstate:
self.include_powerstate = False
self.get_inventory()
print (self._json_format_dict(pretty=self._args.pretty))
sys.exit(0)
def _parse_cli_args(self):
# Parse command line arguments
parser = argparse.ArgumentParser(
description='Produce an Ansible Inventory file for an Azure subscription')
parser.add_argument('--list', action='store_true', default=True,
help='List instances (default: True)')
parser.add_argument('--debug', action='store_true', default=False,
help='Send debug messages to STDOUT')
parser.add_argument('--host', action='store',
help='Get all information about an instance')
parser.add_argument('--pretty', action='store_true', default=False,
help='Pretty print JSON output(default: False)')
parser.add_argument('--profile', action='store',
help='Azure profile contained in ~/.azure/credentials')
parser.add_argument('--subscription_id', action='store',
help='Azure Subscription Id')
parser.add_argument('--client_id', action='store',
help='Azure Client Id ')
parser.add_argument('--secret', action='store',
help='Azure Client Secret')
parser.add_argument('--tenant', action='store',
help='Azure Tenant Id')
parser.add_argument('--ad-user', action='store',
help='Active Directory User')
parser.add_argument('--password', action='store',
help='password')
parser.add_argument('--resource-groups', action='store',
help='Return inventory for comma separated list of resource group names')
parser.add_argument('--tags', action='store',
help='Return inventory for comma separated list of tag key:value pairs')
parser.add_argument('--locations', action='store',
help='Return inventory for comma separated list of locations')
parser.add_argument('--no-powerstate', action='store_true', default=False,
help='Do not include the power state of each virtual host')
return parser.parse_args()
def get_inventory(self):
if len(self.resource_groups) > 0:
# get VMs for requested resource groups
for resource_group in self.resource_groups:
try:
virtual_machines = self._compute_client.virtual_machines.list(resource_group)
except Exception as exc:
sys.exit("Error: fetching virtual machines for resource group {0} - {1}".format(resource_group,
str(exc)))
if self._args.host or self.tags:
selected_machines = self._selected_machines(virtual_machines)
self._load_machines(selected_machines)
else:
self._load_machines(virtual_machines)
else:
# get all VMs within the subscription
try:
virtual_machines = self._compute_client.virtual_machines.list_all()
except Exception as exc:
sys.exit("Error: fetching virtual machines - {0}".format(str(exc)))
if self._args.host or self.tags or self.locations:
selected_machines = self._selected_machines(virtual_machines)
self._load_machines(selected_machines)
else:
self._load_machines(virtual_machines)
def _load_machines(self, machines):
for machine in machines:
id_dict = azure_id_to_dict(machine.id)
#TODO - The API is returning an ID value containing resource group name in ALL CAPS. If/when it gets
# fixed, we should remove the .lower(). Opened Issue
# #574: https://github.com/Azure/azure-sdk-for-python/issues/574
resource_group = id_dict['resourceGroups'].lower()
if self.group_by_security_group:
self._get_security_groups(resource_group)
host_vars = dict(
ansible_host=None,
private_ip=None,
private_ip_alloc_method=None,
public_ip=None,
public_ip_name=None,
public_ip_id=None,
public_ip_alloc_method=None,
fqdn=None,
location=machine.location,
name=machine.name,
type=machine.type,
id=machine.id,
tags=machine.tags,
network_interface_id=None,
network_interface=None,
resource_group=resource_group,
mac_address=None,
plan=(machine.plan.name if machine.plan else None),
virtual_machine_size=machine.hardware_profile.vm_size,
computer_name=machine.os_profile.computer_name,
provisioning_state=machine.provisioning_state,
)
host_vars['os_disk'] = dict(
name=machine.storage_profile.os_disk.name,
operating_system_type=machine.storage_profile.os_disk.os_type.value
)
if self.include_powerstate:
host_vars['powerstate'] = self._get_powerstate(resource_group, machine.name)
if machine.storage_profile.image_reference:
host_vars['image'] = dict(
offer=machine.storage_profile.image_reference.offer,
publisher=machine.storage_profile.image_reference.publisher,
sku=machine.storage_profile.image_reference.sku,
version=machine.storage_profile.image_reference.version
)
# Add windows details
if machine.os_profile.windows_configuration is not None:
host_vars['windows_auto_updates_enabled'] = \
machine.os_profile.windows_configuration.enable_automatic_updates
host_vars['windows_timezone'] = machine.os_profile.windows_configuration.time_zone
host_vars['windows_rm'] = None
if machine.os_profile.windows_configuration.win_rm is not None:
host_vars['windows_rm'] = dict(listeners=None)
if machine.os_profile.windows_configuration.win_rm.listeners is not None:
host_vars['windows_rm']['listeners'] = []
for listener in machine.os_profile.windows_configuration.win_rm.listeners:
host_vars['windows_rm']['listeners'].append(dict(protocol=listener.protocol,
certificate_url=listener.certificate_url))
for interface in machine.network_profile.network_interfaces:
interface_reference = self._parse_ref_id(interface.id)
network_interface = self._network_client.network_interfaces.get(
interface_reference['resourceGroups'],
interface_reference['networkInterfaces'])
if network_interface.primary:
if self.group_by_security_group and \
self._security_groups[resource_group].get(network_interface.id, None):
host_vars['security_group'] = \
self._security_groups[resource_group][network_interface.id]['name']
host_vars['security_group_id'] = \
self._security_groups[resource_group][network_interface.id]['id']
host_vars['network_interface'] = network_interface.name
host_vars['network_interface_id'] = network_interface.id
host_vars['mac_address'] = network_interface.mac_address
for ip_config in network_interface.ip_configurations:
host_vars['private_ip'] = ip_config.private_ip_address
host_vars['private_ip_alloc_method'] = ip_config.private_ip_allocation_method
if ip_config.public_ip_address:
public_ip_reference = self._parse_ref_id(ip_config.public_ip_address.id)
public_ip_address = self._network_client.public_ip_addresses.get(
public_ip_reference['resourceGroups'],
public_ip_reference['publicIPAddresses'])
host_vars['ansible_host'] = public_ip_address.ip_address
host_vars['public_ip'] = public_ip_address.ip_address
host_vars['public_ip_name'] = public_ip_address.name
host_vars['public_ip_alloc_method'] = public_ip_address.public_ip_allocation_method
host_vars['public_ip_id'] = public_ip_address.id
if public_ip_address.dns_settings:
host_vars['fqdn'] = public_ip_address.dns_settings.fqdn
self._add_host(host_vars)
def _selected_machines(self, virtual_machines):
selected_machines = []
for machine in virtual_machines:
if self._args.host and self._args.host == machine.name:
selected_machines.append(machine)
if self.tags and self._tags_match(machine.tags, self.tags):
selected_machines.append(machine)
if self.locations and machine.location in self.locations:
selected_machines.append(machine)
return selected_machines
def _get_security_groups(self, resource_group):
''' For a given resource_group build a mapping of network_interface.id to security_group name '''
if not self._security_groups:
self._security_groups = dict()
if not self._security_groups.get(resource_group):
self._security_groups[resource_group] = dict()
for group in self._network_client.network_security_groups.list(resource_group):
if group.network_interfaces:
for interface in group.network_interfaces:
self._security_groups[resource_group][interface.id] = dict(
name=group.name,
id=group.id
)
def _get_powerstate(self, resource_group, name):
try:
vm = self._compute_client.virtual_machines.get(resource_group,
name,
expand='instanceview')
except Exception as exc:
sys.exit("Error: fetching instanceview for host {0} - {1}".format(name, str(exc)))
return next((s.code.replace('PowerState/', '')
for s in vm.instance_view.statuses if s.code.startswith('PowerState')), None)
def _add_host(self, vars):
host_name = self._to_safe(vars['name'])
resource_group = self._to_safe(vars['resource_group'])
security_group = None
if vars.get('security_group'):
security_group = self._to_safe(vars['security_group'])
if self.group_by_resource_group:
if not self._inventory.get(resource_group):
self._inventory[resource_group] = []
self._inventory[resource_group].append(host_name)
if self.group_by_location:
if not self._inventory.get(vars['location']):
self._inventory[vars['location']] = []
self._inventory[vars['location']].append(host_name)
if self.group_by_security_group and security_group:
if not self._inventory.get(security_group):
self._inventory[security_group] = []
self._inventory[security_group].append(host_name)
self._inventory['_meta']['hostvars'][host_name] = vars
self._inventory['azure'].append(host_name)
if self.group_by_tag and vars.get('tags'):
for key, value in vars['tags'].items():
safe_key = self._to_safe(key)
safe_value = safe_key + '_' + self._to_safe(value)
if not self._inventory.get(safe_key):
self._inventory[safe_key] = []
if not self._inventory.get(safe_value):
self._inventory[safe_value] = []
self._inventory[safe_key].append(host_name)
self._inventory[safe_value].append(host_name)
def _json_format_dict(self, pretty=False):
# convert inventory to json
if pretty:
return json.dumps(self._inventory, sort_keys=True, indent=2)
else:
return json.dumps(self._inventory)
def _get_settings(self):
    # Prefer settings from the companion .ini file; only when no file
    # settings exist at all do we fall back to environment variables.
    file_settings = self._load_settings()
    if file_settings:
        for key in AZURE_CONFIG_SETTINGS:
            if key in ('resource_groups', 'tags', 'locations') and file_settings.get(key):
                # List-valued settings are stored as comma-separated strings.
                setattr(self, key, file_settings.get(key).split(','))
            elif file_settings.get(key):
                setattr(self, key, self._to_boolean(file_settings[key]))
        return
    env_settings = self._get_env_settings()
    for key in AZURE_CONFIG_SETTINGS:
        if key in ('resource_groups', 'tags', 'locations') and env_settings.get(key):
            setattr(self, key, env_settings.get(key).split(','))
        elif env_settings.get(key, None) is not None:
            # NOTE: unlike the file branch, an empty-string env var still
            # counts as set here (matches the original behavior).
            setattr(self, key, self._to_boolean(env_settings[key]))
def _parse_ref_id(self, reference):
response = {}
keys = reference.strip('/').split('/')
for index in range(len(keys)):
if index < len(keys) - 1 and index % 2 == 0:
response[keys[index]] = keys[index + 1]
return response
def _to_boolean(self, value):
if value in ['Yes', 'yes', 1, 'True', 'true', True]:
result = True
elif value in ['No', 'no', 0, 'False', 'false', False]:
result = False
else:
result = True
return result
def _get_env_settings(self):
    ''' Read every configured setting from its environment variable (None when unset). '''
    return dict((attribute, os.environ.get(env_variable, None))
                for attribute, env_variable in AZURE_CONFIG_SETTINGS.items())
def _load_settings(self):
    ''' Load settings from the companion .ini file, if one exists.

    The file lives next to this script (or at $AZURE_INI_PATH) and holds
    an [azure] section. Returns a dict of the values found, or None when
    the parser could not be created at all.
    '''
    basename = os.path.splitext(os.path.basename(__file__))[0]
    default_path = os.path.join(os.path.dirname(__file__), (basename + '.ini'))
    path = os.path.expanduser(os.path.expandvars(os.environ.get('AZURE_INI_PATH', default_path)))
    config = None
    settings = None
    try:
        config = ConfigParser.ConfigParser()
        config.read(path)
    except Exception:
        # A missing or unparsable ini file is not fatal; callers fall back
        # to environment variables. (Was a bare `except:`, which also
        # swallowed SystemExit and KeyboardInterrupt.)
        pass
    if config is not None:
        settings = dict()
        for key in AZURE_CONFIG_SETTINGS:
            try:
                settings[key] = config.get('azure', key, raw=True)
            except Exception:
                # Key absent from the [azure] section - simply skip it.
                pass
    return settings
def _tags_match(self, tag_obj, tag_args):
'''
Return True if the tags object from a VM contains the requested tag values.
:param tag_obj: Dictionary of string:string pairs
:param tag_args: List of strings in the form key=value
:return: boolean
'''
if not tag_obj:
return False
matches = 0
for arg in tag_args:
arg_key = arg
arg_value = None
if re.search(r':', arg):
arg_key, arg_value = arg.split(':')
if arg_value and tag_obj.get(arg_key, None) == arg_value:
matches += 1
elif not arg_value and tag_obj.get(arg_key, None) is not None:
matches += 1
if matches == len(tag_args):
return True
return False
def _to_safe(self, word):
''' Converts 'bad' characters in a string to underscores so they can be used as Ansible groups '''
regex = "[^A-Za-z0-9\_"
if not self.replace_dash_in_groups:
regex += "\-"
return re.sub(regex + "]", "_", word)
def main():
    """Entry point: validate the Azure SDK installation, then build the inventory."""
    # The SDK-presence check must come first: azure_compute_version is only
    # meaningful when the import above succeeded.
    if not HAS_AZURE:
        sys.exit("The Azure python sdk is not installed (try 'pip install azure>=2.0.0rc5') - {0}".format(HAS_AZURE_EXC))
    if LooseVersion(azure_compute_version) < LooseVersion(AZURE_MIN_VERSION):
        sys.exit("Expecting azure.mgmt.compute.__version__ to be {0}. Found version {1} "
                 "Do you have Azure >= 2.0.0rc5 installed?".format(AZURE_MIN_VERSION, azure_compute_version))
    AzureInventory()


if __name__ == '__main__':
    main()
|
crafty78/ansible
|
contrib/inventory/azure_rm.py
|
Python
|
gpl-3.0
| 32,305
|
[
"Galaxy"
] |
81fddfcf0d7a3fd538f5467d702d6e091bfdfa396df3043b1a3e2a8260c77183
|
import time
import argparse
import numpy as np
import tensorflow as tf
import reader
import model
import pickle
import os
def parsing_args():
    """Build and parse the command-line arguments for training/testing."""
    parser = argparse.ArgumentParser()
    # (flag, type, default, help) for every supported option.
    option_table = [
        ('--mode', str, 'train', 'train or test'),
        ('--init_from', str, None, 'init model path'),
        ('--init_method', str, None, 'lstm/att init from lstm or full model'),
        ('--word_vector_path', str, None, 'pretrain word2vector model'),
        ('--data_dir', str, None, 'data directory containing train valid test data'),
        ('--save', str, None, 'directory to store checkpointed models'),
        ('--model_result', str, None, 'save model result'),
        ('--att_file', str, None, 'file storing attention weights for analysis'),
        ('--rnn_size', int, 300, 'size of LSTM internal state'),
        ('--emb_size', int, 300, 'word embedding size'),
        ('--num_layers', int, 1, 'number of layers in the RNN'),
        ('--window_size', int, 5, 'history window size'),
        ('--batch_size', int, 20, 'minibatch size'),
        ('--max_seq_length', int, 60, 'max number of timesteps to unroll during BPTT'),
        ('--min_seq_length', int, 0, 'min number of timesteps to unroll during BPTT'),
        ('--max_epochs', int, 50, 'number of full passes through the training data'),
        ('--dropout', float, 1, 'dropout for regularization, neuron keep probabitity. 1 = no dropout'),
        ('--max_grad_norm', float, 5., 'clip gradients at this value'),
        ('--entropy_reg', float, 0.1, 'entropy regulizar'),
        ('--learning_rate', float, 1.0, 'learning rate'),
        ('--init_scale', float, 0.1, 'initialization scale'),
        ('--decay_rate', float, 0.5, 'decay rate'),
        ('--learning_rate_decay_after', int, 10, 'in number of epochs, when to start decaying the learning rate'),
        ('--gpu_id', float, 0, '% of gpu memory to be allocated to this process. Default is 66.6%'),
        ('--print_every', int, 200, 'how many steps/minibatches between printing out the loss'),
    ]
    for flag, arg_type, default, help_text in option_table:
        parser.add_argument(flag, type=arg_type, default=default, help=help_text)
    return parser.parse_args()
def run_epoch_training(sess, all_op, data, lr, dropout, print_every):
    # Run one full pass over the training data, applying the train op on
    # every batch and accumulating the summed label loss so a whole-epoch
    # training perplexity can be reported. (Python 2 print statements.)
    start_time = time.time()
    nbatch = data.get_batch_number()
    total_words_num = 0
    total_cost = 0
    fetches = {}
    fetches['train'] = all_op['train']
    fetches['total_label_loss'] = all_op['total_label_loss']
    for idx in range(nbatch):
        x, y = data.get_data(idx)
        feed_dict = {
            all_op['input_data']:x,
            all_op['labels']:y,
            all_op['learning_rate']:lr,
            all_op['dropout']:dropout
        }
        result = sess.run(fetches,feed_dict=feed_dict)
        total_cost += result['total_label_loss']
        # x.size is the number of tokens in the batch (numpy array).
        total_words_num += x.size
        if (idx+1)%print_every == 0:
            # Progress report: per-batch perplexity every print_every steps.
            print (idx+1), '/', nbatch, ': ', 'perplexity: ', np.exp(result['total_label_loss']/x.size)
    # Perplexity = exp(total summed loss / total token count).
    total_perplexity = np.exp(total_cost/total_words_num)
    print 'training perplexity in this epoch: ' , total_perplexity
    print 'epoch training time: ', (time.time() - start_time)
    return total_perplexity
def evaluating(sess, all_op, data):
    """Compute model perplexity over one dataset without any training step."""
    word_count = 0
    loss_sum = 0
    fetches = {'total_label_loss': all_op['total_label_loss']}
    for batch_idx in range(data.get_batch_number()):
        inputs, targets = data.get_data(batch_idx)
        # Keep-probability of 1 disables dropout during evaluation.
        feed = {
            all_op['input_data']: inputs,
            all_op['labels']: targets,
            all_op['dropout']: 1
        }
        outcome = sess.run(fetches, feed_dict=feed)
        loss_sum += outcome['total_label_loss']
        word_count += inputs.size
    return np.exp(loss_sum / word_count)
def train(args):
#read data
train_data = reader.data(data_dir=args.data_dir,
batch_size=args.batch_size,
min_seq_length=args.min_seq_length,
max_seq_length=args.max_seq_length,
min_count=0)
train_data.load('train')
valid_data = reader.data(data_dir=args.data_dir,
batch_size=args.batch_size,
min_seq_length=args.min_seq_length,
max_seq_length=args.max_seq_length,
min_count=0)
valid_data.load('valid')
test_data = reader.data(data_dir=args.data_dir,
batch_size=args.batch_size,
min_seq_length=args.min_seq_length,
max_seq_length=args.max_seq_length,
min_count=0)
test_data.load('test')
#load model
if args.init_from:
if not os.path.isfile(args.init_from):
print 'init file not found'
os.exit()
#the placeholder need for training
input_data_ph = tf.placeholder(tf.int32, [None, None])
labels_ph = tf.placeholder(tf.int32, [None, None])
learning_rate_ph = tf.placeholder(tf.float32, [])
dropout_ph = tf.placeholder(tf.float32, [])
#build model
vocab_size=train_data.vocab_size
default_initializer = tf.random_uniform_initializer(-args.init_scale,
args.init_scale)
with tf.variable_scope('model',initializer=default_initializer):
logits, pretrain_list, output_linear_list = model.inference(
input_x=input_data_ph,
embedding_dim=args.emb_size,
lstm_hidden_dim_1=args.rnn_size,
vocab_size=vocab_size,
dropout=dropout_ph,
window_size=args.window_size)
total_label_loss, loss = model.loss(logits=logits, labels=labels_ph)
train_op = model.training(loss, learning_rate_ph, args.max_grad_norm)
all_op = {'input_data':input_data_ph,
'labels':labels_ph,
'learning_rate':learning_rate_ph,
'dropout':dropout_ph,
'total_label_loss':total_label_loss,
'train':train_op}
#pretrain
if args.init_from:
if args.init_method == 'lstm':
with tf.variable_scope('model'):
with tf.variable_scope('output_lstm1_linear'):
lstm_linear_W = tf.get_variable('W', [args.rnn_size, vocab_size])
lstm_linear_b = tf.get_variable('b', [vocab_size], initializer=tf.constant_initializer(0.0))
pretrain_list += [lstm_linear_W,lstm_linear_b]
init_att_W = output_linear_list[0].assign(tf.concat(0,[lstm_linear_W,lstm_linear_W]))
init_att_b = output_linear_list[1].assign(lstm_linear_b)
saver_restore = tf.train.Saver(pretrain_list)
else:
saver_restore = tf.train.Saver()
#pretrain word embedding
if args.word_vector_path:
emb_matrix = pretrain_list[0]
pretrain_emb = emb_matrix.assign(train_data.generate_word_embedding_matrix(args.word_vector_path))
global_step = tf.Variable(0,name='global_step',trainable=False)
init = tf.initialize_all_variables()
saver_save = tf.train.Saver()
training_process_perplexity = {'train':[],'valid':[],'test':[],'best_val_test':[]}
file_name = 'rnn_size' + str(args.rnn_size)
with tf.Session() as sess:
sess.run(init)
#pretrain word embedding
if args.word_vector_path:
sess.run(pretrain_emb)
if args.init_from:
if args.init_method == 'lstm':
saver_restore.restore(sess, args.init_from)
sess.run(init_att_W)
sess.run(init_att_b)
else:
saver_restore.restore(sess, args.init_from)
#training
best_val_perplexity = np.inf
best_val_test_perplexity = np.inf
for i in range(args.max_epochs):
lr_decay = args.decay_rate ** max(i + 1 - args.learning_rate_decay_after, 0.0)
learning_rate = args.learning_rate * lr_decay
print("Epoch: %d Learning rate: %.3f" % (i + 1, learning_rate))
#training
training_perplexity = run_epoch_training(sess, all_op, train_data,
learning_rate, args.dropout, args.print_every)
print("Epoch: %d Train Perplexity: %.3f" % (i + 1, training_perplexity))
test_training_perplexity = evaluating(sess, all_op, train_data)
print("Epoch: %d test training Perplexity: %.3f" % (i + 1,
test_training_perplexity))
#validation
val_perplexity = evaluating(sess, all_op, valid_data)
print("Epoch: %d validation Perplexity: %.3f" % (i + 1, val_perplexity))
#peeking testing
test_perplexity = evaluating(sess, all_op, test_data)
print("Epoch: %d peeking testing Perplexity: %.3f" % (i + 1, test_perplexity))
if val_perplexity < best_val_perplexity :
best_val_perplexity = val_perplexity
best_val_test_perplexity = test_perplexity
#save
saver_save.save(sess, os.path.join(args.save,file_name), global_step=global_step)
print("So far best val testing Perplexity: %.3f" % (best_val_test_perplexity))
training_process_perplexity['train'].append(test_training_perplexity)
training_process_perplexity['valid'].append(val_perplexity)
training_process_perplexity['test'].append(test_perplexity)
training_process_perplexity['best_val_test'].append(best_val_test_perplexity)
with open(os.path.join(args.model_result,file_name),'wb') as f:
pickle.dump(training_process_perplexity, f)
def test(args):
test_data = reader.data(data_dir=args.data_dir,
batch_size=args.batch_size,
min_seq_length=args.min_seq_length,
max_seq_length=args.max_seq_length,
min_count=args.min_count)
test_data.load('test')
#load model
if args.init_from:
if not os.path.isfile(args.init_from):
print 'init file not found'
os.exit()
#the placeholder need for training
input_data_ph = tf.placeholder(tf.int32, [None, None])
labels_ph = tf.placeholder(tf.int32, [None, None])
learning_rate_ph = tf.placeholder(tf.float32, [])
dropout_ph = tf.placeholder(tf.float32, [])
#build model
vocab_size=test_data.vocab_size
logits, pretrain_list, output_linear_list = model.inference(input_x=input_data_ph,
embedding_dim=args.emb_size,
lstm_hidden_dim_1=args.rnn_size,
vocab_size=vocab_size,
dropout=dropout_ph)
total_label_loss, loss = model.loss(logits=logits, labels=labels_ph)
all_op = {'input_data':input_data_ph,
'labels':labels_ph,
'learning_rate':learning_rate_ph,
'dropout':dropout_ph,
'total_label_loss':total_label_loss}
#pretrain
if args.init_from:
saver_restore = tf.train.Saver()
#load model
init = tf.initialize_all_variables()
with tf.Session() as sess:
sess.run(init)
saver_restore.restore(sess, args.init_from)
test_perplexity = evaluating(sess, all_op, test_data)
print ("Testing Perplexity: %.3f" % (test_perplexity))
if __name__ == "__main__":
    # Dispatch on --mode: anything other than 'train' runs evaluation.
    cli_args = parsing_args()
    if cli_args.mode == 'train':
        train(cli_args)
    else:
        test(cli_args)
|
darongliu/Lstm_Turing_LM_tf
|
exp_merge_method/LSTM+att_fix+matrix_concat/run.py
|
Python
|
mit
| 13,336
|
[
"NEURON"
] |
ffdd850a030e80eb43edd58a55b923f8d518a9030d1565d97033233be4703d12
|
# -*- coding: utf-8 -*-
#
# hl_api_models.py
#
# This file is part of NEST.
#
# Copyright (C) 2004 The NEST Initiative
#
# NEST is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 2 of the License, or
# (at your option) any later version.
#
# NEST is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with NEST. If not, see <http://www.gnu.org/licenses/>.
"""
Functions for model handling
"""
from ..ll_api import *
from .hl_api_helper import *
# Public API of this module, re-exported by the nest package.
__all__ = [
    'ConnectionRules',
    'CopyModel',
    'GetDefaults',
    'Models',
    'SetDefaults',
]
@check_stack
def Models(mtype="all", sel=None):
    """Return a tuple of model names, sorted by name.

    All available models are neurons, devices and synapses.

    Parameters
    ----------
    mtype : str, optional
        Use ``mtype='nodes'`` to only see neuron and device models,
        or ``mtype='synapses'`` to only see synapse models.
    sel : str, optional
        String used to filter the result list and only return models
        containing it.

    Returns
    -------
    tuple
        Available model names

    Raises
    ------
    ValueError
        If `mtype` is not one of ``'all'``, ``'nodes'`` or ``'synapses'``.

    Notes
    -----
    - Synapse model names ending with ``'_hpc'`` provide minimal memory
      requirements by using thread-local target neuron IDs and fixing
      the ``'rport'`` to 0.
    - Synapse model names ending with ``'_lbl'`` allow to assign an individual
      integer label (``'synapse_label'``) to created synapses at the cost
      of increased memory requirements.
    """
    if mtype not in ("all", "nodes", "synapses"):
        raise ValueError("type has to be one of 'all', 'nodes' or 'synapses'")

    models = []
    if mtype in ("all", "nodes"):
        # Node (neuron and device) models live in the SLI 'modeldict'.
        sr("modeldict")
        models += spp().keys()
    if mtype in ("all", "synapses"):
        sr("synapsedict")
        models += spp().keys()

    if sel is not None:
        # Substring filter ('sel in x' replaces the old x.find(sel) >= 0).
        models = [x for x in models if sel in x]

    models.sort()
    return tuple(models)
@check_stack
def ConnectionRules():
    """Return a tuple of all available connection rules, sorted by name.

    Returns
    -------
    tuple
        Available connection rules
    """
    sr('connruledict')
    return tuple(sorted(spp().keys()))
@check_stack
def SetDefaults(model, params, val=None):
    """Set the default parameter values of the given model.

    New default values are used for all subsequently created instances
    of the model.

    Parameters
    ----------
    model : str
        Name of the model
    params : str or dict
        Dictionary of new default parameter values
    val : str, optional
        If given, `params` has to be the name of a model property.
    """
    # A (property-name, value) pair is converted into a one-entry dict
    # before being pushed onto the SLI stack.
    if val is not None and is_literal(params):
        params = {params: val}
    sps(params)
    sr('/{0} exch SetDefaults'.format(model))
@check_stack
def GetDefaults(model, keys=None, output=''):
    """Return default parameters of the given model, specified by a string.

    Parameters
    ----------
    model : str
        Name of the model
    keys : str or list, optional
        String or a list of strings naming model properties. `GetDefaults` then
        returns a single value or a list of values belonging to the keys
        given.
    output : str, optional
        Whether the returned data should be returned in JSON format
        (``output='json'``). Default is ''.

    Returns
    -------
    dict
        A dictionary of default parameters.
    type
        If keys is a string, the corresponding default parameter is returned.
    list
        If keys is a list of strings, a list of corresponding default parameters
        is returned.
    str :
        If `output` is ``json``, returns parameters in JSON format.

    Raises
    ------
    TypeError
        If `keys` is neither a string nor an iterable of strings.
    """
    if keys is None:
        cmd = "/{0} GetDefaults".format(model)
    elif is_literal(keys):
        cmd = '/{0} GetDefaults /{1} get'.format(model, keys)
    elif is_iterable(keys):
        # Build an SLI expression mapping each requested key to its value.
        keys_str = " ".join("/{0}".format(x) for x in keys)
        cmd = "/{0} GetDefaults [ {1} ] {{ 1 index exch get }}"\
            .format(model, keys_str) + " Map exch pop"
    else:
        raise TypeError("keys should be either a string or an iterable")

    sr(cmd)
    result = spp()

    if output == 'json':
        result = to_json(result)

    return result
@check_stack
def CopyModel(existing, new, params=None):
    """Create a new model by copying an existing one.

    Parameters
    ----------
    existing : str
        Name of existing model
    new : str
        Name of the copied model
    params : dict, optional
        Default parameters assigned to the copy. Not provided parameters are
        taken from the existing model.
    """
    model_deprecation_warning(existing)

    if params is None:
        sr("/%s /%s CopyModel" % (existing, new))
    else:
        # Push the overrides, then roll them into the CopyModel call.
        sps(params)
        sr("/%s /%s 3 2 roll CopyModel" % (existing, new))
|
hakonsbm/nest-simulator
|
pynest/nest/lib/hl_api_models.py
|
Python
|
gpl-2.0
| 5,333
|
[
"NEURON"
] |
a72f930125c36ed1e4e7ddc1d206fa0481742dddf46e7ed0f03f99f8cda71554
|
from django.conf import settings
from django.db import models
from reviews.models import Review, ReviewSegment
"""
These models extend the basic review models with new fields. You do not need to
do this, but you can do it.
Take a look at the forms.py, too.
Also visit the __init__.py.
This all works only if the REVIEW_APP value in settings.py is set to this app.
"""
class TestReview(Review):
    # Demo extension of the base Review model with one app-specific field.
    price = models.IntegerField()
class TestReviewSegment(ReviewSegment):
    # Demo extension of the base ReviewSegment model with a per-segment title.
    title = models.CharField(max_length=200)
"""
These are test models. They are used to show how reviews are assigned to models.
Please look at the "entry.html" to see how this works.
Like the comments you need to combine your models and the review system in the
templates.
The models itself are not important, we have two to show that the system can
do this.
"""
class Car(models.Model):
    """Demo model used to show how reviews attach to arbitrary models."""
    # PEP 8: no spaces around '=' in keyword arguments.
    name = models.CharField(max_length=100)

    def __unicode__(self):
        return self.name

    @models.permalink
    def get_absolute_url(self):
        return ('car_detail', [str(self.id)])
class Restaurant(models.Model):
    """Second demo model, showing reviews can be attached to several models."""
    # PEP 8: no spaces around '=' in keyword arguments.
    name = models.CharField(max_length=100)

    def __unicode__(self):
        return self.name

    @models.permalink
    def get_absolute_url(self):
        return ('restaurant_detail', [str(self.id)])
|
shockflash/reviews
|
testapp/testdata/models.py
|
Python
|
bsd-3-clause
| 1,308
|
[
"VisIt"
] |
958937a55a0870d954622b9dd9b6293f6a20cbe73db5d878605fd3a17ca75e73
|
# Tests writing amber format with amber parameters
# Special isopeptide bond between two residues
import os
from vmd import atomsel, molecule
dir = os.path.dirname(__file__)
#==============================================================================
def test_amber_custom_residues(tmpdir):
    """Write an AMBER prmtop/inpcrd for a system with a custom isopeptide
    bond (GLX/LYX residues) and verify residues, atom types and the link."""
    from Dabble.param import AmberWriter

    # Build the parameterized system in a scratch directory.
    out_dir = str(tmpdir.mkdir("3nob_custom"))
    mol_id = molecule.load("mae", os.path.join(dir, "prepped.mae"))
    writer = AmberWriter(mol_id, tmp_dir=out_dir, forcefield="amber", hmr=False,
                         extra_topos=[os.path.join(dir, "glx.off"),
                                      os.path.join(dir, "lyx.off")],
                         extra_params=[os.path.join(dir, "join.frcmod"),
                                       os.path.join(dir, "analogies.frcmod")],
                         override_defaults=False)
    writer.write(os.path.join(out_dir, "test"))

    # Reload what was written and make it the top molecule.
    result_id = molecule.load("parm7", os.path.join(out_dir, "test.prmtop"),
                              "rst7", os.path.join(out_dir, "test.inpcrd"))
    molecule.set_top(result_id)

    # Both custom residues made it through with the expected atom counts.
    assert len(atomsel("resname GLX")) == 7
    assert len(atomsel("resname LYX")) == 20

    # Custom residues carry gaff2 atom types...
    assert "n" in atomsel("resname LYX").get("type")
    assert "n2" in atomsel("resname GLX").get("type")
    # ...while standard residues keep ff14SB atom types.
    assert "N" in atomsel("resname LYS").get("type")
    assert "N" in atomsel("resname GLY").get("type")

    # The isopeptide bond must link a LYX atom to a GLX atom.
    linked = []
    for bonded in atomsel("resname LYX").bonds:
        linked.extend(bonded)
    assert any(idx in linked for idx in atomsel("resname GLX").get("index"))
#==============================================================================
|
drorlab/dabble
|
test/3NOB/test_3nob.py
|
Python
|
gpl-2.0
| 1,849
|
[
"Amber",
"VMD"
] |
005fe33e2022f55cacd3041aba3cd42e067ae4b6f8a72899bc4bc4f29be0ef7a
|
# Authors:
# Martin Kosek <mkosek@redhat.com>
# Pavel Zuna <pzuna@redhat.com>
#
# Copyright (C) 2010 Red Hat
# see file 'COPYING' for use and warranty information
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
from __future__ import absolute_import
import netaddr
import time
import re
import binascii
import dns.name
import dns.exception
import dns.rdatatype
import dns.resolver
import encodings.idna
from ipalib.request import context
from ipalib import api, errors, output
from ipalib import Command
from ipalib.capabilities import VERSION_WITHOUT_CAPABILITIES
from ipalib.parameters import (Flag, Bool, Int, Decimal, Str, StrEnum, Any,
DeprecatedParam, DNSNameParam)
from ipalib.plugable import Registry
from ipalib.plugins.baseldap import *
from ipalib import _, ngettext
from ipalib import messages
from ipalib.util import (normalize_zonemgr,
get_dns_forward_zone_update_policy,
get_dns_reverse_zone_update_policy,
get_reverse_zone_default, REVERSE_DNS_ZONES,
normalize_zone, validate_dnssec_global_forwarder,
DNSSECSignatureMissingError, UnresolvableRecordError,
EDNS0UnsupportedError, DNSSECValidationError,
validate_dnssec_zone_forwarder_step1,
validate_dnssec_zone_forwarder_step2)
from ipapython.ipautil import CheckedIPAddress, is_host_resolvable
from ipapython.dnsutil import DNSName
__doc__ = _("""
Domain Name System (DNS)
""") + _("""
Manage DNS zone and resource records.
""") + _("""
SUPPORTED ZONE TYPES
* Master zone (dnszone-*), contains authoritative data.
* Forward zone (dnsforwardzone-*), forwards queries to configured forwarders
(a set of DNS servers).
""") + _("""
USING STRUCTURED PER-TYPE OPTIONS
""") + _("""
There are many structured DNS RR types where DNS data stored in LDAP server
is not just a scalar value, for example an IP address or a domain name, but
a data structure which may be often complex. A good example is a LOC record
[RFC1876] which consists of many mandatory and optional parts (degrees,
minutes, seconds of latitude and longitude, altitude or precision).
""") + _("""
It may be difficult to manipulate such DNS records without making a mistake
and entering an invalid value. DNS module provides an abstraction over these
raw records and allows to manipulate each RR type with specific options. For
each supported RR type, DNS module provides a standard option to manipulate
a raw records with format --<rrtype>-rec, e.g. --mx-rec, and special options
for every part of the RR structure with format --<rrtype>-<partname>, e.g.
--mx-preference and --mx-exchanger.
""") + _("""
When adding a record, either RR specific options or standard option for a raw
value can be used, they just should not be combined in one add operation. When
modifying an existing entry, new RR specific options can be used to change
one part of a DNS record, where the standard option for raw value is used
to specify the modified value. The following example demonstrates
a modification of MX record preference from 0 to 1 in a record without
modifying the exchanger:
ipa dnsrecord-mod --mx-rec="0 mx.example.com." --mx-preference=1
""") + _("""
EXAMPLES:
""") + _("""
Add new zone:
ipa dnszone-add example.com --admin-email=admin@example.com
""") + _("""
Add system permission that can be used for per-zone privilege delegation:
ipa dnszone-add-permission example.com
""") + _("""
Modify the zone to allow dynamic updates for hosts own records in realm EXAMPLE.COM:
ipa dnszone-mod example.com --dynamic-update=TRUE
""") + _("""
This is the equivalent of:
ipa dnszone-mod example.com --dynamic-update=TRUE \\
--update-policy="grant EXAMPLE.COM krb5-self * A; grant EXAMPLE.COM krb5-self * AAAA; grant EXAMPLE.COM krb5-self * SSHFP;"
""") + _("""
Modify the zone to allow zone transfers for local network only:
ipa dnszone-mod example.com --allow-transfer=192.0.2.0/24
""") + _("""
Add new reverse zone specified by network IP address:
ipa dnszone-add --name-from-ip=192.0.2.0/24
""") + _("""
Add second nameserver for example.com:
ipa dnsrecord-add example.com @ --ns-rec=nameserver2.example.com
""") + _("""
Add a mail server for example.com:
ipa dnsrecord-add example.com @ --mx-rec="10 mail1"
""") + _("""
Add another record using MX record specific options:
ipa dnsrecord-add example.com @ --mx-preference=20 --mx-exchanger=mail2
""") + _("""
Add another record using interactive mode (started when dnsrecord-add, dnsrecord-mod,
or dnsrecord-del are executed with no options):
ipa dnsrecord-add example.com @
Please choose a type of DNS resource record to be added
The most common types for this type of zone are: NS, MX, LOC
DNS resource record type: MX
MX Preference: 30
MX Exchanger: mail3
Record name: example.com
MX record: 10 mail1, 20 mail2, 30 mail3
NS record: nameserver.example.com., nameserver2.example.com.
""") + _("""
Delete previously added nameserver from example.com:
ipa dnsrecord-del example.com @ --ns-rec=nameserver2.example.com.
""") + _("""
Add LOC record for example.com:
ipa dnsrecord-add example.com @ --loc-rec="49 11 42.4 N 16 36 29.6 E 227.64m"
""") + _("""
Add new A record for www.example.com. Create a reverse record in appropriate
reverse zone as well. In this case a PTR record "2" pointing to www.example.com
will be created in zone 2.0.192.in-addr.arpa.
ipa dnsrecord-add example.com www --a-rec=192.0.2.2 --a-create-reverse
""") + _("""
Add new PTR record for www.example.com
ipa dnsrecord-add 2.0.192.in-addr.arpa. 2 --ptr-rec=www.example.com.
""") + _("""
Add new SRV records for LDAP servers. Three quarters of the requests
should go to fast.example.com, one quarter to slow.example.com. If neither
is available, switch to backup.example.com.
ipa dnsrecord-add example.com _ldap._tcp --srv-rec="0 3 389 fast.example.com"
ipa dnsrecord-add example.com _ldap._tcp --srv-rec="0 1 389 slow.example.com"
ipa dnsrecord-add example.com _ldap._tcp --srv-rec="1 1 389 backup.example.com"
""") + _("""
The interactive mode can be used for easy modification:
ipa dnsrecord-mod example.com _ldap._tcp
No option to modify specific record provided.
Current DNS record contents:
SRV record: 0 3 389 fast.example.com, 0 1 389 slow.example.com, 1 1 389 backup.example.com
Modify SRV record '0 3 389 fast.example.com'? Yes/No (default No):
Modify SRV record '0 1 389 slow.example.com'? Yes/No (default No): y
SRV Priority [0]: (keep the default value)
SRV Weight [1]: 2 (modified value)
SRV Port [389]: (keep the default value)
SRV Target [slow.example.com]: (keep the default value)
1 SRV record skipped. Only one value per DNS record type can be modified at one time.
Record name: _ldap._tcp
SRV record: 0 3 389 fast.example.com, 1 1 389 backup.example.com, 0 2 389 slow.example.com
""") + _("""
After this modification, three fifths of the requests should go to
fast.example.com and two fifths to slow.example.com.
""") + _("""
An example of the interactive mode for dnsrecord-del command:
ipa dnsrecord-del example.com www
No option to delete specific record provided.
Delete all? Yes/No (default No): (do not delete all records)
Current DNS record contents:
A record: 192.0.2.2, 192.0.2.3
Delete A record '192.0.2.2'? Yes/No (default No):
Delete A record '192.0.2.3'? Yes/No (default No): y
Record name: www
A record: 192.0.2.2 (A record 192.0.2.3 has been deleted)
""") + _("""
Show zone example.com:
ipa dnszone-show example.com
""") + _("""
Find zone with "example" in its domain name:
ipa dnszone-find example
""") + _("""
Find records for resources with "www" in their name in zone example.com:
ipa dnsrecord-find example.com www
""") + _("""
Find A records with value 192.0.2.2 in zone example.com
ipa dnsrecord-find example.com --a-rec=192.0.2.2
""") + _("""
Show records for resource www in zone example.com
ipa dnsrecord-show example.com www
""") + _("""
Delegate zone sub.example to another nameserver:
ipa dnsrecord-add example.com ns.sub --a-rec=203.0.113.1
ipa dnsrecord-add example.com sub --ns-rec=ns.sub.example.com.
""") + _("""
Delete zone example.com with all resource records:
ipa dnszone-del example.com
""") + _("""
If a global forwarder is configured, all queries for which this server is not
authoritative (e.g. sub.example.com) will be routed to the global forwarder.
Global forwarding configuration can be overridden per-zone.
""") + _("""
Semantics of forwarding in IPA matches BIND semantics and depends on the type
of zone:
* Master zone: local BIND replies authoritatively to queries for data in
the given zone (including authoritative NXDOMAIN answers) and forwarding
affects only queries for names below zone cuts (NS records) of locally
served zones.
* Forward zone: forward zone contains no authoritative data. BIND forwards
queries, which cannot be answered from its local cache, to configured
forwarders.
""") + _("""
Semantics of the --forwarder-policy option:
* none - disable forwarding for the given zone.
* first - forward all queries to configured forwarders. If they fail,
do resolution using DNS root servers.
* only - forward all queries to configured forwarders and if they fail,
return failure.
""") + _("""
Disable global forwarding for given sub-tree:
ipa dnszone-mod example.com --forward-policy=none
""") + _("""
This configuration forwards all queries for names outside the example.com
sub-tree to global forwarders. Normal recursive resolution process is used
for names inside the example.com sub-tree (i.e. NS records are followed etc.).
""") + _("""
Forward all requests for the zone external.example.com to another forwarder
using a "first" policy (it will send the queries to the selected forwarder
and if not answered it will use global root servers):
ipa dnsforwardzone-add external.example.com --forward-policy=first \\
--forwarder=203.0.113.1
""") + _("""
Change forward-policy for external.example.com:
ipa dnsforwardzone-mod external.example.com --forward-policy=only
""") + _("""
Show forward zone external.example.com:
ipa dnsforwardzone-show external.example.com
""") + _("""
List all forward zones:
ipa dnsforwardzone-find
""") + _("""
Delete forward zone external.example.com:
ipa dnsforwardzone-del external.example.com
""") + _("""
Resolve a host name to see if it exists (will add default IPA domain
if one is not included):
ipa dns-resolve www.example.com
ipa dns-resolve www
""") + _("""
GLOBAL DNS CONFIGURATION
""") + _("""
DNS configuration passed to command line install script is stored in a local
configuration file on each IPA server where DNS service is configured. These
local settings can be overridden with a common configuration stored in LDAP
server:
""") + _("""
Show global DNS configuration:
ipa dnsconfig-show
""") + _("""
Modify global DNS configuration and set a list of global forwarders:
ipa dnsconfig-mod --forwarder=203.0.113.113
""")
register = Registry()
# supported resource record types
# (keep in sync with bind-dyndb-ldap schema; unsupported ones are exposed
# read-only via UnsupportedDNSRecord subclasses below)
_record_types = (
    u'A', u'AAAA', u'A6', u'AFSDB', u'APL', u'CERT', u'CNAME', u'DHCID', u'DLV',
    u'DNAME', u'DS', u'HIP', u'HINFO', u'IPSECKEY', u'KEY', u'KX', u'LOC',
    u'MD', u'MINFO', u'MX', u'NAPTR', u'NS', u'NSEC', u'NXT', u'PTR', u'RRSIG',
    u'RP', u'SIG', u'SPF', u'SRV', u'SSHFP', u'TLSA', u'TXT',
)
# DNS zone record identificator
_dns_zone_record = DNSName.empty
# most used record types, always ask for those in interactive prompt
_top_record_types = ('A', 'AAAA', )
# record types prompted for in reverse zones
_rev_top_record_types = ('PTR', )
# record types prompted for on the zone apex
_zone_top_record_types = ('NS', 'MX', 'LOC', )
# attributes derived from record types
_record_attributes = [str('%srecord' % t.lower()) for t in _record_types]
# Deprecated
# supported DNS classes, IN = internet, rest is almost never used
_record_classes = (u'IN', u'CS', u'CH', u'HS')
# IN record class
_IN = dns.rdataclass.IN
# NS record type
_NS = dns.rdatatype.from_text('NS')
# common output definition for permission-manipulating commands
_output_permissions = (
    output.summary,
    output.Output('result', bool, _('True means the operation was successful')),
    output.Output('value', unicode, _('Permission value')),
)
def _rname_validator(ugettext, zonemgr):
    """Validator: return an error string when *zonemgr* is not a valid
    domain name, None when it is (IPA validator convention)."""
    try:
        # constructing a DNSName performs the actual syntax check
        DNSName(zonemgr)
    except (ValueError, dns.exception.SyntaxError) as err:
        return unicode(err)
    return None
def _create_zone_serial():
"""
Generate serial number for zones. bind-dyndb-ldap expects unix time in
to be used for SOA serial.
SOA serial in a date format would also work, but it may be set to far
future when many DNS updates are done per day (more than 100). Unix
timestamp is more resilient to this issue.
"""
return int(time.time())
def _reverse_zone_name(netstr):
    """Map an IP address or network string to its reverse (PTR) zone name.

    A plain address is delegated to get_reverse_zone_default(); a network
    keeps only the labels covered by its prefix length.  Returns a unicode
    zone name, or None for an unknown IP version.
    """
    try:
        netaddr.IPAddress(str(netstr))
    except (netaddr.AddrFormatError, ValueError):
        pass
    else:
        # use more sensible default prefix than netaddr default
        return unicode(get_reverse_zone_default(netstr))
    net = netaddr.IPNetwork(netstr)
    items = net.ip.reverse_dns.split('.')
    if net.version == 4:
        # 8 bits per in-addr.arpa label; use floor division (//) so the
        # slice index stays an int on Python 3 as well ('/' yields float)
        return u'.'.join(items[4 - net.prefixlen // 8:])
    elif net.version == 6:
        # 4 bits (one nibble) per ip6.arpa label
        return u'.'.join(items[32 - net.prefixlen // 4:])
    else:
        return None
def _validate_ipaddr(ugettext, ipaddr, ip_version=None):
    """Validator: check an IP address literal, optionally requiring a
    specific IP version (4 or 6).  Returns error text or None."""
    try:
        parsed = netaddr.IPAddress(str(ipaddr), flags=netaddr.INET_PTON)
    except (netaddr.AddrFormatError, ValueError):
        return _('invalid IP address format')
    if ip_version is not None and parsed.version != ip_version:
        return _('invalid IP address version (is %(value)d, must be %(required_value)d)!') \
            % dict(value=parsed.version, required_value=ip_version)
    return None
def _validate_ip4addr(ugettext, ipaddr):
    # convenience wrapper: accept IPv4 addresses only
    return _validate_ipaddr(ugettext, ipaddr, 4)
def _validate_ip6addr(ugettext, ipaddr):
    # convenience wrapper: accept IPv6 addresses only
    return _validate_ipaddr(ugettext, ipaddr, 6)
def _validate_ipnet(ugettext, ipnet):
    """Validator: return an error string when *ipnet* is not a valid IP
    network, None when it is."""
    try:
        # constructed only for its validating side effect; the previous
        # code bound the result to an unused local
        netaddr.IPNetwork(ipnet)
    except (netaddr.AddrFormatError, ValueError, UnboundLocalError):
        # NOTE(review): UnboundLocalError is presumably a workaround for a
        # netaddr bug -- confirm before removing
        return _('invalid IP network format')
    return None
def _validate_bind_aci(ugettext, bind_acis):
if not bind_acis:
return
bind_acis = bind_acis.split(';')
if bind_acis[-1]:
return _('each ACL element must be terminated with a semicolon')
else:
bind_acis.pop(-1)
for bind_aci in bind_acis:
if bind_aci in ("any", "none", "localhost", "localnets"):
continue
if bind_aci.startswith('!'):
bind_aci = bind_aci[1:]
try:
ip = CheckedIPAddress(bind_aci, parse_netmask=True,
allow_network=True, allow_loopback=True)
except (netaddr.AddrFormatError, ValueError) as e:
return unicode(e)
except UnboundLocalError:
return _(u"invalid address format")
def _normalize_bind_aci(bind_acis):
if not bind_acis:
return
bind_acis = bind_acis.split(';')
normalized = []
for bind_aci in bind_acis:
if not bind_aci:
continue
if bind_aci in ("any", "none", "localhost", "localnets"):
normalized.append(bind_aci)
continue
prefix = ""
if bind_aci.startswith('!'):
bind_aci = bind_aci[1:]
prefix = "!"
try:
ip = CheckedIPAddress(bind_aci, parse_netmask=True,
allow_network=True, allow_loopback=True)
if '/' in bind_aci: # addr with netmask
netmask = "/%s" % ip.prefixlen
else:
netmask = ""
normalized.append(u"%s%s%s" % (prefix, str(ip), netmask))
continue
except:
normalized.append(bind_aci)
continue
acis = u';'.join(normalized)
acis += u';'
return acis
def _validate_bind_forwarder(ugettext, forwarder):
    """Validator for a forwarder specification: "IP" or "IP port N".

    Returns error text or None.
    """
    ip_address, sep, port = forwarder.partition(u' port ')
    ip_address_validation = _validate_ipaddr(ugettext, ip_address)
    if ip_address_validation is not None:
        return ip_address_validation
    if sep:
        try:
            port = int(port)
            if port < 0 or port > 65535:
                raise ValueError()
        except ValueError:
            # interpolate *after* translation: the previous code formatted
            # inside _(), so the msgid never matched the message catalog
            return _('%(port)s is not a valid port') % dict(port=port)
    return None
def _validate_nsec3param_record(ugettext, value):
_nsec3param_pattern = (r'^(?P<alg>\d+) (?P<flags>\d+) (?P<iter>\d+) '
r'(?P<salt>([0-9a-fA-F]{2})+|-)$')
rec = re.compile(_nsec3param_pattern, flags=re.U)
result = rec.match(value)
if result is None:
return _(u'expected format: <0-255> <0-255> <0-65535> '
'even-length_hexadecimal_digits_or_hyphen')
alg = int(result.group('alg'))
flags = int(result.group('flags'))
iterations = int(result.group('iter'))
salt = result.group('salt')
if alg > 255:
return _('algorithm value: allowed interval 0-255')
if flags > 255:
return _('flags value: allowed interval 0-255')
if iterations > 65535:
return _('iterations value: allowed interval 0-65535')
if salt == u'-':
return None
try:
binascii.a2b_hex(salt)
except TypeError as e:
return _('salt value: %(err)s') % {'err': e}
return None
def _hostname_validator(ugettext, value):
    """Validator: require a fully qualified host name."""
    assert isinstance(value, DNSName)
    # make_absolute() appends the root label, so "host.example." has 3 labels
    if len(value.make_absolute().labels) < 3:
        return _('invalid domain-name: not fully qualified')
    return None
def _no_wildcard_validator(ugettext, value):
    """Disallow usage of wildcards as RFC 4592 section 4 recommends
    """
    assert isinstance(value, DNSName)
    if not value.is_wild():
        return None
    return _('should not be a wildcard domain name (RFC 4592 section 4)')
def is_forward_record(zone, str_address):
    """Return True when *zone* already holds an A/AAAA record with this
    address."""
    addr = netaddr.IPAddress(str_address)
    if addr.version == 4:
        search_kw = dict(arecord=str_address)
    elif addr.version == 6:
        search_kw = dict(aaaarecord=str_address)
    else:
        raise ValueError('Invalid address family')
    result = api.Command['dnsrecord_find'](zone, **search_kw)
    return result['count'] > 0
def add_forward_record(zone, name, str_address):
    """Create an A or AAAA record for *name* in *zone*; a no-op when an
    identical record already exists."""
    addr = netaddr.IPAddress(str_address)
    if addr.version == 4:
        record_kw = dict(arecord=str_address)
    elif addr.version == 6:
        record_kw = dict(aaaarecord=str_address)
    else:
        raise ValueError('Invalid address family')
    try:
        api.Command['dnsrecord_add'](zone, name, **record_kw)
    except errors.EmptyModlist:
        pass    # the entry already exists and matches
def get_reverse_zone(ipaddr, prefixlen=None):
    """Find the reverse zone hosting the PTR record for *ipaddr*.

    Without *prefixlen* the most specific matching reverse zone is
    searched for; with it the zone name is derived from the prefix and
    then verified to exist.  Returns (zone DNSName, record name relative
    to the zone).  Raises errors.NotFound when no zone matches.
    """
    ip = netaddr.IPAddress(str(ipaddr))
    revdns = DNSName(unicode(ip.reverse_dns))
    if prefixlen is None:
        revzone = None
        result = api.Command['dnszone_find']()['result']
        # pick the most specific zone that contains the reverse name
        for zone in result:
            zonename = zone['idnsname'][0]
            if (revdns.is_subdomain(zonename.make_absolute()) and
               (revzone is None or zonename.is_subdomain(revzone))):
                revzone = zonename
    else:
        # floor division (//) keeps the index an int on Python 3 as well;
        # '/' would yield a float and break the slice below
        if ip.version == 4:
            pos = 4 - prefixlen // 8
        elif ip.version == 6:
            pos = 32 - prefixlen // 4
        items = ip.reverse_dns.split('.')
        # NOTE(review): DNSName is handed the raw label list here -- verify
        # it accepts a sequence of labels
        revzone = DNSName(items[pos:])
        try:
            api.Command['dnszone_show'](revzone)
        except errors.NotFound:
            revzone = None
    if revzone is None:
        raise errors.NotFound(
            reason=_('DNS reverse zone for IP address %(addr)s not found') % dict(addr=ipaddr)
        )
    revname = revdns.relativize(revzone)
    return revzone, revname
def add_records_for_host_validation(option_name, host, domain, ip_addresses, check_forward=True, check_reverse=True):
    # Pre-flight checks before creating host records: the zone must exist,
    # every address must be valid, and (optionally) no conflicting forward
    # or reverse record may already exist.  Raises NotFound,
    # ValidationError or DuplicateEntry; returns nothing on success.
    assert isinstance(host, DNSName)
    assert isinstance(domain, DNSName)
    try:
        api.Command['dnszone_show'](domain)['result']
    except errors.NotFound:
        raise errors.NotFound(
            reason=_('DNS zone %(zone)s not found') % dict(zone=domain)
        )
    if not isinstance(ip_addresses, (tuple, list)):
        ip_addresses = [ip_addresses]
    for ip_address in ip_addresses:
        try:
            ip = CheckedIPAddress(ip_address, match_local=False)
        except Exception as e:
            # surface any parse failure as a validation error on the option
            raise errors.ValidationError(name=option_name, error=unicode(e))
        if check_forward:
            if is_forward_record(domain, unicode(ip)):
                raise errors.DuplicateEntry(
                        message=_(u'IP address %(ip)s is already assigned in domain %(domain)s.')\
                            % dict(ip=str(ip), domain=domain))
        if check_reverse:
            try:
                prefixlen = None
                if not ip.defaultnet:
                    prefixlen = ip.prefixlen
                # we prefer lookup of the IP through the reverse zone
                revzone, revname = get_reverse_zone(ip, prefixlen)
                reverse = api.Command['dnsrecord_find'](revzone, idnsname=revname)
                if reverse['count'] > 0:
                    raise errors.DuplicateEntry(
                            message=_(u'Reverse record for IP address %(ip)s already exists in reverse zone %(zone)s.')\
                            % dict(ip=str(ip), zone=revzone))
            except errors.NotFound:
                # no reverse zone for this address -- nothing to conflict with
                pass
def add_records_for_host(host, domain, ip_addresses, add_forward=True, add_reverse=True):
    # Create the forward (A/AAAA) and/or reverse (PTR) records for *host*
    # in *domain* for each given address.  Callers are expected to run
    # add_records_for_host_validation() first.
    assert isinstance(host, DNSName)
    assert isinstance(domain, DNSName)
    if not isinstance(ip_addresses, (tuple, list)):
        ip_addresses = [ip_addresses]
    for ip_address in ip_addresses:
        ip = CheckedIPAddress(ip_address, match_local=False)
        if add_forward:
            add_forward_record(domain, host, unicode(ip))
        if add_reverse:
            try:
                prefixlen = None
                if not ip.defaultnet:
                    prefixlen = ip.prefixlen
                revzone, revname = get_reverse_zone(ip, prefixlen)
                # PTR target must be the absolute host name (punycode form)
                addkw = {'ptrrecord': host.derelativize(domain).ToASCII()}
                api.Command['dnsrecord_add'](revzone, revname, **addkw)
            except errors.EmptyModlist:
                # the entry already exists and matches
                pass
def _dns_name_to_string(value, raw=False):
    """Render *value* (unicode or DNSName) as text; punycode when *raw*.

    Unparseable unicode input is returned unchanged.
    """
    if isinstance(value, unicode):
        try:
            value = DNSName(value)
        except Exception:
            # not a parseable name -- hand the original text back
            return value
    assert isinstance(value, DNSName)
    return value.ToASCII() if raw else unicode(value)
def _check_entry_objectclass(entry, objectclasses):
"""
Check if entry contains all objectclasses
"""
if not isinstance(objectclasses, (list, tuple)):
objectclasses = [objectclasses, ]
if not entry.get('objectclass'):
return False
entry_objectclasses = [o.lower() for o in entry['objectclass']]
for o in objectclasses:
if o not in entry_objectclasses:
return False
return True
def _check_DN_objectclass(ldap, dn, objectclasses):
    """Fetch *dn* and test its objectclasses; False when the entry cannot
    be read."""
    try:
        entry = ldap.get_entry(dn, [u'objectclass', ])
    except Exception:
        return False
    return _check_entry_objectclass(entry, objectclasses)
class DNSRecord(Str):
    """Base parameter class for one DNS resource record type.

    Subclasses set ``rrtype``/``rfc`` and describe the record structure in
    ``parts`` (plus optional ``extra`` behavior parameters); this class
    derives CLI option names, labels, validation and normalization from
    that description.
    """
    # a list of parts that create the actual raw DNS record
    parts = None
    # an optional list of parameters used in record-specific operations
    extra = None
    supported = True
    # supported RR types: https://fedorahosted.org/bind-dyndb-ldap/browser/doc/schema

    # format strings used to derive names/labels/docs for the record and
    # its per-part virtual options
    label_format = _("%s record")
    part_label_format = "%s %s"
    doc_format = _('Raw %s records')
    option_group_format = _('%s Record')
    see_rfc_msg = _("(see RFC %s for details)")
    part_name_format = "%s_part_%s"
    extra_name_format = "%s_extra_%s"
    cli_name_format = "%s_%s"
    format_error_msg = None

    kwargs = Str.kwargs + (
        ('validatedns', bool, True),
        ('normalizedns', bool, True),
    )
    # should be replaced in subclasses
    rrtype = None
    rfc = None

    def __init__(self, name=None, *rules, **kw):
        if self.rrtype not in _record_types:
            raise ValueError("Unknown RR type: %s. Must be one of %s" %
                             (str(self.rrtype), ", ".join(_record_types)))
        if not name:
            name = "%srecord*" % self.rrtype.lower()
        kw.setdefault('cli_name', '%s_rec' % self.rrtype.lower())
        kw.setdefault('label', self.label_format % self.rrtype)
        kw.setdefault('doc', self.doc_format % self.rrtype)
        kw.setdefault('option_group', self.option_group_format % self.rrtype)
        kw['csv'] = True
        if not self.supported:
            kw['flags'] = ('no_option',)
        super(DNSRecord, self).__init__(name, *rules, **kw)

    def _get_part_values(self, value):
        # split a raw record value into its parts; None when the number of
        # whitespace-separated tokens does not match self.parts
        values = value.split()
        if len(values) != len(self.parts):
            return None
        return tuple(values)

    def _part_values_to_string(self, values, index, idna=True):
        """Join part values back into one raw record string."""
        self._validate_parts(values)
        parts = []
        for v in values:
            if v is None:
                continue
            elif isinstance(v, DNSName) and idna:
                v = v.ToASCII()
            elif not isinstance(v, unicode):
                v = unicode(v)
            parts.append(v)
        return u" ".join(parts)

    def get_parts_from_kw(self, kw, raise_on_none=True):
        """Collect this record's part values from command options.

        Returns None when no part option was given at all; otherwise a
        tuple of values.  With *raise_on_none*, a missing required part
        raises ConversionError.
        """
        part_names = tuple(self.part_name_format % (self.rrtype.lower(), part.name)
                           for part in self.parts)
        vals = tuple(kw.get(part_name) for part_name in part_names)
        if all(val is None for val in vals):
            return
        if raise_on_none:
            for val_id, val in enumerate(vals):
                if val is None and self.parts[val_id].required:
                    cli_name = self.cli_name_format % (self.rrtype.lower(), self.parts[val_id].name)
                    raise errors.ConversionError(name=self.name,
                            error=_("'%s' is a required part of DNS record") % cli_name)
        return vals

    def _validate_parts(self, parts):
        if len(parts) != len(self.parts):
            raise errors.ValidationError(name=self.name,
                                         error=_("Invalid number of parts!"))

    def _convert_scalar(self, value, index=None):
        # tuples/lists of part values are joined into one raw string first
        if isinstance(value, (tuple, list)):
            return self._part_values_to_string(value, index)
        return super(DNSRecord, self)._convert_scalar(value, index)

    def normalize(self, value):
        if self.normalizedns:
            if isinstance(value, (tuple, list)):
                value = tuple(self._normalize_parts(v) for v in value
                              if v is not None)
            elif value is not None:
                value = (self._normalize_parts(value),)
        return super(DNSRecord, self).normalize(value)

    def _normalize_parts(self, value):
        """
        Normalize a DNS record value using normalizers for its parts.
        """
        if self.parts is None:
            return value
        try:
            values = self._get_part_values(value)
            if not values:
                return value
            converted_values = [part._convert_scalar(values[part_id])
                                if values[part_id] is not None else None
                                for part_id, part in enumerate(self.parts)]
            new_values = [part.normalize(converted_values[part_id])
                          for part_id, part in enumerate(self.parts)]
            value = self._convert_scalar(new_values)
        except Exception:
            # cannot normalize, rather return original value than fail
            pass
        return value

    def _rule_validatedns(self, _, value):
        """Validation rule: syntactically check one raw record value."""
        if not self.validatedns:
            return
        if value is None:
            # fix: this None check was duplicated in the original code
            return
        if not self.supported:
            return _('DNS RR type "%s" is not supported by bind-dyndb-ldap plugin') \
                % self.rrtype
        if self.parts is None:
            return
        # validate record format
        values = self._get_part_values(value)
        if not values:
            if not self.format_error_msg:
                part_names = [part.name.upper() for part in self.parts]
                if self.rfc:
                    see_rfc_msg = " " + self.see_rfc_msg % self.rfc
                else:
                    see_rfc_msg = ""
                return _('format must be specified as "%(format)s" %(rfcs)s') \
                    % dict(format=" ".join(part_names), rfcs=see_rfc_msg)
            else:
                return self.format_error_msg
        # validate every part
        for part_id, part in enumerate(self.parts):
            val = part.normalize(values[part_id])
            val = part.convert(val)
            part.validate(val)
        return None

    def _convert_dnsrecord_part(self, part):
        """
        All parts of DNSRecord need to be processed and modified before they
        can be added to global DNS API. For example a prefix need to be added
        before part name so that the name is unique in the global namespace.
        """
        name = self.part_name_format % (self.rrtype.lower(), part.name)
        cli_name = self.cli_name_format % (self.rrtype.lower(), part.name)
        label = self.part_label_format % (self.rrtype, unicode(part.label))
        option_group = self.option_group_format % self.rrtype
        flags = list(part.flags) + ['dnsrecord_part', 'virtual_attribute',]
        if not part.required:
            flags.append('dnsrecord_optional')
        if not self.supported:
            flags.append("no_option")
        return part.clone_rename(name,
                     cli_name=cli_name,
                     label=label,
                     required=False,
                     option_group=option_group,
                     flags=flags,
                     hint=self.name,)   # name of parent RR param

    def _convert_dnsrecord_extra(self, extra):
        """
        Parameters for special per-type behavior need to be processed in the
        same way as record parts in _convert_dnsrecord_part().
        """
        name = self.extra_name_format % (self.rrtype.lower(), extra.name)
        cli_name = self.cli_name_format % (self.rrtype.lower(), extra.name)
        label = self.part_label_format % (self.rrtype, unicode(extra.label))
        option_group = self.option_group_format % self.rrtype
        flags = list(extra.flags) + ['dnsrecord_extra', 'virtual_attribute',]
        return extra.clone_rename(name,
                     cli_name=cli_name,
                     label=label,
                     required=False,
                     option_group=option_group,
                     flags=flags,
                     hint=self.name,)   # name of parent RR param

    def get_parts(self):
        """Return the per-part virtual params for the global DNS API."""
        if self.parts is None:
            return tuple()
        return tuple(self._convert_dnsrecord_part(part) for part in self.parts)

    def get_extra(self):
        """Return the per-type extra behavior params for the global DNS API."""
        if self.extra is None:
            return tuple()
        return tuple(self._convert_dnsrecord_extra(extra) for extra in self.extra)

    def __get_part_param(self, cmd, part, output_kw, default=None):
        # NOTE(review): ``default`` is accepted but currently not passed to
        # prompt_param -- confirm whether that is intended
        name = self.part_name_format % (self.rrtype.lower(), part.name)
        label = self.part_label_format % (self.rrtype, unicode(part.label))
        optional = not part.required
        output_kw[name] = cmd.prompt_param(part,
                                           optional=optional,
                                           label=label)

    def prompt_parts(self, cmd, mod_dnsvalue=None):
        """Interactively prompt for all parts; *mod_dnsvalue* supplies the
        current value when modifying an existing record."""
        mod_parts = None
        if mod_dnsvalue is not None:
            mod_parts = self._get_part_values(mod_dnsvalue)
        user_options = {}
        if self.parts is None:
            return user_options
        for part_id, part in enumerate(self.parts):
            if mod_parts:
                default = mod_parts[part_id]
            else:
                default = None
            self.__get_part_param(cmd, part, user_options, default)
        return user_options

    def prompt_missing_parts(self, cmd, kw, prompt_optional=False):
        """Prompt only for parts not already present in *kw*."""
        user_options = {}
        if self.parts is None:
            return user_options
        for part in self.parts:
            name = self.part_name_format % (self.rrtype.lower(), part.name)
            if name in kw:
                continue
            optional = not part.required
            if optional and not prompt_optional:
                continue
            default = part.get_default(**kw)
            self.__get_part_param(cmd, part, user_options, default)
        return user_options

    # callbacks for per-type special record behavior
    def dnsrecord_add_pre_callback(self, ldap, dn, entry_attrs, attrs_list, *keys, **options):
        """Hook run before a record of this type is added; no-op by default."""
        assert isinstance(dn, DN)

    def dnsrecord_add_post_callback(self, ldap, dn, entry_attrs, *keys, **options):
        """Hook run after a record of this type is added; no-op by default."""
        assert isinstance(dn, DN)
class ForwardRecord(DNSRecord):
    # Base class for forward (A/AAAA) records: adds a per-type
    # --<type>-create-reverse flag that also creates the matching PTR record.
    extra = (
        Flag('create_reverse?',
            label=_('Create reverse'),
            doc=_('Create reverse record for this IP Address'),
            flags=['no_update']
        ),
    )
    def dnsrecord_add_pre_callback(self, ldap, dn, entry_attrs, attrs_list, *keys, **options):
        # Validate the reverse-record request up front and stash the
        # addresses on the request context for the post callback.
        assert isinstance(dn, DN)
        reverse_option = self._convert_dnsrecord_extra(self.extra[0])
        if options.get(reverse_option.name):
            records = entry_attrs.get(self.name, [])
            if not records:
                # --<rrtype>-create-reverse is set, but there are not records
                raise errors.RequirementError(name=self.name)
            for record in records:
                # NOTE(review): keys[-1]/keys[-2] look like record name and
                # zone respectively -- confirm against the calling command
                add_records_for_host_validation(self.name, keys[-1], keys[-2], record,
                        check_forward=False,
                        check_reverse=True)
            setattr(context, '%s_reverse' % self.name, entry_attrs.get(self.name))
    def dnsrecord_add_post_callback(self, ldap, dn, entry_attrs, *keys, **options):
        # Create the PTR records stashed on the context by the pre callback.
        assert isinstance(dn, DN)
        rev_records = getattr(context, '%s_reverse' % self.name, [])
        if rev_records:
            # make sure we don't run this post callback action again in nested
            # commands, line adding PTR record in add_records_for_host
            delattr(context, '%s_reverse' % self.name)
            for record in rev_records:
                try:
                    add_records_for_host(keys[-1], keys[-2], record,
                        add_forward=False, add_reverse=True)
                except Exception as e:
                    # forward record was added; report reverse failure
                    # without rolling the whole command back
                    raise errors.NonFatalError(
                        reason=_('Cannot create reverse record for "%(value)s": %(exc)s') \
                                % dict(value=record, exc=unicode(e)))
class UnsupportedDNSRecord(DNSRecord):
    """
    Record types the IPA CLI cannot manage; values already present in
    LDAP are still displayed.
    """
    supported = False
    def _get_part_values(self, value):
        # no structured parts -- the raw value is the whole record
        return ()
class ARecord(ForwardRecord):
    # IPv4 host address record
    rrtype = 'A'
    rfc = 1035
    parts = (
        Str('ip_address',
            _validate_ip4addr,
            label=_('IP Address'),
        ),
    )
class A6Record(DNSRecord):
    # obsolete IPv6 record type; only raw editing is offered
    rrtype = 'A6'
    rfc = 3226
    parts = (
        Str('data',
            label=_('Record data'),
        ),
    )
    def _get_part_values(self, value):
        # A6 RR type is obsolete and only a raw interface is provided
        return (value,)
class AAAARecord(ForwardRecord):
    # IPv6 host address record
    rrtype = 'AAAA'
    rfc = 3596
    parts = (
        Str('ip_address',
            _validate_ip6addr,
            label=_('IP Address'),
        ),
    )
class AFSDBRecord(DNSRecord):
    # AFS database location record
    rrtype = 'AFSDB'
    rfc = 1183
    parts = (
        Int('subtype?',
            label=_('Subtype'),
            minvalue=0,
            maxvalue=65535,
        ),
        DNSNameParam('hostname',
            label=_('Hostname'),
        ),
    )
class APLRecord(UnsupportedDNSRecord):
    # address prefix list; display-only in IPA
    rrtype = 'APL'
    rfc = 3123
class CERTRecord(DNSRecord):
    # certificate storage record
    rrtype = 'CERT'
    rfc = 4398
    parts = (
        Int('type',
            label=_('Certificate Type'),
            minvalue=0,
            maxvalue=65535,
        ),
        Int('key_tag',
            label=_('Key Tag'),
            minvalue=0,
            maxvalue=65535,
        ),
        Int('algorithm',
            label=_('Algorithm'),
            minvalue=0,
            maxvalue=255,
        ),
        Str('certificate_or_crl',
            label=_('Certificate/CRL'),
        ),
    )
class CNAMERecord(DNSRecord):
    # canonical-name alias record
    rrtype = 'CNAME'
    rfc = 1035
    parts = (
        DNSNameParam('hostname',
            label=_('Hostname'),
            doc=_('A hostname which this alias hostname points to'),
        ),
    )
class DHCIDRecord(UnsupportedDNSRecord):
    # DHCP identifier record; display-only in IPA
    rrtype = 'DHCID'
    rfc = 4701
class DNAMERecord(DNSRecord):
    # delegation-name (subtree redirection) record
    rrtype = 'DNAME'
    rfc = 2672
    parts = (
        DNSNameParam('target',
            label=_('Target'),
        ),
    )
class DSRecord(DNSRecord):
    # DNSSEC delegation signer record
    rrtype = 'DS'
    rfc = 4034
    parts = (
        Int('key_tag',
            label=_('Key Tag'),
            minvalue=0,
            maxvalue=65535,
        ),
        Int('algorithm',
            label=_('Algorithm'),
            minvalue=0,
            maxvalue=255,
        ),
        Int('digest_type',
            label=_('Digest Type'),
            minvalue=0,
            maxvalue=255,
        ),
        Str('digest',
            label=_('Digest'),
            pattern=r'^[0-9a-fA-F]+$',
            pattern_errmsg=u'only hexadecimal digits are allowed'
        ),
    )
class DLVRecord(DSRecord):
    # must use same attributes as DSRecord
    # DNSSEC lookaside validation record
    rrtype = 'DLV'
    rfc = 4431
class HINFORecord(UnsupportedDNSRecord):
    # host information record; display-only in IPA
    rrtype = 'HINFO'
    rfc = 1035
class HIPRecord(UnsupportedDNSRecord):
    # host identity protocol record; display-only in IPA
    rrtype = 'HIP'
    rfc = 5205
class KEYRecord(UnsupportedDNSRecord):
    # managed by BIND itself
    rrtype = 'KEY'
    rfc = 2535
class IPSECKEYRecord(UnsupportedDNSRecord):
    # IPsec keying material record; display-only in IPA
    rrtype = 'IPSECKEY'
    rfc = 4025
class KXRecord(DNSRecord):
    # key exchanger record
    rrtype = 'KX'
    rfc = 2230
    parts = (
        Int('preference',
            label=_('Preference'),
            doc=_('Preference given to this exchanger. Lower values are more preferred'),
            minvalue=0,
            maxvalue=65535,
        ),
        DNSNameParam('exchanger',
            label=_('Exchanger'),
            doc=_('A host willing to act as a key exchanger'),
        ),
    )
class LOCRecord(DNSRecord):
    # geographic location record; parts mirror the RFC 1876 presentation
    # format "d1 [m1 [s1]] N|S d2 [m2 [s2]] E|W alt [siz [hp [vp]]]"
    rrtype = 'LOC'
    rfc = 1876
    parts = (
        Int('lat_deg',
            label=_('Degrees Latitude'),
            minvalue=0,
            maxvalue=90,
        ),
        Int('lat_min?',
            label=_('Minutes Latitude'),
            minvalue=0,
            maxvalue=59,
        ),
        Decimal('lat_sec?',
            label=_('Seconds Latitude'),
            minvalue='0.0',
            maxvalue='59.999',
            precision=3,
        ),
        StrEnum('lat_dir',
            label=_('Direction Latitude'),
            values=(u'N', u'S',),
        ),
        Int('lon_deg',
            label=_('Degrees Longitude'),
            minvalue=0,
            maxvalue=180,
        ),
        Int('lon_min?',
            label=_('Minutes Longitude'),
            minvalue=0,
            maxvalue=59,
        ),
        Decimal('lon_sec?',
            label=_('Seconds Longitude'),
            minvalue='0.0',
            maxvalue='59.999',
            precision=3,
        ),
        StrEnum('lon_dir',
            label=_('Direction Longitude'),
            values=(u'E', u'W',),
        ),
        Decimal('altitude',
            label=_('Altitude'),
            minvalue='-100000.00',
            maxvalue='42849672.95',
            precision=2,
        ),
        Decimal('size?',
            label=_('Size'),
            minvalue='0.0',
            maxvalue='90000000.00',
            precision=2,
        ),
        Decimal('h_precision?',
            label=_('Horizontal Precision'),
            minvalue='0.0',
            maxvalue='90000000.00',
            precision=2,
        ),
        Decimal('v_precision?',
            label=_('Vertical Precision'),
            minvalue='0.0',
            maxvalue='90000000.00',
            precision=2,
        ),
    )
    format_error_msg = _("""format must be specified as
    "d1 [m1 [s1]] {"N"|"S"}  d2 [m2 [s2]] {"E"|"W"} alt["m"] [siz["m"] [hp["m"] [vp["m"]]]]"
    where:
       d1:     [0 .. 90]            (degrees latitude)
       d2:     [0 .. 180]           (degrees longitude)
       m1, m2: [0 .. 59]            (minutes latitude/longitude)
       s1, s2: [0 .. 59.999]        (seconds latitude/longitude)
       alt:    [-100000.00 .. 42849672.95]  BY .01 (altitude in meters)
       siz, hp, vp: [0 .. 90000000.00] (size/precision in meters)
    See RFC 1876 for details""")
    def _get_part_values(self, value):
        # parse the RFC 1876 presentation format; optional groups (minutes,
        # seconds, size/precision) come back as None, trailing "m" unit
        # suffixes are tolerated
        regex = re.compile(
            r'(?P<d1>\d{1,2}\s+)'
            r'(?:(?P<m1>\d{1,2}\s+)'
            r'(?P<s1>\d{1,2}(?:\.\d{1,3})?\s+)?)?'
            r'(?P<dir1>[NS])\s+'
            r'(?P<d2>\d{1,3}\s+)'
            r'(?:(?P<m2>\d{1,2}\s+)'
            r'(?P<s2>\d{1,2}(?:\.\d{1,3})?\s+)?)?'
            r'(?P<dir2>[WE])\s+'
            r'(?P<alt>-?\d{1,8}(?:\.\d{1,2})?)m?'
            r'(?:\s+(?P<siz>\d{1,8}(?:\.\d{1,2})?)m?'
            r'(?:\s+(?P<hp>\d{1,8}(?:\.\d{1,2})?)m?'
            r'(?:\s+(?P<vp>\d{1,8}(?:\.\d{1,2})?)m?\s*)?)?)?$')
        m = regex.match(value)
        if m is None:
            return None
        return tuple(x.strip() if x is not None else x for x in m.groups())
    def _validate_parts(self, parts):
        # beyond the per-part checks, enforce RFC 1876 dependencies:
        # seconds require minutes, and each precision field requires the
        # preceding size/precision fields
        super(LOCRecord, self)._validate_parts(parts)
        # create part_name -> part_id map first
        part_name_map = dict((part.name, part_id) \
                             for part_id,part in enumerate(self.parts))
        requirements = ( ('lat_sec', 'lat_min'),
                         ('lon_sec', 'lon_min'),
                         ('h_precision', 'size'),
                         ('v_precision', 'h_precision', 'size') )
        for req in requirements:
            target_part = req[0]
            if parts[part_name_map[target_part]] is not None:
                required_parts = req[1:]
                if any(parts[part_name_map[part]] is None for part in required_parts):
                    target_cli_name = self.cli_name_format % (self.rrtype.lower(), req[0])
                    required_cli_names = [ self.cli_name_format % (self.rrtype.lower(), part)
                                           for part in req[1:] ]
                    error = _("'%(required)s' must not be empty when '%(name)s' is set") % \
                                        dict(required=', '.join(required_cli_names),
                                             name=target_cli_name)
                    raise errors.ValidationError(name=self.name, error=error)
class MDRecord(UnsupportedDNSRecord):
    # obsoleted, use MX instead
    rrtype = 'MD'
    rfc = 1035
class MINFORecord(UnsupportedDNSRecord):
    # mailbox information record; display-only in IPA
    rrtype = 'MINFO'
    rfc = 1035
class MXRecord(DNSRecord):
    # mail exchanger record
    rrtype = 'MX'
    rfc = 1035
    parts = (
        Int('preference',
            label=_('Preference'),
            doc=_('Preference given to this exchanger. Lower values are more preferred'),
            minvalue=0,
            maxvalue=65535,
        ),
        DNSNameParam('exchanger',
            label=_('Exchanger'),
            doc=_('A host willing to act as a mail exchanger'),
        ),
    )
class NSRecord(DNSRecord):
    # authoritative name server record
    rrtype = 'NS'
    rfc = 1035
    parts = (
        DNSNameParam('hostname',
            label=_('Hostname'),
        ),
    )
class NSECRecord(UnsupportedDNSRecord):
    # managed by BIND itself
    rrtype = 'NSEC'
    rfc = 4034
def _validate_naptr_flags(ugettext, flags):
allowed_flags = u'SAUP'
flags = flags.replace('"','').replace('\'','')
for flag in flags:
if flag not in allowed_flags:
return _('flags must be one of "S", "A", "U", or "P"')
class NAPTRRecord(DNSRecord):
    # naming authority pointer record (DDDS)
    rrtype = 'NAPTR'
    rfc = 2915
    parts = (
        Int('order',
            label=_('Order'),
            minvalue=0,
            maxvalue=65535,
        ),
        Int('preference',
            label=_('Preference'),
            minvalue=0,
            maxvalue=65535,
        ),
        Str('flags',
            _validate_naptr_flags,
            label=_('Flags'),
            normalizer=lambda x:x.upper()
        ),
        Str('service',
            label=_('Service'),
        ),
        Str('regexp',
            label=_('Regular Expression'),
        ),
        Str('replacement',
            label=_('Replacement'),
        ),
    )
class NXTRecord(UnsupportedDNSRecord):
    # obsolete DNSSEC record; display-only in IPA
    rrtype = 'NXT'
    rfc = 2535
class PTRRecord(DNSRecord):
    # reverse-mapping pointer record
    rrtype = 'PTR'
    rfc = 1035
    parts = (
        DNSNameParam('hostname',
            #RFC 2317 section 5.2 -- can be relative
            label=_('Hostname'),
            doc=_('The hostname this reverse record points to'),
        ),
    )
class RPRecord(UnsupportedDNSRecord):
    # responsible person record; display-only in IPA
    rrtype = 'RP'
    rfc = 1183
class SRVRecord(DNSRecord):
    # service location record
    rrtype = 'SRV'
    rfc = 2782
    parts = (
        Int('priority',
            label=_('Priority'),
            minvalue=0,
            maxvalue=65535,
        ),
        Int('weight',
            label=_('Weight'),
            minvalue=0,
            maxvalue=65535,
        ),
        Int('port',
            label=_('Port'),
            minvalue=0,
            maxvalue=65535,
        ),
        DNSNameParam('target',
            label=_('Target'),
            doc=_('The domain name of the target host or \'.\' if the service is decidedly not available at this domain'),
        ),
    )
def _sig_time_validator(ugettext, value):
time_format = "%Y%m%d%H%M%S"
try:
time.strptime(value, time_format)
except ValueError:
return _('the value does not follow "YYYYMMDDHHMMSS" time format')
class SIGRecord(UnsupportedDNSRecord):
    # managed by BIND itself
    rrtype = 'SIG'
    rfc = 2535
class SPFRecord(UnsupportedDNSRecord):
    # sender policy framework record; display-only in IPA (use TXT)
    rrtype = 'SPF'
    rfc = 4408
class RRSIGRecord(UnsupportedDNSRecord):
    # managed by BIND itself
    rrtype = 'RRSIG'
    rfc = 4034
class SSHFPRecord(DNSRecord):
    # SSH public key fingerprint record
    rrtype = 'SSHFP'
    rfc = 4255
    parts = (
        Int('algorithm',
            label=_('Algorithm'),
            minvalue=0,
            maxvalue=255,
        ),
        Int('fp_type',
            label=_('Fingerprint Type'),
            minvalue=0,
            maxvalue=255,
        ),
        Str('fingerprint',
            label=_('Fingerprint'),
        ),
    )
    def _get_part_values(self, value):
        # fingerprint part can contain space in LDAP, return it as one part
        values = value.split(None, 2)
        if len(values) != len(self.parts):
            return None
        return tuple(values)
class TLSARecord(DNSRecord):
    # TLS certificate association (DANE) record
    rrtype = 'TLSA'
    rfc = 6698
    parts = (
        Int('cert_usage',
            label=_('Certificate Usage'),
            minvalue=0,
            maxvalue=255,
        ),
        Int('selector',
            label=_('Selector'),
            minvalue=0,
            maxvalue=255,
        ),
        Int('matching_type',
            label=_('Matching Type'),
            minvalue=0,
            maxvalue=255,
        ),
        Str('cert_association_data',
            label=_('Certificate Association Data'),
        ),
    )
class TXTRecord(DNSRecord):
    # free-form text record
    rrtype = 'TXT'
    rfc = 1035
    parts = (
        Str('data',
            label=_('Text Data'),
        ),
    )
    def _get_part_values(self, value):
        # ignore any space in TXT record
        return (value,)
# one instance of every record param (supported and unsupported); consumed
# by __dns_record_options_iter() and map_names_to_records below
_dns_records = (
    ARecord(),
    AAAARecord(),
    A6Record(),
    AFSDBRecord(),
    APLRecord(),
    CERTRecord(),
    CNAMERecord(),
    DHCIDRecord(),
    DLVRecord(),
    DNAMERecord(),
    DSRecord(),
    HIPRecord(),
    IPSECKEYRecord(),
    KEYRecord(),
    KXRecord(),
    LOCRecord(),
    MXRecord(),
    NAPTRRecord(),
    NSRecord(),
    NSECRecord(),
    PTRRecord(),
    RRSIGRecord(),
    RPRecord(),
    SIGRecord(),
    SPFRecord(),
    SRVRecord(),
    SSHFPRecord(),
    TLSARecord(),
    TXTRecord(),
)
def __dns_record_options_iter():
    """Yield the --structured virtual options followed by every record
    param and its derived part/extra params, in CLI display order."""
    # These 3 options are used in --structured format. They are defined
    # rather in takes_params than has_output_params because of their
    # order - they should be printed to CLI before any DNS part param
    structured_options = (
        Any('dnsrecords?',
            label=_('Records'),
            flags=['no_create', 'no_search', 'no_update'],),
        Str('dnstype?',
            label=_('Record type'),
            flags=['no_create', 'no_search', 'no_update'],),
        Str('dnsdata?',
            label=_('Record data'),
            flags=['no_create', 'no_search', 'no_update'],),
    )
    for opt in structured_options:
        yield opt
    for option in _dns_records:
        yield option
        for part in option.get_parts():
            yield part
        for extra in option.get_extra():
            yield extra
# flattened tuple of all record params plus their part/extra params
_dns_record_options = tuple(__dns_record_options_iter())
# rrtype names the CLI can actually manage (excludes UnsupportedDNSRecord)
_dns_supported_record_types = tuple(
    record.rrtype for record in _dns_records if record.supported)
def check_ns_rec_resolvable(zone, name):
    """Resolve a prospective NS target and raise NotFound when it has no
    A/AAAA record.  *name* may be empty (zone apex) or relative to *zone*."""
    assert isinstance(zone, DNSName)
    assert isinstance(name, DNSName)
    if name.is_empty():
        # empty name means the zone apex itself
        name = zone.make_absolute()
    elif not name.is_absolute():
        # this is a DNS name relative to the zone
        name = name.derelativize(zone.make_absolute())
    try:
        return api.Command['dns_resolve'](unicode(name))
    except errors.NotFound:
        raise errors.NotFound(
            reason=_('Nameserver \'%(host)s\' does not have a corresponding '
                     'A/AAAA record') % {'host': name}
        )
def dns_container_exists(ldap):
    """Return True when the DNS container entry is present in LDAP."""
    try:
        ldap.get_entry(DN(api.env.container_dns, api.env.basedn), [])
        return True
    except errors.NotFound:
        return False
def default_zone_update_policy(zone):
    """Return the default BIND update policy for *zone*: reverse zones get
    the PTR-oriented policy, forward zones the standard forward policy."""
    if not zone.is_reverse():
        return get_dns_forward_zone_update_policy(api.env.realm)
    return get_dns_reverse_zone_update_policy(api.env.realm, zone.ToASCII())
# extra output column shown by dnszone commands
dnszone_output_params = (
    Str('managedby',
        label=_('Managedby permission'),
    ),
)
def _convert_to_idna(value):
    """
    Function converts a unicode value to idna, without extra validation.
    If conversion fails, None is returned
    """
    assert isinstance(value, unicode)
    try:
        candidate = value
        # remember leading/trailing dots -- ToASCII works on bare labels
        start_dot = u'.' if candidate.startswith(u'.') else u''
        end_dot = u'.' if candidate.endswith(u'.') else u''
        candidate = candidate[len(start_dot):]
        if end_dot:
            candidate = candidate[:-1]
        candidate = encodings.idna.nameprep(candidate)
        # split on unescaped dots only
        labels = re.split(r'(?<!\\)\.', candidate)
        return u'%s%s%s' % (start_dot,
                            u'.'.join(encodings.idna.ToASCII(x)
                                      for x in labels),
                            end_dot)
    except Exception:
        # best effort: caller treats None as "no IDNA form available"
        return None
def _create_idn_filter(cmd, ldap, *args, **options):
    """Build an LDAP search filter that also matches IDNA (ACE) forms.

    Used by the *-find commands: the free-text search term and any
    attribute options are converted to their IDNA representation where
    possible, and the filter matches either form.

    :param cmd: the calling search command (provides obj and option mapping)
    :param ldap: ldap2 backend used to build/combine filters
    :param args: positional search arguments; args[-1] is the search term
    :return: combined LDAP filter string
    """
    term = args[-1]
    if term:
        # include idna values to search
        term_idna = _convert_to_idna(term)
        if term_idna and term != term_idna:
            term = (term, term_idna)
    search_kw = {}
    attr_extra_filters = []
    # Normalize option values: DNSName -> ACE text, record options -> the
    # canonical string form produced by the record type (when parseable).
    for attr, value in cmd.args_options_2_entry(**options).iteritems():
        if not isinstance(value, list):
            value = [value]
        for i, v in enumerate(value):
            if isinstance(v, DNSName):
                value[i] = v.ToASCII()
            elif attr in map_names_to_records:
                record = map_names_to_records[attr]
                parts = record._get_part_values(v)
                if parts is None:
                    value[i] = v
                    continue
                try:
                    value[i] = record._part_values_to_string(parts, None)
                except errors.ValidationError:
                    value[i] = v
        # create MATCH_ANY filter for multivalue
        if len(value) > 1:
            f = ldap.make_filter({attr: value}, rules=ldap.MATCH_ANY)
            attr_extra_filters.append(f)
        else:
            search_kw[attr] = value
    if cmd.obj.search_attributes:
        search_attrs = cmd.obj.search_attributes
    else:
        search_attrs = cmd.obj.default_attributes
    if cmd.obj.search_attributes_config:
        # IPA config entry may override which attributes the term matches
        config = ldap.get_ipa_config()
        config_attrs = config.get(cmd.obj.search_attributes_config, [])
        if len(config_attrs) == 1 and (isinstance(config_attrs[0],
                                                  basestring)):
            search_attrs = config_attrs[0].split(',')
    search_kw['objectclass'] = cmd.obj.object_class
    attr_filter = ldap.make_filter(search_kw, rules=ldap.MATCH_ALL)
    if attr_extra_filters:
        # combine filter if there is any idna value
        attr_extra_filters.append(attr_filter)
        attr_filter = ldap.combine_filters(attr_extra_filters,
                                           rules=ldap.MATCH_ALL)
    # term filter: substring match of the term over the search attributes
    search_kw = {}
    for a in search_attrs:
        search_kw[a] = term
    term_filter = ldap.make_filter(search_kw, exact=False)
    member_filter = cmd.get_member_filter(ldap, **options)
    filter = ldap.combine_filters(
        (term_filter, attr_filter, member_filter), rules=ldap.MATCH_ALL
    )
    return filter
# Map LDAP attribute name (e.g. 'arecord') to its DNSRecord parameter
# object, for every supported record type.
map_names_to_records = {"%srecord" % record.rrtype.lower(): record for record
                        in _dns_records if record.supported}
def _records_idn_postprocess(record, **options):
    """Convert DNS-name parts of record values back to Unicode for display.

    Mutates *record* in place: for each supported record attribute, every
    value is split into its parts, DNS-name parts are re-parsed as DNSName,
    and the value is re-serialized (Unicode form unless --raw was given).
    Values that cannot be parsed or re-serialized are kept unchanged.
    """
    for attr in record.keys():
        attr = attr.lower()
        try:
            param = map_names_to_records[attr]
        except KeyError:
            # not a DNS record attribute; leave untouched
            continue
        if not isinstance(param, DNSRecord):
            continue
        part_params = param.get_parts()
        rrs = []
        for dnsvalue in record[attr]:
            parts = param._get_part_values(dnsvalue)
            if parts is None:
                # unparseable value is silently dropped from output
                continue
            parts = list(parts)
            try:
                for (i, p) in enumerate(parts):
                    if isinstance(part_params[i], DNSNameParam):
                        parts[i] = DNSName(p)
                # idna=True (raw mode) keeps ACE form; otherwise Unicode
                rrs.append(param._part_values_to_string(parts, None,
                                            idna=options.get('raw', False)))
            except (errors.ValidationError, errors.ConversionError):
                # keep the original string when re-serialization fails
                rrs.append(dnsvalue)
        record[attr] = rrs
def _normalize_zone(zone):
    """Lowercase pure-ASCII zone names; leave IDNA (non-ASCII) names as-is."""
    if isinstance(zone, unicode):
        # normalize only non-IDNA zones
        try:
            ascii_zone = zone.encode('ascii')
        except UnicodeError:
            pass
        else:
            return unicode(ascii_zone).lower()
    return zone
def _get_auth_zone_ldap(name):
    """
    Find authoritative zone in LDAP for name. Only active zones are considered.
    :param name:
    :return: (zone, truncated)
    zone: authoritative zone, or None if authoritative zone is not in LDAP
    """
    assert isinstance(name, DNSName)
    ldap = api.Backend.ldap2
    # Create all possible parent zone names
    search_name = name.make_absolute()
    zone_names = []
    for i in xrange(len(search_name)):
        # every suffix of the name is a candidate parent zone
        zone_name_abs = DNSName(search_name[i:]).ToASCII()
        zone_names.append(zone_name_abs)
        # compatibility with IPA < 4.0, zone name can be relative
        zone_names.append(zone_name_abs[:-1])
    # Create filters
    objectclass_filter = ldap.make_filter({'objectclass':'idnszone'})
    zonenames_filter = ldap.make_filter({'idnsname': zone_names})
    zoneactive_filter = ldap.make_filter({'idnsZoneActive': 'true'})
    complete_filter = ldap.combine_filters(
        [objectclass_filter, zonenames_filter, zoneactive_filter],
        rules=ldap.MATCH_ALL
    )
    try:
        entries, truncated = ldap.find_entries(
            filter=complete_filter,
            attrs_list=['idnsname'],
            base_dn=DN(api.env.container_dns, api.env.basedn),
            scope=ldap.SCOPE_ONELEVEL
        )
    except errors.NotFound:
        return None, False
    # always use absolute zones
    matched_auth_zones = [entry.single_value['idnsname'].make_absolute()
                          for entry in entries]
    # return longest match (closest enclosing zone is authoritative)
    return max(matched_auth_zones, key=len), truncated
def _get_longest_match_ns_delegation_ldap(zone, name):
    """
    Searches for deepest delegation for name in LDAP zone.
    NOTE: NS record in zone apex is not considered as delegation.
    It returns None if there is no delegation outside of zone apex.
    Example:
      zone: example.com.
      name: ns.sub.example.com.
      records:
          extra.ns.sub.example.com.
          sub.example.com.
          example.com
      result: sub.example.com.
    :param zone: zone name
    :param name:
    :return: (match, truncated);
    match: delegation name if success, or None if no delegation record exists
    """
    assert isinstance(zone, DNSName)
    assert isinstance(name, DNSName)
    ldap = api.Backend.ldap2
    # get zone DN
    zone_dn = api.Object.dnszone.get_dn(zone)
    if name.is_absolute():
        relative_record_name = name.relativize(zone.make_absolute())
    else:
        relative_record_name = name
    # Name is zone apex
    if relative_record_name.is_empty():
        return None, False
    # create list of possible record names (all suffixes of the name)
    possible_record_names = [DNSName(relative_record_name[i:]).ToASCII()
                             for i in xrange(len(relative_record_name))]
    # search filters
    name_filter = ldap.make_filter({'idnsname': [possible_record_names]})
    objectclass_filter = ldap.make_filter({'objectclass': 'idnsrecord'})
    complete_filter = ldap.combine_filters(
        [name_filter, objectclass_filter],
        rules=ldap.MATCH_ALL
    )
    try:
        entries, truncated = ldap.find_entries(
            filter=complete_filter,
            attrs_list=['idnsname', 'nsrecord'],
            base_dn=zone_dn,
            scope=ldap.SCOPE_ONELEVEL
        )
    except errors.NotFound:
        return None, False
    matched_records = []
    # test if entry contains NS records (only those count as delegations)
    for entry in entries:
        if entry.get('nsrecord'):
            matched_records.append(entry.single_value['idnsname'])
    if not matched_records:
        return None, truncated
    # return longest match (deepest delegation point)
    return max(matched_records, key=len), truncated
def _find_subtree_forward_zones_ldap(name, child_zones_only=False):
    """
    Search for forwardzone <name> and all child forwardzones
    Filter: (|(*.<name>.)(<name>.))

    Only active forward zones are returned, always as absolute names.

    :param name: DNSName to search under
    :param child_zones_only: search only for child zones
    :return: (list of zonenames, truncated), list is empty if no zone found
    """
    assert isinstance(name, DNSName)
    ldap = api.Backend.ldap2
    # prepare for filter "*.<name>."
    search_name = u".%s" % name.make_absolute().ToASCII()
    # we need to search zone with and without last dot, due compatibility
    # with IPA < 4.0
    search_names = [search_name, search_name[:-1]]
    # Create filters
    objectclass_filter = ldap.make_filter({'objectclass': 'idnsforwardzone'})
    zonenames_filter = ldap.make_filter({'idnsname': search_names}, exact=False,
                                        trailing_wildcard=False)
    if not child_zones_only:
        # find also zone with exact name
        exact_name = name.make_absolute().ToASCII()
        # we need to search zone with and without last dot, due compatibility
        # with IPA < 4.0
        # BUG FIX: the relative variant must strip the trailing dot with
        # exact_name[:-1]; the original exact_name[-1] yielded only the last
        # character ('.') and could never match a stored relative zone name.
        exact_names = [exact_name, exact_name[:-1]]
        exact_name_filter = ldap.make_filter({'idnsname': exact_names})
        zonenames_filter = ldap.combine_filters([zonenames_filter,
                                                 exact_name_filter])
    zoneactive_filter = ldap.make_filter({'idnsZoneActive': 'true'})
    complete_filter = ldap.combine_filters(
        [objectclass_filter, zonenames_filter, zoneactive_filter],
        rules=ldap.MATCH_ALL
    )
    try:
        entries, truncated = ldap.find_entries(
            filter=complete_filter,
            attrs_list=['idnsname'],
            base_dn=DN(api.env.container_dns, api.env.basedn),
            scope=ldap.SCOPE_ONELEVEL
        )
    except errors.NotFound:
        return [], False
    result = [entry.single_value['idnsname'].make_absolute()
              for entry in entries]
    return result, truncated
def _get_zone_which_makes_fw_zone_ineffective(fwzonename):
    """
    Check if forward zone is effective.

    If a parent zone exists as an authoritative zone, the forward zone will
    not forward queries by default: BIND considers itself authoritative and
    returns NXDOMAIN. Delegating authority with an NS record in the parent
    zone (e.g. "sub.example.com NS ns.sub.example.com.") makes the forward
    zone effective again.

    :param fwzonename: forwardzone
    :return: (zone, truncated)
             zone: None if effective, name of authoritative zone otherwise
    """
    assert isinstance(fwzonename, DNSName)

    auth_zone, truncated_zone = _get_auth_zone_ldap(fwzonename)
    if not auth_zone:
        # no authoritative parent zone exists; forwarding works
        return None, truncated_zone

    delegation, truncated_ns = _get_longest_match_ns_delegation_ldap(
        auth_zone, fwzonename)
    truncated = truncated_ns or truncated_zone
    if delegation:
        # an NS delegation exists, so the forward zone is effective
        return None, truncated
    return auth_zone, truncated
def _add_warning_fw_zone_is_not_effective(result, fwzone, version):
    """
    Append a ForwardzoneIsNotEffectiveWarning to *result* when *fwzone*
    is masked by an authoritative zone without an NS delegation.
    """
    authoritative_zone, truncated = \
        _get_zone_which_makes_fw_zone_ineffective(fwzone)
    if not authoritative_zone:
        return
    # forward zone is not effective and forwarding will not work
    messages.add_message(
        version, result,
        messages.ForwardzoneIsNotEffectiveWarning(
            fwzone=fwzone, authzone=authoritative_zone,
            ns_rec=fwzone.relativize(authoritative_zone)
        )
    )
class DNSZoneBase(LDAPObject):
    """
    Base class for DNS Zone
    """
    # zones live directly under the DNS container
    container_dn = api.env.container_dns
    object_class = ['top']
    possible_objectclasses = ['ipadnszone']
    default_attributes = [
        'idnsname', 'idnszoneactive', 'idnsforwarders', 'idnsforwardpolicy'
    ]
    # parameters shared by authoritative zones and forward zones
    takes_params = (
        DNSNameParam('idnsname',
            _no_wildcard_validator,  # RFC 4592 section 4
            only_absolute=True,
            cli_name='name',
            label=_('Zone name'),
            doc=_('Zone name (FQDN)'),
            default_from=lambda name_from_ip: _reverse_zone_name(name_from_ip),
            normalizer=_normalize_zone,
            primary_key=True,
        ),
        # virtual attribute: only used to derive idnsname for reverse zones
        Str('name_from_ip?', _validate_ipnet,
            label=_('Reverse zone IP network'),
            doc=_('IP network to create reverse zone name from'),
            flags=('virtual_attribute',),
        ),
        Bool('idnszoneactive?',
            cli_name='zone_active',
            label=_('Active zone'),
            doc=_('Is zone active?'),
            flags=['no_create', 'no_update'],
            attribute=True,
        ),
        Str('idnsforwarders*',
            _validate_bind_forwarder,
            cli_name='forwarder',
            label=_('Zone forwarders'),
            doc=_('Per-zone forwarders. A custom port can be specified '
                  'for each forwarder using a standard format "IP_ADDRESS port PORT"'),
            csv=True,
        ),
        StrEnum('idnsforwardpolicy?',
            cli_name='forward_policy',
            label=_('Forward policy'),
            doc=_('Per-zone conditional forwarding policy. Set to "none" to '
                  'disable forwarding to global forwarder for this zone. In '
                  'that case, conditional zone forwarders are disregarded.'),
            values=(u'only', u'first', u'none'),
        ),
    )
    def get_dn(self, *keys, **options):
        """Build the zone DN, tolerating zones stored with or without the
        trailing dot (IPA < 4.0 stored relative names)."""
        if not dns_container_exists(self.api.Backend.ldap2):
            raise errors.NotFound(reason=_('DNS is not configured'))
        zone = keys[-1]
        assert isinstance(zone, DNSName)
        assert zone.is_absolute()
        zone_a = zone.ToASCII()
        # special case when zone is the root zone ('.')
        if zone == DNSName.root:
            return super(DNSZoneBase, self).get_dn(zone_a, **options)
        # try first relative name, a new zone has to be added as absolute
        # otherwise ObjectViolation is raised
        zone_a = zone_a[:-1]
        dn = super(DNSZoneBase, self).get_dn(zone_a, **options)
        try:
            self.backend.get_entry(dn, [''])
        except errors.NotFound:
            # relative entry does not exist; fall back to the absolute form
            zone_a = u"%s." % zone_a
            dn = super(DNSZoneBase, self).get_dn(zone_a, **options)
        return dn
    def permission_name(self, zone):
        """Return the name of the managed-by permission for *zone*."""
        assert isinstance(zone, DNSName)
        return u"Manage DNS zone %s" % zone.ToASCII()
    def get_name_in_zone(self, zone, hostname):
        """
        Get name of a record that is to be added to a new zone. I.e. when
        we want to add record "ipa.lab.example.com" in a zone "example.com",
        this function should return "ipa.lab". Returns None when record cannot
        be added to a zone. Returns '@' when the hostname is the zone record.
        """
        assert isinstance(zone, DNSName)
        assert zone.is_absolute()
        assert isinstance(hostname, DNSName)
        if not hostname.is_absolute():
            # relative names are taken as already zone-relative
            return hostname
        if hostname.is_subdomain(zone):
            return hostname.relativize(zone)
        return None
    def _remove_permission(self, zone):
        """Delete the zone's managed-by permission, trying both the
        absolute and (for IPA < 4.0 compatibility) relative name form."""
        permission_name = self.permission_name(zone)
        try:
            api.Command['permission_del'](permission_name, force=True)
        except errors.NotFound as e:
            if zone == DNSName.root:  # special case root zone
                raise
            # compatibility, older IPA versions which allows to create zone
            # without absolute zone name
            permission_name_rel = self.permission_name(
                zone.relativize(DNSName.root)
            )
            try:
                api.Command['permission_del'](permission_name_rel, force=True)
            except errors.NotFound:
                raise e  # re-raise original exception
    def _make_zonename_absolute(self, entry_attrs, **options):
        """
        Zone names can be relative in IPA < 4.0, make sure we always return
        absolute zone name from ldap
        """
        if options.get('raw'):
            # raw mode shows the stored value untouched
            return
        if "idnsname" in entry_attrs:
            entry_attrs.single_value['idnsname'] = (
                entry_attrs.single_value['idnsname'].make_absolute())
# Common create command for zone objects; rejects creating a zone name that
# already exists as the other zone type (authoritative vs forward).
class DNSZoneBase_add(LDAPCreate):
    has_output_params = LDAPCreate.has_output_params + dnszone_output_params
    def pre_callback(self, ldap, dn, entry_attrs, attrs_list, *keys, **options):
        assert isinstance(dn, DN)
        try:
            entry = ldap.get_entry(dn)
        except errors.NotFound:
            # no entry with this name yet; creation can proceed
            pass
        else:
            if _check_entry_objectclass(entry, self.obj.object_class):
                # same zone type already exists
                self.obj.handle_duplicate_entry(*keys)
            else:
                # name taken by the other zone type (forward vs authoritative)
                raise errors.DuplicateEntry(
                    message=_(u'Only one zone type is allowed per zone name')
                )
        # new zones are active by default
        entry_attrs['idnszoneactive'] = 'TRUE'
        return dn
# Common delete command for zone objects.
class DNSZoneBase_del(LDAPDelete):

    def pre_callback(self, ldap, dn, *nkeys, **options):
        assert isinstance(dn, DN)
        # refuse to delete an entry of a different zone type
        is_expected_type = _check_DN_objectclass(ldap, dn,
                                                 self.obj.object_class)
        if not is_expected_type:
            self.obj.handle_not_found(*nkeys)
        return dn

    def post_callback(self, ldap, dn, *keys, **options):
        # best-effort removal of the zone's managed-by permission
        try:
            self.obj._remove_permission(keys[-1])
        except errors.NotFound:
            pass
        return True
# Common modify command for zone objects.
class DNSZoneBase_mod(LDAPUpdate):
    has_output_params = LDAPUpdate.has_output_params + dnszone_output_params
    def post_callback(self, ldap, dn, entry_attrs, *keys, **options):
        assert isinstance(dn, DN)
        # zone names stored before IPA 4.0 may be relative; normalize output
        self.obj._make_zonename_absolute(entry_attrs, **options)
        return dn
class DNSZoneBase_find(LDAPSearch):
    __doc__ = _('Search for DNS zones (SOA records).')
    has_output_params = LDAPSearch.has_output_params + dnszone_output_params
    def args_options_2_params(self, *args, **options):
        # FIXME: Check that name_from_ip is valid. This is necessary because
        #        custom validation rules, including _validate_ipnet, are not
        #        used when doing a search. Once we have a parameter type for
        #        IP network objects, this will no longer be necessary, as the
        #        parameter type will handle the validation itself (see
        #        <https://fedorahosted.org/freeipa/ticket/2266>).
        if 'name_from_ip' in options:
            self.obj.params['name_from_ip'](unicode(options['name_from_ip']))
        return super(DNSZoneBase_find, self).args_options_2_params(*args, **options)
    def args_options_2_entry(self, *args, **options):
        # name_from_ip is virtual: translate it to idnsname before searching
        if 'name_from_ip' in options:
            if 'idnsname' not in options:
                options['idnsname'] = self.obj.params['idnsname'].get_default(**options)
            del options['name_from_ip']
        search_kw = super(DNSZoneBase_find, self).args_options_2_entry(*args,
                                                                       **options)
        name = search_kw.get('idnsname')
        if name:
            # match both absolute and relative (IPA < 4.0) stored names
            search_kw['idnsname'] = [name, name.relativize(DNSName.root)]
        return search_kw
    def pre_callback(self, ldap, filter, attrs_list, base_dn, scope, *args, **options):
        assert isinstance(base_dn, DN)
        # Check if DNS container exists must be here for find methods
        if not dns_container_exists(self.api.Backend.ldap2):
            raise errors.NotFound(reason=_('DNS is not configured'))
        # rebuild the filter so IDNA (ACE) forms are matched as well
        filter = _create_idn_filter(self, ldap, *args, **options)
        return (filter, base_dn, scope)
    def post_callback(self, ldap, entries, truncated, *args, **options):
        for entry_attrs in entries:
            self.obj._make_zonename_absolute(entry_attrs, **options)
        return truncated
# Common retrieve command for zone objects.
class DNSZoneBase_show(LDAPRetrieve):
    has_output_params = LDAPRetrieve.has_output_params + dnszone_output_params
    def pre_callback(self, ldap, dn, attrs_list, *keys, **options):
        assert isinstance(dn, DN)
        # hide entries of the other zone type
        if not _check_DN_objectclass(ldap, dn, self.obj.object_class):
            self.obj.handle_not_found(*keys)
        return dn
    def post_callback(self, ldap, dn, entry_attrs, *keys, **options):
        assert isinstance(dn, DN)
        self.obj._make_zonename_absolute(entry_attrs, **options)
        return dn
# Common disable command: marks the zone inactive.
class DNSZoneBase_disable(LDAPQuery):
    has_output = output.standard_value

    def execute(self, *keys, **options):
        ldap = self.obj.backend
        zone_dn = self.obj.get_dn(*keys, **options)
        entry = ldap.get_entry(zone_dn, ['idnszoneactive', 'objectclass'])
        if not _check_entry_objectclass(entry, self.obj.object_class):
            self.obj.handle_not_found(*keys)

        entry['idnszoneactive'] = ['FALSE']
        try:
            ldap.update_entry(entry)
        except errors.EmptyModlist:
            # zone was already disabled; nothing to change
            pass
        return dict(result=True, value=pkey_to_value(keys[-1], options))
# Common enable command: marks the zone active.
class DNSZoneBase_enable(LDAPQuery):
    has_output = output.standard_value

    def execute(self, *keys, **options):
        ldap = self.obj.backend
        zone_dn = self.obj.get_dn(*keys, **options)
        entry = ldap.get_entry(zone_dn, ['idnszoneactive', 'objectclass'])
        if not _check_entry_objectclass(entry, self.obj.object_class):
            self.obj.handle_not_found(*keys)

        entry['idnszoneactive'] = ['TRUE']
        try:
            ldap.update_entry(entry)
        except errors.EmptyModlist:
            # zone was already enabled; nothing to change
            pass
        return dict(result=True, value=pkey_to_value(keys[-1], options))
# Create the per-zone system permission and link it via managedBy.
class DNSZoneBase_add_permission(LDAPQuery):
    has_output = _output_permissions
    msg_summary = _('Added system permission "%(value)s"')
    def execute(self, *keys, **options):
        ldap = self.obj.backend
        dn = self.obj.get_dn(*keys, **options)
        try:
            entry_attrs = ldap.get_entry(dn, ['objectclass'])
        except errors.NotFound:
            self.obj.handle_not_found(*keys)
        else:
            if not _check_entry_objectclass(entry_attrs, self.obj.object_class):
                # entry belongs to the other zone type; treat as missing
                self.obj.handle_not_found(*keys)
        permission_name = self.obj.permission_name(keys[-1])
        # compatibility with older IPA versions which allows relative zonenames
        if keys[-1] != DNSName.root:  # special case root zone
            permission_name_rel = self.obj.permission_name(
                keys[-1].relativize(DNSName.root)
            )
            try:
                api.Object['permission'].get_dn_if_exists(permission_name_rel)
            except errors.NotFound:
                pass
            else:
                # permission exists without absolute domain name
                raise errors.DuplicateEntry(
                    message=_('permission "%(value)s" already exists') % {
                        'value': permission_name
                    }
                )
        permission = api.Command['permission_add_noaci'](permission_name,
                         ipapermissiontype=u'SYSTEM'
                     )['result']
        # make sure the zone entry carries the ipadnszone objectclass so the
        # managedby attribute is allowed by schema
        dnszone_ocs = entry_attrs.get('objectclass')
        if dnszone_ocs:
            for oc in dnszone_ocs:
                if oc.lower() == 'ipadnszone':
                    break
            else:
                dnszone_ocs.append('ipadnszone')
        entry_attrs['managedby'] = [permission['dn']]
        ldap.update_entry(entry_attrs)
        return dict(
            result=True,
            value=pkey_to_value(permission_name, options),
        )
# Unlink and delete the per-zone system permission.
class DNSZoneBase_remove_permission(LDAPQuery):
    has_output = _output_permissions
    msg_summary = _('Removed system permission "%(value)s"')
    def execute(self, *keys, **options):
        ldap = self.obj.backend
        dn = self.obj.get_dn(*keys, **options)
        try:
            entry = ldap.get_entry(dn, ['managedby', 'objectclass'])
        except errors.NotFound:
            self.obj.handle_not_found(*keys)
        else:
            if not _check_entry_objectclass(entry, self.obj.object_class):
                # entry belongs to the other zone type; treat as missing
                self.obj.handle_not_found(*keys)
        entry['managedby'] = None
        try:
            ldap.update_entry(entry)
        except errors.EmptyModlist:
            # managedBy attribute is clean, lets make sure there is also no
            # dangling DNS zone permission
            pass
        permission_name = self.obj.permission_name(keys[-1])
        self.obj._remove_permission(keys[-1])
        return dict(
            result=True,
            value=pkey_to_value(permission_name, options),
        )
@register()
class dnszone(DNSZoneBase):
"""
DNS Zone, container for resource records.
"""
object_name = _('DNS zone')
object_name_plural = _('DNS zones')
object_class = DNSZoneBase.object_class + ['idnsrecord', 'idnszone']
default_attributes = DNSZoneBase.default_attributes + [
'idnssoamname', 'idnssoarname', 'idnssoaserial', 'idnssoarefresh',
'idnssoaretry', 'idnssoaexpire', 'idnssoaminimum', 'idnsallowquery',
'idnsallowtransfer', 'idnssecinlinesigning',
] + _record_attributes
label = _('DNS Zones')
label_singular = _('DNS Zone')
takes_params = DNSZoneBase.takes_params + (
DNSNameParam('idnssoamname?',
cli_name='name_server',
label=_('Authoritative nameserver'),
doc=_('Authoritative nameserver domain name'),
default=None, # value will be added in precallback from ldap
),
DNSNameParam('idnssoarname',
_rname_validator,
cli_name='admin_email',
label=_('Administrator e-mail address'),
doc=_('Administrator e-mail address'),
default=DNSName(u'hostmaster'),
normalizer=normalize_zonemgr,
autofill=True,
),
Int('idnssoaserial',
cli_name='serial',
label=_('SOA serial'),
doc=_('SOA record serial number'),
minvalue=1,
maxvalue=4294967295,
default_from=_create_zone_serial,
autofill=True,
),
Int('idnssoarefresh',
cli_name='refresh',
label=_('SOA refresh'),
doc=_('SOA record refresh time'),
minvalue=0,
maxvalue=2147483647,
default=3600,
autofill=True,
),
Int('idnssoaretry',
cli_name='retry',
label=_('SOA retry'),
doc=_('SOA record retry time'),
minvalue=0,
maxvalue=2147483647,
default=900,
autofill=True,
),
Int('idnssoaexpire',
cli_name='expire',
label=_('SOA expire'),
doc=_('SOA record expire time'),
default=1209600,
minvalue=0,
maxvalue=2147483647,
autofill=True,
),
Int('idnssoaminimum',
cli_name='minimum',
label=_('SOA minimum'),
doc=_('How long should negative responses be cached'),
default=3600,
minvalue=0,
maxvalue=2147483647,
autofill=True,
),
Int('dnsttl?',
cli_name='ttl',
label=_('Time to live'),
doc=_('Time to live for records at zone apex'),
minvalue=0,
maxvalue=2147483647, # see RFC 2181
),
StrEnum('dnsclass?',
# Deprecated
cli_name='class',
flags=['no_option'],
values=_record_classes,
),
Str('idnsupdatepolicy?',
cli_name='update_policy',
label=_('BIND update policy'),
doc=_('BIND update policy'),
default_from=lambda idnsname: default_zone_update_policy(idnsname),
autofill=True
),
Bool('idnsallowdynupdate?',
cli_name='dynamic_update',
label=_('Dynamic update'),
doc=_('Allow dynamic updates.'),
attribute=True,
default=False,
autofill=True
),
Str('idnsallowquery?',
_validate_bind_aci,
normalizer=_normalize_bind_aci,
cli_name='allow_query',
label=_('Allow query'),
doc=_('Semicolon separated list of IP addresses or networks which are allowed to issue queries'),
default=u'any;', # anyone can issue queries by default
autofill=True,
),
Str('idnsallowtransfer?',
_validate_bind_aci,
normalizer=_normalize_bind_aci,
cli_name='allow_transfer',
label=_('Allow transfer'),
doc=_('Semicolon separated list of IP addresses or networks which are allowed to transfer the zone'),
default=u'none;', # no one can issue queries by default
autofill=True,
),
Bool('idnsallowsyncptr?',
cli_name='allow_sync_ptr',
label=_('Allow PTR sync'),
doc=_('Allow synchronization of forward (A, AAAA) and reverse (PTR) records in the zone'),
),
Bool('idnssecinlinesigning?',
cli_name='dnssec',
default=False,
label=_('Allow in-line DNSSEC signing'),
doc=_('Allow inline DNSSEC signing of records in the zone'),
),
Str('nsec3paramrecord?',
_validate_nsec3param_record,
cli_name='nsec3param_rec',
label=_('NSEC3PARAM record'),
doc=_('NSEC3PARAM record for zone in format: hash_algorithm flags iterations salt'),
pattern=r'^\d+ \d+ \d+ (([0-9a-fA-F]{2})+|-)$',
pattern_errmsg=(u'expected format: <0-255> <0-255> <0-65535> '
'even-length_hexadecimal_digits_or_hyphen'),
),
)
# Permissions will be apllied for forwardzones too
# Store permissions into api.env.basedn, dns container could not exists
managed_permissions = {
'System: Add DNS Entries': {
'non_object': True,
'ipapermright': {'add'},
'ipapermlocation': api.env.basedn,
'ipapermtarget': DN('idnsname=*', 'cn=dns', api.env.basedn),
'replaces': [
'(target = "ldap:///idnsname=*,cn=dns,$SUFFIX")(version 3.0;acl "permission:add dns entries";allow (add) groupdn = "ldap:///cn=add dns entries,cn=permissions,cn=pbac,$SUFFIX";)',
],
'default_privileges': {'DNS Administrators', 'DNS Servers'},
},
'System: Read DNS Entries': {
'non_object': True,
'ipapermright': {'read', 'search', 'compare'},
'ipapermlocation': api.env.basedn,
'ipapermtarget': DN('idnsname=*', 'cn=dns', api.env.basedn),
'ipapermdefaultattr': {
'objectclass',
'a6record', 'aaaarecord', 'afsdbrecord', 'aplrecord', 'arecord',
'certrecord', 'cn', 'cnamerecord', 'dhcidrecord', 'dlvrecord',
'dnamerecord', 'dnsclass', 'dnsttl', 'dsrecord',
'hinforecord', 'hiprecord', 'idnsallowdynupdate',
'idnsallowquery', 'idnsallowsyncptr', 'idnsallowtransfer',
'idnsforwarders', 'idnsforwardpolicy', 'idnsname',
'idnssecinlinesigning', 'idnssoaexpire', 'idnssoaminimum',
'idnssoamname', 'idnssoarefresh', 'idnssoaretry',
'idnssoarname', 'idnssoaserial', 'idnsupdatepolicy',
'idnszoneactive', 'ipseckeyrecord','keyrecord', 'kxrecord',
'locrecord', 'managedby', 'mdrecord', 'minforecord',
'mxrecord', 'naptrrecord', 'nsecrecord', 'nsec3paramrecord',
'nsrecord', 'nxtrecord', 'ptrrecord', 'rprecord', 'rrsigrecord',
'sigrecord', 'spfrecord', 'srvrecord', 'sshfprecord',
'tlsarecord', 'txtrecord', 'unknownrecord',
},
'replaces_system': ['Read DNS Entries'],
'default_privileges': {'DNS Administrators', 'DNS Servers'},
},
'System: Remove DNS Entries': {
'non_object': True,
'ipapermright': {'delete'},
'ipapermlocation': api.env.basedn,
'ipapermtarget': DN('idnsname=*', 'cn=dns', api.env.basedn),
'replaces': [
'(target = "ldap:///idnsname=*,cn=dns,$SUFFIX")(version 3.0;acl "permission:remove dns entries";allow (delete) groupdn = "ldap:///cn=remove dns entries,cn=permissions,cn=pbac,$SUFFIX";)',
],
'default_privileges': {'DNS Administrators', 'DNS Servers'},
},
'System: Update DNS Entries': {
'non_object': True,
'ipapermright': {'write'},
'ipapermlocation': api.env.basedn,
'ipapermtarget': DN('idnsname=*', 'cn=dns', api.env.basedn),
'ipapermdefaultattr': {
'a6record', 'aaaarecord', 'afsdbrecord', 'aplrecord', 'arecord',
'certrecord', 'cn', 'cnamerecord', 'dhcidrecord', 'dlvrecord',
'dnamerecord', 'dnsclass', 'dnsttl', 'dsrecord',
'hinforecord', 'hiprecord', 'idnsallowdynupdate',
'idnsallowquery', 'idnsallowsyncptr', 'idnsallowtransfer',
'idnsforwarders', 'idnsforwardpolicy', 'idnsname',
'idnssecinlinesigning', 'idnssoaexpire', 'idnssoaminimum',
'idnssoamname', 'idnssoarefresh', 'idnssoaretry',
'idnssoarname', 'idnssoaserial', 'idnsupdatepolicy',
'idnszoneactive', 'ipseckeyrecord','keyrecord', 'kxrecord',
'locrecord', 'managedby', 'mdrecord', 'minforecord',
'mxrecord', 'naptrrecord', 'nsecrecord', 'nsec3paramrecord',
'nsrecord', 'nxtrecord', 'ptrrecord', 'rprecord', 'rrsigrecord',
'sigrecord', 'spfrecord', 'srvrecord', 'sshfprecord',
'tlsarecord', 'txtrecord', 'unknownrecord',
},
'replaces': [
'(targetattr = "idnsname || cn || idnsallowdynupdate || dnsttl || dnsclass || arecord || aaaarecord || a6record || nsrecord || cnamerecord || ptrrecord || srvrecord || txtrecord || mxrecord || mdrecord || hinforecord || minforecord || afsdbrecord || sigrecord || keyrecord || locrecord || nxtrecord || naptrrecord || kxrecord || certrecord || dnamerecord || dsrecord || sshfprecord || rrsigrecord || nsecrecord || idnsname || idnszoneactive || idnssoamname || idnssoarname || idnssoaserial || idnssoarefresh || idnssoaretry || idnssoaexpire || idnssoaminimum || idnsupdatepolicy")(target = "ldap:///idnsname=*,cn=dns,$SUFFIX")(version 3.0;acl "permission:update dns entries";allow (write) groupdn = "ldap:///cn=update dns entries,cn=permissions,cn=pbac,$SUFFIX";)',
'(targetattr = "idnsname || cn || idnsallowdynupdate || dnsttl || dnsclass || arecord || aaaarecord || a6record || nsrecord || cnamerecord || ptrrecord || srvrecord || txtrecord || mxrecord || mdrecord || hinforecord || minforecord || afsdbrecord || sigrecord || keyrecord || locrecord || nxtrecord || naptrrecord || kxrecord || certrecord || dnamerecord || dsrecord || sshfprecord || rrsigrecord || nsecrecord || idnsname || idnszoneactive || idnssoamname || idnssoarname || idnssoaserial || idnssoarefresh || idnssoaretry || idnssoaexpire || idnssoaminimum || idnsupdatepolicy || idnsallowquery || idnsallowtransfer || idnsallowsyncptr || idnsforwardpolicy || idnsforwarders")(target = "ldap:///idnsname=*,cn=dns,$SUFFIX")(version 3.0;acl "permission:update dns entries";allow (write) groupdn = "ldap:///cn=update dns entries,cn=permissions,cn=pbac,$SUFFIX";)',
'(targetattr = "idnsname || cn || idnsallowdynupdate || dnsttl || dnsclass || arecord || aaaarecord || a6record || nsrecord || cnamerecord || ptrrecord || srvrecord || txtrecord || mxrecord || mdrecord || hinforecord || minforecord || afsdbrecord || sigrecord || keyrecord || locrecord || nxtrecord || naptrrecord || kxrecord || certrecord || dnamerecord || dsrecord || sshfprecord || rrsigrecord || nsecrecord || idnsname || idnszoneactive || idnssoamname || idnssoarname || idnssoaserial || idnssoarefresh || idnssoaretry || idnssoaexpire || idnssoaminimum || idnsupdatepolicy || idnsallowquery || idnsallowtransfer || idnsallowsyncptr || idnsforwardpolicy || idnsforwarders || managedby")(target = "ldap:///idnsname=*,cn=dns,$SUFFIX")(version 3.0;acl "permission:update dns entries";allow (write) groupdn = "ldap:///cn=update dns entries,cn=permissions,cn=pbac,$SUFFIX";)',
],
'default_privileges': {'DNS Administrators', 'DNS Servers'},
},
'System: Read DNSSEC metadata': {
'non_object': True,
'ipapermright': {'read', 'search', 'compare'},
'ipapermlocation': api.env.basedn,
'ipapermtarget': DN('cn=dns', api.env.basedn),
'ipapermtargetfilter': ['(objectclass=idnsSecKey)'],
'ipapermdefaultattr': {
'idnsSecAlgorithm', 'idnsSecKeyCreated', 'idnsSecKeyPublish',
'idnsSecKeyActivate', 'idnsSecKeyInactive', 'idnsSecKeyDelete',
'idnsSecKeyZone', 'idnsSecKeyRevoke', 'idnsSecKeySep',
'idnsSecKeyRef', 'cn', 'objectclass',
},
'default_privileges': {'DNS Administrators'},
},
'System: Manage DNSSEC metadata': {
'non_object': True,
'ipapermright': {'all'},
'ipapermlocation': api.env.basedn,
'ipapermtarget': DN('cn=dns', api.env.basedn),
'ipapermtargetfilter': ['(objectclass=idnsSecKey)'],
'ipapermdefaultattr': {
'idnsSecAlgorithm', 'idnsSecKeyCreated', 'idnsSecKeyPublish',
'idnsSecKeyActivate', 'idnsSecKeyInactive', 'idnsSecKeyDelete',
'idnsSecKeyZone', 'idnsSecKeyRevoke', 'idnsSecKeySep',
'idnsSecKeyRef', 'cn', 'objectclass',
},
'default_privileges': {'DNS Servers'},
},
'System: Manage DNSSEC keys': {
'non_object': True,
'ipapermright': {'all'},
'ipapermlocation': api.env.basedn,
'ipapermtarget': DN('cn=keys', 'cn=sec', 'cn=dns', api.env.basedn),
'ipapermdefaultattr': {
'ipaPublicKey', 'ipaPrivateKey', 'ipaSecretKey',
'ipaWrappingMech','ipaWrappingKey',
'ipaSecretKeyRef', 'ipk11Private', 'ipk11Modifiable', 'ipk11Label',
'ipk11Copyable', 'ipk11Destroyable', 'ipk11Trusted',
'ipk11CheckValue', 'ipk11StartDate', 'ipk11EndDate',
'ipk11UniqueId', 'ipk11PublicKeyInfo', 'ipk11Distrusted',
'ipk11Subject', 'ipk11Id', 'ipk11Local', 'ipk11KeyType',
'ipk11Derive', 'ipk11KeyGenMechanism', 'ipk11AllowedMechanisms',
'ipk11Encrypt', 'ipk11Verify', 'ipk11VerifyRecover', 'ipk11Wrap',
'ipk11WrapTemplate', 'ipk11Sensitive', 'ipk11Decrypt',
'ipk11Sign', 'ipk11SignRecover', 'ipk11Unwrap',
'ipk11Extractable', 'ipk11AlwaysSensitive',
'ipk11NeverExtractable', 'ipk11WrapWithTrusted',
'ipk11UnwrapTemplate', 'ipk11AlwaysAuthenticate',
'objectclass',
},
'default_privileges': {'DNS Servers'},
},
}
def _rr_zone_postprocess(self, record, **options):
#Decode IDN ACE form to Unicode, raw records are passed directly from LDAP
if options.get('raw', False):
return
_records_idn_postprocess(record, **options)
def _warning_forwarding(self, result, **options):
if ('idnsforwarders' in result['result']):
messages.add_message(options.get('version', VERSION_WITHOUT_CAPABILITIES),
result, messages.ForwardersWarning())
def _warning_dnssec_experimental(self, result, *keys, **options):
# add warning when user use option --dnssec
if 'idnssecinlinesigning' in options:
if options['idnssecinlinesigning'] is True:
messages.add_message(options['version'], result,
messages.DNSSECWarning(
additional_info=_("Visit 'http://www.freeipa.org/page/Releases/4.1.0#DNSSEC_Support'.")
))
else:
messages.add_message(options['version'], result,
messages.DNSSECWarning(
additional_info=_("If you encounter any problems please "
"report them and restart 'named' service on affected IPA "
"server.")
))
def _warning_name_server_option(self, result, context, **options):
if getattr(context, 'show_warning_nameserver_option', False):
messages.add_message(
options['version'],
result, messages.OptionSemanticChangedWarning(
label=_(u"setting Authoritative nameserver"),
current_behavior=_(u"It is used only for setting the "
u"SOA MNAME attribute."),
hint=_(u"NS record(s) can be edited in zone apex - '@'. ")
)
)
def _warning_fw_zone_is_not_effective(self, result, *keys, **options):
    """Warn when an operation on this zone leaves one of its child
    forward zones without a working delegation (i.e. not effective)."""
    affected, _truncated = _find_subtree_forward_zones_ldap(
        keys[-1], child_zones_only=True)
    if not affected:
        return
    for fwzone in affected:
        _add_warning_fw_zone_is_not_effective(result, fwzone,
                                              options['version'])
@register()
class dnszone_add(DNSZoneBase_add):
    __doc__ = _('Create new DNS zone (SOA record).')

    takes_options = DNSZoneBase_add.takes_options + (
        Flag('force',
             label=_('Force'),
             doc=_('Force DNS zone creation even if nameserver is not resolvable.'),
        ),

        # Deprecated
        # ip-address option is not used anymore, we have to keep it
        # due to compatibility with clients older than 4.1
        Str('ip_address?',
            flags=['no_option', ]
        ),
    )

    def _warning_deprecated_option(self, result, **options):
        # Tell pre-4.1 clients that their ip-address value was ignored.
        if 'ip_address' in options:
            messages.add_message(
                options['version'],
                result,
                messages.OptionDeprecatedWarning(
                    option='ip-address',
                    additional_info=u"Value will be ignored.")
            )

    def pre_callback(self, ldap, dn, entry_attrs, attrs_list, *keys, **options):
        """Fill in the SOA MNAME and the zone's NS records before the new
        zone entry is written to LDAP.

        If the user supplied --name-server it is validated (resolvable
        unless --force; absolute for reverse zones); otherwise a sensible
        default is chosen from the IPA DNS masters.
        """
        assert isinstance(dn, DN)

        dn = super(dnszone_add, self).pre_callback(
            ldap, dn, entry_attrs, attrs_list, *keys, **options)

        nameservers = [normalize_zone(x) for x in api.Object.dnsrecord.get_dns_masters()]
        server = normalize_zone(api.env.host)
        zone = keys[-1]

        if entry_attrs.get('idnssoamname'):
            if zone.is_reverse() and not entry_attrs['idnssoamname'].is_absolute():
                raise errors.ValidationError(
                    name='name-server',
                    error=_("Nameserver for reverse zone cannot be a relative DNS name"))

            # verify if user specified server is resolvable
            if not options['force']:
                check_ns_rec_resolvable(keys[0], entry_attrs['idnssoamname'])
            # show warning about --name-server option
            context.show_warning_nameserver_option = True
        else:
            # user didn't specify SOA mname
            if server in nameservers:
                # current ipa server is authoritative nameserver in SOA record
                entry_attrs['idnssoamname'] = [server]
            else:
                # a first DNS capable server is authoritative nameserver in SOA record
                entry_attrs['idnssoamname'] = [nameservers[0]]

        # all ipa DNS servers should be in NS zone record (as absolute domain name)
        entry_attrs['nsrecord'] = nameservers

        return dn

    def execute(self, *keys, **options):
        """Create the zone, then attach every applicable client warning."""
        result = super(dnszone_add, self).execute(*keys, **options)
        self._warning_deprecated_option(result, **options)
        self.obj._warning_forwarding(result, **options)
        self.obj._warning_dnssec_experimental(result, *keys, **options)
        self.obj._warning_name_server_option(result, context, **options)
        self.obj._warning_fw_zone_is_not_effective(result, *keys, **options)
        return result

    def post_callback(self, ldap, dn, entry_attrs, *keys, **options):
        """Register the new domain in realmdomains (best effort) and decode
        IDN names in the returned entry."""
        assert isinstance(dn, DN)

        # Add entry to realmdomains
        # except for our own domain, forward zones, reverse zones and root zone
        zone = keys[0]

        if (zone != DNSName(api.env.domain).make_absolute() and
                not options.get('idnsforwarders') and
                not zone.is_reverse() and
                zone != DNSName.root):
            try:
                api.Command['realmdomains_mod'](add_domain=unicode(zone),
                                                force=True)
            except (errors.EmptyModlist, errors.ValidationError):
                # best effort: the domain may already be listed or invalid
                pass

        self.obj._rr_zone_postprocess(entry_attrs, **options)
        return dn
@register()
class dnszone_del(DNSZoneBase_del):
    __doc__ = _('Delete DNS zone (SOA record).')

    msg_summary = _('Deleted DNS zone "%(value)s"')

    def execute(self, *keys, **options):
        """Delete the zone(s) and warn about forward zones that stop
        being effective as a result."""
        result = super(dnszone_del, self).execute(*keys, **options)
        # the last key holds the list of zones being deleted
        for zone_key in keys[-1]:
            self.obj._warning_fw_zone_is_not_effective(result, zone_key,
                                                       **options)
        return result

    def post_callback(self, ldap, dn, *keys, **options):
        """Drop the deleted zone from realmdomains (best effort)."""
        super(dnszone_del, self).post_callback(ldap, dn, *keys, **options)

        # our own domain, reverse zones and the root zone are never kept
        # in realmdomains, so there is nothing to remove for them
        zone = keys[0].make_absolute()
        is_own_domain = zone == DNSName(api.env.domain).make_absolute()
        if not (is_own_domain or zone.is_reverse() or zone == DNSName.root):
            try:
                api.Command['realmdomains_mod'](del_domain=unicode(zone),
                                                force=True)
            except (errors.AttrValueNotFound, errors.ValidationError):
                pass

        return True
@register()
class dnszone_mod(DNSZoneBase_mod):
    __doc__ = _('Modify DNS zone (SOA record).')

    takes_options = DNSZoneBase_mod.takes_options + (
        Flag('force',
             label=_('Force'),
             doc=_('Force nameserver change even if nameserver not in DNS'),
        ),
    )

    def pre_callback(self, ldap, dn, entry_attrs, attrs_list, *keys, **options):
        """Verify the entry is a master zone and validate an optional
        SOA MNAME change."""
        if not _check_DN_objectclass(ldap, dn, self.obj.object_class):
            self.obj.handle_not_found(*keys)

        if 'idnssoamname' not in entry_attrs:
            return dn

        nameserver = entry_attrs['idnssoamname']
        if not nameserver:
            # LDAP requires this attribute; an empty value is not allowed
            raise errors.ValidationError(
                name='name_server',
                error=_(u"is required"))

        if not nameserver.is_empty() and not options['force']:
            check_ns_rec_resolvable(keys[0], nameserver)
        context.show_warning_nameserver_option = True
        return dn

    def execute(self, *keys, **options):
        """Modify the zone, then attach applicable client warnings."""
        result = super(dnszone_mod, self).execute(*keys, **options)
        self.obj._warning_forwarding(result, **options)
        self.obj._warning_dnssec_experimental(result, *keys, **options)
        self.obj._warning_name_server_option(result, context, **options)
        return result

    def post_callback(self, ldap, dn, entry_attrs, *keys, **options):
        """Decode IDN names in the returned entry."""
        parent_cb = super(dnszone_mod, self).post_callback
        dn = parent_cb(ldap, dn, entry_attrs, *keys, **options)
        self.obj._rr_zone_postprocess(entry_attrs, **options)
        return dn
@register()
class dnszone_find(DNSZoneBase_find):
    __doc__ = _('Search for DNS zones (SOA records).')

    takes_options = DNSZoneBase_find.takes_options + (
        Flag('forward_only',
             label=_('Forward zones only'),
             cli_name='forward_only',
             doc=_('Search for forward zones only'),
        ),
    )

    def pre_callback(self, ldap, filter, attrs_list, base_dn, scope, *args, **options):
        """Optionally narrow the search filter to non-reverse zones."""
        assert isinstance(base_dn, DN)
        filter, _base, _dn = super(dnszone_find, self).pre_callback(
            ldap, filter, attrs_list, base_dn, scope, *args, **options)

        if options.get('forward_only', False):
            # exclude every name falling under the known reverse-DNS roots
            search_kw = {
                'idnsname': [revzone.ToASCII() for revzone in
                             REVERSE_DNS_ZONES.keys()],
            }
            exclude_rev = ldap.make_filter(search_kw,
                                           rules=ldap.MATCH_NONE,
                                           exact=False,
                                           trailing_wildcard=False)
            filter = ldap.combine_filters((exclude_rev, filter),
                                          rules=ldap.MATCH_ALL)

        return (filter, base_dn, scope)

    def post_callback(self, ldap, entries, truncated, *args, **options):
        """Decode IDN names in every returned entry."""
        truncated = super(dnszone_find, self).post_callback(
            ldap, entries, truncated, *args, **options)
        for entry_attrs in entries:
            self.obj._rr_zone_postprocess(entry_attrs, **options)
        return truncated
@register()
class dnszone_show(DNSZoneBase_show):
    __doc__ = _('Display information about a DNS zone (SOA record).')

    def execute(self, *keys, **options):
        """Show the zone and warn when it carries forwarders."""
        result = super(dnszone_show, self).execute(*keys, **options)
        self.obj._warning_forwarding(result, **options)
        return result

    def post_callback(self, ldap, dn, entry_attrs, *keys, **options):
        """Decode IDN names in the returned entry."""
        parent_cb = super(dnszone_show, self).post_callback
        dn = parent_cb(ldap, dn, entry_attrs, *keys, **options)
        self.obj._rr_zone_postprocess(entry_attrs, **options)
        return dn
@register()
class dnszone_disable(DNSZoneBase_disable):
    __doc__ = _('Disable DNS Zone.')

    msg_summary = _('Disabled DNS zone "%(value)s"')

    def execute(self, *keys, **options):
        """Disable the zone and warn about child forward zones that stop
        being effective as a result."""
        result = super(dnszone_disable, self).execute(*keys, **options)
        self.obj._warning_fw_zone_is_not_effective(result, *keys, **options)
        return result
@register()
class dnszone_enable(DNSZoneBase_enable):
    __doc__ = _('Enable DNS Zone.')

    msg_summary = _('Enabled DNS zone "%(value)s"')

    def execute(self, *keys, **options):
        """Enable the zone; warn about child forward zones since enabling
        the authoritative zone changes their effectiveness."""
        result = super(dnszone_enable, self).execute(*keys, **options)
        self.obj._warning_fw_zone_is_not_effective(result, *keys, **options)
        return result
@register()
class dnszone_add_permission(DNSZoneBase_add_permission):
    # All behavior is inherited from DNSZoneBase_add_permission;
    # this subclass only registers the command for master zones.
    __doc__ = _('Add a permission for per-zone access delegation.')
@register()
class dnszone_remove_permission(DNSZoneBase_remove_permission):
    # All behavior is inherited from DNSZoneBase_remove_permission;
    # this subclass only registers the command for master zones.
    __doc__ = _('Remove a permission for per-zone access delegation.')
@register()
class dnsrecord(LDAPObject):
    """
    DNS record.

    One LDAP entry per owner name inside a master zone; the values of the
    individual RR types (arecord, nsrecord, ptrrecord, ...) are stored as
    separate multi-valued attributes on that entry.
    """
    parent_object = 'dnszone'
    container_dn = api.env.container_dns
    object_name = _('DNS resource record')
    object_name_plural = _('DNS resource records')
    object_class = ['top', 'idnsrecord']
    permission_filter_objectclasses = ['idnsrecord']
    default_attributes = ['idnsname'] + _record_attributes
    rdn_is_primary_key = True

    label = _('DNS Resource Records')
    label_singular = _('DNS Resource Record')

    takes_params = (
        DNSNameParam('idnsname',
            cli_name='name',
            label=_('Record name'),
            doc=_('Record name'),
            primary_key=True,
        ),
        Int('dnsttl?',
            cli_name='ttl',
            label=_('Time to live'),
            doc=_('Time to live'),
        ),
        StrEnum('dnsclass?',
            # Deprecated
            cli_name='class',
            flags=['no_option'],
            values=_record_classes,
        ),
    ) + _dns_record_options

    # shared flag used by the dnsrecord_* commands to request parsed output
    structured_flag = Flag('structured',
                           label=_('Structured'),
                           doc=_('Parse all raw DNS records and return them in a structured way'),
                           )

    def _dsrecord_pre_callback(self, ldap, dn, entry_attrs, *keys, **options):
        # DS records delegate a child zone and may never sit in the apex.
        assert isinstance(dn, DN)
        dsrecords = entry_attrs.get('dsrecord')
        if dsrecords and self.is_pkey_zone_record(*keys):
            raise errors.ValidationError(
                name='dsrecord',
                error=unicode(_('DS record must not be in zone apex (RFC 4035 section 2.4)')))

    def _nsrecord_pre_callback(self, ldap, dn, entry_attrs, *keys, **options):
        # Unless --force was given, refuse NS records whose target host
        # is not resolvable.
        assert isinstance(dn, DN)
        nsrecords = entry_attrs.get('nsrecord')
        if options.get('force', False) or nsrecords is None:
            return
        for nsrecord in nsrecords:
            check_ns_rec_resolvable(keys[0], DNSName(nsrecord))

    def _idnsname_pre_callback(self, ldap, dn, entry_attrs, *keys, **options):
        # Normalize an absolute record name relative to its zone, reject
        # out-of-zone data, and forbid wildcard owners for selected types.
        assert isinstance(dn, DN)
        if keys[-1].is_absolute():
            if keys[-1].is_subdomain(keys[-2]):
                entry_attrs['idnsname'] = [keys[-1].relativize(keys[-2])]
            elif not self.is_pkey_zone_record(*keys):
                raise errors.ValidationError(name='idnsname',
                    error=unicode(_('out-of-zone data: record name must '
                                    'be a subdomain of the zone or a '
                                    'relative name')))

        # disallowed wildcard (RFC 4592 section 4)
        no_wildcard_rtypes = ['DNAME', 'DS', 'NS']
        if (keys[-1].is_wild() and
                any(entry_attrs.get('%srecord' % r.lower())
                    for r in no_wildcard_rtypes)):
            raise errors.ValidationError(
                name='idnsname',
                error=(_('owner of %(types)s records '
                         'should not be a wildcard domain name (RFC 4592 section 4)') %
                       {'types': ', '.join(no_wildcard_rtypes)}
                )
            )

    def _ptrrecord_pre_callback(self, ldap, dn, entry_attrs, *keys, **options):
        # Check that the PTR owner name carries exactly the number of
        # IP-address components its reverse zone requires.
        assert isinstance(dn, DN)
        ptrrecords = entry_attrs.get('ptrrecord')
        if ptrrecords is None:
            return

        zone = keys[-2]
        if self.is_pkey_zone_record(*keys):
            addr = _dns_zone_record
        else:
            addr = keys[-1]

        # find the reverse root the zone belongs to and strip it off
        zone_len = 0
        for valid_zone in REVERSE_DNS_ZONES:
            if zone.is_subdomain(valid_zone):
                zone = zone.relativize(valid_zone)
                zone_name = valid_zone
                zone_len = REVERSE_DNS_ZONES[valid_zone]

        if not zone_len:
            allowed_zones = ', '.join([unicode(revzone) for revzone in
                                       REVERSE_DNS_ZONES.keys()])
            raise errors.ValidationError(name='ptrrecord',
                    error=unicode(_('Reverse zone for PTR record should be a sub-zone of one the following fully qualified domains: %s') % allowed_zones))

        addr_len = len(addr.labels)

        # Classless zones (0/25.0.0.10.in-addr.arpa.) -> skip check
        # zone has to be checked without reverse domain suffix (in-addr.arpa.)
        for sign in ('/', '-'):
            for name in (zone, addr):
                for label in name.labels:
                    if sign in label:
                        return
        ip_addr_comp_count = addr_len + len(zone.labels)
        if ip_addr_comp_count != zone_len:
            raise errors.ValidationError(name='ptrrecord',
                error=unicode(_('Reverse zone %(name)s requires exactly '
                                '%(count)d IP address components, '
                                '%(user_count)d given')
                              % dict(name=zone_name,
                                     count=zone_len,
                                     user_count=ip_addr_comp_count)))

    def run_precallback_validators(self, dn, entry_attrs, *keys, **options):
        """Dispatch the per-attribute ``_<attr>_pre_callback`` validators
        for every attribute present in entry_attrs."""
        assert isinstance(dn, DN)
        ldap = self.api.Backend.ldap2

        for rtype in entry_attrs.keys():
            rtype_cb = getattr(self, '_%s_pre_callback' % rtype, None)
            if rtype_cb:
                rtype_cb(ldap, dn, entry_attrs, *keys, **options)

    def is_pkey_zone_record(self, *keys):
        """Return True when the record name addresses the zone apex
        (empty name or a name equal to the zone name itself)."""
        assert isinstance(keys[-1], DNSName)
        assert isinstance(keys[-2], DNSName)
        idnsname = keys[-1]
        zonename = keys[-2]
        if idnsname.is_empty() or idnsname == zonename:
            return True
        return False

    def check_zone(self, zone, **options):
        """
        Check if zone exists and if is master zone

        :returns: the DN of the zone entry
        :raises errors.ValidationError: when the entry is not a master
            (idnszone) zone
        """
        parent_object = self.api.Object[self.parent_object]
        dn = parent_object.get_dn(zone, **options)
        ldap = self.api.Backend.ldap2
        try:
            entry = ldap.get_entry(dn, ['objectclass'])
        except errors.NotFound:
            parent_object.handle_not_found(zone)
        else:
            # only master zones can contain records
            if 'idnszone' not in [x.lower() for x in entry.get('objectclass', [])]:
                raise errors.ValidationError(
                    name='dnszoneidnsname',
                    error=_(u'only master zones can contain records')
                )
        return dn

    def get_dn(self, *keys, **options):
        """Build the record DN.

        The zone apex maps onto the zone entry itself; any other name
        becomes a child entry of the zone, keyed by its relative name.
        """
        if not dns_container_exists(self.api.Backend.ldap2):
            raise errors.NotFound(reason=_('DNS is not configured'))

        dn = self.check_zone(keys[-2], **options)

        if self.is_pkey_zone_record(*keys):
            return dn

        # Make RR name relative if possible
        relative_name = keys[-1].relativize(keys[-2]).ToASCII()
        keys = keys[:-1] + (relative_name,)
        return super(dnsrecord, self).get_dn(*keys, **options)

    def attr_to_cli(self, attr):
        """Map an LDAP record attribute name (e.g. 'arecord') to its CLI
        record type name (e.g. 'A')."""
        try:
            cliname = attr[:-len('record')].upper()
        except IndexError:
            # NOTE(review): str slicing never raises IndexError, so this
            # fallback looks dead -- confirm before relying on it
            cliname = attr
        return cliname

    def get_dns_masters(self):
        """Return hostnames of all IPA servers that run DNS, read from the
        cn=masters,cn=ipa,cn=etc configuration subtree."""
        ldap = self.api.Backend.ldap2
        base_dn = DN(('cn', 'masters'), ('cn', 'ipa'), ('cn', 'etc'), self.api.env.basedn)
        ldap_filter = '(&(objectClass=ipaConfigObject)(cn=DNS))'
        dns_masters = []

        try:
            entries = ldap.find_entries(filter=ldap_filter, base_dn=base_dn)[0]

            for entry in entries:
                try:
                    # the hostname is the second RDN of the config entry DN
                    master = entry.dn[1]['cn']
                    dns_masters.append(master)
                except (IndexError, KeyError):
                    pass
        except errors.NotFound:
            return []

        return dns_masters

    def has_cli_options(self, options, no_option_msg, allow_empty_attrs=False):
        """Raise errors.OptionError unless at least one record-specific
        option (or a generic setattr/addattr/delattr/rename) was given."""
        if any(k in options for k in ('setattr', 'addattr', 'delattr', 'rename')):
            return

        has_options = False
        for attr in options.keys():
            if attr in self.params and not self.params[attr].primary_key:
                if options[attr] or allow_empty_attrs:
                    has_options = True
                    break

        if not has_options:
            raise errors.OptionError(no_option_msg)

    def get_record_entry_attrs(self, entry_attrs):
        """Return a copy of entry_attrs reduced to known, non-primary-key
        record parameters."""
        entry_attrs = entry_attrs.copy()
        for attr in entry_attrs.keys():
            if attr not in self.params or self.params[attr].primary_key:
                del entry_attrs[attr]
        return entry_attrs

    def postprocess_record(self, record, **options):
        """Convert raw record values into structured 'dnsrecords' items
        when --structured was requested; otherwise only decode IDN names
        (unless --raw)."""
        if options.get('structured', False):
            for attr in record.keys():
                # attributes in LDAPEntry may not be normalized
                attr = attr.lower()
                try:
                    param = self.params[attr]
                except KeyError:
                    continue

                if not isinstance(param, DNSRecord):
                    continue
                parts_params = param.get_parts()

                for dnsvalue in record[attr]:
                    dnsentry = {
                        u'dnstype' : unicode(param.rrtype),
                        u'dnsdata' : dnsvalue
                    }
                    values = param._get_part_values(dnsvalue)
                    if values is None:
                        continue
                    for val_id, val in enumerate(values):
                        if val is not None:
                            # decode IDN
                            if isinstance(parts_params[val_id], DNSNameParam):
                                dnsentry[parts_params[val_id].name] = \
                                    _dns_name_to_string(val,
                                                        options.get('raw', False))
                            else:
                                dnsentry[parts_params[val_id].name] = val
                    record.setdefault('dnsrecords', []).append(dnsentry)
                del record[attr]

        elif not options.get('raw', False):
            # Decode IDN ACE form to Unicode, raw records are passed directly from LDAP
            _records_idn_postprocess(record, **options)

    def get_rrparam_from_part(self, part_name):
        """
        Get an instance of DNSRecord parameter that has part_name as its part.
        If such parameter is not found, None is returned

        :param part_name: Part parameter name
        """
        try:
            param = self.params[part_name]

            if not any(flag in param.flags for flag in
                       ('dnsrecord_part', 'dnsrecord_extra')):
                return None

            # All DNS record part or extra parameters contain a name of its
            # parent RR parameter in its hint attribute
            rrparam = self.params[param.hint]
        except (KeyError, AttributeError):
            return None

        return rrparam

    def iterate_rrparams_by_parts(self, kw, skip_extra=False):
        """
        Iterates through all DNSRecord instances that has at least one of its
        parts or extra options in given dictionary. It returns the DNSRecord
        instance only for the first occurrence of part/extra option.

        :param kw: Dictionary with DNS record parts or extra options
        :param skip_extra: Skip DNS record extra options, yield only DNS
            records with a real record part
        """
        processed = []
        for opt in kw:
            rrparam = self.get_rrparam_from_part(opt)
            if rrparam is None:
                continue

            if skip_extra and 'dnsrecord_extra' in self.params[opt].flags:
                continue

            if rrparam.name not in processed:
                processed.append(rrparam.name)
                yield rrparam

    def updated_rrattrs(self, old_entry, entry_attrs):
        """Returns updated RR attributes

        Merges RR-type attributes of the (optional) old LDAP entry with the
        new values; new values win on conflict.
        """
        rrattrs = {}
        if old_entry is not None:
            old_rrattrs = dict((key, value) for key, value in old_entry.iteritems()
                               if key in self.params and
                               isinstance(self.params[key], DNSRecord))
            rrattrs.update(old_rrattrs)
        new_rrattrs = dict((key, value) for key, value in entry_attrs.iteritems()
                           if key in self.params and
                           isinstance(self.params[key], DNSRecord))
        rrattrs.update(new_rrattrs)
        return rrattrs

    def check_record_type_collisions(self, keys, rrattrs):
        """Reject record-type combinations forbidden by the DNS RFCs:
        multiple CNAMEs, CNAME next to anything else, multiple DNAMEs, and
        NS coexisting with most other types outside the zone apex."""
        # Test that only allowed combination of record types was created

        # CNAME record validation
        cnames = rrattrs.get('cnamerecord')
        if cnames is not None:
            if len(cnames) > 1:
                raise errors.ValidationError(name='cnamerecord',
                    error=_('only one CNAME record is allowed per name '
                            '(RFC 2136, section 1.1.5)'))
            if any(rrvalue is not None
                   and rrattr != 'cnamerecord'
                   for rrattr, rrvalue in rrattrs.iteritems()):
                raise errors.ValidationError(name='cnamerecord',
                    error=_('CNAME record is not allowed to coexist '
                            'with any other record (RFC 1034, section 3.6.2)'))

        # DNAME record validation
        dnames = rrattrs.get('dnamerecord')
        if dnames is not None:
            if len(dnames) > 1:
                raise errors.ValidationError(name='dnamerecord',
                    error=_('only one DNAME record is allowed per name '
                            '(RFC 6672, section 2.4)'))
            # DNAME must not coexist with CNAME, but this is already checked earlier

        # NS record validation
        # NS record can coexist only with A, AAAA, DS, and other NS records (except zone apex)
        # RFC 2181 section 6.1,
        allowed_records = ['AAAA', 'A', 'DS', 'NS']
        nsrecords = rrattrs.get('nsrecord')
        if nsrecords and not self.is_pkey_zone_record(*keys):
            for r_type in _record_types:
                if (r_type not in allowed_records
                        and rrattrs.get('%srecord' % r_type.lower())):
                    raise errors.ValidationError(
                        name='nsrecord',
                        error=_('NS record is not allowed to coexist with an '
                                '%(type)s record except when located in a '
                                'zone root record (RFC 2181, section 6.1)') %
                              {'type': r_type})

    def check_record_type_dependencies(self, keys, rrattrs):
        """Reject record sets missing a required companion record type."""
        # Test that all record type dependencies are satisfied

        # DS record validation
        # DS record requires to coexists with NS record
        dsrecords = rrattrs.get('dsrecord')
        nsrecords = rrattrs.get('nsrecord')
        # DS record cannot be in zone apex, checked in pre-callback validators
        if dsrecords and not nsrecords:
            raise errors.ValidationError(
                name='dsrecord',
                error=_('DS record requires to coexist with an '
                        'NS record (RFC 4592 section 4.6, RFC 4035 section 2.4)'))

    def _entry2rrsets(self, entry_attrs, dns_name, dns_domain):
        '''Convert entry_attrs to a dictionary {rdtype: rrset}.

        :returns:
            None if entry_attrs is None
            {rdtype: None} if RRset of given type is empty
            {rdtype: RRset} if RRset of given type is non-empty
        '''
        record_attr_suf = 'record'
        ldap_rrsets = {}

        if not entry_attrs:
            # all records were deleted => name should not exist in DNS
            return None

        for attr, value in entry_attrs.iteritems():
            if not attr.endswith(record_attr_suf):
                continue

            rdtype = dns.rdatatype.from_text(attr[0:-len(record_attr_suf)])
            if not value:
                ldap_rrsets[rdtype] = None  # RRset is empty
                continue

            try:
                # TTL here can be arbitrary value because it is ignored
                # during comparison
                ldap_rrset = dns.rrset.from_text(
                    dns_name, 86400, dns.rdataclass.IN, rdtype,
                    *map(str, value))

                # make sure that all names are absolute so RRset
                # comparison will work
                for ldap_rr in ldap_rrset:
                    ldap_rr.choose_relativity(origin=dns_domain,
                                              relativize=False)
                ldap_rrsets[rdtype] = ldap_rrset

            except dns.exception.SyntaxError as e:
                self.log.error('DNS syntax error: %s %s %s: %s', dns_name,
                               dns.rdatatype.to_text(rdtype), value, e)
                raise

        return ldap_rrsets

    def wait_for_modified_attr(self, ldap_rrset, rdtype, dns_name):
        '''Wait until DNS resolver returns up-to-date answer for given RRset
        or until the maximum number of attempts is reached.
        Number of attempts is controlled by self.api.env['wait_for_dns'].

        :param ldap_rrset:
            None if given rdtype should not exist or
            dns.rrset.RRset to match against data in DNS.
        :param dns_name: FQDN to query
        :type dns_name: dns.name.Name
        :return: None if data in DNS and LDAP match
        :raises errors.DNSDataMismatch: if data in DNS and LDAP doesn't match
        :raises dns.exception.DNSException: if DNS resolution failed
        '''
        resolver = dns.resolver.Resolver()
        resolver.set_flags(0)  # disable recursion (for NS RR checks)
        max_attempts = int(self.api.env['wait_for_dns'])
        warn_attempts = max_attempts / 2
        period = 1  # second
        attempt = 0
        log_fn = self.log.debug
        log_fn('querying DNS server: expecting answer {%s}', ldap_rrset)
        wait_template = 'waiting for DNS answer {%s}: got {%s} (attempt %s); '\
                        'waiting %s seconds before next try'

        while attempt < max_attempts:
            if attempt >= warn_attempts:
                # escalate to warnings once half the attempts are used up
                log_fn = self.log.warn
            attempt += 1
            try:
                dns_answer = resolver.query(dns_name, rdtype,
                                            dns.rdataclass.IN,
                                            raise_on_no_answer=False)
                dns_rrset = None

                if rdtype == _NS:
                    # NS records can be in Authority section (sometimes)
                    dns_rrset = dns_answer.response.get_rrset(
                        dns_answer.response.authority, dns_name, _IN, rdtype)

                if not dns_rrset:
                    # Look for NS and other data in Answer section
                    dns_rrset = dns_answer.rrset

                if dns_rrset == ldap_rrset:
                    log_fn('DNS answer matches expectations (attempt %s)',
                           attempt)
                    return

                log_msg = wait_template % (ldap_rrset, dns_answer.response,
                                           attempt, period)

            except (dns.resolver.NXDOMAIN,
                    dns.resolver.YXDOMAIN,
                    dns.resolver.NoNameservers,
                    dns.resolver.Timeout) as e:
                if attempt >= max_attempts:
                    # give up, let the caller see the resolver error
                    raise
                else:
                    log_msg = wait_template % (ldap_rrset, type(e), attempt,
                                               period)

            log_fn(log_msg)
            time.sleep(period)

        # Maximum number of attempts was reached
        else:
            # NOTE(review): if wait_for_dns is 0 the loop body never runs
            # and dns_rrset is unbound here -- confirm callers guarantee
            # wait_for_dns >= 1
            raise errors.DNSDataMismatch(expected=ldap_rrset, got=dns_rrset)

    def wait_for_modified_attrs(self, entry_attrs, dns_name, dns_domain):
        '''Wait until DNS resolver returns up-to-date answer for given entry
        or until the maximum number of attempts is reached.

        :param entry_attrs:
            None if the entry was deleted from LDAP or
            LDAPEntry instance containing at least all modified attributes.
        :param dns_name: FQDN
        :type dns_name: dns.name.Name
        :raises errors.DNSDataMismatch: if data in DNS and LDAP doesn't match
        '''
        # represent data in LDAP as dictionary rdtype => rrset
        ldap_rrsets = self._entry2rrsets(entry_attrs, dns_name, dns_domain)
        nxdomain = ldap_rrsets is None
        if nxdomain:
            # name should not exist => ask for A record and check result
            ldap_rrsets = {dns.rdatatype.from_text('A'): None}

        for rdtype, ldap_rrset in ldap_rrsets.iteritems():
            try:
                self.wait_for_modified_attr(ldap_rrset, rdtype, dns_name)

            except dns.resolver.NXDOMAIN as e:
                if nxdomain:
                    # NXDOMAIN is exactly what we expected after a delete
                    continue
                else:
                    e = errors.DNSDataMismatch(expected=ldap_rrset,
                                               got="NXDOMAIN")
                    self.log.error(e)
                    raise e

            except dns.resolver.NoNameservers as e:
                # Do not raise exception if we have got SERVFAILs.
                # Maybe the user has created an invalid zone intentionally.
                self.log.warn('waiting for DNS answer {%s}: got {%s}; '
                              'ignoring', ldap_rrset, type(e))
                continue

            except dns.exception.DNSException as e:
                err_desc = str(type(e))
                err_str = str(e)
                if err_str:
                    err_desc += ": %s" % err_str
                e = errors.DNSDataMismatch(expected=ldap_rrset, got=err_desc)
                self.log.error(e)
                raise e

    def wait_for_modified_entries(self, entries):
        '''Call wait_for_modified_attrs for all entries in given dict.

        :param entries:
            Dict {(dns_domain, dns_name): entry_for_wait_for_modified_attrs}
        '''
        for entry_name, entry in entries.iteritems():
            dns_domain = entry_name[0]
            dns_name = entry_name[1].derelativize(dns_domain)

            self.wait_for_modified_attrs(entry, dns_name, dns_domain)

    def warning_if_ns_change_cause_fwzone_ineffective(self, result, *keys,
                                                      **options):
        """Detect if NS record change can make forward zones ineffective due
        missing delegation. Run after parent's execute method.
        """
        record_name_absolute = keys[-1]
        zone = keys[-2]

        if not record_name_absolute.is_absolute():
            record_name_absolute = record_name_absolute.derelativize(zone)

        affected_fw_zones, truncated = _find_subtree_forward_zones_ldap(
            record_name_absolute)
        if not affected_fw_zones:
            return

        for fwzone in affected_fw_zones:
            _add_warning_fw_zone_is_not_effective(result, fwzone,
                                                  options['version'])
@register()
class dnsrecord_add(LDAPCreate):
    __doc__ = _('Add new DNS resource record.')

    no_option_msg = 'No options to add a specific record provided.\n' \
            "Command help may be consulted for all supported record types."
    takes_options = LDAPCreate.takes_options + (
        Flag('force',
             label=_('Force'),
             flags=['no_option', 'no_output'],
             doc=_('force NS record creation even if its hostname is not in DNS'),
        ),
        dnsrecord.structured_flag,
    )

    def args_options_2_entry(self, *keys, **options):
        # refuse the call outright when no record data was supplied
        self.obj.has_cli_options(options, self.no_option_msg)
        return super(dnsrecord_add, self).args_options_2_entry(*keys, **options)

    def interactive_prompt_callback(self, kw):
        """Interactively complete missing record parts, or — when no record
        option was given at all — ask for a record type and its parts."""
        try:
            self.obj.has_cli_options(kw, self.no_option_msg)

            # Some DNS records were entered, do not use full interactive help
            # We should still ask user for required parts of DNS parts he is
            # trying to add in the same way we do for standard LDAP parameters
            #
            # Do not ask for required parts when any "extra" option is used,
            # it can be used to fill all required params by itself
            new_kw = {}
            for rrparam in self.obj.iterate_rrparams_by_parts(kw, skip_extra=True):
                user_options = rrparam.prompt_missing_parts(self, kw,
                                                            prompt_optional=False)
                new_kw.update(user_options)
            kw.update(new_kw)
            return
        except errors.OptionError:
            # no record option at all: fall through to full interactive mode
            pass

        try:
            idnsname = DNSName(kw['idnsname'])
        except Exception as e:
            raise errors.ValidationError(name='idnsname', error=unicode(e))

        try:
            zonename = DNSName(kw['dnszoneidnsname'])
        except Exception as e:
            raise errors.ValidationError(name='dnszoneidnsname', error=unicode(e))

        # check zone type
        if idnsname.is_empty():
            common_types = u', '.join(_zone_top_record_types)
        elif zonename.is_reverse():
            common_types = u', '.join(_rev_top_record_types)
        else:
            common_types = u', '.join(_top_record_types)

        self.Backend.textui.print_plain(_(u'Please choose a type of DNS resource record to be added'))
        self.Backend.textui.print_plain(_(u'The most common types for this type of zone are: %s\n') %\
                                          common_types)

        ok = False
        while not ok:
            rrtype = self.Backend.textui.prompt(_(u'DNS resource record type'))

            if rrtype is None:
                return

            try:
                name = '%srecord' % rrtype.lower()
                param = self.params[name]

                if not isinstance(param, DNSRecord):
                    raise ValueError()

                if not param.supported:
                    raise ValueError()
            except (KeyError, ValueError):
                all_types = u', '.join(_dns_supported_record_types)
                self.Backend.textui.print_plain(_(u'Invalid or unsupported type. Allowed values are: %s') % all_types)
                continue
            ok = True

        user_options = param.prompt_parts(self)
        kw.update(user_options)

    def pre_callback(self, ldap, dn, entry_attrs, attrs_list, *keys, **options):
        """Assemble raw record values from part options, run per-type
        validators, merge new values with any existing entry and verify
        record type dependency/coexistence rules."""
        assert isinstance(dn, DN)

        precallback_attrs = []
        processed_attrs = []
        for option in options:
            try:
                param = self.params[option]
            except KeyError:
                continue

            rrparam = self.obj.get_rrparam_from_part(option)
            if rrparam is None:
                continue

            if 'dnsrecord_part' in param.flags:
                if rrparam.name in processed_attrs:
                    # this record was already entered
                    continue
                if rrparam.name in entry_attrs:
                    # this record is entered both via parts and raw records
                    raise errors.ValidationError(name=param.cli_name or param.name,
                            error=_('Raw value of a DNS record was already set by "%(name)s" option') \
                                  % dict(name=rrparam.cli_name or rrparam.name))

                parts = rrparam.get_parts_from_kw(options)
                dnsvalue = [rrparam._convert_scalar(parts)]
                entry_attrs[rrparam.name] = dnsvalue
                processed_attrs.append(rrparam.name)
                continue

            if 'dnsrecord_extra' in param.flags:
                # do not run precallback for unset flags
                if isinstance(param, Flag) and not options[option]:
                    continue

                # extra option is passed, run per-type pre_callback for given RR type
                precallback_attrs.append(rrparam.name)

        # Run pre_callback validators
        self.obj.run_precallback_validators(dn, entry_attrs, *keys, **options)

        # run precallback also for all new RR type attributes in entry_attrs
        for attr in entry_attrs.keys():
            try:
                param = self.params[attr]
            except KeyError:
                continue

            if not isinstance(param, DNSRecord):
                continue
            precallback_attrs.append(attr)

        precallback_attrs = list(set(precallback_attrs))

        for attr in precallback_attrs:
            # run per-type
            try:
                param = self.params[attr]
            except KeyError:
                continue
            param.dnsrecord_add_pre_callback(ldap, dn, entry_attrs, attrs_list, *keys, **options)

        # Store all new attrs so that DNSRecord post callback is called for
        # new attributes only and not for all attributes in the LDAP entry
        setattr(context, 'dnsrecord_precallback_attrs', precallback_attrs)

        # We always want to retrieve all DNS record attributes to test for
        # record type collisions (#2601)
        try:
            old_entry = ldap.get_entry(dn, _record_attributes)
        except errors.NotFound:
            old_entry = None
        else:
            # merge old values into the new entry so the update in
            # exc_callback (DuplicateEntry path) preserves existing records
            for attr in entry_attrs.keys():
                if attr not in _record_attributes:
                    continue
                if entry_attrs[attr] is None:
                    entry_attrs[attr] = []
                if not isinstance(entry_attrs[attr], (tuple, list)):
                    vals = [entry_attrs[attr]]
                else:
                    vals = list(entry_attrs[attr])
                entry_attrs[attr] = list(set(old_entry.get(attr, []) + vals))

        rrattrs = self.obj.updated_rrattrs(old_entry, entry_attrs)
        self.obj.check_record_type_dependencies(keys, rrattrs)
        self.obj.check_record_type_collisions(keys, rrattrs)

        context.dnsrecord_entry_mods = getattr(context, 'dnsrecord_entry_mods',
                                               {})
        context.dnsrecord_entry_mods[(keys[0], keys[1])] = entry_attrs.copy()

        return dn

    def exc_callback(self, keys, options, exc, call_func, *call_args, **call_kwargs):
        # Adding to an existing owner name raises DuplicateEntry; fall back
        # to modifying the existing entry instead.
        if call_func.__name__ == 'add_entry':
            if isinstance(exc, errors.DuplicateEntry):
                # A new record is being added to existing LDAP DNS object
                # Update can be safely run as old record values has been
                # already merged in pre_callback
                ldap = self.obj.backend
                entry_attrs = self.obj.get_record_entry_attrs(call_args[0])
                update = ldap.get_entry(entry_attrs.dn, entry_attrs.keys())
                update.update(entry_attrs)
                ldap.update_entry(update, **call_kwargs)
                return
        raise exc

    def post_callback(self, ldap, dn, entry_attrs, *keys, **options):
        """Run per-type post callbacks, post-process the output and
        optionally wait until the DNS servers serve the new data."""
        assert isinstance(dn, DN)
        for attr in getattr(context, 'dnsrecord_precallback_attrs', []):
            param = self.params[attr]
            param.dnsrecord_add_post_callback(ldap, dn, entry_attrs, *keys, **options)

        if self.obj.is_pkey_zone_record(*keys):
            # present the apex record under its '@' name
            entry_attrs[self.obj.primary_key.name] = [_dns_zone_record]

        self.obj.postprocess_record(entry_attrs, **options)

        if self.api.env['wait_for_dns']:
            self.obj.wait_for_modified_entries(context.dnsrecord_entry_mods)
        return dn
@register()
class dnsrecord_mod(LDAPUpdate):
    __doc__ = _('Modify a DNS resource record.')

    no_option_msg = 'No options to modify a specific record provided.'

    takes_options = LDAPUpdate.takes_options + (
        dnsrecord.structured_flag,
    )

    def args_options_2_entry(self, *keys, **options):
        """Reject the call early when no record-specific option was given."""
        self.obj.has_cli_options(options, self.no_option_msg, True)
        return super(dnsrecord_mod, self).args_options_2_entry(*keys, **options)

    def pre_callback(self, ldap, dn, entry_attrs, attrs_list, *keys, **options):
        """Merge per-part record updates into full record values.

        Structured options (e.g. --mx-preference) modify a single existing
        record value part-by-part; the old value is located in the current
        LDAP entry, its parts overlaid with the new ones and the rebuilt
        value written back.  Also runs validators, dependency/collision
        checks and stashes the planned modification in the request context
        so ``wait_for_dns`` can poll for it later.
        """
        assert isinstance(dn, DN)

        if options.get('rename') and self.obj.is_pkey_zone_record(*keys):
            # zone rename is not allowed
            raise errors.ValidationError(name='rename',
                           error=_('DNS zone root record cannot be renamed'))

        # check if any attr should be updated using structured instead of replaced
        # format is recordname : (old_value, new_parts)
        updated_attrs = {}
        for param in self.obj.iterate_rrparams_by_parts(options, skip_extra=True):
            parts = param.get_parts_from_kw(options, raise_on_none=False)
            if parts is None:
                # old-style modification
                continue
            old_value = entry_attrs.get(param.name)
            if not old_value:
                raise errors.RequirementError(name=param.name)
            if isinstance(old_value, (tuple, list)):
                if len(old_value) > 1:
                    # per-part updates are unambiguous only for one value
                    raise errors.ValidationError(name=param.name,
                        error=_('DNS records can be only updated one at a time'))
                old_value = old_value[0]
            updated_attrs[param.name] = (old_value, parts)

        # Run pre_callback validators
        self.obj.run_precallback_validators(dn, entry_attrs, *keys, **options)

        # current entry is needed in case of per-dns-record-part updates and
        # for record type collision check
        try:
            old_entry = ldap.get_entry(dn, _record_attributes)
        except errors.NotFound:
            self.obj.handle_not_found(*keys)

        if updated_attrs:
            for attr in updated_attrs:
                param = self.params[attr]
                old_dnsvalue, new_parts = updated_attrs[attr]

                if old_dnsvalue not in old_entry.get(attr, []):
                    attr_name = unicode(param.label or param.name)
                    raise errors.AttrValueNotFound(attr=attr_name,
                                                   value=old_dnsvalue)
                old_entry[attr].remove(old_dnsvalue)

                # overlay: keep old part where no new part was supplied
                old_parts = param._get_part_values(old_dnsvalue)
                modified_parts = tuple(part if part is not None else old_parts[part_id]
                                       for part_id, part in enumerate(new_parts))

                new_dnsvalue = [param._convert_scalar(modified_parts)]
                entry_attrs[attr] = list(set(old_entry[attr] + new_dnsvalue))

        rrattrs = self.obj.updated_rrattrs(old_entry, entry_attrs)
        self.obj.check_record_type_dependencies(keys, rrattrs)
        self.obj.check_record_type_collisions(keys, rrattrs)

        # remember the planned state for wait_for_modified_entries()
        context.dnsrecord_entry_mods = getattr(context, 'dnsrecord_entry_mods',
                                               {})
        context.dnsrecord_entry_mods[(keys[0], keys[1])] = entry_attrs.copy()

        return dn

    def execute(self, *keys, **options):
        """Run the modify; delete the whole entry when it ends up empty."""
        result = super(dnsrecord_mod, self).execute(*keys, **options)

        # remove if empty
        if not self.obj.is_pkey_zone_record(*keys):
            rename = options.get('rename')
            if rename is not None:
                # --rename moved the entry; re-read under the new name
                keys = keys[:-1] + (rename,)
            dn = self.obj.get_dn(*keys, **options)
            ldap = self.obj.backend
            old_entry = ldap.get_entry(dn, _record_attributes)

            del_all = True
            for attr in old_entry.keys():
                if old_entry[attr]:
                    del_all = False
                    break

            if del_all:
                result = self.obj.methods.delentry(*keys,
                                                   version=options['version'])

                # we need to modify delete result to match mod output type
                # only one value is expected, not a list
                if client_has_capability(options['version'], 'primary_key_types'):
                    assert len(result['value']) == 1
                    result['value'] = result['value'][0]

                # indicate that entry was deleted
                context.dnsrecord_entry_mods[(keys[0], keys[1])] = None

        if self.api.env['wait_for_dns']:
            self.obj.wait_for_modified_entries(context.dnsrecord_entry_mods)
        if 'nsrecord' in options:
            # changing NS records can shadow forward zones; warn if so
            self.obj.warning_if_ns_change_cause_fwzone_ineffective(result,
                                                                   *keys,
                                                                   **options)
        return result

    def post_callback(self, ldap, dn, entry_attrs, *keys, **options):
        """Relabel the zone apex and post-process the record for output."""
        assert isinstance(dn, DN)
        if self.obj.is_pkey_zone_record(*keys):
            entry_attrs[self.obj.primary_key.name] = [_dns_zone_record]
        self.obj.postprocess_record(entry_attrs, **options)
        return dn

    def interactive_prompt_callback(self, kw):
        """Interactively ask which single record value should be modified.

        Only runs when no record-specific option was given on the command
        line.  Shows the current record contents, asks per value whether it
        should be modified, then prompts for the new parts.  Only one value
        per record type can be modified at a time; the rest are skipped
        with a warning.
        """
        try:
            self.obj.has_cli_options(kw, self.no_option_msg, True)
        except errors.OptionError:
            pass
        else:
            # some record type entered, skip this helper
            return

        # get DNS record first so that the NotFound exception is raised
        # before the helper would start
        dns_record = api.Command['dnsrecord_show'](kw['dnszoneidnsname'], kw['idnsname'])['result']
        # NOTE(review): rec_types is computed but never used
        rec_types = [rec_type for rec_type in dns_record if rec_type in _record_attributes]

        self.Backend.textui.print_plain(_("No option to modify specific record provided."))

        # ask user for records to be removed
        self.Backend.textui.print_plain(_(u'Current DNS record contents:\n'))
        record_params = []

        for attr in dns_record:
            try:
                param = self.params[attr]
            except KeyError:
                continue
            if not isinstance(param, DNSRecord):
                continue

            record_params.append(param)
            rec_type_content = u', '.join(dns_record[param.name])
            self.Backend.textui.print_plain(u'%s: %s' % (param.label, rec_type_content))
        self.Backend.textui.print_plain(u'')

        # ask what records to remove
        for param in record_params:
            rec_values = list(dns_record[param.name])
            for rec_value in dns_record[param.name]:
                rec_values.remove(rec_value)
                mod_value = self.Backend.textui.prompt_yesno(
                        _("Modify %(name)s '%(value)s'?") % dict(name=param.label, value=rec_value), default=False)
                if mod_value is True:
                    user_options = param.prompt_parts(self, mod_dnsvalue=rec_value)
                    kw[param.name] = [rec_value]
                    kw.update(user_options)

                    if rec_values:
                        self.Backend.textui.print_plain(ngettext(
                            u'%(count)d %(type)s record skipped. Only one value per DNS record type can be modified at one time.',
                            u'%(count)d %(type)s records skipped. Only one value per DNS record type can be modified at one time.',
                            0) % dict(count=len(rec_values), type=param.rrtype))
                        break
@register()
class dnsrecord_delentry(LDAPDelete):
    """
    Delete DNS record entry.
    """
    # internal helper used by dnsrecord-mod/-del to remove an emptied
    # LDAP object; hidden from the command line
    msg_summary = _('Deleted record "%(value)s"')
    NO_CLI = True
@register()
class dnsrecord_del(LDAPUpdate):
    __doc__ = _('Delete DNS resource record.')

    has_output = output.standard_multi_delete

    no_option_msg = _('Neither --del-all nor options to delete a specific record provided.\n'
            "Command help may be consulted for all supported record types.")

    takes_options = (
        Flag('del_all',
            default=False,
            label=_('Delete all associated records'),
        ),
        dnsrecord.structured_flag,
    )

    def get_options(self):
        """Yield options, hiding per-part/extra options and mod-only options."""
        for option in super(dnsrecord_del, self).get_options():
            if any(flag in option.flags for flag in
                    ('dnsrecord_part', 'dnsrecord_extra',)):
                continue
            elif option.name in ('rename', ):
                # options only valid for dnsrecord-mod
                continue
            elif isinstance(option, DNSRecord):
                yield option.clone(option_group=None)
                continue
            yield option

    def pre_callback(self, ldap, dn, entry_attrs, attrs_list, *keys, **options):
        """Subtract requested values from the entry and validate the result.

        Raises AttrValueNotFound when a value to delete is not present.
        Sets context.del_all when no record values remain so post-callback
        logic can remove the whole LDAP object.
        """
        assert isinstance(dn, DN)
        try:
            old_entry = ldap.get_entry(dn, _record_attributes)
        except errors.NotFound:
            self.obj.handle_not_found(*keys)

        for attr in entry_attrs.keys():
            if attr not in _record_attributes:
                continue
            if not isinstance(entry_attrs[attr], (tuple, list)):
                vals = [entry_attrs[attr]]
            else:
                vals = entry_attrs[attr]

            for val in vals:
                try:
                    old_entry[attr].remove(val)
                except (KeyError, ValueError):
                    # use a user-friendly attribute label when available;
                    # fixed: the original used a bare `except:` here, which
                    # also swallowed unrelated errors
                    try:
                        param = self.params[attr]
                        attr_name = unicode(param.label or param.name)
                    except KeyError:
                        attr_name = attr
                    raise errors.AttrValueNotFound(attr=attr_name, value=val)
            entry_attrs[attr] = list(set(old_entry[attr]))

        rrattrs = self.obj.updated_rrattrs(old_entry, entry_attrs)
        self.obj.check_record_type_dependencies(keys, rrattrs)

        del_all = False
        if not self.obj.is_pkey_zone_record(*keys):
            record_found = False
            for attr in old_entry.keys():
                if old_entry[attr]:
                    record_found = True
                    break
            del_all = not record_found

        # set del_all flag in context
        # when the flag is enabled, the entire DNS record object is deleted
        # in a post callback
        context.del_all = del_all
        context.dnsrecord_entry_mods = getattr(context, 'dnsrecord_entry_mods',
                                               {})
        context.dnsrecord_entry_mods[(keys[0], keys[1])] = entry_attrs.copy()

        return dn

    def execute(self, *keys, **options):
        """Delete record values, or the whole entry with --del-all."""
        if options.get('del_all', False):
            if self.obj.is_pkey_zone_record(*keys):
                # the apex record set is owned by the zone itself
                raise errors.ValidationError(
                        name='del_all',
                        error=_('Zone record \'%s\' cannot be deleted') \
                            % _dns_zone_record
                    )
            result = self.obj.methods.delentry(*keys,
                                               version=options['version'])
            if self.api.env['wait_for_dns']:
                entries = {(keys[0], keys[1]): None}
                self.obj.wait_for_modified_entries(entries)
        else:
            result = super(dnsrecord_del, self).execute(*keys, **options)
            result['value'] = pkey_to_value([keys[-1]], options)

            # pre_callback determined that no record values remain:
            # remove the now-empty LDAP object
            if getattr(context, 'del_all', False) and not \
                    self.obj.is_pkey_zone_record(*keys):
                result = self.obj.methods.delentry(*keys,
                                                   version=options['version'])
                context.dnsrecord_entry_mods[(keys[0], keys[1])] = None

            if self.api.env['wait_for_dns']:
                self.obj.wait_for_modified_entries(context.dnsrecord_entry_mods)

        if 'nsrecord' in options or options.get('del_all', False):
            # removing NS records can make forward zones effective again
            self.obj.warning_if_ns_change_cause_fwzone_ineffective(result,
                                                                   *keys,
                                                                   **options)
        return result

    def post_callback(self, ldap, dn, entry_attrs, *keys, **options):
        """Relabel the zone apex and post-process the record for output."""
        assert isinstance(dn, DN)
        if self.obj.is_pkey_zone_record(*keys):
            entry_attrs[self.obj.primary_key.name] = [_dns_zone_record]
        self.obj.postprocess_record(entry_attrs, **options)
        return dn

    def args_options_2_entry(self, *keys, **options):
        """Reject the call early when neither --del-all nor a record option was given."""
        self.obj.has_cli_options(options, self.no_option_msg)
        return super(dnsrecord_del, self).args_options_2_entry(*keys, **options)

    def interactive_prompt_callback(self, kw):
        """Interactively ask which record values should be deleted.

        Only runs when neither --del-all nor a record-specific option was
        supplied.  Offers deletion of everything first, then asks value by
        value.
        """
        if kw.get('del_all', False):
            return
        try:
            self.obj.has_cli_options(kw, self.no_option_msg)
        except errors.OptionError:
            pass
        else:
            # some record type entered, skip this helper
            return

        # get DNS record first so that the NotFound exception is raised
        # before the helper would start
        dns_record = api.Command['dnsrecord_show'](kw['dnszoneidnsname'], kw['idnsname'])['result']

        self.Backend.textui.print_plain(_("No option to delete specific record provided."))
        user_del_all = self.Backend.textui.prompt_yesno(_("Delete all?"), default=False)

        if user_del_all is True:
            kw['del_all'] = True
            return

        # ask user for records to be removed
        self.Backend.textui.print_plain(_(u'Current DNS record contents:\n'))
        present_params = []

        for attr in dns_record:
            try:
                param = self.params[attr]
            except KeyError:
                continue
            if not isinstance(param, DNSRecord):
                continue

            present_params.append(param)
            rec_type_content = u', '.join(dns_record[param.name])
            self.Backend.textui.print_plain(u'%s: %s' % (param.label, rec_type_content))
        self.Backend.textui.print_plain(u'')

        # ask what records to remove
        for param in present_params:
            deleted_values = []
            for rec_value in dns_record[param.name]:
                user_del_value = self.Backend.textui.prompt_yesno(
                        _("Delete %(name)s '%(value)s'?")
                        % dict(name=param.label, value=rec_value), default=False)
                if user_del_value is True:
                    deleted_values.append(rec_value)
            if deleted_values:
                kw[param.name] = tuple(deleted_values)
@register()
class dnsrecord_show(LDAPRetrieve):
    __doc__ = _('Display DNS resource.')

    takes_options = LDAPRetrieve.takes_options + (
        dnsrecord.structured_flag,
    )

    def post_callback(self, ldap, dn, entry_attrs, *keys, **options):
        """Relabel the zone apex and post-process the record for output."""
        assert isinstance(dn, DN)
        if self.obj.is_pkey_zone_record(*keys):
            # the zone apex is shown under the reserved zone-record name
            entry_attrs[self.obj.primary_key.name] = [_dns_zone_record]
        self.obj.postprocess_record(entry_attrs, **options)
        return dn
@register()
class dnsrecord_find(LDAPSearch):
    __doc__ = _('Search for DNS resources.')

    takes_options = LDAPSearch.takes_options + (
        dnsrecord.structured_flag,
    )

    def get_options(self):
        """Yield search options, hiding per-part/extra record options."""
        for option in super(dnsrecord_find, self).get_options():
            if any(flag in option.flags for flag in
                    ('dnsrecord_part', 'dnsrecord_extra',)):
                continue
            elif isinstance(option, DNSRecord):
                # searchable, but not grouped in the help output
                yield option.clone(option_group=None)
                continue
            yield option

    def pre_callback(self, ldap, filter, attrs_list, base_dn, scope, *args, **options):
        """Build the IDN-aware filter and force a subtree search of the zone."""
        assert isinstance(base_dn, DN)

        # validate if zone is master zone
        self.obj.check_zone(args[-2], **options)

        filter = _create_idn_filter(self, ldap, *args, **options)
        return (filter, base_dn, ldap.SCOPE_SUBTREE)

    def post_callback(self, ldap, entries, truncated, *args, **options):
        """Relabel the zone apex entry and post-process every found record."""
        if entries:
            zone_obj = self.api.Object[self.obj.parent_object]
            zone_dn = zone_obj.get_dn(args[0])
            if entries[0].dn == zone_dn:
                # zone apex entry sorts first; show it under the reserved name
                entries[0][zone_obj.primary_key.name] = [_dns_zone_record]
            for entry in entries:
                self.obj.postprocess_record(entry, **options)

        return truncated
@register()
class dns_resolve(Command):
    __doc__ = _('Resolve a host name in DNS.')

    has_output = output.standard_value
    msg_summary = _('Found \'%(value)s\'')

    takes_args = (
        Str('hostname',
            label=_('Hostname'),
        ),
    )

    def execute(self, *args, **options):
        """Normalize the host name to an absolute FQDN and verify it resolves.

        A bare label (containing neither a dot nor the IPA domain) is
        qualified with the IPA domain; a trailing dot is ensured in all
        cases.  Raises NotFound when the final name does not resolve.
        """
        query = args[0]
        # bare short name -> qualify with the IPA domain (absolute form)
        if api.env.domain not in query and '.' not in query:
            query = '%s.%s.' % (query, api.env.domain)
        # ensure the name is absolute
        if query[-1] != '.':
            query += '.'

        if not is_host_resolvable(query):
            raise errors.NotFound(
                reason=_('Host \'%(host)s\' not found') % {'host': query}
            )

        return dict(result=True, value=query)
@register()
class dns_is_enabled(Command):
    """
    Checks if any of the servers has the DNS service enabled.
    """
    NO_CLI = True
    has_output = output.standard_value

    # every IPA master running the DNS service publishes a cn=DNS
    # ipaConfigObject under cn=masters
    base_dn = DN(('cn', 'masters'), ('cn', 'ipa'), ('cn', 'etc'), api.env.basedn)
    filter = '(&(objectClass=ipaConfigObject)(cn=DNS))'

    def execute(self, *args, **options):
        """Return result=True when at least one DNS-enabled server exists.

        The lookup is deliberately best-effort: any LDAP failure (including
        NotFound when no server matches) is reported as "DNS not enabled"
        instead of being propagated to the caller.
        """
        ldap = self.api.Backend.ldap2
        dns_enabled = False

        try:
            ent = ldap.find_entries(filter=self.filter, base_dn=self.base_dn)
            if ent:
                dns_enabled = True
        except Exception:
            # best-effort check; removed the unused `as e` binding
            pass

        return dict(result=dns_enabled, value=pkey_to_value(None, options))
@register()
class dnsconfig(LDAPObject):
    """
    DNS global configuration object
    """
    object_name = _('DNS configuration options')
    default_attributes = [
        'idnsforwardpolicy', 'idnsforwarders', 'idnsallowsyncptr'
    ]

    label = _('DNS Global Configuration')
    label_singular = _('DNS Global Configuration')

    takes_params = (
        Str('idnsforwarders*',
            _validate_bind_forwarder,
            cli_name='forwarder',
            label=_('Global forwarders'),
            doc=_('Global forwarders. A custom port can be specified for each '
                  'forwarder using a standard format "IP_ADDRESS port PORT"'),
            csv=True,
        ),
        StrEnum('idnsforwardpolicy?',
            cli_name='forward_policy',
            label=_('Forward policy'),
            doc=_('Global forwarding policy. Set to "none" to disable '
                  'any configured global forwarders.'),
            values=(u'only', u'first', u'none'),
        ),
        Bool('idnsallowsyncptr?',
            cli_name='allow_sync_ptr',
            label=_('Allow PTR sync'),
            doc=_('Allow synchronization of forward (A, AAAA) and reverse (PTR) records'),
        ),
        # kept for API compatibility; the server ignores it
        DeprecatedParam('idnszonerefresh?',
            cli_name='zone_refresh',
            label=_('Zone refresh interval'),
        ),
    )
    managed_permissions = {
        'System: Write DNS Configuration': {
            'non_object': True,
            'ipapermright': {'write'},
            'ipapermlocation': api.env.basedn,
            'ipapermtarget': DN('cn=dns', api.env.basedn),
            'ipapermtargetfilter': ['(objectclass=idnsConfigObject)'],
            'ipapermdefaultattr': {
                'idnsallowsyncptr', 'idnsforwarders', 'idnsforwardpolicy',
                'idnspersistentsearch', 'idnszonerefresh'
            },
            'replaces': [
                '(targetattr = "idnsforwardpolicy || idnsforwarders || idnsallowsyncptr || idnszonerefresh || idnspersistentsearch")(target = "ldap:///cn=dns,$SUFFIX")(version 3.0;acl "permission:Write DNS Configuration";allow (write) groupdn = "ldap:///cn=Write DNS Configuration,cn=permissions,cn=pbac,$SUFFIX";)',
            ],
            'default_privileges': {'DNS Administrators', 'DNS Servers'},
        },
        'System: Read DNS Configuration': {
            'non_object': True,
            'ipapermright': {'read'},
            'ipapermlocation': api.env.basedn,
            'ipapermtarget': DN('cn=dns', api.env.basedn),
            'ipapermtargetfilter': ['(objectclass=idnsConfigObject)'],
            'ipapermdefaultattr': {
                'objectclass',
                'idnsallowsyncptr', 'idnsforwarders', 'idnsforwardpolicy',
                'idnspersistentsearch', 'idnszonerefresh'
            },
            'default_privileges': {'DNS Administrators', 'DNS Servers'},
        },
    }

    def get_dn(self, *keys, **kwargs):
        """Return the fixed DN of the global config; fail if DNS is absent."""
        if not dns_container_exists(self.api.Backend.ldap2):
            raise errors.NotFound(reason=_('DNS is not configured'))
        return DN(api.env.container_dns, api.env.basedn)

    def get_dnsconfig(self, ldap):
        """Fetch the global DNS configuration entry (all attributes)."""
        entry = ldap.get_entry(self.get_dn(), None)
        return entry

    def postprocess_result(self, result):
        """Replace the summary when no configurable attribute is set."""
        if not any(param in result['result'] for param in self.params):
            result['summary'] = unicode(_('Global DNS configuration is empty'))
@register()
class dnsconfig_mod(LDAPUpdate):
    __doc__ = _('Modify global DNS configuration.')

    def interactive_prompt_callback(self, kw):
        """Tell the CLI user that forwarder validation may take a while."""
        # show informative message on client side
        # server cannot send messages asynchronous
        if kw.get('idnsforwarders', False):
            self.Backend.textui.print_plain(
                _("Server will check DNS forwarder(s)."))
            self.Backend.textui.print_plain(
                _("This may take some time, please wait ..."))

    def execute(self, *keys, **options):
        """Apply the change, then validate each new global forwarder.

        Validation problems (missing DNSSEC signatures, no EDNS0, name not
        resolvable) are attached to the result as warnings, never raised:
        the configuration change itself has already been saved.
        """
        # test dnssec forwarders
        forwarders = options.get('idnsforwarders')

        result = super(dnsconfig_mod, self).execute(*keys, **options)
        self.obj.postprocess_result(result)

        if forwarders:
            for forwarder in forwarders:
                try:
                    validate_dnssec_global_forwarder(forwarder, log=self.log)
                except DNSSECSignatureMissingError as e:
                    messages.add_message(
                        options['version'],
                        result, messages.DNSServerDoesNotSupportDNSSECWarning(
                            server=forwarder, error=e,
                        )
                    )
                except EDNS0UnsupportedError as e:
                    messages.add_message(
                        options['version'],
                        result, messages.DNSServerDoesNotSupportEDNS0Warning(
                            server=forwarder, error=e,
                        )
                    )
                except UnresolvableRecordError as e:
                    messages.add_message(
                        options['version'],
                        result, messages.DNSServerValidationWarning(
                            server=forwarder, error=e
                        )
                    )

        return result
@register()
class dnsconfig_show(LDAPRetrieve):
    __doc__ = _('Show the current global DNS configuration.')

    def execute(self, *keys, **options):
        """Retrieve the config entry; swap in an 'empty' summary when unset."""
        result = super(dnsconfig_show, self).execute(*keys, **options)
        self.obj.postprocess_result(result)
        return result
@register()
class dnsforwardzone(DNSZoneBase):
    """
    DNS Forward zone, container for resource records.
    """
    object_name = _('DNS forward zone')
    object_name_plural = _('DNS forward zones')
    object_class = DNSZoneBase.object_class + ['idnsforwardzone']
    label = _('DNS Forward Zones')
    label_singular = _('DNS Forward Zone')
    default_forward_policy = u'first'

    # managed_permissions: permissions was apllied in dnszone class, do NOT
    # add them here, they should not be applied twice.

    def _warning_fw_zone_is_not_effective(self, result, *keys, **options):
        """Attach a warning when this forward zone is shadowed and inactive."""
        fwzone = keys[-1]
        _add_warning_fw_zone_is_not_effective(result, fwzone,
                                              options['version'])

    def _warning_if_forwarders_do_not_work(self, result, new_zone,
                                           *keys, **options):
        """Attach warnings when the configured forwarders do not respond.

        First each forwarder is probed directly; if at least one works, a
        query is sent through an IPA DNS server to detect DNSSEC validation
        failures.  *new_zone* adds a short delay so BIND can pick the new
        zone up from LDAP before the probe.
        """
        fwzone = keys[-1]
        forwarders = options.get('idnsforwarders', [])
        any_forwarder_work = False

        for forwarder in forwarders:
            try:
                validate_dnssec_zone_forwarder_step1(forwarder, fwzone,
                                                     log=self.log)
            except UnresolvableRecordError as e:
                messages.add_message(
                    options['version'],
                    result, messages.DNSServerValidationWarning(
                        server=forwarder, error=e
                    )
                )
            except EDNS0UnsupportedError as e:
                messages.add_message(
                    options['version'],
                    result, messages.DNSServerDoesNotSupportEDNS0Warning(
                        server=forwarder, error=e
                    )
                )
            else:
                any_forwarder_work = True

        if not any_forwarder_work:
            # do not test DNSSEC validation if there is no valid forwarder
            return

        # resolve IP address of any DNS replica
        # FIXME: https://fedorahosted.org/bind-dyndb-ldap/ticket/143
        # we currenly should to test all IPA DNS replica, because DNSSEC
        # validation is configured just in named.conf per replica
        ipa_dns_masters = [normalize_zone(x) for x in
                           api.Object.dnsrecord.get_dns_masters()]

        if not ipa_dns_masters:
            # something very bad happened, DNS is installed, but no IPA DNS
            # servers available
            self.log.error("No IPA DNS server can be found, but integrated DNS "
                           "is installed")
            return

        ipa_dns_ip = None
        for rdtype in (dns.rdatatype.A, dns.rdatatype.AAAA):
            try:
                ans = dns.resolver.query(ipa_dns_masters[0], rdtype)
            except dns.exception.DNSException:
                continue
            else:
                ipa_dns_ip = str(ans.rrset.items[0])
                break

        if not ipa_dns_ip:
            self.log.error("Cannot resolve %s hostname", ipa_dns_masters[0])
            return

        # sleep a bit, adding new zone to BIND from LDAP may take a while
        if new_zone:
            time.sleep(5)

        # Test if IPA is able to receive replies from forwarders
        try:
            validate_dnssec_zone_forwarder_step2(ipa_dns_ip, fwzone,
                                                 log=self.log)
        except DNSSECValidationError as e:
            messages.add_message(
                options['version'],
                result, messages.DNSSECValidationFailingWarning(error=e)
            )
        except UnresolvableRecordError as e:
            messages.add_message(
                options['version'],
                result, messages.DNSServerValidationWarning(
                    server=ipa_dns_ip, error=e
                )
            )
@register()
class dnsforwardzone_add(DNSZoneBase_add):
    __doc__ = _('Create new DNS forward zone.')

    def interactive_prompt_callback(self, kw):
        """Tell the CLI user that forwarder validation may take a while."""
        # show informative message on client side
        # server cannot send messages asynchronous
        if kw.get('idnsforwarders', False):
            self.Backend.textui.print_plain(
                _("Server will check DNS forwarder(s)."))
            self.Backend.textui.print_plain(
                _("This may take some time, please wait ..."))

    def pre_callback(self, ldap, dn, entry_attrs, attrs_list, *keys, **options):
        """Default the policy and require forwarders unless policy is 'none'."""
        assert isinstance(dn, DN)

        dn = super(dnsforwardzone_add, self).pre_callback(ldap, dn,
            entry_attrs, attrs_list, *keys, **options)

        if 'idnsforwardpolicy' not in entry_attrs:
            entry_attrs['idnsforwardpolicy'] = self.obj.default_forward_policy

        if (not entry_attrs.get('idnsforwarders') and
                entry_attrs['idnsforwardpolicy'] != u'none'):
            raise errors.ValidationError(name=u'idnsforwarders',
                                         error=_('Please specify forwarders.'))

        return dn

    def execute(self, *keys, **options):
        """Create the zone, then attach effectiveness/forwarder warnings."""
        result = super(dnsforwardzone_add, self).execute(*keys, **options)
        self.obj._warning_fw_zone_is_not_effective(result, *keys, **options)
        if options.get('idnsforwarders'):
            # fixed: removed leftover debug statement
            # (`print result, keys, options`) that leaked internal state to
            # the server's stdout on every add with forwarders
            self.obj._warning_if_forwarders_do_not_work(
                result, True, *keys, **options)
        return result
@register()
class dnsforwardzone_del(DNSZoneBase_del):
    __doc__ = _('Delete DNS forward zone.')

    # summary shown to the user after a successful delete
    msg_summary = _('Deleted DNS forward zone "%(value)s"')
@register()
class dnsforwardzone_mod(DNSZoneBase_mod):
    __doc__ = _('Modify DNS forward zone.')

    def interactive_prompt_callback(self, kw):
        """Tell the CLI user that forwarder validation may take a while."""
        # show informative message on client side
        # server cannot send messages asynchronous
        if kw.get('idnsforwarders', False):
            self.Backend.textui.print_plain(
                _("Server will check DNS forwarder(s)."))
            self.Backend.textui.print_plain(
                _("This may take some time, please wait ..."))

    def pre_callback(self, ldap, dn, entry_attrs, attrs_list, *keys, **options):
        """Reject changes that would leave a non-'none' zone without forwarders."""
        try:
            entry = ldap.get_entry(dn)
        except errors.NotFound:
            self.obj.handle_not_found(*keys)

        if not _check_entry_objectclass(entry, self.obj.object_class):
            # entry exists but is not a forward zone
            self.obj.handle_not_found(*keys)

        policy = self.obj.default_forward_policy
        forwarders = []

        # effective values after the change: the request wins over the
        # stored entry, which wins over the default policy
        if 'idnsforwarders' in entry_attrs:
            forwarders = entry_attrs['idnsforwarders']
        elif 'idnsforwarders' in entry:
            forwarders = entry['idnsforwarders']

        if 'idnsforwardpolicy' in entry_attrs:
            policy = entry_attrs['idnsforwardpolicy']
        elif 'idnsforwardpolicy' in entry:
            policy = entry['idnsforwardpolicy']

        if not forwarders and policy != u'none':
            raise errors.ValidationError(name=u'idnsforwarders',
                                         error=_('Please specify forwarders.'))

        return dn

    def execute(self, *keys, **options):
        """Run the modify; warn when the new forwarders do not respond."""
        result = super(dnsforwardzone_mod, self).execute(*keys, **options)

        if options.get('idnsforwarders'):
            self.obj._warning_if_forwarders_do_not_work(result, False, *keys,
                                                        **options)
        return result
@register()
class dnsforwardzone_find(DNSZoneBase_find):
    # behavior fully inherited from DNSZoneBase_find
    __doc__ = _('Search for DNS forward zones.')
@register()
class dnsforwardzone_show(DNSZoneBase_show):
    __doc__ = _('Display information about a DNS forward zone.')

    # reuse the zone output params (managedby etc.) for display
    has_output_params = LDAPRetrieve.has_output_params + dnszone_output_params
@register()
class dnsforwardzone_disable(DNSZoneBase_disable):
    # behavior fully inherited from DNSZoneBase_disable
    __doc__ = _('Disable DNS Forward Zone.')
    msg_summary = _('Disabled DNS forward zone "%(value)s"')
@register()
class dnsforwardzone_enable(DNSZoneBase_enable):
    __doc__ = _('Enable DNS Forward Zone.')
    msg_summary = _('Enabled DNS forward zone "%(value)s"')

    def execute(self, *keys, **options):
        """Enable the zone; warn if it is still shadowed and ineffective."""
        result = super(dnsforwardzone_enable, self).execute(*keys, **options)
        self.obj._warning_fw_zone_is_not_effective(result, *keys, **options)
        return result
@register()
class dnsforwardzone_add_permission(DNSZoneBase_add_permission):
    # behavior fully inherited from DNSZoneBase_add_permission
    __doc__ = _('Add a permission for per-forward zone access delegation.')
@register()
class dnsforwardzone_remove_permission(DNSZoneBase_remove_permission):
    # behavior fully inherited from DNSZoneBase_remove_permission
    __doc__ = _('Remove a permission for per-forward zone access delegation.')
|
pspacek/freeipa
|
ipalib/plugins/dns.py
|
Python
|
gpl-3.0
| 168,418
|
[
"VisIt"
] |
a3b50959ecd9756cfee5063d953b81e4155e4196cdc20079dd0d2c8f342f009c
|
#!/usr/bin/env python
# Copyright 2014-2018 The PySCF Developers. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import unittest
from functools import reduce
import numpy
from pyscf import gto
from pyscf import scf
from pyscf.lo import orth
# Shared water-molecule fixture used by the tests below.
mol = gto.Mole()
mol.verbose = 0
mol.output = None
# atoms may be given by symbol or nuclear charge; '1' is hydrogen
mol.atom = '''
O 0. 0. 0
1 0. -0.757 0.587
1 0. 0.757 0.587'''
mol.basis = 'cc-pvdz'
mol.build()

# NOTE(review): mf is constructed but never referenced by any test below
mf = scf.RHF(mol)
class KnowValues(unittest.TestCase):
    def test_orth(self):
        # Lowdin orthogonalization of a random PSD matrix must satisfy
        # c^T s c == I; reference norms pin the implementation numerically.
        numpy.random.seed(10)
        n = 100
        a = numpy.random.random((n,n))
        s = numpy.dot(a.T, a)
        c = orth.lowdin(s)
        self.assertTrue(numpy.allclose(reduce(numpy.dot, (c.T, s, c)),
                                       numpy.eye(n)))
        # vec_lowdin(a) must span the same space: x1^T x2 diagonal
        x1 = numpy.dot(a, c)
        x2 = orth.vec_lowdin(a)
        d = numpy.dot(x1.T,x2)
        d[numpy.diag_indices(n)] = 0
        self.assertAlmostEqual(numpy.linalg.norm(d), 0, 9)
        self.assertAlmostEqual(numpy.linalg.norm(c), 36.56738258719514, 9)
        self.assertAlmostEqual(abs(c).sum(), 2655.5580057303964, 7)

    def test_schmidt(self):
        # Same contract as test_orth but for Gram-Schmidt orthogonalization.
        numpy.random.seed(10)
        n = 100
        a = numpy.random.random((n,n))
        s = numpy.dot(a.T, a)
        c = orth.schmidt(s)
        self.assertTrue(numpy.allclose(reduce(numpy.dot, (c.T, s, c)),
                                       numpy.eye(n)))
        x1 = numpy.dot(a, c)
        x2 = orth.vec_schmidt(a)
        d = numpy.dot(x1.T,x2)
        d[numpy.diag_indices(n)] = 0
        self.assertAlmostEqual(numpy.linalg.norm(d), 0, 9)
        self.assertAlmostEqual(numpy.linalg.norm(c), 36.56738258719514, 9)
        self.assertAlmostEqual(abs(c).sum(), 1123.2089785000373, 7)

    def test_weight_orth(self):
        # Weighted orthogonalization must still give c^T s c == I.
        numpy.random.seed(10)
        n = 100
        a = numpy.random.random((n,n))
        s = numpy.dot(a.T, a)
        weight = numpy.random.random(n)
        c = orth.weight_orth(s, weight)
        self.assertTrue(numpy.allclose(reduce(numpy.dot, (c.T, s, c)),
                                       numpy.eye(n)))
        self.assertAlmostEqual(numpy.linalg.norm(c), 36.56738258719514, 8)
        self.assertAlmostEqual(abs(c).sum(), 1908.8535852660757, 6)

    def test_orth_ao(self):
        # orth_ao accepts a precomputed pre-orthogonalization, a basis name,
        # or None; each path is pinned by a reference checksum.
        c0 = orth.pre_orth_ao(mol, method='scf')
        self.assertAlmostEqual(abs(c0).sum(), 33.48215772351, 8)
        c = orth.orth_ao(mol, 'lowdin', c0)
        self.assertAlmostEqual(abs(c).sum(), 94.21571091299639, 8)
        c = orth.orth_ao(mol, 'meta_lowdin', c0)
        self.assertAlmostEqual(abs(c).sum(), 92.15697348744733, 8)

        c = orth.orth_ao(mol, 'meta_lowdin', 'sto-3g')
        self.assertAlmostEqual(abs(c).sum(), 90.12324660084619, 8)

        c = orth.orth_ao(mol, 'meta_lowdin', None)
        self.assertAlmostEqual(abs(c).sum(), 83.71349158130113, 8)

    def test_ghost_atm_meta_lowdin(self):
        # local `mol` intentionally shadows the module-level fixture:
        # this test needs a ghost atom with its own basis
        mol = gto.Mole()
        mol.atom = [["O" , (0. , 0.     , 0.)],
                    ['ghost' , (0. , -0.757, 0.587)],
                    [1 , (0. , 0.757 , 0.587)] ]
        mol.spin = 1
        mol.basis = {'O':'ccpvdz', 'H':'ccpvdz',
                     'GHOST': gto.basis.load('631g','H')}
        mol.build()
        c = orth.orth_ao(mol, method='meta_lowdin')
        self.assertAlmostEqual(numpy.linalg.norm(c), 7.9067188905237256, 9)

    def test_pre_orth_ao_with_ecp(self):
        # ANO pre-orthogonalization must work for ECP basis sets too
        mol = gto.M(atom='Cu 0. 0. 0.; H 0. 0. -1.56; H 0. 0. 1.56',
                    basis={'Cu':'lanl2dz', 'H':'ccpvdz'},
                    ecp = {'cu':'lanl2dz'},
                    charge=-1,
                    verbose=0)
        c0 = orth.pre_orth_ao(mol, method='ano')
        self.assertAlmostEqual(numpy.linalg.norm(c0), 5.9621174285790959, 9)
if __name__ == "__main__":
    # allow running this test module directly
    print("Test orth")
    unittest.main()
|
sunqm/pyscf
|
pyscf/lo/test/test_orth.py
|
Python
|
apache-2.0
| 4,348
|
[
"PySCF"
] |
c23c5a7399fd1d1859cd661db19b267682a520a255dc863dcefcff5c22762137
|
#!/usr/bin/env python
#
# Appcelerator Titanium Module Packager
#
#
import os, sys, glob, string
import zipfile
from datetime import date
# json is in the stdlib since Python 2.6; fall back to simplejson for
# older interpreters
try:
    import json
except:
    import simplejson as json

# run the packager from the directory containing this script
cwd = os.path.abspath(os.path.dirname(sys._getframe(0).f_code.co_filename))
os.chdir(cwd)

# manifest keys that must be present for a valid module
# NOTE(review): 'copyright' is listed twice — harmless (the check just
# runs twice) but likely a typo
required_module_keys = ['name','version','moduleid','description','copyright','license','copyright','platform','minsdk']
# placeholder values the module author is expected to replace
module_defaults = {
    'description':'My module',
    'author': 'Your Name',
    'license' : 'Specify your license',
    'copyright' : 'Copyright (c) %s by Your Company' % str(date.today().year),
}
module_license_default = "TODO: place your license here and we'll include it in the module distribution"
def find_sdk(config):
    """Return the configured Titanium SDK path with ~ and env vars expanded."""
    raw_path = config['TITANIUM_SDK']
    expanded = os.path.expanduser(raw_path)
    return os.path.expandvars(expanded)
def replace_vars(config,token):
    """Expand $(KEY) references in *token* using values from *config*.

    Expansion restarts from the beginning after every substitution, so
    values may themselves contain further references.  An unknown key
    stops expansion and is left verbatim in the result.
    """
    idx = token.find('$(')
    while idx != -1:
        idx2 = token.find(')',idx+2)
        if idx2 == -1: break
        key = token[idx+2:idx2]
        # fixed: dict.has_key() is Python-2-only; `in` works on both 2 and 3
        if key not in config: break
        token = token.replace('$(%s)' % key, config[key])
        idx = token.find('$(')
    return token
def read_ti_xcconfig():
    """Parse titanium.xcconfig into a dict, expanding $(VAR) references."""
    contents = open(os.path.join(cwd,'titanium.xcconfig')).read()
    config = {}
    for line in contents.splitlines(False):
        line = line.strip()
        # skip '//' comments; blank lines fall through harmlessly (no '=')
        if line[0:2]=='//': continue
        idx = line.find('=')
        if idx > 0:
            key = line[0:idx].strip()
            value = line[idx+1:].strip()
            # values may reference keys parsed earlier in the file
            config[key] = replace_vars(config,value)
    return config
def generate_doc(config):
    """Render documentation/*.md files to HTML.

    Returns a list of {filename: html} dicts, or None when the module has
    no documentation directory.  Uses the markdown package shipped with
    the Titanium SDK.
    """
    docdir = os.path.join(cwd,'documentation')
    if not os.path.exists(docdir):
        print "Couldn't find documentation file at: %s" % docdir
        return None

    # the markdown renderer ships in the SDK's module/support directory
    sdk = find_sdk(config)
    support_dir = os.path.join(sdk,'module','support')
    sys.path.append(support_dir)
    try:
        import markdown2 as markdown
    except ImportError:
        import markdown

    documentation = []
    for file in os.listdir(docdir):
        # NOTE(review): ignoreFiles is defined later at module level; that
        # is fine at runtime because this is only called from __main__
        if file in ignoreFiles or os.path.isdir(os.path.join(docdir, file)):
            continue
        md = open(os.path.join(docdir,file)).read()
        html = markdown.markdown(md)
        documentation.append({file:html});
    return documentation
def compile_js(manifest,config):
    """Compile the module's CommonJS asset into the native module sources.

    Compiles assets/com.meeech.mixpanel.js with the Titanium SDK compiler,
    records the exported symbols in metadata.json and splices the generated
    lookup method into ComMeeechMixpanelModuleAssets.m.  No-op when the JS
    asset does not exist.
    """
    js_file = os.path.join(cwd,'assets','com.meeech.mixpanel.js')
    if not os.path.exists(js_file): return

    # the compiler ships inside the Titanium SDK; make it importable
    sdk = find_sdk(config)
    iphone_dir = os.path.join(sdk,'iphone')
    sys.path.insert(0,iphone_dir)
    from compiler import Compiler

    path = os.path.basename(js_file)

    compiler = Compiler(cwd, manifest['moduleid'], manifest['name'], 'commonjs')
    metadata = compiler.make_function_from_file(path,js_file)
    exports = open('metadata.json','w')
    json.dump({'exports':compiler.exports }, exports)
    exports.close()

    method = metadata['method']
    # fixed: removed unused local `eq = path.replace('.','_')` — leftover
    # from an older packager template
    method = ' return %s;' % method

    # splice the generated body in place of everything from 'return ' on
    f = os.path.join(cwd,'Classes','ComMeeechMixpanelModuleAssets.m')
    c = open(f).read()
    idx = c.find('return ')
    before = c[0:idx]
    after = """
}
@end
"""
    newc = before + method + after

    # avoid touching the file (and triggering a rebuild) when unchanged
    if newc!=c:
        x = open(f,'w')
        x.write(newc)
        x.close()
def die(msg):
    """Report a fatal packaging error and abort with exit status 1."""
    print(msg)
    sys.exit(1)
def warn(msg):
    """Print a non-fatal warning, prefixed so it stands out in build logs."""
    print("[WARN] %s" % msg)
def validate_license():
    """Warn when the LICENSE file still contains the placeholder text."""
    c = open(os.path.join(cwd,'LICENSE')).read()
    if c.find(module_license_default)!=-1:
        warn('please update the LICENSE file with your license text before distributing')
def validate_manifest():
    """Parse and validate the module manifest.

    Returns (manifest_dict, manifest_path).  Dies when the file is missing
    or a required key is absent; warns when a key still holds its template
    default value.
    """
    path = os.path.join(cwd,'manifest')
    # fixed: the existence check used to come AFTER open(), so a missing
    # manifest raised IOError before die() could produce its message
    if not os.path.exists(path): die("missing %s" % path)
    f = open(path)
    manifest = {}
    for line in f.readlines():
        line = line.strip()
        if line[0:1]=='#': continue
        if line.find(':') < 0: continue
        # split on the first ':' only so values may contain colons
        # (URLs, timestamps, ...); the old split(':') crashed on them
        key,value = line.split(':',1)
        manifest[key.strip()]=value.strip()
    for key in required_module_keys:
        # `in` instead of Python-2-only dict.has_key()
        if key not in manifest: die("missing required manifest key '%s'" % key)
        if key in module_defaults:
            defvalue = module_defaults[key]
            curvalue = manifest[key]
            if curvalue==defvalue: warn("please update the manifest key: '%s' to a non-default value" % key)
    return manifest,path
# files and directories that must never be packaged into the module zip
# (VCS metadata, OS litter, the raw JS asset that gets compiled in)
ignoreFiles = ['.DS_Store','.gitignore','libTitanium.a','titanium.jar','README','com.meeech.mixpanel.js']
ignoreDirs = ['.DS_Store','.svn','.git','CVSROOT']
def zip_dir(zf,dir,basepath,ignore=()):
    """Recursively add *dir* to the open ZipFile *zf* under *basepath*.

    Directories in the module-level ignoreDirs, files in ignoreFiles and
    compiled .pyc files are skipped.

    NOTE(review): the *ignore* parameter is accepted for backward
    compatibility but never consulted (the module-level lists are used
    instead); its mutable default ([]) was replaced with an immutable
    tuple per standard Python practice.
    """
    for root, dirs, files in os.walk(dir):
        for name in ignoreDirs:
            if name in dirs:
                dirs.remove(name)	# don't visit ignored directories
        for file in files:
            if file in ignoreFiles: continue
            e = os.path.splitext(file)
            if len(e)==2 and e[1]=='.pyc':continue
            from_ = os.path.join(root, file)
            to_ = from_.replace(dir, basepath, 1)
            zf.write(from_, to_)
def glob_libfiles():
    """Return the Release-configuration static libraries found under build/."""
    return [libfile for libfile in glob.glob('build/**/*.a')
            if 'Release-' in libfile]
def build_module(manifest, config):
    # Compile the static library twice -- once against the device SDK and
    # once against the simulator SDK -- aborting on the first failure.
    rc = os.system("xcodebuild -sdk iphoneos -configuration Release")
    if rc != 0:
        die("xcodebuild failed")
    rc = os.system("xcodebuild -sdk iphonesimulator -configuration Release")
    if rc != 0:
        die("xcodebuild failed")
    # build the merged library using lipo
    moduleid = manifest['moduleid']
    libpaths = ''
    for libfile in glob_libfiles():
        libpaths += '%s ' % libfile
    # NOTE(review): lipo's exit status is not checked -- confirm whether a
    # failure here should also abort the build.
    os.system("lipo %s -create -output build/lib%s.a" % (libpaths, moduleid))
def package_module(manifest, mf, config):
    # Assemble the distributable <moduleid>-iphone-<version>.zip containing
    # the manifest, merged static library, generated docs, optional asset
    # directories, license, xcconfig, and JS export metadata.
    name = manifest['name'].lower()
    moduleid = manifest['moduleid'].lower()
    version = manifest['version']
    modulezip = '%s-iphone-%s.zip' % (moduleid, version)
    # remove any previous package so the archive is rebuilt from scratch
    if os.path.exists(modulezip): os.remove(modulezip)
    zf = zipfile.ZipFile(modulezip, 'w', zipfile.ZIP_DEFLATED)
    modulepath = 'modules/iphone/%s/%s' % (moduleid, version)
    zf.write(mf, '%s/manifest' % modulepath)
    libname = 'lib%s.a' % moduleid
    zf.write('build/%s' % libname, '%s/%s' % (modulepath, libname))
    # render the markdown documentation into HTML inside the archive
    docs = generate_doc(config)
    if docs != None:
        for doc in docs:
            for file, html in doc.iteritems():
                filename = string.replace(file, '.md', '.html')
                zf.writestr('%s/documentation/%s' % (modulepath, filename), html)
    # bundle the optional module directories when they exist
    for dn in ('assets', 'example', 'platform'):
        if os.path.exists(dn):
            zip_dir(zf, dn, '%s/%s' % (modulepath, dn), ['README'])
    zf.write('LICENSE', '%s/LICENSE' % modulepath)
    zf.write('module.xcconfig', '%s/module.xcconfig' % modulepath)
    # ship the JS export metadata produced by compile_js, if present
    exports_file = 'metadata.json'
    if os.path.exists(exports_file):
        zf.write(exports_file, '%s/%s' % (modulepath, exports_file))
    zf.close()
if __name__ == '__main__':
    # Build pipeline: validate inputs, compile the JS into the module,
    # build the fat static library, then zip everything up.
    manifest, mf = validate_manifest()
    validate_license()
    config = read_ti_xcconfig()
    compile_js(manifest, config)
    build_module(manifest, config)
    package_module(manifest, mf, config)
    sys.exit(0)
|
meeech/Titanium-Mixpanel-Module
|
build.py
|
Python
|
mit
| 6,445
|
[
"VisIt"
] |
1834be6bff81678da1ac9a4fce147478e64fc7cb8c3d03f7c2c9ff0da4604ceb
|
# -*- mode: python; -*-
##
## Utilities for working with paired-end reads and
## fragment distributions
##
import os
import sys
import glob
import time
import pysam
from scipy import *
from numpy import *
import misopy
from misopy.parse_csv import *
import misopy.Gene as gene_utils
import misopy.sam_utils as sam_utils
import misopy.exon_utils as exon_utils
from collections import defaultdict
def get_insert_dist_array(interval_to_paired_dists,
                          delim='\t'):
    """
    Read insert length distribution as array of numbers.

    Flattens the per-interval insert length lists into one numpy array.
    Uses dict.values() instead of the Python2-only iteritems(), so the
    function works on both Python 2 and 3.
    """
    insert_dist = []
    for paired_dists in interval_to_paired_dists.values():
        insert_dist.extend(paired_dists)
    return array(insert_dist)
def parse_insert_len_params(insert_len_header):
    """Parse a '#key=val,key=val,...' insert-length header into a dict of strings."""
    header = insert_len_header.strip()
    # drop the leading comment marker, if any
    if header.startswith("#"):
        header = header[1:]
    pairs = (field.split("=") for field in header.split(","))
    return dict((name, value) for name, value in pairs)
def filter_insert_len(interval_to_dists,
                      sd_max):
    """
    Drop insert lengths further than sd_max standard deviations from the
    mean of the pooled distribution.

    Bug fix: the original removed "> max_cutoff" entries using indices
    computed with nonzero() on the *unfiltered* array but applied them
    with delete() to the already min-filtered array, so the wrong
    elements could be removed.  A single boolean mask keeps exactly the
    values in [min_cutoff, max_cutoff].
    """
    # Get vector of insert lengths
    insert_dist = get_insert_dist_array(interval_to_dists)
    mu, sdev, dispersion, num_pairs = \
        compute_insert_len_stats(insert_dist)
    filtered_interval_to_dists = defaultdict(list)
    min_cutoff = mu - (sd_max * sdev)
    max_cutoff = mu + (sd_max * sdev)
    # single-argument parenthesized print works on Python 2 and 3
    print("Excluding values < %.2f or > %.2f" \
          % (min_cutoff, max_cutoff))
    for interval, dists in interval_to_dists.items():
        dists = array(dists)
        keep = (dists >= min_cutoff) & (dists <= max_cutoff)
        filtered_interval_to_dists[interval] = dists[keep]
    return filtered_interval_to_dists
def load_insert_len(insert_dist_filename,
                    delim='\t'):
    """Load an insert length file; return (lengths array, header params dict)."""
    print("Loading insert length from: %s" % (insert_dist_filename))
    insert_lens = []
    insert_dist_file = open(insert_dist_filename, "r")
    # First line is a '#key=val,...' header describing the distribution.
    params = parse_insert_len_params(insert_dist_file.readline().strip())
    for line in insert_dist_file:
        # Skip any additional comment lines
        if line.startswith("#"):
            continue
        fields = line.strip().split(delim)
        if len(fields) != 2:
            continue
        # second column holds a comma-separated list of integer lengths
        insert_lens.extend(int(value) for value in fields[1].split(","))
    insert_dist_file.close()
    return array(insert_lens), params
def bedtools_map_bam_to_bed(bam_filename, gff_intervals_filename):
    """
    Map BAM file to GFF intervals and return the result as a
    BED file.

    Returns a stream to a BED file with the results.

    Raises Exception when either input file does not exist; the check
    runs before the external intersectBed command is launched.
    """
    bedtools_cmd = "intersectBed -abam %s -b %s -wa -wb -bed -f 1" \
                   % (bam_filename, gff_intervals_filename)
    print("Executing: %s" % (bedtools_cmd))
    if (not os.path.isfile(bam_filename)) or \
       (not os.path.isfile(gff_intervals_filename)):
        # raise Exception(...) replaces the Python2-only
        # "raise Exception, msg" statement syntax.
        raise Exception("Error: %s or %s do not exist."
                        % (bam_filename, gff_intervals_filename))
    bed_stream = os.popen(bedtools_cmd)
    return bed_stream
def parse_tagBam_intervals(bam_read,
                           gff_coords=True):
    """
    Return the GFF interval strings encoded by tagBam in the read's
    YB tag, formatted as "chrom:start-end:strand".

    When gff_coords is True, the start coordinate is shifted by +1 to
    convert the 0-based BAM position into a 1-based GFF position.
    """
    yb_value = bam_read.opt("YB")
    intervals = []
    # each region looks like "chrom:start-end,<...>,<...>,strand"
    for entry in yb_value.split("gff:")[1:]:
        fields = entry.split(",")
        strand = fields[3]
        chrom, span = fields[0].split(":")
        start_str, end_str = span.split("-")
        start = int(start_str)
        end = int(end_str)
        if gff_coords:
            start += 1
        intervals.append("%s:%d-%d:%s" % (chrom, start, end, strand))
    return intervals
def compute_inserts_from_paired_mates(paired_reads):
    """
    Get insert lengths from paired-up paired ends reads
    aligned to a set of constitutive exon intervals.
    Return mapping from intervals to distances of read pairs
    that land in them.

    paired_reads maps read id -> (left_mate, right_mate); both mates are
    expected to carry the tagBam 'YB' tag read by parse_tagBam_intervals.
    """
    # Mapping from interval to
    interval_to_paired_dists = defaultdict(list)
    num_skipped = 0
    num_kept = 0
    for read_id, read_pair in paired_reads.iteritems():
        to_skip = False
        # Get the intervals that each read pair lands in
        # Consider here only the mate pairs that map to
        # the same interval, and to exactly one interval, and
        # not in a junction
        left_mate, right_mate = read_pair
        left_mate_intervals = parse_tagBam_intervals(left_mate)
        right_mate_intervals = parse_tagBam_intervals(right_mate)
        # If either of the mates lands in more than one set of intervals,
        # discard it.
        if (len(left_mate_intervals) != 1 or \
            len(right_mate_intervals) != 1):
            to_skip = True
        elif left_mate_intervals[0] != right_mate_intervals[0]:
            # If each maps to one interval, but it's not the same,
            # also discard it.
            to_skip = True
        elif (len(left_mate.cigar) != 1 or \
              len(right_mate.cigar) != 1):
            # One of the read mates was in a junction
            to_skip = True
        elif (left_mate.cigar[0][0] != 0 or \
              right_mate.cigar[0][0] != 0):
            # Both CIGAR operations must be M (matches)
            to_skip = True
        if to_skip:
            # One of the conditions was violated, so skip read pair
            num_skipped += 1
            continue
        # We have a match, so compute insert length distance,
        # defined as the distance between the start position
        # of the left and the end position of the right mate
        left_start = left_mate.pos
        # NOTE(review): left_end is computed but never used below.
        left_end = sam_utils.cigar_to_end_coord(left_start,
                                                left_mate.cigar)
        right_start = right_mate.pos
        right_end = sam_utils.cigar_to_end_coord(right_start,
                                                 right_mate.cigar)
        # Get the current GFF interval string
        curr_gff_interval = left_mate_intervals[0]
        # Insert length is right.end - left.start + 1
        insert_len = right_end - left_start + 1
        if insert_len <= 0:
            # discordant / inverted pair: report and skip
            print "WARNING: 0 or negative insert length detected " \
                  "in region %s." %(curr_gff_interval)
            continue
        interval_to_paired_dists[curr_gff_interval].append(insert_len)
        num_kept += 1
    print "Used %d paired mates, threw out %d" \
          %(num_kept, num_skipped)
    return interval_to_paired_dists
def compute_insert_len(bams_to_process,
                       const_exons_gff_filename,
                       output_dir,
                       min_exon_size,
                       no_bam_filter=False,
                       sd_max=2):
    """
    Compute insert length distribution and output it to the given
    directory.

    Arguments:
    - bams_to_process: a list of BAM files to process
    - const_gff_filename: GFF with constitutive exons
    - output_dir: directory the per-BAM .insert_len files are written to
    - min_exon_size: minimum constitutive exon size (nt) to use
    - no_bam_filter: when True, pair mates by read id only
    - sd_max: outlier cutoff in standard deviations (see filter_insert_len)
    """
    bams_str = "\n ".join(bams_to_process)
    num_bams = len(bams_to_process)
    print "Computing insert length distribution of %d files:\n %s" \
          %(num_bams, bams_str)
    print " - Using const. exons from: %s" %(const_exons_gff_filename)
    print " - Outputting to: %s" %(output_dir)
    print " - Minimum exon size used: %d" %(min_exon_size)
    if not os.path.isdir(output_dir):
        print "Making directory: %s" %(output_dir)
        os.makedirs(output_dir)
    # NOTE(review): this local is unused; all_constitutive is passed
    # literally to get_const_exons_by_gene below.
    all_constitutive = True
    const_exons, f = \
        exon_utils.get_const_exons_by_gene(const_exons_gff_filename,
                                           output_dir,
                                           # Treat all exons as constitutive
                                           all_constitutive=True,
                                           min_size=min_exon_size)
    filter_reads = not no_bam_filter
    if filter_reads:
        print "Filtering BAM reads"
    else:
        print "Turning off filtering of BAM reads"
    for bam_filename in bams_to_process:
        t1 = time.time()
        output_filename = os.path.join(output_dir,
                                       "%s.insert_len" \
                                       %(os.path.basename(bam_filename)))
        if not os.path.isfile(bam_filename):
            print "Cannot find BAM file %s" %(bam_filename)
            print "Quitting..."
            sys.exit(1)
        print "Fetching reads in constitutive exons"
        # restrict the BAM to reads overlapping the constitutive exons
        mapped_bam_filename = exon_utils.map_bam2gff(bam_filename,
                                                     const_exons_gff_filename,
                                                     output_dir)
        if mapped_bam_filename == None:
            raise Exception, "Error: Insert length computation failed."
        # Load mapped BAM filename
        mapped_bam = pysam.Samfile(mapped_bam_filename, "rb")
        ###
        ### TODO: Rewrite this so that you only pair reads within an interval
        ###
        paired_reads = sam_utils.pair_sam_reads(mapped_bam,
                                                filter_reads=filter_reads)
        num_paired_reads = len(paired_reads)
        if num_paired_reads == 0:
            print "WARNING: no paired mates in %s. Skipping...\n"\
                  "Are you sure the read IDs match? If your BAM paired flags are "\
                  "unset, try using --no-bam-filter." \
                  %(bam_filename)
            continue
        print "Using %d paired mates" %(num_paired_reads)
        interval_to_paired_dists = compute_inserts_from_paired_mates(paired_reads)
        summarize_insert_len_dist(interval_to_paired_dists, output_filename,
                                  sd_max=sd_max)
        t2 = time.time()
        print "Insert length computation took %.2f seconds." %(t2 - t1)
# def pair_reads_from_bed_intervals(bed_stream):
# """
# Match up read mates with each other, indexed by the BED interval
# that they fall in.
# Return a dictionary of BED region mapping to a set of read pairs.
# Arguments:
# - bed_filename: file with BED reads and the region they map to.
# Returns.
# """
# return
# def compute_insert_len(bam_filename, gff_filename, output_dir,
# min_exon_size):
# """
# Compute insert length distribution and output it to the given
# directory.
# """
# print "Computing insert length distribution of %s" %(bam_filename)
# print " - Using gene models from: %s" %(gff_filename)
# print " - Outputting to: %s" %(output_dir)
# print " - Minimum exon size used: %d" %(min_exon_size)
# if not os.path.isdir(output_dir):
# print "Making directory: %s" %(output_dir)
# os.makedirs(output_dir)
# output_filename = os.path.join(output_dir,
# "%s.insert_len" %(os.path.basename(bam_filename)))
# # Load BAM file with reads
# bamfile = sam_utils.load_bam_reads(bam_filename)
# # Load the genes from the GFF
# print "Loading genes from GFF..."
# t1 = time.time()
# gff_genes = gene_utils.load_genes_from_gff(gff_filename)
# t2 = time.time()
# print " - Loading genes from GFF took %.2f seconds" %(t2 - t1)
# insert_lengths = []
# t1 = time.time()
# relevant_region = 0
# for gene_id, gene_info in gff_genes.iteritems():
# gene_obj = gene_info["gene_object"]
# # Get all the constitutive parts
# const_parts = gene_obj.get_const_parts()
# chrom = gene_obj.chrom
# # Consider only the large constitutive parts
# for part in const_parts:
# if part.len >= min_exon_size:
# # Get all the reads that land in the coordinates of the exon
# try:
# exon_reads = bamfile.fetch(chrom, part.start, part.end)
# except ValueError:
# print "Could not fetch from region: ", chrom, part.start, part.end
# continue
# # Pair all the paired-end reads that land there
# paired_reads = sam_utils.pair_sam_reads(exon_reads)
# num_paired_reads = len(paired_reads)
# if num_paired_reads == 0:
# continue
# print "Found %d region" %(relevant_region)
# relevant_region += 1
# # Compute the insert length of each read
# for read_pair_id, read_pair in paired_reads.iteritems():
# if len(read_pair) != 2:
# # Skip non-paired reads
# continue
# left_read, right_read = read_pair
# insert_len = right_read.pos - left_read.pos + 1
# if insert_len > 0:
# insert_lengths.append(insert_len)
# else:
# print "Negative or zero insert length ignored..."
# # Output results to file
# output_file = open(output_filename, 'w')
# insert_length_str = "\n".join(map(str, insert_lengths))
# output_file.write(insert_length_str)
# output_file.close()
# t2 = time.time()
# print "Insert length computation took %.2f seconds." %(t2 - t1)
def output_insert_len_dist(interval_to_paired_dists,
                           output_file):
    """
    Output insert length distribution indexed by regions.

    Writes a '#region<TAB>insert_len' header, then one tab-separated
    line per region: region, followed by its comma-joined insert
    lengths.  Regions with no insert lengths are skipped.
    """
    header = "#%s\t%s\n" %("region", "insert_len")
    output_file.write(header)
    # .items() replaces the Python2-only iteritems(), keeping the module
    # usable under Python 3 as well.
    for region, insert_lens in interval_to_paired_dists.items():
        if len(insert_lens) == 0:
            continue
        str_lens = ",".join([str(l) for l in insert_lens])
        output_line = "%s\t%s\n" %(region, str_lens)
        output_file.write(output_line)
def compute_insert_len_stats(insert_dist):
    """
    Return insert length statistics as (mean, sdev, dispersion, num_pairs).

    Dispersion is sdev / sqrt(mean): a scale-adjusted measure of how
    variable the insert length distribution is about its mean.
    """
    mu = mean(insert_dist)
    sdev = std(insert_dist)
    dispersion = sdev / sqrt(float(mu))
    return mu, sdev, dispersion, len(insert_dist)
def summarize_insert_len_dist(interval_to_paired_dists,
                              output_filename,
                              sd_max=2):
    """
    Summarize insert len distributions.

    Filters the pooled distribution to within sd_max standard deviations
    of its mean, then writes a '#mean=..,sdev=..,dispersion=..,num_pairs=..'
    header followed by the per-region raw lengths.
    """
    print "Summarizing insert length distribution.."
    print " - Output file: %s" %(output_filename)
    output_file = open(output_filename, "w")
    print "Removing values %d-many deviations outside the mean" \
          %(sd_max)
    # Filter insert length distribution based on sd_max
    filtered_interval_to_dist = filter_insert_len(interval_to_paired_dists,
                                                  sd_max)
    filtered_insert_dist = get_insert_dist_array(filtered_interval_to_dist)
    if len(filtered_insert_dist) == 0:
        print "Error: Could not find any properly mated pairs to " \
              "compute insert length with. Are you sure your BAM reads " \
              "are properly paired and map the chromosome headers in the " \
              "constitutive exon file?"
        sys.exit(1)
    mu, sdev, dispersion, num_pairs = \
        compute_insert_len_stats(filtered_insert_dist)
    print "mean\tsdev\tdispersion"
    print "%.1f\t%.1f\t%.1f" \
          %(mu, sdev, dispersion)
    min_insert = min(filtered_insert_dist)
    max_insert = max(filtered_insert_dist)
    print "min insert: %d" %(min_insert)
    print "max insert: %d" %(max_insert)
    # Write headers
    header_line = "#%s=%.1f,%s=%.1f,%s=%.1f,%s=%d\n" \
                  %("mean", mu,
                    "sdev", sdev,
                    "dispersion", dispersion,
                    "num_pairs", num_pairs)
    output_file.write(header_line)
    # Write raw insert lengths indexed by region
    output_insert_len_dist(filtered_interval_to_dist,
                           output_file)
    output_file.close()
def greeting():
    """Print the program banner and a usage hint."""
    # print(...) with a single argument is valid on Python 2 and 3 alike,
    # replacing the py2-only print statements.
    print("Utility for computing insert length distributions from paired-end "
          "BAM files.")
    print("Part of MISO (Mixture of Isoforms model)\n")
    print("See --help for usage.\n")
def main():
    """Command-line entry point: parse options and run the computation."""
    from optparse import OptionParser
    parser = OptionParser()
    parser.add_option("--compute-insert-len", dest="compute_insert_len", nargs=2, default=None,
                      help="Compute insert length for given sample. Takes as input "
                      "(1) a comma-separated list of sorted, indexed BAM files with headers "
                      "(or a single BAM filename), (2) a GFF file with constitutive exons. "
                      "Outputs the insert length distribution into the output directory.")
    parser.add_option("--no-bam-filter", dest="no_bam_filter", action="store_true", default=False,
                      help="If provided, this ignores the BAM file flags that state whether the read was paired "
                      "or not, and instead uses only the read IDs to pair up the mates. Use this if your "
                      "paired-end BAM was the result of a samtools merge operation.")
    parser.add_option("--min-exon-size", dest="min_exon_size", nargs=1, type="int", default=500,
                      help="Minimum size of constitutive exon (in nucleotides) that should be used "
                      "in the computation. Default is 500 bp.")
    parser.add_option("--sd-max", dest="sd_max", nargs=1, default=2, type="int",
                      help="Number of standard deviations used to define outliers. By default, set "
                      "to 2, meaning that any points at least 2*sigma away from the mean of the "
                      "insert length distribution will be discarded.")
    parser.add_option("--output-dir", dest="output_dir", nargs=1, default=None,
                      help="Output directory.")
    (options, args) = parser.parse_args()
    # With no action requested, just print the banner and exit.
    if options.compute_insert_len is None:
        greeting()
        return
    if options.output_dir is None:
        greeting()
        print "Error: need --output-dir."
        return
    output_dir = os.path.abspath(os.path.expanduser(options.output_dir))
    sd_max = options.sd_max
    if options.compute_insert_len != None:
        # First argument: comma-separated BAM list; second: constitutive-exon GFF.
        bams_to_process = [os.path.abspath(os.path.expanduser(f)) for f in \
                           options.compute_insert_len[0].split(",")]
        gff_filename = os.path.abspath(os.path.expanduser(options.compute_insert_len[1]))
        compute_insert_len(bams_to_process, gff_filename, output_dir,
                           options.min_exon_size,
                           no_bam_filter=options.no_bam_filter,
                           sd_max=sd_max)
|
Xinglab/rmats2sashimiplot
|
src/MISO/misopy/pe_utils.py
|
Python
|
gpl-2.0
| 19,818
|
[
"pysam"
] |
044a1b8ef86b9bf18a189ecc37186e9a296deb74335c943d664b106e886adf48
|
"""Code for checking for local names and superfluous import statements.
This code provides searches for local symbols in the AST, assignments and such
things.
"""
# This file is part of the Snakefood open source package.
# See http://furius.ca/snakefood/ for licensing details.
# stdlib imports
import compiler
__all__ = ('get_names_from_ast', 'filter_unused_imports',
'NamesVisitor', 'AssignVisitor', 'AllVisitor')
def get_names_from_ast(ast):
    """Walk the AST and return (dotted_names, simple_names) referenced in it."""
    visitor = NamesVisitor()
    compiler.walk(ast, visitor)
    # finalize() already yields the (dotted, simple) pair
    return visitor.finalize()
def filter_unused_imports(ast, found_imports):
    """
    Given the ast and the list of found imports in the file, find out which of
    the imports are not used and return two lists: a list of used imports, and a
    list of unused imports.
    """
    # Names actually referenced in the code...
    dotted_names, _simple_names = get_names_from_ast(ast)
    # ...plus names re-exported through __all__.
    all_visitor = AllVisitor()
    compiler.walk(ast, all_visitor)
    exported = all_visitor.finalize()
    # Check that all imports have been referenced at least once.
    referenced = set(entry[0] for entry in dotted_names)
    referenced.update(entry[0] for entry in exported)
    used, unused = [], []
    for imp in found_imports:
        _, _, local_name, _lineno, _, _ = imp
        if local_name is not None and local_name not in referenced:
            unused.append(imp)
        else:
            used.append(imp)
    return used, unused
class Visitor(object):
    "Base class for our visitors."

    def continue_(self, node):
        # Recurse into every child of *node* via the subclass's visit dispatch.
        for child_node in node.getChildNodes():
            self.visit(child_node)
class NamesVisitor(Visitor):
    """AST visitor that finds all the identifier references that are defined,
    including dotted references. This includes all free names and names with
    attribute references.
    """
    def __init__(self):
        self.dotted = []      # (dotted.name, lineno) for each name prefix
        self.simple = []      # (leading name, lineno)
        self.attributes = []  # attribute names gathered while descending Getattr

    def visitName(self, node):
        # A Name node terminates a chain of Getattr nodes; the attributes
        # were collected outermost-first, so reverse into evaluation order.
        self.attributes.append(node.name)
        self.attributes.reverse()
        attribs = self.attributes
        # range() replaces the Python2-only xrange(); iteration is identical.
        for i in range(1, len(attribs)+1):
            self.dotted.append(('.'.join(attribs[0:i]), node.lineno))
        self.simple.append((attribs[0], node.lineno))
        self.attributes = []

    def visitGetattr(self, node):
        self.attributes.append(node.attrname)
        self.continue_(node)

    def finalize(self):
        """Return the accumulated (dotted, simple) name lists."""
        return self.dotted, self.simple
class AssignVisitor(Visitor):
    """AST visitor that builds a list of all potential names that are being
    assigned to. This is used later to heuristically figure out if a name being
    referred to is never assigned to nor in the imports."""

    def __init__(self):
        self.assnames = []
        self.in_class = False

    def visitAssName(self, node):
        self.assnames.append((node.name, node.lineno))
        self.continue_(node)

    def visitClass(self, node):
        self.assnames.append((node.name, node.lineno))
        # Track class nesting so methods are not counted as assignments.
        previous = self.in_class
        self.in_class = True
        self.continue_(node)
        self.in_class = previous

    def visitFunction(self, node):
        # Record only top-level (non-method) function definitions.
        if not self.in_class:
            self.assnames.append((node.name, node.lineno))
        self.continue_(node)

    def finalize(self):
        """Return the accumulated (name, lineno) assignment list."""
        return self.assnames
class AllVisitor(Visitor):
    """AST visitor that find an __all__ directive and accumulates the list of
    constants in it."""

    def __init__(self):
        self.all = []
        self.in_assign = False
        self.in_all = False

    def visitAssign(self, node):
        previous = self.in_assign
        self.in_assign = True
        self.continue_(node)
        self.in_assign = previous

    def visitAssName(self, node):
        # Entering the target of "__all__ = ..." arms constant collection.
        if self.in_assign and node.name == '__all__':
            self.in_all = True
        self.continue_(node)

    def visitConst(self, node):
        # Constants seen inside "__all__ = [...]" are exported names.
        if self.in_assign and self.in_all:
            self.all.append((node.value, node.lineno))
        self.continue_(node)

    def finalize(self):
        """Return the accumulated (value, lineno) list from __all__."""
        return self.all
|
GreatFruitOmsk/snakefood
|
lib/python/snakefood/local.py
|
Python
|
gpl-2.0
| 4,247
|
[
"VisIt"
] |
e305eaa39611663c1710a4b5d49c7d03e7b60c25f07f9787134c1f4461dd429e
|
# Copyright 2013-2020 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
from spack import *
class VotcaCsg(CMakePackage):
    """Versatile Object-oriented Toolkit for Coarse-graining
    Applications (VOTCA) is a package intended to reduce the amount of
    routine work when doing systematic coarse-graining of various
    systems. The core is written in C++.

    This package contains the VOTCA coarse-graining engine.
    """

    homepage = "http://www.votca.org"
    url = "https://github.com/votca/csg/tarball/v1.4"
    git = "https://github.com/votca/csg.git"
    maintainers = ['junghans']

    version('master', branch='master')
    version('stable', branch='stable')
    version('1.6.3', sha256='35456b1f3116364b10ada37d99798294bd2d3df2e670cef3936251f88036ef88')
    version('1.6.2', sha256='96b244b282005259832ed6ec0dc22dafe132dcfc3d73dcd8e53b62f40befb545')
    version('1.6.1', sha256='ed12bcb1ccdf71f54e21cdcc9803add4b8ebdc6b8263cb5b0034f5db01e31dbb')
    version('1.6', sha256='8cf6a4ac3ef7347c720a44d8a676f8cbd1462e162f6113de39f27b89354465ea')
    version('1.5.1', sha256='7fca1261bd267bf38d2edd26259730fed3126c0c3fd91fb81940dbe17bb568fd')
    version('1.5', sha256='160387cdc51f87dd20ff2e2eed97086beee415d48f3c92f4199f6109068c8ff4')
    version('1.4.1', sha256='41dccaecadd0165c011bec36a113629e27745a5a133d1a042efe4356acdb5450')
    version('1.4', sha256='c13e7febd792de8c3d426203f089bd4d33b8067f9db5e8840e4579c88b61146e')

    depends_on("cmake@2.8:", type='build')
    # Each csg release depends on the matching votca-tools release.
    # NOTE(review): the "@v:v.0" when-spec pins a narrow version range --
    # verify it matches Spack's range semantics for these versions.
    for v in ["1.4", "1.4.1", "1.5", "1.5.1", "1.6", "1.6.1", "1.6.2",
              "1.6.3", "master", "stable"]:
        depends_on('votca-tools@%s' % v, when="@%s:%s.0" % (v, v))
    depends_on("boost")
    depends_on("gromacs~mpi@5.1:2019.9999")
    depends_on("hdf5~mpi")
|
iulian787/spack
|
var/spack/repos/builtin/packages/votca-csg/package.py
|
Python
|
lgpl-2.1
| 1,911
|
[
"Gromacs"
] |
6385154c910e7725aa9faf2744066b07886556b457c2bb46875d1d42b867a362
|
from catkit import Gratoms
import numpy as np
import ase
import re
try:
from math import gcd
except ImportError:
from fractions import gcd
def running_mean(array, N=5):
    """Calculate the running mean of array for N instances.

    Parameters
    ----------
    array : array_like | ndarray (N,)
        Array of values to have a average taken from.
    N : int
        Number of values to take an average with.

    Returns
    -------
    running_mean : ndarray (N + 1,)
        Mean value of the running average.
    """
    # If the array is shorter than the window, shrink the window to fit.
    window = min(N, len(array))
    # Windowed sums via a zero-prefixed cumulative sum: the sum of each
    # length-`window` slice is prefix[i + window] - prefix[i].
    prefix = np.cumsum(np.insert(array, 0, 0))
    return (prefix[window:] - prefix[:-window]) / float(window)
def to_gratoms(atoms, edges=None):
    """Convert an Atoms object into a Gratoms object, carrying over
    numbers, positions, periodicity, cell, constraints and optional edges."""
    converted = Gratoms(
        numbers=atoms.numbers,
        positions=atoms.positions,
        pbc=atoms.pbc,
        cell=atoms.cell,
        edges=edges)
    # Constraints are not part of the constructor; copy them explicitly.
    if atoms.constraints:
        converted.set_constraint(atoms.constraints)
    return converted
def get_atomic_numbers(formula, return_count=False):
    """Return the atomic numbers associated with a chemical formula.

    Parameters
    ----------
    formula : string
        A chemical formula to parse into atomic numbers.
    return_count : bool
        Return the count of each element in the formula.

    Returns
    -------
    numbers : ndarray (n,)
        Element numbers in associated species.
    counts : ndarray (n,)
        Count of each element in a species.
    """
    tokens = re.findall('[A-Z][a-z]?|[0-9]+', formula)
    counts_by_symbol = {}
    for i, token in enumerate(tokens):
        if token.isdigit():
            # A multiplier applies to the element just parsed, which
            # already received an implicit count of 1.
            counts_by_symbol[tokens[i - 1]] += int(token) - 1
        else:
            counts_by_symbol[token] = counts_by_symbol.get(token, 0) + 1
    numbers = np.array([ase.data.chemical_symbols.index(symbol)
                        for symbol in counts_by_symbol.keys()])
    order = np.argsort(numbers)
    numbers = numbers[order]
    if not return_count:
        return numbers
    counts = np.array(list(counts_by_symbol.values()))[order]
    return numbers, counts
def get_reference_energies(species, energies):
    """Get reference energies for the elements in a set of molecules.

    Parameters
    ----------
    species : list (n,)
        Chemical formulas for each molecular species.
    energies : list (n,)
        Total energies associated with each species.

    Returns
    -------
    elements : ndarray (n,)
        Atomic elements associated with all species.
    references : ndarray (n,)
        Reference energies associated with each element.
    """
    if not isinstance(energies, np.ndarray):
        energies = np.array(energies)
    # A is the composition matrix: A[i, j] = count of element j in species i.
    # NOTE(review): solving requires as many species as distinct elements
    # (A square and non-singular) -- confirm callers guarantee this.
    A = np.zeros((len(species), len(species)))
    elements = np.zeros(len(species), dtype=int)
    n = 0
    # Construct the elements array as they appear
    for i, s in enumerate(species):
        num, cnt = get_atomic_numbers(s, True)
        # Append any elements not seen in an earlier species.
        for j in num[~np.in1d(num, elements)]:
            elements[n] = j
            n += 1
        # NOTE(review): this assumes the boolean-mask column order matches
        # the sorted num/cnt ordering from get_atomic_numbers -- verify.
        A[i][np.in1d(elements, num)] = cnt
    references = np.linalg.solve(A, energies)
    srt = np.argsort(elements)
    references = references[srt]
    elements = elements[srt]
    return elements, references
def parse_slice(slice_name):
    """Return a correctly parsed slice from input of varying types.

    Accepts an existing slice, None, an int, or a string such as '3' or
    '1:4'.  Raises TypeError for any other input (the original fell
    through and raised an opaque UnboundLocalError instead).
    """
    if isinstance(slice_name, slice):
        return slice_name
    if slice_name is None:
        return slice(None)
    if isinstance(slice_name, int):
        index = int(slice_name)
        return slice(index, index + 1)
    if isinstance(slice_name, str):
        if slice_name.isdigit():
            index = int(slice_name)
            return slice(index, index + 1)
        # colon-separated form: each field is an int or empty (-> None)
        parts = [int(field) if field.lstrip('-').isdigit() else None
                 for field in slice_name.split(':')]
        return slice(*parts)
    raise TypeError('Cannot parse slice from %r' % (slice_name,))
def ext_gcd(a, b):
    """Extended Euclid: return (x, y) such that a*x + b*y = gcd(a, b)."""
    # Base cases of the recursion.
    if b == 0:
        return 1, 0
    if a % b == 0:
        return 0, 1
    # Recurse on (b, a mod b) and back-substitute the coefficients.
    x, y = ext_gcd(b, a % b)
    return y, x - y * (a // b)
def list_gcd(values):
    """Return the greatest common divisor of a list of values."""
    if isinstance(values[0], float):
        # Cast to integers so gcd is well defined.
        values = np.array(values, dtype=int)
    # Fold the pairwise gcd across the whole sequence.
    pairwise_gcd = np.frompyfunc(gcd, 2, 1)
    return np.ufunc.reduce(pairwise_gcd, values)
|
jboes/CatKit
|
catkit/gen/utils/utilities.py
|
Python
|
gpl-3.0
| 4,498
|
[
"ASE"
] |
01bdb9f6f7ec38a07802e86795703de8abb174de6b1a9bd43e4438ac13255c59
|
# -*- coding: utf-8 -*-
# Copyright 2012 splinter authors. All rights reserved.
# Use of this source code is governed by a BSD-style
# license that can be found in the LICENSE file.
from .fake_webapp import EXAMPLE_APP
class StatusCodeTest(object):
    # Mixin of status-code assertions; the concrete test class is expected
    # to provide self.browser and the unittest assert methods.

    def test_should_visit_index_of_example_app_and_get_200_status_code(self):
        """The example app index should respond with HTTP 200."""
        self.browser.visit(EXAMPLE_APP)
        self.assertEqual(200, self.browser.status_code)
        self.assertEqual("200 - OK", str(self.browser.status_code))

    def test_should_visit_error_of_example_app_and_not_get_200_status_code(self):
        """A missing page should respond with HTTP 404, not 200."""
        self.browser.visit(EXAMPLE_APP + 'error.html')
        self.assertNotEqual(200, self.browser.status_code)
        self.assertEqual('404 - Not Found', str(self.browser.status_code))
|
bmcculley/splinter
|
tests/status_code.py
|
Python
|
bsd-3-clause
| 767
|
[
"VisIt"
] |
6aa7c8921e1133a26457405ed4a243ad19ec7b847fea47a4c5b7056a29117366
|
#!/usr/bin/env python
#
# Author: Qiming Sun <osirpt.sun@gmail.com>
#

'''
This example shows how to specify auxiliary basis for density fitting integrals.
The format and input convention of auxbasis are the same to the AO basis.

See also examples/gto/04-input_basis.py
'''

import tempfile  # NOTE(review): tempfile appears unused in this example
from pyscf import gto, scf, df

#
# If auxbasis is not specified, default optimal auxiliary basis (if possible)
# or even-tempered gaussian functions will be generated as auxbasis
#
mol = gto.M(atom='N1 0 0 0; N2 0 0 1.2', basis={'N1':'ccpvdz', 'N2':'tzp'})
mf = scf.RHF(mol).density_fit()
mf.kernel()
print('Default auxbasis', mf.with_df.auxmol.basis)

#
# The default basis is generated in the function df.make_auxbasis. It returns
# a basis dict for the DF auxiliary basis. In the real calculations, you can
# first generate the default basis then make modification.
#
auxbasis = df.make_auxbasis(mol)
print(mf.with_df.auxmol.basis == auxbasis)
auxbasis['N2'] = 'ccpvdz jkfit'
mf = scf.RHF(mol).density_fit(auxbasis=auxbasis)
mf.kernel()

#
# Input with key argument auxbasis='xxx' in .density_fit function
# This auxbasis will be used for all elements in the system.
#
mol = gto.M(atom='N1 0 0 0; N2 0 0 1.2', basis='ccpvdz')
mf = scf.RHF(mol).density_fit(auxbasis='weigend')
mf.kernel()

#
# The DF basis can be assigned to with_df.auxbasis attribute.
# Like the AO basis input, DF basis can be specified separately for each element.
#
mf = scf.RHF(mol).density_fit()
mf.with_df.auxbasis = {'default': 'weigend', 'N2': 'ahlrichs'}
mf.kernel()

#
# Combined basis set is also supported in DF basis input.
#
mf = scf.RHF(mol).density_fit()
mf.with_df.auxbasis = ('weigend','sto3g')
mf.kernel()

#
# Even-tempered Gaussian DF basis can be generated based on the AO basis.
# In the following example, the exponents of auxbasis are
#    alpha = a * 1.7^i   i = 0..N
# where a and N are determined by the smallest and largest exponets of AO basis.
#
mf = scf.RHF(mol).density_fit()
mf.with_df.auxbasis = df.aug_etb(mol, beta=1.7)
mf.kernel()
|
gkc1000/pyscf
|
examples/df/01-auxbasis.py
|
Python
|
apache-2.0
| 2,034
|
[
"Gaussian",
"PySCF"
] |
c3392b6feed2d9c7c63ff83a37453f9e4b3241f056a51e1ac41fb56231b9c444
|
import os
import vtk
import csv
import math
from vtk.util.numpy_support import numpy_to_vtk, vtk_to_numpy
import numpy as np
import sklearn.utils
#-----------------------------------------------------------------------
# SPECIFIC FUNCTIONS
#-----------------------------------------------------------------------
def check_images(ifile_image1, ifile_image2):
    """Check whether the extent, spacing and origin of two images are the
    same.

    Returns True only when all three properties match.  Bug fix: the
    original reassigned ``match`` independently for each property, so a
    later matching property silently overwrote an earlier mismatch and
    the function could return True for non-matching images.
    """
    # read images
    print("Checking images %s and %s" % (ifile_image1, ifile_image2))
    image1 = readmetaimage(ifile_image1)
    image2 = readmetaimage(ifile_image2)

    match = True
    # compare image extents (tuple equality == element-wise equality)
    if image1.GetExtent() == image2.GetExtent():
        print('Extents match')
    else:
        match = False
        print('Greyscale image and binary mask have different dimensions.\n' +
              'Fix this before proceeding with the benchmark.')
    # compare image spacings
    if image1.GetSpacing() == image2.GetSpacing():
        print('Spacings match')
    else:
        match = False
        print('Greyscale image and binary mask have different spacings.\n' +
              'Fix this before proceeding with the benchmark.')
    # compare image origins
    if image1.GetOrigin() == image2.GetOrigin():
        print('Origin match')
    else:
        match = False
        print('Greyscale image and binary mask have different origins.\n' +
              'Fix this before proceeding with the benchmark.')
    return match
def clip_mitral(surface, ifile_plane):
    """Clip the atrium at the level of the mitral valve.

    The clipping plane is read from a csv file with three rows:
    normal vector, point on the plane, and an inside-out flag.
    """
    planeinfo = np.genfromtxt(ifile_plane, delimiter=',')
    clipnormal = planeinfo[0].flatten().tolist()
    clippoint = planeinfo[1].flatten().tolist()
    insideout = int(planeinfo[2, 0].flatten())
    # cut at the valve plane and keep the main connected component
    clipped = planeclip(surface, clippoint, clipnormal, insideout)
    return extractlargestregion(clipped)
def clip_vein_endpoint(surface, ifile_sufix, targetdistance):
    """Clip each pulmonary vein targetdistance away from the atrial body.

    For each of the four veins, the matching centreline (clvein<k>.vtp)
    and clip point id (clippointid<k>.csv) are loaded from the directory
    ifile_sufix; the vein is cut targetdistance upstream of that point
    (measured along the centreline 'Abscissas' array) and re-appended
    to the body. Returns the reassembled, cleaned surface.
    """
    regionslabels = getregionslabels()
    # extract the body from the surface
    # including all points (alloff=1) to avoid holes after appending
    body = pointthreshold(surface, 'autolabels',
                          regionslabels['body'], regionslabels['laa'], 1)
    body = extractlargestregion(body)
    # initialize appender with the body
    appender = vtk.vtkAppendPolyData()
    appender.AddInput(body)
    for k in range(1,5):
        index = 'pv' + str(k)
        # extract vein
        # excluding some points (alloff=0)
        # to avoid overlapping edges after appending
        vein = pointthreshold(surface, 'autolabels', regionslabels[index],
                              regionslabels[index], 0)
        # load the centreline and the clipoint
        cl = readvtp(os.path.join(ifile_sufix,
                                  'clvein' + str(k) + '.vtp'))
        clippointid = int(np.loadtxt(os.path.join(ifile_sufix,
                                     'clippointid' + str(k) + '.csv')))
        clippoint0 = cl.GetPoint(clippointid)
        # clip-plane normal along the centreline (central difference)
        clipnormal = (np.array(cl.GetPoint(clippointid + 1)) -
                      np.array(cl.GetPoint(clippointid - 1)))
        abscissasarray = cl.GetPointData().GetArray('Abscissas')
        startabscissa = abscissasarray.GetValue(clippointid)
        currentabscissa = 0
        currentid = clippointid
        # find clip point: walk towards the vein end (decreasing point ids)
        # until targetdistance is covered or the centreline runs out
        while ((currentabscissa < targetdistance) and
               (currentabscissa >= 0) and
               (currentid >= 0)):
            currentid -= 1
            currentabscissa = startabscissa - abscissasarray.GetValue(currentid)
        if currentid > 0:
            # step back to the last id within the target distance
            currentid = currentid + 1
        else:
            # vein ended before target distance
            # then clip 2 mm before end of centreline (5x0.4 mm) from end point
            currentid = 4
        # clip and append
        clippoint1 = cl.GetPoint(currentid)
        clippedvein = planeclip(vein, clippoint1, clipnormal, 0)
        # keep region closest to ostium point
        clippedvein = extractclosestpointregion(clippedvein, clippoint0)
        # clip generates new points to make a flat cut. The values may be
        # interpolated. we want all values to rounded to a certain label value.
        clippedvein = roundpointarray(clippedvein, 'autolabels')
        appender.AddInput(clippedvein)
    # collect body + veins
    appender.Update()
    clippedsurface = appender.GetOutput()
    clippedsurface = cleanpolydata(clippedsurface)
    return clippedsurface
def compute_dice(ifile_image, surfacetarget, ofile=''):
    """Generate image from surfacetarget and compute Dice-metric with respect to
    ifile_image.

    Returns a dict with keys 'body' (one-element list) and 'pvs' (list of
    four values, one per pulmonary vein). If ofile is given the rasterised
    target image is also written to disk.
    """
    # load the whole image; the target surface is rasterised on the same grid
    refimage = readmetaimage(ifile_image)
    orispacing = refimage.GetSpacing()
    spacing = [orispacing[0], orispacing[1], orispacing[2]]
    bounds = refimage.GetBounds()
    # make reference and target image
    # body label is generated with the whole surface
    indexes = ['laa','pv1','pv2','pv3','pv4']
    targetimage = imagefordice(surfacetarget, spacing, bounds, indexes,
                               'autolabels')
    if ofile:
        print "saving", ofile
        writemetaimage(targetimage, ofile)
    # extract each label and compute metric body and pvs (no laa)
    regionslabels = getregionslabels()
    # initialise metric dictionary
    metric_all = {'body': [0.], 'pvs': [0.]}
    # body metric: binarise both images around the body label (+-0.5)
    refimagelabel = imagethresholdbetween(refimage,
                                          regionslabels['body'] - 0.5,
                                          regionslabels['body'] + 0.5)
    targetimagelabel = imagethresholdbetween(targetimage,
                                             regionslabels['body'] - 0.5,
                                             regionslabels['body'] + 0.5)
    # compute metric
    metric = dicemetric(refimagelabel, targetimagelabel)
    metric_all['body'] = [metric]
    # pvs metric: one Dice value per vein
    metric = [0., 0., 0., 0.]
    for k in range(0, 4):
        index = 'pv' + str(k + 1)
        refimagelabel = imagethresholdbetween(refimage,
                                              regionslabels[index] - 0.5,
                                              regionslabels[index] + 0.5)
        targetimagelabel = imagethresholdbetween(targetimage,
                                                 regionslabels[index] - 0.5,
                                                 regionslabels[index] + 0.5)
        # compute metric
        metric[k] = dicemetric(refimagelabel, targetimagelabel)
    metric_all['pvs'] = metric
    return metric_all
def compute_s2s_error(surface, surfacetarget, nsamples, ofile=''):
    """Compute the symmetric surface-to-surface distance (s2s-metric) for
    surface and surfacetarget. Resample the s2s-array to nsamples.

    Returns a dict mapping 'body' and 'pvs' to numpy arrays holding the
    distances in both directions concatenated (symmetric metric).
    """
    # cap the surfaces to improve surface to surface distance on clipped areas
    surfacecap = capsurface(surface,'autolabels')
    edges = extractboundaryedge(surfacecap)
    if edges.GetNumberOfPoints() > 0:
        surfacecap = fillholes(surfacecap)
    surfacetargetcap = capsurface(surfacetarget, 'autolabels')
    edges = extractboundaryedge(surfacetargetcap)
    if edges.GetNumberOfPoints() > 0:
        surfacetargetcap = fillholes(surfacetargetcap)
    # compute distances in both directions
    seg2gtsurf = surface2surfacedistance(surfacecap, surfacetargetcap, 'S2S')
    gt2segsurf = surface2surfacedistance(surfacetargetcap, surfacecap, 'S2S')
    if ofile:
        writevtp(seg2gtsurf, ofile + 'seg2gt.vtp')
        writevtp(gt2segsurf, ofile + 'gt2seg.vtp')
    # to have ~ same amount of samples per case
    # extract body and pvs
    # re sample to nsamples per case
    indexes = ['body', 'pvs']
    rfrom = {'body': 36, 'pvs': 76}
    rto = {'body': 36, 'pvs': 79}
    # initialise metric dictionary
    s2s_all = {'body': [0.], 'pvs': [0.]}
    for index in indexes:
        # extract each region from the FULL distance surfaces.
        # BUG FIX: the original overwrote seg2gtsurf/gt2segsurf with the
        # thresholded result, so the second region ('pvs') was extracted
        # from the already-reduced 'body' surface and came back empty.
        seg2gtregion = pointthreshold(seg2gtsurf, 'autolabels',
                                      rfrom[index], rto[index], 1)
        gt2segregion = pointthreshold(gt2segsurf, 'autolabels',
                                      rfrom[index], rto[index], 1)
        # turning distance array into numpy
        if (seg2gtregion.GetPointData().GetArray('S2S') != None):
            seg2gtarray = vtk_to_numpy(seg2gtregion.GetPointData().
                                       GetArray('S2S'))
        else:
            seg2gtarray = []
        if (gt2segregion.GetPointData().GetArray('S2S') != None):
            gt2segarray = vtk_to_numpy(gt2segregion.GetPointData().
                                       GetArray('S2S'))
        else:
            gt2segarray = []
        # resample to nsamples (unless array smaller than that)
        if len(seg2gtarray) > nsamples:
            seg2gtarray = resamplearray(seg2gtarray, nsamples)
        if len(gt2segarray) > nsamples:
            gt2segarray = resamplearray(gt2segarray, nsamples)
        # concatenate error to have symmetric metric
        superarray = np.concatenate([seg2gtarray, gt2segarray])
        s2s_all[index] = superarray
    return s2s_all
def labels2mesh(image, label, radius=1.):
    """Generate a smoothed mesh from a label image via Marching Cubes."""
    # binarise: keep all voxels with value >= label
    binary = imagethresholdupper(image, label)
    # morphological open/close to remove speckle before meshing
    binary = imageopenclose(binary, 0, 1, radius)
    # the thresholded image is binary, so extract the single label 1
    mesh = marchingcubes(binary, 1, 1)
    mesh = smoothtaubin(mesh)
    return cleanpolydata(mesh)
def transfer_gtlabels(surface, target, arrayname):
    """Project labels in array with arrayname from surface to target.

    Each target point gets the label of its closest surface point; the
    result is then post-processed so every vein/laa label forms a single
    connected region and small mislabeled patches are filled in.
    """
    # labels
    regionslabels = getregionslabels()
    indexes = ['pv1', 'pv2', 'pv3', 'pv4', 'laa']
    # cleaning
    target = cleanpolydata(target)
    numberofpoints = target.GetNumberOfPoints()
    # create array
    gtlabelsarray = vtk.vtkDoubleArray()
    gtlabelsarray.SetName(arrayname)
    gtlabelsarray.SetNumberOfTuples(numberofpoints)
    target.GetPointData().AddArray(gtlabelsarray)
    # initialize with body label
    gtlabelsarray.FillComponent(0, regionslabels['body'])
    # get labels from surface
    gtlabelsurface = surface.GetPointData().GetArray(arrayname)
    # initiate locator
    locator = vtk.vtkPointLocator()
    locator.SetDataSet(surface)
    locator.BuildLocator()
    # go through each point of target surface
    for i in range(numberofpoints):
        # determine closest point on surface
        point = target.GetPoint(i)
        closestpointid = locator.FindClosestPoint(point)
        # get label of point
        value = gtlabelsurface.GetValue(closestpointid)
        # assign label to target point
        gtlabelsarray.SetValue(i, value)
    # check that there is only one region per pv/laa label
    for index in indexes:
        # for each region, check if there are other regions on the surface
        # with the same label. If so, keep largest region
        # and relabel small regions to body label
        target = filldisconnectedregion(target,
                                        arrayname,
                                        regionslabels[index],
                                        regionslabels['body'])
        # for each region, fill small patches (i.e. body label)
        # with corresponding region label
        target = fillpatch(target,
                           arrayname,
                           regionslabels[index],
                           regionslabels['body'])
    # relabel isolated points in the body with vein label (e.g. close to ostia)
    # NOTE(review): value and patchvalue are both the body label here, so
    # this call fills body-labelled patches within the body itself; the
    # comment above suggests a vein label was intended - confirm.
    target = fillpatch(target,
                       arrayname,
                       regionslabels['body'],
                       regionslabels['body'])
    return target
#-----------------------------------------------------------------------
# GENERAL FUNCTIONS
#-----------------------------------------------------------------------
def addvectors(point1, point2):
    """Return the component-wise sum of two 3D vectors."""
    return [a + b for a, b in zip(point1, point2)]
def cellthreshold(polydata, arrayname, start=0, end=1):
    """Extract the cells of polydata whose celldata values lie in
    [start, end]."""
    selector = vtk.vtkThreshold()
    selector.SetInput(polydata)
    selector.SetInputArrayToProcess(
        0, 0, 0, vtk.vtkDataObject.FIELD_ASSOCIATION_CELLS, arrayname)
    selector.ThresholdBetween(start, end)
    selector.Update()
    # thresholding yields an unstructured grid; convert back to polydata
    tosurface = vtk.vtkDataSetSurfaceFilter()
    tosurface.SetInput(selector.GetOutput())
    tosurface.Update()
    return tosurface.GetOutput()
def cleanpolydata(polydata):
    """Merge duplicate points and drop unused points/degenerate cells."""
    cleanfilter = vtk.vtkCleanPolyData()
    cleanfilter.SetInput(polydata)
    cleanfilter.Update()
    return cleanfilter.GetOutput()
def dicemetric(image_reference, image_target):
    """Compute overlap between two images.

    Both inputs are expected to be binary (0/1) images; the Dice metric
    2*|A and B| / (|A| + |B|) is returned, or 0 when both are empty.
    """
    # for overlap image: add two images and extract region with value a + b
    image_overlap = imagesum(image_reference, image_target)
    overlap = imagethresholdupper(image_overlap, 2)
    # image scalars to numpy array
    # this allows us to have quick access to pixel data
    reference_array = vtk_to_numpy(image_reference.GetPointData().GetScalars())
    target_array = vtk_to_numpy(image_target.GetPointData().GetScalars())
    overlap_array = vtk_to_numpy(overlap.GetPointData().GetScalars())
    # to compute dice metric, count non-zero pixels
    # (sums equal counts because the images are binary)
    pix_reference = np.sum(reference_array)
    pix_target = np.sum(target_array)
    pix_overlap = np.sum(overlap_array)
    if (pix_target + pix_reference ) > 0:
        DM = 2.0 * pix_overlap / (pix_target + pix_reference)
    else:
        # both images empty: define the metric as 0
        DM = 0
    print 'Dice metric', DM
    return DM
def mindistancetopolydata(reference, polydata):
    """Compute minimum distance between two polydata.

    Measures the distance from each point of polydata to its closest
    POINT on reference (not the closest cell) and returns the minimum.
    """
    # sentinel: was a 1e6 magic number, which silently capped the result
    # for surfaces farther apart than that; inf is always correct
    refdist = float('inf')
    # initiate point locator
    locator = vtk.vtkPointLocator()
    locator.SetDataSet(reference)
    locator.BuildLocator()
    # go through each point of polydata
    for i in range(polydata.GetNumberOfPoints()):
        point = polydata.GetPoint(i)
        # determine closest point on reference
        closestpointid = locator.FindClosestPoint(point)
        dist = euclideandistance(point,
                                 reference.GetPoint(closestpointid))
        if dist < refdist:
            refdist = dist
    return refdist
def euclideandistance(point1, point2):
    """Return the Euclidean distance between two 3D points."""
    squared = sum((a - b) ** 2 for a, b in zip(point1, point2))
    return math.sqrt(squared)
def extractboundaryedge(polydata):
    """Return the boundary edges of a surface mesh."""
    edgefilter = vtk.vtkFeatureEdges()
    edgefilter.SetInput(polydata)
    # boundary edges stay on (default); disable feature/non-manifold ones
    edgefilter.FeatureEdgesOff()
    edgefilter.NonManifoldEdgesOff()
    edgefilter.Update()
    return edgefilter.GetOutput()
def extractconnectedregion(polydata, regionid):
    """Run connectivity filter to assign regionsids and return region with
    given regionid."""
    # extract surface (connectivity filter needs polydata input)
    surfer = vtk.vtkDataSetSurfaceFilter()
    surfer.SetInput(polydata)
    surfer.Update()
    # clean before connectivity filter
    # to avoid artificial regionIds
    cleaner = vtk.vtkCleanPolyData()
    cleaner.SetInput(surfer.GetOutput())
    cleaner.Update()
    # extract all regions, colouring each with a 'RegionId' point array
    connect = vtk.vtkPolyDataConnectivityFilter()
    connect.SetInput(cleaner.GetOutput())
    connect.SetExtractionModeToAllRegions()
    connect.ColorRegionsOn()
    connect.Update()
    # threshold especified region
    surface = pointthreshold(connect.GetOutput(), 'RegionId',
                             float(regionid), float(regionid))
    return surface
def extractclosestpointregion(polydata, point=[0, 0, 0]):
    """Extract region closest to specified point.

    NOTE: the mutable default argument is never modified here, so the
    shared-default pitfall does not apply.
    """
    # extract surface (connectivity filter needs polydata input)
    surfer = vtk.vtkDataSetSurfaceFilter()
    surfer.SetInput(polydata)
    surfer.Update()
    # clean before connectivity filter
    # to avoid artificial regionIds
    cleaner = vtk.vtkCleanPolyData()
    cleaner.SetInput(surfer.GetOutput())
    cleaner.Update()
    # extract regions closest to point
    connect = vtk.vtkPolyDataConnectivityFilter()
    connect.SetInput(cleaner.GetOutput())
    connect.SetExtractionModeToClosestPointRegion()
    connect.SetClosestPoint(point)
    connect.FullScalarConnectivityOn()
    connect.Update()
    return connect.GetOutput()
def extractlargestregion(polydata):
    """Extract largest of several disconnected regions."""
    # extract surface (connectivity filter needs polydata input)
    surfer = vtk.vtkDataSetSurfaceFilter()
    surfer.SetInput(polydata)
    surfer.Update()
    # clean before connectivity filter
    # to avoid artificial regionIds
    cleaner = vtk.vtkCleanPolyData()
    cleaner.SetInput(surfer.GetOutput())
    cleaner.Update()
    # extract largest region
    connect = vtk.vtkPolyDataConnectivityFilter()
    connect.SetInput(cleaner.GetOutput())
    connect.SetExtractionModeToLargestRegion()
    connect.Update()
    # cleaning phantom points left over from the discarded regions
    cleaner = vtk.vtkCleanPolyData()
    cleaner.SetInput(connect.GetOutput())
    cleaner.Update()
    return cleaner.GetOutput()
def fillholes(polydata, holesize=1000000):
    """Cap holes in a surface mesh, up to the given maximum hole size."""
    holefilter = vtk.vtkFillHolesFilter()
    holefilter.SetInput(polydata)
    holefilter.SetHoleSize(holesize)
    holefilter.Update()
    return holefilter.GetOutput()
def fillpatch(surface, arrayname, value, patchvalue):
    """Replace value for patchvalue in specified array of surface.

    Small patches labelled patchvalue that sit inside the region
    labelled value are relabelled to value. The edge adjoining the main
    body is treated as the ostium and left untouched.
    """
    # extract main body (the largest region carrying patchvalue)
    body = pointthreshold(surface, arrayname, patchvalue, patchvalue)
    mainbody = extractlargestregion(body)
    areamainbody = surfacearea(mainbody)
    edgesmainbody = extractboundaryedge(mainbody)
    # extract subpart
    submesh = pointthreshold(surface, arrayname, value, value, 0)
    # assuming the patch label is known
    patches = pointthreshold(surface, arrayname, patchvalue, patchvalue, 0)
    # if there is more than one edge, smaller edges should be patches
    edges = extractboundaryedge(submesh)
    if edges.GetNumberOfPoints() > 0:
        # the edge closest to the body is the ostium
        # hence it should not be a patch
        smallestdistr = findadjoiningregionid(edgesmainbody,edges)
        nedges = getregionsrange(edges)
        # loop again to fill the patch
        for r in range(int(nedges[1]) + 1):
            # taking centroid of edge
            smalledge = extractconnectedregion(edges, r)
            centroid = pointsetcentreofmass(smalledge)
            if (r != smallestdistr):
                patch = extractclosestpointregion(patches, centroid)
                # check patch is smaller than body, based on surface area
                areapatch = surfacearea(patch)
                if areapatch < 0.5 * areamainbody:
                    # relabel the patch points in place on surface
                    transferlabels(surface, patch, arrayname, value)
    return surface
def filldisconnectedregion(targetsurface, arrayname, label, rlabel):
    """Find disconnected regions with the same label.
    Replace its label to the second closest label.

    Keeps the largest connected region carrying `label` and relabels all
    other regions with that label to `rlabel` (in place on targetsurface).
    """
    # extract label region
    subpd = pointthreshold(targetsurface, arrayname, label, label, 1)
    if subpd.GetNumberOfPoints() > 0:
        # find largest regionid
        regionid = findlargestregionid(subpd)
        regionsrange = getregionsrange(subpd)
        # loop through all regions to relabel small regions
        # (range max > 0 means more than one region exists)
        if regionsrange[1] > 0.0:
            for j in range(int(regionsrange[1]) + 1):
                # for other regions, replace value
                if j != regionid:
                    subsubpd = extractconnectedregion(subpd, j)
                    transferlabels(targetsurface, subsubpd, arrayname, rlabel)
    return targetsurface
def findadjoiningregionid(reference, target):
    """Find the regionid in target closest to any reference region."""
    nregions = getregionsrange(target)
    # sentinel distance; inf replaces the old 1e6 magic number
    smallestdist = float('inf')
    smallestdistr = 0
    # BUG FIX: the original tested `nregions > 0`, comparing the
    # (min, max) range TUPLE to an int - always True in Python 2 and a
    # TypeError in Python 3; compare the maximum region id instead.
    if nregions[1] >= 0:
        # iterate over regions to find the adjoining region
        for r in range(int(nregions[1]) + 1):
            smallregion = extractconnectedregion(target, r)
            # find region closest to reference
            currentdist = mindistancetopolydata(reference, smallregion)
            if currentdist < smallestdist:
                smallestdist = currentdist
                smallestdistr = r
    return smallestdistr
def findlargestregionid(polydata):
    """Get id of largest of several disconnected regions.

    'Largest' is measured in number of points, not surface area.
    """
    # extract surface (connectivity filter needs polydata input)
    surfer = vtk.vtkDataSetSurfaceFilter()
    surfer.SetInput(polydata)
    surfer.Update()
    # clean before connectivity filter
    # to avoid artificial regionIds
    cleaner = vtk.vtkCleanPolyData()
    cleaner.SetInput(surfer.GetOutput())
    cleaner.Update()
    # extract all connected regions, colouring each with 'RegionId'
    connect = vtk.vtkPolyDataConnectivityFilter()
    connect.SetInput(cleaner.GetOutput())
    connect.SetExtractionModeToAllRegions()
    connect.ColorRegionsOn()
    connect.Update()
    # extract surface
    surfer = vtk.vtkDataSetSurfaceFilter()
    surfer.SetInput(connect.GetOutput())
    surfer.Update()
    # compute range of region ids
    regions = surfer.GetOutput().GetPointData().GetArray('RegionId')
    regionsrange = regions.GetRange()
    maxpoints = 0
    largestregionid = regionsrange[0]
    # if more than one region, find the largest by point count
    if (regionsrange[1] > 0.0):
        for j in range(int(regionsrange[0]), int(regionsrange[1]) + 1):
            outsurf = pointthreshold(surfer.GetOutput(), 'RegionId', j, j)
            numberofpoints = outsurf.GetNumberOfPoints()
            if (numberofpoints> maxpoints):
                maxpoints = numberofpoints
                largestregionid = j
    return largestregionid
def getregionsrange(polydata):
    """Return range of connected regions.

    Returns the (min, max) tuple of the 'RegionId' point array produced
    by the connectivity filter; max + 1 is the number of regions.
    """
    # extract surface (connectivity filter needs polydata input)
    surfer = vtk.vtkDataSetSurfaceFilter()
    surfer.SetInput(polydata)
    surfer.Update()
    # clean before connectivity filter
    # to avoid artificial regionIds
    cleaner = vtk.vtkCleanPolyData()
    cleaner.SetInput(surfer.GetOutput())
    cleaner.Update()
    # extract all connected regions, colouring each with 'RegionId'
    connect = vtk.vtkPolyDataConnectivityFilter()
    connect.SetInput(cleaner.GetOutput())
    connect.SetExtractionModeToAllRegions()
    connect.ColorRegionsOn()
    connect.Update()
    # extract surface
    surfer = vtk.vtkDataSetSurfaceFilter()
    surfer.SetInput(connect.GetOutput())
    surfer.Update()
    # get range
    regions = surfer.GetOutput().GetPointData().GetArray('RegionId')
    regionsrange = regions.GetRange()
    return regionsrange
def imagefordice(surface, spacing, bounds, indexes, arrayname):
    """Compute image from surface using imagestencil. Values of labels
    on surface are preserved.

    Rasterises the whole capped surface first, then rasterises each
    labelled sub-region (indexes) and stamps its label value into the
    image; remaining foreground pixels get the body label.
    """
    # labels
    regionslabels = getregionslabels()
    # cap whole surface with flat covers
    surfacecap = capsurface(surface)
    # if we still have edges, run fill holes
    edges = extractboundaryedge(surfacecap)
    if edges.GetNumberOfPoints() > 0:
        surfacecap = fillholes(surfacecap)
    # make image with the whole surface (binary foreground = 1)
    wholeimage = mesh2image(surfacecap, spacing, bounds, 1)
    # each label
    for index in indexes:
        vein = pointthreshold(surface, arrayname,
                              regionslabels[index], regionslabels[index], 0)
        # cap vein with flat covers
        veincap = capsurface(vein)
        # if we still have edges, run fill holes
        edges = extractboundaryedge(veincap)
        if edges.GetNumberOfPoints() > 0:
            veincap = fillholes(veincap)
        # generate image using imagestencil
        veinimage = mesh2image(veincap, spacing, bounds, 1)
        # add the images
        wholeimage = imagesum(wholeimage, veinimage)
        # pixels with value 2 correspond to vein region
        wholeimage = imagereplacevalue(wholeimage, 2, regionslabels[index])
    # pixels remaining with value 1 correspond to body
    wholeimage = imagereplacevalue(wholeimage, 1, regionslabels['body'])
    return wholeimage
def getregionslabels():
    """Return dictionary linking anatomical locations to label values."""
    return {
        'body': 36,   # atrial body
        'laa': 37,    # left atrial appendage
        'pv2': 76,    # pulmonary veins
        'pv1': 77,
        'pv3': 78,
        'pv4': 79,
    }
def mesh2image(pd, spacing, bounds, value=255):
    """Generates an image in which pixels inside the surface set to value.

    Pixels outside the (closed) surface are set to 0.
    """
    # start with a constant-valued image covering the bounds
    dim, origin = bounds_dim_origin(bounds,spacing)
    whiteimage = image_from_value(spacing,dim,origin,value)
    # polygonal data to image stencil
    pol2stenc = vtk.vtkPolyDataToImageStencil()
    pol2stenc.SetInput(pd)
    pol2stenc.SetOutputOrigin(origin)
    pol2stenc.SetOutputSpacing(spacing)
    pol2stenc.SetOutputWholeExtent(0, dim[0] ,
                                   0, dim[1] ,
                                   0, dim[2] )
    pol2stenc.Update()
    # cut the corresponding white image and set the background
    imgstenc = vtk.vtkImageStencil()
    imgstenc.SetInput(whiteimage)
    imgstenc.SetStencil(pol2stenc.GetOutput())
    # keep inside, zero outside
    imgstenc.ReverseStencilOff()
    imgstenc.SetBackgroundValue(0)
    imgstenc.Update()
    return imgstenc.GetOutput()
def bounds_dim_origin(bounds, spacing):
    """Derive image dimensions and origin from a bounding box and spacing.

    bounds is (xmin, xmax, ymin, ymax, zmin, zmax); returns
    (dim, origin) where dim counts voxels per axis (rounded up) and
    origin is the lower bound of each axis.
    """
    dim = [int(math.ceil((bounds[2 * axis + 1] - bounds[2 * axis]) /
                         spacing[axis]))
           for axis in range(3)]
    origin = [bounds[2 * axis] for axis in range(3)]
    return dim, origin
def image_from_value(spacing, dim, origin, value=255):
    """Generate a white image with a defined spacing and bounds.

    NOTE(review): uses the VTK 5-era image API
    (SetScalarTypeToUnsignedChar / AllocateScalars with no arguments);
    this will not run unchanged against VTK 6+ - confirm the target
    VTK version.
    """
    # initialise image geometry
    image = vtk.vtkImageData()
    image.SetSpacing(spacing)
    image.SetDimensions(dim)
    image.SetExtent(0, dim[0],
                    0, dim[1],
                    0, dim[2])
    image.SetOrigin(origin)
    image.SetScalarTypeToUnsignedChar()
    image.SetNumberOfScalarComponents(1)
    image.AllocateScalars()
    imagescalars = image.GetPointData().GetScalars()
    # Fill component is much faster than visiting each element to set value
    imagescalars.FillComponent(0, value)
    image.Update()
    return image
def capsurface(polydata, arrayname=''):
    """Cap holes in surface with a flat cover.

    Each boundary loop is covered with a triangle fan around its
    centroid (works for convex holes). If arrayname is given, the point
    array is carried over onto the covers.
    """
    # generates a flat cover for a convex hole defined by edges
    fedges = extractboundaryedge(polydata)
    # find each connected edge loop
    connect = vtk.vtkPolyDataConnectivityFilter()
    connect.SetInput(fedges)
    connect.Update()
    ncontours = connect.GetNumberOfExtractedRegions()
    append = vtk.vtkAppendPolyData()
    append.AddInput(polydata)
    # generate each flat cover
    for i in range(ncontours):
        # isolate one boundary loop at a time
        connect.AddSpecifiedRegion(i)
        connect.SetExtractionModeToSpecifiedRegions()
        connect.Update()
        edges = connect.GetOutput()
        cover = vtk.vtkPolyData()
        generatecover(edges, cover, arrayname)
        # append to original polydata
        append.AddInput(cover)
        connect.DeleteSpecifiedRegion(i)
    append.Update()
    outsurface = cleanpolydata(append.GetOutput())
    return outsurface
def generatecover(edges, cover, arrayname=''):
    """Create caps for capping a surface with holes.

    Builds a triangle fan from each boundary edge segment to the loop
    centroid and writes the result into `cover` (modified in place).
    """
    # create the building blocks of polydata.
    polys = vtk.vtkCellArray()
    points = vtk.vtkPoints()
    surfilt = vtk.vtkCleanPolyData()
    surfilt.SetInput( edges )
    surfilt.Update()
    points.DeepCopy(surfilt.GetOutput().GetPoints())
    npoints = points.GetNumberOfPoints()
    if arrayname:
        # keep pre existing array
        # NOTE(review): appends the mean label for the centroid point
        # added below; assumes the array returned by the cleaner can be
        # extended in place - confirm.
        array = surfilt.GetOutput().GetPointData().GetArray(arrayname)
        arraynp = vtk_to_numpy(array)
        array.InsertNextValue(np.mean(arraynp))
    # add centroid (mean of all loop points)
    centr = np.zeros(3)
    for i in range( npoints ):
        pt = np.zeros(3)
        points.GetPoint(i,pt)
        centr = centr + pt
    centr = centr / npoints
    cntpt = points.InsertNextPoint(centr)
    # add cells: one triangle per boundary edge, fanning to the centroid
    for i in range(surfilt.GetOutput().GetNumberOfCells()):
        cell = surfilt.GetOutput().GetCell(i)
        polys.InsertNextCell(3)
        polys.InsertCellPoint(cell.GetPointId(0))
        polys.InsertCellPoint(cell.GetPointId(1))
        polys.InsertCellPoint(cntpt)
    # assign the pieces to the polydata
    cover.SetPoints(points)
    cover.SetPolys(polys)
    if arrayname:
        cover.GetPointData().AddArray(array)
def imageopenclose(image, openvalue, closevalue, kernelsize):
    """Performs opening and closing morphological operations
    with a 3D ellipsoidal kernel.

    openvalue/closevalue select the scalar values to open/close;
    kernelsize is applied to all three axes.
    """
    openClose = vtk.vtkImageOpenClose3D()
    openClose.SetInput(image)
    openClose.SetOpenValue(openvalue)
    openClose.SetCloseValue(closevalue)
    openClose.SetKernelSize(kernelsize, kernelsize, kernelsize)
    openClose.ReleaseDataFlagOff()
    # execute the filter before handing out the result; the original
    # only called the no-op getters GetCloseValue()/GetOpenValue() and
    # never updated the pipeline explicitly (unlike every other wrapper
    # in this module)
    openClose.Update()
    return openClose.GetOutput()
def imagereplacevalue(image, const1, const2):
    """Replace every pixel equal to const1 with const2."""
    mathfilter = vtk.vtkImageMathematics()
    mathfilter.SetInput1(image)
    # C is the value to match, K the replacement
    mathfilter.SetConstantC(const1)
    mathfilter.SetConstantK(const2)
    mathfilter.SetOperationToReplaceCByK()
    mathfilter.Update()
    return mathfilter.GetOutput()
def imagesum(image1, image2):
    """Return the pixel-wise sum of two images."""
    adder = vtk.vtkImageMathematics()
    adder.SetInput1(image1)
    adder.SetInput2(image2)
    adder.SetOperationToAdd()
    adder.Update()
    return adder.GetOutput()
def imagethresholdbetween(image, t1, t2, invalue=1.0):
    """Binarise an image: pixels in [t1, t2] become invalue, others 0."""
    thresholder = vtk.vtkImageThreshold()
    thresholder.SetInput(image)
    thresholder.ThresholdBetween(t1, t2)
    thresholder.SetInValue(invalue)
    thresholder.SetOutValue(0.0)
    thresholder.Update()
    return thresholder.GetOutput()
def imagethresholdupper(image, t, invalue=1.0):
    """Binarise an image: pixels >= t become invalue, others 0."""
    thresholder = vtk.vtkImageThreshold()
    thresholder.SetInput(image)
    thresholder.ThresholdByUpper(t)
    thresholder.SetInValue(invalue)
    thresholder.SetOutValue(0.0)
    thresholder.Update()
    return thresholder.GetOutput()
def marchingcubes(image, startlabel, endlabel):
    """Extract label boundaries using the discrete Marching Cubes
    algorithm."""
    cubes = vtk.vtkDiscreteMarchingCubes()
    cubes.SetInput(image)
    # one isovalue per label in [startlabel, endlabel]
    nlabels = endlabel - startlabel + 1
    cubes.GenerateValues(nlabels, startlabel, endlabel)
    cubes.Update()
    return cubes.GetOutput()
def planeclip(surface, point, normal, insideout=1):
    """Clip a surface with the plane through `point` that has the given
    normal. `insideout` selects which half-space is kept."""
    plane = vtk.vtkPlane()
    plane.SetOrigin(point)
    plane.SetNormal(normal)
    clipper = vtk.vtkClipPolyData()
    clipper.SetInput(surface)
    clipper.SetClipFunction(plane)
    # SetInsideOut is the setter behind the InsideOutOn/Off macros
    clipper.SetInsideOut(1 if insideout else 0)
    clipper.Update()
    return clipper.GetOutput()
def pointsetcentreofmass(polydata):
    """Compute the centroid (mean position) of all points in a polydata."""
    total = [0, 0, 0]
    npoints = polydata.GetNumberOfPoints()
    for pid in range(npoints):
        coords = list(polydata.GetPoints().GetPoint(pid))
        total = addvectors(total, coords)
    return dividevector(total, npoints)
def dividevector(point, n):
    """Divide a 3D vector by a scalar."""
    scale = float(n)
    return [component / scale for component in point]
def pointthreshold(polydata, arrayname, start=0, end=1, alloff=0):
    """Threshold between start and end values in array. By default,
    threshold excludes points whose neighbours do not satisfy
    the threshold value. Enabling the flag 'alloff' disables
    this setting to include all points."""
    threshold = vtk.vtkThreshold()
    threshold.SetInput(polydata)
    # operate on the named point-data array
    threshold.SetInputArrayToProcess(0, 0, 0,
                                     vtk.vtkDataObject.FIELD_ASSOCIATION_POINTS,
                                     arrayname)
    threshold.ThresholdBetween(start, end)
    if (alloff):
        # keep cells where ANY point passes, not all of them
        threshold.AllScalarsOff()
    threshold.Update()
    # thresholding yields an unstructured grid; convert back to polydata
    surfer = vtk.vtkDataSetSurfaceFilter()
    surfer.SetInput(threshold.GetOutput())
    surfer.Update()
    return surfer.GetOutput()
def readmetaimage(filename):
    """Load a MetaImage (.mhd/.mha) volume from disk."""
    mreader = vtk.vtkMetaImageReader()
    mreader.SetFileName(filename)
    mreader.Update()
    return mreader.GetOutput()
def readvtp(filename, dataarrays=True):
    """Read polydata in XML format.

    When dataarrays is False, all point AND cell data arrays are
    disabled before the final read.
    """
    reader = vtk.vtkXMLPolyDataReader()
    reader.SetFileName(filename)
    reader.Update()
    if not dataarrays:
        for i in range(reader.GetNumberOfPointArrays()):
            arrayname = reader.GetPointArrayName(i)
            reader.SetPointArrayStatus(arrayname, 0)
        for i in range(reader.GetNumberOfCellArrays()):
            arrayname = reader.GetCellArrayName(i)
            # BUG FIX: the original called SetPointArrayStatus here with a
            # CELL array name, so cell arrays were never actually disabled
            reader.SetCellArrayStatus(arrayname, 0)
        reader.Update()
    return reader.GetOutput()
def resamplearray(x, n):
    """Resample an array to n samples using a bootstrapping technique."""
    return sklearn.utils.resample(x, n_samples=n)
def roundpointarray(polydata, name):
    """Round each value of the named point-data array in place."""
    labelarray = polydata.GetPointData().GetArray(name)
    for pid in range(polydata.GetNumberOfPoints()):
        labelarray.SetValue(pid, round(labelarray.GetValue(pid)))
    return polydata
def smoothtaubin(polydata, iterations=15, angle=120, passband=0.001):
    """Execute volume-preserving (windowed sinc) smoothing."""
    smoother = vtk.vtkWindowedSincPolyDataFilter()
    smoother.SetInput(polydata)
    smoother.SetNumberOfIterations(iterations)
    # leave boundary and feature edges untouched
    smoother.BoundarySmoothingOff()
    smoother.FeatureEdgeSmoothingOff()
    smoother.SetFeatureAngle(angle)
    smoother.SetPassBand(passband)
    smoother.NonManifoldSmoothingOn()
    smoother.NormalizeCoordinatesOn()
    smoother.Update()
    return smoother.GetOutput()
def surfacearea(polydata):
    """Return the total surface area of a polydata mesh."""
    massprops = vtk.vtkMassProperties()
    massprops.SetInput(polydata)
    massprops.Update()
    return massprops.GetSurfaceArea()
def surface2surfacedistance(ref, target, arrayname):
    """Compute distance between two surfaces. Output is added as point array.

    For every point of target, the distance to the closest CELL of ref
    is stored in a new point-data array named arrayname. Returns target
    (modified in place).
    """
    # adapted from vtkvmtkSurfaceDistance
    # initialise output arguments for vtkCellLocator.FindClosestPoint
    locator = vtk.vtkCellLocator()
    genericcell = vtk.vtkGenericCell()
    cellid = vtk.mutable(0)
    point = [0., 0., 0.]
    closestpoint = [0., 0., 0.]
    subid = vtk.mutable(0)
    distance2 = vtk.mutable(0)
    # create array
    distarray = vtk.vtkDoubleArray()
    distarray.SetName(arrayname)
    distarray.SetNumberOfTuples(target.GetNumberOfPoints())
    target.GetPointData().AddArray(distarray)
    # build locator over the reference cells
    locator.SetDataSet(ref)
    locator.BuildLocator()
    # compute distance
    for i in range(target.GetNumberOfPoints()):
        point = target.GetPoint(i)
        # FindClosestPoint returns the SQUARED distance in distance2
        locator.FindClosestPoint(point, closestpoint, genericcell, cellid,
                                 subid, distance2)
        distance = math.sqrt(distance2)
        # add value to array
        distarray.SetValue(i, distance)
    target.Update()
    return target
def transferlabels(target, reference, arrayname, value):
    """Set arrayname to `value` on the target points closest to each
    reference point (target modified in place)."""
    # locator over the target so each reference point maps to one target id
    pointlocator = vtk.vtkPointLocator()
    pointlocator.SetDataSet(target)
    pointlocator.BuildLocator()
    labels = target.GetPointData().GetArray(arrayname)
    for refid in range(reference.GetNumberOfPoints()):
        refpoint = reference.GetPoint(refid)
        nearestid = pointlocator.FindClosestPoint(refpoint)
        labels.SetValue(nearestid, value)
    return target
def visualise(surface, reference, case, arrayname, mini, maxi):
    """Visualise surface with colormap based on arrayname.

    The reference surface is rendered semi-transparent (alpha = 0.5),
    and the case name is drawn as a caption. Blocks until the
    interactive render window is closed.
    """
    #Create a lookup table to map point data to colors
    lut = vtk.vtkLookupTable()
    lut.SetNumberOfTableValues(255)
    lut.SetValueRange(0, 255)
    # qualitative data from colorbrewer
    lut.SetTableValue(0, 0, 0, 0, 1) #black
    lut.SetTableValue(mini, 1, 1, 1, 1) # white
    lut.SetTableValue(mini + 1, 77/255., 175/255., 74/255. , 1) # green
    lut.SetTableValue(maxi - 3, 152/255., 78/255., 163/255., 1) # purple
    lut.SetTableValue(maxi - 2, 255/255., 127/255., 0., 1) # orange
    lut.SetTableValue(maxi - 1, 55/255., 126/255., 184/255., 1) # blue
    lut.SetTableValue(maxi, 166/255., 86/255., 40/255., 1) # brown
    lut.Build()
    # create a text actor showing the case name
    txt = vtk.vtkTextActor()
    txt.SetInput(case)
    txtprop=txt.GetTextProperty()
    txtprop.SetFontFamilyToArial()
    txtprop.SetFontSize(18)
    txtprop.SetColor(0, 0, 0)
    txt.SetDisplayPosition(20, 30)
    # create a rendering window, renderer, and renderwindowinteractor
    ren = vtk.vtkRenderer()
    renWin = vtk.vtkRenderWindow()
    renWin.AddRenderer(ren)
    iren = vtk.vtkRenderWindowInteractor()
    style = vtk.vtkInteractorStyleTrackballCamera()
    iren.SetInteractorStyle(style)
    iren.SetRenderWindow(renWin)
    # surface mapper and actor, coloured by the point array
    surfacemapper = vtk.vtkPolyDataMapper()
    surfacemapper.SetInput(surface)
    surfacemapper.SetScalarModeToUsePointFieldData()
    surfacemapper.SelectColorArray(arrayname)
    surfacemapper.SetLookupTable(lut)
    surfacemapper.SetScalarRange(0, 255)
    surfaceactor = vtk.vtkActor()
    surfaceactor.SetMapper(surfacemapper)
    # refsurface mapper and actor (semi-transparent overlay)
    refmapper = vtk.vtkPolyDataMapper()
    refmapper.SetInput(reference)
    refmapper.SetScalarModeToUsePointFieldData()
    refmapper.SelectColorArray(arrayname)
    refmapper.SetLookupTable(lut)
    refmapper.SetScalarRange(0, 255)
    refactor = vtk.vtkActor()
    refactor.GetProperty().SetOpacity(0.5)
    refactor.SetMapper(refmapper)
    # assign actors to the renderer
    ren.AddActor(refactor)
    ren.AddActor(surfaceactor)
    ren.AddActor(txt)
    # set the background and size, zoom in and render
    ren.SetBackground(1, 1, 1)
    renWin.SetSize(1280, 960)
    ren.ResetCamera()
    ren.GetActiveCamera().Zoom(1)
    # enable user interface interactor (blocks until window closed)
    iren.Initialize()
    renWin.Render()
    iren.Start()
def writearray2csv(array, ofile, label=''):
    """Write array to csv, one value per line.

    If label is given it must be a sequence parallel to array; each
    line is then written as 'label, value'.
    """
    # context manager guarantees the file is closed even if a write fails
    # (the original leaked the handle on error)
    with open(ofile, 'wb') as f:
        for i in range(len(array)):
            if label:
                line = str(label[i]) + ', ' + str(array[i]) + '\n'
            else:
                line = str(array[i]) + '\n'
            f.write(line)
def writevtk(surface, filename):
    """Write vtkPolyData file (legacy .vtk format, ASCII)."""
    polywriter = vtk.vtkPolyDataWriter()
    polywriter.SetFileName(filename)
    polywriter.SetFileTypeToASCII()
    polywriter.SetInput(surface)
    polywriter.Write()
def writevtp(surface, filename):
    """Write vtkPolyData file in XML format (.vtp)."""
    xmlwriter = vtk.vtkXMLPolyDataWriter()
    xmlwriter.SetFileName(filename)
    xmlwriter.SetInput(surface)
    xmlwriter.Write()
def writemetaimage(image, filename):
    """Write image file in mhd format."""
    mhdwriter = vtk.vtkMetaImageWriter()
    mhdwriter.SetInput(image)
    mhdwriter.SetFileName(filename)
    mhdwriter.Write()
|
catactg/lasc
|
code/lasc_benchmark_tools.py
|
Python
|
bsd-2-clause
| 40,887
|
[
"VTK"
] |
edb021a96aea31aaf0347cd43c431b933d98fd1243010d63ec44aeb39f426572
|
#!/usr/bin/env python
import json
import logging
import os
import string
import subprocess
import sys
import time
import uuid
from random import choice
from twisted.internet import reactor, defer
from twisted.internet.task import deferLater
from twisted.internet.defer import CancelledError
from twisted.python import log
from twisted.web import server, resource, http
from twisted.web.resource import Resource
from twisted.web.server import NOT_DONE_YET
from twisted.web.static import File
from vtk.web import upload
try:
import argparse
except ImportError:
import _argparse as argparse
# Usage/help text printed to the console when the launcher configuration
# file is missing, unreadable, or fails key validation (see parseConfig).
sample_config_file = """
Here is a sample of what a configuration file could look like:
{
## ===============================
## General launcher configuration
## ===============================
"configuration": {
"host" : "localhost",
"port" : 8080,
"endpoint": "paraview", # SessionManager Endpoint
"content": "/.../www", # Optional: Directory shared over HTTP
"proxy_file" : "/.../proxy-mapping.txt", # Proxy-Mapping file for Apache
"sessionURL" : "ws://${host}:${port}/ws", # ws url used by the client to connect to the started process
"timeout" : 25, # Wait time in second after process start
"log_dir" : "/.../viz-logs", # Directory for log files
"upload_dir" : "/.../data", # If launcher should act as upload server, where to put files
"fields" : ["file", "host", "port", "updir"] # List of fields that should be send back to client
},
## ===============================
## Useful session vars for client
## ===============================
"sessionData" : { "updir": "/Home" }, # Tells client which path to updateFileBrowser after uploads
## ===============================
## Resources list for applications
## ===============================
"resources" : [ { "host" : "localhost", "port_range" : [9001, 9003] } ],
## ===============================
## Set of properties for cmd line
## ===============================
"properties" : {
"vtkpython" : "/.../VTK/build/bin/vtkpython",
"pvpython" : "/.../ParaView/build/bin/pvpython",
"vtk_python_path": "/.../VTK/build/Wrapping/Python/vtk/web",
"pv_python_path": "/.../ParaView/build/lib/site-packages/paraview/web",
"plugins_path": "/.../ParaView/build/lib",
"dataDir": "/.../path/to/data/directory"
},
## ===============================
## Application list with cmd lines
## ===============================
"apps" : {
"cone" : {
"cmd" : [
"${vtkpython}", "${vtk_python_path}/vtk_web_cone.py", "--port", "$port" ],
"ready_line" : "Starting factory"
},
"graph" : {
"cmd" : [
"${vtkpython}", "${vtk_python_path}/vtk_web_graph.py", "--port", "$port",
"--vertices", "${numberOfVertices}", "--edges", "${numberOfEdges}" ],
"ready_line" : "Starting factory"
},
"phylotree" : {
"cmd" : [
"${vtkpython}", "${vtk_python_path}/vtk_web_phylogenetic_tree.py", "--port", "$port",
"--tree", "${dataDir}/visomics/${treeFile}", "--table", "${dataDir}/visomics/${tableFile}" ],
"ready_line" : "Starting factory"
},
"filebrowser" : {
"cmd" : [
"${vtkpython}", "${vtk_python_path}/vtk_web_filebrowser.py",
"--port", "${port}", "--data-dir", "${dataDir}" ],
"ready_line" : "Starting factory"
},
"data_prober": {
"cmd": [
"${pvpython}", "-dr", "${pv_python_path}/pv_web_data_prober.py",
"--port", "${port}", "--data-dir", "${dataDir}", "-f" ],
"ready_line" : "Starting factory"
},
"visualizer": {
"cmd": [
"${pvpython}", "-dr", "${pv_python_path}/pv_web_visualizer.py",
"--plugins", "${plugins_path}/libPointSprite_Plugin.so", "--port", "${port}",
"--data-dir", "${dataDir}", "--load-file", "${dataDir}/${fileToLoad}",
"--authKey", "${secret}", "-f" ],
"ready_line" : "Starting factory"
},
"loader": {
"cmd": [
"${pvpython}", "-dr", "${pv_python_path}/pv_web_file_loader.py",
"--port", "${port}", "--data-dir", "${dataDir}",
"--load-file", "${dataDir}/${fileToLoad}", "-f" ],
"ready_line" : "Starting factory"
},
"launcher" : {
"cmd": [
"/.../ParaView/Web/Applications/Parallel/server/launcher.sh",
"${port}", "${client}", "${resources}", "${file}" ],
"ready_line" : "Starting factory"
},
"your_app": {
"cmd": [
"your_shell_script.sh", "--resource-host", "${host}", "--resource-port", "${port}",
"--session-id", "${id}", "--generated-password", "${secret}",
"--application-key", "${application}" ],
"ready_line": "Output line from your shell script indicating process is ready"
}
}
"""
# =============================================================================
# Helper module methods
# =============================================================================
def generatePassword():
    """Return a random 16-character alphanumeric session secret.

    NOTE(review): random.choice is not cryptographically strong; the
    ``secrets`` module would be preferable where Python 3 is guaranteed.
    """
    # string.ascii_letters (unlike the removed string.letters) and range()
    # (vs xrange) exist on both Python 2 and Python 3.
    return ''.join(choice(string.ascii_letters + string.digits) for _ in range(16))
# -----------------------------------------------------------------------------
def validateKeySet(obj, expected_keys, object_name):
    """Check that every key in expected_keys is present in obj.

    Prints one error line per missing key (object_name is used in the
    message) and returns True only when all keys were found.
    """
    all_key_found = True
    for key in expected_keys:
        # `key not in obj` replaces dict.has_key(), which Python 3 removed.
        if key not in obj:
            # Parenthesized print behaves identically on Python 2 and 3
            # for a single argument.
            print("ERROR: %s is missing %s key." % (object_name, key))
            all_key_found = False
    return all_key_found
# -----------------------------------------------------------------------------
def replaceVariables(template_str, variable_list):
    """Substitute ${var} placeholders using each mapping of variable_list.

    Mappings are applied in order; placeholders with no match anywhere are
    left intact and reported via logging.error.
    """
    for mapping in variable_list:
        template_str = string.Template(template_str).safe_substitute(mapping)
    if "$" in template_str:
        logging.error("Some properties could not be resolved: " + template_str)
    return template_str
# -----------------------------------------------------------------------------
def replaceList(template_list, variable_list):
    """Apply replaceVariables() to every template string in template_list.

    :param template_list: list of ${var} template strings
    :param variable_list: list of mappings, forwarded to replaceVariables
    :return: new list with all templates substituted
    """
    # Comprehension; the original accumulated into a list with a loop
    # variable named ``str``, shadowing the builtin.
    return [replaceVariables(template, variable_list) for template in template_list]
# -----------------------------------------------------------------------------
def filterResponse(obj, public_keys):
    """Return a copy of obj restricted to public_keys plus the always-public
    'id', 'sessionURL' and 'sessionManagerURL' fields.

    Fix: the original extended ``public_keys`` in place, permanently growing
    the caller's shared field list (self.field_filter) on every request.
    A local set is used instead, leaving the argument untouched.
    """
    allowed = set(public_keys) | {'id', 'sessionURL', 'sessionManagerURL'}
    return {field: obj[field] for field in obj if field in allowed}
# -----------------------------------------------------------------------------
def extractSessionId(request):
    """Pull the session id out of a /endpoint/<id> request path.

    Returns the id as a str, or None when the path has no id component.
    """
    parts = request.path.split('/')
    return str(parts[2]) if len(parts) >= 3 else None
# =============================================================================
# Session manager
# =============================================================================
class SessionManager(object):
    """Track live sessions and their resource assignments.

    Keeps an id -> options dict for every running session, hands out
    host/port pairs from the ResourceManager, and mirrors the session
    table into the proxy mapping file after each change.
    """

    def __init__(self, config, mapping):
        self.sessions = {}
        self.config = config
        self.resources = ResourceManager(config["resources"])
        self.mapping = mapping

    def createSession(self, options):
        """Reserve a resource, complete the session options and register it.

        Fills in id, host, port, secret, sessionURL, the resolved command
        line and any configured sessionData. Returns the completed options
        dict, or None when no resource is available.
        """
        # Assign id and store options
        id = str(uuid.uuid1())
        # Assign resource to session
        host, port = self.resources.getNextResource()
        # Do we have resources
        if host:
            options['id'] = id
            options['host'] = host
            options['port'] = port
            # ``in`` replaces dict.has_key(), removed in Python 3.
            if 'secret' not in options:
                options['secret'] = generatePassword()
            options['sessionURL'] = replaceVariables(self.config['configuration']['sessionURL'], [options, self.config['properties']])
            options['cmd'] = replaceList(self.config['apps'][options['application']]['cmd'], [options, self.config['properties']])
            if 'sessionData' in self.config:
                for key in self.config['sessionData']:
                    options[key] = replaceVariables(self.config['sessionData'][key], [options, self.config['properties']])
            self.sessions[id] = options
            self.mapping.update(self.sessions)
            return options
        return None

    def deleteSession(self, id):
        """Free the session's resource and drop it from the mapping file."""
        host = self.sessions[id]['host']
        port = self.sessions[id]['port']
        self.resources.freeResource(host, port)
        del self.sessions[id]
        self.mapping.update(self.sessions)

    def getSession(self, id):
        """Return the options dict for id, or None if unknown."""
        return self.sessions.get(id)
# =============================================================================
# Proxy manager
# =============================================================================
class ProxyMappingManager(object):
    """No-op base class for proxy-mapping writers."""

    def update(self, sessions):
        # Fix: the original signature omitted ``self``, so calling
        # instance.update(sessions) raised a TypeError on any instance.
        pass
class ProxyMappingManagerTXT(ProxyMappingManager):
    """Write the session id -> host:port table to a flat text file."""

    def __init__(self, file_path, pattern="%s %s:%d\n"):
        self.file_path = file_path
        self.pattern = pattern

    def update(self, sessions):
        """Rewrite the mapping file from scratch, one line per session."""
        lines = [self.pattern % (sid, info['host'], info['port'])
                 for sid, info in sessions.items()]
        with open(self.file_path, "w") as map_file:
            map_file.write(''.join(lines))
# =============================================================================
# Resource manager
# =============================================================================
class ResourceManager(object):
    """
    Class that provides methods to keep track on available resources (host/port)
    """

    def __init__(self, resourceList):
        # host -> {'available': [ports], 'used': [ports]}
        self.resources = {}
        for resource in resourceList:
            host = resource['host']
            # list() so .pop()/.extend() keep working on Python 3, where
            # range() returns a lazy object instead of a list.
            portList = list(range(resource['port_range'][0], resource['port_range'][1] + 1))
            # ``in`` replaces dict.has_key(), removed in Python 3.
            if host in self.resources:
                self.resources[host]['available'].extend(portList)
            else:
                self.resources[host] = {'available': portList, 'used': []}

    def getNextResource(self):
        """
        Return a (host, port) pair if any available otherwise will return (None, None)
        """
        # find the host with the most available ports
        winner = None
        availabilityCount = 0
        for host in self.resources:
            if availabilityCount < len(self.resources[host]['available']):
                availabilityCount = len(self.resources[host]['available'])
                winner = host
        if winner:
            port = self.resources[winner]['available'].pop()
            self.resources[winner]['used'].append(port)
            return (winner, port)
        return (None, None)

    def freeResource(self, host, port):
        """
        Free a previously reserved resource
        """
        if host in self.resources and port in self.resources[host]['used']:
            self.resources[host]['used'].remove(port)
            self.resources[host]['available'].append(port)
# =============================================================================
# Process manager
# =============================================================================
class ProcessManager(object):
    """Start, track and stop the per-session child processes.

    Each session's process writes stdout/stderr into a per-id log file
    under ``configuration.log_dir``; isReady() later scans that log for
    the application's configured ``ready_line``.
    """

    def __init__(self, configuration):
        self.config = configuration
        self.log_dir = configuration['configuration']['log_dir']
        # session id -> subprocess.Popen
        self.processes = {}

    def __del__(self):
        # Best-effort cleanup: terminate every child when the manager dies.
        for id in self.processes:
            self.processes[id].terminate()

    def _getLogFilePath(self, id):
        """Return the log file path for the given session id."""
        return "%s%s%s.txt" % (self.log_dir, os.sep, id)

    def startProcess(self, session):
        """Launch session['cmd'], sending its output to the session log file.

        Returns the Popen object, or None when the command failed to start.
        NOTE(review): open(..., 0) (unbuffered) is only valid for text-mode
        files on Python 2 — confirm before porting to Python 3.
        """
        proc = None
        # Create output log file
        logFilePath = self._getLogFilePath(session['id'])
        with open(logFilePath, "a+", 0) as log_file:
            try:
                proc = subprocess.Popen(session['cmd'], stdout=log_file, stderr=log_file)
                self.processes[session['id']] = proc
            except:
                logging.error("The command line failed")
                logging.error(' '.join(map(str, session['cmd'])))
                return None
        return proc

    def stopProcess(self, id):
        """Forget and terminate the process for the given session id."""
        proc = self.processes[id]
        del self.processes[id]
        try:
            proc.terminate()
        except:
            pass  # we tried

    def listEndedProcess(self):
        """Return the ids of all tracked processes that have already exited."""
        session_to_release = []
        for id in self.processes:
            # poll() is non-blocking; a non-None result is the exit code.
            if self.processes[id].poll() is not None:
                session_to_release.append(id)
        return session_to_release

    def isRunning(self, id):
        """True while the session's process has not exited."""
        return self.processes[id].poll() is None

    # ========================================================================
    # Look for ready line in process output. Return True if found, False
    # otherwise. If no ready_line is configured and process is running return
    # False. This will then rely on the timout time.
    # ========================================================================
    def isReady(self, session, count = 0):
        id = session['id']
        # The process has to be running to be ready!
        if not self.isRunning(id) and count < 60:
            return False
        # Give up after 60 seconds if still not running; returning True here
        # lets the caller stop polling and report the failure.
        if not self.isRunning(id):
            return True
        application = self.config['apps'][session['application']]
        ready_line = application.get('ready_line', None)
        # If no ready_line is configured and the process is running then thats
        # enough: report not-ready and rely on the timeout response path.
        if not ready_line:
            return False
        ready = False
        # Check the output for ready_line
        logFilePath = self._getLogFilePath(session['id'])
        with open(logFilePath, "r", 0) as log_file:
            for line in log_file.readlines():
                if ready_line in line:
                    ready = True
                    break
        return ready
# ===========================================================================
# Class to implement requests to POST, GET and DELETE methods
# ===========================================================================
class LauncherResource(resource.Resource, object):
    """Twisted web resource implementing the launcher REST API.

    POST   /endpoint        -> create a session (response sent asynchronously)
    GET    /endpoint/<id>   -> session meta-data
    DELETE /endpoint/<id>   -> stop and remove a session
    """

    def __init__(self, options, config):
        super(LauncherResource, self).__init__()
        self._options = options
        self._config = config
        self.time_to_wait = int(config['configuration']['timeout'])
        self.field_filter = config['configuration']['fields']
        self.session_manager = SessionManager(config, ProxyMappingManagerTXT(config['configuration']['proxy_file']))
        self.process_manager = ProcessManager(config)

    def getChild(self, path, request):
        # Every sub-path (e.g. /endpoint/<id>) is handled by this resource.
        return self

    def __del__(self):
        logging.warning("Server factory shutting down. Stopping all processes")

    # ========================================================================
    # Handle POST request
    # ========================================================================
    def render_POST(self, request):
        """Create a new session and reply once it is ready or timed out."""
        payload = json.loads(request.content.getvalue())
        # Make sure the request has all the expected keys
        if not validateKeySet(payload, ["application"], "Launch request"):
            request.setResponseCode(http.BAD_REQUEST)
            return json.dumps({"error": "The request is not complete"})
        # Try to free any available resource
        id_to_free = self.process_manager.listEndedProcess()
        for id in id_to_free:
            self.session_manager.deleteSession(id)
            self.process_manager.stopProcess(id)
        # Create new session
        session = self.session_manager.createSession(payload)
        # No resource available
        if not session:
            request.setResponseCode(http.SERVICE_UNAVAILABLE)
            return json.dumps({"error": "All the resources are currently taken"})
        # Start process
        proc = self.process_manager.startProcess(session)
        if not proc:
            request.setResponseCode(http.SERVICE_UNAVAILABLE)
            return json.dumps({"error": "The process did not properly start. %s" % str(session['cmd'])})

        # local function to act as errback for Deferred objects.
        def errback(error):
            # Filter out CancelledError and propagate rest
            if error.type != CancelledError:
                return error

        # Deferred object set to timeout request if process doesn't start in time
        timeout_deferred = deferLater(reactor, self.time_to_wait, lambda: request)
        timeout_deferred.addCallback(self._delayedRenderTimeout, session)
        timeout_deferred.addErrback(errback)
        # Make sure other deferred is canceled once one has been fired
        request.notifyFinish().addCallback(lambda x: timeout_deferred.cancel())
        # If a ready_line is configured create a Deferred object to wait for
        # ready line to be produced
        if 'ready_line' in self._config['apps'][session['application']]:
            ready_deferred = self._waitForReady(session, request)
            ready_deferred.addCallback(self._delayedRenderReady, session)
            ready_deferred.addErrback(errback)
            # Make sure other deferred is canceled once one has been fired
            request.notifyFinish().addCallback(lambda x: ready_deferred.cancel())
        return NOT_DONE_YET

    # ========================================================================
    # Wait for session to be ready. Rather than blocking keep using callLater(...)
    # to schedule self in reactor. Return a Deferred object whose callback will
    # be triggered when the session is ready
    # ========================================================================
    def _waitForReady(self, session, request, count=0, d=None):
        if not d:
            d = defer.Deferred()
        # Stop polling either when the timeout path already fired
        # ('startTimedOut') or when the process reports ready.
        if not 'startTimedOut' in session and \
            not self.process_manager.isReady(session, count + 1):
            reactor.callLater(1, self._waitForReady, session, request, count + 1, d)
        else:
            d.callback(request)
        return d

    # ========================================================================
    # Called when the timeout out expires. Check if process is now ready
    # and send response to client.
    # ========================================================================
    def _delayedRenderTimeout(self, request, session):
        ready = self.process_manager.isReady(session, 0)
        if ready:
            request.write(json.dumps(filterResponse(session, self.field_filter)))
            request.setResponseCode(http.OK)
        else:
            request.write(json.dumps({"error": "Session did not start before timeout expired. Check session logs."}))
            # Mark the session as timed out and clean up the process
            session['startTimedOut'] = True
            self.session_manager.deleteSession(session['id'])
            self.process_manager.stopProcess(session['id'])
            request.setResponseCode(http.SERVICE_UNAVAILABLE)
        request.finish()

    # ========================================================================
    # Called when the process is ready ( the ready line has been read from the
    # process output).
    # ========================================================================
    def _delayedRenderReady(self, request, session):
        filterkeys = self.field_filter
        # Only expose the secret when it was injected into the command line.
        if session['secret'] in session['cmd']:
            filterkeys = self.field_filter + [ 'secret' ]
        request.write(json.dumps(filterResponse(session, filterkeys)))
        request.setResponseCode(http.OK)
        request.finish()

    # =========================================================================
    # Handle GET request
    # =========================================================================
    def render_GET(self, request):
        """Return the filtered meta-data of an existing session."""
        id = extractSessionId(request)
        if not id:
            message = "id not provided in GET request"
            logging.error(message)
            request.setResponseCode(http.BAD_REQUEST)
            return json.dumps({"error":message})
        logging.info("GET request received for id: %s" % id)
        session = self.session_manager.getSession(id)
        if not session:
            message = "No session with id: %s" % id
            logging.error(message)
            request.setResponseCode(http.NOT_FOUND)
            return json.dumps({"error":message})
        # Return session meta-data
        request.setResponseCode(http.OK)
        return json.dumps(filterResponse(session, self.field_filter))

    # =========================================================================
    # Handle DELETE request
    # =========================================================================
    def render_DELETE(self, request):
        """Stop the session's process, remove it, and return its record."""
        id = extractSessionId(request)
        if not id:
            message = "id not provided in DELETE request"
            logging.error(message)
            request.setResponseCode(http.BAD_REQUEST)
            return json.dumps({"error":message})
        logging.info("DELETE request received for id: %s" % id)
        session = self.session_manager.getSession(id)
        if not session:
            message = "No session with id: %s" % id
            logging.error(message)
            request.setResponseCode(http.NOT_FOUND)
            return json.dumps({"error":message})
        # Remove session
        self.session_manager.deleteSession(id)
        self.process_manager.stopProcess(id)
        message = "Deleted session with id: %s" % id
        logging.info(message)
        request.setResponseCode(http.OK)
        # Fix: Twisted render methods must return a string (or NOT_DONE_YET);
        # the original returned the raw session dict. Serialize the record
        # the same way render_GET does.
        return json.dumps(filterResponse(session, self.field_filter))
# =============================================================================
# Start the web server
# =============================================================================
def startWebServer(options, config):
    """Configure logging, build the Twisted resource tree and run the reactor.

    Blocks until the reactor stops.
    """
    # Extract properties from config
    log_dir = str(config["configuration"]["log_dir"])
    content = str(config["configuration"]["content"])
    endpoint = str(config["configuration"]["endpoint"])
    host = str(config["configuration"]["host"])
    port = int(config["configuration"]["port"])
    # Setup logging: always to a file; mirror to stdout when --debug is set
    logFileName = log_dir + os.sep + "launcherLog.log"
    formatting = '%(asctime)s:%(levelname)s:%(name)s:%(message)s'
    logging.basicConfig(level=logging.DEBUG, filename=logFileName, filemode='w', format=formatting)
    # Route twisted's own log messages through the logging module
    observer = log.PythonLoggingObserver()
    observer.start()
    if options.debug:
        console = logging.StreamHandler(sys.stdout)
        console.setLevel(logging.INFO)
        formatter = logging.Formatter(formatting)
        console.setFormatter(formatter)
        logging.getLogger('').addHandler(console)
    # Initialize web resource: serve static files when a content dir is set
    web_resource = File(content) if (len(content) > 0) else resource.Resource()
    # Attach launcher REST endpoint
    web_resource.putChild(endpoint, LauncherResource(options, config))
    # Check if launcher should act as a file upload server as well
    if config["configuration"].has_key("upload_dir"):
        from upload import UploadPage
        updir = replaceVariables(config['configuration']['upload_dir'], [config['properties']])
        uploadResource = UploadPage(updir)
        web_resource.putChild("upload", uploadResource)
    site = server.Site(web_resource)
    reactor.listenTCP(port, site, interface=host)
    # run() blocks until reactor.stop() is called
    reactor.run()
# =============================================================================
# Parse config file
# =============================================================================
def parseConfig(options):
    """Load and validate the launcher JSON configuration file.

    Exits the process (status 2) after printing the sample configuration
    when the file cannot be read or mandatory keys are missing.
    """
    # Read values from the configuration file
    try:
        config = json.loads(open(options.config[0]).read())
    except Exception:  # narrowed from bare except: don't trap SystemExit etc.
        message = "ERROR: Unable to read config file.\n"
        message += str(sys.exc_info()[1]) + "\n" + str(sys.exc_info()[2])
        print(message)
        print(sample_config_file)
        sys.exit(2)
    expected_keys = ["configuration", "apps", "properties", "resources"]
    if not validateKeySet(config, expected_keys, "Config file"):
        print(sample_config_file)
        sys.exit(2)
    expected_keys = ["endpoint", "host", "port", "proxy_file", "sessionURL", "timeout", "log_dir", "fields"]
    if not validateKeySet(config["configuration"], expected_keys, "file.configuration"):
        print(sample_config_file)
        sys.exit(2)
    # "content" is optional: default to serving no static content
    if "content" not in config["configuration"]:
        config["configuration"]["content"] = ""
    return config
# =============================================================================
# Setup default arguments to be parsed
# -d, --debug
# -t, --proxyFileType Type of proxy file (txt, dbm)
# =============================================================================
def add_arguments(parser):
    """Register the launcher's command-line options on *parser* and return it."""
    parser.add_argument(
        "config", type=str, nargs=1,
        help="configuration file for the launcher")
    parser.add_argument(
        "-d", "--debug", action="store_true",
        help="log debugging messages to stdout")
    return parser
# =============================================================================
# Parse arguments
# =============================================================================
def start(argv=None,
          description="VTKWeb Launcher"):
    """Parse command-line arguments, load the configuration and run the server.

    :param argv: argument list (defaults to sys.argv when None)
    :param description: text shown in the argparse --help output
    """
    parser = argparse.ArgumentParser(description=description)
    add_arguments(parser)
    args = parser.parse_args(argv)
    config = parseConfig(args)
    # Blocks inside the Twisted reactor loop until shutdown
    startWebServer(args, config)
# =============================================================================
# Main
# =============================================================================
if __name__ == "__main__":
    # Run the launcher with arguments taken from sys.argv.
    start()
|
ashray/VTK-EVM
|
Web/Python/vtk/web/launcher.py
|
Python
|
bsd-3-clause
| 26,500
|
[
"ParaView",
"VTK"
] |
bffc798cbc661f94eaa2d5f03893f3ed6361a056b0984b70c0ff0e7500248b5f
|
#!/usr/bin/env python
#pylint: disable=missing-docstring
####################################################################################################
# DO NOT MODIFY THIS HEADER #
# MOOSE - Multiphysics Object Oriented Simulation Environment #
# #
# (c) 2010 Battelle Energy Alliance, LLC #
# ALL RIGHTS RESERVED #
# #
# Prepared by Battelle Energy Alliance, LLC #
# Under Contract No. DE-AC07-05ID14517 #
# With the U. S. Department of Energy #
# #
# See COPYRIGHT for full restrictions #
####################################################################################################
import os
import unittest
import subprocess
import shutil
import tempfile
import mooseutils
import MooseDocs
class TestSQA(unittest.TestCase):
    """
    Test the build works across directories.
    """
    # NOTE(review): the temp dir is created at class-definition (import)
    # time, a side effect outside the test run -- confirm this is intended.
    SITE_DIR = tempfile.mkdtemp(dir=os.path.join(os.getenv('HOME'), '.local', 'share', 'moose'))
    WORKING_DIR = os.getcwd()

    def setUp(self):
        """
        Runs prior to each test.
        """
        os.chdir(os.path.join(MooseDocs.ROOT_DIR, 'test', 'docs'))
        if not os.path.exists(self.SITE_DIR):
            os.makedirs(self.SITE_DIR)

    def tearDown(self):
        """
        Runs after each test.
        """
        os.chdir(self.WORKING_DIR)
        shutil.rmtree(self.SITE_DIR)

    def testBuild(self):
        """
        Test that sqa demo is working.
        """
        exe = mooseutils.find_moose_executable(os.path.join(MooseDocs.MOOSE_DIR, 'test'))
        self.assertTrue(os.path.isfile(exe), "The moose_test executable does not exist.")
        c = ['./moosedocs.py', 'build', '--config-file', 'sqa.yml', '--clean', '--site-dir',
             self.SITE_DIR]
        proc = subprocess.Popen(c, cwd=os.path.join(MooseDocs.MOOSE_DIR, 'test', 'docs'),
                                stdout=subprocess.PIPE)
        out = proc.stdout.read()
        # Fix: assertIn(member, container) -- search for the expected
        # summary line inside the build output, not the other way around.
        self.assertIn('WARNINGS: 0 ERRORS: 4\n', out)
        with open(os.path.join(self.SITE_DIR, 'sqa', 'test_srs', 'index.html'), 'r') as fid:
            html = fid.read()
        self.assertIn('Testing testing testing', html)
        self.assertIn('Missing Template Item: project_description', html)
        self.assertIn('Missing Template Item: system_scope', html)
        self.assertIn('<span class="moose-collection-name">F1.50</span>', html)
        self.assertIn('<div class="collapsible-header moose-group-header">Transient Analysis</div>',
                      html)
        self.assertIn('<li id="requirement-F1.10">', html)
        with open(os.path.join(self.SITE_DIR, 'sqa', 'test_rtm', 'index.html'), 'r') as fid:
            html = fid.read()
        self.assertIn('<div class="collapsible-header moose-group-header">Transient Analysis</div>',
                      html)
        self.assertIn('<a href="../test_srs/index.html">F1.10</a>', html)
        self.assertIn('<span class="moose-sqa-error">F9.99</span>', html)
        with open(os.path.join(self.SITE_DIR, 'sqa', 'test_v_and_v', 'index.html'), 'r') as fid:
            html = fid.read()
        self.assertIn('<a href="validation/V1-01/index.html">V1.01</a>', html)
        link = os.path.join(MooseDocs.ROOT_DIR,
                            'test/docs/content/sqa/test_v_and_v/validation/V1-02.md')
        self.assertIn('<a class="moose-bad-link" href="{}">V1.02</a>'.format(link), html)
        self.assertIn('<span class="new badge danger" data-badge-caption="danger">1', html)
        self.assertIn('<span class="new badge error" data-badge-caption="error">1', html)
        with open(os.path.join(self.SITE_DIR, 'sqa', 'index.html'), 'r') as fid:
            html = fid.read()
        self.assertIn('<span class="moose-page-status" data-filename="test_srs/index.html">', html)
        self.assertIn('<span class="new badge pass" data-badge-caption="pass">', html)
if __name__ == '__main__':
    # Verbose test-runner output when executed directly.
    unittest.main(verbosity=2)
|
Chuban/moose
|
python/MooseDocs/tests/sqa/test_sqa.py
|
Python
|
lgpl-2.1
| 4,670
|
[
"MOOSE"
] |
8601f0bf50599a5d859a7ab893d12a3bee96fccbd693d9a8c357848a1c3f7cd1
|
########################################################################
# File: RegisterOperation.py
# Author: Krzysztof.Ciba@NOSPAMgmail.com
# Date: 2013/03/19 13:55:14
########################################################################
""" :mod: RegisterFile
==================
.. module: RegisterFile
:synopsis: register operation handler
.. moduleauthor:: Krzysztof.Ciba@NOSPAMgmail.com
RegisterFile operation handler
"""
__RCSID__ = "$Id $"
# #
# @file RegisterOperation.py
# @author Krzysztof.Ciba@NOSPAMgmail.com
# @date 2013/03/19 13:55:24
# @brief Definition of RegisterOperation class.
# # imports
from DIRAC import S_OK, S_ERROR
from DIRAC.FrameworkSystem.Client.MonitoringClient import gMonitor
from DIRAC.RequestManagementSystem.private.OperationHandlerBase import OperationHandlerBase
from DIRAC.DataManagementSystem.Client.DataManager import DataManager
########################################################################
class RegisterFile( OperationHandlerBase ):
  """
  .. class:: RegisterOperation

  RegisterFile operation handler: registers each waiting file of the
  operation in the configured file catalog(s) via the DataManager.
  """

  def __init__( self, operation = None, csPath = None ):
    """c'tor

    :param self: self reference
    :param Operation operation: Operation instance
    :param str csPath: CS path for this handler
    """
    OperationHandlerBase.__init__( self, operation, csPath )
    # # RegisterFile specific monitor info: attempted/ok/failed counters
    gMonitor.registerActivity( "RegisterAtt", "Attempted file registrations",
                               "RequestExecutingAgent", "Files/min", gMonitor.OP_SUM )
    gMonitor.registerActivity( "RegisterOK", "Successful file registrations",
                               "RequestExecutingAgent", "Files/min", gMonitor.OP_SUM )
    gMonitor.registerActivity( "RegisterFail", "Failed file registrations",
                               "RequestExecutingAgent", "Files/min", gMonitor.OP_SUM )

  def __call__( self ):
    """ call me maybe """
    # # counter for failed files
    failedFiles = 0
    # # catalog(s) to use; an empty Catalog attribute means every configured
    # # catalog (see the "all catalogs" log message below)
    catalogs = self.operation.Catalog
    if catalogs:
      catalogs = [ cat.strip() for cat in catalogs.split( ',' ) ]
    dm = DataManager( catalogs = catalogs )
    # # get waiting files
    waitingFiles = self.getWaitingFilesList()
    # # loop over files
    for opFile in waitingFiles:
      gMonitor.addMark( "RegisterAtt", 1 )
      # # get LFN
      lfn = opFile.LFN
      # # and others; registration uses the first target SE of the operation
      fileTuple = ( lfn , opFile.PFN, opFile.Size, self.operation.targetSEList[0], opFile.GUID, opFile.Checksum )
      # # call DataManager
      registerFile = dm.registerFile( fileTuple )
      # # check results: a failed call or an entry under Value.Failed counts
      # # as a failure for this LFN
      if not registerFile["OK"] or lfn in registerFile["Value"]["Failed"]:
        gMonitor.addMark( "RegisterFail", 1 )
        # self.dataLoggingClient().addFileRecord( lfn, "RegisterFail", ','.join( catalogs ) if catalogs else "all catalogs", "", "RegisterFile" )
        reason = registerFile.get( "Message", registerFile.get( "Value", {} ).get( "Failed", {} ).get( lfn, 'Unknown' ) )
        errorStr = "failed to register LFN %s: %s" % ( lfn, reason )
        opFile.Error = errorStr
        self.log.warn( errorStr )
        failedFiles += 1
      else:
        gMonitor.addMark( "RegisterOK", 1 )
        # self.dataLoggingClient().addFileRecord( lfn, "Register", ','.join( catalogs ) if catalogs else "all catalogs", "", "RegisterFile" )
        self.log.info( "file %s has been registered at %s" % ( lfn, ','.join( catalogs ) if catalogs else "all catalogs" ) )
        opFile.Status = "Done"
    # # final check: any failure marks the whole operation in error
    if failedFiles:
      self.log.info( "all files processed, %s files failed to register" % failedFiles )
      self.operation.Error = "some files failed to register"
      return S_ERROR( self.operation.Error )
    return S_OK()
|
miloszz/DIRAC
|
DataManagementSystem/Agent/RequestOperations/RegisterFile.py
|
Python
|
gpl-3.0
| 3,811
|
[
"DIRAC"
] |
766bcba8b68a4ce1daae2c0a75c1272b3787c7c2f03c0d0db874fc0be4679e68
|
from openanalysis.base_data_structures import UnionFind, PriorityQueue
import openanalysis.tree_growth as TreeGrowth
def kruskal_mst(G):
    """
    Finds Minimum Spanning Tree of graph by Kruskal's Algorithm
    :param G: networkx graph
    :return: iterator through edges of Minimum spanning Tree
    """
    # Examine edges in order of increasing weight, keeping an edge only
    # when its endpoints currently lie in different components.
    components = UnionFind()
    for u, v, data in sorted(G.edges(data=True), key=lambda e: e[2]['weight']):
        if components[u] != components[v]:
            yield (u, v, data)
            components.union(u, v)
def prim(G):
    """
    Finds Minimum Spanning Tree of graph by Prim's Algorithm
    :param G: networkx graph
    :return: iterator through edges of Minimum spanning Tree
    """
    import heapq  # hoisted: the original re-imported it inside the vertex loop
    V = G.nodes()  # all vertices; assumes the networkx 1.x list API -- TODO confirm
    while V:
        # Nodes are removed from V as soon as they are visited, so this
        # loop restarts Prim from a fresh root for every connected component.
        u = V.pop(0)
        visited = {u}  # set of visited nodes
        fringe_heap = []  # min-heap of (weight, source, dest) candidate edges
        for v in G.neighbors(u):
            heapq.heappush(fringe_heap, (G.edge[u][v]['weight'], u, v))
        # Repeatedly take the lightest fringe edge and grow the tree.
        while fringe_heap:
            weight, u_star, v_star = heapq.heappop(fringe_heap)
            if v_star in visited:
                continue  # stale heap entry: endpoint already in the tree
            visited.add(v_star)
            V.remove(v_star)
            yield (u_star, v_star, {'weight': weight})  # emit the tree edge
            # Add v_star's outgoing edges to the fringe
            for w_star in G.neighbors(v_star):
                if w_star not in visited:
                    heapq.heappush(fringe_heap, (G.edge[v_star][w_star]['weight'], v_star, w_star))
def dfs(G, root=None):
    """
    Iterates through edges of DFS tree of G
    :param G: networkx Graph
    :param root: node to start DFS from. If it is None, DFS covers every
        component of G; otherwise only the component containing root
    :return: Iterator of edges of DFS tree
    """
    seen = set()
    starts = G.nodes() if root is None else [root]
    for origin in starts:
        if origin in seen:
            continue
        seen.add(origin)
        # Push neighbours in reverse-sorted order so the smallest label is
        # expanded first (the stack is LIFO).
        frontier = [(origin, nbr) for nbr in sorted(G.neighbors(origin), reverse=True)]
        while frontier:
            parent, child = frontier.pop()
            if child in seen:
                continue
            seen.add(child)
            yield (parent, child)
            frontier.extend((child, gc) for gc in sorted(G.neighbors(child), reverse=True))
def bfs(G, root=None):
    """
    Iterates through edges of BFS tree of G

    :param G: networkx Graph
    :param root: node to start BFS from. If it is None, BFS is done for all
        components of G, else BFS is done for the component connected with root
    :return: Iterator of edges of BFS tree
    """
    # deque gives O(1) popleft; list.pop(0) was O(n) per dequeue.
    from collections import deque
    visited = set()
    if root is None:
        nodes = G.nodes()
    else:
        nodes = [root]
    for start in nodes:
        if start in visited:
            continue
        visited.add(start)
        Q = deque([start])
        while Q:
            current = Q.popleft()
            # Visit neighbours in sorted order for a deterministic traversal.
            for n in sorted(G.neighbors(current)):
                if n not in visited:
                    visited.add(n)
                    Q.append(n)
                    yield (current, n)
def dijkstra(G, source=None):
    """
    Returns edges of Single source shortest path starting from source

    :param G: networkx Graph with a 'weight' attribute on every edge
    :param source: source to compute the distances from; defaults to the
        first node returned by G.nodes()
    :return: Iterator through edges (node, predecessor) of SSSP Tree
    """
    if source is None: source = G.nodes()[0]
    V = G.nodes()
    dist, prev = {}, {}  # tentative distance and tree predecessor per node
    # NOTE(review): PriorityQueue is defined outside this view; it appears to
    # be an indexed heap supporting decrease-key via update_task -- confirm.
    Q = PriorityQueue()
    for v in V:
        dist[v] = float("inf")
        prev[v] = None
        Q.add_task(task=v, priority=dist[v])
    dist[source] = 0
    Q.update_task(task=source, new_priority=dist[source])
    visited = set()
    # Each iteration settles exactly one node, so |V| iterations settle all.
    for i in range(0, len(G.nodes())):
        u_star = Q.remove_min()
        # The source has prev None, so the tree's root yields no edge.
        if prev[u_star] is not None:
            yield (u_star, prev[u_star])
        visited.add(u_star)
        # Relax every edge leaving the newly settled node.
        # NOTE(review): G.edge[u][u_star] assumes the adjacency is symmetric
        # (undirected graph) -- confirm before using on directed graphs.
        for u in G.neighbors(u_star):
            if u not in visited and dist[u_star] + G.edge[u][u_star]['weight'] < dist[u]:
                dist[u] = dist[u_star] + G.edge[u][u_star]['weight']
                prev[u] = u_star
                Q.update_task(u, dist[u])
if __name__ == "__main__":
    # Animate how the BFS tree grows; TreeGrowth is provided by an import
    # outside this view.
    TreeGrowth.tree_growth_visualizer(bfs)
|
OpenWeavers/openanalysis
|
analysistest/tree_growth.py
|
Python
|
gpl-3.0
| 4,835
|
[
"VisIt"
] |
6fe6e8705028547f2047fb40792b41707849fc1291ae220460335102edf7fa80
|
# Copyright 2017 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Tests for `tf.data.experimental.SqlDataset`."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from tensorflow.python.data.experimental.kernel_tests import sql_dataset_test_base
from tensorflow.python.framework import dtypes
from tensorflow.python.framework import errors
from tensorflow.python.framework import test_util
from tensorflow.python.platform import test
@test_util.run_all_in_graph_and_eager_modes
class SqlDatasetTest(sql_dataset_test_base.SqlDatasetTestBase):
# Test that SqlDataset can read from a database table.
def testReadResultSet(self):
for _ in range(2): # Run twice to verify statelessness of db operations.
dataset = self._createSqlDataset(
query="SELECT first_name, last_name, motto FROM students "
"ORDER BY first_name DESC",
output_types=(dtypes.string, dtypes.string, dtypes.string),
num_repeats=2)
self.assertDatasetProduces(
dataset,
expected_output=[(b"John", b"Doe", b"Hi!"),
(b"Jane", b"Moe", b"Hi again!")] * 2,
num_test_iterations=2)
# Test that SqlDataset works on a join query.
def testReadResultSetJoinQuery(self):
get_next = self.getNext(
self._createSqlDataset(
query="SELECT students.first_name, state, motto FROM students "
"INNER JOIN people "
"ON students.first_name = people.first_name "
"AND students.last_name = people.last_name",
output_types=(dtypes.string, dtypes.string, dtypes.string)))
self.assertEqual((b"John", b"California", b"Hi!"),
self.evaluate(get_next()))
with self.assertRaises(errors.OutOfRangeError):
self.evaluate(get_next())
# Test that SqlDataset can read a database entry with a null-terminator
# in the middle of the text and place the entry in a `string` tensor.
def testReadResultSetNullTerminator(self):
get_next = self.getNext(
self._createSqlDataset(
query="SELECT first_name, last_name, favorite_nonsense_word "
"FROM students ORDER BY first_name DESC",
output_types=(dtypes.string, dtypes.string, dtypes.string)))
self.assertEqual((b"John", b"Doe", b"n\0nsense"), self.evaluate(get_next()))
self.assertEqual((b"Jane", b"Moe", b"nonsense\0"),
self.evaluate(get_next()))
with self.assertRaises(errors.OutOfRangeError):
self.evaluate(get_next())
# Test that SqlDataset works when used on two different queries.
# Because the output types of the dataset must be determined at graph-creation
# time, the two queries must have the same number and types of columns.
def testReadResultSetReuseSqlDataset(self):
get_next = self.getNext(
self._createSqlDataset(
query="SELECT first_name, last_name, motto FROM students "
"ORDER BY first_name DESC",
output_types=(dtypes.string, dtypes.string, dtypes.string)))
self.assertEqual((b"John", b"Doe", b"Hi!"), self.evaluate(get_next()))
self.assertEqual((b"Jane", b"Moe", b"Hi again!"), self.evaluate(get_next()))
with self.assertRaises(errors.OutOfRangeError):
self.evaluate(get_next())
get_next = self.getNext(
self._createSqlDataset(
query="SELECT first_name, last_name, state FROM people "
"ORDER BY first_name DESC",
output_types=(dtypes.string, dtypes.string, dtypes.string)))
self.assertEqual((b"John", b"Doe", b"California"),
self.evaluate(get_next()))
self.assertEqual((b"Benjamin", b"Franklin", b"Pennsylvania"),
self.evaluate(get_next()))
with self.assertRaises(errors.OutOfRangeError):
self.evaluate(get_next())
# Test that an `OutOfRangeError` is raised on the first call to
# `get_next_str_only` if result set is empty.
def testReadEmptyResultSet(self):
get_next = self.getNext(
self._createSqlDataset(
query="SELECT first_name, last_name, motto FROM students "
"WHERE first_name = 'Nonexistent'",
output_types=(dtypes.string, dtypes.string, dtypes.string)))
with self.assertRaises(errors.OutOfRangeError):
self.evaluate(get_next())
# Test that an error is raised when `driver_name` is invalid.
def testReadResultSetWithInvalidDriverName(self):
with self.assertRaises(errors.InvalidArgumentError):
dataset = self._createSqlDataset(
driver_name="sqlfake",
query="SELECT first_name, last_name, motto FROM students "
"ORDER BY first_name DESC",
output_types=(dtypes.string, dtypes.string, dtypes.string))
self.assertDatasetProduces(dataset, expected_output=[])
# Test that an error is raised when a column name in `query` is nonexistent
def testReadResultSetWithInvalidColumnName(self):
get_next = self.getNext(
self._createSqlDataset(
query="SELECT first_name, last_name, fake_column FROM students "
"ORDER BY first_name DESC",
output_types=(dtypes.string, dtypes.string, dtypes.string)))
with self.assertRaises(errors.UnknownError):
self.evaluate(get_next())
# Test that an error is raised when there is a syntax error in `query`.
def testReadResultSetOfQueryWithSyntaxError(self):
get_next = self.getNext(
self._createSqlDataset(
query="SELEmispellECT first_name, last_name, motto FROM students "
"ORDER BY first_name DESC",
output_types=(dtypes.string, dtypes.string, dtypes.string)))
with self.assertRaises(errors.UnknownError):
self.evaluate(get_next())
# Test that an error is raised when the number of columns in `query`
# does not match the length of `, output_types`.
def testReadResultSetWithMismatchBetweenColumnsAndOutputTypes(self):
get_next = self.getNext(
self._createSqlDataset(
query="SELECT first_name, last_name FROM students "
"ORDER BY first_name DESC",
output_types=(dtypes.string, dtypes.string, dtypes.string)))
with self.assertRaises(errors.InvalidArgumentError):
self.evaluate(get_next())
# Test that no results are returned when `query` is an insert query rather
# than a select query. In particular, the error refers to the number of
# output types passed to the op not matching the number of columns in the
# result set of the query (namely, 0 for an insert statement.)
def testReadResultSetOfInsertQuery(self):
get_next = self.getNext(
self._createSqlDataset(
query="INSERT INTO students (first_name, last_name, motto) "
"VALUES ('Foo', 'Bar', 'Baz'), ('Fizz', 'Buzz', 'Fizzbuzz')",
output_types=(dtypes.string, dtypes.string, dtypes.string)))
with self.assertRaises(errors.InvalidArgumentError):
self.evaluate(get_next())
# Test that `SqlDataset` can read an integer from a SQLite database table and
# place it in an `int8` tensor.
def testReadResultSetInt8(self):
get_next = self.getNext(
self._createSqlDataset(
query="SELECT first_name, desk_number FROM students "
"ORDER BY first_name DESC",
output_types=(dtypes.string, dtypes.int8)))
self.assertEqual((b"John", 9), self.evaluate(get_next()))
self.assertEqual((b"Jane", 127), self.evaluate(get_next()))
with self.assertRaises(errors.OutOfRangeError):
self.evaluate(get_next())
# Test that `SqlDataset` can read a negative or 0-valued integer from a
# SQLite database table and place it in an `int8` tensor.
def testReadResultSetInt8NegativeAndZero(self):
get_next = self.getNext(
self._createSqlDataset(
query="SELECT first_name, income, favorite_negative_number "
"FROM students "
"WHERE first_name = 'John' ORDER BY first_name DESC",
output_types=(dtypes.string, dtypes.int8, dtypes.int8)))
self.assertEqual((b"John", 0, -2), self.evaluate(get_next()))
with self.assertRaises(errors.OutOfRangeError):
self.evaluate(get_next())
# Test that `SqlDataset` can read a large (positive or negative) integer from
# a SQLite database table and place it in an `int8` tensor.
def testReadResultSetInt8MaxValues(self):
get_next = self.getNext(
self._createSqlDataset(
query="SELECT desk_number, favorite_negative_number FROM students "
"ORDER BY first_name DESC",
output_types=(dtypes.int8, dtypes.int8)))
self.assertEqual((9, -2), self.evaluate(get_next()))
# Max and min values of int8
self.assertEqual((127, -128), self.evaluate(get_next()))
with self.assertRaises(errors.OutOfRangeError):
self.evaluate(get_next())
# Test that `SqlDataset` can read an integer from a SQLite database table and
# place it in an `int16` tensor.
def testReadResultSetInt16(self):
get_next = self.getNext(
self._createSqlDataset(
query="SELECT first_name, desk_number FROM students "
"ORDER BY first_name DESC",
output_types=(dtypes.string, dtypes.int16)))
self.assertEqual((b"John", 9), self.evaluate(get_next()))
self.assertEqual((b"Jane", 127), self.evaluate(get_next()))
with self.assertRaises(errors.OutOfRangeError):
self.evaluate(get_next())
# Test that `SqlDataset` can read a negative or 0-valued integer from a
# SQLite database table and place it in an `int16` tensor.
def testReadResultSetInt16NegativeAndZero(self):
get_next = self.getNext(
self._createSqlDataset(
query="SELECT first_name, income, favorite_negative_number "
"FROM students "
"WHERE first_name = 'John' ORDER BY first_name DESC",
output_types=(dtypes.string, dtypes.int16, dtypes.int16)))
self.assertEqual((b"John", 0, -2), self.evaluate(get_next()))
with self.assertRaises(errors.OutOfRangeError):
self.evaluate(get_next())
# Test that `SqlDataset` can read a large (positive or negative) integer from
# a SQLite database table and place it in an `int16` tensor.
def testReadResultSetInt16MaxValues(self):
get_next = self.getNext(
self._createSqlDataset(
query="SELECT first_name, favorite_medium_sized_number "
"FROM students ORDER BY first_name DESC",
output_types=(dtypes.string, dtypes.int16)))
# Max value of int16
self.assertEqual((b"John", 32767), self.evaluate(get_next()))
# Min value of int16
self.assertEqual((b"Jane", -32768), self.evaluate(get_next()))
with self.assertRaises(errors.OutOfRangeError):
self.evaluate(get_next())
# Test that `SqlDataset` can read an integer from a SQLite database table and
# place it in an `int32` tensor.
def testReadResultSetInt32(self):
get_next = self.getNext(
self._createSqlDataset(
query="SELECT first_name, desk_number FROM students "
"ORDER BY first_name DESC",
output_types=(dtypes.string, dtypes.int32)))
self.assertEqual((b"John", 9), self.evaluate(get_next()))
self.assertEqual((b"Jane", 127), self.evaluate(get_next()))
# Test that `SqlDataset` can read a negative or 0-valued integer from a
# SQLite database table and place it in an `int32` tensor.
def testReadResultSetInt32NegativeAndZero(self):
get_next = self.getNext(
self._createSqlDataset(
query="SELECT first_name, income FROM students "
"ORDER BY first_name DESC",
output_types=(dtypes.string, dtypes.int32)))
self.assertEqual((b"John", 0), self.evaluate(get_next()))
self.assertEqual((b"Jane", -20000), self.evaluate(get_next()))
with self.assertRaises(errors.OutOfRangeError):
self.evaluate(get_next())
# Test that `SqlDataset` can read a large (positive or negative) integer from
# a SQLite database table and place it in an `int32` tensor.
def testReadResultSetInt32MaxValues(self):
get_next = self.getNext(
self._createSqlDataset(
query="SELECT first_name, favorite_number FROM students "
"ORDER BY first_name DESC",
output_types=(dtypes.string, dtypes.int32)))
# Max value of int32
self.assertEqual((b"John", 2147483647), self.evaluate(get_next()))
# Min value of int32
self.assertEqual((b"Jane", -2147483648), self.evaluate(get_next()))
with self.assertRaises(errors.OutOfRangeError):
self.evaluate(get_next())
# Test that `SqlDataset` can read a numeric `varchar` from a SQLite database
# table and place it in an `int32` tensor.
def testReadResultSetInt32VarCharColumnAsInt(self):
get_next = self.getNext(
self._createSqlDataset(
query="SELECT first_name, school_id FROM students "
"ORDER BY first_name DESC",
output_types=(dtypes.string, dtypes.int32)))
self.assertEqual((b"John", 123), self.evaluate(get_next()))
self.assertEqual((b"Jane", 1000), self.evaluate(get_next()))
with self.assertRaises(errors.OutOfRangeError):
self.evaluate(get_next())
# Test that `SqlDataset` can read an integer from a SQLite database table
# and place it in an `int64` tensor.
def testReadResultSetInt64(self):
get_next = self.getNext(
self._createSqlDataset(
query="SELECT first_name, desk_number FROM students "
"ORDER BY first_name DESC",
output_types=(dtypes.string, dtypes.int64)))
self.assertEqual((b"John", 9), self.evaluate(get_next()))
self.assertEqual((b"Jane", 127), self.evaluate(get_next()))
with self.assertRaises(errors.OutOfRangeError):
self.evaluate(get_next())
# Test that `SqlDataset` can read a negative or 0-valued integer from a
# SQLite database table and place it in an `int64` tensor.
def testReadResultSetInt64NegativeAndZero(self):
get_next = self.getNext(
self._createSqlDataset(
query="SELECT first_name, income FROM students "
"ORDER BY first_name DESC",
output_types=(dtypes.string, dtypes.int64)))
self.assertEqual((b"John", 0), self.evaluate(get_next()))
self.assertEqual((b"Jane", -20000), self.evaluate(get_next()))
with self.assertRaises(errors.OutOfRangeError):
self.evaluate(get_next())
# Test that `SqlDataset` can read a large (positive or negative) integer from
# a SQLite database table and place it in an `int64` tensor.
def testReadResultSetInt64MaxValues(self):
get_next = self.getNext(
self._createSqlDataset(
query="SELECT first_name, favorite_big_number FROM students "
"ORDER BY first_name DESC",
output_types=(dtypes.string, dtypes.int64)))
# Max value of int64
self.assertEqual((b"John", 9223372036854775807), self.evaluate(get_next()))
# Min value of int64
self.assertEqual((b"Jane", -9223372036854775808), self.evaluate(get_next()))
with self.assertRaises(errors.OutOfRangeError):
self.evaluate(get_next())
# Test that `SqlDataset` can read an integer from a SQLite database table and
# place it in a `uint8` tensor.
def testReadResultSetUInt8(self):
get_next = self.getNext(
self._createSqlDataset(
query="SELECT first_name, desk_number FROM students "
"ORDER BY first_name DESC",
output_types=(dtypes.string, dtypes.uint8)))
self.assertEqual((b"John", 9), self.evaluate(get_next()))
self.assertEqual((b"Jane", 127), self.evaluate(get_next()))
with self.assertRaises(errors.OutOfRangeError):
self.evaluate(get_next())
# Test that `SqlDataset` can read the minimum and maximum uint8 values from a
# SQLite database table and place them in `uint8` tensors.
def testReadResultSetUInt8MinAndMaxValues(self):
get_next = self.getNext(
self._createSqlDataset(
query="SELECT first_name, brownie_points FROM students "
"ORDER BY first_name DESC",
output_types=(dtypes.string, dtypes.uint8)))
# Min value of uint8
self.assertEqual((b"John", 0), self.evaluate(get_next()))
# Max value of uint8
self.assertEqual((b"Jane", 255), self.evaluate(get_next()))
with self.assertRaises(errors.OutOfRangeError):
self.evaluate(get_next())
# Test that `SqlDataset` can read an integer from a SQLite database table
# and place it in a `uint16` tensor.
def testReadResultSetUInt16(self):
get_next = self.getNext(
self._createSqlDataset(
query="SELECT first_name, desk_number FROM students "
"ORDER BY first_name DESC",
output_types=(dtypes.string, dtypes.uint16)))
self.assertEqual((b"John", 9), self.evaluate(get_next()))
self.assertEqual((b"Jane", 127), self.evaluate(get_next()))
with self.assertRaises(errors.OutOfRangeError):
self.evaluate(get_next())
# Test that `SqlDataset` can read the minimum and maximum uint16 values from a
# SQLite database table and place them in `uint16` tensors.
def testReadResultSetUInt16MinAndMaxValues(self):
get_next = self.getNext(
self._createSqlDataset(
query="SELECT first_name, account_balance FROM students "
"ORDER BY first_name DESC",
output_types=(dtypes.string, dtypes.uint16)))
# Min value of uint16
self.assertEqual((b"John", 0), self.evaluate(get_next()))
# Max value of uint16
self.assertEqual((b"Jane", 65535), self.evaluate(get_next()))
with self.assertRaises(errors.OutOfRangeError):
self.evaluate(get_next())
# Test that `SqlDataset` can read a 0-valued and 1-valued integer from a
# SQLite database table and place them as `True` and `False` respectively
# in `bool` tensors.
def testReadResultSetBool(self):
get_next = self.getNext(
self._createSqlDataset(
query="SELECT first_name, registration_complete FROM students "
"ORDER BY first_name DESC",
output_types=(dtypes.string, dtypes.bool)))
self.assertEqual((b"John", True), self.evaluate(get_next()))
self.assertEqual((b"Jane", False), self.evaluate(get_next()))
with self.assertRaises(errors.OutOfRangeError):
self.evaluate(get_next())
# Test that `SqlDataset` can read an integer that is not 0-valued or 1-valued
# from a SQLite database table and place it as `True` in a `bool` tensor.
def testReadResultSetBoolNotZeroOrOne(self):
get_next = self.getNext(
self._createSqlDataset(
query="SELECT first_name, favorite_medium_sized_number "
"FROM students ORDER BY first_name DESC",
output_types=(dtypes.string, dtypes.bool)))
self.assertEqual((b"John", True), self.evaluate(get_next()))
self.assertEqual((b"Jane", True), self.evaluate(get_next()))
with self.assertRaises(errors.OutOfRangeError):
self.evaluate(get_next())
# Test that `SqlDataset` can read a float from a SQLite database table
# and place it in a `float64` tensor.
def testReadResultSetFloat64(self):
get_next = self.getNext(
self._createSqlDataset(
query="SELECT first_name, last_name, victories FROM townspeople "
"ORDER BY first_name",
output_types=(dtypes.string, dtypes.string, dtypes.float64)))
self.assertEqual((b"George", b"Washington", 20.0),
self.evaluate(get_next()))
self.assertEqual((b"John", b"Adams", -19.95), self.evaluate(get_next()))
with self.assertRaises(errors.OutOfRangeError):
self.evaluate(get_next())
# Test that `SqlDataset` can read a float from a SQLite database table beyond
# the precision of 64-bit IEEE, without throwing an error. Test that
# `SqlDataset` identifies such a value as equal to itself.
def testReadResultSetFloat64OverlyPrecise(self):
get_next = self.getNext(
self._createSqlDataset(
query="SELECT first_name, last_name, accolades FROM townspeople "
"ORDER BY first_name",
output_types=(dtypes.string, dtypes.string, dtypes.float64)))
self.assertEqual(
(b"George", b"Washington",
1331241.321342132321324589798264627463827647382647382643874),
self.evaluate(get_next()))
self.assertEqual(
(b"John", b"Adams",
1331241321342132321324589798264627463827647382647382643874.0),
self.evaluate(get_next()))
with self.assertRaises(errors.OutOfRangeError):
self.evaluate(get_next())
# Test that `SqlDataset` can read a float from a SQLite database table,
# representing the largest integer representable as a 64-bit IEEE float
# such that the previous integer is also representable as a 64-bit IEEE float.
# Test that `SqlDataset` can distinguish these two numbers.
def testReadResultSetFloat64LargestConsecutiveWholeNumbersNotEqual(self):
get_next = self.getNext(
self._createSqlDataset(
query="SELECT first_name, last_name, triumphs FROM townspeople "
"ORDER BY first_name",
output_types=(dtypes.string, dtypes.string, dtypes.float64)))
self.assertNotEqual((b"George", b"Washington", 9007199254740992.0),
self.evaluate(get_next()))
self.assertNotEqual((b"John", b"Adams", 9007199254740991.0),
self.evaluate(get_next()))
with self.assertRaises(errors.OutOfRangeError):
self.evaluate(get_next())
if __name__ == "__main__":
test.main()
|
kevin-coder/tensorflow-fork
|
tensorflow/python/data/experimental/kernel_tests/sql_dataset_test.py
|
Python
|
apache-2.0
| 22,191
|
[
"MOE"
] |
35e3e76c1d08656f0f5db9fd7f6609988b6909377413bde625fcc75b258bda44
|
#
# Copyright (C) 2015-2021 University of Oxford
#
# This file is part of msprime.
#
# msprime is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# msprime is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with msprime. If not, see <http://www.gnu.org/licenses/>.
#
"""
Module responsible for defining and running ancestry simulations.
"""
from __future__ import annotations
import collections.abc
import copy
import dataclasses
import inspect
import json
import logging
import math
import struct
import sys
from typing import ClassVar
from typing import Union
import numpy as np
import tskit
from . import core
from . import demography as demog
from . import intervals
from . import mutations
from . import provenance
from msprime import _msprime
logger: logging.Logger = logging.getLogger(__name__)
def _model_factory(model):
    """
    Returns a simulation model corresponding to the specified model.

    - If model is None, the default simulation model is returned.
    - If model is a string, return the corresponding model instance.
    - If model is an instance of AncestryModel, return it as-is.
    - Otherwise raise a TypeError.
    """
    if model is None:
        return StandardCoalescent()
    if isinstance(model, AncestryModel):
        return model
    if not isinstance(model, str):
        raise TypeError(
            "Simulation model must be a string or an instance of AncestryModel"
        )
    # Map of recognised model names to fresh model instances.
    model_map = {
        "hudson": StandardCoalescent(),
        "smc": SmcApproxCoalescent(),
        "smc_prime": SmcPrimeApproxCoalescent(),
        "dtwf": DiscreteTimeWrightFisher(),
        "wf_ped": WrightFisherPedigree(),
    }
    key = model.lower()
    if key not in model_map:
        raise ValueError(
            "Model '{}' unknown. Choose from {}".format(
                model, list(model_map.keys())
            )
        )
    return model_map[key]
def _parse_model_change_events(events):
"""
Parses the specified list of events provided in model_arg[1:] into
AncestryModelChange events. There are two different forms supported,
and model descriptions are anything supported by model_factory.
"""
err = (
"Simulation model change events must be either a two-tuple "
"(time, model), describing the time of the model change and "
"the new model or be an instance of AncestryModelChange."
)
model_change_events = []
for event in events:
if isinstance(event, (tuple, list)):
if len(event) != 2:
raise ValueError(err)
t = event[0]
if t is not None:
try:
t = float(t)
except (TypeError, ValueError):
raise ValueError(
"Model change times must be either a floating point "
"value or None"
)
event = AncestryModelChange(t, _model_factory(event[1]))
elif isinstance(event, AncestryModelChange):
# We don't want to modify our inputs, so take a deep copy.
event = copy.copy(event)
event.model = _model_factory(event.model)
else:
raise TypeError(err)
model_change_events.append(event)
return model_change_events
def _parse_model_arg(model_arg):
"""
Parses the specified model argument from the simulate function,
returning the initial model and any model change events.
"""
err = (
"The model argument must be either (a) a value that can be "
"interpreted as a simulation model or (b) a list in which "
"the first element is a model description and the remaining "
"elements are model change events. These can either be described "
"by a (time, model) tuple or AncestryModelChange instances."
)
if isinstance(model_arg, (list, tuple)):
if len(model_arg) < 1:
raise ValueError(err)
model = _model_factory(model_arg[0])
model_change_events = _parse_model_change_events(model_arg[1:])
else:
model = _model_factory(model_arg)
model_change_events = []
return model, model_change_events
def _filter_events(demographic_events):
    """
    Returns a tuple (demographic_events, model_change_events) which separates
    out the AncestryModelChange events from the list. This is to support the
    pre-1.0 syntax for model changes, where they were included in the
    demographic_events parameter.
    """
    plain_events = []
    raw_model_changes = []
    for event in demographic_events:
        if isinstance(event, AncestryModelChange):
            raw_model_changes.append(event)
        else:
            plain_events.append(event)
    # Resolve any model descriptions into concrete model instances.
    return plain_events, _parse_model_change_events(raw_model_changes)
def _check_population_configurations(population_configurations):
    """Raise TypeError unless every entry is a PopulationConfiguration."""
    message = (
        "Population configurations must be a list of PopulationConfiguration instances"
    )
    for item in population_configurations:
        if not isinstance(item, demog.PopulationConfiguration):
            raise TypeError(message)
# This class is only used in the 0.x interface.
Sample = collections.namedtuple("Sample", ["population", "time"])
def _samples_factory(sample_size, samples, population_configurations):
"""
Returns a list of Sample objects, given the specified inputs.
"""
the_samples = []
if sample_size is not None:
if samples is not None:
raise ValueError("Cannot specify sample size and samples simultaneously.")
if population_configurations is not None:
raise ValueError(
"Cannot specify sample size and population_configurations "
"simultaneously."
)
s = Sample(population=0, time=0.0)
the_samples = [s for _ in range(sample_size)]
# If we have population configurations we may have embedded sample_size
# values telling us how many samples to take from each population.
if population_configurations is not None:
_check_population_configurations(population_configurations)
if samples is None:
the_samples = []
for j, conf in enumerate(population_configurations):
if conf.sample_size is not None:
the_samples += [Sample(j, 0) for _ in range(conf.sample_size)]
else:
for conf in population_configurations:
if conf.sample_size is not None:
raise ValueError(
"Cannot specify population configuration sample size"
" and samples simultaneously"
)
the_samples = samples
elif samples is not None:
the_samples = samples
return the_samples
def _demography_factory(
    Ne, population_configurations, migration_matrix, demographic_events
):
    """Convert 0.x-style demography arguments into a validated Demography."""
    # ignore_sample_size=True: per-population sample sizes are presumably
    # consumed by _samples_factory instead -- see _parse_simulate.
    old_style = demog.Demography.from_old_style(
        population_configurations,
        migration_matrix=migration_matrix,
        demographic_events=demographic_events,
        Ne=Ne,
        ignore_sample_size=True,
    )
    return old_style.validate()
def _build_initial_tables(*, sequence_length, samples, ploidy, demography, pedigree):
    """
    Builds the initial tskit.TableCollection describing the samples for a
    simulation.

    :raises ValueError: if a sample references a negative or out-of-range
        population ID.
    """
    # NOTE: this is only used in the simulate() codepath.
    tables = tskit.TableCollection(sequence_length)
    if pedigree is None:
        for index, (population, time) in enumerate(samples):
            # Validate BEFORE mutating the tables, so a bad sample does not
            # leave a partially-built node table behind (previously the row
            # was added first and validated afterwards).
            if population < 0:
                raise ValueError(f"Negative population ID in sample at index {index}")
            if population >= demography.num_populations:
                raise ValueError(
                    f"Invalid population reference '{population}' in sample "
                    f"at index {index}"
                )
            tables.nodes.add_row(
                flags=tskit.NODE_IS_SAMPLE,
                time=time,
                population=population,
            )
    else:
        # TODO This should be removed - pedigree code path should only be callable
        # from sim_ancestry
        for parents, time, is_sample in zip(
            pedigree.parents, pedigree.time, pedigree.is_sample
        ):
            # We encode the parents in the metadata for now, but see
            # https://github.com/tskit-dev/tskit/issues/852
            encoded_parents = struct.pack("=ii", *parents)
            ind_id = tables.individuals.add_row(0, metadata=encoded_parents)
            node_flags = tskit.NODE_IS_SAMPLE if is_sample else 0
            for _ in range(ploidy):
                tables.nodes.add_row(node_flags, time, population=0, individual=ind_id)
    # This is for the simulate() code path so we don't add metadata schemas
    # and insert the user metadata in directly as encoded JSON, as before.
    for population in demography.populations:
        encoded_metadata = b""
        if population.extra_metadata is not None:
            encoded_metadata = json.dumps(population.extra_metadata).encode()
        tables.populations.add_row(encoded_metadata)
    return tables
def _parse_simulate(
    sample_size=None,
    *,
    Ne=1,
    length=None,
    recombination_rate=None,
    recombination_map=None,
    population_configurations=None,
    pedigree=None,
    migration_matrix=None,
    samples=None,
    demographic_events=None,
    model=None,
    record_migrations=False,
    from_ts=None,
    start_time=None,
    end_time=None,
    record_full_arg=False,
    num_labels=None,
    random_seed=None,
):
    """
    Argument parser for the simulate frontend. Interprets all the parameters
    and returns an appropriate instance of Simulator.

    :raises ValueError: for mutually inconsistent or out-of-range arguments.
    :raises TypeError: if from_ts or recombination_map have the wrong type.
    """
    if Ne <= 0:
        raise ValueError("Population size must be positive")
    # Renamed from the misleading `samples_specified`: this flag is True
    # precisely when NO way of specifying samples has been provided.
    no_samples_specified = (
        sample_size is None
        and population_configurations is None
        and samples is None
        and from_ts is None
    )
    if no_samples_specified:
        raise ValueError(
            "Either sample_size, samples, population_configurations or from_ts must "
            "be specified"
        )
    samples = _samples_factory(sample_size, samples, population_configurations)
    model, model_change_events = _parse_model_arg(model)
    if demographic_events is not None:
        # Pre-1.0 code could embed AncestryModelChange events in the
        # demographic_events list; split them out here.
        demographic_events, old_style_model_change_events = _filter_events(
            demographic_events
        )
        if len(old_style_model_change_events) > 0:
            if len(model_change_events) > 0:
                raise ValueError(
                    "Cannot specify AncestryModelChange events using both new-style "
                    "and pre 1.0 syntax"
                )
            model_change_events = old_style_model_change_events
    demography = _demography_factory(
        Ne, population_configurations, migration_matrix, demographic_events
    )
    # The logic for checking from_ts and recombination map is bound together
    # in a complicated way, so we can factor them out into separate functions.
    if from_ts is None:
        if len(samples) < 2:
            raise ValueError("Sample size must be >= 2")
    else:
        if len(samples) > 0:
            raise ValueError("Cannot specify samples with from_ts")
        if not isinstance(from_ts, tskit.TreeSequence):
            raise TypeError("from_ts must be a TreeSequence instance.")
        if demography.num_populations != from_ts.num_populations:
            raise ValueError(
                "Mismatch in the number of populations in from_ts and simulation "
                "parameters. The number of populations in the simulation must be "
                "equal to the number of populations in from_ts"
            )
    discrete_genome = False
    if recombination_map is None:
        # Default to 1 if no from_ts; otherwise default to the sequence length
        # of from_ts
        if from_ts is None:
            the_length = 1 if length is None else length
        else:
            the_length = from_ts.sequence_length if length is None else length
        the_rate = 0 if recombination_rate is None else recombination_rate
        if the_length <= 0:
            raise ValueError("Cannot provide non-positive sequence length")
        if the_rate < 0:
            raise ValueError("Cannot provide negative recombination rate")
        recombination_map = intervals.RateMap.uniform(the_length, the_rate)
    else:
        if isinstance(recombination_map, intervals.RecombinationMap):
            if recombination_map._is_discrete:
                logger.info("Emulating v0.x discrete sites simulation")
                discrete_genome = True
            # Convert from the legacy RecombinationMap class
            recombination_map = recombination_map.map
        elif not isinstance(recombination_map, intervals.RateMap):
            raise TypeError("RateMap instance required.")
        if length is not None or recombination_rate is not None:
            raise ValueError(
                "Cannot specify length/recombination_rate along with "
                "a recombination map"
            )
        if from_ts is not None:
            if recombination_map.sequence_length != from_ts.sequence_length:
                raise ValueError(
                    "Recombination map and from_ts must have identical " "sequence_length"
                )
    if num_labels is not None and num_labels < 1:
        raise ValueError("Must have at least one structured coalescent label")
    if from_ts is None:
        tables = _build_initial_tables(
            sequence_length=recombination_map.sequence_length,
            samples=samples,
            # FIXME not clear how this is all working now. We shouldn't have
            # the pedigree as a parameter here at all which would probably
            # simplify things.
            ploidy=2,
            demography=demography,
            pedigree=pedigree,
        )
    else:
        tables = from_ts.tables
    # It's useful to call _parse_simulate outside the context of the main
    # entry point - so we want to get good seeds in this case too.
    random_seed = _parse_random_seed(random_seed)
    random_generator = _msprime.RandomGenerator(random_seed)
    sim = Simulator(
        tables=tables,
        recombination_map=recombination_map,
        model=model,
        store_migrations=record_migrations,
        store_full_arg=record_full_arg,
        start_time=start_time,
        end_time=end_time,
        num_labels=num_labels,
        demography=demography,
        model_change_events=model_change_events,
        # Defaults for the values that are not supported through simulate()
        gene_conversion_map=intervals.RateMap.uniform(
            recombination_map.sequence_length, 0
        ),
        gene_conversion_tract_length=0,
        discrete_genome=discrete_genome,
        ploidy=2,
        random_generator=random_generator,
    )
    return sim
def _parse_random_seed(seed):
"""
Parse the specified random seed value. If no seed is provided, generate a
high-quality random seed.
"""
if seed is None:
seed = core.get_random_seed()
seed = int(seed)
return seed
def _parse_replicate_index(*, replicate_index, random_seed, num_replicates):
"""
Parse the replicate_index value, and ensure that its value makes sense
in the context of the other parameters.
"""
if replicate_index is None:
return None
if random_seed is None:
raise ValueError("Cannot specify the replicate_index without a random_seed")
if num_replicates is not None:
raise ValueError("Cannot specify the replicate_index as well as num_replicates")
replicate_index = int(replicate_index)
if replicate_index < 0:
raise ValueError("Cannot specify negative replicate_index.")
return replicate_index
def _build_provenance(command, random_seed, frame):
    """
    Construct the provenance dictionary recorded in replicate tree
    sequences, reading the calling function's argument values from the
    supplied stack frame.
    """
    argspec = inspect.getargvalues(frame)
    # num_replicates is excluded because provenance is per replicate;
    # replicate_index is excluded because it is inserted per replicate.
    excluded = ("num_replicates", "replicate_index")
    parameters = {"command": command}
    for arg in argspec.args:
        if arg not in excluded:
            parameters[arg] = argspec.locals[arg]
    # Record the parsed seed, not whatever value the caller passed in.
    parameters["random_seed"] = random_seed
    return provenance.get_provenance_dict(parameters)
def simulate(
    sample_size=None,
    *,
    Ne=1,
    length=None,
    recombination_rate=None,
    recombination_map=None,
    mutation_rate=None,
    population_configurations=None,
    pedigree=None,
    migration_matrix=None,
    demographic_events=None,
    samples=None,
    model=None,
    record_migrations=False,
    random_seed=None,
    replicate_index=None,
    mutation_generator=None,
    num_replicates=None,
    from_ts=None,
    start_time=None,
    end_time=None,
    record_full_arg=False,
    num_labels=None,
    record_provenance=True,
):
    """
    Simulates the coalescent with recombination under the specified model
    parameters and returns the resulting :class:`tskit.TreeSequence`. Note that
    Ne is the effective diploid population size (so the effective number
    of genomes in the population is 2*Ne), but ``sample_size`` is the
    number of (monoploid) genomes sampled.

    :param int sample_size: The number of sampled monoploid genomes. If not
        specified or None, this defaults to the sum of the subpopulation sample
        sizes. Either ``sample_size``, ``population_configurations`` or
        ``samples`` must be specified.
    :param float Ne: The effective (diploid) population size. This defaults to
        1 if not specified.
    :param float length: The length of the simulated region in bases.
        This parameter cannot be used along with ``recombination_map``.
        Defaults to 1 if not specified.
    :param float recombination_rate: The rate of recombination per base
        per generation. This parameter cannot be used along with
        ``recombination_map``. Defaults to 0 if not specified.
    :param recombination_map: The map
        describing the changing rates of recombination along the simulated
        chromosome. This parameter cannot be used along with the
        ``recombination_rate`` or ``length`` parameters, as these
        values are encoded within the map. Defaults to a uniform rate as
        described in the ``recombination_rate`` parameter if not specified.
    :type recombination_map: :class:`.RecombinationMap`
    :param float mutation_rate: The rate of infinite sites
        mutations per unit of sequence length per generation.
        If not specified, no mutations are generated. This option only
        allows for infinite sites mutations with a binary (i.e., 0/1)
        alphabet. For more control over the mutational process, please
        use the :func:`.mutate` function.
    :param list population_configurations: The list of
        :class:`.PopulationConfiguration` instances describing the
        sampling configuration, relative sizes and growth rates of
        the populations to be simulated. If this is not specified,
        a single population with a sample of size ``sample_size``
        is assumed.
    :type population_configurations: list or None.
    :param list migration_matrix: The matrix describing the rates of migration
        between all pairs of populations. If :math:`N` populations are defined
        in the ``population_configurations`` parameter, then the migration
        matrix must be an :math:`N \\times N` matrix with 0 on the diagonal,
        consisting of :math:`N` lists of length :math:`N` or an :math:`N
        \\times N` numpy array. The :math:`[j, k]^{th}` element of the
        migration matrix gives the expected number of migrants moving from
        population :math:`k` to population :math:`j` per generation, divided by
        the size of population :math:`j`. When simulating from the
        discrete-time Wright-Fisher model (``model = "dtwf"``), the row sums of
        the migration matrix must not exceed 1. There are no sum constraints for
        migration rates in continuous-time models.
    :param list demographic_events: The list of demographic events to
        simulate. Demographic events describe changes to the populations
        in the past. Events should be supplied in non-decreasing
        order of time in the past. Events with the same time value will be
        applied sequentially in the order that they were supplied before the
        simulation algorithm continues with the next time step.
    :param list samples: The list specifying the location and time of
        all samples. This parameter may be used to specify historical
        samples, and cannot be used in conjunction with the ``sample_size``
        parameter. Each sample is a (``population``, ``time``) pair
        such that the sample in position ``j`` in the list of samples
        is drawn in the specified population at the specified time. Time
        is measured in generations ago, as elsewhere.
    :param int random_seed: The random seed. If this is `None`, a
        random seed will be automatically generated. Valid random
        seeds must be between 1 and :math:`2^{32} - 1`.
    :param int num_replicates: The number of replicates of the specified
        parameters to simulate. If this is not specified or None,
        no replication is performed and a :class:`tskit.TreeSequence` object
        returned. If `num_replicates` is provided, the specified
        number of replicates is performed, and an iterator over the
        resulting :class:`tskit.TreeSequence` objects returned.
    :param tskit.TreeSequence from_ts: If specified, initialise the simulation
        from the root segments of this tree sequence and return the
        completed tree sequence. Please see :ref:`here
        <sec_ancestry_initial_state>` for details on the required properties
        of this tree sequence and its interactions with other parameters.
        (Default: None).
    :param float start_time: If specified, set the initial time that the
        simulation starts to this value. If not specified, the start
        time is zero if performing a simulation of a set of samples,
        or is the time of the oldest node if simulating from an
        existing tree sequence (see the ``from_ts`` parameter).
    :param float end_time: If specified, terminate the simulation at the
        specified time. In the returned tree sequence, all rootward paths from
        samples with time < end_time will end in a node with one child with
        time equal to end_time. Sample nodes with time >= end_time will
        also be present in the output tree sequence. If not specified or ``None``,
        run the simulation until all samples have an MRCA at all positions in
        the genome.
    :param bool record_full_arg: If True, record all intermediate nodes
        arising from common ancestor and recombination events in the output
        tree sequence. This will result in unary nodes (i.e., nodes in marginal
        trees that have only one child). Defaults to False.
    :param model: The simulation model to use.
        This can either be a string (e.g., ``"smc_prime"``) or an instance of
        a simulation model class (e.g., ``msprime.DiscreteTimeWrightFisher()``).
        Please see the :ref:`sec_ancestry_models` section for more details
        on specifying ancestry models.
    :type model: str or simulation model instance
    :param bool record_provenance: If True, record all configuration and parameters
        required to recreate the tree sequence. These can be accessed
        via ``TreeSequence.provenances()``).
    :return: The :class:`tskit.TreeSequence` object representing the results
        of the simulation if no replication is performed, or an
        iterator over the independent replicates simulated if the
        `num_replicates` parameter has been used.
    :rtype: :class:`tskit.TreeSequence` or an iterator over
        :class:`tskit.TreeSequence` replicates.
    """
    # Validate the replicate_index/random_seed/num_replicates combination
    # before consuming the seed, so errors are raised early.
    replicate_index = _parse_replicate_index(
        random_seed=random_seed,
        num_replicates=num_replicates,
        replicate_index=replicate_index,
    )
    random_seed = _parse_random_seed(random_seed)
    provenance_dict = None
    if record_provenance:
        # The provenance record is built by introspecting this frame's
        # argument values; see _build_provenance.
        frame = inspect.currentframe()
        provenance_dict = _build_provenance("simulate", random_seed, frame)
    if mutation_generator is not None:
        # This error was added in version 0.6.1.
        # (Fixed message typo: "not longer" -> "no longer".)
        raise ValueError(
            "mutation_generator is no longer supported. Please use "
            "msprime.mutate instead"
        )

    if mutation_rate is not None:
        # There is ambiguity in how we should throw mutations onto partially
        # built tree sequences: on the whole thing, or must the newly added
        # topology? Before or after start_time? We avoid this complexity by
        # asking the user to use mutate(), which should have the required
        # flexibility.
        if from_ts is not None:
            raise ValueError(
                "Cannot specify mutation rate combined with from_ts. Please use "
                "msprime.mutate on the final tree sequence instead"
            )
        # There is ambiguity in how the start_time argument should interact with
        # the mutation generator: should we throw mutations down on the whole
        # tree or just the (partial) edges after start_time? To avoid complicating
        # things here, make the user use mutate() which should have the flexibility
        # to do whatever is needed.
        if start_time is not None and start_time > 0:
            raise ValueError(
                "Cannot specify mutation rate combined with a non-zero "
                "start_time. Please use msprime.mutate on the returned "
                "tree sequence instead"
            )
        mutation_rate = float(mutation_rate)

    sim = _parse_simulate(
        sample_size=sample_size,
        Ne=Ne,
        length=length,
        recombination_rate=recombination_rate,
        recombination_map=recombination_map,
        population_configurations=population_configurations,
        pedigree=pedigree,
        migration_matrix=migration_matrix,
        demographic_events=demographic_events,
        samples=samples,
        model=model,
        record_migrations=record_migrations,
        from_ts=from_ts,
        start_time=start_time,
        end_time=end_time,
        record_full_arg=record_full_arg,
        num_labels=num_labels,
        random_seed=random_seed,
    )
    return _wrap_replicates(
        sim,
        num_replicates=num_replicates,
        replicate_index=replicate_index,
        provenance_dict=provenance_dict,
        mutation_rate=mutation_rate,
    )
def _wrap_replicates(
simulator,
*,
num_replicates,
replicate_index,
provenance_dict,
mutation_rate=None,
):
"""
Wrapper for the logic used to run replicate simulations for the two
frontends.
"""
if num_replicates is None and replicate_index is None:
# Default single-replicate case.
replicate_index = 0
if replicate_index is not None:
num_replicates = replicate_index + 1
iterator = simulator.run_replicates(
num_replicates,
mutation_rate=mutation_rate,
provenance_dict=provenance_dict,
)
if replicate_index is not None:
deque = collections.deque(iterator, maxlen=1)
return deque.pop()
else:
return iterator
def _parse_rate_map(rate_param, sequence_length, name):
    """
    Convert the given rate parameter (None, a number, or a RateMap) into
    a RateMap covering the specified sequence length.
    """
    # Note: in the future we might have another clause here where we
    # allow for a different map per population, accepted as either a list
    # of N rate maps or a dictionary mapping population names to maps.
    # See https://github.com/tskit-dev/msprime/issues/1095
    msg_head = f"Error in parsing rate map for {name}: "
    if not isinstance(rate_param, intervals.RateMap):
        # Uniform rate; None means a rate of zero.
        rate = 0 if rate_param is None else float(rate_param)
        return intervals.RateMap.uniform(sequence_length, rate)
    if rate_param.sequence_length != sequence_length:
        raise ValueError(msg_head + "sequence_length must match")
    return rate_param
def _insert_sample_sets(sample_sets, demography, default_ploidy, tables):
    """
    Add the individuals and sample nodes described by the specified list
    of SampleSet objects to the specified table collection.
    """
    for sample_set in sample_sets:
        num_individuals = sample_set.num_samples
        population = demography[sample_set.population]
        sample_time = (
            population.sampling_time if sample_set.time is None else sample_set.time
        )
        sample_ploidy = (
            default_ploidy if sample_set.ploidy is None else sample_set.ploidy
        )
        logger.info(
            f"Sampling {num_individuals} individuals with ploidy {sample_ploidy} "
            f"in population {population.id} (name='{population.name}') "
            f"at time {sample_time}"
        )
        # Each new individual owns `sample_ploidy` consecutive nodes; the
        # individual IDs are offset by the individuals already present.
        node_individual = len(tables.individuals) + np.repeat(
            np.arange(num_individuals, dtype=np.int32), sample_ploidy
        )
        tables.individuals.append_columns(
            flags=np.zeros(num_individuals, dtype=np.uint32)
        )
        num_nodes = num_individuals * sample_ploidy
        tables.nodes.append_columns(
            flags=np.full(num_nodes, tskit.NODE_IS_SAMPLE, dtype=np.uint32),
            time=np.full(num_nodes, sample_time),
            population=np.full(num_nodes, population.id, dtype=np.int32),
            individual=node_individual,
        )
def _parse_sample_sets(sample_sets, demography):
    """
    Validate a list of SampleSet objects against the demography and return
    a normalised deep copy. The input objects are not modified.
    """
    sample_sets = copy.deepcopy(sample_sets)
    for sample_set in sample_sets:
        if not isinstance(sample_set, SampleSet):
            raise TypeError("msprime.SampleSet object required")
        if not core.isinteger(sample_set.num_samples):
            raise TypeError(
                "The number of samples to draw from a population must be an integer"
            )
        sample_set.num_samples = int(sample_set.num_samples)
        if sample_set.num_samples < 0:
            raise ValueError("Number of samples cannot be negative")
        if sample_set.population is None:
            # The population may only be left unspecified when there is
            # exactly one population to choose from.
            if demography.num_populations != 1:
                raise ValueError(
                    "Must specify a SampleSet population in multipopulation models"
                )
            sample_set.population = 0
    total = sum(sample_set.num_samples for sample_set in sample_sets)
    if total == 0:
        raise ValueError("Zero samples specified")
    return sample_sets
def _parse_samples(samples, demography, ploidy, tables):
    """
    Interpret the "samples" value for sim_ancestry (a sequence of
    SampleSet objects, a {population: count} mapping, or a plain integer)
    and insert the corresponding individuals and nodes into the tables.
    """
    if isinstance(samples, collections.abc.Sequence):
        sample_sets = samples
    elif isinstance(samples, collections.abc.Mapping):
        sample_sets = [
            SampleSet(count, pop) for pop, count in samples.items()
        ]
    elif core.isinteger(samples):
        if len(tables.populations) != 1:
            raise ValueError(
                "Numeric samples can only be used in single population models. "
                "Please use Demography.sample() to generate a list of samples "
                "for your model, which can be used instead."
            )
        sample_sets = [SampleSet(samples)]
    else:
        raise TypeError(
            f"The value '{samples}' cannot be interpreted as sample specification. "
            "Samples must either be a single integer, a dict that maps populations "
            "to the number of samples for that population, or a list of SampleSet "
            "objects. Please see the online documentation for more details on "
            "the different forms."
        )
    sample_sets = _parse_sample_sets(sample_sets, demography)
    _insert_sample_sets(sample_sets, demography, ploidy, tables)
def _parse_sim_ancestry(
    samples=None,
    *,
    sequence_length=None,
    recombination_rate=None,
    gene_conversion_rate=None,
    gene_conversion_tract_length=None,
    discrete_genome=None,
    population_size=None,
    demography=None,
    ploidy=None,
    model=None,
    initial_state=None,
    start_time=None,
    end_time=None,
    record_migrations=None,
    record_full_arg=None,
    num_labels=None,
    random_seed=None,
    init_for_debugger=False,
):
    """
    Argument parser for the sim_ancestry frontend. Interprets all the parameters
    and returns an appropriate instance of Simulator.

    :raises TypeError: if an argument has an incompatible type.
    :raises ValueError: if an argument value is invalid or if mutually
        incompatible arguments are combined.
    """
    # As a general rule we try to cast any input value to the required types
    # early and in a way that provides an interpretable traceback.
    # Simple defaults.
    start_time = 0 if start_time is None else float(start_time)
    end_time = math.inf if end_time is None else float(end_time)
    discrete_genome = core._parse_flag(discrete_genome, default=True)
    record_full_arg = core._parse_flag(record_full_arg, default=False)
    record_migrations = core._parse_flag(record_migrations, default=False)
    if initial_state is not None:
        if isinstance(initial_state, tskit.TreeSequence):
            initial_state = initial_state.dump_tables()
        elif not isinstance(initial_state, tskit.TableCollection):
            raise TypeError(
                "initial_state must either be a TreeSequence or TableCollection instance"
            )
    if sequence_length is None:
        # These are all the cases in which we derive the sequence_length
        # from somewhere else.
        if initial_state is not None:
            sequence_length = initial_state.sequence_length
        elif recombination_rate is None and gene_conversion_rate is None:
            # In this case, we're doing single-locus simulations, so a sequence
            # length of 1 makes sense.
            sequence_length = 1
        elif isinstance(recombination_rate, intervals.RateMap):
            sequence_length = recombination_rate.sequence_length
        elif isinstance(gene_conversion_rate, intervals.RateMap):
            sequence_length = gene_conversion_rate.sequence_length
        else:
            # Message typos fixed: "the the" -> "the", "of implicitly"
            # -> "or implicitly".
            raise ValueError(
                "A sequence_length value must be specified. This can be either "
                "via the sequence_length parameter itself, or implicitly "
                "through using a RateMap instance for the recombination_rate "
                "or gene_conversion_rate parameters, or via the initial_state "
                "tables. "
            )
    else:
        sequence_length = float(sequence_length)
    assert sequence_length is not None
    if discrete_genome and math.floor(sequence_length) != sequence_length:
        raise ValueError("Must have integer sequence length with discrete_genome=True")
    recombination_map = _parse_rate_map(
        recombination_rate, sequence_length, "recombination"
    )
    gene_conversion_map = _parse_rate_map(
        gene_conversion_rate, sequence_length, "gene conversion"
    )
    if gene_conversion_tract_length is None:
        if gene_conversion_rate is None:
            # It doesn't matter what the tract_length is, just set a
            # value to keep the low-level code happy.
            gene_conversion_tract_length = 1
        else:
            raise ValueError(
                "Must specify tract length when simulating gene conversion"
            )
    else:
        if gene_conversion_rate is None:
            raise ValueError(
                "Must specify gene conversion rate along with tract length"
            )
        gene_conversion_tract_length = float(gene_conversion_tract_length)
    # Default to diploid
    ploidy = 2 if ploidy is None else ploidy
    if not core.isinteger(ploidy):
        raise TypeError("ploidy must be an integer")
    ploidy = int(ploidy)
    if ploidy < 1:
        raise ValueError("ploidy must be >= 1")
    model, model_change_events = _parse_model_arg(model)
    is_dtwf = isinstance(model, DiscreteTimeWrightFisher)
    # Check the demography. If no demography is specified, we default to a
    # single-population model with a given population size. If an initial
    # state is provided, we default to using that number of populations.
    if demography is None:
        if is_dtwf:
            # A default size of 1 isn't so smart for DTWF and almost certainly
            # an error.
            if population_size is None:
                raise ValueError(
                    "When using the DTWF model, the population size must be set "
                    "explicitly, either using the population_size or demography "
                    "arguments."
                )
        num_populations = 1 if initial_state is None else len(initial_state.populations)
        population_size = 1 if population_size is None else float(population_size)
        demography = demog.Demography.isolated_model(
            [population_size] * num_populations
        )
    elif isinstance(demography, demog.Demography):
        if population_size is not None:
            raise ValueError("Cannot specify demography and population size")
    else:
        raise TypeError("demography argument must be an instance of msprime.Demography")
    demography = demography.validate()
    if initial_state is None:
        if samples is None and not init_for_debugger:
            raise ValueError(
                "Either the samples or initial_state arguments must be provided"
            )
        initial_state = tskit.TableCollection(sequence_length)
        demography.insert_populations(initial_state)
        if not init_for_debugger:
            _parse_samples(samples, demography, ploidy, initial_state)
    else:
        if samples is not None:
            raise ValueError("Cannot specify both samples and initial_state")
        if sequence_length != initial_state.sequence_length:
            # Fixed missing space which previously produced "with thevalue".
            raise ValueError(
                "The initial_state sequence length must be consistent with the "
                "value derived from either the sequence_length, "
                "recombination_rate or gene_conversion_rate parameters."
            )
        if len(initial_state.populations) == 0:
            raise ValueError(
                "initial_state tables must define at least one population."
            )
    # It's useful to call _parse_sim_ancestry outside the context of the main
    # entry point - so we want to get good seeds in this case too.
    random_seed = _parse_random_seed(random_seed)
    random_generator = _msprime.RandomGenerator(random_seed)
    return Simulator(
        tables=initial_state,
        recombination_map=recombination_map,
        gene_conversion_map=gene_conversion_map,
        gene_conversion_tract_length=gene_conversion_tract_length,
        discrete_genome=discrete_genome,
        ploidy=ploidy,
        demography=demography,
        model=model,
        model_change_events=model_change_events,
        store_migrations=record_migrations,
        store_full_arg=record_full_arg,
        start_time=start_time,
        end_time=end_time,
        num_labels=num_labels,
        random_generator=random_generator,
    )
def sim_ancestry(
    samples=None,
    *,
    demography=None,
    sequence_length=None,
    discrete_genome=None,
    recombination_rate=None,
    gene_conversion_rate=None,
    gene_conversion_tract_length=None,
    population_size=None,
    ploidy=None,
    model=None,
    initial_state=None,
    start_time=None,
    end_time=None,
    record_migrations=None,
    record_full_arg=None,
    num_labels=None,
    random_seed=None,
    num_replicates=None,
    replicate_index=None,
    record_provenance=None,
):
    """
    Simulates an ancestral process described by a given model, demography and
    samples, and return a :class:`tskit.TreeSequence` (or a sequence of
    replicate tree sequences).

    :param samples: The sampled individuals as either an integer, specifying
        the number of individuals to sample in a single-population model;
        or a list of :class:`.SampleSet` objects defining the properties of
        groups of similar samples; or as a mapping in which the keys
        are population identifiers (either an integer ID or string name)
        and the values are the number of samples to take from the corresponding
        population at its default sampling time. It is important to note that
        samples correspond to *individuals* here, and each sampled individual
        is usually associated with :math:`k` sample *nodes* (or genomes) when
        ``ploidy`` = :math:`k`. See :ref:`sec_ancestry_samples` for further details.
        Either ``samples`` or ``initial_state`` must be specified.
    :param demography: The demographic model to simulate, describing the
        extant and ancestral populations, their population sizes and growth
        rates, their migration rates, and demographic events affecting the
        populations over time. See the :ref:`sec_demography` section for
        details on how to specify demographic models and
        :ref:`sec_ancestry_samples` for details on how to specify the
        populations that samples are drawn from. If not specified (or None) we
        default to a single population with constant size 1
        (see also the ``population_size`` parameter).
    :param int ploidy: The number of monoploid genomes per sample individual
        (Default=2). See :ref:`sec_ancestry_ploidy` for usage examples.
    :param float sequence_length: The length of the genome sequence to simulate.
        See :ref:`sec_ancestry_genome_length` for usage examples
        for this parameter and how it interacts with other parameters.
    :param bool discrete_genome: If True (the default) simulation occurs
        in discrete genome coordinates such that recombination and
        gene conversion breakpoints always occur at integer positions.
        Thus, multiple (e.g.) recombinations can occur at the same
        genome position. If ``discrete_genome`` is False simulations
        are performed using continuous genome coordinates. In this
        case multiple events at precisely the same genome location are very
        unlikely (but technically possible).
        See :ref:`sec_ancestry_discrete_genome` for usage examples.
    :param recombination_rate: The rate of recombination along the sequence;
        can be either a single value (specifying a single rate over the entire
        sequence) or an instance of :class:`RateMap`.
        See :ref:`sec_ancestry_recombination` for usage examples
        for this parameter and how it interacts with other parameters.
    :param gene_conversion_rate: The rate of gene conversion along the sequence;
        can be a single value (specifying a single rate over the entire
        sequence). Currently an instance of :class:`RateMap` is not supported.
        If provided, a value for ``gene_conversion_tract_length`` must also be
        specified. See :ref:`sec_ancestry_gene_conversion` for usage examples
        for this parameter and how it interacts with other parameters.
    :param gene_conversion_tract_length: The mean length of the gene conversion
        tracts. For discrete genomes the tract lengths are geometrically
        distributed with mean ``gene_conversion_tract_length``, which must be
        greater than or equal to 1. For continuous genomes the tract lengths are
        exponentially distributed with mean ``gene_conversion_tract_length``,
        which must be larger than 0.
    :param population_size: The size of the default single population
        :class:`.Demography`. If not specified, defaults to 1. Cannot be specified
        along with the ``demography`` parameter. See the :ref:`sec_demography`
        section for more details on demographic models and population sizes
        and the :ref:`sec_ancestry_population_size` section for usage examples.
    :param int random_seed: The random seed. If this is not specified or `None`,
        a high-quality random seed will be automatically generated. Valid random
        seeds must be between 1 and :math:`2^{32} - 1`.
        See :ref:`sec_ancestry_random_seed` for usage examples.
    :param int num_replicates: The number of replicates of the specified
        parameters to simulate. If this is not specified or `None`,
        no replication is performed and a :class:`tskit.TreeSequence` object
        returned. If `num_replicates` is provided, the specified
        number of replicates is performed, and an iterator over the
        resulting :class:`tskit.TreeSequence` objects returned.
        See :ref:`sec_ancestry_replication` for examples.
    :param bool record_full_arg: If True, record all intermediate nodes
        arising from common ancestor and recombination events in the output
        tree sequence. This will result in unary nodes (i.e., nodes in marginal
        trees that have only one child). Defaults to False.
        See :ref:`sec_ancestry_full_arg` for examples.
    :param bool record_migrations: If True, record all migration events
        that occur in the :ref:`tskit:sec_migration_table_definition` of
        the output tree sequence. Defaults to False.
        See :ref:`sec_ancestry_record_migrations` for examples.
    :param tskit.TreeSequence initial_state: If specified, initialise the
        simulation from the root segments of this tree sequence and return the
        completed tree sequence. Please see
        :ref:`sec_ancestry_initial_state` for details of the required
        properties of this tree sequence and its interactions with other parameters.
        (Default: None).
    :param float start_time: If specified, set the initial time that the
        simulation starts to this value. If not specified, the start
        time is zero if performing a simulation of a set of samples,
        or is the time of the oldest node if simulating from an
        existing tree sequence (see the ``initial_state`` parameter).
        See :ref:`sec_ancestry_start_time` for examples.
    :param float end_time: If specified, terminate the simulation at the
        specified time. In the returned tree sequence, all rootward paths from
        samples with time < ``end_time`` will end in a node with one child with
        time equal to end_time. Any sample nodes with time >= ``end_time`` will
        also be present in the output tree sequence. If not specified or ``None``,
        run the simulation until all samples have an MRCA at all positions in
        the genome. See :ref:`sec_ancestry_end_time` for examples.
    :param model: The ancestry model to use.
        This can either be a string (e.g., ``"smc_prime"``) or an instance of
        an ancestry model class (e.g., ``msprime.DiscreteTimeWrightFisher()``).
        Please see the :ref:`sec_ancestry_models` section for more details
        on specifying ancestry models.
    :type model: str or .AncestryModel
    :return: The :class:`tskit.TreeSequence` object representing the results
        of the simulation if no replication is performed, or an
        iterator over the independent replicates simulated if the
        `num_replicates` parameter has been used.
    :rtype: :class:`tskit.TreeSequence` or an iterator over
        :class:`tskit.TreeSequence` replicates.
    """
    # Provenance recording defaults to on.
    record_provenance = True if record_provenance is None else record_provenance
    # Validate the replication arguments before consuming the seed, so
    # that errors are raised early.
    replicate_index = _parse_replicate_index(
        random_seed=random_seed,
        num_replicates=num_replicates,
        replicate_index=replicate_index,
    )
    random_seed = _parse_random_seed(random_seed)
    provenance_dict = None
    if record_provenance:
        # The provenance record is built by introspecting this frame's
        # argument values; see _build_provenance.
        frame = inspect.currentframe()
        provenance_dict = _build_provenance("sim_ancestry", random_seed, frame)
    sim = _parse_sim_ancestry(
        samples=samples,
        sequence_length=sequence_length,
        recombination_rate=recombination_rate,
        gene_conversion_rate=gene_conversion_rate,
        gene_conversion_tract_length=gene_conversion_tract_length,
        discrete_genome=discrete_genome,
        population_size=population_size,
        demography=demography,
        ploidy=ploidy,
        model=model,
        initial_state=initial_state,
        start_time=start_time,
        end_time=end_time,
        record_migrations=record_migrations,
        record_full_arg=record_full_arg,
        num_labels=num_labels,
        random_seed=random_seed,
    )
    return _wrap_replicates(
        sim,
        num_replicates=num_replicates,
        replicate_index=replicate_index,
        provenance_dict=provenance_dict,
    )
class Simulator(_msprime.Simulator):
"""
Class to simulate trees under a variety of population models.
Note: this class is not intended to be instantiated directly
and is only for internal library use. The interface may change
arbitrarily between versions.
"""
    def __init__(
        self,
        *,
        tables,
        recombination_map,
        gene_conversion_map,
        gene_conversion_tract_length,
        discrete_genome,
        ploidy,
        demography,
        model_change_events,
        random_generator,
        model=None,
        store_migrations=False,
        store_full_arg=False,
        start_time=None,
        end_time=None,
        num_labels=None,
    ):
        # We always need at least n segments, so no point in making
        # allocation any smaller than this.
        # NOTE(review): len(tables.nodes) counts all nodes, not just sample
        # nodes; presumably an upper bound is fine since it is only used to
        # size the allocation blocks below — confirm.
        num_samples = len(tables.nodes)
        block_size = 64 * 1024
        segment_block_size = max(block_size, num_samples)
        avl_node_block_size = block_size
        node_mapping_block_size = block_size
        if num_labels is None:
            # Two labels are needed for sweep models; one otherwise
            # (see _choose_num_labels).
            num_labels = self._choose_num_labels(model, model_change_events)
        # Now, convert the high-level values into their low-level
        # counterparts.
        ll_simulation_model = model.get_ll_representation()
        ll_population_configuration = [pop.asdict() for pop in demography.populations]
        ll_demographic_events = [
            event.get_ll_representation() for event in demography.events
        ]
        ll_recomb_map = recombination_map.asdict()
        ll_tables = _msprime.LightweightTableCollection(tables.sequence_length)
        ll_tables.fromdict(tables.asdict())
        # FIXME support arbitrary gene conversion maps.
        # https://github.com/tskit-dev/msprime/issues/1212
        # Until then, only uniform (single-interval) maps are accepted here.
        assert len(gene_conversion_map.rate) == 1
        gene_conversion_rate = gene_conversion_map.rate[0]
        # -1 is the sentinel for "start time not specified".
        start_time = -1 if start_time is None else start_time
        super().__init__(
            tables=ll_tables,
            recombination_map=ll_recomb_map,
            start_time=start_time,
            random_generator=random_generator,
            model=ll_simulation_model,
            migration_matrix=demography.migration_matrix,
            population_configuration=ll_population_configuration,
            demographic_events=ll_demographic_events,
            store_migrations=store_migrations,
            store_full_arg=store_full_arg,
            num_labels=num_labels,
            segment_block_size=segment_block_size,
            avl_node_block_size=avl_node_block_size,
            node_mapping_block_size=node_mapping_block_size,
            gene_conversion_rate=gene_conversion_rate,
            gene_conversion_tract_length=gene_conversion_tract_length,
            discrete_genome=discrete_genome,
            ploidy=ploidy,
        )
        # highlevel attributes used externally that have no lowlevel equivalent
        self.end_time = end_time
        self.model_change_events = model_change_events
        self.demography = demography
        # Temporary, until we add the low-level infrastructure for the gc map
        # when we'll take the same approach as the recombination map.
        self.gene_conversion_map = gene_conversion_map
def copy_tables(self):
"""
Returns a copy of the underlying table collection. This is useful
for testing and avoids using the LightweightTableCollection object,
which is returned by self.tables.
"""
return tskit.TableCollection.fromdict(self.tables.asdict())
@property
def sample_configuration(self):
    """
    The number of sample nodes in each population, as a list indexed
    by population id.
    """
    tables = self.copy_tables()
    counts = [0] * len(tables.populations)
    for node in tables.nodes:
        if node.flags & tskit.NODE_IS_SAMPLE:
            counts[node.population] += 1
    return counts
@property
def recombination_map(self):
    """The recombination map as a high-level ``intervals.RateMap``."""
    ll_map = super().recombination_map
    return intervals.RateMap(**ll_map)
def _choose_num_labels(self, model, model_change_events):
    """
    Choose the number of lineage labels needed for the models that will
    run in this simulation: sweep models need two labels, everything
    else needs one.
    """
    all_models = [model] + [event.model for event in model_change_events]
    uses_sweep = any(isinstance(m, SweepGenicSelection) for m in all_models)
    return 2 if uses_sweep else 1
def _run_until(self, end_time, event_chunk=None, debug_func=None):
    """
    Run the current model until ``end_time``, processing low-level events
    in chunks so that control periodically returns to Python.

    ``debug_func``, if given, is called with this simulator after each
    chunk that hits the event limit.
    """
    # A large default chunk keeps the Python/C round-trip overhead low.
    # Something more sophisticated could tune the chunk size so each
    # slice takes roughly a fixed wall-clock time (say, 10 seconds).
    chunk = 10 ** 4 if event_chunk is None else event_chunk
    if chunk <= 0:
        raise ValueError("Must have at least 1 event per chunk")
    logger.info("Running model %s until max time: %f", self.model, end_time)
    status = super().run(end_time, chunk)
    while status == _msprime.EXIT_MAX_EVENTS:
        logger.debug("time=%g ancestors=%d", self.time, self.num_ancestors)
        if debug_func is not None:
            debug_func(self)
        status = super().run(end_time, chunk)
def run(self, event_chunk=None, debug_func=None):
    """
    Runs the simulation until complete coalescence has occurred.

    :param event_chunk: Number of low-level events to process between
        returns to Python; passed through to ``_run_until``.
    :param debug_func: Optional callable invoked with this simulator
        after each event chunk.
    """
    # Run each scheduled model in turn, switching the low-level
    # simulator over at the computed change times.
    for event in self.model_change_events:
        # If the event time is a callable, we compute the end_time
        # as a function of the current simulation time.
        current_time = self.time
        model_start_time = event.time
        if callable(event.time):
            model_start_time = event.time(current_time)
        # If model_start_time is None, we run until the current
        # model completes. Note that when event.time is a callable
        # it can also return None for this behaviour.
        if model_start_time is None:
            model_start_time = np.inf
        if model_start_time < current_time:
            raise ValueError(
                "Model start times out of order or not computed correctly. "
                f"current time = {current_time}; start_time = {model_start_time}"
            )
        self._run_until(model_start_time, event_chunk, debug_func)
        logger.info(
            "model %s ended at time=%g nodes=%d edges=%d",
            self.model,
            self.time,
            self.num_nodes,
            self.num_edges,
        )
        # Some models cannot stop mid-way; if the simulation overshot
        # the requested change time the change cannot be applied.
        if self.time > model_start_time:
            raise NotImplementedError(
                "The previously running model does not support ending early "
                "and the requested model change cannot be performed. Please "
                "open an issue on GitHub if this functionality is something "
                "you require"
            )
        # Switch the low-level simulator to the new model.
        ll_new_model = event.model.get_ll_representation()
        self.model = ll_new_model
    # Run the final model to completion (or to the requested end_time).
    end_time = np.inf if self.end_time is None else self.end_time
    self._run_until(end_time, event_chunk, debug_func)
    self.finalise_tables()
    logger.info(
        "Completed at time=%g nodes=%d edges=%d",
        self.time,
        self.num_nodes,
        self.num_edges,
    )
def run_replicates(
    self,
    num_replicates,
    *,
    mutation_rate=None,
    provenance_dict=None,
):
    """
    Sequentially yield the specified number of simulation replicates.

    :param int num_replicates: Number of tree sequences to yield.
    :param mutation_rate: If not None, apply simple mutations at this
        rate to each replicate's tables before yielding it.
    :param provenance_dict: If not None, a provenance record that is
        JSON-encoded once and attached to every replicate, with the
        replicate index substituted in.
    """
    encoded_provenance = None
    # The JSON is modified for each replicate to insert the replicate number.
    # To avoid repeatedly encoding the same JSON (which can take milliseconds)
    # we insert a replaceable string.
    placeholder = "@@_REPLICATE_INDEX_@@"
    if provenance_dict is not None:
        provenance_dict["parameters"]["replicate_index"] = placeholder
        encoded_provenance = provenance.json_encode_provenance(
            provenance_dict, num_replicates
        )
    for replicate_index in range(num_replicates):
        self.run()
        if mutation_rate is not None:
            # This is only called from simulate() or the ms interface,
            # so does not need any further parameters.
            mutations._simple_mutate(
                tables=self.tables,
                random_generator=self.random_generator,
                sequence_length=self.sequence_length,
                rate=mutation_rate,
            )
        # Materialise a full TableCollection for this replicate.
        tables = tskit.TableCollection.fromdict(self.tables.asdict())
        replicate_provenance = None
        if encoded_provenance is not None:
            # The placeholder is quoted in the encoded JSON; replace it
            # with the (unquoted) integer replicate index.
            replicate_provenance = encoded_provenance.replace(
                f'"{placeholder}"', str(replicate_index)
            )
            tables.provenances.add_row(replicate_provenance)
        yield tables.tree_sequence()
        # Reset the low-level simulator state for the next replicate.
        self.reset()
@dataclasses.dataclass
class SampleSet:
    """
    A group of samples drawn from a single population at a single time.

    NOTE(review): upstream marks this class "TODO document"; the field
    descriptions below are inferred from the field names and defaults and
    should be confirmed against the sampling code.
    """
    # Number of sample individuals in this set.
    num_samples: int
    # Population id or name; None presumably selects a default population
    # — confirm against the caller.
    population: Union[int, str, None] = None
    # Sampling time in generations; None presumably means the population's
    # default sampling time.
    time: Union[float, None] = None
    # Ploidy of these samples; None presumably means the simulation-level
    # ploidy applies.
    ploidy: Union[int, None] = None

    def asdict(self):
        """Return the fields as a plain dict."""
        return dataclasses.asdict(self)
# TODO update the documentation here to state that using this class is
# deprecated, and users should use the model=[...] notation instead.
@dataclasses.dataclass
class AncestryModelChange:
    """
    An event representing a change of underlying :ref:`ancestry model
    <sec_ancestry_models>`.

    :param float time: The time at which the ancestry model changes
        to the new model, in generations. After this time, all internal
        tree nodes, edges and migrations are the result of the new model.
        If time is set to None (the default), the model change will occur
        immediately after the previous model has completed. If time is a
        callable, the time at which the model changes is the result
        of calling this function with the time that the previous model
        started with as a parameter.
    :param model: The new ancestry model to use.
        This can either be a string (e.g., ``"smc_prime"``) or an instance of
        an ancestry model class (e.g, ``msprime.DiscreteTimeWrightFisher()``.
        Please see the :ref:`sec_ancestry_models` section for more details
        on specifying these models. If this is None (the default) the model is
        changed to the standard coalescent.
    :type model: str or .AncestryModel
    """
    # See the docstring above: a float, a callable, or None.
    time: Union[float, None] = None
    # A model name string, an AncestryModel instance, or None (standard
    # coalescent).
    model: Union[str, AncestryModel, None] = None

    def asdict(self):
        """Return the event's fields as a plain dict."""
        return dataclasses.asdict(self)
class SimulationModelChange(AncestryModelChange):
    """
    Deprecated 0.x way to describe an :class:`AncestryModelChange`.
    """
    # Retained purely as a backwards-compatible alias; adds no behaviour
    # of its own.
@dataclasses.dataclass
class AncestryModel:
    """
    Abstract superclass of all ancestry models.
    """
    # Identifier for this model in the low-level interface; set by each
    # concrete subclass.
    name: ClassVar[str]

    def get_ll_representation(self):
        """Return the low-level dict representation of this model."""
        return {"name": self.name}

    def asdict(self):
        """Return the model's parameters as a plain dict."""
        return dataclasses.asdict(self)
class StandardCoalescent(AncestryModel):
    """
    The classical coalescent with recombination model (i.e., Hudson's
    algorithm). The string ``"hudson"`` can be used to refer to this model.

    This is the default simulation model.
    """
    # Low-level model identifier.
    name = "hudson"
class SmcApproxCoalescent(AncestryModel):
    """
    The original SMC model defined by McVean and Cardin. This
    model is implemented using a naive rejection sampling approach
    and so it may not be any more efficient to simulate than the
    standard Hudson model.

    The string ``"smc"`` can be used to refer to this model.
    """
    # Low-level model identifier.
    name = "smc"
class SmcPrimeApproxCoalescent(AncestryModel):
    """
    The SMC' model defined by Marjoram and Wall as an improvement on the
    original SMC. The model is implemented using a naive rejection sampling
    approach and so it may not be any more efficient to simulate than the
    standard Hudson model.

    The string ``"smc_prime"`` can be used to refer to this model.
    """
    # Low-level model identifier.
    name = "smc_prime"
class DiscreteTimeWrightFisher(AncestryModel):
    """
    A discrete backwards-time Wright-Fisher model, with diploid back-and-forth
    recombination. The string ``"dtwf"`` can be used to refer to this model.

    Wright-Fisher simulations are performed very similarly to coalescent
    simulations, with all parameters denoting the same quantities in both
    models. Because events occur at discrete times however, the order in which
    they occur matters. Each generation consists of the following ordered
    events:

    - Migration events. As in the Hudson coalescent, these move single extant
      lineages between populations. Because migration events occur before
      lineages choose parents, migrant lineages choose parents from their new
      population in the same generation.
    - Demographic events. All events with `previous_generation < event_time <=
      current_generation` are carried out here.
    - Lineages draw parents. Each (monoploid) extant lineage draws a parent
      from their current population.
    - Diploid recombination. Each parent is diploid, so all child lineages
      recombine back-and-forth into the same two parental genome copies. These
      become two independent lineages in the next generation.
    - Historical sampling events. All historical samples with
      `previous_generation < sample_time <= current_generation` are inserted.
    """
    # Low-level model identifier.
    name = "dtwf"
class WrightFisherPedigree(AncestryModel):
    # TODO Complete documentation.
    # TODO Since the pedigree is a necessary parameter for this simulation
    # model and it cannot be used with any other model we should make it a
    # parametric model where the parameter is the pedigree. This would
    # streamline a bunch of logic.
    """
    Backwards-time simulations through a pre-specified pedigree, with diploid
    individuals and back-and-forth recombination. The string ``"wf_ped"`` can
    be used to refer to this model.
    """
    # Low-level model identifier.
    name = "wf_ped"
class ParametricAncestryModel(AncestryModel):
    """
    The superclass of ancestry models that require extra parameters.
    """

    def get_ll_representation(self):
        """
        Return the low-level dict for this model: the model name from the
        base class merged with this instance's dataclass fields.
        """
        return {**super().get_ll_representation(), **self.__dict__}
@dataclasses.dataclass
class BetaCoalescent(ParametricAncestryModel):
    """
    A Lambda-coalescent with multiple mergers in the haploid cases, or a
    Xi-coalescent with simultaneous multiple mergers in the polyploid case.

    There are two main differences between the Beta-coalescent and the
    standard coalescent. Firstly, the number of lineages that take part in each
    common ancestor event is random, with distribution determined by moments of
    the :math:`Beta(2 - \\alpha, \\alpha)`-distribution. In particular, when there
    are :math:`n` lineages, each set of :math:`k \\leq n` of them participates in a
    common ancestor event at rate

    .. math::
        \\frac{1}{B(2 - \\alpha, \\alpha)}
        \\int_0^1 x^{k - \\alpha - 1} (1 - x)^{n - k + \\alpha - 1} dx,

    where :math:`B(2 - \\alpha, \\alpha)` is the Beta-function.

    If ploidy = 1, then all participating lineages merge into one common ancestor,
    corresponding to haploid, single-parent reproduction.

    If ploidy = :math:`p > 1`, all participating lineages split randomly into
    :math:`2 p` groups, corresponding to two-parent reproduction with :math:`p` copies
    of each chromosome per parent. All lineages within each group merge simultaneously.

    Secondly, the number of generations between common ancestor events predicted by the
    Beta-coalescent is proportional to :math:`N^{\\alpha - 1}`, where :math:`N` is
    the population size. Specifically, the mean number of generations until
    two lineages undergo a common ancestor event is

    .. math::
        G = \\frac{m^{\\alpha} N^{\\alpha - 1}}{\\alpha B(2 - \\alpha, \\alpha)},

    if ploidy = 1, and

    .. math::
        G = \\frac{2 p m^{\\alpha} (N / 2)^{\\alpha - 1}}
        {\\alpha B(2 - \\alpha, \\alpha)},

    if ploidy = :math:`p > 1`, where :math:`m` is the mean number of juveniles per
    family given by

    .. math::
        m = 2 + \\frac{2^{\\alpha}}{3^{\\alpha - 1} (\\alpha - 1)},

    if ploidy > 1, and

    .. math::
        m = 1 + \\frac{1}{2^{\\alpha - 1} (\\alpha - 1)},

    if ploidy = 1.

    In the polyploid case we divide the population size :math:`N` by two
    because we assume the :math:`N` polyploid individuals form :math:`N / 2`
    two-parent families in which reproduction takes place.

    .. warning::
        The number of generations between common ancestor events :math:`G` depends
        both on the population size :math:`N` and :math:`\\alpha`,
        and can be dramatically shorter than in the case of the
        standard coalescent. For :math:`\\alpha \\approx 1` that is due to
        insensitivity of :math:`G` to :math:`N` --- see
        :ref:`sec_ancestry_models_multiple_mergers` for an illustration.
        For :math:`\\alpha \\approx 2`, :math:`G` is almost linear in
        :math:`N`, but can nevertheless be small because
        :math:`B(2 - \\alpha, \\alpha) \\rightarrow \\infty` as
        :math:`\\alpha \\rightarrow 2`. As a result, population sizes
        must often be many orders of magnitude larger than census population sizes
        to obtain realistic amounts of diversity in simulated samples.

    See `Schweinsberg (2003)
    <https://www.sciencedirect.com/science/article/pii/S0304414903000280>`_
    for the derivation of the common ancestor event rate,
    as well as the number of generations between common ancestor events.
    Note however that Schweinsberg (2003) only covers the haploid case.
    For details of the diploid extension, see
    `Blath et al. (2013) <https://www.ncbi.nlm.nih.gov/pmc/articles/PMC3527250/>`_,
    and `Birkner et al. (2018) <https://projecteuclid.org/euclid.ejp/1527818427>`_
    for a diploid version of the Schweinsberg (2003) model specifically.
    The general polyploid model is analogous to the diploid case, with
    :math:`2 p` available copies of parental chromosomes per common ancestor event,
    and hence up to :math:`2 p` simultaneous mergers.

    :param float alpha: Determines the degree of skewness in the family size
        distribution, and must satisfy :math:`1 < \\alpha < 2`. Smaller values of
        :math:`\\alpha` correspond to greater skewness, and :math:`\\alpha = 2`
        would coincide with the standard coalescent.
    :param float truncation_point: The maximum number of juveniles :math:`K` born to
        one family as a fraction of the population size :math:`N`. Must satisfy
        :math:`0 < K \\leq \\infty`. Determines the maximum fraction of the population
        replaced by offspring in one reproduction event, :math:`\\tau`, via
        :math:`\\tau = K / (K + m)`, where :math:`m` is the mean juvenile number
        above. The default is :math:`K = \\infty`, which corresponds to the standard
        Beta-coalescent with :math:`\\tau = 1`. When :math:`K < \\infty`, the number of
        lineages participating in a common ancestor event is determined by moments
        of the Beta:math:`(2 - \\alpha, \\alpha)` distribution conditioned on not
        exceeding :math:`\\tau`, and the Beta-function in the expression
        for :math:`G` is replaced by the incomplete Beta-function
        :math:`B(\\tau; 2 - \\alpha, \\alpha)`.
    """
    # Low-level model identifier.
    name = "beta"
    alpha: Union[float, None] = None
    truncation_point: float = sys.float_info.max
@dataclasses.dataclass
class DiracCoalescent(ParametricAncestryModel):
    """
    A Lambda-coalescent with multiple mergers in the haploid cases, or a
    Xi-coalescent with simultaneous multiple mergers in the polyploid case.

    The Dirac-coalescent is an implementation of the model of
    `Blath et al. (2013) <https://www.ncbi.nlm.nih.gov/pmc/articles/PMC3527250/>`_.

    The simulation proceeds similarly to the standard coalescent.
    In addition to binary common ancestor events at rate :math:`n (n - 1) / 2` when
    there are :math:`n` lineages, potential multiple merger events take place
    at rate :math:`c > 0`. Each lineage participates in each multiple merger
    event independently with probability :math:`0 < \\psi \\leq 1`.

    If ploidy = 1, then all participating lineages merge into one common ancestor,
    corresponding to haploid, single-parent reproduction.

    If ploidy = :math:`p > 1`, all participating lineages split randomly into
    :math:`2 p` groups, corresponding to two-parent reproduction with :math:`p` copies
    of each chromosome per parent. All lineages within each group merge simultaneously.

    .. warning::
        The Dirac-coalescent is obtained as a scaling limit of Moran models,
        rather than Wright-Fisher models. As a consequence, the number of generations
        between coalescence events is proportional to :math:`N^2`,
        rather than :math:`N` generations as in the standard coalescent.
        See :ref:`sec_ancestry_models_multiple_mergers` for an illustration
        of how this affects simulation output in practice.

    :param float c: Determines the rate of potential multiple merger events.
        We require :math:`c > 0`.
    :param float psi: Determines the fraction of the population replaced by
        offspring in one large reproduction event, i.e. one reproduction event
        giving rise to potential multiple mergers when viewed backwards in time.
        We require :math:`0 < \\psi \\leq 1`.
    """
    # Low-level model identifier.
    name = "dirac"
    psi: Union[float, None] = None
    c: Union[float, None] = None
@dataclasses.dataclass
class SweepGenicSelection(ParametricAncestryModel):
    """
    A selective sweep that has occurred in the history of the sample.
    This will lead to a burst of rapid coalescence near the selected site.

    The strength of selection during the sweep is determined by the
    parameter :math:`s`. Here we define s such that the
    fitness of the three genotypes at our beneficial locus are
    :math:`W_{bb}=1`, :math:`W_{Bb}=1 + s/2`, :math:`W_{BB}=1 + s`.
    Thus fitness of the heterozygote is intermediate to the
    two homozygotes.

    The model is one of a structured coalescent where selective backgrounds
    are defined as in
    `Braverman et al. (1995) <https://www.ncbi.nlm.nih.gov/pmc/articles/PMC1206652/>`_
    The implementation details here follow closely to those in discoal,
    `Kern and Schrider (2016)
    <https://www.ncbi.nlm.nih.gov/pmc/articles/PMC5167068/>`_

    See :ref:`sec_ancestry_models_selective_sweeps` for a basic usage and example and
    :ref:`sec_ancestry_models_sweep_types` for details on how to specify different
    types of sweeps.

    .. warning::
        If the effective strength of selection (:math:`2Ns`) is sufficiently large
        the time difference between successive events can be smaller than
        the finite precision available, leading to zero length branches
        in the output trees. As this is not allowed by tskit, an error
        will be raised.

    .. warning::
        Currently models with more than one population and a selective sweep
        are not implemented. Further population size change during the sweep
        is not yet possible in msprime.

    :param float position: the location of the beneficial allele along the
        chromosome.
    :param float start_frequency: population frequency of the beneficial
        allele at the start of the selective sweep. E.g., for a *de novo*
        allele in a diploid population of size N, start frequency would be
        :math:`1/2N`.
    :param float end_frequency: population frequency of the beneficial
        allele at the end of the selective sweep.
    :param float s: :math:`s` is the selection coefficient of the beneficial mutation.
    :param float dt: dt is the small increment of time for stepping through
        the sweep phase of the model. a good rule of thumb is for this to be
        approximately :math:`1/40N` or smaller.
    """
    # Low-level model identifier.
    name = "sweep_genic_selection"
    position: Union[float, None] = None
    start_frequency: Union[float, None] = None
    end_frequency: Union[float, None] = None
    s: Union[float, None] = None
    dt: Union[float, None] = None
|
jeromekelleher/msprime
|
msprime/ancestry.py
|
Python
|
gpl-3.0
| 74,467
|
[
"DIRAC"
] |
d1ab078ed1d32b8e43cffbc6b6c4dcf822859a6255b454fc2ab28fbe2f0f926f
|
import pandas as pd
import nmrpystar
import mdtraj as md

# Compare SHIFTX2-predicted chemical shifts from the beginning and end of a
# T4 lysozyme (1am7) trajectory against experimental shifts parsed from a
# BMRB NMR-STAR file (16664.str).

# First and last 50 frames of the same trajectory.
t0 = md.load("./Trajectories/1am7_1.dcd", top="./1am7_fixed.pdb")[0:50]
t1 = md.load("./Trajectories/1am7_1.dcd", top="./1am7_fixed.pdb")[-50:]
prediction0 = md.nmr.chemical_shifts_shiftx2(t0).mean(1)  # Average over time dimensions
prediction1 = md.nmr.chemical_shifts_shiftx2(t1).mean(1)  # Average over time dimensions

# Parse the experimental assigned chemical shifts from the NMR-STAR file.
parsed = nmrpystar.parse(open("./16664.str").read())
print(parsed.status)
q = parsed.value.saves["assigned_chem_shift_list_1"].loops[1]
x = pd.DataFrame(q.rows, columns=q.keys)
x = x[["Atom_chem_shift.Seq_ID", "Atom_chem_shift.Atom_ID", "Atom_chem_shift.Val"]]
x.rename(columns={"Atom_chem_shift.Seq_ID": "resSeq", "Atom_chem_shift.Atom_ID": "name", "Atom_chem_shift.Val": "value"}, inplace=True)
# Need to make dtypes match to do eventual comparison.
x["resSeq"] = x["resSeq"].astype('int')
x["value"] = x["value"].astype('float')

# Index experimental values by (residue, atom name) to align with predictions.
expt = x.set_index(["resSeq", "name"]).value
prediction0.name = "value"
prediction1.name = "value"

# Per-atom-name RMS deviation between experiment and each prediction;
# dropna() discards atoms missing from either side.
delta0 = (expt - prediction0).dropna()
rms0 = (delta0 ** 2.).reset_index().groupby("name").value.mean() ** 0.5
delta1 = (expt - prediction1).dropna()
rms1 = (delta1 ** 2.).reset_index().groupby("name").value.mean() ** 0.5
|
hainm/open-forcefield-group
|
nmr/code/compare_shifts_T4_test.py
|
Python
|
gpl-2.0
| 1,238
|
[
"MDTraj"
] |
cd8339b37bbcf30079aa864c19dca216197d8f14456688da4d00c6d6726c6f29
|
# Version: 0.15+dev
"""The Versioneer - like a rocketeer, but for versions.
The Versioneer
==============
* like a rocketeer, but for versions!
* https://github.com/warner/python-versioneer
* Brian Warner
* License: Public Domain
* Compatible With: python2.6, 2.7, 3.2, 3.3, 3.4, and pypy
* [![Latest Version]
(https://pypip.in/version/versioneer/badge.svg?style=flat)
](https://pypi.python.org/pypi/versioneer/)
* [![Build Status]
(https://travis-ci.org/warner/python-versioneer.png?branch=master)
](https://travis-ci.org/warner/python-versioneer)
This is a tool for managing a recorded version number in distutils-based
python projects. The goal is to remove the tedious and error-prone "update
the embedded version string" step from your release process. Making a new
release should be as easy as recording a new tag in your version-control
system, and maybe making new tarballs.
## Quick Install
* `pip install versioneer` to somewhere to your $PATH
* add a `[versioneer]` section to your setup.cfg (see below)
* run `versioneer install` in your source tree, commit the results
## Version Identifiers
Source trees come from a variety of places:
* a version-control system checkout (mostly used by developers)
* a nightly tarball, produced by build automation
* a snapshot tarball, produced by a web-based VCS browser, like github's
"tarball from tag" feature
* a release tarball, produced by "setup.py sdist", distributed through PyPI
Within each source tree, the version identifier (either a string or a number,
this tool is format-agnostic) can come from a variety of places:
* ask the VCS tool itself, e.g. "git describe" (for checkouts), which knows
about recent "tags" and an absolute revision-id
* the name of the directory into which the tarball was unpacked
* an expanded VCS keyword ($Id$, etc)
* a `_version.py` created by some earlier build step
For released software, the version identifier is closely related to a VCS
tag. Some projects use tag names that include more than just the version
string (e.g. "myproject-1.2" instead of just "1.2"), in which case the tool
needs to strip the tag prefix to extract the version identifier. For
unreleased software (between tags), the version identifier should provide
enough information to help developers recreate the same tree, while also
giving them an idea of roughly how old the tree is (after version 1.2, before
version 1.3). Many VCS systems can report a description that captures this,
for example `git describe --tags --dirty --always` reports things like
"0.7-1-g574ab98-dirty" to indicate that the checkout is one revision past the
0.7 tag, has a unique revision id of "574ab98", and is "dirty" (it has
uncommitted changes).
The version identifier is used for multiple purposes:
* to allow the module to self-identify its version: `myproject.__version__`
* to choose a name and prefix for a 'setup.py sdist' tarball
## Theory of Operation
Versioneer works by adding a special `_version.py` file into your source
tree, where your `__init__.py` can import it. This `_version.py` knows how to
dynamically ask the VCS tool for version information at import time.
`_version.py` also contains `$Revision$` markers, and the installation
process marks `_version.py` to have this marker rewritten with a tag name
during the `git archive` command. As a result, generated tarballs will
contain enough information to get the proper version.
To allow `setup.py` to compute a version too, a `versioneer.py` is added to
the top level of your source tree, next to `setup.py` and the `setup.cfg`
that configures it. This overrides several distutils/setuptools commands to
compute the version when invoked, and changes `setup.py build` and `setup.py
sdist` to replace `_version.py` with a small static file that contains just
the generated version data.
## Installation
First, decide on values for the following configuration variables:
* `VCS`: the version control system you use. Currently accepts "git".
* `style`: the style of version string to be produced. See "Styles" below for
details. Defaults to "pep440", which looks like
`TAG[+DISTANCE.gSHORTHASH[.dirty]]`.
* `versionfile_source`:
A project-relative pathname into which the generated version strings should
be written. This is usually a `_version.py` next to your project's main
`__init__.py` file, so it can be imported at runtime. If your project uses
`src/myproject/__init__.py`, this should be `src/myproject/_version.py`.
This file should be checked in to your VCS as usual: the copy created below
by `setup.py setup_versioneer` will include code that parses expanded VCS
keywords in generated tarballs. The 'build' and 'sdist' commands will
replace it with a copy that has just the calculated version string.
This must be set even if your project does not have any modules (and will
therefore never import `_version.py`), since "setup.py sdist" -based trees
still need somewhere to record the pre-calculated version strings. Anywhere
in the source tree should do. If there is a `__init__.py` next to your
`_version.py`, the `setup.py setup_versioneer` command (described below)
will append some `__version__`-setting assignments, if they aren't already
present.
* `versionfile_build`:
Like `versionfile_source`, but relative to the build directory instead of
the source directory. These will differ when your setup.py uses
'package_dir='. If you have `package_dir={'myproject': 'src/myproject'}`,
then you will probably have `versionfile_build='myproject/_version.py'` and
`versionfile_source='src/myproject/_version.py'`.
If this is set to None, then `setup.py build` will not attempt to rewrite
any `_version.py` in the built tree. If your project does not have any
libraries (e.g. if it only builds a script), then you should use
`versionfile_build = None` and override `distutils.command.build_scripts`
to explicitly insert a copy of `versioneer.get_version()` into your
generated script.
* `tag_prefix`:
a string, like 'PROJECTNAME-', which appears at the start of all VCS tags.
If your tags look like 'myproject-1.2.0', then you should use
tag_prefix='myproject-'. If you use unprefixed tags like '1.2.0', this
should be an empty string, using either `tag_prefix=` or `tag_prefix=''`.
* `parentdir_prefix`:
an optional string, frequently the same as tag_prefix, which appears at the
start of all unpacked tarball filenames. If your tarball unpacks into
'myproject-1.2.0', this should be 'myproject-'. To disable this feature,
just omit the field from your `setup.cfg`.
This tool provides one script, named `versioneer`. That script has one mode,
"install", which writes a copy of `versioneer.py` into the current directory
and runs `versioneer.py setup` to finish the installation.
To versioneer-enable your project:
* 1: Modify your `setup.cfg`, adding a section named `[versioneer]` and
populating it with the configuration values you decided earlier (note that
the option names are not case-sensitive):
````
[versioneer]
VCS = git
style = pep440
versionfile_source = src/myproject/_version.py
versionfile_build = myproject/_version.py
tag_prefix =
parentdir_prefix = myproject-
````
* 2: Run `versioneer install`. This will do the following:
* copy `versioneer.py` into the top of your source tree
* create `_version.py` in the right place (`versionfile_source`)
* modify your `__init__.py` (if one exists next to `_version.py`) to define
`__version__` (by calling a function from `_version.py`)
* modify your `MANIFEST.in` to include both `versioneer.py` and the
generated `_version.py` in sdist tarballs
`versioneer install` will complain about any problems it finds with your
`setup.py` or `setup.cfg`. Run it multiple times until you have fixed all
the problems.
* 3: add a `import versioneer` to your setup.py, and add the following
arguments to the setup() call:
version=versioneer.get_version(),
cmdclass=versioneer.get_cmdclass(),
* 4: commit these changes to your VCS. To make sure you won't forget,
`versioneer install` will mark everything it touched for addition using
`git add`. Don't forget to add `setup.py` and `setup.cfg` too.
## Post-Installation Usage
Once established, all uses of your tree from a VCS checkout should get the
current version string. All generated tarballs should include an embedded
version string (so users who unpack them will not need a VCS tool installed).
If you distribute your project through PyPI, then the release process should
boil down to two steps:
* 1: git tag 1.0
* 2: python setup.py register sdist upload
If you distribute it through github (i.e. users use github to generate
tarballs with `git archive`), the process is:
* 1: git tag 1.0
* 2: git push; git push --tags
Versioneer will report "0+untagged.NUMCOMMITS.gHASH" until your tree has at
least one tag in its history.
## Version-String Flavors
Code which uses Versioneer can learn about its version string at runtime by
importing `_version` from your main `__init__.py` file and running the
`get_versions()` function. From the "outside" (e.g. in `setup.py`), you can
import the top-level `versioneer.py` and run `get_versions()`.
Both functions return a dictionary with different flavors of version
information:
* `['version']`: A condensed version string, rendered using the selected
style. This is the most commonly used value for the project's version
string. The default "pep440" style yields strings like `0.11`,
`0.11+2.g1076c97`, or `0.11+2.g1076c97.dirty`. See the "Styles" section
below for alternative styles.
* `['full-revisionid']`: detailed revision identifier. For Git, this is the
full SHA1 commit id, e.g. "1076c978a8d3cfc70f408fe5974aa6c092c949ac".
* `['dirty']`: a boolean, True if the tree has uncommitted changes. Note that
this is only accurate if run in a VCS checkout, otherwise it is likely to
be False or None
* `['error']`: if the version string could not be computed, this will be set
to a string describing the problem, otherwise it will be None. It may be
useful to throw an exception in setup.py if this is set, to avoid e.g.
creating tarballs with a version string of "unknown".
Some variants are more useful than others. Including `full-revisionid` in a
bug report should allow developers to reconstruct the exact code being tested
(or indicate the presence of local changes that should be shared with the
developers). `version` is suitable for display in an "about" box or a CLI
`--version` output: it can be easily compared against release notes and lists
of bugs fixed in various releases.
The installer adds the following text to your `__init__.py` to place a basic
version in `YOURPROJECT.__version__`:
from ._version import get_versions
__version__ = get_versions()['version']
del get_versions
## Styles
The setup.cfg `style=` configuration controls how the VCS information is
rendered into a version string.
The default style, "pep440", produces a PEP440-compliant string, equal to the
un-prefixed tag name for actual releases, and containing an additional "local
version" section with more detail for in-between builds. For Git, this is
TAG[+DISTANCE.gHEX[.dirty]] , using information from `git describe --tags
--dirty --always`. For example "0.11+2.g1076c97.dirty" indicates that the
tree is like the "1076c97" commit but has uncommitted changes (".dirty"), and
that this commit is two revisions ("+2") beyond the "0.11" tag. For released
software (exactly equal to a known tag), the identifier will only contain the
stripped tag, e.g. "0.11".
Other styles are available. See details.md in the Versioneer source tree for
descriptions.
## Debugging
Versioneer tries to avoid fatal errors: if something goes wrong, it will tend
to return a version of "0+unknown". To investigate the problem, run `setup.py
version`, which will run the version-lookup code in a verbose mode, and will
display the full contents of `get_versions()` (including the `error` string,
which may help identify what went wrong).
## Updating Versioneer
To upgrade your project to a new release of Versioneer, do the following:
* install the new Versioneer (`pip install -U versioneer` or equivalent)
* edit `setup.cfg`, if necessary, to include any new configuration settings
indicated by the release notes
* re-run `versioneer install` in your source tree, to replace
`SRC/_version.py`
* commit any changed files
### Upgrading to 0.15
Starting with this version, Versioneer is configured with a `[versioneer]`
section in your `setup.cfg` file. Earlier versions required the `setup.py` to
set attributes on the `versioneer` module immediately after import. The new
version will refuse to run (raising an exception during import) until you
have provided the necessary `setup.cfg` section.
In addition, the Versioneer package provides an executable named
`versioneer`, and the installation process is driven by running `versioneer
install`. In 0.14 and earlier, the executable was named
`versioneer-installer` and was run without an argument.
### Upgrading to 0.14
0.14 changes the format of the version string. 0.13 and earlier used
hyphen-separated strings like "0.11-2-g1076c97-dirty". 0.14 and beyond use a
plus-separated "local version" section, with dot-separated
components, like "0.11+2.g1076c97". PEP440-strict tools did not like the old
format, but should be ok with the new one.
### Upgrading from 0.11 to 0.12
Nothing special.
### Upgrading from 0.10 to 0.11
You must add a `versioneer.VCS = "git"` to your `setup.py` before re-running
`setup.py setup_versioneer`. This will enable the use of additional
version-control systems (SVN, etc) in the future.
## Future Directions
This tool is designed to make it easily extended to other version-control
systems: all VCS-specific components are in separate directories like
src/git/ . The top-level `versioneer.py` script is assembled from these
components by running make-versioneer.py . In the future, make-versioneer.py
will take a VCS name as an argument, and will construct a version of
`versioneer.py` that is specific to the given VCS. It might also take the
configuration arguments that are currently provided manually during
installation by editing setup.py . Alternatively, it might go the other
direction and include code from all supported VCS systems, reducing the
number of intermediate scripts.
## License
To make Versioneer easier to embed, all its code is dedicated to the public
domain. The `_version.py` that it creates is also in the public domain.
Specifically, both are released under the Creative Commons "Public Domain
Dedication" license (CC0-1.0), as described in
https://creativecommons.org/publicdomain/zero/1.0/ .
"""
from __future__ import print_function
try:
import configparser
except ImportError:
import ConfigParser as configparser
import errno
import json
import os
import re
import subprocess
import sys
class VersioneerConfig:
    """Container for Versioneer configuration parameters."""
    # Attributes are assigned dynamically by get_config_from_root() (in
    # versioneer.py) or get_config() (in the generated _version.py):
    # VCS, style, tag_prefix, parentdir_prefix, versionfile_source,
    # versionfile_build, verbose.
def get_root():
    """Get the project root directory.

    We require that all commands are run from the project root, i.e. the
    directory that contains setup.py, setup.cfg, and versioneer.py .
    """
    def looks_like_root(candidate):
        # A root directory is recognized by setup.py or versioneer.py.
        return (os.path.exists(os.path.join(candidate, "setup.py")) or
                os.path.exists(os.path.join(candidate, "versioneer.py")))

    root = os.path.realpath(os.path.abspath(os.getcwd()))
    if not looks_like_root(root):
        # allow 'python path/to/setup.py COMMAND'
        root = os.path.dirname(os.path.realpath(os.path.abspath(sys.argv[0])))
    if not looks_like_root(root):
        err = ("Versioneer was unable to run the project root directory. "
               "Versioneer requires setup.py to be executed from "
               "its immediate directory (like 'python setup.py COMMAND'), "
               "or in a way that lets it use sys.argv[0] to find the root "
               "(like 'python path/to/setup.py COMMAND').")
        raise VersioneerBadRootError(err)
    try:
        # Certain runtime workflows (setup.py install/develop in a setuptools
        # tree) execute all dependencies in a single python process, so
        # "versioneer" may be imported multiple times, and python's shared
        # module-import table will cache the first one. So we can't use
        # os.path.dirname(__file__), as that would find whichever
        # versioneer.py was first imported, even in later projects.
        me = os.path.realpath(os.path.abspath(__file__))
        versioneer_py = os.path.join(root, "versioneer.py")
        if os.path.splitext(me)[0] != os.path.splitext(versioneer_py)[0]:
            print("Warning: build in %s is using versioneer.py from %s"
                  % (os.path.dirname(me), versioneer_py))
    except NameError:
        # __file__ is absent under some frozen/embedded interpreters.
        pass
    return root
def get_config_from_root(root):
    """Read the project setup.cfg file to determine Versioneer config.

    Returns a VersioneerConfig with VCS (mandatory) plus the optional
    settings (style, versionfile_source/build, tag_prefix,
    parentdir_prefix, verbose), each None when absent from setup.cfg.
    """
    # This might raise EnvironmentError (if setup.cfg is missing), or
    # configparser.NoSectionError (if it lacks a [versioneer] section), or
    # configparser.NoOptionError (if it lacks "VCS="). See the docstring at
    # the top of versioneer.py for instructions on writing your setup.cfg .
    setup_cfg = os.path.join(root, "setup.cfg")
    # SafeConfigParser and readfp() were deprecated in Python 3.2 and
    # removed in Python 3.12; on Python 3, ConfigParser/read_file are the
    # same behavior under the modern names. Keep the old names on Python 2.
    if sys.version_info[0] >= 3:
        parser = configparser.ConfigParser()
        read_file = parser.read_file
    else:
        parser = configparser.SafeConfigParser()
        read_file = parser.readfp
    with open(setup_cfg, "r") as f:
        read_file(f)
    VCS = parser.get("versioneer", "VCS")  # mandatory

    def get(parser, name):
        # Optional setting: return None when the key is absent.
        if parser.has_option("versioneer", name):
            return parser.get("versioneer", name)
        return None
    cfg = VersioneerConfig()
    cfg.VCS = VCS
    cfg.style = get(parser, "style") or ""
    cfg.versionfile_source = get(parser, "versionfile_source")
    cfg.versionfile_build = get(parser, "versionfile_build")
    cfg.tag_prefix = get(parser, "tag_prefix")
    # allow an explicitly-empty prefix to be written as '' or "" in setup.cfg
    if cfg.tag_prefix in ("''", '""'):
        cfg.tag_prefix = ""
    cfg.parentdir_prefix = get(parser, "parentdir_prefix")
    cfg.verbose = get(parser, "verbose")
    return cfg
class NotThisMethod(Exception):
    """Exception raised if a method is not valid for the current scenario."""
    # Raised by the version-discovery helpers (keywords, git describe,
    # parentdir, _version.py file) to mean "try the next strategy"; callers
    # catch it and fall through.
# these dictionaries contain VCS-specific tools
# LONG_VERSION_PY: VCS name -> full text template for the generated
#   _version.py (filled in below for "git").
# HANDLERS: VCS name -> {method name -> handler function}, populated by the
#   register_vcs_handler decorator.
LONG_VERSION_PY = {}
HANDLERS = {}
def register_vcs_handler(vcs, method):  # decorator
    """Decorator factory: register a function as HANDLERS[vcs][method]."""
    def decorate(f):
        """Record f in the handler table and return it unchanged."""
        HANDLERS.setdefault(vcs, {})[method] = f
        return f
    return decorate
def run_command(commands, args, cwd=None, verbose=False, hide_stderr=False):
    """Try each executable in *commands* until one launches with *args*.

    Returns the command's stripped stdout as a string, or None when no
    candidate could be started or the process exited non-zero.
    """
    assert isinstance(commands, list)
    process = None
    for candidate in commands:
        display = str([candidate] + args)
        try:
            # remember shell=False, so use git.cmd on windows, not just git
            process = subprocess.Popen(
                [candidate] + args, cwd=cwd, stdout=subprocess.PIPE,
                stderr=(subprocess.PIPE if hide_stderr else None))
            break
        except EnvironmentError as exc:
            if exc.errno == errno.ENOENT:
                # executable not found -- try the next candidate name
                continue
            if verbose:
                print("unable to run %s" % display)
                print(exc)
            return None
    else:
        if verbose:
            print("unable to find command, tried %s" % (commands,))
        return None
    output = process.communicate()[0].strip()
    if sys.version_info[0] >= 3:
        output = output.decode()
    if process.returncode != 0:
        if verbose:
            print("unable to run %s (error)" % display)
        return None
    return output
LONG_VERSION_PY['git'] = r'''
# This file helps to compute a version number in source trees obtained from
# git-archive tarball (such as those provided by githubs download-from-tag
# feature). Distribution tarballs (built by setup.py sdist) and build
# directories (produced by setup.py build) will contain a much shorter file
# that just contains the computed version number.
# This file is released into the public domain. Generated by
# versioneer-0.15+dev (https://github.com/warner/python-versioneer)
"""Git implementation of _version.py."""
import errno
import os
import re
import subprocess
import sys
def get_keywords():
"""Get the keywords needed to look up the version information."""
# these strings will be replaced by git during git-archive.
# setup.py/versioneer.py will grep for the variable names, so they must
# each be defined on a line of their own. _version.py will just call
# get_keywords().
git_refnames = "%(DOLLAR)sFormat:%%d%(DOLLAR)s"
git_full = "%(DOLLAR)sFormat:%%H%(DOLLAR)s"
keywords = {"refnames": git_refnames, "full": git_full}
return keywords
class VersioneerConfig:
"""Container for Versioneer configuration parameters."""
def get_config():
"""Create, populate and return the VersioneerConfig() object."""
# these strings are filled in when 'setup.py versioneer' creates
# _version.py
cfg = VersioneerConfig()
cfg.VCS = "git"
cfg.style = "%(STYLE)s"
cfg.tag_prefix = "%(TAG_PREFIX)s"
cfg.parentdir_prefix = "%(PARENTDIR_PREFIX)s"
cfg.versionfile_source = "%(VERSIONFILE_SOURCE)s"
cfg.verbose = False
return cfg
class NotThisMethod(Exception):
"""Exception raised if a method is not valid for the current scenario."""
LONG_VERSION_PY = {}
HANDLERS = {}
def register_vcs_handler(vcs, method): # decorator
"""Decorator to mark a method as the handler for a particular VCS."""
def decorate(f):
"""Store f in HANDLERS[vcs][method]."""
if vcs not in HANDLERS:
HANDLERS[vcs] = {}
HANDLERS[vcs][method] = f
return f
return decorate
def run_command(commands, args, cwd=None, verbose=False, hide_stderr=False):
"""Call the given command(s)."""
assert isinstance(commands, list)
p = None
for c in commands:
try:
dispcmd = str([c] + args)
# remember shell=False, so use git.cmd on windows, not just git
p = subprocess.Popen([c] + args, cwd=cwd, stdout=subprocess.PIPE,
stderr=(subprocess.PIPE if hide_stderr
else None))
break
except EnvironmentError:
e = sys.exc_info()[1]
if e.errno == errno.ENOENT:
continue
if verbose:
print("unable to run %%s" %% dispcmd)
print(e)
return None
else:
if verbose:
print("unable to find command, tried %%s" %% (commands,))
return None
stdout = p.communicate()[0].strip()
if sys.version_info[0] >= 3:
stdout = stdout.decode()
if p.returncode != 0:
if verbose:
print("unable to run %%s (error)" %% dispcmd)
return None
return stdout
def versions_from_parentdir(parentdir_prefix, root, verbose):
"""Try to determine the version from the parent directory name.
Source tarballs conventionally unpack into a directory that includes
both the project name and a version string.
"""
dirname = os.path.basename(root)
if not dirname.startswith(parentdir_prefix):
if verbose:
print("guessing rootdir is '%%s', but '%%s' doesn't start with "
"prefix '%%s'" %% (root, dirname, parentdir_prefix))
raise NotThisMethod("rootdir doesn't start with parentdir_prefix")
return {"version": dirname[len(parentdir_prefix):],
"full-revisionid": None,
"dirty": False, "error": None}
@register_vcs_handler("git", "get_keywords")
def git_get_keywords(versionfile_abs):
"""Extract version information from the given file."""
# the code embedded in _version.py can just fetch the value of these
# keywords. When used from setup.py, we don't want to import _version.py,
# so we do it with a regexp instead. This function is not used from
# _version.py.
keywords = {}
try:
f = open(versionfile_abs, "r")
for line in f.readlines():
if line.strip().startswith("git_refnames ="):
mo = re.search(r'=\s*"(.*)"', line)
if mo:
keywords["refnames"] = mo.group(1)
if line.strip().startswith("git_full ="):
mo = re.search(r'=\s*"(.*)"', line)
if mo:
keywords["full"] = mo.group(1)
f.close()
except EnvironmentError:
pass
return keywords
@register_vcs_handler("git", "keywords")
def git_versions_from_keywords(keywords, tag_prefix, verbose):
"""Get version information from git keywords."""
if not keywords:
raise NotThisMethod("no keywords at all, weird")
refnames = keywords["refnames"].strip()
if refnames.startswith("$Format"):
if verbose:
print("keywords are unexpanded, not using")
raise NotThisMethod("unexpanded keywords, not a git-archive tarball")
refs = [r.strip() for r in refnames.strip("()").split(",")]
# starting in git-1.8.3, tags are listed as "tag: foo-1.0" instead of
# just "foo-1.0". If we see a "tag: " prefix, prefer those.
TAG = "tag: "
tags = set([r[len(TAG):] for r in refs if r.startswith(TAG)])
if not tags:
# Either we're using git < 1.8.3, or there really are no tags. We use
# a heuristic: assume all version tags have a digit. The old git %%d
# expansion behaves like git log --decorate=short and strips out the
# refs/heads/ and refs/tags/ prefixes that would let us distinguish
# between branches and tags. By ignoring refnames without digits, we
# filter out many common branch names like "release" and
# "stabilization", as well as "HEAD" and "master".
tags = set([r for r in refs if re.search(r'\d', r)])
if verbose:
print("discarding '%%s', no digits" %% ",".join(set(refs) - tags))
if verbose:
print("likely tags: %%s" %% ",".join(sorted(tags)))
for ref in sorted(tags):
# sorting will prefer e.g. "2.0" over "2.0rc1"
if ref.startswith(tag_prefix):
r = ref[len(tag_prefix):]
if verbose:
print("picking %%s" %% r)
return {"version": r,
"full-revisionid": keywords["full"].strip(),
"dirty": False, "error": None, "branch": None
}
# no suitable tags, so version is "0+unknown", but full hex is still there
if verbose:
print("no suitable tags, using unknown + full revision id")
return {"version": "0+unknown",
"full-revisionid": keywords["full"].strip(),
"dirty": False, "error": "no suitable tags",
"branch": None}
@register_vcs_handler("git", "pieces_from_vcs")
def git_pieces_from_vcs(tag_prefix, root, verbose, run_command=run_command):
"""Get version from 'git describe' in the root of the source tree.
This only gets called if the git-archive 'subst' keywords were *not*
expanded, and _version.py hasn't already been rewritten with a short
version string, meaning we're inside a checked out source tree.
"""
if not os.path.exists(os.path.join(root, ".git")):
if verbose:
print("no .git in %%s" %% root)
raise NotThisMethod("no .git directory")
GITS = ["git"]
if sys.platform == "win32":
GITS = ["git.cmd", "git.exe"]
# if there is a tag matching tag_prefix, this yields TAG-NUM-gHEX[-dirty]
# if there isn't one, this yields HEX[-dirty] (no NUM). Note, for git v1.7
# and below, it is necessary to run "git update-index --refresh" first.
describe_out = run_command(GITS, ["describe", "--tags", "--dirty",
"--always", "--long",
"--match", "%%s*" %% tag_prefix],
cwd=root)
# --long was added in git-1.5.5
if describe_out is None:
raise NotThisMethod("'git describe' failed")
describe_out = describe_out.strip()
full_out = run_command(GITS, ["rev-parse", "HEAD"], cwd=root)
if full_out is None:
raise NotThisMethod("'git rev-parse' failed")
full_out = full_out.strip()
pieces = {}
pieces["long"] = full_out
pieces["short"] = full_out[:7] # maybe improved later
pieces["error"] = None
# abbrev-ref available with git >= 1.7
branch_name = run_command(GITS, ["rev-parse", "--abbrev-ref", "HEAD"],
cwd=root).strip()
if branch_name == 'HEAD':
branches = run_command(GITS, ["branch", "--contains"],
cwd=root).split('\n')
branches = [branch[2:] for branch in branches if branch[4:5] != '(']
if 'master' in branches:
branch_name = 'master'
elif not branches:
branch_name = None
else:
# Pick the first branch that is returned. Good or bad.
branch_name = branches[0]
branch_name = branch_name.replace(' ', '.').replace('(', '').replace(')', '')
pieces['branch'] = branch_name
# parse describe_out. It will be like TAG-NUM-gHEX[-dirty] or HEX[-dirty]
# TAG might have hyphens.
git_describe = describe_out
# look for -dirty suffix
dirty = git_describe.endswith("-dirty")
pieces["dirty"] = dirty
if dirty:
git_describe = git_describe[:git_describe.rindex("-dirty")]
# now we have TAG-NUM-gHEX or HEX
if "-" in git_describe:
# TAG-NUM-gHEX
mo = re.search(r'^(.+)-(\d+)-g([0-9a-f]+)$', git_describe)
if not mo:
# unparseable. Maybe git-describe is misbehaving?
pieces["error"] = ("unable to parse git-describe output: '%%s'"
%% describe_out)
return pieces
# tag
full_tag = mo.group(1)
if not full_tag.startswith(tag_prefix):
if verbose:
fmt = "tag '%%s' doesn't start with prefix '%%s'"
print(fmt %% (full_tag, tag_prefix))
pieces["error"] = ("tag '%%s' doesn't start with prefix '%%s'"
%% (full_tag, tag_prefix))
return pieces
pieces["closest-tag"] = full_tag[len(tag_prefix):]
# distance: number of commits since tag
pieces["distance"] = int(mo.group(2))
# commit: short hex revision ID
pieces["short"] = mo.group(3)
else:
# HEX: no tags
pieces["closest-tag"] = None
count_out = run_command(GITS, ["rev-list", "HEAD", "--count"],
cwd=root)
pieces["distance"] = int(count_out) # total number of commits
return pieces
# Default matches v1.2.x, maint/1.2.x, 1.2.x, 1.x etc.
default_maint_branch_regexp = ".*([0-9]+\.)+x$"
def plus_or_dot(pieces):
"""Return a + if we don't already have one, else return a ."""
if "+" in pieces.get("closest-tag", ""):
return "."
return "+"
def render_pep440(pieces):
"""Build up version string, with post-release "local version identifier".
Our goal: TAG[+DISTANCE.gHEX[.dirty]] . Note that if you
get a tagged build and then dirty it, you'll get TAG+0.gHEX.dirty
Exceptions:
1: no tags. git_describe was just HEX. 0+untagged.DISTANCE.gHEX[.dirty]
"""
if pieces["closest-tag"]:
rendered = pieces["closest-tag"]
if pieces["distance"] or pieces["dirty"]:
rendered += plus_or_dot(pieces)
rendered += "%%d.g%%s" %% (pieces["distance"], pieces["short"])
if pieces["dirty"]:
rendered += ".dirty"
else:
# exception #1
rendered = "0+untagged.%%d.g%%s" %% (pieces["distance"],
pieces["short"])
if pieces["dirty"]:
rendered += ".dirty"
return rendered
def render_pep440_pre(pieces):
"""TAG[.post.devDISTANCE] -- No -dirty.
Exceptions:
1: no tags. 0.post.devDISTANCE
"""
if pieces["closest-tag"]:
rendered = pieces["closest-tag"]
if pieces["distance"]:
rendered += ".post.dev%%d" %% pieces["distance"]
else:
# exception #1
rendered = "0.post.dev%%d" %% pieces["distance"]
return rendered
def render_pep440_post(pieces):
"""TAG[.postDISTANCE[.dev0]+gHEX] .
The ".dev0" means dirty. Note that .dev0 sorts backwards
(a dirty tree will appear "older" than the corresponding clean one),
but you shouldn't be releasing software with -dirty anyways.
Exceptions:
1: no tags. 0.postDISTANCE[.dev0]
"""
if pieces["closest-tag"]:
rendered = pieces["closest-tag"]
if pieces["distance"] or pieces["dirty"]:
rendered += ".post%%d" %% pieces["distance"]
if pieces["dirty"]:
rendered += ".dev0"
rendered += plus_or_dot(pieces)
rendered += "g%%s" %% pieces["short"]
else:
# exception #1
rendered = "0.post%%d" %% pieces["distance"]
if pieces["dirty"]:
rendered += ".dev0"
rendered += "+g%%s" %% pieces["short"]
return rendered
def render_pep440_old(pieces):
"""TAG[.postDISTANCE[.dev0]] .
The ".dev0" means dirty.
Eexceptions:
1: no tags. 0.postDISTANCE[.dev0]
"""
if pieces["closest-tag"]:
rendered = pieces["closest-tag"]
if pieces["distance"] or pieces["dirty"]:
rendered += ".post%%d" %% pieces["distance"]
if pieces["dirty"]:
rendered += ".dev0"
else:
# exception #1
rendered = "0.post%%d" %% pieces["distance"]
if pieces["dirty"]:
rendered += ".dev0"
return rendered
def render_git_describe(pieces):
"""TAG[-DISTANCE-gHEX][-dirty].
Like 'git describe --tags --dirty --always'.
Exceptions:
1: no tags. HEX[-dirty] (note: no 'g' prefix)
"""
if pieces["closest-tag"]:
rendered = pieces["closest-tag"]
if pieces["distance"]:
rendered += "-%%d-g%%s" %% (pieces["distance"], pieces["short"])
else:
# exception #1
rendered = pieces["short"]
if pieces["dirty"]:
rendered += "-dirty"
return rendered
def render_git_describe_long(pieces):
"""TAG-DISTANCE-gHEX[-dirty].
Like 'git describe --tags --dirty --always -long'.
The distance/hash is unconditional.
Exceptions:
1: no tags. HEX[-dirty] (note: no 'g' prefix)
"""
if pieces["closest-tag"]:
rendered = pieces["closest-tag"]
rendered += "-%%d-g%%s" %% (pieces["distance"], pieces["short"])
else:
# exception #1
rendered = pieces["short"]
if pieces["dirty"]:
rendered += "-dirty"
return rendered
def add_one_to_version(version_string, number_index_to_increment=-1):
"""
Add one to a version string at the given numeric indices.
>>> add_one_to_version('v1.2.3')
'v1.2.4'
"""
# Break up the tag by number groups (preserving multi-digit
# numbers as multidigit)
parts = re.split("([0-9]+)", version_string)
digit_parts = [(i, part) for i, part in enumerate(parts)
if part.isdigit()]
# Deal with negative indexing.
increment_at_index = ((number_index_to_increment + len(digit_parts))
%% len(digit_parts))
for n_seen, (i, part) in enumerate(digit_parts):
if n_seen == increment_at_index:
parts[i] = str(int(part) + 1)
elif n_seen > increment_at_index:
parts[i] = '0'
return ''.join(parts)
def render_pep440_branch_based(pieces):
# [TAG+1 of minor number][.devDISTANCE][+gHEX]. The git short is
# included for dirty.
# exceptions:
# 1: no tags. 0.0.0.devDISTANCE[+gHEX]
master = pieces.get('branch') == 'master'
maint = re.match(default_maint_branch_regexp,
pieces.get('branch') or '')
# If we are on a tag, just pep440-pre it.
if pieces["closest-tag"] and not (pieces["distance"] or
pieces["dirty"]):
rendered = pieces["closest-tag"]
else:
# Put a default closest-tag in.
if not pieces["closest-tag"]:
pieces["closest-tag"] = '0.0.0'
if pieces["distance"] or pieces["dirty"]:
if maint:
rendered = pieces["closest-tag"]
if pieces["distance"]:
rendered += ".post%%d" %% pieces["distance"]
else:
rendered = add_one_to_version(pieces["closest-tag"])
if pieces["distance"]:
rendered += ".dev%%d" %% pieces["distance"]
# Put the branch name in if it isn't master nor a
# maintenance branch.
plus = '+'
if not (master or maint):
rendered += "%%s%%s" %% (plus,
pieces.get('branch') or
'unknown_branch')
plus = '_'
if pieces["dirty"]:
rendered += "%%sg%%s" %% (plus, pieces["short"])
else:
rendered = pieces["closest-tag"]
return rendered
STYLES = {'default': render_pep440,
'pep440': render_pep440,
'pep440-pre': render_pep440_pre,
'pep440-post': render_pep440_post,
'pep440-old': render_pep440_old,
'git-describe': render_git_describe,
'git-describe-long': render_git_describe_long,
'pep440-old': render_pep440_old,
'pep440-branch-based': render_pep440_branch_based,
}
def render(pieces, style):
"""Render the given version pieces into the requested style."""
if pieces["error"]:
return {"version": "unknown",
"full-revisionid": pieces.get("long"),
"dirty": None,
"error": pieces["error"]}
if not style:
style = 'default'
renderer = STYLES.get(style)
if not renderer:
raise ValueError("unknown style '%%s'" %% style)
rendered = renderer(pieces)
return {"version": rendered, "full-revisionid": pieces["long"],
"dirty": pieces["dirty"], "error": None}
def get_versions():
"""Get version information or return default if unable to do so."""
# I am in _version.py, which lives at ROOT/VERSIONFILE_SOURCE. If we have
# __file__, we can work backwards from there to the root. Some
# py2exe/bbfreeze/non-CPython implementations don't do __file__, in which
# case we can only use expanded keywords.
cfg = get_config()
verbose = cfg.verbose
try:
return git_versions_from_keywords(get_keywords(), cfg.tag_prefix,
verbose)
except NotThisMethod:
pass
try:
root = os.path.realpath(__file__)
# versionfile_source is the relative path from the top of the source
# tree (where the .git directory might live) to this file. Invert
# this to find the root from __file__.
for i in cfg.versionfile_source.split('/'):
root = os.path.dirname(root)
except NameError:
return {"version": "0+unknown", "full-revisionid": None,
"dirty": None,
"error": "unable to find root of source tree"}
try:
pieces = git_pieces_from_vcs(cfg.tag_prefix, root, verbose)
return render(pieces, cfg.style)
except NotThisMethod:
pass
try:
if cfg.parentdir_prefix:
return versions_from_parentdir(cfg.parentdir_prefix, root, verbose)
except NotThisMethod:
pass
return {"version": "0+unknown", "full-revisionid": None,
"dirty": None,
"error": "unable to compute version"}
'''
@register_vcs_handler("git", "get_keywords")
def git_get_keywords(versionfile_abs):
    """Extract version information from the given file.

    setup.py reads the git-archive keyword values out of _version.py with a
    regexp instead of importing it; _version.py itself never calls this.
    Returns a dict with any of the keys "refnames" and "full" that were
    found; missing/unreadable files yield an empty dict.
    """
    keywords = {}
    try:
        with open(versionfile_abs, "r") as fobj:
            for line in fobj:
                stripped = line.strip()
                # each keyword is defined on a line of its own
                if stripped.startswith("git_refnames ="):
                    match = re.search(r'=\s*"(.*)"', line)
                    if match:
                        keywords["refnames"] = match.group(1)
                elif stripped.startswith("git_full ="):
                    match = re.search(r'=\s*"(.*)"', line)
                    if match:
                        keywords["full"] = match.group(1)
    except EnvironmentError:
        # best-effort: absent file simply means no keywords
        pass
    return keywords
@register_vcs_handler("git", "keywords")
def git_versions_from_keywords(keywords, tag_prefix, verbose):
    """Get version information from git keywords.

    Raises NotThisMethod when the keywords are missing or unexpanded
    (i.e. we are not inside a git-archive tarball).
    """
    if not keywords:
        raise NotThisMethod("no keywords at all, weird")
    refnames = keywords["refnames"].strip()
    if refnames.startswith("$Format"):
        if verbose:
            print("keywords are unexpanded, not using")
        raise NotThisMethod("unexpanded keywords, not a git-archive tarball")
    refs = [ref.strip() for ref in refnames.strip("()").split(",")]
    # starting in git-1.8.3, tags are listed as "tag: foo-1.0" instead of
    # just "foo-1.0". If we see a "tag: " prefix, prefer those.
    TAG = "tag: "
    tags = set(ref[len(TAG):] for ref in refs if ref.startswith(TAG))
    if not tags:
        # Either we're using git < 1.8.3, or there really are no tags. We use
        # a heuristic: assume all version tags have a digit. The old git %d
        # expansion behaves like git log --decorate=short and strips out the
        # refs/heads/ and refs/tags/ prefixes that would let us distinguish
        # between branches and tags. By ignoring refnames without digits, we
        # filter out many common branch names like "release" and
        # "stabilization", as well as "HEAD" and "master".
        tags = set(ref for ref in refs if re.search(r'\d', ref))
        if verbose:
            print("discarding '%s', no digits" % ",".join(set(refs) - tags))
    if verbose:
        print("likely tags: %s" % ",".join(sorted(tags)))
    for ref in sorted(tags):
        # sorting will prefer e.g. "2.0" over "2.0rc1"
        if not ref.startswith(tag_prefix):
            continue
        version = ref[len(tag_prefix):]
        if verbose:
            print("picking %s" % version)
        return {"version": version,
                "full-revisionid": keywords["full"].strip(),
                "dirty": False, "error": None, "branch": None
                }
    # no suitable tags, so version is "0+unknown", but full hex is still there
    if verbose:
        print("no suitable tags, using unknown + full revision id")
    return {"version": "0+unknown",
            "full-revisionid": keywords["full"].strip(),
            "dirty": False, "error": "no suitable tags",
            "branch": None}
@register_vcs_handler("git", "pieces_from_vcs")
def git_pieces_from_vcs(tag_prefix, root, verbose, run_command=run_command):
    """Get version from 'git describe' in the root of the source tree.

    This only gets called if the git-archive 'subst' keywords were *not*
    expanded, and _version.py hasn't already been rewritten with a short
    version string, meaning we're inside a checked out source tree.

    Returns a "pieces" dict with keys: long, short, error, branch, dirty,
    closest-tag and distance (or an "error" message when the describe
    output could not be parsed). Raises NotThisMethod when there is no
    .git directory or git cannot be run.
    """
    if not os.path.exists(os.path.join(root, ".git")):
        if verbose:
            print("no .git in %s" % root)
        raise NotThisMethod("no .git directory")
    GITS = ["git"]
    if sys.platform == "win32":
        GITS = ["git.cmd", "git.exe"]
    # if there is a tag matching tag_prefix, this yields TAG-NUM-gHEX[-dirty]
    # if there isn't one, this yields HEX[-dirty] (no NUM). Note, for git v1.7
    # and below, it is necessary to run "git update-index --refresh" first.
    describe_out = run_command(GITS, ["describe", "--tags", "--dirty",
                                     "--always", "--long",
                                     "--match", "%s*" % tag_prefix],
                               cwd=root)
    # --long was added in git-1.5.5
    if describe_out is None:
        raise NotThisMethod("'git describe' failed")
    describe_out = describe_out.strip()
    full_out = run_command(GITS, ["rev-parse", "HEAD"], cwd=root)
    if full_out is None:
        raise NotThisMethod("'git rev-parse' failed")
    full_out = full_out.strip()
    pieces = {}
    pieces["long"] = full_out
    pieces["short"] = full_out[:7]  # maybe improved later
    pieces["error"] = None
    # abbrev-ref available with git >= 1.7
    branch_name = run_command(GITS, ["rev-parse", "--abbrev-ref", "HEAD"],
                              cwd=root).strip()
    if branch_name == 'HEAD':
        # Detached HEAD: look at which branches contain this commit instead.
        branches = run_command(GITS, ["branch", "--contains"],
                               cwd=root).split('\n')
        # drop the 2-char status prefix; skip "(detached from ...)" entries
        branches = [branch[2:] for branch in branches if branch[4:5] != '(']
        if 'master' in branches:
            branch_name = 'master'
        elif not branches:
            branch_name = None
        else:
            # Pick the first branch that is returned. Good or bad.
            branch_name = branches[0]
    # Fix: guard against branch_name being None (empty "branch --contains"
    # output above); the unconditional replace() crashed with AttributeError.
    if branch_name is not None:
        branch_name = (branch_name.replace(' ', '.')
                       .replace('(', '').replace(')', ''))
    pieces['branch'] = branch_name
    # parse describe_out. It will be like TAG-NUM-gHEX[-dirty] or HEX[-dirty]
    # TAG might have hyphens.
    git_describe = describe_out
    # look for -dirty suffix
    dirty = git_describe.endswith("-dirty")
    pieces["dirty"] = dirty
    if dirty:
        git_describe = git_describe[:git_describe.rindex("-dirty")]
    # now we have TAG-NUM-gHEX or HEX
    if "-" in git_describe:
        # TAG-NUM-gHEX
        mo = re.search(r'^(.+)-(\d+)-g([0-9a-f]+)$', git_describe)
        if not mo:
            # unparseable. Maybe git-describe is misbehaving?
            pieces["error"] = ("unable to parse git-describe output: '%s'"
                               % describe_out)
            return pieces
        # tag
        full_tag = mo.group(1)
        if not full_tag.startswith(tag_prefix):
            if verbose:
                fmt = "tag '%s' doesn't start with prefix '%s'"
                print(fmt % (full_tag, tag_prefix))
            pieces["error"] = ("tag '%s' doesn't start with prefix '%s'"
                               % (full_tag, tag_prefix))
            return pieces
        pieces["closest-tag"] = full_tag[len(tag_prefix):]
        # distance: number of commits since tag
        pieces["distance"] = int(mo.group(2))
        # commit: short hex revision ID
        pieces["short"] = mo.group(3)
    else:
        # HEX: no tags
        pieces["closest-tag"] = None
        count_out = run_command(GITS, ["rev-list", "HEAD", "--count"],
                                cwd=root)
        # Bug fix: the distance was hard-coded as int('3'); use the real
        # commit count reported by 'git rev-list --count'.
        pieces["distance"] = int(count_out)  # total number of commits
    return pieces
def do_vcs_install(manifest_in, versionfile_source, ipy):
    """Git-specific installation logic for Versioneer.

    For Git, this means creating/changing .gitattributes to mark _version.py
    for export-time keyword substitution.

    Arguments:
      manifest_in: path of MANIFEST.in, staged with 'git add'.
      versionfile_source: path of SRC/_version.py, marked export-subst.
      ipy: path of the package __init__.py; falsy to skip staging it.
    """
    GITS = ["git"]
    if sys.platform == "win32":
        GITS = ["git.cmd", "git.exe"]
    files = [manifest_in, versionfile_source]
    if ipy:
        files.append(ipy)
    try:
        # stage this versioneer.py too; map compiled filenames back to .py
        me = __file__
        if me.endswith(".pyc") or me.endswith(".pyo"):
            me = os.path.splitext(me)[0] + ".py"
        versioneer_file = os.path.relpath(me)
    except NameError:
        # __file__ unavailable (frozen/embedded interpreter): assume default
        versioneer_file = "versioneer.py"
    files.append(versioneer_file)
    present = False
    try:
        # check whether .gitattributes already marks the version file
        f = open(".gitattributes", "r")
        for line in f.readlines():
            if line.strip().startswith(versionfile_source):
                if "export-subst" in line.strip().split()[1:]:
                    present = True
        f.close()
    except EnvironmentError:
        # no .gitattributes yet; it will be created below
        pass
    if not present:
        # append the export-subst attribute and stage the file
        f = open(".gitattributes", "a+")
        f.write("%s export-subst\n" % versionfile_source)
        f.close()
        files.append(".gitattributes")
    run_command(GITS, ["add", "--"] + files)
def versions_from_parentdir(parentdir_prefix, root, verbose):
    """Try to determine the version from the parent directory name.

    Source tarballs conventionally unpack into a directory that includes
    both the project name and a version string.
    Raises NotThisMethod when the directory name lacks the prefix.
    """
    dirname = os.path.basename(root)
    if dirname.startswith(parentdir_prefix):
        # everything after the prefix is the version string
        return {"version": dirname[len(parentdir_prefix):],
                "full-revisionid": None,
                "dirty": False, "error": None}
    if verbose:
        print("guessing rootdir is '%s', but '%s' doesn't start with "
              "prefix '%s'" % (root, dirname, parentdir_prefix))
    raise NotThisMethod("rootdir doesn't start with parentdir_prefix")
SHORT_VERSION_PY = """
# This file was generated by 'versioneer.py' (0.15+dev) from
# revision-control system data, or from the parent directory name of an
# unpacked source archive. Distribution tarballs contain a pre-generated copy
# of this file.
import json
import sys
version_json = '''
%s
''' # END VERSION_JSON
def get_versions():
return json.loads(version_json)
"""
def versions_from_file(filename):
    """Try to determine the version from _version.py if present."""
    try:
        with open(filename) as fobj:
            contents = fobj.read()
    except EnvironmentError:
        raise NotThisMethod("unable to read _version.py")
    # The generated file brackets its JSON payload with an END marker.
    match = re.search(r"version_json = '''\n(.*)''' # END VERSION_JSON",
                      contents, re.M | re.S)
    if match is None:
        raise NotThisMethod("no version_json in _version.py")
    return json.loads(match.group(1))
def write_to_version_file(filename, versions):
    """Write the given version number to the given _version.py file."""
    # Remove any existing file (possibly a hardlink) before rewriting.
    os.unlink(filename)
    serialized = json.dumps(versions, sort_keys=True, indent=1,
                            separators=(",", ": "))
    with open(filename, "w") as fobj:
        fobj.write(SHORT_VERSION_PY % serialized)
    print("set %s to '%s'" % (filename, versions["version"]))
# Default matches v1.2.x, maint/1.2.x, 1.2.x, 1.x etc.
# Raw string: '\.' in a non-raw literal is an invalid escape sequence
# (DeprecationWarning since Python 3.6, SyntaxWarning in 3.12+).
default_maint_branch_regexp = r".*([0-9]+\.)+x$"
def plus_or_dot(pieces):
    """Return a + if we don't already have one, else return a ."""
    closest = pieces.get("closest-tag", "")
    # A '+' already begins the local version segment; use '.' to extend it.
    return "." if "+" in closest else "+"
def render_pep440(pieces):
    """Build up version string, with post-release "local version identifier".

    Our goal: TAG[+DISTANCE.gHEX[.dirty]] . Note that if you
    get a tagged build and then dirty it, you'll get TAG+0.gHEX.dirty

    Exceptions:
    1: no tags. git_describe was just HEX. 0+untagged.DISTANCE.gHEX[.dirty]
    """
    tag = pieces["closest-tag"]
    if not tag:
        # exception #1: no tag anywhere in history
        version = "0+untagged.%d.g%s" % (pieces["distance"], pieces["short"])
        if pieces["dirty"]:
            version += ".dirty"
        return version
    version = tag
    if pieces["distance"] or pieces["dirty"]:
        version += plus_or_dot(pieces)
        version += "%d.g%s" % (pieces["distance"], pieces["short"])
        if pieces["dirty"]:
            version += ".dirty"
    return version
def render_pep440_pre(pieces):
    """TAG[.post.devDISTANCE] -- No -dirty.

    Exceptions:
    1: no tags. 0.post.devDISTANCE
    """
    tag = pieces["closest-tag"]
    if not tag:
        # exception #1
        return "0.post.dev%d" % pieces["distance"]
    if pieces["distance"]:
        return tag + ".post.dev%d" % pieces["distance"]
    return tag
def render_pep440_post(pieces):
    """TAG[.postDISTANCE[.dev0]+gHEX] .

    The ".dev0" means dirty. Note that .dev0 sorts backwards
    (a dirty tree will appear "older" than the corresponding clean one),
    but you shouldn't be releasing software with -dirty anyways.

    Exceptions:
    1: no tags. 0.postDISTANCE[.dev0]
    """
    tag = pieces["closest-tag"]
    if not tag:
        # exception #1: synthesize from scratch, always appending the hash
        version = "0.post%d" % pieces["distance"]
        if pieces["dirty"]:
            version += ".dev0"
        return version + "+g%s" % pieces["short"]
    version = tag
    if pieces["distance"] or pieces["dirty"]:
        version += ".post%d" % pieces["distance"]
        if pieces["dirty"]:
            version += ".dev0"
        version += plus_or_dot(pieces)
        version += "g%s" % pieces["short"]
    return version
def render_pep440_old(pieces):
    """TAG[.postDISTANCE[.dev0]] .

    The ".dev0" means dirty.

    Exceptions:
    1: no tags. 0.postDISTANCE[.dev0]
    """
    tag = pieces["closest-tag"]
    if tag:
        version = tag
        if pieces["distance"] or pieces["dirty"]:
            version += ".post%d" % pieces["distance"]
            if pieces["dirty"]:
                version += ".dev0"
        return version
    # exception #1
    version = "0.post%d" % pieces["distance"]
    if pieces["dirty"]:
        version += ".dev0"
    return version
def render_git_describe(pieces):
    """TAG[-DISTANCE-gHEX][-dirty].

    Like 'git describe --tags --dirty --always'.

    Exceptions:
    1: no tags. HEX[-dirty] (note: no 'g' prefix)
    """
    tag = pieces["closest-tag"]
    if tag:
        version = tag
        if pieces["distance"]:
            version += "-%d-g%s" % (pieces["distance"], pieces["short"])
    else:
        # exception #1: fall back to the bare short hash
        version = pieces["short"]
    if pieces["dirty"]:
        version += "-dirty"
    return version
def render_git_describe_long(pieces):
    """TAG-DISTANCE-gHEX[-dirty].

    Like 'git describe --tags --dirty --always -long'.
    The distance/hash is unconditional.

    Exceptions:
    1: no tags. HEX[-dirty] (note: no 'g' prefix)
    """
    tag = pieces["closest-tag"]
    if tag:
        # Distance and hash are always included, even at distance 0.
        version = "%s-%d-g%s" % (tag, pieces["distance"], pieces["short"])
    else:
        # exception #1
        version = pieces["short"]
    if pieces["dirty"]:
        version += "-dirty"
    return version
def add_one_to_version(version_string, number_index_to_increment=-1):
    """
    Add one to a version string at the given numeric indices.

    >>> add_one_to_version('v1.2.3')
    'v1.2.4'
    """
    # Split so that every run of digits becomes its own list element,
    # preserving multi-digit numbers.
    parts = re.split("([0-9]+)", version_string)
    digit_positions = [i for i, part in enumerate(parts) if part.isdigit()]
    # Normalize negative indices into the 0..len-1 range.
    bump_at = ((number_index_to_increment + len(digit_positions))
               % len(digit_positions))
    for ordinal, i in enumerate(digit_positions):
        if ordinal == bump_at:
            parts[i] = str(int(parts[i]) + 1)
        elif ordinal > bump_at:
            # Every numeric component after the bumped one resets to zero.
            parts[i] = '0'
    return ''.join(parts)
def render_pep440_branch_based(pieces):
    """Render a PEP 440 version string that encodes the branch being built.

    Tagged clean builds render as the tag itself.  Otherwise: maintenance
    branches get TAG.postDISTANCE, other branches get (TAG+1).devDISTANCE;
    a non-master/non-maint branch name and (when dirty) the git short hash
    are appended as the local version segment.

    NOTE: mutates ``pieces`` in place -- a missing "closest-tag" is
    replaced with '0.0.0' before rendering.
    """
    # [TAG+1 of minor number][.devDISTANCE][+gHEX]. The git short is
    # included for dirty.
    # exceptions:
    # 1: no tags. 0.0.0.devDISTANCE[+gHEX]
    master = pieces.get('branch') == 'master'
    # 'branch' may be absent or None; match against '' in that case.
    maint = re.match(default_maint_branch_regexp,
                     pieces.get('branch') or '')
    # If we are on a tag, just pep440-pre it.
    if pieces["closest-tag"] and not (pieces["distance"] or
                                      pieces["dirty"]):
        rendered = pieces["closest-tag"]
    else:
        # Put a default closest-tag in.
        if not pieces["closest-tag"]:
            pieces["closest-tag"] = '0.0.0'
        if pieces["distance"] or pieces["dirty"]:
            if maint:
                # Maintenance branch: mark commits past the tag as .postN
                # rather than bumping the version.
                rendered = pieces["closest-tag"]
                if pieces["distance"]:
                    rendered += ".post%d" % pieces["distance"]
            else:
                # Any other branch: pre-release of the next version.
                rendered = add_one_to_version(pieces["closest-tag"])
                if pieces["distance"]:
                    rendered += ".dev%d" % pieces["distance"]
            # Put the branch name in if it isn't master nor a
            # maintenance branch.
            plus = '+'
            if not (master or maint):
                rendered += "%s%s" % (plus,
                                      pieces.get('branch') or
                                      'unknown_branch')
                # Once the local segment is started, further parts are
                # separated with '_' instead of a second '+'.
                plus = '_'
            if pieces["dirty"]:
                rendered += "%sg%s" % (plus, pieces["short"])
        else:
            rendered = pieces["closest-tag"]
    return rendered
# Map of style name -> renderer function used by render().  'default' is
# an alias for pep440.  The original literal listed 'pep440-old' twice
# (a silent duplicate-key overwrite); the redundant entry is removed.
STYLES = {'default': render_pep440,
          'pep440': render_pep440,
          'pep440-pre': render_pep440_pre,
          'pep440-post': render_pep440_post,
          'pep440-old': render_pep440_old,
          'git-describe': render_git_describe,
          'git-describe-long': render_git_describe_long,
          'pep440-branch-based': render_pep440_branch_based,
          }
def render(pieces, style):
    """Render the given version pieces into the requested style."""
    if pieces["error"]:
        # VCS extraction failed; report the error instead of a version.
        return {"version": "unknown",
                "full-revisionid": pieces.get("long"),
                "dirty": None,
                "error": pieces["error"]}
    renderer = STYLES.get(style or 'default')
    if renderer is None:
        raise ValueError("unknown style '%s'" % style)
    return {"version": renderer(pieces),
            "full-revisionid": pieces["long"],
            "dirty": pieces["dirty"],
            "error": None}
class VersioneerBadRootError(Exception):
    """The project root directory is unknown or missing key files."""
def get_versions(verbose=False):
    """Get the project version from whatever source is available.

    Tries, in order: expanded VCS keywords, the generated _version.py
    file, the VCS itself, and the parent directory name.  Each strategy
    raises NotThisMethod to signal fall-through to the next.

    Returns a dict with keys 'version', 'full-revisionid', 'dirty' and
    'error'; the last-resort fallback has version "0+unknown".
    """
    if "versioneer" in sys.modules:
        # see the discussion in cmdclass.py:get_cmdclass()
        del sys.modules["versioneer"]
    root = get_root()
    cfg = get_config_from_root(root)
    assert cfg.VCS is not None, "please set [versioneer]VCS= in setup.cfg"
    handlers = HANDLERS.get(cfg.VCS)
    assert handlers, "unrecognized VCS '%s'" % cfg.VCS
    verbose = verbose or cfg.verbose
    assert cfg.versionfile_source is not None, \
        "please set versioneer.versionfile_source"
    assert cfg.tag_prefix is not None, "please set versioneer.tag_prefix"
    versionfile_abs = os.path.join(root, cfg.versionfile_source)
    # extract version from first of: _version.py, VCS command (e.g. 'git
    # describe'), parentdir. This is meant to work for developers using a
    # source checkout, for users of a tarball created by 'setup.py sdist',
    # and for users of a tarball/zipball created by 'git archive' or github's
    # download-from-tag feature or the equivalent in other VCSes.
    get_keywords_f = handlers.get("get_keywords")
    from_keywords_f = handlers.get("keywords")
    if get_keywords_f and from_keywords_f:
        try:
            # Strategy 1: keywords expanded by 'git archive' export-subst.
            keywords = get_keywords_f(versionfile_abs)
            ver = from_keywords_f(keywords, cfg.tag_prefix, verbose)
            if verbose:
                print("got version from expanded keyword %s" % ver)
            return ver
        except NotThisMethod:
            pass
    try:
        # Strategy 2: a pre-generated _version.py (sdist tarballs).
        ver = versions_from_file(versionfile_abs)
        if verbose:
            print("got version from file %s %s" % (versionfile_abs, ver))
        return ver
    except NotThisMethod:
        pass
    from_vcs_f = handlers.get("pieces_from_vcs")
    if from_vcs_f:
        try:
            # Strategy 3: ask the VCS directly (developer checkout).
            pieces = from_vcs_f(cfg.tag_prefix, root, verbose)
            ver = render(pieces, cfg.style)
            if verbose:
                print("got version from VCS %s" % ver)
            return ver
        except NotThisMethod:
            pass
    try:
        # Strategy 4: parse the unpacked directory's name.
        if cfg.parentdir_prefix:
            ver = versions_from_parentdir(cfg.parentdir_prefix, root, verbose)
            if verbose:
                print("got version from parentdir %s" % ver)
            return ver
    except NotThisMethod:
        pass
    if verbose:
        print("unable to compute version")
    return {"version": "0+unknown", "full-revisionid": None,
            "dirty": None, "error": "unable to compute version"}
def get_version():
    """Get the short version string for this project.

    Convenience wrapper: returns only the 'version' entry of
    get_versions().
    """
    return get_versions()["version"]
def get_cmdclass():
    """Get the custom setuptools/distutils subclasses used by Versioneer.

    Returns a dict suitable for setup(cmdclass=...), containing 'version',
    'build_py' (or 'build_exe' under cx_Freeze) and 'sdist' commands that
    embed the computed version into built/distributed artifacts.
    """
    if "versioneer" in sys.modules:
        del sys.modules["versioneer"]
        # this fixes the "python setup.py develop" case (also 'install' and
        # 'easy_install .'), in which subdependencies of the main project are
        # built (using setup.py bdist_egg) in the same python process. Assume
        # a main project A and a dependency B, which use different versions
        # of Versioneer. A's setup.py imports A's Versioneer, leaving it in
        # sys.modules by the time B's setup.py is executed, causing B to run
        # with the wrong versioneer. Setuptools wraps the sub-dep builds in a
        # sandbox that restores sys.modules to it's pre-build state, so the
        # parent is protected against the child's "import versioneer". By
        # removing ourselves from sys.modules here, before the child build
        # happens, we protect the child from the parent's versioneer too.
        # Also see https://github.com/warner/python-versioneer/issues/52
    cmds = {}
    # we add "version" to both distutils and setuptools
    from distutils.core import Command

    class cmd_version(Command):
        # Prints the computed version and its components; never builds.
        description = "report generated version string"
        user_options = []
        boolean_options = []

        def initialize_options(self):
            pass

        def finalize_options(self):
            pass

        def run(self):
            vers = get_versions(verbose=True)
            print("Version: %s" % vers["version"])
            print(" full-revisionid: %s" % vers.get("full-revisionid"))
            print(" dirty: %s" % vers.get("dirty"))
            if vers["error"]:
                print(" error: %s" % vers["error"])
    cmds["version"] = cmd_version
    # we override "build_py" in both distutils and setuptools
    #
    # most invocation pathways end up running build_py:
    # distutils/build -> build_py
    # distutils/install -> distutils/build ->..
    # setuptools/bdist_wheel -> distutils/install ->..
    # setuptools/bdist_egg -> distutils/install_lib -> build_py
    # setuptools/install -> bdist_egg ->..
    # setuptools/develop -> ?
    # we override different "build_py" commands for both environments
    if "setuptools" in sys.modules:
        from setuptools.command.build_py import build_py as _build_py
    else:
        from distutils.command.build_py import build_py as _build_py

    class cmd_build_py(_build_py):
        # After the normal build, rewrite the copied _version.py with the
        # statically-computed version dict.
        def run(self):
            root = get_root()
            cfg = get_config_from_root(root)
            versions = get_versions()
            _build_py.run(self)
            # now locate _version.py in the new build/ directory and replace
            # it with an updated value
            if cfg.versionfile_build:
                target_versionfile = os.path.join(self.build_lib,
                                                  cfg.versionfile_build)
                print("UPDATING %s" % target_versionfile)
                write_to_version_file(target_versionfile, versions)
    cmds["build_py"] = cmd_build_py

    if "cx_Freeze" in sys.modules:  # cx_freeze enabled?
        from cx_Freeze.dist import build_exe as _build_exe

        class cmd_build_exe(_build_exe):
            # Temporarily replaces the source _version.py with a static
            # version file for freezing, then restores the template.
            def run(self):
                root = get_root()
                cfg = get_config_from_root(root)
                versions = get_versions()
                target_versionfile = cfg.versionfile_source
                print("UPDATING %s" % target_versionfile)
                write_to_version_file(target_versionfile, versions)
                _build_exe.run(self)
                os.unlink(target_versionfile)
                with open(cfg.versionfile_source, "w") as f:
                    LONG = LONG_VERSION_PY[cfg.VCS]
                    f.write(LONG %
                            {"DOLLAR": "$",
                             "STYLE": cfg.style,
                             "TAG_PREFIX": cfg.tag_prefix,
                             "PARENTDIR_PREFIX": cfg.parentdir_prefix,
                             "VERSIONFILE_SOURCE": cfg.versionfile_source,
                             })
        cmds["build_exe"] = cmd_build_exe
        # NOTE(review): build_exe rewrites the source versionfile itself,
        # which presumably makes the build_py override redundant here.
        del cmds["build_py"]

    # we override different "sdist" commands for both environments
    if "setuptools" in sys.modules:
        from setuptools.command.sdist import sdist as _sdist
    else:
        from distutils.command.sdist import sdist as _sdist

    class cmd_sdist(_sdist):
        # Pins the computed version into the sdist's metadata and into the
        # _version.py copied into the release tree.
        def run(self):
            versions = get_versions()
            self._versioneer_generated_versions = versions
            # unless we update this, the command will keep using the old
            # version
            self.distribution.metadata.version = versions["version"]
            return _sdist.run(self)

        def make_release_tree(self, base_dir, files):
            root = get_root()
            cfg = get_config_from_root(root)
            _sdist.make_release_tree(self, base_dir, files)
            # now locate _version.py in the new base_dir directory
            # (remembering that it may be a hardlink) and replace it with an
            # updated value
            target_versionfile = os.path.join(base_dir, cfg.versionfile_source)
            print("UPDATING %s" % target_versionfile)
            write_to_version_file(target_versionfile,
                                  self._versioneer_generated_versions)
    cmds["sdist"] = cmd_sdist
    return cmds
CONFIG_ERROR = """
setup.cfg is missing the necessary Versioneer configuration. You need
a section like:
[versioneer]
VCS = git
style = pep440
versionfile_source = src/myproject/_version.py
versionfile_build = myproject/_version.py
tag_prefix =
parentdir_prefix = myproject-
You will also need to edit your setup.py to use the results:
import versioneer
setup(version=versioneer.get_version(),
cmdclass=versioneer.get_cmdclass(), ...)
Please read the docstring in ./versioneer.py for configuration instructions,
edit setup.cfg, and re-run the installer or 'python versioneer.py setup'.
"""
SAMPLE_CONFIG = """
# See the docstring in versioneer.py for instructions. Note that you must
# re-run 'versioneer.py setup' after changing this section, and commit the
# resulting files.
[versioneer]
#VCS = git
#style = pep440
#versionfile_source =
#versionfile_build =
#tag_prefix =
#parentdir_prefix =
"""
INIT_PY_SNIPPET = """
from ._version import get_versions
__version__ = get_versions()['version']
del get_versions
"""
def do_setup():
    """Main VCS-independent setup function for installing Versioneer.

    Generates _version.py, wires __init__.py and MANIFEST.in, and applies
    VCS-specific install steps.  Returns 0 on success, 1 on bad config.
    """
    root = get_root()
    try:
        cfg = get_config_from_root(root)
    except (EnvironmentError, configparser.NoSectionError,
            configparser.NoOptionError) as e:
        # Only append the sample config when setup.cfg is unreadable or has
        # no [versioneer] section at all (NoOptionError means a section
        # exists but is incomplete, so nothing is appended).
        if isinstance(e, (EnvironmentError, configparser.NoSectionError)):
            print("Adding sample versioneer config to setup.cfg",
                  file=sys.stderr)
            with open(os.path.join(root, "setup.cfg"), "a") as f:
                f.write(SAMPLE_CONFIG)
        print(CONFIG_ERROR, file=sys.stderr)
        return 1

    print(" creating %s" % cfg.versionfile_source)
    with open(cfg.versionfile_source, "w") as f:
        # Fill the VCS-specific _version.py template with this project's
        # configuration.
        LONG = LONG_VERSION_PY[cfg.VCS]
        f.write(LONG % {"DOLLAR": "$",
                        "STYLE": cfg.style,
                        "TAG_PREFIX": cfg.tag_prefix,
                        "PARENTDIR_PREFIX": cfg.parentdir_prefix,
                        "VERSIONFILE_SOURCE": cfg.versionfile_source,
                        })
    ipy = os.path.join(os.path.dirname(cfg.versionfile_source),
                       "__init__.py")
    if os.path.exists(ipy):
        try:
            with open(ipy, "r") as f:
                old = f.read()
        except EnvironmentError:
            old = ""
        # Append the __version__ wiring only once.
        if INIT_PY_SNIPPET not in old:
            print(" appending to %s" % ipy)
            with open(ipy, "a") as f:
                f.write(INIT_PY_SNIPPET)
        else:
            print(" %s unmodified" % ipy)
    else:
        print(" %s doesn't exist, ok" % ipy)
        ipy = None

    # Make sure both the top-level "versioneer.py" and versionfile_source
    # (PKG/_version.py, used by runtime code) are in MANIFEST.in, so
    # they'll be copied into source distributions. Pip won't be able to
    # install the package without this.
    manifest_in = os.path.join(root, "MANIFEST.in")
    simple_includes = set()
    try:
        with open(manifest_in, "r") as f:
            for line in f:
                if line.startswith("include "):
                    for include in line.split()[1:]:
                        simple_includes.add(include)
    except EnvironmentError:
        pass
    # That doesn't cover everything MANIFEST.in can do
    # (http://docs.python.org/2/distutils/sourcedist.html#commands), so
    # it might give some false negatives. Appending redundant 'include'
    # lines is safe, though.
    if "versioneer.py" not in simple_includes:
        print(" appending 'versioneer.py' to MANIFEST.in")
        with open(manifest_in, "a") as f:
            f.write("include versioneer.py\n")
    else:
        print(" 'versioneer.py' already in MANIFEST.in")
    if cfg.versionfile_source not in simple_includes:
        print(" appending versionfile_source ('%s') to MANIFEST.in" %
              cfg.versionfile_source)
        with open(manifest_in, "a") as f:
            f.write("include %s\n" % cfg.versionfile_source)
    else:
        print(" versionfile_source already in MANIFEST.in")

    # Make VCS-specific changes. For git, this means creating/changing
    # .gitattributes to mark _version.py for export-time keyword
    # substitution.
    do_vcs_install(manifest_in, cfg.versionfile_source, ipy)
    return 0
def scan_setup_py():
    """Validate the contents of setup.py against Versioneer's expectations.

    Prints advice for anything that looks wrong and returns the number of
    problems found (0 means setup.py looks correct).
    """
    found = set()
    setters = False
    errors = 0
    with open("setup.py", "r") as fobj:
        for line in fobj:
            if "import versioneer" in line:
                found.add("import")
            if "versioneer.get_cmdclass()" in line:
                found.add("cmdclass")
            if "versioneer.get_version()" in line:
                found.add("get_version")
            # Old-style module-attribute configuration is obsolete.
            if ("versioneer.VCS" in line
                    or "versioneer.versionfile_source" in line):
                setters = True
    if len(found) != 3:
        print("")
        print("Your setup.py appears to be missing some important items")
        print("(but I might be wrong). Please make sure it has something")
        print("roughly like the following:")
        print("")
        print(" import versioneer")
        print(" setup( version=versioneer.get_version(),")
        print(" cmdclass=versioneer.get_cmdclass(), ...)")
        print("")
        errors += 1
    if setters:
        print("You should remove lines like 'versioneer.VCS = ' and")
        print("'versioneer.versionfile_source = ' . This configuration")
        print("now lives in setup.cfg, and should be removed from setup.py")
        print("")
        errors += 1
    return errors
# Command-line entry point: 'python versioneer.py setup' installs Versioneer
# into the current project and then sanity-checks setup.py.
if __name__ == "__main__":
    cmd = sys.argv[1]
    if cmd == "setup":
        errors = do_setup()
        errors += scan_setup_py()
        if errors:
            # Non-zero exit so CI / scripts notice the misconfiguration.
            sys.exit(1)
|
lbdreyer/nc-time-axis
|
versioneer.py
|
Python
|
bsd-3-clause
| 72,477
|
[
"Brian"
] |
d2ac4c3371c86ea58ebbfb3371c9bf54bc72b5314b5c2103c459ebb813e369ea
|
# -*- coding: utf-8 -*-
# Author: Vincent Dubourg <vincent.dubourg@gmail.com>
# (mostly translation, see implementation details)
# Jan Hendrik Metzen <jhm@informatik.uni-bremen.de>
# (converting to a object-oriented, more modular design)
# Licence: BSD 3 clause
"""
The built-in correlation models submodule for the gaussian_process module.
"""
from abc import ABCMeta, abstractmethod
import numpy as np
from sklearn.utils import check_array
from sklearn.externals.six import with_metaclass
# Double-precision machine epsilon; scales the default regularization
# nugget in StationaryCorrelation.fit.
MACHINE_EPSILON = np.finfo(np.double).eps
def l1_cross_differences(X):
    """
    Computes the nonzero componentwise differences between the vectors
    in X.

    Parameters
    ----------
    X: array_like
        An array with shape (n_samples, n_features)

    Returns
    -------
    D: array with shape (n_samples * (n_samples - 1) / 2, n_features)
        The array of componentwise differences.

    ij: arrays with shape (n_samples * (n_samples - 1) / 2, 2)
        The indices i and j of the vectors in X associated to the cross-
        distances in D: D[k] = np.abs(X[ij[k, 0]] - Y[ij[k, 1]]).
    """
    X = check_array(X)
    n_samples, n_features = X.shape
    n_nonzero_cross_diff = n_samples * (n_samples - 1) // 2
    # Use builtin int, not np.int: the np.int alias was deprecated in
    # NumPy 1.20 and removed in NumPy 1.24.
    ij = np.zeros((n_nonzero_cross_diff, 2), dtype=int)
    D = np.zeros((n_nonzero_cross_diff, n_features))
    ll_1 = 0
    # Fill row-blocks: block k holds the differences of X[k] against all
    # later samples, so only the upper triangle is materialized.
    for k in range(n_samples - 1):
        ll_0 = ll_1
        ll_1 = ll_0 + n_samples - k - 1
        ij[ll_0:ll_1, 0] = k
        ij[ll_0:ll_1, 1] = np.arange(k + 1, n_samples)
        D[ll_0:ll_1] = X[k] - X[(k + 1):n_samples]
    # ij was created as an integer array; no astype(np.int) round-trip needed.
    return D, ij
class StationaryCorrelation(with_metaclass(ABCMeta, object)):
    """ Base-class for stationary correlation models for Gaussian Processes.

    Stationary correlation models dependent only on the relative distance
    and not on the absolute positions of the respective datapoints. We can
    thus work internally solely on these distances.
    """

    def __init__(self):
        pass

    def fit(self, X, nugget=10. * MACHINE_EPSILON):
        """ Fits the correlation model for training data X

        Parameters
        ----------
        X : array_like, shape=(n_samples, n_features)
            An array of training datapoints at which observations were made,
            i.e., where the outputs y are known
        nugget : double or ndarray, optional
            The Gaussian Process nugget parameter
            The nugget is added to the diagonal of the assumed training
            covariance; in this way it acts as a Tikhonov regularization in
            the problem. In the special case of the squared exponential
            correlation function, the nugget mathematically represents the
            variance of the input values. Default assumes a nugget close to
            machine precision for the sake of robustness
            (nugget = 10. * MACHINE_EPSILON).
        """
        self.X = X
        self.nugget = nugget
        self.n_samples = X.shape[0]

        # Calculate array with shape (n_eval, n_features) giving the
        # componentwise distances between locations x and x' at which the
        # correlation model should be evaluated.
        self.D, self.ij = l1_cross_differences(self.X)
        if (np.min(np.sum(self.D, axis=1)) == 0.
                and not isinstance(self, PureNugget)):
            raise Exception("Multiple input features cannot have the same"
                            " value.")

    def __call__(self, theta, X=None):
        """ Compute correlation for given correlation parameter(s) theta.

        Parameters
        ----------
        theta : array_like
            An array with giving the autocorrelation parameter(s).
            Dimensionality depends on the specific correlation model; often
            shape (1,) corresponds to an isotropic correlation model and shape
            (n_features,) to a anisotropic one.

        X : array_like, shape(n_eval, n_features)
            An array containing the n_eval query points whose correlation with
            the training datapoints shall be computed. If None, autocorrelation
            of the training datapoints is computed instead.

        Returns
        -------
        r : array_like, shape=(n_eval, n_samples) if X != None
                              (n_samples, n_samples) if X == None
            An array containing the values of the correlation model.
        """
        # Builtin float, not np.float: the np.float alias was deprecated in
        # NumPy 1.20 and removed in NumPy 1.24.
        theta = np.asarray(theta, dtype=float)
        if X is not None:
            # Get pairwise componentwise L1-differences to the input training
            # set
            d = X[:, np.newaxis, :] - self.X[np.newaxis, :, :]
            d = d.reshape((-1, X.shape[1]))
        else:
            # No external datapoints given; auto-correlation of training set
            # is used instead
            d = self.D

        if d.ndim > 1:
            n_features = d.shape[1]
        else:
            n_features = 1

        # Compute the correlation for the respective correlation model (handled
        # by subclass)
        r = self._compute_corr(theta, d, n_features)

        if X is not None:
            # Convert to 2d matrix
            return r.reshape(-1, self.n_samples)
        else:
            # Auto-correlation computed only for upper triangular part of
            # matrix. Fill diagonal with 1+nugget and the lower triangular
            # by exploiting symmetry of matrix
            R = np.eye(self.n_samples) * (1. + self.nugget)
            R[self.ij[:, 0], self.ij[:, 1]] = r
            R[self.ij[:, 1], self.ij[:, 0]] = r
            return R

    def log_prior(self, theta):
        """ Returns the (log) prior probability of parameters theta.

        The prior is assumed to be uniform over the parameter space.
        NOTE: The returned quantity is an improper prior as its integral over
        the parameter space is not equal to 1.

        Parameters
        ----------
        theta : array_like, shape=(1,) or (n_features,)
            An array with shape 1 (isotropic) or n_features (anisotropic)
            giving the autocorrelation parameter(s).

        Returns
        -------
        log_p : float
            The (log) prior probability of parameters theta. An improper
            probability.
        """
        return 0

    @abstractmethod
    def _compute_corr(self, theta, d, n_features):
        """ Correlation for given pairwise, component-wise L1-differences.

        Parameters
        ----------
        theta : array_like, shape=(1,) or (n_features,)
            An array with shape 1 (isotropic) or n_features (anisotropic)
            giving the autocorrelation parameter(s).

        d : array_like, shape=(n_eval, n_features)
            An array with the pairwise, component-wise L1-differences of x
            and x' at which the correlation model should be evaluated.

        Returns
        -------
        r : array_like, shape=(n_eval, )
            An array containing the values of the autocorrelation model.
        """
class AbsoluteExponential(StationaryCorrelation):
    """ Absolute exponential autocorrelation model.

    Absolute exponential autocorrelation model (Ornstein-Uhlenbeck stochastic
    process)::

                                          n
        theta, d --> r(theta, d) = exp(  sum  - theta_i * d_i )
                                        i = 1
    """

    def _compute_corr(self, theta, d, n_features):
        """ Correlation for given pairwise, component-wise L1-differences.

        Parameters
        ----------
        theta : array_like, shape=(1,) or (n_features,)
            An array with shape 1 (isotropic) or n_features (anisotropic)
            giving the autocorrelation parameter(s).

        d : array_like, shape=(n_eval, n_features)
            An array with the pairwise, component-wise L1-differences of x
            and x' at which the correlation model should be evaluated.

        Returns
        -------
        r : array_like, shape=(n_eval, )
            An array containing the values of the autocorrelation model.
        """
        # Builtin float, not np.float (removed in NumPy 1.24).
        d = np.asarray(d, dtype=float)
        d = np.abs(d)

        if theta.size == 1:
            # Isotropic: one length-scale shared by all features.
            return np.exp(- theta[0] * np.sum(d, axis=1))
        elif theta.size != n_features:
            raise ValueError("Length of theta must be 1 or %s" % n_features)
        else:
            # Anisotropic: one length-scale per feature.
            return np.exp(- np.sum(theta.reshape(1, n_features) * d, axis=1))
class SquaredExponential(StationaryCorrelation):
    """ Squared exponential correlation model.

    Squared exponential correlation model (Radial Basis Function).
    (Infinitely differentiable stochastic process, very smooth)::

                                          n
        theta, d --> r(theta, d) = exp(  sum  - theta_i * (d_i)^2 )
                                        i = 1
    """

    def _compute_corr(self, theta, d, n_features):
        """ Correlation for given pairwise, component-wise L1-differences.

        Parameters
        ----------
        theta : array_like, shape=(1,) [isotropic]
                            (n_features,) [anisotropic] or
                            (k*n_features,) [factor analysis distance]
            An array encoding the autocorrelation parameter(s).

        d : array_like, shape=(n_eval, n_features)
            An array with the pairwise, component-wise L1-differences of x
            and x' at which the correlation model should be evaluated.

        Returns
        -------
        r : array_like, shape=(n_eval, )
            An array containing the values of the autocorrelation model.
        """
        # Builtin float, not np.float (removed in NumPy 1.24).
        d = np.asarray(d, dtype=float)
        return np.exp(-self._quadratic_activation(theta, d, n_features))

    def _quadratic_activation(self, theta, d, n_features):
        """ Utility function for computing quadratic activation.

        Computes the activation activ=d.T * M * d where M is a covariance
        matrix of size n*n. The hyperparameters theta specify
         * an isotropic covariance matrix, i.e., M = theta * I with I being the
           identity, if theta has shape 1
         * an automatic relevance determination model if theta has shape n,
           in which the characteristic length scales of each dimension are
           learned separately: M = diag(theta)
         * a factor analysis distance model if theta has shape k*n for k> 1,
           in which a low-rank approximation of the full matrix M is learned.
           This low-rank approximation approximates the covariance matrix as
           low-rank matrix plus a diagonal matrix:
           M = Lambda * Lambda.T + diag(l),
           where Lambda is a n*(k-1) matrix and l specifies the diagonal
           matrix.

        Parameters
        ----------
        theta : array_like, shape=(1,) [isotropic]
                            (n_features,) [anisotropic] or
                            (k*n_features,) [factor analysis distance]
            An array encoding the autocorrelation parameter(s). In the
            case of the factor analysis distance, M is approximated by
            M = Lambda * Lambda.T + diag(l), where l is encoded in the last n
            entries of theta and Lambda is encoded row-wise in the first
            entries of theta. Note that Lambda may contain negative entries
            while theta is strictly positive; because of this, the entries of
            Lambda are set to the logarithm with basis 10 of the corresponding
            entries in theta.

        d : array_like, shape=(n_eval, n_features)
            An array giving the componentwise differences of x and x' at
            which the quadratic activation should be evaluated.

        Returns
        -------
        a : array_like, shape=(n_eval, )
            An array with the activation values for the respective
            componentwise differences d.
        """
        if theta.size == 1:  # case where M is isotropic: M = diag(theta[0])
            return theta[0] * np.sum(d ** 2, axis=1)
        elif theta.size == n_features:  # anisotropic but diagonal case (ARD)
            return np.sum(theta.reshape(1, n_features) * d ** 2, axis=1)
        elif theta.size % n_features == 0:
            # Factor analysis case: M = lambda*lambda.T + diag(l)
            theta = theta.reshape((1, theta.size))
            M = np.diag(theta[0, :n_features])  # the diagonal matrix part l
            # The low-rank matrix contribution which allows accounting for
            # correlations in the feature dimensions
            # NOTE: these components of theta are passed through a log-function
            # to allow negative values in Lambda
            Lambda = np.log10(theta[0, n_features:].reshape((n_features, -1)))
            M += Lambda.dot(Lambda.T)
            return np.sum(d.dot(M) * d, -1)
        else:
            raise ValueError("Length of theta must be 1 or a multiple of %s."
                             % n_features)
class Matern_1_5(SquaredExponential):
    """ Matern correlation model for nu=1.5.

    Sample paths are once differentiable. Given by::

        r(theta, dx) = (1 + np.sqrt(3*activ))*exp(-np.sqrt(3*activ))

    where activ=dx.T * M * dx and M is a covariance matrix of size n*n.

    See Rasmussen and Williams 2006, pp84 for details regarding the different
    variants of the Matern kernel.
    """

    def _compute_corr(self, theta, d, n_features):
        """ Correlation for given pairwise, component-wise L1-differences.

        Parameters
        ----------
        theta : array_like, shape=(1,) [isotropic]
                            (n_features,) [anisotropic] or
                            (k*n_features,) [factor analysis distance]
            An array encoding the autocorrelation parameter(s).

        d : array_like, shape=(n_eval, n_features)
            An array with the pairwise, component-wise L1-differences of x
            and x' at which the correlation model should be evaluated.

        Returns
        -------
        r : array_like, shape=(n_eval, )
            An array containing the values of the autocorrelation model.
        """
        # Builtin float, not np.float (removed in NumPy 1.24).
        d = np.asarray(d, dtype=float)
        activ = self._quadratic_activation(theta, d, n_features)
        tmp = np.sqrt(3 * activ)  # temporary variable for preventing
                                  # recomputation
        return (1 + tmp) * np.exp(-tmp)
class Matern_2_5(SquaredExponential):
    """ Matern correlation model for nu=2.5.

    Sample paths are twice differentiable. Given by::

        r(theta, dx) = (1 + np.sqrt(5*activ) + 5/3*activ)*exp(-np.sqrt(5*activ))

    where activ=dx.T * M * dx and M is a covariance matrix of size n*n.

    See Rasmussen and Williams 2006, pp84 for details regarding the different
    variants of the Matern kernel.
    """

    def _compute_corr(self, theta, d, n_features):
        """ Correlation for given pairwise, component-wise L1-differences.

        Parameters
        ----------
        theta : array_like, shape=(1,) [isotropic]
                            (n_features,) [anisotropic] or
                            (k*n_features,) [factor analysis distance]
            An array encoding the autocorrelation parameter(s).

        d : array_like, shape=(n_eval, n_features)
            An array with the pairwise, component-wise L1-differences of x
            and x' at which the correlation model should be evaluated.

        Returns
        -------
        r : array_like, shape=(n_eval, )
            An array containing the values of the autocorrelation model.
        """
        # Builtin float, not np.float (removed in NumPy 1.24).
        d = np.asarray(d, dtype=float)
        activ = self._quadratic_activation(theta, d, n_features)
        tmp = np.sqrt(5 * activ)  # temporary variable for preventing
                                  # recomputation
        return (1 + tmp + 5.0 / 3.0 * activ) * np.exp(-tmp)
class GeneralizedExponential(StationaryCorrelation):
    """ Generalized exponential correlation model.

    Generalized exponential correlation model.
    (Useful when one does not know the smoothness of the function to be
    predicted.)::

                                          n
        theta, d --> r(theta, d) = exp(  sum  - theta_i * |d_i|^p )
                                        i = 1
    """

    def _compute_corr(self, theta, d, n_features):
        """ Correlation for given pairwise, component-wise L1-differences.

        Parameters
        ----------
        theta : array_like, shape=(1+1,) or (n_features+1,)
            An array with shape 1+1 (isotropic) or n_features+1 (anisotropic)
            giving the autocorrelation parameter(s) (theta, p).

        d : array_like, shape=(n_eval, n_features)
            An array with the pairwise, component-wise L1-differences of x
            and x' at which the correlation model should be evaluated.

        Returns
        -------
        r : array_like, shape=(n_eval, )
            An array containing the values of the autocorrelation model.
        """
        # Builtin float, not np.float (removed in NumPy 1.24).
        d = np.asarray(d, dtype=float)

        lth = theta.size
        if n_features > 1 and lth == 2:
            theta = np.hstack([np.repeat(theta[0], n_features), theta[1]])
            # BUGFIX: np.hstack returns a 1-D array, but the 2-D indexing
            # below (theta[:, 0:-1], theta[:, -1]) requires shape
            # (1, n_features + 1); without this reshape the branch raised
            # IndexError.
            theta = theta.reshape(1, n_features + 1)
        elif lth != n_features + 1:
            raise Exception("Length of theta must be 2 or %s"
                            % (n_features + 1))
        else:
            theta = theta.reshape(1, lth)

        # First lth-1 entries are per-feature scales; the last entry is the
        # shared exponent p.
        td = theta[:, 0:-1].reshape(1, n_features) \
            * np.abs(d) ** theta[:, -1]
        return np.exp(- np.sum(td, 1))
class PureNugget(StationaryCorrelation):
    """ Spatial independence correlation model (pure nugget).

    Useful when one wants to solve an ordinary least squares problem!::

                                            n
        theta, d --> r(theta, dx) = 1 if   sum |d_i| == 0
                                           i = 1
                                    0 otherwise
    """

    def _compute_corr(self, theta, d, n_features):
        """ Correlation for given pairwise, component-wise L1-differences.

        Parameters
        ----------
        theta : array_like
            None.  Present only to satisfy the common interface; this model
            has no free parameters.
        d : array_like, shape=(n_eval, n_features)
            An array with the pairwise, component-wise L1-differences of x
            and x' at which the correlation model should be evaluated.

        Returns
        -------
        r : array_like
            An array with shape (n_eval, ) with the values of the
            autocorrelation model.
        """
        # np.float was removed in NumPy 1.24; use the builtin float.
        d = np.asarray(d, dtype=float)
        n_eval = d.shape[0]
        r = np.zeros(n_eval)
        # Correlation is 1 only where the two points coincide exactly in
        # every feature, 0 everywhere else.
        r[np.all(d == 0., axis=1)] = 1.
        return r
class Cubic(StationaryCorrelation):
    """ Cubic correlation model.

    Cubic correlation model::

        theta, d --> r(theta, d) =
            n
           prod max(0, 1 - 3(theta_j*d_ij)^2 + 2(theta_j*d_ij)^3),  i = 1,...,m
          j = 1
    """

    def _compute_corr(self, theta, d, n_features):
        """ Correlation for given pairwise, component-wise L1-differences.

        Parameters
        ----------
        theta : array_like, shape=(1,) or (n_features,)
            An array with shape 1 (isotropic) or n_features (anisotropic)
            giving the autocorrelation parameter(s).
        d : array_like, shape=(n_eval, n_features)
            An array with the pairwise, component-wise L1-differences of x
            and x' at which the correlation model should be evaluated.

        Returns
        -------
        r : array_like, shape=(n_eval, )
            An array containing the values of the autocorrelation model.
        """
        # np.float was removed in NumPy 1.24; use the builtin float.
        d = np.asarray(d, dtype=float)

        lth = theta.size
        if lth == 1:
            # Isotropic: a single theta scales every feature.
            td = np.abs(d) * theta
        elif lth != n_features:
            raise Exception("Length of theta must be 1 or " + str(n_features))
        else:
            td = np.abs(d) * theta.reshape(1, n_features)

        # Clamp at 1 so the cubic term below evaluates to 0 beyond the
        # support (equivalent to the max(0, ...) in the formula).
        td[td > 1.] = 1.
        # 1 - td^2 * (3 - 2 td)  ==  1 - 3 td^2 + 2 td^3
        ss = 1. - td ** 2. * (3. - 2. * td)
        return np.prod(ss, 1)
class Linear(StationaryCorrelation):
    """ Linear correlation model.

    Linear correlation model::

        theta, d --> r(theta, d) =
              n
            prod max(0, 1 - theta_j*d_ij),  i = 1,...,m
            j = 1
    """

    def _compute_corr(self, theta, d, n_features):
        """ Correlation for given pairwise, component-wise L1-differences.

        Parameters
        ----------
        theta : array_like, shape=(1,) or (n_features,)
            An array with shape 1 (isotropic) or n_features (anisotropic)
            giving the autocorrelation parameter(s).
        d : array_like, shape=(n_eval, n_features)
            An array with the pairwise, component-wise L1-differences of x
            and x' at which the correlation model should be evaluated.

        Returns
        -------
        r : array_like, shape=(n_eval, )
            An array containing the values of the autocorrelation model.
        """
        # np.float was removed in NumPy 1.24; use the builtin float.
        d = np.asarray(d, dtype=float)

        lth = theta.size
        if lth == 1:
            # Isotropic: a single theta scales every feature.
            td = np.abs(d) * theta
        elif lth != n_features:
            raise Exception("Length of theta must be 1 or %s" % n_features)
        else:
            td = np.abs(d) * theta.reshape(1, n_features)

        # Clamp at 1 so (1 - td) never goes negative, which implements the
        # max(0, ...) in the formula once the product is taken.
        td[td > 1.] = 1.
        ss = 1. - td
        return np.prod(ss, 1)
|
jmetzen/skgp
|
skgp/correlation_models/stationary.py
|
Python
|
bsd-3-clause
| 21,469
|
[
"Gaussian"
] |
0a3c490339b97cdfd030e23b4712f7a631c718713df2536ba33453e8603f803e
|
##############################################################################
# Copyright (c) 2013-2017, Lawrence Livermore National Security, LLC.
# Produced at the Lawrence Livermore National Laboratory.
#
# This file is part of Spack.
# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
# LLNL-CODE-647188
#
# For details, see https://github.com/spack/spack
# Please also see the NOTICE and LICENSE files for our notice and the LGPL.
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License (as
# published by the Free Software Foundation) version 2.1, February 1999.
#
# This program is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
# conditions of the GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
from spack import *
class PyNetcdf4(PythonPackage):
    """Python interface to the netCDF Library."""

    homepage = "https://github.com/Unidata/netcdf4-python"
    url = "https://pypi.io/packages/source/n/netCDF4/netCDF4-1.2.7.tar.gz"

    version('1.2.7', '77b357d78f9658dd973dee901f6d86f8')
    version('1.2.3.1', '24fc0101c7c441709c230e76af611d53')

    # Build-time only tools.
    depends_on('py-setuptools', type='build')
    depends_on('py-cython@0.19:', type='build')
    # Needed both to build the extension and at runtime.
    depends_on('py-numpy@1.7:', type=('build', 'run'))
    # Native libraries the extension links against.
    depends_on('netcdf')
    depends_on('hdf5@1.8.0:')
|
skosukhin/spack
|
var/spack/repos/builtin/packages/py-netcdf4/package.py
|
Python
|
lgpl-2.1
| 1,807
|
[
"NetCDF"
] |
6ccbe3b8324d5977cf80fbbbc29b05765efcb5d03a3f3ebe066524c6ecb1bdcf
|
###############################
# This file is part of PyLaDa.
#
# Copyright (C) 2013 National Renewable Energy Lab
#
# PyLaDa is a high throughput computational platform for Physics. It aims to make it easier to submit
# large numbers of jobs on supercomputers. It provides a python interface to physical input, such as
# crystal structures, as well as to a number of DFT (VASP, CRYSTAL) and atomic potential programs. It
# is able to organise and launch computational jobs on PBS and SLURM.
#
# PyLaDa is free software: you can redistribute it and/or modify it under the terms of the GNU General
# Public License as published by the Free Software Foundation, either version 3 of the License, or (at
# your option) any later version.
#
# PyLaDa is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even
# the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General
# Public License for more details.
#
# You should have received a copy of the GNU General Public License along with PyLaDa. If not, see
# <http://www.gnu.org/licenses/>.
###############################
""" Mixin classes for extraction objects. """
__docformat__ = 'restructuredtext en'
from ...tools.extract import search_factory
OutcarSearchMixin = search_factory('OutcarSearchMixin', 'OUTCAR', __name__)
class IOMixin(OutcarSearchMixin):
    """ A mixin base class which controls file IO.

    Defines special property with file-like behaviors.
    Makes it easier to change the behavior of the extraction class.
    """

    def __init__(self, directory=None, OUTCAR=None, FUNCCAR=None, CONTCAR=None):
        """ Initializes the extraction class.

        :Parameters:
          directory : str or None
            path to the directory where the VASP output is located. If none,
            will use current working directory. Can also be the path to the
            OUTCAR file itself.
          OUTCAR : str or None
            If given, this name will be used, rather than files.OUTCAR.
          FUNCCAR : str or None
            Accepted but not stored by this class -- presumably consumed by
            subclasses; TODO confirm against callers.
          CONTCAR : str or None
            If given, this name will be used, rather than files.CONTCAR.
        """
        from .. import files
        object.__init__(self)
        self.OUTCAR = OUTCAR if OUTCAR is not None else files.OUTCAR
        """ Filename of the OUTCAR file from VASP. """
        self.CONTCAR = CONTCAR if CONTCAR is not None else files.CONTCAR
        """ Filename of the CONTCAR file from VASP. """
        OutcarSearchMixin.__init__(self)

    def __contcar__(self):
        """ Returns an open file handle to the CONTCAR file.

        (Original docstring said FUNCCAR; this method reads
        ``self.CONTCAR``.)

        :raise IOError: if the CONTCAR file does not exist.
        """
        from os.path import exists, join
        path = join(self.directory, self.CONTCAR)
        if not exists(path):
            raise IOError("Path {0} does not exist.\n".format(path))
        return open(path, 'r')

    @property
    def is_running(self):
        """ True if program is running on this functional.

        A file '.pylada_is_running' is created in the output folder when it is
        set-up to run CRYSTAL_. The same file is removed when CRYSTAL_ returns
        (more specifically, when the :py:class:`pylada.process.ProgramProcess`
        is polled). Hence, this file serves as a marker of those jobs which
        are currently running.
        """
        from os.path import join, exists
        is_run = exists(join(self.directory, '.pylada_is_running'))
        return is_run
|
pylada/pylada-light
|
src/pylada/vasp/extract/mixin.py
|
Python
|
gpl-3.0
| 3,547
|
[
"CRYSTAL",
"VASP"
] |
ea02425fddce7d2eb97ca3e8ed0df1f9198a3abc53ca3d78e38d7bf5758cc4a3
|
# -*- coding: utf-8 -*-
#*******************************************************************
# * File: merge.py
# * Description:
# * Author: HarshaRani
# * E-mail: hrani@ncbs.res.in
# ********************************************************************/
# **********************************************************************
#** This program is part of 'MOOSE', the
#** Messaging Object Oriented Simulation Environment,
#** also known as GENESIS 3 base code.
#** copyright (C) 2003-2017 Upinder S. Bhalla. and NCBS
#Created : Friday Dec 16 23:19:00 2016(+0530)
#Version
#Last-Updated: Thursday Jan 12 17:30:33 2017(+0530)
# By: Harsha
#**********************************************************************/
# This program is used to merge models
# -- Model B is merged to modelA
#Rules are
#-- In these models, merging happens at the group level (if groups exist)
import sys
import os
#from . import _moose as moose
import moose
import mtypes
from moose.chemUtil.chemConnectUtil import *
from moose.chemUtil.graphUtils import *
def mergeChemModel(A, B):
    """ Merge chemical model B into model A.

    A and B may be file paths (GENESIS kkit / cspace files) or existing moose
    paths.  Compartments, pools, functions, reactions and enzymes from B that
    are missing in A are copied over; name clashes with differing
    substrates/products are duplicated with a "_duplicated" suffix.  A summary
    of duplicated / not-yet-copied / dangling objects is printed at the end.
    """
    modelA, loadedA = loadModels(A)
    modelB, loadedB = loadModels(B)
    if not loadedA or not loadedB:
        # One of the two models failed to load; reset the other to the root
        # shell so nothing is merged.
        if not loadedA:
            modelB = moose.Shell('/')
        if not loadedB:
            modelA = moose.Shell('/')
    else:
        directory, bfname = os.path.split(B)
        global grpNotcopiedyet, poolListina
        poolListina = {}
        grpNotcopiedyet = []
        dictComptA = dict([(i.name, i) for i in moose.wildcardFind(modelA + '/##[ISA=ChemCompt]')])
        dictComptB = dict([(i.name, i) for i in moose.wildcardFind(modelB + '/##[ISA=ChemCompt]')])
        poolNotcopiedyet = []
        for key in list(dictComptB.keys()):
            if key not in dictComptA:
                # if compartment name from modelB does not exist in modelA, then copy
                copy = moose.copy(dictComptB[key], moose.element(modelA))
            else:
                # if compartment name from modelB exists in modelA and the
                # volume is not the same, force modelB's volume to modelA's
                if abs(dictComptA[key].volume - dictComptB[key].volume):
                    # hack for now
                    while (abs(dictComptA[key].volume - dictComptB[key].volume) != 0.0):
                        dictComptB[key].volume = float(dictComptA[key].volume)
            # re-scan modelA's compartments: the copy above may have added one
            dictComptA = dict([(i.name, i) for i in moose.wildcardFind(modelA + '/##[ISA=ChemCompt]')])
            # Merging pools
            poolMerge(dictComptA[key], dictComptB[key], poolNotcopiedyet)
        if grpNotcopiedyet:
            # objA = moose.element(comptA).parent.name
            # if not moose.exists(objA+'/'+comptB.name+'/'+bpath.name):
            #     print bpath
            #     moose.copy(bpath,moose.element(objA+'/'+comptB.name))
            pass
        comptAdict = comptList(modelA)
        poolListina = {}
        poolListina = updatePoolList(comptAdict)
        funcNotallowed = []
        R_Duplicated, R_Notcopiedyet, R_Daggling = [], [], []
        E_Duplicated, E_Notcopiedyet, E_Daggling = [], [], []
        for key in list(dictComptB.keys()):
            funcNotallowed = []
            funcNotallowed = functionMerge(dictComptA, dictComptB, key)
            poolListina = updatePoolList(dictComptA)
            R_Duplicated, R_Notcopiedyet, R_Daggling = reacMerge(dictComptA, dictComptB, key, poolListina)
            poolListina = updatePoolList(dictComptA)
            E_Duplicated, E_Notcopiedyet, E_Daggling = enzymeMerge(dictComptA, dictComptB, key, poolListina)
        print("\n Model is merged to %s" % modelA)
        if funcNotallowed:
            print("\nPool already connected to a function, this function is not to connect to same pool, since no two function are allowed to connect to same pool:")
            for fl in list(funcNotallowed):
                print("\t [Pool]: %s [Function]: %s \n" % (str(fl.parent.name), str(fl.path)))
        if R_Duplicated or E_Duplicated:
            print("Reaction / Enzyme are Duplicate"
                  "\n 1. The once whoes substrate / product names are different for a give reaction name "
                  "\n 2. its compartment to which it belongs to may be is different"
                  "\n Models have to decide to keep or delete these reaction/enzyme")
            # BUGFIX: this guard previously tested E_Duplicated, so the list of
            # duplicated reactions was printed (or skipped) based on the
            # enzyme list instead of the reaction list.
            if R_Duplicated:
                print("Reaction: ")
                for rd in list(R_Duplicated):
                    print("%s " % str(rd.name))
            if E_Duplicated:
                print("Enzyme:")
                for ed in list(E_Duplicated):
                    print("%s " % str(ed.name))
        if R_Notcopiedyet or E_Notcopiedyet:
            print("\nThese Reaction/Enzyme in model are not dagging but while copying the associated substrate or product is missing")
            if R_Notcopiedyet:
                print("Reaction: ")
                for rd in list(R_Notcopiedyet):
                    print("%s " % str(rd.name))
            if E_Notcopiedyet:
                print("Enzyme:")
                for ed in list(E_Notcopiedyet):
                    print("%s " % str(ed.name))
        if R_Daggling or E_Daggling:
            print("\n Daggling reaction/enzyme are not not allowed in moose, these are not merged")
            if R_Daggling:
                print("Reaction: ")
                for rd in list(R_Daggling):
                    print("%s " % str(rd.name))
            if E_Daggling:
                print("Enzyme:")
                for ed in list(E_Daggling):
                    print("%s " % str(ed.name))
def functionMerge(comptA, comptB, key):
    """ Merge moose Functions from compartment B into compartment A.

    Pool names act as the reference: a function under a pool in B is mapped
    to the same-named pool path in A by rewriting the model prefix (objB ->
    objA).  Returns the list of B-functions that could not be attached
    because the target pool already has a function on 'setN' (moose allows
    at most one) or because the inputs span several compartments.
    """
    funcNotallowed = []
    comptApath = moose.element(comptA[key]).path
    comptBpath = moose.element(comptB[key]).path
    # NOTE(review): despite its name, funcListina holds pools, and it is
    # never read below.
    funcListina = moose.wildcardFind(comptApath + '/##[ISA=PoolBase]')
    funcListinb = moose.wildcardFind(comptBpath + '/##[ISA=Function]')
    objA = moose.element(comptApath).parent.name
    objB = moose.element(comptBpath).parent.name
    # For function merging, pool name is taken as reference
    funcNotcopiedyet = []
    for fb in funcListinb:
        # Only functions that sit directly under a pool are considered.
        if fb.parent.className in ['ZombiePool', 'Pool', 'ZombieBufPool', 'BufPool']:
            objA = moose.element(comptApath).parent.name
            fbpath = fb.path
            # funcpath = fbpath[fbpath.find(findCompartment(fb).name)-1:len(fbpath)]
            funcparentB = fb.parent.path
            # Map B's path onto A by swapping the model prefix.
            funcpath = fbpath.replace(objB, objA)
            funcparentA = funcparentB.replace(objB, objA)
            # Does the target pool in A already have something driving setN?
            tt = moose.element(funcparentA).neighbors['setN']
            if tt:
                funcNotallowed.append(fb)
            else:
                if len(moose.element(fb.path + '/x').neighbors["input"]):
                    # inputB = moose.element(fb.path+'/x').neighbors["input"]
                    inputB = subprdList(moose.element(fb.path + '/x'), "input")
                    inputB_expr = fb.expr
                    if moose.exists(funcpath):
                        # A same-path function already exists in A: compare
                        # inputs and expression before deciding to duplicate.
                        # inputA = moose.element(objA+funcpath+'/x').neighbors["input"]
                        inputA = subprdList(moose.element(funcpath + '/x'), "input")
                        inputA_expr = moose.element(funcpath).expr
                        hassameExpr = False
                        if inputA_expr == inputB_expr:
                            hassameExpr = True
                        hassameLen, hassameS, hassameVols = same_len_name_vol(inputA, inputB)
                        if not all((hassameLen, hassameS, hassameVols, hassameExpr)):
                            fb.name = fb.name + '_duplicatedF'
                            createFunction(fb, inputB, objB, objA)
                    else:
                        # function does not exist in A, then copy
                        if len(inputB):
                            volinput = []
                            for inb in inputB:
                                volinput.append(findCompartment(moose.element(inb)).volume)
                            if len(set(volinput)) == 1:
                                # If all the inputs connected belong to one compartment then copy
                                createFunction(fb, inputB, objB, objA)
                            else:
                                # moose doesn't allow function input to come from different compartments
                                funcNotallowed.append(fb)
    return funcNotallowed
def createFunction(fb, inputB, objB, objA):
    """ Recreate function *fb* (from model B) inside model A.

    The A-side path is derived by swapping the model prefix and stripping
    '[0]' indices.  A new moose.Function is created, wired to its parent
    pool's 'setN', and each input pool from B is reconnected (its x-index
    appended to the expression).
    """
    fapath1 = fb.path.replace(objB, objA)
    fapath = fapath1.replace('[0]', '')
    if not moose.exists(fapath):
        # if fb.parent.className in ['CubeMesh','CyclMesh']:
        #     des = moose.Function('/'+objA+'/'+fb.parent.name+'/'+fb.name)
        # elif fb.parent.className in ['Pool','ZombiePool','BufPool','ZombieBufPool']:
        #     for akey in list(poolListina[findCompartment(fb).name]):
        #         if fb.parent.name == akey.name:
        #             des = moose.Function(akey.path+'/'+fb.name)
        des = moose.Function(fapath)
        # The function's output drives the parent pool's particle number.
        moose.connect(des, 'valueOut', moose.element(fapath).parent, 'setN')
        # NOTE(review): the loop below uses `des`, so it must only run when
        # the function was just created -- confirmed placement inside the if.
        for src in inputB:
            pool = ((src.path).replace(objB, objA)).replace('[0]', '')
            numVariables = des.numVars
            expr = ""
            # Append the next free variable x<i> to the expression; lstrip
            # removes the initial "0 +" placeholder moose puts in expr.
            expr = (des.expr + '+' + 'x' + str(numVariables))
            expr = expr.lstrip("0 +")
            expr = expr.replace(" ", "")
            des.expr = expr
            moose.connect(pool, 'nOut', des.x[numVariables], 'input')
    # if fb.expr != des.expr:
    #     print "Function ",des, " which is duplicated from modelB, expression is different, this is tricky in moose to know what those constants are connected to "
    #     print "ModelB ", fb, fb.expr, "\nModelA ",des, des.expr
def comptList(modelpath):
    """Return a dict mapping compartment name -> compartment element for
    every ChemCompt found under *modelpath*."""
    compartments = moose.wildcardFind(modelpath + '/##[ISA=ChemCompt]')
    return {compt.name: compt for compt in compartments}
def loadModels(filename):
    """ load models into moose if file, if moosepath itself it passes back the path and
    delete solver if exist

    Returns (modelpath, loaded): the moose path of the model and whether a
    model was actually loaded/found.  Only kkit and cspace files are
    supported; sbml is a stub.
    """
    modelpath = '/'
    loaded = False
    if os.path.isfile(filename):
        # Derive the moose path from the file name between the last '/' and
        # the last '.'.  NOTE(review): a relative name with no '/' would make
        # rfind return -1 and yield an odd path -- confirm callers always
        # pass absolute paths.
        modelpath = filename[filename.rfind('/'): filename.rfind('.')]
        ext = os.path.splitext(filename)[1]
        filename = filename.strip()
        modeltype = mtypes.getType(filename)
        subtype = mtypes.getSubtype(filename, modeltype)
        if subtype == 'kkit' or modeltype == "cspace":
            moose.loadModel(filename, modelpath)
            loaded = True
        elif subtype == 'sbml':
            # moose.ReadSBML()
            pass
        else:
            print("This file is not supported for mergering")
            modelpath = moose.Shell('/')
    elif moose.exists(filename):
        # Not a file: treat the argument as an existing moose path.
        modelpath = filename
        loaded = True
    ## default is 'ee' solver while loading the model using moose.loadModel,
    ## yet deleteSolver is called just to be assured
    if loaded:
        deleteSolver(modelpath)
    return modelpath, loaded
def deleteSolver(modelRoot):
    """Delete the stoich (and its ksolve) object under every chemical
    compartment of *modelRoot*, reverting the model to the default solver."""
    for compartment in moose.wildcardFind(modelRoot + '/##[ISA=ChemCompt]'):
        stoich_path = compartment.path + '/stoich'
        if not moose.exists(stoich_path):
            continue
        stoich = moose.element(stoich_path)
        # Grab the ksolve handle before deleting the stoich that owns it.
        ksolve = stoich.ksolve
        moose.delete(stoich)
        if moose.exists(ksolve.path):
            moose.delete(ksolve)
def poolMerge(comptA, comptB, poolNotcopiedyet):
    """ Copy pools (and their Neutral group containers) from compartment B
    into compartment A.

    Groups are matched by path (model prefix swapped objB -> objA); a name
    clash with a different className gets a '_grp' suffix.  Pools that could
    not be copied yet (e.g. enzyme cplx pools) are appended to
    *poolNotcopiedyet* for a later pass.
    """
    # Groups in each compartment, plus the compartment itself as a pseudo-group.
    aCmptGrp = moose.wildcardFind(comptA.path + '/#[TYPE=Neutral]')
    aCmptGrp = aCmptGrp + (moose.element(comptA.path),)
    bCmptGrp = moose.wildcardFind(comptB.path + '/#[TYPE=Neutral]')
    bCmptGrp = bCmptGrp + (moose.element(comptB.path),)
    objA = moose.element(comptA.path).parent.name
    objB = moose.element(comptB.path).parent.name
    for bpath in bCmptGrp:
        grp_cmpt = ((bpath.path).replace(objB, objA)).replace('[0]', '')
        if moose.exists(grp_cmpt):
            # Same path but different class: rename to avoid the clash.
            if moose.element(grp_cmpt).className != bpath.className:
                grp_cmpt = grp_cmpt + '_grp'
                bpath.name = bpath.name + "_grp"
                l = moose.Neutral(grp_cmpt)
        else:
            moose.Neutral(grp_cmpt)
        apath = moose.element(bpath.path.replace(objB, objA))
        bpoollist = moose.wildcardFind(bpath.path + '/#[ISA=PoolBase]')
        apoollist = moose.wildcardFind(apath.path + '/#[ISA=PoolBase]')
        for bpool in bpoollist:
            if bpool.name not in [apool.name for apool in apoollist]:
                copied = copy_deleteUnlyingPoolObj(bpool, apath)
                if copied == False:
                    # hold it for later, this pool may be under enzyme, as cplx
                    poolNotcopiedyet.append(bpool)
def copy_deleteUnlyingPoolObj(pool, path):
    """ Copy *pool* under *path*, stripping any functions/enzymes that came
    along with the copy.  Returns True if the pool was copied.
    """
    # check if this pool is under compartment or under enzyme (enzyme_cplx)?
    # if enzyme_cplx then don't copy until this particular enzyme is copied
    # case: this enzyme_cplx might exist in modelA if the enzyme exists,
    #       which will automatically copy the pool
    copied = False
    if pool.parent.className not in ["Enz", "ZombieEnz"]:
        poolcopied = moose.copy(pool, path)
        copied = True
        # deleting function and enzyme which get copied if they exist under
        # the pool.  This is done to ensure dangling function / enzyme are
        # not copied.
        funclist = []
        for types in ['setConc', 'setN', 'increment']:
            funclist.extend(moose.element(poolcopied).neighbors[types])
        for fl in funclist:
            moose.delete(fl)
        # Enzymes show up as 'reac' neighbours of the copied pool.
        enzlist = moose.element(poolcopied).neighbors['reac']
        for el in list(set(enzlist)):
            moose.delete(el.path)
    return copied
def updatePoolList(comptAdict):
    """Refresh the global poolListina map (compartment name -> pools found
    under that compartment) and return it."""
    for compt_name, compt in comptAdict.items():
        pools = moose.wildcardFind(compt.path + '/##[ISA=PoolBase]')
        poolListina[compt_name] = pools
    return poolListina
def enzymeMerge(comptA, comptB, key, poolListina):
    """ Merge enzymes of compartment B (comptB[key]) into compartment A.

    Enzymes are matched by parent-pool name and enzyme name.  Returns three
    lists: duplicated enzymes (same name, different sub/prd or volume),
    enzymes not copied yet (sub/prd missing, likely cplx-connected), and
    dangling enzymes (no sub/prd at all).
    """
    war_msg = ""
    RE_Duplicated, RE_Notcopiedyet, RE_Daggling = [], [], []
    comptApath = moose.element(comptA[key]).path
    comptBpath = moose.element(comptB[key]).path
    objA = moose.element(comptApath).parent.name
    objB = moose.element(comptBpath).parent.name
    enzyListina = moose.wildcardFind(comptApath + '/##[ISA=EnzBase]')
    enzyListinb = moose.wildcardFind(comptBpath + '/##[ISA=EnzBase]')
    for eb in enzyListinb:
        eBsubname, eBprdname = [], []
        eBsubname = subprdList(eb, "sub")
        eBprdname = subprdList(eb, "prd")
        allexists, allexistp = False, False
        allclean = False
        poolinAlist = poolListina[findCompartment(eb).name]
        for pA in poolinAlist:
            # The enzyme's parent pool is the anchor for matching.
            if eb.parent.name == pA.name:
                eapath = eb.parent.path.replace(objB, objA)
                if not moose.exists(eapath + '/' + eb.name):
                    # This will take care
                    # -- If same enzparent name but different enzyme name
                    # -- or different parent/enzyme name
                    if eBsubname and eBprdname:
                        allexists = checkexist(eBsubname, objB, objA)
                        allexistp = checkexist(eBprdname, objB, objA)
                        if allexists and allexistp:
                            enzPool = moose.element(pA.path)
                            eapath = eb.parent.path.replace(objB, objA)
                            enz = moose.element(moose.copy(eb, moose.element(eapath)))
                            enzPool = enz.parent
                            if eb.className in ["ZombieEnz", "Enz"]:
                                moose.connect(moose.element(enz), "enz", enzPool, "reac")
                            if eb.className in ["ZombieMMenz", "MMenz"]:
                                moose.connect(enzPool, "nOut", enz, "enzDest")
                            connectObj(enz, eBsubname, "sub", comptA, war_msg)
                            connectObj(enz, eBprdname, "prd", comptA, war_msg)
                            allclean = True
                        else:
                            # didn't find sub or prd for this Enzyme
                            # print ("didn't find sub or prd for this reaction")
                            RE_Notcopiedyet.append(eb)
                    else:
                        # -- it is dagging reaction
                        RE_Daggling.append(eb)
                        # print ("This reaction \""+eb.path+"\" has no substrate/product daggling reaction are not copied")
                        # war_msg = war_msg+"\nThis reaction \""+eb.path+"\" has no substrate/product daggling reaction are not copied"
                else:
                    # Same Enzyme name
                    # -- Same substrate and product including same volume then don't copy
                    # -- different substrate/product or if sub/prd's volume is different then DUPLICATE the Enzyme
                    allclean = False
                    # ea = moose.element('/'+obj+'/'+enzcompartment.name+'/'+enzparent.name+'/'+eb.name)
                    # ea = moose.element(pA.path+'/'+eb.name)
                    ea = moose.element(eb.path.replace(objB, objA))
                    eAsubname = subprdList(ea, "sub")
                    eBsubname = subprdList(eb, "sub")
                    hasSamenoofsublen, hasSameS, hasSamevols = same_len_name_vol(eAsubname, eBsubname)
                    eAprdname = subprdList(ea, "prd")
                    eBprdname = subprdList(eb, "prd")
                    hasSamenoofprdlen, hasSameP, hasSamevolp = same_len_name_vol(eAprdname, eBprdname)
                    if not all((hasSamenoofsublen, hasSameS, hasSamevols, hasSamenoofprdlen, hasSameP, hasSamevolp)):
                        # May be different substrate or product or volume of Sub/prd may be different,
                        # Duplicating the enzyme
                        if eBsubname and eBprdname:
                            allexists, allexistp = False, False
                            allexists = checkexist(eBsubname, objB, objA)
                            allexistp = checkexist(eBprdname, objB, objA)
                            if allexists and allexistp:
                                eb.name = eb.name + "_duplicated"
                                if eb.className in ["ZombieEnz", "Enz"]:
                                    eapath = eb.parent.path.replace(objB, objA)
                                    enz = moose.copy(eb, moose.element(eapath))
                                    # NOTE(review): eapath here is a string
                                    # path -- moose.connect appears to accept
                                    # paths; confirm.
                                    moose.connect(enz, 'enz', eapath, 'reac')
                                if eb.className in ["ZombieMMenz", "MMenz"]:
                                    eapath = eb.parent.path.replace(objB, objA)
                                    enz = moose.copy(eb.name, moose.element(eapath))
                                    enzinfo = moose.Annotator(enz.path + '/info')
                                    moose.connect(moose.element(enz).parent, "nOut", moose.element(enz), "enzDest")
                                    # moose.connect(moose.element(enz),"enz",moose.element(enz).parent,"reac")
                                    # moose.connect( cplxItem, 'reac', enz, 'cplx' )
                                connectObj(enz, eBsubname, "sub", comptA, war_msg)
                                connectObj(enz, eBprdname, "prd", comptA, war_msg)
                                RE_Duplicated.append(enz)
                                allclean = True
                            else:
                                allclean = False
                    else:
                        allclean = True
                    if not allclean:
                        # didn't find sub or prd for this enzyme
                        # -- it may be connected Enzyme cplx
                        if eBsubname and eBprdname:
                            RE_Notcopiedyet.append(eb)
                            # print ("This Enzyme \""+eb.path+"\" has no substrate/product must be connect to cplx")
                            # war_msg = war_msg+ "\nThis Enzyme \""+rb.path+"\" has no substrate/product must be connect to cplx"
                        else:
                            RE_Daggling.append(eb)
                            # print ("This enzyme \""+eb.path+"\" has no substrate/product daggling reaction are not copied")
                            # war_msg = war_msg+"\nThis reaction \""+eb.path+"\" has no substrate/product daggling reaction are not copied"
    return RE_Duplicated, RE_Notcopiedyet, RE_Daggling
def reacMerge(comptA, comptB, key, poolListina):
    """ Merge reactions of compartment B (comptB[key]) into compartment A.

    Reactions are matched by name.  Returns three lists: duplicated
    reactions (same name, different sub/prd or volume), reactions not copied
    yet (sub/prd missing, likely cplx-connected), and dangling reactions (no
    sub/prd at all).
    """
    RE_Duplicated, RE_Notcopiedyet, RE_Daggling = [], [], []
    war_msg = ""
    comptApath = moose.element(comptA[key]).path
    comptBpath = moose.element(comptB[key]).path
    objA = moose.element(comptApath).parent.name
    objB = moose.element(comptBpath).parent.name
    reacListina = moose.wildcardFind(comptApath + '/##[ISA=ReacBase]')
    reacListinb = moose.wildcardFind(comptBpath + '/##[ISA=ReacBase]')
    for rb in reacListinb:
        rBsubname, rBprdname = [], []
        rBsubname = subprdList(rb, "sub")
        rBprdname = subprdList(rb, "prd")
        allexists, allexistp = False, False
        allclean = False
        if rb.name not in [ra.name for ra in reacListina]:
            # reaction name not found then copy
            # And assuming that pools are copied earlier EXPECT POOL CPLX
            # To be assured it takes correct compartment name in case reaction sub's
            # belong to a different compt
            key = findCompartment(rb).name
            if rBsubname and rBprdname:
                allexists = checkexist(rBsubname, objB, objA)
                allexistp = checkexist(rBprdname, objB, objA)
                if allexists and allexistp:
                    rapath = rb.parent.path.replace(objB, objA)
                    reac = moose.copy(rb, moose.element(rapath))
                    connectObj(reac, rBsubname, "sub", comptA, war_msg)
                    connectObj(reac, rBprdname, "prd", comptA, war_msg)
                    allclean = True
                else:
                    # didn't find sub or prd for this reaction
                    # -- it may be connected Enzyme cplx
                    RE_Notcopiedyet.append(rb)
            else:
                # -- it is dagging reaction
                RE_Daggling.append(rb)
                # print ("This reaction \""+rb.path+"\" has no substrate/product daggling reaction are not copied")
                # war_msg = war_msg+"\nThis reaction \""+rb.path+"\" has no substrate/product daggling reaction are not copied"
        else:
            # Same reaction name
            # -- Same substrate and product including same volume then don't copy
            # -- different substrate/product or if sub/prd's volume is different then DUPLICATE the reaction
            allclean = False
            for ra in reacListina:
                if rb.name == ra.name:
                    rAsubname = subprdList(ra, "sub")
                    rBsubname = subprdList(rb, "sub")
                    hasSamenoofsublen, hasSameS, hasSamevols = same_len_name_vol(rAsubname, rBsubname)
                    rAprdname = subprdList(ra, "prd")
                    rBprdname = subprdList(rb, "prd")
                    hasSamenoofprdlen, hasSameP, hasSamevolp = same_len_name_vol(rAprdname, rBprdname)
                    if not all((hasSamenoofsublen, hasSameS, hasSamevols, hasSamenoofprdlen, hasSameP, hasSamevolp)):
                        # May be different substrate or product or volume of Sub/prd may be different,
                        # Duplicating the reaction
                        if rBsubname and rBprdname:
                            allexists, allexistp = False, False
                            allexists = checkexist(rBsubname, objB, objA)
                            allexistp = checkexist(rBprdname, objB, objA)
                            if allexists and allexistp:
                                rb.name = rb.name + "_duplicated"
                                # reac = moose.Reac(comptA[key].path+'/'+rb.name+"_duplicated")
                                rapath = rb.parent.path.replace(objB, objA)
                                reac = moose.copy(rb, moose.element(rapath))
                                connectObj(reac, rBsubname, "sub", comptA, war_msg)
                                connectObj(reac, rBprdname, "prd", comptA, war_msg)
                                RE_Duplicated.append(reac)
                                allclean = True
                            else:
                                allclean = False
                    else:
                        allclean = True
                    if not allclean:
                        # didn't find sub or prd for this reaction
                        # -- it may be connected Enzyme cplx
                        if rBsubname and rBprdname:
                            RE_Notcopiedyet.append(rb)
                            # print ("This reaction \""+rb.path+"\" has no substrate/product must be connect to cplx")
                            # war_msg = war_msg+ "\nThis reaction \""+rb.path+"\" has no substrate/product must be connect to cplx"
                        else:
                            RE_Daggling.append(rb)
                            # print ("This reaction \""+rb.path+"\" has no substrate/product daggling reaction are not copied")
                            # war_msg = war_msg+"\nThis reaction \""+rb.path+"\" has no substrate/product daggling reaction are not copied"
    return RE_Duplicated, RE_Notcopiedyet, RE_Daggling
def subprdList(reac, subprd):
    """Return the elements wired to *reac* on the message named *subprd*
    (e.g. "sub", "prd", "input") as a list of moose elements."""
    neighbours = moose.element(reac).neighbors[subprd]
    return [moose.element(item) for item in neighbours]
def same_len_name_vol(rA, rB):
    """ Compare two substrate/product element lists.

    Returns (same length, same set of names, same compartment volume for
    every name-matched pair).  NOTE(review): when both lists are empty the
    volume flag remains False even though the first two are True -- confirm
    callers (which apply all(...)) intend empty lists to count as different.
    """
    uaS = set(rA)
    ubS = set(rB)
    aS = set([uas.name for uas in uaS])
    bS = set([ubs.name for ubs in ubS])
    hassameLen = False
    hassameSP = False
    hassamevol = False
    hassamevollist = []
    if (len(rA) == len(rB)):
        hassameLen = True
        # Names are identical iff the symmetric difference is empty.
        if not (len(aS.union(bS) - aS.intersection(bS))):
            hassameSP = True
            if rB and rA:
                rAdict = dict([(i.name, i) for i in (rA)])
                rBdict = dict([(i.name, i) for i in (rB)])
                for key, bpath in rBdict.items():
                    apath = rAdict[key]
                    comptA = moose.element(findCompartment(apath))
                    comptB = moose.element(findCompartment(bpath))
                    # Exact float equality of volumes (0.0 difference).
                    if not abs(comptA.volume - comptB.volume):
                        hassamevollist.append(True)
                    else:
                        hassamevollist.append(False)
                # Only a unanimous verdict across all pairs is accepted.
                if len(set(hassamevollist)) == 1:
                    for x in set(hassamevollist):
                        hassamevol = x
    return (hassameLen, hassameSP, hassamevol)
def connectObj(reac, spList, spType, comptA, war_msg):
    """ Connect every pool in *spList* to *reac* on message *spType*
    ("sub" or "prd"), matching pools in model A by name within the same
    compartment.  Returns True when the last matched pool connected cleanly.
    NOTE(review): the flag reflects only the final match, not all of them --
    confirm callers only use it informally (the return value is ignored at
    every call site visible in this file).
    """
    # It should not come here unless the sub/prd is connected to enzyme cplx pool
    allclean = False
    for rsp in spList:
        for akey in list(poolListina[findCompartment(rsp).name]):
            if rsp.name == akey.name:
                if moose.exists(akey.path):
                    moose.connect(moose.element(reac), spType, moose.element(akey), 'reac', 'OneToOne')
                    allclean = True
                else:
                    # It should not come here unless the sub/prd is connected to enzyme cplx pool
                    # print ("This pool \""+rsp.name+"\" doesnot exists in this "+comptName+" compartment to connect to this reaction \""+reac.name+"\"")
                    # war_msg = war_msg+ "This pool \""+rsp.name+"\" doesnot exists in this "+comptName+" compartment to connect to this reaction \""+reac.name+"\""
                    allclean = False
    return allclean
def checkexist(spList, objB, objA):
    """Return True only when every element of *spList*, after swapping the
    model prefix objB -> objA in its path, already exists in model A.
    An empty list yields False, matching the original mixed-verdict rule."""
    found_flags = []
    for sp in spList:
        mirrored_path = sp.path.replace(objB, objA)
        found_flags.append(bool(moose.exists(mirrored_path)))
    # Original code accepted only a unanimous True verdict; empty or mixed
    # lists fall back to False.
    return bool(found_flags) and all(found_flags)
def findCompartment(element):
    """Walk up the parent chain of *element* until a mesh compartment
    (CubeMesh or CyclMesh) is reached, and return it."""
    node = element
    while not mooseIsInstance(node, ["CubeMesh", "CyclMesh"]):
        node = node.parent
    return node
def mooseIsInstance(element, classNames):
    """Return True when the moose element's class name is listed in
    *classNames* (a sequence of class-name strings)."""
    resolved = moose.element(element)
    return type(resolved).__name__ in classNames
if __name__ == "__main__":
modelA = '/home/harsha/genesis_files/gfile/acc92.g'
modelB = '/home/harsha/genesis_files/gfile/acc50.g'
mergered = mergeChemModel(modelA,modelB)
|
dharmasam9/moose-core
|
python/moose/merge/merge.py
|
Python
|
gpl-3.0
| 28,641
|
[
"MOOSE"
] |
5a90cbb2de812274a85d94af91c33fdce76fb2f796d48fdf06b770c3f9c6b2ce
|
#
# Copyright (C) 2013,2014,2015,2016 The ESPResSo project
#
# This file is part of ESPResSo.
#
# ESPResSo is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# ESPResSo is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
# This is a modified version of the lj liquid script, which simulates
# a two component lj liquid.
# By switching the lj interaction between the two components
# from atractive to purely repulsive, de-mixing can be achieved.
# 1. Setup and equilibrate LJ liquid
from __future__ import print_function
import espressomd
import os
import numpy as np
from espressomd import visualization
from threading import Thread
# Simulation parameters
n_part = 200
density = 0.4442

skin = 0.4
time_step = 0.01
eq_tstep = 0.01
temperature = 0.728

# Cubic box sized to give the requested number density.
box_l = np.power(n_part / density, 1.0 / 3.0)

warm_steps = 100
warm_n_time = 2000
min_dist = 0.87

# integration
sampling_interval = 10
equilibration_interval = 1000

sampling_iterations = 10000
equilibration_iterations = 10

# Interaction parameters (Lennard Jones)
#############################################################

lj_eps = 1.0
lj_sig = 1.0
lj_cut = 2.5 * lj_sig
lj_cap = 20

# This is the cutoff of the interaction between species 0 and 1.
# By setting it to 2**(1./6.) *lj_sig, it can be made purely repulsive.
# NOTE: the second assignment deliberately overrides the first; comment it
# out to switch back to the attractive (2.5 sigma) mixed interaction.
lj_cut_mixed = 2.5 * lj_sig
lj_cut_mixed = 2**(1. / 6.) * lj_sig
# System setup
#############################################################
system = espressomd.System(box_l=[1.0, 1.0, 1.0])
if not os.path.exists('data') :
os.mkdir('data')
system.time_step = time_step
system.cell_system.skin = skin
system.box_l = [box_l, box_l, box_l]
# Here, lj interactions need to be setup for both components
# as well as for the mixed case of component 0 interacting with
# component 1.
# component 0
system.non_bonded_inter[0, 0].lennard_jones.set_params(
epsilon=lj_eps, sigma=lj_sig,
cutoff=lj_cut, shift="auto")
# component 1
system.non_bonded_inter[1, 1].lennard_jones.set_params(
epsilon=lj_eps, sigma=lj_sig,
cutoff=lj_cut, shift="auto")
# mixed case
system.non_bonded_inter[0, 1].lennard_jones.set_params(
epsilon=lj_eps, sigma=lj_sig,
cutoff=lj_cut_mixed, shift="auto")
system.force_cap = lj_cap
print("LJ-parameters:")
print(system.non_bonded_inter[0, 0].lennard_jones.get_params())
# Thermostat
system.thermostat.set_langevin(kT=temperature, gamma=1.0)
# Particle setup
#############################################################
volume = box_l * box_l * box_l
for i in range(n_part):
system.part.add(id=i, pos=np.random.random(3) * system.box_l)
# Every 2nd particle should be of component 1
if i%2==1: system.part[i].type=1
#############################################################
# Warmup Integration #
#############################################################
print("""
Start warmup integration:
At maximum {} times {} steps
Stop if minimal distance is larger than {}
""".strip().format(warm_n_time, warm_steps, min_dist))
i = 0
act_min_dist = system.analysis.mindist()
while i < warm_n_time and act_min_dist < min_dist :
system.integrator.run(warm_steps)
act_min_dist = system.analysis.mindist()
print("run {} at time = {} (LJ cap= {} ) min dist = {}".strip().format(i, system.time, lj_cap, act_min_dist))
i+=1
lj_cap += 1.0
system.force_cap = lj_cap
system.force_cap = 0
def loop():
    """Background integration loop: advance the MD simulation in chunks of
    100 steps and push each new frame to the module-level visualizer."""
    while True:
        system.integrator.run(100)
        visualizer.update()
visualizer = visualization.mayaviLive(system)
# Start simulation in a separate thread; daemon=True so the thread dies
# with the main process instead of keeping it alive on exit.
t = Thread(target=loop)
t.daemon = True
t.start()
# Start blocking visualizer (runs the GUI event loop in the main thread)
visualizer.start()
|
KonradBreitsprecher/espresso
|
doc/tutorials/01-lennard_jones/scripts/two-component-visualization.py
|
Python
|
gpl-3.0
| 4,208
|
[
"ESPResSo"
] |
c8224aab5f4eb379a8d166f0de41aff6a0de293101f1549c25e8cd648f6160e0
|
# This code is part of Ansible, but is an independent component.
# This particular file snippet, and this file snippet only, is BSD licensed.
# Modules you write using this snippet, which is embedded dynamically by Ansible
# still belong to the author of the module, and may assign their own license
# to the complete work.
#
# Copyright (c), Michael DeHaan <michael.dehaan@gmail.com>, 2012-2013
# Copyright (c), Toshio Kuratomi <tkuratomi@ansible.com> 2016
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without modification,
# are permitted provided that the following conditions are met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above copyright notice,
# this list of conditions and the following disclaimer in the documentation
# and/or other materials provided with the distribution.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
# ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
# WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED.
# IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
# PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE
# USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#
# Values accepted as booleans in module arguments; checked by membership,
# so both string and native forms are listed.
BOOLEANS_TRUE = ['y', 'yes', 'on', '1', 'true', 1, True]
BOOLEANS_FALSE = ['n', 'no', 'off', '0', 'false', 0, False]
BOOLEANS = BOOLEANS_TRUE + BOOLEANS_FALSE
# Size-suffix multipliers (powers of 1024), shared by bytes_to_human and
# human_to_bytes below.
SIZE_RANGES = { 'Y': 1<<80, 'Z': 1<<70, 'E': 1<<60, 'P': 1<<50, 'T': 1<<40, 'G': 1<<30, 'M': 1<<20, 'K': 1<<10, 'B': 1 }
# ansible modules can be written in any language. To simplify
# development of Python modules, the functions available here can
# be used to do many common tasks
import locale
import os
import re
import pipes
import shlex
import subprocess
import sys
import types
import time
import select
import shutil
import stat
import tempfile
import traceback
import grp
import pwd
import platform
import errno
import datetime
from itertools import repeat, chain
# Optional imports: each feature degrades gracefully when its module is
# unavailable; the HAS_*/HAVE_* flags record what was found.
try:
    import syslog
    HAS_SYSLOG = True
except ImportError:
    HAS_SYSLOG = False
try:
    from systemd import journal
    has_journal = True
except ImportError:
    has_journal = False
HAVE_SELINUX = False
try:
    import selinux
    HAVE_SELINUX = True
except ImportError:
    pass
# Python2 & 3 way to get NoneType
NoneType = type(None)
try:
    # Python 3.3+: the ABCs live in collections.abc (importing them from
    # plain `collections` stopped working entirely in Python 3.10).
    from collections.abc import Sequence, Mapping
except ImportError:
    try:
        # Python 2.6 - 3.2
        from collections import Sequence, Mapping
    except ImportError:
        # python2.5: no ABCs at all, fall back to the concrete types
        Sequence = (list, tuple)
        Mapping = (dict,)
# Note: When getting Sequence from collections, it matches with strings. If
# this matters, make sure to check for strings before checking for sequencetype
try:
    from collections.abc import KeysView
    SEQUENCETYPE = (Sequence, KeysView)
except ImportError:
    SEQUENCETYPE = Sequence
try:
    import json
    # Detect the python-json library which is incompatible
    # Look for simplejson if that's the case
    try:
        if not isinstance(json.loads, types.FunctionType) or not isinstance(json.dumps, types.FunctionType):
            raise ImportError
    except AttributeError:
        raise ImportError
except ImportError:
    try:
        import simplejson as json
    except ImportError:
        print('\n{"msg": "Error: ansible requires the stdlib json or simplejson module, neither was found!", "failed": true}')
        sys.exit(1)
    except SyntaxError:
        print('\n{"msg": "SyntaxError: probably due to installed simplejson being for a different python version", "failed": true}')
        sys.exit(1)
# Map algorithm name -> hashlib constructor for every hash this python
# build supports; old interpreters without hashlib fall back to sha/md5.
AVAILABLE_HASH_ALGORITHMS = dict()
try:
    import hashlib
    # python 2.7.9+ and 2.7.0+
    for attribute in ('available_algorithms', 'algorithms'):
        algorithms = getattr(hashlib, attribute, None)
        if algorithms:
            break
    if algorithms is None:
        # python 2.5+
        algorithms = ('md5', 'sha1', 'sha224', 'sha256', 'sha384', 'sha512')
    for algorithm in algorithms:
        AVAILABLE_HASH_ALGORITHMS[algorithm] = getattr(hashlib, algorithm)
except ImportError:
    import sha
    AVAILABLE_HASH_ALGORITHMS = {'sha1': sha.sha}
    try:
        import md5
        AVAILABLE_HASH_ALGORITHMS['md5'] = md5.md5
    except ImportError:
        pass
from ansible.module_utils.pycompat24 import get_exception, literal_eval
from ansible.module_utils.six import (PY2, PY3, b, binary_type, integer_types,
iteritems, text_type, string_types)
from ansible.module_utils.six.moves import map, reduce
from ansible.module_utils._text import to_native, to_bytes, to_text
# Case-insensitive match for names containing pass/password/passphrase/
# passwd-style tokens; presumably used to keep such values out of logs
# (consumer is elsewhere in this module -- verify against _log_invocation).
PASSWORD_MATCH = re.compile(r'^(?:.+[-_\s])?pass(?:[-_\s]?(?:word|phrase|wrd|wd)?)(?:[-_\s].+)?$', re.I)
_NUMBERTYPES = tuple(list(integer_types) + [float])
# Deprecated compat. Only kept in case another module used these names Using
# ansible.module_utils.six is preferred
NUMBERTYPES = _NUMBERTYPES
imap = map
try:
    # Python 2
    unicode
except NameError:
    # Python 3
    unicode = text_type
try:
    # Python 2.6+
    bytes
except NameError:
    # Python 2.4
    bytes = binary_type
try:
    # Python 2
    basestring
except NameError:
    # Python 3
    basestring = string_types
_literal_eval = literal_eval
# End of deprecated names
# Internal global holding passed in params. This is consulted in case
# multiple AnsibleModules are created. Otherwise each AnsibleModule would
# attempt to read from stdin. Other code should not use this directly as it
# is an internal implementation detail
_ANSIBLE_ARGS = None
# Argument-spec fragment shared by file-manipulating modules; merged into a
# module's own spec when AnsibleModule(add_file_common_args=True) is used.
FILE_COMMON_ARGUMENTS=dict(
    src = dict(),
    mode = dict(type='raw'),
    owner = dict(),
    group = dict(),
    seuser = dict(),
    serole = dict(),
    selevel = dict(),
    setype = dict(),
    follow = dict(type='bool', default=False),
    # not taken by the file module, but other modules call file so it must ignore them.
    content = dict(no_log=True),
    backup = dict(),
    force = dict(),
    remote_src = dict(), # used by assemble
    regexp = dict(), # used by assemble
    delimiter = dict(), # used by assemble
    directory_mode = dict(), # used by copy
    unsafe_writes = dict(type='bool'), # should be available to any module using atomic_move
)
PASSWD_ARG_RE = re.compile(r'^[-]{0,2}pass[-]?(word|wd)?')
# Can't use 07777 on Python 3, can't use 0o7777 on Python 2.4
PERM_BITS = int('07777', 8) # file mode permission bits
EXEC_PERM_BITS = int('00111', 8) # execute permission bits
DEFAULT_PERM = int('0666', 8) # default file permission bits
def get_platform():
    '''Return the platform name as reported by the OS (e.g. "Linux").'''
    system_name = platform.system()
    return system_name
def get_distribution():
    ''' return the distribution name

    Returns a capitalized distribution name on Linux (special-casing
    Amazon Linux, which only identifies itself via /etc/system-release),
    or None on non-Linux platforms.
    NOTE(review): platform._supported_dists / linux_distribution / dist
    were removed in Python 3.8 -- on modern interpreters both the try and
    the except paths raise AttributeError; confirm intended support matrix.
    '''
    if platform.system() == 'Linux':
        try:
            supported_dists = platform._supported_dists + ('arch','alpine')
            distribution = platform.linux_distribution(supported_dists=supported_dists)[0].capitalize()
            if not distribution and os.path.isfile('/etc/system-release'):
                distribution = platform.linux_distribution(supported_dists=['system'])[0].capitalize()
                if 'Amazon' in distribution:
                    distribution = 'Amazon'
                else:
                    distribution = 'OtherLinux'
        except:
            # FIXME: MethodMissing, I assume?
            distribution = platform.dist()[0].capitalize()
    else:
        distribution = None
    return distribution
def get_distribution_version():
    ''' return the distribution version

    Returns the version string on Linux (falling back to
    /etc/system-release data when the normal probe yields nothing),
    or None on non-Linux platforms.
    NOTE(review): platform.linux_distribution / platform.dist were removed
    in Python 3.8; see the matching note on get_distribution().
    '''
    if platform.system() == 'Linux':
        try:
            distribution_version = platform.linux_distribution()[1]
            if not distribution_version and os.path.isfile('/etc/system-release'):
                distribution_version = platform.linux_distribution(supported_dists=['system'])[1]
        except:
            # FIXME: MethodMissing, I assume?
            distribution_version = platform.dist()[1]
    else:
        distribution_version = None
    return distribution_version
def get_all_subclasses(cls):
    '''
    used by modules like Hardware or Network fact classes to retrieve all
    subclasses of a given class. __subclasses__ returns only direct
    subclasses; this walks the whole class tree below *cls*.

    Implementation note: the previous version removed items from the
    worklist while iterating over it (which skips elements mid-pass) and
    could record the same class twice under diamond inheritance.  This
    version uses a pop-based worklist plus a `seen` set, so every subclass
    is returned exactly once.
    '''
    subclasses = []
    seen = set()
    # Start from the direct subclasses
    to_visit = list(cls.__subclasses__())
    while to_visit:
        sc = to_visit.pop()
        if sc in seen:
            # already reached via another base (diamond inheritance)
            continue
        seen.add(sc)
        subclasses.append(sc)
        # Queue this subclass's own subclasses for visiting
        to_visit.extend(sc.__subclasses__())
    return subclasses
def load_platform_subclass(cls, *args, **kwargs):
    '''
    used by modules like User to have different implementations based on detected platform. See User
    module for an example.

    Selection order: first a subclass matching both this platform AND this
    distribution; then a subclass matching the platform with no specific
    distribution; finally *cls* itself.  The chosen class is instantiated
    via __new__ (the caller's __init__ still runs afterwards).
    '''
    this_platform = get_platform()
    distribution = get_distribution()
    subclass = None
    # get the most specific superclass for this platform
    if distribution is not None:
        for sc in get_all_subclasses(cls):
            if sc.distribution is not None and sc.distribution == distribution and sc.platform == this_platform:
                subclass = sc
    if subclass is None:
        for sc in get_all_subclasses(cls):
            if sc.platform == this_platform and sc.distribution is None:
                subclass = sc
    if subclass is None:
        subclass = cls
    return super(cls, subclass).__new__(subclass)
def json_dict_unicode_to_bytes(d, encoding='utf-8', errors='surrogate_or_strict'):
    '''Recursively convert text strings inside *d* to byte strings.

    Specialized for json return values: only dict, list and tuple
    containers are descended into (the containers the json module
    produces); any other value is returned unchanged.
    '''
    if isinstance(d, text_type):
        return to_bytes(d, encoding=encoding, errors=errors)
    elif isinstance(d, dict):
        return dict((json_dict_unicode_to_bytes(key, encoding, errors),
                     json_dict_unicode_to_bytes(val, encoding, errors))
                    for key, val in iteritems(d))
    elif isinstance(d, list):
        return [json_dict_unicode_to_bytes(item, encoding, errors) for item in d]
    elif isinstance(d, tuple):
        return tuple(json_dict_unicode_to_bytes(item, encoding, errors) for item in d)
    else:
        return d
def json_dict_bytes_to_unicode(d, encoding='utf-8', errors='surrogate_or_strict'):
    '''Recursively convert byte strings inside *d* to text strings.

    Specialized for json return values: only dict, list and tuple
    containers are descended into (the containers the json module
    produces); any other value is returned unchanged.
    '''
    if isinstance(d, binary_type):
        # Warning, can traceback
        return to_text(d, encoding=encoding, errors=errors)
    elif isinstance(d, dict):
        return dict((json_dict_bytes_to_unicode(key, encoding, errors),
                     json_dict_bytes_to_unicode(val, encoding, errors))
                    for key, val in iteritems(d))
    elif isinstance(d, list):
        return [json_dict_bytes_to_unicode(item, encoding, errors) for item in d]
    elif isinstance(d, tuple):
        return tuple(json_dict_bytes_to_unicode(item, encoding, errors) for item in d)
    else:
        return d
def return_values(obj):
    """Yield native stringified leaf values found in *obj*.

    Walks sequences and mapping values recursively; used for collecting
    sensitive values before jsonification.  Empty strings, booleans and
    None are skipped; unknown types raise TypeError.
    """
    if isinstance(obj, (text_type, binary_type)):
        if obj:
            yield to_native(obj, errors='surrogate_or_strict')
    elif isinstance(obj, SEQUENCETYPE):
        for item in obj:
            for leaf in return_values(item):
                yield leaf
    elif isinstance(obj, Mapping):
        # only values are interesting, not keys
        for _key, val in obj.items():
            for leaf in return_values(val):
                yield leaf
    elif isinstance(obj, (bool, NoneType)):
        # checked before NUMBERTYPES because bools are also ints
        pass
    elif isinstance(obj, NUMBERTYPES):
        yield to_native(obj, nonstring='simplerepr')
    else:
        raise TypeError('Unknown parameter type: %s, %s' % (type(obj), obj))
def remove_values(value, no_log_strings):
    """ Remove strings in no_log_strings from value. If value is a container
    type, then remove a lot more

    Strings are first normalized to the native str type, fully-matching
    values are replaced wholesale and substring hits are masked with
    '********'; containers are walked recursively; numbers/bools/None are
    compared via their string form; datetimes pass through as isoformat.
    Unknown types raise TypeError."""
    if isinstance(value, (text_type, binary_type)):
        # Need native str type
        native_str_value = value
        if isinstance(value, text_type):
            value_is_text = True
            if PY2:
                native_str_value = to_bytes(value, encoding='utf-8', errors='surrogate_or_strict')
        elif isinstance(value, binary_type):
            value_is_text = False
            if PY3:
                native_str_value = to_text(value, encoding='utf-8', errors='surrogate_or_strict')
        if native_str_value in no_log_strings:
            return 'VALUE_SPECIFIED_IN_NO_LOG_PARAMETER'
        for omit_me in no_log_strings:
            native_str_value = native_str_value.replace(omit_me, '*' * 8)
        # convert back to the original text/bytes flavour of the input
        if value_is_text and isinstance(native_str_value, binary_type):
            value = to_text(native_str_value, encoding='utf-8', errors='surrogate_or_replace')
        elif not value_is_text and isinstance(native_str_value, text_type):
            value = to_bytes(native_str_value, encoding='utf-8', errors='surrogate_or_replace')
        else:
            value = native_str_value
    elif isinstance(value, SEQUENCETYPE):
        return [remove_values(elem, no_log_strings) for elem in value]
    elif isinstance(value, Mapping):
        return dict((k, remove_values(v, no_log_strings)) for k, v in value.items())
    elif isinstance(value, tuple(chain(NUMBERTYPES, (bool, NoneType)))):
        stringy_value = to_native(value, encoding='utf-8', errors='surrogate_or_strict')
        if stringy_value in no_log_strings:
            return 'VALUE_SPECIFIED_IN_NO_LOG_PARAMETER'
        for omit_me in no_log_strings:
            if omit_me in stringy_value:
                return 'VALUE_SPECIFIED_IN_NO_LOG_PARAMETER'
    elif isinstance(value, datetime.datetime):
        value = value.isoformat()
    else:
        raise TypeError('Value of unknown type: %s, %s' % (type(value), value))
    return value
def heuristic_log_sanitize(data, no_log_values=None):
    ''' Remove strings that look like passwords from log messages

    Scans *data* right-to-left for user:pass@host shaped fragments (both
    ssh-style and http://user:pass@host URLs) and replaces the password
    portion with '********'.  Any explicit no_log_values are removed from
    the result afterwards via remove_values().
    '''
    # Currently filters:
    # user:pass@foo/whatever and http://username:pass@wherever/foo
    # This code has false positives and consumes parts of logs that are
    # not passwds
    # begin: start of a passwd containing string
    # end: end of a passwd containing string
    # sep: char between user and passwd
    # prev_begin: where in the overall string to start a search for
    #   a passwd
    # sep_search_end: where in the string to end a search for the sep
    data = to_native(data)
    output = []
    begin = len(data)
    prev_begin = begin
    sep = 1
    while sep:
        # Find the potential end of a passwd
        try:
            end = data.rindex('@', 0, begin)
        except ValueError:
            # No passwd in the rest of the data
            output.insert(0, data[0:begin])
            break
        # Search for the beginning of a passwd
        sep = None
        sep_search_end = end
        while not sep:
            # URL-style username+password
            try:
                begin = data.rindex('://', 0, sep_search_end)
            except ValueError:
                # No url style in the data, check for ssh style in the
                # rest of the string
                begin = 0
            # Search for separator
            try:
                sep = data.index(':', begin + 3, end)
            except ValueError:
                # No separator; choices:
                if begin == 0:
                    # Searched the whole string so there's no password
                    # here.  Return the remaining data
                    output.insert(0, data[0:begin])
                    break
                # Search for a different beginning of the password field.
                sep_search_end = begin
                continue
        if sep:
            # Password was found; remove it.
            output.insert(0, data[end:prev_begin])
            output.insert(0, '********')
            output.insert(0, data[begin:sep + 1])
            prev_begin = begin
    output = ''.join(output)
    if no_log_values:
        output = remove_values(output, no_log_values)
    return output
def bytes_to_human(size, isbits=False, unit=None):
    '''Render a byte (or bit) count as a human-readable string.

    Walks the SIZE_RANGES suffixes from largest to smallest and picks the
    first one that fits (or the one matching an explicitly requested
    *unit*), e.g. 2048 -> "2.00 KB".
    '''
    base = 'bits' if isbits else 'Bytes'
    suffix = ''
    for suffix, limit in sorted(SIZE_RANGES.items(), key=lambda item: -item[1]):
        matches_unit = unit is not None and unit.upper() == suffix[0]
        if (unit is None and size >= limit) or matches_unit:
            break
    if limit != 1:
        suffix += base[0]
    else:
        # no multiplier applied: spell out the base unit in full
        suffix = base
    return '%.2f %s' % (float(size) / limit, suffix)
def human_to_bytes(number, default_unit=None, isbits=False):
    '''
    Convert number in string format into bytes (ex: '2K' => 2048) or using unit argument
    ex:
      human_to_bytes('10M') <=> human_to_bytes(10, 'M')

    When *isbits* is True the suffix is validated as a bit unit ('Kb')
    instead of a byte unit ('KB').  Raises ValueError when the number or
    its unit cannot be interpreted.
    '''
    # raw string so \s and \d are regex tokens, not string escapes
    m = re.search(r'^\s*(\d*\.?\d*)\s*([A-Za-z]+)?', str(number), flags=re.IGNORECASE)
    if m is None:
        raise ValueError("human_to_bytes() can't interpret following string: %s" % str(number))
    try:
        num = float(m.group(1))
    except ValueError:
        # narrow catch: float() on a str can only raise ValueError
        raise ValueError("human_to_bytes() can't interpret following number: %s (original input string: %s)" % (m.group(1), number))
    unit = m.group(2)
    if unit is None:
        unit = default_unit
    if unit is None:
        # No unit given, returning raw number
        return int(round(num))
    range_key = unit[0].upper()
    try:
        limit = SIZE_RANGES[range_key]
    except KeyError:
        # narrow catch: a dict lookup with a str key can only raise KeyError
        raise ValueError("human_to_bytes() failed to convert %s (unit = %s). The suffix must be one of %s" % (number, unit, ", ".join(SIZE_RANGES.keys())))
    # default value
    unit_class = 'B'
    unit_class_name = 'byte'
    # handling bits case
    if isbits:
        unit_class = 'b'
        unit_class_name = 'bit'
    # check unit value if more than one character (KB, MB)
    if len(unit) > 1:
        expect_message = 'expect %s%s or %s' % (range_key, unit_class, range_key)
        if range_key == 'B':
            expect_message = 'expect %s or %s' % (unit_class, unit_class_name)
        if unit_class_name in unit.lower():
            pass
        elif unit[1] != unit_class:
            raise ValueError("human_to_bytes() failed to convert %s. Value is not a valid string (%s)" % (number, expect_message))
    return int(round(num * limit))
def is_executable(path):
    '''Return non-zero when any execute bit (user, group or other) is set
    on *path*, else 0.

    Limitations:

    * Does not account for FSACLs.
    * Only reports whether an execute bit is set somewhere, not whether
      the *current user* can actually execute the file.
    '''
    # Bitwise-or the three execute bits, then mask them against the file's
    # mode: a non-zero result means at least one of them is set.
    exec_bits = stat.S_IXUSR | stat.S_IXGRP | stat.S_IXOTH
    return exec_bits & os.stat(path)[stat.ST_MODE]
def _load_params():
    ''' read the modules parameters and store them globally.

    This function may be needed for certain very dynamic custom modules which
    want to process the parameters that are being handed the module.  Since
    this is so closely tied to the implementation of modules we cannot
    guarantee API stability for it (it may change between versions) however we
    will try not to break it gratuitously.  It is certainly more future-proof
    to call this function and consume its outputs than to implement the logic
    inside it as a copy in your own code.

    Returns the dict stored under the 'ANSIBLE_MODULE_ARGS' key of the JSON
    blob read from argv[1] (literal value or file) or stdin.  Exits the
    process with a JSON error payload when the input cannot be parsed.
    '''
    global _ANSIBLE_ARGS
    # Reuse previously-read args so a second AnsibleModule in the same
    # process does not try to read stdin again.
    if _ANSIBLE_ARGS is not None:
        buffer = _ANSIBLE_ARGS
    else:
        # debug overrides to read args from file or cmdline
        # Avoid tracebacks when locale is non-utf8
        # We control the args and we pass them as utf8
        if len(sys.argv) > 1:
            if os.path.isfile(sys.argv[1]):
                fd = open(sys.argv[1], 'rb')
                buffer = fd.read()
                fd.close()
            else:
                buffer = sys.argv[1]
                if PY3:
                    buffer = buffer.encode('utf-8', errors='surrogateescape')
        # default case, read from stdin
        else:
            if PY2:
                buffer = sys.stdin.read()
            else:
                buffer = sys.stdin.buffer.read()
        _ANSIBLE_ARGS = buffer
    try:
        params = json.loads(buffer.decode('utf-8'))
    except ValueError:
        # This helper used too early for fail_json to work.
        print('\n{"msg": "Error: Module unable to decode valid JSON on stdin.  Unable to figure out what parameters were passed", "failed": true}')
        sys.exit(1)
    if PY2:
        params = json_dict_unicode_to_bytes(params)
    try:
        return params['ANSIBLE_MODULE_ARGS']
    except KeyError:
        # This helper does not have access to fail_json so we have to print
        # json output on our own.
        print('\n{"msg": "Error: Module unable to locate ANSIBLE_MODULE_ARGS in json data from stdin.  Unable to figure out what parameters were passed", "failed": true}')
        sys.exit(1)
def env_fallback(*args, **kwargs):
    '''Return the value of the first environment variable named in *args*.

    Raises AnsibleFallbackNotFound when none of the names is set.
    '''
    for name in args:
        try:
            return os.environ[name]
        except KeyError:
            continue
    raise AnsibleFallbackNotFound
def _lenient_lowercase(lst):
"""Lowercase elements of a list.
If an element is not a string, pass it through untouched.
"""
lowered = []
for value in lst:
try:
lowered.append(value.lower())
except AttributeError:
lowered.append(value)
return lowered
class AnsibleFallbackNotFound(Exception):
    """Raised by fallback callables (e.g. env_fallback) when no fallback
    value could be found."""
    pass
class AnsibleModule(object):
    def __init__(self, argument_spec, bypass_checks=False, no_log=False,
        check_invalid_arguments=True, mutually_exclusive=None, required_together=None,
        required_one_of=None, add_file_common_args=False, supports_check_mode=False,
        required_if=None):
        '''
        common code for quickly building an ansible module in Python
        (although you can write modules in anything that can return JSON)
        see library/* for examples

        Parses the incoming parameters against *argument_spec*, applies
        fallbacks/defaults/aliases, validates types and inter-argument
        constraints (unless bypass_checks), records no_log values and
        finally logs the invocation.  NOTE: the call order below is
        load-bearing -- e.g. no_log values must be collected before
        anything is logged, and _check_locale() runs before argument
        checking so error messages format correctly.
        '''
        self._name = os.path.basename(__file__) #initialize name until we can parse from options
        self.argument_spec = argument_spec
        self.supports_check_mode = supports_check_mode
        self.check_mode = False
        self.no_log = no_log
        self.cleanup_files = []
        self._debug = False
        self._diff = False
        self._verbosity = 0
        # May be used to set modifications to the environment for any
        # run_command invocation
        self.run_command_environ_update = {}
        self.aliases = {}
        # internal '_ansible_*' parameters injected by the ansible core side
        self._legal_inputs = ['_ansible_check_mode', '_ansible_no_log', '_ansible_debug', '_ansible_diff', '_ansible_verbosity', '_ansible_selinux_special_fs', '_ansible_module_name', '_ansible_version', '_ansible_syslog_facility']
        if add_file_common_args:
            for k, v in FILE_COMMON_ARGUMENTS.items():
                if k not in self.argument_spec:
                    self.argument_spec[k] = v
        self._load_params()
        self._set_fallbacks()
        # append to legal_inputs and then possibly check against them
        try:
            self.aliases = self._handle_aliases()
        except Exception:
            e = get_exception()
            # Use exceptions here because it isn't safe to call fail_json until no_log is processed
            print('\n{"failed": true, "msg": "Module alias error: %s"}' % str(e))
            sys.exit(1)
        # Save parameter values that should never be logged
        self.no_log_values = set()
        # Use the argspec to determine which args are no_log
        for arg_name, arg_opts in self.argument_spec.items():
            if arg_opts.get('no_log', False):
                # Find the value for the no_log'd param
                no_log_object = self.params.get(arg_name, None)
                if no_log_object:
                    self.no_log_values.update(return_values(no_log_object))
        # check the locale as set by the current environment, and reset to
        # a known valid (LANG=C) if it's an invalid/unavailable locale
        self._check_locale()
        self._check_arguments(check_invalid_arguments)
        # check exclusive early
        if not bypass_checks:
            self._check_mutually_exclusive(mutually_exclusive)
        self._set_defaults(pre=True)
        # dispatch table mapping argspec 'type' names to checker/coercer methods
        self._CHECK_ARGUMENT_TYPES_DISPATCHER = {
                'str': self._check_type_str,
                'list': self._check_type_list,
                'dict': self._check_type_dict,
                'bool': self._check_type_bool,
                'int': self._check_type_int,
                'float': self._check_type_float,
                'path': self._check_type_path,
                'raw': self._check_type_raw,
                'jsonarg': self._check_type_jsonarg,
                'json': self._check_type_jsonarg,
                'bytes': self._check_type_bytes,
                'bits': self._check_type_bits,
            }
        if not bypass_checks:
            self._check_required_arguments()
            self._check_argument_types()
            self._check_argument_values()
            self._check_required_together(required_together)
            self._check_required_one_of(required_one_of)
            self._check_required_if(required_if)
        self._set_defaults(pre=False)
        if not self.no_log:
            self._log_invocation()
        # finally, make sure we're in a sane working dir
        self._set_cwd()
    def load_file_common_arguments(self, params):
        '''
        many modules deal with files, this encapsulates common
        options that the file module accepts such that it is directly
        available to all modules and they can share code.

        Returns a dict of the normalized file attributes (path expanded,
        symlinks optionally resolved, selinux context assembled), or an
        empty dict when *params* carries neither 'path' nor 'dest'.
        '''
        path = params.get('path', params.get('dest', None))
        if path is None:
            return {}
        else:
            path = os.path.expanduser(os.path.expandvars(path))
        b_path = to_bytes(path, errors='surrogate_or_strict')
        # if the path is a symlink, and we're following links, get
        # the target of the link instead for testing
        if params.get('follow', False) and os.path.islink(b_path):
            b_path = os.path.realpath(b_path)
            path = to_native(b_path)
        mode = params.get('mode', None)
        owner = params.get('owner', None)
        group = params.get('group', None)
        # selinux related options
        seuser = params.get('seuser', None)
        serole = params.get('serole', None)
        setype = params.get('setype', None)
        selevel = params.get('selevel', None)
        secontext = [seuser, serole, setype]
        if self.selinux_mls_enabled():
            secontext.append(selevel)
        default_secontext = self.selinux_default_context(path)
        for i in range(len(default_secontext)):
            # NOTE(review): `i is not None` is vacuously true (i is an int);
            # presumably `default_secontext[i] is not None` was intended --
            # confirm against upstream before changing behavior.
            if i is not None and secontext[i] == '_default':
                secontext[i] = default_secontext[i]
        return dict(
            path=path, mode=mode, owner=owner, group=group,
            seuser=seuser, serole=serole, setype=setype,
            selevel=selevel, secontext=secontext,
        )
# Detect whether using selinux that is MLS-aware.
# While this means you can set the level/range with
# selinux.lsetfilecon(), it may or may not mean that you
# will get the selevel as part of the context returned
# by selinux.lgetfilecon().
def selinux_mls_enabled(self):
if not HAVE_SELINUX:
return False
if selinux.is_selinux_mls_enabled() == 1:
return True
else:
return False
    def selinux_enabled(self):
        '''Return True when selinux is enabled on this host.

        Without the python bindings we can still detect an selinux-enabled
        host via the `selinuxenabled` binary (exit code 0); in that case
        the module cannot manage contexts, so fail loudly instead of
        silently skipping selinux handling.
        '''
        if not HAVE_SELINUX:
            seenabled = self.get_bin_path('selinuxenabled')
            if seenabled is not None:
                (rc,out,err) = self.run_command(seenabled)
                if rc == 0:
                    self.fail_json(msg="Aborting, target uses selinux but python bindings (libselinux-python) aren't installed!")
            return False
        if selinux.is_selinux_enabled() == 1:
            return True
        else:
            return False
# Determine whether we need a placeholder for selevel/mls
def selinux_initial_context(self):
context = [None, None, None]
if self.selinux_mls_enabled():
context.append(None)
return context
    # If selinux fails to find a default, return an array of None
    def selinux_default_context(self, path, mode=0):
        '''Return the default selinux context for *path* as a list
        [user, role, type(, level)], or the all-None placeholder when
        selinux is unavailable or no default is known.'''
        context = self.selinux_initial_context()
        if not HAVE_SELINUX or not self.selinux_enabled():
            return context
        try:
            # matchpathcon returns (rc, context_string); rc == -1 -> no match
            ret = selinux.matchpathcon(to_native(path, errors='surrogate_or_strict'), mode)
        except OSError:
            return context
        if ret[0] == -1:
            return context
        # Limit split to 4 because the selevel, the last in the list,
        # may contain ':' characters
        context = ret[1].split(':', 3)
        return context
    def selinux_context(self, path):
        '''Return the current selinux context of *path* (symlink itself,
        not its target) as a list [user, role, type(, level)].

        Returns the all-None placeholder when selinux is unavailable or
        the context cannot be read; calls fail_json when the path does
        not exist.
        '''
        context = self.selinux_initial_context()
        if not HAVE_SELINUX or not self.selinux_enabled():
            return context
        try:
            ret = selinux.lgetfilecon_raw(to_native(path, errors='surrogate_or_strict'))
        except OSError:
            e = get_exception()
            if e.errno == errno.ENOENT:
                self.fail_json(path=path, msg='path %s does not exist' % path)
            else:
                self.fail_json(path=path, msg='failed to retrieve selinux context')
        if ret[0] == -1:
            return context
        # Limit split to 4 because the selevel, the last in the list,
        # may contain ':' characters
        context = ret[1].split(':', 3)
        return context
def user_and_group(self, filename):
filename = os.path.expanduser(os.path.expandvars(filename))
b_filename = to_bytes(filename, errors='surrogate_or_strict')
st = os.lstat(b_filename)
uid = st.st_uid
gid = st.st_gid
return (uid, gid)
def find_mount_point(self, path):
path = os.path.realpath(os.path.expanduser(os.path.expandvars(path)))
while not os.path.ismount(path):
path = os.path.dirname(path)
return path
    def is_special_selinux_path(self, path):
        """
        Returns a tuple containing (True, selinux_context) if the given path is on a
        NFS or other 'special' fs  mount point, otherwise the return will be (False, None).
        """
        try:
            f = open('/proc/mounts', 'r')
            mount_data = f.readlines()
            f.close()
        except:
            # best-effort: if /proc/mounts is unreadable, treat as not special
            return (False, None)
        path_mount_point = self.find_mount_point(path)
        for line in mount_data:
            # /proc/mounts format: device mountpoint fstype options rest
            (device, mount_point, fstype, options, rest) = line.split(' ', 4)
            if path_mount_point == mount_point:
                # self._selinux_special_fs comes from the _ansible_selinux_special_fs
                # internal parameter (set up elsewhere in this class)
                for fs in self._selinux_special_fs:
                    if fs in fstype:
                        special_context = self.selinux_context(path_mount_point)
                        return (True, special_context)
        return (False, None)
    def set_default_selinux_context(self, path, changed):
        '''Reset *path* to its default selinux context; returns the
        (possibly updated) changed flag.  No-op when selinux is
        unavailable or disabled.'''
        if not HAVE_SELINUX or not self.selinux_enabled():
            return changed
        context = self.selinux_default_context(path)
        return self.set_context_if_different(path, context, False)
    def set_context_if_different(self, path, context, changed, diff=None):
        '''Apply selinux *context* to *path* if it differs from the current
        one; returns the updated changed flag.  Fields in *context* that
        are None keep their current value.  In check mode, reports the
        change without applying it.'''
        if not HAVE_SELINUX or not self.selinux_enabled():
            return changed
        cur_context = self.selinux_context(path)
        new_context = list(cur_context)
        # Iterate over the current context instead of the
        # argument context, which may have selevel.
        (is_special_se, sp_context) = self.is_special_selinux_path(path)
        if is_special_se:
            # special filesystems (e.g. NFS) keep the mount's own context
            new_context = sp_context
        else:
            for i in range(len(cur_context)):
                if len(context) > i:
                    if context[i] is not None and context[i] != cur_context[i]:
                        new_context[i] = context[i]
                    elif context[i] is None:
                        new_context[i] = cur_context[i]
        if cur_context != new_context:
            if diff is not None:
                if 'before' not in diff:
                    diff['before'] = {}
                diff['before']['secontext'] = cur_context
                if 'after' not in diff:
                    diff['after'] = {}
                diff['after']['secontext'] = new_context
            try:
                if self.check_mode:
                    return True
                rc = selinux.lsetfilecon(to_native(path),
                                         str(':'.join(new_context)))
            except OSError:
                e = get_exception()
                self.fail_json(path=path, msg='invalid selinux context: %s' % str(e), new_context=new_context, cur_context=cur_context, input_was=context)
            if rc != 0:
                self.fail_json(path=path, msg='set selinux context failed')
            changed = True
        return changed
    def set_owner_if_different(self, path, owner, changed, diff=None):
        '''Chown *path* to *owner* (name or numeric uid) if it differs from
        the current owner; returns the updated changed flag.  In check
        mode, reports the change without applying it.'''
        path = os.path.expanduser(os.path.expandvars(path))
        b_path = to_bytes(path, errors='surrogate_or_strict')
        if owner is None:
            return changed
        orig_uid, orig_gid = self.user_and_group(path)
        try:
            uid = int(owner)
        except ValueError:
            # not numeric: resolve the user name to a uid
            try:
                uid = pwd.getpwnam(owner).pw_uid
            except KeyError:
                self.fail_json(path=path, msg='chown failed: failed to look up user %s' % owner)
        if orig_uid != uid:
            if diff is not None:
                if 'before' not in diff:
                    diff['before'] = {}
                diff['before']['owner'] = orig_uid
                if 'after' not in diff:
                    diff['after'] = {}
                diff['after']['owner'] = uid
            if self.check_mode:
                return True
            try:
                # lchown: act on symlinks themselves; -1 leaves gid untouched
                os.lchown(b_path, uid, -1)
            except OSError:
                self.fail_json(path=path, msg='chown failed')
            changed = True
        return changed
    def set_group_if_different(self, path, group, changed, diff=None):
        '''Chgrp *path* to *group* (name or numeric gid) if it differs from
        the current group; returns the updated changed flag.  In check
        mode, reports the change without applying it.'''
        path = os.path.expanduser(os.path.expandvars(path))
        b_path = to_bytes(path, errors='surrogate_or_strict')
        if group is None:
            return changed
        orig_uid, orig_gid = self.user_and_group(b_path)
        try:
            gid = int(group)
        except ValueError:
            # not numeric: resolve the group name to a gid
            try:
                gid = grp.getgrnam(group).gr_gid
            except KeyError:
                self.fail_json(path=path, msg='chgrp failed: failed to look up group %s' % group)
        if orig_gid != gid:
            if diff is not None:
                if 'before' not in diff:
                    diff['before'] = {}
                diff['before']['group'] = orig_gid
                if 'after' not in diff:
                    diff['after'] = {}
                diff['after']['group'] = gid
            if self.check_mode:
                return True
            try:
                # lchown: act on symlinks themselves; -1 leaves uid untouched
                os.lchown(b_path, -1, gid)
            except OSError:
                self.fail_json(path=path, msg='chgrp failed')
            changed = True
        return changed
    def set_mode_if_different(self, path, mode, changed, diff=None):
        '''Chmod *path* to *mode* if it differs from the current mode;
        returns the updated changed flag.

        *mode* may be an int, an octal string ('0644') or a symbolic
        specification ('u+rwx,g-w').  Symlink permissions are set via
        lchmod where available; otherwise the code is careful not to
        disturb the permissions of a link's target.  In check mode, the
        change is reported but not applied.
        '''
        b_path = to_bytes(path, errors='surrogate_or_strict')
        b_path = os.path.expanduser(os.path.expandvars(b_path))
        path_stat = os.lstat(b_path)
        if mode is None:
            return changed
        if not isinstance(mode, int):
            try:
                # try octal string first ('0644'), then symbolic ('u+x')
                mode = int(mode, 8)
            except Exception:
                try:
                    mode = self._symbolic_mode_to_octal(path_stat, mode)
                except Exception:
                    e = get_exception()
                    self.fail_json(path=path,
                                   msg="mode must be in octal or symbolic form",
                                   details=str(e))
                if mode != stat.S_IMODE(mode):
                    # prevent mode from having extra info orbeing invalid long number
                    self.fail_json(path=path, msg="Invalid mode supplied, only permission info is allowed", details=mode)
        prev_mode = stat.S_IMODE(path_stat.st_mode)
        if prev_mode != mode:
            if diff is not None:
                if 'before' not in diff:
                    diff['before'] = {}
                diff['before']['mode'] = '0%03o' % prev_mode
                if 'after' not in diff:
                    diff['after'] = {}
                diff['after']['mode'] = '0%03o' % mode
            if self.check_mode:
                return True
            # FIXME: comparison against string above will cause this to be executed
            # every time
            try:
                if hasattr(os, 'lchmod'):
                    os.lchmod(b_path, mode)
                else:
                    if not os.path.islink(b_path):
                        os.chmod(b_path, mode)
                    else:
                        # Attempt to set the perms of the symlink but be
                        # careful not to change the perms of the underlying
                        # file while trying
                        underlying_stat = os.stat(b_path)
                        os.chmod(b_path, mode)
                        new_underlying_stat = os.stat(b_path)
                        if underlying_stat.st_mode != new_underlying_stat.st_mode:
                            os.chmod(b_path, stat.S_IMODE(underlying_stat.st_mode))
            except OSError:
                e = get_exception()
                if os.path.islink(b_path) and e.errno == errno.EPERM:  # Can't set mode on symbolic links
                    pass
                elif e.errno in (errno.ENOENT, errno.ELOOP): # Can't set mode on broken symbolic links
                    pass
                else:
                    raise e
            except Exception:
                e = get_exception()
                self.fail_json(path=path, msg='chmod failed', details=str(e))
            # re-stat to report what actually happened on disk
            path_stat = os.lstat(b_path)
            new_mode = stat.S_IMODE(path_stat.st_mode)
            if new_mode != prev_mode:
                changed = True
        return changed
def _symbolic_mode_to_octal(self, path_stat, symbolic_mode):
    """Convert a symbolic mode string (e.g. 'u+rwx,g-w') to an octal mode.

    Each comma-separated clause is applied in order against the mode
    currently held by *path_stat*, so later clauses see the effect of
    earlier ones.

    :raises ValueError: if a clause does not parse as a symbolic mode
    """
    clause_re = re.compile(r'^(?P<users>[ugoa]+)(?P<operator>[-+=])(?P<perms>[rwxXst-]*|[ugo])$')
    result = stat.S_IMODE(path_stat.st_mode)
    for clause in symbolic_mode.split(','):
        parsed = clause_re.match(clause)
        if parsed is None:
            raise ValueError("bad symbolic permission for mode: %s" % clause)
        targets = parsed.group('users')
        if targets == 'a':
            targets = 'ugo'
        op = parsed.group('operator')
        requested_perms = parsed.group('perms')
        for target in targets:
            bits = self._get_octal_mode_from_symbolic_perms(path_stat, target, requested_perms)
            result = self._apply_operation_to_mode(target, op, bits, result)
    return result
def _apply_operation_to_mode(self, user, operator, mode_to_apply, current_mode):
if operator == '=':
if user == 'u': mask = stat.S_IRWXU | stat.S_ISUID
elif user == 'g': mask = stat.S_IRWXG | stat.S_ISGID
elif user == 'o': mask = stat.S_IRWXO | stat.S_ISVTX
# mask out u, g, or o permissions from current_mode and apply new permissions
inverse_mask = mask ^ PERM_BITS
new_mode = (current_mode & inverse_mask) | mode_to_apply
elif operator == '+':
new_mode = current_mode | mode_to_apply
elif operator == '-':
new_mode = current_mode - (current_mode & mode_to_apply)
return new_mode
def _get_octal_mode_from_symbolic_perms(self, path_stat, user, perms):
    """Translate one user-class's symbolic perms (e.g. 'rwX') into octal bits.

    :arg path_stat: stat result of the target; supplies the current mode
        (needed for 'X' and for copying bits via the 'u'/'g'/'o' perms)
    :arg user: the class the perms apply to: 'u', 'g' or 'o'
    :arg perms: string of symbols from 'rwxXst-', or a single class letter
    :returns: the OR of the permission bits selected by *perms*
    """
    prev_mode = stat.S_IMODE(path_stat.st_mode)
    is_directory = stat.S_ISDIR(path_stat.st_mode)
    has_x_permissions = (prev_mode & EXEC_PERM_BITS) > 0
    # 'X' grants execute only to directories or already-executable files
    apply_X_permission = is_directory or has_x_permissions

    # Permission bits constants documented at:
    # http://docs.python.org/2/library/stat.html#stat.S_ISUID
    if apply_X_permission:
        X_perms = {
            'u': {'X': stat.S_IXUSR},
            'g': {'X': stat.S_IXGRP},
            'o': {'X': stat.S_IXOTH}
        }
    else:
        X_perms = {
            'u': {'X': 0},
            'g': {'X': 0},
            'o': {'X': 0}
        }

    # Map each symbol to its bit value for the requested class; the
    # 'u'/'g'/'o' symbols copy another class's bits, shifted into place.
    user_perms_to_modes = {
        'u': {
            'r': stat.S_IRUSR,
            'w': stat.S_IWUSR,
            'x': stat.S_IXUSR,
            's': stat.S_ISUID,
            't': 0,
            'u': prev_mode & stat.S_IRWXU,
            'g': (prev_mode & stat.S_IRWXG) << 3,
            'o': (prev_mode & stat.S_IRWXO) << 6 },
        'g': {
            'r': stat.S_IRGRP,
            'w': stat.S_IWGRP,
            'x': stat.S_IXGRP,
            's': stat.S_ISGID,
            't': 0,
            'u': (prev_mode & stat.S_IRWXU) >> 3,
            'g': prev_mode & stat.S_IRWXG,
            'o': (prev_mode & stat.S_IRWXO) << 3 },
        'o': {
            'r': stat.S_IROTH,
            'w': stat.S_IWOTH,
            'x': stat.S_IXOTH,
            's': 0,
            't': stat.S_ISVTX,
            'u': (prev_mode & stat.S_IRWXU) >> 6,
            'g': (prev_mode & stat.S_IRWXG) >> 3,
            'o': prev_mode & stat.S_IRWXO }
    }

    # Insert X_perms into user_perms_to_modes
    for key, value in X_perms.items():
        user_perms_to_modes[key].update(value)

    # OR together the bit value of every requested symbol
    or_reduce = lambda mode, perm: mode | user_perms_to_modes[user][perm]
    return reduce(or_reduce, perms, 0)
def set_fs_attributes_if_different(self, file_args, changed, diff=None):
    """Apply secontext, owner, group and mode from *file_args* as needed.

    :returns: the accumulated changed flag
    """
    for setter, key in (
            (self.set_context_if_different, 'secontext'),
            (self.set_owner_if_different, 'owner'),
            (self.set_group_if_different, 'group'),
            (self.set_mode_if_different, 'mode')):
        changed = setter(file_args['path'], file_args[key], changed, diff)
    return changed
def set_directory_attributes_if_different(self, file_args, changed, diff=None):
    # Directories get the same attribute handling as any filesystem object.
    return self.set_fs_attributes_if_different(file_args, changed, diff)
def set_file_attributes_if_different(self, file_args, changed, diff=None):
    # Regular files get the same attribute handling as any filesystem object.
    return self.set_fs_attributes_if_different(file_args, changed, diff)
def add_path_info(self, kwargs):
    '''
    for results that are files, supplement the info about the file
    in the return path with stats about the file path.
    '''
    # use 'path' if present, otherwise fall back to 'dest'
    path = kwargs.get('path', kwargs.get('dest', None))
    if path is None:
        return kwargs
    b_path = to_bytes(path, errors='surrogate_or_strict')
    if os.path.exists(b_path):
        (uid, gid) = self.user_and_group(path)
        kwargs['uid'] = uid
        kwargs['gid'] = gid
        try:
            user = pwd.getpwuid(uid)[0]
        except KeyError:
            # uid without a passwd entry: report the numeric id as a string
            user = str(uid)
        try:
            group = grp.getgrgid(gid)[0]
        except KeyError:
            group = str(gid)
        kwargs['owner'] = user
        kwargs['group'] = group
        # lstat so symlinks report their own mode, not the target's
        st = os.lstat(b_path)
        kwargs['mode'] = '0%03o' % stat.S_IMODE(st[stat.ST_MODE])
        # secontext not yet supported
        if os.path.islink(b_path):
            kwargs['state'] = 'link'
        elif os.path.isdir(b_path):
            kwargs['state'] = 'directory'
        elif os.stat(b_path).st_nlink > 1:
            # regular file with multiple links is reported as a hardlink
            kwargs['state'] = 'hard'
        else:
            kwargs['state'] = 'file'
        if HAVE_SELINUX and self.selinux_enabled():
            kwargs['secontext'] = ':'.join(self.selinux_context(path))
        kwargs['size'] = st[stat.ST_SIZE]
    else:
        kwargs['state'] = 'absent'
    return kwargs
def _check_locale(self):
'''
Uses the locale module to test the currently set locale
(per the LANG and LC_CTYPE environment settings)
'''
try:
# setting the locale to '' uses the default locale
# as it would be returned by locale.getdefaultlocale()
locale.setlocale(locale.LC_ALL, '')
except locale.Error:
# fallback to the 'C' locale, which may cause unicode
# issues but is preferable to simply failing because
# of an unknown locale
locale.setlocale(locale.LC_ALL, 'C')
os.environ['LANG'] = 'C'
os.environ['LC_ALL'] = 'C'
os.environ['LC_MESSAGES'] = 'C'
except Exception:
e = get_exception()
self.fail_json(msg="An unknown error was encountered while attempting to validate the locale: %s" % e)
def _handle_aliases(self):
# this uses exceptions as it happens before we can safely call fail_json
aliases_results = {} #alias:canon
for (k,v) in self.argument_spec.items():
self._legal_inputs.append(k)
aliases = v.get('aliases', None)
default = v.get('default', None)
required = v.get('required', False)
if default is not None and required:
# not alias specific but this is a good place to check this
raise Exception("internal error: required and default are mutually exclusive for %s" % k)
if aliases is None:
continue
if not isinstance(aliases, SEQUENCETYPE) or isinstance(aliases, (binary_type, text_type)):
raise Exception('internal error: aliases must be a list or tuple')
for alias in aliases:
self._legal_inputs.append(alias)
aliases_results[alias] = k
if alias in self.params:
self.params[k] = self.params[alias]
return aliases_results
def _check_arguments(self, check_invalid_arguments):
    """Consume internal '_ansible_*' control parameters and validate the rest.

    The internal parameters set module behaviour flags (check mode,
    no_log, debug, diff, verbosity, selinux special fs list, syslog
    facility, ansible version and module name) and are removed from
    self.params afterwards.  When *check_invalid_arguments* is true, any
    remaining parameter not in self._legal_inputs triggers fail_json.
    Exits early (skipped) when check mode is requested but the module
    does not support it.
    """
    self._syslog_facility = 'LOG_USER'
    # iterate over a copy because internal keys are deleted inside the loop
    for (k,v) in list(self.params.items()):
        if k == '_ansible_check_mode' and v:
            self.check_mode = True
        elif k == '_ansible_no_log':
            self.no_log = self.boolean(v)
        elif k == '_ansible_debug':
            self._debug = self.boolean(v)
        elif k == '_ansible_diff':
            self._diff = self.boolean(v)
        elif k == '_ansible_verbosity':
            self._verbosity = v
        elif k == '_ansible_selinux_special_fs':
            self._selinux_special_fs = v
        elif k == '_ansible_syslog_facility':
            self._syslog_facility = v
        elif k == '_ansible_version':
            self.ansible_version = v
        elif k == '_ansible_module_name':
            self._name = v
        elif check_invalid_arguments and k not in self._legal_inputs:
            self.fail_json(msg="unsupported parameter for module: %s" % k)
        # clean up internal params:
        if k.startswith('_ansible_'):
            del self.params[k]
    if self.check_mode and not self.supports_check_mode:
        self.exit_json(skipped=True, msg="remote module (%s) does not support check mode" % self._name)
def _count_terms(self, check):
count = 0
for term in check:
if term in self.params:
count += 1
return count
def _check_mutually_exclusive(self, spec):
if spec is None:
return
for check in spec:
count = self._count_terms(check)
if count > 1:
self.fail_json(msg="parameters are mutually exclusive: %s" % (check,))
def _check_required_one_of(self, spec):
if spec is None:
return
for check in spec:
count = self._count_terms(check)
if count == 0:
self.fail_json(msg="one of the following is required: %s" % ','.join(check))
def _check_required_together(self, spec):
if spec is None:
return
for check in spec:
counts = [ self._count_terms([field]) for field in check ]
non_zero = [ c for c in counts if c > 0 ]
if len(non_zero) > 0:
if 0 in counts:
self.fail_json(msg="parameters are required together: %s" % (check,))
def _check_required_arguments(self):
''' ensure all required arguments are present '''
missing = []
for (k,v) in self.argument_spec.items():
required = v.get('required', False)
if required and k not in self.params:
missing.append(k)
if len(missing) > 0:
self.fail_json(msg="missing required arguments: %s" % ",".join(missing))
def _check_required_if(self, spec):
''' ensure that parameters which conditionally required are present '''
if spec is None:
return
for (key, val, requirements) in spec:
missing = []
if key in self.params and self.params[key] == val:
for check in requirements:
count = self._count_terms((check,))
if count == 0:
missing.append(check)
if len(missing) > 0:
self.fail_json(msg="%s is %s but the following are missing: %s" % (key, val, ','.join(missing)))
def _check_argument_values(self):
    ''' ensure all arguments have the requested values, and there are no stray arguments '''
    for (k,v) in self.argument_spec.items():
        choices = v.get('choices',None)
        if choices is None:
            continue
        if isinstance(choices, SEQUENCETYPE) and not isinstance(choices, (binary_type, text_type)):
            if k in self.params:
                if self.params[k] not in choices:
                    # PyYaml converts certain strings to bools. If we can unambiguously convert back, do so before checking
                    # the value. If we can't figure this out, module author is responsible.
                    lowered_choices = None
                    if self.params[k] == 'False':
                        lowered_choices = _lenient_lowercase(choices)
                        FALSEY = frozenset(BOOLEANS_FALSE)
                        overlap = FALSEY.intersection(choices)
                        if len(overlap) == 1:
                            # Extract from a set
                            (self.params[k],) = overlap
                    if self.params[k] == 'True':
                        if lowered_choices is None:
                            lowered_choices = _lenient_lowercase(choices)
                        TRUTHY = frozenset(BOOLEANS_TRUE)
                        overlap = TRUTHY.intersection(choices)
                        if len(overlap) == 1:
                            (self.params[k],) = overlap
                    # re-check after the bool-string coercion above
                    if self.params[k] not in choices:
                        choices_str=",".join([to_native(c) for c in choices])
                        msg="value of %s must be one of: %s, got: %s" % (k, choices_str, self.params[k])
                        self.fail_json(msg=msg)
        else:
            self.fail_json(msg="internal error: choices for argument %s are not iterable: %s" % (k, choices))
def safe_eval(self, value, locals=None, include_exceptions=False):
    """Evaluate *value* as a Python literal, refusing risky constructs.

    Strings that look like method calls or imports, and non-string
    values, are returned unchanged.  With include_exceptions=True a
    (result, exception) tuple is returned instead of the bare result.

    NOTE(review): the *locals* parameter is accepted but never used
    here — presumably kept for backwards compatibility; confirm before
    removing.
    """
    # do not allow method calls to modules
    if not isinstance(value, string_types):
        # already templated to a datavaluestructure, perhaps?
        if include_exceptions:
            return (value, None)
        return value
    if re.search(r'\w\.\w+\(', value):
        if include_exceptions:
            return (value, None)
        return value
    # do not allow imports
    if re.search(r'import \w+', value):
        if include_exceptions:
            return (value, None)
        return value
    try:
        # literal_eval only accepts python literals, never calls
        result = literal_eval(value)
        if include_exceptions:
            return (result, None)
        else:
            return result
    except Exception:
        e = get_exception()
        if include_exceptions:
            return (value, e)
        return value
def _check_type_str(self, value):
    """Coerce *value* to a string; non-strings go through str()."""
    if not isinstance(value, string_types):
        # Note: str() could throw a unicode error if value's __str__()
        # returns non-ascii. Have to port utils.to_bytes() if that happens
        value = str(value)
    return value
def _check_type_list(self, value):
if isinstance(value, list):
return value
if isinstance(value, string_types):
return value.split(",")
elif isinstance(value, int) or isinstance(value, float):
return [ str(value) ]
raise TypeError('%s cannot be converted to a list' % type(value))
def _check_type_dict(self, value):
    """Coerce *value* to a dict.

    Accepts an actual dict, a JSON object string (starting with '{',
    with a safe python-literal fallback), or a 'k1=v1 k2=v2' /
    'k1=v1,k2=v2' style string honouring quoting and backslash escapes.

    :raises TypeError: if no interpretation succeeds
    """
    if isinstance(value, dict):
        return value
    if isinstance(value, string_types):
        if value.startswith("{"):
            try:
                return json.loads(value)
            except:
                # not valid JSON: fall back to a safe python-literal parse
                (result, exc) = self.safe_eval(value, dict(), include_exceptions=True)
                if exc is not None:
                    raise TypeError('unable to evaluate string as dictionary')
                return result
        elif '=' in value:
            # hand-rolled tokenizer: split on unquoted spaces/commas,
            # honouring single/double quotes and backslash escapes
            fields = []
            field_buffer = []
            in_quote = False
            in_escape = False
            for c in value.strip():
                if in_escape:
                    # previous char was a backslash: take this char literally
                    field_buffer.append(c)
                    in_escape = False
                elif c == '\\':
                    in_escape = True
                elif not in_quote and c in ('\'', '"'):
                    # remember which quote char opened, to match it on close
                    in_quote = c
                elif in_quote and in_quote == c:
                    in_quote = False
                elif not in_quote and c in (',', ' '):
                    # unquoted separator: flush the current token
                    field = ''.join(field_buffer)
                    if field:
                        fields.append(field)
                    field_buffer = []
                else:
                    field_buffer.append(c)
            # flush the trailing token
            field = ''.join(field_buffer)
            if field:
                fields.append(field)
            return dict(x.split("=", 1) for x in fields)
        else:
            raise TypeError("dictionary requested, could not parse JSON or key=value")
    raise TypeError('%s cannot be converted to a dict' % type(value))
def _check_type_bool(self, value):
if isinstance(value, bool):
return value
if isinstance(value, string_types) or isinstance(value, int):
return self.boolean(value)
raise TypeError('%s cannot be converted to a bool' % type(value))
def _check_type_int(self, value):
if isinstance(value, int):
return value
if isinstance(value, string_types):
return int(value)
raise TypeError('%s cannot be converted to an int' % type(value))
def _check_type_float(self, value):
if isinstance(value, float):
return value
if isinstance(value, (binary_type, text_type, int)):
return float(value)
raise TypeError('%s cannot be converted to a float' % type(value))
def _check_type_path(self, value):
value = self._check_type_str(value)
return os.path.expanduser(os.path.expandvars(value))
def _check_type_jsonarg(self, value):
    """Return a JSON string for *value*.

    The controller sometimes turns a JSON string into a dict/list;
    such containers are re-serialized here, while values that are
    already strings are returned with surrounding whitespace stripped.

    :raises TypeError: for any other input type
    """
    if isinstance(value, (text_type, binary_type)):
        return value.strip()
    if isinstance(value, (list, tuple, dict)):
        return json.dumps(value)
    raise TypeError('%s cannot be converted to a json string' % type(value))
def _check_type_raw(self, value):
    # 'raw' type: pass the value through without any conversion
    return value
def _check_type_bytes(self, value):
try:
self.human_to_bytes(value)
except ValueError:
raise TypeError('%s cannot be converted to a Byte value' % type(value))
def _check_type_bits(self, value):
try:
self.human_to_bytes(value, isbits=True)
except ValueError:
raise TypeError('%s cannot be converted to a Bit value' % type(value))
def _check_argument_types(self):
    ''' ensure all arguments have the requested type '''
    for (k, v) in self.argument_spec.items():
        wanted = v.get('type', None)
        if k not in self.params:
            continue
        if wanted is None:
            # Mostly we want to default to str.
            # For values set to None explicitly, return None instead as
            # that allows a user to unset a parameter
            if self.params[k] is None:
                continue
            wanted = 'str'
        value = self.params[k]
        if value is None:
            # an explicit None is preserved regardless of the wanted type
            continue
        try:
            # dispatch table maps type names to _check_type_* converters
            type_checker = self._CHECK_ARGUMENT_TYPES_DISPATCHER[wanted]
        except KeyError:
            self.fail_json(msg="implementation error: unknown type %s requested for %s" % (wanted, k))
        try:
            self.params[k] = type_checker(value)
        except (TypeError, ValueError):
            self.fail_json(msg="argument %s is of type %s and we were unable to convert to %s" % (k, type(value), wanted))
def _set_defaults(self, pre=True):
for (k,v) in self.argument_spec.items():
default = v.get('default', None)
if pre == True:
# this prevents setting defaults on required items
if default is not None and k not in self.params:
self.params[k] = default
else:
# make sure things without a default still get set None
if k not in self.params:
self.params[k] = default
def _set_fallbacks(self):
for k,v in self.argument_spec.items():
fallback = v.get('fallback', (None,))
fallback_strategy = fallback[0]
fallback_args = []
fallback_kwargs = {}
if k not in self.params and fallback_strategy is not None:
for item in fallback[1:]:
if isinstance(item, dict):
fallback_kwargs = item
else:
fallback_args = item
try:
self.params[k] = fallback_strategy(*fallback_args, **fallback_kwargs)
except AnsibleFallbackNotFound:
continue
def _load_params(self):
    ''' read the input and set the params attribute.

    This method is for backwards compatibility. The guts of the function
    were moved out in 2.1 so that custom modules could read the parameters.
    '''
    # debug overrides to read args from file or cmdline
    # (delegates to the module-level _load_params helper)
    self.params = _load_params()
def _log_to_syslog(self, msg):
    # Only attempt syslog when the syslog module imported successfully.
    if HAS_SYSLOG:
        module = 'ansible-%s' % self._name
        # fall back to LOG_USER if the configured facility name is unknown
        facility = getattr(syslog, self._syslog_facility, syslog.LOG_USER)
        syslog.openlog(str(module), 0, facility)
        syslog.syslog(syslog.LOG_INFO, msg)
def debug(self, msg):
    # Forward to self.log() only when the _ansible_debug flag was set.
    if self._debug:
        self.log(msg)
def log(self, msg, log_args=None):
    """Log *msg* (plus optional structured *log_args*) to journald or syslog.

    No-op when no_log is set.  Values registered in self.no_log_values
    are scrubbed from the message before it is emitted.
    """
    if not self.no_log:
        if log_args is None:
            log_args = dict()
        module = 'ansible-%s' % self._name
        if isinstance(module, binary_type):
            module = module.decode('utf-8', 'replace')
        # 6655 - allow for accented characters
        if not isinstance(msg, (binary_type, text_type)):
            raise TypeError("msg should be a string (got %s)" % type(msg))
        # We want journal to always take text type
        # syslog takes bytes on py2, text type on py3
        if isinstance(msg, binary_type):
            journal_msg = remove_values(msg.decode('utf-8', 'replace'), self.no_log_values)
        else:
            # TODO: surrogateescape is a danger here on Py3
            journal_msg = remove_values(msg, self.no_log_values)
        if PY3:
            syslog_msg = journal_msg
        else:
            syslog_msg = journal_msg.encode('utf-8', 'replace')
        if has_journal:
            journal_args = [("MODULE", os.path.basename(__file__))]
            for arg in log_args:
                journal_args.append((arg.upper(), str(log_args[arg])))
            try:
                journal.send(u"%s %s" % (module, journal_msg), **dict(journal_args))
            except IOError:
                # fall back to syslog since logging to journal failed
                self._log_to_syslog(syslog_msg)
        else:
            self._log_to_syslog(syslog_msg)
def _log_invocation(self):
    ''' log that ansible ran the module '''
    # TODO: generalize a separate log function and make log_invocation use it
    # Sanitize possible password argument when logging.
    log_args = dict()
    for param in self.params:
        # resolve aliases so the canonical spec entry is consulted
        canon = self.aliases.get(param, param)
        arg_opts = self.argument_spec.get(canon, {})
        no_log = arg_opts.get('no_log', False)
        arg_type = arg_opts.get('type', 'str')
        if self.boolean(no_log):
            log_args[param] = 'NOT_LOGGING_PARAMETER'
        # try to capture all passwords/passphrase named fields
        elif arg_type != 'bool' and PASSWORD_MATCH.search(param):
            # bool-typed params are exempt from the password-name heuristic
            log_args[param] = 'NOT_LOGGING_PASSWORD'
        else:
            param_val = self.params[param]
            if not isinstance(param_val, (text_type, binary_type)):
                param_val = str(param_val)
            elif isinstance(param_val, text_type):
                param_val = param_val.encode('utf-8')
            log_args[param] = heuristic_log_sanitize(param_val, self.no_log_values)
    msg = []
    for arg in log_args:
        arg_val = log_args[arg]
        if not isinstance(arg_val, (text_type, binary_type)):
            arg_val = str(arg_val)
        elif isinstance(arg_val, text_type):
            arg_val = arg_val.encode('utf-8')
        msg.append('%s=%s' % (arg, arg_val))
    if msg:
        msg = 'Invoked with %s' % ' '.join(msg)
    else:
        msg = 'Invoked'
    self.log(msg, log_args=log_args)
def _set_cwd(self):
    """Return a readable working directory, changing into a fallback if needed.

    If the current cwd is unreadable (common under sudo), try $HOME and
    the system temp dir; returns None when no usable directory exists
    (callers treat that as non-fatal).
    """
    try:
        cwd = os.getcwd()
        if not os.access(cwd, os.F_OK|os.R_OK):
            # bare raise with no active exception still ends up in the
            # handler below (it raises, and the bare except catches it)
            raise
        return cwd
    except:
        # we don't have access to the cwd, probably because of sudo.
        # Try and move to a neutral location to prevent errors
        for cwd in [os.path.expandvars('$HOME'), tempfile.gettempdir()]:
            try:
                if os.access(cwd, os.F_OK|os.R_OK):
                    os.chdir(cwd)
                    return cwd
            except:
                pass
    # we won't error here, as it may *not* be a problem,
    # and we don't want to break modules unnecessarily
    return None
def get_bin_path(self, arg, required=False, opt_dirs=None):
    '''
    find system executable in PATH.

    :arg arg: name of the executable to look for
    :kwarg required: if executable is not found and required is true, fail_json
    :kwarg opt_dirs: optional list of directories to search in addition to PATH
    :returns: full path if found; otherwise None

    Bugfix: opt_dirs previously defaulted to a mutable list shared
    across calls; it now defaults to None.
    '''
    opt_dirs = [] if opt_dirs is None else opt_dirs
    sbin_paths = ['/sbin', '/usr/sbin', '/usr/local/sbin']
    paths = []
    for d in opt_dirs:
        if d is not None and os.path.exists(d):
            paths.append(d)
    paths += os.environ.get('PATH', '').split(os.pathsep)
    bin_path = None
    # mangle PATH to include /sbin dirs
    for p in sbin_paths:
        if p not in paths and os.path.exists(p):
            paths.append(p)
    for d in paths:
        if not d:
            continue
        path = os.path.join(d, arg)
        if os.path.exists(path) and is_executable(path):
            bin_path = path
            break
    if required and bin_path is None:
        self.fail_json(msg='Failed to find required executable %s' % arg)
    return bin_path
def boolean(self, arg):
    ''' return a bool for the arg '''
    # None and real bools pass straight through
    if arg is None or isinstance(arg, bool):
        return arg
    if isinstance(arg, string_types):
        arg = arg.lower()
    # non-string values (e.g. ints) are matched directly against the lists
    if arg in BOOLEANS_TRUE:
        return True
    elif arg in BOOLEANS_FALSE:
        return False
    else:
        self.fail_json(msg='Boolean %s not in either boolean list' % arg)
def jsonify(self, data):
    """Serialize *data* to JSON, trying utf-8 then latin-1 for legacy bytes.

    When the json implementation rejects the encoding keyword, byte
    strings inside *data* are decoded to unicode first and a plain
    dumps() is used.
    """
    for encoding in ("utf-8", "latin-1"):
        try:
            return json.dumps(data, encoding=encoding)
        # Old systems using old simplejson module does not support encoding keyword.
        except TypeError:
            try:
                new_data = json_dict_bytes_to_unicode(data, encoding=encoding)
            except UnicodeDecodeError:
                # bytes not valid in this encoding: try the next one
                continue
            return json.dumps(new_data)
        except UnicodeDecodeError:
            continue
    self.fail_json(msg='Invalid unicode encoding encountered')
def from_json(self, data):
    # Thin wrapper over json.loads for symmetry with jsonify().
    return json.loads(data)
def add_cleanup_file(self, path):
    """Register *path* for removal at exit; duplicates are ignored."""
    if path in self.cleanup_files:
        return
    self.cleanup_files.append(path)
def do_cleanup_files(self):
    """Remove every file registered via add_cleanup_file()."""
    for registered in self.cleanup_files:
        self.cleanup(registered)
def exit_json(self, **kwargs):
    ''' return from the module, without error

    Adds path info, ensures 'changed' and 'invocation' keys exist,
    scrubs no_log values, removes registered temp files, prints the
    JSON result and exits with status 0.
    '''
    self.add_path_info(kwargs)
    # use the idiomatic membership test, consistent with the check below
    if 'changed' not in kwargs:
        kwargs['changed'] = False
    if 'invocation' not in kwargs:
        kwargs['invocation'] = {'module_args': self.params}
    # strip any no_log values from the result before emitting it
    kwargs = remove_values(kwargs, self.no_log_values)
    self.do_cleanup_files()
    print('\n%s' % self.jsonify(kwargs))
    sys.exit(0)
def fail_json(self, **kwargs):
    ''' return from the module, with an error message '''
    self.add_path_info(kwargs)
    # 'msg' is mandatory on failure; enforced here for module authors
    assert 'msg' in kwargs, "implementation error -- msg to explain the error is required"
    kwargs['failed'] = True
    if 'invocation' not in kwargs:
        kwargs['invocation'] = {'module_args': self.params}
    # scrub no_log values before the result is printed
    kwargs = remove_values(kwargs, self.no_log_values)
    self.do_cleanup_files()
    print('\n%s' % self.jsonify(kwargs))
    sys.exit(1)
def fail_on_missing_params(self, required_params=None):
    ''' This is for checking for required params when we can not check via argspec because we
    need more information than is simply given in the argspec.
    '''
    if not required_params:
        return
    # NOTE: a parameter present with a falsy value (0, '', False) counts
    # as missing here, matching the .get() truthiness check.
    missing_params = [p for p in required_params if not self.params.get(p)]
    if missing_params:
        self.fail_json(msg="missing required arguments: %s" % ','.join(missing_params))
def digest_from_file(self, filename, algorithm):
    ''' Return hex digest of local file for a digest_method specified by name, or None if file is not present.

    :arg filename: path of the file to hash
    :arg algorithm: algorithm name (looked up in AVAILABLE_HASH_ALGORITHMS)
        or, for backwards compatibility, a hash object with hexdigest()

    Fix: the file is now opened with a context manager so the handle is
    closed even if reading fails part-way through.
    '''
    if not os.path.exists(filename):
        return None
    if os.path.isdir(filename):
        self.fail_json(msg="attempted to take checksum of directory: %s" % filename)
    # preserve old behaviour where the third parameter was a hash algorithm object
    if hasattr(algorithm, 'hexdigest'):
        digest_method = algorithm
    else:
        try:
            digest_method = AVAILABLE_HASH_ALGORITHMS[algorithm]()
        except KeyError:
            self.fail_json(msg="Could not hash file '%s' with algorithm '%s'. Available algorithms: %s" %
                           (filename, algorithm, ', '.join(AVAILABLE_HASH_ALGORITHMS)))
    blocksize = 64 * 1024
    # read in fixed-size blocks so large files don't load into memory
    with open(filename, 'rb') as infile:
        block = infile.read(blocksize)
        while block:
            digest_method.update(block)
            block = infile.read(blocksize)
    return digest_method.hexdigest()
def md5(self, filename):
    ''' Return MD5 hex digest of local file using digest_from_file().

    Do not use this function unless you have no other choice for:
        1) Optional backwards compatibility
        2) Compatibility with a third party protocol

    This function will not work on systems complying with FIPS-140-2.

    Most uses of this function can use the module.sha1 function instead.
    '''
    # in FIPS mode hashlib refuses md5, so it is absent from the table
    if 'md5' not in AVAILABLE_HASH_ALGORITHMS:
        raise ValueError('MD5 not available. Possibly running in FIPS mode')
    return self.digest_from_file(filename, 'md5')
def sha1(self, filename):
    ''' Return SHA1 hex digest of local file using digest_from_file(). '''
    return self.digest_from_file(filename, 'sha1')
def sha256(self, filename):
    ''' Return SHA-256 hex digest of local file using digest_from_file(). '''
    return self.digest_from_file(filename, 'sha256')
def backup_local(self, fn):
    '''make a date-marked backup of the specified file

    :returns: the backup file name, or the empty string when *fn* does
        not exist; calls fail_json if the copy fails.  (The old claim
        that this returns True/False was wrong.)
    '''
    backupdest = ''
    if os.path.exists(fn):
        # backups named basename-YYYY-MM-DD@HH:MM:SS~
        ext = time.strftime("%Y-%m-%d@%H:%M:%S~", time.localtime(time.time()))
        backupdest = '%s.%s.%s' % (fn, os.getpid(), ext)
        try:
            # copy2 preserves mode and timestamps
            shutil.copy2(fn, backupdest)
        except (shutil.Error, IOError):
            e = get_exception()
            self.fail_json(msg='Could not make backup of %s to %s: %s' % (fn, backupdest, e))
    return backupdest
def cleanup(self, tmpfile):
    """Best-effort removal of *tmpfile*; failures are reported on stderr."""
    if not os.path.exists(tmpfile):
        return
    try:
        os.unlink(tmpfile)
    except OSError:
        e = get_exception()
        sys.stderr.write("could not cleanup %s: %s" % (tmpfile, e))
def atomic_move(self, src, dest, unsafe_writes=False):
    '''atomically move src to dest, copying attributes from dest, returns true on success
    it uses os.rename to ensure this as it is an atomic operation, rest of the function is
    to work around limitations, corner cases and ensure selinux context is saved if possible'''
    context = None
    dest_stat = None
    b_src = to_bytes(src, errors='surrogate_or_strict')
    b_dest = to_bytes(dest, errors='surrogate_or_strict')
    if os.path.exists(b_dest):
        try:
            # copy ownership and permission bits of the existing dest to src,
            # so the rename preserves them
            dest_stat = os.stat(b_dest)
            os.chmod(b_src, dest_stat.st_mode & PERM_BITS)
            os.chown(b_src, dest_stat.st_uid, dest_stat.st_gid)
        except OSError:
            e = get_exception()
            # EPERM is tolerated (non-root caller); anything else is fatal
            if e.errno != errno.EPERM:
                raise
        if self.selinux_enabled():
            context = self.selinux_context(dest)
    else:
        if self.selinux_enabled():
            context = self.selinux_default_context(dest)
    creating = not os.path.exists(b_dest)
    try:
        # Optimistically try a rename, solves some corner cases and can avoid useless work, throws exception if not atomic.
        os.rename(b_src, b_dest)
    except (IOError, OSError):
        e = get_exception()
        if e.errno not in [errno.EPERM, errno.EXDEV, errno.EACCES, errno.ETXTBSY, errno.EBUSY]:
            # only try workarounds for errno 18 (cross device), 1 (not permitted), 13 (permission denied)
            # and 26 (text file busy) which happens on vagrant synced folders and other 'exotic' non posix file systems
            self.fail_json(msg='Could not replace file: %s to %s: %s' % (src, dest, e))
        else:
            # fallback: stage a temp file next to dest, then rename it in
            b_dest_dir = os.path.dirname(b_dest)
            # Use bytes here.  In the shippable CI, this fails with
            # a UnicodeError with surrogateescape'd strings for an unknown
            # reason (doesn't happen in a local Ubuntu16.04 VM)
            native_dest_dir = b_dest_dir
            native_suffix = os.path.basename(b_dest)
            native_prefix = b('.ansible_tmp')
            try:
                tmp_dest_fd, tmp_dest_name = tempfile.mkstemp(
                    prefix=native_prefix, dir=native_dest_dir, suffix=native_suffix)
            except (OSError, IOError):
                e = get_exception()
                self.fail_json(msg='The destination directory (%s) is not writable by the current user. Error was: %s' % (os.path.dirname(dest), e))
            except TypeError:
                # We expect that this is happening because python3.4.x and
                # below can't handle byte strings in mkstemp().  Traceback
                # would end in something like:
                #     file = _os.path.join(dir, pre + name + suf)
                #     TypeError: can't concat bytes to str
                self.fail_json(msg='Failed creating temp file for atomic move.  This usually happens when using Python3 less than Python3.5.  Please use Python2.x or Python3.5 or greater.', exception=sys.exc_info())
            b_tmp_dest_name = to_bytes(tmp_dest_name, errors='surrogate_or_strict')
            try:
                try:
                    # close tmp file handle before file operations to prevent text file busy errors on vboxfs synced folders (windows host)
                    os.close(tmp_dest_fd)
                    # leaves tmp file behind when sudo and not root
                    try:
                        shutil.move(b_src, b_tmp_dest_name)
                    except OSError:
                        # cleanup will happen by 'rm' of tempdir
                        # copy2 will preserve some metadata
                        shutil.copy2(b_src, b_tmp_dest_name)
                    if self.selinux_enabled():
                        self.set_context_if_different(
                            b_tmp_dest_name, context, False)
                    try:
                        # restore dest's original ownership on the staged file
                        tmp_stat = os.stat(b_tmp_dest_name)
                        if dest_stat and (tmp_stat.st_uid != dest_stat.st_uid or tmp_stat.st_gid != dest_stat.st_gid):
                            os.chown(b_tmp_dest_name, dest_stat.st_uid, dest_stat.st_gid)
                    except OSError:
                        e = get_exception()
                        if e.errno != errno.EPERM:
                            raise
                    try:
                        os.rename(b_tmp_dest_name, b_dest)
                    except (shutil.Error, OSError, IOError):
                        e = get_exception()
                        if unsafe_writes:
                            # user accepted non-atomic writes; overwrite in place
                            self._unsafe_writes(b_tmp_dest_name, b_dest, e)
                        else:
                            self.fail_json(msg='Could not replace file: %s to %s: %s' % (src, dest, e))
                except (shutil.Error, OSError, IOError):
                    e = get_exception()
                    self.fail_json(msg='Could not replace file: %s to %s: %s' % (src, dest, e))
            finally:
                # remove the staged temp file whatever happened above
                self.cleanup(b_tmp_dest_name)
    if creating:
        # make sure the file has the correct permissions
        # based on the current value of umask
        umask = os.umask(0)
        os.umask(umask)
        os.chmod(b_dest, DEFAULT_PERM & ~umask)
        try:
            os.chown(b_dest, os.geteuid(), os.getegid())
        except OSError:
            # We're okay with trying our best here.  If the user is not
            # root (or old Unices) they won't be able to chown.
            pass
    if self.selinux_enabled():
        # rename might not preserve context
        self.set_context_if_different(dest, context, False)
def _unsafe_writes(self, src, dest, exception):
# sadly there are some situations where we cannot ensure atomicity, but only if
# the user insists and we get the appropriate error we update the file unsafely
if exception.errno == errno.EBUSY:
#TODO: issue warning that this is an unsafe operation, but doing it cause user insists
try:
try:
out_dest = open(dest, 'wb')
in_src = open(src, 'rb')
shutil.copyfileobj(in_src, out_dest)
finally: # assuring closed files in 2.4 compatible way
if out_dest:
out_dest.close()
if in_src:
in_src.close()
except (shutil.Error, OSError, IOError):
e = get_exception()
self.fail_json(msg='Could not write data to file (%s) from (%s): %s' % (dest, src, e))
else:
self.fail_json(msg='Could not replace file: %s to %s: %s' % (src, dest, exception))
    def run_command(self, args, check_rc=False, close_fds=True, executable=None, data=None, binary_data=False, path_prefix=None, cwd=None, use_unsafe_shell=False, prompt_regex=None, environ_update=None, umask=None, encoding='utf-8', errors='surrogate_or_strict'):
        '''
        Execute a command, returns rc, stdout, and stderr.

        :arg args: is the command to run
            * If args is a list, the command will be run with shell=False.
            * If args is a string and use_unsafe_shell=False it will split args to a list and run with shell=False
            * If args is a string and use_unsafe_shell=True it runs with shell=True.
        :kw check_rc: Whether to call fail_json in case of non zero RC.
            Default False
        :kw close_fds: See documentation for subprocess.Popen(). Default True
        :kw executable: See documentation for subprocess.Popen(). Default None
        :kw data: If given, information to write to the stdin of the command
        :kw binary_data: If False, append a newline to the data. Default False
        :kw path_prefix: If given, additional path to find the command in.
            This adds to the PATH environment variable so helper commands in
            the same directory can also be found
        :kw cwd: If given, working directory to run the command inside
        :kw use_unsafe_shell: See `args` parameter. Default False
        :kw prompt_regex: Regex string (not a compiled regex) which can be
            used to detect prompts in the stdout which would otherwise cause
            the execution to hang (especially if no input data is specified)
        :kw environ_update: dictionary to *update* os.environ with
        :kw umask: Umask to be used when running the command. Default None
        :kw encoding: Since we return native strings, on python3 we need to
            know the encoding to use to transform from bytes to text. If you
            want to always get bytes back, use encoding=None. The default is
            "utf-8". This does not affect transformation of strings given as
            args.
        :kw errors: Since we return native strings, on python3 we need to
            transform stdout and stderr from bytes to text. If the bytes are
            undecodable in the ``encoding`` specified, then use this error
            handler to deal with them. The default is ``surrogate_or_strict``
            which means that the bytes will be decoded using the
            surrogateescape error handler if available (available on all
            python3 versions we support) otherwise a UnicodeError traceback
            will be raised. This does not affect transformations of strings
            given as args.
        :returns: A 3-tuple of return code (integer), stdout (native string),
            and stderr (native string). On python2, stdout and stderr are both
            byte strings. On python3, stdout and stderr are text strings converted
            according to the encoding and errors parameters. If you want byte
            strings on python3, use encoding=None to turn decoding to text off.
        '''
        # Normalize args to either a list (shell=False) or a single string
        # (shell=True), depending on the type given and use_unsafe_shell.
        shell = False
        if isinstance(args, list):
            if use_unsafe_shell:
                args = " ".join([pipes.quote(x) for x in args])
                shell = True
        elif isinstance(args, (binary_type, text_type)) and use_unsafe_shell:
            shell = True
        elif isinstance(args, (binary_type, text_type)):
            # On python2.6 and below, shlex has problems with text type
            # On python3, shlex needs a text type.
            if PY2:
                args = to_bytes(args, errors='surrogate_or_strict')
            elif PY3:
                args = to_text(args, errors='surrogateescape')
            args = shlex.split(args)
        else:
            msg = "Argument 'args' to run_command must be list or string"
            self.fail_json(rc=257, cmd=args, msg=msg)

        # Pre-compile the prompt regex (as bytes, since we match against the
        # raw stdout buffer below) and fail early if it is invalid.
        prompt_re = None
        if prompt_regex:
            if isinstance(prompt_regex, text_type):
                if PY3:
                    prompt_regex = to_bytes(prompt_regex, errors='surrogateescape')
                elif PY2:
                    prompt_regex = to_bytes(prompt_regex, errors='surrogate_or_strict')
            try:
                prompt_re = re.compile(prompt_regex, re.MULTILINE)
            except re.error:
                self.fail_json(msg="invalid prompt regular expression given to run_command")

        # expand things like $HOME and ~ (only safe when not handing the
        # whole string to a shell, which would do its own expansion)
        if not shell:
            args = [ os.path.expanduser(os.path.expandvars(x)) for x in args if x is not None ]

        rc = 0
        msg = None
        st_in = None

        # Manipulate the environ we'll send to the new process; the original
        # values are remembered in old_env_vals and restored after the run.
        old_env_vals = {}
        # We can set this from both an attribute and per call
        for key, val in self.run_command_environ_update.items():
            old_env_vals[key] = os.environ.get(key, None)
            os.environ[key] = val
        if environ_update:
            for key, val in environ_update.items():
                old_env_vals[key] = os.environ.get(key, None)
                os.environ[key] = val
        if path_prefix:
            old_env_vals['PATH'] = os.environ['PATH']
            os.environ['PATH'] = "%s:%s" % (path_prefix, os.environ['PATH'])

        # If using test-module and explode, the remote lib path will resemble ...
        # /tmp/test_module_scratch/debug_dir/ansible/module_utils/basic.py
        # If using ansible or ansible-playbook with a remote system ...
        # /tmp/ansible_vmweLQ/ansible_modlib.zip/ansible/module_utils/basic.py

        # Clean out python paths set by ansiballz
        if 'PYTHONPATH' in os.environ:
            pypaths = os.environ['PYTHONPATH'].split(':')
            pypaths = [x for x in pypaths \
                if not x.endswith('/ansible_modlib.zip') \
                and not x.endswith('/debug_dir')]
            os.environ['PYTHONPATH'] = ':'.join(pypaths)
            if not os.environ['PYTHONPATH']:
                del os.environ['PYTHONPATH']

        # create a printable version of the command for use
        # in reporting later, which strips out things like
        # passwords from the args list
        to_clean_args = args
        if PY2:
            if isinstance(args, text_type):
                to_clean_args = to_bytes(args)
        else:
            if isinstance(args, binary_type):
                to_clean_args = to_text(args)
        if isinstance(args, (text_type, binary_type)):
            to_clean_args = shlex.split(to_clean_args)

        clean_args = []
        is_passwd = False
        for arg in to_clean_args:
            if is_passwd:
                # previous arg was a bare --password style flag; mask this one
                is_passwd = False
                clean_args.append('********')
                continue
            if PASSWD_ARG_RE.match(arg):
                sep_idx = arg.find('=')
                if sep_idx > -1:
                    # --password=value form: keep the flag, mask the value
                    clean_args.append('%s=********' % arg[:sep_idx])
                    continue
                else:
                    is_passwd = True
            arg = heuristic_log_sanitize(arg, self.no_log_values)
            clean_args.append(arg)
        clean_args = ' '.join(pipes.quote(arg) for arg in clean_args)

        if data:
            st_in = subprocess.PIPE

        kwargs = dict(
            executable=executable,
            shell=shell,
            close_fds=close_fds,
            stdin=st_in,
            stdout=subprocess.PIPE,
            stderr=subprocess.PIPE,
        )

        # store the pwd
        prev_dir = os.getcwd()

        # make sure we're in the right working directory
        if cwd and os.path.isdir(cwd):
            cwd = os.path.abspath(os.path.expanduser(cwd))
            kwargs['cwd'] = cwd
            try:
                os.chdir(cwd)
            except (OSError, IOError):
                e = get_exception()
                self.fail_json(rc=e.errno, msg="Could not open %s, %s" % (cwd, str(e)))

        old_umask = None
        if umask:
            old_umask = os.umask(umask)

        try:
            if self._debug:
                self.log('Executing: ' + clean_args)
            cmd = subprocess.Popen(args, **kwargs)

            # the communication logic here is essentially taken from that
            # of the _communicate() function in ssh.py
            stdout = b('')
            stderr = b('')
            rpipes = [cmd.stdout, cmd.stderr]

            # Write all input up front, then close stdin so the child sees EOF.
            if data:
                if not binary_data:
                    data += '\n'
                if isinstance(data, text_type):
                    data = to_bytes(data)
                cmd.stdin.write(data)
                cmd.stdin.close()

            # Drain stdout/stderr with select() so neither pipe can fill up
            # and deadlock the child.
            while True:
                rfd, wfd, efd = select.select(rpipes, [], rpipes, 1)
                if cmd.stdout in rfd:
                    dat = os.read(cmd.stdout.fileno(), 9000)
                    stdout += dat
                    if dat == b(''):
                        rpipes.remove(cmd.stdout)
                if cmd.stderr in rfd:
                    dat = os.read(cmd.stderr.fileno(), 9000)
                    stderr += dat
                    if dat == b(''):
                        rpipes.remove(cmd.stderr)
                # if we're checking for prompts, do it now
                if prompt_re:
                    if prompt_re.search(stdout) and not data:
                        if encoding:
                            stdout = to_native(stdout, encoding=encoding, errors=errors)
                        else:
                            stdout = stdout  # no-op: caller asked for raw bytes
                        return (257, stdout, "A prompt was encountered while running a command, but no input data was specified")
                # only break out if no pipes are left to read or
                # the pipes are completely read and
                # the process is terminated
                if (not rpipes or not rfd) and cmd.poll() is not None:
                    break
                # No pipes are left to read but process is not yet terminated
                # Only then it is safe to wait for the process to be finished
                # NOTE: Actually cmd.poll() is always None here if rpipes is empty
                elif not rpipes and cmd.poll() == None:
                    cmd.wait()
                    # The process is terminated. Since no pipes to read from are
                    # left, there is no need to call select() again.
                    break

            cmd.stdout.close()
            cmd.stderr.close()

            rc = cmd.returncode
        except (OSError, IOError):
            e = get_exception()
            self.log("Error Executing CMD:%s Exception:%s" % (clean_args, to_native(e)))
            self.fail_json(rc=e.errno, msg=to_native(e), cmd=clean_args)
        except Exception:
            e = get_exception()
            self.log("Error Executing CMD:%s Exception:%s" % (clean_args,to_native(traceback.format_exc())))
            self.fail_json(rc=257, msg=to_native(e), exception=traceback.format_exc(), cmd=clean_args)

        # Restore env settings
        for key, val in old_env_vals.items():
            if val is None:
                del os.environ[key]
            else:
                os.environ[key] = val

        if old_umask:
            os.umask(old_umask)

        if rc != 0 and check_rc:
            msg = heuristic_log_sanitize(stderr.rstrip(), self.no_log_values)
            self.fail_json(cmd=clean_args, rc=rc, stdout=stdout, stderr=stderr, msg=msg)

        # reset the pwd
        os.chdir(prev_dir)

        if encoding is not None:
            return (rc, to_native(stdout, encoding=encoding, errors=errors),
                    to_native(stderr, encoding=encoding, errors=errors))
        return (rc, stdout, stderr)
def append_to_file(self, filename, str):
filename = os.path.expandvars(os.path.expanduser(filename))
fh = open(filename, 'a')
fh.write(str)
fh.close()
    def bytes_to_human(self, size):
        """Format ``size`` (a number of bytes) as a human readable string.

        Thin wrapper: inside the body, the name resolves to the module-level
        ``bytes_to_human`` helper (globals win at call time), not this method.
        """
        return bytes_to_human(size)

    # for backwards compatibility, keep the old method name as an alias
    pretty_bytes = bytes_to_human

    def human_to_bytes(self, number, isbits=False):
        """Convert a human readable size (e.g. ``'1K'``) to a byte count.

        Delegates to the module-level ``human_to_bytes`` helper; ``isbits``
        is forwarded unchanged.
        """
        return human_to_bytes(number, isbits)

    #
    # Backwards compat
    #

    # In 2.0, moved from inside the module to the toplevel
    is_executable = is_executable
def get_module_path():
    """Return the directory containing this module, with symlinks resolved."""
    real_path = os.path.realpath(__file__)
    return os.path.dirname(real_path)
|
eerorika/ansible
|
lib/ansible/module_utils/basic.py
|
Python
|
gpl-3.0
| 92,348
|
[
"VisIt"
] |
35b968e0aafdd8e1954440b7470a6fbe706ca4d92d907c8060d06ec82b4e972e
|
from collections import namedtuple
from ua_parser import user_agent_parser
from .compat import string_types
# ua-parser device families that identify a phone-sized device.
MOBILE_DEVICE_FAMILIES = (
    'iPhone',
    'iPod',
    'Generic Smartphone',
    'Generic Feature Phone',
    'PlayStation Vita',
    'iOS-Device'
)

# OS families that always indicate a desktop/laptop machine.
PC_OS_FAMILIES = (
    'Windows 95',
    'Windows 98',
    'Windows ME',
    'Solaris',
)

# OS families that always indicate a phone.
MOBILE_OS_FAMILIES = (
    'Windows Phone',
    'Windows Phone OS',  # Earlier versions of ua-parser returns Windows Phone OS
    'Symbian OS',
    'Bada',
    'Windows CE',
    'Windows Mobile',
    'Maemo',
)

# Browser families that only run on mobile devices.
MOBILE_BROWSER_FAMILIES = (
    'Opera Mobile',
    'Opera Mini',
)

# Device families that identify tablets.
TABLET_DEVICE_FAMILIES = (
    'iPad',
    'BlackBerry Playbook',
    'Blackberry Playbook',  # Earlier versions of ua-parser returns "Blackberry" instead of "BlackBerry"
    'Kindle',
    'Kindle Fire',
    'Kindle Fire HD',
    'Galaxy Tab',
    'Xoom',
    'Dell Streak',
)

# OS families whose devices are assumed to have touch screens.
TOUCH_CAPABLE_OS_FAMILIES = (
    'iOS',
    'Android',
    'Windows Phone',
    'Windows Phone OS',
    'Windows RT',
    'Windows CE',
    'Windows Mobile',
    'Firefox OS',
    'MeeGo',
)

# Specific devices known to be touch capable regardless of OS family.
TOUCH_CAPABLE_DEVICE_FAMILIES = (
    'BlackBerry Playbook',
    'Blackberry Playbook',
    'Kindle Fire',
)
# Browser families reported by ua-parser that correspond to e-mail clients.
# A set, so membership tests are O(1).  The original literal listed
# 'Outlook' and 'Windows Live Mail' twice; the duplicates are removed
# (harmless in a set, but redundant and misleading).
EMAIL_PROGRAM_FAMILIES = {
    'Outlook',
    'Windows Live Mail',
    'AirMail',
    'Apple Mail',
    'Thunderbird',
    'Lightning',
    'ThunderBrowse',
    'The Bat!',
    'Lotus Notes',
    'IBM Notes',
    'Barca',
    'MailBar',
    'kmail2',
    'YahooMobileMail'
}
def verify_attribute(attribute):
    """Coerce a purely-numeric string to ``int``; pass anything else through."""
    is_numeric_string = isinstance(attribute, string_types) and attribute.isdigit()
    if not is_numeric_string:
        return attribute
    return int(attribute)
def parse_version(major=None, minor=None, patch=None, patch_minor=None):
    """Return a tuple of the given version components with ``None`` dropped.

    Each component is normalised by ``verify_attribute`` so numeric strings
    become integers.
    """
    normalised = (
        verify_attribute(major),
        verify_attribute(minor),
        verify_attribute(patch),
        verify_attribute(patch_minor),
    )
    return tuple(component for component in normalised if component is not None)
Browser = namedtuple('Browser', ['family', 'version', 'version_string'])


def parse_browser(family, major=None, minor=None, patch=None, patch_minor=None):
    """Build a ``Browser`` record from ua-parser fields.

    NOTE(review): ``patch_minor`` is accepted but deliberately not forwarded
    to ``parse_version``, mirroring upstream user_agents behaviour.
    """
    version = parse_version(major, minor, patch)
    version_string = '.'.join(str(component) for component in version)
    return Browser(family, version, version_string)
OperatingSystem = namedtuple('OperatingSystem', ['family', 'version', 'version_string'])


def parse_operating_system(family, major=None, minor=None, patch=None, patch_minor=None):
    """Build an ``OperatingSystem`` record from ua-parser fields.

    NOTE(review): ``patch_minor`` is accepted but not used, matching the
    browser parser above.
    """
    version = parse_version(major, minor, patch)
    version_string = '.'.join(str(component) for component in version)
    return OperatingSystem(family, version, version_string)
Device = namedtuple('Device', ['family', 'brand', 'model'])


def parse_device(family, brand, model):
    """Build a ``Device`` record from the ua-parser device fields."""
    return Device(family=family, brand=brand, model=model)
class UserAgent(object):
    """Convenience wrapper around a ua-parser parse result.

    Exposes the parsed browser / OS / device as namedtuples, plus boolean
    properties (``is_mobile``, ``is_tablet``, ``is_pc``, ...) that classify
    the client based on the family constants defined above.
    """

    def __init__(self, user_agent_string):
        ua_dict = user_agent_parser.Parse(user_agent_string)
        self.ua_string = user_agent_string
        self.os = parse_operating_system(**ua_dict['os'])
        self.browser = parse_browser(**ua_dict['user_agent'])
        self.device = parse_device(**ua_dict['device'])

    def __str__(self):
        # e.g. "PC / Windows 7 / Chrome 33" or "iPhone / iOS 7 / Mobile Safari 7"
        device = self.is_pc and "PC" or self.device.family
        os = ("%s %s" % (self.os.family, self.os.version_string)).strip()
        browser = ("%s %s" % (self.browser.family, self.browser.version_string)).strip()
        return " / ".join([device, os, browser])

    def __unicode__(self):
        # Python 2 only; on Python 3 __str__ is used and this is never called.
        return unicode(str(self))

    def _is_android_tablet(self):
        # Newer Android tablets don't have "Mobile" in their user agent string,
        # older ones like Galaxy Tab still have "Mobile" though they're not
        if ('Mobile Safari' not in self.ua_string and
                self.browser.family != "Firefox Mobile"):
            return True
        return False

    def _is_blackberry_touch_capable_device(self):
        # A helper to determine whether a BB phone has touch capabilities
        # Blackberry Bold Touch series begins with 99XX
        if 'Blackberry 99' in self.device.family:
            return True
        if 'Blackberry 95' in self.device.family:  # BB Storm devices
            return True
        # BUG FIX: Torch devices are the 96XX series.  The original code
        # checked 'Blackberry 95' twice, so Torch phones were never detected.
        if 'Blackberry 96' in self.device.family:  # BB Torch devices
            return True
        return False

    @property
    def is_tablet(self):
        """True when the device is classified as a tablet."""
        if self.device.family in TABLET_DEVICE_FAMILIES:
            return True
        if (self.os.family == 'Android' and self._is_android_tablet()):
            return True
        if self.os.family.startswith('Windows RT'):
            return True
        if self.os.family == 'Firefox OS' and 'Mobile' not in self.browser.family:
            return True
        return False

    @property
    def is_mobile(self):
        """True when the device is classified as a phone (not a tablet)."""
        # First check for mobile device and mobile browser families
        if self.device.family in MOBILE_DEVICE_FAMILIES:
            return True
        if self.browser.family in MOBILE_BROWSER_FAMILIES:
            return True
        # Device is considered Mobile OS is Android and not tablet
        # This is not fool proof but would have to suffice for now
        if ((self.os.family == 'Android' or self.os.family == 'Firefox OS')
                and not self.is_tablet):
            return True
        if self.os.family == 'BlackBerry OS' and self.device.family != 'Blackberry Playbook':
            return True
        if self.os.family in MOBILE_OS_FAMILIES:
            return True
        # TODO: remove after https://github.com/tobie/ua-parser/issues/126 is closed
        if 'J2ME' in self.ua_string or 'MIDP' in self.ua_string:
            return True
        # This is here mainly to detect Google's Mobile Spider
        if 'iPhone;' in self.ua_string:
            return True
        if 'Googlebot-Mobile' in self.ua_string:
            return True
        # Mobile Spiders should be identified as mobile
        if self.device.family == 'Spider' and 'Mobile' in self.browser.family:
            return True
        # Nokia mobile
        if 'NokiaBrowser' in self.ua_string and 'Mobile' in self.ua_string:
            return True
        return False

    @property
    def is_touch_capable(self):
        """True when the device likely has a touch screen."""
        # TODO: detect touch capable Nokia devices
        if self.os.family in TOUCH_CAPABLE_OS_FAMILIES:
            return True
        if self.device.family in TOUCH_CAPABLE_DEVICE_FAMILIES:
            return True
        if self.os.family.startswith('Windows 8') and 'Touch' in self.ua_string:
            return True
        if 'BlackBerry' in self.os.family and self._is_blackberry_touch_capable_device():
            return True
        return False

    @property
    def is_pc(self):
        """True for desktop/laptop clients (Windows, Mac and Linux)."""
        # Returns True for "PC" devices (Windows, Mac and Linux)
        if 'Windows NT' in self.ua_string or self.os.family in PC_OS_FAMILIES:
            return True
        # TODO: remove after https://github.com/tobie/ua-parser/issues/127 is closed
        if self.os.family == 'Mac OS X' and 'Silk' not in self.ua_string:
            return True
        # Maemo has 'Linux' and 'X11' in UA, but it is not for PC
        if 'Maemo' in self.ua_string:
            return False
        if 'Chrome OS' in self.os.family:
            return True
        if 'Linux' in self.ua_string and 'X11' in self.ua_string:
            return True
        return False

    @property
    def is_bot(self):
        """True when ua-parser classified the device as a crawler ('Spider')."""
        return True if self.device.family == 'Spider' else False

    @property
    def is_email_client(self):
        """True when the browser family is a known e-mail program."""
        if self.browser.family in EMAIL_PROGRAM_FAMILIES:
            return True
        return False
def parse(user_agent_string):
    """Module entry point: build a :class:`UserAgent` from a raw UA string."""
    user_agent = UserAgent(user_agent_string)
    return user_agent
|
public-ink/public-ink
|
server/appengine/lib/user_agents/parsers.py
|
Python
|
gpl-3.0
| 7,813
|
[
"Galaxy"
] |
73c17577e931d663d462d0fcd8f3dfcfde47b2c7a3befe24dd74303aa663c27f
|
# -*- coding: utf-8 -*-
"""
Bok choy acceptance tests for Entrance exams in the LMS
"""
from __future__ import absolute_import
from textwrap import dedent
from common.test.acceptance.fixtures.course import CourseFixture, XBlockFixtureDesc
from common.test.acceptance.pages.common.auto_auth import AutoAuthPage
from common.test.acceptance.pages.lms.courseware import CoursewarePage
from common.test.acceptance.tests.helpers import UniqueCourseTest
class EntranceExamTest(UniqueCourseTest):
    """
    Base class for tests of Entrance Exams in the LMS.

    Installs a course with entrance exams enabled (50% minimum score) and a
    single problem, both in the course body and inside the auto-generated
    "Entrance Exam" section, then registers a non-staff test user.
    """
    USERNAME = "joe_student"
    EMAIL = "joe@example.com"

    def setUp(self):
        super(EntranceExamTest, self).setUp()
        self.xqueue_grade_response = None
        self.courseware_page = CoursewarePage(self.browser, self.course_id)

        # Install a course with a hierarchy and problems
        course_fixture = CourseFixture(
            self.course_info['org'], self.course_info['number'],
            self.course_info['run'], self.course_info['display_name'],
            settings={
                'entrance_exam_enabled': 'true',
                'entrance_exam_minimum_score_pct': '50'
            }
        )
        # get_problem() is the template-method hook subclasses implement.
        problem = self.get_problem()
        course_fixture.add_children(
            XBlockFixtureDesc('chapter', 'Test Section').add_children(
                XBlockFixtureDesc('sequential', 'Test Subsection').add_children(problem)
            )
        ).install()

        # The entrance-exam chapter is generated server-side on install, so we
        # locate its first subsection from the Studio outline and attach the
        # same problem there.
        entrance_exam_subsection = None
        outline = course_fixture.studio_course_outline_as_json
        for child in outline['child_info']['children']:
            if child.get('display_name') == "Entrance Exam":
                entrance_exam_subsection = child['child_info']['children'][0]

        if entrance_exam_subsection:
            course_fixture.create_xblock(entrance_exam_subsection['id'], problem)

        # Auto-auth register for the course.
        AutoAuthPage(self.browser, username=self.USERNAME, email=self.EMAIL,
                     course_id=self.course_id, staff=False).visit()

    def get_problem(self):
        """ Subclasses should override this to complete the fixture """
        raise NotImplementedError()
class EntranceExamPassTest(EntranceExamTest):
    """
    Tests the scenario when a student passes entrance exam.
    """

    def get_problem(self):
        """
        Create a multiple choice problem
        """
        # One correct choice ("300 meters"): selecting it scores 100%, which
        # clears the 50% entrance-exam threshold configured in the base class.
        xml = dedent("""
        <problem>
        <multiplechoiceresponse>
          <label>What is height of eiffel tower without the antenna?.</label>
            <choicegroup type="MultipleChoice">
              <choice correct="false">324 meters<choicehint>Antenna is 24 meters high</choicehint></choice>
              <choice correct="true">300 meters</choice>
              <choice correct="false">224 meters</choice>
              <choice correct="false">400 meters</choice>
            </choicegroup>
        </multiplechoiceresponse>
        </problem>
        """)
        return XBlockFixtureDesc('problem', 'HEIGHT OF EIFFEL TOWER', data=xml)
|
ESOedX/edx-platform
|
common/test/acceptance/tests/lms/test_lms_entrance_exams.py
|
Python
|
agpl-3.0
| 3,073
|
[
"VisIt"
] |
28c92e9240ca453de8a900b6a829f306dfa0e3daed83238f069ae15a092c3e0a
|
# -*- coding: utf-8 -*-
# Copyright: (c) 2019, Ansible Project
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
# Make coding more python3-ish
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
import copy
import json
import os
import pytest
import re
import shutil
import tarfile
import yaml
from io import BytesIO, StringIO
from units.compat.mock import MagicMock
import ansible.module_utils.six.moves.urllib.error as urllib_error
from ansible import context
from ansible.cli.galaxy import GalaxyCLI
from ansible.errors import AnsibleError
from ansible.galaxy import collection, api
from ansible.module_utils._text import to_bytes, to_native, to_text
from ansible.utils import context_objects as co
from ansible.utils.display import Display
def call_galaxy_cli(args):
    """Run ``ansible-galaxy collection <args>`` with a fresh CLI-args singleton.

    The global CLI-args singleton is cleared before the run and restored
    afterwards so repeated invocations within one test do not leak state.
    """
    saved_instance = co.GlobalCLIArgs._Singleton__instance
    co.GlobalCLIArgs._Singleton__instance = None
    cli_args = ['ansible-galaxy', 'collection'] + args
    try:
        GalaxyCLI(args=cli_args).run()
    finally:
        co.GlobalCLIArgs._Singleton__instance = saved_instance
def artifact_json(namespace, name, version, dependencies, server):
    """Return (as text) the JSON body a Galaxy server sends for one artifact."""
    filename = '%s-%s-%s.tar.gz' % (namespace, name, version)
    payload = {
        'artifact': {
            'filename': filename,
            'sha256': '2d76f3b8c4bab1072848107fb3914c345f71a12a1722f25c08f5d3f51f4ab5fd',
            'size': 1234,
        },
        'download_url': '%s/download/%s' % (server, filename),
        'metadata': {
            'namespace': namespace,
            'name': name,
            'dependencies': dependencies,
        },
        'version': version
    }
    return to_text(json.dumps(payload))
def artifact_versions_json(namespace, name, versions, galaxy_api, available_api_versions=None):
    """Return (as text) the JSON body listing a collection's versions.

    Uses the v3 response shape when ``'v3'`` is among the available API
    versions, otherwise the paginated v2 shape.
    """
    available_api_versions = available_api_versions or {}
    api_version = 'v3' if 'v3' in available_api_versions else 'v2'

    results = [
        {
            'href': '%s/api/%s/%s/%s/versions/%s/' % (galaxy_api.api_server, api_version, namespace, name, version),
            'version': version,
        }
        for version in versions
    ]

    if api_version == 'v3':
        response = {
            'meta': {'count': len(versions)},
            'data': results,
            'links': {
                'first': None,
                'last': None,
                'next': None,
                'previous': None,
            },
        }
    else:
        response = {
            'count': len(versions),
            'next': None,
            'previous': None,
            'results': results,
        }

    return to_text(json.dumps(response))
def error_json(galaxy_api, errors_to_return=None, available_api_versions=None):
    """Return (as text) the JSON error body for the detected Galaxy API version."""
    errors_to_return = errors_to_return or []
    available_api_versions = available_api_versions or {}

    api_version = 'v3' if 'v3' in available_api_versions else 'v2'

    response = {}
    if api_version == 'v2':
        # v2 can only express a single error object at the top level.
        assert len(errors_to_return) <= 1
        if errors_to_return:
            response = errors_to_return[0]
    else:
        response['errors'] = errors_to_return

    return to_text(json.dumps(response))
@pytest.fixture(autouse=True)
def reset_cli_args():
    """Start and end every test with a clean global CLI-args singleton.

    The original decorator used ``autouse='function'``, which only enabled
    autouse because any truthy value does; ``autouse=True`` (with the default
    function scope) is the intended, equivalent spelling.
    """
    co.GlobalCLIArgs._Singleton__instance = None
    yield
    co.GlobalCLIArgs._Singleton__instance = None
@pytest.fixture()
def collection_artifact(request, tmp_path_factory):
    """Build a skeleton collection and its tarball in a temp directory.

    Returns ``(collection_path, collection_tar)`` as byte strings.  If the
    test is parametrized, ``request.param`` is written into galaxy.yml as the
    collection's ``dependencies`` before the build.
    """
    test_dir = to_text(tmp_path_factory.mktemp('test-ÅÑŚÌβŁÈ Collections Input'))
    namespace = 'ansible_namespace'
    collection = 'collection'
    skeleton_path = os.path.join(os.path.dirname(os.path.split(__file__)[0]), 'cli', 'test_data', 'collection_skeleton')
    collection_path = os.path.join(test_dir, namespace, collection)

    call_galaxy_cli(['init', '%s.%s' % (namespace, collection), '-c', '--init-path', test_dir,
                     '--collection-skeleton', skeleton_path])
    dependencies = getattr(request, 'param', None)
    if dependencies:
        galaxy_yml = os.path.join(collection_path, 'galaxy.yml')
        with open(galaxy_yml, 'rb+') as galaxy_obj:
            existing_yaml = yaml.safe_load(galaxy_obj)
            existing_yaml['dependencies'] = dependencies

            # rewrite the file in place with the injected dependencies
            galaxy_obj.seek(0)
            galaxy_obj.write(to_bytes(yaml.safe_dump(existing_yaml)))
            galaxy_obj.truncate()

    call_galaxy_cli(['build', collection_path, '--output-path', test_dir])

    # the skeleton's default version is 0.1.0, hence the hard-coded filename
    collection_tar = os.path.join(test_dir, '%s-%s-0.1.0.tar.gz' % (namespace, collection))
    return to_bytes(collection_path), to_bytes(collection_tar)
@pytest.fixture()
def galaxy_server():
    """Return a GalaxyAPI client pointed at the public Galaxy server.

    CLIARGS is seeded directly because GalaxyAPI reads ``ignore_certs``
    from the global CLI args store.
    """
    context.CLIARGS._store = {'ignore_certs': False}
    galaxy_api = api.GalaxyAPI(None, 'test_server', 'https://galaxy.ansible.com')
    return galaxy_api
def test_build_requirement_from_path(collection_artifact):
    """A collection on disk without MANIFEST.json is identified from its path."""
    actual = collection.CollectionRequirement.from_path(collection_artifact[0], True)

    assert actual.namespace == u'ansible_namespace'
    assert actual.name == u'collection'
    assert actual.b_path == collection_artifact[0]
    assert actual.api is None
    assert actual.skip is True
    assert actual.versions == set([u'*'])
    assert actual.latest_version == u'*'
    assert actual.dependencies == {}


def test_build_requirement_from_path_with_manifest(collection_artifact):
    """MANIFEST.json, when present, overrides what the directory name implies."""
    manifest_path = os.path.join(collection_artifact[0], b'MANIFEST.json')
    manifest_value = json.dumps({
        'collection_info': {
            'namespace': 'namespace',
            'name': 'name',
            'version': '1.1.1',
            'dependencies': {
                'ansible_namespace.collection': '*'
            }
        }
    })
    with open(manifest_path, 'wb') as manifest_obj:
        manifest_obj.write(to_bytes(manifest_value))

    actual = collection.CollectionRequirement.from_path(collection_artifact[0], True)

    # While the folder name suggests a different collection, we treat MANIFEST.json as the source of truth.
    assert actual.namespace == u'namespace'
    assert actual.name == u'name'
    assert actual.b_path == collection_artifact[0]
    assert actual.api is None
    assert actual.skip is True
    assert actual.versions == set([u'1.1.1'])
    assert actual.latest_version == u'1.1.1'
    assert actual.dependencies == {'ansible_namespace.collection': '*'}


def test_build_requirement_from_path_invalid_manifest(collection_artifact):
    """An unparsable MANIFEST.json raises AnsibleError with a clear message."""
    manifest_path = os.path.join(collection_artifact[0], b'MANIFEST.json')
    with open(manifest_path, 'wb') as manifest_obj:
        manifest_obj.write(b"not json")

    expected = "Collection file at '%s' does not contain a valid json string." % to_native(manifest_path)
    with pytest.raises(AnsibleError, match=expected):
        collection.CollectionRequirement.from_path(collection_artifact[0], True)


def test_build_requirement_from_tar(collection_artifact):
    """A built tarball parses into a non-skip requirement at its exact version."""
    actual = collection.CollectionRequirement.from_tar(collection_artifact[1], True, True)

    assert actual.namespace == u'ansible_namespace'
    assert actual.name == u'collection'
    assert actual.b_path == collection_artifact[1]
    assert actual.api is None
    assert actual.skip is False
    assert actual.versions == set([u'0.1.0'])
    assert actual.latest_version == u'0.1.0'
    assert actual.dependencies == {}


def test_build_requirement_from_tar_fail_not_tar(tmp_path_factory):
    """A file that is not a tar archive is rejected."""
    test_dir = to_bytes(tmp_path_factory.mktemp('test-ÅÑŚÌβŁÈ Collections Input'))
    test_file = os.path.join(test_dir, b'fake.tar.gz')
    with open(test_file, 'wb') as test_obj:
        test_obj.write(b"\x00\x01\x02\x03")

    expected = "Collection artifact at '%s' is not a valid tar file." % to_native(test_file)
    with pytest.raises(AnsibleError, match=expected):
        collection.CollectionRequirement.from_tar(test_file, True, True)


def test_build_requirement_from_tar_no_manifest(tmp_path_factory):
    """A tarball missing MANIFEST.json is rejected."""
    test_dir = to_bytes(tmp_path_factory.mktemp('test-ÅÑŚÌβŁÈ Collections Input'))

    json_data = to_bytes(json.dumps(
        {
            'files': [],
            'format': 1,
        }
    ))

    tar_path = os.path.join(test_dir, b'ansible-collections.tar.gz')
    with tarfile.open(tar_path, 'w:gz') as tfile:
        b_io = BytesIO(json_data)
        tar_info = tarfile.TarInfo('FILES.json')
        tar_info.size = len(json_data)
        tar_info.mode = 0o0644
        tfile.addfile(tarinfo=tar_info, fileobj=b_io)

    expected = "Collection at '%s' does not contain the required file MANIFEST.json." % to_native(tar_path)
    with pytest.raises(AnsibleError, match=expected):
        collection.CollectionRequirement.from_tar(tar_path, True, True)


def test_build_requirement_from_tar_no_files(tmp_path_factory):
    """A tarball missing FILES.json is rejected."""
    test_dir = to_bytes(tmp_path_factory.mktemp('test-ÅÑŚÌβŁÈ Collections Input'))

    json_data = to_bytes(json.dumps(
        {
            'collection_info': {},
        }
    ))

    tar_path = os.path.join(test_dir, b'ansible-collections.tar.gz')
    with tarfile.open(tar_path, 'w:gz') as tfile:
        b_io = BytesIO(json_data)
        tar_info = tarfile.TarInfo('MANIFEST.json')
        tar_info.size = len(json_data)
        tar_info.mode = 0o0644
        tfile.addfile(tarinfo=tar_info, fileobj=b_io)

    expected = "Collection at '%s' does not contain the required file FILES.json." % to_native(tar_path)
    with pytest.raises(AnsibleError, match=expected):
        collection.CollectionRequirement.from_tar(tar_path, True, True)


def test_build_requirement_from_tar_invalid_manifest(tmp_path_factory):
    """A tarball whose MANIFEST.json is not valid JSON is rejected."""
    test_dir = to_bytes(tmp_path_factory.mktemp('test-ÅÑŚÌβŁÈ Collections Input'))

    json_data = b"not a json"

    tar_path = os.path.join(test_dir, b'ansible-collections.tar.gz')
    with tarfile.open(tar_path, 'w:gz') as tfile:
        b_io = BytesIO(json_data)
        tar_info = tarfile.TarInfo('MANIFEST.json')
        tar_info.size = len(json_data)
        tar_info.mode = 0o0644
        tfile.addfile(tarinfo=tar_info, fileobj=b_io)

    expected = "Collection tar file member MANIFEST.json does not contain a valid json string."
    with pytest.raises(AnsibleError, match=expected):
        collection.CollectionRequirement.from_tar(tar_path, True, True)
def test_build_requirement_from_name(galaxy_server, monkeypatch):
mock_get_versions = MagicMock()
mock_get_versions.return_value = ['2.1.9', '2.1.10']
monkeypatch.setattr(galaxy_server, 'get_collection_versions', mock_get_versions)
actual = collection.CollectionRequirement.from_name('namespace.collection', [galaxy_server], '*', True, True)
assert actual.namespace == u'namespace'
assert actual.name == u'collection'
assert actual.b_path is None
assert actual.api == galaxy_server
assert actual.skip is False
assert actual.versions == set([u'2.1.9', u'2.1.10'])
assert actual.latest_version == u'2.1.10'
assert actual.dependencies is None
assert mock_get_versions.call_count == 1
assert mock_get_versions.mock_calls[0][1] == ('namespace', 'collection')
def test_build_requirement_from_name_with_prerelease(galaxy_server, monkeypatch):
mock_get_versions = MagicMock()
mock_get_versions.return_value = ['1.0.1', '2.0.1-beta.1', '2.0.1']
monkeypatch.setattr(galaxy_server, 'get_collection_versions', mock_get_versions)
actual = collection.CollectionRequirement.from_name('namespace.collection', [galaxy_server], '*', True, True)
assert actual.namespace == u'namespace'
assert actual.name == u'collection'
assert actual.b_path is None
assert actual.api == galaxy_server
assert actual.skip is False
assert actual.versions == set([u'1.0.1', u'2.0.1'])
assert actual.latest_version == u'2.0.1'
assert actual.dependencies is None
assert mock_get_versions.call_count == 1
assert mock_get_versions.mock_calls[0][1] == ('namespace', 'collection')
def test_build_requirment_from_name_with_prerelease_explicit(galaxy_server, monkeypatch):
mock_get_info = MagicMock()
mock_get_info.return_value = api.CollectionVersionMetadata('namespace', 'collection', '2.0.1-beta.1', None, None,
{})
monkeypatch.setattr(galaxy_server, 'get_collection_version_metadata', mock_get_info)
actual = collection.CollectionRequirement.from_name('namespace.collection', [galaxy_server], '2.0.1-beta.1', True,
True)
assert actual.namespace == u'namespace'
assert actual.name == u'collection'
assert actual.b_path is None
assert actual.api == galaxy_server
assert actual.skip is False
assert actual.versions == set([u'2.0.1-beta.1'])
assert actual.latest_version == u'2.0.1-beta.1'
assert actual.dependencies == {}
assert mock_get_info.call_count == 1
assert mock_get_info.mock_calls[0][1] == ('namespace', 'collection', '2.0.1-beta.1')
def test_build_requirement_from_name_second_server(galaxy_server, monkeypatch):
mock_get_versions = MagicMock()
mock_get_versions.return_value = ['1.0.1', '1.0.2', '1.0.3']
monkeypatch.setattr(galaxy_server, 'get_collection_versions', mock_get_versions)
broken_server = copy.copy(galaxy_server)
broken_server.api_server = 'https://broken.com/'
mock_404 = MagicMock()
mock_404.side_effect = api.GalaxyError(urllib_error.HTTPError('https://galaxy.server.com', 404, 'msg', {},
StringIO()), "custom msg")
monkeypatch.setattr(broken_server, 'get_collection_versions', mock_404)
actual = collection.CollectionRequirement.from_name('namespace.collection', [broken_server, galaxy_server],
'>1.0.1', False, True)
assert actual.namespace == u'namespace'
assert actual.name == u'collection'
assert actual.b_path is None
# assert actual.api == galaxy_server
assert actual.skip is False
assert actual.versions == set([u'1.0.2', u'1.0.3'])
assert actual.latest_version == u'1.0.3'
assert actual.dependencies is None
assert mock_404.call_count == 1
assert mock_404.mock_calls[0][1] == ('namespace', 'collection')
assert mock_get_versions.call_count == 1
assert mock_get_versions.mock_calls[0][1] == ('namespace', 'collection')
def test_build_requirement_from_name_missing(galaxy_server, monkeypatch):
mock_open = MagicMock()
mock_open.side_effect = api.GalaxyError(urllib_error.HTTPError('https://galaxy.server.com', 404, 'msg', {},
StringIO()), "")
monkeypatch.setattr(galaxy_server, 'get_collection_versions', mock_open)
expected = "Failed to find collection namespace.collection:*"
with pytest.raises(AnsibleError, match=expected):
collection.CollectionRequirement.from_name('namespace.collection', [galaxy_server, galaxy_server], '*', False,
True)
def test_build_requirement_from_name_401_unauthorized(galaxy_server, monkeypatch):
    """A 401 is not swallowed like a 404; the GalaxyError propagates to the caller."""
    mock_open = MagicMock(side_effect=api.GalaxyError(
        urllib_error.HTTPError('https://galaxy.server.com', 401, 'msg', {}, StringIO()), "error"))
    monkeypatch.setattr(galaxy_server, 'get_collection_versions', mock_open)

    expected = "error (HTTP Code: 401, Message: msg)"
    with pytest.raises(api.GalaxyError, match=re.escape(expected)):
        collection.CollectionRequirement.from_name(
            'namespace.collection', [galaxy_server, galaxy_server], '*', False)
def test_build_requirement_from_name_single_version(galaxy_server, monkeypatch):
    """An exact version requirement queries version metadata directly."""
    mock_get_info = MagicMock()
    mock_get_info.return_value = api.CollectionVersionMetadata('namespace', 'collection', '2.0.0', None, None, {})
    monkeypatch.setattr(galaxy_server, 'get_collection_version_metadata', mock_get_info)

    actual = collection.CollectionRequirement.from_name(
        'namespace.collection', [galaxy_server], '2.0.0', True, True)

    assert actual.namespace == u'namespace'
    assert actual.name == u'collection'
    assert actual.b_path is None
    assert actual.api == galaxy_server
    assert actual.skip is False
    assert actual.versions == {u'2.0.0'}
    assert actual.latest_version == u'2.0.0'
    assert actual.dependencies == {}

    assert mock_get_info.call_count == 1
    assert mock_get_info.mock_calls[0][1] == ('namespace', 'collection', '2.0.0')
def test_build_requirement_from_name_multiple_versions_one_match(galaxy_server, monkeypatch):
    """A range requirement narrows the listed versions before metadata is fetched."""
    mock_get_versions = MagicMock(return_value=['2.0.0', '2.0.1', '2.0.2'])
    monkeypatch.setattr(galaxy_server, 'get_collection_versions', mock_get_versions)

    mock_get_info = MagicMock()
    mock_get_info.return_value = api.CollectionVersionMetadata('namespace', 'collection', '2.0.1', None, None, {})
    monkeypatch.setattr(galaxy_server, 'get_collection_version_metadata', mock_get_info)

    actual = collection.CollectionRequirement.from_name(
        'namespace.collection', [galaxy_server], '>=2.0.1,<2.0.2', True, True)

    assert actual.namespace == u'namespace'
    assert actual.name == u'collection'
    assert actual.b_path is None
    assert actual.api == galaxy_server
    assert actual.skip is False
    assert actual.versions == {u'2.0.1'}
    assert actual.latest_version == u'2.0.1'
    assert actual.dependencies == {}

    assert mock_get_versions.call_count == 1
    assert mock_get_versions.mock_calls[0][1] == ('namespace', 'collection')

    assert mock_get_info.call_count == 1
    assert mock_get_info.mock_calls[0][1] == ('namespace', 'collection', '2.0.1')
def test_build_requirement_from_name_multiple_version_results(galaxy_server, monkeypatch):
    """An exclusion requirement keeps every other listed version as a candidate."""
    mock_get_versions = MagicMock(return_value=['2.0.0', '2.0.1', '2.0.2', '2.0.3', '2.0.4', '2.0.5'])
    monkeypatch.setattr(galaxy_server, 'get_collection_versions', mock_get_versions)

    actual = collection.CollectionRequirement.from_name(
        'namespace.collection', [galaxy_server], '!=2.0.2', True, True)

    assert actual.namespace == u'namespace'
    assert actual.name == u'collection'
    assert actual.b_path is None
    assert actual.api == galaxy_server
    assert actual.skip is False
    assert actual.versions == {u'2.0.0', u'2.0.1', u'2.0.3', u'2.0.4', u'2.0.5'}
    assert actual.latest_version == u'2.0.5'
    assert actual.dependencies is None

    assert mock_get_versions.call_count == 1
    assert mock_get_versions.mock_calls[0][1] == ('namespace', 'collection')
@pytest.mark.parametrize('versions, requirement, expected_filter, expected_latest', [
    (['1.0.0', '1.0.1'], '*', ['1.0.0', '1.0.1'], '1.0.1'),
    (['1.0.0', '1.0.5', '1.1.0'], '>1.0.0,<1.1.0', ['1.0.5'], '1.0.5'),
    (['1.0.0', '1.0.5', '1.1.0'], '>1.0.0,<=1.0.5', ['1.0.5'], '1.0.5'),
    (['1.0.0', '1.0.5', '1.1.0'], '>=1.1.0', ['1.1.0'], '1.1.0'),
    (['1.0.0', '1.0.5', '1.1.0'], '!=1.1.0', ['1.0.0', '1.0.5'], '1.0.5'),
    (['1.0.0', '1.0.5', '1.1.0'], '==1.0.5', ['1.0.5'], '1.0.5'),
    (['1.0.0', '1.0.5', '1.1.0'], '1.0.5', ['1.0.5'], '1.0.5'),
    (['1.0.0', '2.0.0', '3.0.0'], '>=2', ['2.0.0', '3.0.0'], '3.0.0'),
])
def test_add_collection_requirements(versions, requirement, expected_filter, expected_latest):
    """Each requirement spec narrows the candidate versions and selects the right latest one."""
    req = collection.CollectionRequirement('namespace', 'name', None, 'https://galaxy.com', versions, requirement,
                                           False)
    assert req.versions == set(expected_filter)
    assert req.latest_version == expected_latest
def test_add_collection_requirement_to_unknown_installed_version():
    """An exact requirement against an installed collection of unknown version fails."""
    installed = collection.CollectionRequirement('namespace', 'name', None, 'https://galaxy.com', ['*'], '*',
                                                 False, skip=True)
    expected = "Cannot meet requirement namespace.name:1.0.0 as it is already installed at version 'unknown'."
    with pytest.raises(AnsibleError, match=expected):
        installed.add_requirement(str(installed), '1.0.0')
def test_add_collection_wildcard_requirement_to_unknown_installed_version():
    """A wildcard requirement is satisfiable even by an installed collection of unknown version."""
    installed = collection.CollectionRequirement('namespace', 'name', None, 'https://galaxy.com', ['*'], '*',
                                                 False, skip=True)
    installed.add_requirement(str(installed), '*')

    assert installed.versions == set('*')
    assert installed.latest_version == '*'
def test_add_collection_requirement_with_conflict(galaxy_server):
    """Constructing a requirement that matches none of the available versions raises."""
    expected = ("Cannot meet requirement ==1.0.2 for dependency namespace.name from source '%s'. "
                "Available versions before last requirement added: 1.0.0, 1.0.1\n"
                "Requirements from:\n"
                "\tbase - 'namespace.name:==1.0.2'" % galaxy_server.api_server)
    with pytest.raises(AnsibleError, match=expected):
        collection.CollectionRequirement('namespace', 'name', None, galaxy_server, ['1.0.0', '1.0.1'], '==1.0.2',
                                         False)
def test_add_requirement_to_existing_collection_with_conflict(galaxy_server):
    """Adding an unsatisfiable dependency requirement to an existing requirement raises."""
    req = collection.CollectionRequirement('namespace', 'name', None, galaxy_server, ['1.0.0', '1.0.1'], '*', False)

    expected = ("Cannot meet dependency requirement 'namespace.name:1.0.2' for collection namespace.collection2 "
                "from source '%s'. Available versions before last requirement added: 1.0.0, 1.0.1\n"
                "Requirements from:\n"
                "\tbase - 'namespace.name:*'\n"
                "\tnamespace.collection2 - 'namespace.name:1.0.2'" % galaxy_server.api_server)
    with pytest.raises(AnsibleError, match=re.escape(expected)):
        req.add_requirement('namespace.collection2', '1.0.2')
def test_add_requirement_to_installed_collection_with_conflict():
    """A direct conflicting requirement against an installed collection suggests --force."""
    source = 'https://galaxy.ansible.com'
    installed = collection.CollectionRequirement('namespace', 'name', None, source, ['1.0.0', '1.0.1'], '*',
                                                 False, skip=True)
    expected = ("Cannot meet requirement namespace.name:1.0.2 as it is already installed at version '1.0.1'. "
                "Use --force to overwrite")
    with pytest.raises(AnsibleError, match=re.escape(expected)):
        installed.add_requirement(None, '1.0.2')
def test_add_requirement_to_installed_collection_with_conflict_as_dep():
    """A conflicting requirement coming from a dependency suggests --force-with-deps."""
    source = 'https://galaxy.ansible.com'
    installed = collection.CollectionRequirement('namespace', 'name', None, source, ['1.0.0', '1.0.1'], '*',
                                                 False, skip=True)
    expected = ("Cannot meet requirement namespace.name:1.0.2 as it is already installed at version '1.0.1'. "
                "Use --force-with-deps to overwrite")
    with pytest.raises(AnsibleError, match=re.escape(expected)):
        installed.add_requirement('namespace.collection2', '1.0.2')
def test_install_skipped_collection(monkeypatch):
    """Installing a requirement flagged as skip only emits a message and does no work."""
    mock_display = MagicMock()
    monkeypatch.setattr(Display, 'display', mock_display)

    req = collection.CollectionRequirement('namespace', 'name', None, 'source', ['1.0.0'], '*', False, skip=True)
    req.install(None, None)

    assert mock_display.call_count == 1
    assert mock_display.mock_calls[0][1][0] == "Skipping 'namespace.name' as it is already installed"
def test_install_collection(collection_artifact, monkeypatch):
    """Installing from a tarball lays down the collection files and empties the temp dir."""
    mock_display = MagicMock()
    monkeypatch.setattr(Display, 'display', mock_display)

    collection_tar = collection_artifact[1]
    work_dir = os.path.split(collection_tar)[0]
    output_path = os.path.join(work_dir, b'output')
    collection_path = os.path.join(output_path, b'ansible_namespace', b'collection')
    # Create a folder to verify the install cleans out the dir
    os.makedirs(os.path.join(collection_path, b'delete_me'))

    temp_path = os.path.join(work_dir, b'temp')
    os.makedirs(temp_path)

    req = collection.CollectionRequirement.from_tar(collection_tar, True, True)
    req.install(to_text(output_path), temp_path)

    # Ensure the temp directory is empty, nothing is left behind
    assert os.listdir(temp_path) == []

    actual_files = sorted(os.listdir(collection_path))
    assert actual_files == [b'FILES.json', b'MANIFEST.json', b'README.md', b'docs', b'playbooks', b'plugins', b'roles']

    assert mock_display.call_count == 1
    assert mock_display.mock_calls[0][1][0] == "Installing 'ansible_namespace.collection:0.1.0' to '%s'" \
        % to_text(collection_path)
def test_install_collection_with_download(galaxy_server, collection_artifact, monkeypatch):
    """When only metadata is known the tarball is downloaded before being installed."""
    collection_tar = collection_artifact[1]
    work_dir = os.path.split(collection_tar)[0]
    output_path = os.path.join(work_dir, b'output')
    collection_path = os.path.join(output_path, b'ansible_namespace', b'collection')

    mock_display = MagicMock()
    monkeypatch.setattr(Display, 'display', mock_display)

    mock_download = MagicMock(return_value=collection_tar)
    monkeypatch.setattr(collection, '_download_file', mock_download)
    monkeypatch.setattr(galaxy_server, '_available_api_versions', {'v2': 'v2/'})

    temp_path = os.path.join(work_dir, b'temp')
    os.makedirs(temp_path)

    meta = api.CollectionVersionMetadata('ansible_namespace', 'collection', '0.1.0', 'https://downloadme.com',
                                         'myhash', {})
    req = collection.CollectionRequirement('ansible_namespace', 'collection', None, galaxy_server,
                                           ['0.1.0'], '*', False, metadata=meta)
    req.install(to_text(output_path), temp_path)

    # Ensure the temp directory is empty, nothing is left behind
    assert os.listdir(temp_path) == []

    actual_files = sorted(os.listdir(collection_path))
    assert actual_files == [b'FILES.json', b'MANIFEST.json', b'README.md', b'docs', b'playbooks', b'plugins', b'roles']

    assert mock_display.call_count == 1
    assert mock_display.mock_calls[0][1][0] == "Installing 'ansible_namespace.collection:0.1.0' to '%s'" \
        % to_text(collection_path)

    assert mock_download.call_count == 1
    assert mock_download.mock_calls[0][1][0] == 'https://downloadme.com'
    assert mock_download.mock_calls[0][1][1] == temp_path
    assert mock_download.mock_calls[0][1][2] == 'myhash'
    assert mock_download.mock_calls[0][1][3] is True
def test_install_collections_from_tar(collection_artifact, monkeypatch):
    """install_collections installs a local tarball and reports its progress."""
    collection_path, collection_tar = collection_artifact
    temp_path = os.path.split(collection_tar)[0]
    shutil.rmtree(collection_path)

    mock_display = MagicMock()
    monkeypatch.setattr(Display, 'display', mock_display)

    collection.install_collections([(to_text(collection_tar), '*', None,)], to_text(temp_path),
                                   [u'https://galaxy.ansible.com'], True, False, False, False, False)

    assert os.path.isdir(collection_path)

    actual_files = sorted(os.listdir(collection_path))
    assert actual_files == [b'FILES.json', b'MANIFEST.json', b'README.md', b'docs', b'playbooks', b'plugins', b'roles']

    with open(os.path.join(collection_path, b'MANIFEST.json'), 'rb') as manifest_obj:
        actual_manifest = json.loads(to_text(manifest_obj.read()))

    assert actual_manifest['collection_info']['namespace'] == 'ansible_namespace'
    assert actual_manifest['collection_info']['name'] == 'collection'
    assert actual_manifest['collection_info']['version'] == '0.1.0'

    # Filter out the progress cursor display calls.
    display_msgs = [m[1][0] for m in mock_display.mock_calls if 'newline' not in m[2]]
    assert len(display_msgs) == 3
    assert display_msgs[0] == "Process install dependency map"
    assert display_msgs[1] == "Starting collection install process"
    assert display_msgs[2] == "Installing 'ansible_namespace.collection:0.1.0' to '%s'" % to_text(collection_path)
def test_install_collections_existing_without_force(collection_artifact, monkeypatch):
    """An already-present collection is skipped when --force is not given."""
    collection_path, collection_tar = collection_artifact
    temp_path = os.path.split(collection_tar)[0]

    mock_display = MagicMock()
    monkeypatch.setattr(Display, 'display', mock_display)

    # If we don't delete collection_path it will think the original build skeleton is installed so we expect a skip
    collection.install_collections([(to_text(collection_tar), '*', None,)], to_text(temp_path),
                                   [u'https://galaxy.ansible.com'], True, False, False, False, False)

    assert os.path.isdir(collection_path)

    actual_files = sorted(os.listdir(collection_path))
    assert actual_files == [b'README.md', b'docs', b'galaxy.yml', b'playbooks', b'plugins', b'roles']

    # Filter out the progress cursor display calls.
    display_msgs = [m[1][0] for m in mock_display.mock_calls if 'newline' not in m[2]]
    assert len(display_msgs) == 4
    # Msg1 is the warning about not MANIFEST.json, cannot really check message as it has line breaks which varies based
    # on the path size
    assert display_msgs[1] == "Process install dependency map"
    assert display_msgs[2] == "Starting collection install process"
    assert display_msgs[3] == "Skipping 'ansible_namespace.collection' as it is already installed"
# Makes sure we don't get stuck in some recursive loop
@pytest.mark.parametrize('collection_artifact', [
    {'ansible_namespace.collection': '>=0.0.1'},
], indirect=True)
def test_install_collection_with_circular_dependency(collection_artifact, monkeypatch):
    """A collection depending on itself still installs without recursing forever."""
    collection_path, collection_tar = collection_artifact
    temp_path = os.path.split(collection_tar)[0]
    shutil.rmtree(collection_path)

    mock_display = MagicMock()
    monkeypatch.setattr(Display, 'display', mock_display)

    collection.install_collections([(to_text(collection_tar), '*', None,)], to_text(temp_path),
                                   [u'https://galaxy.ansible.com'], True, False, False, False, False)

    assert os.path.isdir(collection_path)

    actual_files = sorted(os.listdir(collection_path))
    assert actual_files == [b'FILES.json', b'MANIFEST.json', b'README.md', b'docs', b'playbooks', b'plugins', b'roles']

    with open(os.path.join(collection_path, b'MANIFEST.json'), 'rb') as manifest_obj:
        actual_manifest = json.loads(to_text(manifest_obj.read()))

    assert actual_manifest['collection_info']['namespace'] == 'ansible_namespace'
    assert actual_manifest['collection_info']['name'] == 'collection'
    assert actual_manifest['collection_info']['version'] == '0.1.0'

    # Filter out the progress cursor display calls.
    display_msgs = [m[1][0] for m in mock_display.mock_calls if 'newline' not in m[2]]
    assert len(display_msgs) == 3
    assert display_msgs[0] == "Process install dependency map"
    assert display_msgs[1] == "Starting collection install process"
    assert display_msgs[2] == "Installing 'ansible_namespace.collection:0.1.0' to '%s'" % to_text(collection_path)
|
kvar/ansible
|
test/units/galaxy/test_collection_install.py
|
Python
|
gpl-3.0
| 31,481
|
[
"Galaxy"
] |
d3401cbce8e96cc32304bc7a52cec337f2c4d04116954ac62d727e3589c89c2c
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# king_phisher/client/graphs.py
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following disclaimer
# in the documentation and/or other materials provided with the
# distribution.
# * Neither the name of the project nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#
import collections
import string
from king_phisher import color
from king_phisher import ipaddress
from king_phisher import its
from king_phisher import ua_parser
from king_phisher import utilities
from king_phisher.client import client_rpc
from king_phisher.client import gui_utilities
from king_phisher.client.widget import extras
from king_phisher.constants import ColorHexCode
from king_phisher.constants import OSFamily
from boltons import iterutils
from gi.repository import Gtk
from smoke_zephyr.requirements import check_requirements
from smoke_zephyr.utilities import unique
# Optional dependency guard: the graphing features need matplotlib with its
# GTK3/cairo backends. If the import fails, has_matplotlib stays False and the
# graph providers below mark themselves unavailable.
try:
import matplotlib
matplotlib.rcParams['backend'] = 'GTK3Cairo'
from matplotlib import dates
from matplotlib import patches
from matplotlib import pyplot
from matplotlib import ticker
from matplotlib import lines
from matplotlib.backends.backend_gtk3cairo import FigureCanvasGTK3Cairo as FigureCanvas
from matplotlib.backends.backend_gtk3cairo import FigureManagerGTK3Cairo as FigureManager
from matplotlib.backends.backend_gtk3 import NavigationToolbar2GTK3 as NavigationToolbar
except ImportError:
has_matplotlib = False
"""Whether the :py:mod:`matplotlib` module is available."""
else:
# NOTE(review): this treats a truthy check_requirements() result as "requirement
# unmet" and disables matplotlib in that case (skipped for frozen builds) --
# confirm against the smoke_zephyr documentation.
if not its.frozen and check_requirements(['matplotlib>=1.5.1']):
has_matplotlib = False
else:
has_matplotlib = True
# Same guard pattern for the optional basemap toolkit used for geographic plots.
try:
import mpl_toolkits.basemap
except ImportError:
has_matplotlib_basemap = False
"""Whether the :py:mod:`mpl_toolkits.basemap` module is available."""
else:
if not its.frozen and check_requirements(['basemap>=1.0.7']):
has_matplotlib_basemap = False
else:
has_matplotlib_basemap = True
# Registry of graph provider classes, populated by the export_graph_provider
# decorator and queried through get_graph / get_graphs.
EXPORTED_GRAPHS = {}
MPL_COLOR_NULL = 'darkcyan'
__all__ = ('export_graph_provider', 'get_graph', 'get_graphs', 'CampaignGraph')
def export_graph_provider(cls):
    """
    Decorator to mark classes as valid graph providers. This decorator also sets
    the :py:attr:`~.CampaignGraph.name` attribute.

    :param class cls: The class to mark as a graph provider.
    :return: The *cls* parameter is returned.
    """
    if not issubclass(cls, CampaignGraph):
        raise RuntimeError("{0} is not a subclass of CampaignGraph".format(cls.__name__))
    if not cls.is_available:
        # unavailable providers are not registered; the class name is bound to None
        return None
    name = cls.__name__[13:]  # strip the leading 'CampaignGraph' prefix
    cls.name = name
    EXPORTED_GRAPHS[name] = cls
    return cls
def get_graph(graph_name):
    """
    Look up the class that provides *graph_name*. The class must have been
    registered beforehand with :py:func:`.export_graph_provider`.

    :param str graph_name: The name of the graph provider.
    :return: The graph provider class, or None when not registered.
    :rtype: :py:class:`.CampaignGraph`
    """
    return EXPORTED_GRAPHS.get(graph_name)
def get_graphs():
    """
    Get a sorted list of the names of all registered graph providers.

    :return: All registered graph providers.
    :rtype: list
    """
    return sorted(EXPORTED_GRAPHS)
class GraphBase(object):
    """
    A basic graph provider for using :py:mod:`matplotlib` to create graph
    representations of campaign data. This class is meant to be subclassed
    by real providers.
    """
    name = 'Unknown'
    """The name of the graph provider."""
    name_human = 'Unknown'
    """The human readable name of the graph provider used for UI identification."""
    graph_title = 'Unknown'
    """The title that will be given to the graph."""
    table_subscriptions = []
    """A list of tables from which information is needed to produce the graph."""
    is_available = True

    def __init__(self, application, size_request=None, style_context=None):
        """
        :param application: The application instance which owns this graph.
        :param tuple size_request: The size to set for the canvas.
        :param style_context: An optional style context used to resolve theme colors.
        """
        self.application = application
        self.style_context = style_context
        self.config = application.config
        """A reference to the King Phisher client configuration."""
        self.figure, _ = pyplot.subplots()
        self.figure.set_facecolor(self.get_color('bg', ColorHexCode.WHITE))
        self.axes = self.figure.get_axes()
        self.canvas = FigureCanvas(self.figure)
        self.manager = None
        self.minimum_size = (380, 200)
        """An absolute minimum size for the canvas."""
        if size_request is not None:
            self.resize(*size_request)
        self.canvas.mpl_connect('button_press_event', self.mpl_signal_canvas_button_pressed)
        self.canvas.show()
        self.navigation_toolbar = NavigationToolbar(self.canvas, self.application.get_active_window())
        self.popup_menu = self._build_popup_menu()
        self.navigation_toolbar.hide()
        self._legend = None

    def _build_popup_menu(self):
        # right-click context menu with export, refresh and toolbar-toggle entries
        menu = Gtk.Menu.new()
        menu_item = Gtk.MenuItem.new_with_label('Export')
        menu_item.connect('activate', self.signal_activate_popup_menu_export)
        menu.append(menu_item)
        menu_item = Gtk.MenuItem.new_with_label('Refresh')
        menu_item.connect('activate', self.signal_activate_popup_refresh)
        menu.append(menu_item)
        menu_item = Gtk.CheckMenuItem.new_with_label('Show Toolbar')
        menu_item.connect('toggled', self.signal_toggled_popup_menu_show_toolbar)
        self._menu_item_show_toolbar = menu_item
        menu.append(menu_item)
        menu.show_all()
        return menu

    @property
    def rpc(self):
        # convenience accessor for the application's RPC connection
        return self.application.rpc

    @staticmethod
    def _ax_hide_ticks(ax):
        # hide the tick marks (but not the labels) on the y axis
        for tick in ax.yaxis.get_major_ticks():
            tick.tick1On = False
            tick.tick2On = False

    @staticmethod
    def _ax_set_spine_color(ax, spine_color):
        for pos in ('top', 'right', 'bottom', 'left'):
            ax.spines[pos].set_color(spine_color)

    def add_legend_patch(self, legend_rows, fontsize=None):
        # drop any previously drawn legend before adding the new one
        if self._legend is not None:
            self._legend.remove()
            self._legend = None
        fontsize = fontsize or self.fontsize_scale
        legend = self.figure.legend(
            tuple(patches.Patch(color=patch_color) for patch_color, _ in legend_rows),
            tuple(label for _, label in legend_rows),
            borderaxespad=1.25,
            fontsize=fontsize,
            frameon=True,
            handlelength=1.5,
            handletextpad=0.75,
            labelspacing=0.3,
            loc='lower right'
        )
        legend.legendPatch.set_linewidth(0)
        self._legend = legend

    def get_color(self, color_name, default):
        """
        Get a color by its style name such as 'fg' for foreground. If the
        specified color does not exist, default will be returned. The underlying
        logic for this function is provided by
        :py:func:`~.gui_utilities.gtk_style_context_get_color`.

        :param str color_name: The style name of the color.
        :param default: The default color to return if the specified one was not found.
        :return: The desired color if it was found.
        :rtype: tuple
        """
        sc_color = gui_utilities.gtk_style_context_get_color(self.style_context, 'theme_color_graph_' + color_name, default)
        return (sc_color.red, sc_color.green, sc_color.blue)

    def make_window(self):
        """
        Create a window from the figure manager.

        :return: The graph in a new, dedicated window.
        :rtype: :py:class:`Gtk.Window`
        """
        if self.manager is None:
            self.manager = FigureManager(self.canvas, 0)
        self.navigation_toolbar.destroy()
        self.navigation_toolbar = self.manager.toolbar
        self._menu_item_show_toolbar.set_active(True)
        window = self.manager.window
        window.set_transient_for(self.application.get_active_window())
        window.set_title(self.graph_title)
        return window

    @property
    def fontsize_scale(self):
        # map the marker scale onto one of matplotlib's named font sizes
        scale = self.markersize_scale
        for limit, fontsize in ((5, 'xx-small'), (7, 'x-small'), (9, 'small')):
            if scale < limit:
                return fontsize
        return 'medium'

    @property
    def markersize_scale(self):
        bbox = self.axes[0].get_window_extent().transformed(self.figure.dpi_scale_trans.inverted())
        return bbox.width * self.figure.dpi * 0.01

    def mpl_signal_canvas_button_pressed(self, event):
        if event.button != 3:  # only react to right clicks
            return
        self.popup_menu.popup(None, None, None, None, event.button, Gtk.get_current_event_time())
        return True

    def signal_activate_popup_menu_export(self, action):
        dialog = extras.FileChooserDialog('Export Graph', self.application.get_active_window())
        response = dialog.run_quick_save(self.config['campaign_name'] + '.png')
        dialog.destroy()
        if not response:
            return
        self.figure.savefig(response['target_path'], format='png')

    def signal_activate_popup_refresh(self, event):
        self.refresh()

    def signal_toggled_popup_menu_show_toolbar(self, widget):
        if widget.get_property('active'):
            self.navigation_toolbar.show()
        else:
            self.navigation_toolbar.hide()

    def resize(self, width=0, height=0):
        """
        Attempt to resize the canvas. Regardless of the parameters the canvas
        will never be resized to be smaller than :py:attr:`.minimum_size`.

        :param int width: The desired width of the canvas.
        :param int height: The desired height of the canvas.
        """
        min_width, min_height = self.minimum_size
        self.canvas.set_size_request(max(width, min_width), max(height, min_height))
class CampaignGraph(GraphBase):
    """
    Graph format used for the graphs generated in the dashboard and
    in the create graphs tab.
    """
    def _load_graph(self, info_cache):
        # subclasses draw themselves from the cached table data
        raise NotImplementedError()

    def load_graph(self):
        """Load the graph information via :py:meth:`.refresh`."""
        self.refresh()

    def refresh(self, info_cache=None, stop_event=None):
        """
        Refresh the graph data by retrieving the information from the
        remote server.

        :param dict info_cache: An optional cache of data tables.
        :param stop_event: An optional object indicating that the operation should stop.
        :type stop_event: :py:class:`threading.Event`
        :return: A dictionary of cached tables from the server.
        :rtype: dict
        """
        info_cache = (info_cache or {})
        if not self.rpc:
            # no active RPC connection so nothing can be fetched
            return info_cache
        for table in self.table_subscriptions:
            if stop_event and stop_event.is_set():
                return info_cache
            if table not in info_cache:  # idiom fix: was 'not table in info_cache'
                query_filter = None
                if 'campaign_id' in client_rpc.database_table_objects[table].__slots__:
                    query_filter = {'campaign_id': self.config['campaign_id']}
                info_cache[table] = tuple(self.rpc.remote_table(table, query_filter=query_filter))
        for ax in self.axes:
            ax.clear()
        if self._legend is not None:
            self._legend.remove()
            self._legend = None
        self._load_graph(info_cache)
        self.figure.suptitle(
            self.graph_title,
            color=self.get_color('fg', ColorHexCode.BLACK),
            size=14,
            weight='bold',
            y=0.97
        )
        self.canvas.draw()
        return info_cache
class CampaignBarGraph(CampaignGraph):
    # format string applied to the value labels on the secondary y axis
    yticklabel_fmt = "{0:,}"

    def __init__(self, *args, **kwargs):
        super(CampaignBarGraph, self).__init__(*args, **kwargs)
        self.figure.subplots_adjust(top=0.85, right=0.85, bottom=0.05, left=0.225)
        ax = self.axes[0]
        ax.tick_params(axis='both', top='off', right='off', bottom='off', left='off', labelbottom='off')
        ax.invert_yaxis()
        # twin axis used to display the value labels on the right hand side
        self.axes.append(ax.twinx())

    def _barh(self, ax, bars, height, max_bars=None):
        # draw the filled bars plus a background bar padding each row out to
        # the largest value, resizing the canvas to fit the number of rows
        color_bg = self.get_color('bg', ColorHexCode.WHITE)
        color_bar_bg = self.get_color('bar_bg', ColorHexCode.GRAY)
        color_bar_fg = self.get_color('bar_fg', ColorHexCode.BLACK)
        ax.set_axis_bgcolor(color_bg)
        self.resize(height=60 + 20 * len(bars))

        # the foreground / filled portion
        bar_container = ax.barh(range(len(bars)), bars, height=height, color=color_bar_fg, linewidth=0)

        # the background / unfilled remainder
        largest_bar = max(bars) if len(bars) else 0
        ax.barh(
            range(len(bars)),
            [largest_bar - bar for bar in bars],
            left=bars,
            height=height,
            color=color_bar_bg,
            linewidth=0
        )
        return bar_container

    def _load_graph(self, info_cache):
        raise NotImplementedError()

    def _graph_null_bar(self, title):
        # a single empty bar, used when there is no data to display
        return self.graph_bar([0], 1, [''], xlabel=title)

    def graph_bar(self, bars, max_bars, yticklabels, xlabel=None):
        """
        Create a horizontal bar graph with better defaults for the standard use
        cases.

        :param list bars: The values of the bars to graph.
        :param int max_bars: The number to treat as the logical maximum number of plotted bars.
        :param list yticklabels: The labels to use on the x-axis.
        :param str xlabel: The label to give to the y-axis.
        :return: The bars created using :py:mod:`matplotlib`
        :rtype: `matplotlib.container.BarContainer`
        """
        height = 0.275
        color_bg = self.get_color('bg', ColorHexCode.WHITE)
        color_fg = self.get_color('fg', ColorHexCode.BLACK)
        ax1, ax2 = self.axes  # primary axis
        bar_container = self._barh(ax1, bars, height, max_bars)
        yticks = [float(y) + (height / 2) for y in range(len(bars))]
        # this makes the top bar shorter than the rest
        # ax1.set_ybound(0, max(len(bars), max_bars))
        ax1.set_yticks(yticks)
        ax1.set_yticklabels(yticklabels, color=color_fg, size=10)
        ax2.set_yticks(yticks)
        ax2.set_yticklabels([self.yticklabel_fmt.format(bar) for bar in bars], color=color_fg, size=12)
        ax2.set_ylim(ax1.get_ylim())
        # remove the y-axis tick marks
        self._ax_hide_ticks(ax1)
        self._ax_hide_ticks(ax2)
        self._ax_set_spine_color(ax1, color_bg)
        self._ax_set_spine_color(ax2, color_bg)
        if xlabel:
            ax1.set_xlabel(xlabel, color=color_fg, size=12)
        return bar_container
class CampaignLineGraph(CampaignGraph):
    """Intermediate base class; subclasses implement :py:meth:`._load_graph`."""
    def __init__(self, *args, **kwargs):
        super(CampaignLineGraph, self).__init__(*args, **kwargs)

    def _load_graph(self, info_cache):
        raise NotImplementedError()
class CampaignPieGraph(CampaignGraph):
    """Base class for campaign graphs drawn with a pie chart layout."""
    def __init__(self, *args, **kwargs):
        super(CampaignPieGraph, self).__init__(*args, **kwargs)
        self.figure.subplots_adjust(top=0.85, right=0.75, bottom=0.05, left=0.05)

    def _load_graph(self, info_cache):
        raise NotImplementedError()

    def _graph_null_pie(self, title):
        # placeholder pie shown when there is no data to plot
        ax = self.axes[0]
        ax.pie(
            (100,),
            autopct='%1.0f%%',
            colors=(self.get_color('pie_low', ColorHexCode.GRAY),),
            labels=(title,),
            shadow=True,
            startangle=225,
            textprops={'color': self.get_color('fg', ColorHexCode.BLACK)}
        )
        ax.axis('equal')
        return

    def graph_pie(self, parts, autopct=None, labels=None, legend_labels=None):
        slice_colors = color.get_scale(
            self.get_color('pie_low', ColorHexCode.BLACK),
            self.get_color('pie_high', ColorHexCode.GRAY),
            len(parts),
            ascending=False
        )
        ax = self.axes[0]
        pie = ax.pie(
            parts,
            autopct=autopct,
            colors=slice_colors,
            explode=[0.1] + ([0] * (len(parts) - 1)),  # pull the first slice out slightly
            labels=labels or tuple("{0:.1f}%".format(p) for p in parts),
            labeldistance=1.15,
            shadow=True,
            startangle=45,
            textprops={'color': self.get_color('fg', ColorHexCode.BLACK)},
            wedgeprops={'linewidth': 0}
        )
        ax.axis('equal')
        if legend_labels is not None:
            self.add_legend_patch(tuple(zip(slice_colors, legend_labels)), fontsize='x-small')
        return pie
@export_graph_provider
class CampaignGraphDepartmentComparison(CampaignBarGraph):
    """Display a graph which compares the different departments."""
    graph_title = 'Department Comparison'
    name_human = 'Bar - Department Comparison'
    table_subscriptions = ('company_departments', 'messages', 'visits')
    yticklabel_fmt = "{0:.01f}%"

    def _load_graph(self, info_cache):
        # map department id -> department name
        department_names = {department.id: department.name for department in info_cache['company_departments']}
        messages = info_cache['messages']
        # map message id -> department name, for messages that are tied to a department
        message_departments = {message.id: department_names[message.company_department_id] for message in messages if message.company_department_id is not None}
        if not len(message_departments):
            self._graph_null_bar('')
            return
        messages = [message for message in messages if message.id in message_departments]
        # only count a single visit per message
        visits = [visit for visit in info_cache['visits'] if visit.message_id in message_departments]
        visits = unique(visits, key=lambda visit: visit.message_id)

        department_visits = collections.Counter(message_departments[visit.message_id] for visit in visits)
        department_totals = collections.Counter(message_departments[message.id] for message in messages)
        # percentage of each department's messages that resulted in a visit
        department_scores = {department: (float(department_visits[department]) / float(total)) * 100 for department, total in department_totals.items()}
        department_scores = collections.OrderedDict(sorted(department_scores.items(), key=lambda x: (x[1], x[0]), reverse=True))
        yticklabels, bars = zip(*department_scores.items())
        self.graph_bar(bars, len(yticklabels), yticklabels)
        return
@export_graph_provider
class CampaignGraphOverview(CampaignBarGraph):
    """Display a graph which represents an overview of the campaign."""
    graph_title = 'Campaign Overview'
    name_human = 'Bar - Campaign Overview'
    table_subscriptions = ('credentials', 'visits')

    def _load_graph(self, info_cache):
        rpc = self.rpc
        visits = info_cache['visits']
        creds = info_cache['credentials']
        messages_count = rpc('db/table/count', 'messages', query_filter={'campaign_id': self.config['campaign_id']})
        messages_not_opened = rpc('db/table/count', 'messages', query_filter={'campaign_id': self.config['campaign_id'], 'opened': None})
        bars = [
            messages_count,
            messages_count - messages_not_opened,
            len(visits),
            len(unique(visits, key=lambda visit: visit.message_id)),
        ]
        # the credential bars are only drawn when credentials were collected
        if len(creds):
            bars.append(len(creds))
            bars.append(len(unique(creds, key=lambda cred: cred.message_id)))
        yticklabels = ('Messages', 'Opened', 'Visits', 'Unique\nVisits', 'Credentials', 'Unique\nCredentials')
        self.graph_bar(bars, len(yticklabels), yticklabels[:len(bars)])
        return
@export_graph_provider
class CampaignGraphVisitorInfo(CampaignBarGraph):
    """Display a graph which shows the different operating systems seen from visitors."""
    graph_title = 'Campaign Visitor OS Information'
    name_human = 'Bar - Visitor OS Information'
    table_subscriptions = ('visits',)
    def _load_graph(self, info_cache):
        """Tally visitor operating systems from user-agent strings and draw them as bars.

        :param dict info_cache: Cached table rows keyed by table name.
        """
        visits = info_cache['visits']
        operating_systems = collections.Counter()
        for visit in visits:
            user_agent = None
            if visit.visitor_details:
                user_agent = ua_parser.parse_user_agent(visit.visitor_details)
            # fall back to a generic label when the UA is absent, unparsable
            # or carries no OS information
            operating_systems.update([user_agent.os_name if user_agent and user_agent.os_name else 'Unknown OS'])
        os_names = sorted(operating_systems.keys())
        bars = [operating_systems[os_name] for os_name in os_names]
        # NOTE(review): the bar count is sized by len(OSFamily) rather than
        # len(os_names); presumably a slot is reserved per known OS family --
        # confirm against CampaignBarGraph.graph_bar.
        self.graph_bar(bars, len(OSFamily), os_names)
        return
@export_graph_provider
class CampaignGraphVisitorInfoPie(CampaignPieGraph):
    """Display a graph which compares the different operating systems seen from visitors."""
    graph_title = 'Campaign Visitor OS Information'
    name_human = 'Pie - Visitor OS Information'
    table_subscriptions = ('visits',)
    def _load_graph(self, info_cache):
        """Render the visitor operating system distribution as a pie graph.

        :param dict info_cache: Cached table rows keyed by table name.
        """
        visits = info_cache['visits']
        if not len(visits):
            self._graph_null_pie('No Visitor Information')
            return
        operating_systems = collections.Counter()
        for visit in visits:
            ua = ua_parser.parse_user_agent(visit.visitor_details)
            # an unparsable UA or one without an OS name counts as 'Unknown OS'
            operating_systems.update([ua.os_name or 'Unknown OS' if ua else 'Unknown OS'])
        # order the wedges from most to least frequent
        (os_names, count) = tuple(zip(*reversed(sorted(operating_systems.items(), key=lambda item: item[1]))))
        # wedge labels show the raw counts, the legend shows the OS names
        self.graph_pie(count, labels=tuple("{0:,}".format(os) for os in count), legend_labels=os_names)
        return
@export_graph_provider
class CampaignGraphVisitsTimeline(CampaignLineGraph):
    """Display a graph which represents the visits of a campaign over time."""
    graph_title = 'Campaign Visits Timeline'
    name_human = 'Line - Visits Timeline'
    table_subscriptions = ('visits',)
    def _load_graph(self, info_cache):
        """Plot the cumulative visit count against each visit's first-seen timestamp.

        :param dict info_cache: Cached table rows keyed by table name.
        """
        # define the necessary colors
        color_bg = self.get_color('bg', ColorHexCode.WHITE)
        color_fg = self.get_color('fg', ColorHexCode.BLACK)
        color_line_bg = self.get_color('line_bg', ColorHexCode.WHITE)
        color_line_fg = self.get_color('line_fg', ColorHexCode.BLACK)
        visits = info_cache['visits']
        # timestamps are stored in UTC; convert them to local time for display
        first_visits = [utilities.datetime_utc_to_local(visit.first_visit) for visit in visits]
        ax = self.axes[0]
        ax.tick_params(
            axis='both',
            which='both',
            colors=color_fg,
            top='off',
            bottom='off'
        )
        ax.set_axis_bgcolor(color_line_bg)
        ax.set_ylabel('Number of Visits', color=self.get_color('fg', ColorHexCode.WHITE), size=10)
        self._ax_hide_ticks(ax)
        self._ax_set_spine_color(ax, color_bg)
        if not len(first_visits):
            # nothing to plot; pin both axes at zero and bail out
            ax.set_yticks((0,))
            ax.set_xticks((0,))
            return
        first_visits.sort()
        # y value n at the n-th sorted timestamp yields a cumulative count
        ax.plot_date(
            first_visits,
            range(1, len(first_visits) + 1),
            '-',
            color=color_line_fg,
            linewidth=6
        )
        self.figure.autofmt_xdate()
        self.figure.subplots_adjust(top=0.85, right=0.95, bottom=0.25, left=0.1)
        # let matplotlib choose sensible date tick positions and labels
        locator = dates.AutoDateLocator()
        ax.xaxis.set_major_locator(locator)
        ax.xaxis.set_major_formatter(dates.AutoDateFormatter(locator))
        return
@export_graph_provider
class CampaignGraphMessageResults(CampaignPieGraph):
    """Display the percentage of messages which resulted in a visit."""
    graph_title = 'Campaign Message Results'
    name_human = 'Pie - Message Results'
    table_subscriptions = ('credentials', 'visits')
    def _load_graph(self, info_cache):
        """Render the message outcome breakdown as a pie graph.

        :param dict info_cache: Cached table rows keyed by table name.
        """
        messages_count = self.rpc('db/table/count', 'messages', query_filter={'campaign_id': self.config['campaign_id']})
        if not messages_count:
            self._graph_null_pie('No Messages Sent')
            return
        visits_count = len(unique(info_cache['visits'], key=lambda visit: visit.message_id))
        credentials_count = len(unique(info_cache['credentials'], key=lambda cred: cred.message_id))
        # sanity check: credentials imply visits and visits imply messages
        if not credentials_count <= visits_count <= messages_count:
            raise ValueError('credential visit and message counts are inconsistent')
        labels = ['Without Visit', 'With Visit', 'With Credentials']
        total = float(messages_count)
        sizes = [
            (float(messages_count - visits_count) / total) * 100,
            (float(visits_count - credentials_count) / total) * 100,
            (float(credentials_count) / total) * 100
        ]
        # trim empty trailing wedges so they do not clutter the legend
        if not credentials_count:
            labels.pop()
            sizes.pop()
        if not visits_count:
            labels.pop()
            sizes.pop()
        self.graph_pie(sizes, legend_labels=labels)
        return
class CampaignGraphVisitsMap(CampaignGraph):
    """A base class to display a map which shows the locations of visit origins."""
    graph_title = 'Campaign Visit Locations'
    table_subscriptions = ('credentials', 'visits')
    # maps require the optional basemap toolkit to be installed
    is_available = has_matplotlib_basemap
    # whether to draw state borders; enabled by country-level subclasses
    draw_states = False
    def _load_graph(self, info_cache):
        """Draw the base map and plot one marker per visitor IP address.

        :param dict info_cache: Cached table rows keyed by table name.
        """
        # consider at most one visit per message
        visits = unique(info_cache['visits'], key=lambda visit: visit.message_id)
        # resolve the visitor IPs whose messages also produced credentials
        cred_ips = set(cred.message_id for cred in info_cache['credentials'])
        cred_ips = set([visit.visitor_ip for visit in visits if visit.message_id in cred_ips])
        color_fg = self.get_color('fg', ColorHexCode.BLACK)
        color_land = self.get_color('map_land', ColorHexCode.GRAY)
        color_water = self.get_color('map_water', ColorHexCode.WHITE)
        ax = self.axes[0]
        # subclasses supply the projection via the basemap_args attribute
        bm = mpl_toolkits.basemap.Basemap(resolution='c', ax=ax, **self.basemap_args)
        if self.draw_states:
            bm.drawstates()
        bm.drawcoastlines()
        bm.drawcountries()
        bm.fillcontinents(color=color_land, lake_color=color_water)
        parallels = bm.drawparallels(
            (-60, -30, 0, 30, 60),
            labels=(1, 1, 0, 0)
        )
        self._map_set_line_color(parallels, color_fg)
        meridians = bm.drawmeridians(
            (0, 90, 180, 270),
            labels=(0, 0, 0, 1)
        )
        self._map_set_line_color(meridians, color_fg)
        bm.drawmapboundary(
            fill_color=color_water,
            linewidth=0
        )
        if not visits:
            return
        # tally visits per source IP to scale the marker sizes
        ctr = collections.Counter()
        ctr.update([visit.visitor_ip for visit in visits])
        # clamp the base marker size so points stay legible at any scale
        base_markersize = self.markersize_scale
        base_markersize = max(base_markersize, 3.05)
        base_markersize = min(base_markersize, 9)
        self._plot_visitor_map_points(bm, ctr, base_markersize, cred_ips)
        self.add_legend_patch(((self.color_with_creds, 'With Credentials'), (self.color_without_creds, 'Without Credentials')))
        return
    def _resolve_geolocations(self, all_ips):
        """Geo-locate *all_ips* via the server, skipping private and loopback addresses.

        :param all_ips: The visitor IP addresses to resolve.
        :return: A mapping of IP address to its geo-location result.
        :rtype: dict
        """
        geo_locations = {}
        public_ips = []
        for visitor_ip in all_ips:
            ip = ipaddress.ip_address(visitor_ip)
            if ip.is_private or ip.is_loopback:
                continue
            public_ips.append(visitor_ip)
        public_ips.sort()
        # batch the lookups to bound the size of each RPC request
        for ip_chunk in iterutils.chunked(public_ips, 100):
            geo_locations.update(self.rpc.geoip_lookup_multi(ip_chunk))
        return geo_locations
    def _plot_visitor_map_points(self, bm, ctr, base_markersize, cred_ips):
        """Plot one scaled marker per resolvable visitor IP on basemap *bm*.

        :param bm: The basemap instance to plot on.
        :param ctr: Counter of visit occurrences per visitor IP.
        :param float base_markersize: Base size used to scale each marker.
        :param set cred_ips: IPs whose visits also submitted credentials.
        """
        o_high = float(max(ctr.values()))
        o_low = float(min(ctr.values()))
        color_with_creds = self.color_with_creds
        color_without_creds = self.color_without_creds
        geo_locations = self._resolve_geolocations(ctr.keys())
        for visitor_ip, geo_location in geo_locations.items():
            if not (geo_location.coordinates.longitude and geo_location.coordinates.latitude):
                continue
            occurrences = ctr[visitor_ip]
            # convert longitude / latitude into map projection coordinates
            pts = bm(geo_location.coordinates.longitude, geo_location.coordinates.latitude)
            if o_high == o_low:
                markersize = 2.0
            else:
                # scale linearly between 1x and 2x by relative visit count
                markersize = 1.0 + (float(occurrences) - o_low) / (o_high - o_low)
            markersize = markersize * base_markersize
            bm.plot(
                pts[0],
                pts[1],
                'o',
                markeredgewidth=0,
                markerfacecolor=(color_with_creds if visitor_ip in cred_ips else color_without_creds),
                markersize=markersize
            )
        return
    def _map_set_line_color(self, map_lines, line_color):
        # apply *line_color* to the line segments and tick labels returned by
        # drawparallels / drawmeridians
        for sub_lines, texts in map_lines.values():
            for line in sub_lines:
                line.set_color(line_color)
            for text in texts:
                text.set_color(line_color)
    @property
    def color_with_creds(self):
        # marker color for visits which also submitted credentials
        return self.get_color('map_marker1', ColorHexCode.RED)
    @property
    def color_without_creds(self):
        # marker color for visits without credentials
        return self.get_color('map_marker2', ColorHexCode.YELLOW)
@export_graph_provider
class CampaignGraphVisitsMapUSA(CampaignGraphVisitsMap):
    """Display a map of the USA which shows the locations of visit origins."""
    name_human = 'Map - Visit Locations (USA)'
    draw_states = True
    # Lambert conformal conic projection cropped to the continental USA
    basemap_args = {
        'projection': 'lcc',
        'lat_1': 30,
        'lon_0': -90,
        'llcrnrlon': -122.5,
        'llcrnrlat': 12.5,
        'urcrnrlon': -45,
        'urcrnrlat': 50
    }
@export_graph_provider
class CampaignGraphVisitsMapWorld(CampaignGraphVisitsMap):
    """Display a map of the world which shows the locations of visit origins."""
    name_human = 'Map - Visit Locations (World)'
    # Kavrayskiy VII projection centered on the prime meridian
    basemap_args = {'projection': 'kav7', 'lon_0': 0}
@export_graph_provider
class CampaignGraphPasswordComplexityPie(CampaignPieGraph):
    """Display a graph which displays the number of passwords which meet standard complexity requirements."""
    graph_title = 'Campaign Password Complexity'
    name_human = 'Pie - Password Complexity'
    table_subscriptions = ('credentials',)
    def _load_graph(self, info_cache):
        """Chart how many of the distinct captured passwords are complex.

        :param dict info_cache: Cached table rows keyed by table name.
        """
        passwords = set(cred.password for cred in info_cache['credentials'])
        if not passwords:
            self._graph_null_pie('No Credential Information')
            return
        tally = collections.Counter(self._check_complexity(password) for password in passwords)
        self.graph_pie((tally[True], tally[False]), autopct='%1.1f%%', legend_labels=('Complex', 'Not Complex'))
        return
    def _check_complexity(self, password):
        """Return True when *password* is at least 8 characters long and uses
        3 of the 4 standard character classes (upper, lower, digit, punctuation).
        """
        if len(password) < 8:
            return False
        char_sets = (string.ascii_uppercase, string.ascii_lowercase, string.digits, string.punctuation)
        classes_met = sum(1 for char_set in char_sets if any(char in char_set for char in password))
        return classes_met >= 3
class CampaignCompGraph(GraphBase):
    """ Display selected campaigns data by order of campaign start date."""
    graph_title = 'Campaign Comparison Graph'
    name_human = 'Graph'
    def __init__(self, *args, **kwargs):
        super(CampaignCompGraph, self).__init__(*args, **kwargs)
        ax = self.axes[0]
        # add a second y axis sharing the same x axis: the left axis tracks
        # percentages while the right axis tracks raw message counts
        self.axes.append(ax.twinx())
        ax2 = self.axes[1]
        self._config_axes(ax, ax2)
        # campaign ids from the last load_graph call, replayed by refresh()
        self._campaigns = []
    def _calc(self, stats, key, comp_key='messages'):
        # stats[key] as a percentage of stats[comp_key], zero-division safe
        return 0 if stats[comp_key] == 0 else (float(stats[key]) / stats[comp_key]) * 100
    def _config_axes(self, ax, ax2):
        """Apply colors, labels, the title and tick styling to both axes."""
        # define the necessary colors
        color_bg = self.get_color('bg', ColorHexCode.WHITE)
        color_fg = self.get_color('fg', ColorHexCode.BLACK)
        color_line_bg = self.get_color('line_bg', ColorHexCode.WHITE)
        ax.tick_params(
            axis='both',
            which='both',
            colors=color_fg,
            top='off',
            bottom='off'
        )
        ax2.tick_params(
            axis='both',
            which='both',
            colors=color_fg,
            top='off',
            bottom='off'
        )
        ax.set_axis_bgcolor(color_line_bg)
        ax2.set_axis_bgcolor(color_line_bg)
        title = pyplot.title('Campaign Comparison', color=color_fg, size=self.markersize_scale * 1.75, loc='left')
        title.set_position([0.075, 1.05])
        ax.set_ylabel('Percent Visits/Credentials', color=color_fg, size=self.markersize_scale * 1.5)
        ax.set_xlabel('Campaign Name', color=color_fg, size=self.markersize_scale * 1.5)
        self._ax_hide_ticks(ax)
        self._ax_hide_ticks(ax2)
        # rotate the right hand axis label so it reads top to bottom
        ax2.set_ylabel('Messages', color=color_fg, size=self.markersize_scale * 1.25, rotation=270, labelpad=20)
        self._ax_set_spine_color(ax, color_bg)
        self._ax_set_spine_color(ax2, color_bg)
        # message counts are whole numbers, keep the right axis ticks integral
        ax2.get_yaxis().set_major_locator(ticker.MaxNLocator(integer=True))
        ax.tick_params(axis='x', labelsize=10, pad=5)
    def load_graph(self, campaigns):
        """
        Load the information to compare the specified and paint it to the
        canvas. Campaigns are graphed on the X-axis in the order that they are
        provided. No sorting of campaigns is done by this method.

        :param tuple campaigns: A tuple containing campaign IDs to compare.
        """
        ax = self.axes[0]
        ax2 = self.axes[1]
        ax.clear()
        ax2.clear()
        self._config_axes(ax, ax2)
        rpc = self.rpc
        # shorten long campaign names so the x tick labels stay readable
        ellipsize = lambda text: (text if len(text) < 20 else text[:17] + '...')
        visits_line_color = self.get_color('line_fg', ColorHexCode.RED)
        creds_line_color = self.get_color('map_marker1', ColorHexCode.BLACK)
        messages_color = '#046D8B'
        trained_color = '#77c67f'
        ax.grid(True)
        ax.set_xticks(range(len(campaigns)))
        ax.set_xticklabels([ellipsize(rpc.remote_table_row('campaigns', cid).name) for cid in campaigns])
        for tick in ax.xaxis.get_major_ticks():
            tick.label.set_fontsize(self.markersize_scale * 1.25)
        labels = ax.get_xticklabels()
        pyplot.setp(labels, rotation=15)
        self._campaigns = campaigns
        # rebind campaigns to the per-campaign statistics dictionaries
        campaigns = [rpc('/campaign/stats', cid) for cid in campaigns]
        ax2.plot([stats['messages'] for stats in campaigns], label='Messages', color=messages_color, lw=3)
        # only draw the training / credential series when data exists for them
        if sum(stats['messages-trained'] for stats in campaigns):
            ax.plot([self._calc(stats, 'messages-trained', 'visits-unique') for stats in campaigns], label='Trained (Visited)', color=trained_color, lw=3)
            ax.plot([self._calc(stats, 'messages-trained') for stats in campaigns], label='Trained (All)', color=trained_color, lw=3, ls='dashed')
        ax.plot([self._calc(stats, 'visits') for stats in campaigns], label='Visits', color=visits_line_color, lw=3)
        ax.plot([self._calc(stats, 'visits-unique') for stats in campaigns], label='Unique Visits', color=visits_line_color, lw=3, ls='dashed')
        if sum(stats['credentials'] for stats in campaigns):
            ax.plot([self._calc(stats, 'credentials') for stats in campaigns], label='Credentials', color=creds_line_color, lw=3)
            ax.plot([self._calc(stats, 'credentials-unique') for stats in campaigns], label='Unique Credentials', color=creds_line_color, lw=3, ls='dashed')
        # the left axis is a percentage scale
        ax.set_ylim((0, 100))
        ax2.set_ylim(bottom=0)
        # widen the canvas as more campaigns are compared
        self.canvas.set_size_request(500 + 50 * (len(campaigns) - 1), 500)
        # NOTE(review): series are plotted with ls='dashed' but the legend
        # entries use 'dotted' -- confirm whether the styles should match.
        legend_patch = [
            (visits_line_color, 'solid', 'Visits'),
            (visits_line_color, 'dotted', 'Unique Visits')
        ]
        if sum(stats['credentials'] for stats in campaigns):
            legend_patch.extend([
                (creds_line_color, 'solid', 'Credentials'),
                (creds_line_color, 'dotted', 'Unique Credentials')
            ])
        if sum(stats['messages-trained'] for stats in campaigns):
            legend_patch.extend([
                (trained_color, 'solid', 'Trained (Visited)'),
                (trained_color, 'dotted', 'Trained (All)')
            ])
        legend_patch.append(
            (messages_color, 'solid', 'Messages')
        )
        self.add_legend_patch(legend_patch)
        pyplot.tight_layout()
    def add_legend_patch(self, legend_rows, fontsize=None):
        """Replace the figure legend with entries built from *legend_rows*.

        :param legend_rows: Tuples of (color, line style, label) to display.
        :param fontsize: Unused; the scaled instance font size is used instead.
        """
        if self._legend is not None:
            # drop the stale legend before drawing the new one
            self._legend.remove()
            self._legend = None
        legend_bbox = self.figure.legend(
            tuple(lines.Line2D([], [], color=patch_color, lw=3, ls=style) for patch_color, style, _ in legend_rows),
            tuple(label for _, _, label in legend_rows),
            borderaxespad=1,
            columnspacing=1.5,
            fontsize=self.fontsize_scale,
            ncol=3,
            frameon=True,
            handlelength=2,
            handletextpad=0.5,
            labelspacing=0.5,
            loc='upper right'
        )
        legend_bbox.get_frame().set_facecolor(self.get_color('line_bg', ColorHexCode.GRAY))
        for text in legend_bbox.get_texts():
            text.set_color('white')
        legend_bbox.legendPatch.set_linewidth(0)
        self._legend = legend_bbox
    def refresh(self):
        """Redraw the graph using the campaign ids from the last load."""
        self.load_graph(self._campaigns)
|
hdemeyer/king-phisher
|
king_phisher/client/graphs.py
|
Python
|
bsd-3-clause
| 33,983
|
[
"VisIt"
] |
8904e023ac5166cf81128057504e09fa41f93a7b8b3202f90eb933ecaba58511
|
from __future__ import division, absolute_import, print_function
import os
import sys
import types
import re
import warnings
from numpy.core.numerictypes import issubclass_, issubsctype, issubdtype
from numpy.core import ndarray, ufunc, asarray
import numpy as np
# getargspec and formatargspec were removed in Python 3.6
from numpy.compat import getargspec, formatargspec
# the public names this module exposes via ``from ... import *``
__all__ = [
    'issubclass_', 'issubsctype', 'issubdtype', 'deprecate',
    'deprecate_with_doc', 'get_include', 'info', 'source', 'who',
    'lookfor', 'byte_bounds', 'safe_eval'
    ]
def get_include():
    """
    Return the directory that contains the NumPy \\*.h header files.

    Extension modules that need to compile against NumPy should use this
    function to locate the appropriate include directory.

    Notes
    -----
    When using ``distutils``, for example in ``setup.py``.
    ::

        import numpy as np
        ...
        Extension('extension_name', ...
                  include_dirs=[np.get_include()])
        ...

    """
    import numpy
    if numpy.show_config is None:
        # running from the numpy source tree; headers live beside the package
        return os.path.join(os.path.dirname(numpy.__file__), 'core', 'include')
    # installed numpy: the headers ship inside the numpy.core package
    import numpy.core as core
    return os.path.join(os.path.dirname(core.__file__), 'include')
def _set_function_name(func, name):
func.__name__ = name
return func
class _Deprecate(object):
"""
Decorator class to deprecate old functions.
Refer to `deprecate` for details.
See Also
--------
deprecate
"""
def __init__(self, old_name=None, new_name=None, message=None):
self.old_name = old_name
self.new_name = new_name
self.message = message
def __call__(self, func, *args, **kwargs):
"""
Decorator call. Refer to ``decorate``.
"""
old_name = self.old_name
new_name = self.new_name
message = self.message
import warnings
if old_name is None:
try:
old_name = func.__name__
except AttributeError:
old_name = func.__name__
if new_name is None:
depdoc = "`%s` is deprecated!" % old_name
else:
depdoc = "`%s` is deprecated, use `%s` instead!" % \
(old_name, new_name)
if message is not None:
depdoc += "\n" + message
def newfunc(*args,**kwds):
"""`arrayrange` is deprecated, use `arange` instead!"""
warnings.warn(depdoc, DeprecationWarning, stacklevel=2)
return func(*args, **kwds)
newfunc = _set_function_name(newfunc, old_name)
doc = func.__doc__
if doc is None:
doc = depdoc
else:
doc = '\n\n'.join([depdoc, doc])
newfunc.__doc__ = doc
try:
d = func.__dict__
except AttributeError:
pass
else:
newfunc.__dict__.update(d)
return newfunc
def deprecate(*args, **kwargs):
    """
    Issues a DeprecationWarning, adds warning to `old_name`'s
    docstring, rebinds ``old_name.__name__`` and returns the new
    function object.

    This function may also be used as a decorator.

    Parameters
    ----------
    func : function
        The function to be deprecated.
    old_name : str, optional
        The name of the function to be deprecated. Default is None, in
        which case the name of `func` is used.
    new_name : str, optional
        The new name for the function. Default is None, in which case the
        deprecation message is that `old_name` is deprecated. If given, the
        deprecation message is that `old_name` is deprecated and `new_name`
        should be used instead.
    message : str, optional
        Additional explanation of the deprecation. Displayed in the
        docstring after the warning.

    Returns
    -------
    old_func : function
        The deprecated function.

    Examples
    --------
    Note that ``olduint`` returns a value after printing Deprecation
    Warning:

    >>> olduint = np.deprecate(np.uint)
    >>> olduint(6)
    /usr/lib/python2.5/site-packages/numpy/lib/utils.py:114:
    DeprecationWarning: uint32 is deprecated
    warnings.warn(str1, DeprecationWarning, stacklevel=2)
    6

    """
    # `deprecate` works both as a plain call (deprecate(fn, ...)) and as a
    # decorator factory (deprecate(...)(fn)); when a positional function is
    # supplied, build the decorator and apply it immediately.
    if not args:
        return _Deprecate(*args, **kwargs)
    fn = args[0]
    args = args[1:]
    # backward compatibility -- can be removed after next release
    for legacy, current in (('newname', 'new_name'), ('oldname', 'old_name')):
        if legacy in kwargs:
            kwargs[current] = kwargs.pop(legacy)
    return _Deprecate(*args, **kwargs)(fn)
def deprecate_with_doc(msg):
    """
    Return a `_Deprecate` decorator whose deprecation message includes *msg*.

    Parameters
    ----------
    msg : str
        Additional explanation appended to the deprecation warning and to
        the decorated function's docstring.

    Returns
    -------
    _Deprecate
        A decorator which deprecates the function it is applied to.
    """
    # was previously a lambda assignment (PEP 8 E731); a def gives the
    # callable a proper __name__ and a docstring for introspection
    return _Deprecate(message=msg)
#--------------------------------------------
# Determine if two arrays can share memory
#--------------------------------------------
def byte_bounds(a):
    """
    Returns pointers to the end-points of an array.

    Parameters
    ----------
    a : ndarray
        Input array. It must conform to the Python-side of the array
        interface.

    Returns
    -------
    (low, high) : tuple of 2 integers
        The first integer is the first byte of the array, the second
        integer is just past the last byte of the array.  If `a` is not
        contiguous it will not use every byte between the (`low`, `high`)
        values.

    Examples
    --------
    >>> I = np.eye(2, dtype='f'); I.dtype
    dtype('float32')
    >>> low, high = np.byte_bounds(I)
    >>> high - low == I.size*I.itemsize
    True
    >>> I = np.eye(2, dtype='G'); I.dtype
    dtype('complex192')
    >>> low, high = np.byte_bounds(I)
    >>> high - low == I.size*I.itemsize
    True

    """
    interface = a.__array_interface__
    base_address = interface['data'][0]
    strides = interface['strides']
    shape = interface['shape']
    itemsize = asarray(a).dtype.itemsize

    low = high = base_address
    if strides is None:
        # contiguous data occupies exactly size * itemsize bytes
        high += a.size * itemsize
    else:
        # walk each dimension: negative strides push the low bound down,
        # positive strides push the high bound up
        for extent, stride in zip(shape, strides):
            if stride < 0:
                low += (extent - 1) * stride
            else:
                high += (extent - 1) * stride
        high += itemsize
    return low, high
#-----------------------------------------------------------------------------
# Function for output and information on the variables used.
#-----------------------------------------------------------------------------
def who(vardict=None):
    """
    Print the NumPy arrays in the given dictionary.

    If there is no dictionary passed in or `vardict` is None then returns
    NumPy arrays in the globals() dictionary (all NumPy arrays in the
    namespace).

    Parameters
    ----------
    vardict : dict, optional
        A dictionary possibly containing ndarrays.  Default is globals().

    Returns
    -------
    out : None
        Returns 'None'.

    Notes
    -----
    Prints out the name, shape, bytes and type of all of the ndarrays
    present in `vardict`.

    Examples
    --------
    >>> a = np.arange(10)
    >>> b = np.ones(20)
    >>> np.who()
    Name Shape Bytes Type
    ===========================================================
    a 10 40 int32
    b 20 160 float64
    Upper bound on total bytes = 200

    >>> d = {'x': np.arange(2.0), 'y': np.arange(3.0), 'txt': 'Some str',
    ... 'idx':5}
    >>> np.who(d)
    Name Shape Bytes Type
    ===========================================================
    y 3 24 float64
    x 2 16 float64
    Upper bound on total bytes = 40

    """
    if vardict is None:
        # default to the caller's global namespace
        frame = sys._getframe().f_back
        vardict = frame.f_globals
    sta = []
    cache = {}
    for name in vardict.keys():
        if isinstance(vardict[name], ndarray):
            var = vardict[name]
            idv = id(var)
            if idv in cache.keys():
                # the same array was already seen under another name; mark
                # it as an alias so its bytes are not counted twice
                namestr = name + " (%s)" % cache[idv]
                original = 0
            else:
                cache[idv] = name
                namestr = name
                original = 1
            shapestr = " x ".join(map(str, var.shape))
            bytestr = str(var.nbytes)
            sta.append([namestr, shapestr, bytestr, var.dtype.name,
                        original])

    # compute the column widths and the total byte count (originals only)
    maxname = 0
    maxshape = 0
    maxbyte = 0
    totalbytes = 0
    for k in range(len(sta)):
        val = sta[k]
        if maxname < len(val[0]):
            maxname = len(val[0])
        if maxshape < len(val[1]):
            maxshape = len(val[1])
        if maxbyte < len(val[2]):
            maxbyte = len(val[2])
        if val[4]:
            totalbytes += int(val[2])

    if len(sta) > 0:
        sp1 = max(10, maxname)
        sp2 = max(10, maxshape)
        sp3 = max(10, maxbyte)
        prval = "Name %s Shape %s Bytes %s Type" % (sp1*' ', sp2*' ', sp3*' ')
        print(prval + "\n" + "="*(len(prval)+5) + "\n")

    # print one padded row per recorded array
    for k in range(len(sta)):
        val = sta[k]
        print("%s %s %s %s %s %s %s" % (val[0], ' '*(sp1-len(val[0])+4),
                                        val[1], ' '*(sp2-len(val[1])+5),
                                        val[2], ' '*(sp3-len(val[2])+5),
                                        val[3]))
    print("\nUpper bound on total bytes = %d" % totalbytes)
    return
#-----------------------------------------------------------------------------
# NOTE: pydoc defines a help function which works similarly to this
# except it uses a pager to take over the screen.
# combine name and arguments and split to multiple lines of width
# characters. End lines on a comma and begin argument list indented with
# the rest of the arguments.
def _split_line(name, arguments, width):
firstwidth = len(name)
k = firstwidth
newstr = name
sepstr = ", "
arglist = arguments.split(sepstr)
for argument in arglist:
if k == firstwidth:
addstr = ""
else:
addstr = sepstr
k = k + len(argument) + len(addstr)
if k > width:
k = firstwidth + 1 + len(argument)
newstr = newstr + ",\n" + " "*(firstwidth+2) + argument
else:
newstr = newstr + addstr + argument
return newstr
# lazily-built caches used by `info` for looking up objects by name;
# populated on first use by `_makenamedict`
_namedict = None
_dictlist = None
# Traverse all module directories underneath globals
# to see if something is defined
def _makenamedict(module='numpy'):
module = __import__(module, globals(), locals(), [])
thedict = {module.__name__:module.__dict__}
dictlist = [module.__name__]
totraverse = [module.__dict__]
while True:
if len(totraverse) == 0:
break
thisdict = totraverse.pop(0)
for x in thisdict.keys():
if isinstance(thisdict[x], types.ModuleType):
modname = thisdict[x].__name__
if modname not in dictlist:
moddict = thisdict[x].__dict__
dictlist.append(modname)
totraverse.append(moddict)
thedict[modname] = moddict
return thedict, dictlist
def _info(obj, output=sys.stdout):
    """Provide information about ndarray obj.

    Parameters
    ----------
    obj : ndarray
        Must be ndarray, not checked.
    output
        Where printed output goes.

    Notes
    -----
    Copied over from the numarray module prior to its removal.
    Adapted somewhat as only numpy is an option now.

    Called by info.

    """
    # `extra`, `tic` and `bp` are numarray-era formatting hooks kept for
    # compatibility; with these values they are effectively no-ops
    extra = ""
    tic = ""
    bp = lambda x: x
    cls = getattr(obj, '__class__', type(obj))
    nm = getattr(cls, '__name__', cls)
    strides = obj.strides
    endian = obj.dtype.byteorder
    print("class: ", nm, file=output)
    print("shape: ", obj.shape, file=output)
    print("strides: ", strides, file=output)
    print("itemsize: ", obj.itemsize, file=output)
    print("aligned: ", bp(obj.flags.aligned), file=output)
    print("contiguous: ", bp(obj.flags.contiguous), file=output)
    print("fortran: ", obj.flags.fortran, file=output)
    print(
        "data pointer: %s%s" % (hex(obj.ctypes._as_parameter_.value), extra),
        file=output
        )
    print("byteorder: ", end=' ', file=output)
    # '|' (not applicable) and '=' (native) both report the host byte order
    if endian in ['|', '=']:
        print("%s%s%s" % (tic, sys.byteorder, tic), file=output)
        byteswap = False
    elif endian == '>':
        print("%sbig%s" % (tic, tic), file=output)
        byteswap = sys.byteorder != "big"
    else:
        print("%slittle%s" % (tic, tic), file=output)
        byteswap = sys.byteorder != "little"
    print("byteswap: ", bp(byteswap), file=output)
    print("type: %s" % obj.dtype, file=output)
def info(object=None, maxwidth=76, output=sys.stdout, toplevel='numpy'):
    """
    Get help information for a function, class, or module.

    Parameters
    ----------
    object : object or str, optional
        Input object or name to get information about. If `object` is a
        numpy object, its docstring is given. If it is a string, available
        modules are searched for matching objects. If None, information
        about `info` itself is returned.
    maxwidth : int, optional
        Printing width.
    output : file like object, optional
        File like object that the output is written to, default is
        ``stdout``. The object has to be opened in 'w' or 'a' mode.
    toplevel : str, optional
        Start search at this level.

    See Also
    --------
    source, lookfor

    Notes
    -----
    When used interactively with an object, ``np.info(obj)`` is equivalent
    to ``help(obj)`` on the Python prompt or ``obj?`` on the IPython
    prompt.

    Examples
    --------
    >>> np.info(np.polyval) # doctest: +SKIP
    polyval(p, x)
    Evaluate the polynomial p at x.
    ...

    When using a string for `object` it is possible to get multiple results.

    >>> np.info('fft') # doctest: +SKIP
    *** Found in numpy ***
    Core FFT routines
    ...
    *** Found in numpy.fft ***
    fft(a, n=None, axis=-1)
    ...
    *** Repeat reference found in numpy.fft.fftpack ***
    *** Total of 3 references found. ***

    """
    global _namedict, _dictlist
    # Local import to speed up numpy's import time.
    import pydoc
    import inspect
    # unwrap ppimport lazy-import proxies to reach the real object
    if (hasattr(object, '_ppimport_importer') or
            hasattr(object, '_ppimport_module')):
        object = object._ppimport_module
    elif hasattr(object, '_ppimport_attr'):
        object = object._ppimport_attr

    if object is None:
        # document `info` itself
        info(info)
    elif isinstance(object, ndarray):
        _info(object, output=output)
    elif isinstance(object, str):
        # name lookup: search every cached module namespace for the string
        if _namedict is None:
            _namedict, _dictlist = _makenamedict(toplevel)
        numfound = 0
        objlist = []
        for namestr in _dictlist:
            try:
                obj = _namedict[namestr][object]
                if id(obj) in objlist:
                    # duplicate reference to an object already reported
                    print("\n "
                          "*** Repeat reference found in %s *** " % namestr,
                          file=output
                          )
                else:
                    objlist.append(id(obj))
                    print(" *** Found in %s ***" % namestr, file=output)
                    info(obj)
                    print("-"*maxwidth, file=output)
                numfound += 1
            except KeyError:
                pass
        if numfound == 0:
            print("Help for %s not found." % object, file=output)
        else:
            print("\n "
                  "*** Total of %d references found. ***" % numfound,
                  file=output
                  )
    elif inspect.isfunction(object):
        name = object.__name__
        arguments = formatargspec(*getargspec(object))
        # wrap the signature line if it would exceed the printing width
        if len(name+arguments) > maxwidth:
            argstr = _split_line(name, arguments, maxwidth)
        else:
            argstr = name + arguments
        print(" " + argstr + "\n", file=output)
        print(inspect.getdoc(object), file=output)
    elif inspect.isclass(object):
        name = object.__name__
        arguments = "()"
        try:
            if hasattr(object, '__init__'):
                arguments = formatargspec(
                        *getargspec(object.__init__.__func__)
                        )
                arglist = arguments.split(', ')
                # drop the leading `self` from the displayed signature
                if len(arglist) > 1:
                    arglist[1] = "("+arglist[1]
                    arguments = ", ".join(arglist[1:])
        except Exception:
            pass
        if len(name+arguments) > maxwidth:
            argstr = _split_line(name, arguments, maxwidth)
        else:
            argstr = name + arguments
        print(" " + argstr + "\n", file=output)
        doc1 = inspect.getdoc(object)
        if doc1 is None:
            # fall back to the constructor's docstring
            if hasattr(object, '__init__'):
                print(inspect.getdoc(object.__init__), file=output)
        else:
            print(inspect.getdoc(object), file=output)

        # list the public methods with the first line of their docstrings
        methods = pydoc.allmethods(object)
        if methods != []:
            print("\n\nMethods:\n", file=output)
            for meth in methods:
                if meth[0] == '_':
                    continue
                thisobj = getattr(object, meth, None)
                if thisobj is not None:
                    methstr, other = pydoc.splitdoc(
                            inspect.getdoc(thisobj) or "None"
                            )
                print(" %s -- %s" % (meth, methstr), file=output)
    elif (sys.version_info[0] < 3
            and isinstance(object, types.InstanceType)):
        # check for __call__ method
        # types.InstanceType is the type of the instances of oldstyle classes
        print("Instance of class: ", object.__class__.__name__, file=output)
        print(file=output)
        if hasattr(object, '__call__'):
            arguments = formatargspec(
                    *getargspec(object.__call__.__func__)
                    )
            arglist = arguments.split(', ')
            # drop the leading `self` from the displayed signature
            if len(arglist) > 1:
                arglist[1] = "("+arglist[1]
                arguments = ", ".join(arglist[1:])
            else:
                arguments = "()"

            if hasattr(object, 'name'):
                name = "%s" % object.name
            else:
                name = "<name>"
            if len(name+arguments) > maxwidth:
                argstr = _split_line(name, arguments, maxwidth)
            else:
                argstr = name + arguments
            print(" " + argstr + "\n", file=output)
            doc = inspect.getdoc(object.__call__)
            if doc is not None:
                print(inspect.getdoc(object.__call__), file=output)
            print(inspect.getdoc(object), file=output)
        else:
            print(inspect.getdoc(object), file=output)
    elif inspect.ismethod(object):
        name = object.__name__
        arguments = formatargspec(
                *getargspec(object.__func__)
                )
        arglist = arguments.split(', ')
        # drop the leading `self` from the displayed signature
        if len(arglist) > 1:
            arglist[1] = "("+arglist[1]
            arguments = ", ".join(arglist[1:])
        else:
            arguments = "()"
        if len(name+arguments) > maxwidth:
            argstr = _split_line(name, arguments, maxwidth)
        else:
            argstr = name + arguments
        print(" " + argstr + "\n", file=output)
        print(inspect.getdoc(object), file=output)
    elif hasattr(object, '__doc__'):
        # last resort: anything carrying a docstring
        print(inspect.getdoc(object), file=output)
def source(object, output=sys.stdout):
    """
    Print or write to a file the source code for a NumPy object.

    The source code is only returned for objects written in Python. Many
    functions and classes are defined in C and will therefore not return
    useful information.

    Parameters
    ----------
    object : numpy object
        Input object. This can be any object (function, class, module,
        ...).
    output : file object, optional
        If `output` not supplied then source code is printed to screen
        (sys.stdout). File object must be created with either write 'w' or
        append 'a' modes.

    See Also
    --------
    lookfor, info

    Examples
    --------
    >>> np.source(np.interp) #doctest: +SKIP
    In file: /usr/lib/python2.6/dist-packages/numpy/lib/function_base.py
    def interp(x, xp, fp, left=None, right=None):
        \"\"\".... (full docstring printed)\"\"\"
        if isinstance(x, (float, int, number)):
            return compiled_interp([x], xp, fp, left, right).item()
        else:
            return compiled_interp(x, xp, fp, left, right)

    The source code is only returned for objects written in Python.

    >>> np.source(np.array) #doctest: +SKIP
    Not available for this object.

    """
    # Local import to speed up numpy's import time.
    import inspect
    try:
        print("In file: %s\n" % inspect.getsourcefile(object), file=output)
        print(inspect.getsource(object), file=output)
    except Exception:
        # objects implemented in C have no retrievable Python source
        print("Not available for this object.", file=output)
# Cache for lookfor: {id(module): {name: (docstring, kind, index), ...}...}
# where kind: "func", "class", "module", "object"
# and index: index in breadth-first namespace traversal
_lookfor_caches = {}

# regexp whose match indicates that the string may contain a function
# signature; re.I makes it case-insensitive so CamelCase names also match
_function_signature_re = re.compile(r"[a-z0-9_]+\(.*[,=].*\)", re.I)
def lookfor(what, module=None, import_modules=True, regenerate=False,
            output=None):
    """
    Do a keyword search on docstrings.

    A list of objects that matched the search is displayed, sorted by
    relevance.  An object is reported only when *all* given keywords are
    found in its docstring; the order of the keywords does not matter.

    Parameters
    ----------
    what : str
        String containing words to look for.
    module : str or list, optional
        Name of module(s) whose docstrings to go through.
    import_modules : bool, optional
        Whether to import sub-modules in packages. Default is True.
    regenerate : bool, optional
        Whether to re-generate the docstring cache. Default is False.
    output : file-like, optional
        File-like object to write the output to. If omitted, use a pager.

    See Also
    --------
    source, info

    Notes
    -----
    Relevance is determined only roughly, by checking if the keywords occur
    in the function name, at the start of a docstring, etc.

    Examples
    --------
    >>> np.lookfor('binary representation')
    Search results for 'binary representation'
    ------------------------------------------
    numpy.binary_repr
        Return the binary representation of the input number as a string.
    ...

    """
    import pydoc

    # Docstring cache for the requested module(s).
    cache = _lookfor_generate_cache(module, import_modules, regenerate)

    keywords = str(what).lower().split()
    if not keywords:
        return

    # Keep only names whose docstring mentions every keyword; modules and
    # plain objects are never reported.
    found = [name
             for name, (docstring, kind, index) in cache.items()
             if kind not in ('module', 'object')
             and all(word in docstring.lower() for word in keywords)]

    # Rough relevance ranking -- pure heuristics.
    kind_relevance = {'func': 1000, 'class': 1000,
                      'module': -1000, 'object': -1000}

    def relevance(name, docstr, kind, index):
        score = 0
        # keywords near the top of the docstring weigh heavily
        head = "\n".join(docstr.lower().strip().split("\n")[:3])
        score += 200 * sum(1 for word in keywords if word in head)
        # keywords occurring in the object's name
        score += 30 * sum(1 for word in keywords if word in name)
        # penalize long names, bad kinds, deep nesting, late discovery
        score += -len(name) * 5
        score += kind_relevance.get(kind, -1000)
        score += -name.count('.') * 10
        score += max(-index / 100, -100)
        return score

    found.sort(key=lambda nm: relevance(nm, *cache[nm]))

    # Pretty-print, best matches first.
    heading = "Search results for '%s'" % (' '.join(keywords))
    help_text = [heading, "-" * len(heading)]
    for name in reversed(found):
        doc, kind, ix = cache[name]
        doclines = [line.strip() for line in doc.strip().split("\n")
                    if line.strip()]
        # pick a one-line summary, skipping a leading signature line
        try:
            first_doc = doclines[0].strip()
            if _function_signature_re.search(first_doc):
                first_doc = doclines[1].strip()
        except IndexError:
            first_doc = ""
        help_text.append("%s\n    %s" % (name, first_doc))
    if not found:
        help_text.append("Nothing found.")

    # Write to `output`, page long results, or print short ones.
    if output is not None:
        output.write("\n".join(help_text))
    elif len(help_text) > 10:
        pager = pydoc.getpager()
        pager("\n".join(help_text))
    else:
        print("\n".join(help_text))
def _lookfor_generate_cache(module, import_modules, regenerate):
    """
    Generate docstring cache for given module.

    Walks the module namespace breadth-first, recording for each reachable
    object its docstring, a rough kind classification, and the order in
    which it was discovered.  Results are memoized per module id in the
    module-level ``_lookfor_caches`` dict.

    Parameters
    ----------
    module : str, None, module
        Module for which to generate docstring cache
    import_modules : bool
        Whether to import sub-modules in packages.
    regenerate : bool
        Re-generate the docstring cache

    Returns
    -------
    cache : dict {obj_full_name: (docstring, kind, index), ...}
        Docstring cache for the module, either cached one (regenerate=False)
        or newly generated.
    """
    global _lookfor_caches
    # Local import to speed up numpy's import time.
    import inspect
    if sys.version_info[0] >= 3:
        # In Python3 stderr, stdout are text files.
        from io import StringIO
    else:
        from StringIO import StringIO
    if module is None:
        module = "numpy"
    if isinstance(module, str):
        # Resolve a module name; a failed import yields an empty cache
        # rather than an exception.
        try:
            __import__(module)
        except ImportError:
            return {}
        module = sys.modules[module]
    elif isinstance(module, list) or isinstance(module, tuple):
        # Multiple modules: merge their individual caches.
        cache = {}
        for mod in module:
            cache.update(_lookfor_generate_cache(mod, import_modules,
                                                 regenerate))
        return cache
    if id(module) in _lookfor_caches and not regenerate:
        return _lookfor_caches[id(module)]
    # walk items and collect docstrings
    cache = {}
    # Publish the (still empty) cache up-front so recursive walks of the
    # same module reuse it instead of re-crawling.
    _lookfor_caches[id(module)] = cache
    seen = {}
    index = 0
    # Breadth-first traversal queue of (dotted_name, object) pairs.
    stack = [(module.__name__, module)]
    while stack:
        name, item = stack.pop(0)
        if id(item) in seen:
            continue
        seen[id(item)] = True
        index += 1
        kind = "object"
        if inspect.ismodule(item):
            kind = "module"
            try:
                _all = item.__all__
            except AttributeError:
                _all = None
            # import sub-packages
            if import_modules and hasattr(item, '__path__'):
                for pth in item.__path__:
                    for mod_path in os.listdir(pth):
                        this_py = os.path.join(pth, mod_path)
                        init_py = os.path.join(pth, mod_path, '__init__.py')
                        if (os.path.isfile(this_py) and
                                mod_path.endswith('.py')):
                            to_import = mod_path[:-3]
                        elif os.path.isfile(init_py):
                            to_import = mod_path
                        else:
                            continue
                        if to_import == '__init__':
                            continue
                        try:
                            # Silence anything the sub-module prints at
                            # import time; restore the real streams even if
                            # the import fails.
                            old_stdout = sys.stdout
                            old_stderr = sys.stderr
                            try:
                                sys.stdout = StringIO()
                                sys.stderr = StringIO()
                                __import__("%s.%s" % (name, to_import))
                            finally:
                                sys.stdout = old_stdout
                                sys.stderr = old_stderr
                        # Catch SystemExit, too
                        except BaseException:
                            continue
            for n, v in _getmembers(item):
                try:
                    item_name = getattr(v, '__name__', "%s.%s" % (name, n))
                    mod_name = getattr(v, '__module__', None)
                except NameError:
                    # ref. SWIG's global cvars
                    # NameError: Unknown C global variable
                    item_name = "%s.%s" % (name, n)
                    mod_name = None
                if '.' not in item_name and mod_name:
                    item_name = "%s.%s" % (mod_name, item_name)
                if not item_name.startswith(name + '.'):
                    # don't crawl "foreign" objects
                    if isinstance(v, ufunc):
                        # ... unless they are ufuncs
                        pass
                    else:
                        continue
                elif not (inspect.ismodule(v) or _all is None or n in _all):
                    # respect __all__ for non-module members
                    continue
                stack.append(("%s.%s" % (name, n), v))
        elif inspect.isclass(item):
            kind = "class"
            for n, v in _getmembers(item):
                stack.append(("%s.%s" % (name, n), v))
        elif hasattr(item, "__call__"):
            kind = "func"
        try:
            doc = inspect.getdoc(item)
        except NameError:
            # ref SWIG's NameError: Unknown C global variable
            doc = None
        if doc is not None:
            cache[name] = (doc, kind, index)
    return cache
def _getmembers(item):
import inspect
try:
members = inspect.getmembers(item)
except Exception:
members = [(x, getattr(item, x)) for x in dir(item)
if hasattr(item, x)]
return members
#-----------------------------------------------------------------------------
# The following SafeEval class and company are adapted from Michael Spencer's
# ASPN Python Cookbook recipe:
# http://aspn.activestate.com/ASPN/Cookbook/Python/Recipe/364469
# Accordingly it is mostly Copyright 2006 by Michael Spencer.
# The recipe, like most of the other ASPN Python Cookbook recipes was made
# available under the Python license.
# http://www.python.org/license
# It has been modified to:
# * handle unary -/+
# * support True/False/None
# * raise SyntaxError instead of a custom exception.
class SafeEval(object):
    """
    Object to evaluate constant string expressions.

    This includes strings with lists, dicts and tuples using the abstract
    syntax tree created by ``compiler.parse``.

    .. deprecated:: 1.10.0

    See Also
    --------
    safe_eval

    """
    def __init__(self):
        # 2014-10-15, 1.10
        warnings.warn("SafeEval is deprecated in 1.10 and will be removed.",
                      DeprecationWarning, stacklevel=2)

    def visit(self, node):
        # Dispatch on the node's class name; anything without a matching
        # visit* method falls through to default(), which rejects it.
        cls = node.__class__
        meth = getattr(self, 'visit' + cls.__name__, self.default)
        return meth(node)

    def default(self, node):
        raise SyntaxError("Unsupported source construct: %s"
                          % node.__class__)

    def visitExpression(self, node):
        return self.visit(node.body)

    def visitConstant(self, node):
        # Python 3.8+ parses all literals (numbers, strings, bytes,
        # True/False/None) as ast.Constant, so without this visitor every
        # literal would be rejected by default() on modern Python.  The
        # Num/Str/Bytes/NameConstant visitors below remain for older ASTs.
        return node.value

    def visitNum(self, node):
        return node.n

    def visitStr(self, node):
        return node.s

    def visitBytes(self, node):
        return node.s

    def visitDict(self, node, **kw):
        return dict([(self.visit(k), self.visit(v))
                     for k, v in zip(node.keys, node.values)])

    def visitTuple(self, node):
        return tuple([self.visit(i) for i in node.elts])

    def visitList(self, node):
        return [self.visit(i) for i in node.elts]

    def visitUnaryOp(self, node):
        import ast
        # Only unary +/- on a constant operand are considered literal.
        if isinstance(node.op, ast.UAdd):
            return +self.visit(node.operand)
        elif isinstance(node.op, ast.USub):
            return -self.visit(node.operand)
        else:
            raise SyntaxError("Unknown unary op: %r" % node.op)

    def visitName(self, node):
        if node.id == 'False':
            return False
        elif node.id == 'True':
            return True
        elif node.id == 'None':
            return None
        else:
            raise SyntaxError("Unknown name: %s" % node.id)

    def visitNameConstant(self, node):
        return node.value
def safe_eval(source):
    """
    Protected string evaluation.

    Evaluate a string containing a Python literal expression without
    allowing the execution of arbitrary non-literal code.

    Parameters
    ----------
    source : str
        The string to evaluate.

    Returns
    -------
    obj : object
        The result of evaluating `source`.

    Raises
    ------
    SyntaxError
        If the code has invalid Python syntax, or if it contains
        non-literal code.

    Examples
    --------
    >>> np.safe_eval('1')
    1
    >>> np.safe_eval('[1, 2, 3]')
    [1, 2, 3]
    >>> np.safe_eval('{"foo": ("bar", 10.0)}')
    {'foo': ('bar', 10.0)}

    >>> np.safe_eval('import os')
    Traceback (most recent call last):
      ...
    SyntaxError: invalid syntax

    """
    # Deferred import keeps `import numpy` fast; literal_eval already
    # restricts evaluation to Python literals only.
    import ast
    return ast.literal_eval(source)
def _median_nancheck(data, result, axis, out):
"""
Utility function to check median result from data for NaN values at the end
and return NaN in that case. Input result can also be a MaskedArray.
Parameters
----------
data : array
Input data to median function
result : Array or MaskedArray
Result of median function
axis : {int, sequence of int, None}, optional
Axis or axes along which the median was computed.
out : ndarray, optional
Output array in which to place the result.
Returns
-------
median : scalar or ndarray
Median or NaN in axes which contained NaN in the input.
"""
if data.size == 0:
return result
data = np.rollaxis(data, axis, data.ndim)
n = np.isnan(data[..., -1])
# masked NaN values are ok
if np.ma.isMaskedArray(n):
n = n.filled(False)
if result.ndim == 0:
if n == True:
warnings.warn("Invalid value encountered in median",
RuntimeWarning, stacklevel=3)
if out is not None:
out[...] = data.dtype.type(np.nan)
result = out
else:
result = data.dtype.type(np.nan)
elif np.count_nonzero(n.ravel()) > 0:
warnings.warn("Invalid value encountered in median for" +
" %d results" % np.count_nonzero(n.ravel()),
RuntimeWarning, stacklevel=3)
result[n] = np.nan
return result
#-----------------------------------------------------------------------------
|
bringingheavendown/numpy
|
numpy/lib/utils.py
|
Python
|
bsd-3-clause
| 36,347
|
[
"VisIt"
] |
6f16d31c5e703b59247056c0474d5d02edd68620f3b59e43013ccd1e07541760
|
# coding: utf-8
from __future__ import unicode_literals
import binascii
import collections
import email
import getpass
import io
import itertools
import optparse
import os
import re
import shlex
import shutil
import socket
import struct
import subprocess
import sys
import xml.etree.ElementTree
try:
import urllib.request as compat_urllib_request
except ImportError: # Python 2
import urllib2 as compat_urllib_request
try:
import urllib.error as compat_urllib_error
except ImportError: # Python 2
import urllib2 as compat_urllib_error
try:
import urllib.parse as compat_urllib_parse
except ImportError: # Python 2
import urllib as compat_urllib_parse
try:
from urllib.parse import urlparse as compat_urllib_parse_urlparse
except ImportError: # Python 2
from urlparse import urlparse as compat_urllib_parse_urlparse
try:
import urllib.parse as compat_urlparse
except ImportError: # Python 2
import urlparse as compat_urlparse
try:
import urllib.response as compat_urllib_response
except ImportError: # Python 2
import urllib as compat_urllib_response
try:
import http.cookiejar as compat_cookiejar
except ImportError: # Python 2
import cookielib as compat_cookiejar
try:
import http.cookies as compat_cookies
except ImportError: # Python 2
import Cookie as compat_cookies
try:
import html.entities as compat_html_entities
except ImportError: # Python 2
import htmlentitydefs as compat_html_entities
try: # Python >= 3.3
compat_html_entities_html5 = compat_html_entities.html5
except AttributeError:
# Copied from CPython 3.5.1 html/entities.py
compat_html_entities_html5 = {
'Aacute': '\xc1',
'aacute': '\xe1',
'Aacute;': '\xc1',
'aacute;': '\xe1',
'Abreve;': '\u0102',
'abreve;': '\u0103',
'ac;': '\u223e',
'acd;': '\u223f',
'acE;': '\u223e\u0333',
'Acirc': '\xc2',
'acirc': '\xe2',
'Acirc;': '\xc2',
'acirc;': '\xe2',
'acute': '\xb4',
'acute;': '\xb4',
'Acy;': '\u0410',
'acy;': '\u0430',
'AElig': '\xc6',
'aelig': '\xe6',
'AElig;': '\xc6',
'aelig;': '\xe6',
'af;': '\u2061',
'Afr;': '\U0001d504',
'afr;': '\U0001d51e',
'Agrave': '\xc0',
'agrave': '\xe0',
'Agrave;': '\xc0',
'agrave;': '\xe0',
'alefsym;': '\u2135',
'aleph;': '\u2135',
'Alpha;': '\u0391',
'alpha;': '\u03b1',
'Amacr;': '\u0100',
'amacr;': '\u0101',
'amalg;': '\u2a3f',
'AMP': '&',
'amp': '&',
'AMP;': '&',
'amp;': '&',
'And;': '\u2a53',
'and;': '\u2227',
'andand;': '\u2a55',
'andd;': '\u2a5c',
'andslope;': '\u2a58',
'andv;': '\u2a5a',
'ang;': '\u2220',
'ange;': '\u29a4',
'angle;': '\u2220',
'angmsd;': '\u2221',
'angmsdaa;': '\u29a8',
'angmsdab;': '\u29a9',
'angmsdac;': '\u29aa',
'angmsdad;': '\u29ab',
'angmsdae;': '\u29ac',
'angmsdaf;': '\u29ad',
'angmsdag;': '\u29ae',
'angmsdah;': '\u29af',
'angrt;': '\u221f',
'angrtvb;': '\u22be',
'angrtvbd;': '\u299d',
'angsph;': '\u2222',
'angst;': '\xc5',
'angzarr;': '\u237c',
'Aogon;': '\u0104',
'aogon;': '\u0105',
'Aopf;': '\U0001d538',
'aopf;': '\U0001d552',
'ap;': '\u2248',
'apacir;': '\u2a6f',
'apE;': '\u2a70',
'ape;': '\u224a',
'apid;': '\u224b',
'apos;': "'",
'ApplyFunction;': '\u2061',
'approx;': '\u2248',
'approxeq;': '\u224a',
'Aring': '\xc5',
'aring': '\xe5',
'Aring;': '\xc5',
'aring;': '\xe5',
'Ascr;': '\U0001d49c',
'ascr;': '\U0001d4b6',
'Assign;': '\u2254',
'ast;': '*',
'asymp;': '\u2248',
'asympeq;': '\u224d',
'Atilde': '\xc3',
'atilde': '\xe3',
'Atilde;': '\xc3',
'atilde;': '\xe3',
'Auml': '\xc4',
'auml': '\xe4',
'Auml;': '\xc4',
'auml;': '\xe4',
'awconint;': '\u2233',
'awint;': '\u2a11',
'backcong;': '\u224c',
'backepsilon;': '\u03f6',
'backprime;': '\u2035',
'backsim;': '\u223d',
'backsimeq;': '\u22cd',
'Backslash;': '\u2216',
'Barv;': '\u2ae7',
'barvee;': '\u22bd',
'Barwed;': '\u2306',
'barwed;': '\u2305',
'barwedge;': '\u2305',
'bbrk;': '\u23b5',
'bbrktbrk;': '\u23b6',
'bcong;': '\u224c',
'Bcy;': '\u0411',
'bcy;': '\u0431',
'bdquo;': '\u201e',
'becaus;': '\u2235',
'Because;': '\u2235',
'because;': '\u2235',
'bemptyv;': '\u29b0',
'bepsi;': '\u03f6',
'bernou;': '\u212c',
'Bernoullis;': '\u212c',
'Beta;': '\u0392',
'beta;': '\u03b2',
'beth;': '\u2136',
'between;': '\u226c',
'Bfr;': '\U0001d505',
'bfr;': '\U0001d51f',
'bigcap;': '\u22c2',
'bigcirc;': '\u25ef',
'bigcup;': '\u22c3',
'bigodot;': '\u2a00',
'bigoplus;': '\u2a01',
'bigotimes;': '\u2a02',
'bigsqcup;': '\u2a06',
'bigstar;': '\u2605',
'bigtriangledown;': '\u25bd',
'bigtriangleup;': '\u25b3',
'biguplus;': '\u2a04',
'bigvee;': '\u22c1',
'bigwedge;': '\u22c0',
'bkarow;': '\u290d',
'blacklozenge;': '\u29eb',
'blacksquare;': '\u25aa',
'blacktriangle;': '\u25b4',
'blacktriangledown;': '\u25be',
'blacktriangleleft;': '\u25c2',
'blacktriangleright;': '\u25b8',
'blank;': '\u2423',
'blk12;': '\u2592',
'blk14;': '\u2591',
'blk34;': '\u2593',
'block;': '\u2588',
'bne;': '=\u20e5',
'bnequiv;': '\u2261\u20e5',
'bNot;': '\u2aed',
'bnot;': '\u2310',
'Bopf;': '\U0001d539',
'bopf;': '\U0001d553',
'bot;': '\u22a5',
'bottom;': '\u22a5',
'bowtie;': '\u22c8',
'boxbox;': '\u29c9',
'boxDL;': '\u2557',
'boxDl;': '\u2556',
'boxdL;': '\u2555',
'boxdl;': '\u2510',
'boxDR;': '\u2554',
'boxDr;': '\u2553',
'boxdR;': '\u2552',
'boxdr;': '\u250c',
'boxH;': '\u2550',
'boxh;': '\u2500',
'boxHD;': '\u2566',
'boxHd;': '\u2564',
'boxhD;': '\u2565',
'boxhd;': '\u252c',
'boxHU;': '\u2569',
'boxHu;': '\u2567',
'boxhU;': '\u2568',
'boxhu;': '\u2534',
'boxminus;': '\u229f',
'boxplus;': '\u229e',
'boxtimes;': '\u22a0',
'boxUL;': '\u255d',
'boxUl;': '\u255c',
'boxuL;': '\u255b',
'boxul;': '\u2518',
'boxUR;': '\u255a',
'boxUr;': '\u2559',
'boxuR;': '\u2558',
'boxur;': '\u2514',
'boxV;': '\u2551',
'boxv;': '\u2502',
'boxVH;': '\u256c',
'boxVh;': '\u256b',
'boxvH;': '\u256a',
'boxvh;': '\u253c',
'boxVL;': '\u2563',
'boxVl;': '\u2562',
'boxvL;': '\u2561',
'boxvl;': '\u2524',
'boxVR;': '\u2560',
'boxVr;': '\u255f',
'boxvR;': '\u255e',
'boxvr;': '\u251c',
'bprime;': '\u2035',
'Breve;': '\u02d8',
'breve;': '\u02d8',
'brvbar': '\xa6',
'brvbar;': '\xa6',
'Bscr;': '\u212c',
'bscr;': '\U0001d4b7',
'bsemi;': '\u204f',
'bsim;': '\u223d',
'bsime;': '\u22cd',
'bsol;': '\\',
'bsolb;': '\u29c5',
'bsolhsub;': '\u27c8',
'bull;': '\u2022',
'bullet;': '\u2022',
'bump;': '\u224e',
'bumpE;': '\u2aae',
'bumpe;': '\u224f',
'Bumpeq;': '\u224e',
'bumpeq;': '\u224f',
'Cacute;': '\u0106',
'cacute;': '\u0107',
'Cap;': '\u22d2',
'cap;': '\u2229',
'capand;': '\u2a44',
'capbrcup;': '\u2a49',
'capcap;': '\u2a4b',
'capcup;': '\u2a47',
'capdot;': '\u2a40',
'CapitalDifferentialD;': '\u2145',
'caps;': '\u2229\ufe00',
'caret;': '\u2041',
'caron;': '\u02c7',
'Cayleys;': '\u212d',
'ccaps;': '\u2a4d',
'Ccaron;': '\u010c',
'ccaron;': '\u010d',
'Ccedil': '\xc7',
'ccedil': '\xe7',
'Ccedil;': '\xc7',
'ccedil;': '\xe7',
'Ccirc;': '\u0108',
'ccirc;': '\u0109',
'Cconint;': '\u2230',
'ccups;': '\u2a4c',
'ccupssm;': '\u2a50',
'Cdot;': '\u010a',
'cdot;': '\u010b',
'cedil': '\xb8',
'cedil;': '\xb8',
'Cedilla;': '\xb8',
'cemptyv;': '\u29b2',
'cent': '\xa2',
'cent;': '\xa2',
'CenterDot;': '\xb7',
'centerdot;': '\xb7',
'Cfr;': '\u212d',
'cfr;': '\U0001d520',
'CHcy;': '\u0427',
'chcy;': '\u0447',
'check;': '\u2713',
'checkmark;': '\u2713',
'Chi;': '\u03a7',
'chi;': '\u03c7',
'cir;': '\u25cb',
'circ;': '\u02c6',
'circeq;': '\u2257',
'circlearrowleft;': '\u21ba',
'circlearrowright;': '\u21bb',
'circledast;': '\u229b',
'circledcirc;': '\u229a',
'circleddash;': '\u229d',
'CircleDot;': '\u2299',
'circledR;': '\xae',
'circledS;': '\u24c8',
'CircleMinus;': '\u2296',
'CirclePlus;': '\u2295',
'CircleTimes;': '\u2297',
'cirE;': '\u29c3',
'cire;': '\u2257',
'cirfnint;': '\u2a10',
'cirmid;': '\u2aef',
'cirscir;': '\u29c2',
'ClockwiseContourIntegral;': '\u2232',
'CloseCurlyDoubleQuote;': '\u201d',
'CloseCurlyQuote;': '\u2019',
'clubs;': '\u2663',
'clubsuit;': '\u2663',
'Colon;': '\u2237',
'colon;': ':',
'Colone;': '\u2a74',
'colone;': '\u2254',
'coloneq;': '\u2254',
'comma;': ',',
'commat;': '@',
'comp;': '\u2201',
'compfn;': '\u2218',
'complement;': '\u2201',
'complexes;': '\u2102',
'cong;': '\u2245',
'congdot;': '\u2a6d',
'Congruent;': '\u2261',
'Conint;': '\u222f',
'conint;': '\u222e',
'ContourIntegral;': '\u222e',
'Copf;': '\u2102',
'copf;': '\U0001d554',
'coprod;': '\u2210',
'Coproduct;': '\u2210',
'COPY': '\xa9',
'copy': '\xa9',
'COPY;': '\xa9',
'copy;': '\xa9',
'copysr;': '\u2117',
'CounterClockwiseContourIntegral;': '\u2233',
'crarr;': '\u21b5',
'Cross;': '\u2a2f',
'cross;': '\u2717',
'Cscr;': '\U0001d49e',
'cscr;': '\U0001d4b8',
'csub;': '\u2acf',
'csube;': '\u2ad1',
'csup;': '\u2ad0',
'csupe;': '\u2ad2',
'ctdot;': '\u22ef',
'cudarrl;': '\u2938',
'cudarrr;': '\u2935',
'cuepr;': '\u22de',
'cuesc;': '\u22df',
'cularr;': '\u21b6',
'cularrp;': '\u293d',
'Cup;': '\u22d3',
'cup;': '\u222a',
'cupbrcap;': '\u2a48',
'CupCap;': '\u224d',
'cupcap;': '\u2a46',
'cupcup;': '\u2a4a',
'cupdot;': '\u228d',
'cupor;': '\u2a45',
'cups;': '\u222a\ufe00',
'curarr;': '\u21b7',
'curarrm;': '\u293c',
'curlyeqprec;': '\u22de',
'curlyeqsucc;': '\u22df',
'curlyvee;': '\u22ce',
'curlywedge;': '\u22cf',
'curren': '\xa4',
'curren;': '\xa4',
'curvearrowleft;': '\u21b6',
'curvearrowright;': '\u21b7',
'cuvee;': '\u22ce',
'cuwed;': '\u22cf',
'cwconint;': '\u2232',
'cwint;': '\u2231',
'cylcty;': '\u232d',
'Dagger;': '\u2021',
'dagger;': '\u2020',
'daleth;': '\u2138',
'Darr;': '\u21a1',
'dArr;': '\u21d3',
'darr;': '\u2193',
'dash;': '\u2010',
'Dashv;': '\u2ae4',
'dashv;': '\u22a3',
'dbkarow;': '\u290f',
'dblac;': '\u02dd',
'Dcaron;': '\u010e',
'dcaron;': '\u010f',
'Dcy;': '\u0414',
'dcy;': '\u0434',
'DD;': '\u2145',
'dd;': '\u2146',
'ddagger;': '\u2021',
'ddarr;': '\u21ca',
'DDotrahd;': '\u2911',
'ddotseq;': '\u2a77',
'deg': '\xb0',
'deg;': '\xb0',
'Del;': '\u2207',
'Delta;': '\u0394',
'delta;': '\u03b4',
'demptyv;': '\u29b1',
'dfisht;': '\u297f',
'Dfr;': '\U0001d507',
'dfr;': '\U0001d521',
'dHar;': '\u2965',
'dharl;': '\u21c3',
'dharr;': '\u21c2',
'DiacriticalAcute;': '\xb4',
'DiacriticalDot;': '\u02d9',
'DiacriticalDoubleAcute;': '\u02dd',
'DiacriticalGrave;': '`',
'DiacriticalTilde;': '\u02dc',
'diam;': '\u22c4',
'Diamond;': '\u22c4',
'diamond;': '\u22c4',
'diamondsuit;': '\u2666',
'diams;': '\u2666',
'die;': '\xa8',
'DifferentialD;': '\u2146',
'digamma;': '\u03dd',
'disin;': '\u22f2',
'div;': '\xf7',
'divide': '\xf7',
'divide;': '\xf7',
'divideontimes;': '\u22c7',
'divonx;': '\u22c7',
'DJcy;': '\u0402',
'djcy;': '\u0452',
'dlcorn;': '\u231e',
'dlcrop;': '\u230d',
'dollar;': '$',
'Dopf;': '\U0001d53b',
'dopf;': '\U0001d555',
'Dot;': '\xa8',
'dot;': '\u02d9',
'DotDot;': '\u20dc',
'doteq;': '\u2250',
'doteqdot;': '\u2251',
'DotEqual;': '\u2250',
'dotminus;': '\u2238',
'dotplus;': '\u2214',
'dotsquare;': '\u22a1',
'doublebarwedge;': '\u2306',
'DoubleContourIntegral;': '\u222f',
'DoubleDot;': '\xa8',
'DoubleDownArrow;': '\u21d3',
'DoubleLeftArrow;': '\u21d0',
'DoubleLeftRightArrow;': '\u21d4',
'DoubleLeftTee;': '\u2ae4',
'DoubleLongLeftArrow;': '\u27f8',
'DoubleLongLeftRightArrow;': '\u27fa',
'DoubleLongRightArrow;': '\u27f9',
'DoubleRightArrow;': '\u21d2',
'DoubleRightTee;': '\u22a8',
'DoubleUpArrow;': '\u21d1',
'DoubleUpDownArrow;': '\u21d5',
'DoubleVerticalBar;': '\u2225',
'DownArrow;': '\u2193',
'Downarrow;': '\u21d3',
'downarrow;': '\u2193',
'DownArrowBar;': '\u2913',
'DownArrowUpArrow;': '\u21f5',
'DownBreve;': '\u0311',
'downdownarrows;': '\u21ca',
'downharpoonleft;': '\u21c3',
'downharpoonright;': '\u21c2',
'DownLeftRightVector;': '\u2950',
'DownLeftTeeVector;': '\u295e',
'DownLeftVector;': '\u21bd',
'DownLeftVectorBar;': '\u2956',
'DownRightTeeVector;': '\u295f',
'DownRightVector;': '\u21c1',
'DownRightVectorBar;': '\u2957',
'DownTee;': '\u22a4',
'DownTeeArrow;': '\u21a7',
'drbkarow;': '\u2910',
'drcorn;': '\u231f',
'drcrop;': '\u230c',
'Dscr;': '\U0001d49f',
'dscr;': '\U0001d4b9',
'DScy;': '\u0405',
'dscy;': '\u0455',
'dsol;': '\u29f6',
'Dstrok;': '\u0110',
'dstrok;': '\u0111',
'dtdot;': '\u22f1',
'dtri;': '\u25bf',
'dtrif;': '\u25be',
'duarr;': '\u21f5',
'duhar;': '\u296f',
'dwangle;': '\u29a6',
'DZcy;': '\u040f',
'dzcy;': '\u045f',
'dzigrarr;': '\u27ff',
'Eacute': '\xc9',
'eacute': '\xe9',
'Eacute;': '\xc9',
'eacute;': '\xe9',
'easter;': '\u2a6e',
'Ecaron;': '\u011a',
'ecaron;': '\u011b',
'ecir;': '\u2256',
'Ecirc': '\xca',
'ecirc': '\xea',
'Ecirc;': '\xca',
'ecirc;': '\xea',
'ecolon;': '\u2255',
'Ecy;': '\u042d',
'ecy;': '\u044d',
'eDDot;': '\u2a77',
'Edot;': '\u0116',
'eDot;': '\u2251',
'edot;': '\u0117',
'ee;': '\u2147',
'efDot;': '\u2252',
'Efr;': '\U0001d508',
'efr;': '\U0001d522',
'eg;': '\u2a9a',
'Egrave': '\xc8',
'egrave': '\xe8',
'Egrave;': '\xc8',
'egrave;': '\xe8',
'egs;': '\u2a96',
'egsdot;': '\u2a98',
'el;': '\u2a99',
'Element;': '\u2208',
'elinters;': '\u23e7',
'ell;': '\u2113',
'els;': '\u2a95',
'elsdot;': '\u2a97',
'Emacr;': '\u0112',
'emacr;': '\u0113',
'empty;': '\u2205',
'emptyset;': '\u2205',
'EmptySmallSquare;': '\u25fb',
'emptyv;': '\u2205',
'EmptyVerySmallSquare;': '\u25ab',
'emsp13;': '\u2004',
'emsp14;': '\u2005',
'emsp;': '\u2003',
'ENG;': '\u014a',
'eng;': '\u014b',
'ensp;': '\u2002',
'Eogon;': '\u0118',
'eogon;': '\u0119',
'Eopf;': '\U0001d53c',
'eopf;': '\U0001d556',
'epar;': '\u22d5',
'eparsl;': '\u29e3',
'eplus;': '\u2a71',
'epsi;': '\u03b5',
'Epsilon;': '\u0395',
'epsilon;': '\u03b5',
'epsiv;': '\u03f5',
'eqcirc;': '\u2256',
'eqcolon;': '\u2255',
'eqsim;': '\u2242',
'eqslantgtr;': '\u2a96',
'eqslantless;': '\u2a95',
'Equal;': '\u2a75',
'equals;': '=',
'EqualTilde;': '\u2242',
'equest;': '\u225f',
'Equilibrium;': '\u21cc',
'equiv;': '\u2261',
'equivDD;': '\u2a78',
'eqvparsl;': '\u29e5',
'erarr;': '\u2971',
'erDot;': '\u2253',
'Escr;': '\u2130',
'escr;': '\u212f',
'esdot;': '\u2250',
'Esim;': '\u2a73',
'esim;': '\u2242',
'Eta;': '\u0397',
'eta;': '\u03b7',
'ETH': '\xd0',
'eth': '\xf0',
'ETH;': '\xd0',
'eth;': '\xf0',
'Euml': '\xcb',
'euml': '\xeb',
'Euml;': '\xcb',
'euml;': '\xeb',
'euro;': '\u20ac',
'excl;': '!',
'exist;': '\u2203',
'Exists;': '\u2203',
'expectation;': '\u2130',
'ExponentialE;': '\u2147',
'exponentiale;': '\u2147',
'fallingdotseq;': '\u2252',
'Fcy;': '\u0424',
'fcy;': '\u0444',
'female;': '\u2640',
'ffilig;': '\ufb03',
'fflig;': '\ufb00',
'ffllig;': '\ufb04',
'Ffr;': '\U0001d509',
'ffr;': '\U0001d523',
'filig;': '\ufb01',
'FilledSmallSquare;': '\u25fc',
'FilledVerySmallSquare;': '\u25aa',
'fjlig;': 'fj',
'flat;': '\u266d',
'fllig;': '\ufb02',
'fltns;': '\u25b1',
'fnof;': '\u0192',
'Fopf;': '\U0001d53d',
'fopf;': '\U0001d557',
'ForAll;': '\u2200',
'forall;': '\u2200',
'fork;': '\u22d4',
'forkv;': '\u2ad9',
'Fouriertrf;': '\u2131',
'fpartint;': '\u2a0d',
'frac12': '\xbd',
'frac12;': '\xbd',
'frac13;': '\u2153',
'frac14': '\xbc',
'frac14;': '\xbc',
'frac15;': '\u2155',
'frac16;': '\u2159',
'frac18;': '\u215b',
'frac23;': '\u2154',
'frac25;': '\u2156',
'frac34': '\xbe',
'frac34;': '\xbe',
'frac35;': '\u2157',
'frac38;': '\u215c',
'frac45;': '\u2158',
'frac56;': '\u215a',
'frac58;': '\u215d',
'frac78;': '\u215e',
'frasl;': '\u2044',
'frown;': '\u2322',
'Fscr;': '\u2131',
'fscr;': '\U0001d4bb',
'gacute;': '\u01f5',
'Gamma;': '\u0393',
'gamma;': '\u03b3',
'Gammad;': '\u03dc',
'gammad;': '\u03dd',
'gap;': '\u2a86',
'Gbreve;': '\u011e',
'gbreve;': '\u011f',
'Gcedil;': '\u0122',
'Gcirc;': '\u011c',
'gcirc;': '\u011d',
'Gcy;': '\u0413',
'gcy;': '\u0433',
'Gdot;': '\u0120',
'gdot;': '\u0121',
'gE;': '\u2267',
'ge;': '\u2265',
'gEl;': '\u2a8c',
'gel;': '\u22db',
'geq;': '\u2265',
'geqq;': '\u2267',
'geqslant;': '\u2a7e',
'ges;': '\u2a7e',
'gescc;': '\u2aa9',
'gesdot;': '\u2a80',
'gesdoto;': '\u2a82',
'gesdotol;': '\u2a84',
'gesl;': '\u22db\ufe00',
'gesles;': '\u2a94',
'Gfr;': '\U0001d50a',
'gfr;': '\U0001d524',
'Gg;': '\u22d9',
'gg;': '\u226b',
'ggg;': '\u22d9',
'gimel;': '\u2137',
'GJcy;': '\u0403',
'gjcy;': '\u0453',
'gl;': '\u2277',
'gla;': '\u2aa5',
'glE;': '\u2a92',
'glj;': '\u2aa4',
'gnap;': '\u2a8a',
'gnapprox;': '\u2a8a',
'gnE;': '\u2269',
'gne;': '\u2a88',
'gneq;': '\u2a88',
'gneqq;': '\u2269',
'gnsim;': '\u22e7',
'Gopf;': '\U0001d53e',
'gopf;': '\U0001d558',
'grave;': '`',
'GreaterEqual;': '\u2265',
'GreaterEqualLess;': '\u22db',
'GreaterFullEqual;': '\u2267',
'GreaterGreater;': '\u2aa2',
'GreaterLess;': '\u2277',
'GreaterSlantEqual;': '\u2a7e',
'GreaterTilde;': '\u2273',
'Gscr;': '\U0001d4a2',
'gscr;': '\u210a',
'gsim;': '\u2273',
'gsime;': '\u2a8e',
'gsiml;': '\u2a90',
'GT': '>',
'gt': '>',
'GT;': '>',
'Gt;': '\u226b',
'gt;': '>',
'gtcc;': '\u2aa7',
'gtcir;': '\u2a7a',
'gtdot;': '\u22d7',
'gtlPar;': '\u2995',
'gtquest;': '\u2a7c',
'gtrapprox;': '\u2a86',
'gtrarr;': '\u2978',
'gtrdot;': '\u22d7',
'gtreqless;': '\u22db',
'gtreqqless;': '\u2a8c',
'gtrless;': '\u2277',
'gtrsim;': '\u2273',
'gvertneqq;': '\u2269\ufe00',
'gvnE;': '\u2269\ufe00',
'Hacek;': '\u02c7',
'hairsp;': '\u200a',
'half;': '\xbd',
'hamilt;': '\u210b',
'HARDcy;': '\u042a',
'hardcy;': '\u044a',
'hArr;': '\u21d4',
'harr;': '\u2194',
'harrcir;': '\u2948',
'harrw;': '\u21ad',
'Hat;': '^',
'hbar;': '\u210f',
'Hcirc;': '\u0124',
'hcirc;': '\u0125',
'hearts;': '\u2665',
'heartsuit;': '\u2665',
'hellip;': '\u2026',
'hercon;': '\u22b9',
'Hfr;': '\u210c',
'hfr;': '\U0001d525',
'HilbertSpace;': '\u210b',
'hksearow;': '\u2925',
'hkswarow;': '\u2926',
'hoarr;': '\u21ff',
'homtht;': '\u223b',
'hookleftarrow;': '\u21a9',
'hookrightarrow;': '\u21aa',
'Hopf;': '\u210d',
'hopf;': '\U0001d559',
'horbar;': '\u2015',
'HorizontalLine;': '\u2500',
'Hscr;': '\u210b',
'hscr;': '\U0001d4bd',
'hslash;': '\u210f',
'Hstrok;': '\u0126',
'hstrok;': '\u0127',
'HumpDownHump;': '\u224e',
'HumpEqual;': '\u224f',
'hybull;': '\u2043',
'hyphen;': '\u2010',
'Iacute': '\xcd',
'iacute': '\xed',
'Iacute;': '\xcd',
'iacute;': '\xed',
'ic;': '\u2063',
'Icirc': '\xce',
'icirc': '\xee',
'Icirc;': '\xce',
'icirc;': '\xee',
'Icy;': '\u0418',
'icy;': '\u0438',
'Idot;': '\u0130',
'IEcy;': '\u0415',
'iecy;': '\u0435',
'iexcl': '\xa1',
'iexcl;': '\xa1',
'iff;': '\u21d4',
'Ifr;': '\u2111',
'ifr;': '\U0001d526',
'Igrave': '\xcc',
'igrave': '\xec',
'Igrave;': '\xcc',
'igrave;': '\xec',
'ii;': '\u2148',
'iiiint;': '\u2a0c',
'iiint;': '\u222d',
'iinfin;': '\u29dc',
'iiota;': '\u2129',
'IJlig;': '\u0132',
'ijlig;': '\u0133',
'Im;': '\u2111',
'Imacr;': '\u012a',
'imacr;': '\u012b',
'image;': '\u2111',
'ImaginaryI;': '\u2148',
'imagline;': '\u2110',
'imagpart;': '\u2111',
'imath;': '\u0131',
'imof;': '\u22b7',
'imped;': '\u01b5',
'Implies;': '\u21d2',
'in;': '\u2208',
'incare;': '\u2105',
'infin;': '\u221e',
'infintie;': '\u29dd',
'inodot;': '\u0131',
'Int;': '\u222c',
'int;': '\u222b',
'intcal;': '\u22ba',
'integers;': '\u2124',
'Integral;': '\u222b',
'intercal;': '\u22ba',
'Intersection;': '\u22c2',
'intlarhk;': '\u2a17',
'intprod;': '\u2a3c',
'InvisibleComma;': '\u2063',
'InvisibleTimes;': '\u2062',
'IOcy;': '\u0401',
'iocy;': '\u0451',
'Iogon;': '\u012e',
'iogon;': '\u012f',
'Iopf;': '\U0001d540',
'iopf;': '\U0001d55a',
'Iota;': '\u0399',
'iota;': '\u03b9',
'iprod;': '\u2a3c',
'iquest': '\xbf',
'iquest;': '\xbf',
'Iscr;': '\u2110',
'iscr;': '\U0001d4be',
'isin;': '\u2208',
'isindot;': '\u22f5',
'isinE;': '\u22f9',
'isins;': '\u22f4',
'isinsv;': '\u22f3',
'isinv;': '\u2208',
'it;': '\u2062',
'Itilde;': '\u0128',
'itilde;': '\u0129',
'Iukcy;': '\u0406',
'iukcy;': '\u0456',
'Iuml': '\xcf',
'iuml': '\xef',
'Iuml;': '\xcf',
'iuml;': '\xef',
'Jcirc;': '\u0134',
'jcirc;': '\u0135',
'Jcy;': '\u0419',
'jcy;': '\u0439',
'Jfr;': '\U0001d50d',
'jfr;': '\U0001d527',
'jmath;': '\u0237',
'Jopf;': '\U0001d541',
'jopf;': '\U0001d55b',
'Jscr;': '\U0001d4a5',
'jscr;': '\U0001d4bf',
'Jsercy;': '\u0408',
'jsercy;': '\u0458',
'Jukcy;': '\u0404',
'jukcy;': '\u0454',
'Kappa;': '\u039a',
'kappa;': '\u03ba',
'kappav;': '\u03f0',
'Kcedil;': '\u0136',
'kcedil;': '\u0137',
'Kcy;': '\u041a',
'kcy;': '\u043a',
'Kfr;': '\U0001d50e',
'kfr;': '\U0001d528',
'kgreen;': '\u0138',
'KHcy;': '\u0425',
'khcy;': '\u0445',
'KJcy;': '\u040c',
'kjcy;': '\u045c',
'Kopf;': '\U0001d542',
'kopf;': '\U0001d55c',
'Kscr;': '\U0001d4a6',
'kscr;': '\U0001d4c0',
'lAarr;': '\u21da',
'Lacute;': '\u0139',
'lacute;': '\u013a',
'laemptyv;': '\u29b4',
'lagran;': '\u2112',
'Lambda;': '\u039b',
'lambda;': '\u03bb',
'Lang;': '\u27ea',
'lang;': '\u27e8',
'langd;': '\u2991',
'langle;': '\u27e8',
'lap;': '\u2a85',
'Laplacetrf;': '\u2112',
'laquo': '\xab',
'laquo;': '\xab',
'Larr;': '\u219e',
'lArr;': '\u21d0',
'larr;': '\u2190',
'larrb;': '\u21e4',
'larrbfs;': '\u291f',
'larrfs;': '\u291d',
'larrhk;': '\u21a9',
'larrlp;': '\u21ab',
'larrpl;': '\u2939',
'larrsim;': '\u2973',
'larrtl;': '\u21a2',
'lat;': '\u2aab',
'lAtail;': '\u291b',
'latail;': '\u2919',
'late;': '\u2aad',
'lates;': '\u2aad\ufe00',
'lBarr;': '\u290e',
'lbarr;': '\u290c',
'lbbrk;': '\u2772',
'lbrace;': '{',
'lbrack;': '[',
'lbrke;': '\u298b',
'lbrksld;': '\u298f',
'lbrkslu;': '\u298d',
'Lcaron;': '\u013d',
'lcaron;': '\u013e',
'Lcedil;': '\u013b',
'lcedil;': '\u013c',
'lceil;': '\u2308',
'lcub;': '{',
'Lcy;': '\u041b',
'lcy;': '\u043b',
'ldca;': '\u2936',
'ldquo;': '\u201c',
'ldquor;': '\u201e',
'ldrdhar;': '\u2967',
'ldrushar;': '\u294b',
'ldsh;': '\u21b2',
'lE;': '\u2266',
'le;': '\u2264',
'LeftAngleBracket;': '\u27e8',
'LeftArrow;': '\u2190',
'Leftarrow;': '\u21d0',
'leftarrow;': '\u2190',
'LeftArrowBar;': '\u21e4',
'LeftArrowRightArrow;': '\u21c6',
'leftarrowtail;': '\u21a2',
'LeftCeiling;': '\u2308',
'LeftDoubleBracket;': '\u27e6',
'LeftDownTeeVector;': '\u2961',
'LeftDownVector;': '\u21c3',
'LeftDownVectorBar;': '\u2959',
'LeftFloor;': '\u230a',
'leftharpoondown;': '\u21bd',
'leftharpoonup;': '\u21bc',
'leftleftarrows;': '\u21c7',
'LeftRightArrow;': '\u2194',
'Leftrightarrow;': '\u21d4',
'leftrightarrow;': '\u2194',
'leftrightarrows;': '\u21c6',
'leftrightharpoons;': '\u21cb',
'leftrightsquigarrow;': '\u21ad',
'LeftRightVector;': '\u294e',
'LeftTee;': '\u22a3',
'LeftTeeArrow;': '\u21a4',
'LeftTeeVector;': '\u295a',
'leftthreetimes;': '\u22cb',
'LeftTriangle;': '\u22b2',
'LeftTriangleBar;': '\u29cf',
'LeftTriangleEqual;': '\u22b4',
'LeftUpDownVector;': '\u2951',
'LeftUpTeeVector;': '\u2960',
'LeftUpVector;': '\u21bf',
'LeftUpVectorBar;': '\u2958',
'LeftVector;': '\u21bc',
'LeftVectorBar;': '\u2952',
'lEg;': '\u2a8b',
'leg;': '\u22da',
'leq;': '\u2264',
'leqq;': '\u2266',
'leqslant;': '\u2a7d',
'les;': '\u2a7d',
'lescc;': '\u2aa8',
'lesdot;': '\u2a7f',
'lesdoto;': '\u2a81',
'lesdotor;': '\u2a83',
'lesg;': '\u22da\ufe00',
'lesges;': '\u2a93',
'lessapprox;': '\u2a85',
'lessdot;': '\u22d6',
'lesseqgtr;': '\u22da',
'lesseqqgtr;': '\u2a8b',
'LessEqualGreater;': '\u22da',
'LessFullEqual;': '\u2266',
'LessGreater;': '\u2276',
'lessgtr;': '\u2276',
'LessLess;': '\u2aa1',
'lesssim;': '\u2272',
'LessSlantEqual;': '\u2a7d',
'LessTilde;': '\u2272',
'lfisht;': '\u297c',
'lfloor;': '\u230a',
'Lfr;': '\U0001d50f',
'lfr;': '\U0001d529',
'lg;': '\u2276',
'lgE;': '\u2a91',
'lHar;': '\u2962',
'lhard;': '\u21bd',
'lharu;': '\u21bc',
'lharul;': '\u296a',
'lhblk;': '\u2584',
'LJcy;': '\u0409',
'ljcy;': '\u0459',
'Ll;': '\u22d8',
'll;': '\u226a',
'llarr;': '\u21c7',
'llcorner;': '\u231e',
'Lleftarrow;': '\u21da',
'llhard;': '\u296b',
'lltri;': '\u25fa',
'Lmidot;': '\u013f',
'lmidot;': '\u0140',
'lmoust;': '\u23b0',
'lmoustache;': '\u23b0',
'lnap;': '\u2a89',
'lnapprox;': '\u2a89',
'lnE;': '\u2268',
'lne;': '\u2a87',
'lneq;': '\u2a87',
'lneqq;': '\u2268',
'lnsim;': '\u22e6',
'loang;': '\u27ec',
'loarr;': '\u21fd',
'lobrk;': '\u27e6',
'LongLeftArrow;': '\u27f5',
'Longleftarrow;': '\u27f8',
'longleftarrow;': '\u27f5',
'LongLeftRightArrow;': '\u27f7',
'Longleftrightarrow;': '\u27fa',
'longleftrightarrow;': '\u27f7',
'longmapsto;': '\u27fc',
'LongRightArrow;': '\u27f6',
'Longrightarrow;': '\u27f9',
'longrightarrow;': '\u27f6',
'looparrowleft;': '\u21ab',
'looparrowright;': '\u21ac',
'lopar;': '\u2985',
'Lopf;': '\U0001d543',
'lopf;': '\U0001d55d',
'loplus;': '\u2a2d',
'lotimes;': '\u2a34',
'lowast;': '\u2217',
'lowbar;': '_',
'LowerLeftArrow;': '\u2199',
'LowerRightArrow;': '\u2198',
'loz;': '\u25ca',
'lozenge;': '\u25ca',
'lozf;': '\u29eb',
'lpar;': '(',
'lparlt;': '\u2993',
'lrarr;': '\u21c6',
'lrcorner;': '\u231f',
'lrhar;': '\u21cb',
'lrhard;': '\u296d',
'lrm;': '\u200e',
'lrtri;': '\u22bf',
'lsaquo;': '\u2039',
'Lscr;': '\u2112',
'lscr;': '\U0001d4c1',
'Lsh;': '\u21b0',
'lsh;': '\u21b0',
'lsim;': '\u2272',
'lsime;': '\u2a8d',
'lsimg;': '\u2a8f',
'lsqb;': '[',
'lsquo;': '\u2018',
'lsquor;': '\u201a',
'Lstrok;': '\u0141',
'lstrok;': '\u0142',
'LT': '<',
'lt': '<',
'LT;': '<',
'Lt;': '\u226a',
'lt;': '<',
'ltcc;': '\u2aa6',
'ltcir;': '\u2a79',
'ltdot;': '\u22d6',
'lthree;': '\u22cb',
'ltimes;': '\u22c9',
'ltlarr;': '\u2976',
'ltquest;': '\u2a7b',
'ltri;': '\u25c3',
'ltrie;': '\u22b4',
'ltrif;': '\u25c2',
'ltrPar;': '\u2996',
'lurdshar;': '\u294a',
'luruhar;': '\u2966',
'lvertneqq;': '\u2268\ufe00',
'lvnE;': '\u2268\ufe00',
'macr': '\xaf',
'macr;': '\xaf',
'male;': '\u2642',
'malt;': '\u2720',
'maltese;': '\u2720',
'Map;': '\u2905',
'map;': '\u21a6',
'mapsto;': '\u21a6',
'mapstodown;': '\u21a7',
'mapstoleft;': '\u21a4',
'mapstoup;': '\u21a5',
'marker;': '\u25ae',
'mcomma;': '\u2a29',
'Mcy;': '\u041c',
'mcy;': '\u043c',
'mdash;': '\u2014',
'mDDot;': '\u223a',
'measuredangle;': '\u2221',
'MediumSpace;': '\u205f',
'Mellintrf;': '\u2133',
'Mfr;': '\U0001d510',
'mfr;': '\U0001d52a',
'mho;': '\u2127',
'micro': '\xb5',
'micro;': '\xb5',
'mid;': '\u2223',
'midast;': '*',
'midcir;': '\u2af0',
'middot': '\xb7',
'middot;': '\xb7',
'minus;': '\u2212',
'minusb;': '\u229f',
'minusd;': '\u2238',
'minusdu;': '\u2a2a',
'MinusPlus;': '\u2213',
'mlcp;': '\u2adb',
'mldr;': '\u2026',
'mnplus;': '\u2213',
'models;': '\u22a7',
'Mopf;': '\U0001d544',
'mopf;': '\U0001d55e',
'mp;': '\u2213',
'Mscr;': '\u2133',
'mscr;': '\U0001d4c2',
'mstpos;': '\u223e',
'Mu;': '\u039c',
'mu;': '\u03bc',
'multimap;': '\u22b8',
'mumap;': '\u22b8',
'nabla;': '\u2207',
'Nacute;': '\u0143',
'nacute;': '\u0144',
'nang;': '\u2220\u20d2',
'nap;': '\u2249',
'napE;': '\u2a70\u0338',
'napid;': '\u224b\u0338',
'napos;': '\u0149',
'napprox;': '\u2249',
'natur;': '\u266e',
'natural;': '\u266e',
'naturals;': '\u2115',
'nbsp': '\xa0',
'nbsp;': '\xa0',
'nbump;': '\u224e\u0338',
'nbumpe;': '\u224f\u0338',
'ncap;': '\u2a43',
'Ncaron;': '\u0147',
'ncaron;': '\u0148',
'Ncedil;': '\u0145',
'ncedil;': '\u0146',
'ncong;': '\u2247',
'ncongdot;': '\u2a6d\u0338',
'ncup;': '\u2a42',
'Ncy;': '\u041d',
'ncy;': '\u043d',
'ndash;': '\u2013',
'ne;': '\u2260',
'nearhk;': '\u2924',
'neArr;': '\u21d7',
'nearr;': '\u2197',
'nearrow;': '\u2197',
'nedot;': '\u2250\u0338',
'NegativeMediumSpace;': '\u200b',
'NegativeThickSpace;': '\u200b',
'NegativeThinSpace;': '\u200b',
'NegativeVeryThinSpace;': '\u200b',
'nequiv;': '\u2262',
'nesear;': '\u2928',
'nesim;': '\u2242\u0338',
'NestedGreaterGreater;': '\u226b',
'NestedLessLess;': '\u226a',
'NewLine;': '\n',
'nexist;': '\u2204',
'nexists;': '\u2204',
'Nfr;': '\U0001d511',
'nfr;': '\U0001d52b',
'ngE;': '\u2267\u0338',
'nge;': '\u2271',
'ngeq;': '\u2271',
'ngeqq;': '\u2267\u0338',
'ngeqslant;': '\u2a7e\u0338',
'nges;': '\u2a7e\u0338',
'nGg;': '\u22d9\u0338',
'ngsim;': '\u2275',
'nGt;': '\u226b\u20d2',
'ngt;': '\u226f',
'ngtr;': '\u226f',
'nGtv;': '\u226b\u0338',
'nhArr;': '\u21ce',
'nharr;': '\u21ae',
'nhpar;': '\u2af2',
'ni;': '\u220b',
'nis;': '\u22fc',
'nisd;': '\u22fa',
'niv;': '\u220b',
'NJcy;': '\u040a',
'njcy;': '\u045a',
'nlArr;': '\u21cd',
'nlarr;': '\u219a',
'nldr;': '\u2025',
'nlE;': '\u2266\u0338',
'nle;': '\u2270',
'nLeftarrow;': '\u21cd',
'nleftarrow;': '\u219a',
'nLeftrightarrow;': '\u21ce',
'nleftrightarrow;': '\u21ae',
'nleq;': '\u2270',
'nleqq;': '\u2266\u0338',
'nleqslant;': '\u2a7d\u0338',
'nles;': '\u2a7d\u0338',
'nless;': '\u226e',
'nLl;': '\u22d8\u0338',
'nlsim;': '\u2274',
'nLt;': '\u226a\u20d2',
'nlt;': '\u226e',
'nltri;': '\u22ea',
'nltrie;': '\u22ec',
'nLtv;': '\u226a\u0338',
'nmid;': '\u2224',
'NoBreak;': '\u2060',
'NonBreakingSpace;': '\xa0',
'Nopf;': '\u2115',
'nopf;': '\U0001d55f',
'not': '\xac',
'Not;': '\u2aec',
'not;': '\xac',
'NotCongruent;': '\u2262',
'NotCupCap;': '\u226d',
'NotDoubleVerticalBar;': '\u2226',
'NotElement;': '\u2209',
'NotEqual;': '\u2260',
'NotEqualTilde;': '\u2242\u0338',
'NotExists;': '\u2204',
'NotGreater;': '\u226f',
'NotGreaterEqual;': '\u2271',
'NotGreaterFullEqual;': '\u2267\u0338',
'NotGreaterGreater;': '\u226b\u0338',
'NotGreaterLess;': '\u2279',
'NotGreaterSlantEqual;': '\u2a7e\u0338',
'NotGreaterTilde;': '\u2275',
'NotHumpDownHump;': '\u224e\u0338',
'NotHumpEqual;': '\u224f\u0338',
'notin;': '\u2209',
'notindot;': '\u22f5\u0338',
'notinE;': '\u22f9\u0338',
'notinva;': '\u2209',
'notinvb;': '\u22f7',
'notinvc;': '\u22f6',
'NotLeftTriangle;': '\u22ea',
'NotLeftTriangleBar;': '\u29cf\u0338',
'NotLeftTriangleEqual;': '\u22ec',
'NotLess;': '\u226e',
'NotLessEqual;': '\u2270',
'NotLessGreater;': '\u2278',
'NotLessLess;': '\u226a\u0338',
'NotLessSlantEqual;': '\u2a7d\u0338',
'NotLessTilde;': '\u2274',
'NotNestedGreaterGreater;': '\u2aa2\u0338',
'NotNestedLessLess;': '\u2aa1\u0338',
'notni;': '\u220c',
'notniva;': '\u220c',
'notnivb;': '\u22fe',
'notnivc;': '\u22fd',
'NotPrecedes;': '\u2280',
'NotPrecedesEqual;': '\u2aaf\u0338',
'NotPrecedesSlantEqual;': '\u22e0',
'NotReverseElement;': '\u220c',
'NotRightTriangle;': '\u22eb',
'NotRightTriangleBar;': '\u29d0\u0338',
'NotRightTriangleEqual;': '\u22ed',
'NotSquareSubset;': '\u228f\u0338',
'NotSquareSubsetEqual;': '\u22e2',
'NotSquareSuperset;': '\u2290\u0338',
'NotSquareSupersetEqual;': '\u22e3',
'NotSubset;': '\u2282\u20d2',
'NotSubsetEqual;': '\u2288',
'NotSucceeds;': '\u2281',
'NotSucceedsEqual;': '\u2ab0\u0338',
'NotSucceedsSlantEqual;': '\u22e1',
'NotSucceedsTilde;': '\u227f\u0338',
'NotSuperset;': '\u2283\u20d2',
'NotSupersetEqual;': '\u2289',
'NotTilde;': '\u2241',
'NotTildeEqual;': '\u2244',
'NotTildeFullEqual;': '\u2247',
'NotTildeTilde;': '\u2249',
'NotVerticalBar;': '\u2224',
'npar;': '\u2226',
'nparallel;': '\u2226',
'nparsl;': '\u2afd\u20e5',
'npart;': '\u2202\u0338',
'npolint;': '\u2a14',
'npr;': '\u2280',
'nprcue;': '\u22e0',
'npre;': '\u2aaf\u0338',
'nprec;': '\u2280',
'npreceq;': '\u2aaf\u0338',
'nrArr;': '\u21cf',
'nrarr;': '\u219b',
'nrarrc;': '\u2933\u0338',
'nrarrw;': '\u219d\u0338',
'nRightarrow;': '\u21cf',
'nrightarrow;': '\u219b',
'nrtri;': '\u22eb',
'nrtrie;': '\u22ed',
'nsc;': '\u2281',
'nsccue;': '\u22e1',
'nsce;': '\u2ab0\u0338',
'Nscr;': '\U0001d4a9',
'nscr;': '\U0001d4c3',
'nshortmid;': '\u2224',
'nshortparallel;': '\u2226',
'nsim;': '\u2241',
'nsime;': '\u2244',
'nsimeq;': '\u2244',
'nsmid;': '\u2224',
'nspar;': '\u2226',
'nsqsube;': '\u22e2',
'nsqsupe;': '\u22e3',
'nsub;': '\u2284',
'nsubE;': '\u2ac5\u0338',
'nsube;': '\u2288',
'nsubset;': '\u2282\u20d2',
'nsubseteq;': '\u2288',
'nsubseteqq;': '\u2ac5\u0338',
'nsucc;': '\u2281',
'nsucceq;': '\u2ab0\u0338',
'nsup;': '\u2285',
'nsupE;': '\u2ac6\u0338',
'nsupe;': '\u2289',
'nsupset;': '\u2283\u20d2',
'nsupseteq;': '\u2289',
'nsupseteqq;': '\u2ac6\u0338',
'ntgl;': '\u2279',
'Ntilde': '\xd1',
'ntilde': '\xf1',
'Ntilde;': '\xd1',
'ntilde;': '\xf1',
'ntlg;': '\u2278',
'ntriangleleft;': '\u22ea',
'ntrianglelefteq;': '\u22ec',
'ntriangleright;': '\u22eb',
'ntrianglerighteq;': '\u22ed',
'Nu;': '\u039d',
'nu;': '\u03bd',
'num;': '#',
'numero;': '\u2116',
'numsp;': '\u2007',
'nvap;': '\u224d\u20d2',
'nVDash;': '\u22af',
'nVdash;': '\u22ae',
'nvDash;': '\u22ad',
'nvdash;': '\u22ac',
'nvge;': '\u2265\u20d2',
'nvgt;': '>\u20d2',
'nvHarr;': '\u2904',
'nvinfin;': '\u29de',
'nvlArr;': '\u2902',
'nvle;': '\u2264\u20d2',
'nvlt;': '<\u20d2',
'nvltrie;': '\u22b4\u20d2',
'nvrArr;': '\u2903',
'nvrtrie;': '\u22b5\u20d2',
'nvsim;': '\u223c\u20d2',
'nwarhk;': '\u2923',
'nwArr;': '\u21d6',
'nwarr;': '\u2196',
'nwarrow;': '\u2196',
'nwnear;': '\u2927',
'Oacute': '\xd3',
'oacute': '\xf3',
'Oacute;': '\xd3',
'oacute;': '\xf3',
'oast;': '\u229b',
'ocir;': '\u229a',
'Ocirc': '\xd4',
'ocirc': '\xf4',
'Ocirc;': '\xd4',
'ocirc;': '\xf4',
'Ocy;': '\u041e',
'ocy;': '\u043e',
'odash;': '\u229d',
'Odblac;': '\u0150',
'odblac;': '\u0151',
'odiv;': '\u2a38',
'odot;': '\u2299',
'odsold;': '\u29bc',
'OElig;': '\u0152',
'oelig;': '\u0153',
'ofcir;': '\u29bf',
'Ofr;': '\U0001d512',
'ofr;': '\U0001d52c',
'ogon;': '\u02db',
'Ograve': '\xd2',
'ograve': '\xf2',
'Ograve;': '\xd2',
'ograve;': '\xf2',
'ogt;': '\u29c1',
'ohbar;': '\u29b5',
'ohm;': '\u03a9',
'oint;': '\u222e',
'olarr;': '\u21ba',
'olcir;': '\u29be',
'olcross;': '\u29bb',
'oline;': '\u203e',
'olt;': '\u29c0',
'Omacr;': '\u014c',
'omacr;': '\u014d',
'Omega;': '\u03a9',
'omega;': '\u03c9',
'Omicron;': '\u039f',
'omicron;': '\u03bf',
'omid;': '\u29b6',
'ominus;': '\u2296',
'Oopf;': '\U0001d546',
'oopf;': '\U0001d560',
'opar;': '\u29b7',
'OpenCurlyDoubleQuote;': '\u201c',
'OpenCurlyQuote;': '\u2018',
'operp;': '\u29b9',
'oplus;': '\u2295',
'Or;': '\u2a54',
'or;': '\u2228',
'orarr;': '\u21bb',
'ord;': '\u2a5d',
'order;': '\u2134',
'orderof;': '\u2134',
'ordf': '\xaa',
'ordf;': '\xaa',
'ordm': '\xba',
'ordm;': '\xba',
'origof;': '\u22b6',
'oror;': '\u2a56',
'orslope;': '\u2a57',
'orv;': '\u2a5b',
'oS;': '\u24c8',
'Oscr;': '\U0001d4aa',
'oscr;': '\u2134',
'Oslash': '\xd8',
'oslash': '\xf8',
'Oslash;': '\xd8',
'oslash;': '\xf8',
'osol;': '\u2298',
'Otilde': '\xd5',
'otilde': '\xf5',
'Otilde;': '\xd5',
'otilde;': '\xf5',
'Otimes;': '\u2a37',
'otimes;': '\u2297',
'otimesas;': '\u2a36',
'Ouml': '\xd6',
'ouml': '\xf6',
'Ouml;': '\xd6',
'ouml;': '\xf6',
'ovbar;': '\u233d',
'OverBar;': '\u203e',
'OverBrace;': '\u23de',
'OverBracket;': '\u23b4',
'OverParenthesis;': '\u23dc',
'par;': '\u2225',
'para': '\xb6',
'para;': '\xb6',
'parallel;': '\u2225',
'parsim;': '\u2af3',
'parsl;': '\u2afd',
'part;': '\u2202',
'PartialD;': '\u2202',
'Pcy;': '\u041f',
'pcy;': '\u043f',
'percnt;': '%',
'period;': '.',
'permil;': '\u2030',
'perp;': '\u22a5',
'pertenk;': '\u2031',
'Pfr;': '\U0001d513',
'pfr;': '\U0001d52d',
'Phi;': '\u03a6',
'phi;': '\u03c6',
'phiv;': '\u03d5',
'phmmat;': '\u2133',
'phone;': '\u260e',
'Pi;': '\u03a0',
'pi;': '\u03c0',
'pitchfork;': '\u22d4',
'piv;': '\u03d6',
'planck;': '\u210f',
'planckh;': '\u210e',
'plankv;': '\u210f',
'plus;': '+',
'plusacir;': '\u2a23',
'plusb;': '\u229e',
'pluscir;': '\u2a22',
'plusdo;': '\u2214',
'plusdu;': '\u2a25',
'pluse;': '\u2a72',
'PlusMinus;': '\xb1',
'plusmn': '\xb1',
'plusmn;': '\xb1',
'plussim;': '\u2a26',
'plustwo;': '\u2a27',
'pm;': '\xb1',
'Poincareplane;': '\u210c',
'pointint;': '\u2a15',
'Popf;': '\u2119',
'popf;': '\U0001d561',
'pound': '\xa3',
'pound;': '\xa3',
'Pr;': '\u2abb',
'pr;': '\u227a',
'prap;': '\u2ab7',
'prcue;': '\u227c',
'prE;': '\u2ab3',
'pre;': '\u2aaf',
'prec;': '\u227a',
'precapprox;': '\u2ab7',
'preccurlyeq;': '\u227c',
'Precedes;': '\u227a',
'PrecedesEqual;': '\u2aaf',
'PrecedesSlantEqual;': '\u227c',
'PrecedesTilde;': '\u227e',
'preceq;': '\u2aaf',
'precnapprox;': '\u2ab9',
'precneqq;': '\u2ab5',
'precnsim;': '\u22e8',
'precsim;': '\u227e',
'Prime;': '\u2033',
'prime;': '\u2032',
'primes;': '\u2119',
'prnap;': '\u2ab9',
'prnE;': '\u2ab5',
'prnsim;': '\u22e8',
'prod;': '\u220f',
'Product;': '\u220f',
'profalar;': '\u232e',
'profline;': '\u2312',
'profsurf;': '\u2313',
'prop;': '\u221d',
'Proportion;': '\u2237',
'Proportional;': '\u221d',
'propto;': '\u221d',
'prsim;': '\u227e',
'prurel;': '\u22b0',
'Pscr;': '\U0001d4ab',
'pscr;': '\U0001d4c5',
'Psi;': '\u03a8',
'psi;': '\u03c8',
'puncsp;': '\u2008',
'Qfr;': '\U0001d514',
'qfr;': '\U0001d52e',
'qint;': '\u2a0c',
'Qopf;': '\u211a',
'qopf;': '\U0001d562',
'qprime;': '\u2057',
'Qscr;': '\U0001d4ac',
'qscr;': '\U0001d4c6',
'quaternions;': '\u210d',
'quatint;': '\u2a16',
'quest;': '?',
'questeq;': '\u225f',
'QUOT': '"',
'quot': '"',
'QUOT;': '"',
'quot;': '"',
'rAarr;': '\u21db',
'race;': '\u223d\u0331',
'Racute;': '\u0154',
'racute;': '\u0155',
'radic;': '\u221a',
'raemptyv;': '\u29b3',
'Rang;': '\u27eb',
'rang;': '\u27e9',
'rangd;': '\u2992',
'range;': '\u29a5',
'rangle;': '\u27e9',
'raquo': '\xbb',
'raquo;': '\xbb',
'Rarr;': '\u21a0',
'rArr;': '\u21d2',
'rarr;': '\u2192',
'rarrap;': '\u2975',
'rarrb;': '\u21e5',
'rarrbfs;': '\u2920',
'rarrc;': '\u2933',
'rarrfs;': '\u291e',
'rarrhk;': '\u21aa',
'rarrlp;': '\u21ac',
'rarrpl;': '\u2945',
'rarrsim;': '\u2974',
'Rarrtl;': '\u2916',
'rarrtl;': '\u21a3',
'rarrw;': '\u219d',
'rAtail;': '\u291c',
'ratail;': '\u291a',
'ratio;': '\u2236',
'rationals;': '\u211a',
'RBarr;': '\u2910',
'rBarr;': '\u290f',
'rbarr;': '\u290d',
'rbbrk;': '\u2773',
'rbrace;': '}',
'rbrack;': ']',
'rbrke;': '\u298c',
'rbrksld;': '\u298e',
'rbrkslu;': '\u2990',
'Rcaron;': '\u0158',
'rcaron;': '\u0159',
'Rcedil;': '\u0156',
'rcedil;': '\u0157',
'rceil;': '\u2309',
'rcub;': '}',
'Rcy;': '\u0420',
'rcy;': '\u0440',
'rdca;': '\u2937',
'rdldhar;': '\u2969',
'rdquo;': '\u201d',
'rdquor;': '\u201d',
'rdsh;': '\u21b3',
'Re;': '\u211c',
'real;': '\u211c',
'realine;': '\u211b',
'realpart;': '\u211c',
'reals;': '\u211d',
'rect;': '\u25ad',
'REG': '\xae',
'reg': '\xae',
'REG;': '\xae',
'reg;': '\xae',
'ReverseElement;': '\u220b',
'ReverseEquilibrium;': '\u21cb',
'ReverseUpEquilibrium;': '\u296f',
'rfisht;': '\u297d',
'rfloor;': '\u230b',
'Rfr;': '\u211c',
'rfr;': '\U0001d52f',
'rHar;': '\u2964',
'rhard;': '\u21c1',
'rharu;': '\u21c0',
'rharul;': '\u296c',
'Rho;': '\u03a1',
'rho;': '\u03c1',
'rhov;': '\u03f1',
'RightAngleBracket;': '\u27e9',
'RightArrow;': '\u2192',
'Rightarrow;': '\u21d2',
'rightarrow;': '\u2192',
'RightArrowBar;': '\u21e5',
'RightArrowLeftArrow;': '\u21c4',
'rightarrowtail;': '\u21a3',
'RightCeiling;': '\u2309',
'RightDoubleBracket;': '\u27e7',
'RightDownTeeVector;': '\u295d',
'RightDownVector;': '\u21c2',
'RightDownVectorBar;': '\u2955',
'RightFloor;': '\u230b',
'rightharpoondown;': '\u21c1',
'rightharpoonup;': '\u21c0',
'rightleftarrows;': '\u21c4',
'rightleftharpoons;': '\u21cc',
'rightrightarrows;': '\u21c9',
'rightsquigarrow;': '\u219d',
'RightTee;': '\u22a2',
'RightTeeArrow;': '\u21a6',
'RightTeeVector;': '\u295b',
'rightthreetimes;': '\u22cc',
'RightTriangle;': '\u22b3',
'RightTriangleBar;': '\u29d0',
'RightTriangleEqual;': '\u22b5',
'RightUpDownVector;': '\u294f',
'RightUpTeeVector;': '\u295c',
'RightUpVector;': '\u21be',
'RightUpVectorBar;': '\u2954',
'RightVector;': '\u21c0',
'RightVectorBar;': '\u2953',
'ring;': '\u02da',
'risingdotseq;': '\u2253',
'rlarr;': '\u21c4',
'rlhar;': '\u21cc',
'rlm;': '\u200f',
'rmoust;': '\u23b1',
'rmoustache;': '\u23b1',
'rnmid;': '\u2aee',
'roang;': '\u27ed',
'roarr;': '\u21fe',
'robrk;': '\u27e7',
'ropar;': '\u2986',
'Ropf;': '\u211d',
'ropf;': '\U0001d563',
'roplus;': '\u2a2e',
'rotimes;': '\u2a35',
'RoundImplies;': '\u2970',
'rpar;': ')',
'rpargt;': '\u2994',
'rppolint;': '\u2a12',
'rrarr;': '\u21c9',
'Rrightarrow;': '\u21db',
'rsaquo;': '\u203a',
'Rscr;': '\u211b',
'rscr;': '\U0001d4c7',
'Rsh;': '\u21b1',
'rsh;': '\u21b1',
'rsqb;': ']',
'rsquo;': '\u2019',
'rsquor;': '\u2019',
'rthree;': '\u22cc',
'rtimes;': '\u22ca',
'rtri;': '\u25b9',
'rtrie;': '\u22b5',
'rtrif;': '\u25b8',
'rtriltri;': '\u29ce',
'RuleDelayed;': '\u29f4',
'ruluhar;': '\u2968',
'rx;': '\u211e',
'Sacute;': '\u015a',
'sacute;': '\u015b',
'sbquo;': '\u201a',
'Sc;': '\u2abc',
'sc;': '\u227b',
'scap;': '\u2ab8',
'Scaron;': '\u0160',
'scaron;': '\u0161',
'sccue;': '\u227d',
'scE;': '\u2ab4',
'sce;': '\u2ab0',
'Scedil;': '\u015e',
'scedil;': '\u015f',
'Scirc;': '\u015c',
'scirc;': '\u015d',
'scnap;': '\u2aba',
'scnE;': '\u2ab6',
'scnsim;': '\u22e9',
'scpolint;': '\u2a13',
'scsim;': '\u227f',
'Scy;': '\u0421',
'scy;': '\u0441',
'sdot;': '\u22c5',
'sdotb;': '\u22a1',
'sdote;': '\u2a66',
'searhk;': '\u2925',
'seArr;': '\u21d8',
'searr;': '\u2198',
'searrow;': '\u2198',
'sect': '\xa7',
'sect;': '\xa7',
'semi;': ';',
'seswar;': '\u2929',
'setminus;': '\u2216',
'setmn;': '\u2216',
'sext;': '\u2736',
'Sfr;': '\U0001d516',
'sfr;': '\U0001d530',
'sfrown;': '\u2322',
'sharp;': '\u266f',
'SHCHcy;': '\u0429',
'shchcy;': '\u0449',
'SHcy;': '\u0428',
'shcy;': '\u0448',
'ShortDownArrow;': '\u2193',
'ShortLeftArrow;': '\u2190',
'shortmid;': '\u2223',
'shortparallel;': '\u2225',
'ShortRightArrow;': '\u2192',
'ShortUpArrow;': '\u2191',
'shy': '\xad',
'shy;': '\xad',
'Sigma;': '\u03a3',
'sigma;': '\u03c3',
'sigmaf;': '\u03c2',
'sigmav;': '\u03c2',
'sim;': '\u223c',
'simdot;': '\u2a6a',
'sime;': '\u2243',
'simeq;': '\u2243',
'simg;': '\u2a9e',
'simgE;': '\u2aa0',
'siml;': '\u2a9d',
'simlE;': '\u2a9f',
'simne;': '\u2246',
'simplus;': '\u2a24',
'simrarr;': '\u2972',
'slarr;': '\u2190',
'SmallCircle;': '\u2218',
'smallsetminus;': '\u2216',
'smashp;': '\u2a33',
'smeparsl;': '\u29e4',
'smid;': '\u2223',
'smile;': '\u2323',
'smt;': '\u2aaa',
'smte;': '\u2aac',
'smtes;': '\u2aac\ufe00',
'SOFTcy;': '\u042c',
'softcy;': '\u044c',
'sol;': '/',
'solb;': '\u29c4',
'solbar;': '\u233f',
'Sopf;': '\U0001d54a',
'sopf;': '\U0001d564',
'spades;': '\u2660',
'spadesuit;': '\u2660',
'spar;': '\u2225',
'sqcap;': '\u2293',
'sqcaps;': '\u2293\ufe00',
'sqcup;': '\u2294',
'sqcups;': '\u2294\ufe00',
'Sqrt;': '\u221a',
'sqsub;': '\u228f',
'sqsube;': '\u2291',
'sqsubset;': '\u228f',
'sqsubseteq;': '\u2291',
'sqsup;': '\u2290',
'sqsupe;': '\u2292',
'sqsupset;': '\u2290',
'sqsupseteq;': '\u2292',
'squ;': '\u25a1',
'Square;': '\u25a1',
'square;': '\u25a1',
'SquareIntersection;': '\u2293',
'SquareSubset;': '\u228f',
'SquareSubsetEqual;': '\u2291',
'SquareSuperset;': '\u2290',
'SquareSupersetEqual;': '\u2292',
'SquareUnion;': '\u2294',
'squarf;': '\u25aa',
'squf;': '\u25aa',
'srarr;': '\u2192',
'Sscr;': '\U0001d4ae',
'sscr;': '\U0001d4c8',
'ssetmn;': '\u2216',
'ssmile;': '\u2323',
'sstarf;': '\u22c6',
'Star;': '\u22c6',
'star;': '\u2606',
'starf;': '\u2605',
'straightepsilon;': '\u03f5',
'straightphi;': '\u03d5',
'strns;': '\xaf',
'Sub;': '\u22d0',
'sub;': '\u2282',
'subdot;': '\u2abd',
'subE;': '\u2ac5',
'sube;': '\u2286',
'subedot;': '\u2ac3',
'submult;': '\u2ac1',
'subnE;': '\u2acb',
'subne;': '\u228a',
'subplus;': '\u2abf',
'subrarr;': '\u2979',
'Subset;': '\u22d0',
'subset;': '\u2282',
'subseteq;': '\u2286',
'subseteqq;': '\u2ac5',
'SubsetEqual;': '\u2286',
'subsetneq;': '\u228a',
'subsetneqq;': '\u2acb',
'subsim;': '\u2ac7',
'subsub;': '\u2ad5',
'subsup;': '\u2ad3',
'succ;': '\u227b',
'succapprox;': '\u2ab8',
'succcurlyeq;': '\u227d',
'Succeeds;': '\u227b',
'SucceedsEqual;': '\u2ab0',
'SucceedsSlantEqual;': '\u227d',
'SucceedsTilde;': '\u227f',
'succeq;': '\u2ab0',
'succnapprox;': '\u2aba',
'succneqq;': '\u2ab6',
'succnsim;': '\u22e9',
'succsim;': '\u227f',
'SuchThat;': '\u220b',
'Sum;': '\u2211',
'sum;': '\u2211',
'sung;': '\u266a',
'sup1': '\xb9',
'sup1;': '\xb9',
'sup2': '\xb2',
'sup2;': '\xb2',
'sup3': '\xb3',
'sup3;': '\xb3',
'Sup;': '\u22d1',
'sup;': '\u2283',
'supdot;': '\u2abe',
'supdsub;': '\u2ad8',
'supE;': '\u2ac6',
'supe;': '\u2287',
'supedot;': '\u2ac4',
'Superset;': '\u2283',
'SupersetEqual;': '\u2287',
'suphsol;': '\u27c9',
'suphsub;': '\u2ad7',
'suplarr;': '\u297b',
'supmult;': '\u2ac2',
'supnE;': '\u2acc',
'supne;': '\u228b',
'supplus;': '\u2ac0',
'Supset;': '\u22d1',
'supset;': '\u2283',
'supseteq;': '\u2287',
'supseteqq;': '\u2ac6',
'supsetneq;': '\u228b',
'supsetneqq;': '\u2acc',
'supsim;': '\u2ac8',
'supsub;': '\u2ad4',
'supsup;': '\u2ad6',
'swarhk;': '\u2926',
'swArr;': '\u21d9',
'swarr;': '\u2199',
'swarrow;': '\u2199',
'swnwar;': '\u292a',
'szlig': '\xdf',
'szlig;': '\xdf',
'Tab;': '\t',
'target;': '\u2316',
'Tau;': '\u03a4',
'tau;': '\u03c4',
'tbrk;': '\u23b4',
'Tcaron;': '\u0164',
'tcaron;': '\u0165',
'Tcedil;': '\u0162',
'tcedil;': '\u0163',
'Tcy;': '\u0422',
'tcy;': '\u0442',
'tdot;': '\u20db',
'telrec;': '\u2315',
'Tfr;': '\U0001d517',
'tfr;': '\U0001d531',
'there4;': '\u2234',
'Therefore;': '\u2234',
'therefore;': '\u2234',
'Theta;': '\u0398',
'theta;': '\u03b8',
'thetasym;': '\u03d1',
'thetav;': '\u03d1',
'thickapprox;': '\u2248',
'thicksim;': '\u223c',
'ThickSpace;': '\u205f\u200a',
'thinsp;': '\u2009',
'ThinSpace;': '\u2009',
'thkap;': '\u2248',
'thksim;': '\u223c',
'THORN': '\xde',
'thorn': '\xfe',
'THORN;': '\xde',
'thorn;': '\xfe',
'Tilde;': '\u223c',
'tilde;': '\u02dc',
'TildeEqual;': '\u2243',
'TildeFullEqual;': '\u2245',
'TildeTilde;': '\u2248',
'times': '\xd7',
'times;': '\xd7',
'timesb;': '\u22a0',
'timesbar;': '\u2a31',
'timesd;': '\u2a30',
'tint;': '\u222d',
'toea;': '\u2928',
'top;': '\u22a4',
'topbot;': '\u2336',
'topcir;': '\u2af1',
'Topf;': '\U0001d54b',
'topf;': '\U0001d565',
'topfork;': '\u2ada',
'tosa;': '\u2929',
'tprime;': '\u2034',
'TRADE;': '\u2122',
'trade;': '\u2122',
'triangle;': '\u25b5',
'triangledown;': '\u25bf',
'triangleleft;': '\u25c3',
'trianglelefteq;': '\u22b4',
'triangleq;': '\u225c',
'triangleright;': '\u25b9',
'trianglerighteq;': '\u22b5',
'tridot;': '\u25ec',
'trie;': '\u225c',
'triminus;': '\u2a3a',
'TripleDot;': '\u20db',
'triplus;': '\u2a39',
'trisb;': '\u29cd',
'tritime;': '\u2a3b',
'trpezium;': '\u23e2',
'Tscr;': '\U0001d4af',
'tscr;': '\U0001d4c9',
'TScy;': '\u0426',
'tscy;': '\u0446',
'TSHcy;': '\u040b',
'tshcy;': '\u045b',
'Tstrok;': '\u0166',
'tstrok;': '\u0167',
'twixt;': '\u226c',
'twoheadleftarrow;': '\u219e',
'twoheadrightarrow;': '\u21a0',
'Uacute': '\xda',
'uacute': '\xfa',
'Uacute;': '\xda',
'uacute;': '\xfa',
'Uarr;': '\u219f',
'uArr;': '\u21d1',
'uarr;': '\u2191',
'Uarrocir;': '\u2949',
'Ubrcy;': '\u040e',
'ubrcy;': '\u045e',
'Ubreve;': '\u016c',
'ubreve;': '\u016d',
'Ucirc': '\xdb',
'ucirc': '\xfb',
'Ucirc;': '\xdb',
'ucirc;': '\xfb',
'Ucy;': '\u0423',
'ucy;': '\u0443',
'udarr;': '\u21c5',
'Udblac;': '\u0170',
'udblac;': '\u0171',
'udhar;': '\u296e',
'ufisht;': '\u297e',
'Ufr;': '\U0001d518',
'ufr;': '\U0001d532',
'Ugrave': '\xd9',
'ugrave': '\xf9',
'Ugrave;': '\xd9',
'ugrave;': '\xf9',
'uHar;': '\u2963',
'uharl;': '\u21bf',
'uharr;': '\u21be',
'uhblk;': '\u2580',
'ulcorn;': '\u231c',
'ulcorner;': '\u231c',
'ulcrop;': '\u230f',
'ultri;': '\u25f8',
'Umacr;': '\u016a',
'umacr;': '\u016b',
'uml': '\xa8',
'uml;': '\xa8',
'UnderBar;': '_',
'UnderBrace;': '\u23df',
'UnderBracket;': '\u23b5',
'UnderParenthesis;': '\u23dd',
'Union;': '\u22c3',
'UnionPlus;': '\u228e',
'Uogon;': '\u0172',
'uogon;': '\u0173',
'Uopf;': '\U0001d54c',
'uopf;': '\U0001d566',
'UpArrow;': '\u2191',
'Uparrow;': '\u21d1',
'uparrow;': '\u2191',
'UpArrowBar;': '\u2912',
'UpArrowDownArrow;': '\u21c5',
'UpDownArrow;': '\u2195',
'Updownarrow;': '\u21d5',
'updownarrow;': '\u2195',
'UpEquilibrium;': '\u296e',
'upharpoonleft;': '\u21bf',
'upharpoonright;': '\u21be',
'uplus;': '\u228e',
'UpperLeftArrow;': '\u2196',
'UpperRightArrow;': '\u2197',
'Upsi;': '\u03d2',
'upsi;': '\u03c5',
'upsih;': '\u03d2',
'Upsilon;': '\u03a5',
'upsilon;': '\u03c5',
'UpTee;': '\u22a5',
'UpTeeArrow;': '\u21a5',
'upuparrows;': '\u21c8',
'urcorn;': '\u231d',
'urcorner;': '\u231d',
'urcrop;': '\u230e',
'Uring;': '\u016e',
'uring;': '\u016f',
'urtri;': '\u25f9',
'Uscr;': '\U0001d4b0',
'uscr;': '\U0001d4ca',
'utdot;': '\u22f0',
'Utilde;': '\u0168',
'utilde;': '\u0169',
'utri;': '\u25b5',
'utrif;': '\u25b4',
'uuarr;': '\u21c8',
'Uuml': '\xdc',
'uuml': '\xfc',
'Uuml;': '\xdc',
'uuml;': '\xfc',
'uwangle;': '\u29a7',
'vangrt;': '\u299c',
'varepsilon;': '\u03f5',
'varkappa;': '\u03f0',
'varnothing;': '\u2205',
'varphi;': '\u03d5',
'varpi;': '\u03d6',
'varpropto;': '\u221d',
'vArr;': '\u21d5',
'varr;': '\u2195',
'varrho;': '\u03f1',
'varsigma;': '\u03c2',
'varsubsetneq;': '\u228a\ufe00',
'varsubsetneqq;': '\u2acb\ufe00',
'varsupsetneq;': '\u228b\ufe00',
'varsupsetneqq;': '\u2acc\ufe00',
'vartheta;': '\u03d1',
'vartriangleleft;': '\u22b2',
'vartriangleright;': '\u22b3',
'Vbar;': '\u2aeb',
'vBar;': '\u2ae8',
'vBarv;': '\u2ae9',
'Vcy;': '\u0412',
'vcy;': '\u0432',
'VDash;': '\u22ab',
'Vdash;': '\u22a9',
'vDash;': '\u22a8',
'vdash;': '\u22a2',
'Vdashl;': '\u2ae6',
'Vee;': '\u22c1',
'vee;': '\u2228',
'veebar;': '\u22bb',
'veeeq;': '\u225a',
'vellip;': '\u22ee',
'Verbar;': '\u2016',
'verbar;': '|',
'Vert;': '\u2016',
'vert;': '|',
'VerticalBar;': '\u2223',
'VerticalLine;': '|',
'VerticalSeparator;': '\u2758',
'VerticalTilde;': '\u2240',
'VeryThinSpace;': '\u200a',
'Vfr;': '\U0001d519',
'vfr;': '\U0001d533',
'vltri;': '\u22b2',
'vnsub;': '\u2282\u20d2',
'vnsup;': '\u2283\u20d2',
'Vopf;': '\U0001d54d',
'vopf;': '\U0001d567',
'vprop;': '\u221d',
'vrtri;': '\u22b3',
'Vscr;': '\U0001d4b1',
'vscr;': '\U0001d4cb',
'vsubnE;': '\u2acb\ufe00',
'vsubne;': '\u228a\ufe00',
'vsupnE;': '\u2acc\ufe00',
'vsupne;': '\u228b\ufe00',
'Vvdash;': '\u22aa',
'vzigzag;': '\u299a',
'Wcirc;': '\u0174',
'wcirc;': '\u0175',
'wedbar;': '\u2a5f',
'Wedge;': '\u22c0',
'wedge;': '\u2227',
'wedgeq;': '\u2259',
'weierp;': '\u2118',
'Wfr;': '\U0001d51a',
'wfr;': '\U0001d534',
'Wopf;': '\U0001d54e',
'wopf;': '\U0001d568',
'wp;': '\u2118',
'wr;': '\u2240',
'wreath;': '\u2240',
'Wscr;': '\U0001d4b2',
'wscr;': '\U0001d4cc',
'xcap;': '\u22c2',
'xcirc;': '\u25ef',
'xcup;': '\u22c3',
'xdtri;': '\u25bd',
'Xfr;': '\U0001d51b',
'xfr;': '\U0001d535',
'xhArr;': '\u27fa',
'xharr;': '\u27f7',
'Xi;': '\u039e',
'xi;': '\u03be',
'xlArr;': '\u27f8',
'xlarr;': '\u27f5',
'xmap;': '\u27fc',
'xnis;': '\u22fb',
'xodot;': '\u2a00',
'Xopf;': '\U0001d54f',
'xopf;': '\U0001d569',
'xoplus;': '\u2a01',
'xotime;': '\u2a02',
'xrArr;': '\u27f9',
'xrarr;': '\u27f6',
'Xscr;': '\U0001d4b3',
'xscr;': '\U0001d4cd',
'xsqcup;': '\u2a06',
'xuplus;': '\u2a04',
'xutri;': '\u25b3',
'xvee;': '\u22c1',
'xwedge;': '\u22c0',
'Yacute': '\xdd',
'yacute': '\xfd',
'Yacute;': '\xdd',
'yacute;': '\xfd',
'YAcy;': '\u042f',
'yacy;': '\u044f',
'Ycirc;': '\u0176',
'ycirc;': '\u0177',
'Ycy;': '\u042b',
'ycy;': '\u044b',
'yen': '\xa5',
'yen;': '\xa5',
'Yfr;': '\U0001d51c',
'yfr;': '\U0001d536',
'YIcy;': '\u0407',
'yicy;': '\u0457',
'Yopf;': '\U0001d550',
'yopf;': '\U0001d56a',
'Yscr;': '\U0001d4b4',
'yscr;': '\U0001d4ce',
'YUcy;': '\u042e',
'yucy;': '\u044e',
'yuml': '\xff',
'Yuml;': '\u0178',
'yuml;': '\xff',
'Zacute;': '\u0179',
'zacute;': '\u017a',
'Zcaron;': '\u017d',
'zcaron;': '\u017e',
'Zcy;': '\u0417',
'zcy;': '\u0437',
'Zdot;': '\u017b',
'zdot;': '\u017c',
'zeetrf;': '\u2128',
'ZeroWidthSpace;': '\u200b',
'Zeta;': '\u0396',
'zeta;': '\u03b6',
'Zfr;': '\u2128',
'zfr;': '\U0001d537',
'ZHcy;': '\u0416',
'zhcy;': '\u0436',
'zigrarr;': '\u21dd',
'Zopf;': '\u2124',
'zopf;': '\U0001d56b',
'Zscr;': '\U0001d4b5',
'zscr;': '\U0001d4cf',
'zwj;': '\u200d',
'zwnj;': '\u200c',
}
# Alias the HTTP client module across Python versions:
# Python 3 ships it as http.client, Python 2 as httplib.
try:
    import http.client as compat_http_client
except ImportError:  # Python 2
    import httplib as compat_http_client
# HTTPError moved from urllib2 (Python 2) to urllib.error (Python 3).
try:
    from urllib.error import HTTPError as compat_HTTPError
except ImportError:  # Python 2
    from urllib2 import HTTPError as compat_HTTPError
# urlretrieve moved from the urllib module (Python 2) to
# urllib.request (Python 3).
try:
    from urllib.request import urlretrieve as compat_urlretrieve
except ImportError:  # Python 2
    from urllib import urlretrieve as compat_urlretrieve
# HTMLParser class lives in html.parser on Python 3, in the
# HTMLParser module on Python 2.
try:
    from html.parser import HTMLParser as compat_HTMLParser
except ImportError:  # Python 2
    from HTMLParser import HTMLParser as compat_HTMLParser
# Provide a single compat_HTMLParseError exception type on every
# supported Python version so callers can catch one uniform class.
try:  # Python 2
    from HTMLParser import HTMLParseError as compat_HTMLParseError
except ImportError:  # Python <3.4
    try:
        from html.parser import HTMLParseError as compat_HTMLParseError
    except ImportError:  # Python >3.4
        # HTMLParseError was deprecated in Python 3.3 and removed in
        # Python 3.5. Introduce a dummy exception for Python >3.5 so
        # cross-version exception handling stays uniform.
        class compat_HTMLParseError(Exception):
            pass
# subprocess.DEVNULL exists on Python 3.3+; on older versions fall
# back to opening os.devnull for writing.
# NOTE(review): the fallback opens a new file object per call and never
# closes it — presumably acceptable for the few call sites; verify.
try:
    from subprocess import DEVNULL
    compat_subprocess_get_DEVNULL = lambda: DEVNULL
except ImportError:
    compat_subprocess_get_DEVNULL = lambda: open(os.path.devnull, 'w')
# The basic HTTP server module is http.server on Python 3,
# BaseHTTPServer on Python 2.
try:
    import http.server as compat_http_server
except ImportError:
    import BaseHTTPServer as compat_http_server
# compat_str is the native text type: unicode on Python 2
# (where the name exists), str on Python 3 (where it does not).
try:
    compat_str = unicode  # Python 2
except NameError:
    compat_str = str
try:
from urllib.parse import unquote_to_bytes as compat_urllib_parse_unquote_to_bytes
from urllib.parse import unquote as compat_urllib_parse_unquote
from urllib.parse import unquote_plus as compat_urllib_parse_unquote_plus
except ImportError: # Python 2
_asciire = (compat_urllib_parse._asciire if hasattr(compat_urllib_parse, '_asciire')
else re.compile(r'([\x00-\x7f]+)'))
# HACK: The following are the correct unquote_to_bytes, unquote and unquote_plus
# implementations from cpython 3.4.3's stdlib. Python 2's version
# is apparently broken (see https://github.com/rg3/youtube-dl/pull/6244)
def compat_urllib_parse_unquote_to_bytes(string):
"""unquote_to_bytes('abc%20def') -> b'abc def'."""
# Note: strings are encoded as UTF-8. This is only an issue if it contains
# unescaped non-ASCII characters, which URIs should not.
if not string:
# Is it a string-like object?
string.split
return b''
if isinstance(string, compat_str):
string = string.encode('utf-8')
bits = string.split(b'%')
if len(bits) == 1:
return string
res = [bits[0]]
append = res.append
for item in bits[1:]:
try:
append(compat_urllib_parse._hextochr[item[:2]])
append(item[2:])
except KeyError:
append(b'%')
append(item)
return b''.join(res)
def compat_urllib_parse_unquote(string, encoding='utf-8', errors='replace'):
"""Replace %xx escapes by their single-character equivalent. The optional
encoding and errors parameters specify how to decode percent-encoded
sequences into Unicode characters, as accepted by the bytes.decode()
method.
By default, percent-encoded sequences are decoded with UTF-8, and invalid
sequences are replaced by a placeholder character.
unquote('abc%20def') -> 'abc def'.
"""
if '%' not in string:
string.split
return string
if encoding is None:
encoding = 'utf-8'
if errors is None:
errors = 'replace'
bits = _asciire.split(string)
res = [bits[0]]
append = res.append
for i in range(1, len(bits), 2):
append(compat_urllib_parse_unquote_to_bytes(bits[i]).decode(encoding, errors))
append(bits[i + 1])
return ''.join(res)
def compat_urllib_parse_unquote_plus(string, encoding='utf-8', errors='replace'):
    """Like unquote(), but also replace plus signs by spaces, as required for
    unquoting HTML form values.

    unquote_plus('%7e/abc+def') -> '~/abc def'
    """
    return compat_urllib_parse_unquote(string.replace('+', ' '), encoding, errors)
try:
from urllib.parse import urlencode as compat_urllib_parse_urlencode
except ImportError: # Python 2
# Python 2 will choke in urlencode on mixture of byte and unicode strings.
# Possible solutions are to either port it from python 3 with all
# the friends or manually ensure input query contains only byte strings.
# We will stick with latter thus recursively encoding the whole query.
def compat_urllib_parse_urlencode(query, doseq=0, encoding='utf-8'):
    """urlencode() that tolerates mixed byte/unicode input on Python 2.

    Python 2's urlencode chokes on a mixture of byte strings and unicode
    strings, so the whole *query* is recursively encoded to byte strings
    (with *encoding*) before delegating to the real implementation.
    """
    def encode_elem(e):
        # Recursively encode dicts, lists/tuples and text; leave bytes and
        # other scalars untouched.
        if isinstance(e, dict):
            return dict((encode_elem(k), encode_elem(v)) for k, v in e.items())
        if isinstance(e, (list, tuple,)):
            encoded = [encode_elem(item) for item in e]
            return tuple(encoded) if isinstance(e, tuple) else encoded
        if isinstance(e, compat_str):
            return e.encode(encoding)
        return e
    return compat_urllib_parse.urlencode(encode_elem(query), doseq=doseq)
try:
from urllib.request import DataHandler as compat_urllib_request_DataHandler
except ImportError: # Python < 3.4
# Ported from CPython 98774:1733b3bd46db, Lib/urllib/request.py
class compat_urllib_request_DataHandler(compat_urllib_request.BaseHandler):
    """urllib handler for RFC 2397 data: URLs (backport for Python < 3.4).

    Ported from CPython 98774:1733b3bd46db, Lib/urllib/request.py.
    """
    def data_open(self, req):
        # data URLs as specified in RFC 2397.
        #
        # ignores POSTed data
        #
        # syntax:
        # dataurl := "data:" [ mediatype ] [ ";base64" ] "," data
        # mediatype := [ type "/" subtype ] *( ";" parameter )
        # data := *urlchar
        # parameter := attribute "=" value
        url = req.get_full_url()
        scheme, data = url.split(':', 1)
        mediatype, data = data.split(',', 1)
        # even base64 encoded data URLs might be quoted so unquote in any case:
        data = compat_urllib_parse_unquote_to_bytes(data)
        if mediatype.endswith(';base64'):
            data = binascii.a2b_base64(data)
            mediatype = mediatype[:-7]
        if not mediatype:
            # RFC 2397 default media type when none is given.
            mediatype = 'text/plain;charset=US-ASCII'
        headers = email.message_from_string(
            'Content-type: %s\nContent-length: %d\n' % (mediatype, len(data)))
        return compat_urllib_response.addinfourl(io.BytesIO(data), headers, url)
try:
compat_basestring = basestring # Python 2
except NameError:
compat_basestring = str
try:
compat_chr = unichr # Python 2
except NameError:
compat_chr = chr
try:
from xml.etree.ElementTree import ParseError as compat_xml_parse_error
except ImportError: # Python 2.6
from xml.parsers.expat import ExpatError as compat_xml_parse_error
etree = xml.etree.ElementTree
class _TreeBuilder(etree.TreeBuilder):
    """TreeBuilder that silently ignores DOCTYPE declarations."""
    def doctype(self, name, pubid, system):
        # Swallow the document's DOCTYPE instead of warning/erroring.
        pass
if sys.version_info[0] >= 3:
def compat_etree_fromstring(text):
    """Parse XML from *text*, ignoring any DOCTYPE declaration (Python 3 path)."""
    return etree.XML(text, parser=etree.XMLParser(target=_TreeBuilder()))
else:
# python 2.x tries to encode unicode strings with ascii (see the
# XMLParser._fixtext method)
try:
_etree_iter = etree.Element.iter
except AttributeError: # Python <=2.6
def _etree_iter(root):
for el in root.findall('*'):
yield el
for sub in _etree_iter(el):
yield sub
# on 2.6 XML doesn't have a parser argument, function copied from CPython
# 2.7 source
def _XML(text, parser=None):
    """etree.XML clone copied from CPython 2.7: Python 2.6's XML() has no
    parser argument."""
    if not parser:
        parser = etree.XMLParser(target=_TreeBuilder())
    parser.feed(text)
    return parser.close()
def _element_factory(*args, **kwargs):
el = etree.Element(*args, **kwargs)
for k, v in el.items():
if isinstance(v, bytes):
el.set(k, v.decode('utf-8'))
return el
def compat_etree_fromstring(text):
    """Parse XML on Python 2, decoding byte-string text/attributes to unicode."""
    doc = _XML(text, parser=etree.XMLParser(target=_TreeBuilder(element_factory=_element_factory)))
    for el in _etree_iter(doc):
        # Attribute values are normalised by _element_factory; fix text here.
        if el.text is not None and isinstance(el.text, bytes):
            el.text = el.text.decode('utf-8')
    return doc
if hasattr(etree, 'register_namespace'):
compat_etree_register_namespace = etree.register_namespace
else:
def compat_etree_register_namespace(prefix, uri):
    """Register a namespace prefix.
    The registry is global, and any existing mapping for either the
    given prefix or the namespace URI will be removed.
    *prefix* is the namespace prefix, *uri* is a namespace uri. Tags and
    attributes in this namespace will be serialized with prefix if possible.
    ValueError is raised if prefix is reserved or is invalid.
    """
    if re.match(r"ns\d+$", prefix):
        # 'ns0', 'ns1', ... are generated internally by ElementTree.
        raise ValueError("Prefix format reserved for internal use")
    for k, v in list(etree._namespace_map.items()):
        if k == uri or v == prefix:
            del etree._namespace_map[k]
    etree._namespace_map[uri] = prefix
if sys.version_info < (2, 7):
# Here comes the crazy part: In 2.6, if the xpath is a unicode,
# .//node does not match if a node is a direct child of . !
def compat_xpath(xpath):
    """Force xpaths to byte strings on Python 2.6, where a unicode './/node'
    does not match direct children of '.'."""
    if isinstance(xpath, compat_str):
        xpath = xpath.encode('ascii')
    return xpath
else:
compat_xpath = lambda xpath: xpath
try:
from urllib.parse import parse_qs as compat_parse_qs
except ImportError: # Python 2
# HACK: The following is the correct parse_qs implementation from cpython 3's stdlib.
# Python 2's version is apparently totally broken
def _parse_qsl(qs, keep_blank_values=False, strict_parsing=False,
               encoding='utf-8', errors='replace'):
    """Parse a query string into a list of (name, value) pairs.

    Port of CPython 3's parse_qsl helper; Python 2's version is broken.
    """
    qs, _coerce_result = qs, compat_str
    # Field pairs may be separated by either '&' or ';'.
    pairs = [s2 for s1 in qs.split('&') for s2 in s1.split(';')]
    r = []
    for name_value in pairs:
        if not name_value and not strict_parsing:
            continue
        nv = name_value.split('=', 1)
        if len(nv) != 2:
            if strict_parsing:
                raise ValueError('bad query field: %r' % (name_value,))
            # Handle case of a control-name with no equal sign
            if keep_blank_values:
                nv.append('')
            else:
                continue
        if len(nv[1]) or keep_blank_values:
            # '+' means space in form encoding; decode before unquoting.
            name = nv[0].replace('+', ' ')
            name = compat_urllib_parse_unquote(
                name, encoding=encoding, errors=errors)
            name = _coerce_result(name)
            value = nv[1].replace('+', ' ')
            value = compat_urllib_parse_unquote(
                value, encoding=encoding, errors=errors)
            value = _coerce_result(value)
            r.append((name, value))
    return r
def compat_parse_qs(qs, keep_blank_values=False, strict_parsing=False,
                    encoding='utf-8', errors='replace'):
    """Parse a query string into a dict mapping each name to a list of values."""
    parsed = {}
    for field, field_value in _parse_qsl(qs, keep_blank_values, strict_parsing,
                                         encoding=encoding, errors=errors):
        # Repeated names accumulate their values in order of appearance.
        parsed.setdefault(field, []).append(field_value)
    return parsed
compat_os_name = os._name if os.name == 'java' else os.name
if compat_os_name == 'nt':
def compat_shlex_quote(s):
    """Quote *s* for use on a Windows command line.

    Plain identifier-like strings are returned unchanged; anything else is
    wrapped in double quotes with embedded double quotes backslash-escaped.

    NOTE(review): a trailing backslash before the closing quote is not
    doubled, which some Windows argv parsers may misread — confirm callers
    never pass such values.
    """
    # Use \Z rather than $: '$' also matches just before a trailing newline,
    # so the old pattern let strings like 'a\n' through UNQUOTED.
    if re.match(r'^[-_\w./]+\Z', s):
        return s
    return '"%s"' % s.replace('"', '\\"')
else:
try:
from shlex import quote as compat_shlex_quote
except ImportError: # Python < 3.3
def compat_shlex_quote(s):
    """shlex.quote fallback for POSIX shells on Python < 3.3.

    Safe strings are returned untouched; everything else is wrapped in
    single quotes, with each embedded single quote emitted as '"'"'.
    """
    # Use \Z rather than $: '$' also matches just before a trailing newline,
    # so the old pattern let strings like 'a\n' through UNQUOTED.
    if re.match(r'^[-_\w./]+\Z', s):
        return s
    return "'" + s.replace("'", "'\"'\"'") + "'"
try:
args = shlex.split('中文')
assert (isinstance(args, list) and
isinstance(args[0], compat_str) and
args[0] == '中文')
compat_shlex_split = shlex.split
except (AssertionError, UnicodeEncodeError):
# Working around shlex issue with unicode strings on some python 2
# versions (see http://bugs.python.org/issue1548891)
def compat_shlex_split(s, comments=False, posix=True):
    """shlex.split that accepts unicode input on affected Python 2 versions
    (see http://bugs.python.org/issue1548891)."""
    if isinstance(s, compat_str):
        s = s.encode('utf-8')
    return list(map(lambda s: s.decode('utf-8'), shlex.split(s, comments, posix)))
def compat_ord(c):
    """Return the integer value of *c*, whether it is already an int
    (a Python 3 bytes element) or a length-1 string/bytes."""
    # 'type(c) is int' (not isinstance) is deliberate: it matches the
    # original behaviour exactly, including for bool.
    return c if type(c) is int else ord(c)
if sys.version_info >= (3, 0):
compat_getenv = os.getenv
compat_expanduser = os.path.expanduser
def compat_setenv(key, value, env=os.environ):
    """Set *key* to *value* in *env* (os.environ by default)."""
    env[key] = value
else:
# Environment variables should be decoded with filesystem encoding.
# Otherwise it will fail if any non-ASCII characters present (see #3854 #3217 #2918)
def compat_getenv(key, default=None):
    """os.getenv that decodes values with the filesystem encoding (Python 2),
    so non-ASCII values do not break (see #3854 #3217 #2918)."""
    from .utils import get_filesystem_encoding
    env = os.getenv(key, default)
    if env:
        env = env.decode(get_filesystem_encoding())
    return env
def compat_setenv(key, value, env=os.environ):
    """Set *key* to *value* in *env*, encoding text with the filesystem
    encoding first (Python 2)."""
    def encode(v):
        from .utils import get_filesystem_encoding
        return v.encode(get_filesystem_encoding()) if isinstance(v, compat_str) else v
    env[encode(key)] = encode(value)
# HACK: The default implementations of os.path.expanduser from cpython do not decode
# environment variables with filesystem encoding. We will work around this by
# providing adjusted implementations.
# The following are os.path.expanduser implementations from cpython 2.7.8 stdlib
# for different platforms with correct environment variables decoding.
if compat_os_name == 'posix':
def compat_expanduser(path):
    """Expand ~ and ~user constructions. If user or $HOME is unknown,
    do nothing.

    Copy of the CPython 2.7.8 posixpath implementation, but reading the
    environment through compat_getenv so non-ASCII values decode correctly.
    """
    if not path.startswith('~'):
        return path
    # i is the end of the '~user' part (first '/' or end of string).
    i = path.find('/', 1)
    if i < 0:
        i = len(path)
    if i == 1:
        if 'HOME' not in os.environ:
            import pwd
            userhome = pwd.getpwuid(os.getuid()).pw_dir
        else:
            userhome = compat_getenv('HOME')
    else:
        import pwd
        try:
            pwent = pwd.getpwnam(path[1:i])
        except KeyError:
            # Unknown user: leave the path untouched.
            return path
        userhome = pwent.pw_dir
    userhome = userhome.rstrip('/')
    return (userhome + path[i:]) or '/'
elif compat_os_name in ('nt', 'ce'):
def compat_expanduser(path):
    """Expand ~ and ~user constructs.
    If user or $HOME is unknown, do nothing.

    Copy of the CPython 2.7.8 ntpath implementation, but reading the
    environment through compat_getenv so non-ASCII values decode correctly.
    """
    if path[:1] != '~':
        return path
    # Scan past the '~user' part (up to the first path separator).
    i, n = 1, len(path)
    while i < n and path[i] not in '/\\':
        i = i + 1
    if 'HOME' in os.environ:
        userhome = compat_getenv('HOME')
    elif 'USERPROFILE' in os.environ:
        userhome = compat_getenv('USERPROFILE')
    elif 'HOMEPATH' not in os.environ:
        return path
    else:
        try:
            drive = compat_getenv('HOMEDRIVE')
        except KeyError:
            drive = ''
        userhome = os.path.join(drive, compat_getenv('HOMEPATH'))
    if i != 1:  # ~user
        userhome = os.path.join(os.path.dirname(userhome), path[1:i])
    return userhome + path[i:]
else:
compat_expanduser = os.path.expanduser
if sys.version_info < (3, 0):
def compat_print(s):
    """print() that encodes with the preferred encoding (Python 2)."""
    from .utils import preferredencoding
    print(s.encode(preferredencoding(), 'xmlcharrefreplace'))
else:
def compat_print(s):
    """print() wrapper that insists on text input (Python 3)."""
    assert isinstance(s, compat_str)
    print(s)
if sys.version_info < (3, 0) and sys.platform == 'win32':
def compat_getpass(prompt, *args, **kwargs):
    """getpass.getpass that encodes unicode prompts on Windows Python 2."""
    if isinstance(prompt, compat_str):
        from .utils import preferredencoding
        prompt = prompt.encode(preferredencoding())
    return getpass.getpass(prompt, *args, **kwargs)
else:
compat_getpass = getpass.getpass
try:
compat_input = raw_input
except NameError: # Python 3
compat_input = input
# Python < 2.6.5 require kwargs to be bytes
try:
def _testfunc(x):
pass
_testfunc(**{'x': 0})
except TypeError:
def compat_kwargs(kwargs):
    """Encode keyword-argument names to bytes (Python < 2.6.5 requires it)."""
    return dict((bytes(k), v) for k, v in kwargs.items())
else:
compat_kwargs = lambda kwargs: kwargs
try:
compat_numeric_types = (int, float, long, complex)
except NameError: # Python 3
compat_numeric_types = (int, float, complex)
if sys.version_info < (2, 7):
def compat_socket_create_connection(address, timeout, source_address=None):
    """socket.create_connection backport for Python < 2.7.

    Tries each address returned by getaddrinfo() in turn and returns the
    first socket that connects; re-raises the last socket.error on failure.
    """
    host, port = address
    err = None
    for res in socket.getaddrinfo(host, port, 0, socket.SOCK_STREAM):
        af, socktype, proto, canonname, sa = res
        sock = None
        try:
            sock = socket.socket(af, socktype, proto)
            sock.settimeout(timeout)
            if source_address:
                sock.bind(source_address)
            sock.connect(sa)
            return sock
        except socket.error as _:
            err = _
            if sock is not None:
                sock.close()
    if err is not None:
        raise err
    else:
        raise socket.error('getaddrinfo returns an empty list')
else:
compat_socket_create_connection = socket.create_connection
# Fix https://github.com/rg3/youtube-dl/issues/4223
# See http://bugs.python.org/issue9161 for what is broken
def workaround_optparse_bug9161():
    """Work around http://bugs.python.org/issue9161 (see youtube-dl #4223).

    Old optparse versions reject unicode option strings; if adding a probe
    option fails with TypeError, monkey-patch OptionGroup.add_option so all
    string arguments are ASCII-encoded first.
    """
    op = optparse.OptionParser()
    og = optparse.OptionGroup(op, 'foo')
    try:
        og.add_option('-t')
    except TypeError:
        real_add_option = optparse.OptionGroup.add_option

        def _compat_add_option(self, *args, **kwargs):
            enc = lambda v: (
                v.encode('ascii', 'replace') if isinstance(v, compat_str)
                else v)
            bargs = [enc(a) for a in args]
            bkwargs = dict(
                (k, enc(v)) for k, v in kwargs.items())
            return real_add_option(self, *bargs, **bkwargs)
        optparse.OptionGroup.add_option = _compat_add_option
if hasattr(shutil, 'get_terminal_size'): # Python >= 3.3
compat_get_terminal_size = shutil.get_terminal_size
else:
_terminal_size = collections.namedtuple('terminal_size', ['columns', 'lines'])
def compat_get_terminal_size(fallback=(80, 24)):
    """shutil.get_terminal_size() replacement for Python < 3.3.

    Prefers the COLUMNS/LINES environment variables, then `stty size`,
    then *fallback*.
    """
    columns = compat_getenv('COLUMNS')
    if columns:
        columns = int(columns)
    else:
        columns = None
    lines = compat_getenv('LINES')
    if lines:
        lines = int(lines)
    else:
        lines = None
    if columns is None or lines is None or columns <= 0 or lines <= 0:
        try:
            # `stty size` prints "rows cols".
            sp = subprocess.Popen(
                ['stty', 'size'],
                stdout=subprocess.PIPE, stderr=subprocess.PIPE)
            out, err = sp.communicate()
            _lines, _columns = map(int, out.split())
        except Exception:
            _columns, _lines = _terminal_size(*fallback)
        # Environment variables win when they are valid.
        if columns is None or columns <= 0:
            columns = _columns
        if lines is None or lines <= 0:
            lines = _lines
    return _terminal_size(columns, lines)
try:
itertools.count(start=0, step=1)
compat_itertools_count = itertools.count
except TypeError: # Python 2.6
def compat_itertools_count(start=0, step=1):
    """itertools.count() accepting keyword arguments (Python 2.6 fallback)."""
    value = start
    while True:
        yield value
        value += step
if sys.version_info >= (3, 0):
from tokenize import tokenize as compat_tokenize_tokenize
else:
from tokenize import generate_tokens as compat_tokenize_tokenize
try:
struct.pack('!I', 0)
except TypeError:
# In Python 2.6 and 2.7.x < 2.7.7, struct requires a bytes argument
# See https://bugs.python.org/issue19099
def compat_struct_pack(spec, *args):
    """struct.pack that tolerates a unicode format string
    (Python 2.6 / < 2.7.7, see https://bugs.python.org/issue19099)."""
    if isinstance(spec, compat_str):
        spec = spec.encode('ascii')
    return struct.pack(spec, *args)
def compat_struct_unpack(spec, *args):
    """struct.unpack that tolerates a unicode format string
    (Python 2.6 / < 2.7.7, see https://bugs.python.org/issue19099)."""
    if isinstance(spec, compat_str):
        spec = spec.encode('ascii')
    return struct.unpack(spec, *args)
else:
compat_struct_pack = struct.pack
compat_struct_unpack = struct.unpack
try:
from future_builtins import zip as compat_zip
except ImportError: # not 2.6+ or is 3.x
try:
from itertools import izip as compat_zip # < 2.5 or 3.x
except ImportError:
compat_zip = zip
__all__ = [
'compat_HTMLParseError',
'compat_HTMLParser',
'compat_HTTPError',
'compat_basestring',
'compat_chr',
'compat_cookiejar',
'compat_cookies',
'compat_etree_fromstring',
'compat_etree_register_namespace',
'compat_expanduser',
'compat_get_terminal_size',
'compat_getenv',
'compat_getpass',
'compat_html_entities',
'compat_html_entities_html5',
'compat_http_client',
'compat_http_server',
'compat_input',
'compat_itertools_count',
'compat_kwargs',
'compat_numeric_types',
'compat_ord',
'compat_os_name',
'compat_parse_qs',
'compat_print',
'compat_setenv',
'compat_shlex_quote',
'compat_shlex_split',
'compat_socket_create_connection',
'compat_str',
'compat_struct_pack',
'compat_struct_unpack',
'compat_subprocess_get_DEVNULL',
'compat_tokenize_tokenize',
'compat_urllib_error',
'compat_urllib_parse',
'compat_urllib_parse_unquote',
'compat_urllib_parse_unquote_plus',
'compat_urllib_parse_unquote_to_bytes',
'compat_urllib_parse_urlencode',
'compat_urllib_parse_urlparse',
'compat_urllib_request',
'compat_urllib_request_DataHandler',
'compat_urllib_response',
'compat_urlparse',
'compat_urlretrieve',
'compat_xml_parse_error',
'compat_xpath',
'compat_zip',
'workaround_optparse_bug9161',
]
|
phihag/youtube-dl
|
youtube_dl/compat.py
|
Python
|
unlicense
| 90,595
|
[
"Bowtie"
] |
664e6e37ddc2c9b857ec9b60f12794fcd46fedc16db5272bad3507cabfa44bfd
|
# Deep-learning demo in Python 3
# Notes: Python 2's xrange is named range in Python 3,
# and print requires parentheses ().
from numpy import exp, array, random, dot
class NeuralNetwork():
    """A minimal single-neuron network trained by error-weighted gradient steps."""

    def __init__(self):
        # Fixed seed so every run produces identical weights.
        random.seed(1)
        # One neuron with 3 inputs and 1 output: a 3x1 weight matrix with
        # values drawn uniformly from [-1, 1) (mean 0).
        self.synaptic_weights = 2 * random.random((3, 1)) - 1

    def __sigmoid(self, x):
        """Logistic activation: squash x (an S-shaped curve) into (0, 1)."""
        return 1 / (1 + exp(-x))

    def __sigmoid_derivative(self, x):
        """Gradient of the sigmoid, expressed in terms of its output x;
        indicates how confident the existing weight is."""
        return x * (1 - x)

    def train(self, training_set_inputs, training_set_outputs, number_of_training_iterations):
        """Fit the weights by repeated forward pass and trial-and-error update."""
        for _ in range(number_of_training_iterations):
            # Forward pass through the single neuron.
            prediction = self.think(training_set_inputs)
            # Difference between the desired and the predicted output.
            error = training_set_outputs - prediction
            # Scale by the input and by the sigmoid gradient: less confident
            # weights move more, and zero inputs cause no change.
            delta = dot(training_set_inputs.T, error * self.__sigmoid_derivative(prediction))
            self.synaptic_weights += delta

    def think(self, inputs):
        """Forward pass: weighted sum of the inputs through the sigmoid."""
        return self.__sigmoid(dot(inputs, self.synaptic_weights))
if __name__ == "__main__":
    # Initialise a single-neuron neural network.
    neural_network = NeuralNetwork()
    print ("Random starting synaptic weights: ")
    print (neural_network.synaptic_weights)
    # The training set. We have 4 examples, each consisting of 3 input values
    # and 1 output value.
    training_set_inputs = array([[0, 0, 1], [1, 1, 1], [1, 0, 1], [0, 1, 1]])
    training_set_outputs = array([[0, 1, 1, 0]]).T
    # Train the neural network using a training set.
    # Do it 10,000 times and make small adjustments each time.
    neural_network.train(training_set_inputs, training_set_outputs, 10000)
    print ("New synaptic weights after training: ")
    print (neural_network.synaptic_weights)
    # Test the neural network with a new, unseen situation.
    print ("Considering new situation [1, 0, 0] -> ?: ")
    print (neural_network.think(array([1, 0, 0])))
|
bluewitch/Code-Blue-Python
|
NeuralNetworkDemo.py
|
Python
|
mit
| 3,145
|
[
"NEURON"
] |
8316f0d547ab9007a2ac1367120d18aeed2ee7c23fda64165e14b0acd9d02e3b
|
"""MDTraj: A modern, open library for the analysis of molecular dynamics trajectories
MDTraj is a python library that allows users to manipulate molecular dynamics
(MD) trajectories and perform a variety of analyses, including fast RMSD,
solvent accessible surface area, hydrogen bonding, etc. A highlight of MDTraj
is the wide variety of molecular dynamics trajectory file formats which are
supported, including RCSB pdb, GROMACS xtc, tng, and trr, CHARMM / NAMD dcd, AMBER
binpos, AMBER NetCDF, AMBER mdcrd, TINKER arc and MDTraj HDF5.
"""
from __future__ import print_function, absolute_import
import sys
from glob import glob
DOCLINES = __doc__.split("\n")
from setuptools import setup, Extension, find_packages
sys.path.insert(0, '.')
from basesetup import (write_version_py, build_ext,
StaticLibrary, CompilerDetection, parse_setuppy_commands)
try:
# add an optional --disable-openmp to disable OpenMP support
sys.argv.remove('--disable-openmp')
disable_openmp = True
except ValueError:
disable_openmp = False
##########################
VERSION = "1.9.6"
ISRELEASED = True
__version__ = VERSION
##########################
CLASSIFIERS = """\
Development Status :: 5 - Production/Stable
Intended Audience :: Science/Research
Intended Audience :: Developers
License :: OSI Approved :: GNU Lesser General Public License v2 or later (LGPLv2+)
Programming Language :: C
Programming Language :: Python
Programming Language :: Python :: 3
Topic :: Scientific/Engineering :: Bio-Informatics
Topic :: Scientific/Engineering :: Chemistry
Operating System :: Microsoft :: Windows
Operating System :: POSIX
Operating System :: Unix
Operating System :: MacOS
"""
# Global info about compiler
compiler = CompilerDetection(disable_openmp)
compiler.initialize()
extra_cpp_libraries = []
if sys.platform == 'win32':
extra_cpp_libraries.append('Ws2_32')
# For determining if a path is relative (for dtr)
extra_cpp_libraries.append('Shlwapi')
################################################################################
# Declaration of the compiled extension modules (cython + c)
################################################################################
def format_extensions():
    """Build the Cython/C Extension objects for the trajectory file formats.

    Returns the list [xtc, trr, tng, dcd, binpos, dtr].
    """
    compiler_args = compiler.compiler_args_warn
    xtc = Extension('mdtraj.formats.xtc',
                    sources=['mdtraj/formats/xtc/src/xdrfile.c',
                             'mdtraj/formats/xtc/src/xdr_seek.c',
                             'mdtraj/formats/xtc/src/xdrfile_xtc.c',
                             'mdtraj/formats/xtc/xtc.pyx',
                             ],
                    include_dirs=['mdtraj/formats/xtc/include/',
                                  'mdtraj/formats/xtc/'],
                    extra_compile_args=compiler_args)
    trr = Extension('mdtraj.formats.trr',
                    sources=['mdtraj/formats/xtc/src/xdrfile.c',
                             'mdtraj/formats/xtc/src/xdr_seek.c',
                             'mdtraj/formats/xtc/src/xdrfile_trr.c',
                             'mdtraj/formats/xtc/trr.pyx'],
                    include_dirs=['mdtraj/formats/xtc/include/',
                                  'mdtraj/formats/xtc/'],
                    extra_compile_args=compiler_args)
    zlib_include_dirs = []
    zlib_library_dirs = []
    if sys.platform == 'win32':
        # Conda puts the zlib headers in ./Library/... on windows
        # If you're not using conda, good luck!
        zlib_include_dirs += ["{}/Library/include".format(sys.prefix)]
        zlib_library_dirs += ["{}/Library/lib".format(sys.prefix)]
    else:
        # On linux (and mac(?)) these paths should work for a standard
        # install of python+zlib or a conda install of python+zlib
        zlib_include_dirs += ["{}/include".format(sys.prefix)]
        zlib_library_dirs += ["{}/lib".format(sys.prefix)]
    tng = Extension('mdtraj.formats.tng',
                    sources=glob('mdtraj/formats/tng/src/compression/*.c') +
                            ['mdtraj/formats/tng/src/lib/tng_io.c',
                             'mdtraj/formats/tng/src/lib/md5.c',
                             'mdtraj/formats/tng/tng.pyx'],
                    include_dirs=['mdtraj/formats/tng/include']
                                 + zlib_include_dirs,
                    define_macros=[('USE_ZLIB', 1)],
                    library_dirs=zlib_library_dirs,
                    libraries=['z'],
                    )
    dcd = Extension('mdtraj.formats.dcd',
                    sources=['mdtraj/formats/dcd/src/dcdplugin.c',
                             'mdtraj/formats/dcd/dcd.pyx'],
                    include_dirs=["mdtraj/formats/dcd/include/",
                                  'mdtraj/formats/dcd/'],
                    extra_compile_args=compiler_args)
    binpos = Extension('mdtraj.formats.binpos',
                       sources=['mdtraj/formats/binpos/src/binposplugin.c',
                                'mdtraj/formats/binpos/binpos.pyx'],
                       include_dirs=['mdtraj/formats/binpos/include/',
                                     'mdtraj/formats/binpos/'],
                       extra_compile_args=compiler_args)
    dtr = Extension('mdtraj.formats.dtr',
                    sources=['mdtraj/formats/dtr/src/dtrplugin.cxx',
                             'mdtraj/formats/dtr/dtr.pyx'],
                    include_dirs=['mdtraj/formats/dtr/include/',
                                  'mdtraj/formats/dtr/'],
                    define_macros=[('DESRES_READ_TIMESTEP2', 1)],
                    language='c++',
                    extra_compile_args=compiler_args,
                    libraries=extra_cpp_libraries)
    return [xtc, trr, tng, dcd, binpos, dtr]
def rmsd_extensions():
    """Build the RMSD-related extensions and their shared static library.

    Returns the tuple (rmsd, lprmsd, libtheobald).
    """
    compiler_args = (compiler.compiler_args_openmp + compiler.compiler_args_sse2 +
                     compiler.compiler_args_sse3 + compiler.compiler_args_opt +
                     compiler.compiler_args_warn)
    compiler_libraries = compiler.compiler_libraries_openmp
    libtheobald = StaticLibrary(
        'mdtraj.core.lib.libtheobald',
        sources=[
            'mdtraj/rmsd/src/theobald_rmsd.cpp',
            'mdtraj/rmsd/src/center.cpp'],
        include_dirs=[
            'mdtraj/rmsd/include'],
        export_include=['mdtraj/rmsd/include/theobald_rmsd.h',
                        'mdtraj/rmsd/include/center.h'],
        language="c++",
        # don't enable OpenMP
        extra_compile_args=(compiler.compiler_args_sse2 +
                            compiler.compiler_args_sse3 +
                            compiler.compiler_args_opt))
    rmsd = Extension('mdtraj._rmsd',
                     sources=[
                         'mdtraj/rmsd/src/theobald_rmsd.cpp',
                         'mdtraj/rmsd/src/rotation.cpp',
                         'mdtraj/rmsd/src/center.cpp',
                         'mdtraj/rmsd/_rmsd.pyx'],
                     include_dirs=['mdtraj/rmsd/include'],
                     extra_compile_args=compiler_args,
                     libraries=compiler_libraries,
                     language="c++")
    lprmsd = Extension('mdtraj._lprmsd',
                       sources=[
                           'mdtraj/rmsd/src/theobald_rmsd.cpp',
                           'mdtraj/rmsd/src/rotation.cpp',
                           'mdtraj/rmsd/src/center.cpp',
                           'mdtraj/rmsd/src/fancy_index.cpp',
                           'mdtraj/rmsd/src/Munkres.cpp',
                           'mdtraj/rmsd/src/euclidean_permutation.cpp',
                           'mdtraj/rmsd/_lprmsd.pyx'],
                       language='c++',
                       include_dirs=['mdtraj/rmsd/include'],
                       extra_compile_args=compiler_args,
                       libraries=compiler_libraries + extra_cpp_libraries)
    return rmsd, lprmsd, libtheobald
def geometry_extensions():
    """Build the geometry extension modules (_geometry, drid, neighbors,
    neighborlist)."""
    # NOTE(review): initialize() was already called at module level right
    # after CompilerDetection(...) — this second call looks redundant; confirm
    # whether it is needed before removing.
    compiler.initialize()
    compiler_args = (
        compiler.compiler_args_openmp +
        compiler.compiler_args_sse2 + compiler.compiler_args_sse3 +
        compiler.compiler_args_opt + compiler.compiler_args_warn)
    define_macros = None
    compiler_libraries = compiler.compiler_libraries_openmp + extra_cpp_libraries
    return [
        Extension('mdtraj.geometry._geometry',
                  sources=['mdtraj/geometry/src/sasa.cpp',
                           'mdtraj/geometry/src/dssp.cpp',
                           'mdtraj/geometry/src/geometry.cpp',
                           'mdtraj/geometry/src/_geometry.pyx',],
                  include_dirs=['mdtraj/geometry/include',
                                'mdtraj/geometry/src/kernels'],
                  depends=['mdtraj/geometry/src/kernels/anglekernels.h',
                           'mdtraj/geometry/src/kernels/dihedralkernels.h',
                           'mdtraj/geometry/src/kernels/distancekernels.h'],
                  define_macros=define_macros,
                  extra_compile_args=compiler_args,
                  libraries=compiler_libraries,
                  language='c++'),
        Extension('mdtraj.geometry.drid',
                  sources=["mdtraj/geometry/drid.pyx",
                           "mdtraj/geometry/src/dridkernels.cpp",
                           "mdtraj/geometry/src/moments.cpp"],
                  include_dirs=["mdtraj/geometry/include"],
                  define_macros=define_macros,
                  extra_compile_args=compiler_args,
                  libraries=compiler_libraries,
                  language='c++'),
        Extension('mdtraj.geometry.neighbors',
                  sources=["mdtraj/geometry/neighbors.pyx",
                           "mdtraj/geometry/src/neighbors.cpp"],
                  include_dirs=["mdtraj/geometry/include",],
                  define_macros=define_macros,
                  extra_compile_args=compiler_args,
                  libraries=compiler_libraries,
                  language='c++'),
        Extension('mdtraj.geometry.neighborlist',
                  sources=["mdtraj/geometry/neighborlist.pyx",
                           "mdtraj/geometry/src/neighborlist.cpp"],
                  include_dirs=["mdtraj/geometry/include",],
                  define_macros=define_macros,
                  extra_compile_args=compiler_args,
                  libraries=compiler_libraries,
                  language='c++'),
    ]
write_version_py(VERSION, ISRELEASED, 'mdtraj/version.py')
metadata = \
dict(name='mdtraj',
author='Robert McGibbon',
author_email='rmcgibbo@gmail.com',
description=DOCLINES[0],
long_description="\n".join(DOCLINES[2:]),
version=__version__,
license='LGPLv2.1+',
url='http://mdtraj.org',
download_url = "https://github.com/rmcgibbo/mdtraj/releases/latest",
platforms=['Linux', 'Mac OS-X', 'Unix', 'Windows'],
classifiers=CLASSIFIERS.splitlines(),
packages=find_packages(),
cmdclass={'build_ext': build_ext},
install_requires=['numpy>=1.6',
'scipy',
'astunparse',
'pyparsing',
],
package_data={'mdtraj.formats.pdb': ['data/*'], },
zip_safe=False,
entry_points={'console_scripts':
['mdconvert = mdtraj.scripts.mdconvert:entry_point',
'mdinspect = mdtraj.scripts.mdinspect:entry_point']},
)
if __name__ == '__main__':
    # Don't build extensions if we are just running a non-build command -
    # non-build actions are required to succeed without NumPy, for example
    # when pip is used to install Scipy when NumPy is not yet present in the
    # system.
    run_build = parse_setuppy_commands()
    if run_build:
        extensions = format_extensions()
        extensions.extend(rmsd_extensions())
        extensions.extend(geometry_extensions())
        # most extensions use numpy, add headers for it.
        try:
            import Cython as _c
            from Cython.Build import cythonize
            # Compare version components numerically: the previous string
            # comparison (_c.__version__ < '0.29') orders lexicographically,
            # which mis-ranks multi-digit components (e.g. '0.100' < '0.29').
            import re as _re
            _m = _re.match(r'(\d+)\.(\d+)', _c.__version__)
            if _m is None or tuple(int(g) for g in _m.groups()) < (0, 29):
                raise ImportError("Too old")
        except ImportError as e:
            print('mdtrajs setup depends on Cython (>=0.29). Install it prior invoking setup.py')
            print(e)
            sys.exit(1)
        try:
            import numpy as np
        except ImportError:
            print('mdtrajs setup depends on NumPy. Install it prior invoking setup.py')
            sys.exit(1)
        for e in extensions:
            e.include_dirs.append(np.get_include())
        metadata['ext_modules'] = cythonize(extensions, language_level=sys.version_info[0])
    setup(**metadata)
|
dwhswenson/mdtraj
|
setup.py
|
Python
|
lgpl-2.1
| 12,458
|
[
"Amber",
"CHARMM",
"Gromacs",
"MDTraj",
"NAMD",
"NetCDF",
"TINKER"
] |
bc62426570250cababc492aa92d10edf16ea93d7d1885adcd799da0d5ccf90f0
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.conf import settings
from django.conf.urls import include, url
from django.conf.urls.static import static
from django.contrib import admin
from django.views.generic import TemplateView
from django.views import defaults as default_views
from ajax_select import urls as ajax_select_urls
from arividam.siteconfig.views import RedirectUserView, SiteListView
from arividam.djangocms_news.views import check_promoted, promote_news
# URL routing for the project: app includes first, the django-cms catch-all
# last so it cannot shadow any explicit route.
urlpatterns = [
    #url(r'^$', TemplateView.as_view(template_name='pages/home.html'), name='home'),
    #url(r'^about/$', TemplateView.as_view(template_name='pages/about.html'), name='about'),
    url(r'^list-schools/$', SiteListView.as_view(), name='list-schools'),
    # Django Admin, use {% url 'admin:index' %}
    url(settings.ADMIN_URL, include(admin.site.urls)),
    # User management
    url(r'^users/', include('arividam.users.urls', namespace='users')),
    url(r'^accounts/', include('allauth.urls')),
    # AJAX autocomplete endpoints (django-ajax-selects).
    url(r'^ajax_select/', include(ajax_select_urls)),
    url(r'^messages/', include('postman.urls', namespace='postman', app_name='postman')),
    url(r'inbox/notifications/', include('notifications.urls', namespace='notifications')),
    url(r'^dashboard/', include('arividam.dashboard.urls', namespace='dashboard')),
    # Sends logged-in users to their role-appropriate landing page.
    url(r'^redirect/', RedirectUserView.as_view()),
    url(r'^filer/', include('filer.urls')),
    url(r'^check_promoted/(\d+)/$', check_promoted),
    url(r'^promote_news/(\d+)/$', promote_news)
    # url(r'^notifications/', include('arividam.notifications.urls', namespace='notifications')),
    # Your stuff: custom urls includes go here
] + static(settings.MEDIA_URL, document_root=settings.MEDIA_ROOT)
if settings.DEBUG:
    import debug_toolbar
    # This allows the error pages to be debugged during development, just visit
    # these url in browser to see how these error pages look like.
    urlpatterns += [
        url(r'^400/$', default_views.bad_request, kwargs={'exception': Exception('Bad Request!')}),
        url(r'^403/$', default_views.permission_denied, kwargs={'exception': Exception('Permission Denied')}),
        url(r'^404/$', default_views.page_not_found, kwargs={'exception': Exception('Page not Found')}),
        url(r'^500/$', default_views.server_error),
        url(r'^__debug__/', include(debug_toolbar.urls)),
    ]
#CMS catch-all patterns should be at the last
urlpatterns = urlpatterns + [
    url(r'^', include('cms.urls')),
]
|
c4sc/arividam
|
config/urls.py
|
Python
|
mit
| 2,517
|
[
"VisIt"
] |
870c32c51dc5dd9da31c04969cc6a5d38707bce0a7e0acad35340b30303ee753
|
from __future__ import unicode_literals
from frappe import _
def get_data():
    """Return the Support module's workspace layout: a list of cards, each
    with a translated label and its items."""
    def doctype_item(name, description=None, **attrs):
        # Standard doctype entry; keys keep the conventional insertion order
        # (type, name, description, extras).
        entry = {"type": "doctype", "name": name}
        if description is not None:
            entry["description"] = description
        entry.update(attrs)
        return entry

    return [
        {
            "label": _("Issues"),
            "items": [
                doctype_item("Issue", _("Support queries from customers."), onboard=1),
                doctype_item("Issue Type", _("Issue Type.")),
                doctype_item("Issue Priority", _("Issue Priority.")),
            ]
        },
        {
            "label": _("Warranty"),
            "items": [
                doctype_item("Warranty Claim", _("Warranty Claim against Serial No.")),
                doctype_item("Serial No", _("Single unit of an Item.")),
            ]
        },
        {
            "label": _("Service Level Agreement"),
            "items": [
                doctype_item("Service Level", _("Service Level.")),
                doctype_item("Service Level Agreement", _("Service Level Agreement.")),
            ]
        },
        {
            "label": _("Maintenance"),
            "items": [
                doctype_item("Maintenance Schedule"),
                doctype_item("Maintenance Visit"),
            ]
        },
        {
            "label": _("Reports"),
            "icon": "fa fa-list",
            "items": [
                {
                    "type": "page",
                    "name": "support-analytics",
                    "label": _("Support Analytics"),
                    "icon": "fa fa-bar-chart"
                },
                {
                    "type": "report",
                    "name": "Minutes to First Response for Issues",
                    "doctype": "Issue",
                    "is_query_report": True
                },
                {
                    "type": "report",
                    "name": "Support Hours",
                    "doctype": "Issue",
                    "is_query_report": True
                },
            ]
        },
        {
            "label": _("Settings"),
            "icon": "fa fa-list",
            "items": [
                {
                    "type": "doctype",
                    "name": "Support Settings",
                    "label": _("Support Settings"),
                },
            ]
        },
    ]
|
ebukoz/thrive
|
erpnext/config/support.py
|
Python
|
gpl-3.0
| 1,960
|
[
"VisIt"
] |
9c045241c05d3fdac6cc972a35648978fb44c56dfc9da1b428ef2d55975b5031
|
# coding: utf-8
# Copyright (c) Pymatgen Development Team.
# Distributed under the terms of the MIT License.
from __future__ import unicode_literals
import unittest2 as unittest
from pymatgen.electronic_structure.core import Orbital, Spin
class SpinTest(unittest.TestCase):
    """Unit tests for the Spin enum."""
    def test_init(self):
        # Spin members coerce to their integer values.
        self.assertEqual(int(Spin.up), 1)
        self.assertEqual(int(Spin.down), -1)
    def test_from_int(self):
        # Round-trip from int, and reject values outside {1, -1}.
        self.assertEqual(Spin(1), Spin.up)
        self.assertEqual(Spin(-1), Spin.down)
        self.assertRaises(ValueError, Spin, 0)
    def test_cached(self):
        # Enum members are singletons: same identity for equal values.
        self.assertEqual(id(Spin(1)), id(Spin.up))
class OrbitalTest(unittest.TestCase):
    """Tests for the Orbital enum."""

    def test_init(self):
        # Every member must round-trip through its integer value.
        for orbital in Orbital:
            self.assertEqual(Orbital(orbital.value), orbital)
        # An out-of-range value is rejected.
        self.assertRaises(ValueError, Orbital, 100)

    def test_cached(self):
        # Lookup must return the very same (cached) object, not a copy.
        self.assertEqual(id(Orbital(0)), id(Orbital.s))
# Run the tests when this module is executed directly.
if __name__ == '__main__':
    unittest.main()
|
aykol/pymatgen
|
pymatgen/electronic_structure/tests/test_core.py
|
Python
|
mit
| 969
|
[
"pymatgen"
] |
18c380ab49da780ed1bb748f73d1ac45fc6b0ba863ef4f1dd9300dfc83588ce4
|
"""Base classes and utilities for readers and writers.
Authors:
* Brian Granger
"""
#-----------------------------------------------------------------------------
# Copyright (C) 2008-2011 The IPython Development Team
#
# Distributed under the terms of the BSD License. The full license is in
# the file COPYING, distributed as part of this software.
#-----------------------------------------------------------------------------
#-----------------------------------------------------------------------------
# Imports
#-----------------------------------------------------------------------------
from base64 import encodestring, decodestring
import pprint
from . import py3compat
str_to_bytes = py3compat.str_to_bytes
#-----------------------------------------------------------------------------
# Code
#-----------------------------------------------------------------------------
def restore_bytes(nb):
    """Restore bytes of image data from unicode-only formats.

    Base64 encoding is handled elsewhere.  Bytes objects in the notebook are
    always b64-encoded.  We DO NOT encode/decode around file formats.
    """
    for worksheet in nb.worksheets:
        for cell in worksheet.cells:
            if cell.cell_type != 'code':
                continue
            for output in cell.outputs:
                # Only image payloads need the str -> bytes conversion.
                for fmt in ('png', 'jpeg'):
                    if fmt in output:
                        setattr(output, fmt, str_to_bytes(getattr(output, fmt), 'ascii'))
    return nb
# Output keys that are likely to have multiline values: these are the keys
# that split_lines()/rejoin_lines() convert between a string and a line list.
_multiline_outputs = ['text', 'html', 'svg', 'latex', 'javascript', 'json']
# FIXME: workaround for old splitlines()
def _join_lines(lines):
"""join lines that have been written by splitlines()
Has logic to protect against `splitlines()`, which
should have been `splitlines(True)`
"""
if lines and lines[0].endswith(('\n', '\r')):
# created by splitlines(True)
return u''.join(lines)
else:
# created by splitlines()
return u'\n'.join(lines)
def rejoin_lines(nb):
    """rejoin multiline text into strings

    For reversing effects of ``split_lines(nb)``.

    This only rejoins lines that have been split, so text objects that were
    never split pass through unchanged.

    Used when reading JSON files that may have been passed through split_lines.
    """
    for ws in nb.worksheets:
        for cell in ws.cells:
            if cell.cell_type == 'code':
                if isinstance(cell.get('input'), list):
                    cell.input = _join_lines(cell.input)
                for output in cell.outputs:
                    for key in _multiline_outputs:
                        value = output.get(key, None)
                        if isinstance(value, list):
                            output[key] = _join_lines(value)
            else:
                # text / heading cells keep their multiline data in these keys
                for key in ('source', 'rendered'):
                    value = cell.get(key, None)
                    if isinstance(value, list):
                        cell[key] = _join_lines(value)
    return nb
def split_lines(nb):
    """split likely multiline text into lists of strings

    For file output more friendly to line-based VCS. ``rejoin_lines(nb)`` will
    reverse the effects of ``split_lines(nb)``.

    Used when writing JSON files.
    """
    for ws in nb.worksheets:
        for cell in ws.cells:
            if cell.cell_type == 'code':
                if isinstance(cell.get('input'), basestring):
                    cell.input = cell.input.splitlines(True)
                for output in cell.outputs:
                    for key in _multiline_outputs:
                        value = output.get(key, None)
                        if isinstance(value, basestring):
                            output[key] = value.splitlines(True)
            else:
                # text / heading cells keep their multiline data in these keys
                for key in ('source', 'rendered'):
                    value = cell.get(key, None)
                    if isinstance(value, basestring):
                        cell[key] = value.splitlines(True)
    return nb
# b64 encode/decode are never actually used, because all bytes objects in
# the notebook are already b64-encoded, and we don't need/want to double-encode
def base64_decode(nb):
    """Restore all bytes objects in the notebook from base64-encoded strings.

    Note: This is never used
    """
    for ws in nb.worksheets:
        for cell in ws.cells:
            if cell.cell_type != 'code':
                continue
            for output in cell.outputs:
                for fmt in ('png', 'jpeg'):
                    if fmt not in output:
                        continue
                    data = getattr(output, fmt)
                    # b64 data must be ascii bytes before decoding
                    if isinstance(data, unicode):
                        data = data.encode('ascii')
                    setattr(output, fmt, decodestring(data))
    return nb
def base64_encode(nb):
    """Base64 encode all bytes objects in the notebook.

    These will be b64-encoded unicode strings

    Note: This is never used
    """
    for ws in nb.worksheets:
        for cell in ws.cells:
            if cell.cell_type != 'code':
                continue
            for output in cell.outputs:
                for fmt in ('png', 'jpeg'):
                    if fmt in output:
                        encoded = encodestring(getattr(output, fmt))
                        setattr(output, fmt, encoded.decode('ascii'))
    return nb
class NotebookReader(object):
    """A class for reading notebooks.

    Subclasses must implement :meth:`reads`; :meth:`read` is a thin
    file-object wrapper around it.
    """

    def reads(self, s, **kwargs):
        """Read a notebook from a string."""
        # Fixed message: it previously said "loads", which is not this
        # method's name and pointed implementers at the wrong hook.
        raise NotImplementedError("reads must be implemented in a subclass")

    def read(self, fp, **kwargs):
        """Read a notebook from a file like object"""
        nbs = fp.read()
        if not py3compat.PY3 and not isinstance(nbs, unicode):
            # reads() expects unicode on Python 2
            nbs = py3compat.str_to_unicode(nbs)
        return self.reads(nbs, **kwargs)
class NotebookWriter(object):
    """A class for writing notebooks.

    Subclasses must implement :meth:`writes`; :meth:`write` is a thin
    file-object wrapper around it.
    """

    def writes(self, nb, **kwargs):
        """Write a notebook to a string."""
        # Fixed message: it previously said "loads", which is not this
        # method's name and pointed implementers at the wrong hook.
        raise NotImplementedError("writes must be implemented in a subclass")

    def write(self, nb, fp, **kwargs):
        """Write a notebook to a file like object"""
        nbs = self.writes(nb, **kwargs)
        if not py3compat.PY3 and not isinstance(nbs, unicode):
            # this branch is likely only taken for JSON on Python 2
            nbs = py3compat.str_to_unicode(nbs)
        return fp.write(nbs)
|
maximsch2/SublimeIPythonNotebook
|
external/nbformat/rwbase.py
|
Python
|
gpl-3.0
| 6,753
|
[
"Brian"
] |
1e39838cb52af16c42b0861ab2f5f2795501875bc8135c3d9e2d1bb12e17061a
|
""" TaskQueueDB class is a front-end to the task queues db
"""
__RCSID__ = "$Id"
import types
import random
from DIRAC import gConfig, gLogger, S_OK, S_ERROR
from DIRAC.WorkloadManagementSystem.private.SharesCorrector import SharesCorrector
from DIRAC.WorkloadManagementSystem.private.Queues import maxCPUSegments
from DIRAC.ConfigurationSystem.Client.Helpers.Operations import Operations
from DIRAC.Core.Utilities import List
from DIRAC.Core.Utilities.DictCache import DictCache
from DIRAC.Core.Base.DB import DB
from DIRAC.Core.Security import Properties, CS
# Default priority share for a group when none is configured.
DEFAULT_GROUP_SHARE = 1000
# Lower bound applied to computed task-queue shares.
TQ_MIN_SHARE = 0.001
# Scalar fields that define a task queue (one value per TQ, all mandatory).
singleValueDefFields = ( 'OwnerDN', 'OwnerGroup', 'Setup', 'CPUTime' )
# List-valued TQ definition fields; each is stored in its own tq_TQTo<field> table.
multiValueDefFields = ( 'Sites', 'GridCEs', 'GridMiddlewares', 'BannedSites',
                        'Platforms', 'PilotTypes', 'SubmitPools', 'JobTypes', 'Tags' )
# Singular field names used in match requests against the plural definition fields.
multiValueMatchFields = ( 'GridCE', 'Site', 'GridMiddleware', 'Platform',
                          'PilotType', 'SubmitPool', 'JobType', 'Tag' )
# Match fields given special tag handling when generating the match SQL.
tagMatchFields = ( 'Tag', )
# Match fields additionally checked against the TQ's Banned<field> table.
bannedJobMatchFields = ( 'Site', )
# Fields that must match strictly when present — NOTE(review): consumed by the
# match-SQL generator; exact semantics not visible in this chunk.
strictRequireMatchFields = ( 'SubmitPool', 'Platform', 'PilotType', 'Tag' )
# Fields every match request must provide.
mandatoryMatchFields = ( 'Setup', 'CPUTime' )
# Fields ignored for priority purposes — NOTE(review): usage not in this chunk.
priorityIgnoredFields = ( 'Sites', 'BannedSites' )
class TaskQueueDB( DB ):
def __init__( self ):
  """Open the TaskQueueDB connection and ensure the schema exists.

  Raises Exception if the tables cannot be created.
  """
  random.seed()
  DB.__init__( self, 'TaskQueueDB', 'WorkloadManagement/TaskQueueDB' )
  # Hard cap of jobs per task queue before it is considered "full"
  self.__maxJobsInTQ = 5000
  # Default CPU-time segmentation; can be overridden via the CS option
  # "taskQueueCPUTimeIntervals" (see fitCPUTimeToSegments)
  self.__defaultCPUSegments = maxCPUSegments
  self.__maxMatchRetry = 3
  # Job priorities are clipped into this ( min, max ) interval
  self.__jobPriorityBoundaries = ( 0.001, 10 )
  self.__groupShares = {}
  # Cache of TQs scheduled for delayed deletion (entries expire into the callback)
  self.__deleteTQWithDelay = DictCache( self.__deleteTQIfEmpty )
  self.__opsHelper = Operations()
  self.__ensureInsertionIsSingle = False
  self.__sharesCorrector = SharesCorrector( self.__opsHelper )
  result = self.__initializeDB()
  if not result[ 'OK' ]:
    raise Exception( "Can't create tables: %s" % result[ 'Message' ] )
def enableAllTaskQueues( self ):
  """ Enable all Task queues
  """
  updateDict = { "Enabled" : "1" }
  return self.updateFields( "tq_TaskQueues", updateDict = updateDict )
def findOrphanJobs( self ):
  """ Find jobs that are not in any task queue
  """
  sqlCmd = "select JobID from tq_Jobs WHERE TQId not in (SELECT TQId from tq_TaskQueues)"
  result = self._query( sqlCmd )
  if not result[ 'OK' ]:
    return result
  orphanIds = []
  for row in result[ 'Value' ]:
    orphanIds.append( row[0] )
  return S_OK( orphanIds )
def isSharesCorrectionEnabled( self ):
  """Return whether shares correction is switched on in the CS."""
  enabled = self.__getCSOption( "EnableSharesCorrection", False )
  return enabled
def __getCSOption( self, optionName, defValue ):
  """Read a JobScheduling option through the Operations helper."""
  optionPath = "JobScheduling/%s" % optionName
  return self.__opsHelper.getValue( optionPath, defValue )
def getValidPilotTypes( self ):
  """Return the pilot types accepted for matching ( [ 'private' ] by default )."""
  defaultTypes = [ 'private' ]
  return self.__getCSOption( "AllPilotTypes", defaultTypes )
def __initializeDB( self ):
  """
  Create the tables

  Only tables that do not yet exist are created. Table descriptions are
  also kept in self.__tablesDesc for forceRecreationOfTables().
  """
  result = self._query( "show tables" )
  if not result[ 'OK' ]:
    return result
  tablesInDB = [ t[0] for t in result[ 'Value' ] ]
  tablesToCreate = {}
  self.__tablesDesc = {}
  # Main table: one row per task queue. 'Enabled' is used as a counter
  # (incremented/decremented by __setTaskQueueEnabled), not a boolean flag.
  self.__tablesDesc[ 'tq_TaskQueues' ] = { 'Fields' : { 'TQId' : 'INTEGER(10) UNSIGNED AUTO_INCREMENT NOT NULL',
                                                        'OwnerDN' : 'VARCHAR(255) NOT NULL',
                                                        'OwnerGroup' : 'VARCHAR(32) NOT NULL',
                                                        'Setup' : 'VARCHAR(32) NOT NULL',
                                                        'CPUTime' : 'BIGINT(20) UNSIGNED NOT NULL',
                                                        'Priority' : 'FLOAT NOT NULL',
                                                        'Enabled' : 'TINYINT(1) NOT NULL DEFAULT 0'
                                                      },
                                           'PrimaryKey' : 'TQId',
                                           'Indexes': { 'TQOwner': [ 'OwnerDN', 'OwnerGroup',
                                                                     'Setup', 'CPUTime' ]
                                                      }
                                         }
  # Jobs table: which job sits in which TQ, with its user and "real" priority.
  self.__tablesDesc[ 'tq_Jobs' ] = { 'Fields' : { 'TQId' : 'INTEGER(10) UNSIGNED NOT NULL',
                                                  'JobId' : 'INTEGER(11) UNSIGNED NOT NULL',
                                                  'Priority' : 'INTEGER UNSIGNED NOT NULL',
                                                  'RealPriority' : 'FLOAT NOT NULL'
                                                },
                                     'PrimaryKey' : 'JobId',
                                     'Indexes': { 'TaskIndex': [ 'TQId' ] },
                                   }
  # One auxiliary ( TQId, Value ) table per multi-valued definition field.
  for multiField in multiValueDefFields:
    tableName = 'tq_TQTo%s' % multiField
    self.__tablesDesc[ tableName ] = { 'Fields' : { 'TQId' : 'INTEGER UNSIGNED NOT NULL',
                                                    'Value' : 'VARCHAR(64) NOT NULL'
                                                  },
                                       'Indexes': { 'TaskIndex': [ 'TQId' ], '%sIndex' % multiField: [ 'Value' ] },
                                     }
  for tableName in self.__tablesDesc:
    if not tableName in tablesInDB:
      tablesToCreate[ tableName ] = self.__tablesDesc[ tableName ]
  return self._createTables( tablesToCreate )
def getGroupsInTQs( self ):
  """Return the distinct owner groups currently present in the task queues."""
  result = self._query( "SELECT DISTINCT( OwnerGroup ) FROM `tq_TaskQueues`" )
  if not result[ 'OK' ]:
    return result
  groups = [ row[0] for row in result[ 'Value' ] ]
  return S_OK( groups )
def forceRecreationOfTables( self ):
  """Drop and re-create all TaskQueueDB tables. All stored content is lost."""
  tableNames = ", ".join( self.__tablesDesc )
  result = self._update( "DROP TABLE IF EXISTS %s" % tableNames )
  if not result[ 'OK' ]:
    return result
  return self._createTables( self.__tablesDesc )
def __strDict( self, dDict ):
  """Render a dict as a multi-line '{ key: value }' string with the
  values left-aligned past the longest key (for readable log messages)."""
  keyLength = 0
  for key in dDict:
    keyLength = max( keyLength, len( key ) )
  lines = []
  for key in sorted( dDict ):
    label = ( "%s: " % key ).ljust( keyLength + 2 )
    value = dDict[ key ]
    if type( value ) in ( types.ListType, types.TupleType ):
      rendered = ','.join( list( value ) )
    else:
      rendered = str( value )
    lines.append( label + rendered )
  return "{\n%s\n}" % "\n".join( lines )
def fitCPUTimeToSegments( self, cpuTime ):
  """
  Fit the CPU time to the valid segments

  Returns the smallest configured CPU-time segment that accommodates
  `cpuTime`, or the largest segment when none is big enough.
  """
  maxCPUSegments = self.__getCSOption( "taskQueueCPUTimeIntervals", self.__defaultCPUSegments )
  try:
    maxCPUSegments = [ int( seg ) for seg in maxCPUSegments ]
    # Segments coming from the CS must be strictly increasing;
    # otherwise fall back to the defaults
    last = 0
    for cpuS in maxCPUSegments:
      if cpuS <= last:
        maxCPUSegments = self.__defaultCPUSegments
        break
      last = cpuS
  except ( TypeError, ValueError ):
    # Was a bare 'except:', which also swallowed KeyboardInterrupt/SystemExit;
    # only conversion/iteration errors mean "bad CS value"
    maxCPUSegments = self.__defaultCPUSegments
  # Map to the first (smallest) segment that fits
  for cpuSegment in maxCPUSegments:
    if cpuTime <= cpuSegment:
      return cpuSegment
  return maxCPUSegments[-1]
def _checkTaskQueueDefinition( self, tqDefDict ):
  """
  Check a task queue definition dict is valid

  Validates the mandatory single-value fields and any multi-value fields,
  SQL-escaping the values *in place*.
  Returns S_OK( tqDefDict ) with escaped values, or S_ERROR.
  """
  # Confine the LHCbPlatform legacy option here, use Platform everywhere else
  # until the LHCbPlatform is no more used in the TaskQueueDB
  if 'LHCbPlatforms' in tqDefDict and not "Platforms" in tqDefDict:
    tqDefDict['Platforms'] = tqDefDict['LHCbPlatforms']
  if 'SystemConfigs' in tqDefDict and not "Platforms" in tqDefDict:
    tqDefDict['Platforms'] = tqDefDict['SystemConfigs']
  for field in singleValueDefFields:
    if field not in tqDefDict:
      return S_ERROR( "Missing mandatory field '%s' in task queue definition" % field )
    fieldValueType = type( tqDefDict[ field ] )
    if field in [ "CPUTime" ]:
      # CPUTime is numeric and is not SQL-escaped
      if fieldValueType not in ( types.IntType, types.LongType ):
        return S_ERROR( "Mandatory field %s value type is not valid: %s" % ( field, fieldValueType ) )
    else:
      if fieldValueType not in ( types.StringType, types.UnicodeType ):
        return S_ERROR( "Mandatory field %s value type is not valid: %s" % ( field, fieldValueType ) )
      # String values are escaped in place
      result = self._escapeString( tqDefDict[ field ] )
      if not result[ 'OK' ]:
        return result
      tqDefDict[ field ] = result[ 'Value' ]
  for field in multiValueDefFields:
    if field not in tqDefDict:
      continue
    fieldValueType = type( tqDefDict[ field ] )
    if fieldValueType not in ( types.ListType, types.TupleType ):
      return S_ERROR( "Multi value field %s value type is not valid: %s" % ( field, fieldValueType ) )
    result = self._escapeValues( tqDefDict[ field ] )
    if not result[ 'OK' ]:
      return result
    tqDefDict[ field ] = result[ 'Value' ]
  #FIXME: This is not used
  if 'PrivatePilots' in tqDefDict:
    validPilotTypes = self.getValidPilotTypes()
    for pilotType in tqDefDict[ 'PrivatePilots' ]:
      if pilotType not in validPilotTypes:
        return S_ERROR( "PilotType %s is invalid" % pilotType )
  return S_OK( tqDefDict )
def _checkMatchDefinition( self, tqMatchDict ):
  """
  Check a task queue match dict is valid

  Ensures mandatory fields are present, values have valid types and
  string values are SQL-escaped *in place*.
  Returns S_OK( tqMatchDict ) or S_ERROR.
  """
  def travelAndCheckType( value, validTypes, escapeValues = True ):
    # Accept a scalar or a list/tuple of scalars of the given types,
    # optionally escaping the value(s) for SQL
    valueType = type( value )
    if valueType in ( types.ListType, types.TupleType ):
      for subValue in value:
        subValueType = type( subValue )
        if subValueType not in validTypes:
          return S_ERROR( "List contained type %s is not valid -> %s" % ( subValueType, validTypes ) )
      if escapeValues:
        return self._escapeValues( value )
      return S_OK( value )
    else:
      if valueType not in validTypes:
        return S_ERROR( "Type %s is not valid -> %s" % ( valueType, validTypes ) )
      if escapeValues:
        return self._escapeString( value )
      return S_OK( value )
  # Confine the LHCbPlatform legacy option here, use Platform everywhere else
  # until the LHCbPlatform is no more used in the TaskQueueDB
  if 'LHCbPlatform' in tqMatchDict and not "Platform" in tqMatchDict:
    tqMatchDict['Platform'] = tqMatchDict['LHCbPlatform']
  if 'SystemConfig' in tqMatchDict and not "Platform" in tqMatchDict:
    tqMatchDict['Platform'] = tqMatchDict['SystemConfig']
  for field in singleValueDefFields:
    if field not in tqMatchDict:
      if field in mandatoryMatchFields:
        return S_ERROR( "Missing mandatory field '%s' in match request definition" % field )
      continue
    fieldValue = tqMatchDict[ field ]
    if field in [ "CPUTime" ]:
      # CPUTime is numeric: no SQL escaping needed
      result = travelAndCheckType( fieldValue, ( types.IntType, types.LongType ), escapeValues = False )
    else:
      result = travelAndCheckType( fieldValue, ( types.StringType, types.UnicodeType ) )
    if not result[ 'OK' ]:
      return S_ERROR( "Match definition field %s failed : %s" % ( field, result[ 'Message' ] ) )
    tqMatchDict[ field ] = result[ 'Value' ]
  #Check multivalue
  for multiField in multiValueMatchFields:
    # Each multi-value match field may also come with a Banned<field> companion
    for field in ( multiField, "Banned%s" % multiField ):
      if field in tqMatchDict:
        fieldValue = tqMatchDict[ field ]
        result = travelAndCheckType( fieldValue, ( types.StringType, types.UnicodeType ) )
        if not result[ 'OK' ]:
          return S_ERROR( "Match definition field %s failed : %s" % ( field, result[ 'Message' ] ) )
        tqMatchDict[ field ] = result[ 'Value' ]
  return S_OK( tqMatchDict )
def __createTaskQueue( self, tqDefDict, priority = 1, connObj = False ):
  """
  Create a task queue
    Returns S_OK( tqId ) / S_ERROR

  The TQ is inserted with Enabled = 0 (disabled); the caller is expected
  to enable it once its jobs are in place.
  """
  if not connObj:
    result = self._getConnection()
    if not result[ 'OK' ]:
      return S_ERROR( "Can't create task queue: %s" % result[ 'Message' ] )
    connObj = result[ 'Value' ]
  tqDefDict[ 'CPUTime' ] = self.fitCPUTimeToSegments( tqDefDict[ 'CPUTime' ] )
  sqlSingleFields = [ 'TQId', 'Priority' ]
  sqlValues = [ "0", str( priority ) ]
  for field in singleValueDefFields:
    sqlSingleFields.append( field )
    sqlValues.append( tqDefDict[ field ] )
  #Insert the TQ Disabled
  sqlSingleFields.append( "Enabled" )
  sqlValues.append( "0" )
  cmd = "INSERT INTO tq_TaskQueues ( %s ) VALUES ( %s )" % ( ", ".join( sqlSingleFields ), ", ".join( [ str( v ) for v in sqlValues ] ) )
  result = self._update( cmd, conn = connObj )
  if not result[ 'OK' ]:
    self.log.error( "Can't insert TQ in DB", result[ 'Value' ] )
    return result
  # Recover the auto-generated TQId (prefer the driver-reported lastRowId)
  if 'lastRowId' in result:
    tqId = result['lastRowId']
  else:
    result = self._query( "SELECT LAST_INSERT_ID()", conn = connObj )
    if not result[ 'OK' ]:
      self.cleanOrphanedTaskQueues( connObj = connObj )
      return S_ERROR( "Can't determine task queue id after insertion" )
    tqId = result[ 'Value' ][0][0]
  # Fill the per-field multi-value tables; empty/blank values are skipped
  for field in multiValueDefFields:
    if field not in tqDefDict:
      continue
    values = List.uniqueElements( [ value for value in tqDefDict[ field ] if value.strip() ] )
    if not values:
      continue
    cmd = "INSERT INTO `tq_TQTo%s` ( TQId, Value ) VALUES " % field
    cmd += ", ".join( [ "( %s, %s )" % ( tqId, str( value ) ) for value in values ] )
    result = self._update( cmd, conn = connObj )
    if not result[ 'OK' ]:
      self.log.error( "Failed to insert %s condition" % field, result[ 'Message' ] )
      # Roll back the half-created TQ
      self.cleanOrphanedTaskQueues( connObj = connObj )
      return S_ERROR( "Can't insert values %s for field %s: %s" % ( str( values ), field, result[ 'Message' ] ) )
  self.log.info( "Created TQ %s" % tqId )
  return S_OK( tqId )
def cleanOrphanedTaskQueues( self, connObj = False ):
  """
  Delete all empty task queues
  """
  self.log.info( "Cleaning orphaned TQs" )
  # First drop enabled queues that hold no job any more...
  sqlCmd = "DELETE FROM `tq_TaskQueues` WHERE Enabled >= 1 AND TQId not in ( SELECT DISTINCT TQId from `tq_Jobs` )"
  result = self._update( sqlCmd, conn = connObj )
  if not result[ 'OK' ]:
    return result
  # ...then purge the per-field value tables of entries pointing to deleted TQs
  for mvField in multiValueDefFields:
    sqlCmd = "DELETE FROM `tq_TQTo%s` WHERE TQId not in ( SELECT DISTINCT TQId from `tq_TaskQueues` )" % mvField
    result = self._update( sqlCmd, conn = connObj )
    if not result[ 'OK' ]:
      return result
  return S_OK()
def __setTaskQueueEnabled( self, tqId, enabled = True, connObj = False ):
  """Increment (enable) or decrement (disable) the Enabled counter of a TQ."""
  enabled = "+ 1" if enabled else "- 1"
  upSQL = "UPDATE `tq_TaskQueues` SET Enabled = Enabled %s WHERE TQId=%d" % ( enabled, tqId )
  result = self._update( upSQL, conn = connObj )
  if not result[ 'OK' ]:
    self.log.error( "Error setting TQ state", "TQ %s State %s: %s" % ( tqId, enabled, result[ 'Message' ] ) )
    return result
  updated = result[ 'Value' ] > 0
  if updated:
    self.log.info( "Set enabled = %s for TQ %s" % ( enabled, tqId ) )
  return S_OK( updated )
def __hackJobPriority( self, jobPriority ):
  """Clip the job priority into the configured boundaries, mapping the two
  extremes to a tiny/huge "real" priority used by the weighted random pick."""
  lowBound, highBound = self.__jobPriorityBoundaries
  jobPriority = int( jobPriority )
  if jobPriority < lowBound:
    jobPriority = lowBound
  elif jobPriority > highBound:
    jobPriority = highBound
  if jobPriority == lowBound:
    return 10 ** ( -5 )
  if jobPriority == highBound:
    return 10 ** 6
  return jobPriority
def insertJob( self, jobId, tqDefDict, jobPriority, skipTQDefCheck = False, numRetries = 10 ):
  """
  Insert a job in a task queue
    Returns S_OK() / S_ERROR

  The matching TQ is disabled while the job is inserted and always
  re-enabled in the 'finally' clause.
  """
  try:
    long( jobId )
  except ( ValueError, TypeError ):
    # Fixed: long(None) raises TypeError, which previously escaped uncaught
    return S_ERROR( "JobId is not a number!" )
  retVal = self._getConnection()
  if not retVal[ 'OK' ]:
    return S_ERROR( "Can't insert job: %s" % retVal[ 'Message' ] )
  connObj = retVal[ 'Value' ]
  if not skipTQDefCheck:
    # Copy before validation: escaping modifies the dict in place
    tqDefDict = dict( tqDefDict )
    retVal = self._checkTaskQueueDefinition( tqDefDict )
    if not retVal[ 'OK' ]:
      self.log.error( "TQ definition check failed", retVal[ 'Message' ] )
      return retVal
    tqDefDict = retVal[ 'Value' ]
  tqDefDict[ 'CPUTime' ] = self.fitCPUTimeToSegments( tqDefDict[ 'CPUTime' ] )
  self.log.info( "Inserting job %s with requirements: %s" % ( jobId, self.__strDict( tqDefDict ) ) )
  retVal = self.__findAndDisableTaskQueue( tqDefDict, skipDefinitionCheck = True, connObj = connObj )
  if not retVal[ 'OK' ]:
    return retVal
  tqInfo = retVal[ 'Value' ]
  newTQ = False
  if not tqInfo[ 'found' ]:
    self.log.info( "Creating a TQ for job %s" % jobId )
    retVal = self.__createTaskQueue( tqDefDict, 1, connObj = connObj )
    if not retVal[ 'OK' ]:
      return retVal
    tqId = retVal[ 'Value' ]
    newTQ = True
  else:
    tqId = tqInfo[ 'tqId' ]
    self.log.info( "Found TQ %s for job %s requirements" % ( tqId, jobId ) )
  try:
    result = self.__insertJobInTaskQueue( jobId, tqId, int( jobPriority ), checkTQExists = False, connObj = connObj )
    if not result[ 'OK' ]:
      self.log.error( "Error inserting job in TQ", "Job %s TQ %s: %s" % ( jobId, tqId, result[ 'Message' ] ) )
      return result
    if newTQ:
      # A brand-new TQ changes the owner's share distribution
      self.recalculateTQSharesForEntity( tqDefDict[ 'OwnerDN' ], tqDefDict[ 'OwnerGroup' ], connObj = connObj )
  finally:
    # Always re-enable the TQ, even when the insertion failed
    self.__setTaskQueueEnabled( tqId, True )
  return S_OK()
def __insertJobInTaskQueue( self, jobId, tqId, jobPriority, checkTQExists = True, connObj = False ):
  """
  Insert a job in a given task queue

  :param jobId: id of the job to insert
  :param tqId: id of the target task queue
  :param jobPriority: user job priority (also mapped to a "real" priority)
  :param checkTQExists: verify that the task queue exists before inserting
  :param connObj: optional DB connection to reuse
  """
  self.log.info( "Inserting job %s in TQ %s with priority %s" % ( jobId, tqId, jobPriority ) )
  if not connObj:
    result = self._getConnection()
    if not result[ 'OK' ]:
      return S_ERROR( "Can't insert job: %s" % result[ 'Message' ] )
    connObj = result[ 'Value' ]
  if checkTQExists:
    result = self._query( "SELECT tqId FROM `tq_TaskQueues` WHERE TQId = %s" % tqId, conn = connObj )
    # Fixed: this used to return S_OK on failure/missing TQ, masking the
    # error from callers, and read result['Message'] on a successful empty
    # query (KeyError). Now both cases are proper S_ERRORs.
    if not result[ 'OK' ]:
      return S_ERROR( "Can't check task queue with id %s: %s" % ( tqId, result[ 'Message' ] ) )
    if len( result[ 'Value' ] ) == 0:
      return S_ERROR( "Can't find task queue with id %s" % tqId )
  hackedPriority = self.__hackJobPriority( jobPriority )
  # Upsert: a job already sitting in a TQ is moved/re-prioritized
  result = self._update( "INSERT INTO tq_Jobs ( TQId, JobId, Priority, RealPriority ) VALUES ( %s, %s, %s, %f ) ON DUPLICATE KEY UPDATE TQId = %s, Priority = %s, RealPriority = %f" % ( tqId, jobId, jobPriority, hackedPriority, tqId, jobPriority, hackedPriority ), conn = connObj )
  if not result[ 'OK' ]:
    return result
  return S_OK()
def __generateTQFindSQL( self, tqDefDict, skipDefinitionCheck = False, connObj = False ):
  """
  Find a task queue that has exactly the same requirements

  Returns S_OK( sqlCondition ): a condition matching TQs whose single-value
  fields are equal and whose multi-value tables contain exactly the same
  set of values (no more, no fewer).
  """
  if not skipDefinitionCheck:
    tqDefDict = dict( tqDefDict )
    result = self._checkTaskQueueDefinition( tqDefDict )
    if not result[ 'OK' ]:
      return result
    tqDefDict = result[ 'Value' ]
  sqlCondList = []
  for field in singleValueDefFields:
    sqlCondList.append( "`tq_TaskQueues`.%s = %s" % ( field, tqDefDict[ field ] ) )
  #MAGIC SUBQUERIES TO ENSURE STRICT MATCH
  for field in multiValueDefFields:
    tableName = '`tq_TQTo%s`' % field
    if field in tqDefDict and tqDefDict[ field ]:
      # Strict set equality: the TQ must have exactly numValues entries for
      # this field AND exactly numValues of them must be in the requested list
      firstQuery = "SELECT COUNT(%s.Value) FROM %s WHERE %s.TQId = `tq_TaskQueues`.TQId" % ( tableName, tableName, tableName )
      grouping = "GROUP BY %s.TQId" % tableName
      valuesList = List.uniqueElements( [ value.strip() for value in tqDefDict[ field ] if value.strip() ] )
      numValues = len( valuesList )
      secondQuery = "%s AND %s.Value in (%s)" % ( firstQuery, tableName,
                                                  ",".join( [ "%s" % str( value ) for value in valuesList ] ) )
      sqlCondList.append( "%s = (%s %s)" % ( numValues, firstQuery, grouping ) )
      sqlCondList.append( "%s = (%s %s)" % ( numValues, secondQuery, grouping ) )
    else:
      # Field not requested: the TQ must have no entries for it at all
      sqlCondList.append( "`tq_TaskQueues`.TQId not in ( SELECT DISTINCT %s.TQId from %s )" % ( tableName, tableName ) )
  #END MAGIC: That was easy ;)
  return S_OK( " AND ".join( sqlCondList ) )
def __findAndDisableTaskQueue( self, tqDefDict, skipDefinitionCheck = False, retries = 10, connObj = False ):
  """ Disable and find TQ

  Finds the smallest TQ matching the definition and disables it (so that
  matching does not pick it while jobs are being inserted).
  Returns S_OK( { 'found': bool, 'tqId': ..., 'enabled': ..., 'jobs': ... } )
  or S_ERROR after exhausting the retries.
  """
  for _ in range( retries ):
    result = self.__findSmallestTaskQueue( tqDefDict, skipDefinitionCheck = skipDefinitionCheck, connObj = connObj )
    if not result[ 'OK' ]:
      return result
    data = result[ 'Value' ]
    if not data[ 'found' ]:
      # No matching TQ: nothing to disable, caller will create one
      return result
    if data[ 'enabled' ] < 1:
      gLogger.notice( "TaskQueue {tqId} seems to be already disabled ({enabled})".format( **data ) )
    result = self.__setTaskQueueEnabled( data[ 'tqId' ], False )
    if result[ 'OK' ]:
      return S_OK( data )
  return S_ERROR( "Could not disable TQ" )
def __findSmallestTaskQueue( self, tqDefDict, skipDefinitionCheck = False, connObj = False ):
  """
  Find a task queue that has exactly the same requirements

  Among the matching TQs, picks the one holding the fewest jobs.
  Returns S_OK( { 'found': False } ) when no TQ matches or the smallest
  matching TQ is already at the __maxJobsInTQ limit.
  """
  result = self.__generateTQFindSQL( tqDefDict, skipDefinitionCheck = skipDefinitionCheck,
                                     connObj = connObj )
  if not result[ 'OK' ]:
    return result
  # Join against tq_Jobs so TQs come back ordered by their job count
  sqlCmd = "SELECT COUNT( `tq_Jobs`.JobID ), `tq_TaskQueues`.TQId, `tq_TaskQueues`.Enabled FROM `tq_TaskQueues`, `tq_Jobs`"
  sqlCmd = "%s WHERE `tq_TaskQueues`.TQId = `tq_Jobs`.TQId AND %s GROUP BY `tq_Jobs`.TQId ORDER BY COUNT( `tq_Jobs`.JobID ) ASC" % ( sqlCmd, result[ 'Value' ] )
  result = self._query( sqlCmd, conn = connObj )
  if not result[ 'OK' ]:
    return S_ERROR( "Can't find task queue: %s" % result[ 'Message' ] )
  data = result[ 'Value' ]
  if len( data ) == 0 or data[0][0] >= self.__maxJobsInTQ:
    return S_OK( { 'found' : False } )
  return S_OK( { 'found' : True, 'tqId' : data[0][1], 'enabled' : data[0][2], 'jobs' : data[0][0] } )
def matchAndGetJob( self, tqMatchDict, numJobsPerTry = 50, numQueuesPerTry = 10, negativeCond = {} ):
  """
  Match a job

  Selects matching task queues, then extracts one job using a
  priority-weighted random pick, retrying up to __maxMatchRetry times.
  Returns S_OK( { 'matchFound': bool, 'jobId': ..., 'taskQueueId': ...,
  'tqMatch': ... } ) or S_ERROR.
  """
  #Make a copy to avoid modification of original if escaping needs to be done
  tqMatchDict = dict( tqMatchDict )
  self.log.info( "Starting match for requirements", self.__strDict( tqMatchDict ) )
  retVal = self._checkMatchDefinition( tqMatchDict )
  if not retVal[ 'OK' ]:
    self.log.error( "TQ match request check failed", retVal[ 'Message' ] )
    return retVal
  retVal = self._getConnection()
  if not retVal[ 'OK' ]:
    return S_ERROR( "Can't connect to DB: %s" % retVal[ 'Message' ] )
  connObj = retVal[ 'Value' ]
  preJobSQL = "SELECT `tq_Jobs`.JobId, `tq_Jobs`.TQId FROM `tq_Jobs` WHERE `tq_Jobs`.TQId = %s AND `tq_Jobs`.Priority = %s"
  # Weighted random pick of the "winning" priority inside a TQ
  prioSQL = "SELECT `tq_Jobs`.Priority FROM `tq_Jobs` WHERE `tq_Jobs`.TQId = %s ORDER BY RAND() / `tq_Jobs`.RealPriority ASC LIMIT 1"
  postJobSQL = " ORDER BY `tq_Jobs`.JobId ASC LIMIT %s" % numJobsPerTry
  for _ in range( self.__maxMatchRetry ):
    if 'JobID' in tqMatchDict:
      # A certain JobID is required by the resource, so all TQ are to be considered
      retVal = self.matchAndGetTaskQueue( tqMatchDict,
                                          numQueuesToGet = 0,
                                          skipMatchDictDef = True,
                                          connObj = connObj )
      preJobSQL = "%s AND `tq_Jobs`.JobId = %s " % ( preJobSQL, tqMatchDict['JobID'] )
    else:
      retVal = self.matchAndGetTaskQueue( tqMatchDict,
                                          numQueuesToGet = numQueuesPerTry,
                                          skipMatchDictDef = True,
                                          negativeCond = negativeCond,
                                          connObj = connObj )
    if not retVal[ 'OK' ]:
      return retVal
    tqList = retVal[ 'Value' ]
    if len( tqList ) == 0:
      self.log.info( "No TQ matches requirements" )
      return S_OK( { 'matchFound' : False, 'tqMatch' : tqMatchDict } )
    for tqId, tqOwnerDN, tqOwnerGroup in tqList:
      self.log.info( "Trying to extract jobs from TQ %s" % tqId )
      retVal = self._query( prioSQL % tqId, conn = connObj )
      if not retVal[ 'OK' ]:
        return S_ERROR( "Can't retrieve winning priority for matching job: %s" % retVal[ 'Message' ] )
      if len( retVal[ 'Value' ] ) == 0:
        continue
      prio = retVal[ 'Value' ][0][0]
      retVal = self._query( "%s %s" % ( preJobSQL % ( tqId, prio ), postJobSQL ), conn = connObj )
      if not retVal[ 'OK' ]:
        return S_ERROR( "Can't begin transaction for matching job: %s" % retVal[ 'Message' ] )
      jobTQList = [ ( row[0], row[1] ) for row in retVal[ 'Value' ] ]
      if len( jobTQList ) == 0:
        gLogger.info( "Task queue %s seems to be empty, triggering a cleaning" % tqId )
        # Schedule a delayed (300 s) deletion of the apparently empty TQ
        self.__deleteTQWithDelay.add( tqId, 300, ( tqId, tqOwnerDN, tqOwnerGroup ) )
      while len( jobTQList ) > 0:
        # Random choice among the candidate jobs of the winning priority;
        # NOTE(review): deleteJob's Value appears to flag whether THIS call
        # removed the row (guarding concurrent matchers) — inferred from usage
        jobId, tqId = jobTQList.pop( random.randint( 0, len( jobTQList ) - 1 ) )
        self.log.info( "Trying to extract job %s from TQ %s" % ( jobId, tqId ) )
        retVal = self.deleteJob( jobId, connObj = connObj )
        if not retVal[ 'OK' ]:
          msgFix = "Could not take job"
          msgVar = " %s out from the TQ %s: %s" % ( jobId, tqId, retVal[ 'Message' ] )
          self.log.error( msgFix, msgVar )
          return S_ERROR( msgFix + msgVar )
        if retVal[ 'Value' ] == True :
          self.log.info( "Extracted job %s with prio %s from TQ %s" % ( jobId, prio, tqId ) )
          return S_OK( { 'matchFound' : True, 'jobId' : jobId, 'taskQueueId' : tqId, 'tqMatch' : tqMatchDict } )
      self.log.info( "No jobs could be extracted from TQ %s" % tqId )
  self.log.info( "Could not find a match after %s match retries" % self.__maxMatchRetry )
  return S_ERROR( "Could not find a match after %s match retries" % self.__maxMatchRetry )
def matchAndGetTaskQueue( self, tqMatchDict, numQueuesToGet = 1, skipMatchDictDef = False,
                          negativeCond = {}, connObj = False ):
  """ Get a queue that matches the requirements
  """
  # Work on a copy: escaping must not modify the caller's dict
  tqMatchDict = dict( tqMatchDict )
  if not skipMatchDictDef:
    retVal = self._checkMatchDefinition( tqMatchDict )
    if not retVal[ 'OK' ]:
      return retVal
  retVal = self.__generateTQMatchSQL( tqMatchDict, numQueuesToGet = numQueuesToGet, negativeCond = negativeCond )
  if not retVal[ 'OK' ]:
    return retVal
  retVal = self._query( retVal[ 'Value' ], conn = connObj )
  if not retVal[ 'OK' ]:
    return retVal
  matches = [ ( row[0], row[1], row[2] ) for row in retVal[ 'Value' ] ]
  return S_OK( matches )
def __generateSQLSubCond( self, sqlString, value, boolOp = 'OR' ):
  """Fill sqlString with value; list/tuple values expand to ( a <op> b ... )."""
  if type( value ) not in ( types.ListType, types.TupleType ):
    return sqlString % str( value ).strip()
  parts = [ sqlString % str( v ).strip() for v in value ]
  joiner = " %s " % boolOp
  return "( %s )" % joiner.join( parts )
def __generateNotSQL( self, tableDict, negativeCond ):
  """ Generate negative conditions
      Can be a list of dicts or a dict:
       - a list of dicts becomes the OR of the individual conditional dicts
       - a dict is a normal conditional dict ( key1 in ( v1, v2, ... ) AND key2 in ( v3, v4, ... ) )
  """
  condType = type( negativeCond )
  if condType == types.DictType:
    return self.__generateNotDictSQL( tableDict, negativeCond )
  if condType in ( types.ListType, types.TupleType ):
    sqlCond = [ self.__generateNotDictSQL( tableDict, cD ) for cD in negativeCond ]
    return " ( %s )" % " OR ".join( sqlCond )
  raise RuntimeError( "negativeCond has to be either a list or a dict and it's %s" % condType )
def __generateNotDictSQL( self, tableDict, negativeCond ):
  """ Generate the negative sql condition from a standard condition dict
      not ( cond1 and cond2 ) = ( not cond1 or not cond 2 )
      For instance: { 'Site': 'S1', 'JobType': [ 'T1', 'T2' ] }
        ( not 'S1' in Sites or ( not 'T1' in JobType and not 'T2' in JobType ) )
        S2 T1 -> not False or ( not True and not False ) -> True or ... -> True -> Eligible
        S1 T3 -> not True or ( not False and not False ) -> False or (True and True ) -> True -> Eligible
        S1 T1 -> not True or ( not True and not False ) -> False or ( False and True ) -> False -> Nop
  """
  condList = []
  for field in negativeCond:
    if field in multiValueMatchFields:
      # Multi-value field: none of the given values may appear in the TQ's
      # per-field value table
      fullTableN = '`tq_TQTo%ss`' % field
      valList = negativeCond[ field ]
      if type( valList ) not in ( types.TupleType, types.ListType ):
        valList = ( valList, )
      subList = []
      for value in valList:
        value = self._escapeString( value )[ 'Value' ]
        sql = "%s NOT IN ( SELECT %s.Value FROM %s WHERE %s.TQId = tq.TQId )" % ( value,
                                                                                  fullTableN, fullTableN, fullTableN )
        subList.append( sql )
      condList.append( "( %s )" % " AND ".join( subList ) )
    elif field in singleValueDefFields:
      # Single-value field: plain inequality against the TQ column
      for value in negativeCond[field]:
        value = self._escapeString( value )[ 'Value' ]
        sql = "%s != tq.%s " % ( value, field )
        condList.append( sql )
  return "( %s )" % " OR ".join( condList )
def __generateTablesName( self, sqlTables, field ):
  """Register the tq_TQTo<field>s table in sqlTables (if not yet present)
  and return ( alias, backquoted table name )."""
  fullTableName = 'tq_TQTo%ss' % field
  quotedName = "`%s`" % fullTableName
  if fullTableName not in sqlTables:
    sqlTables[ fullTableName ] = field.lower()
  return sqlTables[ fullTableName ], quotedName
def __generateTQMatchSQL( self, tqMatchDict, numQueuesToGet = 1, negativeCond = None ):
  """
  Generate the SQL needed to match a task queue

  :param tqMatchDict: dict of resource capabilities to match against the TQs
  :param numQueuesToGet: LIMIT of the generated query (0/False disables the LIMIT)
  :param negativeCond: dict of conditions the TQ must NOT satisfy
                       (fixed: was a mutable default argument `{}`)
  :return: S_OK( sqlQueryString )
  """
  if negativeCond is None:
    negativeCond = {}
  #Only enabled TQs
  sqlCondList = []
  sqlTables = { "tq_TaskQueues" : "tq" }
  #If OwnerDN and OwnerGroup are defined only use those combinations that make sense
  if 'OwnerDN' in tqMatchDict and 'OwnerGroup' in tqMatchDict:
    groups = tqMatchDict[ 'OwnerGroup' ]
    if type( groups ) not in ( types.ListType, types.TupleType ):
      groups = [ groups ]
    dns = tqMatchDict[ 'OwnerDN' ]
    if type( dns ) not in ( types.ListType, types.TupleType ):
      dns = [ dns ]
    ownerConds = []
    for group in groups:
      # Values arrive pre-quoted; strip quotes before the CS lookup
      if Properties.JOB_SHARING in CS.getPropertiesForGroup( group.replace( '"', "" ) ):
        # JobSharing groups match on group only, any DN of the group may take the job
        ownerConds.append( "tq.OwnerGroup = %s" % group )
      else:
        for dn in dns:
          ownerConds.append( "( tq.OwnerDN = %s AND tq.OwnerGroup = %s )" % ( dn, group ) )
    sqlCondList.append( " OR ".join( ownerConds ) )
  else:
    #If not both are defined, just add the ones that are defined
    for field in ( 'OwnerGroup', 'OwnerDN' ):
      if field in tqMatchDict:
        sqlCondList.append( self.__generateSQLSubCond( "tq.%s = %%s" % field,
                                                       tqMatchDict[ field ] ) )
  #Type of single value conditions
  for field in ( 'CPUTime', 'Setup' ):
    if field in tqMatchDict:
      # FIX: was `field in ( 'CPUTime' )` -- a string, not a tuple, so this was an
      # accidental substring test that only worked because 'Setup' is not in 'CPUTime'
      if field in ( 'CPUTime', ):
        sqlCondList.append( self.__generateSQLSubCond( "tq.%s <= %%s" % field, tqMatchDict[ field ] ) )
      else:
        sqlCondList.append( self.__generateSQLSubCond( "tq.%s = %%s" % field, tqMatchDict[ field ] ) )
  #Match multi value fields
  for field in multiValueMatchFields:
    #It has to be %ss , with an 's' at the end because the columns names
    # are plural and match options are singular
    if field in tqMatchDict and tqMatchDict[ field ]:
      _, fullTableN = self.__generateTablesName( sqlTables, field )
      sqlMultiCondList = []
      # if field != 'GridCE' or 'Site' in tqMatchDict:
      # Jobs for masked sites can be matched if they specified a GridCE
      # Site is removed from tqMatchDict if the Site is mask. In this case we want
      # that the GridCE matches explicitly so the COUNT can not be 0. In this case we skip this
      # condition
      sqlMultiCondList.append( "( SELECT COUNT(%s.Value) FROM %s WHERE %s.TQId = tq.TQId ) = 0" % ( fullTableN, fullTableN, fullTableN ) )
      if field in tagMatchFields:
        # NOTE(review): if the value IS '"Any"', csql keeps the value from a previous
        # iteration (or is unbound on the first one) -- confirm intended semantics
        if tqMatchDict[field] != '"Any"':
          csql = self.__generateTagSQLSubCond( fullTableN, tqMatchDict[field] )
      else:
        csql = self.__generateSQLSubCond( "%%s IN ( SELECT %s.Value FROM %s WHERE %s.TQId = tq.TQId )" % ( fullTableN, fullTableN, fullTableN ), tqMatchDict[ field ] )
      sqlMultiCondList.append( csql )
      sqlCondList.append( "( %s )" % " OR ".join( sqlMultiCondList ) )
      #In case of Site, check it's not in job banned sites
      if field in bannedJobMatchFields:
        fullTableN = '`tq_TQToBanned%ss`' % field
        csql = self.__generateSQLSubCond( "%%s not in ( SELECT %s.Value FROM %s WHERE %s.TQId = tq.TQId )" % ( fullTableN,
                                                                                                              fullTableN, fullTableN ), tqMatchDict[ field ], boolOp = 'OR' )
        sqlCondList.append( csql )
    #Resource banning
    bannedField = "Banned%s" % field
    if bannedField in tqMatchDict and tqMatchDict[ bannedField ]:
      fullTableN = '`tq_TQTo%ss`' % field
      csql = self.__generateSQLSubCond( "%%s not in ( SELECT %s.Value FROM %s WHERE %s.TQId = tq.TQId )" % ( fullTableN,
                                                                                                            fullTableN, fullTableN ), tqMatchDict[ bannedField ], boolOp = 'OR' )
      sqlCondList.append( csql )
  #For certain fields, the require is strict. If it is not in the tqMatchDict, the job cannot require it
  for field in strictRequireMatchFields:
    if field in tqMatchDict:
      continue
    fullTableN = '`tq_TQTo%ss`' % field
    sqlCondList.append( "( SELECT COUNT(%s.Value) FROM %s WHERE %s.TQId = tq.TQId ) = 0" % ( fullTableN, fullTableN, fullTableN ) )
  # Add extra conditions
  if negativeCond:
    sqlCondList.append( self.__generateNotSQL( sqlTables, negativeCond ) )
  #Generate the final query string
  tqSqlCmd = "SELECT tq.TQId, tq.OwnerDN, tq.OwnerGroup FROM `tq_TaskQueues` tq WHERE %s" % ( " AND ".join( sqlCondList ) )
  #Apply priorities
  tqSqlCmd = "%s ORDER BY RAND() / tq.Priority ASC" % tqSqlCmd
  #Do we want a limit?
  if numQueuesToGet:
    tqSqlCmd = "%s LIMIT %s" % ( tqSqlCmd, numQueuesToGet )
  return S_OK( tqSqlCmd )
def __generateTagSQLSubCond( self, tableName, tagMatchList ):
  """ Generate SQL condition where ALL the specified multiValue requirements must be
      present in the matching resource list
  """
  # Total number of tag rows for this TQ
  countSQL = "SELECT COUNT(%s.Value) FROM %s WHERE %s.TQId=tq.TQId" % ( tableName, tableName, tableName )
  # Number of tag rows restricted to the requested value(s)
  if type( tagMatchList ) in ( types.ListType, types.TupleType ):
    valuesCSV = ','.join( "%s" % tagValue for tagValue in tagMatchList )
    filteredSQL = countSQL + " AND %s.Value in ( %s )" % ( tableName, valuesCSV )
  else:
    filteredSQL = countSQL + " AND %s.Value=%s" % ( tableName, tagMatchList )
  # ALL tags match exactly when both counts agree
  return '( ' + countSQL + ' ) = (' + filteredSQL + ' )'
def deleteJob( self, jobId, connObj = False ):
  """
  Delete a job from the task queues
  Return S_OK( True/False ) / S_ERROR
  """
  if not connObj:
    result = self._getConnection()
    if not result[ 'OK' ]:
      return S_ERROR( "Can't delete job: %s" % result[ 'Message' ] )
    connObj = result[ 'Value' ]
  # Find the TQ (and its owner) holding the job
  selectSQL = "SELECT t.TQId, t.OwnerDN, t.OwnerGroup FROM `tq_TaskQueues` t, `tq_Jobs` j WHERE j.JobId = %s AND t.TQId = j.TQId" % jobId
  result = self._query( selectSQL, conn = connObj )
  if not result[ 'OK' ]:
    return S_ERROR( "Could not get job from task queue %s: %s" % ( jobId, result[ 'Message' ] ) )
  rows = result[ 'Value' ]
  if not rows:
    return S_OK( False )
  tqId, tqOwnerDN, tqOwnerGroup = rows[0]
  self.log.info( "Deleting job %s" % jobId )
  result = self._update( "DELETE FROM `tq_Jobs` WHERE JobId = %s" % jobId, conn = connObj )
  if not result[ 'OK' ]:
    return S_ERROR( "Could not delete job from task queue %s: %s" % ( jobId, result[ 'Message' ] ) )
  if result[ 'Value' ] == 0:
    #No job deleted
    return S_OK( False )
  #Always return S_OK() because job has already been taken out from the TQ;
  # the possibly-empty TQ itself is cleaned up later via the delayed action
  self.__deleteTQWithDelay.add( tqId, 300, ( tqId, tqOwnerDN, tqOwnerGroup ) )
  return S_OK( True )
def getTaskQueueForJob( self, jobId, connObj = False ):
  """
  Return TaskQueue for a given Job
  Return S_OK( [TaskQueueID] ) / S_ERROR
  """
  if not connObj:
    connResult = self._getConnection()
    if not connResult[ 'OK' ]:
      return S_ERROR( "Can't get TQ for job: %s" % connResult[ 'Message' ] )
    connObj = connResult[ 'Value' ]
  queryResult = self._query( 'SELECT TQId FROM `tq_Jobs` WHERE JobId = %s ' % jobId, conn = connObj )
  if not queryResult[ 'OK' ]:
    return queryResult
  rows = queryResult[ 'Value' ]
  if not rows:
    return S_ERROR( 'Not in TaskQueues' )
  # First column of the first (only) row
  return S_OK( rows[0][0] )
def getTaskQueueForJobs( self, jobIDs, connObj = False ):
  """
  Return TaskQueues for a given list of Jobs
  """
  if not connObj:
    connResult = self._getConnection()
    if not connResult[ 'OK' ]:
      return S_ERROR( "Can't get TQs for a job list: %s" % connResult[ 'Message' ] )
    connObj = connResult[ 'Value' ]
  jobString = ','.join( str( jobID ) for jobID in jobIDs )
  queryResult = self._query( 'SELECT JobId,TQId FROM `tq_Jobs` WHERE JobId in (%s) ' % jobString, conn = connObj )
  if not queryResult[ 'OK' ]:
    return queryResult
  if not queryResult[ 'Value' ]:
    return S_ERROR( 'Not in TaskQueues' )
  # Map each job to its task queue id
  resultDict = dict( ( int( jobID ), int( tqID ) ) for jobID, tqID in queryResult[ 'Value' ] )
  return S_OK( resultDict )
def __getOwnerForTaskQueue( self, tqId, connObj = False ):
  # Look up ( OwnerDN, OwnerGroup ) of a TQ; S_OK( False ) if the TQ does not exist
  result = self._query( "SELECT OwnerDN, OwnerGroup from `tq_TaskQueues` WHERE TQId=%s" % tqId, conn = connObj )
  if not result[ 'OK' ]:
    return result
  rows = result[ 'Value' ]
  if not rows:
    return S_OK( False )
  return S_OK( rows[0] )
def __deleteTQIfEmpty( self, args ):
  # Delayed-action callback: try the conditional TQ deletion up to 3 times,
  # logging each failed attempt.
  tqId, tqOwnerDN, tqOwnerGroup = args
  for _attempt in range( 3 ):
    result = self.deleteTaskQueueIfEmpty( tqId, tqOwnerDN, tqOwnerGroup )
    if result[ 'OK' ]:
      return
    gLogger.error( "Could not delete TQ %s: %s" % ( tqId, result[ 'Message' ] ) )
def deleteTaskQueueIfEmpty( self, tqId, tqOwnerDN = False, tqOwnerGroup = False, connObj = False ):
  """
  Try to delete a task queue if its empty

  :param tqId: id of the task queue
  :param tqOwnerDN: owner DN; looked up from the DB when not given
  :param tqOwnerGroup: owner group; looked up from the DB when not given
  :param connObj: optional DB connection to reuse
  :return: S_OK( True ) if deleted, S_OK( False ) if the TQ is missing,
           disabled or still has jobs, S_ERROR on DB failure
  """
  if not connObj:
    retVal = self._getConnection()
    if not retVal[ 'OK' ]:
      # Fixed copy-pasted message (was "Can't insert job")
      return S_ERROR( "Can't delete task queue: %s" % retVal[ 'Message' ] )
    connObj = retVal[ 'Value' ]
  if not tqOwnerDN or not tqOwnerGroup:
    retVal = self.__getOwnerForTaskQueue( tqId, connObj = connObj )
    if not retVal[ 'OK' ]:
      return retVal
    data = retVal[ 'Value' ]
    if not data:
      # TQ does not exist anymore
      return S_OK( False )
    tqOwnerDN, tqOwnerGroup = data
  # Only delete enabled TQs that have no jobs left
  sqlCmd = "DELETE FROM `tq_TaskQueues` WHERE Enabled >= 1 AND `tq_TaskQueues`.TQId = %s" % tqId
  sqlCmd = "%s AND `tq_TaskQueues`.TQId not in ( SELECT DISTINCT TQId from `tq_Jobs` )" % sqlCmd
  retVal = self._update( sqlCmd, conn = connObj )
  if not retVal[ 'OK' ]:
    return S_ERROR( "Could not delete task queue %s: %s" % ( tqId, retVal[ 'Message' ] ) )
  delTQ = retVal[ 'Value' ]
  if delTQ > 0:
    # Drop the multi-value satellite rows of the deleted TQ
    for mvField in multiValueDefFields:
      retVal = self._update( "DELETE FROM `tq_TQTo%s` WHERE TQId = %s" % ( mvField, tqId ), conn = connObj )
      if not retVal[ 'OK' ]:
        return retVal
    self.recalculateTQSharesForEntity( tqOwnerDN, tqOwnerGroup, connObj = connObj )
    self.log.info( "Deleted empty and enabled TQ %s" % tqId )
    return S_OK( True )
  return S_OK( False )
def deleteTaskQueue( self, tqId, tqOwnerDN = False, tqOwnerGroup = False, connObj = False ):
  """
  Try to delete a task queue even if it has jobs

  :param tqId: id of the task queue
  :param tqOwnerDN: owner DN; looked up from the DB when not given
  :param tqOwnerGroup: owner group; looked up from the DB when not given
  :param connObj: optional DB connection to reuse
  :return: S_OK( True ) if the TQ row was deleted, S_OK( False ) otherwise,
           S_ERROR on DB failure
  """
  self.log.info( "Deleting TQ %s" % tqId )
  if not connObj:
    retVal = self._getConnection()
    if not retVal[ 'OK' ]:
      # Fixed copy-pasted message (was "Can't insert job")
      return S_ERROR( "Can't delete task queue: %s" % retVal[ 'Message' ] )
    connObj = retVal[ 'Value' ]
  if not tqOwnerDN or not tqOwnerGroup:
    retVal = self.__getOwnerForTaskQueue( tqId, connObj = connObj )
    if not retVal[ 'OK' ]:
      return retVal
    data = retVal[ 'Value' ]
    if not data:
      # TQ does not exist anymore
      return S_OK( False )
    tqOwnerDN, tqOwnerGroup = data
  sqlCmd = "DELETE FROM `tq_TaskQueues` WHERE `tq_TaskQueues`.TQId = %s" % tqId
  retVal = self._update( sqlCmd, conn = connObj )
  if not retVal[ 'OK' ]:
    return S_ERROR( "Could not delete task queue %s: %s" % ( tqId, retVal[ 'Message' ] ) )
  delTQ = retVal[ 'Value' ]
  # Jobs and multi-value rows are removed unconditionally, even if no TQ row matched
  sqlCmd = "DELETE FROM `tq_Jobs` WHERE `tq_Jobs`.TQId = %s" % tqId
  retVal = self._update( sqlCmd, conn = connObj )
  if not retVal[ 'OK' ]:
    return S_ERROR( "Could not delete task queue %s: %s" % ( tqId, retVal[ 'Message' ] ) )
  for field in multiValueDefFields:
    retVal = self._update( "DELETE FROM `tq_TQTo%s` WHERE TQId = %s" % ( field, tqId ), conn = connObj )
    if not retVal[ 'OK' ]:
      return retVal
  if delTQ > 0:
    self.recalculateTQSharesForEntity( tqOwnerDN, tqOwnerGroup, connObj = connObj )
    return S_OK( True )
  return S_OK( False )
def getMatchingTaskQueues( self, tqMatchDict, negativeCond = False ):
  """ Alias of retrieveTaskQueuesThatMatch, kept so the name matches the
      method exposed by the Matcher.
  """
  return self.retrieveTaskQueuesThatMatch( tqMatchDict, negativeCond = negativeCond )
def getNumTaskQueues( self ):
  """
  Get the number of task queues in the system
  """
  result = self._query( "SELECT COUNT( TQId ) FROM `tq_TaskQueues`" )
  if not result[ 'OK' ]:
    return result
  # Single row, single column: the count itself
  return S_OK( result[ 'Value' ][0][0] )
def retrieveTaskQueuesThatMatch( self, tqMatchDict, negativeCond = False ):
  """
  Get the info of the task queues that match a resource
  """
  # numQueuesToGet = 0 disables the LIMIT clause: fetch all matches
  matchResult = self.matchAndGetTaskQueue( tqMatchDict, numQueuesToGet = 0, negativeCond = negativeCond )
  if not matchResult[ 'OK' ]:
    return matchResult
  tqIds = [ tqTuple[0] for tqTuple in matchResult[ 'Value' ] ]
  return self.retrieveTaskQueues( tqIds )
def retrieveTaskQueues( self, tqIdList = False ):
  """
  Get all the task queues

  :param tqIdList: optional list of TQ ids to restrict to; False means all,
                   an empty list short-circuits to S_OK( {} )
  :return: S_OK( { tqId : { 'Priority', 'Jobs', <single-value fields>,
                            <multi-value fields> : [ values ] } } )
  """
  sqlSelectEntries = [ "`tq_TaskQueues`.TQId", "`tq_TaskQueues`.Priority", "COUNT( `tq_Jobs`.TQId )" ]
  sqlGroupEntries = [ "`tq_TaskQueues`.TQId", "`tq_TaskQueues`.Priority" ]
  for field in singleValueDefFields:
    sqlSelectEntries.append( "`tq_TaskQueues`.%s" % field )
    sqlGroupEntries.append( "`tq_TaskQueues`.%s" % field )
  sqlCmd = "SELECT %s FROM `tq_TaskQueues`, `tq_Jobs`" % ", ".join( sqlSelectEntries )
  sqlTQCond = ""
  if tqIdList != False:
    if len( tqIdList ) == 0:
      return S_OK( {} )
    else:
      sqlTQCond += " AND `tq_TaskQueues`.TQId in ( %s )" % ", ".join( [ str( id_ ) for id_ in tqIdList ] )
  sqlCmd = "%s WHERE `tq_TaskQueues`.TQId = `tq_Jobs`.TQId %s GROUP BY %s" % ( sqlCmd,
                                                                              sqlTQCond,
                                                                              ", ".join( sqlGroupEntries ) )
  retVal = self._query( sqlCmd )
  if not retVal[ 'OK' ]:
    return S_ERROR( "Can't retrieve task queues info: %s" % retVal[ 'Message' ] )
  tqData = {}
  for record in retVal[ 'Value' ]:
    tqId = record[0]
    tqData[ tqId ] = { 'Priority' : record[1], 'Jobs' : record[2] }
    record = record[3:]
    for iP in range( len( singleValueDefFields ) ):
      tqData[ tqId ][ singleValueDefFields[ iP ] ] = record[ iP ]
  tqNeedCleaning = False
  for field in multiValueDefFields:
    table = "`tq_TQTo%s`" % field
    sqlCmd = "SELECT %s.TQId, %s.Value FROM %s" % ( table, table, table )
    retVal = self._query( sqlCmd )
    if not retVal[ 'OK' ]:
      # FIX: format string was "field % info: %s" with two arguments -- it raised
      # ValueError instead of producing the error message
      return S_ERROR( "Can't retrieve task queues field %s info: %s" % ( field, retVal[ 'Message' ] ) )
    for record in retVal[ 'Value' ]:
      tqId = record[0]
      value = record[1]
      if tqId not in tqData:
        if tqIdList == False or tqId in tqIdList:
          # Orphaned satellite row: the TQ itself is gone
          self.log.warn( "Task Queue %s is defined in field %s but does not exist, triggering a cleaning" % ( tqId, field ) )
          tqNeedCleaning = True
      else:
        if field not in tqData[ tqId ]:
          tqData[ tqId ][ field ] = []
        tqData[ tqId ][ field ].append( value )
  if tqNeedCleaning:
    self.cleanOrphanedTaskQueues()
  return S_OK( tqData )
def __updateGlobalShares( self ):
  """
  Update internal structure for shares
  """
  #Update group shares
  self.__groupShares = self.getGroupShares()
  #Apply corrections if enabled
  if self.isSharesCorrectionEnabled():
    result = self.getGroupsInTQs()
    if not result[ 'OK' ]:
      self.log.error( "Could not get groups in the TQs", result[ 'Message' ] )
      # FIX: without this early return the code below accessed result[ 'Value' ],
      # which does not exist on an error result (KeyError). Keep the uncorrected
      # shares instead.
      return
    activeGroups = result[ 'Value' ]
    newShares = {}
    # Only correct shares of groups that actually have TQs
    for group in activeGroups:
      if group in self.__groupShares:
        newShares[ group ] = self.__groupShares[ group ]
    newShares = self.__sharesCorrector.correctShares( newShares )
    # Merge the corrected values back into the full share table
    for group in self.__groupShares:
      if group in newShares:
        self.__groupShares[ group ] = newShares[ group ]
def recalculateTQSharesForAll( self ):
  """
  Recalculate all priorities for TQ's
  """
  if self.isSharesCorrectionEnabled():
    self.log.info( "Updating correctors state" )
    self.__sharesCorrector.update()
  self.__updateGlobalShares()
  self.log.info( "Recalculating shares for all TQs" )
  retVal = self._getConnection()
  if not retVal[ 'OK' ]:
    # Fixed copy-pasted message (was "Can't insert job")
    return S_ERROR( "Can't recalculate shares: %s" % retVal[ 'Message' ] )
  # NOTE(review): the connection obtained above is never passed to the queries
  # below -- confirm whether it should be forwarded via conn=
  result = self._query( "SELECT DISTINCT( OwnerGroup ) FROM `tq_TaskQueues`" )
  if not result[ 'OK' ]:
    return result
  for group in [ r[0] for r in result[ 'Value' ] ]:
    self.recalculateTQSharesForEntity( "all", group )
  return S_OK()
def recalculateTQSharesForEntity( self, userDN, userGroup, connObj = False ):
  """
  Recalculate the shares for a userDN/userGroup combo

  :param userDN: owner DN (the special value "all" is used by
                 recalculateTQSharesForAll to force a full-group update)
  :param userGroup: owner group whose share is (re)distributed
  :param connObj: optional DB connection to reuse
  :return: S_OK() / S_ERROR
  """
  self.log.info( "Recalculating shares for %s@%s TQs" % ( userDN, userGroup ) )
  if userGroup in self.__groupShares:
    share = self.__groupShares[ userGroup ]
  else:
    share = float( DEFAULT_GROUP_SHARE )
  if Properties.JOB_SHARING in CS.getPropertiesForGroup( userGroup ):
    #If group has JobSharing just set prio for that entry, userDN is irrelevant
    return self.__setPrioritiesForEntity( userDN, userGroup, share, connObj = connObj )
  selSQL = "SELECT OwnerDN, COUNT(OwnerDN) FROM `tq_TaskQueues` WHERE OwnerGroup='%s' GROUP BY OwnerDN" % ( userGroup )
  result = self._query( selSQL, conn = connObj )
  if not result[ 'OK' ]:
    return result
  #Get owners in this group and the amount of times they appear
  data = [ ( r[0], r[1] ) for r in result[ 'Value' ] if r ]
  numOwners = len( data )
  #If there are no owners do now
  if numOwners == 0:
    return S_OK()
  #Split the share amongst the number of owners
  share /= numOwners
  entitiesShares = dict( [ ( row[0], share ) for row in data ] )
  #If corrector is enabled let it work it's magic
  if self.isSharesCorrectionEnabled():
    entitiesShares = self.__sharesCorrector.correctShares( entitiesShares, group = userGroup )
  #Keep updating
  owners = dict( data )  # OwnerDN -> number of TQs owned in this group
  #IF the user is already known and has more than 1 tq, the rest of the users don't need to be modified
  #(The number of owners didn't change)
  if userDN in owners and owners[ userDN ] > 1:
    return self.__setPrioritiesForEntity( userDN, userGroup, entitiesShares[ userDN ], connObj = connObj )
  #Oops the number of owners may have changed so we recalculate the prio for all owners in the group
  # NOTE: the loop variable deliberately reuses (and shadows) the userDN parameter
  for userDN in owners:
    self.__setPrioritiesForEntity( userDN, userGroup, entitiesShares[ userDN ], connObj = connObj )
  return S_OK()
def __setPrioritiesForEntity( self, userDN, userGroup, share, connObj = False, consolidationFunc = "AVG" ):
  """
  Set the priority for a userDN/userGroup combo given a splitted share

  :param share: share already split for this owner within the group
  :param consolidationFunc: 'AVG' or 'SUM' -- how per-job RealPriority values
                            are folded into a per-TQ weight
  :return: S_OK() / S_ERROR
  """
  self.log.info( "Setting priorities to %s@%s TQs" % ( userDN, userGroup ) )
  tqCond = [ "t.OwnerGroup='%s'" % userGroup ]
  allowBgTQs = gConfig.getValue( "/Registry/Groups/%s/AllowBackgroundTQs" % userGroup, False )
  if Properties.JOB_SHARING not in CS.getPropertiesForGroup( userGroup ):
    # Without JobSharing the share is per-DN, so restrict to this owner
    tqCond.append( "t.OwnerDN='%s'" % userDN )
  tqCond.append( "t.TQId = j.TQId" )
  if consolidationFunc == 'AVG':
    selectSQL = "SELECT j.TQId, SUM( j.RealPriority )/COUNT(j.RealPriority) FROM `tq_TaskQueues` t, `tq_Jobs` j WHERE "
  elif consolidationFunc == 'SUM':
    selectSQL = "SELECT j.TQId, SUM( j.RealPriority ) FROM `tq_TaskQueues` t, `tq_Jobs` j WHERE "
  else:
    return S_ERROR( "Unknown consolidation func %s for setting priorities" % consolidationFunc )
  selectSQL += " AND ".join( tqCond )
  selectSQL += " GROUP BY t.TQId"
  result = self._query( selectSQL, conn = connObj )
  if not result[ 'OK' ]:
    return result
  # tqDict: TQId -> consolidated job priority for that TQ
  tqDict = dict( result[ 'Value' ] )
  if len( tqDict ) == 0:
    return S_OK()
  #Calculate Sum of priorities
  # Background TQs (weight <= 0.1) are excluded from the total when allowed
  totalPrio = 0
  for k in tqDict:
    if tqDict[k] > 0.1 or not allowBgTQs:
      totalPrio += tqDict[ k ]
  #Update prio for each TQ
  # NOTE(review): if allowBgTQs is set and ALL TQs are background, totalPrio
  # stays 0 -- looks like that would divide by zero; confirm it cannot happen
  for tqId in tqDict:
    if tqDict[ tqId ] > 0.1 or not allowBgTQs:
      prio = ( share / totalPrio ) * tqDict[ tqId ]
    else:
      prio = TQ_MIN_SHARE
    prio = max( prio, TQ_MIN_SHARE )
    tqDict[ tqId ] = prio
  #Generate groups of TQs that will have the same prio=sum(prios) maomenos
  result = self.retrieveTaskQueues( list( tqDict ) )
  if not result[ 'OK' ]:
    return result
  allTQsData = result[ 'Value' ]
  tqGroups = {}
  for tqid in allTQsData:
    tqData = allTQsData[ tqid ]
    # Drop fields that must not influence the grouping key
    for field in ( 'Jobs', 'Priority' ) + priorityIgnoredFields:
      if field in tqData:
        tqData.pop( field )
    # Build a stable hash string from the remaining definition fields
    tqHash = []
    for f in sorted( tqData ):
      tqHash.append( "%s:%s" % ( f, tqData[ f ] ) )
    tqHash = "|".join( tqHash )
    if tqHash not in tqGroups:
      tqGroups[ tqHash ] = []
    tqGroups[ tqHash ].append( tqid )
  tqGroups = [ tqGroups[ td ] for td in tqGroups ]
  #Do the grouping: every TQ in a group gets the summed priority of the group
  for tqGroup in tqGroups:
    totalPrio = 0
    if len( tqGroup ) < 2:
      continue
    for tqid in tqGroup:
      totalPrio += tqDict[ tqid ]
    for tqid in tqGroup:
      tqDict[ tqid ] = totalPrio
  #Group by priorities so each distinct value needs a single UPDATE
  prioDict = {}
  for tqId in tqDict:
    prio = tqDict[ tqId ]
    if prio not in prioDict:
      prioDict[ prio ] = []
    prioDict[ prio ].append( tqId )
  #Execute updates
  for prio in prioDict:
    tqList = ", ".join( [ str( tqId ) for tqId in prioDict[ prio ] ] )
    updateSQL = "UPDATE `tq_TaskQueues` SET Priority=%.4f WHERE TQId in ( %s )" % ( prio, tqList )
    self._update( updateSQL, conn = connObj )
  return S_OK()
def getGroupShares( self ):
  """
  Get all the shares as a DICT
  """
  result = gConfig.getSections( "/Registry/Groups" )
  # No registry sections -> no groups, empty share table
  groups = result[ 'Value' ] if result[ 'OK' ] else []
  shares = {}
  for group in groups:
    shares[ group ] = gConfig.getValue( "/Registry/Groups/%s/JobShare" % group, DEFAULT_GROUP_SHARE )
  return shares
|
marcelovilaca/DIRAC
|
WorkloadManagementSystem/DB/TaskQueueDB.py
|
Python
|
gpl-3.0
| 51,888
|
[
"DIRAC"
] |
976429c6ec00ecff9e67e622c9328760278fb403d550836d682fbe6f93a1329e
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import os
import Tix
import tkFileDialog, tkMessageBox
import subprocess
from datetime import date
from fpdf import FPDF
from utils import settings
def main(_):
    '''Currently aimed at producing product testing results for ASE
    products but could change it to work for any product in the
    future.

    :param _: application object; per usage here it must expose
        ``getExtWin`` (external-window factory), ``extwin`` (the Tix
        toplevel) and ``dbm`` (database manager) --
        NOTE(review): inferred from usage, confirm against caller.
    '''
    # Create new external window.
    if not _.getExtWin(_, title=u"Production Analysis Report"):
        return
    # --- Company selector (radio buttons) ---
    options = 'selectColor gold'
    company_w = Tix.Select(_.extwin, label=u'公司',
                 radio=True, orientation='vertical', options=options)
    # company_w.config(selectcolor='gold')
    company_w.add(u'台茂化工儀器原料行', text=u'台茂化工儀器原料行',
                  background='purple')
    # Default selection: first company
    company_w.invoke(u'台茂化工儀器原料行')
    company_w.add(u'富茂工業原料行', text=u'富茂工業原料行',
                  background='purple')
    company_w.add(u'永茂企業行', text=u'永茂企業行',
                  background='purple')
    company_w.grid(row=0, column=0, columnspan=3, sticky='ew')
    # --- Product picker; selecting a product triggers autofill() below ---
    pname_SV = Tix.StringVar()
    # pname_SV.trace('w', lambda: autofill())
    product_w = Tix.ComboBox(_.extwin, label=u'品名', dropdown=True,
                 editable=True,
                 variable=pname_SV, command=lambda *args: autofill())
    product_w.subwidget('entry').config(disabledforeground='black')
    # product_w.entry.configure(textvariable=pname_SV)
    # Populate dropdown from the ASE company group; MPN_list keeps the
    # manufacturer part numbers aligned with the listbox indices.
    ASE = _.dbm.get_cogroup(u'ASE')
    MPN_list = []
    for ea in ASE.products:
        product_w.insert('end', ea.name)
        MPN_list.append(ea.MPN)
    product_w.grid(row=1, column=0, columnspan=3, sticky='ew')
    # ASE_w = Tix.LabelEntry(_.extwin, label=u'料號')
    # ASE_w.grid(row=2, column=0, sticky='ew')
    # --- Free-text entries: lot number, quantity, sampler ---
    lot_w = Tix.LabelEntry(_.extwin, label=u'批號')
    lot_w.grid(row=3, column=0, columnspan=3, sticky='ew')
    qty_w = Tix.LabelEntry(_.extwin, label=u'數量')
    qty_w.grid(row=4, column=0, columnspan=3, sticky='ew')
    tester_w = Tix.LabelEntry(_.extwin, label=u'取樣人員')
    tester_w.grid(row=5, column=0, columnspan=3, sticky='ew')
    # --- 8x3 grid of test-parameter entries (item / spec / result) ---
    gridlabels = [u'檢 驗 項 目', u'規 格', u'檢 驗 結 果']
    for i in range(3):
        Tix.Label(_.extwin, text=gridlabels[i]).grid(row=6, column=i)
    egrid = []
    for i in range(8):
        egrid.append([])
        for j in range(3):
            egrid[i].append(Tix.Entry(_.extwin, justify='center'))
            egrid[i][j].grid(row=i+10, column=j)
    # Restore previous entries for a particular product
    def autofill():
        # Retrieve product record.
        MPN = MPN_list[product_w.slistbox.listbox.index('active')]
        p_rec = _.dbm.get_product(MPN)
        _dict = p_rec.json()
        if not _dict:
            return
        # Re-fill each widget only when a stored value exists
        if _dict.get('amount'):
            qty_w.entry.delete(0, 'end')
            qty_w.entry.insert(0, _dict['amount'])
        if _dict.get('tester'):
            tester_w.entry.delete(0, 'end')
            tester_w.entry.insert(0, _dict['tester'])
        if _dict.get('lot_no'):
            lot_w.entry.delete(0, 'end')
            lot_w.entry.insert(0, _dict['lot_no'])
        if _dict.get('test_params'):
            tp = _dict['test_params']
            for i in range(len(egrid)):
                for j in range(len(egrid[0])):
                    egrid[i][j].delete(0, 'end')
                    egrid[i][j].insert(0, tp[i][j])
    submit_w = Tix.Button(_.extwin, text=u'提交')
    submit_w['command'] = lambda: submit()
    submit_w.grid(row=50, column=0, columnspan=3)
    def submit():
        # Convert matrix of entry widgets into matrix of values.
        # NOTE: this mutates egrid in place, so submit() is one-shot.
        for i in range(8):
            for j in range(3):
                egrid[i][j] = egrid[i][j].get()
        # Retrieve product record.
        MPN = MPN_list[product_w.slistbox.listbox.index('active')]
        p_rec = _.dbm.get_product(MPN)
        _.dbm.session.commit()
        # Create dictioinary of values to pass to pdf writing method.
        _dict = dict(
            company=company_w['value'],
            product=product_w['selection'],
            ASE_pn=p_rec.ASE_PN,
            lot_no=lot_w.entry.get(),
            amount=qty_w.entry.get(),
            tester=tester_w.entry.get(),
            test_params=egrid,
        )
        create_qc_pdf(**_dict)
        # Save options as JSON in product database record.
        # company/product/ASE_pn are derivable from the record, so drop them
        del _dict['company']
        del _dict['product']
        del _dict['ASE_pn']
        # Update previous json
        p_rec.json(_dict)
        _.dbm.session.commit()
        _.extwin.destroy()
# Path of the TTF font embedded into the generated PDF.
# NOTE: the first assignment below is immediately overridden by the kaiu.ttf
# one a few lines down; the commented alternatives record fonts that were tried.
font = r'C:\Windows\Fonts\simfang.ttf'
#font = r'C:\Windows\Fonts\simkai.ttf'
#font = r'C:\Windows\Fonts\simhei.ttf'
font = r'C:\Windows\Fonts\kaiu.ttf' # Shows superscript 3 but not 名
#font = r'C:\Windows\Fonts\fireflysung.ttf' # Shows superscript 3 but not 名
class myPDF(FPDF):
    """FPDF subclass that draws the fixed layout of the QC report form:
    logo, titles, table borders and the footer form number.  All
    coordinates are in millimetres on an A4 page."""
    # Form identifier printed in the footer
    form_number = u'FM0716A'
    # Left and right margin
    lm = 30
    rm = 178
    def header(self):
        # Called automatically by FPDF on each add_page(); draws the whole
        # static page furniture.
        lm = self.lm
        rm = self.rm
        mw = rm-lm  # usable width between margins
        C = 'C'
        try:
            if True:
                # Company logo left-top corner and smaller
                self.image(u'png/logo.png', x=12, y=10, w=34)
            else:
                # Company logo centered and the top
                self.image(u'png/logo.png', x=84, y=8, w=40)
        except IOError as e:
            print e
        try:
            # Pre-scanned signature stamp near the bottom of the form
            self.image(u'png/signature1.png', x=48, y=240, w=24)
        except IOError as e:
            print e
        self.add_font(family=u'SimHei', style='B', fname=font, uni=True) # Only .ttf and not .ttc
        self.set_font(family=u'SimHei', style='B', size=16)
        # self.set_xy(lm, 25)
        # self.cell(mw, 10, u'台茂化工儀器原料行', align=C)
        # Client name
        self.set_font(u'SimHei', 'B', 16)
        self.set_xy(lm, 37)
        self.cell(mw, 8, u'產品檢驗報告', align=C)
        # Fill in headers
        self.set_font(u'SimHei', 'B', 13)
        self.set_fill_color(240,240,240)
        self.set_xy(30, 95) # Next cell auto-set to right
        self.cell(48, 10, txt=u'檢 驗 項 目', align=C, fill=True)
        self.cell(50, 10, txt=u'規 格', align=C, fill=True)
        self.cell(50, 10, txt=u'檢 驗 結 果', align=C, fill=True)
        self.set_xy(30, 210)
        self.cell(66-lm, 15, txt=u'結果研判:', align=C)
        self.cell(rm-66, 15, txt=u'符合規格', align=C)
        self.set_xy(30, 225)
        self.cell(104-lm, 10, txt=u'製表', align=C)
        self.cell(rm-104, 10, txt=u'檢驗人員', align=C)
        # Draw lines last, otherwise cell fill will overwrite.
        # Top table borders
        self.rect(lm, 50, mw, 40) # x, y, w, h
        for ea in [65,73,81]:
            self.line(lm, ea, rm, ea) # x1, y1, x2, y2
        self.line(104, 65, 104, 90)
        # Middle table borders
        self.rect(lm, 95, mw, 205-95) # x, y, w, h
        for ea in range(105, 205, 10):
            self.line(lm, ea, rm, ea) # x1, y1, x2, y2
        self.line(78, 95, 78, 205)
        self.line(128, 95, 128, 205)
        # Bottom table borders
        self.rect(lm, 210, mw, 50) # x, y, w, h
        for ea in [225,235]:
            self.line(lm, ea, rm, ea) # x1, y1, x2, y2
        self.line(66, 210, 66, 225)
        self.line(104, 225, 104, 260)
    def xycell(self, x, y, *args, **kwargs):
        # Method to both set starting position and write cell in one command.
        # Preceed 'cell' parameters with x and y position parameters.
        self.set_xy(x, y)
        self.cell(*args, **kwargs)
    def footer(self):
        # Called automatically by FPDF; stamps the form number near the
        # bottom-right of every page (negative y counts from page bottom).
        self.set_font(family=u'SimHei', style='B', size=12)
        # self.set_xy(155, -32)
        # self.cell(21, 5, txt=self.form_number, align='C')
        self.xycell(155, -32, 21, 5, txt=self.form_number, align='C')
# Keyword arguments accepted by create_qc_pdf, in form-layout order.
# NOTE(review): not referenced anywhere in this file -- presumably used by
# callers importing this module; confirm before removing.
headers = ['company',
           'product',
           'ASE_pn',
           'lot_no',
           'exp_period',
           'amount',
           'tester',
           'test_params']
def create_qc_pdf(**kwargs):
    """Build the QC report PDF from the submitted form values, ask the user
    where to save it, write it and try to open it with the system viewer.

    Recognised keys: company, product, ASE_pn, lot_no, exp_period, amount,
    tester, test_params (list of [item, spec, result] triples).  Missing
    keys get placeholder defaults.  The manufacture/test dates are decoded
    from the lot number, which encodes a date as <prefix>YYMMDD --
    NOTE(review): inferred from the slicing below, confirm the lot format.
    """
    try:
        # Fill in defaults for anything the form left blank
        kwargs['company'] = kwargs.get('company', u'台茂化工儀器原料行')
        kwargs['product'] = kwargs.get('product', u'product name?')
        kwargs['ASE_pn'] = kwargs.get('ASE_pn', u'ASE PN?')
        if not kwargs.get('lot_no'):
            kwargs['make_date'] = date.today()
            kwargs['test_date'] = date.today()
            kwargs['lot_no'] = u'lot number?'
        else:
            # Decode YYMMDD from positions 1..6 of the lot number
            year = 2000 + int(kwargs['lot_no'][1:3])
            month = int(kwargs['lot_no'][3:5])
            day = int(kwargs['lot_no'][5:7])
            kwargs['make_date'] = date(year, month, day)
            kwargs['test_date'] = date(year, month, day)
        kwargs['exp_period'] = kwargs.get('exp_period', u'一年')
        kwargs['amount'] = kwargs.get('amount', u'amount?')
        kwargs['tester'] = kwargs.get('tester', u'tester?')
        kwargs['test_params'] = kwargs.get('test_params', [])
    except Exception as e:
        # Malformed lot number (non-numeric date part) ends up here
        print e
        return
    # Set placement and style of values
    tm_branch = dict(x=30, y=25, w=178-30, h=10, align='C')
    product_name = dict(x=31, y=50, w=104-31, h=15, align='L')
    product_ASE_pn = dict(x=105, y=50, w=104-31, h=15, align='L')
    make_date = dict(x=31, y=65, w=104-31, h=8, align='L')
    test_date = dict(x=31, y=73, w=104-31, h=8, align='L')
    exp_period = dict(x=31, y=81, w=104-31, h=9, align='L')
    lot_no = dict(x=105, y=65, w=104-31, h=8, align='L')
    amount = dict(x=105, y=73, w=104-31, h=8, align='L')
    tester = dict(x=105, y=81, w=104-31, h=9, align='L')
    # Create PDF
    # NOTE: this local deliberately(?) shadows the imported FPDF class name;
    # it is a myPDF instance from here on.
    FPDF = myPDF('P','mm','A4')
    FPDF.set_compression(False)
    FPDF.set_creator('TM_2014')
    FPDF.set_title(u'Quality inspection report for lot# {}'.format(kwargs['lot_no']))
    FPDF.set_author(u'Taimau Chemicals')
    FPDF.set_subject(kwargs['lot_no'])
    # FPDF.set_subject(u'{} {}'.format(kwargs['product'], kwargs['lot_no']), isUTF8=True)
    FPDF.alias_nb_pages()
    FPDF.add_page() # Adding a page also creates a page break from last page
    FPDF.add_font(family=u'SimHei', style='', fname=font, uni=True) # Only .ttf and not .ttc
    FPDF.set_font(family=u'SimHei', style='', size=16)
    # Header fields
    FPDF.xycell(txt=kwargs['company'], **tm_branch)
    FPDF.set_font(family=u'SimHei', style='B', size=13)
    FPDF.xycell(txt=u'產品: {}'.format(kwargs['product']), **product_name)
    FPDF.xycell(txt=u'料號: {}'.format(kwargs['ASE_pn']), **product_ASE_pn)
    FPDF.xycell(txt=u'製造日期: {}'.format(kwargs['make_date']), **make_date)
    FPDF.xycell(txt=u'檢驗日期: {}'.format(kwargs['test_date']), **test_date)
    FPDF.xycell(txt=u'保存期間: {}'.format(kwargs['exp_period']), **exp_period)
    FPDF.xycell(txt=u'批號: {}'.format(kwargs['lot_no']), **lot_no)
    FPDF.xycell(txt=u'生產數量: {}'.format(kwargs['amount']), **amount)
    FPDF.xycell(txt=u'取樣人員: {}'.format(kwargs['tester']), **tester)
    # Test-parameter table body; stop at the first fully blank row
    FPDF.set_left_margin(30)
    FPDF.set_xy(x=30, y=105)
    for (a, b, c) in kwargs['test_params']:
        if a+b+c == u'':
            break
        FPDF.cell(49, 10, txt=a, align='C')
        FPDF.cell(49, 10, txt=b, align='C')
        FPDF.cell(49, 10, txt=c, align='C')
        FPDF.ln()
    FPDF.cell(49)
    FPDF.cell(49, 10, txt=u'以下空白', align='C')
    # Ask the user for a destination path (default: Desktop or saved setting)
    initialfilename = u'QC_{}_{}'.format(kwargs['product'], kwargs['lot_no'])
    FILE_OPTS = dict(
        title = u'PDF name and location.',
        defaultextension = '.pdf',
        initialdir = os.path.expanduser('~') + '/Desktop/',
        initialfile = initialfilename,
        )
    if settings.load().get(u'pdfpath'):
        FILE_OPTS['initialdir'] = settings.load()[u'pdfpath']
    outfile = os.path.normpath(tkFileDialog.asksaveasfilename(**FILE_OPTS))
    if os.path.exists(outfile):
        os.remove(outfile)
    if outfile and not os.path.exists(outfile):
        FPDF.output(name=outfile)
        # Best-effort chain of launchers for the generated PDF; each fallback
        # swallows its failure and tries the next method (Windows-specific).
        try:
            subprocess.call(['start', outfile],
                            shell=True)
            return
        except:
            pass
        try:
            print u'Trying alternate subprocess command.'
            subprocess.call(['start', '/D'] +
                            list(os.path.split(outfile)),
                            shell=True)
            return
        except UnicodeEncodeError:
            pass
        try:
            os.startfile(outfile)
            return
        except:
            pass
        print u'Failed to autoload PDF after creation.'
        return
    else:
        head = u'Cancelled'
        body = u'Canceled PDF creation.'
        tkMessageBox.showinfo(head, body)
#if __name__ == '__main__':
# create_qc_pdf(product=u'Nitrogen 60%',
# company=u'台茂化工儀器原料行',
# ASE_pn=u'2013-001-00816-000',)
|
Ripley6811/TAIMAU
|
src/pdf_tools/product_QC_report.py
|
Python
|
gpl-2.0
| 12,995
|
[
"ASE"
] |
dcd3ef9579442273fa50f263072c9b94d46bf714a5cc3de8786800a3109bb3fe
|
##
# Copyright 2013-2020 Ghent University
#
# This file is part of EasyBuild,
# originally created by the HPC team of Ghent University (http://ugent.be/hpc/en),
# with support of Ghent University (http://ugent.be/hpc),
# the Flemish Supercomputer Centre (VSC) (https://www.vscentrum.be),
# Flemish Research Foundation (FWO) (http://www.fwo.be/en)
# and the Department of Economy, Science and Innovation (EWI) (http://www.ewi-vlaanderen.be/en).
#
# https://github.com/easybuilders/easybuild
#
# EasyBuild is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation v2.
#
# EasyBuild is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with EasyBuild. If not, see <http://www.gnu.org/licenses/>.
##
"""
EasyBuild support for building and installing PSI, implemented as an easyblock
@author: Kenneth Hoste (Ghent University)
@author: Ward Poelmans (Ghent University)
"""
from distutils.version import LooseVersion
import glob
import os
import shutil
import tempfile
import easybuild.tools.environment as env
from easybuild.easyblocks.generic.cmakemake import CMakeMake
from easybuild.easyblocks.generic.configuremake import ConfigureMake
from easybuild.framework.easyconfig import BUILD
from easybuild.tools.build_log import EasyBuildError
from easybuild.tools.modules import get_software_root
from easybuild.tools.run import run_cmd
class EB_PSI(CMakeMake):
"""
Support for building and installing PSI
"""
def __init__(self, *args, **kwargs):
    """Initialize class variables custom to PSI."""
    super(EB_PSI, self).__init__(*args, **kwargs)
    # Source/object install locations; populated later (during configuration)
    # and exported so PSI plugins can find the build tree after install.
    self.psi_srcdir = None
    self.install_psi_objdir = None
    self.install_psi_srcdir = None
@staticmethod
def extra_options():
    """Extra easyconfig parameters specific to PSI.

    Extends the CMakeMake parameter set: forces the unit-test run command
    and disables out-of-source builds (unsupported by PSI).
    """
    extra_vars = CMakeMake.extra_options()
    extra_vars.update({
        # always include running PSI unit tests (takes about 2h or less)
        'runtest': ["tests TESTFLAGS='-u -q'", "Run tests included with PSI, without interruption.", BUILD],
    })
    # Doesn't work with out-of-source build
    extra_vars['separate_build_dir'][0] = False
    return extra_vars
def configure_step(self):
"""
Configure build outside of source directory.
"""
try:
objdir = os.path.join(self.builddir, 'obj')
os.makedirs(objdir)
os.chdir(objdir)
except OSError as err:
raise EasyBuildError("Failed to prepare for configuration of PSI build: %s", err)
env.setvar('F77FLAGS', os.getenv('F90FLAGS'))
# In order to create new plugins with PSI, it needs to know the location of the source
# and the obj dir after install. These env vars give that information to the configure script.
self.psi_srcdir = os.path.basename(self.cfg['start_dir'].rstrip(os.sep))
self.install_psi_objdir = os.path.join(self.installdir, 'obj')
self.install_psi_srcdir = os.path.join(self.installdir, self.psi_srcdir)
env.setvar('PSI_OBJ_INSTALL_DIR', self.install_psi_objdir)
env.setvar('PSI_SRC_INSTALL_DIR', self.install_psi_srcdir)
# explicitely specify Python binary to use
pythonroot = get_software_root('Python')
if not pythonroot:
raise EasyBuildError("Python module not loaded.")
# pre 4.0b5, they were using autotools, on newer it's CMake
if LooseVersion(self.version) <= LooseVersion("4.0b5") and self.name == "PSI":
# Use EB Boost
boostroot = get_software_root('Boost')
if not boostroot:
raise EasyBuildError("Boost module not loaded.")
self.log.info("Using configure based build")
env.setvar('PYTHON', os.path.join(pythonroot, 'bin', 'python'))
env.setvar('USE_SYSTEM_BOOST', 'TRUE')
if self.toolchain.options.get('usempi', None):
# PSI doesn't require a Fortran compiler itself, but may require it to link to BLAS/LAPACK correctly
# we should always specify the sequential Fortran compiler,
# to avoid problems with -lmpi vs -lmpi_mt during linking
fcompvar = 'F77_SEQ'
else:
fcompvar = 'F77'
# update configure options
# using multi-threaded BLAS/LAPACK is important for performance,
# cfr. http://sirius.chem.vt.edu/psi4manual/latest/installfile.html#sec-install-iii
opt_vars = [
('cc', 'CC'),
('cxx', 'CXX'),
('fc', fcompvar),
('libdirs', 'LDFLAGS'),
('blas', 'LIBBLAS_MT'),
('lapack', 'LIBLAPACK_MT'),
]
for (opt, var) in opt_vars:
self.cfg.update('configopts', "--with-%s='%s'" % (opt, os.getenv(var)))
# -DMPICH_IGNORE_CXX_SEEK dances around problem with order of stdio.h and mpi.h headers
# both define SEEK_SET, this makes the one for MPI be ignored
self.cfg.update('configopts', "--with-opt='%s -DMPICH_IGNORE_CXX_SEEK'" % os.getenv('CFLAGS'))
# specify location of Boost
self.cfg.update('configopts', "--with-boost=%s" % boostroot)
# enable support for plugins
self.cfg.update('configopts', "--with-plugins")
ConfigureMake.configure_step(self, cmd_prefix=self.cfg['start_dir'])
else:
self.log.info("Using CMake based build")
self.cfg.update('configopts', ' -DPYTHON_INTERPRETER=%s' % os.path.join(pythonroot, 'bin', 'python'))
if self.name == 'PSI4' and LooseVersion(self.version) >= LooseVersion("1.2"):
self.log.info("Remove the CMAKE_BUILD_TYPE test in PSI4 source and the downloaded dependencies!")
self.log.info("Use PATCH_COMMAND in the corresponding CMakeLists.txt")
self.cfg['build_type'] = 'EasyBuildRelease'
if self.toolchain.options.get('usempi', None):
self.cfg.update('configopts', " -DENABLE_MPI=ON")
if get_software_root('imkl'):
self.cfg.update('configopts', " -DENABLE_CSR=ON -DBLAS_TYPE=MKL")
if self.name == 'PSI4':
pcmsolverroot = get_software_root('PCMSolver')
if pcmsolverroot:
self.cfg.update('configopts', " -DENABLE_PCMSOLVER=ON")
if LooseVersion(self.version) < LooseVersion("1.2"):
self.cfg.update('configopts', " -DPCMSOLVER_ROOT=%s" % pcmsolverroot)
else:
self.cfg.update('configopts', " -DCMAKE_INSIST_FIND_PACKAGE_PCMSolver=ON "
"-DPCMSolver_DIR=%s/share/cmake/PCMSolver" % pcmsolverroot)
chempsroot = get_software_root('CheMPS2')
if chempsroot:
self.cfg.update('configopts', " -DENABLE_CHEMPS2=ON")
if LooseVersion(self.version) < LooseVersion("1.2"):
self.cfg.update('configopts', " -DCHEMPS2_ROOT=%s" % chempsroot)
else:
self.cfg.update('configopts', " -DCMAKE_INSIST_FIND_PACKAGE_CheMPS2=ON "
"-DCheMPS2_DIR=%s/share/cmake/CheMPS2" % chempsroot)
# Be aware, PSI4 wants exact versions of the following deps! built with CMake!!
# If you want to use non-CMake build versions, the you have to provide the
# corresponding Find<library-name>.cmake scripts
# In PSI4 version 1.2.1, you can check the corresponding CMakeLists.txt file
# in external/upstream/<library-name>/
if LooseVersion(self.version) >= LooseVersion("1.2"):
for dep in ['libxc', 'Libint', 'pybind11', 'gau2grid']:
deproot = get_software_root(dep)
if deproot:
self.cfg.update('configopts', " -DCMAKE_INSIST_FIND_PACKAGE_%s=ON" % dep)
dep_dir = os.path.join(deproot, 'share', 'cmake', dep)
self.cfg.update('configopts', " -D%s_DIR=%s " % (dep, dep_dir))
CMakeMake.configure_step(self, srcdir=self.cfg['start_dir'])
def install_step(self):
"""Custom install procedure for PSI."""
super(EB_PSI, self).install_step()
# the obj and unpacked sources must remain available for working with plugins
try:
for subdir in ['obj', self.psi_srcdir]:
# copy symlinks as symlinks to work around broken symlinks
shutil.copytree(os.path.join(self.builddir, subdir), os.path.join(self.installdir, subdir),
symlinks=True)
except OSError as err:
raise EasyBuildError("Failed to copy obj and unpacked sources to install dir: %s", err)
def test_step(self):
"""
Run the testsuite of PSI4
"""
testdir = tempfile.mkdtemp()
env.setvar('PSI_SCRATCH', testdir)
if self.name == 'PSI4' and LooseVersion(self.version) >= LooseVersion("1.2"):
if self.cfg['runtest']:
paracmd = ''
# Run ctest parallel, but limit to maximum 4 jobs (in case of slow disks)
if self.cfg['parallel']:
if self.cfg['parallel'] > 4:
paracmd = '-j 4'
else:
paracmd = "-j %s" % self.cfg['parallel']
cmd = "ctest %s %s" % (paracmd, self.cfg['runtest'])
run_cmd(cmd, log_all=True, simple=False)
else:
super(EB_PSI, self).test_step()
try:
shutil.rmtree(testdir)
except OSError as err:
raise EasyBuildError("Failed to remove test directory %s: %s", testdir, err)
def sanity_check_step(self):
"""Custom sanity check for PSI."""
custom_paths = {
'files': ['bin/psi4'],
'dirs': ['include', ('share/psi', 'share/psi4')],
}
super(EB_PSI, self).sanity_check_step(custom_paths=custom_paths)
def make_module_extra(self):
"""Custom variables for PSI module."""
txt = super(EB_PSI, self).make_module_extra()
share_dir = os.path.join(self.installdir, 'share')
if os.path.exists(share_dir):
psi4datadir = glob.glob(os.path.join(share_dir, 'psi*'))
if len(psi4datadir) == 1:
txt += self.module_generator.set_environment('PSI4DATADIR', psi4datadir[0])
else:
raise EasyBuildError("Failed to find exactly one PSI4 data dir: %s", psi4datadir)
return txt
|
pescobar/easybuild-easyblocks
|
easybuild/easyblocks/p/psi.py
|
Python
|
gpl-2.0
| 11,207
|
[
"Psi4"
] |
b45dea1850dc551137dd3b4580887c121668765844ac1c0fc929dacf4fcdd21e
|
import os
import sys
import argparse
import time
import glob
from farmpy import lsf
from farm_blast import blast, utils
class Error(Exception):
    """Base exception raised for errors in the farm_blast pipeline."""
# Command-line interface for the farm_blast tool.
parser = argparse.ArgumentParser(
    description = 'Run BLAST in parallel on the farm',
    usage = '%(prog)s [options] <reference> <query>')
# hidden options (help suppressed): used internally when the pipeline re-invokes
# this script on the farm, and for testing
parser.add_argument('--no_bsub', action='store_true', help=argparse.SUPPRESS)
parser.add_argument('--fix_coords_in_blast_output', action='store_true', help=argparse.SUPPRESS)
parser.add_argument('--split_bases_tolerance', type=int, default=1000, help=argparse.SUPPRESS)
parser.add_argument('--test', action='store_true', help=argparse.SUPPRESS)
# options common to all BLAST flavours
common_blast_group = parser.add_argument_group('Common BLAST options')
common_blast_group.add_argument('--blastall', action='store_true', help='Use blastall instead of the default blast+')
common_blast_group.add_argument('-p', '--blast_type', help='Type of blast to run [%(default)s]', choices=sorted(list(blast.ref_not_protein_types)) + sorted(list(blast.ref_protein_types)), default='blastn')
common_blast_group.add_argument('-e', '--evalue', help='Set the evalue cutoff')
common_blast_group.add_argument('-W', '--word_size', help='Set the word size')
common_blast_group.add_argument('--no_filter', action='store_true', help='Do not filter query sequence (equivalent to -F F in blastall, -dust no in blast+). By default, the query will be filtered')
# options controlling how jobs are submitted to LSF
bsub_group = parser.add_argument_group('Bsub options')
bsub_group.add_argument('-q', '--bsub_queue', help='Queue in which all jobs are run [%(default)s]', default = 'normal', metavar='Queue_name')
bsub_group.add_argument('--blast_mem', type=float, help='Memory limit in GB for the farm jobs that run BLAST. Default is 0.5, except set to 5 if blastall tblastx is used. Defaults doubled if --no_filter used', metavar='FLOAT', default=None)
bsub_group.add_argument('--bsub_name_prefix', help='Set the prefix of the names of the bsub jobs', default=None)
advanced_opts_group = parser.add_argument_group('Advanced options')
advanced_opts_group.add_argument('--act', action='store_true', help='Make ACT-friendly blast file, by concatenating all reference sequences together and all query sequences together before blasting.')
advanced_opts_group.add_argument('--blast_options', help='Put any extra options to the blast call (i.e. blastall, blastn, blastx ...etc) in quotes. e.g. --blast_options "-r 2". Whatever you put in here is NOT sanity checked.', default = '', metavar='"options in quotes"')
advanced_opts_group.add_argument('--debug', action='store_true', help='Just make scripts etc but do not run anything')
advanced_opts_group.add_argument('--outdir', help='Name of output directory (must not exist already)', metavar='output directory', default=None)
advanced_opts_group.add_argument('--split_bases', type=int, help='Number of bases in each split file of query. Default is 500000, except set to 200000 if blastall tblastx is used', metavar='INT', default=None)
# required positional arguments
parser.add_argument('reference', help='Name of reference file. Does not need to be indexed already. If not indexed, can be any format from FASTA, FASTQ, GFF3, EMBL, Phylip, GBK', metavar='reference')
parser.add_argument('query', help='Name of query file. Can be any format from FASTA, FASTQ, GFF3, EMBL, Phylip, GBK', metavar='query')
def get_opts(args=None):
    """Parse command-line options; reads sys.argv when *args* is None."""
    opts = parser.parse_args(args=args)
    return opts
class Pipeline:
    """Farm BLAST pipeline.

    Splits the query into chunks, runs one BLAST job per chunk as an LSF
    job array, then combines the per-chunk output into a single gzipped
    file (blast.out.gz) in the output directory.

    Fix vs original: the four bare ``except:`` clauses (which also swallowed
    KeyboardInterrupt/SystemExit and hid the real error) are narrowed to
    ``OSError`` and chained with ``raise ... from err``.
    """

    def __init__(self, options, farm_blast_script):
        """Build all job and script state from parsed options.

        :param options: argparse.Namespace as returned by get_opts()
        :param farm_blast_script: path to the farm_blast entry-point script
        """
        if options.outdir is None:
            if options.blastall:
                version = 'blastall'
            else:
                version = 'blast_plus'
            options.outdir = '.'.join(['Farm_blast', os.path.basename(options.reference), os.path.basename(options.query), version, options.blast_type, 'out'])
        self.outdir = os.path.abspath(options.outdir)
        self.reference = os.path.abspath(options.reference)
        self.query = os.path.abspath(options.query)
        self.bsub_queue = options.bsub_queue
        self.farm_blast_script = farm_blast_script
        self.test = options.test
        self.union_for_act = options.act
        # BLAST runner template; 'INDEX' placeholders are substituted per array element
        self.blast = blast.Blast(
            self.reference,
            'query.split.INDEX',
            outfile='tmp.array.out.INDEX',
            blastall=options.blastall,
            blast_type=options.blast_type,
            evalue=options.evalue,
            word_size=options.word_size,
            no_filter=options.no_filter,
            extra_options=options.blast_options
        )
        # the three shell scripts driving the pipeline stages
        self.setup_script = '01.setup.sh'
        self.start_array_script = '02.run_array.sh'
        self.combine_script = '03.combine.sh'
        if options.bsub_name_prefix is None:
            self.bsub_name_prefix = 'farm_blast:' + self.outdir
        else:
            self.bsub_name_prefix = options.bsub_name_prefix
        if options.no_bsub:
            # run everything locally instead of submitting to LSF
            self.no_bsub = True
            self.memory_units = 'MB'
        else:
            self.no_bsub = False
            self.memory_units = None
        self.debug = options.debug
        self.split_bases_tolerance = options.split_bases_tolerance
        # intermediate files removed by the final combine job
        self.files_to_delete = [
            'tmp.array.*',
            'query.split.*',
            'blast.out.tmp.gz',
            '02.array.id',
            '03.combine.sh.id',
        ]
        # default memory: blastall tblastx needs far more; unfiltered queries double it
        if not options.blast_mem:
            if self.blast.blastall and self.blast.blast_type == 'tblastx':
                self.array_mem = 5
            else:
                self.array_mem = 0.5
            if self.blast.no_filter:
                self.array_mem *= 2
        else:
            self.array_mem = options.blast_mem
        # default chunk size: smaller chunks for the slow blastall tblastx
        if not options.split_bases:
            if self.blast.blastall and self.blast.blast_type == 'tblastx':
                self.split_bases = 200000
            else:
                self.split_bases = 500000
        else:
            self.split_bases = options.split_bases

    def _make_setup_script(self, script_name=None):
        """Write stage-1 script: index/reformat the reference and chunk the query."""
        if script_name is None:
            script_name = self.setup_script
        try:
            f = open(script_name, 'w')
        except OSError as err:
            raise Error('Error opening setup script "' + script_name + '" for writing') from err
        print('set -e', file=f)
        if not self.blast.blast_db_exists() or self.union_for_act:
            if self.union_for_act:
                # concatenate all reference sequences into one for ACT-friendly output
                print('fastaq merge', self.reference, '- |',
                      'fastaq to_fasta -s - reference.fa', file=f)
            else:
                print('fastaq to_fasta -s', self.reference, 'reference.fa', file=f)
            self.reference = 'reference.fa'
            self.blast.reference = self.reference
            print(self.blast.format_database_command(), file=f)
            self.files_to_delete.append('reference.*')
        # blast strips off everything after the first whitespace, so do this
        # before chunking so names stay consistent with query fasta and in blast output
        if self.union_for_act:
            print('fastaq merge', self.query, '- |',
                  'fastaq to_fasta -s - - |', end=' ', file=f)
        else:
            print('fastaq to_fasta -s', self.query, '- |', end=' ', file=f)
        print('fastaq chunker --skip_all_Ns', '-', 'query.split', self.split_bases, self.split_bases_tolerance, file=f)
        f.close()

    def _make_setup_job(self):
        """Create the LSF job that runs the setup script."""
        self.setup_job = lsf.Job(
            self.setup_script + '.o',
            self.setup_script + '.e',
            self.bsub_name_prefix + '.setup',
            self.bsub_queue,
            1,
            'bash ' + self.setup_script,
            memory_units=self.memory_units,
        )

    def _make_array_job(self):
        """Create the BLAST job array; its size ($n) is filled in at submit time."""
        self.array_job = lsf.Job(
            'tmp.array.o',
            'tmp.array.e',
            self.bsub_name_prefix + '.array',
            self.bsub_queue,
            self.array_mem,
            self.blast.get_run_command(),
            array_start=1,
            array_end=r'''$n''',
            memory_units=self.memory_units,
            max_array_size=100
        )

    def _make_start_array_job(self):
        """Create the small job that submits the array once chunking is done."""
        self.start_array_job = lsf.Job(
            self.start_array_script + '.o',
            self.start_array_script + '.e',
            self.bsub_name_prefix + '.start_array',
            'small',
            0.1,
            'bash ' + self.start_array_script,
            no_resources=True  # we have to do this otherwise bmod fails! LSF bug?
        )

    def _make_start_array_script(self, script_name=None):
        """Write stage-2 script: count chunks, submit the array, repoint the
        combine job's dependency at the freshly submitted array."""
        if script_name is None:
            script_name = self.start_array_script
        try:
            f = open(script_name, 'w')
        except OSError as err:
            raise Error('Error writing script "' + script_name + '"') from err
        print('set -e', file=f)
        print(r'''n=`ls query.split.* | grep -v coords | wc -l`''', file=f)
        print(str(self.array_job) + r''' | awk '{print substr($2,2,length($2)-2)}' > 02.array.id''', file=f)
        print(r'''array_id=`cat 02.array.id`
combine_id=`cat ''' + self.combine_script + r'''.id`
bmod -w "done($array_id)" $combine_id''', file=f)
        f.close()

    def _make_combine_job(self):
        """Create the LSF job that runs the combine script."""
        self.combine_job = lsf.Job(
            self.combine_script + '.o',
            self.combine_script + '.e',
            self.bsub_name_prefix + '.combine',
            self.bsub_queue,
            0.5,
            'bash ' + self.combine_script,
            memory_units=self.memory_units,
            threads=2
        )

    def _make_combine_script(self, script_name=None):
        """Write stage-3 script: merge array output, fix coords, tidy up."""
        if script_name is None:
            script_name = self.combine_script
        try:
            f = open(script_name, 'w')
        except OSError as err:
            raise Error('Error writing script "' + script_name + '"') from err
        if not self.no_bsub:
            print('set -e', file=f)
        print(r'''cat tmp.array.e.* > 02.array.e
cat tmp.array.o.* > 02.array.o
cat tmp.array.out.* | gzip -9 -c > blast.out.tmp.gz''', file=f)
        if self.test:
            # make the in-repo module importable when running the test version
            p = os.path.dirname(self.farm_blast_script)
            p = os.path.join(p, os.pardir)
            p = os.path.normpath(p)
            print('PYTHONPATH=' + p + ':$PYTHONPATH', file=f)
        if self.test:
            print(self.farm_blast_script, '--test --fix_coords_in_blast_output x x', file=f)
        else:
            print('farm_blast --fix_coords_in_blast_output x x', file=f)
        print('rm', ' '.join(self.files_to_delete), file=f)
        print('touch FINISHED', file=f)
        f.close()

    def run(self):
        '''Runs the whole blast pipeline'''
        # Here's the fun part: the first job splits the query file into
        # chunks. Don't know the number of chunks until that job has finished.
        # Therefore don't know the size of the job array until the chunking
        # job has finished.
        #
        # Here's what's going to happen:
        # 1. Submit chunking job.
        # 2. Submit job to submit an array, to run when Job1 finishes.
        #    - this is NOT the array itself!
        # 3. Submit job that combines the results of the job array and tidies
        #    up files. It is initially set to run when the job that starts
        #    the array finishes.
        # 4. When the job that starts the array actually runs, it does this:
        #    - figures out the size of the job array and submits it
        #    - changes the dependency of the last combine job, so that
        #      the combine job runs when the array has finished
        # 5. When the array finishes, Job3 will then run.
        try:
            os.mkdir(self.outdir)
        except OSError:
            print('Error making output directory', self.outdir, file=sys.stderr)
            sys.exit(1)
        original_dir = os.getcwd()
        os.chdir(self.outdir)
        self._make_setup_script()
        self._make_setup_job()
        self._make_array_job()
        self._make_start_array_script()
        self._make_start_array_job()
        self._make_combine_script()
        self._make_combine_job()
        if self.debug:
            # scripts written; stop before submitting/running anything
            sys.exit()
        if self.no_bsub:
            # local mode: run each stage synchronously, in order
            self.setup_job.run_not_bsubbed()
            # get size of job array
            files_count = len(glob.glob('query.split.*')) - 1
            self.array_job.array_end = files_count
            print(self.array_job)
            print(self.array_job.array_start)
            print(self.array_job.array_end)
            print(self.array_job._make_command_string().replace('\$LSB_JOBINDEX', '$LSB_JOBINDEX'))
            self.array_job.run_not_bsubbed()
            # a little hack here to make the farm_blast script run
            this_script = os.path.realpath(__file__)
            this_script_dir = os.path.dirname(this_script)
            module_root_dir = os.path.join(this_script_dir, '..')
            module_root_dir = os.path.normpath(module_root_dir)
            os.environ["PATH"] = os.path.join(module_root_dir, 'scripts:') + os.environ["PATH"]
            os.environ["PYTHONPATH"] = module_root_dir + ':' + os.environ["PYTHONPATH"]
            self.combine_job.run_not_bsubbed()
        else:
            # farm mode: chain the jobs with LSF dependencies (see comment above)
            self.setup_job.run()
            time.sleep(1)
            self.start_array_job.add_dependency(self.setup_job.job_id)
            self.start_array_job.run()
            time.sleep(1)
            self.combine_job.add_dependency(self.start_array_job.job_id)
            self.combine_job.run()
            time.sleep(1)
            try:
                f = open(self.combine_script + '.id', 'w')
            except OSError as err:
                raise Error('Error opening file "' + self.combine_script + '.id' + '" for writing') from err
            print(self.combine_job.job_id, file=f)
            f.close()
            print('Jobs submitted to the farm.')
            print('Final job id is', self.combine_job.job_id)
            print('\nPipeline finished OK when this file is written:\n ', os.path.join(self.outdir, 'FINISHED'))
            print('\nFinal file will be called:\n ', os.path.join(self.outdir, 'blast.out.gz'))
        os.chdir(original_dir)
|
sanger-pathogens/Farm_blast
|
farm_blast/pipeline.py
|
Python
|
gpl-3.0
| 14,070
|
[
"BLAST"
] |
df8790e397fadfc9efe28dcc628c912551a34c120c8d27bd9de8bbc0efc022f3
|
'''
MFEM example 20p
See c++ version in the MFEM library for more detail
'''
import os
import mfem.par as mfem
from mfem.par import intArray
from os.path import expanduser, join, dirname
import numpy as np
from numpy import sin, cos, exp, sqrt, pi
from mpi4py import MPI
# MPI setup: total number of ranks and this process' rank
num_procs = MPI.COMM_WORLD.size
myid = MPI.COMM_WORLD.rank
# zero-padded rank string (e.g. '000003'), handy for per-rank file names
smyid = '{:0>6d}'.format(myid)
# physical parameters of the oscillator: mass (m_) and spring constant (k_)
m_ = 1.0
k_ = 1.0
def nicePrint(*s):
    """Print the given items from every MPI rank, one rank at a time.

    Each rank's output is prefixed with its rank number; barriers keep
    the output from different ranks from interleaving.
    """
    MPI.COMM_WORLD.Barrier()
    for turn in range(num_procs):
        MPI.COMM_WORLD.Barrier()
        if turn == myid:
            parts = [str(item) for item in s]
            print(str(myid) + ': ' + ': '.join(parts))
    MPI.COMM_WORLD.Barrier()
def run(order=1,
        prob=0,
        nsteps=100,
        dt=0.1,
        sc=1.0,
        visualization=False):
    """Integrate a 1D Hamiltonian system with a symplectic (SIAV) solver.

    order -- time integration order of the symplectic solver
    prob -- potential selection (0 harmonic, 1 pendulum, 2 Gaussian well,
            3 quartic, 4 negative quartic)
    nsteps -- number of time steps
    dt -- time step size
    sc -- spring constant from the CLI
          NOTE(review): 'sc' is accepted but never used in this body; the
          potential always uses the module-level k_ — confirm intended.
    visualization -- build a phase-space mesh and send it to GLVis
    """
    class GradT(mfem.Operator):
        # dT/dp: maps momentum to velocity, p -> p/m
        def __init__(self):
            mfem.Operator.__init__(self, 1)
        def Mult(self, x, y):
            y.Set(1.0/m_, x)
    class NegGradV(mfem.TimeDependentOperator):
        # -dV/dq: force for the potential selected by 'prob'
        def __init__(self):
            mfem.TimeDependentOperator.__init__(self, 1)
        def Mult(self, x, y):
            if prob == 1:
                y[0] = - k_ * sin(x[0])
            elif prob == 2:
                y[0] = - k_ * x[0] * exp(-0.5 * x[0] * x[0])
            elif prob == 3:
                y[0] = - k_ * (1.0 + 2.0 * x[0] * x[0]) * x[0]
            elif prob == 4:
                y[0] = - k_ * (1.0 - 0.25 * x[0] * x[0]) * x[0]
            else:
                y[0] = - k_ * x[0]
    def hamiltonian(q, p, t):
        # total energy T(p) + V(q) for the selected potential
        h = 1.0 - 0.5 / m_ + 0.5 * p * p / m_
        if prob == 1:
            h += k_ * (1.0 - cos(q))
        elif prob == 2:
            h += k_ * (1.0 - exp(-0.5 * q * q))
        elif prob == 3:
            h += 0.5 * k_ * (1.0 + q * q) * q * q
        elif prob == 4:
            h += 0.5 * k_ * (1.0 - 0.125 * q * q) * q * q
        else:
            h += 0.5 * k_ * q * q
        return h
    # 2. Create and Initialize the Symplectic Integration Solver
    siaSolver = mfem.SIAVSolver(order)
    P = GradT()
    F = NegGradV()
    siaSolver.Init(P, F)
    # 3. Set the initial conditions
    t = 0.0
    q = mfem.Vector(1)
    p = mfem.Vector(1)
    e = mfem.Vector(nsteps+1)
    # each rank starts at a different point on the unit circle in phase space
    q[0] = sin(2*pi*myid/num_procs)
    p[0] = cos(2*pi*myid/num_procs)
    # 5. Create a Mesh for visualization in phase space
    nverts = 2*(nsteps+1)*num_procs if visualization else 0
    nelems = nsteps*num_procs if visualization else 0
    mesh = mfem.Mesh(2, nverts, nelems, 0, 3)
    part = mfem.intArray(nelems)
    # 6. Perform time-stepping
    e_mean = 0.0
    for i in range(nsteps):
        if i == 0:
            e[0] = hamiltonian(q[0], p[0], t)
            e_mean += e[0]
            if visualization:
                for j in range(num_procs):
                    mesh.AddVertex([0, 0, 0])
                    mesh.AddVertex([q[0], p[0], 0.0])
        # 6b. Advance the state of the system
        t, dt = siaSolver.Step(q, p, t, dt)
        e[i+1] = hamiltonian(q[0], p[0], t)
        e_mean += e[i+1]
        # 6d. Add results to GLVis visualization
        if visualization:
            for j in range(num_procs):
                mesh.AddVertex([0, 0, t])
                mesh.AddVertex([q[0], p[0], t])
                mesh.AddQuad([2*i*num_procs + 2*j,
                              2*(i+1)*num_procs + 2*j,
                              2*(i+1)*num_procs + 2*j+1,
                              2*i*num_procs + 2*j+1])
                part[num_procs*i + j] = j
                # this also works ;D
                # mesh.AddQuad(v.ToList())
                #mesh.AddQuad(np.array(v.ToList(), dtype=np.int32))
    # 7. Compute and display mean and standard deviation of the energy
    e_mean /= (nsteps + 1)
    e_var = 0.0
    for i in range(nsteps+1):
        e_var += (e[i] - e_mean)**2
    e_var /= (nsteps + 1)
    if myid == 0:
        print("Mean and standard deviation of the energy")
    nicePrint("{:g}".format(e_mean) + "\t" + "{:g}".format(sqrt(e_var)))
    # 9. Finalize the GLVis output
    if visualization:
        mesh.FinalizeQuadMesh(1)
        pmesh = mfem.ParMesh(MPI.COMM_WORLD, mesh, part.GetData())
        fec = mfem.H1_FECollection(1, 2)
        fespace = mfem.ParFiniteElementSpace(pmesh, fec)
        energy = mfem.ParGridFunction(fespace)
        energy.Assign(0.0)
        for i in range(nsteps+1):
            energy[2*i+0] = e[i]
            energy[2*i+1] = e[i]
        sock = mfem.socketstream("localhost", 19916)
        sock.precision(8)
        sock << "parallel " << num_procs << " " << myid << "\n"
        sock << "solution\n" << pmesh << energy
        sock << "window_title 'Energy in Phase Space'\n"
        sock << "keys\n maac\n" << "axis_labels 'q' 'p' 't'\n"
        sock.flush()
if __name__ == "__main__":
    # Parse command-line options and dispatch to run().
    from mfem.common.arg_parser import ArgParser
    parser = ArgParser(description='Ex20p (Sympletic ODE)')
    parser.add_argument('-m', '--mesh',
                        default='star.mesh',
                        action='store', type=str,
                        help='Mesh file to use.')
    parser.add_argument("-p",
                        "--problem-type",
                        action='store', type=int, default=0,
                        help=''.join(["Problem Type:\n",
                                      "\t 0 - Simple Harmonic Oscillator\n",
                                      "\t 1 - Pendulum\n",
                                      "\t 2 - Gaussian Potential Well\n",
                                      "\t 3 - Quartic Potential\n",
                                      "\t 4 - Negative Quartic Potential", ]))
    parser.add_argument('-o', '--order',
                        action='store', default=1, type=int,
                        help="Time integration order")
    parser.add_argument('-n', '--number-of-steps',
                        action='store', default=100, type=int,
                        help="Number of time steps")
    parser.add_argument('-dt', '--time-step',
                        action='store', default=0.1, type=float,
                        help="Time step size")
    parser.add_argument('-k', '--spring-constant',
                        action='store', default=1, type=float,
                        help="Sprint constant")
    parser.add_argument('-vis', '--visualization',
                        action='store_true',
                        default=True,
                        help='Enable GLVis visualization')
    parser.add_argument('-no-gp', '--no-gnuplot',
                        action='store_true',
                        default=True,
                        help='Disable GnuPlot visualization')
    args = parser.parse_args()
    if myid == 0:
        parser.print_options(args)
    prob = args.problem_type
    visualization = args.visualization
    order = args.order
    nsteps = args.number_of_steps
    dt = args.time_step
    # spring constant from -k; NOTE(review): passed to run() as 'sc' but
    # run() does not apply it (the potential uses module-level k_) — confirm
    sc = args.spring_constant
    # NOTE(review): np_gp is parsed but unused below
    np_gp = args.no_gnuplot
    run(order=order,
        prob=prob,
        nsteps=nsteps,
        dt=dt,
        sc=sc,
        visualization=visualization)
|
mfem/PyMFEM
|
examples/ex20p.py
|
Python
|
bsd-3-clause
| 7,072
|
[
"Gaussian"
] |
1352ccf2e2fefac3bbb1bb76c7f155425a90728646d407fbd1553004ccf69eae
|
"""Galaxy (ansible-galaxy) plugin for integration tests."""
from __future__ import annotations
import os
import tempfile
from ....config import (
IntegrationConfig,
)
from ....docker_util import (
docker_cp_to,
)
from ....containers import (
run_support_container,
)
from . import (
CloudEnvironment,
CloudEnvironmentConfig,
CloudProvider,
)
# We add BasicAuthentication, to make the tasks that deal with
# direct API access easier to deal with across galaxy_ng and pulp
SETTINGS = b'''
CONTENT_ORIGIN = 'http://ansible-ci-pulp:80'
ANSIBLE_API_HOSTNAME = 'http://ansible-ci-pulp:80'
ANSIBLE_CONTENT_HOSTNAME = 'http://ansible-ci-pulp:80/pulp/content'
TOKEN_AUTH_DISABLED = True
GALAXY_REQUIRE_CONTENT_APPROVAL = False
GALAXY_AUTHENTICATION_CLASSES = [
"rest_framework.authentication.SessionAuthentication",
"rest_framework.authentication.TokenAuthentication",
"rest_framework.authentication.BasicAuthentication",
]
'''
SET_ADMIN_PASSWORD = b'''#!/usr/bin/execlineb -S0
foreground {
redirfd -w 1 /dev/null
redirfd -w 2 /dev/null
export DJANGO_SETTINGS_MODULE pulpcore.app.settings
export PULP_CONTENT_ORIGIN localhost
s6-setuidgid postgres
if { /usr/local/bin/django-admin reset-admin-password --password password }
if { /usr/local/bin/pulpcore-manager create-group system:partner-engineers --users admin }
}
'''
# There are 2 overrides here:
# 1. Change the gunicorn bind address from 127.0.0.1 to 0.0.0.0 now that Galaxy NG does not allow us to access the
# Pulp API through it.
# 2. Grant access allowing us to DELETE a namespace in Galaxy NG. This is as CI deletes and recreates repos and
# distributions in Pulp which now breaks the namespace in Galaxy NG. Recreating it is the "simple" fix to get it
# working again.
# These may not be needed in the future, especially if 1 becomes configurable by an env var but for now they must be
# done.
OVERRIDES = b'''#!/usr/bin/execlineb -S0
foreground {
sed -i "0,/\\"127.0.0.1:24817\\"/s//\\"0.0.0.0:24817\\"/" /etc/services.d/pulpcore-api/run
}
# This sed calls changes the first occurrence to "allow" which is conveniently the delete operation for a namespace.
# https://github.com/ansible/galaxy_ng/blob/master/galaxy_ng/app/access_control/statements/standalone.py#L9-L11.
backtick NG_PREFIX { python -c "import galaxy_ng; print(galaxy_ng.__path__[0], end='')" }
importas ng_prefix NG_PREFIX
foreground {
sed -i "0,/\\"effect\\": \\"deny\\"/s//\\"effect\\": \\"allow\\"/" ${ng_prefix}/app/access_control/statements/standalone.py
}'''
class GalaxyProvider(CloudProvider):
    """
    Galaxy plugin. Sets up pulp (ansible-galaxy) servers for tests.
    The pulp source itself resides at: https://github.com/pulp/pulp-oci-images
    """
    def __init__(self, args):  # type: (IntegrationConfig) -> None
        super().__init__(args)
        # Cannot use the latest container image as either galaxy_ng 4.2.0rc2 or pulp 0.5.0 has sporatic issues with
        # dropping published collections in CI. Try running the tests multiple times when updating. Will also need to
        # comment out the cache tests in 'test/integration/targets/ansible-galaxy-collection/tasks/install.yml' when
        # the newer update is available.
        # container image to use; overridable via ANSIBLE_PULP_CONTAINER
        self.pulp = os.environ.get(
            'ANSIBLE_PULP_CONTAINER',
            'quay.io/ansible/pulp-galaxy-ng:b79a7be64eff'
        )
        self.uses_docker = True
    def setup(self):  # type: () -> None
        """Setup cloud resource before delegation and reg cleanup callback."""
        super().setup()
        # port 80 serves galaxy_ng, 24817 the raw pulp API
        galaxy_port = 80
        pulp_host = 'ansible-ci-pulp'
        pulp_port = 24817
        ports = [
            galaxy_port,
            pulp_port,
        ]
        # Create the container, don't run it, we need to inject configs before it starts
        descriptor = run_support_container(
            self.args,
            self.platform,
            self.pulp,
            pulp_host,
            ports,
            start=False,
            allow_existing=True,
        )
        if not descriptor:
            # no container was provisioned (e.g. support containers unavailable)
            return
        if not descriptor.running:
            pulp_id = descriptor.container_id
            # copy the config payloads into the stopped container, then start it
            injected_files = {
                '/etc/pulp/settings.py': SETTINGS,
                '/etc/cont-init.d/111-postgres': SET_ADMIN_PASSWORD,
                '/etc/cont-init.d/000-ansible-test-overrides': OVERRIDES,
            }
            for path, content in injected_files.items():
                with tempfile.NamedTemporaryFile() as temp_fd:
                    temp_fd.write(content)
                    temp_fd.flush()
                    docker_cp_to(self.args, pulp_id, temp_fd.name, path)
            descriptor.start(self.args)
        # record connection details for GalaxyEnvironment to pick up after delegation
        self._set_cloud_config('PULP_HOST', pulp_host)
        self._set_cloud_config('PULP_PORT', str(pulp_port))
        self._set_cloud_config('GALAXY_PORT', str(galaxy_port))
        self._set_cloud_config('PULP_USER', 'admin')
        self._set_cloud_config('PULP_PASSWORD', 'password')
class GalaxyEnvironment(CloudEnvironment):
    """Galaxy environment plugin. Updates integration test environment after delegation."""
    def get_environment_config(self):  # type: () -> CloudEnvironmentConfig
        """Return environment configuration for use in the test environment after delegation."""
        username = str(self._get_cloud_config('PULP_USER'))
        password = str(self._get_cloud_config('PULP_PASSWORD'))
        host = self._get_cloud_config('PULP_HOST')
        galaxy_port = self._get_cloud_config('GALAXY_PORT')
        pulp_port = self._get_cloud_config('PULP_PORT')
        # the raw pulp API and the galaxy_ng API live on different ports of the same host
        pulp_base = 'http://%s:%s' % (host, pulp_port)
        galaxy_ng_url = 'http://%s:%s/api/galaxy/' % (host, galaxy_port)
        return CloudEnvironmentConfig(
            ansible_vars=dict(
                pulp_user=username,
                pulp_password=password,
                pulp_api=pulp_base,
                pulp_server=pulp_base + '/pulp_ansible/galaxy/',
                galaxy_ng_server=galaxy_ng_url,
            ),
            env_vars=dict(
                PULP_USER=username,
                PULP_PASSWORD=password,
                PULP_SERVER=pulp_base + '/pulp_ansible/galaxy/api/',
                GALAXY_NG_SERVER=galaxy_ng_url,
            ),
        )
|
mattclay/ansible
|
test/lib/ansible_test/_internal/commands/integration/cloud/galaxy.py
|
Python
|
gpl-3.0
| 6,384
|
[
"Galaxy"
] |
ddfd8b27944d63c7bf5a949496e8f26636b2e72e93c2bb646c120af73188de40
|
#
# Copyright 2016 The BigDL Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import ray
from bigdl.orca.cpu_info import schedule_workers
import os
import sys
import logging
log = logging.getLogger(__name__)
class ClusterInfo:
    """Mixin exposing node-level helpers that run inside a Ray worker."""

    def ip_addr(self):
        """Return the IP address of the node hosting this worker."""
        return ray._private.services.get_node_ip_address()

    def set_cpu_affinity(self, core_list):
        """Pin this worker process and its OpenMP threads to *core_list*."""
        core_strs = [str(core) for core in core_list]
        proclist_str = "[" + ",".join(core_strs) + "]"
        os.environ["OMP_NUM_THREADS"] = str(len(core_list))
        os.environ["OMP_SCHEDULE"] = "STATIC"
        os.environ["OMP_PROC_BIND"] = "CLOSE"
        # KMP_AFFINITY works on intel openmp (intel tensorflow, intel pytorch/ipex)
        # GOMP_CPU_AFFINITY works on gomp (stock pytorch)
        # os.sched_setaffinity works on other threads (stock tensorflow)
        os.environ["KMP_AFFINITY"] = (
            "verbose,granularity=fine,proclist=" + proclist_str + ",explicit"
        )
        os.environ["GOMP_CPU_AFFINITY"] = proclist_str
        os.sched_setaffinity(0, set(core_list))

    def disable_cpu_affinity(self, num_cores):
        """Allow *num_cores* OpenMP threads without pinning to specific cores."""
        os.environ["OMP_NUM_THREADS"] = str(num_cores)
        os.environ["KMP_AFFINITY"] = "disabled"
        os.environ["OMP_PROC_BIND"] = "FALSE"

    def run(self, func, *args, **kwargs):
        """Execute an arbitrary callable inside this worker and return its result."""
        return func(*args, **kwargs)
def make_worker(worker_cls):
    """Derive a worker type that mixes ClusterInfo helpers into *worker_cls*."""
    class Worker(worker_cls, ClusterInfo):
        """User-supplied worker class augmented with ClusterInfo node utilities."""
    return Worker
class RayDLCluster:
    """A group of Ray actor workers with optional per-node CPU pinning.

    Spawns ``num_workers`` remote actors of ``worker_cls`` (augmented with
    :class:`ClusterInfo` helpers via :func:`make_worker`), each reserving
    ``worker_cores`` CPUs. When ``cpu_binding`` is enabled (Linux only),
    workers on the same node are assigned disjoint core sets and pinned
    to them; otherwise affinity is explicitly disabled on every worker.

    Fixes vs original: deprecated ``log.warn`` replaced with ``log.warning``,
    garbled log/error messages corrected, and the CPU-binding logic moved
    into a private helper for readability.
    """

    def __init__(self,
                 num_workers,
                 worker_cores,
                 worker_cls=None,
                 worker_param=None,
                 cpu_binding=True,
                 ):
        """
        :param num_workers: total number of remote workers to create
        :param worker_cores: number of CPU cores reserved per worker
        :param worker_cls: user-supplied worker class to wrap
        :param worker_param: dict of keyword arguments passed to worker_cls
        :param cpu_binding: pin workers to dedicated cores (Linux only)
        :raises RuntimeError: if Ray has not been initialized yet
        """
        if not ray.is_initialized():
            raise RuntimeError("Ray is not initialized. Please initialize ray.")
        self.num_workers = num_workers
        self.worker_cores = worker_cores
        self.worker_cls = make_worker(worker_cls)
        self.work_param = worker_param
        # CPU pinning relies on os.sched_setaffinity, which is Linux-only
        if sys.platform == 'linux':
            self.cpu_binding = cpu_binding
        else:
            if cpu_binding:
                log.warning(f"cpu_binding is only supported on linux, detected os {sys.platform}, "
                            "setting cpu_binding to False")
            self.cpu_binding = False
        self.worker_class = ray.remote(num_cpus=self.worker_cores)(self.worker_cls)
        self.remote_workers = [self.worker_class.remote(**worker_param)
                               for i in range(0, self.num_workers)]
        if self.cpu_binding:
            self._bind_workers_to_cores()
        else:
            ray.get([worker.disable_cpu_affinity.remote(self.worker_cores)
                     for worker in self.remote_workers])

    def _bind_workers_to_cores(self):
        """Group workers by node IP and pin each to a disjoint core set."""
        hosts = ray.get([worker.ip_addr.remote() for worker in self.remote_workers])
        # group worker handles by the node they landed on
        ip2workers = {}
        for ip, worker in zip(hosts, self.remote_workers):
            ip2workers.setdefault(ip, []).append(worker)
        # ask one worker per node to compute a core schedule for that node
        ips = list(ip2workers.keys())
        cpu_binding_refs = []
        for ip in ips:
            ref = ip2workers[ip][0].run.remote(schedule_workers,
                                               len(ip2workers[ip]),
                                               self.worker_cores)
            cpu_binding_refs.append(ref)
        cpu_bindings = ray.get(cpu_binding_refs)
        # apply each node's schedule to its workers
        result = []
        for ip, core_lists in zip(ips, cpu_bindings):
            for worker, core_list in zip(ip2workers[ip], core_lists):
                log.debug(f"Setting thread affinity for worker in {ip}: {core_list}")
                result.append(worker.set_cpu_affinity.remote(core_list))
        ray.get(result)

    def get_workers(self):
        """Return the list of remote worker actor handles."""
        return self.remote_workers
|
intel-analytics/BigDL
|
python/orca/src/bigdl/orca/learn/dl_cluster.py
|
Python
|
apache-2.0
| 4,240
|
[
"ORCA"
] |
7a5ae963fa25a060e2ce793012b20f1594c29039b24832e095b9728d05c7c0e2
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.