text
stringlengths
11
4.05M
// Copyright 2022 The ChromiumOS Authors
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

package hwsec

import (
	"context"
	"sort"
	"strings"
	"time"

	"github.com/google/go-cmp/cmp"

	uda "chromiumos/system_api/user_data_auth_proto"
	cryptohomecommon "chromiumos/tast/common/cryptohome"
	"chromiumos/tast/common/hwsec"
	"chromiumos/tast/ctxutil"
	"chromiumos/tast/errors"
	hwsecremote "chromiumos/tast/remote/hwsec"
	"chromiumos/tast/testing"
)

// pinWeaverWithAuthAPIParam contains the test parameters which are different
// between the types of backing store.
type pinWeaverWithAuthAPIParam struct {
	// Specifies whether to use user secret stash.
	useUserSecretStash bool
	// Specifies whether to use AuthFactor.
	// This, for now, also assumes that AuthSession would be used with AuthFactors.
	useAuthFactor bool
	// For M104 AuthSession launch, pin is currently set with Legacy API.
	// Note: both these parameters cannot be true at the same time as that is not a supported case.
	useLegacyAddAPIForPin bool
}

func init() {
	testing.AddTest(&testing.Test{
		Func: PINWeaverWithAuthAPI,
		Desc: "Checks that LE credentials work with AuthSession, AuthFactor and USS",
		Contacts: []string{
			"hardikgoyal@chromium.org", // Test author
			"cryptohome-core@google.com",
		},
		Attr:         []string{"informational", "group:mainline"},
		SoftwareDeps: []string{"pinweaver", "reboot"},
		Params: []testing.Param{{
			Name: "pin_weaver_with_auth_factor_with_no_uss",
			Val: pinWeaverWithAuthAPIParam{
				useUserSecretStash:    false,
				useAuthFactor:         true,
				useLegacyAddAPIForPin: false,
			},
		}, {
			Name: "pin_weaver_with_auth_session",
			Val: pinWeaverWithAuthAPIParam{
				useUserSecretStash:    false,
				useAuthFactor:         false,
				useLegacyAddAPIForPin: false,
			},
		}, {
			Name: "pin_weaver_with_auth_session_legacy_pin_add",
			Val: pinWeaverWithAuthAPIParam{
				useUserSecretStash:    false,
				useAuthFactor:         false,
				useLegacyAddAPIForPin: true,
			},
		}, {
			Name: "pin_weaver_with_auth_factor_with_uss",
			Val: pinWeaverWithAuthAPIParam{
				useUserSecretStash:    true,
				useAuthFactor:         true,
				useLegacyAddAPIForPin: false,
			},
		}},
	})
}

// Some constants used across the test.
const (
	authFactorLabelPIN       = "lecred"
	correctPINSecret         = "123456"
	incorrectPINSecret       = "000000"
	passwordAuthFactorLabel  = "fake_label"
	passwordAuthFactorSecret = "password"
	testUser1                = "testUser1@example.com"
	testUser2                = "testUser2@example.com"
)

// PINWeaverWithAuthAPI exercises the PinWeaver (low-entropy credential) flow
// end to end: user setup with password+PIN factors, wrong-PIN lockout
// behavior (including persistence of the attempt counter across a reboot),
// unlocking via password authentication, and LE-credential cleanup on
// factor removal.
func PINWeaverWithAuthAPI(ctx context.Context, s *testing.State) {
	userParam := s.Param().(pinWeaverWithAuthAPIParam)
	// Keep the original (longer-lived) context for deferred cleanups.
	ctxForCleanUp := ctx
	ctx, cancel := ctxutil.Shorten(ctx, 10*time.Second)
	defer cancel()
	cmdRunner := hwsecremote.NewCmdRunner(s.DUT())
	client := hwsec.NewCryptohomeClient(cmdRunner)
	helper, err := hwsecremote.NewHelper(cmdRunner, s.DUT())
	// NOTE(review): helper is dereferenced here before the err check below;
	// on error this would panic rather than report — consider reordering.
	cryptohomeHelper := helper.CryptohomeClient()
	if err != nil {
		s.Fatal("Helper creation error: ", err)
	}
	daemonController := helper.DaemonController()

	// Wait for cryptohomed becomes available if needed.
	if err := daemonController.Ensure(ctx, hwsec.CryptohomeDaemon); err != nil {
		s.Fatal("Failed to ensure cryptohomed: ", err)
	}

	supportsLE, err := client.SupportsLECredentials(ctx)
	if err != nil {
		s.Fatal("Failed to get supported policies: ", err)
	} else if !supportsLE {
		s.Fatal("Device does not support PinWeaver")
	}

	// Clean up obsolete state, in case there's any.
	cmdRunner.Run(ctx, "rm -rf /home/.shadow/low_entropy_creds")
	if err := client.UnmountAll(ctx); err != nil {
		s.Fatal("Failed to unmount vaults for preparation: ", err)
	}
	if _, err := client.RemoveVault(ctx, testUser1); err != nil {
		s.Fatal("Failed to remove old vault for preparation: ", err)
	}
	if _, err := client.RemoveVault(ctx, testUser2); err != nil {
		s.Fatal("Failed to remove old vault for preparation: ", err)
	}
	if userParam.useUserSecretStash {
		// Enable the UserSecretStash experiment for the duration of the test by
		// creating a flag file that's checked by cryptohomed.
		cleanupUSSExperiment, err := helper.EnableUserSecretStash(ctx)
		if err != nil {
			s.Fatal("Failed to enable the UserSecretStash experiment: ", err)
		}
		defer cleanupUSSExperiment(ctx)
	}

	/**Initial User Setup. Test both user 1 and user 2 can login successfully.**/
	// Setup a user 1 for testing. This user will be locked out and re-authed to ensure the PIN is unlocked.
	if err = setupUserWithPIN(ctx, ctxForCleanUp, testUser1, cmdRunner, helper, userParam); err != nil {
		s.Fatal("Failed to run setupUserWithPIN with error: ", err)
	}
	defer removeLeCredential(ctx, ctxForCleanUp, testUser1, authFactorLabelPIN, cmdRunner, helper, userParam)
	// Ensure we can authenticate with correct pin.
	if _, err = authenticateWithCorrectPIN(ctx, ctxForCleanUp, testUser1, cmdRunner, helper, userParam, true /*shouldAuthenticate*/); err != nil {
		s.Fatal("Failed to run authenticateWithCorrectPIN with error: ", err)
	}
	// Ensure we can authenticate with correct password.
	if err = authenticateWithCorrectPassword(ctx, ctxForCleanUp, testUser1, cmdRunner, helper, userParam); err != nil {
		s.Fatal("Failed to run authenticateWithCorrectPassword with error: ", err)
	}

	// Setup a user 2 for testing. This user will be removed and the le_credential file will be checked.
	if err = setupUserWithPIN(ctx, ctxForCleanUp, testUser2, cmdRunner, helper, userParam); err != nil {
		s.Fatal("Failed to run setupUserWithPIN with error: ", err)
	}
	defer removeLeCredential(ctx, ctxForCleanUp, testUser2, authFactorLabelPIN, cmdRunner, helper, userParam)
	// Ensure we can authenticate with correct password for testUser2.
	if err = authenticateWithCorrectPassword(ctx, ctxForCleanUp, testUser2, cmdRunner, helper, userParam); err != nil {
		s.Fatal("Failed to run authenticateWithCorrectPassword with error: ", err)
	}
	// Ensure we can authenticate with correct pin for testUser2.
	if _, err = authenticateWithCorrectPIN(ctx, ctxForCleanUp, testUser2, cmdRunner, helper, userParam, true /*shouldAuthenticate*/); err != nil {
		s.Fatal("Failed to run authenticateWithCorrectPIN with error: ", err)
	}
	// Ensure that testUser1 still works wth pin.
	if _, err = authenticateWithCorrectPIN(ctx, ctxForCleanUp, testUser1, cmdRunner, helper, userParam, true /*shouldAuthenticate*/); err != nil {
		s.Fatal("Failed to run authenticateWithCorrectPIN with error: ", err)
	}
	// Ensure that testUser1 still works wth password.
	if err = authenticateWithCorrectPassword(ctx, ctxForCleanUp, testUser1, cmdRunner, helper, userParam); err != nil {
		s.Fatal("Failed to run authenticateWithCorrectPassword with error: ", err)
	}

	/** Running test where we try to almost lock out PIN with 4 attempts twice, but the user is able to log back in **/
	// Attempt four wrong PIN.
	if _, err = attemptWrongPIN(ctx, ctxForCleanUp, testUser1, cmdRunner, helper, userParam, 4 /*attempts*/); err != nil {
		s.Fatal("Failed to run attemptWrongPIN with error: ", err)
	}
	// Since the pin is not locked out yet, we should be able to log back in again.
	if _, err = authenticateWithCorrectPIN(ctx, ctxForCleanUp, testUser1, cmdRunner, helper, userParam, true /*shouldAuthenticate*/); err != nil {
		s.Fatal("Failed to run authenticateWithCorrectPIN with error: ", err)
	}
	// Attempt four wrong PIN again.
	replyWithError, err := attemptWrongPIN(ctx, ctxForCleanUp, testUser1, cmdRunner, helper, userParam, 4 /*attempts*/)
	if err != nil {
		s.Fatal("Failed to run attemptWrongPIN with error: ", err)
	}
	// Ensure AutheneticateAuthFactor error code relays TPM is not locked out.
	if userParam.useAuthFactor && replyWithError.Error != uda.CryptohomeErrorCode_CRYPTOHOME_ERROR_AUTHORIZATION_KEY_FAILED {
		s.Fatal("TPM is locked out: ", replyWithError.Error)
	}
	// Since the pin is not locked out yet, we should be able to log back in again.
	if _, err = authenticateWithCorrectPIN(ctx, ctxForCleanUp, testUser1, cmdRunner, helper, userParam, true /*shouldAuthenticate*/); err != nil {
		s.Fatal("Failed to run authenticateWithCorrectPIN with error: ", err)
	}

	/** Test whether the attempt counter persists after reboot **/
	// Attempt four wrong PIN.
	if _, err = attemptWrongPIN(ctx, ctxForCleanUp, testUser1, cmdRunner, helper, userParam, 4 /*attempts*/); err != nil {
		s.Fatal("Failed to run attemptWrongPIN with error: ", err)
	}
	// Check to make sure that PIN AuthFactor appears in StartAuthSessionReply.
	reply, authSessionID, err := cryptohomeHelper.StartAuthSession(ctx, testUser1, false /*isEphemeral*/, uda.AuthIntent_AUTH_INTENT_DECRYPT)
	if err != nil {
		s.Fatal("Failed to start auth session when searching for PIN factor in reply: ", err)
	}
	defer cryptohomeHelper.InvalidateAuthSession(ctx, authSessionID)
	// Search for PIN-based AuthFactor in reply.
	hasPinAuthFactor := false
	for _, authFactor := range reply.AuthFactors {
		if authFactor.Type == uda.AuthFactorType_AUTH_FACTOR_TYPE_PIN {
			hasPinAuthFactor = true
		}
	}
	if !hasPinAuthFactor {
		s.Fatal("PIN-based AuthFactor was not found in StartAuthSessionReply")
	}

	// Because Cr50 stores state in the firmware, that persists across reboots, this test
	// needs to run before and after a reboot.
	if err = helper.Reboot(ctx); err != nil {
		s.Fatal("Failed to run helper with error: ", err)
	}

	// Lockout the PIN this time.
	_, err = attemptWrongPIN(ctx, ctxForCleanUp, testUser1, cmdRunner, helper, userParam, 1 /*attempts*/)
	if err != nil {
		s.Fatal("Failed to run attemptWrongPIN with error: ", err)
	}
	if err = ensurePINLockedOut(ctx, testUser1, client); err != nil {
		s.Fatal("Failed to run ensurePINLockedOut with error: ", err)
	}
	// After the PIN lock out we should not be able to authenticate with correct PIN.
	if replyWithError, err = authenticateWithCorrectPIN(ctx, ctxForCleanUp, testUser1, cmdRunner, helper, userParam, false /*shouldAuthenticate*/); err != nil {
		s.Fatal("Failed to run authenticateWithCorrectPIN with error: ", err)
	}
	// Ensure AutheneticateAuthFactor error code relays TPM is locked out.
	if userParam.useAuthFactor && replyWithError.Error != uda.CryptohomeErrorCode_CRYPTOHOME_ERROR_TPM_DEFEND_LOCK {
		s.Fatal("AuthenticateAuthFactor indicates that the TPM is not locked out: ", replyWithError.Error)
	}

	// Check to make sure that PIN AuthFactor does not appear in StartAuthSessionReply.
	reply, authSessionID, err = cryptohomeHelper.StartAuthSession(ctx, testUser1, false /*isEphemeral*/, uda.AuthIntent_AUTH_INTENT_DECRYPT)
	if err != nil {
		s.Fatal("Failed to start auth session when searching for PIN factor in reply: ", err)
	}
	defer cryptohomeHelper.InvalidateAuthSession(ctx, authSessionID)
	// Search for PIN-based AuthFactor in reply.
	for _, authFactor := range reply.AuthFactors {
		if authFactor.Type == uda.AuthFactorType_AUTH_FACTOR_TYPE_PIN {
			s.Fatal("PIN-based AuthFactor was found in StartAuthSessionReply")
		}
	}

	/** Ensure that testUser2 can still use PIN **/
	if _, err = authenticateWithCorrectPIN(ctx, ctxForCleanUp, testUser2, cmdRunner, helper, userParam, true /*shouldAuthenticate*/); err != nil {
		s.Fatal("Failed to run authenticateWithCorrectPIN with error: ", err)
	}

	/** Unlock PIN **/
	if err = authenticateWithCorrectPassword(ctx, ctxForCleanUp, testUser1, cmdRunner, helper, userParam); err != nil {
		s.Fatal("Failed to run authenticateWithCorrectPassword with error: ", err)
	}
	// Ensure pin login now works again for testUser1.
	if _, err = authenticateWithCorrectPIN(ctx, ctxForCleanUp, testUser1, cmdRunner, helper, userParam, true /*shouldAuthenticate*/); err != nil {
		s.Fatal("Failed to run authenticateWithCorrectPIN with error: ", err)
	}
	// Remove the added PIN and check to see if le_credential file was updated.
	if err = removeLeCredential(ctx, ctxForCleanUp, testUser2, authFactorLabelPIN, cmdRunner, helper, userParam); err != nil {
		s.Fatal("Failed to run removeLeCredential with error: ", err)
	}
	/** Ensure test user 1 can still login with PIN**/
	if _, err = authenticateWithCorrectPIN(ctx, ctxForCleanUp, testUser1, cmdRunner, helper, userParam, true /*shouldAuthenticate*/); err != nil {
		s.Fatal("Failed to run authenticateWithCorrectPIN with error: ", err)
	}
}

// getLeCredsFromDisk gets the LE Credential file from disk.
// It returns the sorted list of entries under the low_entropy_creds
// directory, used to detect whether an add/remove changed on-disk state.
func getLeCredsFromDisk(ctx context.Context, r *hwsecremote.CmdRunnerRemote) ([]string, error) {
	output, err := r.Run(ctx, "/bin/ls", "/home/.shadow/low_entropy_creds")
	if err != nil {
		return nil, err
	}
	labels := strings.Split(string(output), "\n")
	sort.Strings(labels)
	return labels, nil
}

// setupUserWithPIN sets up a user with a password and a PIN auth factor.
func setupUserWithPIN(ctx, ctxForCleanUp context.Context, userName string, cmdRunner *hwsecremote.CmdRunnerRemote, helper *hwsecremote.CmdHelperRemote, userParam pinWeaverWithAuthAPIParam) error { cryptohomeHelper := helper.CryptohomeClient() // Start an Auth session and get an authSessionID. _, authSessionID, err := cryptohomeHelper.StartAuthSession(ctx, userName, false /*ephemeral*/, uda.AuthIntent_AUTH_INTENT_DECRYPT) if err != nil { return errors.Wrap(err, "failed to start auth session for PIN authentication") } defer cryptohomeHelper.InvalidateAuthSession(ctx, authSessionID) if err = cryptohomeHelper.CreatePersistentUser(ctx, authSessionID); err != nil { return errors.Wrap(err, "failed to create persistent user with auth session") } if err = cryptohomeHelper.PreparePersistentVault(ctx, authSessionID, false); err != nil { return errors.Wrap(err, "failed to prepare persistent user with auth session") } defer cryptohomeHelper.Unmount(ctx, userName) if userParam.useAuthFactor { err = cryptohomeHelper.AddAuthFactor(ctx, authSessionID, passwordAuthFactorLabel, passwordAuthFactorSecret) } else { err = cryptohomeHelper.AddCredentialsWithAuthSession(ctx, userName, passwordAuthFactorSecret, passwordAuthFactorLabel, authSessionID, false /*kiosk*/) } if err != nil { return errors.Wrap(err, "failed to add password auth factor") } leCredsBeforeAdd, err := getLeCredsFromDisk(ctx, cmdRunner) if err != nil { return errors.Wrap(err, "failed to get le creds from disk before add") } // Add a PIN auth factor to the user. 
if userParam.useLegacyAddAPIForPin { err = cryptohomeHelper.AddVaultKey(ctx, userName, passwordAuthFactorSecret, passwordAuthFactorLabel, correctPINSecret, authFactorLabelPIN, true) } else { if userParam.useAuthFactor { err = cryptohomeHelper.AddPinAuthFactor(ctx, authSessionID, authFactorLabelPIN, correctPINSecret) } else { err = cryptohomeHelper.AddPinCredentialsWithAuthSession(ctx, authFactorLabelPIN, correctPINSecret, authSessionID) } } if err != nil { return errors.Wrap(err, "failed to add le credential") } leCredsAfterAdd, err := getLeCredsFromDisk(ctx, cmdRunner) if err != nil { return errors.Wrap(err, "failed to get le creds from disk after add") } if diff := cmp.Diff(leCredsAfterAdd, leCredsBeforeAdd); diff == "" { return errors.Wrap(err, "le cred file did not change after add") } return nil } // attemptWrongPIN attempts to try wrong PIN for authentication for given number of attempts. func attemptWrongPIN(ctx, ctxForCleanUp context.Context, testUser string, r *hwsecremote.CmdRunnerRemote, helper *hwsecremote.CmdHelperRemote, userParam pinWeaverWithAuthAPIParam, numberOfWrongAttempts int) (*uda.AuthenticateAuthFactorReply, error) { cryptohomeHelper := helper.CryptohomeClient() // Authenticate a new auth session via the new added PIN auth factor. _, authSessionID, err := cryptohomeHelper.StartAuthSession(ctx, testUser, false /*ephemeral*/, uda.AuthIntent_AUTH_INTENT_DECRYPT) if err != nil { return nil, errors.Wrap(err, "failed to start auth session for PIN authentication") } defer cryptohomeHelper.InvalidateAuthSession(ctxForCleanUp, authSessionID) reply := &uda.AuthenticateAuthFactorReply{} // Supply invalid credentials five times to trigger firmware lockout of the credential. 
for i := 0; i < numberOfWrongAttempts; i++ { if userParam.useAuthFactor { reply, err = cryptohomeHelper.AuthenticatePinAuthFactor(ctx, authSessionID, authFactorLabelPIN, incorrectPINSecret) } else { err = cryptohomeHelper.AuthenticatePinWithAuthSession(ctx, incorrectPINSecret, authFactorLabelPIN, authSessionID) } if err == nil { return nil, errors.Wrap(err, "authentication with wrong PIN succeeded unexpectedly") } } return reply, nil } // authenticateWithCorrectPIN authenticates a given user with the correct PIN. func authenticateWithCorrectPIN(ctx, ctxForCleanUp context.Context, testUser string, r *hwsecremote.CmdRunnerRemote, helper *hwsecremote.CmdHelperRemote, userParam pinWeaverWithAuthAPIParam, shouldAuthenticate bool) (*uda.AuthenticateAuthFactorReply, error) { cryptohomeHelper := helper.CryptohomeClient() // Authenticate a new auth session via the new added PIN auth factor. _, authSessionID, err := cryptohomeHelper.StartAuthSession(ctx, testUser, false /*ephemeral*/, uda.AuthIntent_AUTH_INTENT_DECRYPT) if err != nil { return nil, errors.Wrap(err, "failed to start auth session for PIN authentication") } defer cryptohomeHelper.InvalidateAuthSession(ctxForCleanUp, authSessionID) reply := &uda.AuthenticateAuthFactorReply{} if userParam.useAuthFactor { reply, err = cryptohomeHelper.AuthenticatePinAuthFactor(ctx, authSessionID, authFactorLabelPIN, correctPINSecret) } else { err = cryptohomeHelper.AuthenticatePinWithAuthSession(ctx, correctPINSecret, authFactorLabelPIN, authSessionID) } if (err == nil) != shouldAuthenticate { return reply, errors.Wrapf(err, "failed to authenticated auth factor with correct PIN. got %v, want %v", (err == nil), shouldAuthenticate) } return reply, nil } // authenticateWithCorrectPassword authenticates a given user with the correct password. 
func authenticateWithCorrectPassword(ctx, ctxForCleanUp context.Context, testUser string, r *hwsecremote.CmdRunnerRemote, helper *hwsecremote.CmdHelperRemote, userParam pinWeaverWithAuthAPIParam) error {
	cryptohomeHelper := helper.CryptohomeClient()

	// Authenticate a new auth session via the new password auth factor and mount the user.
	_, authSessionID, err := cryptohomeHelper.StartAuthSession(ctx, testUser, false /*ephemeral*/, uda.AuthIntent_AUTH_INTENT_DECRYPT)
	if err != nil {
		return errors.Wrap(err, "failed to start auth session for password authentication")
	}
	// Invalidate on the clean-up context so this runs even after ctx expires.
	defer cryptohomeHelper.InvalidateAuthSession(ctxForCleanUp, authSessionID)

	// Authenticate with correct password.
	if userParam.useAuthFactor {
		reply, err := cryptohomeHelper.AuthenticateAuthFactor(ctx, authSessionID, passwordAuthFactorLabel, passwordAuthFactorSecret)
		if err != nil {
			return errors.Wrap(err, "failed to authenticate auth factor")
		}
		if !reply.Authenticated {
			return errors.New("AuthSession not authenticated despite successful reply")
		}
		// A password factor is expected to authorize both the decrypt and
		// verify-only intents.
		if err := cryptohomecommon.ExpectAuthIntents(reply.AuthorizedFor, []uda.AuthIntent{
			uda.AuthIntent_AUTH_INTENT_DECRYPT,
			uda.AuthIntent_AUTH_INTENT_VERIFY_ONLY,
		}); err != nil {
			return errors.Wrap(err, "unexpected AuthSession authorized intents")
		}
	} else {
		err = cryptohomeHelper.AuthenticateAuthSession(ctx, passwordAuthFactorSecret, passwordAuthFactorLabel, authSessionID, false /*kiosk_mount*/)
		if err != nil {
			return errors.Wrap(err, "failed to authenticate AuthSession")
		}
	}
	return nil
}

// removeLeCredential removes testUser and checks to see if the leCreds on disk was updated.
func removeLeCredential(ctx, ctxForCleanUp context.Context, testUser, label string, r *hwsecremote.CmdRunnerRemote, helper *hwsecremote.CmdHelperRemote, userParam pinWeaverWithAuthAPIParam) error { cryptohomeHelper := helper.CryptohomeClient() _, authSessionID, err := cryptohomeHelper.StartAuthSession(ctx, testUser, false /*isEphemeral*/, uda.AuthIntent_AUTH_INTENT_DECRYPT) if err != nil { return errors.Wrap(err, "failed to start auth session for authentication") } defer cryptohomeHelper.InvalidateAuthSession(ctxForCleanUp, authSessionID) // Authenticate with correct password. if userParam.useAuthFactor { reply, err := cryptohomeHelper.AuthenticateAuthFactor(ctx, authSessionID, passwordAuthFactorLabel, passwordAuthFactorSecret) if err != nil { return errors.Wrap(err, "failed to authenticate auth factor") } if !reply.Authenticated { return errors.New("AuthSession not authenticated despite successful reply") } if err := cryptohomecommon.ExpectAuthIntents(reply.AuthorizedFor, []uda.AuthIntent{ uda.AuthIntent_AUTH_INTENT_DECRYPT, uda.AuthIntent_AUTH_INTENT_VERIFY_ONLY, }); err != nil { return errors.Wrap(err, "unexpected AuthSession authorized intents") } } else { err = cryptohomeHelper.AuthenticateAuthSession(ctx, passwordAuthFactorSecret, passwordAuthFactorLabel, authSessionID, false /*kiosk_mount*/) if err != nil { return errors.Wrap(err, "failed to authenticate AuthSession") } } leCredsBeforeRemove, err := getLeCredsFromDisk(ctx, r) if err != nil { return errors.Wrap(err, "failed to get le creds from disk") } if err := cryptohomeHelper.RemoveAuthFactor(ctx, authSessionID, label); err != nil { return errors.Wrap(err, "failed to remove vault") } leCredsAfterRemove, err := getLeCredsFromDisk(ctx, r) if err != nil { return errors.Wrap(err, "failed to get le creds from disk") } if diff := cmp.Diff(leCredsAfterRemove, leCredsBeforeRemove); diff == "" { return errors.Wrap(err, "LE cred not cleaned up successfully") } return nil } func ensurePINLockedOut(ctx 
context.Context, testUser string, cryptohomeClient *hwsec.CryptohomeClient) error { output, err := cryptohomeClient.ListAuthFactors(ctx, testUser) if err != nil { return errors.Wrap(err, "failed to list auth factors") } // Search for the first PIN-based AuthFactor, and parse if it is locked out. var pinAuthFactor *uda.AuthFactorWithStatus for _, authFactor := range output.ConfiguredAuthFactorsWithStatus { if authFactor.AuthFactor.Type == uda.AuthFactorType_AUTH_FACTOR_TYPE_PIN { pinAuthFactor = authFactor break } } if pinAuthFactor != nil { for _, authIntent := range pinAuthFactor.AvailableForIntents { if authIntent == uda.AuthIntent_AUTH_INTENT_DECRYPT { return errors.New("PIN not locked when it should have been") } } return nil } return errors.New(testUser + " does not have any PIN-based AuthFactors.") }
package txhash

import (
	"crypto/sha256"
	"encoding/binary"
	"io"
	"log"
	"sort"

	pb "github.com/xuperchain/xupercore/bcs/ledger/xledger/xldgpb"
	"github.com/xuperchain/xupercore/protos"
)

// encoder writes values to w in a length-prefixed, big-endian binary layout.
// The exact byte layout feeds the transaction hash, so any change here
// changes every tx hash — treat it as frozen.
type encoder struct {
	intbuf [8]byte // scratch buffer reused for every 8-byte integer write
	w      io.Writer
}

// newEncoder returns an encoder writing to w.
func newEncoder(w io.Writer) *encoder {
	return &encoder{
		w: w,
	}
}

// EncodeInt64 writes x as 8 big-endian bytes.
func (e *encoder) EncodeInt64(x int64) {
	buf := e.intbuf[:8]
	binary.BigEndian.PutUint64(buf, uint64(x))
	e.w.Write(buf)
}

// EncodeString writes the byte length of s, then its bytes (nothing further
// for an empty string).
func (e *encoder) EncodeString(s string) {
	e.EncodeInt64(int64(len(s)))
	if len(s) == 0 {
		return
	}
	io.WriteString(e.w, s)
}

// EncodeBytes writes the length of s, then its bytes.
func (e *encoder) EncodeBytes(s []byte) {
	e.EncodeInt64(int64(len(s)))
	if len(s) == 0 {
		return
	}
	e.w.Write(s)
}

// EncodeMap writes the entry count, then each key/value pair in sorted key
// order so the encoding is deterministic regardless of map iteration order.
func (e *encoder) EncodeMap(m map[string][]byte) {
	length := len(m)
	e.EncodeInt64(int64(length))
	if length == 0 {
		return
	}
	keys := make([]string, 0, len(m))
	for key := range m {
		keys = append(keys, key)
	}
	sort.Strings(keys)
	for _, key := range keys {
		e.EncodeString(key)
		e.EncodeBytes(m[key])
	}
}

// Encode dispatches on the dynamic type of x; bools become 0/1 int64s and
// unsupported types panic (a programming error, not runtime input).
func (e *encoder) Encode(x interface{}) {
	switch v := x.(type) {
	case bool:
		if v {
			e.EncodeInt64(1)
		} else {
			e.EncodeInt64(0)
		}
	case int:
		e.EncodeInt64(int64(v))
	case int32:
		e.EncodeInt64(int64(v))
	case int64:
		e.EncodeInt64(v)
	case string:
		e.EncodeString(v)
	case []byte:
		e.EncodeBytes(v)
	case map[string][]byte:
		e.EncodeMap(v)
	default:
		log.Panicf("not supported type:%T", x)
	}
}

// txDigestHashV2 make tx hash using double sha256 over the deterministic
// field encoding above. includeSigns selects whether signature data
// (initiator/auth-require/Xuper signatures) is folded into the digest.
// The field order below is consensus-critical and must not be changed.
func txDigestHashV2(tx *pb.Transaction, includeSigns bool) []byte {
	h := sha256.New()
	enc := newEncoder(h)
	// encode TxInputs
	enc.Encode(len(tx.TxInputs))
	for _, input := range tx.TxInputs {
		enc.Encode(input.RefTxid)
		enc.Encode(input.RefOffset)
		enc.Encode(input.FromAddr)
		enc.Encode(input.Amount)
		enc.Encode(input.FrozenHeight)
	}
	// encode TxOutputs
	enc.Encode(len(tx.TxOutputs))
	for _, output := range tx.TxOutputs {
		enc.Encode(output.Amount)
		enc.Encode(output.ToAddr)
		enc.Encode(output.FrozenHeight)
	}
	enc.Encode(tx.Desc)
	enc.Encode(tx.Coinbase)
	enc.Encode(tx.Nonce)
	enc.Encode(tx.Timestamp)
	enc.Encode(tx.Version)
	enc.Encode(tx.Autogen)
	// encode TxInputsExt
	enc.Encode(len(tx.TxInputsExt))
	for _, input := range tx.TxInputsExt {
		enc.Encode(input.Bucket)
		enc.Encode(input.Key)
		enc.Encode(input.RefTxid)
		enc.Encode(input.RefOffset)
	}
	// encode TxOutputsExt
	enc.Encode(len(tx.TxOutputsExt))
	for _, output := range tx.TxOutputsExt {
		enc.Encode(output.Bucket)
		enc.Encode(output.Key)
		enc.Encode(output.Value)
	}
	// encode ContractRequests
	enc.Encode(len(tx.ContractRequests))
	for _, req := range tx.ContractRequests {
		enc.Encode(req.ModuleName)
		enc.Encode(req.ContractName)
		enc.Encode(req.MethodName)
		enc.Encode(req.Args)
		enc.Encode(len(req.ResourceLimits))
		for _, limit := range req.ResourceLimits {
			enc.Encode(int32(limit.Type))
			enc.Encode(limit.Limit)
		}
		enc.Encode(req.Amount)
	}
	enc.Encode(tx.Initiator)
	enc.Encode(len(tx.AuthRequire))
	for _, addr := range tx.AuthRequire {
		enc.Encode(addr)
	}
	// encSigs encodes a signature list as count + (PublicKey, Sign) pairs.
	encSigs := func(sigs []*protos.SignatureInfo) {
		enc.Encode(len(sigs))
		for _, sig := range sigs {
			enc.Encode(sig.PublicKey)
			enc.Encode(sig.Sign)
		}
	}
	if includeSigns {
		encSigs(tx.InitiatorSigns)
		encSigs(tx.AuthRequireSigns)
		// encode PublicKeys
		xuperSign := tx.GetXuperSign()
		enc.Encode(len(xuperSign.GetPublicKeys()))
		for _, pubkey := range xuperSign.GetPublicKeys() {
			enc.Encode(pubkey)
		}
		enc.Encode(tx.GetXuperSign().GetSignature())
	}
	enc.Encode(tx.GetHDInfo().GetHdPublicKey())
	enc.Encode(tx.GetHDInfo().GetOriginalHash())
	// Double SHA-256: hash the stream digest once more.
	sum := sha256.Sum256(h.Sum(nil))
	return sum[:]
}
package owners

// Owner represents a person who owns something in the system.
type Owner struct {
	Name string
	// Cpf — presumably the Brazilian CPF taxpayer ID; format/validation not
	// enforced here — TODO confirm with callers.
	Cpf        string
	Profession string
}
package main

import "fmt"

// Vertex is a 2-D point with integer coordinates.
type Vertex struct {
	X, Y int
}

// ScaleMethod multiplies both coordinates of v by f in place; the pointer
// receiver means the caller's value is mutated.
func (v *Vertex) ScaleMethod(f int) {
	v.X *= f
	v.Y *= f
}

// ScaleFunc is the plain-function equivalent of ScaleMethod: it mutates the
// Vertex that v points to.
func ScaleFunc(v *Vertex, f int) {
	v.X *= f
	v.Y *= f
}

// main demonstrates that pointer-receiver methods can be called on an
// addressable value directly, while a plain function needs an explicit &.
func main() {
	v := Vertex{3, 4}
	v.ScaleMethod(2) // shorthand for (&v).ScaleMethod(2)
	ScaleFunc(&v, 10)

	p := &Vertex{4, 3}
	p.ScaleMethod(3)
	ScaleFunc(p, 8)

	fmt.Println(v, p)
}
package migrations

import (
	"github.com/jmoiron/sqlx"
)

// CreateCommandTable creates the `commands` table inside the given
// transaction. Rows are keyed by (channel, name) and carry the response
// text, enabled/restricted flags, a cooldown, an optional schedule
// interval, and audit columns (updated_by, updated_at).
func CreateCommandTable(tx *sqlx.Tx) error {
	_, err := tx.Exec("CREATE TABLE `commands` (`channel` varchar(255),`name` varchar(255),`response` varchar(255) NOT NULL,`enabled` tinyint UNSIGNED DEFAULT 1 NOT NULL,`restricted` tinyint UNSIGNED DEFAULT 0 NOT NULL,`cooldown` tinyint UNSIGNED DEFAULT 0 NOT NULL,`description` varchar(255),`schedule` int UNSIGNED DEFAULT 0 NOT NULL,`updated_by` varchar(255) NOT NULL,`updated_at` datetime DEFAULT CURRENT_TIMESTAMP, PRIMARY KEY(`channel`,`name`))")
	return err
}
package main

import "fmt"

// UserInterface declares the single method both user-defined types implement.
type UserInterface interface {
	InterfaceMethod() bool
}

// UserType1 is the first user-defined type.
type UserType1 struct {
	Values []int
}

// UserType2 is the second user-defined type.
type UserType2 struct {
	Values string
}

// InterfaceMethod implements UserInterface for *UserType1.
func (u *UserType1) InterfaceMethod() bool {
	return true
}

// InterfaceMethod implements UserInterface for *UserType2.
func (u *UserType2) InterfaceMethod() bool {
	return false
}

// ExampleInterfaceFunc works with any UserInterface implementation,
// dispatching dynamically through the interface.
func ExampleInterfaceFunc(value UserInterface) bool {
	// Call through the interface.
	return value.InterfaceMethod()
}

func main() {
	u1 := UserType1{}
	u2 := UserType2{}
	// Pointers are passed because the methods have pointer receivers.
	fmt.Println(ExampleInterfaceFunc(&u1))
	fmt.Println(ExampleInterfaceFunc(&u2))
}
package rand

import (
	"fmt"
	"math/rand"
	"time"
)

// RandomPairs shuffles members, splits them into teams of maxMembers, and
// prints each team with a room name. If the final remainder would leave a
// short team, it is merged into the previous one (the 2*maxMembers check).
// NOTE(review): only six rooms are defined; more than six teams would index
// past the end of rooms and panic — confirm expected team counts.
func RandomPairs(members []string, maxMembers int) {
	var newList []string
	type team []string
	var teams []team
	//teams := []rune{'A', 'B', 'C', 'D'}
	rooms := []string{"Room B", "Room C", "Room D", "Room E", "Room F", "Room G"}
	// Seed with the current time so each run produces a new shuffle.
	source := rand.NewSource(time.Now().UnixNano())
	r := rand.New(source)
	memberCount := len(members)
	// Build a shuffled copy of members via a random permutation of indexes.
	shuffledIndexes := r.Perm(memberCount)
	for _, v := range shuffledIndexes {
		newList = append(newList, members[v])
	}
	for i := 0; i < memberCount; i += maxMembers {
		if i+2*maxMembers <= memberCount {
			// At least one more full team fits after this one.
			teams = append(teams, newList[i:i+maxMembers])
			//fmt.Println(newList[i : i+maxMembers])
		} else {
			// Put everyone remaining into the last team to avoid a tiny group.
			teams = append(teams, newList[i:])
			//fmt.Println(newList[i:])
			break
		}
	}
	for i, v := range teams {
		fmt.Print(rooms[i] + " ")
		fmt.Println(v)
	}
}
package Longest_Increasing_Subsequence func lengthOfLIS(nums []int) int { increasingArray := make([]int, 0) replaceFirstBigerOne := func(value int) { left, right := 0, len(increasingArray)-1 for left <= right { mid := right - (right-left)/2 if increasingArray[mid] == value { return } if increasingArray[mid] > value { right = mid - 1 } else { left = mid + 1 } } increasingArray[left] = value } for _, c := range nums { if len(increasingArray) == 0 { increasingArray = append(increasingArray, c) continue } left, right := increasingArray[0], increasingArray[len(increasingArray)-1] if c == left || c == right { continue } if c > right { increasingArray = append(increasingArray, c) continue } if c < left { increasingArray[0] = c continue } replaceFirstBigerOne(c) } return len(increasingArray) }
package crudcontracts

import (
	"github.com/adamluzsi/frameless/internal/suites"
)

type (
	// EntType is a stand-in entity type used only for the compile-time
	// assertions below.
	EntType struct{ ID IDType }
	// IDType is the stand-in identifier type for EntType.
	IDType struct{}
)

// Compile-time check that every contract constructor in this package
// produces a suites.Suite.
var _ = []suites.Suite{
	Creator[EntType, IDType](nil),
	Finder[EntType, IDType](nil),
	QueryOne[EntType, IDType](nil),
	Updater[EntType, IDType](nil),
	Saver[EntType, IDType](nil),
	Deleter[EntType, IDType](nil),
	OnePhaseCommitProtocol[EntType, IDType](nil),
}
/*
 * HPE API
 *
 * API's for HPE User Interface
 *
 * API version: 1.0.1
 * Contact: kollisreekanth@gmail.com
 * Generated by: Swagger Codegen (https://github.com/swagger-api/swagger-codegen.git)
 */
package swagger

// LoginResponseModel is the payload returned by the login endpoint.
type LoginResponseModel struct {
	// Id is omitted from the JSON output when empty.
	Id       string `json:"id,omitempty"`
	Username string `json:"username"`
	// Token is the session token issued on successful login.
	Token string `json:"token"`
}
// Copyright (C) 2019-2020 Zilliz. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance
// with the License. You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software distributed under the License
// is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
// or implied. See the License for the specific language governing permissions and limitations under the License.

package querynode

// indexInfo bundles the metadata describing one built index: its name, the
// index/build identifiers, the storage paths of the index files, the build
// parameters, and whether it is ready to be loaded.
type indexInfo struct {
	indexName   string
	indexID     UniqueID
	buildID     UniqueID
	indexPaths  []string
	indexParams map[string]string
	readyLoad   bool
}

// newIndexInfo returns an indexInfo with the slice/map fields initialized
// so they are safe to append to / write into immediately.
func newIndexInfo() *indexInfo {
	return &indexInfo{
		indexPaths:  make([]string, 0),
		indexParams: make(map[string]string),
	}
}

// setIndexName records the index's name.
func (info *indexInfo) setIndexName(name string) {
	info.indexName = name
}

// setIndexID records the index identifier.
func (info *indexInfo) setIndexID(id UniqueID) {
	info.indexID = id
}

// setBuildID records the build identifier.
func (info *indexInfo) setBuildID(id UniqueID) {
	info.buildID = id
}

// setIndexPaths replaces the stored index file paths (no copy is made).
func (info *indexInfo) setIndexPaths(paths []string) {
	info.indexPaths = paths
}

// setIndexParams replaces the stored index parameters (no copy is made).
func (info *indexInfo) setIndexParams(params map[string]string) {
	info.indexParams = params
}

// setReadyLoad records whether the index is ready to be loaded.
func (info *indexInfo) setReadyLoad(load bool) {
	info.readyLoad = load
}

// getIndexName returns the index's name.
func (info *indexInfo) getIndexName() string {
	return info.indexName
}

// getIndexID returns the index identifier.
func (info *indexInfo) getIndexID() UniqueID {
	return info.indexID
}

// getBuildID returns the build identifier.
func (info *indexInfo) getBuildID() UniqueID {
	return info.buildID
}

// getIndexPaths returns the stored index file paths.
func (info *indexInfo) getIndexPaths() []string {
	return info.indexPaths
}

// getIndexParams returns the stored index parameters.
func (info *indexInfo) getIndexParams() map[string]string {
	return info.indexParams
}

// getReadyLoad reports whether the index is ready to be loaded.
func (info *indexInfo) getReadyLoad() bool {
	return info.readyLoad
}
package currency

import "time"

// ConverterInfo holds information about converter setup
type ConverterInfo interface {
	Source() string
	LastUpdated() time.Time
	Rates() *map[string]map[string]float64
	AdditionalInfo() interface{}
}

// converterInfo is the concrete, immutable implementation of ConverterInfo.
// NOTE(review): a pointer to a map is unusual in Go (maps are already
// reference types); presumably kept so callers share live rate updates —
// confirm before changing the interface.
type converterInfo struct {
	source         string
	lastUpdated    time.Time
	rates          *map[string]map[string]float64
	additionalInfo interface{}
}

// Source returns converter's URL source
func (ci converterInfo) Source() string {
	return ci.source
}

// LastUpdated returns converter's last updated time
func (ci converterInfo) LastUpdated() time.Time {
	return ci.lastUpdated
}

// Rates returns converter's internal rates
func (ci converterInfo) Rates() *map[string]map[string]float64 {
	return ci.rates
}

// AdditionalInfo returns converter's additional infos
func (ci converterInfo) AdditionalInfo() interface{} {
	return ci.additionalInfo
}
package memoryds

import (
	"time"

	"github.com/thisiserico/golabox/domain"
)

// WriteClient is an in-memory write-side store for products, orders,
// order items and payments. It is NOT safe for concurrent use (no locking).
type WriteClient struct {
	products []*domain.Product
	orders   []*domain.Order
	items    []*domain.Item
	payments []*domain.Payment
}

// NewWriteClient returns a WriteClient pre-seeded with demo products and one
// demo order; items and payments start empty.
func NewWriteClient() *WriteClient {
	return &WriteClient{
		products: initializeProducts(),
		orders:   initializeOrders(),
		items:    []*domain.Item{},
		payments: []*domain.Payment{},
	}
}

// UpsertProduct replaces the stored product with the same ID, or appends it.
// Always returns nil.
func (cl *WriteClient) UpsertProduct(product *domain.Product) error {
	for i, p := range cl.products {
		if p.ID.Equals(product.ID) {
			cl.products[i] = product
			return nil
		}
	}
	cl.products = append(cl.products, product)
	return nil
}

// GetProduct returns the product with the given ID, or nil if absent.
func (cl *WriteClient) GetProduct(id domain.ProductId) *domain.Product {
	for _, p := range cl.products {
		if p.ID.Equals(id) {
			return p
		}
	}
	return nil
}

// GetAllProducts returns the backing slice of all products (not a copy).
func (cl *WriteClient) GetAllProducts() []*domain.Product {
	return cl.products
}

// UpsertOrder replaces the stored order with the same ID, or appends it.
// Always returns nil.
func (cl *WriteClient) UpsertOrder(order *domain.Order) error {
	for i, o := range cl.orders {
		if o.ID.Equals(order.ID) {
			cl.orders[i] = order
			return nil
		}
	}
	cl.orders = append(cl.orders, order)
	return nil
}

// GetOrder returns the order with the given ID, or nil if absent.
func (cl *WriteClient) GetOrder(id domain.OrderId) *domain.Order {
	for _, o := range cl.orders {
		if o.ID.Equals(id) {
			return o
		}
	}
	return nil
}

// UpsertOrderItem replaces the stored item with the same ID, or appends it.
// Always returns nil.
func (cl *WriteClient) UpsertOrderItem(item *domain.Item) error {
	for i, oi := range cl.items {
		if oi.ID.Equals(item.ID) {
			cl.items[i] = item
			return nil
		}
	}
	cl.items = append(cl.items, item)
	return nil
}

// GetOrderItem returns the item with the given ID, or nil if absent.
func (cl *WriteClient) GetOrderItem(id domain.ItemId) *domain.Item {
	for _, oi := range cl.items {
		if oi.ID.Equals(id) {
			return oi
		}
	}
	return nil
}

// DeleteOrderItem removes the item with the given ID.
// Returns nil whether or not the item existed.
func (cl *WriteClient) DeleteOrderItem(id domain.ItemId) error {
	for i, oi := range cl.items {
		if oi.ID.Equals(id) {
			cl.items = append(cl.items[:i], cl.items[i+1:]...)
			return nil
		}
	}
	return nil
}

// UpsertPayment replaces the stored payment with the same ID, or appends it.
// Always returns nil.
func (cl *WriteClient) UpsertPayment(payment *domain.Payment) error {
	for i, p := range cl.payments {
		if p.ID.Equals(payment.ID) {
			cl.payments[i] = payment
			return nil
		}
	}
	cl.payments = append(cl.payments, payment)
	return nil
}

// initializeProducts builds the fixed demo product catalogue.
func initializeProducts() []*domain.Product {
	return []*domain.Product{
		&domain.Product{
			ID:    domain.ProductId("d21de357"),
			Name:  "olives",
			Stock: 100,
		},
		&domain.Product{
			ID:    domain.ProductId("aa53eeab"),
			Name:  "cookies",
			Stock: 35,
		},
		&domain.Product{
			ID:    domain.ProductId("324d6cc9"),
			Name:  "pasta",
			Stock: 76,
		},
		&domain.Product{
			ID:    domain.ProductId("b01fd3f4"),
			Name:  "chocolate",
			Stock: 12,
		},
		&domain.Product{
			ID:    domain.ProductId("f8948e3c"),
			Name:  "oil",
			Stock: 215,
		},
		&domain.Product{
			ID:    domain.ProductId("a088c4bc"),
			Name:  "tomato sauce",
			Stock: 73,
		},
	}
}

// initializeOrders builds the single empty demo order.
func initializeOrders() []*domain.Order {
	return []*domain.Order{
		&domain.Order{
			ID:        domain.OrderId("97ae944e"),
			CreatedAt: time.Now().UTC(),
			Items:     []domain.ItemId{},
			Payment:   "",
		},
	}
}
// SPDX-License-Identifier: MIT

package lang

// swiftNestMCommentBlock handles Swift-style nestable block comments.
// Nested inner comment blocks are consumed but their content is ignored.
type swiftNestMCommentBlock struct {
	begin  string
	end    string
	prefix []byte // per-line prefix to strip, e.g. the leading "*"

	begins []byte
	ends   []byte
	level  int8 // current nesting depth
}

// newSwiftNestMCommentBlock builds a blocker for nestable block comments.
//
// prefix is the symbol expected at the start of each line, e.g. the "*" in:
//
// /*
//  *
//  */
func newSwiftNestMCommentBlock(begin, end, prefix string) blocker {
	return &swiftNestMCommentBlock{
		begin:  begin,
		end:    end,
		prefix: []byte(prefix),

		begins: []byte(begin),
		ends:   []byte(end),
	}
}

// beginFunc reports whether the parser is positioned at the start of this
// comment block, incrementing the nesting level on a match.
func (b *swiftNestMCommentBlock) beginFunc(l *parser) bool {
	if l.Match(b.begin) {
		b.level++
		return true
	}
	return false
}

// endFunc consumes input until the end marker that closes the outermost
// block, tracking nested begin/end pairs via level, then converts the
// collected bytes to XML. ok is false if EOF is hit before the block closes.
func (b *swiftNestMCommentBlock) endFunc(l *parser) (data []byte, ok bool) {
	data = append(make([]byte, 0, 200), b.begins...)

LOOP:
	for {
		switch {
		case l.AtEOF():
			// Unterminated comment block.
			return nil, false
		case l.Match(b.end):
			data = append(data, b.ends...)
			b.level--
			if b.level == 0 {
				break LOOP
			}
		case l.Match(b.begin):
			data = append(data, b.begins...)
			b.level++
		default:
			data = append(data, l.Next(1)...)
		}
	}

	return convertMultipleCommentToXML(data, b.begins, b.ends, b.prefix), true
}
package handlerTable // Handler 注册函数 type Handler interface { CallFunc() } // HandlerFunc 注册函数类型 type HandlerFunc func() // CallFunc 实现注册函数 func (f HandlerFunc) CallFunc() { f() } // Table 函数注册表 type Table interface { HandlerFunc(f func()) error CallBack() } // 函数注册表结构体 type handerTable struct { Pool } // NewTable 新建一个函数注册表 func NewTable() Table { ht := &handerTable{Pool:NewPool(), } return ht } // HandlerFunc 注册一个函数 func (ht *handerTable) HandlerFunc(f func()) error { ht.Put(HandlerFunc(f)) return nil } // put 注册函数 func (ht *handerTable) put(h HandlerFunc) { ht.Put(Handler(h)) } // 取出所有函数并回调 func (ht *handerTable) CallBack() { var h Handler for { h = ht.Get() if h == nil { return } h.CallFunc() } }
package model

// StandardResponse is the generic JSON envelope returned by API handlers.
type StandardResponse struct {
	Description string `json:"description"` // human-readable explanation of the result
	Result      string `json:"result"`      // outcome status — presumably success/error; confirm against callers
	Value       string `json:"value"`       // payload value serialized as a string
}
package imitationPool import ( "runtime" "time" ) type goworker struct { //worker所在pool的指针 pool *Pool //接收task的channel task chan func() //时间标记,当worker被使用后并重新放入队列中时,更新这个字段 recycleTime time.Time } //启动worker,在初始话一个worker时调用 func (g *goworker) run() { //更新pool中运行goroutine数量 +1 g.pool.incRunning() go func() { defer func() { //更新pool中运行goroutine数量 -1 g.pool.decRunning() //将关闭的worker重新放入二级缓存 pool g.pool.workerCache.Put(g) if perr := recover(); perr != nil { if ph := g.pool.options.PanicHandle; ph != nil { ph(perr) } else { g.pool.options.Logger.Printf("worker exits from a panic: %v\n", perr) //var buf [4096]byte var buf []byte = make([]byte, 4096, 4096) n := runtime.Stack(buf, false) g.pool.options.Logger.Printf("worker exits from a panic: %v\n", string(buf[:n])) } } }() //循环监听task channel for t := range g.task { if t == nil { return } //调用任务功能函数 t() //回收worker近一级缓存 if ok := g.pool.revertWorker(g); !ok { return } } }() }
package portal import ( "fmt" "strings" "github.com/golang/glog" "k8s.io/apimachinery/pkg/types" "kope.io/auth/pkg/oauth/session" ) func (s *HTTPServer) mapUser(session *session.Session, info *session.UserInfo) (types.UID, error) { providerID := session.ProviderId if providerID == "" { return "", fmt.Errorf("providerID not specified") } conf, err := s.config.AuthProvider(providerID) if err != nil { return "", fmt.Errorf("error reading configuration for %q: %v", providerID, err) } if conf == nil { return "", fmt.Errorf("no provider configuration for %q", providerID) } validator, err := buildValidator(conf.PermitEmails) if err != nil { return "", fmt.Errorf("error building email validator: %v", err) } email := info.Email if email == "" { return "", fmt.Errorf("rejected login attempt without email: %s", info) } if !validator(email) { glog.Infof("rejected login attempt from %q", email) return "", fmt.Errorf("rejected login attempt, email %q not permitted", email) } user, err := s.tokenStore.MapToUser(info, true) if err != nil { return "", err } glog.Infof("mapped %s to %s", email, user.UID) return user.UID, nil } func buildValidator(permitEmails []string) (func(string) bool, error) { allowAll := false var exact []string var suffixes []string for _, permitEmail := range permitEmails { wildcardCount := strings.Count(permitEmail, "*") if wildcardCount == 0 { if permitEmail == "" { // TODO: Move to validation? // TODO: Maybe ignore invalid rules? return nil, fmt.Errorf("empty permitEmail not allowed") } exact = append(exact, permitEmail) } else if wildcardCount == 1 && strings.HasPrefix(permitEmail, "*") { if permitEmail == "*" { allowAll = true } else { // TODO: Block dangerous things i.e. require *@ or *. ? 
suffixes = append(suffixes, permitEmail[1:]) } } else { return nil, fmt.Errorf("Cannot parse permitEmail rule: %q", permitEmail) } } validator := func(email string) bool { if email == "" { return false } email = strings.TrimSpace(strings.ToLower(email)) if allowAll { return true } for _, s := range exact { if s == email { return true } } for _, suffix := range suffixes { if strings.HasSuffix(email, suffix) { return true } } return false } return validator, nil }
package main

import "fmt"

// main demonstrates switch fallthrough: the matched case "Asepnur" prints its
// greeting and then falls through the next two cases, so three lines are
// printed in total.
func main() {
	name := "Asepnur"
	switch name {
	case "Asepnur":
		fmt.Println("Hello Asepnur")
		fallthrough
	case "Muhammad":
		fmt.Println("Hello Muhammad")
		fallthrough
	case "Iskandar":
		fmt.Println("Hello Iskandar")
	case "Yusuf":
		fmt.Println("Hello Yusuf")
	default:
		fmt.Println("Unknown")
	}
}
// Copyright (C) 2019 Storj Labs, Inc.
// See LICENSE for copying information

package sync2

import (
	"context"
	"sync"
	"sync/atomic"
	"time"

	monkit "github.com/spacemonkeygo/monkit/v3"
	"golang.org/x/sync/errgroup"
)

// Cycle implements a controllable recurring event.
//
// Cycle control methods PANICS after Close has been called and don't have any
// effect after Stop has been called.
//
// Start or Run (only one of them, not both) must be only called once.
type Cycle struct {
	noCopy noCopy //nolint:structcheck

	stopsent int32 // set to 1 (atomically) once Stop has been sent
	runexec  int32 // set to 1 (atomically) once Start/Run has been invoked

	interval time.Duration // tick interval; mutable via ChangeInterval

	ticker  *time.Ticker     // owned by Run's goroutine
	control chan interface{} // carries the cycle* control messages below

	stopping chan struct{} // closed by Stop to request shutdown
	stopped  chan struct{} // closed by Run when it returns

	init sync.Once // guards channel construction in initialize

	delayStart bool // when true, skip the immediate first run of fn
}

type (
	// cycle control messages.
	cyclePause          struct{}
	cycleContinue       struct{}
	cycleChangeInterval struct{ Interval time.Duration }
	cycleTrigger        struct{ done chan struct{} }
)

// NewCycle creates a new cycle with the specified interval.
func NewCycle(interval time.Duration) *Cycle {
	cycle := &Cycle{}
	cycle.SetInterval(interval)
	return cycle
}

// SetInterval allows to change the interval before starting.
func (cycle *Cycle) SetInterval(interval time.Duration) {
	cycle.interval = interval
}

// SetDelayStart wait interval before first trigger on start/run.
func (cycle *Cycle) SetDelayStart() {
	cycle.delayStart = true
}

// initialize lazily creates the control/stop channels exactly once, so that
// control methods can be called before or after Run starts.
func (cycle *Cycle) initialize() {
	cycle.init.Do(func() {
		cycle.stopped = make(chan struct{})
		cycle.stopping = make(chan struct{})
		cycle.control = make(chan interface{})
	})
}

// Start runs the specified function with an errgroup.
func (cycle *Cycle) Start(ctx context.Context, group *errgroup.Group, fn func(ctx context.Context) error) {
	atomic.CompareAndSwapInt32(&cycle.runexec, 0, 1)
	group.Go(func() error {
		return cycle.Run(ctx, fn)
	})
}

// Run runs the specified in an interval.
//
// Every interval `fn` is started.
// When `fn` is not fast enough, it may skip some of those executions.
//
// Run PANICS if it's called after Stop has been called.
func (cycle *Cycle) Run(ctx context.Context, fn func(ctx context.Context) error) error {
	atomic.CompareAndSwapInt32(&cycle.runexec, 0, 1)
	cycle.initialize()
	// Signal Stop/Close waiters that the loop has fully exited.
	defer close(cycle.stopped)

	currentInterval := cycle.interval
	cycle.ticker = time.NewTicker(currentInterval)
	defer cycle.ticker.Stop()

	choreCtx := monkit.ResetContextSpan(ctx)

	if !cycle.delayStart {
		// Immediate first execution unless SetDelayStart was requested.
		if err := fn(choreCtx); err != nil {
			return err
		}
	}
	for {
		// prioritize stopping messages
		select {
		case <-cycle.stopping:
			return nil
		case <-ctx.Done():
			// handle control messages
			return ctx.Err()
		default:
		}

		// handle other messages as well
		select {
		case message := <-cycle.control:
			// handle control messages
			switch message := message.(type) {

			case cycleChangeInterval:
				currentInterval = message.Interval
				cycle.ticker.Stop()
				cycle.ticker = time.NewTicker(currentInterval)

			case cyclePause:
				cycle.ticker.Stop()
				// ensure we don't have ticks left
				select {
				case <-cycle.ticker.C:
				default:
				}

			case cycleContinue:
				cycle.ticker.Stop()
				cycle.ticker = time.NewTicker(currentInterval)

			case cycleTrigger:
				// trigger the function
				if err := fn(withManualTrigger(choreCtx)); err != nil {
					return err
				}
				if message.done != nil {
					close(message.done)
				}
			}

		case <-cycle.stopping:
			return nil

		case <-ctx.Done():
			// handle control messages
			return ctx.Err()

		case <-cycle.ticker.C:
			// trigger the function
			if err := fn(choreCtx); err != nil {
				return err
			}
		}
	}
}

// Close closes all resources associated with it.
//
// It MUST NOT be called concurrently.
func (cycle *Cycle) Close() {
	cycle.Stop()

	if atomic.LoadInt32(&cycle.runexec) == 1 {
		<-cycle.stopped
	}

	close(cycle.control)
}

// sendControl sends a control message; it gives up silently if the run loop
// has already stopped (so control methods are no-ops after Stop).
func (cycle *Cycle) sendControl(message interface{}) {
	cycle.initialize()
	select {
	case cycle.control <- message:
	case <-cycle.stopped:
	}
}

// Stop stops the cycle permanently.
func (cycle *Cycle) Stop() {
	cycle.initialize()
	// Only the first caller closes stopping; subsequent Stops are no-ops.
	if atomic.CompareAndSwapInt32(&cycle.stopsent, 0, 1) {
		close(cycle.stopping)
	}

	if atomic.LoadInt32(&cycle.runexec) == 1 {
		<-cycle.stopped
	}
}

// ChangeInterval allows to change the ticker interval after it has started.
func (cycle *Cycle) ChangeInterval(interval time.Duration) {
	cycle.sendControl(cycleChangeInterval{interval})
}

// Pause pauses the cycle.
func (cycle *Cycle) Pause() {
	cycle.sendControl(cyclePause{})
}

// Restart restarts the ticker from 0.
func (cycle *Cycle) Restart() {
	cycle.sendControl(cycleContinue{})
}

// Trigger ensures that the loop is done at least once.
// If it's currently running it waits for the previous to complete and then runs.
func (cycle *Cycle) Trigger() {
	cycle.sendControl(cycleTrigger{})
}

// TriggerWait ensures that the loop is done at least once and waits for completion.
// If it's currently running it waits for the previous to complete and then runs.
func (cycle *Cycle) TriggerWait() {
	done := make(chan struct{})

	cycle.sendControl(cycleTrigger{done})
	select {
	case <-done:
	case <-cycle.stopped:
	}
}

// cycleManualTriggerTag is the context key marking manually triggered runs.
type cycleManualTriggerTag struct{}

// withManualTrigger tags ctx so fn can detect a Trigger/TriggerWait run.
func withManualTrigger(ctx context.Context) context.Context {
	return context.WithValue(ctx, cycleManualTriggerTag{}, true)
}

// IsManuallyTriggeredCycle returns whether ctx comes from a context
// that was started due to a `Trigger` or `TriggerWait` call in Cycle.
func IsManuallyTriggeredCycle(ctx context.Context) bool {
	val := ctx.Value(cycleManualTriggerTag{})
	if val == nil {
		return false
	}
	triggered, ok := val.(bool)
	return triggered && ok
}
package models

// User represents an application user together with their contacts.
type User struct {
	ID       string    `json:"id"`       // unique user identifier
	Contacts []Contact `json:"contacts"` // contacts belonging to this user (Contact defined elsewhere in package)
}
package lc import ( "bytes" "strings" ) // Time: O(n) // Benchmark: 0ms 2.1mb | 100% func reorderSpaces(text string) string { var spaceCount int for _, ch := range text { if ch == ' ' { spaceCount++ } } words := strings.Fields(text) var spacesBetween, spacesAfter int if len(words) <= 1 { spacesAfter = spaceCount } else { spacesBetween = spaceCount / (len(words) - 1) spacesAfter = spaceCount % (len(words) - 1) } var buf bytes.Buffer for i, w := range words { buf.WriteString(w) if i == len(words)-1 { break } for j := 0; j < spacesBetween; j++ { buf.WriteString(" ") } } for i := 0; i < spacesAfter; i++ { buf.WriteString(" ") } return buf.String() }
// Copyright (c) Mainflux
// SPDX-License-Identifier: Apache-2.0

package mocks

import (
	"context"
	"fmt"
	"strings"
	"sync"
	"time"

	"github.com/mainflux/mainflux/auth"
)

var _ auth.GroupRepository = (*groupRepositoryMock)(nil)

// groupRepositoryMock is an in-memory, mutex-guarded implementation of
// auth.GroupRepository for tests.
type groupRepositoryMock struct {
	mu sync.Mutex
	// Map of groups, group id as a key.
	// groups map[GroupID]auth.Group
	groups map[string]auth.Group
	// Map of groups with group id as key that are
	// children (i.e. has same parent id) is element
	// in children's map where parent id is key.
	// children map[ParentID]map[GroupID]auth.Group
	children map[string]map[string]auth.Group
	// Map of parents' id with child group id as key.
	// Each child has one parent.
	// parents map[ChildID]ParentID
	parents map[string]string
	// Map of groups (with group id as key) which
	// represent memberships is element in
	// memberships' map where member id is a key.
	// memberships map[MemberID]map[GroupID]auth.Group
	memberships map[string]map[string]auth.Group
	// Map of group members where member id is a key
	// is an element in the map members where group id is a key.
	// members map[type][GroupID]map[MemberID]MemberID
	members map[string]map[string]map[string]string
}

// NewGroupRepository creates in-memory user repository
func NewGroupRepository() auth.GroupRepository {
	return &groupRepositoryMock{
		groups:      make(map[string]auth.Group),
		children:    make(map[string]map[string]auth.Group),
		parents:     make(map[string]string),
		memberships: make(map[string]map[string]auth.Group),
		members:     make(map[string]map[string]map[string]string),
	}
}

// Save stores a new group, wiring up parent/child links and computing the
// dotted materialized Path ("parentPath.groupID") and its Level (depth).
func (grm *groupRepositoryMock) Save(ctx context.Context, group auth.Group) (auth.Group, error) {
	grm.mu.Lock()
	defer grm.mu.Unlock()
	if _, ok := grm.groups[group.ID]; ok {
		return auth.Group{}, auth.ErrGroupConflict
	}
	path := group.ID

	if group.ParentID != "" {
		parent, ok := grm.groups[group.ParentID]
		if !ok {
			// Parent must already exist.
			return auth.Group{}, auth.ErrCreateGroup
		}
		if _, ok := grm.children[group.ParentID]; !ok {
			grm.children[group.ParentID] = make(map[string]auth.Group)
		}
		grm.children[group.ParentID][group.ID] = group
		grm.parents[group.ID] = group.ParentID
		path = fmt.Sprintf("%s.%s", parent.Path, path)
	}

	group.Path = path
	group.Level = len(strings.Split(path, "."))

	grm.groups[group.ID] = group
	return group, nil
}

// Update overwrites the mutable fields (Name, Description, Metadata) of an
// existing group and stamps UpdatedAt.
func (grm *groupRepositoryMock) Update(ctx context.Context, group auth.Group) (auth.Group, error) {
	grm.mu.Lock()
	defer grm.mu.Unlock()
	up, ok := grm.groups[group.ID]
	if !ok {
		return auth.Group{}, auth.ErrNotFound
	}
	up.Name = group.Name
	up.Description = group.Description
	up.Metadata = group.Metadata
	up.UpdatedAt = time.Now()

	grm.groups[group.ID] = up
	return up, nil
}

// Delete removes a group, refusing when the group or any direct child still
// has members. Only direct children are checked and cleaned up.
func (grm *groupRepositoryMock) Delete(ctx context.Context, id string) error {
	grm.mu.Lock()
	defer grm.mu.Unlock()
	if _, ok := grm.groups[id]; !ok {
		return auth.ErrGroupNotFound
	}

	if len(grm.members[id]) > 0 {
		return auth.ErrGroupNotEmpty
	}

	// This is not quite exact, it should go in depth
	for _, ch := range grm.children[id] {
		if len(grm.members[ch.ID]) > 0 {
			return auth.ErrGroupNotEmpty
		}
	}

	// This is not quite exact, it should go in depth
	delete(grm.groups, id)
	for _, ch := range grm.children[id] {
		delete(grm.members, ch.ID)
	}

	delete(grm.children, id)

	return nil
}

// RetrieveByID returns the group with the given id.
func (grm *groupRepositoryMock) RetrieveByID(ctx context.Context, id string) (auth.Group, error) {
	grm.mu.Lock()
	defer grm.mu.Unlock()

	val, ok := grm.groups[id]
	if !ok {
		return auth.Group{}, auth.ErrGroupNotFound
	}
	return val, nil
}

// RetrieveAll returns every stored group. Pagination metadata (pm) is
// ignored; iteration order is map order (random).
func (grm *groupRepositoryMock) RetrieveAll(ctx context.Context, pm auth.PageMetadata) (auth.GroupPage, error) {
	grm.mu.Lock()
	defer grm.mu.Unlock()
	var items []auth.Group
	for _, g := range grm.groups {
		items = append(items, g)
	}

	return auth.GroupPage{
		Groups: items,
		PageMetadata: auth.PageMetadata{
			Total: uint64(len(items)),
		},
	}, nil
}

// Unassign removes members from a group across all member types.
// NOTE(review): if a member id is missing from any one type bucket this
// returns ErrGroupNotFound even when it exists under another type — confirm
// whether that matches the real repository's semantics.
func (grm *groupRepositoryMock) Unassign(ctx context.Context, groupID string, memberIDs ...string) error {
	grm.mu.Lock()
	defer grm.mu.Unlock()
	if _, ok := grm.groups[groupID]; !ok {
		return auth.ErrGroupNotFound
	}
	for _, memberID := range memberIDs {
		for typ, m := range grm.members[groupID] {
			_, ok := m[memberID]
			if !ok {
				return auth.ErrGroupNotFound
			}
			delete(grm.members[groupID][typ], memberID)
			delete(grm.memberships[memberID], groupID)
		}
	}
	return nil
}

// Assign adds members of the given type to a group, creating the nested maps
// on first use, and records the reverse membership link.
func (grm *groupRepositoryMock) Assign(ctx context.Context, groupID, groupType string, memberIDs ...string) error {
	grm.mu.Lock()
	defer grm.mu.Unlock()
	if _, ok := grm.groups[groupID]; !ok {
		return auth.ErrGroupNotFound
	}
	if _, ok := grm.members[groupID]; !ok {
		grm.members[groupID] = make(map[string]map[string]string)
	}

	for _, memberID := range memberIDs {
		if _, ok := grm.members[groupID][groupType]; !ok {
			grm.members[groupID][groupType] = make(map[string]string)
		}
		if _, ok := grm.memberships[memberID]; !ok {
			grm.memberships[memberID] = make(map[string]auth.Group)
		}

		grm.members[groupID][groupType][memberID] = memberID
		grm.memberships[memberID][groupID] = grm.groups[groupID]
	}
	return nil
}

// Memberships returns the page [Offset, Offset+Limit) of the groups the
// member belongs to. Iteration order is map order (random), so the page
// contents are not deterministic.
func (grm *groupRepositoryMock) Memberships(ctx context.Context, memberID string, pm auth.PageMetadata) (auth.GroupPage, error) {
	grm.mu.Lock()
	defer grm.mu.Unlock()
	var items []auth.Group

	first := uint64(pm.Offset)
	last := first + uint64(pm.Limit)

	i := uint64(0)
	for _, g := range grm.memberships[memberID] {
		if i >= first && i < last {
			items = append(items, g)
		}
		i++
	}

	return auth.GroupPage{
		Groups: items,
		PageMetadata: auth.PageMetadata{
			Limit:  pm.Limit,
			Offset: pm.Offset,
			Total:  uint64(len(items)),
		},
	}, nil
}

// Members returns the page [Offset, Offset+Limit) of members of the given
// type in a group. Errors when the group/type bucket does not exist.
func (grm *groupRepositoryMock) Members(ctx context.Context, groupID, groupType string, pm auth.PageMetadata) (auth.MemberPage, error) {
	grm.mu.Lock()
	defer grm.mu.Unlock()
	var items []auth.Member
	members, ok := grm.members[groupID][groupType]
	if !ok {
		return auth.MemberPage{}, auth.ErrGroupNotFound
	}

	first := uint64(pm.Offset)
	last := first + uint64(pm.Limit)

	i := uint64(0)
	for _, g := range members {
		if i >= first && i < last {
			items = append(items, auth.Member{ID: g, Type: groupType})
		}
		i++
	}
	return auth.MemberPage{
		Members: items,
		PageMetadata: auth.PageMetadata{
			Total: uint64(len(items)),
		},
	}, nil
}

// RetrieveAllParents returns the group and its ancestor chain up to the root.
func (grm *groupRepositoryMock) RetrieveAllParents(ctx context.Context, groupID string, pm auth.PageMetadata) (auth.GroupPage, error) {
	grm.mu.Lock()
	defer grm.mu.Unlock()
	if groupID == "" {
		return auth.GroupPage{}, nil
	}

	group, ok := grm.groups[groupID]
	if !ok {
		return auth.GroupPage{}, auth.ErrGroupNotFound
	}

	groups := make([]auth.Group, 0)
	groups, err := grm.getParents(groups, group)
	if err != nil {
		return auth.GroupPage{}, err
	}

	return auth.GroupPage{
		Groups: groups,
		PageMetadata: auth.PageMetadata{
			Total: uint64(len(groups)),
		},
	}, nil
}

// getParents recursively appends group and each of its ancestors.
// Panics on a dangling parent link (acceptable in a test mock).
func (grm *groupRepositoryMock) getParents(groups []auth.Group, group auth.Group) ([]auth.Group, error) {
	groups = append(groups, group)
	parentID, ok := grm.parents[group.ID]
	if !ok && parentID == "" {
		// No parent recorded: group is a root.
		return groups, nil
	}
	parent, ok := grm.groups[parentID]
	if !ok {
		panic(fmt.Sprintf("parent with id: %s not found", parentID))
	}
	return grm.getParents(groups, parent)
}

// RetrieveAllChildren returns the group followed by every group that has any
// parent recorded — NOTE(review): it does not filter to descendants of
// groupID; confirm against the real repository's behavior.
func (grm *groupRepositoryMock) RetrieveAllChildren(ctx context.Context, groupID string, pm auth.PageMetadata) (auth.GroupPage, error) {
	grm.mu.Lock()
	defer grm.mu.Unlock()
	group, ok := grm.groups[groupID]
	if !ok {
		return auth.GroupPage{}, nil
	}

	groups := make([]auth.Group, 0)
	groups = append(groups, group)
	for ch := range grm.parents {
		g, ok := grm.groups[ch]
		if !ok {
			panic(fmt.Sprintf("child with id %s not found", ch))
		}
		groups = append(groups, g)
	}

	return auth.GroupPage{
		Groups: groups,
		PageMetadata: auth.PageMetadata{
			Total:  uint64(len(groups)),
			Offset: pm.Offset,
			Limit:  pm.Limit,
		},
	}, nil
}
package dao import ( "db" "time" "types" "github.com/google/uuid" "github.com/kisielk/sqlstruct" ) //RecoverDAO - data access for recovery requests type RecoverDAO struct { } //CreateRecovery - creates a new recovery func (dao RecoverDAO) CreateRecovery(account *types.Account, db *db.MySQL) (*types.Recovery, error) { recovery := types.Recovery{ID: uuid.New().String(), AccountID: account.ID, Created: time.Now(), Email: account.Email} stmt, err := db.PreparedQuery("INSERT INTO recover (id, accountId, created, email) VALUES(?,?,?,?)") if err != nil { return nil, err } rows, err := stmt.Query(recovery.ID, recovery.AccountID, recovery.Created, recovery.Email) if err != nil { return nil, err } stmt.Close() defer rows.Close() return &recovery, nil } //GetRecovery - returns a recovery from db func (dao RecoverDAO) GetRecovery(recovery *types.Recovery, db *db.MySQL) (*types.Recovery, error) { stmt, err := db.PreparedQuery("SELECT * FROM recover WHERE id = ?") if err != nil { return nil, err } rows, err := stmt.Query(recovery.ID) if err != nil { return nil, err } stmt.Close() defer rows.Close() for rows.Next() { rec := types.Recovery{} err = sqlstruct.Scan(&rec, rows) if err != nil { return nil, err } return &rec, nil } return nil, nil } //FinishRecovery - completes a account recovery process func (dao RecoverDAO) FinishRecovery(account *types.Account, recoveryRequest *types.FinalRecoveryRequest, recovery *types.Recovery, db *db.MySQL) (string, error) { stmt, err := db.PreparedQuery("UPDATE users SET password = ? WHERE id = ?") if err != nil { return "", err } rows, err := stmt.Query(account.Password, account.ID) if err != nil { return "", err } stmt.Close() defer rows.Close() //If this fails it will expire within the HOUR. The request is already completed. _, _ = db.SimpleQuery("DELETE FROM recover WHERE id = '" + recovery.ID + "'") return "", nil }
/**
Variable declarations in Go:

1. Declare with an explicit type; without an initializer the zero value is used:
   var name type
   name = value
2. Direct assignment — the compiler infers the type:
   var name = value
3. Short form omitting the var keyword (note: only valid for names not already
   declared, otherwise the compiler reports an error):
   name := value

e.g.:
   var a int = 1
   var b = 1
   v := 1
**/
package main

// a's type is inferred (int); b has an explicit type; c is left at
// bool's zero value (false).
var a = 1
var b string = "我是一段字符串"
var c bool

func main() {
	println(a, b, c)
}

// Output: 1 我是一段字符串 false
package main

import (
	"testing"
)

// BubbleSorter describes a sorter.
// NOTE(review): Bubble.sort below takes a []int argument, so Bubble does NOT
// satisfy this interface — confirm whether it is still needed.
type BubbleSorter interface {
	sort()
}

// Bubble is a named sorter used by the test below.
type Bubble struct {
	name string
}

// TestEbullition sorts a sample slice ascending and logs the result.
func TestEbullition(t *testing.T) {
	array := []int{4, 93, 22, 86, 57, 12, 29}
	bubble := Bubble{name: "冒泡---从小到大---稳定---n*n---"}
	bubble.sort(array)
	t.Log(array)
}

/**
sort orders array ascending in place.
Note: despite the name this is not a classic adjacent-swap bubble sort —
each outer pass swaps the smallest remaining element into position i
(exchange/selection style). O(n*n) comparisons.
*/
func (b Bubble) sort(array []int) {
	for i := 0; i < len(array); i++ {
		for j := i + 1; j < len(array); j++ {
			if array[i] > array[j] {
				// smallest element moves to the left
				array[i], array[j] = array[j], array[i]
			}
		}
	}
}

// zSort orders array descending in place (largest first), mirror of sort.
func (b Bubble) zSort(array []int) {
	for i := 0; i < len(array); i++ {
		for j := i + 1; j < len(array); j++ {
			if array[i] < array[j] {
				// largest element moves to the left
				array[i], array[j] = array[j], array[i]
			}
		}
	}
}

// bubbleSort is a classic adjacent-swap bubble sort, ascending in place.
func bubbleSort(nums []int) {
	for i := 0; i < len(nums); i++ {
		for j := 1; j < len(nums); j++ {
			if nums[j] < nums[j-1] {
				// swap adjacent out-of-order pair
				nums[j], nums[j-1] = nums[j-1], nums[j]
			}
		}
	}
}
package util import ( "fmt" "github.com/spiral/roadrunner" "strings" ) // LogEvent outputs rr event into given logger and return false if event was not handled. func StdErrOutput(event int, ctx interface{}) bool { // outputs switch event { case roadrunner.EventStderrOutput: for _, line := range strings.Split(string(ctx.([]byte)), "\n") { if line == "" { continue } fmt.Println(strings.Trim(line, "\r\n")) } return true } return false }
// Copyright 2021 The Cockroach Authors.
//
// Licensed as a CockroachDB Enterprise file under the Cockroach Community
// License (the "License"); you may not use this file except in compliance with
// the License. You may obtain a copy of the License at
//
// https://github.com/cockroachdb/cockroach/blob/master/licenses/CCL.txt

package streamclient

import (
	"context"
	"testing"

	_ "github.com/cockroachdb/cockroach/pkg/ccl/changefeedccl" // Ensure changefeed init hooks run.
	_ "github.com/cockroachdb/cockroach/pkg/ccl/kvccl/kvtenantccl" // Ensure we can start tenant.
	"github.com/cockroachdb/cockroach/pkg/ccl/streamingccl"
	"github.com/cockroachdb/cockroach/pkg/ccl/streamingccl/streamingtest"
	_ "github.com/cockroachdb/cockroach/pkg/ccl/streamingccl/streamproducer" // Ensure we can start replication stream.
	"github.com/cockroachdb/cockroach/pkg/sql/catalog/catalogkv"
	"github.com/cockroachdb/cockroach/pkg/util/leaktest"
	"github.com/cockroachdb/cockroach/pkg/util/log"
	"github.com/stretchr/testify/require"
)

// channelFeedSource wraps the eventsCh returned from a client. It expects that
// no errors are returned from the client.
type channelFeedSource struct {
	t       *testing.T
	eventCh chan streamingccl.Event
	errCh   chan error
}

var _ streamingtest.FeedSource = (*channelFeedSource)(nil)

// Next implements the streamingtest.FeedSource interface.
// It first drains errCh non-blockingly (failing the test on any error),
// then blocks for the next event; ok is false once eventCh is closed.
func (f *channelFeedSource) Next() (streamingccl.Event, bool) {
	// First check for any errors.
	select {
	case err := <-f.errCh:
		require.NoError(f.t, err)
		return nil, false
	default:
	}
	event, haveMoreRows := <-f.eventCh
	return event, haveMoreRows
}

// Close implements the streamingtest.FeedSource interface.
// NOTE(review): this closes eventCh from the consumer side; safe here only
// because ingestion is cancelled before Close — confirm the producer has
// stopped writing by then.
func (f *channelFeedSource) Close() {
	close(f.eventCh)
}

// TestSinklessReplicationClient starts a tenant, opens a sinkless replication
// stream against it, and verifies that both historical and ongoing KV
// versions for a row are observed in order.
func TestSinklessReplicationClient(t *testing.T) {
	defer leaktest.AfterTest(t)()
	defer log.Scope(t).Close(t)

	h, cleanup := streamingtest.NewReplicationHelper(t)
	defer cleanup()

	// Seed the tenant with two tables and initial rows.
	h.Tenant.SQL.Exec(t, `
CREATE DATABASE d;
CREATE TABLE d.t1(i int primary key, a string, b string);
CREATE TABLE d.t2(i int primary key);
INSERT INTO d.t1 (i) VALUES (42);
INSERT INTO d.t2 VALUES (2);
`)
	t1 := catalogkv.TestingGetTableDescriptor(h.SysServer.DB(), h.Tenant.Codec, "d", "t1")

	// Point the stream address at the tenant by tagging the PG URL.
	pgURL := h.PGUrl
	q := pgURL.Query()
	q.Set(TenantID, h.Tenant.ID.String())
	pgURL.RawQuery = q.Encode()
	sa := streamingccl.StreamAddress(pgURL.String())

	client := &sinklessReplicationClient{}
	top, err := client.GetTopology(sa)
	require.NoError(t, err)
	require.Equal(t, 1, len(top.Partitions))
	pa := top.Partitions[0]
	require.Equal(t, streamingccl.PartitionAddress(pgURL.String()), pa)

	ctx := context.Background()

	// One update before startTime (not streamed), two after it.
	h.Tenant.SQL.Exec(t, `UPDATE d.t1 SET b = 'world' WHERE i = 42`)
	startTime := h.SysServer.Clock().Now()
	h.Tenant.SQL.Exec(t, `UPDATE d.t1 SET a = 'привет' WHERE i = 42`)
	h.Tenant.SQL.Exec(t, `UPDATE d.t1 SET b = 'мир' WHERE i = 42`)

	t.Run("replicate_existing_tenant", func(t *testing.T) {
		clientCtx, cancelIngestion := context.WithCancel(ctx)
		eventCh, errCh, err := client.ConsumePartition(clientCtx, pa, startTime)
		require.NoError(t, err)
		feedSource := &channelFeedSource{eventCh: eventCh, errCh: errCh}
		feed := streamingtest.MakeReplicationFeed(t, feedSource)

		// We should observe 2 versions of this key: one with ("привет", "world"), and a later
		// version ("привет", "мир")
		expected := streamingtest.EncodeKV(t, h.Tenant.Codec, t1, 42, "привет", "world")
		firstObserved := feed.ObserveKey(expected.Key)
		require.Equal(t, expected.Value.RawBytes, firstObserved.Value.RawBytes)

		expected = streamingtest.EncodeKV(t, h.Tenant.Codec, t1, 42, "привет", "мир")
		secondObserved := feed.ObserveKey(expected.Key)
		require.Equal(t, expected.Value.RawBytes, secondObserved.Value.RawBytes)
		feed.ObserveResolved(secondObserved.Value.Timestamp)
		cancelIngestion()
	})
}
// Copyright (C) 2019-2020 Zilliz. All rights reserved. // // Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance // with the License. You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software distributed under the License // is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express // or implied. See the License for the specific language governing permissions and limitations under the License. package datanode import ( "sync" "go.uber.org/zap" "github.com/milvus-io/milvus/internal/log" "github.com/milvus-io/milvus/internal/msgstream" "github.com/milvus-io/milvus/internal/proto/commonpb" "github.com/milvus-io/milvus/internal/proto/datapb" "github.com/milvus-io/milvus/internal/proto/internalpb" "github.com/milvus-io/milvus/internal/util/flowgraph" "github.com/milvus-io/milvus/internal/util/trace" "github.com/opentracing/opentracing-go" ) type ddNode struct { BaseNode clearSignal chan<- UniqueID collectionID UniqueID mu sync.RWMutex seg2SegInfo map[UniqueID]*datapb.SegmentInfo // Segment ID to UnFlushed Segment vchanInfo *datapb.VchannelInfo } func (ddn *ddNode) Name() string { return "ddNode" } func (ddn *ddNode) Operate(in []flowgraph.Msg) []flowgraph.Msg { // log.Debug("DDNode Operating") if len(in) != 1 { log.Error("Invalid operate message input in ddNode", zap.Int("input length", len(in))) // TODO: add error handling } if len(in) == 0 { return []flowgraph.Msg{} } msMsg, ok := in[0].(*MsgStreamMsg) if !ok { log.Error("type assertion failed for MsgStreamMsg") return []flowgraph.Msg{} // TODO: add error handling } var spans []opentracing.Span for _, msg := range msMsg.TsMessages() { sp, ctx := trace.StartSpanFromContext(msg.TraceCtx()) spans = append(spans, sp) msg.SetTraceCtx(ctx) } if msMsg == nil { return []Msg{} } var iMsg = insertMsg{ insertMessages: 
make([]*msgstream.InsertMsg, 0), timeRange: TimeRange{ timestampMin: msMsg.TimestampMin(), timestampMax: msMsg.TimestampMax(), }, startPositions: make([]*internalpb.MsgPosition, 0), endPositions: make([]*internalpb.MsgPosition, 0), } for _, msg := range msMsg.TsMessages() { switch msg.Type() { case commonpb.MsgType_DropCollection: if msg.(*msgstream.DropCollectionMsg).GetCollectionID() == ddn.collectionID { log.Info("Destroying current flowgraph", zap.Any("collectionID", ddn.collectionID)) ddn.clearSignal <- ddn.collectionID } case commonpb.MsgType_Insert: log.Debug("DDNode with insert messages") if msg.EndTs() < FilterThreshold { log.Info("Filtering Insert Messages", zap.Uint64("Message endts", msg.EndTs()), zap.Uint64("FilterThreshold", FilterThreshold), ) if ddn.filterFlushedSegmentInsertMessages(msg.(*msgstream.InsertMsg)) { continue } } iMsg.insertMessages = append(iMsg.insertMessages, msg.(*msgstream.InsertMsg)) } } iMsg.startPositions = append(iMsg.startPositions, msMsg.StartPositions()...) iMsg.endPositions = append(iMsg.endPositions, msMsg.EndPositions()...) 
var res Msg = &iMsg for _, sp := range spans { sp.Finish() } return []Msg{res} } func (ddn *ddNode) filterFlushedSegmentInsertMessages(msg *msgstream.InsertMsg) bool { if ddn.isFlushed(msg.GetSegmentID()) { return true } ddn.mu.Lock() if si, ok := ddn.seg2SegInfo[msg.GetSegmentID()]; ok { if msg.EndTs() <= si.GetDmlPosition().GetTimestamp() { return true } delete(ddn.seg2SegInfo, msg.GetSegmentID()) } ddn.mu.Unlock() return false } func (ddn *ddNode) isFlushed(segmentID UniqueID) bool { ddn.mu.Lock() defer ddn.mu.Unlock() for _, id := range ddn.vchanInfo.GetFlushedSegments() { if id == segmentID { return true } } return false } func newDDNode(clearSignal chan<- UniqueID, collID UniqueID, vchanInfo *datapb.VchannelInfo) *ddNode { baseNode := BaseNode{} baseNode.SetMaxParallelism(Params.FlowGraphMaxQueueLength) si := make(map[UniqueID]*datapb.SegmentInfo) for _, us := range vchanInfo.GetUnflushedSegments() { si[us.GetID()] = us } return &ddNode{ BaseNode: baseNode, clearSignal: clearSignal, collectionID: collID, seg2SegInfo: si, vchanInfo: vchanInfo, } }
package httpx

import (
	"fmt"
	"net/http"
	"strings"

	"github.com/socialpoint-labs/bsk/metrics"
)

// InstrumentDecorator returns an adapter that instrument requests with some metrics:
// - http.request_duration: requests duration
// - http.requests: number of requests
//
// Metrics are tagged with the HTTP method, requests path, response status code and response status class.
func InstrumentDecorator(met metrics.Metrics, t ...metrics.Tag) Decorator {
	return func(h http.Handler) http.Handler {
		return http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
			timer := met.Timer("http.request_duration")
			timer.Start()

			// Default to 200: a handler that writes a body without calling
			// WriteHeader implicitly responds with http.StatusOK, and the
			// original left status at its zero value in that case.
			delegate := &responseWriterDelegator{ResponseWriter: w, status: http.StatusOK}
			h.ServeHTTP(delegate, r)
			code := delegate.status

			// Copy the base tags into a fresh slice: appending directly to
			// the shared variadic slice `t` can write into its backing
			// array, racing between concurrent requests.
			tags := make([]metrics.Tag, 0, len(t)+4)
			tags = append(tags, t...)
			tags = append(tags,
				metrics.Tag{Key: "method", Value: strings.ToLower(r.Method)},
				metrics.Tag{Key: "path", Value: r.URL.EscapedPath()},
				metrics.Tag{Key: "code", Value: code},
				metrics.Tag{Key: "class", Value: httpStatusCodeClass(code)},
			)

			timer.WithTags(tags...).Stop()
		})
	}
}

// responseWriterDelegator is an implementation of a http.ResponseWriter that keeps track of the HTTP status code
// written during the request/response lifecycle
type responseWriterDelegator struct {
	http.ResponseWriter
	status int // last status code passed to WriteHeader (defaults to 200)
}

// WriteHeader records the status code before delegating to the wrapped writer.
func (r *responseWriterDelegator) WriteHeader(code int) {
	r.status = code
	r.ResponseWriter.WriteHeader(code)
}

// httpStatusCodeClass maps a status code to its class, e.g. 404 -> "4xx".
func httpStatusCodeClass(code int) string {
	return fmt.Sprintf("%dxx", code/100)
}
package logger

import (
	"testing"

	configs "github.com/fwchen/jellyfish/config"
	"github.com/stretchr/testify/assert"
)

// TestInitLogger checks that initializing the logger at "info" level
// succeeds and populates the package-level sugared logger.
func TestInitLogger(t *testing.T) {
	cfg := configs.LoggerConfig{Level: "info"}

	assert.Nil(t, InitLogger(cfg))
	assert.NotNil(t, sugaredLogger)
}
/*
 * Copyright 2021 American Express
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
 * or implied. See the License for the specific language governing
 * permissions and limitations under the License.
 */

package core

import (
	"os"
	"path/filepath"
	"testing"

	cfgReader "github.com/americanexpress/earlybird/pkg/config"
	"github.com/americanexpress/earlybird/pkg/scan"
	"github.com/americanexpress/earlybird/pkg/utils"
)

// eb is the shared config under test; tests mutate it and reset via cleanup().
var eb EarlybirdCfg

// setup points the shared config at the repository's bundled label,
// solution and false-positive configuration directories.
func setup() {
	wd := utils.MustGetWD()
	eb.Config.LabelsConfigDir = filepath.Join(wd, "../../config/labels")
	eb.Config.SolutionsConfigDir = filepath.Join(wd, "../../config/solutions")
	eb.Config.FalsePositivesConfigDir = filepath.Join(wd, "../../config/falsepositives")
}

// cleanup resets the shared config back to its zero value.
func cleanup() {
	eb = EarlybirdCfg{}
}

// TestMain runs setup and initializes the scan package once for the whole
// test binary before delegating to the individual tests.
func TestMain(m *testing.M) {
	setup()
	scan.Init(eb.Config)
	os.Exit(m.Run())
}

// Program will exit with error if config init fails
func TestEarlybirdCfg_ConfigInit(t *testing.T) {
	eb.ConfigInit()
}

// TestEarlybirdCfg_Scan smoke-tests a scan over the current working directory.
func TestEarlybirdCfg_Scan(t *testing.T) {
	eb.Config.SearchDir = utils.MustGetWD()
	eb.Scan()
}

// TestEarlybirdCfg_GitClone clones a known test repository; skipped unless
// the "local" environment variable is set (avoids network use in CI).
func TestEarlybirdCfg_GitClone(t *testing.T) {
	if os.Getenv("local") == "" {
		t.Skip("If test cases not running locally, skip cloning external repositories for CI/CD purposes.")
	}
	var (
		FakeRepo = "https://github.com/carnal0wnage/fake_commited_secrets"
		RepoUser string
		Project  string
	)
	ptr := PTRGitConfig{
		Repo:     &FakeRepo,
		RepoUser: &RepoUser,
		Project:  &Project,
	}
	eb.GitClone(ptr)
	//Delete temporary cloned repository directory
	utils.DeleteGit(FakeRepo, eb.Config.SearchDir)
}

// TestEarlybirdCfg_getDefaultModuleSettings verifies that module settings
// left unset fall back to the global defaults.
func TestEarlybirdCfg_getDefaultModuleSettings(t *testing.T) {
	modules := map[string]cfgReader.ModuleConfig{
		"inclusivity": {
			DisplaySeverity: "high",
		},
	}
	eb.Config.ModuleConfigs.Modules = modules
	eb.getDefaultModuleSettings()
	// we didn't explicitly configure DisplayConfidence, make sure it got set to global default
	if got, want := eb.Config.ModuleConfigs.Modules["inclusivity"].DisplayConfidenceLevel, eb.Config.ConfidenceDisplayLevel; got != want {
		t.Fatalf("Unexpected default value set, got: %d, want: %d", got, want)
	}
	cleanup()
}
package main

import (
	"fmt"
	"math"
)

// Sqrt returns the square root of x computed by Newton's method, iterating
// until the residual z*z - x is within a small absolute tolerance.
// It returns NaN for negative inputs and 0 for x == 0, matching math.Sqrt.
func Sqrt(x float64) float64 {
	if x < 0 {
		// The original looped forever on negative input.
		return math.NaN()
	}
	if x == 0 {
		return 0
	}
	const eps = 1e-7
	z := 1.0
	// Newton step: z -= (z*z - x) / (2z). The original's two branches both
	// performed this same algebraic update, so one form suffices. The debug
	// print of the residual has been removed.
	for e := z*z - x; math.Abs(e) > eps; e = z*z - x {
		z -= e / (2 * z)
	}
	return z
}

func main() {
	var x float64 = 23251
	fmt.Println(Sqrt(x))
	fmt.Println(math.Sqrt(x))
}
package wiki

import (
	"log"
	"net/http"

	"github.com/gorilla/mux"
)

// Server ties together the configuration, the HTTP server/router and the
// page database.
type Server struct {
	Conf       Config
	httpServer *http.Server
	db         *Database
}

// NewServer opens the database described by c and registers all routes on a
// fresh router. The returned server is ready for Run.
func NewServer(c Config) (*Server, error) {
	db, err := NewDatabase(c)
	if err != nil {
		return nil, err
	}

	m := mux.NewRouter()
	h := &http.Server{
		Addr:    c.Addr,
		Handler: m,
	}
	s := &Server{
		Conf:       c,
		httpServer: h,
		db:         db,
	}

	m.HandleFunc("/", s.indexHandler)
	m.HandleFunc("/stats", s.statsHandler)
	m.HandleFunc("/page/new", s.getNewPageHandler).Methods("GET")
	m.HandleFunc("/page/new", s.postNewPageHandler).Methods("POST")
	m.HandleFunc("/page/edit/{title}", s.getEditPageHandler).Methods("GET")
	m.HandleFunc("/page/edit/{title}", s.postEditPageHandler).Methods("POST")
	m.HandleFunc("/page/del/{title}", s.getDelPageHandler).Methods("GET")
	m.HandleFunc("/page/del/{title}", s.postDelPageHandler).Methods("POST")
	m.HandleFunc("/page/{title}", s.getPageHandler)

	return s, nil
}

// Run starts listening on the configured address; it blocks until the
// underlying server stops.
func (s *Server) Run() error {
	s.Log("Listening on %s", s.Conf.Addr)
	return s.httpServer.ListenAndServe()
}

// Log prints msg (Printf-style) when verbose logging is enabled.
func (s *Server) Log(msg string, args ...interface{}) {
	if s.Conf.Verbose {
		log.Printf(msg+"\n", args...)
	}
}

// logTemplateError logs a template execution failure, if any.
func (s *Server) logTemplateError(tmpl string, err error) {
	if err != nil {
		s.Log("Error while executing template %s: %s", tmpl, err)
	}
}

func (s *Server) indexHandler(w http.ResponseWriter, r *http.Request) {
}

func (s *Server) statsHandler(w http.ResponseWriter, r *http.Request) {
}

// getPageHandler renders an existing page, or responds 404 when the page is
// missing or the database lookup fails.
func (s *Server) getPageHandler(w http.ResponseWriter, r *http.Request) {
	vars := mux.Vars(r)
	title := vars["title"]
	p, err := s.db.GetPage(title)
	if err != nil {
		// Consistent with getEditPageHandler: a lookup error means we
		// cannot serve the page. The original logged the error and then
		// fell through to inspect the zero-value page.
		s.Log("Error getPageHandler: %s", err)
		http.Error(w, "Page not found", http.StatusNotFound)
		return
	}
	if p.IsZero() {
		http.Error(w, "Page not found", http.StatusNotFound)
		return
	}
	err = tmplGetPage.Execute(w, map[string]interface{}{
		"Page": p,
	})
	s.logTemplateError("getPage", err)
}

// getNewPageHandler renders the "create a new page" form.
func (s *Server) getNewPageHandler(w http.ResponseWriter, r *http.Request) {
	err := tmplGetNewPage.Execute(w, nil)
	s.logTemplateError("getNewPage", err)
}

// postNewPageHandler redirects to the edit form for the submitted title.
func (s *Server) postNewPageHandler(w http.ResponseWriter, r *http.Request) {
	title := r.FormValue("title")
	http.Redirect(w, r, "/page/edit/"+title, http.StatusFound)
}

// getEditPageHandler renders the edit form for an existing page, or a blank
// form pre-filled with the title when the page does not exist yet.
func (s *Server) getEditPageHandler(w http.ResponseWriter, r *http.Request) {
	vars := mux.Vars(r)
	title := vars["title"]
	p, err := s.db.GetPage(title)
	if err != nil {
		s.Log("Error getEditPageHandler: %s", err)
		http.Error(w, "Page not found", http.StatusNotFound)
		return
	}
	if p.IsZero() {
		p.Title = title
	}
	err = tmplGetEditPage.Execute(w, map[string]interface{}{
		"Page": p,
	})
	s.logTemplateError("getEditPage", err)
}

// postEditPageHandler persists the submitted page and redirects to it.
func (s *Server) postEditPageHandler(w http.ResponseWriter, r *http.Request) {
	// These two will be cleaned up by the db call
	title := r.FormValue("title")
	body := r.FormValue("body")
	p, err := s.db.SavePage(title, body)
	if err != nil {
		s.Log("Error postEditPageHandler: %s", err)
		http.Error(w, "Internal server error while saving page", http.StatusInternalServerError)
		return
	}
	http.Redirect(w, r, "/page/"+p.Title, http.StatusFound)
}

func (s *Server) getDelPageHandler(w http.ResponseWriter, r *http.Request) {
}

func (s *Server) postDelPageHandler(w http.ResponseWriter, r *http.Request) {
}
// Copyright 2019 Copyright (c) 2019 SAP SE or an SAP affiliate company. All rights reserved. This file is licensed under the Apache Software License, v. 2 except as noted otherwise in the LICENSE file.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
//      http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

package gkescheduler

import (
	"context"
	"fmt"
	"os"

	container "cloud.google.com/go/container/apiv1"
	"github.com/pkg/errors"
	"github.com/spf13/cobra"
	flag "github.com/spf13/pflag"
	"google.golang.org/api/option"

	"github.com/gardener/test-infra/pkg/hostscheduler"
	"github.com/gardener/test-infra/pkg/logger"
	"github.com/gardener/test-infra/pkg/util/cmdutil/viper"
)

// Name is the provider identifier under which this scheduler registers.
const (
	Name hostscheduler.Provider = "gke"
)

// Register adds a GKE-backed scheduler to the host scheduler registrations.
// NOTE(review): the registration and gkescheduler struct definitions live
// elsewhere in this package.
var Register hostscheduler.Register = func(m *hostscheduler.Registrations) {
	m.Add(&registration{
		scheduler: &gkescheduler{},
	})
}

// Name returns the provider identifier of this registration.
func (r *registration) Name() hostscheduler.Provider {
	return Name
}

// Description returns a human-readable description; currently empty.
func (r *registration) Description() string {
	return ""
}

// Interface exposes the underlying scheduler implementation.
func (r *registration) Interface() hostscheduler.Interface {
	return r.scheduler
}

// RegisterFlags wires the gcloud key file, cluster name, project and zone
// flags (falling back to flag.CommandLine when flagset is nil) and binds
// them to their viper configuration keys.
func (r *registration) RegisterFlags(flagset *flag.FlagSet) {
	if flagset == nil {
		flagset = flag.CommandLine
	}
	flagset.StringVar(&r.gcloudkeyFile, "key", "", "Path to the gardener cluster gcloudKeyfilePath")
	flagset.StringVar(&r.scheduler.hostname, "name", "", "Name of the target gke cluster. Optional")
	flagset.StringVar(&r.scheduler.project, "project", "", "gcp project name")
	flagset.StringVar(&r.scheduler.zone, "zone", "", "gcp zone name")

	viper.BindPFlagFromFlagSet(flagset, "key", "gke.gcloudKeyPath")
	viper.BindPFlagFromFlagSet(flagset, "project", "gke.project")
	viper.BindPFlagFromFlagSet(flagset, "zone", "gke.zone")
}

// PreRun validates that the gcloud key file was provided and exists, then
// constructs the GKE ClusterManager client used by the scheduler.
func (r *registration) PreRun(cmd *cobra.Command, args []string) error {
	r.scheduler.log = logger.Log.WithName(string(r.Name()))
	if r.gcloudkeyFile == "" {
		return errors.New("No gcloud key file defined")
	}
	// Only existence is checked here; the file's validity as a credential
	// is verified implicitly by the client constructor below.
	if _, err := os.Stat(r.gcloudkeyFile); err != nil {
		return fmt.Errorf("GCloud json at %s cannot be found", r.gcloudkeyFile)
	}
	c, err := container.NewClusterManagerClient(context.TODO(), option.WithCredentialsFile(r.gcloudkeyFile))
	if err != nil {
		return err
	}
	r.scheduler.client = c
	return nil
}
//go:build !windows // +build !windows package main import "github.com/sirupsen/logrus" func HookLogger(l *logrus.Logger) { // Do nothing, let the logs flow to stdout/stderr }
package gorp

import (
	"database/sql"
	"github.com/chidam1994/happyfox/models"
	_ "github.com/lib/pq"
	"gopkg.in/gorp.v2"
)

// db holds the package-wide database handle opened by InitDB and closed by
// CloseDBConn.
var db *sql.DB

// InitDB opens the postgres connection, registers the model tables with
// gorp and returns the configured DbMap. It panics if sql.Open fails.
//
// NOTE(review): the connection string is hard-coded with credentials; the
// commented-out line below suggests it was meant to come from config —
// confirm before shipping. Also note sql.Open only validates arguments and
// does not actually connect, so a bad host/password surfaces on first use.
func InitDB() *gorp.DbMap {
	dbConnString := "host=localhost port=5432 user=postgres password=postgres dbname=postgres sslmode=disable"
	//dbConnString := fmt.Sprintf("host=%s port=%s user=%s password=%s dbname=%s sslmode=disable", config.GetString(config.DB_HOST), config.GetString(config.DB_PORT), config.GetString(config.DB_USERNAME), config.GetString(config.DB_PASSWORD), config.GetString(config.DB_DBNAME))
	var err error
	db, err = sql.Open("postgres", dbConnString)
	if err != nil {
		panic(err)
	}
	dbmap := &gorp.DbMap{Db: db, Dialect: gorp.PostgresDialect{}}
	// Map each model struct to its backing table.
	dbmap.AddTableWithName(models.Contact{}, "contacts")
	dbmap.AddTableWithName(models.Email{}, "emails")
	dbmap.AddTableWithName(models.PhNum{}, "phnumbers")
	dbmap.AddTableWithName(models.Group{}, "groups")
	dbmap.AddTableWithName(models.Member{}, "members")
	return dbmap
}

// CloseDBConn closes the shared handle opened by InitDB.
// NOTE(review): panics via nil dereference if called before InitDB, and the
// Close error is discarded — confirm both are acceptable to callers.
func CloseDBConn() {
	db.Close()
}
package restful

import (
	"encoding/json"
	"io/ioutil"
	"net/http"
	"net/http/httptest"
	"strconv"
	"strings"
	"testing"

	"github.com/stretchr/testify/assert"
)

// TestIndex checks the root endpoint returns the greeting.
func TestIndex(t *testing.T) {
	assert := assert.New(t)

	mock := httptest.NewServer(NewHandler())
	defer mock.Close()

	res, err := http.Get(mock.URL)
	assert.NoError(err)
	assert.Equal(http.StatusOK, res.StatusCode)
	data, _ := ioutil.ReadAll(res.Body)
	assert.Equal("hello, world", string(data))
}

// TestUsers_WithoutUsers checks the listing response when no user exists.
func TestUsers_WithoutUsers(t *testing.T) {
	assert := assert.New(t)

	mock := httptest.NewServer(NewHandler())
	defer mock.Close()

	res, err := http.Get(mock.URL + "/users")
	assert.NoError(err)
	assert.Equal(http.StatusOK, res.StatusCode)
	data, _ := ioutil.ReadAll(res.Body)
	assert.Contains(string(data), "No Users")
}

// TestUsers_WithUsers creates two users and checks both appear in the list.
func TestUsers_WithUsers(t *testing.T) {
	assert := assert.New(t)

	mock := httptest.NewServer(NewHandler())
	defer mock.Close()

	res, err := http.Post(mock.URL+"/users", "application/json", strings.NewReader(`{"first_name":"hyunjin", "last_name":"kim", "email":"hyunjin1612@gmail.com"}`))
	assert.NoError(err)
	assert.Equal(http.StatusCreated, res.StatusCode)

	res, err = http.Post(mock.URL+"/users", "application/json", strings.NewReader(`{"first_name":"jason", "last_name":"park", "email":"jason@gmail.com"}`))
	assert.NoError(err)
	assert.Equal(http.StatusCreated, res.StatusCode)

	res, err = http.Get(mock.URL + "/users")
	assert.NoError(err)
	assert.Equal(http.StatusOK, res.StatusCode)

	users := []*User{}
	err = json.NewDecoder(res.Body).Decode(&users)
	assert.NoError(err)
	assert.Equal(2, len(users))
}

// TestGetUserInfo checks the response for an unknown user ID.
func TestGetUserInfo(t *testing.T) {
	assert := assert.New(t)

	mock := httptest.NewServer(NewHandler())
	defer mock.Close()

	res, err := http.Get(mock.URL + "/users/777")
	assert.NoError(err)
	assert.Equal(http.StatusOK, res.StatusCode)
	data, _ := ioutil.ReadAll(res.Body)
	assert.Contains(string(data), "No User With ID")
}

// TestCreateUser creates a user and fetches it back by the assigned ID.
func TestCreateUser(t *testing.T) {
	assert := assert.New(t)

	mock := httptest.NewServer(NewHandler())
	defer mock.Close()

	res, err := http.Post(mock.URL+"/users", "application/json", strings.NewReader(`{"first_name":"hyunjin", "last_name":"kim", "email":"hyunjin1612@gmail.com"}`))
	assert.NoError(err)
	assert.Equal(http.StatusCreated, res.StatusCode)

	user := new(User)
	err = json.NewDecoder(res.Body).Decode(user)
	assert.NoError(err)
	assert.NotEqual(0, user.ID)

	id := user.ID
	res, err = http.Get(mock.URL + "/users/" + strconv.Itoa(id))
	assert.NoError(err)
	assert.Equal(http.StatusOK, res.StatusCode)

	user2 := new(User)
	err = json.NewDecoder(res.Body).Decode(user2)
	assert.NoError(err)
	assert.Equal(user.ID, user2.ID)
	assert.Equal(user.FirstName, user2.FirstName)
}

// TestDeleteUser deletes a missing user, then creates one and deletes it.
func TestDeleteUser(t *testing.T) {
	assert := assert.New(t)

	mock := httptest.NewServer(NewHandler())
	defer mock.Close()

	req, _ := http.NewRequest("DELETE", mock.URL+"/users/1", nil)
	res, err := http.DefaultClient.Do(req)
	assert.NoError(err)
	assert.Equal(http.StatusOK, res.StatusCode)
	data, _ := ioutil.ReadAll(res.Body)
	assert.Contains(string(data), "No User ID With: 1")

	res, err = http.Post(mock.URL+"/users", "application/json", strings.NewReader(`{"first_name":"hyunjin", "last_name":"kim", "email":"hyunjin1612@gmail.com"}`))
	assert.NoError(err)
	assert.Equal(http.StatusCreated, res.StatusCode)

	user := new(User)
	err = json.NewDecoder(res.Body).Decode(user)
	assert.NoError(err)
	assert.NotEqual(0, user.ID)

	req, _ = http.NewRequest("DELETE", mock.URL+"/users/1", nil)
	res, err = http.DefaultClient.Do(req)
	assert.NoError(err)
	assert.Equal(http.StatusOK, res.StatusCode)
	data, _ = ioutil.ReadAll(res.Body)
	assert.Contains(string(data), "User ID With: 1 Deleted!")
}

// TestUpdateUser checks both the missing-user case and a successful update.
func TestUpdateUser(t *testing.T) {
	assert := assert.New(t)

	mock := httptest.NewServer(NewHandler())
	defer mock.Close()

	// When there is no user to update yet
	req, _ := http.NewRequest("PUT", mock.URL+"/users", strings.NewReader(`{"id":1, "first_name":"updated", "last_name":"kim", "email":"hyunjin1612@gmail.com"}`))
	res, err := http.DefaultClient.Do(req)
	assert.NoError(err)
	assert.Equal(http.StatusOK, res.StatusCode)
	data, _ := ioutil.ReadAll(res.Body)
	assert.Contains(string(data), "No User With ID: 1")

	// Create a user, then update it
	res, err = http.Post(mock.URL+"/users", "application/json", strings.NewReader(`{"first_name":"hyunjin", "last_name":"kim", "email":"hyunjin1612@gmail.com"}`))
	assert.NoError(err)
	assert.Equal(http.StatusCreated, res.StatusCode)

	user := new(User)
	err = json.NewDecoder(res.Body).Decode(user)
	assert.NoError(err)

	req, _ = http.NewRequest("PUT", mock.URL+"/users", strings.NewReader(`{"id":1, "first_name":"updated"}`))
	res, err = http.DefaultClient.Do(req)
	assert.NoError(err)
	assert.Equal(http.StatusOK, res.StatusCode)

	updatedUser := new(User)
	err = json.NewDecoder(res.Body).Decode(updatedUser)
	assert.NoError(err)
	assert.NotEqual(0, updatedUser.ID)
	assert.Equal("updated", updatedUser.FirstName)
	assert.Equal(user.LastName, updatedUser.LastName)
}
// Copyright (C) 2019-2020 Zilliz. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance
// with the License. You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software distributed under the License
// is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
// or implied. See the License for the specific language governing permissions and limitations under the License.

package datacoord

import (
	"testing"

	"github.com/milvus-io/milvus/internal/proto/datapb"
	"github.com/stretchr/testify/assert"
)

// TestWatchRestartsPolicy verifies that when a node restarts, its channels
// are reassigned and their watch state reset to Uncomplete.
func TestWatchRestartsPolicy(t *testing.T) {
	p := newWatchRestartsStartupPolicy()
	c := make(map[string]*datapb.DataNodeInfo)
	c["localhost:1111"] = &datapb.DataNodeInfo{
		Address: "localhost:1111",
		Version: 0,
		Channels: []*datapb.ChannelStatus{
			{
				Name:         "vch1",
				State:        datapb.ChannelWatchState_Complete,
				CollectionID: 0,
			},
		},
	}
	c["localhost:2222"] = &datapb.DataNodeInfo{
		Address: "localhost:2222",
		Version: 0,
		Channels: []*datapb.ChannelStatus{
			{
				Name:         "vch2",
				State:        datapb.ChannelWatchState_Complete,
				CollectionID: 0,
			},
		},
	}
	// Only localhost:2222 restarted; its single channel should come back
	// in the Uncomplete state.
	dchange := &clusterDeltaChange{
		newNodes: []string{},
		offlines: []string{},
		restarts: []string{"localhost:2222"},
	}
	nodes, _ := p.apply(c, dchange, []*datapb.ChannelStatus{})
	assert.EqualValues(t, 1, len(nodes))
	assert.EqualValues(t, datapb.ChannelWatchState_Uncomplete, nodes[0].Channels[0].State)
}

// TestRandomReassign verifies that randomAssignRegisterFunc redistributes a
// departing node's channels among the remaining nodes, resetting each
// reassigned channel to Uncomplete, and does nothing for nil/empty input.
func TestRandomReassign(t *testing.T) {
	p := randomAssignRegisterFunc
	clusters := make(map[string]*datapb.DataNodeInfo)
	clusters["addr1"] = &datapb.DataNodeInfo{
		Address:  "addr1",
		Channels: make([]*datapb.ChannelStatus, 0, 10),
	}
	clusters["addr2"] = &datapb.DataNodeInfo{
		Address:  "addr2",
		Channels: make([]*datapb.ChannelStatus, 0, 10),
	}
	clusters["addr3"] = &datapb.DataNodeInfo{
		Address:  "addr3",
		Channels: make([]*datapb.ChannelStatus, 0, 10),
	}

	cases := []*datapb.DataNodeInfo{
		{
			Channels: []*datapb.ChannelStatus{},
		},
		{
			Channels: []*datapb.ChannelStatus{
				{Name: "VChan1", CollectionID: 1},
				{Name: "VChan2", CollectionID: 2},
			},
		},
		{
			Channels: []*datapb.ChannelStatus{
				{Name: "VChan3", CollectionID: 1},
				{Name: "VChan4", CollectionID: 2},
			},
		},
		nil,
	}

	for _, ca := range cases {
		nodes := p.apply(clusters, ca)
		if ca == nil || len(ca.Channels) == 0 {
			assert.Equal(t, 0, len(nodes))
		} else {
			// Every channel of the departing node must show up exactly
			// somewhere in the returned assignments, in Uncomplete state.
			for _, ch := range ca.Channels {
				found := false
			loop:
				for _, node := range nodes {
					for _, nch := range node.Channels {
						if nch.Name == ch.Name {
							found = true
							assert.EqualValues(t, datapb.ChannelWatchState_Uncomplete, nch.State)
							break loop
						}
					}
				}
				assert.Equal(t, true, found)
			}
		}
	}
}
package generic

import (
	"io/ioutil"
	"testing"

	"github.com/stretchr/testify/assert"
)

// TestCreds verifies that the proxy only authenticates successfully against
// a backend whose credentials match the proxy's configured ones.
// NOTE(review): httpServer, proxyGet, targetEndpoint and the fromProxy*/
// serverResponse* fixtures are defined elsewhere in this package.
func TestCreds(t *testing.T) {
	testCases := []struct {
		description    string
		serverUsername string
		serverPassword string
		expected       string
	}{
		{
			"proxy credentials match server credentials",
			fromProxyUsername,
			fromProxyPassword,
			serverResponseOK,
		},
		{
			"proxy credentials don't match server credentials",
			"not-proxy-user",
			"not-proxy-password",
			serverResponseUnauthorized,
		},
	}
	for _, testCase := range testCases {
		t.Run(testCase.description, func(t *testing.T) {
			srv, err := httpServer(testCase.serverUsername, testCase.serverPassword)
			if !assert.NoError(t, err) {
				return
			}
			defer srv.Close()
			res, err := proxyGet("http://"+targetEndpoint(srv), proxyHTTP)
			if !assert.NoError(t, err) {
				return
			}
			body, err := ioutil.ReadAll(res.Body)
			if !assert.NoError(t, err) {
				return
			}
			assert.Contains(t, string(body), testCase.expected)
		})
	}
}

// TestForceSSL verifies TLS validation through the proxy: a backend whose
// certificate is in the proxy's trust bundle succeeds, one that is not
// fails with an unknown-authority error.
func TestForceSSL(t *testing.T) {
	testCases := []struct {
		description string
		tlsCert     string
		tlsKey      string
		expected    string
	}{
		{
			"certificate included in proxy bundle",
			serverCertIncluded,
			serverKeyIncluded,
			serverResponseOK,
		},
		{
			"certificate not included proxy bundle",
			serverCertExcluded,
			serverKeyExcluded,
			"x509: certificate signed by unknown authority",
		},
	}
	for _, testCase := range testCases {
		t.Run(testCase.description, func(t *testing.T) {
			srv, err := httpsServer(
				fromProxyUsername,
				fromProxyPassword,
				testCase.tlsCert,
				testCase.tlsKey,
			)
			if !assert.NoError(t, err) {
				return
			}
			defer srv.Close()
			res, err := proxyGet("http://"+targetEndpoint(srv), proxyHTTPS)
			if !assert.NoError(t, err) {
				return
			}
			body, err := ioutil.ReadAll(res.Body)
			if !assert.NoError(t, err) {
				return
			}
			assert.Contains(t, string(body), testCase.expected)
		})
	}
}
/* Copyright 2019 The Skaffold Authors Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. */ package flags import ( "errors" "fmt" "strings" "github.com/GoogleContainerTools/skaffold/v2/pkg/skaffold/docker" "github.com/GoogleContainerTools/skaffold/v2/pkg/skaffold/graph" ) // Images describes a flag which contains a list of image names type Images struct { images []image usage string } type image struct { name string artifact *graph.Artifact } // String Implements String() method for pflag interface and // returns a placeholder for the help text. func (i *Images) String() string { return strings.Join(i.GetSlice(), ",") } // Type Implements Type() method for pflag interface func (i *Images) Type() string { return fmt.Sprintf("%T", i) } // SetNil Implements SetNil() method for our Nillable interface func (i *Images) SetNil() error { i.images = []image{} return nil } // Set Implements Set() method for pflag interface. We append values // to preserve compatibility with previous behaviour where each image // required a separate `-i` flag. func (i *Images) Set(csv string) error { for _, split := range strings.Split(csv, ",") { if err := i.Append(split); err != nil { return fmt.Errorf("%s: %w", split, err) } } return nil } // GetSlice Implements GetSlice() method for pflag SliceValue interface and // returns a slice of image names. 
func (i *Images) GetSlice() []string { names := make([]string, len(i.images)) for i, image := range i.images { names[i] = image.name } return names } // Append Implements Append() method for pflag SliceValue interface func (i *Images) Append(value string) error { a, err := convertImageToArtifact(value) if err != nil { return err } i.images = append(i.images, image{name: value, artifact: a}) return nil } // Replace Implements Replace() method for pflag SliceValue interface func (i *Images) Replace(images []string) error { newImages := make([]image, 0, len(images)) for _, value := range images { a, err := convertImageToArtifact(value) if err != nil { return err } newImages = append(newImages, image{name: value, artifact: a}) } i.images = newImages return nil } // Artifacts returns an artifact representation for the corresponding image func (i *Images) Artifacts() []graph.Artifact { var artifacts []graph.Artifact for _, image := range i.images { artifacts = append(artifacts, *image.artifact) } return artifacts } // NewEmptyImages returns a new nil Images list. func NewEmptyImages(usage string) *Images { return &Images{ images: []image{}, usage: usage, } } func convertImageToArtifact(value string) (*graph.Artifact, error) { if value == "" { return nil, errors.New("cannot add an empty image value") } if c := strings.SplitN(value, "=", 2); len(c) == 2 { _, err := docker.ParseReference(c[1]) if err != nil { return nil, err } return &graph.Artifact{ ImageName: c[0], Tag: c[1], }, nil } parsed, err := docker.ParseReference(value) if err != nil { return nil, err } return &graph.Artifact{ ImageName: parsed.BaseName, Tag: value, }, nil }
package core // Meta is returned from many queries that support pagination type Meta struct { Results MetaResults `json:"results"` Page MetaPage `json:"page"` } // MetaResults contains the total number of results type MetaResults struct { Total int `json:"total"` All int `json:"all,omitempty"` } // MetaPage contains pagination information type MetaPage struct { Limit int `json:"limit"` Offset int `json:"offset"` Current int `json:"current"` Total int `json:"total"` }
package pubsub

import (
	"context"
	"fmt"
	"time"

	"github.com/go-redis/redis/v7"

	"github.com/apenella/go-redis-queues/internal/infrastructure/configuration"
	providerredis "github.com/apenella/go-redis-queues/internal/infrastructure/provider/redis"
)

// Consumer is a redis client that subscribes to a pub/sub channel and
// consumes its messages. (The original comment wrongly described it as a
// Producer that appends messages.)
type Consumer struct {
	client *redis.Client
}

// NewConsumer creates a new redis-backed Consumer from the given configuration.
func NewConsumer(config *configuration.Configuration) (*Consumer, error) {

	c, err := providerredis.NewClient(config.RedisHost, config.RedisPort, config.RedisDB, config.RedisPassword)
	if err != nil {
		return nil, err
	}

	consumer := &Consumer{
		client: c,
	}

	return consumer, nil
}

// Consume subscribes to channel and prints each incoming message. It stops
// — closing the redis client — when the context is cancelled or when no
// message arrives for five seconds.
func (c *Consumer) Consume(ctx context.Context, channel string) {
	pubsub := c.client.Subscribe(channel)
	defer pubsub.Close()
	subChannel := pubsub.Channel()

	for {
		select {
		case m := <-subChannel:
			fmt.Println(m.String())
		case <-time.After(5 * time.Second):
			// Each loop iteration restarts the timer, so this fires only
			// after five seconds with no message.
			fmt.Println("Consumer cancelled after 5 idle seconds")
			c.client.Close()
			return
		case <-ctx.Done():
			fmt.Println("Consumer cancelled by user")
			c.client.Close()
			return
		}
	}
}

// Ping checks connectivity to the redis server and then closes the client.
func (c *Consumer) Ping(ctx context.Context) error {
	_, err := c.client.Ping().Result()
	if err != nil {
		return err
	}
	c.client.Close()

	return nil
}
/*
Copyright 2015 All rights reserved.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at

http://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/

package main

import (
	"bytes"
	"crypto/tls"
	"fmt"
	"net/http"
	"os"
	"reflect"
	"testing"
	"time"

	"github.com/google/uuid"
	"github.com/stretchr/testify/assert"
	"github.com/stretchr/testify/require"
)

// TestDecodeKeyPairs checks parsing of "key=value" lists into a map and
// rejection of entries without an '='.
func TestDecodeKeyPairs(t *testing.T) {
	testCases := []struct {
		List     []string
		KeyPairs map[string]string
		Ok       bool
	}{
		{
			List: []string{"a=b", "b=3"},
			KeyPairs: map[string]string{
				"a": "b",
				"b": "3",
			},
			Ok: true,
		},
		{
			List: []string{"add", "b=3"},
		},
	}
	for i, c := range testCases {
		kp, err := decodeKeyPairs(c.List)
		if err != nil && c.Ok {
			t.Errorf("test case %d should not have failed", i)
			continue
		}
		if !c.Ok {
			continue
		}
		if !reflect.DeepEqual(kp, c.KeyPairs) {
			t.Errorf("test case %d are not equal %v <-> %v", i, kp, c.KeyPairs)
		}
	}
}

// TestGetRequestHostURL checks scheme selection (TLS vs not) and the
// X-Forwarded-Host override when reconstructing the request host URL.
func TestGetRequestHostURL(t *testing.T) {
	cs := []struct {
		Expected   string
		HostHeader string
		Hostname   string
		TLS        *tls.ConnectionState
	}{
		{
			Expected: "http://www.test.com",
			Hostname: "www.test.com",
		},
		{
			Expected: "http://",
		},
		{
			Expected:   "http://www.override.com",
			HostHeader: "www.override.com",
			Hostname:   "www.test.com",
		},
		{
			Expected: "https://www.test.com",
			Hostname: "www.test.com",
			TLS:      &tls.ConnectionState{},
		},
		{
			Expected:   "https://www.override.com",
			HostHeader: "www.override.com",
			Hostname:   "www.test.com",
			TLS:        &tls.ConnectionState{},
		},
	}
	for i, c := range cs {
		request := &http.Request{
			Method: http.MethodGet,
			Host:   c.Hostname,
			TLS:    c.TLS,
		}
		if c.HostHeader != "" {
			request.Header = make(http.Header)
			request.Header.Set("X-Forwarded-Host", c.HostHeader)
		}
		assert.Equal(t, c.Expected, getRequestHostURL(request), "case %d, expected: %s, got: %s", i, c.Expected, getRequestHostURL(request))
	}
}

func BenchmarkUUID(b *testing.B) {
	for n := 0; n < b.N; n++ {
		s := uuid.New()
		_ = s.String()
	}
}

// TestDefaultTo checks that defaultTo falls back only for empty values.
func TestDefaultTo(t *testing.T) {
	cs := []struct {
		Value    string
		Default  string
		Expected string
	}{
		{
			Value:    "",
			Default:  "hello",
			Expected: "hello",
		},
		{
			Value:    "world",
			Default:  "hello",
			Expected: "world",
		},
	}
	for _, c := range cs {
		assert.Equal(t, c.Expected, defaultTo(c.Value, c.Default))
	}
}

// TestEncryptDataBlock checks that encryption succeeds with a valid-length
// key and fails for invalid key lengths.
func TestEncryptDataBlock(t *testing.T) {
	testCase := []struct {
		Text string
		Key  string
		Ok   bool
	}{
		{
			Text: "hello world, my name is keycloak proxy",
			Key:  "DtNMS2eO7Fi5vsuLrW55nrRbir2kPfTw",
			Ok:   true,
		},
		{
			Text: "hello world, my name is keycloak proxy",
			Key:  "DtNMS2eO7Fi5vsu",
		},
		{
			Text: "h",
			Key:  "DtNMS2eO7Fi5vsuLrW55nrRbir2kPfTwtr",
		},
	}
	for i, test := range testCase {
		_, err := encryptDataBlock(bytes.NewBufferString(test.Text).Bytes(), bytes.NewBufferString(test.Key).Bytes())
		if err != nil && test.Ok {
			t.Errorf("test case: %d should not have failed, %s", i, err)
		}
	}
}

// TestEncodeText checks the session-encoding helper produces output.
func TestEncodeText(t *testing.T) {
	session, err := encodeText("12245325632323263762", "1gjrlcjQ8RyKANngp9607txr5fF5fhf1")
	assert.NotEmpty(t, session)
	assert.NoError(t, err)
}

// Shared fixtures for the encryption benchmarks and tests below.
var (
	fakePlainText = []byte(`nFlhnhwRzC9uJ9mjhR0PQezUpIiDlU9ASLqH1KIKFhBZZrMZfnAAdHdgKs2OJoni8cTSQ
JxkaNpboZ6hnrMytlw5kf0biF7dLTU885uHIGkUIRy75hx6BaTEEhbN36qVTxediEHd6xeBPS3qpJ7riO6J
EeaQr1rroDL0LvmDyB6Zds4LdVQEmtUueusc7jkBz7gJ12vnTHIxviZM5rzcq4tyCbZO7Kb37RqZg5kbYGK
PfErhUwUIin7jsNVE7coB`)

	fakeCipherText = []byte("lfQPTa6jwMTABaJhcrfVkoqcdyMVAettMsqgKXIALSKG5UpoYKbT/WgZjOiuCmEI0E/7piP8VATLOAHKDBNF2WrQOKSYF+gdHkh4NLv0cW0NZ2qyZeWhknywE6063ylhCYjJOrJA1z12i2bHHbjZZGfqkwfzyxxFLTv6jSbalpZ4oZcUcNY/DrtVk/K01qZw6o4l1f0FUL6UZVSirn+B3YDWLeVQ0FGr6jlhCpN203Rf688nqdBvhw4bUEQiykCMxWm2/rJBNWm2SzZgw65kb4W0ph1qjcoUjXBwNakK+E0Lw/fwi8+bUC1lkT8+hJpMLKZkzb07rbGAnmljQo0NkqJh4kl+aycsEhm9bZj+b6w0r795YugyNsyca5CnUvkB1Dg")
	fakeKey        = []byte("u3K0eKsmGl76jY1buzexwYoRRLLQrQck")
)

/*
func TestEncryptedText(t *testing.T) {
	s, err := encodeText(string(fakePlainText), string(fakeKey))
	require.NoError(t, err)
	require.NotEmpty(t, s)
	d, err := decodeText(s, string(fakeKey))
	require.NoError(t, err)
	require.NotEmpty(t, d)
	assert.Equal(t, string(fakePlainText), d)
	fmt.Printf("Encoded: '%s'\n", s)
	fmt.Printf("Decoded: '%s'\n", d)
}
*/

func BenchmarkEncryptDataBlock(b *testing.B) {
	for n := 0; n < b.N; n++ {
		_, _ = encryptDataBlock(fakePlainText, fakeKey)
	}
}

func BenchmarkEncodeText(b *testing.B) {
	text := string(fakePlainText)
	key := string(fakeKey)
	for n := 0; n < b.N; n++ {
		_, _ = encodeText(text, key)
	}
}

func BenchmarkDecodeText(b *testing.B) {
	t := string(fakeCipherText)
	k := string(fakeKey)
	for n := 0; n < b.N; n++ {
		if _, err := decodeText(t, k); err != nil {
			b.FailNow()
		}
	}
}

// TestDecodeText checks an encode/decode round trip restores the input.
func TestDecodeText(t *testing.T) {
	fakeKey := "HYLNt2JSzD7Lpz0djTRudmlOpbwx1oHB"
	fakeText := "12245325632323263762"

	encrypted, err := encodeText(fakeText, fakeKey)
	require.NoError(t, err)
	assert.NotEmpty(t, encrypted)

	decoded, err := decodeText(encrypted, fakeKey)
	require.NoError(t, err)
	assert.NotEmpty(t, decoded, "the session should not have been nil")
	assert.Equal(t, fakeText, decoded, "the decoded text is not the same")
}

// TestFindCookie checks cookie lookup by name.
func TestFindCookie(t *testing.T) {
	cookies := []*http.Cookie{
		{Name: "cookie_there"},
	}
	assert.NotNil(t, findCookie("cookie_there", cookies))
	assert.Nil(t, findCookie("not_there", cookies))
}

// TestDecryptDataBlock checks an encrypt/decrypt round trip restores the
// plaintext for valid keys.
func TestDecryptDataBlock(t *testing.T) {
	testCase := []struct {
		Text string
		Key  string
		Ok   bool
	}{
		{
			Text: "hello world, my name is keycloak proxy",
			Key:  "DtNMS2eO7Fi5vsuLrW55nrRbir2kPfss",
			Ok:   true,
		},
		{
			Text: "h",
			Key:  "DtNMS2eO7Fi5vsuLrW55nrRbir2kPfTw",
			Ok:   true,
		},
	}
	for i, test := range testCase {
		cipher, err := encryptDataBlock(bytes.NewBufferString(test.Text).Bytes(), bytes.NewBufferString(test.Key).Bytes())
		if err != nil && test.Ok {
			t.Errorf("test case: %d should not have failed, %s", i, err)
		}
		plain, err := decryptDataBlock(cipher, bytes.NewBufferString(test.Key).Bytes())
		if err != nil {
			t.Errorf("test case: %d should not have failed, %s", i, err)
		}
		if string(plain) != test.Text {
			t.Errorf("test case: %d are not the same", i)
		}
	}
}

// TestHasAccessOK lists role combinations that must be granted access.
func TestHasAccessOK(t *testing.T) {
	cs := []struct {
		Have     []string
		Need     []string
		Required bool
	}{
		{},
		{
			Have: []string{"a", "b"},
		},
		{
			Have:     []string{"a", "b", "c"},
			Need:     []string{"a", "b"},
			Required: true,
		},
		{
			Have: []string{"a", "b", "c"},
			Need: []string{"a", "c"},
		},
		{
			Have: []string{"a", "b", "c"},
			Need: []string{"c"},
		},
		{
			Have: []string{"a", "b", "c"},
			Need: []string{"b"},
		},
		{
			Have: []string{"a", "b", "c"},
			Need: []string{"b"},
		},
		{
			Have: []string{"a", "b"},
			Need: []string{"a"},
		},
		{
			Have:     []string{"a", "b"},
			Need:     []string{"a"},
			Required: true,
		},
		{
			Have:     []string{"b", "a"},
			Need:     []string{"a"},
			Required: true,
		},
	}
	for i, x := range cs {
		assert.True(t, hasAccess(x.Need, x.Have, x.Required, false),
			"case: %d should be true, have: %v, need: %v, require: %t ", i, x.Have, x.Need, x.Required)
	}
}

// TestHasAccessBad lists role combinations that must be denied access.
func TestHasAccessBad(t *testing.T) {
	cs := []struct {
		Have     []string
		Need     []string
		Required bool
	}{
		{
			Have: []string{"a", "b"},
			Need: []string{"c"},
		},
		{
			Have:     []string{"a", "b"},
			Need:     []string{"c"},
			Required: true,
		},
		{
			Have:     []string{"a", "c"},
			Need:     []string{"a", "b"},
			Required: true,
		},
		{
			Have:     []string{"a", "b", "c"},
			Need:     []string{"b", "j"},
			Required: true,
		},
		{
			Have:     []string{"a", "b", "c"},
			Need:     []string{"a", "d"},
			Required: true,
		},
	}
	for i, x := range cs {
		assert.False(t, hasAccess(x.Need, x.Have, x.Required, false),
			"case: %d should be false, have: %v, need: %v, require: %t ", i, x.Have, x.Need, x.Required)
	}
}

// TestHasAccessWildcard checks access decisions when wildcard matching of
// role paths is enabled.
func TestHasAccessWildcard(t *testing.T) {
	cs := []struct {
		Have     []string
		Need     []string
		Required bool
		Expect   bool
	}{
		{
			Have:   []string{"a", "b", "c/b/d"},
			Need:   []string{"c/*"},
			Expect: true,
		},
		{
			Have:   []string{"a", "b", "c"},
			Need:   []string{"c/*"},
			Expect: true,
		},
		{
			Have:     []string{"a", "b", "c"},
			Need:     []string{"c/*", "b/*"},
			Required: true,
			Expect:   true,
		},
		{
			Have:     []string{"a", "b", "c1"},
			Need:     []string{"c*", "b/*"},
			Required: true,
			Expect:   true,
		},
		{
			Have:   []string{"a", "b", "c1/b/d"},
			Need:   []string{"c/*"},
			Expect: false,
		},
		{
			Have:     []string{"a", "b1", "c1"},
			Need:     []string{"c*", "b/*"},
			Required: true,
			Expect:   false,
		},
	}
	for i, x := range cs {
		assert.Equal(t, x.Expect, hasAccess(x.Need, x.Have, x.Required, true),
			"case: %d should be %t, have: %v, need: %v, require: %t ", i, x.Expect, x.Have, x.Need, x.Required)
	}
}

// TestContainedIn checks membership lookup with and without wildcards.
func TestContainedIn(t *testing.T) {
	assert.False(t, containedIn("1", []string{"2", "3", "4"}, false))
	assert.True(t, containedIn("1", []string{"1", "2", "3", "4"}, false))
	// with wildcards
	assert.False(t, containedIn("1/*", []string{"2", "3", "4"}, true))
	assert.False(t, containedIn("1/*", []string{"1.2", "3", "4"}, true))
	assert.False(t, containedIn("1/*", []string{"12", "3", "4"}, true))
	assert.False(t, containedIn("2/3/4/*", []string{"1", "2", "3", "4"}, true))
	assert.False(t, containedIn("1*", []string{"0123", "3", "4"}, true))
	assert.True(t, containedIn("1/*", []string{"1", "3", "4"}, true))
	assert.True(t, containedIn("1/*", []string{"1/2", "3", "4"}, true))
	assert.True(t, containedIn("2/3/4/*", []string{"1", "2/3/4", "3", "4"}, true))
	assert.True(t, containedIn("2/3/4/*", []string{"1", "2/3/4", "3", "4"}, true))
	assert.True(t, containedIn("2/3/4/*", []string{"1", "2/3/4/", "3", "4"}, true))
	assert.True(t, containedIn("2/3/4/*", []string{"1", "2/3/4/5/6", "3", "4"}, true))
	assert.True(t, containedIn("1*", []string{"123", "3", "4"}, true))
}
// TestContainsSubString verifies the domain-matching helper: the call is true
// when one of the candidate strings occurs inside the first argument.
func TestContainsSubString(t *testing.T) {
	assert.False(t, containsSubString("bar.com", []string{"foo.bar.com"}))
	assert.True(t, containsSubString("www.foo.bar.com", []string{"foo.bar.com"}))
	assert.True(t, containsSubString("foo.bar.com", []string{"bar.com"}))
	assert.True(t, containsSubString("star.domain.com", []string{"domain.com", "domain1.com"}))
	assert.True(t, containsSubString("star.domain1.com", []string{"domain.com", "domain1.com"}))
	assert.True(t, containsSubString("test.test.svc.cluster.local", []string{"svc.cluster.local"}))

	assert.False(t, containsSubString("star.domain1.com", []string{"domain.com", "sub.domain1.com"}))
	assert.False(t, containsSubString("svc.cluster.local", []string{"nginx.pr1.svc.cluster.local"}))
	assert.False(t, containsSubString("cluster.local", []string{"nginx.pr1.svc.cluster.local"}))
	assert.False(t, containsSubString("pr1", []string{"nginx.pr1.svc.cluster.local"}))
}

// BenchmarkContainsSubString measures a single no-match lookup.
func BenchmarkContainsSubString(t *testing.B) {
	for n := 0; n < t.N; n++ {
		containsSubString("svc.cluster.local", []string{"nginx.pr1.svc.cluster.local"})
	}
}

// TestIdValidHTTPMethod checks which verbs isValidHTTPMethod accepts; note
// that CONNECT is deliberately rejected.
func TestIdValidHTTPMethod(t *testing.T) {
	cs := []struct {
		Method string
		Ok     bool
	}{
		{Method: "GET", Ok: true},
		{Method: "GETT"},
		{Method: "CONNECT", Ok: false},
		{Method: "PUT", Ok: true},
		{Method: "PATCH", Ok: true},
	}
	for _, x := range cs {
		assert.Equal(t, x.Ok, isValidHTTPMethod(x.Method))
	}
}

// TestFileExists checks fileExists against a missing path and a freshly
// created temp file.
func TestFileExists(t *testing.T) {
	if fileExists("no_such_file_exsit_32323232") {
		t.Error("we should have received false")
	}
	tmpfile, err := os.CreateTemp("/tmp", fmt.Sprintf("test_file_%d", os.Getpid()))
	if err != nil {
		t.Fatalf("failed to create the temporary file, %s", err)
	}
	defer os.Remove(tmpfile.Name())
	if !fileExists(tmpfile.Name()) {
		t.Error("we should have received a true")
	}
}

// TestGetWithin checks the "renew within N% of expiry" duration calculation.
// Expected values are in nanoseconds (~359s and ~719s of a 1h window).
func TestGetWithin(t *testing.T) {
	cs := []struct {
		Expires  time.Time
		Percent  float64
		Expected time.Duration
	}{
		{
			Expires:  time.Now().Add(time.Duration(1) * time.Hour),
			Percent:  0.10,
			Expected: 359000000000,
		},
		{
			Expires:  time.Now().Add(time.Duration(1) * time.Hour),
			Percent:  0.20,
			Expected: 719000000000,
		},
	}
	for _, x := range cs {
		assert.Equal(t, x.Expected, getWithin(x.Expires, x.Percent))
	}
}

// TestToHeader checks claim-name to HTTP-header-name conversion; non-word
// separators become dashes and each part is capitalized.
func TestToHeader(t *testing.T) {
	cases := []struct {
		Word     string
		Expected string
	}{
		{
			Word:     "given_name",
			Expected: "Given-Name",
		},
		{
			Word:     "family%name",
			Expected: "Family-Name",
		},
		{
			Word:     "perferredname",
			Expected: "Perferredname",
		},
	}
	for i, x := range cases {
		assert.Equal(t, x.Expected, toHeader(x.Word),
			"case %d, expected: %s but got: %s", i, x.Expected, toHeader(x.Word))
	}
}

// TestCapitalize checks that only a leading letter is upper-cased; digits and
// already-capitalized words pass through unchanged.
func TestCapitalize(t *testing.T) {
	cases := []struct {
		Word     string
		Expected string
	}{
		{
			Word:     "given",
			Expected: "Given",
		},
		{
			Word:     "1iven",
			Expected: "1iven",
		},
		{
			Word:     "Test this",
			Expected: "Test this",
		},
	}
	for i, x := range cases {
		assert.Equal(t, x.Expected, capitalize(x.Word),
			"case %d, expected: %s but got: %s", i, x.Expected, capitalize(x.Word))
	}
}

// TestMergeMaps checks that mergeMaps copies Source entries into Dest.
func TestMergeMaps(t *testing.T) {
	cases := []struct {
		Source   map[string]string
		Dest     map[string]string
		Expected map[string]string
	}{
		{
			Source: map[string]string{
				"a": "b",
				"b": "b",
			},
			Dest: map[string]string{
				"c": "c",
			},
			Expected: map[string]string{
				"a": "b",
				"b": "b",
				"c": "c",
			},
		},
	}
	for i, x := range cases {
		merged := mergeMaps(x.Dest, x.Source)
		if !reflect.DeepEqual(x.Expected, merged) {
			t.Errorf("case %d, expected: %v but got: %v", i, x.Expected, merged)
		}
	}
}

// TestReadConfiguration exercises readConfigFile against YAML snippets; the
// first config lacks required fields and should fail, the second is complete.
func TestReadConfiguration(t *testing.T) {
	testCases := []struct {
		Content string
		Ok      bool
	}{
		{
			Content: `
discovery_url: https://keyclock.domain.com/
client-id: <client_id>
secret: <secret>
`,
		},
		{
			Content: `
discovery_url: https://keyclock.domain.com
client-id: <client_id>
secret: <secret>
upstream-url: http://127.0.0.1:8080
redirection_url: http://127.0.0.1:3000
`,
			Ok: true,
		},
	}

	for i, test := range testCases {
		// step: write the fake config file
		file := writeFakeConfigFile(t, test.Content)

		config := new(Config)
		err := readConfigFile(file.Name(), config)
		if test.Ok && err != nil {
			os.Remove(file.Name())
			t.Errorf("test case %d should not have failed, config: %v, error: %s", i, config, err)
		}
		os.Remove(file.Name())
	}
}

// writeFakeConfigFile writes content into a fresh temp file and returns it;
// the caller is responsible for removing the file.
func writeFakeConfigFile(t *testing.T, content string) *os.File {
	f, err := os.CreateTemp("", "node_label_file")
	if err != nil {
		t.Fatalf("unexpected error creating node_label_file: %v", err)
	}
	f.Close()

	if err := os.WriteFile(f.Name(), []byte(content), 0600); err != nil {
		t.Fatalf("unexpected error writing node label file: %v", err)
	}

	return f
}
// Copyright (c) 2018 The MATRIX Authors
// Distributed under the MIT software license, see the accompanying
// file COPYING or http://www.opensource.org/licenses/mit-license.php

package amhash

import "testing"

// TestMine is a placeholder test: it only logs and always passes.
func TestMine(t *testing.T) {
	t.Logf("test")
}
//+build !test package config import ( "github.com/gobjserver/gobjserver/core/gateway" "github.com/gobjserver/gobjserver/db" ) // CreateObjectGateway . func CreateObjectGateway() gateway.ObjectGateway { var database db.CommonDatabase database = CreateRethinkDB() rethinkObjectGateway := db.ObjectGatewayImpl{ Datatbase: database, } return rethinkObjectGateway } // CreateRethinkDB . func CreateRethinkDB() *db.RethinkDB { var database db.RethinkDB database.Session = database.Connect() return &database }
package main

import (
	"context"
	"fmt"
	"io"
	"log"
	"sync"
	"time"

	"google.golang.org/grpc"
	"google.golang.org/grpc/codes"
	"google.golang.org/grpc/credentials"
	"google.golang.org/grpc/status"

	"learnings/grpc/errors"
	"learnings/grpc/greet/greetpb"
)

// main dials the greet service over TLS and runs the selected demo calls.
func main() {
	// boilerplate
	fmt.Println("hello I'm a grpc client!")

	certFile := "ssl/ca.crt" // certificate authority trust certificate
	creds, sslErr := credentials.NewClientTLSFromFile(certFile, "")
	errors.HandleError("error while loading ca trust certificate", sslErr)
	opts := grpc.WithTransportCredentials(creds)

	cc, err := grpc.Dial("localhost:50051", opts)
	if err != nil {
		log.Fatalln("could not connect:", err)
	}
	// Fix: defer the Close only after the error check; the original deferred
	// before checking err, which risks closing an unusable connection.
	defer cc.Close()

	c := greetpb.NewGreetServiceClient(cc)

	// invoking grpc calls
	doUnary(c)
	// doServerStream(c)
	// doClientStream(c)
	//doBiDirectionalStream(c)
	//doUnaryWithDeadline(c, 5) // should complete
	//doUnaryWithDeadline(c, 1) // should timeout
}

// doUnaryWithDeadline calls GreetWithDeadline with a context deadline of
// `timeout` seconds. NOTE: callers pass a unit-less count (e.g. 5) even
// though the parameter is a time.Duration; timeout*time.Second converts it.
func doUnaryWithDeadline(c greetpb.GreetServiceClient, timeout time.Duration) {
	fmt.Println("Starting to do Unary RPC with deadline!")
	req := &greetpb.GreetWithDeadlineRequest{
		Greeting: &greetpb.Greeting{
			FirstName: "yang",
			LastName:  "hu",
		},
	}

	ctx, cancel := context.WithTimeout(context.Background(), timeout*time.Second)
	defer cancel()

	res, err := c.GreetWithDeadline(ctx, req)
	if err != nil {
		statusErr, ok := status.FromError(err)
		if ok {
			if statusErr.Code() == codes.DeadlineExceeded {
				fmt.Println("Timeout was hit! Deadline was exceeded!")
				fmt.Printf("error message: %v\n", statusErr.Message())
			} else {
				fmt.Printf("unexpected error: %v", statusErr)
			}
			return
		}
		// Fix: report the original error; the previous code logged statusErr
		// even though FromError reported it was not a status error.
		log.Fatalf("fatal error while calling Greet RPC: %v", err)
	}
	log.Printf("Response from Greet: %v", res.GetResult())
}

// doUnary performs a single Greet request/response round trip.
func doUnary(c greetpb.GreetServiceClient) {
	fmt.Println("Starting to do Unary RPC!")
	req := &greetpb.GreetingRequest{
		Greeting: &greetpb.Greeting{
			FirstName: "yang",
			LastName:  "hu",
		},
	}
	res, err := c.Greet(context.Background(), req)
	if err != nil {
		log.Printf("error while calling Greet RPC: %v", err)
		// Fix: stop here rather than reading the result of a failed call.
		return
	}
	log.Printf("Response from Greet: %v", res.GetResult())
}

// doServerStream requests GreetManyTimes and drains the server stream until
// io.EOF, logging each message.
func doServerStream(c greetpb.GreetServiceClient) {
	fmt.Println("Starting to do Server Streaming RPC!")
	req := &greetpb.GreetManyTimesRequest{
		Greeting: &greetpb.Greeting{
			FirstName: "yang",
			LastName:  "hu",
		},
	}
	stream, err := c.GreetManyTimes(context.Background(), req)
	if err != nil {
		log.Fatalln("error while calling GreetManyTimes RPC:", err)
	}
	for {
		msg, err := stream.Recv()
		if err == io.EOF {
			// the server closed the stream: we are done
			break
		}
		if err != nil {
			log.Fatalln("error while reading stream:", err)
		}
		result := msg.GetResult()
		log.Printf("Response from GreetManyTimes: %v\n", result)
	}
}

// doClientStream sends a batch of LongGreet requests on one client stream and
// reads the single aggregated response.
func doClientStream(c greetpb.GreetServiceClient) {
	fmt.Println("Starting to do Client Streaming RPC!")

	requests := []*greetpb.LongGreetRequest{
		{
			Greeting: &greetpb.Greeting{
				FirstName: "Yang",
			},
		},
		{
			Greeting: &greetpb.Greeting{
				FirstName: "Rin",
			},
		},
		{
			Greeting: &greetpb.Greeting{
				FirstName: "Lewis",
			},
		},
		{
			Greeting: &greetpb.Greeting{
				FirstName: "Ben",
			},
		},
		{
			Greeting: &greetpb.Greeting{
				FirstName: "Shane",
			},
		},
	}

	stream, err := c.LongGreet(context.Background())
	errors.HandleError("error while calling Long Greet", err)

	for _, req := range requests {
		fmt.Printf("Sending request: %v\n", req)
		err := stream.Send(req)
		errors.HandleError("error while sending LongGreet request", err)
		// NOTE(review): Microsecond looks like it was meant to be
		// Millisecond; kept as-is to preserve behavior — confirm intent.
		time.Sleep(100 * time.Microsecond)
	}

	res, err := stream.CloseAndRecv()
	errors.HandleError("error while receiving response from LongGreet", err)
	fmt.Printf("LongGreet Response: %v\n", res.GetResult())
}

// doBiDirectionalStream sends and receives GreetEveryone messages
// concurrently: one goroutine writes all requests then closes the send side,
// another reads responses until io.EOF.
func doBiDirectionalStream(c greetpb.GreetServiceClient) {
	var wg sync.WaitGroup
	fmt.Println("Starting to do Bi-directional Streaming RPC!")

	// create a stream by invoking the client
	stream, err := c.GreetEveryone(context.Background())
	errors.HandleError("error while calling GreetEveryone RPC", err)
	wg.Add(1)

	requests := []*greetpb.GreetEveryoneRequest{
		{
			Greeting: &greetpb.Greeting{
				FirstName: "Yang",
			},
		},
		{
			Greeting: &greetpb.Greeting{
				FirstName: "Rin",
			},
		},
		{
			Greeting: &greetpb.Greeting{
				FirstName: "Lewis",
			},
		},
		{
			Greeting: &greetpb.Greeting{
				FirstName: "Ben",
			},
		},
		{
			Greeting: &greetpb.Greeting{
				FirstName: "Shane",
			},
		},
	}

	// send a bunch of messages to the server
	go func() {
		for _, req := range requests {
			fmt.Printf("Sending message: %v\n", req)
			err := stream.Send(req)
			errors.HandleError("error while sending message to server", err)
			time.Sleep(time.Second)
		}
		err := stream.CloseSend()
		errors.HandleError("error while closing sending to server", err)
	}()

	// receive a bunch of messages from the server
	go func() {
		for {
			res, err := stream.Recv()
			if err == io.EOF {
				wg.Done()
				return
			}
			errors.HandleError("error while receiving response from server", err)
			fmt.Printf("Received: %v\n", res.GetResult())
		}
	}()

	// block until everything is done
	wg.Wait()
}
package database

import (
	"database/sql"
	"encoding/json"
)

type (
	// NullString wraps sql.NullString so that it marshals to/from JSON as
	// either a plain string or null, instead of sql.NullString's struct form.
	NullString struct {
		sql.NullString
	}
	// NullInt64 wraps sql.NullInt64 with the same JSON null handling.
	NullInt64 struct {
		sql.NullInt64
	}
)

// MarshalJSON emits the string value when Valid, otherwise JSON null.
func (t NullString) MarshalJSON() ([]byte, error) {
	if t.Valid {
		return json.Marshal(t.String)
	} else {
		return json.Marshal(nil)
	}
}

// UnmarshalJSON accepts a JSON string or null; null clears Valid.
func (t *NullString) UnmarshalJSON(data []byte) error {
	// Decode into a *string so JSON null becomes a nil pointer.
	var v *string
	if err := json.Unmarshal(data, &v); err != nil {
		return err
	}
	if v != nil {
		t.Valid = true
		t.String = *v
	} else {
		t.Valid = false
	}
	return nil
}

// MarshalJSON emits the integer value when Valid, otherwise JSON null.
func (t NullInt64) MarshalJSON() ([]byte, error) {
	if t.Valid {
		return json.Marshal(t.Int64)
	} else {
		return json.Marshal(nil)
	}
}

// UnmarshalJSON accepts a JSON number or null; null clears Valid.
func (t *NullInt64) UnmarshalJSON(data []byte) error {
	var v *int64
	if err := json.Unmarshal(data, &v); err != nil {
		return err
	}
	if v != nil {
		t.Valid = true
		t.Int64 = *v
	} else {
		t.Valid = false
	}
	return nil
}
package main

import (
	"fmt"
	"log"
	"strconv"
	"time"
)

// SysDeposit re-sends the agent notification for an existing deposit.
// It looks up the deposit and its agent, refreshes the callback URL and payer
// info, then pushes the notification; the returned map carries "code"/"msg".
func SysDeposit(depositNumber string, platfrom string) map[string]interface{} {
	data := make(map[string]interface{})
	deposit, err := dbManager.GetDeposit(depositNumber, platfrom)
	if deposit == nil || err != nil {
		data["code"] = ERROR_DEPOSIT_NOT_EXISTS
		data["msg"] = "Deposit not exists."
		return data
	}

	platfromId, _ := strconv.Atoi(platfrom)
	agent, err := dbManager.GetAgentById(platfromId)
	if agent == nil || err != nil {
		data["code"] = ERROR_PLATFROM_NOT_EXISTS
		data["msg"] = "Platfrom not exists."
		return data
	}
	deposit.CallbackUrl = agent.CallbackUrl

	if deposit.Remark != "" {
		payer, _ := dbManager.GetPayer(deposit.WechatName, deposit.Remark, deposit.Amount)
		log.Println("[SysDeposit] payer:", payer)
		// Fix: the original called UpdateMidpayRecord with payer.RequestTime
		// outside this nil check, which panics when no payer is found.
		if payer != nil {
			deposit.CustomSign = payer.CustomSign
			dbManager.UpdateMidpayRecord(deposit.WechatName, deposit.Remark, deposit.Amount, payer.RequestTime)
			log.Println("[SysDeposit] UpdateMidpayRecord:", payer)
		}
	}

	log.Printf("SysDeposit:[%v] Platfrom:%v Type:%v CallbackUrl:%v Payer:%s CS:%v\n",
		deposit.DepositNumber, deposit.Platfrom, deposit.PayType, deposit.CallbackUrl, deposit.Remark, deposit.CustomSign)

	status := SendNotifcationToAgent(deposit)
	data["code"] = status
	if status != 200 {
		data["msg"] = fmt.Sprintf("sync deposit failed.Number:%s Platfrom:%s", deposit.DepositNumber, deposit.CallbackUrl)
	} else {
		data["msg"] = "success"
	}
	return data
}

// genAccountAmount generates (or refreshes) a fixed-amount payment QR code
// for the account named in input; requires "account" and "amount" fields.
func genAccountAmount(input map[string]interface{}) map[string]interface{} {
	data := make(map[string]interface{})
	fields := []string{"account", "amount"}
	if !verifyFields(input, fields) {
		return GetMissingFieldsError()
	}

	accountInfo, err := dbManager.CheckAccountExists(input["account"].(string))
	if err != nil {
		log.Println("[GenAmountQRCode]Can't find account info.", input["account"])
		data["code"] = 400
		data["msg"] = "Can't find account info"
		return data
	}

	noteMiddle := String(2)
	amount, _ := strconv.ParseFloat(input["amount"].(string), 64)
	if amount <= 0 {
		data["code"] = 400
		data["msg"] = fmt.Sprintf("Amount can't be zero. Amount:%.2f", amount)
		return data
	}

	// The note sign encodes (IP suffix)-(random tag)-(amount) so the payment
	// can later be matched back to this account.
	noteSign := fmt.Sprintf("%s-%s-%.2f", accountInfo.IP[2:], noteMiddle, amount)
	qrUrl := getPayAppUrl(accountInfo.Account, accountInfo.Type, noteSign, amount)
	if qrUrl != "" {
		exist, _ := dbManager.CheckSFPayNoteExists(accountInfo.Account, amount)
		if exist {
			err := dbManager.UpdateSFPay(accountInfo.Account, accountInfo.Platfrom, amount, noteSign, qrUrl)
			if err == nil {
				log.Printf("[GenAmountQRCode] Update QrCode: Account:%s amount:%.2f qrcode:%s\n", accountInfo.Account, amount, qrUrl)
				data["code"] = 200
				data["msg"] = "success. Mode: Update"
			} else {
				log.Printf("[GenAmountQRCode] Update QrCode Error: Account:%s amount:%.2f qrcode:%s\n", accountInfo.Account, amount, qrUrl)
				// NOTE(review): this failure path still reports code 200 —
				// kept for compatibility, but confirm it is intentional.
				data["code"] = 200
				data["msg"] = "Update Failed. QRCode:" + err.Error()
			}
		} else {
			state, _ := dbManager.InsertSFPay(accountInfo, amount, noteSign, qrUrl)
			if state {
				log.Printf("[GenAmountQRCode] Insert QrCode:Account:%s amount:%.2f qrcode:%s\n", accountInfo.Account, amount, qrUrl)
				data["code"] = 200
				data["msg"] = "success. Mode: Insert"
			} else {
				log.Printf("[GenAmountQRCode] Insert QrCode Error: Account:%s amount:%.2f qrcode:%s\n", accountInfo.Account, amount, qrUrl)
				data["code"] = 400
				data["msg"] = "Insert Failed. QRCode:" + qrUrl
			}
		}
	} else {
		data["code"] = 402
		data["msg"] = "Can't generate the QRCoce."
	}
	return data
}

// manualSaveDeposit validates a manually entered deposit record, enriches it
// with payer/account info, and persists it via saveDeposit.
func manualSaveDeposit(record map[string]interface{}) map[string]interface{} {
	data := make(map[string]interface{})
	log.Println("manualSaveDeposit: The deposit not exsits. create new.")
	var depositRecord DepositRecord

	// Required-field checks (error codes/messages preserved from the
	// original, including the createUser code reuse — confirm intent).
	if record["createUser"] == nil {
		log.Println("createUser not exists.")
		data["code"] = ERROR_DEPOSIT_NUMBER_EXISTS
		data["msg"] = "createUser number exists."
		return data
	}
	if record["platfrom"] == nil {
		log.Println("platfrom not exists.")
		data["code"] = ERROR_PLATFROM_NOT_EXISTS
		data["msg"] = "platfrom not exists."
		return data
	}
	if record["amount"] == nil {
		log.Println("Amount not exists.")
		data["code"] = ERROR_PLATFROM_NOT_EXISTS
		data["msg"] = "Amount not exists."
		return data
	}
	if record["wechatName"] == nil {
		log.Println("account not exists.")
		data["code"] = ERROR_PLATFROM_NOT_EXISTS
		data["msg"] = "account not exists."
		return data
	}
	if record["payAccount"] == nil {
		log.Println("payAccount not exists.")
		data["code"] = ERROR_PLATFROM_NOT_EXISTS
		data["msg"] = "payAccount not exists."
		return data
	}
	if record["depositNumber"] == nil {
		log.Println("DepositNumber not exists.")
		data["code"] = ERROR_PLATFROM_NOT_EXISTS
		data["msg"] = "DepositNumber not exists."
		return data
	}
	if record["note"] == nil {
		log.Println("Note not exists.")
		data["code"] = ERROR_PLATFROM_NOT_EXISTS
		data["msg"] = "Note not exists."
		return data
	}
	if record["tranTime"] == nil {
		log.Println("transferTime not exists.")
		data["code"] = ERROR_PLATFROM_NOT_EXISTS
		data["msg"] = "transferTime not exists."
		return data
	}

	// Reject duplicates: the deposit number must not already exist.
	deposit, err := dbManager.GetDeposit(record["depositNumber"].(string), record["platfrom"].(string))
	if deposit != nil {
		data["code"] = ERROR_DEPOSIT_NUMBER_EXISTS
		data["msg"] = "deposit number exists."
		return data
	}

	accountInfo, err := dbManager.CheckAccountExists(record["wechatName"].(string))
	if err != nil || accountInfo.Account == "" {
		log.Println("account not exists.")
		data["code"] = ERROR_ACCOUNT_NOT_EXISTS
		data["msg"] = "Account not exists."
		return data
	}

	payerRecord, err := dbManager.GetPayerInfo(record["wechatName"].(string), record["amount"].(string), record["note"].(string))
	if err != nil {
		log.Println("manualSaveDeposit PayerRecord Error:", err)
	}
	record["sign"] = record["note"]
	if payerRecord.Payer != "" {
		record["sign"] = record["note"]
		record["note"] = payerRecord.Payer
		depositRecord.Remark = payerRecord.Payer
		depositRecord.CustomSign = payerRecord.CustomSign
	}

	amount, _ := strconv.ParseFloat(record["amount"].(string), 64)
	depositRecord.Amount = amount
	depositRecord.CreateUser = record["createUser"].(string)
	depositRecord.DepositNumber = record["depositNumber"].(string)
	depositRecord.Note = record["note"].(string)
	depositRecord.PayAccount = record["payAccount"].(string)
	depositRecord.Platfrom, _ = strconv.Atoi(record["platfrom"].(string))
	if depositRecord.Platfrom == 0 {
		depositRecord.Platfrom = accountInfo.Platfrom
		log.Println("ManualSaveDeposit: Can't find platform from client. Use account platform")
	}

	// Data init
	depositRecord.CreateTime = time.Now()
	depositRecord.TranTime = depositRecord.CreateTime
	depositRecord.ExcuteTime = depositRecord.CreateTime

	// Fix: the original re-checked record["tranTime"] == nil here, but that
	// case already returned above, so only the parse branch can run.
	// tranTime arrives as epoch milliseconds; normalize to a timestamp string.
	i, err := strconv.ParseInt(record["tranTime"].(string), 10, 64)
	if err != nil {
		log.Println("transferTime Error:", err)
	}
	i = int64(i / 1000)
	tm := time.Unix(i, 0)
	record["tranTime"] = tm.Format("2006-01-02 15:04:05")

	depositRecord.TransferTime = record["tranTime"].(string)
	depositRecord.State = STATE_PENDING
	depositRecord.Times = 0
	depositRecord.BillNo = fmt.Sprintf("%d", time.Now().UnixNano()/1000000)
	depositRecord.WechatName = record["wechatName"].(string)
	record["deviceAccount"] = depositRecord.WechatName

	log.Println("manualSaveDeposit:", depositRecord)
	data = saveDeposit(&accountInfo, &depositRecord, record)
	log.Println("manualSaveDeposit Result:", data)
	return data
}
package notificationqueue

import (
	"errors"
)

// ErrQueueDriverRequired is returned when a queue is used without a driver.
var ErrQueueDriverRequired = errors.New("queue driver required")

// ErrIDGeneratorRequired is returned when no id generator has been set.
var ErrIDGeneratorRequired = errors.New("id generator required")
package writer

import "go.uber.org/zap/zapcore"

// Writer is the log sink abstraction used by this package; it is simply
// zapcore.WriteSyncer (an io.Writer that can also Sync).
type Writer interface {
	zapcore.WriteSyncer
}
package gevent

/*
 ================================================================================
 * gevent
 * qq group: 582452342
 * email   : 2091938785@qq.com
 * author  : 美丽的地球啊 - mliu
 * ================================================================================
*/

type (
	// IEvent is the publish/subscribe surface exposed by an event channel.
	IEvent interface {
		Subscribe(ISubscriberHandler, ...int) IEvent
		Publish(IEventSource) IEvent
	}

	// EventHandler processes a single delivered Event.
	EventHandler func(*Event)

	// EventList is a slice of events.
	EventList []*Event

	// Event is a single message flowing through a channel.
	// NOTE(review): the `json:"_"` tag on isCompleted is inert — unexported
	// fields are never marshaled by encoding/json anyway; `json:"-"` was
	// probably intended.
	Event struct {
		ChannelName  string      `json:"channel_name"`  //channel name
		Name         string      `json:"name"`          //event name
		Data         interface{} `json:"data"`          //event data
		IsBroadcast  bool        `json:"is_broadcast"`  //it is broadcast
		isCompleted  bool        `json:"_"`             //it is complete
		CreationDate int64       `json:"creation_date"` //event creation time
	}
)

/* ++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
 * mapping event data
 * ++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++ */
// Map transforms the event payload in place via mapFunc.
// Broadcast events are left untouched (their payload is shared).
func (s *Event) Map(mapFunc func(data interface{}) interface{}) {
	if !s.IsBroadcast {
		s.Data = mapFunc(s.Data)
	}
}

/* ++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
 * the next subscriber of the event continues to process
 * ++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++ */
// Next lets the next subscriber continue processing.
// NOTE(review): currently a no-op in both branches — the body only returns
// early when already completed; confirm whether more was intended here.
func (s *Event) Next() {
	if s.isCompleted {
		return
	}
}

/* ++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
 * event Completed
 * ++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++ */
// Completed marks the event as fully processed.
func (s *Event) Completed() {
	s.isCompleted = true
}
package main // #include "libnative/native.h" import "C" func main() { C.native_example() }
/*
Copyright 2021 The KodeRover Authors.

Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at

    http://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/

package remotedialer

import (
	"io"
	"net"
	"sync"
	"time"
)

// clientDial opens the requested backend connection for a tunneled dial
// request and pipes it to the tunnel connection. If no custom dialer is
// supplied, a net.Dialer with the message's deadline (milliseconds) is used.
// A dial failure is reported back through conn.tunnelClose.
func clientDial(dialer Dialer, conn *connection, message *message) {
	defer conn.Close()

	var (
		netConn net.Conn
		err     error
	)

	if dialer == nil {
		netDialer := &net.Dialer{
			Timeout:   time.Duration(message.deadline) * time.Millisecond,
			KeepAlive: 30 * time.Second,
		}
		netConn, err = netDialer.Dial(message.proto, message.address)
	} else {
		netConn, err = dialer(message.proto, message.address)
	}

	if err != nil {
		conn.tunnelClose(err)
		return
	}
	defer netConn.Close()

	pipe(conn, netConn)
}

// pipe copies bytes in both directions between the tunnel connection and the
// backend until either side fails or reaches EOF; the first error (or EOF)
// tears down both ends, and the final error is written to the tunnel only
// after all copying has stopped.
func pipe(client *connection, server net.Conn) {
	wg := sync.WaitGroup{}
	wg.Add(1)

	// close tears down both sides; a nil error is normalized to io.EOF so the
	// peer always receives a definite close reason.
	close := func(err error) error {
		if err == nil {
			err = io.EOF
		}
		client.doTunnelClose(err)
		server.Close()
		return err
	}

	go func() {
		defer wg.Done()
		_, err := io.Copy(server, client)
		close(err)
	}()

	_, err := io.Copy(client, server)
	err = close(err)
	wg.Wait()

	// Write tunnel error after no more I/O is happening, just incase messages get out of order
	client.writeErr(err)
}
// Copyright (C) 2017 Google Inc.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
//      http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

package f64

import "math"

// MinOf returns the minimum value of all the arguments.
func MinOf(a float64, b ...float64) float64 {
	v := a
	for _, x := range b {
		if x < v {
			v = x
		}
	}
	return v
}

// MaxOf returns the maximum value of all the arguments.
func MaxOf(a float64, b ...float64) float64 {
	v := a
	for _, x := range b {
		if x > v {
			v = x
		}
	}
	return v
}

// Round rounds v to the nearest integer.
// Examples:
//   Round(-0.9) = -1
//   Round(-0.1) = 0
//   Round(0.0) = 0
//   Round(0.1) = 0
//   Round(0.9) = 1
func Round(v float64) int {
	// Halfway cases round away from zero, so negatives need Ceil.
	if v < 0 {
		return int(math.Ceil(v - 0.5))
	}
	return int(math.Floor(v + 0.5))
}

// FromBits takes binary representation of floating-point value
// with user-defined bit sizes and expands it to full float64.
// FromBits expands a floating-point value packed into val — with expBits
// exponent bits and manBits mantissa bits — into a standard float64.
// Signed zeros, denormals, infinities and NaNs are all mapped to their
// float64 counterparts, and the sign bit is preserved in every case.
func FromBits(val uint64, expBits, manBits uint32) float64 {
	mantissaMask := (uint64(1) << manBits) - 1
	exponentMask := (uint64(1) << expBits) - 1
	bias := exponentMask / 2

	const f64ExpMask = (1 << 11) - 1
	const f64Bias = f64ExpMask / 2 // 1023

	// Unpack the fields: mantissa in the low bits, then exponent, then sign.
	mantissa := val & mantissaMask
	rest := val >> manBits
	exponent := rest & exponentMask
	sign := rest >> expBits

	switch {
	case exponent == 0 && mantissa == 0:
		// Signed zero: only the sign bit survives.
		return math.Float64frombits(sign << 63)
	case exponent == 0:
		if exponentMask != f64ExpMask {
			// Denormal in a narrower format: float64 has enough exponent
			// range to hold it as a normal number, so normalize it.
			exponent++
			for mantissa&(uint64(1)<<manBits) == 0 {
				mantissa *= 2
				exponent--
			}
			mantissa &= mantissaMask
			exponent += f64Bias - bias
		}
		// A float64-shaped denormal stays denormal as-is.
	case exponent < exponentMask:
		// Normal number: rebase the exponent onto float64's bias.
		exponent += f64Bias - bias
	default:
		// exponent == exponentMask: Inf or NaN — saturate the exponent and
		// keep the mantissa bits (preserving the NaN payload).
		exponent = f64ExpMask
	}

	// Repack sign, exponent and mantissa into float64 layout.
	packed := (sign << 63) | (exponent << 52) | (mantissa << (52 - manBits))
	return math.Float64frombits(packed)
}
package problem0290

import "strings"

// wordPattern reports whether the space-separated words of str follow
// pattern, i.e. there is a one-to-one mapping between pattern letters and
// words (LeetCode 290).
// NOTE(review): pos is a byte index into pattern; this matches strArr only
// while pattern is single-byte (ASCII) — fine for the problem's constraints.
func wordPattern(pattern string, str string) bool {
	strArr := strings.Split(str, " ")
	// A bijection requires the same number of letters and words.
	if len(pattern) != len(strArr) {
		return false
	}
	// m1 maps letter -> word, m2 maps word -> letter; checking both
	// directions enforces that the mapping is one-to-one.
	m1 := make(map[rune]string, len(strArr))
	m2 := make(map[string]rune, len(strArr))
	for pos, v := range pattern {
		s := strArr[pos]
		if _, ok := m1[v]; ok {
			if m1[v] != s {
				return false
			}
		}
		m1[v] = s
		if _, ok := m2[s]; ok {
			if m2[s] != v {
				return false
			}
		}
		m2[s] = v
	}
	return true
}
package bfs

import (
	"testing"

	"github.com/victorfernandesraton/bfs-and-dfs/node"
)

// TestIsValid builds a small two-level tree and checks that Execution (BFS)
// reports the expected deepest level and visits all five nodes.
func TestIsValid(t *testing.T) {
	// Tree shape: one is the root with children zero and two;
	// two has children three and another ("F").
	zero := &node.Node{
		Value: 0,
		Index: 0,
	}
	one := &node.Node{
		Value: 1,
		Index: 0,
	}
	two := &node.Node{
		Value: 2,
		Index: 0,
	}
	one.AddChildren(zero)
	one.AddChildren(two)
	three := &node.Node{
		Value: 3,
		Index: 0,
	}
	another := &node.Node{
		Value: "F",
		Index: 0,
	}
	two.AddChildren(three)
	two.AddChildren(another)

	res := Execution(one, &node.Output{})

	if res.LastLevel != 2 {
		t.Errorf("Not expected index %v", res.LastLevel)
	}

	if len(res.Queue) != 5 {
		t.Errorf("Unexpected query elements expected 5 have %v", len(res.Queue))
	}
}
package containeraction

import (
	"context"
	"encoding/json"
	"os"
	"os/exec"
	"path"
	"path/filepath"
	"regexp"
	"runtime"
	"strconv"
	"strings"
	"time"

	commonapi "github.com/cidverse/cid/pkg/common/api"
	"github.com/cidverse/cid/pkg/common/command"
	"github.com/cidverse/cid/pkg/core/catalog"
	"github.com/cidverse/cid/pkg/core/restapi"
	"github.com/cidverse/cid/pkg/core/state"
	"github.com/cidverse/cidverseutils/pkg/cihelper"
	"github.com/cidverse/cidverseutils/pkg/containerruntime"
	"github.com/cidverse/cidverseutils/pkg/network"
	"github.com/google/uuid"
	"github.com/labstack/echo/v4"
	"github.com/rs/zerolog/log"
)

// Executor runs catalog actions inside a container runtime.
type Executor struct{}

// GetName returns the executor identifier.
func (e Executor) GetName() string {
	return "container"
}

// GetVersion returns the executor version.
func (e Executor) GetVersion() string {
	return "0.1.0"
}

// GetType returns the action type this executor handles.
func (e Executor) GetType() string {
	return string(catalog.ActionTypeContainer)
}

// Execute runs a container-based action: it starts a local REST API (unix
// socket, or TCP on Windows) for the action to call back into, launches the
// action's container with the project mounted, and reports the command result.
func (e Executor) Execute(ctx *commonapi.ActionExecutionContext, localState *state.ActionStateContext, catalogAction *catalog.Action, action *catalog.WorkflowAction) error {
	// api (port or socket)
	freePort, err := network.GetFreePort()
	if err != nil {
		log.Fatal().Err(err).Msg("no free ports available")
	}
	apiPort := strconv.Itoa(freePort)
	socketFile := path.Join(ctx.Paths.Temp, strings.ReplaceAll(uuid.New().String(), "-", "")+".socket")

	// properties
	secret := generateSecret()
	buildID := generateSnowflakeId()
	jobID := generateSnowflakeId()

	// pass config
	// NOTE(review): the guard checks ctx.Config but marshals action.Config —
	// looks inconsistent; behavior kept as-is, confirm which was intended.
	var actionConfig string
	if len(ctx.Config) > 0 {
		actionConfigJSON, _ := json.Marshal(action.Config)
		actionConfig = string(actionConfigJSON)
	}

	// listen
	apiEngine := restapi.Setup(restapi.APIConfig{
		BuildID:       buildID,
		JobID:         jobID,
		ProjectDir:    ctx.ProjectDir,
		Modules:       ctx.Modules,
		CurrentModule: ctx.CurrentModule,
		CurrentAction: catalogAction,
		Env:           ctx.Env,
		ActionConfig:  actionConfig,
		State:         localState,
		TempDir:       filepath.Join(ctx.ProjectDir, ".tmp"),
		ArtifactDir:   filepath.Join(ctx.ProjectDir, ".dist"),
	})
	restapi.SecureWithAPIKey(apiEngine, secret)
	go func() {
		if runtime.GOOS == "windows" {
			restapi.ListenOnAddr(apiEngine, ":"+apiPort)
		} else {
			restapi.ListenOnSocket(apiEngine, socketFile)
		}
	}()

	// shutdown listener (on function end)
	defer func(apiEngine *echo.Echo, ctx context.Context) {
		err := apiEngine.Shutdown(ctx)
		if err != nil {
			log.Fatal().Err(err).Msg("failed to shutdown rest api")
		}
	}(apiEngine, context.Background())
	if runtime.GOOS != "windows" {
		defer func() {
			if _, err := os.Stat(socketFile); err == nil {
				_ = os.Remove(socketFile)
			}
		}()
	}

	// wait a short moment for the unix socket to be created / the api endpoint to be ready
	time.Sleep(100 * time.Millisecond)

	// create temp dir for action
	tempDir := filepath.Join(ctx.Paths.Temp, jobID)
	createPath(tempDir)
	log.Debug().Str("dir", tempDir).Msg("creating temp dir")
	defer func() {
		log.Debug().Str("dir", tempDir).Msg("cleaning up temp dir")
		_ = os.RemoveAll(tempDir)
	}()

	// configure container
	containerExec := containerruntime.Container{}
	containerExec.SetImage(catalogAction.Container.Image)
	containerExec.SetCommand(insertCommandVariables(catalogAction.Container.Command, *catalogAction))
	containerExec.AddVolume(containerruntime.ContainerMount{
		MountType: "directory",
		Source:    ctx.ProjectDir,
		Target:    cihelper.ToUnixPath(ctx.ProjectDir),
	})
	containerExec.SetWorkingDirectory(cihelper.ToUnixPath(ctx.ProjectDir))
	if runtime.GOOS == "windows" {
		// windows does not support unix sockets
		containerExec.SetUserArgs("--net host")
		containerExec.AddEnvironmentVariable("CID_API_ADDR", "http://host.docker.internal:"+apiPort)
	} else {
		// socket-based sharing of the api is more secure than sharing the host network
		containerExec.AddVolume(containerruntime.ContainerMount{
			MountType: "directory",
			Source:    socketFile,
			Target:    socketFile,
		})
		containerExec.AddEnvironmentVariable("CID_API_SOCKET", socketFile)
	}
	containerExec.AddEnvironmentVariable("CID_API_SECRET", secret)

	// enterprise (proxy, ca-certs)
	command.ApplyProxyConfiguration(&containerExec)
	for _, cert := range catalogAction.Container.Certs {
		command.ApplyCertMount(&containerExec, command.GetCertFileByType(cert.Type), cert.ContainerPath)
	}

	// catalogAction access: only expose env vars matching the allowed patterns
	if len(catalogAction.Access.Env) > 0 {
		for k, v := range ctx.Env {
			for _, pattern := range catalogAction.Access.Env {
				if regexp.MustCompile(pattern).MatchString(k) {
					containerExec.AddEnvironmentVariable(k, v)
				}
			}
		}
	}

	containerCmd, containerCmdErr := containerExec.GetRunCommand(containerExec.DetectRuntime())
	if containerCmdErr != nil {
		return containerCmdErr
	}
	log.Debug().Str("action", catalogAction.Name).Msg("container command for action: " + containerCmd)

	stdout, stderr, cmdErr := command.RunCommandAndGetOutput(containerCmd, nil, "")
	exitErr, isExitError := cmdErr.(*exec.ExitError)
	if isExitError {
		log.Error().Int("exit_code", exitErr.ExitCode()).Str("message", exitErr.Error()).Str("stdout", stdout).Str("stderr", stderr).Msg("command failed")
		return cmdErr
	} else if cmdErr != nil {
		// Fix: cmdErr is not an *exec.ExitError here, so exitErr is nil and
		// the original exitErr.Error() call panicked; log cmdErr instead.
		log.Error().Int("exit_code", 1).Str("message", cmdErr.Error()).Str("stdout", stdout).Str("stderr", stderr).Msg("command failed")
		return cmdErr
	}

	return nil
}
package main

import (
	"fmt"
	"math/rand"
	"os"
	"strconv"
)

// func main() {
// 	// parse()
// 	s3()
// }

// parse reads os.Args[1] as a Fahrenheit temperature, converts it to Celsius,
// and prints both with two decimal places. It exits with status 1 when the
// argument is missing or not a valid float.
func parse() {
	// Guard against a missing argument; the original indexed os.Args[1]
	// unconditionally and panicked when run with no arguments.
	if len(os.Args) < 2 {
		fmt.Println("missing temperature argument")
		os.Exit(1)
	}
	f, e := strconv.ParseFloat(os.Args[1], 64)
	if e != nil {
		fmt.Println("error parsing float")
		os.Exit(1)
	}
	cValue := fToC(f)
	fmt.Printf("%.2fF = %.2fC\n", f, cValue)
}

// s3 is a walkthrough of Go pointer semantics: aliasing, dereferencing,
// pointer-to-pointer, and passing a pointer into a function. It only prints;
// it computes nothing reusable.
func s3() {
	i := pt()
	fmt.Println("> i := pt()")
	fmt.Printf("i is now a pointer to some address containing %v\n", *i)
	origI := *i
	j := &i
	fmt.Println("> j := &i")
	fmt.Println("j is now a pointer that points to i (aka points to another pointer), so *i == **j and &i == *&j")
	k := *i
	fmt.Println("> k := *i")
	fmt.Println("k takes the value of *i, but it is not a pointer, so modifying *i will not modify k")
	fmt.Println("address to i is now *&j, dereference j and then get the value")
	fmt.Printf("location of i: %v\n", &i)
	fmt.Printf("location of j: %v\n", &j)
	fmt.Printf("value of j: %v and address that value points to: %v\n", *j, *&j)
	fmt.Printf("location of k: %v\n", &k)
	fmt.Printf("data at i: %d\n", *i)
	fmt.Printf("data at j: %d\n", **j)
	fmt.Printf("data of k: %d\n", k)
	*i = 3
	fmt.Println("> *i = 3")
	fmt.Printf("value at i is now %d\n", *i)
	fmt.Printf("k is still %v: %v\n", origI, k == origI)
	fmt.Printf("**j is still equal to *i: %v\n", *i == **j)
	l := i
	fmt.Println("> l = i")
	i = pt()
	fmt.Println("> i = pt()")
	fmt.Printf("*i is now %v\n", *i)
	fmt.Println("i points to some other address from pt(), j will still point to the address of &i, and fully dereference to the same value since the location of i hasn't moved")
	fmt.Printf("*i == **j: %v\n", *i == **j)
	fmt.Printf("however, the old value is still at previous *&i (which we saved in l), *l = %v\n", *l)
	ptInc(*&i)
	fmt.Println("> ptInc(*&i)")
	fmt.Printf("passing by reference to pointer i, i is now %v\n", *i)
}

// fToC converts a Fahrenheit temperature to Celsius.
func fToC(f float64) float64 {
	return (f - 32) * 5 / 9
}

// pt returns a pointer to a freshly allocated int in [0, 100).
func pt() *int {
	i := rand.Intn(100)
	return &i
}

// ptInc increments the int that p points at, in place.
func ptInc(p *int) {
	*p++
}
package strings

import (
	"fmt"
)

// isRotation reports whether str2 is a rotation of str1
// (e.g. "erbottlewat" is a rotation of "waterbottle").
//
// Two strings are rotations of each other exactly when they have the same
// length and one occurs as a substring of the other concatenated with itself.
// The original implementation compared against str2[1] (a typo for index 0),
// sliced with point-1 (off by one, and panicking when point == 0), and never
// checked the lengths; all three are fixed here.
func isRotation(str1, str2 string) bool {
	if len(str1) != len(str2) {
		return false
	}
	if len(str1) == 0 {
		// Two empty strings are trivially rotations of each other.
		return true
	}
	longStr := str2 + str2
	return isSubString(longStr, str1)
}

// isSubString reports whether str2 occurs as a contiguous substring of str1.
// (The original was a stub that always returned true, making isRotation
// vacuously true for any same-length inputs.)
func isSubString(str1, str2 string) bool {
	for i := 0; i+len(str2) <= len(str1); i++ {
		if str1[i:i+len(str2)] == str2 {
			return true
		}
	}
	return false
}

func main() {
	fmt.Println(isRotation("waterbottle", "erbottlewat"))
}
// Package zipper responsible to download files over HTTP, compress them on
// the fly and pipe the zip output to given destination.
package zipper

import (
	"archive/zip"
	"errors"
	"io"
	"net/http"
	"time"
)

// DownloaderClient used to download files over HTTP.
// The 30s timeout bounds each individual file download.
var DownloaderClient = &http.Client{
	Timeout: 30 * time.Second,
}

// File represents a file that needs to be downloaded over HTTP.
type File struct {
	URL  string `json:"url"`
	Name string `json:"filename"`
}

// Download downloads files one by one, zips them on the fly and writes zip output to w as a stream.
// This is a pipe, writes only can continue as long as reads continuously made by the caller until EOF.
// When skipOnFail is set to true it'll skip the failed files and continue to process remaining ones.
//
// The zip writer's Close error is now propagated: Close flushes the archive's
// central directory, and discarding its error (as the original defer did)
// could report success for a truncated or corrupt zip.
func Download(files []File, w io.Writer, skipOnFail bool) error {
	zw := zip.NewWriter(w)

	// download fetches a single file and streams its body into the archive.
	download := func(f File) error {
		resp, err := DownloaderClient.Get(f.URL)
		if err != nil {
			return err
		}
		defer resp.Body.Close()
		if resp.StatusCode != http.StatusOK {
			return errors.New("invalid file")
		}
		fw, err := zw.Create(f.Name)
		if err != nil {
			return err
		}
		_, err = io.Copy(fw, resp.Body)
		return err
	}

	for _, f := range files {
		if err := download(f); err != nil && !skipOnFail {
			// Best-effort close; the download error is the one worth reporting.
			zw.Close()
			return err
		}
	}
	// Flush the central directory and surface any error doing so.
	return zw.Close()
}
package sinks import ( "fmt" "github.com/matang28/reshape/reshape/serde" ) type DebugSink struct { serializer serde.Serializer } func NewDebugSink() *DebugSink { return &DebugSink{serializer: serde.FmtSerializer} } func NewCustomDebugSink(serializer serde.Serializer) *DebugSink { return &DebugSink{serializer: serializer} } func (this *DebugSink) Dump(objects ...interface{}) error { for _, o := range objects { str, err := this.serializer(o) if err != nil { return err } fmt.Printf(str + "\n") } return nil } func (this *DebugSink) Close() error { return nil }
package mediator

import (
	"context"
)

// Sender dispatches a Message and returns the result produced for it.
//
// Message is declared elsewhere in this package. The interface{} return is
// presumably the handler's reply value — confirm against implementations.
type Sender interface {
	// Send delivers msg, returning the response and any dispatch error.
	Send(context.Context, Message) (interface{}, error)
}
package sysinfo import ( "io/ioutil" "os" "path" "path/filepath" "testing" ) func TestReadProcBool(t *testing.T) { tmpDir, err := ioutil.TempDir("", "test-sysinfo-proc") if err != nil { t.Fatal(err) } defer os.RemoveAll(tmpDir) procFile := filepath.Join(tmpDir, "read-proc-bool") if err := ioutil.WriteFile(procFile, []byte("1"), 644); err != nil { t.Fatal(err) } if !readProcBool(procFile) { t.Fatal("expected proc bool to be true, got false") } if err := ioutil.WriteFile(procFile, []byte("0"), 644); err != nil { t.Fatal(err) } if readProcBool(procFile) { t.Fatal("expected proc bool to be false, got false") } if readProcBool(path.Join(tmpDir, "no-exist")) { t.Fatal("should be false for non-existent entry") } }
package main

import "github.com/onuryartasi/scaler/pkg/protocol/grpc"

// main starts the scaler's gRPC server. RunServer is expected to block for
// the lifetime of the process — confirm against pkg/protocol/grpc.
func main() {
	grpc.RunServer()
}
package exchange

import (
	"context"
	"encoding/json"
	"net/http"
	"net/http/httptest"
	"testing"
	"time"

	"github.com/prebid/prebid-server/adapters"
	"github.com/prebid/prebid-server/config"
	"github.com/prebid/prebid-server/currency"
	"github.com/prebid/prebid-server/exchange/entities"
	"github.com/prebid/prebid-server/gdpr"
	"github.com/prebid/prebid-server/hooks/hookexecution"
	metricsConfig "github.com/prebid/prebid-server/metrics/config"
	"github.com/prebid/prebid-server/openrtb_ext"
	"github.com/prebid/prebid-server/util/ptrutil"

	"github.com/prebid/openrtb/v19/openrtb2"
	"github.com/stretchr/testify/assert"
)

// Using this set of bids in more than one test.
// All three bids target the same imp ("some-imp"), so they compete directly:
// appnexus's 0.7 bid wins, rubicon's 0.6 contends, appnexus's 0.5 loses.
var mockBids = map[openrtb_ext.BidderName][]*openrtb2.Bid{
	openrtb_ext.BidderAppnexus: {{
		ID:    "losing-bid",
		ImpID: "some-imp",
		Price: 0.5,
		CrID:  "1",
	}, {
		ID:    "winning-bid",
		ImpID: "some-imp",
		Price: 0.7,
		CrID:  "2",
	}},
	openrtb_ext.BidderRubicon: {{
		ID:    "contending-bid",
		ImpID: "some-imp",
		Price: 0.6,
		CrID:  "3",
	}},
}

// Prevents #378. This is not a JSON test because the cache ID values aren't reproducible, which makes them a pain to test in that format.
func TestTargetingCache(t *testing.T) { bids := runTargetingAuction(t, mockBids, true, true, true, false) // Make sure that the cache keys exist on the bids where they're expected to assertKeyExists(t, bids["winning-bid"], string(openrtb_ext.HbCacheKey), true) assertKeyExists(t, bids["winning-bid"], openrtb_ext.HbCacheKey.BidderKey(openrtb_ext.BidderAppnexus, MaxKeyLength), true) assertKeyExists(t, bids["contending-bid"], string(openrtb_ext.HbCacheKey), false) assertKeyExists(t, bids["contending-bid"], openrtb_ext.HbCacheKey.BidderKey(openrtb_ext.BidderRubicon, MaxKeyLength), true) assertKeyExists(t, bids["losing-bid"], string(openrtb_ext.HbCacheKey), false) assertKeyExists(t, bids["losing-bid"], openrtb_ext.HbCacheKey.BidderKey(openrtb_ext.BidderAppnexus, MaxKeyLength), false) //assert hb_cache_host was included assert.Contains(t, string(bids["winning-bid"].Ext), string(openrtb_ext.HbConstantCacheHostKey)) assert.Contains(t, string(bids["winning-bid"].Ext), "www.pbcserver.com") //assert hb_cache_path was included assert.Contains(t, string(bids["winning-bid"].Ext), string(openrtb_ext.HbConstantCachePathKey)) assert.Contains(t, string(bids["winning-bid"].Ext), "/pbcache/endpoint") } func assertKeyExists(t *testing.T, bid *openrtb2.Bid, key string, expected bool) { t.Helper() targets := parseTargets(t, bid) if _, ok := targets[key]; ok != expected { t.Errorf("Bid %s has wrong key: %s. Expected? %t, Exists? %t", bid.ID, key, expected, ok) } } // runAuction takes a bunch of mock bids by Bidder and runs an auction. It returns a map of Bids indexed by their ImpID. // If includeCache is true, the auction will be run with cacheing as well, so the cache targeting keys should exist. 
func runTargetingAuction(t *testing.T, mockBids map[openrtb_ext.BidderName][]*openrtb2.Bid, includeCache bool, includeWinners bool, includeBidderKeys bool, isApp bool) map[string]*openrtb2.Bid { server := httptest.NewServer(http.HandlerFunc(mockServer)) defer server.Close() categoriesFetcher, error := newCategoryFetcher("./test/category-mapping") if error != nil { t.Errorf("Failed to create a category Fetcher: %v", error) } gdprPermsBuilder := fakePermissionsBuilder{ permissions: &permissionsMock{ allowAllBidders: true, }, }.Builder ex := &exchange{ adapterMap: buildAdapterMap(mockBids, server.URL, server.Client()), me: &metricsConfig.NilMetricsEngine{}, cache: &wellBehavedCache{}, cacheTime: time.Duration(0), gdprPermsBuilder: gdprPermsBuilder, currencyConverter: currency.NewRateConverter(&http.Client{}, "", time.Duration(0)), gdprDefaultValue: gdpr.SignalYes, categoriesFetcher: categoriesFetcher, bidIDGenerator: &mockBidIDGenerator{false, false}, } ex.requestSplitter = requestSplitter{ me: ex.me, gdprPermsBuilder: ex.gdprPermsBuilder, } imps := buildImps(t, mockBids) req := &openrtb2.BidRequest{ Imp: imps, Ext: buildTargetingExt(includeCache, includeWinners, includeBidderKeys), } if isApp { req.App = &openrtb2.App{} } else { req.Site = &openrtb2.Site{} } auctionRequest := &AuctionRequest{ BidRequestWrapper: &openrtb_ext.RequestWrapper{BidRequest: req}, Account: config.Account{}, UserSyncs: &emptyUsersync{}, HookExecutor: &hookexecution.EmptyHookExecutor{}, TCF2Config: gdpr.NewTCF2Config(config.TCF2{}, config.AccountGDPR{}), } debugLog := DebugLog{} bidResp, err := ex.HoldAuction(context.Background(), auctionRequest, &debugLog) if err != nil { t.Fatalf("Unexpected errors running auction: %v", err) } if len(bidResp.SeatBid) != len(mockBids) { t.Fatalf("Unexpected number of SeatBids. 
Expected %d, got %d", len(mockBids), len(bidResp.SeatBid)) } return buildBidMap(bidResp.SeatBid, len(mockBids)) } func buildAdapterMap(bids map[openrtb_ext.BidderName][]*openrtb2.Bid, mockServerURL string, client *http.Client) map[openrtb_ext.BidderName]AdaptedBidder { adapterMap := make(map[openrtb_ext.BidderName]AdaptedBidder, len(bids)) for bidder, bids := range bids { adapterMap[bidder] = AdaptBidder(&mockTargetingBidder{ mockServerURL: mockServerURL, bids: bids, }, client, &config.Configuration{}, &metricsConfig.NilMetricsEngine{}, openrtb_ext.BidderAppnexus, nil, "") } return adapterMap } func buildTargetingExt(includeCache bool, includeWinners bool, includeBidderKeys bool) json.RawMessage { var targeting string if includeWinners && includeBidderKeys { targeting = `{"pricegranularity":{"precision":2,"ranges": [{"min": 0,"max": 20,"increment": 0.1}]},"includewinners": true, "includebidderkeys": true}` } else if !includeWinners && includeBidderKeys { targeting = `{"precision":2,"includewinners": false}` } else if includeWinners && !includeBidderKeys { targeting = `{"precision":2,"includebidderkeys": false}` } else { targeting = `{"precision":2,"includewinners": false, "includebidderkeys": false}` } if includeCache { return json.RawMessage(`{"prebid":{"targeting":` + targeting + `,"cache":{"bids":{}}}}`) } return json.RawMessage(`{"prebid":{"targeting":` + targeting + `}}`) } func buildParams(t *testing.T, mockBids map[openrtb_ext.BidderName][]*openrtb2.Bid) json.RawMessage { params := make(map[string]interface{}) paramsPrebid := make(map[string]interface{}) paramsPrebidBidders := make(map[string]json.RawMessage) for bidder := range mockBids { paramsPrebidBidders[string(bidder)] = json.RawMessage(`{"whatever":true}`) } paramsPrebid["bidder"] = paramsPrebidBidders params["prebid"] = paramsPrebid ext, err := json.Marshal(params) if err != nil { t.Fatalf("Failed to make imp exts: %v", err) } return ext } func buildImps(t *testing.T, mockBids 
map[openrtb_ext.BidderName][]*openrtb2.Bid) []openrtb2.Imp { impExt := buildParams(t, mockBids) var s struct{} impIds := make(map[string]struct{}, 2*len(mockBids)) for _, bidList := range mockBids { for _, bid := range bidList { impIds[bid.ImpID] = s } } imps := make([]openrtb2.Imp, 0, len(impIds)) for impId := range impIds { imps = append(imps, openrtb2.Imp{ ID: impId, Ext: impExt, }) } return imps } func buildBidMap(seatBids []openrtb2.SeatBid, numBids int) map[string]*openrtb2.Bid { bids := make(map[string]*openrtb2.Bid, numBids) for _, seatBid := range seatBids { for i := 0; i < len(seatBid.Bid); i++ { bid := seatBid.Bid[i] bids[bid.ID] = &bid } } return bids } func parseTargets(t *testing.T, bid *openrtb2.Bid) map[string]string { t.Helper() var parsed openrtb_ext.ExtBid if err := json.Unmarshal(bid.Ext, &parsed); err != nil { t.Fatalf("Unexpected error parsing targeting params: %v", err) } return parsed.Prebid.Targeting } type mockTargetingBidder struct { mockServerURL string bids []*openrtb2.Bid } func (m *mockTargetingBidder) MakeRequests(request *openrtb2.BidRequest, reqInfo *adapters.ExtraRequestInfo) ([]*adapters.RequestData, []error) { return []*adapters.RequestData{{ Method: "POST", Uri: m.mockServerURL, Body: []byte(""), Headers: http.Header{}, }}, nil } func (m *mockTargetingBidder) MakeBids(internalRequest *openrtb2.BidRequest, externalRequest *adapters.RequestData, response *adapters.ResponseData) (*adapters.BidderResponse, []error) { bidResponse := &adapters.BidderResponse{ Bids: make([]*adapters.TypedBid, len(m.bids)), } for i := 0; i < len(m.bids); i++ { bidResponse.Bids[i] = &adapters.TypedBid{ Bid: m.bids[i], BidType: openrtb_ext.BidTypeBanner, } } return bidResponse, nil } func mockServer(w http.ResponseWriter, req *http.Request) { w.Write([]byte("{}")) } type TargetingTestData struct { Description string TargetData targetData Auction auction IsApp bool CategoryMapping map[string]string ExpectedPbsBids 
map[string]map[openrtb_ext.BidderName][]ExpectedPbsBid TruncateTargetAttr *int MultiBidMap map[string]openrtb_ext.ExtMultiBid DefaultBidLimit int } type ExpectedPbsBid struct { BidTargets map[string]string TargetBidderCode string } var bid123 *openrtb2.Bid = &openrtb2.Bid{ Price: 1.23, } var bid111 *openrtb2.Bid = &openrtb2.Bid{ Price: 1.11, DealID: "mydeal", } var bid084 *openrtb2.Bid = &openrtb2.Bid{ Price: 0.84, } var bid1p001 *openrtb2.Bid = &openrtb2.Bid{ Price: 0.01, } var bid1p077 *openrtb2.Bid = &openrtb2.Bid{ Price: 0.77, } var bid1p120 *openrtb2.Bid = &openrtb2.Bid{ Price: 1.20, } var bid2p123 *openrtb2.Bid = &openrtb2.Bid{ Price: 1.23, } var bid2p144 *openrtb2.Bid = &openrtb2.Bid{ Price: 1.44, } var bid2p155 *openrtb2.Bid = &openrtb2.Bid{ Price: 1.55, } var bid2p166 *openrtb2.Bid = &openrtb2.Bid{ Price: 1.66, } var ( truncateTargetAttrValue10 int = 10 truncateTargetAttrValue5 int = 5 truncateTargetAttrValue25 int = 25 truncateTargetAttrValueNegative int = -1 ) func lookupPriceGranularity(v string) openrtb_ext.PriceGranularity { priceGranularity, _ := openrtb_ext.NewPriceGranularityFromLegacyID(v) return priceGranularity } var TargetingTests []TargetingTestData = []TargetingTestData{ { Description: "Targeting winners only (most basic targeting example)", TargetData: targetData{ priceGranularity: lookupPriceGranularity("med"), includeWinners: true, }, Auction: auction{ winningBidsByBidder: map[string]map[openrtb_ext.BidderName][]*entities.PbsOrtbBid{ "ImpId-1": { openrtb_ext.BidderAppnexus: {{ Bid: bid123, BidType: openrtb_ext.BidTypeBanner, }}, openrtb_ext.BidderRubicon: {{ Bid: bid084, BidType: openrtb_ext.BidTypeBanner, }}, }, }, }, ExpectedPbsBids: map[string]map[openrtb_ext.BidderName][]ExpectedPbsBid{ "ImpId-1": { openrtb_ext.BidderAppnexus: []ExpectedPbsBid{ { BidTargets: map[string]string{ "hb_bidder": "appnexus", "hb_pb": "1.20", }, }, }, openrtb_ext.BidderRubicon: []ExpectedPbsBid{}, }, }, TruncateTargetAttr: nil, }, { Description: "Targeting on 
bidders only", TargetData: targetData{ priceGranularity: lookupPriceGranularity("med"), includeBidderKeys: true, }, Auction: auction{ winningBidsByBidder: map[string]map[openrtb_ext.BidderName][]*entities.PbsOrtbBid{ "ImpId-1": { openrtb_ext.BidderAppnexus: {{ Bid: bid123, BidType: openrtb_ext.BidTypeBanner, }}, openrtb_ext.BidderRubicon: {{ Bid: bid084, BidType: openrtb_ext.BidTypeBanner, }}, }, }, }, ExpectedPbsBids: map[string]map[openrtb_ext.BidderName][]ExpectedPbsBid{ "ImpId-1": { openrtb_ext.BidderAppnexus: []ExpectedPbsBid{ { BidTargets: map[string]string{ "hb_bidder_appnexus": "appnexus", "hb_pb_appnexus": "1.20", }, }, }, openrtb_ext.BidderRubicon: []ExpectedPbsBid{ { BidTargets: map[string]string{ "hb_bidder_rubicon": "rubicon", "hb_pb_rubicon": "0.80", }, }, }, }, }, TruncateTargetAttr: nil, }, { Description: "Full basic targeting with hd_format", TargetData: targetData{ priceGranularity: lookupPriceGranularity("med"), includeWinners: true, includeBidderKeys: true, includeFormat: true, }, Auction: auction{ winningBidsByBidder: map[string]map[openrtb_ext.BidderName][]*entities.PbsOrtbBid{ "ImpId-1": { openrtb_ext.BidderAppnexus: {{ Bid: bid123, BidType: openrtb_ext.BidTypeBanner, }}, openrtb_ext.BidderRubicon: {{ Bid: bid084, BidType: openrtb_ext.BidTypeBanner, }}, }, }, }, ExpectedPbsBids: map[string]map[openrtb_ext.BidderName][]ExpectedPbsBid{ "ImpId-1": { openrtb_ext.BidderAppnexus: []ExpectedPbsBid{ { BidTargets: map[string]string{ "hb_bidder": "appnexus", "hb_bidder_appnexus": "appnexus", "hb_pb": "1.20", "hb_pb_appnexus": "1.20", "hb_format": "banner", "hb_format_appnexus": "banner", }, }, }, openrtb_ext.BidderRubicon: []ExpectedPbsBid{ { BidTargets: map[string]string{ "hb_bidder_rubicon": "rubicon", "hb_pb_rubicon": "0.80", "hb_format_rubicon": "banner", }, }, }, }, }, TruncateTargetAttr: nil, }, { Description: "Cache and deal targeting test", TargetData: targetData{ priceGranularity: lookupPriceGranularity("med"), includeBidderKeys: true, 
cacheHost: "cache.prebid.com", cachePath: "cache", }, Auction: auction{ winningBidsByBidder: map[string]map[openrtb_ext.BidderName][]*entities.PbsOrtbBid{ "ImpId-1": { openrtb_ext.BidderAppnexus: {{ Bid: bid123, BidType: openrtb_ext.BidTypeBanner, }}, openrtb_ext.BidderRubicon: {{ Bid: bid111, BidType: openrtb_ext.BidTypeBanner, }}, }, }, cacheIds: map[*openrtb2.Bid]string{ bid123: "55555", bid111: "cacheme", }, }, ExpectedPbsBids: map[string]map[openrtb_ext.BidderName][]ExpectedPbsBid{ "ImpId-1": { openrtb_ext.BidderAppnexus: []ExpectedPbsBid{ { BidTargets: map[string]string{ "hb_bidder_appnexus": "appnexus", "hb_pb_appnexus": "1.20", "hb_cache_id_appnexus": "55555", "hb_cache_host_appnex": "cache.prebid.com", "hb_cache_path_appnex": "cache", }, }, }, openrtb_ext.BidderRubicon: []ExpectedPbsBid{ { BidTargets: map[string]string{ "hb_bidder_rubicon": "rubicon", "hb_pb_rubicon": "1.10", "hb_cache_id_rubicon": "cacheme", "hb_deal_rubicon": "mydeal", "hb_cache_host_rubico": "cache.prebid.com", "hb_cache_path_rubico": "cache", }, }, }, }, }, TruncateTargetAttr: nil, }, { Description: "bidder with no dealID should not have deal targeting", TargetData: targetData{ priceGranularity: lookupPriceGranularity("med"), includeBidderKeys: true, }, Auction: auction{ winningBidsByBidder: map[string]map[openrtb_ext.BidderName][]*entities.PbsOrtbBid{ "ImpId-1": { openrtb_ext.BidderAppnexus: {{ Bid: bid123, BidType: openrtb_ext.BidTypeBanner, }}, }, }, }, ExpectedPbsBids: map[string]map[openrtb_ext.BidderName][]ExpectedPbsBid{ "ImpId-1": { openrtb_ext.BidderAppnexus: []ExpectedPbsBid{ { BidTargets: map[string]string{ "hb_bidder_appnexus": "appnexus", "hb_pb_appnexus": "1.20", }, }, }, }, }, TruncateTargetAttr: nil, }, { Description: "Truncate Targeting Attribute value is given and is less than const MaxKeyLength", TargetData: targetData{ priceGranularity: lookupPriceGranularity("med"), includeBidderKeys: true, }, Auction: auction{ winningBidsByBidder: 
map[string]map[openrtb_ext.BidderName][]*entities.PbsOrtbBid{ "ImpId-1": { openrtb_ext.BidderAppnexus: {{ Bid: bid123, BidType: openrtb_ext.BidTypeBanner, }}, openrtb_ext.BidderRubicon: {{ Bid: bid084, BidType: openrtb_ext.BidTypeBanner, }}, }, }, }, ExpectedPbsBids: map[string]map[openrtb_ext.BidderName][]ExpectedPbsBid{ "ImpId-1": { openrtb_ext.BidderAppnexus: []ExpectedPbsBid{ { BidTargets: map[string]string{ "hb_bidder_": "appnexus", "hb_pb_appn": "1.20", }, }, }, openrtb_ext.BidderRubicon: []ExpectedPbsBid{ { BidTargets: map[string]string{ "hb_bidder_": "rubicon", "hb_pb_rubi": "0.80", }, }, }, }, }, TruncateTargetAttr: &truncateTargetAttrValue10, }, { Description: "Truncate Targeting Attribute value is given and is greater than const MaxKeyLength", TargetData: targetData{ priceGranularity: lookupPriceGranularity("med"), includeBidderKeys: true, }, Auction: auction{ winningBidsByBidder: map[string]map[openrtb_ext.BidderName][]*entities.PbsOrtbBid{ "ImpId-1": { openrtb_ext.BidderAppnexus: {{ Bid: bid123, BidType: openrtb_ext.BidTypeBanner, }}, openrtb_ext.BidderRubicon: {{ Bid: bid084, BidType: openrtb_ext.BidTypeBanner, }}, }, }, }, ExpectedPbsBids: map[string]map[openrtb_ext.BidderName][]ExpectedPbsBid{ "ImpId-1": { openrtb_ext.BidderAppnexus: []ExpectedPbsBid{ { BidTargets: map[string]string{ "hb_bidder_appnexus": "appnexus", "hb_pb_appnexus": "1.20", }, }, }, openrtb_ext.BidderRubicon: []ExpectedPbsBid{ { BidTargets: map[string]string{ "hb_bidder_rubicon": "rubicon", "hb_pb_rubicon": "0.80", }, }, }, }, }, TruncateTargetAttr: &truncateTargetAttrValue25, }, { Description: "Truncate Targeting Attribute value is given and is negative", TargetData: targetData{ priceGranularity: lookupPriceGranularity("med"), includeBidderKeys: true, }, Auction: auction{ winningBidsByBidder: map[string]map[openrtb_ext.BidderName][]*entities.PbsOrtbBid{ "ImpId-1": { openrtb_ext.BidderAppnexus: {{ Bid: bid123, BidType: openrtb_ext.BidTypeBanner, }}, openrtb_ext.BidderRubicon: {{ 
Bid: bid084, BidType: openrtb_ext.BidTypeBanner, }}, }, }, }, ExpectedPbsBids: map[string]map[openrtb_ext.BidderName][]ExpectedPbsBid{ "ImpId-1": { openrtb_ext.BidderAppnexus: []ExpectedPbsBid{ { BidTargets: map[string]string{ "hb_bidder_appnexus": "appnexus", "hb_pb_appnexus": "1.20", }, }, }, openrtb_ext.BidderRubicon: []ExpectedPbsBid{ { BidTargets: map[string]string{ "hb_bidder_rubicon": "rubicon", "hb_pb_rubicon": "0.80", }, }, }, }, }, TruncateTargetAttr: &truncateTargetAttrValueNegative, }, { Description: "Check that key gets truncated properly when value is smaller than key", TargetData: targetData{ priceGranularity: lookupPriceGranularity("med"), includeWinners: true, }, Auction: auction{ winningBidsByBidder: map[string]map[openrtb_ext.BidderName][]*entities.PbsOrtbBid{ "ImpId-1": { openrtb_ext.BidderAppnexus: {{ Bid: bid123, BidType: openrtb_ext.BidTypeBanner, }}, openrtb_ext.BidderRubicon: {{ Bid: bid084, BidType: openrtb_ext.BidTypeBanner, }}, }, }, }, ExpectedPbsBids: map[string]map[openrtb_ext.BidderName][]ExpectedPbsBid{ "ImpId-1": { openrtb_ext.BidderAppnexus: []ExpectedPbsBid{ { BidTargets: map[string]string{ "hb_bi": "appnexus", "hb_pb": "1.20", }, }, }, openrtb_ext.BidderRubicon: []ExpectedPbsBid{}, }, }, TruncateTargetAttr: &truncateTargetAttrValue5, }, { Description: "Check that key gets truncated properly when value is greater than key", TargetData: targetData{ priceGranularity: lookupPriceGranularity("med"), includeWinners: true, }, Auction: auction{ winningBidsByBidder: map[string]map[openrtb_ext.BidderName][]*entities.PbsOrtbBid{ "ImpId-1": { openrtb_ext.BidderAppnexus: {{ Bid: bid123, BidType: openrtb_ext.BidTypeBanner, }}, openrtb_ext.BidderRubicon: {{ Bid: bid084, BidType: openrtb_ext.BidTypeBanner, }}, }, }, }, ExpectedPbsBids: map[string]map[openrtb_ext.BidderName][]ExpectedPbsBid{ "ImpId-1": { openrtb_ext.BidderAppnexus: []ExpectedPbsBid{ { BidTargets: map[string]string{ "hb_bidder": "appnexus", "hb_pb": "1.20", }, }, }, 
openrtb_ext.BidderRubicon: []ExpectedPbsBid{}, }, }, TruncateTargetAttr: &truncateTargetAttrValue25, }, { Description: "Check that key gets truncated properly when value is negative", TargetData: targetData{ priceGranularity: lookupPriceGranularity("med"), includeWinners: true, }, Auction: auction{ winningBidsByBidder: map[string]map[openrtb_ext.BidderName][]*entities.PbsOrtbBid{ "ImpId-1": { openrtb_ext.BidderAppnexus: {{ Bid: bid123, BidType: openrtb_ext.BidTypeBanner, }}, openrtb_ext.BidderRubicon: {{ Bid: bid084, BidType: openrtb_ext.BidTypeBanner, }}, }, }, }, ExpectedPbsBids: map[string]map[openrtb_ext.BidderName][]ExpectedPbsBid{ "ImpId-1": { openrtb_ext.BidderAppnexus: []ExpectedPbsBid{ { BidTargets: map[string]string{ "hb_bidder": "appnexus", "hb_pb": "1.20", }, }, }, openrtb_ext.BidderRubicon: []ExpectedPbsBid{}, }, }, TruncateTargetAttr: &truncateTargetAttrValueNegative, }, { Description: "Full basic targeting with multibid", TargetData: targetData{ priceGranularity: lookupPriceGranularity("med"), includeWinners: true, includeBidderKeys: true, includeFormat: true, }, Auction: auction{ winningBidsByBidder: map[string]map[openrtb_ext.BidderName][]*entities.PbsOrtbBid{ "ImpId-1": { openrtb_ext.BidderAppnexus: { { Bid: bid1p120, BidType: openrtb_ext.BidTypeBanner, }, { Bid: bid1p077, BidType: openrtb_ext.BidTypeBanner, }, { Bid: bid1p001, BidType: openrtb_ext.BidTypeBanner, }, }, openrtb_ext.BidderRubicon: { { Bid: bid123, BidType: openrtb_ext.BidTypeBanner, }, { Bid: bid111, BidType: openrtb_ext.BidTypeBanner, }, { Bid: bid084, BidType: openrtb_ext.BidTypeBanner, }, }, }, "ImpId-2": { openrtb_ext.BidderPubmatic: { { Bid: bid2p166, BidType: openrtb_ext.BidTypeBanner, }, { Bid: bid2p155, BidType: openrtb_ext.BidTypeBanner, }, { Bid: bid2p144, BidType: openrtb_ext.BidTypeBanner, }, { Bid: bid2p123, BidType: openrtb_ext.BidTypeBanner, }, }, }, }, }, ExpectedPbsBids: map[string]map[openrtb_ext.BidderName][]ExpectedPbsBid{ "ImpId-1": { openrtb_ext.BidderAppnexus: 
[]ExpectedPbsBid{ { BidTargets: map[string]string{ "hb_bidder_appnexus": "appnexus", "hb_pb_appnexus": "1.10", "hb_format_appnexus": "banner", }, TargetBidderCode: "appnexus", }, {}, {}, }, openrtb_ext.BidderRubicon: []ExpectedPbsBid{ { BidTargets: map[string]string{ "hb_bidder": "rubicon", "hb_bidder_rubicon": "rubicon", "hb_pb": "1.20", "hb_pb_rubicon": "1.20", "hb_format": "banner", "hb_format_rubicon": "banner", }, }, {}, {}, }, }, "ImpId-2": { openrtb_ext.BidderPubmatic: []ExpectedPbsBid{ { BidTargets: map[string]string{ "hb_bidder": "pubmatic", "hb_bidder_pubmatic": "pubmatic", "hb_pb": "1.60", "hb_pb_pubmatic": "1.60", "hb_format": "banner", "hb_format_pubmatic": "banner", }, TargetBidderCode: "pubmatic", }, { BidTargets: map[string]string{ "hb_bidder_pm2": "pm2", "hb_pb_pm2": "1.50", "hb_format_pm2": "banner", }, TargetBidderCode: "pm2", }, { BidTargets: map[string]string{ "hb_bidder_pm3": "pm3", "hb_pb_pm3": "1.40", "hb_format_pm3": "banner", }, TargetBidderCode: "pm3", }, {}, }, }, }, TruncateTargetAttr: nil, MultiBidMap: map[string]openrtb_ext.ExtMultiBid{ string(openrtb_ext.BidderPubmatic): { MaxBids: ptrutil.ToPtr(3), TargetBidderCodePrefix: "pm", }, string(openrtb_ext.BidderAppnexus): { MaxBids: ptrutil.ToPtr(2), }, }, }, } func TestSetTargeting(t *testing.T) { for _, test := range TargetingTests { auc := &test.Auction // Set rounded prices from the auction data auc.setRoundedPrices(test.TargetData) winningBids := make(map[string]*entities.PbsOrtbBid) // Set winning bids from the auction data for imp, bidsByBidder := range auc.winningBidsByBidder { for _, bids := range bidsByBidder { for _, bid := range bids { if winningBid, ok := winningBids[imp]; ok { if winningBid.Bid.Price < bid.Bid.Price { winningBids[imp] = bid } } else { winningBids[imp] = bid } } } } auc.winningBids = winningBids targData := test.TargetData targData.setTargeting(auc, test.IsApp, test.CategoryMapping, test.TruncateTargetAttr, test.MultiBidMap) for imp, targetsByBidder := range 
test.ExpectedPbsBids { for bidder, expectedTargets := range targetsByBidder { for i, expected := range expectedTargets { assert.Equal(t, expected.BidTargets, auc.winningBidsByBidder[imp][bidder][i].BidTargets, "Test: %s\nTargeting failed for bidder %s on imp %s.", test.Description, string(bidder), imp) assert.Equal(t, expected.TargetBidderCode, auc.winningBidsByBidder[imp][bidder][i].TargetBidderCode) } } } } }
package pando import ( "bytes" goContext "context" "encoding/json" "fmt" "github.com/agiledragon/gomonkey/v2" "github.com/gin-gonic/gin" "github.com/kenlabs/pando/pkg/api/types" . "github.com/smartystreets/goconvey/convey" "io/ioutil" "net/http" "net/http/httptest" "reflect" "testing" ) func TestProviderRegister(t *testing.T) { Convey("TestProviderRegister", t, func() { responseRecorder := httptest.NewRecorder() testContext, _ := gin.CreateTestContext(responseRecorder) Convey("When controller.ProviderRegister return nil error, should return success resp", func() { patch := gomonkey.ApplyMethodFunc( reflect.TypeOf(mockAPI.controller), "ProviderRegister", func(_ goContext.Context, _ []byte) error { return nil }, ) defer patch.Reset() req, err := http.NewRequest("POST", "http://127.0.0.1", bytes.NewBufferString("test body")) testContext.Request = req if err != nil { t.Error(err) } mockAPI.providerRegister(testContext) respBody, err := ioutil.ReadAll(responseRecorder.Result().Body) if err != nil { t.Error(err) } var resp types.ResponseJson if err = json.Unmarshal(respBody, &resp); err != nil { t.Error(err) } So(resp.Code, ShouldEqual, http.StatusOK) So(resp.Message, ShouldEqual, "register success") }) Convey("When controller.ProviderRegister return an error, should return an error resp", func() { patch := gomonkey.ApplyMethodFunc( reflect.TypeOf(mockAPI.controller), "ProviderRegister", func(_ goContext.Context, _ []byte) error { return fmt.Errorf("monkey error") }, ) defer patch.Reset() req, err := http.NewRequest("POST", "http://127.0.0.1", bytes.NewBufferString("test body")) testContext.Request = req if err != nil { t.Error(err) } mockAPI.providerRegister(testContext) respBody, err := ioutil.ReadAll(responseRecorder.Result().Body) var resp types.ResponseJson if err = json.Unmarshal(respBody, &resp); err != nil { t.Error(err) } So(resp.Code, ShouldEqual, http.StatusBadRequest) So(resp.Message, ShouldEqual, "monkey error") }) }) }
package typrls import ( "fmt" "os" "strings" "github.com/typical-go/typical-go/pkg/typgo" ) type ( // CrossCompiler compile project to various platform CrossCompiler struct { Targets []Target MainPackage string } // Target of release contain "$GOOS/$GOARC" Target string ) // // Compile // var _ Releaser = (*CrossCompiler)(nil) // Release for compile func (o *CrossCompiler) Release(c *Context) error { defer os.Unsetenv("GOOS") defer os.Unsetenv("GOARC") for _, target := range o.Targets { goos := target.OS() goarch := target.Arch() output := fmt.Sprintf("%s/%s_%s_%s_%s", c.ReleaseFolder, c.Descriptor.ProjectName, c.TagName, goos, goarch) c.Infof("\nGOOS=%s GOARC=%s", goos, goarch) os.Setenv("GOOS", goos) os.Setenv("GOARC", goarch) err := c.ExecuteCommand(&typgo.GoBuild{ Output: output, MainPackage: o.getMainPackage(c), Ldflags: typgo.BuildVars{ "github.com/typical-go/typical-go/pkg/typgo.ProjectName": c.Descriptor.ProjectName, "github.com/typical-go/typical-go/pkg/typgo.ProjectVersion": c.TagName, }, }) if err != nil { return err } } return nil } func (o *CrossCompiler) getMainPackage(c *Context) string { if o.MainPackage == "" { o.MainPackage = fmt.Sprintf("./cmd/%s", c.Descriptor.ProjectName) } return o.MainPackage } // // OSTarget // // OS operating system func (t Target) OS() string { i := strings.Index(string(t), "/") if i < 0 { return "" } return string(t)[:i] } // Arch architecture func (t Target) Arch() string { i := strings.Index(string(t), "/") if i < 0 { return "" } return string(t)[i+1:] }
package actions import "log" import "cointhink/proto" import "cointhink/model/algorun" import "cointhink/model/token" import gproto "github.com/golang/protobuf/proto" func DoLambdaResponse(_lambda_response *proto.LambdaResponse, _token *proto.Token) []gproto.Message { var responses []gproto.Message log.Printf("LambdaResponse %s %s ", _token.AlgorunId, _lambda_response.StateOut) _ltoken, err := token.FindByToken(_lambda_response.Token) if err != nil { log.Printf("dolambdaresponse token %#v err %#v", _lambda_response.Token, err) } else { _algorun, err := algorun.Find(_ltoken.AlgorunId) if err != nil { log.Printf("dolambdaresponse algorun err %#v", err) } else { algorun.UpdateState(_algorun, _lambda_response.StateOut) } } return responses }
package main

import (
	"fmt"
	"io"
	"net/http"
	"os"
)

// Download fetches url over HTTP and stores the response body at downloadPath.
// It returns an error for transport failures, non-200 responses, or file errors.
func Download(url string, downloadPath string) error {
	resp, err := http.Get(url)
	if err != nil {
		return err
	}
	defer resp.Body.Close()
	// A transport-level success can still be an HTTP-level failure (404, 500,
	// ...); without this check an error page would be written to disk.
	if resp.StatusCode != http.StatusOK {
		return fmt.Errorf("unexpected status %s fetching %s", resp.Status, url)
	}
	// O_TRUNC: if the file already exists and is longer than the new body,
	// stale trailing bytes must not survive the re-download.
	f, err := os.OpenFile(downloadPath, os.O_RDWR|os.O_CREATE|os.O_TRUNC, 0755)
	if err != nil {
		return err
	}
	defer f.Close()
	return Write(resp, f)
}

// Write streams the response body into f and reports the byte count on stdout.
func Write(resp *http.Response, f *os.File) error {
	n, err := io.Copy(f, resp.Body)
	if err != nil {
		return err
	}
	fmt.Printf("%v written bytes", n)
	return nil
}

func main() {
	err := Download("https://tinyurl.com/sxynmjp", "./goTenthAnniversary.png")
	if err != nil {
		fmt.Println(err)
	}
}
package main

import (
	"bytes"
	"encoding/base64"
	"encoding/json"
	"fmt"
	"io/ioutil"
	"log"
	"math/rand"
	"net/http"
	"net/url"
	"strings"
	"time"

	"github.com/niklasfasching/goheadless"
	"github.com/niklasfasching/telegram"
)

// config is the on-disk JSON configuration for the bot and its HTTP server.
type config struct {
	TelegramToken string
	ServerAddress string
	BrowserPort   int
}

func main() {
	config, err := readConfig("config.json")
	if err != nil {
		log.Fatal(err)
	}
	// Run the Telegram bot and the template HTTP server concurrently; exit
	// when either fails.
	errs := make(chan error)
	go func() { errs <- startBot(config) }()
	go func() { errs <- startServer(config) }()
	log.Fatal(<-errs)
}

// readConfig loads and unmarshals the JSON config at path.
func readConfig(path string) (c config, err error) {
	bs, err := ioutil.ReadFile(path)
	if err != nil {
		return c, err
	}
	return c, json.Unmarshal(bs, &c)
}

type ReplyMarkup struct {
	InlineKeyboard InlineKeyboard `json:"inline_keyboard"`
}

type InlineKeyboard [][]InlineButton

type InlineButton struct {
	Text         string `json:"text"`
	CallbackData string `json:"callback_data"`
}

// CallbackQuery is the subset of Telegram's callback_query payload we need.
type CallbackQuery struct {
	Data    string `json:"data"`
	Message struct {
		Chat struct {
			ID int `json:"id"`
		} `json:"chat"`
		ID          int         `json:"message_id"`
		ReplyMarkup ReplyMarkup `json:"reply_markup"`
	} `json:"message"`
}

// startBot runs a headless browser and the Telegram bot: incoming messages are
// rendered as quote images; inline-button callbacks re-render with another
// template and edit the original photo in place.
func startBot(c config) error {
	browser := &goheadless.Browser{
		Executable: "chromium-browser",
		Port:       fmt.Sprintf("%d", c.BrowserPort),
	}
	if err := browser.Start(); err != nil {
		return err
	}
	defer browser.Stop()
	bot := telegram.Connection{
		Token: c.TelegramToken,
		Debug: true,
	}
	bot.Handle("callback_query", func(q CallbackQuery) error {
		// CallbackData is "<template> :: <quote>".
		parts := strings.SplitN(q.Data, " :: ", 2)
		template, quote := parts[0], parts[1]
		// Named photoURL rather than url so the net/url package isn't shadowed.
		photoURL := fmt.Sprintf("http://%s/template/%s.html?quote=%s", c.ServerAddress, template, url.QueryEscape(quote))
		log.Println(photoURL, parts)
		bs, err := screenshot(browser, photoURL)
		if err != nil {
			return err
		}
		return bot.Call("editMessageMedia", map[string]interface{}{
			"chat_id":    q.Message.Chat.ID,
			"message_id": q.Message.ID,
			"media": map[string]interface{}{
				"type":  "photo",
				"media": "attach://img",
			},
			"reply_markup": replyMarkup(template, quote),
			"img":          bytes.NewReader(bs),
		}, nil)
	})
	bot.Handle("message", func(m telegram.Message) error {
		template, quote := "big", m.Text
		photoURL := fmt.Sprintf("http://%s/template/%s.html?quote=%s", c.ServerAddress, template, url.QueryEscape(quote))
		bs, err := screenshot(browser, photoURL)
		if err != nil {
			return err
		}
		return bot.Call("sendPhoto", map[string]interface{}{
			"chat_id":      m.Chat.ID,
			"photo":        bytes.NewReader(bs),
			"reply_markup": replyMarkup(template, quote),
		}, nil)
	})
	return bot.Start()
}

// replyMarkup builds the inline keyboard offering the alternative templates.
func replyMarkup(template, quote string) ReplyMarkup {
	return ReplyMarkup{
		InlineKeyboard{{
			InlineButton{"Basic", fmt.Sprintf("%s :: %s", "basic", quote)},
			InlineButton{"Big", fmt.Sprintf("%s :: %s", "big", quote)},
		}},
	}
}

// startServer serves the static template assets and a random-clipart redirect.
func startServer(c config) error {
	cliparts, err := ioutil.ReadDir("public/clipart")
	if err != nil {
		return err
	}
	mux := &http.ServeMux{}
	mux.Handle("/", http.FileServer(http.Dir("public")))
	mux.Handle("/clipart/random.png", http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
		// FIX: rand.Intn(len(cliparts)-1) could never pick the last file
		// (Intn's bound is exclusive). Intn(len(cliparts)) covers all entries.
		http.Redirect(w, r, cliparts[rand.Intn(len(cliparts))].Name(), http.StatusFound)
		// NOTE(review): the redirect target is the bare file name, not
		// "/clipart/<name>" — confirm relative redirects resolve as intended.
	}))
	server := &http.Server{
		Addr:    c.ServerAddress,
		Handler: mux,
	}
	return server.ListenAndServe()
}

// screenshot opens url in the headless browser at 800x600, waits briefly for
// rendering, and returns the captured screenshot as PNG bytes.
func screenshot(browser *goheadless.Browser, url string) ([]byte, error) {
	page, err := browser.OpenPage()
	if err != nil {
		return nil, err
	}
	defer page.Close()
	err = page.Execute("Page.setDeviceMetricsOverride", map[string]interface{}{
		"width":             800,
		"height":            600,
		"deviceScaleFactor": 1,
		"mobile":            false,
	}, nil)
	if err != nil {
		return nil, err
	}
	err = page.Execute("Page.navigate", map[string]interface{}{
		"url":     url,
		"frameId": page.ID,
	}, nil, goheadless.StringEvent("Page.frameStoppedLoading"))
	if err != nil {
		return nil, err
	}
	time.Sleep(500 * time.Millisecond) // TODO: wait for js - could also poll for element exits
	m := map[string]string{}
	if err := page.Execute("Page.captureScreenshot", nil, &m); err != nil {
		return nil, err
	}
	return base64.StdEncoding.DecodeString(m["data"])
}
// Copyright 2021 The ChromiumOS Authors
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

package perfutil

// UnstableModels is the list of the models which appear to be not stable on
// the UI performance tests.
// NOTE(review): presumably consulted by the perf tests to skip or soften
// expectations on these boards — confirm at the call sites.
var UnstableModels = []string{
	"esche",
	"willow",
	"lava",
	"vilboz360",
	"woomax",
}
package main

import (
	"fmt"
	"time"

	"math/rand"
)

// Board dimensions for Conway's Game of Life.
const height = 20
const width = 40

// field holds the current generation; newField is the scratch buffer the next
// generation is written into before prepareNext promotes it.
var field = [height][width]int{}
var newField = [height][width]int{}

func main() {
	rand.Seed(time.Now().UTC().UnixNano())
	initField()
	// Render, pause, then advance — forever.
	for {
		clearScreen()
		dumpField()
		time.Sleep(100 * time.Millisecond)
		evolve()
		prepareNext()
	}
}

// initField seeds every cell randomly dead (0) or alive (1).
func initField() {
	for y := range field {
		for x := range field[y] {
			field[y][x] = rand.Intn(2)
		}
	}
}

// evolve computes the next generation into newField using Conway's rules:
// exactly three live neighbours births a cell, two preserves it, anything
// else kills it.
func evolve() {
	for y := 0; y < height; y++ {
		for x := 0; x < width; x++ {
			n := countAliveNeighbours(y, x)
			if n == 3 {
				newField[y][x] = 1
			} else if n == 2 {
				newField[y][x] = field[y][x]
			} else {
				newField[y][x] = 0
			}
		}
	}
}

// countAliveNeighbours counts the live cells among the eight neighbours of
// (y, x), wrapping around the board edges (toroidal topology).
func countAliveNeighbours(y int, x int) int {
	alive := 0
	for dy := -1; dy <= 1; dy++ {
		for dx := -1; dx <= 1; dx++ {
			if dy == 0 && dx == 0 {
				continue // the cell itself is not a neighbour
			}
			if field[mod(y+dy, height)][mod(x+dx, width)] == 1 {
				alive++
			}
		}
	}
	return alive
}

// mod returns a modulo b, kept non-negative for a >= -b (sufficient for the
// ±1 offsets used here).
func mod(a int, b int) int {
	return (a + b) % b
}

// prepareNext promotes the scratch buffer to the current generation.
// Arrays are values in Go, so this copies the whole grid.
func prepareNext() {
	field = newField
}

// dumpField prints the board, 'o' for live cells and space for dead ones.
func dumpField() {
	for y := 0; y < height; y++ {
		line := make([]byte, 0, width)
		for x := 0; x < width; x++ {
			if field[y][x] == 1 {
				line = append(line, 'o')
			} else {
				line = append(line, ' ')
			}
		}
		fmt.Print(string(line) + "\n")
	}
}

// clearScreen homes the cursor and clears the terminal via ANSI escapes.
func clearScreen() {
	fmt.Print("\033[;H\033[2J")
}
package main

import (
	"encoding/json"
	"fmt"
	"net/http"

	m "github.com/keighl/mandrill"
)

// Message is the JSON payload accepted by the send endpoint. Token is the
// Mandrill API key; ToEmail ("email") is the recipient.
type Message struct {
	Token     string `json:"token"`
	ToEmail   string `json:"email"`
	Type      string `json:"type"`
	FromEmail string `json:"from_email"`
	FromName  string `json:"from_name"`
	Subject   string `json:"subject"`
	HTML      string `json:"html"`
	Text      string `json:"text"`
}

func main() {
	//TODO: use port defined on env values or specify at static url, example: http://pigeon-mandrill.wisegrowth.io
	http.ListenAndServe(":5152", http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
		var msg Message
		if err := json.NewDecoder(r.Body).Decode(&msg); err != nil {
			// TODO use an error that better explains the problem
			http.Error(w, "bad request", http.StatusBadRequest)
			fmt.Println(err)
			return
		}
		r.Body.Close()
		client := m.ClientWithKey(msg.Token)
		email := &m.Message{}
		// FIX: the recipient must be the "email" field (ToEmail); the original
		// sent the mail back to the sender and never used ToEmail at all.
		email.AddRecipient(msg.ToEmail, msg.FromName, msg.Type)
		email.FromEmail = msg.FromEmail
		email.FromName = msg.FromName
		email.Subject = msg.Subject
		email.HTML = msg.HTML
		email.Text = msg.Text
		response, err := client.MessagesSend(email)
		if err != nil {
			// TODO use an error that better explains the problem
			http.Error(w, "forbidden", http.StatusForbidden)
			fmt.Println(err)
			// FIX: without this return the handler logged "message sent" and
			// wrote a 200 after already writing the 403.
			return
		}
		fmt.Println("message sent")
		fmt.Println(response)
		w.WriteHeader(200)
	}))
}
/*
 * Copyright IBM Corporation 2021
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package qaengine

import (
	"testing"

	"github.com/google/go-cmp/cmp"
	"github.com/konveyor/move2kube/internal/common"
	"github.com/sirupsen/logrus"
)

// TestCacheEngine verifies the cache-backed QA engine: each subtest resets the
// package-global engine list, loads the answer cache from testdata, and checks
// that the Fetch*Answer helper for one problem type returns the cached answer
// rather than the supplied default.
func TestCacheEngine(t *testing.T) {
	logrus.SetLevel(logrus.DebugLevel)
	qaTestPath := "testdata/qaenginetest.yaml"
	// tmpTestPath := "/tmp/qatest.yaml"

	t.Run("input type problem", func(t *testing.T) {
		// Reset global engine state so subtests don't interfere.
		engines = []Engine{}
		e := NewStoreEngineFromCache(qaTestPath)
		AddEngine(e)
		// SetWriteConfig(tmpTestPath)
		key := common.BaseKey + common.Delim + "input"
		desc := "Enter the container registry username : "
		context := []string{"Enter username for container registry login"}
		def := ""
		// "testuser" comes from the cached answers in qaenginetest.yaml.
		want := "testuser"
		answer := FetchStringAnswer(key, desc, context, def)
		if answer != want {
			t.Fatalf("Fetched answer was different from the default one. Fetched answer: %s, expected answer: %s ", answer, want)
		}
	})

	t.Run("select type problem", func(t *testing.T) {
		engines = []Engine{}
		e := NewStoreEngineFromCache(qaTestPath)
		AddEngine(e)
		// SetWriteConfig(tmpTestPath)
		key := common.BaseKey + common.Delim + "select"
		desc := "What type of container registry login do you want to use?"
		context := []string{"Docker login from config mode, will use the default config from your local machine."}
		def := "No authentication"
		opts := []string{"Use existing pull secret", "No authentication", "UserName/Password"}
		// The cached answer differs from the default, proving the cache is used.
		want := "UserName/Password"
		answer := FetchSelectAnswer(key, desc, context, def, opts)
		if answer != want {
			t.Fatalf("Fetched answer was different from the default one. Fetched answer: %s, expected answer: %s ", answer, want)
		}
	})

	t.Run("multi-line input type problem", func(t *testing.T) {
		engines = []Engine{}
		e := NewStoreEngineFromCache(qaTestPath)
		AddEngine(e)
		// SetWriteConfig(tmpTestPath)
		key := common.BaseKey + common.Delim + "multline"
		desc := "Multiline input problem test description : "
		context := []string{"Multiline input problem test context."}
		// The raw-string literal preserves the embedded newlines of the
		// cached multi-line answer.
		cachedAnswer := `line1
line2
line3
`
		answer := FetchMultilineAnswer(key, desc, context, "")
		if answer != cachedAnswer {
			t.Fatalf("Fetched answer was different from the default one. Fetched answer: %s, expected answer: %s ", answer, cachedAnswer)
		}
	})

	t.Run("confirm type problem", func(t *testing.T) {
		engines = []Engine{}
		e := NewStoreEngineFromCache(qaTestPath)
		AddEngine(e)
		// SetWriteConfig(tmpTestPath)
		key := common.BaseKey + common.Delim + "confirm"
		desc := "Confirm problem test description : "
		context := []string{"Confirm input problem test context."}
		def := true
		want := true
		answer := FetchBoolAnswer(key, desc, context, def)
		if answer != want {
			t.Fatalf("Fetched answer was different from the default one. Fetched answer: %v, expected answer: %v ", answer, want)
		}
	})

	t.Run("multi-select type problem", func(t *testing.T) {
		engines = []Engine{}
		e := NewStoreEngineFromCache(qaTestPath)
		AddEngine(e)
		// SetWriteConfig(tmpTestPath)
		key := common.BaseKey + common.Delim + "multiselect"
		desc := "MultiSelect input problem test description : "
		context := []string{"MultiSelect input problem test context"}
		def := []string{"Option A", "Option C"}
		opts := []string{"Option A", "Option B", "Option C", "Option D"}
		answer := FetchMultiSelectAnswer(key, desc, context, def, opts)
		// cmp.Equal is needed for the slice comparison here.
		if !cmp.Equal(answer, def) {
			t.Fatalf("Fetched answer was different from the default one. Fetched answer: %s, expected answer: %s ", answer, def)
		}
	})
}
package jindo import ( "github.com/fluid-cloudnative/fluid/pkg/utils" datasetSchedule "github.com/fluid-cloudnative/fluid/pkg/utils/dataset/lifecycle" ) func (e *JindoEngine) AssignNodesToCache(desiredNum int32) (currentScheduleNum int32, err error) { runtimeInfo, err := e.getRuntimeInfo() if err != nil { return currentScheduleNum, err } dataset, err := utils.GetDataset(e.Client, e.name, e.namespace) e.Log.Info("AssignNodesToCache", "dataset", dataset) if err != nil { return } return datasetSchedule.AssignDatasetToNodes(runtimeInfo, dataset, e.Client, desiredNum) }
package log

import (
	"bytes"
	"testing"

	"github.com/rs/zerolog"
	"github.com/stretchr/testify/require"
	"github.com/tendermint/tendermint/types/time"

	"github.com/cosmos/cosmos-sdk/server"
	ethlog "github.com/ethereum/go-ethereum/log"
)

// Key names used by go-ethereum's log records.
const (
	timeKey = "t"
	lvlKey  = "lvl"
	msgKey  = "msg"
	ctxKey  = "ctx"
)

// TestLog checks that an eth log record routed through the handler lands in
// the zerolog output with the mapped message and level (Crit -> fatal).
func TestLog(t *testing.T) {
	out := &bytes.Buffer{}
	logger := &server.ZeroLogWrapper{
		Logger: zerolog.New(out).Level(zerolog.DebugLevel).With().Timestamp().Logger(),
	}
	h := NewHandler(logger)
	err := h.Log(&ethlog.Record{
		Time: time.Now().UTC(),
		Lvl:  ethlog.LvlCrit,
		Msg:  "critical error",
		KeyNames: ethlog.RecordKeyNames{
			Time: timeKey,
			Msg:  msgKey,
			Lvl:  lvlKey,
			Ctx:  ctxKey,
		},
	})
	require.NoError(t, err)
	// Idiom: out.String() instead of string(out.Bytes()).
	require.Contains(t, out.String(), "\"message\":\"critical error\"")
	require.Contains(t, out.String(), "\"level\":\"fatal\"")
}

// TestOverrideRootLogger checks that installing the handler on eth's root
// logger redirects its output into the wrapped zerolog logger.
func TestOverrideRootLogger(t *testing.T) {
	out := &bytes.Buffer{}
	logger := &server.ZeroLogWrapper{
		Logger: zerolog.New(out).Level(zerolog.DebugLevel).With().Timestamp().Logger(),
	}
	h := NewHandler(logger)
	ethlog.Root().SetHandler(h)
	ethlog.Root().Info("some info")
	require.Contains(t, out.String(), "\"message\":\"some info\"")
	require.Contains(t, out.String(), "\"level\":\"info\"")
}
// Package gitwatch provides a simple tool to first clone a set of git // repositories to a local directory and then periodically check them all for // any updates. package gitwatch import ( "context" "fmt" "io" "net/url" "path/filepath" "strings" "time" "github.com/pkg/errors" "golang.org/x/xerrors" "gopkg.in/src-d/go-git.v4" "gopkg.in/src-d/go-git.v4/plumbing" "gopkg.in/src-d/go-git.v4/plumbing/transport" ) // Repository represents a Git repository address and branch name type Repository struct { URL string // local or remote repository URL to watch Branch string // the name of the branch to use `master` being default } // Session represents a git watch session configuration type Session struct { Repositories []Repository // list of local or remote repository URLs to watch Interval time.Duration // the interval between remote checks Directory string // the directory to store repositories Auth transport.AuthMethod // authentication method for git operations InitialEvent bool // if true, an event for each repo will be emitted upon construction InitialDone chan struct{} // if InitialEvent true, this is pushed to after initial setup done Events chan Event // when a change is detected, events are pushed here Errors chan error // when an error occurs, errors come here instead of halting the loop ctx context.Context cf context.CancelFunc } // Event represents an update detected on one of the watched repositories type Event struct { URL string Path string Timestamp time.Time } // New constructs a new git watch session on the given repositories func New( ctx context.Context, repos []string, interval time.Duration, dir string, auth transport.AuthMethod, initialEvent bool, ) (session *Session, err error) { ctx2, cf := context.WithCancel(ctx) repoList := MakeRepositoryList(repos) session = &Session{ Repositories: repoList, Interval: interval, Directory: dir, Auth: auth, Events: make(chan Event, len(repos)), Errors: make(chan error, 16), InitialEvent: initialEvent, 
InitialDone: make(chan struct{}, 1), ctx: ctx2, cf: cf, } return } // Run begins the watcher and blocks until an error occurs func (s *Session) Run() (err error) { return s.daemon() } // Close gracefully shuts down the git watcher func (s *Session) Close() { s.cf() } func (s *Session) daemon() (err error) { t := time.NewTicker(s.Interval) // a function to select over the session's context and the ticker to check // repositories. f := func() (err error) { select { case <-s.ctx.Done(): err = s.ctx.Err() case <-t.C: err = s.checkRepos() if err != nil { if xerrors.Is(err, io.EOF) { return nil } s.Errors <- err return nil } } return } // before starting the daemon process loop, perform an initial check against // all targets. If the targets do not exist, they will be cloned and events // will be emitted for them. if s.InitialEvent { err = s.checkRepos() if err != nil { return } s.InitialDone <- struct{}{} } for { err = f() if err != nil { return } } } // checkRepos simply iterates all repositories and collects events from them, if // there are any, they will be emitted to the Events channel concurrently. func (s *Session) checkRepos() (err error) { for _, repository := range s.Repositories { var event *Event event, err = s.checkRepo(repository) if err != nil { return } if event != nil { go func() { s.Events <- *event }() } } return } // checkRepo checks a specific git repository that may or may not exist locally // and if there are changes or the repository had to be cloned fresh (and // InitialEvents is true) then an event is returned. 
// checkRepo opens the local clone of repository (cloning it first if absent)
// and returns an event if new changes were pulled. A nil event with nil error
// means "no changes".
func (s *Session) checkRepo(repository Repository) (event *Event, err error) {
	localPath, err := GetRepoPath(s.Directory, repository.URL)
	if err != nil {
		err = errors.Wrap(err, "failed to get path from repo url")
		return
	}
	repo, err := git.PlainOpen(localPath)
	if err != nil {
		// Any open error other than "not exists" is fatal for this repo.
		if err != git.ErrRepositoryNotExists {
			err = errors.Wrap(err, "failed to open local repo")
			return
		}
		// Repo missing locally: clone it fresh.
		return s.cloneRepo(repository, localPath)
	}
	return s.GetEventFromRepoChanges(repo, repository.Branch)
}

// cloneRepo clones the specified repository to the session's cache and, if
// InitialEvent is true, emits an event for the newly cloned repo.
func (s *Session) cloneRepo(repository Repository, localPath string) (event *Event, err error) {
	repo, err := git.PlainCloneContext(s.ctx, localPath, false, &git.CloneOptions{
		Auth: s.Auth,
		URL:  repository.URL,
		// Check out the configured branch rather than the remote default.
		ReferenceName: plumbing.ReferenceName(
			fmt.Sprintf("refs/heads/%s", repository.Branch),
		),
	})
	if err != nil {
		err = errors.Wrap(err, "failed to clone initial copy of repository")
		return
	}
	if s.InitialEvent {
		event, err = GetEventFromRepo(repo)
	}
	return
}

// GetEventFromRepoChanges reads a locally cloned git repository an returns an
// event only if an attempted fetch resulted in new changes in the working tree.
func (s *Session) GetEventFromRepoChanges(repo *git.Repository, branch string) (event *Event, err error) {
	wt, err := repo.Worktree()
	if err != nil {
		return nil, errors.Wrap(err, "failed to get worktree")
	}
	err = wt.Pull(&git.PullOptions{
		Auth: s.Auth,
		ReferenceName: plumbing.ReferenceName(
			fmt.Sprintf("refs/heads/%s", branch),
		),
	})
	if err != nil {
		// Already-up-to-date is the "no changes" signal, not a failure.
		if err == git.NoErrAlreadyUpToDate {
			return nil, nil
		}
		return nil, errors.Wrap(err, "failed to pull local repo")
	}
	return GetEventFromRepo(repo)
}

// GetEventFromRepo reads a locally cloned git repository and returns an event
// based on the most recent commit.
func GetEventFromRepo(repo *git.Repository) (event *Event, err error) { wt, err := repo.Worktree() if err != nil { return nil, errors.Wrap(err, "failed to get worktree") } remote, err := repo.Remote("origin") if err != nil { return } ref, err := repo.Head() if err != nil { return } c, err := repo.CommitObject(ref.Hash()) if err != nil { return } return &Event{ URL: remote.Config().URLs[0], Path: wt.Filesystem.Root(), Timestamp: c.Author.When, }, nil } // GetRepoPath returns the local path of a cached repo from the given cache, the // base component of the repo path is used as a directory name for the target // repository. func GetRepoPath(cache, repo string) (result string, err error) { path := strings.Split(repo, ":") i := 0 if len(path) == 2 { i = 1 } u, err := url.Parse(path[i]) if err != nil { return } return filepath.Join(cache, filepath.Base(u.Path)), nil } // MakeRepositoryList Creates a repository list from an array of // strings, while also checking is the string contains a special // character which can be used to get the branch to use func MakeRepositoryList(repos []string) []Repository { result := make([]Repository, len(repos)) for i, repo := range repos { url := repo branch := "master" if strings.Contains(repo, "#") { path := strings.Split(repo, "#") url = path[0] if len(path[1]) > 0 { branch = path[1] } } result[i] = Repository{ URL: url, Branch: branch, } } return result }
package twitterscraper

import (
	"context"
	"testing"
)

// TestFetchSearchCursor pages through search results manually, checking that
// each page carries a non-empty cursor until enough tweets are collected.
// NOTE(review): hits the live Twitter API — network-dependent and flaky offline.
func TestFetchSearchCursor(t *testing.T) {
	scraper := New()
	maxTweetsNbr := 150
	tweetsNbr := 0
	nextCursor := ""
	for tweetsNbr < maxTweetsNbr {
		tweets, cursor, err := scraper.FetchSearchTweets("twitter", maxTweetsNbr, nextCursor)
		if err != nil {
			t.Fatal(err)
		}
		if cursor == "" {
			t.Fatal("Expected search cursor is not empty")
		}
		tweetsNbr += len(tweets)
		nextCursor = cursor
	}
}

// TestGetSearchTweets consumes the streaming search channel and validates each
// tweet: non-empty ID (with duplicate detection), permanent URL, text, and —
// because the query excludes retweets — IsRetweet false. Finally it checks the
// requested count was delivered exactly.
func TestGetSearchTweets(t *testing.T) {
	count := 0
	maxTweetsNbr := 250
	// dupcheck tracks seen tweet IDs to detect duplicates across pages.
	dupcheck := make(map[string]bool)
	for tweet := range SearchTweets(context.Background(), "twitter -filter:retweets", maxTweetsNbr) {
		if tweet.Error != nil {
			t.Error(tweet.Error)
		} else {
			count++
			if tweet.ID == "" {
				t.Error("Expected tweet ID is not empty")
			} else {
				if dupcheck[tweet.ID] {
					t.Errorf("Detect duplicated tweet ID: %s", tweet.ID)
				} else {
					dupcheck[tweet.ID] = true
				}
			}
			if tweet.PermanentURL == "" {
				t.Error("Expected tweet PermanentURL is not empty")
			}
			if tweet.IsRetweet {
				t.Error("Expected tweet IsRetweet is false")
			}
			if tweet.Text == "" {
				t.Error("Expected tweet Text is not empty")
			}
		}
	}
	if count != maxTweetsNbr {
		t.Errorf("Expected tweets count=%v, got: %v", maxTweetsNbr, count)
	}
}
package lo

import (
	"fmt"
	"testing"

	"github.com/stretchr/testify/assert"
)

// Test_Map checks element-wise transformation of a slice.
func Test_Map(t *testing.T) {
	sourceSlice := []int{1, 2, 3}
	targetSlice := Map(sourceSlice, func(item int) int { return item * 2 })
	assert.Equal(t, []int{2, 4, 6}, targetSlice, "should map the slice")
}

// Test_Reduce checks folding a slice into a single accumulator value.
func Test_Reduce(t *testing.T) {
	collection := []int{1, 2, 3}
	result := Reduce(collection, func(accumulated int, item int) int { return accumulated + item }, 0)
	assert.Equal(t, 6, result, "should reduce the slice")
}

// Test_Filter checks keeping only elements matching a predicate.
func Test_Filter(t *testing.T) {
	collection := []int{1, 2, 3}
	result := Filter(collection, func(item int) bool { return item%2 == 0 })
	assert.Equal(t, []int{2}, result, "should filter the slice")
}

// Test_KeyBy checks building a map keyed by a derived value per element.
func Test_KeyBy(t *testing.T) {
	collection := []int{10, 20, 30}
	result := KeyBy(collection, func(item int) int { return item / 10 })
	assert.Equal(t, map[int]int{1: 10, 2: 20, 3: 30}, result, "should key the slice")
}

// Test_FilterByValue checks filtering a map by its values.
func Test_FilterByValue(t *testing.T) {
	collection := map[int]int{1: 10, 2: 20, 3: 30}
	result := FilterByValue(collection, func(item int) bool { return item%20 == 0 })
	assert.Equal(t, map[int]int{2: 20}, result, "should filter the slice")
}

// Test_Keys checks extraction of map keys.
// Contains (not Equal) is used because map iteration order is unspecified.
func Test_Keys(t *testing.T) {
	collection := map[int]int{1: 10, 2: 20, 3: 30}
	result := Keys(collection)
	assert.Contains(t, result, 1, "should get the keys")
	assert.Contains(t, result, 2, "should get the keys")
	assert.Contains(t, result, 3, "should get the keys")
}

// Test_Values checks extraction of map values (order-independent).
func Test_Values(t *testing.T) {
	collection := map[int]int{1: 10, 2: 20, 3: 30}
	result := Values(collection)
	assert.Contains(t, result, 10, "should get the values")
	assert.Contains(t, result, 20, "should get the values")
	assert.Contains(t, result, 30, "should get the values")
}

// Test_ForEach checks in-order side-effecting iteration.
func Test_ForEach(t *testing.T) {
	collection := []int{1, 2, 3}
	result := []int{}
	ForEach(collection, func(item int) { result = append(result, item) })
	assert.Equal(t, []int{1, 2, 3}, result, "should iterate over the slice")
}

// Test_ReduceProperty checks reducing over a projected property of each
// element: here the first element of every inner slice (1 + 10 + 100).
func Test_ReduceProperty(t *testing.T) {
	collection := [][]int{{1, 2, 3},
		{10, 20, 30},
		{100, 200, 300}}
	result := ReduceProperty(collection,
		func(item []int) int { return item[0] },
		func(accumulated int, item int) int { return accumulated + item },
		0)
	assert.Equal(t, 111, result, "should reduce the slice")
}

// Test_Bind checks partial application of the first argument.
func Test_Bind(t *testing.T) {
	f := func(param1, param2 int) int { return param1 * param2 }
	boundF := Bind(10, f)
	assert.Equal(t, 200, boundF(20), "should correcly add 10")
}

// Test_PanicOnErr checks that PanicOnErr panics on a non-nil error and
// passes the value through untouched otherwise.
func Test_PanicOnErr(t *testing.T) {
	fPanic := func() (int, error) { return 0, fmt.Errorf("error") }
	fPanicCall := func() { PanicOnErr(fPanic()) }
	assert.Panics(t, fPanicCall, "should panic on error")
	fNoPanic := func() (int, error) { return 1, nil }
	fNoPanicCall := func() {
		val := PanicOnErr(fNoPanic())
		assert.Equal(t, 1, val, "should return correct value")
	}
	assert.NotPanics(t, fNoPanicCall, "should not panic without error")
}

// Test_Max checks the variadic maximum for ints and floats, including the
// zero-value result for an empty argument list.
func Test_Max(t *testing.T) {
	maxValueInt := Max(10, 1, 154, 61, 51, 65, 16, 51, 6, 516, 1, 65, -465, -465, -1, 0)
	assert.Equal(t, 516, maxValueInt, "should correctly select maximum value")
	maxValueFloat := Max(1.0, -1.6, -1.5, -1.4, -1.3, -1.2, -1.1, -1.0, 1.5, 1.4, 1.3, 1.2, 1.1, 1.0)
	assert.Equal(t, 1.5, maxValueFloat, "should correctly select maximum value")
	defaultIntValue := Max([]int{}...)
	assert.Equal(t, 0, defaultIntValue, "should return default int value")
}

// Test_Min mirrors Test_Max for the minimum.
func Test_Min(t *testing.T) {
	maxValueInt := Min(10, 1, 154, 61, 51, 65, 16, 51, 6, 516, 1, 65, -465, -465, -1, 0)
	assert.Equal(t, -465, maxValueInt, "should correctly select minimum value")
	maxValueFloat := Min(1.0, -1.6, -1.5, -1.4, -1.3, -1.2, -1.1, -1.0, 1.5, 1.4, 1.3, 1.2, 1.1, 1.0)
	assert.Equal(t, -1.6, maxValueFloat, "should correctly select minimum value")
	defaultIntValue := Min([]int{}...)
	assert.Equal(t, 0, defaultIntValue, "should return default int value")
}

// Test_Sum checks the variadic sum (1..10 == 55).
func Test_Sum(t *testing.T) {
	maxValueInt := Sum(1, 2, 3, 4, 5, 6, 7, 8, 9, 10)
	assert.Equal(t, 55, maxValueInt, "should correctly sum values")
}
package notification_test

import (
	"strconv"
	"sync"
	"sync/atomic"
	"testing"

	"github.com/herb-go/notification"
)

// testStore is an in-memory notification.Store used by the tests below.
// All methods that touch data take the mutex.
type testStore struct {
	locker sync.Mutex
	data   []*notification.Notification
}

func (d *testStore) Open() error {
	return nil
}

func (d *testStore) Close() error {
	return nil
}

// Save inserts the notification, or replaces an existing entry with the same ID.
func (d *testStore) Save(notification *notification.Notification) error {
	d.locker.Lock()
	defer d.locker.Unlock()
	for k := range d.data {
		if d.data[k].ID == notification.ID {
			d.data[k] = notification
			return nil
		}
	}
	d.data = append(d.data, notification)
	return nil
}

// List pages over the stored notifications. iter is the (stringified) index
// the previous page stopped at; asc selects direction; count limits the page
// size (0 = unlimited). Only the batch condition is supported.
func (d *testStore) List(condition []*notification.Condition, iter string, asc bool, count int) (result []*notification.Notification, newiter string, err error) {
	d.locker.Lock()
	defer d.locker.Unlock()
	var start int
	var step int
	var end int
	var batch = ""
	for _, v := range condition {
		if v.Keyword == notification.ConditionBatch {
			batch = v.Value
		} else {
			return nil, "", notification.NewErrConditionNotSupported(v.Keyword)
		}
	}
	result = []*notification.Notification{}
	// FIX: an empty store previously indexed d.data[0] and panicked.
	if len(d.data) == 0 {
		return result, "", nil
	}
	if asc {
		start = 0
		step = 1
		end = len(d.data)
	} else {
		start = len(d.data) - 1
		step = -1
		end = -1
	}
	var i = start
	iterpos, _ := strconv.Atoi(iter)
	for {
		// Skip entries at or before the previous iterator position.
		var skiped bool
		if iter != "" {
			if asc {
				if iterpos >= i {
					skiped = true
				}
			} else {
				if iterpos <= i {
					skiped = true
				}
			}
		}
		if !skiped {
			data := d.data[i]
			if batch != "" {
				if data.Header.Get(notification.HeaderNameBatch) == batch {
					result = append(result, data)
				}
			} else {
				result = append(result, data)
			}
			// A full page: hand back the position as the next iterator.
			if count > 0 && len(result) == count {
				return result, strconv.Itoa(i), nil
			}
		}
		i = i + step
		if i == end {
			break
		}
	}
	// Exhausted: empty iterator signals the end of pagination.
	return result, "", nil
}

// Count returns the number of stored notifications matching the (optional)
// batch condition. Only the batch condition is supported.
func (d *testStore) Count(condition []*notification.Condition) (int, error) {
	d.locker.Lock()
	defer d.locker.Unlock()
	var batch = ""
	for _, v := range condition {
		if v.Keyword == notification.ConditionBatch {
			batch = v.Value
		} else {
			return 0, notification.NewErrConditionNotSupported(v.Keyword)
		}
	}
	var count int
	for k := range d.data {
		if batch == "" {
			count = count + 1
		} else {
			if d.data[k].Header.Get(notification.HeaderNameBatch) == batch {
				count = count + 1
			}
		}
	}
	return count, nil
}

func (d *testStore) SupportedConditions() ([]string, error) {
	return []string{notification.ConditionBatch}, nil
}

// Eject removes and returns the notification with the given ID.
func (d *testStore) Eject(id string) (*notification.Notification, error) {
	// FIX: lock like every other data-touching method.
	d.locker.Lock()
	defer d.locker.Unlock()
	for k := range d.data {
		if d.data[k].ID == id {
			n := d.data[k]
			// FIX: was append(d.data[:k], d.data[k:]...), which rebuilt the
			// identical slice and never removed the element.
			d.data = append(d.data[:k], d.data[k+1:]...)
			return n, nil
		}
	}
	return nil, notification.NewErrNotificationIDNotFound(id)
}

func newTestStore() *testStore {
	return &testStore{}
}

// current backs mustID's monotonically increasing ID sequence.
var current int64

// mustID returns the next sequential ID as a string ("1", "2", ...).
func mustID() string {
	c := atomic.AddInt64(&current, 1)
	return strconv.FormatInt(c, 10)
}

// TestCondition exercises Count/List pagination and the batch condition
// against three saved notifications (two sharing batch "12345").
func TestCondition(t *testing.T) {
	var store = newTestStore()
	var n *notification.Notification
	n = notification.New()
	n.Header.Set(notification.HeaderNameDraftMode, "1")
	n.ID = mustID()
	err := store.Save(n)
	if err != nil {
		t.Fatal(err)
	}
	n = notification.New()
	n.Header.Set(notification.HeaderNameDraftMode, "1")
	n.Header.Set(notification.HeaderNameBatch, "12345")
	// NOTE(review): this notification is saved without an ID — presumably
	// intentional (Save still appends it); confirm.
	err = store.Save(n)
	if err != nil {
		t.Fatal(err)
	}
	n = notification.New()
	n.Header.Set(notification.HeaderNameDraftMode, "1")
	n.Header.Set(notification.HeaderNameBatch, "12345")
	n.ID = mustID()
	err = store.Save(n)
	if err != nil {
		t.Fatal(err)
	}
	count, err := store.Count(nil)
	if count != 3 || err != nil {
		t.Fatal(count, err)
	}
	result, iter, err := store.List(nil, "", true, 0)
	if len(result) != 3 || iter != "" || err != nil {
		t.Fatal(result, iter, err)
	}
	result, iter, err = store.List(nil, "", true, 2)
	if len(result) != 2 || iter != "1" || err != nil {
		t.Fatal(result, iter, err)
	}
	result, iter, err = store.List(nil, "", false, 1)
	if len(result) != 1 || iter != "2" || err != nil {
		t.Fatal(result, iter, err)
	}
	result, iter, err = store.List(nil, "2", false, 1)
	if len(result) != 1 || iter != "1" || err != nil {
		t.Fatal(result, iter, err)
	}
	result, iter, err = store.List(nil, "1", false, 1)
	if len(result) != 1 || iter != "0" || err != nil {
		t.Fatal(result, iter, err)
	}
	result, iter, err = store.List(nil, "0", false, 1)
	if len(result) != 0 || iter != "" || err != nil {
		t.Fatal(result, iter, err)
	}
	cond := []*notification.Condition{&notification.Condition{
		Keyword: notification.ConditionBatch,
		Value:   "12345",
	}}
	count, err = store.Count(cond)
	if count != 2 || err != nil {
		t.Fatal(count, err)
	}
	result, iter, err = store.List(cond, "", true, 0)
	if len(result) != 2 || iter != "" || err != nil {
		t.Fatal(result, iter, err)
	}
	cond = []*notification.Condition{&notification.Condition{
		Keyword: notification.ConditionBatch,
		Value:   "notfound",
	}}
	count, err = store.Count(cond)
	if count != 0 || err != nil {
		t.Fatal(count, err)
	}
	result, iter, err = store.List(cond, "", true, 0)
	if len(result) != 0 || iter != "" || err != nil {
		t.Fatal(result, iter, err)
	}
	cond = []*notification.Condition{&notification.Condition{
		Keyword: "notfound",
		Value:   "notfound",
	}}
	count, err = store.Count(cond)
	if !notification.IsErrConditionNotSupported(err) {
		t.Fatal(result, iter, err)
	}
	result, iter, err = store.List(cond, "", true, 0)
	if !notification.IsErrConditionNotSupported(err) {
		t.Fatal(result, iter, err)
	}
}

// TestNopStore verifies that every NopStore operation except Open/Close
// reports ErrStoreFeatureNotSupported.
func TestNopStore(t *testing.T) {
	var err error
	d := &notification.NopStore{}
	err = d.Open()
	if err != nil {
		t.Fatal(err)
	}
	err = d.Close()
	if err != nil {
		t.Fatal(err)
	}
	_, err = d.Count(nil)
	if err != notification.ErrStoreFeatureNotSupported {
		t.Fatal(err)
	}
	_, _, err = d.List(nil, "", true, 0)
	if err != notification.ErrStoreFeatureNotSupported {
		t.Fatal(err)
	}
	err = d.Save(notification.New())
	if err != notification.ErrStoreFeatureNotSupported {
		t.Fatal(err)
	}
	_, err = d.Remove("notexsit")
	if err != notification.ErrStoreFeatureNotSupported {
		t.Fatal(err)
	}
	_, err = d.SupportedConditions()
	if err != notification.ErrStoreFeatureNotSupported {
		t.Fatal(err)
	}
}
// Copyright © 2016 Jason Gardner <buhrietoe@gmail.com>
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
//     http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
//
// Brood is a backup and restore utility for ceph clusters
package main

import (
	"log"

	"github.com/Buhrietoe/brood/server"
	"github.com/Buhrietoe/brood/server/config"
	"github.com/docopt/docopt-go"
)

const version = "brood 0.1.0"

// usage is the docopt grammar; docopt derives both the CLI parser and the
// args map keys ("server", "--config", ...) from this text.
const usage = `
Usage:
  brood server
  brood --config <config> server
  brood --help
  brood --version

Options:
  --config <config>  The brood config [default: /etc/brood/brood.toml].
  --help             Show this screen.
  --version          Show version.
`

// main entrypoint
func main() {
	// Parse args
	args, argsErr := docopt.Parse(usage, nil, true, version, false)
	if argsErr != nil {
		log.Fatalln(argsErr)
	}

	log.SetPrefix("[brood] ")
	log.Println("starting brood")

	// Server command specified
	if args["server"].(bool) {
		// Load config; "--config" always has a value thanks to the
		// [default: ...] in the usage text.
		configFile := args["--config"].(string)
		config := config.Load(configFile)

		// Build and run web server
		serv := server.BuildServer()
		log.Printf("listening on %v\n", config.Server.ListenString)
		if serverError := serv.Run(config.Server.ListenString); serverError != nil {
			log.Printf("server error: %v\n", serverError)
		}
	}
}
package gormbatchputs_test import "github.com/theplant/gormbatchputs" import "github.com/jinzhu/gorm" /* #### A list of rows, Put to multiple databases */ func ExampleNew_1ToManyDBs() { db := openAndMigrate() bputs := gormbatchputs.New().Rows([]*Country{ { Code: "CHN", ShortName: "China", }, { Code: "US", ShortName: "America", }, }) dbs := []*gorm.DB{db, db, db} for _, d := range dbs { bputs.WithDB(d).Put() } } /* #### A database, Put many rows many times to different tables */ func ExampleNew_2OneDbPutManyTimes() { db := openAndMigrate() bputs := gormbatchputs.New().WithDB(db) err := bputs.Rows([]*Country{ { Code: "CHN", ShortName: "China", Population: 13e8, }, { Code: "US", ShortName: "America", Population: 5e8, }, }).Put() if err != nil { panic(err) } err = bputs.Rows([]*City{ { Code: "HZ", Name: "Hangzhou", Population: 8e6, }, { Code: "SH", Name: "Shanghai", Population: 1e8, }, }).Put() if err != nil { panic(err) } } /* #### A database, Put only the same columns to different tables */ func ExampleNew_2OnlyCertainColumnsToDifferentTables() { countries := []*Country{ { Code: "CHN", ShortName: "China", Population: 13e8, }, { Code: "US", ShortName: "America", Population: 5e8, }, } cities := []*City{ { Code: "HZ", Name: "Hangzhou", Population: 8e6, }, { Code: "SH", Name: "Shanghai", Population: 1e8, }, } db := openAndMigrate() bputs := gormbatchputs.New().WithDB(db).OnlyColumns("code", "population") err := bputs.Rows(countries).Put() if err != nil { panic(err) } err = bputs.Rows(cities).Put() if err != nil { panic(err) } }
package geo

import (
	"testing"

	"github.com/paulmach/orb"
)

// TestLength exercises Length against every known geometry kind to make sure
// no type — supported or not — causes a panic.
func TestLength(t *testing.T) {
	for i := range orb.AllGeometries {
		// should not panic with unsupported type
		Length(orb.AllGeometries[i])
	}
}

// TestLengthHaversine performs the same panic-safety sweep for the
// haversine-based length computation.
func TestLengthHaversine(t *testing.T) {
	for i := range orb.AllGeometries {
		// should not panic with unsupported type
		LengthHaversine(orb.AllGeometries[i])
	}
}
/* Copyright 2019 Adobe All Rights Reserved. NOTICE: Adobe permits you to use, modify, and distribute this file in accordance with the terms of the Adobe license agreement accompanying it. If you have received this file from a source other than Adobe, then your use, modification, or distribution of it requires the prior written permission of Adobe. */ package maker import ( "net/url" ) // ResolveTemplateURL URL func ResolveTemplateURL(template string) string { u, err := url.Parse(template) if err != nil { return template } if u.Scheme == "" { u.Scheme = "https" } if u.Host == "" { u.Host = "github.com" } return u.String() }
package main

import (
	"fmt"
	"io"
	"log"
	"net/http"
	"os"

	"github.com/go-redis/redis"
)

// main connects to the Redis instance named by REDIS_URL, performs a
// round-trip set/get sanity check, then serves HTTP on the PORT env var.
func main() {
	options, parseErr := redis.ParseURL(os.Getenv("REDIS_URL"))
	if parseErr != nil {
		log.Fatal(parseErr)
	}
	client := redis.NewClient(options)

	// Smoke-test the connection: write a value and read it back.
	if setErr := client.Set("key", "value", 0).Err(); setErr != nil {
		log.Fatal(setErr)
	}
	value, getErr := client.Get("key").Result()
	if getErr != nil {
		log.Fatal(getErr)
	}
	fmt.Println("key", value)

	http.HandleFunc("/", handler)
	log.Fatal(http.ListenAndServe(":"+os.Getenv("PORT"), nil))
}

// handler replies to every request with a fixed greeting.
func handler(w http.ResponseWriter, r *http.Request) {
	io.WriteString(w, "Hello Heroku!")
}
package eedid var pnpLookup = map[string]PNPID{ "TTL": PNPID{ ID: "TTL", Company: "2-TEL B.V", Date: "03/20/1999", }, "BUT": PNPID{ ID: "BUT", Company: "21ST CENTURY ENTERTAINMENT", Date: "04/25/2002", }, "TCM": PNPID{ ID: "TCM", Company: "3COM CORPORATION", Date: "11/29/1996", }, "TDP": PNPID{ ID: "TDP", Company: "3D PERCEPTION", Date: "05/16/2002", }, "VSD": PNPID{ ID: "VSD", Company: "3M", Date: "10/16/1998", }, "NOD": PNPID{ ID: "NOD", Company: "3NOD DIGITAL TECHNOLOGY CO. LTD.", Date: "12/11/2014", }, "NGS": PNPID{ ID: "NGS", Company: "A D S EXPORTS", Date: "07/16/1998", }, "API": PNPID{ ID: "API", Company: "A PLUS INFO CORPORATION", Date: "11/29/1996", }, "ACG": PNPID{ ID: "ACG", Company: "A&R CAMBRIDGE LTD.", Date: "06/13/2007", }, "APV": PNPID{ ID: "APV", Company: "A+V LINK", Date: "01/27/2010", }, "AVX": PNPID{ ID: "AVX", Company: "A/VAUX ELECTRONICS", Date: "08/29/2012", }, "AAN": PNPID{ ID: "AAN", Company: "AAEON TECHNOLOGY INC.", Date: "09/01/2016", }, "TRU": PNPID{ ID: "TRU", Company: "AASHIMA TECHNOLOGY B.V.", Date: "05/08/1998", }, "AAM": PNPID{ ID: "AAM", Company: "AAVA MOBILE OY", Date: "08/13/2013", }, "GEH": PNPID{ ID: "GEH", Company: "ABACO SYSTEMS, INC.", Date: "09/03/2010", }, "ABS": PNPID{ ID: "ABS", Company: "ABACO SYSTEMS, INC.", Date: "04/27/2016", }, "ABA": PNPID{ ID: "ABA", Company: "ABBAHOME INC.", Date: "11/08/1999", }, "MEG": PNPID{ ID: "MEG", Company: "ABEAM TECH LTD.", Date: "11/29/1996", }, "ATC": PNPID{ ID: "ATC", Company: "ABLY-TECH CORPORATION", Date: "11/29/1996", }, "ABC": PNPID{ ID: "ABC", Company: "ABOCOM SYSTEM INC.", Date: "03/28/1997", }, "WTC": PNPID{ ID: "WTC", Company: "ACC MICROELECTRONICS", Date: "11/29/1996", }, "AWC": PNPID{ ID: "AWC", Company: "ACCESS WORKS COMM INC", Date: "11/29/1996", }, "PKA": PNPID{ ID: "PKA", Company: "ACCO UK LTD.", Date: "05/12/2003", }, "ACC": PNPID{ ID: "ACC", Company: "ACCTON TECHNOLOGY CORPORATION", Date: "11/29/1996", }, "ACU": PNPID{ ID: "ACU", Company: "ACCULOGIC", Date: 
"11/29/1996", }, "ASL": PNPID{ ID: "ASL", Company: "ACCUSCENE CORPORATION LTD", Date: "06/13/2007", }, "ANT": PNPID{ ID: "ANT", Company: "ACE CAD ENTERPRISE COMPANY LTD", Date: "11/29/1996", }, "CHE": PNPID{ ID: "CHE", Company: "ACER INC", Date: "11/29/1996", }, "ALI": PNPID{ ID: "ALI", Company: "ACER LABS", Date: "11/29/1996", }, "ANX": PNPID{ ID: "ANX", Company: "ACER NETXUS INC", Date: "11/29/1996", }, "ACR": PNPID{ ID: "ACR", Company: "ACER TECHNOLOGIES", Date: "11/29/1996", }, "ACK": PNPID{ ID: "ACK", Company: "ACKSYS", Date: "11/29/1996", }, "ADC": PNPID{ ID: "ADC", Company: "ACNHOR DATACOMM", Date: "11/29/1996", }, "CAL": PNPID{ ID: "CAL", Company: "ACON", Date: "11/29/1996", }, "ALK": PNPID{ ID: "ALK", Company: "ACROLINK INC", Date: "03/12/1997", }, "ACM": PNPID{ ID: "ACM", Company: "ACROLOOP MOTION CONTROL SYSTEMS INC", Date: "03/26/1998", }, "LAB": PNPID{ ID: "LAB", Company: "ACT LABS LTD", Date: "09/02/1997", }, "ACE": PNPID{ ID: "ACE", Company: "ACTEK ENGINEERING PTY LTD", Date: "11/29/1996", }, "AEI": PNPID{ ID: "AEI", Company: "ACTIONTEC ELECTRIC INC", Date: "11/29/1996", }, "ACV": PNPID{ ID: "ACV", Company: "ACTIVCARD S.A", Date: "05/08/1998", }, "ACB": PNPID{ ID: "ACB", Company: "ACULAB LTD", Date: "11/29/1996", }, "ALM": PNPID{ ID: "ALM", Company: "ACUTEC LTD.", Date: "11/08/1999", }, "GLE": PNPID{ ID: "GLE", Company: "AD ELECTRONICS", Date: "04/19/2000", }, "ADM": PNPID{ ID: "ADM", Company: "AD LIB MULTIMEDIA INC", Date: "04/23/1998", }, "ADP": PNPID{ ID: "ADP", Company: "ADAPTEC INC", Date: "11/29/1996", }, "ADX": PNPID{ ID: "ADX", Company: "ADAX INC", Date: "11/29/1996", }, "RSH": PNPID{ ID: "RSH", Company: "ADC-CENTRE", Date: "11/08/1999", }, "AVE": PNPID{ ID: "AVE", Company: "ADD VALUE ENTERPISES (ASIA) PTE LTD", Date: "01/10/1999", }, "ADZ": PNPID{ ID: "ADZ", Company: "ADDER TECHNOLOGY LTD", Date: "03/30/2016", }, "ADA": PNPID{ ID: "ADA", Company: "ADDI-DATA GMBH", Date: "11/29/1996", }, "ADI": PNPID{ ID: "ADI", Company: "ADI SYSTEMS INC", 
Date: "11/29/1996", }, "DPM": PNPID{ ID: "DPM", Company: "ADPM SYNTHESIS SAS", Date: "08/10/2000", }, "AXB": PNPID{ ID: "AXB", Company: "ADRIENNE ELECTRONICS CORPORATION", Date: "10/07/1997", }, "ADT": PNPID{ ID: "ADT", Company: "ADTEK", Date: "11/29/1996", }, "ADK": PNPID{ ID: "ADK", Company: "ADTEK SYSTEM SCIENCE COMPANY LTD", Date: "11/29/1996", }, "FLE": PNPID{ ID: "FLE", Company: "ADTI MEDIA, INC", Date: "09/15/2009", }, "AND": PNPID{ ID: "AND", Company: "ADTRAN INC", Date: "11/29/1996", }, "AGM": PNPID{ ID: "AGM", Company: "ADVAN INT'L CORPORATION", Date: "05/26/1998", }, "AVN": PNPID{ ID: "AVN", Company: "ADVANCE COMPUTER CORPORATION", Date: "06/10/2010", }, "MSM": PNPID{ ID: "MSM", Company: "ADVANCED DIGITAL SYSTEMS", Date: "11/29/1996", }, "AED": PNPID{ ID: "AED", Company: "ADVANCED ELECTRONIC DESIGNS, INC.", Date: "07/12/2004", }, "RJS": PNPID{ ID: "RJS", Company: "ADVANCED ENGINEERING", Date: "06/25/1998", }, "GRV": PNPID{ ID: "GRV", Company: "ADVANCED GRAVIS", Date: "11/29/1996", }, "AIR": PNPID{ ID: "AIR", Company: "ADVANCED INTEG. 
RESEARCH INC", Date: "11/29/1996", }, "ALR": PNPID{ ID: "ALR", Company: "ADVANCED LOGIC", Date: "11/29/1996", }, "ADV": PNPID{ ID: "ADV", Company: "ADVANCED MICRO DEVICES INC", Date: "11/29/1996", }, "EVE": PNPID{ ID: "EVE", Company: "ADVANCED MICRO PERIPHERALS LTD", Date: "11/18/2011", }, "AOE": PNPID{ ID: "AOE", Company: "ADVANCED OPTICS ELECTRONICS, INC.", Date: "04/20/2004", }, "ADD": PNPID{ ID: "ADD", Company: "ADVANCED PERIPHERAL DEVICES INC", Date: "11/29/1996", }, "ABV": PNPID{ ID: "ABV", Company: "ADVANCED RESEARCH TECHNOLOGY", Date: "01/16/1997", }, "PSA": PNPID{ ID: "PSA", Company: "ADVANCED SIGNAL PROCESSING TECHNOLOGIES", Date: "09/13/1999", }, "AHC": PNPID{ ID: "AHC", Company: "ADVANTECH CO., LTD.", Date: "06/13/2007", }, "ADH": PNPID{ ID: "ADH", Company: "AERODATA HOLDINGS LTD", Date: "11/11/1997", }, "AEP": PNPID{ ID: "AEP", Company: "AETAS PERIPHERAL INTERNATIONAL", Date: "11/08/1999", }, "AET": PNPID{ ID: "AET", Company: "AETHRA TELECOMUNICAZIONI S.R.L.", Date: "12/13/1996", }, "CHS": PNPID{ ID: "CHS", Company: "AGENTUR CHAIROS", Date: "03/15/2001", }, "AGT": PNPID{ ID: "AGT", Company: "AGILENT TECHNOLOGIES", Date: "10/08/2001", }, "ASI": PNPID{ ID: "ASI", Company: "AHEAD SYSTEMS", Date: "11/29/1996", }, "AIM": PNPID{ ID: "AIM", Company: "AIMS LAB INC", Date: "03/13/1998", }, "AYR": PNPID{ ID: "AYR", Company: "AIRLIB, INC", Date: "02/21/2000", }, "AWL": PNPID{ ID: "AWL", Company: "AIRONET WIRELESS COMMUNICATIONS, INC", Date: "08/11/1998", }, "AIW": PNPID{ ID: "AIW", Company: "AIWA COMPANY LTD", Date: "11/29/1996", }, "AJA": PNPID{ ID: "AJA", Company: "AJA VIDEO SYSTEMS, INC.", Date: "10/11/2007", }, "AKE": PNPID{ ID: "AKE", Company: "AKAMI ELECTRIC CO.,LTD", Date: "09/03/2010", }, "AKB": PNPID{ ID: "AKB", Company: "AKEBIA LTD", Date: "11/29/1996", }, "AKI": PNPID{ ID: "AKI", Company: "AKIA CORPORATION", Date: "12/23/1998", }, "ALH": PNPID{ ID: "ALH", Company: "AL SYSTEMS", Date: "01/20/1999", }, "ALA": PNPID{ ID: "ALA", Company: "ALACRON INC", 
Date: "11/29/1996", }, "ALN": PNPID{ ID: "ALN", Company: "ALANA TECHNOLOGIES", Date: "01/13/2000", }, "AOT": PNPID{ ID: "AOT", Company: "ALCATEL", Date: "11/06/2001", }, "ABE": PNPID{ ID: "ABE", Company: "ALCATEL BELL", Date: "11/29/1996", }, "ADB": PNPID{ ID: "ADB", Company: "ALDEBBARON", Date: "03/15/2001", }, "ALE": PNPID{ ID: "ALE", Company: "ALENCO BV", Date: "05/20/2014", }, "ALX": PNPID{ ID: "ALX", Company: "ALEXON CO.,LTD.", Date: "09/13/1999", }, "AFA": PNPID{ ID: "AFA", Company: "ALFA INC", Date: "11/29/1996", }, "ALO": PNPID{ ID: "ALO", Company: "ALGOLITH INC.", Date: "05/02/2005", }, "AGO": PNPID{ ID: "AGO", Company: "ALGOLTEK, INC.", Date: "10/23/2013", }, "AIS": PNPID{ ID: "AIS", Company: "ALIEN INTERNET SERVICES", Date: "06/21/2001", }, "ABD": PNPID{ ID: "ABD", Company: "ALLEN BRADLEY COMPANY", Date: "11/29/1996", }, "ALL": PNPID{ ID: "ALL", Company: "ALLIANCE SEMICONDUCTOR CORPORATION", Date: "11/29/1996", }, "ATI": PNPID{ ID: "ATI", Company: "ALLIED TELESIS KK", Date: "11/29/1996", }, "ATK": PNPID{ ID: "ATK", Company: "ALLIED TELESYN INT'L", Date: "11/29/1996", }, "ATA": PNPID{ ID: "ATA", Company: "ALLIED TELESYN INTERNATIONAL (ASIA) PTE LTD", Date: "11/10/1997", }, "ACO": PNPID{ ID: "ACO", Company: "ALLION COMPUTER INC.", Date: "10/23/2000", }, "XAD": PNPID{ ID: "XAD", Company: "ALPHA DATA", Date: "10/08/2009", }, "AEJ": PNPID{ ID: "AEJ", Company: "ALPHA ELECTRONICS COMPANY", Date: "11/29/1996", }, "ATD": PNPID{ ID: "ATD", Company: "ALPHA TELECOM INC", Date: "09/26/1997", }, "ATP": PNPID{ ID: "ATP", Company: "ALPHA-TOP CORPORATION", Date: "12/04/1996", }, "ALV": PNPID{ ID: "ALV", Company: "ALPHAVIEW LCD", Date: "11/01/2008", }, "APE": PNPID{ ID: "APE", Company: "ALPINE ELECTRONICS, INC.", Date: "01/22/2013", }, "ALP": PNPID{ ID: "ALP", Company: "ALPS ELECTRIC COMPANY LTD", Date: "11/29/1996", }, "AUI": PNPID{ ID: "AUI", Company: "ALPS ELECTRIC INC", Date: "11/29/1996", }, "ARC": PNPID{ ID: "ARC", Company: "ALTA RESEARCH CORPORATION", Date: 
"11/29/1996", }, "ALC": PNPID{ ID: "ALC", Company: "ALTEC CORPORATION", Date: "08/04/1998", }, "ALJ": PNPID{ ID: "ALJ", Company: "ALTEC LANSING", Date: "01/13/2000", }, "AIX": PNPID{ ID: "AIX", Company: "ALTINEX, INC.", Date: "04/24/2001", }, "AIE": PNPID{ ID: "AIE", Company: "ALTMANN INDUSTRIEELEKTRONIK", Date: "11/29/1996", }, "ACS": PNPID{ ID: "ACS", Company: "ALTOS COMPUTER SYSTEMS", Date: "11/29/1996", }, "AIL": PNPID{ ID: "AIL", Company: "ALTOS INDIA LTD", Date: "11/29/1996", }, "ALT": PNPID{ ID: "ALT", Company: "ALTRA", Date: "11/29/1996", }, "CNC": PNPID{ ID: "CNC", Company: "ALVEDON COMPUTERS LTD", Date: "11/06/1998", }, "AMB": PNPID{ ID: "AMB", Company: "AMBIENT TECHNOLOGIES, INC.", Date: "05/16/1999", }, "AMD": PNPID{ ID: "AMD", Company: "AMDEK CORPORATION", Date: "11/29/1996", }, "AOL": PNPID{ ID: "AOL", Company: "AMERICA ONLINE", Date: "11/29/1996", }, "YOW": PNPID{ ID: "YOW", Company: "AMERICAN BIOMETRIC COMPANY", Date: "05/16/1999", }, "AXP": PNPID{ ID: "AXP", Company: "AMERICAN EXPRESS", Date: "07/16/1999", }, "AXI": PNPID{ ID: "AXI", Company: "AMERICAN MAGNETICS", Date: "03/15/2001", }, "AMI": PNPID{ ID: "AMI", Company: "AMERICAN MEGATRENDS INC", Date: "11/29/1996", }, "MCA": PNPID{ ID: "MCA", Company: "AMERICAN NUCLEAR SYSTEMS INC", Date: "02/12/1997", }, "CNB": PNPID{ ID: "CNB", Company: "AMERICAN POWER CONVERSION", Date: "03/15/2001", }, "APC": PNPID{ ID: "APC", Company: "AMERICAN POWER CONVERSION", Date: "11/29/1996", }, "AMN": PNPID{ ID: "AMN", Company: "AMIMON LTD.", Date: "06/13/2007", }, "AMO": PNPID{ ID: "AMO", Company: "AMINO TECHNOLOGIES PLC AND AMINO COMMUNICATIONS LIMITED", Date: "12/09/2011", }, "AKL": PNPID{ ID: "AKL", Company: "AMIT LTD", Date: "12/02/1997", }, "AMP": PNPID{ ID: "AMP", Company: "AMP INC", Date: "11/29/1996", }, "AII": PNPID{ ID: "AII", Company: "AMPTRON INTERNATIONAL INC.", Date: "05/24/2000", }, "AMT": PNPID{ ID: "AMT", Company: "AMT INTERNATIONAL INDUSTRY", Date: "11/29/1996", }, "AMR": PNPID{ ID: "AMR", Company: 
"AMTRAN TECHNOLOGY CO., LTD.", Date: "06/10/2013", }, "AMX": PNPID{ ID: "AMX", Company: "AMX LLC", Date: "07/06/2008", }, "BBB": PNPID{ ID: "BBB", Company: "AN-NAJAH UNIVERSITY", Date: "03/15/2001", }, "ANA": PNPID{ ID: "ANA", Company: "ANAKRON", Date: "11/08/1999", }, "ADN": PNPID{ ID: "ADN", Company: "ANALOG & DIGITAL DEVICES TEL. INC", Date: "03/14/1997", }, "ADS": PNPID{ ID: "ADS", Company: "ANALOG DEVICES INC", Date: "11/29/1996", }, "ANW": PNPID{ ID: "ANW", Company: "ANALOG WAY SAS", Date: "01/22/2014", }, "ANL": PNPID{ ID: "ANL", Company: "ANALOGIX SEMICONDUCTOR, INC", Date: "10/10/2005", }, "AAE": PNPID{ ID: "AAE", Company: "ANATEK ELECTRONICS INC.", Date: "05/25/2004", }, "ABT": PNPID{ ID: "ABT", Company: "ANCHOR BAY TECHNOLOGIES, INC.", Date: "02/14/2006", }, "ACI": PNPID{ ID: "ACI", Company: "ANCOR COMMUNICATIONS INC", Date: "11/29/1996", }, "ANC": PNPID{ ID: "ANC", Company: "ANCOT", Date: "11/29/1996", }, "AML": PNPID{ ID: "AML", Company: "ANDERSON MULTIMEDIA COMMUNICATIONS (HK) LIMITED", Date: "01/03/2003", }, "ANP": PNPID{ ID: "ANP", Company: "ANDREW NETWORK PRODUCTION", Date: "11/29/1996", }, "ANI": PNPID{ ID: "ANI", Company: "ANIGMA INC", Date: "11/29/1996", }, "ANK": PNPID{ ID: "ANK", Company: "ANKO ELECTRONIC COMPANY LTD", Date: "03/24/1998", }, "AAT": PNPID{ ID: "AAT", Company: "ANN ARBOR TECHNOLOGIES", Date: "04/24/2001", }, "ANO": PNPID{ ID: "ANO", Company: "ANORAD CORPORATION", Date: "01/13/2000", }, "ANR": PNPID{ ID: "ANR", Company: "ANR LTD", Date: "11/29/1996", }, "ANS": PNPID{ ID: "ANS", Company: "ANSEL COMMUNICATION COMPANY", Date: "11/29/1996", }, "AEC": PNPID{ ID: "AEC", Company: "ANTEX ELECTRONICS CORPORATION", Date: "11/29/1996", }, "AOA": PNPID{ ID: "AOA", Company: "AOPEN INC.", Date: "11/06/2001", }, "APX": PNPID{ ID: "APX", Company: "AP DESIGNS LTD", Date: "12/08/1997", }, "DNG": PNPID{ ID: "DNG", Company: "APACHE MICRO PERIPHERALS INC", Date: "11/11/1997", }, "APL": PNPID{ ID: "APL", Company: "APLICOM OY", Date: "05/02/2005", }, 
"APN": PNPID{ ID: "APN", Company: "APPIAN TECH INC", Date: "11/29/1996", }, "APP": PNPID{ ID: "APP", Company: "APPLE COMPUTER INC", Date: "11/29/1996", }, "APD": PNPID{ ID: "APD", Company: "APPLIADATA", Date: "11/29/1996", }, "ACT": PNPID{ ID: "ACT", Company: "APPLIED CREATIVE TECHNOLOGY", Date: "11/29/1996", }, "APM": PNPID{ ID: "APM", Company: "APPLIED MEMORY TECH", Date: "11/29/1996", }, "ACL": PNPID{ ID: "ACL", Company: "APRICOT COMPUTERS", Date: "11/29/1996", }, "APR": PNPID{ ID: "APR", Company: "APRILIA S.P.A.", Date: "02/22/1999", }, "ATJ": PNPID{ ID: "ATJ", Company: "ARCHITEK CORPORATION", Date: "01/22/2014", }, "ACH": PNPID{ ID: "ACH", Company: "ARCHTEK TELECOM CORPORATION", Date: "01/15/1997", }, "ATL": PNPID{ ID: "ATL", Company: "ARCUS TECHNOLOGY LTD", Date: "11/29/1996", }, "ARD": PNPID{ ID: "ARD", Company: "AREC INC.", Date: "07/08/2013", }, "ARS": PNPID{ ID: "ARS", Company: "ARESCOM INC", Date: "11/29/1996", }, "AGL": PNPID{ ID: "AGL", Company: "ARGOLIS", Date: "03/15/2001", }, "ARI": PNPID{ ID: "ARI", Company: "ARGOSY RESEARCH INC", Date: "02/24/1997", }, "ARG": PNPID{ ID: "ARG", Company: "ARGUS ELECTRONICS CO., LTD", Date: "06/04/2004", }, "ACA": PNPID{ ID: "ACA", Company: "ARIEL CORPORATION", Date: "12/13/1996", }, "ARM": PNPID{ ID: "ARM", Company: "ARIMA", Date: "04/07/2004", }, "ADE": PNPID{ ID: "ADE", Company: "ARITHMOS, INC.", Date: "07/16/1999", }, "ARK": PNPID{ ID: "ARK", Company: "ARK LOGIC INC", Date: "11/29/1996", }, "ARL": PNPID{ ID: "ARL", Company: "ARLOTTO COMNET INC", Date: "04/29/1997", }, "AMS ": PNPID{ ID: "AMS ", Company: "ARMSTEL, INC.", Date: "02/25/2011", }, "AIC": PNPID{ ID: "AIC", Company: "ARNOS INSTURMENTS & COMPUTER SYSTEMS", Date: "11/29/1996", }, "ARR": PNPID{ ID: "ARR", Company: "ARRIS GROUP, INC.", Date: "01/27/2015", }, "IMB": PNPID{ ID: "IMB", Company: "ART S.R.L.", Date: "01/27/2012", }, "AGI": PNPID{ ID: "AGI", Company: "ARTISH GRAPHICS INC", Date: "11/29/1996", }, "NPA": PNPID{ ID: "NPA", Company: "ARVANICS", Date: 
"03/05/2015", }, "AKM": PNPID{ ID: "AKM", Company: "ASAHI KASEI MICROSYSTEMS COMPANY LTD", Date: "11/29/1996", }, "ASN": PNPID{ ID: "ASN", Company: "ASANTE TECH INC", Date: "11/29/1996", }, "HER": PNPID{ ID: "HER", Company: "ASCOM BUSINESS SYSTEMS", Date: "01/20/1999", }, "ASC": PNPID{ ID: "ASC", Company: "ASCOM STRATEGIC TECHNOLOGY UNIT", Date: "11/29/1996", }, "ASM": PNPID{ ID: "ASM", Company: "ASEM S.P.A.", Date: "03/15/2001", }, "AEM": PNPID{ ID: "AEM", Company: "ASEM S.P.A.", Date: "11/29/1996", }, "ASE": PNPID{ ID: "ASE", Company: "ASEV DISPLAY LABS", Date: "10/16/1998", }, "ASH": PNPID{ ID: "ASH", Company: "ASHTON BENTLEY CONCEPTS", Date: "09/20/2013", }, "AMA": PNPID{ ID: "AMA", Company: "ASIA MICROELECTRONIC DEVELOPMENT INC", Date: "09/24/1997", }, "ASK": PNPID{ ID: "ASK", Company: "ASK A/S", Date: "11/29/1996", }, "DYN": PNPID{ ID: "DYN", Company: "ASKEY COMPUTER CORPORATION", Date: "07/22/1997", }, "AKY": PNPID{ ID: "AKY", Company: "ASKEY COMPUTER CORPORATION", Date: "04/02/1997", }, "ASP": PNPID{ ID: "ASP", Company: "ASP MICROELECTRONICS LTD", Date: "11/29/1996", }, "ACP": PNPID{ ID: "ACP", Company: "ASPEN TECH INC", Date: "11/29/1996", }, "AST": PNPID{ ID: "AST", Company: "AST RESEARCH INC", Date: "11/29/1996", }, "JAC": PNPID{ ID: "JAC", Company: "ASTEC INC", Date: "11/29/1996", }, "ADL": PNPID{ ID: "ADL", Company: "ASTRA SECURITY PRODUCTS LTD", Date: "07/30/1997", }, "ATO": PNPID{ ID: "ATO", Company: "ASTRO DESIGN, INC.", Date: "06/06/2003", }, "AHQ": PNPID{ ID: "AHQ", Company: "ASTRO HQ LLC", Date: "09/05/2018", }, "ASU": PNPID{ ID: "ASU", Company: "ASUSCOM NETWORK INC", Date: "11/29/1996", }, "AUS": PNPID{ ID: "AUS", Company: "ASUSTEK COMPUTER INC", Date: "12/21/2015", }, "ATT": PNPID{ ID: "ATT", Company: "AT&T", Date: "11/29/1996", }, "GIS": PNPID{ ID: "GIS", Company: "AT&T GLOBAL INFO SOLUTIONS", Date: "11/29/1996", }, "HSM": PNPID{ ID: "HSM", Company: "AT&T MICROELECTRONICS", Date: "11/29/1996", }, "TME": PNPID{ ID: "TME", Company: "AT&T 
MICROELECTRONICS", Date: "11/29/1996", }, "PDN": PNPID{ ID: "PDN", Company: "AT&T PARADYNE", Date: "11/29/1996", }, "AVJ": PNPID{ ID: "AVJ", Company: "ATELIER VISION CORPORATION", Date: "02/24/2015", }, "ATH": PNPID{ ID: "ATH", Company: "ATHENA INFORMATICA S.R.L.", Date: "01/29/1997", }, "ATN": PNPID{ ID: "ATN", Company: "ATHENA SMARTCARD SOLUTIONS LTD.", Date: "09/13/1999", }, "ATX": PNPID{ ID: "ATX", Company: "ATHENIX CORPORATION", Date: "11/29/1996", }, "BUJ": PNPID{ ID: "BUJ", Company: "ATI TECH INC", Date: "11/29/1996", }, "CFG": PNPID{ ID: "CFG", Company: "ATLANTIS", Date: "11/29/1996", }, "ATM": PNPID{ ID: "ATM", Company: "ATM LTD", Date: "11/29/1996", }, "AKP": PNPID{ ID: "AKP", Company: "ATOM KOMPLEX PRYLAD", Date: "10/23/2000", }, "AMC": PNPID{ ID: "AMC", Company: "ATTACHMATE CORPORATION", Date: "11/29/1996", }, "FWA": PNPID{ ID: "FWA", Company: "ATTERO TECH, LLC", Date: "04/20/2010", }, "APT": PNPID{ ID: "APT", Company: "AUDIO PROCESSING TECHNOLOGY LTD", Date: "03/18/1997", }, "ASX": PNPID{ ID: "ASX", Company: "AUDIOSCIENCE", Date: "11/29/1996", }, "AUG": PNPID{ ID: "AUG", Company: "AUGUST HOME, INC.", Date: "06/11/2014", }, "AVC": PNPID{ ID: "AVC", Company: "AURAVISION CORPORATION", Date: "11/29/1996", }, "AUR": PNPID{ ID: "AUR", Company: "AUREAL SEMICONDUCTOR", Date: "11/29/1996", }, "APS": PNPID{ ID: "APS", Company: "AUTOLOGIC INC", Date: "11/29/1996", }, "CLT": PNPID{ ID: "CLT", Company: "AUTOMATED COMPUTER CONTROL SYSTEMS", Date: "09/13/1999", }, "AUT": PNPID{ ID: "AUT", Company: "AUTOTIME CORPORATION", Date: "10/08/2001", }, "AUV": PNPID{ ID: "AUV", Company: "AUVIDEA GMBH", Date: "04/21/2014", }, "AVL": PNPID{ ID: "AVL", Company: "AVALUE TECHNOLOGY INC.", Date: "11/18/2011", }, "ALS": PNPID{ ID: "ALS", Company: "AVANCE LOGIC INC", Date: "11/29/1996", }, "AVS": PNPID{ ID: "AVS", Company: "AVATRON SOFTWARE INC.", Date: "08/23/2017", }, "AVA": PNPID{ ID: "AVA", Company: "AVAYA COMMUNICATION", Date: "03/15/2001", }, "AVG": PNPID{ ID: "AVG", Company: 
"AVEGANT CORPORATION", Date: "12/02/2015", }, "AEN": PNPID{ ID: "AEN", Company: "AVENCALL", Date: "01/27/2012", }, "AVR": PNPID{ ID: "AVR", Company: "AVER INFORMATION INC.", Date: "05/07/2010", }, "AVD": PNPID{ ID: "AVD", Company: "AVID ELECTRONICS CORPORATION", Date: "11/29/1996", }, "AVM": PNPID{ ID: "AVM", Company: "AVM GMBH", Date: "11/29/1996", }, "AVO": PNPID{ ID: "AVO", Company: "AVOCENT CORPORATION", Date: "10/23/2000", }, "AAA": PNPID{ ID: "AAA", Company: "AVOLITES LTD", Date: "02/17/2012", }, "AVT": PNPID{ ID: "AVT", Company: "AVTEK (ELECTRONICS) PTY LTD", Date: "11/29/1996", }, "ACD": PNPID{ ID: "ACD", Company: "AWETA BV", Date: "01/20/1998", }, "AXL": PNPID{ ID: "AXL", Company: "AXEL", Date: "11/29/1996", }, "AXE": PNPID{ ID: "AXE", Company: "AXELL CORPORATION", Date: "08/03/2016", }, "AXC": PNPID{ ID: "AXC", Company: "AXIOMTEK CO., LTD.", Date: "05/02/2005", }, "AXO": PNPID{ ID: "AXO", Company: "AXONIC LABS LLC", Date: "06/21/2012", }, "AXT": PNPID{ ID: "AXT", Company: "AXTEND TECHNOLOGIES INC", Date: "12/01/1997", }, "AXX": PNPID{ ID: "AXX", Company: "AXXON COMPUTER CORPORATION", Date: "11/29/1996", }, "AXY": PNPID{ ID: "AXY", Company: "AXYZ AUTOMATION SERVICES, INC", Date: "08/11/1998", }, "AYD": PNPID{ ID: "AYD", Company: "AYDIN DISPLAYS", Date: "06/13/2007", }, "AZM": PNPID{ ID: "AZM", Company: "AZ MIDDELHEIM - RADIOTHERAPY", Date: "11/14/2003", }, "AZT": PNPID{ ID: "AZT", Company: "AZTECH SYSTEMS LTD", Date: "11/29/1996", }, "BBH": PNPID{ ID: "BBH", Company: "B&BH", Date: "01/17/2003", }, "SMR": PNPID{ ID: "SMR", Company: "B.& V. S.R.L.", Date: "03/21/1997", }, "BFE": PNPID{ ID: "BFE", Company: "B.F. 
ENGINEERING CORPORATION", Date: "11/29/1996", }, "BUG": PNPID{ ID: "BUG", Company: "B.U.G., INC.", Date: "08/30/2011", }, "BNO": PNPID{ ID: "BNO", Company: "BANG & OLUFSEN", Date: "05/16/2003", }, "BNK": PNPID{ ID: "BNK", Company: "BANKSIA TECH PTY LTD", Date: "11/29/1996", }, "BAN": PNPID{ ID: "BAN", Company: "BANYAN", Date: "11/29/1996", }, "BRC": PNPID{ ID: "BRC", Company: "BARC", Date: "08/10/2000", }, "BDS": PNPID{ ID: "BDS", Company: "BARCO DISPLAY SYSTEMS", Date: "09/13/1999", }, "BCD": PNPID{ ID: "BCD", Company: "BARCO GMBH", Date: "03/07/2011", }, "BGB": PNPID{ ID: "BGB", Company: "BARCO GRAPHICS N.V", Date: "11/29/1996", }, "BPS": PNPID{ ID: "BPS", Company: "BARCO, N.V.", Date: "09/12/2000", }, "DDS": PNPID{ ID: "DDS", Company: "BARCO, N.V.", Date: "10/23/2000", }, "BEO": PNPID{ ID: "BEO", Company: "BAUG & OLUFSEN", Date: "11/29/1996", }, "BCC": PNPID{ ID: "BCC", Company: "BEAVER COMPUTER CORPORATON", Date: "11/29/1996", }, "BEC": PNPID{ ID: "BEC", Company: "BECKHOFF AUTOMATION", Date: "04/25/2002", }, "BEI": PNPID{ ID: "BEI", Company: "BECKWORTH ENTERPRISES INC", Date: "07/16/1997", }, "LHC": PNPID{ ID: "LHC", Company: "BEIHAI CENTURY JOINT INNOVATION TECHNOLOGY CO.,LTD", Date: "09/10/2019", }, "AGC": PNPID{ ID: "AGC", Company: "BEIJING AEROSPACE GOLDEN CARD ELECTRONIC ENGINEERING CO.,LTD.", Date: "06/21/2001", }, "AHS": PNPID{ ID: "AHS", Company: "BEIJING ANHENG SECOTECH INFORMATION TECHNOLOGY CO., LTD.", Date: "03/24/2015", }, "ANV": PNPID{ ID: "ANV", Company: "BEIJING ANTVR TECHNOLOGY CO., LTD.", Date: "08/24/2015", }, "NRT": PNPID{ ID: "NRT", Company: "BEIJING NORTHERN RADIANTELECOM CO.", Date: "03/20/1999", }, "BEK": PNPID{ ID: "BEK", Company: "BEKO ELEKTRONIK A.S.", Date: "06/15/2005", }, "BEL": PNPID{ ID: "BEL", Company: "BELTRONIC INDUSTRIEELEKTRONIK GMBH", Date: "09/05/2006", }, "BMI": PNPID{ ID: "BMI", Company: "BENSON MEDICAL INSTRUMENTS COMPANY", Date: "12/04/1996", }, "BUR": PNPID{ ID: "BUR", Company: "BERNECKER & RAINER IND-ELETRONIK GMBH", 
Date: "11/29/1996", }, "INZ": PNPID{ ID: "INZ", Company: "BEST BUY", Date: "06/04/2004", }, "VPR": PNPID{ ID: "VPR", Company: "BEST BUY", Date: "05/16/2002", }, "BPU": PNPID{ ID: "BPU", Company: "BEST POWER", Date: "11/29/1996", }, "BIA": PNPID{ ID: "BIA", Company: "BIAMP SYSTEMS CORPORATION", Date: "05/14/2015", }, "ICC": PNPID{ ID: "ICC", Company: "BICC DATA NETWORKS LTD", Date: "11/29/1996", }, "BIC": PNPID{ ID: "BIC", Company: "BIG ISLAND COMMUNICATIONS", Date: "05/13/1997", }, "BLD": PNPID{ ID: "BLD", Company: "BILD INNOVATIVE TECHNOLOGY LLC", Date: "10/22/2019", }, "BIL": PNPID{ ID: "BIL", Company: "BILLION ELECTRIC COMPANY LTD", Date: "12/11/1996", }, "BLN": PNPID{ ID: "BLN", Company: "BIOLINK TECHNOLOGIES", Date: "08/10/2000", }, "BIO": PNPID{ ID: "BIO", Company: "BIOLINK TECHNOLOGIES INTERNATIONAL, INC.", Date: "05/24/2000", }, "BML": PNPID{ ID: "BML", Company: "BIOMED LAB", Date: "05/22/1997", }, "BSL": PNPID{ ID: "BSL", Company: "BIOMEDICAL SYSTEMS LABORATORY", Date: "10/16/1997", }, "BMS": PNPID{ ID: "BMS", Company: "BIOMEDISYS", Date: "05/24/2000", }, "BAC": PNPID{ ID: "BAC", Company: "BIOMETRIC ACCESS CORPORATION", Date: "05/19/1998", }, "BTO": PNPID{ ID: "BTO", Company: "BIOTAO LTD", Date: "03/21/2012", }, "BIT": PNPID{ ID: "BIT", Company: "BIT 3 COMPUTER", Date: "11/29/1996", }, "BTC": PNPID{ ID: "BTC", Company: "BIT 3 COMPUTER", Date: "11/29/1996", }, "BTF": PNPID{ ID: "BTF", Company: "BITFIELD OY", Date: "11/29/1996", }, "BHZ": PNPID{ ID: "BHZ", Company: "BITHEADZ, INC.", Date: "09/29/2003", }, "BWK": PNPID{ ID: "BWK", Company: "BITWORKS INC.", Date: "07/10/2003", }, "BBX": PNPID{ ID: "BBX", Company: "BLACK BOX CORPORATION", Date: "02/28/2017", }, "BMD": PNPID{ ID: "BMD", Company: "BLACKMAGIC DESIGN", Date: "09/13/2012", }, "BDR": PNPID{ ID: "BDR", Company: "BLONDER TONGUE LABS, INC.", Date: "09/16/2008", }, "BLP": PNPID{ ID: "BLP", Company: "BLOOMBERG L.P.", Date: "09/16/2008", }, "BBV": PNPID{ ID: "BBV", Company: "BLUEBOX VIDEO LIMITED", Date: 
"06/22/2017", }, "ZZZ": PNPID{ ID: "ZZZ", Company: "BOCA RESEARCH INC", Date: "02/13/1997", }, "BRI": PNPID{ ID: "BRI", Company: "BOCA RESEARCH INC", Date: "11/29/1996", }, "BST": PNPID{ ID: "BST", Company: "BODYSOUND TECHNOLOGIES, INC.", Date: "03/12/2008", }, "BOE": PNPID{ ID: "BOE", Company: "BOE", Date: "12/02/2004", }, "BII": PNPID{ ID: "BII", Company: "BOECKELER INSTRUMENTS INC", Date: "10/17/1996", }, "BCS": PNPID{ ID: "BCS", Company: "BOORIA CAD/CAM SYSTEMS", Date: "05/11/2005", }, "BOS": PNPID{ ID: "BOS", Company: "BOS", Date: "07/03/1997", }, "BSE": PNPID{ ID: "BSE", Company: "BOSE CORPORATION", Date: "09/05/2006", }, "BNS": PNPID{ ID: "BNS", Company: "BOULDER NONLINEAR SYSTEMS", Date: "03/12/2008", }, "BRA": PNPID{ ID: "BRA", Company: "BRAEMAC PTY LTD", Date: "11/18/2010", }, "BRM": PNPID{ ID: "BRM", Company: "BRAEMAR INC", Date: "10/07/1997", }, "BDO": PNPID{ ID: "BDO", Company: "BRAHLER ICS", Date: "06/04/1998", }, "BBL": PNPID{ ID: "BBL", Company: "BRAIN BOXES LIMITED", Date: "10/02/2001", }, "BRG": PNPID{ ID: "BRG", Company: "BRIDGE INFORMATION CO., LTD", Date: "08/11/1998", }, "BSN": PNPID{ ID: "BSN", Company: "BRIGHTSIGN, LLC", Date: "02/28/2012", }, "BTE": PNPID{ ID: "BTE", Company: "BRILLIANT TECHNOLOGY", Date: "11/29/1996", }, "BCI": PNPID{ ID: "BCI", Company: "BROADATA COMMUNICATIONS INC.", Date: "11/19/2013", }, "BCM": PNPID{ ID: "BCM", Company: "BROADCOM", Date: "04/01/2004", }, "BRO": PNPID{ ID: "BRO", Company: "BROTHER INDUSTRIES,LTD.", Date: "02/21/2000", }, "NFC": PNPID{ ID: "NFC", Company: "BTC KOREA CO., LTD", Date: "02/25/2002", }, "BGT": PNPID{ ID: "BGT", Company: "BUDZETRON INC", Date: "11/29/1996", }, "BUL": PNPID{ ID: "BUL", Company: "BULL", Date: "02/03/1998", }, "BNE": PNPID{ ID: "BNE", Company: "BULL AB", Date: "10/06/1998", }, "BLI": PNPID{ ID: "BLI", Company: "BUSICOM", Date: "08/11/1998", }, "BTI": PNPID{ ID: "BTI", Company: "BUSTECH INC", Date: "11/29/1996", }, "BUS": PNPID{ ID: "BUS", Company: "BUSTEK", Date: "11/29/1996", 
}, "FLY": PNPID{ ID: "FLY", Company: "BUTTERFLY COMMUNICATIONS", Date: "05/05/1997", }, "BXE": PNPID{ ID: "BXE", Company: "BUXCO ELECTRONICS", Date: "11/29/1996", }, "BYD": PNPID{ ID: "BYD", Company: "BYD:SIGN CORPORATION", Date: "04/10/2008", }, "FVX": PNPID{ ID: "FVX", Company: "C-C-C GROUP PLC", Date: "05/04/1998", }, "CCC": PNPID{ ID: "CCC", Company: "C-CUBE MICROSYSTEMS", Date: "11/29/1996", }, "CEP": PNPID{ ID: "CEP", Company: "C-DAC", Date: "11/29/1996", }, "CMI": PNPID{ ID: "CMI", Company: "C-MEDIA ELECTRONICS", Date: "11/29/1996", }, "XMM": PNPID{ ID: "XMM", Company: "C3PO S.L.", Date: "03/03/1998", }, "CAC": PNPID{ ID: "CAC", Company: "CA & F ELETTRONICA", Date: "05/16/1999", }, "CBT": PNPID{ ID: "CBT", Company: "CABLETIME LTD", Date: "05/04/2010", }, "CSI": PNPID{ ID: "CSI", Company: "CABLETRON SYSTEM INC", Date: "11/29/1996", }, "CCI": PNPID{ ID: "CCI", Company: "CACHE", Date: "11/29/1996", }, "CAG": PNPID{ ID: "CAG", Company: "CALCOMP", Date: "11/29/1996", }, "CDP": PNPID{ ID: "CDP", Company: "CALCOMP", Date: "11/29/1996", }, "CUK": PNPID{ ID: "CUK", Company: "CALIBRE UK LTD", Date: "09/15/2005", }, "CSO": PNPID{ ID: "CSO", Company: "CALIFORNIA INSTITUTE OF TECHNOLOGY", Date: "03/20/1999", }, "CAM": PNPID{ ID: "CAM", Company: "CAMBRIDGE AUDIO", Date: "08/09/2008", }, "CED": PNPID{ ID: "CED", Company: "CAMBRIDGE ELECTRONIC DESIGN LTD", Date: "11/29/1996", }, "CMR": PNPID{ ID: "CMR", Company: "CAMBRIDGE RESEARCH SYSTEMS LTD", Date: "04/25/2002", }, "CRW": PNPID{ ID: "CRW", Company: "CAMMEGH LIMITED", Date: "06/18/2019", }, "CNN": PNPID{ ID: "CNN", Company: "CANON INC", Date: "11/29/1996", }, "CAI": PNPID{ ID: "CAI", Company: "CANON INC.", Date: "11/06/2001", }, "UBU": PNPID{ ID: "UBU", Company: "CANONICAL LTD.", Date: "05/24/2013", }, "CAN": PNPID{ ID: "CAN", Company: "CANOPUS COMPANY LTD", Date: "11/29/1996", }, "CPM": PNPID{ ID: "CPM", Company: "CAPELLA MICROSYSTEMS INC.", Date: "05/09/2012", }, "CCP": PNPID{ ID: "CCP", Company: "CAPETRONIC USA INC", 
Date: "11/29/1996", }, "DJE": PNPID{ ID: "DJE", Company: "CAPSTONE VISUA LPRODUCT DEVELOPMENT", Date: "10/09/2008", }, "CAR": PNPID{ ID: "CAR", Company: "CARDINAL COMPANY LTD", Date: "11/29/1996", }, "CRD": PNPID{ ID: "CRD", Company: "CARDINAL TECHNICAL INC", Date: "11/29/1996", }, "CLX": PNPID{ ID: "CLX", Company: "CARDLOGIX", Date: "03/15/2001", }, "CKJ": PNPID{ ID: "CKJ", Company: "CARINA SYSTEM CO., LTD.", Date: "09/03/2010", }, "CZE": PNPID{ ID: "CZE", Company: "CARL ZEISS AG", Date: "06/03/2009", }, "JAZ": PNPID{ ID: "JAZ", Company: "CARRERA COMPUTER INC", Date: "01/01/1994", }, "CAS": PNPID{ ID: "CAS", Company: "CASIO COMPUTER CO.,LTD", Date: "10/06/1998", }, "CAA": PNPID{ ID: "CAA", Company: "CASTLES AUTOMATION CO., LTD", Date: "01/13/2000", }, "CAV": PNPID{ ID: "CAV", Company: "CAVIUM NETWORKS, INC", Date: "02/02/2011", }, "CCL": PNPID{ ID: "CCL", Company: "CCL/ITRI", Date: "03/31/1997", }, "CBR": PNPID{ ID: "CBR", Company: "CEBRA TECH A/S", Date: "11/29/1996", }, "CEF": PNPID{ ID: "CEF", Company: "CEFAR DIGITAL VISION", Date: "02/19/1997", }, "CEN": PNPID{ ID: "CEN", Company: "CENTURION TECHNOLOGIES P/L", Date: "10/23/2000", }, "TCE": PNPID{ ID: "TCE", Company: "CENTURY CORPORATION", Date: "11/29/1996", }, "CRV": PNPID{ ID: "CRV", Company: "CEREVO INC.", Date: "07/13/2010", }, "CER": PNPID{ ID: "CER", Company: "CERONIX", Date: "09/02/2008", }, "TOM": PNPID{ ID: "TOM", Company: "CETON CORPORATION", Date: "05/08/2014", }, "CHP": PNPID{ ID: "CHP", Company: "CH PRODUCTS", Date: "04/24/1997", }, "CHD": PNPID{ ID: "CHD", Company: "CHANGHONG ELECTRIC CO.,LTD", Date: "11/30/2001", }, "FIR": PNPID{ ID: "FIR", Company: "CHAPLET SYSTEMS INC", Date: "11/29/1996", }, "CHA": PNPID{ ID: "CHA", Company: "CHASE RESEARCH PLC", Date: "11/29/1996", }, "CMG": PNPID{ ID: "CMG", Company: "CHENMING MOLD IND. 
CORP.", Date: "11/14/2003", }, "CHY": PNPID{ ID: "CHY", Company: "CHERRY GMBH", Date: "05/16/1999", }, "CMO": PNPID{ ID: "CMO", Company: "CHI MEI OPTOELECTRONICS CORP.", Date: "03/15/2001", }, "CHM": PNPID{ ID: "CHM", Company: "CHIC TECHNOLOGY CORP.", Date: "07/16/1999", }, "CEC": PNPID{ ID: "CEC", Company: "CHICONY ELECTRONICS COMPANY LTD", Date: "11/29/1996", }, "CMN": PNPID{ ID: "CMN", Company: "CHIMEI INNOLUX CORPORATION", Date: "09/02/2010", }, "HLG": PNPID{ ID: "HLG", Company: "CHINA HUALU GROUP CO., LTD.", Date: "05/13/2013", }, "CHL": PNPID{ ID: "CHL", Company: "CHLORIDE-R&D", Date: "11/29/1996", }, "CDG": PNPID{ ID: "CDG", Company: "CHRISTIE DIGITAL SYSTEMS INC", Date: "04/24/2001", }, "CHR": PNPID{ ID: "CHR", Company: "CHRISTMANN INFORMATIONSTECHNIK + MEDIEN GMBH & CO. KG", Date: "05/25/2017", }, "CVP": PNPID{ ID: "CVP", Company: "CHROMATEC VIDEO PRODUCTS LTD", Date: "08/09/2013", }, "CHI": PNPID{ ID: "CHI", Company: "CHRONTEL INC", Date: "11/29/1996", }, "CGA": PNPID{ ID: "CGA", Company: "CHUNGHWA PICTURE TUBES, LTD", Date: "01/01/1994", }, "CHT": PNPID{ ID: "CHT", Company: "CHUNGHWA PICTURE TUBES,LTD.", Date: "03/15/2001", }, "CTE": PNPID{ ID: "CTE", Company: "CHUNGHWA TELECOM CO., LTD.", Date: "05/16/2002", }, "KCD": PNPID{ ID: "KCD", Company: "CHUNICHI DENSHI CO.,LTD.", Date: "12/23/2010", }, "QQQ": PNPID{ ID: "QQQ", Company: "CHUOMUSEN CO., LTD.", Date: "08/07/2002", }, "CGS": PNPID{ ID: "CGS", Company: "CHYRON CORP", Date: "11/13/2008", }, "CNE": PNPID{ ID: "CNE", Company: "CINE-TAL", Date: "06/13/2007", }, "PTG": PNPID{ ID: "PTG", Company: "CIPHER SYSTEMS INC", Date: "11/29/1996", }, "CIP": PNPID{ ID: "CIP", Company: "CIPRICO INC", Date: "11/29/1996", }, "CPC": PNPID{ ID: "CPC", Company: "CIPRICO INC", Date: "11/29/1996", }, "FPX": PNPID{ ID: "FPX", Company: "CIREL SYSTEMES", Date: "11/29/1996", }, "CRQ": PNPID{ ID: "CRQ", Company: "CIRQUE CORPORATION", Date: "11/29/1996", }, "CIR": PNPID{ ID: "CIR", Company: "CIRRUS LOGIC INC", Date: "11/29/1996", 
}, "CLI": PNPID{ ID: "CLI", Company: "CIRRUS LOGIC INC", Date: "11/29/1996", }, "SNS": PNPID{ ID: "SNS", Company: "CIRTECH (UK) LTD", Date: "08/20/1997", }, "WSC": PNPID{ ID: "WSC", Company: "CIS TECHNOLOGY INC", Date: "11/29/1996", }, "CIS": PNPID{ ID: "CIS", Company: "CISCO SYSTEMS INC", Date: "11/29/1996", }, "CIL": PNPID{ ID: "CIL", Company: "CITICOM INFOTECH PRIVATE LIMITED", Date: "08/10/2000", }, "CIT": PNPID{ ID: "CIT", Company: "CITIFAX LIMITED", Date: "07/16/1997", }, "CIN": PNPID{ ID: "CIN", Company: "CITRON GMBH", Date: "07/28/2005", }, "CLA": PNPID{ ID: "CLA", Company: "CLARION COMPANY LTD", Date: "11/29/1996", }, "CVS": PNPID{ ID: "CVS", Company: "CLARITY VISUAL SYSTEMS", Date: "01/13/2000", }, "CLE": PNPID{ ID: "CLE", Company: "CLASSE AUDIO", Date: "02/16/2006", }, "CLV": PNPID{ ID: "CLV", Company: "CLEVO COMPANY", Date: "01/30/1998", }, "PPM": PNPID{ ID: "PPM", Company: "CLINTON ELECTRONICS CORP.", Date: "10/01/2003", }, "CLO": PNPID{ ID: "CLO", Company: "CLONE COMPUTERS", Date: "11/29/1996", }, "CSL": PNPID{ ID: "CSL", Company: "CLOUDIUM SYSTEMS LTD.", Date: "02/14/2013", }, "CMC": PNPID{ ID: "CMC", Company: "CMC LTD", Date: "11/29/1996", }, "JQE": PNPID{ ID: "JQE", Company: "CNET TECHNICAL INC", Date: "11/29/1996", }, "COB": PNPID{ ID: "COB", Company: "COBY ELECTRONICS CO., LTD", Date: "06/13/2007", }, "COD": PNPID{ ID: "COD", Company: "CODAN PTY. 
LTD.", Date: "10/23/2000", }, "COI": PNPID{ ID: "COI", Company: "CODEC INC.", Date: "11/30/2001", }, "CDN": PNPID{ ID: "CDN", Company: "CODENOLL TECHNICAL CORPORATION", Date: "11/29/1996", }, "CNT": PNPID{ ID: "CNT", Company: "COINT MULTIMEDIA SYSTEMS", Date: "03/20/1999", }, "CDE": PNPID{ ID: "CDE", Company: "COLIN.DE", Date: "01/18/2005", }, "CMD": PNPID{ ID: "CMD", Company: "COLORADO MICRODISPLAY, INC.", Date: "03/20/1999", }, "CVI": PNPID{ ID: "CVI", Company: "COLORADO VIDEO, INC.", Date: "08/15/2012", }, "MVX": PNPID{ ID: "MVX", Company: "COM 1", Date: "11/29/1996", }, "CMK": PNPID{ ID: "CMK", Company: "COMARK LLC", Date: "07/15/2020", }, "CMX": PNPID{ ID: "CMX", Company: "COMEX ELECTRONICS AB", Date: "05/28/2004", }, "CIC": PNPID{ ID: "CIC", Company: "COMM. INTELLIGENCE CORPORATION", Date: "11/29/1996", }, "CLD": PNPID{ ID: "CLD", Company: "COMMAT L.T.D.", Date: "08/10/2000", }, "SDH": PNPID{ ID: "SDH", Company: "COMMUNICATIONS SPECIALIES, INC.", Date: "09/06/2005", }, "INX": PNPID{ ID: "INX", Company: "COMMUNICATIONS SUPPLY CORPORATION (A DIVISION OF WESCO)", Date: "11/07/2012", }, "CPL": PNPID{ ID: "CPL", Company: "COMPAL ELECTRONICS INC", Date: "11/29/1996", }, "CPQ": PNPID{ ID: "CPQ", Company: "COMPAQ COMPUTER COMPANY", Date: "11/29/1996", }, "CPP": PNPID{ ID: "CPP", Company: "COMPOUND PHOTONICS", Date: "10/01/2013", }, "CPD": PNPID{ ID: "CPD", Company: "COMPUADD", Date: "11/29/1996", }, "CMS": PNPID{ ID: "CMS", Company: "COMPUMASTER SRL", Date: "02/22/1999", }, "CDS": PNPID{ ID: "CDS", Company: "COMPUTER DIAGNOSTIC SYSTEMS", Date: "03/15/2001", }, "CPI": PNPID{ ID: "CPI", Company: "COMPUTER PERIPHERALS INC", Date: "11/29/1996", }, "CTP": PNPID{ ID: "CTP", Company: "COMPUTER TECHNOLOGY CORPORATION", Date: "03/26/1998", }, "CBI": PNPID{ ID: "CBI", Company: "COMPUTERBOARDS INC", Date: "02/03/1998", }, "CTM": PNPID{ ID: "CTM", Company: "COMPUTERM CORPORATION", Date: "11/29/1996", }, "CTN": PNPID{ ID: "CTN", Company: "COMPUTONE PRODUCTS", Date: "11/29/1996", 
}, "COX": PNPID{ ID: "COX", Company: "COMREX", Date: "10/18/2011", }, "CTS": PNPID{ ID: "CTS", Company: "COMTEC SYSTEMS CO., LTD.", Date: "04/25/2002", }, "CMM": PNPID{ ID: "CMM", Company: "COMTIME GMBH", Date: "09/23/2002", }, "COM": PNPID{ ID: "COM", Company: "COMTROL CORPORATION", Date: "11/29/1996", }, "CDI": PNPID{ ID: "CDI", Company: "CONCEPT DEVELOPMENT INC", Date: "11/29/1996", }, "CSE": PNPID{ ID: "CSE", Company: "CONCEPT SOLUTIONS & ENGINEERING", Date: "12/11/1996", }, "DCI": PNPID{ ID: "DCI", Company: "CONCEPTS INC", Date: "11/29/1996", }, "CXT": PNPID{ ID: "CXT", Company: "CONEXANT SYSTEMS", Date: "01/20/1999", }, "CGT": PNPID{ ID: "CGT", Company: "CONGATEC AG", Date: "06/16/2011", }, "CNI": PNPID{ ID: "CNI", Company: "CONNECT INT'L A/S", Date: "11/29/1996", }, "CWR": PNPID{ ID: "CWR", Company: "CONNECTWARE INC", Date: "11/29/1996", }, "CRC": PNPID{ ID: "CRC", Company: "CONRAC GMBH", Date: "04/20/2004", }, "CAT": PNPID{ ID: "CAT", Company: "CONSULTANCY IN ADVANCED TECHNOLOGY", Date: "09/19/1997", }, "CEA": PNPID{ ID: "CEA", Company: "CONSUMER ELECTRONICS ASSOCIATION", Date: "09/05/2006", }, "CCJ": PNPID{ ID: "CCJ", Company: "CONTEC CO.,LTD.", Date: "08/10/2000", }, "CON": PNPID{ ID: "CON", Company: "CONTEC COMPANY LTD", Date: "11/29/1996", }, "CRH": PNPID{ ID: "CRH", Company: "CONTEMPORARY RESEARCH CORP.", Date: "02/24/2015", }, "CTR": PNPID{ ID: "CTR", Company: "CONTROL4 CORPORATION", Date: "05/28/2014", }, "CDD": PNPID{ ID: "CDD", Company: "CONVERGENT DATA DEVICES", Date: "02/27/2004", }, "CDV": PNPID{ ID: "CDV", Company: "CONVERGENT DESIGN INC.", Date: "09/05/2006", }, "CIE": PNPID{ ID: "CIE", Company: "CONVERGENT ENGINEERING, INC.", Date: "09/05/2018", }, "COO": PNPID{ ID: "COO", Company: "COOLUX GMBH", Date: "09/30/2010", }, "CDC": PNPID{ ID: "CDC", Company: "CORE DYNAMICS CORPORATION", Date: "11/29/1996", }, "COT": PNPID{ ID: "COT", Company: "CORE TECHNOLOGY INC", Date: "04/19/2000", }, "CLG": PNPID{ ID: "CLG", Company: "CORELOGIC", Date: 
"11/27/1998", }, "ART": PNPID{ ID: "ART", Company: "CORION INDUSTRIAL CORPORATION", Date: "11/29/1996", }, "CRN": PNPID{ ID: "CRN", Company: "CORNERSTONE IMAGING", Date: "11/29/1996", }, "COR": PNPID{ ID: "COR", Company: "COROLLARY INC", Date: "12/13/1996", }, "CSM": PNPID{ ID: "CSM", Company: "COSMIC ENGINEERING INC.", Date: "04/18/2012", }, "COS": PNPID{ ID: "COS", Company: "COSTAR CORPORATION", Date: "11/29/1996", }, "CTA": PNPID{ ID: "CTA", Company: "COSYSTEMS INC", Date: "10/24/1998", }, "CVA": PNPID{ ID: "CVA", Company: "COVIA INC.", Date: "05/11/2010", }, "CPT": PNPID{ ID: "CPT", Company: "CPATH", Date: "03/09/1998", }, "CRA": PNPID{ ID: "CRA", Company: "CRALTECH ELECTRONICA, S.L.", Date: "03/24/2015", }, "CDK": PNPID{ ID: "CDK", Company: "CRAY COMMUNICATIONS", Date: "11/29/1996", }, "IOA": PNPID{ ID: "IOA", Company: "CRE TECHNOLOGY CORPORATION", Date: "06/30/1997", }, "CRE": PNPID{ ID: "CRE", Company: "CREATIVE LABS INC", Date: "11/29/1996", }, "CRL": PNPID{ ID: "CRL", Company: "CREATIVE LOGIC  ", Date: "10/16/1997", }, "CTL": PNPID{ ID: "CTL", Company: "CREATIVE TECHNOLOGY LTD", Date: "11/29/1996", }, "CTX": PNPID{ ID: "CTX", Company: "CREATIX POLYMEDIA GMBH", Date: "11/29/1996", }, "CRS": PNPID{ ID: "CRS", Company: "CRESCENDO COMMUNICATION INC", Date: "11/29/1996", }, "CSD": PNPID{ ID: "CSD", Company: "CRESTA SYSTEMS INC", Date: "08/01/1997", }, "CEI": PNPID{ ID: "CEI", Company: "CRESTRON ELECTRONICS, INC.", Date: "05/08/2006", }, "CRI": PNPID{ ID: "CRI", Company: "CRIO INC.", Date: "09/13/1999", }, "CII": PNPID{ ID: "CII", Company: "CROMACK INDUSTRIES INC", Date: "01/22/1997", }, "XTL": PNPID{ ID: "XTL", Company: "CRYSTAL COMPUTER", Date: "11/29/1996", }, "CSC": PNPID{ ID: "CSC", Company: "CRYSTAL SEMICONDUCTOR", Date: "11/29/1996", }, "CLM": PNPID{ ID: "CLM", Company: "CRYSTALAKE MULTIMEDIA", Date: "11/29/1996", }, "CSS": PNPID{ ID: "CSS", Company: "CSS LABORATORIES", Date: "01/02/1997", }, "CST": PNPID{ ID: "CST", Company: "CSTI INC", Date: 
"11/29/1996", }, "CTC": PNPID{ ID: "CTC", Company: "CTC COMMUNICATION DEVELOPMENT COMPANY LTD", Date: "10/21/1997", }, "CUB": PNPID{ ID: "CUB", Company: "CUBIX CORPORATION", Date: "11/29/1996", }, "CWC": PNPID{ ID: "CWC", Company: "CURTISS-WRIGHT CONTROLS, INC.", Date: "04/05/2013", }, "CYL": PNPID{ ID: "CYL", Company: "CYBERLABS", Date: "04/14/1998", }, "CYB": PNPID{ ID: "CYB", Company: "CYBERVISION", Date: "05/13/1997", }, "CYW": PNPID{ ID: "CYW", Company: "CYBERWARE", Date: "02/21/2000", }, "CBX": PNPID{ ID: "CBX", Company: "CYBEX COMPUTER PRODUCTS CORPORATION", Date: "11/08/1999", }, "CYD": PNPID{ ID: "CYD", Company: "CYCLADES CORPORATION", Date: "05/07/2001", }, "CYC": PNPID{ ID: "CYC", Company: "CYLINK CORPORATION", Date: "11/29/1996", }, "CYP": PNPID{ ID: "CYP", Company: "CYPRESS SEMICONDUCTOR CORPORATION", Date: "05/25/2016", }, "CYX": PNPID{ ID: "CYX", Company: "CYRIX CORPORATION", Date: "10/21/1997", }, "CRX": PNPID{ ID: "CRX", Company: "CYRIX CORPORATION", Date: "03/21/1997", }, "CYT": PNPID{ ID: "CYT", Company: "CYTECHINFO INC", Date: "03/13/1998", }, "CYV": PNPID{ ID: "CYV", Company: "CYVIZ AS", Date: "04/25/2002", }, "DMP": PNPID{ ID: "DMP", Company: "D&M HOLDINGS INC, PROFESSIONAL BUSINESS COMPANY", Date: "09/05/2006", }, "ABO": PNPID{ ID: "ABO", Company: "D-LINK SYSTEMS INC", Date: "11/29/1996", }, "DLK": PNPID{ ID: "DLK", Company: "D-LINK SYSTEMS INC", Date: "11/29/1996", }, "OPI": PNPID{ ID: "OPI", Company: "D.N.S. 
CORPORATION", Date: "11/29/1996", }, "DDA": PNPID{ ID: "DDA", Company: "DA2 TECHNOLOGIES CORPORATION", Date: "03/13/2006", }, "DAW": PNPID{ ID: "DAW", Company: "DA2 TECHNOLOGIES INC", Date: "09/06/2005", }, "DWE": PNPID{ ID: "DWE", Company: "DAEWOO ELECTRONICS COMPANY LTD", Date: "11/29/1996", }, "TLT": PNPID{ ID: "TLT", Company: "DAI TELECOM S.P.A.", Date: "06/04/2003", }, "DIN": PNPID{ ID: "DIN", Company: "DAINTELECOM CO., LTD", Date: "11/08/1999", }, "DAI": PNPID{ ID: "DAI", Company: "DAIS SET LTD.", Date: "02/21/2000", }, "DAK": PNPID{ ID: "DAK", Company: "DAKTRONICS", Date: "06/23/2004", }, "DCC": PNPID{ ID: "DCC", Company: "DALE COMPUTER CORPORATION", Date: "11/29/1996", }, "DCT": PNPID{ ID: "DCT", Company: "DANCALL TELECOM A/S", Date: "08/12/1997", }, "DAN": PNPID{ ID: "DAN", Company: "DANELEC MARINE A/S", Date: "12/24/2009", }, "DDD": PNPID{ ID: "DDD", Company: "DANKA DATA DEVICES", Date: "11/29/1996", }, "DAU": PNPID{ ID: "DAU", Company: "DAOU TECH INC", Date: "11/29/1996", }, "HCA": PNPID{ ID: "HCA", Company: "DAT", Date: "03/15/2001", }, "DAX": PNPID{ ID: "DAX", Company: "DATA APEX LTD", Date: "11/29/1996", }, "DDI": PNPID{ ID: "DDI", Company: "DATA DISPLAY AG", Date: "07/17/2002", }, "DXP": PNPID{ ID: "DXP", Company: "DATA EXPERT CORPORATION", Date: "11/29/1996", }, "EXP": PNPID{ ID: "EXP", Company: "DATA EXPORT CORPORATION", Date: "11/29/1996", }, "DGC": PNPID{ ID: "DGC", Company: "DATA GENERAL CORPORATION", Date: "11/29/1996", }, "DMO": PNPID{ ID: "DMO", Company: "DATA MODUL AG", Date: "12/03/2013", }, "EBH": PNPID{ ID: "EBH", Company: "DATA PRICE INFORMATICA", Date: "05/24/2001", }, "DRI": PNPID{ ID: "DRI", Company: "DATA RACE INC", Date: "07/30/1997", }, "DRC": PNPID{ ID: "DRC", Company: "DATA RAY CORP.", Date: "11/30/2001", }, "DTX": PNPID{ ID: "DTX", Company: "DATA TRANSLATION", Date: "11/29/1996", }, "DVT": PNPID{ ID: "DVT", Company: "DATA VIDEO", Date: "02/13/2007", }, "DBK": PNPID{ ID: "DBK", Company: "DATABOOK INC", Date: "11/29/1996", }, 
"DCD": PNPID{ ID: "DCD", Company: "DATACAST LLC", Date: "12/02/1997", }, "TRN": PNPID{ ID: "TRN", Company: "DATACOMMUNICATIE TRON B.V.", Date: "11/29/1996", }, "DQB": PNPID{ ID: "DQB", Company: "DATACUBE INC", Date: "11/29/1996", }, "DDT": PNPID{ ID: "DDT", Company: "DATADESK TECHNOLOGIES INC", Date: "11/27/1998", }, "DKY": PNPID{ ID: "DKY", Company: "DATAKEY INC", Date: "04/06/1998", }, "LJX": PNPID{ ID: "LJX", Company: "DATALOGIC CORPORATION", Date: "11/29/1996", }, "DTN": PNPID{ ID: "DTN", Company: "DATANG TELEPHONE CO", Date: "09/23/1998", }, "DII": PNPID{ ID: "DII", Company: "DATAQ INSTRUMENTS INC", Date: "11/29/1996", }, "DDE": PNPID{ ID: "DDE", Company: "DATASAT DIGITAL ENTERTAINMENT", Date: "11/18/2011", }, "DCV": PNPID{ ID: "DCV", Company: "DATATRONICS TECHNOLOGY INC", Date: "01/02/1997", }, "DAT": PNPID{ ID: "DAT", Company: "DATEL INC", Date: "11/29/1996", }, "MSD": PNPID{ ID: "MSD", Company: "DATENERFASSUNGS- UND INFORMATIONSSYSTEME", Date: "03/16/1998", }, "DAV": PNPID{ ID: "DAV", Company: "DAVICOM SEMICONDUCTOR INC", Date: "01/15/1997", }, "DAS": PNPID{ ID: "DAS", Company: "DAVIS AS", Date: "02/03/1998", }, "DBN": PNPID{ ID: "DBN", Company: "DB NETWORKS INC", Date: "12/01/1997", }, "HWC": PNPID{ ID: "HWC", Company: "DBA HANS WEDEMEYER", Date: "03/20/1999", }, "DCM": PNPID{ ID: "DCM", Company: "DCM DATA PRODUCTS", Date: "11/29/1996", }, "DGT": PNPID{ ID: "DGT", Company: "DEARBORN GROUP TECHNOLOGY", Date: "11/11/1997", }, "DXD": PNPID{ ID: "DXD", Company: "DECIMATOR DESIGN PTY LTD", Date: "03/06/2012", }, "DCR": PNPID{ ID: "DCR", Company: "DECROS LTD", Date: "11/29/1996", }, "MLD": PNPID{ ID: "MLD", Company: "DEEP VIDEO IMAGING LTD", Date: "08/14/2003", }, "DFT": PNPID{ ID: "DFT", Company: "DEI HOLDINGS DBA DEFINITIVE TECHNOLOGY", Date: "12/09/2011", }, "DEI": PNPID{ ID: "DEI", Company: "DEICO ELECTRONICS", Date: "11/29/1996", }, "DLL": PNPID{ ID: "DLL", Company: "DELL INC", Date: "03/27/2009", }, "DEL": PNPID{ ID: "DEL", Company: "DELL INC.", Date: 
"12/09/2009", }, "DPH": PNPID{ ID: "DPH", Company: "DELPHI AUTOMOTIVE LLP", Date: "10/15/2013", }, "DPC": PNPID{ ID: "DPC", Company: "DELTA ELECTRONICS INC", Date: "11/29/1996", }, "DDV": PNPID{ ID: "DDV", Company: "DELTA INFORMATION SYSTEMS, INC", Date: "01/03/2012", }, "DTA": PNPID{ ID: "DTA", Company: "DELTATEC", Date: "03/13/2009", }, "FPS": PNPID{ ID: "FPS", Company: "DELTEC CORPORATION", Date: "11/29/1996", }, "DON": PNPID{ ID: "DON", Company: "DENON, LTD.", Date: "04/01/2004", }, "DHD": PNPID{ ID: "DHD", Company: "DENSION AUDIO SYSTEMS", Date: "03/04/2013", }, "DEN": PNPID{ ID: "DEN", Company: "DENSITRON COMPUTERS LTD", Date: "09/13/1999", }, "DTT": PNPID{ ID: "DTT", Company: "DESIGN & TEST TECHNOLOGY, INC.", Date: "09/30/2010", }, "LPI": PNPID{ ID: "LPI", Company: "DESIGN TECHNOLOGY", Date: "11/29/1996", }, "DNI": PNPID{ ID: "DNI", Company: "DETERMINISTIC NETWORKS INC.", Date: "04/19/2000", }, "BCQ": PNPID{ ID: "BCQ", Company: "DEUTSCHE TELEKOM BERKOM GMBH", Date: "08/12/1997", }, "DTO": PNPID{ ID: "DTO", Company: "DEUTSCHE THOMSON OHG", Date: "06/14/2007", }, "DVL": PNPID{ ID: "DVL", Company: "DEVOLO AG", Date: "05/30/2002", }, "DXL": PNPID{ ID: "DXL", Company: "DEXTERA LABS INC", Date: "12/09/2009", }, "DFI": PNPID{ ID: "DFI", Company: "DFI", Date: "11/29/1996", }, "DHP": PNPID{ ID: "DHP", Company: "DH PRINT", Date: "11/29/1996", }, "DIA": PNPID{ ID: "DIA", Company: "DIADEM", Date: "11/29/1996", }, "DGS": PNPID{ ID: "DGS", Company: "DIAGSOFT INC", Date: "11/29/1996", }, "DCO": PNPID{ ID: "DCO", Company: "DIALOGUE TECHNOLOGY CORPORATION", Date: "06/16/2004", }, "DCS": PNPID{ ID: "DCS", Company: "DIAMOND COMPUTER SYSTEMS INC", Date: "11/29/1996", }, "DLC": PNPID{ ID: "DLC", Company: "DIAMOND LANE COMM. 
CORPORATION", Date: "11/29/1996", }, "DNV": PNPID{ ID: "DNV", Company: "DICON", Date: "12/15/2004", }, "DVD": PNPID{ ID: "DVD", Company: "DICTAPHONE CORPORATION", Date: "04/03/1998", }, "DBD": PNPID{ ID: "DBD", Company: "DIEBOLD INC.", Date: "09/05/2006", }, "WNX": PNPID{ ID: "WNX", Company: "DIEBOLD NIXDORF SYSTEMS GMBH", Date: "09/20/2004", }, "DAE": PNPID{ ID: "DAE", Company: "DIGATRON INDUSTRIE ELEKTRONIK GMBH", Date: "02/24/1997", }, "DGI": PNPID{ ID: "DGI", Company: "DIGI INTERNATIONAL", Date: "11/29/1996", }, "DBI": PNPID{ ID: "DBI", Company: "DIGIBOARD INC", Date: "11/29/1996", }, "DIG": PNPID{ ID: "DIG", Company: "DIGICOM S.P.A.", Date: "11/29/1996", }, "DMB": PNPID{ ID: "DMB", Company: "DIGICOM SYSTEMS INC", Date: "03/13/1998", }, "DGP": PNPID{ ID: "DGP", Company: "DIGICORP EUROPEAN SALES S.A.", Date: "05/22/1997", }, "DGA": PNPID{ ID: "DGA", Company: "DIGIITAL ARTS INC", Date: "06/14/2007", }, "DXC": PNPID{ ID: "DXC", Company: "DIGIPRONIX CONTROL SYSTEMS", Date: "07/16/1999", }, "DAC": PNPID{ ID: "DAC", Company: "DIGITAL ACOUSTICS CORPORATION", Date: "05/24/2000", }, "DAL": PNPID{ ID: "DAL", Company: "DIGITAL AUDIO LABS INC", Date: "11/29/1996", }, "DCA": PNPID{ ID: "DCA", Company: "DIGITAL COMMUNICATIONS ASSOCIATION", Date: "11/29/1996", }, "SHR": PNPID{ ID: "SHR", Company: "DIGITAL DISCOVERY", Date: "09/24/1997", }, "DEC": PNPID{ ID: "DEC", Company: "DIGITAL EQUIPMENT CORPORATION", Date: "11/29/1996", }, "DPS": PNPID{ ID: "DPS", Company: "DIGITAL PROCESSING SYSTEMS", Date: "11/29/1996", }, "DPL": PNPID{ ID: "DPL", Company: "DIGITAL PROJECTION LIMITED", Date: "07/09/2002", }, "DRD": PNPID{ ID: "DRD", Company: "DIGITAL REFLECTION INC.", Date: "02/21/2000", }, "DVS": PNPID{ ID: "DVS", Company: "DIGITAL VIDEO SYSTEM", Date: "11/29/1996", }, "DLG": PNPID{ ID: "DLG", Company: "DIGITAL-LOGIC GMBH", Date: "09/02/2003", }, "DPA": PNPID{ ID: "DPA", Company: "DIGITALK PRO AV", Date: "10/23/2000", }, "DSI": PNPID{ ID: "DSI", Company: "DIGITAN SYSTEMS INC", Date: 
"11/29/1996", }, "DLT": PNPID{ ID: "DLT", Company: "DIGITELEC INFORMATIQUE PARK CADERA", Date: "11/29/1996", }, "DMN": PNPID{ ID: "DMN", Company: "DIMENSION ENGINEERING LLC", Date: "02/06/2019", }, "DTE": PNPID{ ID: "DTE", Company: "DIMENSION TECHNOLOGIES, INC.", Date: "05/03/2010", }, "DMM": PNPID{ ID: "DMM", Company: "DIMOND MULTIMEDIA SYSTEMS INC", Date: "11/29/1996", }, "DIS": PNPID{ ID: "DIS", Company: "DISEDA S.A.", Date: "11/29/1996", }, "DSG": PNPID{ ID: "DSG", Company: "DISGUISE TECHNOLOGIES", Date: "10/22/2019", }, "DSA": PNPID{ ID: "DSA", Company: "DISPLAY SOLUTION AG", Date: "02/03/2016", }, "DMT": PNPID{ ID: "DMT", Company: "DISTRIBUTED MANAGEMENT TASK FORCE, INC. (DMTF)", Date: "03/31/2009", }, "DTI": PNPID{ ID: "DTI", Company: "DIVERSIFIED TECHNOLOGY, INC.", Date: "11/29/1996", }, "DNA": PNPID{ ID: "DNA", Company: "DNA ENTERPRISES, INC.", Date: "09/01/1998", }, "AUO": PNPID{ ID: "AUO", Company: "DO NOT USE - AUO", Date: "09/16/2008", }, "LPL": PNPID{ ID: "LPL", Company: "DO NOT USE - LPL", Date: "09/16/2008", }, "PHI": PNPID{ ID: "PHI", Company: "DO NOT USE - PHI", Date: "11/29/1996", }, "PTW": PNPID{ ID: "PTW", Company: "DO NOT USE - PTW", Date: "09/09/2009", }, "PVC": PNPID{ ID: "PVC", Company: "DO NOT USE - PVC", Date: "09/09/2009", }, "RTK": PNPID{ ID: "RTK", Company: "DO NOT USE - RTK", Date: "09/09/2009", }, "SEG": PNPID{ ID: "SEG", Company: "DO NOT USE - SEG", Date: "09/09/2009", }, "TNJ": PNPID{ ID: "TNJ", Company: "DO NOT USE - TNJ", Date: "09/09/2009", }, "UND": PNPID{ ID: "UND", Company: "DO NOT USE - UND", Date: "11/29/1996", }, "UNE": PNPID{ ID: "UNE", Company: "DO NOT USE - UNE", Date: "11/29/1996", }, "UNF": PNPID{ ID: "UNF", Company: "DO NOT USE - UNF", Date: "11/29/1996", }, "WAN": PNPID{ ID: "WAN", Company: "DO NOT USE - WAN", Date: "09/09/2009", }, "XER": PNPID{ ID: "XER", Company: "DO NOT USE - XER", Date: "09/09/2009", }, "XOC": PNPID{ ID: "XOC", Company: "DO NOT USE - XOC", Date: "09/09/2009", }, "DBL": PNPID{ ID: "DBL", 
Company: "DOBLE ENGINEERING COMPANY", Date: "11/29/1996", }, "DPI": PNPID{ ID: "DPI", Company: "DOCUPOINT", Date: "11/29/1996", }, "DLB": PNPID{ ID: "DLB", Company: "DOLBY LABORATORIES INC.", Date: "01/27/2010", }, "DOL": PNPID{ ID: "DOL", Company: "DOLMAN TECHNOLOGIES GROUP INC", Date: "11/11/1997", }, "DSP": PNPID{ ID: "DSP", Company: "DOMAIN TECHNOLOGY INC", Date: "11/29/1996", }, "DMS": PNPID{ ID: "DMS", Company: "DOME IMAGING SYSTEMS", Date: "10/23/2000", }, "DOM": PNPID{ ID: "DOM", Company: "DOME IMAGING SYSTEMS", Date: "11/29/1996", }, "AIK": PNPID{ ID: "AIK", Company: "DONGGUAN ALLLIKE ELECTRONICS CO., LTD.", Date: "04/11/2015", }, "DUA": PNPID{ ID: "DUA", Company: "DOSCH & AMAND GMBH & COMPANY KG", Date: "12/02/1997", }, "DOT": PNPID{ ID: "DOT", Company: "DOTRONIC MIKROELEKTRONIK GMBH", Date: "06/28/2002", }, "DIM": PNPID{ ID: "DIM", Company: "DPICT IMAGING, INC.", Date: "02/12/2008", }, "DPX": PNPID{ ID: "DPX", Company: "DPIX, INC.", Date: "09/23/1998", }, "DPT": PNPID{ ID: "DPT", Company: "DPT", Date: "11/29/1996", }, "DRB": PNPID{ ID: "DRB", Company: "DR. BOTT KG", Date: "04/25/2002", }, "DNT": PNPID{ ID: "DNT", Company: "DR. 
NEUHOUS TELEKOMMUNIKATION GMBH", Date: "11/29/1996", }, "DIT": PNPID{ ID: "DIT", Company: "DRAGON INFORMATION TECHNOLOGY", Date: "11/29/1996", }, "DRS": PNPID{ ID: "DRS", Company: "DRS DEFENSE SOLUTIONS, LLC", Date: "10/18/2011", }, "DSD": PNPID{ ID: "DSD", Company: "DS MULTIMEDIA PTE LTD", Date: "02/14/2006", }, "DSM": PNPID{ ID: "DSM", Company: "DSM DIGITAL SERVICES GMBH", Date: "11/29/1996", }, "DCE": PNPID{ ID: "DCE", Company: "DSPACE GMBH", Date: "12/16/1996", }, "DTC": PNPID{ ID: "DTC", Company: "DTC TECH CORPORATION", Date: "11/29/1996", }, "DGK": PNPID{ ID: "DGK", Company: "DUGOTECH CO., LTD", Date: "06/14/2007", }, "DMC": PNPID{ ID: "DMC", Company: "DUNE MICROSYSTEMS CORPORATION", Date: "11/29/1996", }, "DYC": PNPID{ ID: "DYC", Company: "DYCAM INC", Date: "01/08/1998", }, "DYM": PNPID{ ID: "DYM", Company: "DYMO-COSTAR CORPORATION", Date: "12/28/1998", }, "TOS": PNPID{ ID: "TOS", Company: "DYNABOOK INC.", Date: "11/29/1996", }, "DCL": PNPID{ ID: "DCL", Company: "DYNAMIC CONTROLS LTD", Date: "05/24/2000", }, "DTK": PNPID{ ID: "DTK", Company: "DYNAX ELECTRONICS (HK) LTD", Date: "11/29/1996", }, "DYX": PNPID{ ID: "DYX", Company: "DYNAX ELECTRONICS (HK) LTD", Date: "11/29/1996", }, "ECM": PNPID{ ID: "ECM", Company: "E-CMOS TECH CORPORATION", Date: "11/29/1996", }, "DTL": PNPID{ ID: "DTL", Company: "E-NET INC", Date: "10/16/1997", }, "ESY": PNPID{ ID: "ESY", Company: "E-SYSTEMS INC", Date: "11/29/1996", }, "ETT": PNPID{ ID: "ETT", Company: "E-TECH INC", Date: "11/29/1996", }, "EDC": PNPID{ ID: "EDC", Company: "E.DIGITAL CORPORATION", Date: "10/23/2000", }, "EEP": PNPID{ ID: "EEP", Company: "E.E.P.D. 
GMBH", Date: "06/14/2007", }, "EGL": PNPID{ ID: "EGL", Company: "EAGLE TECHNOLOGY", Date: "11/29/1996", }, "KOD": PNPID{ ID: "KOD", Company: "EASTMAN KODAK COMPANY", Date: "05/24/2000", }, "EKC": PNPID{ ID: "EKC", Company: "EASTMAN KODAK COMPANY", Date: "11/29/1996", }, "TWI": PNPID{ ID: "TWI", Company: "EASYTEL OY", Date: "07/16/1999", }, "EBS": PNPID{ ID: "EBS", Company: "EBS EUCHNER BÜRO- UND SCHULSYSTEME GMBH", Date: "02/05/2013", }, "ECO": PNPID{ ID: "ECO", Company: "ECHO SPEECH CORPORATION", Date: "11/29/1996", }, "ECH": PNPID{ ID: "ECH", Company: "ECHOSTAR CORPORATION", Date: "02/26/2016", }, "ETI": PNPID{ ID: "ETI", Company: "ECLIPSE TECH INC", Date: "11/29/1996", }, "ESC": PNPID{ ID: "ESC", Company: "EDEN SISTEMAS DE COMPUTACAO S/A", Date: "11/29/1996", }, "EDI": PNPID{ ID: "EDI", Company: "EDIMAX TECH. COMPANY LTD", Date: "11/29/1996", }, "EDM": PNPID{ ID: "EDM", Company: "EDMI", Date: "07/16/1998", }, "ELI": PNPID{ ID: "ELI", Company: "EDSUN LABORATORIES", Date: "11/29/1996", }, "EES": PNPID{ ID: "EES", Company: "EE SOLUTIONS, INC.", Date: "04/16/2003", }, "EEH": PNPID{ ID: "EEH", Company: "EEH DATALINK GMBH", Date: "07/03/1997", }, "ENI": PNPID{ ID: "ENI", Company: "EFFICIENT NETWORKS", Date: "11/29/1996", }, "EGN": PNPID{ ID: "EGN", Company: "EGENERA, INC.", Date: "10/08/2002", }, "EIC": PNPID{ ID: "EIC", Company: "EICON TECHNOLOGY CORPORATION", Date: "11/29/1996", }, "EGD": PNPID{ ID: "EGD", Company: "EIZO GMBH DISPLAY TECHNOLOGIES", Date: "02/13/2009", }, "ENC": PNPID{ ID: "ENC", Company: "EIZO NANAO CORPORATION", Date: "12/28/1998", }, "ERS": PNPID{ ID: "ERS", Company: "EIZO RUGGED SOLUTIONS", Date: "05/25/2016", }, "EKS": PNPID{ ID: "EKS", Company: "EKSEN YAZILIM", Date: "04/25/2002", }, "LPE": PNPID{ ID: "LPE", Company: "EL-PUSK CO., LTD.", Date: "08/14/2001", }, "ELA": PNPID{ ID: "ELA", Company: "ELAD SRL", Date: "04/25/2002", }, "ETD": PNPID{ ID: "ETD", Company: "ELAN MICROELECTRONICS CORPORATION", Date: "11/03/2009", }, "TSH": PNPID{ ID: "TSH", 
Company: "ELAN MICROELECTRONICS CORPORATION", Date: "11/14/2014", }, "ESA": PNPID{ ID: "ESA", Company: "ELBIT SYSTEMS OF AMERICA", Date: "06/15/2009", }, "ESG": PNPID{ ID: "ESG", Company: "ELCON SYSTEMTECHNIK GMBH", Date: "07/16/1999", }, "LXS": PNPID{ ID: "LXS", Company: "ELEA CARDWARE", Date: "06/25/1998", }, "ECP": PNPID{ ID: "ECP", Company: "ELECOM COMPANY LTD", Date: "11/29/1996", }, "ELE": PNPID{ ID: "ELE", Company: "ELECOM COMPANY LTD", Date: "11/29/1996", }, "ECA": PNPID{ ID: "ECA", Company: "ELECTRO CAM CORP.", Date: "08/10/2000", }, "ELC": PNPID{ ID: "ELC", Company: "ELECTRO SCIENTIFIC IND", Date: "11/29/1996", }, "MMM": PNPID{ ID: "MMM", Company: "ELECTRONIC MEASUREMENTS", Date: "11/29/1996", }, "ETS": PNPID{ ID: "ETS", Company: "ELECTRONIC TRADE SOLUTIONS LTD", Date: "08/20/2002", }, "EDG": PNPID{ ID: "EDG", Company: "ELECTRONIC-DESIGN GMBH", Date: "08/12/1997", }, "ELL": PNPID{ ID: "ELL", Company: "ELECTROSONIC LTD", Date: "09/13/1999", }, "EIN": PNPID{ ID: "EIN", Company: "ELEGANT INVENTION", Date: "03/29/2018", }, "ELT": PNPID{ ID: "ELT", Company: "ELEMENT LABS, INC.", Date: "10/11/2007", }, "EGA": PNPID{ ID: "EGA", Company: "ELGATO SYSTEMS LLC", Date: "02/08/2011", }, "ECS": PNPID{ ID: "ECS", Company: "ELITEGROUP COMPUTER SYSTEMS COMPANY LTD", Date: "11/29/1996", }, "UEG": PNPID{ ID: "UEG", Company: "ELITEGROUP COMPUTER SYSTEMS COMPANY LTD", Date: "11/29/1996", }, "ELG": PNPID{ ID: "ELG", Company: "ELMEG GMBH KOMMUNIKATIONSTECHNIK", Date: "11/29/1996", }, "ELM": PNPID{ ID: "ELM", Company: "ELMIC SYSTEMS INC", Date: "11/29/1996", }, "EMO": PNPID{ ID: "EMO", Company: "ELMO COMPANY, LIMITED", Date: "06/26/2012", }, "ELO": PNPID{ ID: "ELO", Company: "ELO TOUCHSYSTEMS INC", Date: "11/29/1996", }, "ELX": PNPID{ ID: "ELX", Company: "ELONEX PLC", Date: "11/29/1996", }, "ELS": PNPID{ ID: "ELS", Company: "ELSA GMBH", Date: "11/29/1996", }, "EAG": PNPID{ ID: "EAG", Company: "ELTEC ELEKTRONIK AG", Date: "11/25/2014", }, "EMB": PNPID{ ID: "EMB", Company: 
"EMBEDDED COMPUTING INC LTD", Date: "02/25/2002", }, "EST": PNPID{ ID: "EST", Company: "EMBEDDED SOLUTION TECHNOLOGY", Date: "05/24/2000", }, "EMD": PNPID{ ID: "EMD", Company: "EMBRIONIX DESIGN INC.", Date: "07/24/2013", }, "EMK": PNPID{ ID: "EMK", Company: "EMCORE CORPORATION", Date: "05/31/2012", }, "EDT": PNPID{ ID: "EDT", Company: "EMERGING DISPLAY TECHNOLOGIES CORP", Date: "08/18/2009", }, "EMG": PNPID{ ID: "EMG", Company: "EMG CONSULTANTS INC", Date: "11/29/1996", }, "EMC": PNPID{ ID: "EMC", Company: "EMICRO CORPORATION", Date: "01/01/1994", }, "EME": PNPID{ ID: "EME", Company: "EMINE TECHNOLOGY COMPANY, LTD.", Date: "06/16/2005", }, "EPC": PNPID{ ID: "EPC", Company: "EMPAC", Date: "12/04/1996", }, "EMU": PNPID{ ID: "EMU", Company: "EMULEX CORPORATION", Date: "11/29/1996", }, "ECI": PNPID{ ID: "ECI", Company: "ENCIRIS TECHNOLOGIES", Date: "11/01/2008", }, "ECT": PNPID{ ID: "ECT", Company: "ENCIRIS TECHNOLOGIES", Date: "11/01/2008", }, "ENE": PNPID{ ID: "ENE", Company: "ENE TECHNOLOGY INC.", Date: "03/15/2001", }, "EHN": PNPID{ ID: "EHN", Company: "ENHANSOFT", Date: "11/16/2010", }, "END": PNPID{ ID: "END", Company: "ENIDAN TECHNOLOGIES LTD", Date: "04/19/2000", }, "ESD": PNPID{ ID: "ESD", Company: "ENSEMBLE DESIGNS, INC", Date: "12/09/2009", }, "ENS": PNPID{ ID: "ENS", Company: "ENSONIQ CORPORATION", Date: "11/29/1996", }, "ENT": PNPID{ ID: "ENT", Company: "ENTERPRISE COMM. & COMPUTING INC", Date: "11/29/1996", }, "EPI": PNPID{ ID: "EPI", Company: "ENVISION PERIPHERALS, INC", Date: "02/22/1999", }, "EON": PNPID{ ID: "EON", Company: "EON INSTRUMENTATION, INC.", Date: "01/15/2015", }, "EPN": PNPID{ ID: "EPN", Company: "EPICON INC.", Date: "09/23/1998", }, "EPH ": PNPID{ ID: "EPH ", Company: "EPIPHAN SYSTEMS INC. 
", Date: "03/14/2011", }, "EHJ": PNPID{ ID: "EHJ", Company: "EPSON RESEARCH", Date: "11/29/1996", }, "EQX": PNPID{ ID: "EQX", Company: "EQUINOX SYSTEMS INC", Date: "11/29/1996", }, "EQP": PNPID{ ID: "EQP", Company: "EQUIPE ELECTRONICS LTD.", Date: "07/14/2005", }, "EGO": PNPID{ ID: "EGO", Company: "ERGO ELECTRONICS", Date: "11/29/1996", }, "ERG": PNPID{ ID: "ERG", Company: "ERGO SYSTEM", Date: "11/29/1996", }, "ERI": PNPID{ ID: "ERI", Company: "ERICSSON MOBILE COMMUNICATIONS AB", Date: "10/22/1997", }, "EUT": PNPID{ ID: "EUT", Company: "ERICSSON MOBILE NETWORKS B.V.", Date: "04/14/1998", }, "ERN": PNPID{ ID: "ERN", Company: "ERICSSON, INC.", Date: "09/23/1998", }, "ESK": PNPID{ ID: "ESK", Company: "ES&S", Date: "11/08/1999", }, "ESN": PNPID{ ID: "ESN", Company: "ESATURNUS", Date: "02/21/2012", }, "ERT": PNPID{ ID: "ERT", Company: "ESCORT INSTURMENTS CORPORATION", Date: "05/02/1997", }, "ESS": PNPID{ ID: "ESS", Company: "ESS TECHNOLOGY INC", Date: "11/29/1996", }, "ECC": PNPID{ ID: "ECC", Company: "ESSENTIAL COMM. 
CORPORATION", Date: "11/29/1996", }, "ESB": PNPID{ ID: "ESB", Company: "ESTERLINE BELGIUM BVBA", Date: "01/15/2015", }, "ESL": PNPID{ ID: "ESL", Company: "ESTERLINE TECHNOLOGIES", Date: "01/06/2012", }, "EEE": PNPID{ ID: "EEE", Company: "ET&T TECHNOLOGY COMPANY LTD", Date: "05/04/1998", }, "ETK": PNPID{ ID: "ETK", Company: "ETEK LABS INC.", Date: "07/16/1998", }, "ETH": PNPID{ ID: "ETH", Company: "ETHERBOOT PROJECT", Date: "07/09/2010", }, "ECK": PNPID{ ID: "ECK", Company: "EUGENE CHUKHLOMIN SOLE PROPRIETORSHIP, D.B.A.", Date: "05/03/2008", }, "ERP": PNPID{ ID: "ERP", Company: "EURAPLAN GMBH", Date: "11/29/1996", }, "EAS": PNPID{ ID: "EAS", Company: "EVANS AND SUTHERLAND COMPUTER", Date: "01/28/2003", }, "EVX": PNPID{ ID: "EVX", Company: "EVEREX", Date: "11/29/1996", }, "ETC": PNPID{ ID: "ETC", Company: "EVERTON TECHNOLOGY COMPANY LTD", Date: "04/10/1997", }, "ETL": PNPID{ ID: "ETL", Company: "EVERTZ MICROSYSTEMS LTD.", Date: "06/14/2007", }, "EVI": PNPID{ ID: "EVI", Company: "EVIATEG GMBH", Date: "02/21/2000", }, "EMI": PNPID{ ID: "EMI", Company: "EX MACHINA INC", Date: "11/29/1996", }, "EXA": PNPID{ ID: "EXA", Company: "EXABYTE", Date: "11/29/1996", }, "YHW": PNPID{ ID: "YHW", Company: "EXACOM SA", Date: "11/29/1996", }, "EXT": PNPID{ ID: "EXT", Company: "EXATECH COMPUTADORES & SERVICOS LTDA", Date: "09/23/1998", }, "ECL": PNPID{ ID: "ECL", Company: "EXCEL COMPANY LTD", Date: "05/27/1997", }, "EXC": PNPID{ ID: "EXC", Company: "EXCESSION AUDIO", Date: "11/06/1998", }, "XFO": PNPID{ ID: "XFO", Company: "EXFO ELECTRO OPTICAL ENGINEERING", Date: "04/29/1998", }, "EXI": PNPID{ ID: "EXI", Company: "EXIDE ELECTRONICS", Date: "11/29/1996", }, "EXR": PNPID{ ID: "EXR", Company: "EXPLORER INC.", Date: "11/18/2015", }, "ELU": PNPID{ ID: "ELU", Company: "EXPRESS INDUSTRIAL, LTD.", Date: "09/10/2015", }, "ELD": PNPID{ ID: "ELD", Company: "EXPRESS LUCK, INC.", Date: "10/22/2019", }, "ESI": PNPID{ ID: "ESI", Company: "EXTENDED SYSTEMS, INC.", Date: "07/16/1999", }, "EXY": PNPID{ 
ID: "EXY", Company: "EXTERITY LTD", Date: "02/12/2009", }, "CRO": PNPID{ ID: "CRO", Company: "EXTRAORDINARY TECHNOLOGIES PTY LIMITED", Date: "04/11/2005", }, "XES": PNPID{ ID: "XES", Company: "EXTREME ENGINEERING SOLUTIONS, INC.", Date: "06/22/2017", }, "EXX": PNPID{ ID: "EXX", Company: "EXXACT GMBH", Date: "11/29/1996", }, "EYF": PNPID{ ID: "EYF", Company: "EYEFACTIVE GMBH", Date: "07/07/2015", }, "EYE": PNPID{ ID: "EYE", Company: "EYEVIS GMBH", Date: "11/18/2011", }, "EZE": PNPID{ ID: "EZE", Company: "EZE TECHNOLOGIES", Date: "02/21/2005", }, "FJT": PNPID{ ID: "FJT", Company: "F.J. TIEMAN BV", Date: "06/25/1998", }, "FFI": PNPID{ ID: "FFI", Company: "FAIRFIELD INDUSTRIES", Date: "11/29/1996", }, "FAN": PNPID{ ID: "FAN", Company: "FANTALOOKS CO., LTD.", Date: "03/12/2014", }, "FNC": PNPID{ ID: "FNC", Company: "FANUC LTD", Date: "01/29/1997", }, "FAR": PNPID{ ID: "FAR", Company: "FARALLON COMPUTING", Date: "11/29/1996", }, "FRO": PNPID{ ID: "FRO", Company: "FARO TECHNOLOGIES", Date: "09/21/2012", }, "FLI": PNPID{ ID: "FLI", Company: "FAROUDJA LABORATORIES", Date: "06/02/2004", }, "FMA": PNPID{ ID: "FMA", Company: "FAST MULTIMEDIA AG", Date: "11/29/1996", }, "FTI": PNPID{ ID: "FTI", Company: "FASTPOINT TECHNOLOGIES, INC.", Date: "06/21/2001", }, "FIT": PNPID{ ID: "FIT", Company: "FEATURE INTEGRATION TECHNOLOGY INC.", Date: "08/11/2009", }, "FEL": PNPID{ ID: "FEL", Company: "FELLOWES & QUESTEC", Date: "11/29/1996", }, "FMI": PNPID{ ID: "FMI", Company: "FELLOWES, INC.", Date: "07/05/2001", }, "FEN": PNPID{ ID: "FEN", Company: "FEN SYSTEMS LTD.", Date: "05/04/2010", }, "FER": PNPID{ ID: "FER", Company: "FERRANTI INT'L", Date: "11/29/1996", }, "TLA": PNPID{ ID: "TLA", Company: "FERRARI ELECTRONIC GMBH", Date: "12/04/1996", }, "FHL": PNPID{ ID: "FHL", Company: "FHLP", Date: "11/29/1996", }, "FRI": PNPID{ ID: "FRI", Company: "FIBERNET RESEARCH INC", Date: "11/29/1996", }, "FDX": PNPID{ ID: "FDX", Company: "FINDEX, INC.", Date: "10/22/2019", }, "FIN": PNPID{ ID: "FIN", 
Company: "FINECOM CO., LTD.", Date: "11/27/1998", }, "FPC": PNPID{ ID: "FPC", Company: "FINGERPRINT CARDS AB", Date: "06/14/2013", }, "PCG": PNPID{ ID: "PCG", Company: "FIRST INDUSTRIAL COMPUTER INC", Date: "11/29/1996", }, "LEO": PNPID{ ID: "LEO", Company: "FIRST INTERNATIONAL COMPUTER INC", Date: "09/19/1997", }, "FCG": PNPID{ ID: "FCG", Company: "FIRST INTERNATIONAL COMPUTER LTD", Date: "04/10/1997", }, "FVC": PNPID{ ID: "FVC", Company: "FIRST VIRTUAL CORPORATION", Date: "11/29/1996", }, "FWR": PNPID{ ID: "FWR", Company: "FLAT CONNECTIONS INC", Date: "11/29/1996", }, "SSD": PNPID{ ID: "SSD", Company: "FLIGHTSAFETY INTERNATIONAL", Date: "08/10/2000", }, "FIS": PNPID{ ID: "FIS", Company: "FLY-IT SIMULATORS", Date: "09/08/1997", }, "FTS": PNPID{ ID: "FTS", Company: "FOCALTECH SYSTEMS CO., LTD.", Date: "07/23/2013", }, "FCS": PNPID{ ID: "FCS", Company: "FOCUS ENHANCEMENTS, INC.", Date: "12/12/2002", }, "FOK": PNPID{ ID: "FOK", Company: "FOKUS TECHNOLOGIES GMBH", Date: "10/22/2013", }, "FOA": PNPID{ ID: "FOA", Company: "FOR-A COMPANY LIMITED", Date: "12/06/2008", }, "FRC": PNPID{ ID: "FRC", Company: "FORCE COMPUTERS", Date: "11/29/1996", }, "FMC": PNPID{ ID: "FMC", Company: "FORD MICROELECTRONICS INC", Date: "03/11/1997", }, "FSI": PNPID{ ID: "FSI", Company: "FORE SYSTEMS INC", Date: "11/29/1996", }, "FIL": PNPID{ ID: "FIL", Company: "FOREFRONT INT'L LTD", Date: "11/29/1996", }, "FIC": PNPID{ ID: "FIC", Company: "FORMOSA INDUSTRIAL COMPUTING INC", Date: "11/29/1996", }, "FMZ": PNPID{ ID: "FMZ", Company: "FORMOZA-ALTAIR", Date: "04/25/2003", }, "FDD": PNPID{ ID: "FDD", Company: "FORTH DIMENSION DISPLAYS LTD", Date: "07/07/2015", }, "FRE": PNPID{ ID: "FRE", Company: "FORVUS RESEARCH INC", Date: "04/24/1997", }, "FOS": PNPID{ ID: "FOS", Company: "FOSS TECATOR", Date: "10/22/1997", }, "FZC": PNPID{ ID: "FZC", Company: "FOUNDER GROUP SHENZHEN CO.", Date: "11/08/1999", }, "FTN": PNPID{ ID: "FTN", Company: "FOUNTAIN TECHNOLOGIES INC", Date: "11/29/1996", }, "FOV": PNPID{ 
ID: "FOV", Company: "FOVE INC", Date: "07/01/2016", }, "HHI": PNPID{ ID: "HHI", Company: "FRAUNHOFER HEINRICH-HERTZ-INSTITUTE", Date: "07/27/2012", }, "FRD": PNPID{ ID: "FRD", Company: "FREEDOM SCIENTIFIC BLV", Date: "06/15/2007", }, "TCX": PNPID{ ID: "TCX", Company: "FREEMARS HEAVY INDUSTRIES", Date: "03/15/2001", }, "FTE": PNPID{ ID: "FTE", Company: "FRONTLINE TEST EQUIPMENT INC.", Date: "01/20/1999", }, "FTG": PNPID{ ID: "FTG", Company: "FTG DATA SYSTEMS", Date: "11/29/1996", }, "FXX": PNPID{ ID: "FXX", Company: "FUJI XEROX", Date: "11/29/1996", }, "FFC": PNPID{ ID: "FFC", Company: "FUJIFILM CORPORATION", Date: "08/22/2011", }, "FDT": PNPID{ ID: "FDT", Company: "FUJITSU DISPLAY TECHNOLOGIES CORP.", Date: "10/23/2002", }, "FGL": PNPID{ ID: "FGL", Company: "FUJITSU GENERAL LIMITED.", Date: "02/21/2000", }, "FUJ": PNPID{ ID: "FUJ", Company: "FUJITSU LTD", Date: "11/29/1996", }, "FML": PNPID{ ID: "FML", Company: "FUJITSU MICROELECT LTD", Date: "11/29/1996", }, "FPE": PNPID{ ID: "FPE", Company: "FUJITSU PERIPHERALS LTD", Date: "08/19/1997", }, "FUS": PNPID{ ID: "FUS", Company: "FUJITSU SIEMENS COMPUTERS GMBH", Date: "01/13/2000", }, "FJS": PNPID{ ID: "FJS", Company: "FUJITSU SPAIN", Date: "11/29/1996", }, "FJC": PNPID{ ID: "FJC", Company: "FUJITSU TAKAMISAWA COMPONENT LIMITED", Date: "05/16/1999", }, "FTL": PNPID{ ID: "FTL", Company: "FUJITSU TEN LIMITED", Date: "12/20/2011", }, "FNI": PNPID{ ID: "FNI", Company: "FUNAI ELECTRIC CO., LTD.", Date: "01/18/2005", }, "FCB": PNPID{ ID: "FCB", Company: "FURUKAWA ELECTRIC COMPANY LTD", Date: "11/29/1996", }, "FEC": PNPID{ ID: "FEC", Company: "FURUNO ELECTRIC CO., LTD.", Date: "11/29/1996", }, "FDI": PNPID{ ID: "FDI", Company: "FUTURE DESIGNS, INC.", Date: "09/29/2014", }, "FDC": PNPID{ ID: "FDC", Company: "FUTURE DOMAIN", Date: "11/29/1996", }, "FSC": PNPID{ ID: "FSC", Company: "FUTURE SYSTEMS CONSULTING KK", Date: "11/29/1996", }, "FTC": PNPID{ ID: "FTC", Company: "FUTURETOUCH CORPORATION", Date: "11/29/1996", }, "FZI": 
PNPID{ ID: "FZI", Company: "FZI FORSCHUNGSZENTRUM INFORMATIK", Date: "08/12/1997", }, "SPH": PNPID{ ID: "SPH", Company: "G&W INSTRUMENTS GMBH", Date: "02/25/2002", }, "GTK": PNPID{ ID: "GTK", Company: "G-TECH CORPORATION", Date: "11/29/1996", }, "GDI": PNPID{ ID: "GDI", Company: "G. DIEHL ISDN GMBH", Date: "11/29/1996", }, "GGT": PNPID{ ID: "GGT", Company: "G2TOUCH KOREA", Date: "05/25/2017", }, "GLS": PNPID{ ID: "GLS", Company: "GADGET LABS LLC", Date: "11/29/1996", }, "GAG": PNPID{ ID: "GAG", Company: "GAGE APPLIED SCIENCES INC", Date: "11/29/1996", }, "HUB": PNPID{ ID: "HUB", Company: "GAI-TRONICS, A HUBBELL COMPANY", Date: "03/26/2009", }, "GAL": PNPID{ ID: "GAL", Company: "GALIL MOTION CONTROL", Date: "11/29/1996", }, "GRM": PNPID{ ID: "GRM", Company: "GARMIN INTERNATIONAL", Date: "12/09/2011", }, "GTM": PNPID{ ID: "GTM", Company: "GARNET SYSTEM COMPANY LTD", Date: "11/29/1996", }, "GWY": PNPID{ ID: "GWY", Company: "GATEWAY 2000", Date: "11/29/1996", }, "GCI": PNPID{ ID: "GCI", Company: "GATEWAY COMM. 
INC", Date: "11/29/1996", }, "GWK": PNPID{ ID: "GWK", Company: "GATEWORKS CORPORATION", Date: "07/31/2013", }, "GAU": PNPID{ ID: "GAU", Company: "GAUDI CO., LTD.", Date: "03/31/2003", }, "GCC": PNPID{ ID: "GCC", Company: "GCC TECHNOLOGIES INC", Date: "06/05/1997", }, "GDS": PNPID{ ID: "GDS", Company: "GDS", Date: "06/23/2004", }, "GEF": PNPID{ ID: "GEF", Company: "GE FANUC EMBEDDED SYSTEMS", Date: "06/14/2007", }, "GEC": PNPID{ ID: "GEC", Company: "GECHIC CORPORATION", Date: "01/04/2016", }, "GFN": PNPID{ ID: "GFN", Company: "GEFEN INC.", Date: "10/11/2007", }, "GEM": PNPID{ ID: "GEM", Company: "GEM PLUS", Date: "02/27/1998", }, "GMN": PNPID{ ID: "GMN", Company: "GEMINI 2000 LTD", Date: "10/23/2000", }, "GDC": PNPID{ ID: "GDC", Company: "GENERAL DATACOM", Date: "11/29/1996", }, "GED": PNPID{ ID: "GED", Company: "GENERAL DYNAMICS C4 SYSTEMS", Date: "01/09/2013", }, "GML": PNPID{ ID: "GML", Company: "GENERAL INFORMATION SYSTEMS", Date: "01/13/2000", }, "GIC": PNPID{ ID: "GIC", Company: "GENERAL INST. 
CORPORATION", Date: "11/29/1996", }, "GSC": PNPID{ ID: "GSC", Company: "GENERAL STANDARDS CORPORATION", Date: "07/16/1998", }, "GTT": PNPID{ ID: "GTT", Company: "GENERAL TOUCH TECHNOLOGY CO., LTD.", Date: "11/21/2002", }, "GEN": PNPID{ ID: "GEN", Company: "GENESYS ATE INC", Date: "11/29/1996", }, "GLM": PNPID{ ID: "GLM", Company: "GENESYS LOGIC", Date: "11/08/1999", }, "GND": PNPID{ ID: "GND", Company: "GENNUM CORPORATION", Date: "09/05/2006", }, "GEO": PNPID{ ID: "GEO", Company: "GEO SENSE", Date: "11/29/1996", }, "GTS": PNPID{ ID: "GTS", Company: "GEOTEST MARVIN TEST SYSTEMS INC", Date: "02/24/1998", }, "GER": PNPID{ ID: "GER", Company: "GERMANEERS GMBH", Date: "12/20/2011", }, "GES": PNPID{ ID: "GES", Company: "GES SINGAPORE PTE LTD", Date: "03/15/2001", }, "GET": PNPID{ ID: "GET", Company: "GETAC TECHNOLOGY CORPORATION", Date: "05/11/2010", }, "GFM": PNPID{ ID: "GFM", Company: "GFMESSTECHNIK GMBH", Date: "03/15/2001", }, "GIP": PNPID{ ID: "GIP", Company: "GI PROVISION LTD", Date: "02/08/2012", }, "GBT": PNPID{ ID: "GBT", Company: "GIGA-BYTE TECHNOLOGY CO., LTD.", Date: "09/05/2018", }, "PST": PNPID{ ID: "PST", Company: "GLOBAL DATA SA", Date: "11/29/1996", }, "GVL": PNPID{ ID: "GVL", Company: "GLOBAL VILLAGE COMMUNICATION", Date: "11/29/1996", }, "GMK": PNPID{ ID: "GMK", Company: "GMK ELECTRONIC DESIGN GMBH", Date: "01/18/2008", }, "GMM": PNPID{ ID: "GMM", Company: "GMM RESEARCH INC", Date: "11/29/1996", }, "GMX": PNPID{ ID: "GMX", Company: "GMX INC", Date: "11/29/1996", }, "GNN": PNPID{ ID: "GNN", Company: "GN NETTEST INC", Date: "07/30/1997", }, "GOE": PNPID{ ID: "GOE", Company: "GOEPEL ELECTRONIC GMBH", Date: "06/24/2013", }, "GRE": PNPID{ ID: "GRE", Company: "GOLD RAIN ENTERPRISES CORP.", Date: "06/04/2003", }, "GLD": PNPID{ ID: "GLD", Company: "GOLDMUND - DIGITAL AUDIO SA", Date: "02/06/2012", }, "GSM": PNPID{ ID: "GSM", Company: "GOLDSTAR COMPANY LTD", Date: "11/29/1996", }, "GTI": PNPID{ ID: "GTI", Company: "GOLDTOUCH", Date: "08/06/1997", }, "GGL": 
PNPID{ ID: "GGL", Company: "GOOGLE INC.", Date: "05/26/2010", }, "GPR": PNPID{ ID: "GPR", Company: "GOPRO, INC.", Date: "01/15/2015", }, "GRH": PNPID{ ID: "GRH", Company: "GRANCH LTD", Date: "09/23/2002", }, "GJN": PNPID{ ID: "GJN", Company: "GRAND JUNCTION NETWORKS", Date: "11/29/1996", }, "GSN": PNPID{ ID: "GSN", Company: "GRANDSTREAM NETWORKS, INC.", Date: "03/03/2014", }, "GST": PNPID{ ID: "GST", Company: "GRAPHIC SYSTEMTECHNOLOGY", Date: "11/29/1996", }, "GRA": PNPID{ ID: "GRA", Company: "GRAPHICA COMPUTER", Date: "11/29/1996", }, "GTC": PNPID{ ID: "GTC", Company: "GRAPHTEC CORPORATION", Date: "11/29/1996", }, "TGV": PNPID{ ID: "TGV", Company: "GRASS VALLEY GERMANY GMBH", Date: "06/14/2007", }, "GAC": PNPID{ ID: "GAC", Company: "GREENARRAYS, INC.", Date: "11/18/2015", }, "GCS": PNPID{ ID: "GCS", Company: "GREY CELL SYSTEMS LTD", Date: "04/29/1997", }, "GSY": PNPID{ ID: "GSY", Company: "GROSSENBACHER SYSTEME AG", Date: "04/19/2000", }, "SWO": PNPID{ ID: "SWO", Company: "GUANGZHOU SHIRUI ELECTRONICS CO., LTD.", Date: "10/16/2015", }, "SKM": PNPID{ ID: "SKM", Company: "GUANGZHOU TECLAST INFORMATION TECHNOLOGY LIMITED", Date: "11/18/2015", }, "GIM": PNPID{ ID: "GIM", Company: "GUILLEMONT INTERNATIONAL", Date: "10/29/1997", }, "GUD": PNPID{ ID: "GUD", Company: "GUNTERMANN & DRUNCK GMBH", Date: "03/10/2003", }, "GZE": PNPID{ ID: "GZE", Company: "GUNZE LIMITED", Date: "05/02/2005", }, "GNZ": PNPID{ ID: "GNZ", Company: "GUNZE LTD", Date: "11/29/1996", }, "GUZ": PNPID{ ID: "GUZ", Company: "GUZIK TECHNICAL ENTERPRISES", Date: "11/29/1996", }, "GVC": PNPID{ ID: "GVC", Company: "GVC CORPORATION", Date: "11/29/1996", }, "GWI": PNPID{ ID: "GWI", Company: "GW INSTRUMENTS", Date: "11/29/1996", }, "HPR": PNPID{ ID: "HPR", Company: "H.P.R. 
ELECTRONICS GMBH", Date: "08/29/2007", }, "HSC": PNPID{ ID: "HSC", Company: "HAGIWARA SYS-COM COMPANY LTD", Date: "11/29/1996", }, "HAE": PNPID{ ID: "HAE", Company: "HAIDER ELECTRONICS", Date: "07/05/2001", }, "HAI": PNPID{ ID: "HAI", Company: "HAIVISION SYSTEMS INC.", Date: "11/15/2007", }, "HAL": PNPID{ ID: "HAL", Company: "HALBERTHAL", Date: "02/10/1998", }, "HRI": PNPID{ ID: "HRI", Company: "HALL RESEARCH", Date: "05/10/2012", }, "HPK": PNPID{ ID: "HPK", Company: "HAMAMATSU PHOTONICS K.K.", Date: "12/20/2006", }, "HTI": PNPID{ ID: "HTI", Company: "HAMPSHIRE COMPANY, INC.", Date: "01/20/1999", }, "HAN": PNPID{ ID: "HAN", Company: "HANCHANG SYSTEM CORPORATION", Date: "06/21/2003", }, "ZMC": PNPID{ ID: "ZMC", Company: "HANGZHOU ZMCHIVIN", Date: "10/16/2015", }, "HSD": PNPID{ ID: "HSD", Company: "HANNSTAR DISPLAY CORP", Date: "08/11/2009", }, "HSP": PNPID{ ID: "HSP", Company: "HANNSTAR DISPLAY CORP", Date: "08/11/2009", }, "HDC": PNPID{ ID: "HDC", Company: "HARDCOM ELEKTRONIK & DATATEKNIK", Date: "04/14/1998", }, "HII": PNPID{ ID: "HII", Company: "HARMAN INTERNATIONAL INDUSTRIES, INC", Date: "01/09/2015", }, "HJI": PNPID{ ID: "HJI", Company: "HARRIS & JEFFRIES INC", Date: "11/29/1996", }, "HWA": PNPID{ ID: "HWA", Company: "HARRIS CANADA INC", Date: "03/13/1998", }, "HAR": PNPID{ ID: "HAR", Company: "HARRIS CORPORATION", Date: "12/20/2011", }, "HRS": PNPID{ ID: "HRS", Company: "HARRIS SEMICONDUCTOR", Date: "01/02/1997", }, "HCW": PNPID{ ID: "HCW", Company: "HAUPPAUGE COMPUTER WORKS INC", Date: "11/29/1996", }, "HAY": PNPID{ ID: "HAY", Company: "HAYES MICROCOMPUTER PRODUCTS INC", Date: "11/29/1996", }, "HCL": PNPID{ ID: "HCL", Company: "HCL AMERICA INC", Date: "11/29/1996", }, "HCM": PNPID{ ID: "HCM", Company: "HCL PERIPHERALS", Date: "10/02/2001", }, "HDI": PNPID{ ID: "HDI", Company: "HD-INFO D.O.O.", Date: "10/08/2001", }, "HPI": PNPID{ ID: "HPI", Company: "HEADPLAY, INC.", Date: "04/30/2007", }, "HYT": PNPID{ ID: "HYT", Company: "HENG YU TECHNOLOGY (HK) LIMITED", 
Date: "10/23/2000", }, "HRC": PNPID{ ID: "HRC", Company: "HERCULES", Date: "03/15/2001", }, "HRT": PNPID{ ID: "HRT", Company: "HERCULES", Date: "03/15/2001", }, "HRL": PNPID{ ID: "HRL", Company: "HEROLAB GMBH", Date: "03/17/1998", }, "HET": PNPID{ ID: "HET", Company: "HETEC DATENSYSTEME GMBH", Date: "02/03/2004", }, "HWP": PNPID{ ID: "HWP", Company: "HEWLETT PACKARD", Date: "03/15/2001", }, "HPD": PNPID{ ID: "HPD", Company: "HEWLETT PACKARD", Date: "05/02/1997", }, "HPE": PNPID{ ID: "HPE", Company: "HEWLETT PACKARD ENTERPRISE", Date: "09/22/2015", }, "HPC": PNPID{ ID: "HPC", Company: "HEWLETT-PACKARD CO.", Date: "08/10/2000", }, "HPQ": PNPID{ ID: "HPQ", Company: "HEWLETT-PACKARD CO.", Date: "07/12/2004", }, "HXM": PNPID{ ID: "HXM", Company: "HEXIUM LTD.", Date: "04/15/2008", }, "HIB": PNPID{ ID: "HIB", Company: "HIBINO CORPORATION", Date: "07/09/2003", }, "HWD": PNPID{ ID: "HWD", Company: "HIGHWATER DESIGNS LTD", Date: "11/29/1996", }, "HIK": PNPID{ ID: "HIK", Company: "HIKOM CO., LTD.", Date: "10/13/2003", }, "HIL": PNPID{ ID: "HIL", Company: "HILEVEL TECHNOLOGY", Date: "11/29/1996", }, "HHC": PNPID{ ID: "HHC", Company: "HIRAKAWA HEWTECH CORP.", Date: "05/20/2008", }, "HEC": PNPID{ ID: "HEC", Company: "HISENSE ELECTRIC CO., LTD.", Date: "01/01/1994", }, "HIT": PNPID{ ID: "HIT", Company: "HITACHI AMERICA LTD", Date: "11/29/1996", }, "HCP": PNPID{ ID: "HCP", Company: "HITACHI COMPUTER PRODUCTS INC", Date: "11/29/1996", }, "HCE": PNPID{ ID: "HCE", Company: "HITACHI CONSUMER ELECTRONICS CO., LTD", Date: "05/15/2009", }, "HIC": PNPID{ ID: "HIC", Company: "HITACHI INFORMATION TECHNOLOGY CO., LTD.", Date: "04/19/2000", }, "HTC": PNPID{ ID: "HTC", Company: "HITACHI LTD", Date: "11/29/1996", }, "MXL": PNPID{ ID: "MXL", Company: "HITACHI MAXELL, LTD.", Date: "01/13/2000", }, "HEL": PNPID{ ID: "HEL", Company: "HITACHI MICRO SYSTEMS EUROPE LTD", Date: "07/09/1997", }, "HTX": PNPID{ ID: "HTX", Company: "HITEX SYSTEMENTWICKLUNG GMBH", Date: "01/30/1998", }, "HKC": PNPID{ ID: 
"HKC", Company: "HKC OVERSEAS LIMITED", Date: "03/30/2016", }, "HMK": PNPID{ ID: "HMK", Company: "HMK DATEN-SYSTEM-TECHNIK BMBH", Date: "09/30/1997", }, "HOB": PNPID{ ID: "HOB", Company: "HOB ELECTRONIC GMBH", Date: "11/29/1996", }, "HUK": PNPID{ ID: "HUK", Company: "HOFFMANN + KRIPPNER GMBH", Date: "07/01/2016", }, "HOL": PNPID{ ID: "HOL", Company: "HOLOEYE PHOTONICS AG", Date: "02/02/2005", }, "HDV": PNPID{ ID: "HDV", Company: "HOLOGRAFIKA KFT.", Date: "03/31/2005", }, "HTK": PNPID{ ID: "HTK", Company: "HOLTEK MICROELECTRONICS INC", Date: "11/29/1996", }, "INC": PNPID{ ID: "INC", Company: "HOME ROW INC", Date: "11/29/1996", }, "FOX": PNPID{ ID: "FOX", Company: "HON HAI PRECISON IND.CO.,LTD.", Date: "08/02/2010", }, "HKA": PNPID{ ID: "HKA", Company: "HONKO MFG. CO., LTD.", Date: "12/01/2004", }, "HIS": PNPID{ ID: "HIS", Company: "HOPE INDUSTRIAL SYSTEMS, INC.", Date: "01/13/2014", }, "APG": PNPID{ ID: "APG", Company: "HORNER ELECTRIC INC", Date: "11/29/1996", }, "HST": PNPID{ ID: "HST", Company: "HORSENT TECHNOLOGY CO., LTD.", Date: "04/11/2015", }, "HOE": PNPID{ ID: "HOE", Company: "HOSIDEN CORPORATION", Date: "08/05/1997", }, "PNT": PNPID{ ID: "PNT", Company: "HOYA CORPORATION PENTAX LIFECARE DIVISION", Date: "05/25/2017", }, "HPN": PNPID{ ID: "HPN", Company: "HP INC.", Date: "12/21/2015", }, "HTL": PNPID{ ID: "HTL", Company: "HTBLUVA MÖDLING", Date: "02/17/2014", }, "HVR": PNPID{ ID: "HVR", Company: "HTC CORPORTATION", Date: "10/16/2015", }, "HMC": PNPID{ ID: "HMC", Company: "HUALON MICROELECTRIC CORPORATION", Date: "11/29/1996", }, "EBT": PNPID{ ID: "EBT", Company: "HUALONG TECHNOLOGY CO., LTD", Date: "06/15/2007", }, "HWV": PNPID{ ID: "HWV", Company: "HUAWEI TECHNOLOGIES CO., INC.", Date: "04/25/2018", }, "HNS": PNPID{ ID: "HNS", Company: "HUGHES NETWORK SYSTEMS", Date: "11/29/1996", }, "HMX": PNPID{ ID: "HMX", Company: "HUMAX CO., LTD.", Date: "02/14/2006", }, "HYO": PNPID{ ID: "HYO", Company: "HYC CO., LTD.", Date: "04/12/2006", }, "HYD": PNPID{ ID: "HYD", 
Company: "HYDIS TECHNOLOGIES.CO.,LTD", Date: "11/22/2010", }, "HYV": PNPID{ ID: "HYV", Company: "HYNIX SEMICONDUCTOR", Date: "11/29/2008", }, "HYC": PNPID{ ID: "HYC", Company: "HYPERCOPE GMBH AACHEN", Date: "12/01/1997", }, "HYR": PNPID{ ID: "HYR", Company: "HYPERTEC PTY LTD", Date: "11/29/1996", }, "HYP": PNPID{ ID: "HYP", Company: "HYPHEN LTD", Date: "11/29/1996", }, "ITT": PNPID{ ID: "ITT", Company: "I&T TELECOM.", Date: "11/08/1999", }, "IOD": PNPID{ ID: "IOD", Company: "I-O DATA DEVICE INC", Date: "11/29/1996", }, "IOS": PNPID{ ID: "IOS", Company: "I-O DISPLAY SYSTEM", Date: "03/15/2001", }, "IOT": PNPID{ ID: "IOT", Company: "I/OTECH INC", Date: "11/29/1996", }, "IAD": PNPID{ ID: "IAD", Company: "IADEA CORPORATION", Date: "09/10/2015", }, "IAT": PNPID{ ID: "IAT", Company: "IAT GERMANY GMBH", Date: "11/29/1996", }, "IBM": PNPID{ ID: "IBM", Company: "IBM BRASIL", Date: "11/29/1996", }, "CDT": PNPID{ ID: "CDT", Company: "IBM CORPORATION", Date: "11/29/1996", }, "IBP": PNPID{ ID: "IBP", Company: "IBP INSTRUMENTS GMBH", Date: "09/23/1998", }, "IBR": PNPID{ ID: "IBR", Company: "IBR GMBH", Date: "01/16/1998", }, "ICE": PNPID{ ID: "ICE", Company: "IC ENSEMBLE", Date: "09/19/1997", }, "ICA": PNPID{ ID: "ICA", Company: "ICA INC", Date: "05/20/2002", }, "ICX": PNPID{ ID: "ICX", Company: "ICCC A/S", Date: "11/29/1996", }, "ICD": PNPID{ ID: "ICD", Company: "ICD INC", Date: "06/09/1997", }, "ARE": PNPID{ ID: "ARE", Company: "ICET S.P.A.", Date: "05/16/1999", }, "ICP": PNPID{ ID: "ICP", Company: "ICP ELECTRONICS, INC./IEI TECHNOLOGY CORP.", Date: "09/07/2012", }, "ICR": PNPID{ ID: "ICR", Company: "ICRON", Date: "10/22/2019", }, "IUC": PNPID{ ID: "IUC", Company: "ICSL", Date: "08/14/1997", }, "XTD": PNPID{ ID: "XTD", Company: "ICUITI CORPORATION", Date: "06/14/2007", }, "IWR": PNPID{ ID: "IWR", Company: "ICUITI CORPORATION", Date: "03/06/2007", }, "ISC": PNPID{ ID: "ISC", Company: "ID3 SEMICONDUCTORS", Date: "03/15/2001", }, "IDE": PNPID{ ID: "IDE", Company: "IDE ASSOCIATES", 
Date: "11/29/1996", }, "IDO": PNPID{ ID: "IDO", Company: "IDEO PRODUCT DEVELOPMENT", Date: "09/30/1997", }, "DEX": PNPID{ ID: "DEX", Company: "IDEX DISPLAYS", Date: "04/25/2002", }, "IDX": PNPID{ ID: "IDX", Company: "IDEXX LABS", Date: "11/29/1996", }, "IDK": PNPID{ ID: "IDK", Company: "IDK CORPORATION", Date: "04/16/2003", }, "IDN": PNPID{ ID: "IDN", Company: "IDNEO TECHNOLOGIES", Date: "07/05/2012", }, "ITS": PNPID{ ID: "ITS", Company: "IDTECH", Date: "06/17/2002", }, "IEE": PNPID{ ID: "IEE", Company: "IEE", Date: "06/21/2001", }, "IGM": PNPID{ ID: "IGM", Company: "IGM COMMUNI", Date: "11/29/1996", }, "IIN": PNPID{ ID: "IIN", Company: "IINFRA CO., LTD", Date: "05/09/2003", }, "IVM": PNPID{ ID: "IVM", Company: "IIYAMA NORTH AMERICA", Date: "11/29/1996", }, "IKE": PNPID{ ID: "IKE", Company: "IKEGAMI TSUSHINKI CO. LTD.", Date: "11/14/2014", }, "IKS": PNPID{ ID: "IKS", Company: "IKOS SYSTEMS INC", Date: "11/29/1996", }, "IND": PNPID{ ID: "IND", Company: "ILC", Date: "06/16/2004", }, "ILC": PNPID{ ID: "ILC", Company: "IMAGE LOGIC CORPORATION", Date: "11/29/1996", }, "ISM": PNPID{ ID: "ISM", Company: "IMAGE STREAM MEDICAL", Date: "05/27/2010", }, "IMG": PNPID{ ID: "IMG", Company: "IMAGENICS CO., LTD.", Date: "09/05/2006", }, "IQT": PNPID{ ID: "IQT", Company: "IMAGEQUEST CO., LTD", Date: "10/08/2002", }, "IME": PNPID{ ID: "IME", Company: "IMAGRAPH", Date: "12/04/1996", }, "IMA": PNPID{ ID: "IMA", Company: "IMAGRAPH", Date: "11/29/1996", }, "IMD": PNPID{ ID: "IMD", Company: "IMASDE CANARIAS S.A.", Date: "07/03/1997", }, "IMC": PNPID{ ID: "IMC", Company: "IMC NETWORKS", Date: "11/29/1996", }, "IMM": PNPID{ ID: "IMM", Company: "IMMERSION CORPORATION", Date: "07/16/1997", }, "IMF": PNPID{ ID: "IMF", Company: "IMMERSIVE AUDIO TECHNOLOGIES FRANCE", Date: "03/29/2018", }, "HUM": PNPID{ ID: "HUM", Company: "IMP ELECTRONICS LTD.", Date: "06/16/2004", }, "IMP": PNPID{ ID: "IMP", Company: "IMPINJ", Date: "08/14/2012", }, "IMN": PNPID{ ID: "IMN", Company: "IMPOSSIBLE PRODUCTION", 
Date: "08/10/2000", }, "IFS": PNPID{ ID: "IFS", Company: "IN FOCUS SYSTEMS INC", Date: "11/29/1996", }, "ALD": PNPID{ ID: "ALD", Company: "IN4S INC", Date: "12/05/1997", }, "IBI": PNPID{ ID: "IBI", Company: "INBINE.CO.LTD", Date: "11/06/2001", }, "INK": PNPID{ ID: "INK", Company: "INDTEK CO., LTD.", Date: "03/26/2007", }, "IPD": PNPID{ ID: "IPD", Company: "INDUSTRIAL PRODUCTS DESIGN, INC.", Date: "07/16/1999", }, "IQI": PNPID{ ID: "IQI", Company: "INEOQUEST TECHNOLOGIES, INC", Date: "02/18/2011", }, "INS": PNPID{ ID: "INS", Company: "INES GMBH", Date: "11/29/1996", }, "IFX": PNPID{ ID: "IFX", Company: "INFINEON TECHNOLOGIES AG", Date: "04/19/2000", }, "IFZ": PNPID{ ID: "IFZ", Company: "INFINITE Z", Date: "01/04/2012", }, "IIT": PNPID{ ID: "IIT", Company: "INFORMATIK INFORMATION TECHNOLOGIES", Date: "08/14/2013", }, "IFT": PNPID{ ID: "IFT", Company: "INFORMTECH", Date: "11/29/1996", }, "ICI": PNPID{ ID: "ICI", Company: "INFOTEK COMMUNICATION INC", Date: "11/29/1996", }, "ITR": PNPID{ ID: "ITR", Company: "INFOTRONIC AMERICA, INC.", Date: "06/21/2001", }, "INF": PNPID{ ID: "INF", Company: "INFRAMETRICS INC", Date: "11/29/1996", }, "VSN": PNPID{ ID: "VSN", Company: "INGRAM MACROTRON", Date: "08/10/2000", }, "VID": PNPID{ ID: "VID", Company: "INGRAM MACROTRON GERMANY", Date: "05/24/2000", }, "IHE": PNPID{ ID: "IHE", Company: "INHAND ELECTRONICS", Date: "04/20/2010", }, "INI": PNPID{ ID: "INI", Company: "INITIO CORPORATION", Date: "11/29/1996", }, "IVR": PNPID{ ID: "IVR", Company: "INLIFE-HANDNET CO., LTD.", Date: "01/19/2017", }, "IMT": PNPID{ ID: "IMT", Company: "INMAX TECHNOLOGY CORPORATION", Date: "02/12/2003", }, "NES": PNPID{ ID: "NES", Company: "INNES", Date: "07/01/2016", }, "INO": PNPID{ ID: "INO", Company: "INNOLAB PTE LTD", Date: "01/20/1999", }, "INL": PNPID{ ID: "INL", Company: "INNOLUX DISPLAY CORPORATION", Date: "12/15/2004", }, "INM": PNPID{ ID: "INM", Company: "INNOMEDIA INC", Date: "11/29/1996", }, "ILS": PNPID{ ID: "ILS", Company: "INNOTECH 
CORPORATION", Date: "10/23/2000", }, "ATE": PNPID{ ID: "ATE", Company: "INNOVATE LTD", Date: "11/29/1996", }, "INN": PNPID{ ID: "INN", Company: "INNOVENT SYSTEMS, INC.", Date: "04/19/2000", }, "WII": PNPID{ ID: "WII", Company: "INNOWARE INC", Date: "01/30/1998", }, "inu": PNPID{ ID: "inu", Company: "INOVATEC S.P.A.", Date: "03/15/2001", }, "ICV": PNPID{ ID: "ICV", Company: "INSIDE CONTACTLESS", Date: "11/04/2010", }, "ION": PNPID{ ID: "ION", Company: "INSIDE OUT NETWORKS", Date: "12/28/1998", }, "ISG": PNPID{ ID: "ISG", Company: "INSIGNIA SOLUTIONS INC", Date: "11/29/1996", }, "ISR": PNPID{ ID: "ISR", Company: "INSIS CO., LTD.", Date: "02/12/2003", }, "IAF": PNPID{ ID: "IAF", Company: "INSTITUT F R ANGEWANDTE FUNKSYSTEMTECHNIK GMBH", Date: "03/20/1999", }, "ING": PNPID{ ID: "ING", Company: "INTEGRAPH CORPORATION", Date: "11/29/1996", }, "IBC": PNPID{ ID: "IBC", Company: "INTEGRATED BUSINESS SYSTEMS", Date: "11/29/1996", }, "ICS": PNPID{ ID: "ICS", Company: "INTEGRATED CIRCUIT SYSTEMS", Date: "11/29/1996", }, "IDP": PNPID{ ID: "IDP", Company: "INTEGRATED DEVICE TECHNOLOGY, INC.", Date: "01/27/2010", }, "ITE": PNPID{ ID: "ITE", Company: "INTEGRATED TECH EXPRESS INC", Date: "11/29/1996", }, "SRC": PNPID{ ID: "SRC", Company: "INTEGRATED TECH EXPRESS INC", Date: "11/29/1996", }, "ITX": PNPID{ ID: "ITX", Company: "INTEGRATED TECHNOLOGY EXPRESS INC", Date: "06/25/1997", }, "IAI": PNPID{ ID: "IAI", Company: "INTEGRATION ASSOCIATES, INC.", Date: "03/17/2004", }, "ICO": PNPID{ ID: "ICO", Company: "INTEL CORP", Date: "08/10/2000", }, "III": PNPID{ ID: "III", Company: "INTELLIGENT INSTRUMENTATION", Date: "11/29/1996", }, "IPI": PNPID{ ID: "IPI", Company: "INTELLIGENT PLATFORM MANAGEMENT INTERFACE (IPMI) FORUM (INTEL, HP, NEC, DELL)", Date: "05/24/2000", }, "IWX": PNPID{ ID: "IWX", Company: "INTELLIWORXX, INC.", Date: "05/16/1999", }, "SVC": PNPID{ ID: "SVC", Company: "INTELLIX CORP.", Date: "01/18/2008", }, "ITL": PNPID{ ID: "ITL", Company: "INTER-TEL", Date: "03/21/1997", }, 
"TCH": PNPID{ ID: "TCH", Company: "INTERACTION SYSTEMS, INC", Date: "03/20/1999", }, "PEN": PNPID{ ID: "PEN", Company: "INTERACTIVE COMPUTER PRODUCTS INC", Date: "01/15/1997", }, "ITC": PNPID{ ID: "ITC", Company: "INTERCOM INC", Date: "11/29/1996", }, "IDS": PNPID{ ID: "IDS", Company: "INTERDIGITAL SISTEMAS DE INFORMACAO", Date: "10/28/1997", }, "FBI": PNPID{ ID: "FBI", Company: "INTERFACE CORPORATION", Date: "11/29/1996", }, "ISI": PNPID{ ID: "ISI", Company: "INTERFACE SOLUTIONS", Date: "11/29/1996", }, "IGC": PNPID{ ID: "IGC", Company: "INTERGATE PTY LTD", Date: "11/29/1996", }, "IEC": PNPID{ ID: "IEC", Company: "INTERLACE ENGINEERING CORPORATION", Date: "11/29/1996", }, "IEI": PNPID{ ID: "IEI", Company: "INTERLINK ELECTRONICS", Date: "10/16/1998", }, "IDC": PNPID{ ID: "IDC", Company: "INTERNATIONAL DATACASTING CORPORATION", Date: "02/25/1997", }, "IDT": PNPID{ ID: "IDT", Company: "INTERNATIONAL DISPLAY TECHNOLOGY", Date: "05/16/2002", }, "ISY": PNPID{ ID: "ISY", Company: "INTERNATIONAL INTEGRATED SYSTEMS,INC.(IISI)", Date: "08/10/2000", }, "IMI": PNPID{ ID: "IMI", Company: "INTERNATIONAL MICROSYSTEMS INC", Date: "11/29/1996", }, "IPT": PNPID{ ID: "IPT", Company: "INTERNATIONAL POWER TECHNOLOGIES", Date: "04/11/1997", }, "ITD": PNPID{ ID: "ITD", Company: "INTERNET TECHNOLOGY CORPORATION", Date: "12/05/1997", }, "INP": PNPID{ ID: "INP", Company: "INTERPHASE CORPORATION", Date: "11/29/1996", }, "INT": PNPID{ ID: "INT", Company: "INTERPHASE CORPORATION", Date: "11/29/1996", }, "LSD": PNPID{ ID: "LSD", Company: "INTERSIL CORPORATION", Date: "03/14/2012", }, "IST": PNPID{ ID: "IST", Company: "INTERSOLVE TECHNOLOGIES", Date: "03/20/1999", }, "IXD": PNPID{ ID: "IXD", Company: "INTERTEX DATA AB", Date: "11/29/1996", }, "IVI": PNPID{ ID: "IVI", Company: "INTERVOICE INC", Date: "11/29/1996", }, "IVS": PNPID{ ID: "IVS", Company: "INTEVAC PHOTONICS INC.", Date: "02/16/2011", }, "ICM": PNPID{ ID: "ICM", Company: "INTRACOM SA", Date: "08/03/1998", }, "SDD": PNPID{ ID: "SDD", 
Company: "INTRADA-SDD LTD", Date: "11/21/2007", }, "ISP": PNPID{ ID: "ISP", Company: "INTRESOURCE SYSTEMS PTE LTD", Date: "08/27/1997", }, "SRG": PNPID{ ID: "SRG", Company: "INTUITIVE SURGICAL, INC.", Date: "02/16/2006", }, "INA": PNPID{ ID: "INA", Company: "INVENTEC CORPORATION", Date: "09/13/2013", }, "INE": PNPID{ ID: "INE", Company: "INVENTEC ELECTRONICS (M) SDN. BHD.", Date: "07/21/1998", }, "INV": PNPID{ ID: "INV", Company: "INVISO, INC.", Date: "10/23/2000", }, "IOM": PNPID{ ID: "IOM", Company: "IOMEGA", Date: "11/29/1996", }, "IPP": PNPID{ ID: "IPP", Company: "IP POWER TECHNOLOGIES GMBH", Date: "12/06/2010", }, "IPQ": PNPID{ ID: "IPQ", Company: "IP3 TECHNOLOGY LTD.", Date: "11/11/2013", }, "IPC": PNPID{ ID: "IPC", Company: "IPC CORPORATION", Date: "11/29/1996", }, "IPM": PNPID{ ID: "IPM", Company: "IPM INDUSTRIA POLITECNICA MERIDIONALE SPA", Date: "09/23/1998", }, "IPS": PNPID{ ID: "IPS", Company: "IPS, INC. (INTELLECTUAL PROPERTY SOLUTIONS, INC.)", Date: "09/05/2001", }, "IPW": PNPID{ ID: "IPW", Company: "IPWIRELESS, INC", Date: "03/15/2001", }, "IRD": PNPID{ ID: "IRD", Company: "IRDATA", Date: "04/24/2001", }, "IIC": PNPID{ ID: "IIC", Company: "ISIC INNOSCAN INDUSTRIAL COMPUTERS A/S", Date: "07/23/2003", }, "ISL": PNPID{ ID: "ISL", Company: "ISOLATION SYSTEMS", Date: "11/29/1996", }, "ISS": PNPID{ ID: "ISS", Company: "ISS INC", Date: "11/29/1996", }, "ITP": PNPID{ ID: "ITP", Company: "IT-PRO CONSULTING UND SYSTEMHAUS GMBH", Date: "10/23/2000", }, "ITA": PNPID{ ID: "ITA", Company: "ITAUSA EXPORT NORTH AMERICA", Date: "11/29/1996", }, "IPR": PNPID{ ID: "IPR", Company: "ITHACA PERIPHERALS", Date: "07/01/1997", }, "ITK": PNPID{ ID: "ITK", Company: "ITK TELEKOMMUNIKATION AG", Date: "11/29/1996", }, "ITM": PNPID{ ID: "ITM", Company: "ITM INC.", Date: "04/24/2001", }, "JCE": PNPID{ ID: "JCE", Company: "JACE TECH INC", Date: "11/29/1996", }, "JIC": PNPID{ ID: "JIC", Company: "JAEIK INFORMATION & COMMUNICATION CO., LTD.", Date: "10/23/2000", }, "XFG": PNPID{ ID: 
"XFG", Company: "JAN STRAPKO - FOTO", Date: "05/07/2001", }, "JUK": PNPID{ ID: "JUK", Company: "JANICH & KLASS COMPUTERTECHNIK GMBH", Date: "10/08/2002", }, "JAS": PNPID{ ID: "JAS", Company: "JANZ AUTOMATIONSSYSTEME AG", Date: "11/03/2009", }, "JAE": PNPID{ ID: "JAE", Company: "JAPAN AVIATION ELECTRONICS INDUSTRY, LIMITED", Date: "03/15/2001", }, "JDL": PNPID{ ID: "JDL", Company: "JAPAN DIGITAL LABORATORY CO.,LTD.", Date: "04/19/2000", }, "JDI": PNPID{ ID: "JDI", Company: "JAPAN DISPLAY INC.", Date: "04/18/2013", }, "JEM": PNPID{ ID: "JEM", Company: "JAPAN E.M.SOLUTIONS CO., LTD.", Date: "05/24/2018", }, "JAT": PNPID{ ID: "JAT", Company: "JATON CORPORATION", Date: "09/24/1997", }, "JET": PNPID{ ID: "JET", Company: "JET POWER TECHNOLOGY CO., LTD.", Date: "03/15/2001", }, "JWY": PNPID{ ID: "JWY", Company: "JETWAY INFORMATION CO., LTD", Date: "09/22/2003", }, "JTY": PNPID{ ID: "JTY", Company: "JETWAY SECURITY MICRO,INC", Date: "11/11/2009", }, "JWL": PNPID{ ID: "JWL", Company: "JEWELL INSTRUMENTS, LLC", Date: "06/21/2001", }, "SHI": PNPID{ ID: "SHI", Company: "JIANGSU SHINCO ELECTRONIC GROUP CO., LTD", Date: "08/10/2004", }, "JFX": PNPID{ ID: "JFX", Company: "JONES FUTUREX INC", Date: "11/29/1996", }, "LTI": PNPID{ ID: "LTI", Company: "JONGSHINE TECH INC", Date: "11/29/1996", }, "HKG": PNPID{ ID: "HKG", Company: "JOSEF HEIM KG", Date: "11/29/1996", }, "JPC": PNPID{ ID: "JPC", Company: "JPC TECHNOLOGY LIMITED", Date: "10/23/2000", }, "JSD": PNPID{ ID: "JSD", Company: "JS DIGITECH, INC", Date: "10/23/2000", }, "JTS": PNPID{ ID: "JTS", Company: "JS MOTORSPORTS", Date: "12/05/1997", }, "TPJ": PNPID{ ID: "TPJ", Company: "JUNNILA", Date: "03/15/2001", }, "JUP": PNPID{ ID: "JUP", Company: "JUPITER SYSTEMS", Date: "09/05/2006", }, "JSI": PNPID{ ID: "JSI", Company: "JUPITER SYSTEMS, INC.", Date: "06/14/2007", }, "JVC": PNPID{ ID: "JVC", Company: "JVC", Date: "10/23/2000", }, "JKC": PNPID{ ID: "JKC", Company: "JVC KENWOOD CORPORATION", Date: "03/08/2012", }, "JWS": PNPID{ ID: 
"JWS", Company: "JWSPENCER & CO.", Date: "07/16/1999", }, "KTE": PNPID{ ID: "KTE", Company: "K-TECH", Date: "03/31/2003", }, "KZN": PNPID{ ID: "KZN", Company: "K-ZONE INTERNATIONAL", Date: "06/21/2001", }, "KZI": PNPID{ ID: "KZI", Company: "K-ZONE INTERNATIONAL CO. LTD.", Date: "08/10/2000", }, "SGE": PNPID{ ID: "SGE", Company: "KANSAI ELECTRIC COMPANY LTD", Date: "12/04/1996", }, "HIQ": PNPID{ ID: "HIQ", Company: "KAOHSIUNG OPTO ELECTRONICS AMERICAS, INC.", Date: "03/14/2012", }, "KSL": PNPID{ ID: "KSL", Company: "KARN SOLUTIONS LTD.", Date: "05/08/2006", }, "KAR": PNPID{ ID: "KAR", Company: "KARNA", Date: "02/21/2000", }, "KTN": PNPID{ ID: "KTN", Company: "KATRON TECH INC", Date: "11/29/1996", }, "KTG": PNPID{ ID: "KTG", Company: "KAYSER-THREDE GMBH", Date: "11/29/1996", }, "KDT": PNPID{ ID: "KDT", Company: "KDDI TECHNOLOGY CORPORATION", Date: "05/22/2012", }, "KDE": PNPID{ ID: "KDE", Company: "KDE", Date: "08/14/2001", }, "KDS": PNPID{ ID: "KDS", Company: "KDS USA", Date: "11/29/1996", }, "KGL": PNPID{ ID: "KGL", Company: "KEISOKU GIKEN CO.,LTD.", Date: "04/17/2012", }, "KML": PNPID{ ID: "KML", Company: "KENSINGTON MICROWARE LTD", Date: "11/29/1996", }, "KWD": PNPID{ ID: "KWD", Company: "KENWOOD CORPORATION", Date: "02/22/2008", }, "EPS": PNPID{ ID: "EPS", Company: "KEPS", Date: "11/29/1996", }, "KES": PNPID{ ID: "KES", Company: "KESA CORPORATION", Date: "11/29/1996", }, "KEY": PNPID{ ID: "KEY", Company: "KEY TECH INC", Date: "11/29/1996", }, "KTK": PNPID{ ID: "KTK", Company: "KEY TRONIC CORPORATION", Date: "11/29/1996", }, "KCL": PNPID{ ID: "KCL", Company: "KEYCORP LTD", Date: "05/20/1997", }, "KYN": PNPID{ ID: "KYN", Company: "KEYENCE CORPORATION", Date: "03/30/2016", }, "KVX": PNPID{ ID: "KVX", Company: "KEYVIEW", Date: "08/13/2012", }, "KBI": PNPID{ ID: "KBI", Company: "KIDBOARD INC", Date: "04/24/1997", }, "KME": PNPID{ ID: "KME", Company: "KIMIN ELECTRONICS CO., LTD.", Date: "02/15/2011", }, "KSC": PNPID{ ID: "KSC", Company: "KINETIC SYSTEMS CORPORATION", 
Date: "11/29/1996", }, "KPC": PNPID{ ID: "KPC", Company: "KING PHOENIX COMPANY", Date: "11/29/1996", }, "KSX": PNPID{ ID: "KSX", Company: "KING TESTER CORPORATION", Date: "07/16/1998", }, "KTC": PNPID{ ID: "KTC", Company: "KINGSTON TECH CORPORATION", Date: "11/29/1996", }, "KIO": PNPID{ ID: "KIO", Company: "KIONIX, INC.", Date: "12/23/2013", }, "KIS": PNPID{ ID: "KIS", Company: "KISS TECHNOLOGY A/S", Date: "06/16/2005", }, "KGI": PNPID{ ID: "KGI", Company: "KLIPSCH GROUP, INC", Date: "09/22/2015", }, "PVP": PNPID{ ID: "PVP", Company: "KLOS TECHNOLOGIES, INC.", Date: "08/10/2000", }, "KBL": PNPID{ ID: "KBL", Company: "KOBIL SYSTEMS GMBH", Date: "03/15/2001", }, "KOB": PNPID{ ID: "KOB", Company: "KOBIL SYSTEMS GMBH", Date: "03/15/2001", }, "KDK": PNPID{ ID: "KDK", Company: "KODIAK TECH", Date: "11/29/1996", }, "KFX": PNPID{ ID: "KFX", Company: "KOFAX IMAGE PRODUCTS", Date: "11/29/1996", }, "KOL": PNPID{ ID: "KOL", Company: "KOLLMORGEN MOTION TECHNOLOGIES GROUP", Date: "11/29/1996", }, "KOE": PNPID{ ID: "KOE", Company: "KOLTER ELECTRONIC", Date: "03/15/2001", }, "KFE": PNPID{ ID: "KFE", Company: "KOMATSU FOREST", Date: "04/20/2010", }, "KNC": PNPID{ ID: "KNC", Company: "KONICA CORPORATION", Date: "08/05/1997", }, "KTI": PNPID{ ID: "KTI", Company: "KONICA TECHNICAL INC", Date: "11/29/1996", }, "TWE": PNPID{ ID: "TWE", Company: "KONTRON ELECTRONIK", Date: "11/29/1996", }, "KEM": PNPID{ ID: "KEM", Company: "KONTRON EMBEDDED MODULES GMBH", Date: "08/29/2007", }, "KEU": PNPID{ ID: "KEU", Company: "KONTRON EUROPE GMBH", Date: "02/20/2014", }, "KOM": PNPID{ ID: "KOM", Company: "KONTRON GMBH", Date: "09/05/2018", }, "KDM": PNPID{ ID: "KDM", Company: "KOREA DATA SYSTEMS CO., LTD.", Date: "12/18/2003", }, "KOU": PNPID{ ID: "KOU", Company: "KOUZIRO CO.,LTD.", Date: "07/27/2012", }, "KOW": PNPID{ ID: "KOW", Company: "KOWA COMPANY,LTD.", Date: "03/12/2008", }, "KMR": PNPID{ ID: "KMR", Company: "KRAMER ELECTRONICS LTD. 
INTERNATIONAL", Date: "07/10/2013", }, "KRL": PNPID{ ID: "KRL", Company: "KRELL INDUSTRIES INC.", Date: "08/03/2004", }, "KRM": PNPID{ ID: "KRM", Company: "KROMA TELECOM", Date: "05/05/2010", }, "KRY": PNPID{ ID: "KRY", Company: "KROY LLC", Date: "07/16/1998", }, "KSG": PNPID{ ID: "KSG", Company: "KUPA CHINA SHENZHEN MICRO TECHNOLOGY CO., LTD. GOLD INSTITUTE", Date: "04/22/2014", }, "KUR": PNPID{ ID: "KUR", Company: "KURTA CORPORATION", Date: "11/29/1996", }, "KVA": PNPID{ ID: "KVA", Company: "KVASER AB", Date: "01/24/1997", }, "KYE": PNPID{ ID: "KYE", Company: "KYE SYST CORPORATION", Date: "11/29/1996", }, "KYC": PNPID{ ID: "KYC", Company: "KYOCERA CORPORATION", Date: "11/29/1996", }, "KEC": PNPID{ ID: "KEC", Company: "KYUSHU ELECTRONICS SYSTEMS INC", Date: "01/12/1998", }, "LLL": PNPID{ ID: "LLL", Company: "L-3 COMMUNICATIONS", Date: "05/11/2010", }, "LCE": PNPID{ ID: "LCE", Company: "LA COMMANDE ELECTRONIQUE", Date: "11/29/1996", }, "LCT": PNPID{ ID: "LCT", Company: "LABCAL TECHNOLOGIES", Date: "11/08/1999", }, "LTC": PNPID{ ID: "LTC", Company: "LABTEC INC", Date: "12/08/1997", }, "LWC": PNPID{ ID: "LWC", Company: "LABWAY CORPORATION", Date: "12/04/1996", }, "LAC": PNPID{ ID: "LAC", Company: "LACIE", Date: "12/28/1998", }, "LAG": PNPID{ ID: "LAG", Company: "LAGUNA SYSTEMS", Date: "11/29/1996", }, "LND": PNPID{ ID: "LND", Company: "LAND COMPUTER COMPANY LTD", Date: "11/29/1996", }, "LNT": PNPID{ ID: "LNT", Company: "LANETCO INTERNATIONAL", Date: "05/02/2003", }, "LWW": PNPID{ ID: "LWW", Company: "LANIER WORLDWIDE", Date: "11/29/1996", }, "LHA": PNPID{ ID: "LHA", Company: "LARS HAAGH APS", Date: "01/09/1997", }, "LAS": PNPID{ ID: "LAS", Company: "LASAT COMM. 
A/S", Date: "11/29/1996", }, "LMT": PNPID{ ID: "LMT", Company: "LASER MASTER", Date: "11/29/1996", }, "LDN": PNPID{ ID: "LDN", Company: "LASERDYNE TECHNOLOGIES", Date: "10/16/2013", }, "LGX": PNPID{ ID: "LGX", Company: "LASERGRAPHICS, INC.", Date: "02/21/2000", }, "LCM": PNPID{ ID: "LCM", Company: "LATITUDE COMM.", Date: "11/29/1996", }, "LAV": PNPID{ ID: "LAV", Company: "LAVA COMPUTER MFG INC", Date: "04/14/1997", }, "LCC": PNPID{ ID: "LCC", Company: "LCI", Date: "08/10/2000", }, "LEC": PNPID{ ID: "LEC", Company: "LECTRON COMPANY LTD", Date: "03/27/1997", }, "LMP": PNPID{ ID: "LMP", Company: "LEDA MEDIA PRODUCTS", Date: "05/11/1998", }, "LEG": PNPID{ ID: "LEG", Company: "LEGERITY, INC", Date: "01/18/2005", }, "LTV": PNPID{ ID: "LTV", Company: "LEITCH TECHNOLOGY INTERNATIONAL INC.", Date: "12/09/2003", }, "LNV": PNPID{ ID: "LNV", Company: "LENOVO", Date: "07/14/2005", }, "VLM": PNPID{ ID: "VLM", Company: "LENOVO BEIJING CO. LTD.", Date: "05/21/2019", }, "LIN": PNPID{ ID: "LIN", Company: "LENOVO BEIJING CO. 
LTD.", Date: "05/22/2012", }, "LEN": PNPID{ ID: "LEN", Company: "LENOVO GROUP LIMITED", Date: "06/03/2005", }, "PRS": PNPID{ ID: "PRS", Company: "LEUTRON VISION", Date: "11/29/1996", }, "LEX": PNPID{ ID: "LEX", Company: "LEXICAL LTD", Date: "11/29/1996", }, "LCN": PNPID{ ID: "LCN", Company: "LEXICON", Date: "03/01/2005", }, "LMI": PNPID{ ID: "LMI", Company: "LEXMARK INT'L INC", Date: "11/29/1996", }, "LGS": PNPID{ ID: "LGS", Company: "LG SEMICOM COMPANY LTD", Date: "11/29/1996", }, "MAN": PNPID{ ID: "MAN", Company: "LGIC", Date: "02/21/2000", }, "LSC": PNPID{ ID: "LSC", Company: "LIFESIZE COMMUNICATIONS", Date: "02/14/2006", }, "LHT": PNPID{ ID: "LHT", Company: "LIGHTHOUSE TECHNOLOGIES LIMITED", Date: "05/04/2010", }, "LSP": PNPID{ ID: "LSP", Company: "LIGHTSPACE TECHNOLOGIES", Date: "03/29/2018", }, "LWR": PNPID{ ID: "LWR", Company: "LIGHTWARE VISUAL ENGINEERING", Date: "02/04/2009", }, "LTW": PNPID{ ID: "LTW", Company: "LIGHTWARE, INC", Date: "10/16/1998", }, "LZX": PNPID{ ID: "LZX", Company: "LIGHTWELL COMPANY LTD", Date: "12/02/1997", }, "LKM": PNPID{ ID: "LKM", Company: "LIKOM TECHNOLOGY SDN. 
BHD.", Date: "04/23/1998", }, "LNR": PNPID{ ID: "LNR", Company: "LINEAR SYSTEMS LTD.", Date: "10/11/2007", }, "LNK": PNPID{ ID: "LNK", Company: "LINK TECH INC", Date: "11/29/1996", }, "LIP": PNPID{ ID: "LIP", Company: "LINKED IP GMBH", Date: "07/19/2010", }, "FGD": PNPID{ ID: "FGD", Company: "LISA DRAEXLMAIER GMBH", Date: "02/22/1999", }, "LCI": PNPID{ ID: "LCI", Company: "LITE-ON COMMUNICATION INC", Date: "11/29/1996", }, "LOL": PNPID{ ID: "LOL", Company: "LITELOGIC OPERATIONS LTD", Date: "12/09/2011", }, "LIT": PNPID{ ID: "LIT", Company: "LITHICS SILICON TECHNOLOGY", Date: "03/15/2001", }, "LTN": PNPID{ ID: "LTN", Company: "LITRONIC INC", Date: "02/03/1998", }, "SKI": PNPID{ ID: "SKI", Company: "LLC SKTB “SKIT”", Date: "10/22/2019", }, "LOC": PNPID{ ID: "LOC", Company: "LOCAMATION B.V.", Date: "01/09/2004", }, "LOE": PNPID{ ID: "LOE", Company: "LOEWE OPTA GMBH", Date: "05/02/2005", }, "LGC": PNPID{ ID: "LGC", Company: "LOGIC LTD", Date: "04/02/1994", }, "LSL": PNPID{ ID: "LSL", Company: "LOGICAL SOLUTIONS", Date: "11/29/1996", }, "LOG": PNPID{ ID: "LOG", Company: "LOGICODE TECHNOLOGY INC", Date: "11/29/1996", }, "LDT": PNPID{ ID: "LDT", Company: "LOGIDATATECH ELECTRONIC GMBH", Date: "03/15/2001", }, "LGI": PNPID{ ID: "LGI", Company: "LOGITECH INC", Date: "11/29/1996", }, "SGO": PNPID{ ID: "SGO", Company: "LOGOS DESIGN A/S", Date: "04/24/2001", }, "LED": PNPID{ ID: "LED", Company: "LONG ENGINEERING DESIGN INC", Date: "11/29/1996", }, "LCS": PNPID{ ID: "LCS", Company: "LONGSHINE ELECTRONICS COMPANY", Date: "11/29/1996", }, "LSI": PNPID{ ID: "LSI", Company: "LOUGHBOROUGH SOUND IMAGES", Date: "11/29/1996", }, "LSJ": PNPID{ ID: "LSJ", Company: "LSI JAPAN COMPANY LTD", Date: "11/29/1996", }, "LSY": PNPID{ ID: "LSY", Company: "LSI SYSTEMS INC", Date: "11/29/1996", }, "LTS": PNPID{ ID: "LTS", Company: "LTS SCALE LLC", Date: "11/15/2007", }, "LBO": PNPID{ ID: "LBO", Company: "LUBOSOFT", Date: "04/24/2001", }, "LUC": PNPID{ ID: "LUC", Company: "LUCENT TECHNOLOGIES", Date: 
"04/19/2000", }, "LMG": PNPID{ ID: "LMG", Company: "LUCENT TECHNOLOGIES", Date: "01/13/1997", }, "LTK": PNPID{ ID: "LTK", Company: "LUCIDITY TECHNOLOGY COMPANY LTD", Date: "05/18/1998", }, "LUM": PNPID{ ID: "LUM", Company: "LUMAGEN, INC.", Date: "08/12/2004", }, "LHE": PNPID{ ID: "LHE", Company: "LUNG HWA ELECTRONICS COMPANY LTD", Date: "06/12/1998", }, "LXN": PNPID{ ID: "LXN", Company: "LUXEON", Date: "03/15/2001", }, "LUX": PNPID{ ID: "LUX", Company: "LUXXELL RESEARCH INC", Date: "06/09/1997", }, "LVI": PNPID{ ID: "LVI", Company: "LVI LOW VISION INTERNATIONAL AB", Date: "01/21/2011", }, "LXC": PNPID{ ID: "LXC", Company: "LXCO TECHNOLOGIES AG", Date: "01/11/2012", }, "MGL": PNPID{ ID: "MGL", Company: "M-G TECHNOLOGY LTD", Date: "10/29/1997", }, "OHW": PNPID{ ID: "OHW", Company: "M-LABS LIMITED", Date: "11/27/2013", }, "MPC": PNPID{ ID: "MPC", Company: "M-PACT INC", Date: "11/29/1996", }, "MSF": PNPID{ ID: "MSF", Company: "M-SYSTEMS FLASH DISK PIONEERS", Date: "12/17/1997", }, "MAC": PNPID{ ID: "MAC", Company: "MAC SYSTEM COMPANY LTD", Date: "09/26/1997", }, "MEJ": PNPID{ ID: "MEJ", Company: "MAC-EIGHT CO., LTD.", Date: "01/19/2011", }, "OCD": PNPID{ ID: "OCD", Company: "MACRAIGOR SYSTEMS INC", Date: "03/23/1998", }, "VHI": PNPID{ ID: "VHI", Company: "MACROCAD DEVELOPMENT INC.", Date: "04/19/2000", }, "MXI": PNPID{ ID: "MXI", Company: "MACRONIX INC", Date: "11/29/1996", }, "MDG": PNPID{ ID: "MDG", Company: "MADGE NETWORKS", Date: "11/29/1996", }, "MAE": PNPID{ ID: "MAE", Company: "MAESTRO PTY LTD", Date: "12/04/1996", }, "MAG": PNPID{ ID: "MAG", Company: "MAG INNOVISION", Date: "11/29/1996", }, "MLP": PNPID{ ID: "MLP", Company: "MAGIC LEAP", Date: "11/14/2014", }, "MCP": PNPID{ ID: "MCP", Company: "MAGNI SYSTEMS INC", Date: "11/29/1996", }, "EKA": PNPID{ ID: "EKA", Company: "MAGTEK INC.", Date: "02/14/2006", }, "MDT": PNPID{ ID: "MDT", Company: "MAGUS DATA TECH", Date: "11/29/1996", }, "MPN": PNPID{ ID: "MPN", Company: "MAINPINE LIMITED", Date: "06/30/2007", }, 
"MUK": PNPID{ ID: "MUK", Company: "MAINPINE LIMITED", Date: "09/13/1999", }, "PAK": PNPID{ ID: "PAK", Company: "MANY CNC SYSTEM CO., LTD.", Date: "03/12/2004", }, "MPL": PNPID{ ID: "MPL", Company: "MAPLE RESEARCH INST. COMPANY LTD", Date: "11/29/1996", }, "MJI": PNPID{ ID: "MJI", Company: "MARANTZ JAPAN, INC.", Date: "10/23/2000", }, "MIL": PNPID{ ID: "MIL", Company: "MARCONI INSTRUMENTS LTD", Date: "11/29/1996", }, "MRC": PNPID{ ID: "MRC", Company: "MARCONI SIMULATION & TY-COCH WAY TRAINING", Date: "11/29/1996", }, "MCR": PNPID{ ID: "MCR", Company: "MARINA COMMUNICAITONS", Date: "11/29/1996", }, "MLN": PNPID{ ID: "MLN", Company: "MARK LEVINSON", Date: "02/28/2005", }, "MTU": PNPID{ ID: "MTU", Company: "MARK OF THE UNICORN INC", Date: "03/21/1997", }, "MTC": PNPID{ ID: "MTC", Company: "MARS-TECH CORPORATION", Date: "11/29/1996", }, "MNI": PNPID{ ID: "MNI", Company: "MARSEILLE, INC.", Date: "02/27/2013", }, "MBM": PNPID{ ID: "MBM", Company: "MARSHALL ELECTRONICS", Date: "03/13/2006", }, "MRK": PNPID{ ID: "MRK", Company: "MARUKO & COMPANY LTD", Date: "11/29/1996", }, "MSR": PNPID{ ID: "MSR", Company: "MASPRO DENKOH CORP.", Date: "10/25/2012", }, "MAS": PNPID{ ID: "MAS", Company: "MASS INC.", Date: "02/25/2002", }, "MCQ": PNPID{ ID: "MCQ", Company: "MAT'S COMPUTERS", Date: "07/22/2004", }, "MEQ": PNPID{ ID: "MEQ", Company: "MATELECT LTD.", Date: "05/30/2002", }, "MOC": PNPID{ ID: "MOC", Company: "MATRIX ORBITAL CORPORATION", Date: "11/13/2017", }, "MTX": PNPID{ ID: "MTX", Company: "MATROX", Date: "11/29/1996", }, "WPA": PNPID{ ID: "WPA", Company: "MATSUSHITA COMMUNICATION INDUSTRIAL CO., LTD.", Date: "03/15/2001", }, "MAT": PNPID{ ID: "MAT", Company: "MATSUSHITA ELECTRIC IND. 
COMPANY LTD", Date: "11/29/1996", }, "MTI": PNPID{ ID: "MTI", Company: "MAXCOM TECHNICAL INC", Date: "11/29/1996", }, "VOB": PNPID{ ID: "VOB", Company: "MAXDATA COMPUTER AG", Date: "02/21/2000", }, "MXD": PNPID{ ID: "MXD", Company: "MAXDATA COMPUTER GMBH & CO.KG", Date: "04/19/2000", }, "MXP": PNPID{ ID: "MXP", Company: "MAXPEED CORPORATION", Date: "02/19/1997", }, "MXT": PNPID{ ID: "MXT", Company: "MAXTECH CORPORATION", Date: "11/29/1996", }, "MXV": PNPID{ ID: "MXV", Company: "MAXVISION CORPORATION", Date: "07/16/1999", }, "DJP": PNPID{ ID: "DJP", Company: "MAYGAY MACHINES, LTD", Date: "08/10/2000", }, "MAY": PNPID{ ID: "MAY", Company: "MAYNARD ELECTRONICS", Date: "11/29/1996", }, "MAZ": PNPID{ ID: "MAZ", Company: "MAZET GMBH", Date: "08/11/1998", }, "MBC": PNPID{ ID: "MBC", Company: "MBC", Date: "11/29/1996", }, "MCD": PNPID{ ID: "MCD", Company: "MCDATA CORPORATION", Date: "11/29/1996", }, "MLI": PNPID{ ID: "MLI", Company: "MCINTOSH LABORATORY INC.", Date: "01/18/2008", }, "MIT": PNPID{ ID: "MIT", Company: "MCM INDUSTRIAL TECHNOLOGY GMBH", Date: "10/29/2004", }, "CEM": PNPID{ ID: "CEM", Company: "MEC ELECTRONICS GMBH", Date: "04/19/2000", }, "MDR": PNPID{ ID: "MDR", Company: "MEDAR INC", Date: "12/11/1996", }, "MTB": PNPID{ ID: "MTB", Company: "MEDIA TECHNOLOGIES LTD.", Date: "01/05/2009", }, "MKC": PNPID{ ID: "MKC", Company: "MEDIA TEK INC.", Date: "06/14/2007", }, "MVI": PNPID{ ID: "MVI", Company: "MEDIA VISION INC", Date: "11/29/1996", }, "MDA": PNPID{ ID: "MDA", Company: "MEDIA4 INC", Date: "03/20/1997", }, "OWL": PNPID{ ID: "OWL", Company: "MEDIACOM TECHNOLOGIES PTE LTD", Date: "11/29/1996", }, "MEK": PNPID{ ID: "MEK", Company: "MEDIAEDGE CORPORATION", Date: "11/19/2013", }, "MFR": PNPID{ ID: "MFR", Company: "MEDIAFIRE CORP.", Date: "12/28/1998", }, "FTR": PNPID{ ID: "FTR", Company: "MEDIASONIC", Date: "11/29/1996", }, "MTE": PNPID{ ID: "MTE", Company: "MEDIATEC GMBH", Date: "12/13/1996", }, "MDK": PNPID{ ID: "MDK", Company: "MEDIATEK CORPORATION", Date: 
"03/13/1997", }, "MPI": PNPID{ ID: "MPI", Company: "MEDIATRIX PERIPHERALS INC", Date: "04/24/1997", }, "MVR": PNPID{ ID: "MVR", Company: "MEDICAPTURE, INC.", Date: "05/25/2017", }, "MCJ": PNPID{ ID: "MCJ", Company: "MEDICAROID CORPORATION", Date: "08/20/2018", }, "MRO": PNPID{ ID: "MRO", Company: "MEDIKRO OY", Date: "09/19/1997", }, "MEC": PNPID{ ID: "MEC", Company: "MEGA SYSTEM TECHNOLOGIES INC", Date: "12/29/1997", }, "MGA": PNPID{ ID: "MGA", Company: "MEGA SYSTEM TECHNOLOGIES, INC.", Date: "12/28/1998", }, "MPV": PNPID{ ID: "MPV", Company: "MEGAPIXEL VISUAL REALTY", Date: "07/15/2020", }, "MSK": PNPID{ ID: "MSK", Company: "MEGASOFT INC", Date: "11/29/1996", }, "MGT": PNPID{ ID: "MGT", Company: "MEGATECH R & D COMPANY", Date: "11/29/1996", }, "MEP": PNPID{ ID: "MEP", Company: "MELD TECHNOLOGY", Date: "08/16/2012", }, "MEN": PNPID{ ID: "MEN", Company: "MEN MIKROELECTRONIK NUERUBERG GMBH", Date: "05/23/1997", }, "MGC": PNPID{ ID: "MGC", Company: "MENTOR GRAPHICS CORPORATION", Date: "07/30/2009", }, "RLD": PNPID{ ID: "RLD", Company: "MEPCO", Date: "03/15/2001", }, "PPD": PNPID{ ID: "PPD", Company: "MEPHI", Date: "11/27/1998", }, "MRT": PNPID{ ID: "MRT", Company: "MERGING TECHNOLOGIES", Date: "11/29/1996", }, "MAL": PNPID{ ID: "MAL", Company: "MERIDIAN AUDIO LTD", Date: "02/04/2009", }, "MED": PNPID{ ID: "MED", Company: "MESSELTRONIK DRESDEN GMBH", Date: "11/29/1996", }, "MDV": PNPID{ ID: "MDV", Company: "MET DEVELOPMENT INC", Date: "11/29/1996", }, "MVN": PNPID{ ID: "MVN", Company: "META COMPANY", Date: "05/25/2016", }, "CFR": PNPID{ ID: "CFR", Company: "META VIEW, INC.", Date: "07/15/2020", }, "MTA": PNPID{ ID: "MTA", Company: "META WATCH LTD", Date: "08/29/2013", }, "MET": PNPID{ ID: "MET", Company: "METHEUS CORPORATION", Date: "11/29/1996", }, "MCM": PNPID{ ID: "MCM", Company: "METRICOM INC", Date: "11/29/1996", }, "QCH": PNPID{ ID: "QCH", Company: "METRONICS INC", Date: "11/29/1996", }, "NET": PNPID{ ID: "NET", Company: "METTLER TOLEDO", Date: "11/29/1996", }, 
"MCE": PNPID{ ID: "MCE", Company: "METZ-WERKE GMBH & CO KG", Date: "06/30/2005", }, "MIC": PNPID{ ID: "MIC", Company: "MICOM COMMUNICATIONS INC", Date: "05/05/1997", }, "MSX": PNPID{ ID: "MSX", Company: "MICOMSOFT CO., LTD.", Date: "04/10/2008", }, "MCS": PNPID{ ID: "MCS", Company: "MICRO COMPUTER SYSTEMS", Date: "11/29/1996", }, "MDI": PNPID{ ID: "MDI", Company: "MICRO DESIGN INC", Date: "01/20/1998", }, "MDS": PNPID{ ID: "MDS", Company: "MICRO DISPLAY SYSTEMS INC", Date: "11/29/1996", }, "MFI": PNPID{ ID: "MFI", Company: "MICRO FIRMWARE", Date: "12/30/1997", }, "MCC": PNPID{ ID: "MCC", Company: "MICRO INDUSTRIES", Date: "04/21/2003", }, "BPD": PNPID{ ID: "BPD", Company: "MICRO SOLUTIONS, INC.", Date: "04/19/2000", }, "MSA": PNPID{ ID: "MSA", Company: "MICRO SYSTEMATION AB", Date: "11/08/1999", }, "JMT": PNPID{ ID: "JMT", Company: "MICRO TECHNICAL COMPANY LTD", Date: "11/29/1996", }, "MTH": PNPID{ ID: "MTH", Company: "MICRO-TECH HEARING INSTRUMENTS", Date: "12/15/1997", }, "MBD": PNPID{ ID: "MBD", Company: "MICROBUS PLC", Date: "08/13/2002", }, "MNP": PNPID{ ID: "MNP", Company: "MICROCOM", Date: "11/29/1996", }, "MDX": PNPID{ ID: "MDX", Company: "MICRODATEC GMBH", Date: "09/13/1999", }, "MRD": PNPID{ ID: "MRD", Company: "MICRODISPLAY CORPORATION", Date: "06/14/2007", }, "MDY": PNPID{ ID: "MDY", Company: "MICRODYNE INC", Date: "12/18/1996", }, "MFG": PNPID{ ID: "MFG", Company: "MICROFIELD GRAPHICS INC", Date: "11/29/1996", }, "MIV": PNPID{ ID: "MIV", Company: "MICROIMAGE VIDEO SYSTEMS", Date: "12/08/2015", }, "MPJ": PNPID{ ID: "MPJ", Company: "MICROLAB", Date: "05/23/1997", }, "LAF": PNPID{ ID: "LAF", Company: "MICROLINE", Date: "09/13/1999", }, "MLG": PNPID{ ID: "MLG", Company: "MICROLOGICA AG", Date: "10/06/1998", }, "MMD": PNPID{ ID: "MMD", Company: "MICROMED BIOTECNOLOGIA LTD", Date: "12/11/1996", }, "MMA": PNPID{ ID: "MMA", Company: "MICROMEDIA AG", Date: "04/24/1997", }, "MCN": PNPID{ ID: "MCN", Company: "MICRON ELECTRONICS INC", Date: "02/20/1997", }, "MCI": 
PNPID{ ID: "MCI", Company: "MICRONICS COMPUTERS", Date: "11/29/1996", }, "MIP": PNPID{ ID: "MIP", Company: "MICRONPC.COM", Date: "08/10/2000", }, "MYX": PNPID{ ID: "MYX", Company: "MICRONYX INC", Date: "11/29/1996", }, "MPX": PNPID{ ID: "MPX", Company: "MICROPIX TECHNOLOGIES, LTD.", Date: "10/08/2001", }, "MSL": PNPID{ ID: "MSL", Company: "MICROSLATE INC.", Date: "05/16/1999", }, "PNP": PNPID{ ID: "PNP", Company: "MICROSOFT", Date: "03/05/2004", }, "MSH": PNPID{ ID: "MSH", Company: "MICROSOFT", Date: "11/29/1996", }, "PNG": PNPID{ ID: "PNG", Company: "MICROSOFT", Date: "11/29/1996", }, "WBN": PNPID{ ID: "WBN", Company: "MICROSOFTWARE", Date: "01/14/1998", }, "MSI": PNPID{ ID: "MSI", Company: "MICROSTEP", Date: "11/29/1996", }, "MCT": PNPID{ ID: "MCT", Company: "MICROTEC", Date: "11/29/1996", }, "MTJ": PNPID{ ID: "MTJ", Company: "MICROTECHNICA CO.,LTD.", Date: "01/04/2016", }, "MKT": PNPID{ ID: "MKT", Company: "MICROTEK INC.", Date: "07/14/2005", }, "MTK": PNPID{ ID: "MTK", Company: "MICROTEK INTERNATIONAL INC.", Date: "02/25/2002", }, "MSY": PNPID{ ID: "MSY", Company: "MICROTOUCH SYSTEMS INC", Date: "08/10/2000", }, "MVS": PNPID{ ID: "MVS", Company: "MICROVISION", Date: "02/13/2009", }, "MVD": PNPID{ ID: "MVD", Company: "MICROVITEC PLC", Date: "11/29/1996", }, "MWY": PNPID{ ID: "MWY", Company: "MICROWAY INC", Date: "11/29/1996", }, "MDC": PNPID{ ID: "MDC", Company: "MIDORI ELECTRONICS", Date: "11/29/1996", }, "SFT": PNPID{ ID: "SFT", Company: "MIKROFORUM RING 3", Date: "11/02/2004", }, "MLC": PNPID{ ID: "MLC", Company: "MILCOTS", Date: "07/15/2020", }, "MDF": PNPID{ ID: "MDF", Company: "MILDEF AB", Date: "06/23/2016", }, "MLS": PNPID{ ID: "MLS", Company: "MILESTONE EPE", Date: "08/11/1998", }, "MLM": PNPID{ ID: "MLM", Company: "MILLENNIUM ENGINEERING INC", Date: "11/29/1996", }, "MLL": PNPID{ ID: "MLL", Company: "MILLOGIC LTD.", Date: "01/09/2014", }, "MCX": PNPID{ ID: "MCX", Company: "MILLSON CUSTOM SOLUTIONS INC.", Date: "10/17/2013", }, "VTM": PNPID{ ID: "VTM", 
Company: "MILTOPE CORPORATION", Date: "09/23/2009", }, "MIM": PNPID{ ID: "MIM", Company: "MIMIO – A NEWELL RUBBERMAID COMPANY", Date: "07/31/2012", }, "MMT": PNPID{ ID: "MMT", Company: "MIMO MONITORS", Date: "10/22/2019", }, "MTD": PNPID{ ID: "MTD", Company: "MINDTECH DISPLAY CO. LTD", Date: "06/14/2007", }, "FTW": PNPID{ ID: "FTW", Company: "MINDTRIBE PRODUCT ENGINEERING, INC.", Date: "02/14/2011", }, "MNC": PNPID{ ID: "MNC", Company: "MINI MICRO METHODS LTD", Date: "11/29/1996", }, "MIN": PNPID{ ID: "MIN", Company: "MINICOM DIGITAL SIGNAGE", Date: "08/13/2010", }, "MMN": PNPID{ ID: "MMN", Company: "MINIMAN INC", Date: "11/29/1996", }, "MMF": PNPID{ ID: "MMF", Company: "MINNESOTA MINING AND MANUFACTURING", Date: "03/15/2001", }, "MRA": PNPID{ ID: "MRA", Company: "MIRANDA TECHNOLOGIES INC", Date: "11/29/1996", }, "MRL": PNPID{ ID: "MRL", Company: "MIRATEL", Date: "10/16/1998", }, "MIR": PNPID{ ID: "MIR", Company: "MIRO COMPUTER PROD.", Date: "11/29/1996", }, "MID": PNPID{ ID: "MID", Company: "MIRO DISPLAYS", Date: "03/20/1999", }, "MSP": PNPID{ ID: "MSP", Company: "MISTRAL SOLUTIONS [P] LTD.", Date: "09/23/1998", }, "MII": PNPID{ ID: "MII", Company: "MITEC INC", Date: "11/29/1996", }, "MTL": PNPID{ ID: "MTL", Company: "MITEL CORPORATION", Date: "08/01/1997", }, "MTR": PNPID{ ID: "MTR", Company: "MITRON COMPUTER INC", Date: "11/29/1996", }, "MEL": PNPID{ ID: "MEL", Company: "MITSUBISHI ELECTRIC CORPORATION", Date: "11/29/1996", }, "MEE": PNPID{ ID: "MEE", Company: "MITSUBISHI ELECTRIC ENGINEERING CO., LTD.", Date: "10/03/2005", }, "KMC": PNPID{ ID: "KMC", Company: "MITSUMI COMPANY LTD", Date: "11/29/1996", }, "MJS": PNPID{ ID: "MJS", Company: "MJS DESIGNS", Date: "11/29/1996", }, "MKS": PNPID{ ID: "MKS", Company: "MK SEIKO CO., LTD.", Date: "06/18/2013", }, "MMS": PNPID{ ID: "MMS", Company: "MMS ELECTRONICS", Date: "02/24/1998", }, "FST": PNPID{ ID: "FST", Company: "MODESTO PC INC", Date: "02/27/1997", }, "MDD": PNPID{ ID: "MDD", Company: "MODIS", Date: 
"11/08/1999", }, "MIS": PNPID{ ID: "MIS", Company: "MODULAR INDUSTRIAL SOLUTIONS INC", Date: "11/29/1996", }, "MOD": PNPID{ ID: "MOD", Company: "MODULAR TECHNOLOGY", Date: "06/09/1997", }, "MOM": PNPID{ ID: "MOM", Company: "MOMENTUM DATA SYSTEMS", Date: "01/18/2008", }, "MNL": PNPID{ ID: "MNL", Company: "MONORAIL INC", Date: "02/18/1997", }, "MYA": PNPID{ ID: "MYA", Company: "MONYDATA", Date: "11/29/1996", }, "MBV": PNPID{ ID: "MBV", Company: "MORETON BAY", Date: "01/13/2000", }, "MOS": PNPID{ ID: "MOS", Company: "MOSES CORPORATION", Date: "11/29/1996", }, "MSV": PNPID{ ID: "MSV", Company: "MOSGI CORPORATION", Date: "11/29/1996", }, "MCO": PNPID{ ID: "MCO", Company: "MOTION COMPUTING INC.", Date: "05/30/2002", }, "MTM": PNPID{ ID: "MTM", Company: "MOTIUM", Date: "06/19/2012", }, "MSU": PNPID{ ID: "MSU", Company: "MOTOROLA", Date: "03/15/2001", }, "MCL": PNPID{ ID: "MCL", Company: "MOTOROLA COMMUNICATIONS ISRAEL", Date: "07/02/2002", }, "MCG": PNPID{ ID: "MCG", Company: "MOTOROLA COMPUTER GROUP", Date: "08/14/1997", }, "MOT": PNPID{ ID: "MOT", Company: "MOTOROLA UDS", Date: "11/29/1996", }, "MSC": PNPID{ ID: "MSC", Company: "MOUSE SYSTEMS CORPORATION", Date: "11/29/1996", }, "MHQ": PNPID{ ID: "MHQ", Company: "MOXA INC.", Date: "10/22/2019", }, "MEU": PNPID{ ID: "MEU", Company: "MPL AG, ELEKTRONIK-UNTERNEHMEN", Date: "01/15/2016", }, "MPS": PNPID{ ID: "MPS", Company: "MPS SOFTWARE GMBH", Date: "11/29/1996", }, "MST": PNPID{ ID: "MST", Company: "MS TELEMATICA", Date: "04/28/1997", }, "MEX": PNPID{ ID: "MEX", Company: "MSC VERTRIEBS GMBH", Date: "06/04/2012", }, "MSG": PNPID{ ID: "MSG", Company: "MSI GMBH", Date: "09/13/1999", }, "MTN": PNPID{ ID: "MTN", Company: "MTRON STORAGE TECHNOLOGY CO., LTD.", Date: "06/17/2008", }, "MUD": PNPID{ ID: "MUD", Company: "MULTI-DIMENSION INSTITUTE", Date: "10/23/2000", }, "MTS": PNPID{ ID: "MTS", Company: "MULTI-TECH SYSTEMS", Date: "11/29/1996", }, "MMI": PNPID{ ID: "MMI", Company: "MULTIMAX", Date: "11/29/1996", }, "MQP": PNPID{ 
ID: "MQP", Company: "MULTIQ PRODUCTS AB", Date: "03/20/1999", }, "MWI": PNPID{ ID: "MWI", Company: "MULTIWAVE INNOVATION PTE LTD", Date: "11/29/1996", }, "MAI": PNPID{ ID: "MAI", Company: "MUTOH AMERICA INC", Date: "09/13/1999", }, "MWR": PNPID{ ID: "MWR", Company: "MWARE", Date: "04/24/2001", }, "MLX": PNPID{ ID: "MLX", Company: "MYLEX CORPORATION", Date: "11/29/1996", }, "MYR": PNPID{ ID: "MYR", Company: "MYRIAD SOLUTIONS LTD", Date: "11/29/1996", }, "WYS": PNPID{ ID: "WYS", Company: "MYSE TECHNOLOGY", Date: "11/29/1996", }, "NBL": PNPID{ ID: "NBL", Company: "N*ABLE TECHNOLOGIES INC", Date: "04/28/1998", }, "NTR": PNPID{ ID: "NTR", Company: "N-TRIG INNOVATIVE TECHNOLOGIES, INC.", Date: "10/03/2005", }, "JEN": PNPID{ ID: "JEN", Company: "N-VISION", Date: "10/23/2000", }, "NAD": PNPID{ ID: "NAD", Company: "NAD ELECTRONICS", Date: "06/14/2007", }, "NDK": PNPID{ ID: "NDK", Company: "NAITOH DENSEI CO., LTD.", Date: "04/12/2006", }, "NCP": PNPID{ ID: "NCP", Company: "NAJING CEC PANDA FPD TECHNOLOGY CO. LTD", Date: "02/24/2015", }, "NAK": PNPID{ ID: "NAK", Company: "NAKANO ENGINEERING CO.,LTD.", Date: "07/22/2009", }, "NYC": PNPID{ ID: "NYC", Company: "NAKAYO RELECOMMUNICATIONS, INC.", Date: "08/10/2000", }, "SCS": PNPID{ ID: "SCS", Company: "NANOMACH ANSTALT", Date: "11/29/1996", }, "ADR": PNPID{ ID: "ADR", Company: "NASA AMES RESEARCH CENTER", Date: "11/29/1996", }, "NDC": PNPID{ ID: "NDC", Company: "NATIONAL DATACOMM CORPORAITON", Date: "11/29/1996", }, "NDI": PNPID{ ID: "NDI", Company: "NATIONAL DISPLAY SYSTEMS", Date: "08/08/2003", }, "NIC": PNPID{ ID: "NIC", Company: "NATIONAL INSTRUMENTS CORPORATION", Date: "11/29/1996", }, "NBS": PNPID{ ID: "NBS", Company: "NATIONAL KEY LAB. ON ISN", Date: "07/16/1998", }, "NSC": PNPID{ ID: "NSC", Company: "NATIONAL SEMICONDUCTOR CORPORATION", Date: "11/29/1996", }, "TTB": PNPID{ ID: "TTB", Company: "NATIONAL SEMICONDUCTOR JAPAN LTD", Date: "04/14/1997", }, "NTL": PNPID{ ID: "NTL", Company: "NATIONAL TRANSCOMM. 
LTD", Date: "11/29/1996", }, "ZIC": PNPID{ ID: "ZIC", Company: "NATIONZ TECHNOLOGIES INC.", Date: "03/12/2009", }, "NMS": PNPID{ ID: "NMS", Company: "NATURAL MICRO SYSTEM", Date: "11/29/1996", }, "NAT": PNPID{ ID: "NAT", Company: "NATURALPOINT INC.", Date: "09/03/2010", }, "NVT": PNPID{ ID: "NVT", Company: "NAVATEK ENGINEERING CORPORATION", Date: "03/02/1998", }, "NME": PNPID{ ID: "NME", Company: "NAVICO, INC.", Date: "11/28/2012", }, "NAV": PNPID{ ID: "NAV", Company: "NAVIGATION CORPORATION", Date: "02/22/1999", }, "NAX": PNPID{ ID: "NAX", Company: "NAXOS TECNOLOGIA", Date: "12/12/1997", }, "NAC": PNPID{ ID: "NAC", Company: "NCAST CORPORATION", Date: "02/14/2006", }, "DUN": PNPID{ ID: "DUN", Company: "NCR CORPORATION", Date: "04/25/2002", }, "NCC": PNPID{ ID: "NCC", Company: "NCR CORPORATION", Date: "11/29/1996", }, "NCR": PNPID{ ID: "NCR", Company: "NCR ELECTRONICS", Date: "11/29/1996", }, "NDF": PNPID{ ID: "NDF", Company: "NDF SPECIAL LIGHT PRODUCTS B.V.", Date: "09/18/2014", }, "DMV": PNPID{ ID: "DMV", Company: "NDS LTD", Date: "06/25/1997", }, "NEC": PNPID{ ID: "NEC", Company: "NEC CORPORATION", Date: "05/24/2000", }, "NCT": PNPID{ ID: "NCT", Company: "NEC CUSTOMTECHNICA, LTD.", Date: "10/23/2002", }, "NMV": PNPID{ ID: "NMV", Company: "NEC-MITSUBISHI ELECTRIC VISUAL SYSTEMS CORPORATION", Date: "02/25/2002", }, "NEO": PNPID{ ID: "NEO", Company: "NEO TELECOM CO.,LTD.", Date: "11/08/1999", }, "NMX": PNPID{ ID: "NMX", Company: "NEOMAGIC", Date: "11/29/1996", }, "NTC": PNPID{ ID: "NTC", Company: "NEOTECH S.R.L", Date: "11/11/1997", }, "NTX": PNPID{ ID: "NTX", Company: "NETACCESS INC", Date: "02/07/1997", }, "NCL": PNPID{ ID: "NCL", Company: "NETCOMM LTD", Date: "11/29/1996", }, "NVC": PNPID{ ID: "NVC", Company: "NETVISION CORPORATION", Date: "11/29/1996", }, "NAL": PNPID{ ID: "NAL", Company: "NETWORK ALCHEMY", Date: "09/30/1997", }, "NDL": PNPID{ ID: "NDL", Company: "NETWORK DESIGNERS", Date: "11/29/1996", }, "NGC": PNPID{ ID: "NGC", Company: "NETWORK GENERAL", 
Date: "08/26/1997", }, "NIT": PNPID{ ID: "NIT", Company: "NETWORK INFO TECHNOLOGY", Date: "11/29/1996", }, "NPI": PNPID{ ID: "NPI", Company: "NETWORK PERIPHERALS INC", Date: "11/29/1996", }, "NST": PNPID{ ID: "NST", Company: "NETWORK SECURITY TECHNOLOGY CO", Date: "02/22/1999", }, "NTW": PNPID{ ID: "NTW", Company: "NETWORTH INC", Date: "11/29/1996", }, "NSA": PNPID{ ID: "NSA", Company: "NEUROSKY, INC.", Date: "08/28/2013", }, "NEU": PNPID{ ID: "NEU", Company: "NEUROTEC - EMPRESA DE PESQUISA E DESENVOLVIMENTO EM BIOMEDICINA", Date: "03/15/2001", }, "NTI": PNPID{ ID: "NTI", Company: "NEW TECH INT'L COMPANY", Date: "11/29/1996", }, "NCI": PNPID{ ID: "NCI", Company: "NEWCOM INC", Date: "01/09/1997", }, "NWS": PNPID{ ID: "NWS", Company: "NEWISYS, INC.", Date: "10/08/2002", }, "NSS": PNPID{ ID: "NSS", Company: "NEWPORT SYSTEMS SOLUTIONS", Date: "11/29/1996", }, "NTK": PNPID{ ID: "NTK", Company: "NEWTEK", Date: "06/22/2017", }, "NXG": PNPID{ ID: "NXG", Company: "NEXGEN", Date: "11/29/1996", }, "NEX": PNPID{ ID: "NEX", Company: "NEXGEN MEDIATECH INC.,", Date: "11/11/2003", }, "NXQ": PNPID{ ID: "NXQ", Company: "NEXIQ TECHNOLOGIES, INC.", Date: "10/08/2001", }, "NLC": PNPID{ ID: "NLC", Company: "NEXT LEVEL COMMUNICATIONS", Date: "11/29/1996", }, "NXC": PNPID{ ID: "NXC", Company: "NEXTCOM K.K.", Date: "11/29/1996", }, "NBT": PNPID{ ID: "NBT", Company: "NINGBO BESTWINNING TECHNOLOGY CO., LTD", Date: "09/05/2006", }, "BOI": PNPID{ ID: "BOI", Company: "NINGBO BOIGLE DIGITAL TECHNOLOGY CO.,LTD", Date: "11/25/2009", }, "AVI": PNPID{ ID: "AVI", Company: "NIPPON AVIONICS CO.,LTD", Date: "10/23/2000", }, "GSB": PNPID{ ID: "GSB", Company: "NIPPONDENCHI CO,.LTD", Date: "05/24/2000", }, "NSI": PNPID{ ID: "NSI", Company: "NISSEI ELECTRIC CO.,LTD", Date: "01/13/2000", }, "NIS": PNPID{ ID: "NIS", Company: "NISSEI ELECTRIC COMPANY", Date: "11/29/1996", }, "NTS": PNPID{ ID: "NTS", Company: "NITS TECHNOLOGY INC.", Date: "12/19/2006", }, "NCA": PNPID{ ID: "NCA", Company: "NIXDORF COMPANY", 
Date: "11/29/1996", }, "NNC": PNPID{ ID: "NNC", Company: "NNC", Date: "11/29/1996", }, "NDS": PNPID{ ID: "NDS", Company: "NOKIA DATA", Date: "11/29/1996", }, "NOK": PNPID{ ID: "NOK", Company: "NOKIA DISPLAY PRODUCTS", Date: "11/29/1996", }, "NMP": PNPID{ ID: "NMP", Company: "NOKIA MOBILE PHONES", Date: "11/29/1996", }, "NOR": PNPID{ ID: "NOR", Company: "NORAND CORPORATION", Date: "03/19/1997", }, "NCE": PNPID{ ID: "NCE", Company: "NORCENT TECHNOLOGY, INC.", Date: "06/20/2007", }, "NOE": PNPID{ ID: "NOE", Company: "NORDICEYE AB", Date: "09/23/2009", }, "NRI": PNPID{ ID: "NRI", Company: "NORITAKE ITRON CORPORATION", Date: "11/13/2017", }, "NOI": PNPID{ ID: "NOI", Company: "NORTH INVENT A/S", Date: "05/04/2010", }, "NCS": PNPID{ ID: "NCS", Company: "NORTHGATE COMPUTER SYSTEMS", Date: "11/29/1996", }, "NOT": PNPID{ ID: "NOT", Company: "NOT LIMITED INC", Date: "01/30/1998", }, "NWP": PNPID{ ID: "NWP", Company: "NOVAWEB TECHNOLOGIES INC", Date: "06/12/1998", }, "NVL": PNPID{ ID: "NVL", Company: "NOVELL INC", Date: "11/29/1996", }, "NSP": PNPID{ ID: "NSP", Company: "NSPIRE SYSTEM INC.", Date: "02/13/2007", }, "NTT": PNPID{ ID: "NTT", Company: "NTT ADVANCED TECHNOLOGY CORPORATION", Date: "08/19/2004", }, "NUI": PNPID{ ID: "NUI", Company: "NU INC.", Date: "08/29/2007", }, "NUG": PNPID{ ID: "NUG", Company: "NU TECHNOLOGY, INC.", Date: "04/16/2004", }, "NFS": PNPID{ ID: "NFS", Company: "NUMBER FIVE SOFTWARE", Date: "02/22/1999", }, "KNX": PNPID{ ID: "KNX", Company: "NUTECH MARKETING PTL", Date: "11/29/1996", }, "NVI": PNPID{ ID: "NVI", Company: "NUVISION US, INC.", Date: "09/05/2006", }, "NTN": PNPID{ ID: "NTN", Company: "NUVOTON TECHNOLOGY CORPORATION", Date: "10/09/2008", }, "NVD": PNPID{ ID: "NVD", Company: "NVIDIA", Date: "11/29/1996", }, "NWC": PNPID{ ID: "NWC", Company: "NW COMPUTER ENGINEERING", Date: "02/03/1997", }, "NXP": PNPID{ ID: "NXP", Company: "NXP SEMICONDUCTORS BV.", Date: "06/14/2007", }, "NXT": PNPID{ ID: "NXT", Company: "NZXT (PNP SAME EDID)_", Date: 
"07/15/2020", }, "OAK": PNPID{ ID: "OAK", Company: "OAK TECH INC", Date: "11/29/1996", }, "OAS": PNPID{ ID: "OAS", Company: "OASYS TECHNOLOGY COMPANY", Date: "11/29/1996", }, "OMC": PNPID{ ID: "OMC", Company: "OBJIX MULTIMEDIA CORPORATION", Date: "11/29/1996", }, "PCB": PNPID{ ID: "PCB", Company: "OCTAL S.A.", Date: "02/24/1998", }, "OVR": PNPID{ ID: "OVR", Company: "OCULUS VR, INC.", Date: "10/19/2012", }, "ODM": PNPID{ ID: "ODM", Company: "ODME INC.", Date: "09/23/1998", }, "ODR": PNPID{ ID: "ODR", Company: "ODRAC", Date: "06/21/2001", }, "ATV": PNPID{ ID: "ATV", Company: "OFFICE DEPOT, INC.", Date: "06/13/2007", }, "OKI": PNPID{ ID: "OKI", Company: "OKI ELECTRIC INDUSTRIAL COMPANY LTD", Date: "11/29/1996", }, "OQI": PNPID{ ID: "OQI", Company: "OKSORI COMPANY LTD", Date: "11/29/1996", }, "OSR": PNPID{ ID: "OSR", Company: "OKSORI COMPANY LTD", Date: "11/29/1996", }, "OCN": PNPID{ ID: "OCN", Company: "OLFAN", Date: "11/29/1996", }, "OLC": PNPID{ ID: "OLC", Company: "OLICOM A/S", Date: "11/29/1996", }, "OLD": PNPID{ ID: "OLD", Company: "OLIDATA S.P.A.", Date: "03/13/2006", }, "OLT": PNPID{ ID: "OLT", Company: "OLITEC S.A.", Date: "11/29/1996", }, "OLV": PNPID{ ID: "OLV", Company: "OLITEC S.A.", Date: "11/29/1996", }, "OLI": PNPID{ ID: "OLI", Company: "OLIVETTI", Date: "11/29/1996", }, "OLY": PNPID{ ID: "OLY", Company: "OLYMPUS CORPORATION", Date: "05/02/2005", }, "OTK": PNPID{ ID: "OTK", Company: "OMNITEK", Date: "09/19/2013", }, "OMN": PNPID{ ID: "OMN", Company: "OMNITEL", Date: "04/28/1998", }, "OMR": PNPID{ ID: "OMR", Company: "OMRON CORPORATION", Date: "11/29/1996", }, "ONS": PNPID{ ID: "ONS", Company: "ON SYSTEMS INC", Date: "11/29/1996", }, "ONE": PNPID{ ID: "ONE", Company: "ONEAC CORPORATION", Date: "04/14/1998", }, "ONK": PNPID{ ID: "ONK", Company: "ONKYO CORPORATION", Date: "06/16/2005", }, "ONL": PNPID{ ID: "ONL", Company: "ONLIVE, INC", Date: "09/03/2010", }, "TIV": PNPID{ ID: "TIV", Company: "OOO TECHNOINVEST", Date: "08/05/1997", }, "OPC": PNPID{ ID: 
"OPC", Company: "OPCODE INC", Date: "11/29/1996", }, "OCS": PNPID{ ID: "OCS", Company: "OPEN CONNECT SOLUTIONS", Date: "09/13/1999", }, "ONW": PNPID{ ID: "ONW", Company: "OPEN NETWORKS LTD", Date: "04/25/2003", }, "OSI": PNPID{ ID: "OSI", Company: "OPEN STACK, INC.", Date: "07/22/2013", }, "OPP": PNPID{ ID: "OPP", Company: "OPPO DIGITAL, INC.", Date: "06/19/2012", }, "OPT": PNPID{ ID: "OPT", Company: "OPTI INC", Date: "11/29/1996", }, "OSP": PNPID{ ID: "OSP", Company: "OPTI-UPS CORPORATION", Date: "07/01/1997", }, "OBS": PNPID{ ID: "OBS", Company: "OPTIBASE TECHNOLOGIES", Date: "11/01/2010", }, "OSD": PNPID{ ID: "OSD", Company: "OPTICAL SYSTEMS DESIGN PTY LTD", Date: "06/03/2013", }, "OIC": PNPID{ ID: "OIC", Company: "OPTION INDUSTRIAL COMPUTERS", Date: "05/07/2001", }, "OIN": PNPID{ ID: "OIN", Company: "OPTION INTERNATIONAL", Date: "10/23/2000", }, "OIM": PNPID{ ID: "OIM", Company: "OPTION INTERNATIONAL", Date: "01/30/1997", }, "OPV": PNPID{ ID: "OPV", Company: "OPTIVISION INC", Date: "11/29/1996", }, "OTT": PNPID{ ID: "OTT", Company: "OPTO22, INC.", Date: "10/06/1998", }, "OTM": PNPID{ ID: "OTM", Company: "OPTOMA CORPORATION          ", Date: "04/20/2010", }, "OEI": PNPID{ ID: "OEI", Company: "OPTUM ENGINEERING INC.", Date: "08/02/2010", }, "OTI": PNPID{ ID: "OTI", Company: "ORCHID TECHNOLOGY", Date: "11/29/1996", }, "ORG": PNPID{ ID: "ORG", Company: "ORGA KARTENSYSTEME GMBH", Date: "10/24/1998", }, "TOP": PNPID{ ID: "TOP", Company: "ORION COMMUNICATIONS CO., LTD.", Date: "04/30/2007", }, "ORN": PNPID{ ID: "ORN", Company: "ORION ELECTRIC CO., LTD.", Date: "01/19/2005", }, "OEC": PNPID{ ID: "OEC", Company: "ORION ELECTRIC CO.,LTD", Date: "01/13/2000", }, "OSA": PNPID{ ID: "OSA", Company: "OSAKA MICRO COMPUTER, INC.", Date: "09/05/2003", }, "ORI": PNPID{ ID: "ORI", Company: "OSR OPEN SYSTEMS RESOURCES, INC.", Date: "01/20/1999", }, "OOS": PNPID{ ID: "OOS", Company: "OSRAM", Date: "04/25/2002", }, "OUK": PNPID{ ID: "OUK", Company: "OUK COMPANY LTD", Date: 
"11/29/1996", }, "OTB": PNPID{ ID: "OTB", Company: "OUTSIDETHEBOXSTUFF.COM", Date: "09/03/2010", }, "OXU": PNPID{ ID: "OXU", Company: "OXUS RESEARCH S.A.", Date: "11/29/1996", }, "OZC": PNPID{ ID: "OZC", Company: "OZ CORPORATION", Date: "08/07/2012", }, "PMS": PNPID{ ID: "PMS", Company: "PABIAN EMBEDDED SYSTEMS", Date: "02/28/2017", }, "PAC": PNPID{ ID: "PAC", Company: "PACIFIC AVIONICS CORPORATION", Date: "11/29/1996", }, "PCW": PNPID{ ID: "PCW", Company: "PACIFIC COMMWARE INC", Date: "11/29/1996", }, "PIE": PNPID{ ID: "PIE", Company: "PACIFIC IMAGE ELECTRONICS COMPANY LTD", Date: "10/21/1997", }, "PBL": PNPID{ ID: "PBL", Company: "PACKARD BELL ELECTRONICS", Date: "11/29/1996", }, "PBN": PNPID{ ID: "PBN", Company: "PACKARD BELL NEC", Date: "11/29/1996", }, "PGI": PNPID{ ID: "PGI", Company: "PACSGEAR, INC.", Date: "08/13/2012", }, "QFF": PNPID{ ID: "QFF", Company: "PADIX CO., INC.", Date: "09/13/1999", }, "PJT": PNPID{ ID: "PJT", Company: "PAN JIT INTERNATIONAL INC.", Date: "08/03/2004", }, "PNS": PNPID{ ID: "PNS", Company: "PANASCOPE", Date: "01/01/1994", }, "MDO": PNPID{ ID: "MDO", Company: "PANASONIC", Date: "11/29/1996", }, "PLF": PNPID{ ID: "PLF", Company: "PANASONIC AVIONICS CORPORATION", Date: "08/13/2010", }, "MEI": PNPID{ ID: "MEI", Company: "PANASONIC INDUSTRY COMPANY", Date: "11/29/1996", }, "PNL": PNPID{ ID: "PNL", Company: "PANELVIEW, INC.", Date: "08/04/2003", }, "PTL": PNPID{ ID: "PTL", Company: "PANTEL INC", Date: "11/29/1996", }, "PTA": PNPID{ ID: "PTA", Company: "PAR TECH INC.", Date: "01/26/2011", }, "PRT": PNPID{ ID: "PRT", Company: "PARADE TECHNOLOGIES, LTD.", Date: "04/06/2012", }, "PGM": PNPID{ ID: "PGM", Company: "PARADIGM ADVANCED RESEARCH CENTRE", Date: "06/16/2005", }, "PAR": PNPID{ ID: "PAR", Company: "PARALLAN COMP INC", Date: "11/29/1996", }, "PLX": PNPID{ ID: "PLX", Company: "PARALLAX GRAPHICS", Date: "11/29/1996", }, "RCE": PNPID{ ID: "RCE", Company: "PARC D'ACTIVITE DES BELLEVUES", Date: "11/29/1996", }, "POT": PNPID{ ID: "POT", 
Company: "PARROT", Date: "11/25/2014", }, "PTH": PNPID{ ID: "PTH", Company: "PATHLIGHT TECHNOLOGY INC", Date: "11/29/1996", }, "PCX": PNPID{ ID: "PCX", Company: "PC XPERTEN", Date: "02/24/1998", }, "PCT": PNPID{ ID: "PCT", Company: "PC-TEL INC", Date: "05/02/1997", }, "PCK": PNPID{ ID: "PCK", Company: "PCBANK21", Date: "02/13/2007", }, "PCM": PNPID{ ID: "PCM", Company: "PCM SYSTEMS CORPORATION", Date: "03/25/1997", }, "PDS": PNPID{ ID: "PDS", Company: "PD SYSTEMS INTERNATIONAL LTD", Date: "03/20/1999", }, "PDT": PNPID{ ID: "PDT", Company: "PDTS - PROZESSDATENTECHNIK UND SYSTEME", Date: "02/10/1998", }, "PEG": PNPID{ ID: "PEG", Company: "PEGATRON CORPORATION", Date: "08/27/2013", }, "PEI": PNPID{ ID: "PEI", Company: "PEI ELECTRONICS INC", Date: "04/06/1998", }, "PVM": PNPID{ ID: "PVM", Company: "PENTA STUDIOTECHNIK GMBH", Date: "05/05/2010", }, "PCL": PNPID{ ID: "PCL", Company: "PENTEL.CO.,LTD", Date: "02/25/2002", }, "PEP": PNPID{ ID: "PEP", Company: "PEPPERCON AG", Date: "04/12/2006", }, "PPX": PNPID{ ID: "PPX", Company: "PERCEPTIVE PIXEL INC.", Date: "05/04/2010", }, "PER": PNPID{ ID: "PER", Company: "PERCEPTIVE SIGNAL TECHNOLOGIES", Date: "05/13/1997", }, "PRC": PNPID{ ID: "PRC", Company: "PERCOMM", Date: "04/24/2001", }, "PCO": PNPID{ ID: "PCO", Company: "PERFORMANCE CONCEPTS INC.,", Date: "09/24/2002", }, "IPN": PNPID{ ID: "IPN", Company: "PERFORMANCE TECHNOLOGIES", Date: "02/24/2004", }, "PSL": PNPID{ ID: "PSL", Company: "PERLE SYSTEMS LIMITED", Date: "02/22/1999", }, "PON": PNPID{ ID: "PON", Company: "PERPETUAL TECHNOLOGIES, LLC", Date: "01/13/2000", }, "PAM": PNPID{ ID: "PAM", Company: "PETER ANTESBERGER MESSTECHNIK", Date: "04/28/1998", }, "PSD": PNPID{ ID: "PSD", Company: "PEUS-SYSTEMS GMBH", Date: "11/29/1996", }, "PCA": PNPID{ ID: "PCA", Company: "PHILIPS BU ADD ON CARD", Date: "11/29/1996", }, "PHS": PNPID{ ID: "PHS", Company: "PHILIPS COMMUNICATION SYSTEMS", Date: "11/29/1996", }, "PHL": PNPID{ ID: "PHL", Company: "PHILIPS CONSUMER ELECTRONICS 
COMPANY", Date: "11/29/1996", }, "PHE": PNPID{ ID: "PHE", Company: "PHILIPS MEDICAL SYSTEMS BOEBLINGEN GMBH", Date: "04/20/2010", }, "PSC": PNPID{ ID: "PSC", Company: "PHILIPS SEMICONDUCTORS", Date: "11/29/1996", }, "PXC": PNPID{ ID: "PXC", Company: "PHOENIX CONTACT", Date: "02/27/2008", }, "PNX": PNPID{ ID: "PNX", Company: "PHOENIX TECHNOLOGIES, LTD.", Date: "11/08/1999", }, "PPC": PNPID{ ID: "PPC", Company: "PHOENIXTEC POWER COMPANY LTD", Date: "05/16/1999", }, "PMX": PNPID{ ID: "PMX", Company: "PHOTOMATRIX", Date: "11/29/1996", }, "PHO": PNPID{ ID: "PHO", Company: "PHOTONICS SYSTEMS INC.", Date: "06/03/2002", }, "RSC": PNPID{ ID: "RSC", Company: "PHOTOTELESIS", Date: "03/16/1998", }, "PHY": PNPID{ ID: "PHY", Company: "PHYLON COMMUNICATIONS", Date: "11/29/1996", }, "PPR": PNPID{ ID: "PPR", Company: "PICPRO", Date: "10/18/2004", }, "PIC": PNPID{ ID: "PIC", Company: "PICTURALL LTD.", Date: "11/13/2015", }, "PHC": PNPID{ ID: "PHC", Company: "PIJNENBURG BEHEER N.V.", Date: "04/24/2001", }, "PVR": PNPID{ ID: "PVR", Company: "PIMAX TECH. 
CO., LTD", Date: "02/07/2017", }, "PCI": PNPID{ ID: "PCI", Company: "PIONEER COMPUTER INC", Date: "11/29/1996", }, "PIO": PNPID{ ID: "PIO", Company: "PIONEER ELECTRONIC CORPORATION", Date: "07/16/1997", }, "PBV": PNPID{ ID: "PBV", Company: "PITNEY BOWES", Date: "09/13/1999", }, "PBI": PNPID{ ID: "PBI", Company: "PITNEY BOWES", Date: "11/29/1996", }, "PQI": PNPID{ ID: "PQI", Company: "PIXEL QI", Date: "06/24/2009", }, "PVN": PNPID{ ID: "PVN", Company: "PIXEL VISION", Date: "11/29/1996", }, "PXE": PNPID{ ID: "PXE", Company: "PIXELA CORPORATION", Date: "11/21/2007", }, "PXN": PNPID{ ID: "PXN", Company: "PIXELNEXT INC", Date: "03/29/2018", }, "PIX": PNPID{ ID: "PIX", Company: "PIXIE TECH INC", Date: "11/29/1996", }, "PTS": PNPID{ ID: "PTS", Company: "PLAIN TREE SYSTEMS INC", Date: "11/29/1996", }, "PNR": PNPID{ ID: "PNR", Company: "PLANAR SYSTEMS, INC.", Date: "08/11/2003", }, "PLV": PNPID{ ID: "PLV", Company: "PLUS VISION CORP.", Date: "07/05/2001", }, "PMC": PNPID{ ID: "PMC", Company: "PMC CONSUMER ELECTRONICS LTD", Date: "12/11/1996", }, "SPR": PNPID{ ID: "SPR", Company: "PMNS GMBH", Date: "10/08/2002", }, "PMM": PNPID{ ID: "PMM", Company: "POINT MULTIMEDIA SYSTEM", Date: "06/09/1997", }, "PLY": PNPID{ ID: "PLY", Company: "POLYCOM INC.", Date: "06/19/2002", }, "POL": PNPID{ ID: "POL", Company: "POLYCOMP (PTY) LTD.", Date: "02/14/2006", }, "COW": PNPID{ ID: "COW", Company: "POLYCOW PRODUCTIONS", Date: "03/15/2001", }, "POR": PNPID{ ID: "POR", Company: "PORTALIS LC", Date: "11/01/2008", }, "POS": PNPID{ ID: "POS", Company: "POSITIVO TECNOLOGIA S.A.", Date: "09/01/2017", }, "ARO": PNPID{ ID: "ARO", Company: "POSO INTERNATIONAL B.V.", Date: "08/01/1997", }, "PEC": PNPID{ ID: "PEC", Company: "POTRANS ELECTRICAL CORP.", Date: "07/16/1999", }, "PCC": PNPID{ ID: "PCC", Company: "POWERCOM TECHNOLOGY COMPANY LTD", Date: "09/02/1997", }, "CPX": PNPID{ ID: "CPX", Company: "POWERMATIC DATA SYSTEMS", Date: "11/29/1996", }, "PET": PNPID{ ID: "PET", Company: "PRACTICAL ELECTRONIC 
TOOLS", Date: "02/22/1999", }, "PPI": PNPID{ ID: "PPI", Company: "PRACTICAL PERIPHERALS", Date: "11/29/1996", }, "PSE": PNPID{ ID: "PSE", Company: "PRACTICAL SOLUTIONS PTE., LTD.", Date: "10/06/1998", }, "PRD": PNPID{ ID: "PRD", Company: "PRAIM S.R.L.", Date: "11/29/1996", }, "PEL": PNPID{ ID: "PEL", Company: "PRIMAX ELECTRIC LTD", Date: "11/29/1996", }, "SYX": PNPID{ ID: "SYX", Company: "PRIME SYSTEMS, INC.", Date: "10/21/2003", }, "PVI": PNPID{ ID: "PVI", Company: "PRIME VIEW INTERNATIONAL CO., LTD", Date: "07/06/2009", }, "PGS": PNPID{ ID: "PGS", Company: "PRINCETON GRAPHIC SYSTEMS", Date: "11/29/1996", }, "PIM": PNPID{ ID: "PIM", Company: "PRISM, LLC", Date: "07/24/2007", }, "PRI": PNPID{ ID: "PRI", Company: "PRIVA HORTIMATION BV", Date: "10/22/1997", }, "PLC": PNPID{ ID: "PLC", Company: "PRO-LOG CORPORATION", Date: "11/29/1996", }, "PRA": PNPID{ ID: "PRA", Company: "PRO/AUTOMATION", Date: "07/16/1999", }, "PCP": PNPID{ ID: "PCP", Company: "PROCOMP USA INC", Date: "11/29/1996", }, "PSY": PNPID{ ID: "PSY", Company: "PRODEA SYSTEMS INC.", Date: "02/04/2013", }, "PDV": PNPID{ ID: "PDV", Company: "PRODRIVE B.V.", Date: "01/18/2005", }, "PJA": PNPID{ ID: "PJA", Company: "PROJECTA", Date: "01/29/1997", }, "DHT": PNPID{ ID: "DHT", Company: "PROJECTAVISION INC", Date: "01/14/1998", }, "PJD": PNPID{ ID: "PJD", Company: "PROJECTIONDESIGN AS", Date: "09/23/2002", }, "PLM": PNPID{ ID: "PLM", Company: "PROLINK MICROSYSTEMS CORP.", Date: "02/25/2002", }, "PMT": PNPID{ ID: "PMT", Company: "PROMATE ELECTRONIC CO., LTD.", Date: "01/13/2003", }, "PRM": PNPID{ ID: "PRM", Company: "PROMETHEUS", Date: "11/29/1996", }, "PTI": PNPID{ ID: "PTI", Company: "PROMISE TECHNOLOGY INC", Date: "01/02/1997", }, "PAD": PNPID{ ID: "PAD", Company: "PROMOTION AND DISPLAY TECHNOLOGY LTD.", Date: "04/24/2001", }, "TEL": PNPID{ ID: "TEL", Company: "PROMOTION AND DISPLAY TECHNOLOGY LTD.", Date: "04/24/2001", }, "PGP": PNPID{ ID: "PGP", Company: "PROPAGAMMA KOMMUNIKATION", Date: "04/19/2000", }, "PSM": 
PNPID{ ID: "PSM", Company: "PROSUM", Date: "11/29/1996", }, "PRO": PNPID{ ID: "PRO", Company: "PROTEON", Date: "11/29/1996", }, "PVG": PNPID{ ID: "PVG", Company: "PROVIEW GLOBAL CO., LTD", Date: "10/08/2002", }, "PXM": PNPID{ ID: "PXM", Company: "PROXIM INC", Date: "09/19/1997", }, "PRX": PNPID{ ID: "PRX", Company: "PROXIMA CORPORATION", Date: "11/29/1996", }, "PTC": PNPID{ ID: "PTC", Company: "PS TECHNOLOGY CORPORATION", Date: "01/29/1997", }, "PSI": PNPID{ ID: "PSI", Company: "PSI-PERCEPTIVE SOLUTIONS INC", Date: "11/29/1996", }, "PDM": PNPID{ ID: "PDM", Company: "PSION DACOM PLC.", Date: "11/08/1999", }, "PLT": PNPID{ ID: "PLT", Company: "PT HARTONO ISTANA TEKNOLOGI", Date: "05/05/2010", }, "PUL": PNPID{ ID: "PUL", Company: "PULSE-EIGHT LTD", Date: "09/12/2012", }, "PDR": PNPID{ ID: "PDR", Company: "PURE DATA INC", Date: "11/29/1996", }, "PPP": PNPID{ ID: "PPP", Company: "PURUP PREPRESS AS", Date: "11/29/1996", }, "QLC": PNPID{ ID: "QLC", Company: "Q-LOGIC", Date: "11/29/1996", }, "QDL": PNPID{ ID: "QDL", Company: "QD LASER, INC.", Date: "05/31/2018", }, "HRE": PNPID{ ID: "HRE", Company: "QINGDAO HAIER ELECTRONICS CO., LTD.", Date: "04/12/2006", }, "QSC": PNPID{ ID: "QSC", Company: "QSC, LLC", Date: "01/18/2019", }, "QTR": PNPID{ ID: "QTR", Company: "QTRONIX CORPORATION", Date: "11/29/1996", }, "DHQ": PNPID{ ID: "DHQ", Company: "QUADRAM", Date: "11/29/1996", }, "QDM": PNPID{ ID: "QDM", Company: "QUADRAM", Date: "11/29/1996", }, "QCL": PNPID{ ID: "QCL", Company: "QUADRANT COMPONENTS INC", Date: "04/03/1997", }, "QCC": PNPID{ ID: "QCC", Company: "QUAKECOM COMPANY LTD", Date: "03/23/1998", }, "QCP": PNPID{ ID: "QCP", Company: "QUALCOMM INC", Date: "05/16/1999", }, "QCI": PNPID{ ID: "QCI", Company: "QUANTA COMPUTER INC", Date: "11/29/1996", }, "QDS": PNPID{ ID: "QDS", Company: "QUANTA DISPLAY INC.", Date: "04/25/2002", }, "QTM": PNPID{ ID: "QTM", Company: "QUANTUM", Date: "11/29/1996", }, "QTD": PNPID{ ID: "QTD", Company: "QUANTUM 3D INC", Date: "05/23/1997", }, 
"QDI": PNPID{ ID: "QDI", Company: "QUANTUM DATA INCORPORATED", Date: "03/15/2001", }, "QSI": PNPID{ ID: "QSI", Company: "QUANTUM SOLUTIONS, INC.", Date: "01/13/2000", }, "QVU": PNPID{ ID: "QVU", Company: "QUARTICS", Date: "11/04/2010", }, "QUA": PNPID{ ID: "QUA", Company: "QUATOGRAPHIC AG", Date: "01/13/2000", }, "QTH": PNPID{ ID: "QTH", Company: "QUESTECH LTD", Date: "01/13/2000", }, "QUE": PNPID{ ID: "QUE", Company: "QUESTRA CONSULTING", Date: "01/30/1998", }, "QCK": PNPID{ ID: "QCK", Company: "QUICK CORPORATION", Date: "11/29/1996", }, "QFI": PNPID{ ID: "QFI", Company: "QUICKFLEX, INC", Date: "08/04/1998", }, "QTI": PNPID{ ID: "QTI", Company: "QUICKNET TECHNOLOGIES INC", Date: "11/29/1996", }, "RSQ": PNPID{ ID: "RSQ", Company: "R SQUARED", Date: "11/08/1999", }, "RPT": PNPID{ ID: "RPT", Company: "R.P.T.INTERGROUPS", Date: "11/29/1996", }, "RII": PNPID{ ID: "RII", Company: "RACAL INTERLAN INC", Date: "11/29/1996", }, "TSF": PNPID{ ID: "TSF", Company: "RACAL-AIRTECH SOFTWARE FORGE LTD", Date: "11/29/1996", }, "RAC": PNPID{ ID: "RAC", Company: "RACORE COMPUTER PRODUCTS INC", Date: "11/29/1996", }, "RRI": PNPID{ ID: "RRI", Company: "RADICOM RESEARCH INC", Date: "12/02/1997", }, "RCN": PNPID{ ID: "RCN", Company: "RADIO CONSULT SRL", Date: "09/24/2002", }, "RDN": PNPID{ ID: "RDN", Company: "RADIODATA GMBH", Date: "07/25/2012", }, "RLN": PNPID{ ID: "RLN", Company: "RADIOLAN INC", Date: "11/29/1996", }, "RSN": PNPID{ ID: "RSN", Company: "RADIOSPIRE NETWORKS, INC.", Date: "06/14/2007", }, "RAD": PNPID{ ID: "RAD", Company: "RADISYS CORPORATION", Date: "11/29/1996", }, "RDS": PNPID{ ID: "RDS", Company: "RADIUS INC", Date: "03/07/1997", }, "RFI": PNPID{ ID: "RFI", Company: "RAFI GMBH & CO. 
KG", Date: "08/24/2015", }, "RDI": PNPID{ ID: "RDI", Company: "RAINBOW DISPLAYS, INC.", Date: "09/23/1998", }, "RNB": PNPID{ ID: "RNB", Company: "RAINBOW TECHNOLOGIES", Date: "11/29/1996", }, "RTS": PNPID{ ID: "RTS", Company: "RAINTREE SYSTEMS", Date: "10/02/2001", }, "BOB": PNPID{ ID: "BOB", Company: "RAINY ORCHARD", Date: "02/21/2000", }, "RSI": PNPID{ ID: "RSI", Company: "RAMPAGE SYSTEMS INC", Date: "11/29/1996", }, "RAN": PNPID{ ID: "RAN", Company: "RANCHO TECH INC", Date: "11/29/1996", }, "RTI": PNPID{ ID: "RTI", Company: "RANCHO TECH INC", Date: "11/29/1996", }, "RSX": PNPID{ ID: "RSX", Company: "RAPID TECH CORPORATION", Date: "11/29/1996", }, "RMC": PNPID{ ID: "RMC", Company: "RARITAN COMPUTER, INC", Date: "11/27/1998", }, "RAR": PNPID{ ID: "RAR", Company: "RARITAN, INC.", Date: "06/14/2007", }, "RAS": PNPID{ ID: "RAS", Company: "RASCOM INC", Date: "11/29/1996", }, "REX": PNPID{ ID: "REX", Company: "RATOC SYSTEMS, INC.", Date: "01/06/2012", }, "RAY": PNPID{ ID: "RAY", Company: "RAYLAR DESIGN, INC.", Date: "01/13/2000", }, "RZR": PNPID{ ID: "RZR", Company: "RAZER TAIWAN CO. 
LTD.", Date: "08/20/2018", }, "RCI": PNPID{ ID: "RCI", Company: "RC INTERNATIONAL", Date: "11/29/1996", }, "RCH": PNPID{ ID: "RCH", Company: "REACH TECHNOLOGY INC", Date: "02/09/1998", }, "RKC": PNPID{ ID: "RKC", Company: "REAKIN TECHNOLOHY CORPORATION", Date: "03/15/2001", }, "REA": PNPID{ ID: "REA", Company: "REAL D", Date: "11/15/2007", }, "RTL": PNPID{ ID: "RTL", Company: "REALTEK SEMICONDUCTOR COMPANY LTD", Date: "11/29/1996", }, "ALG": PNPID{ ID: "ALG", Company: "REALTEK SEMICONDUCTOR CORP.", Date: "10/25/2002", }, "RVI": PNPID{ ID: "RVI", Company: "REALVISION INC", Date: "11/29/1996", }, "REC": PNPID{ ID: "REC", Company: "RECOM", Date: "05/16/1999", }, "RHT": PNPID{ ID: "RHT", Company: "RED HAT, INC.", Date: "02/17/2011", }, "RWC": PNPID{ ID: "RWC", Company: "RED WING CORPORATION", Date: "01/08/1998", }, "RFX": PNPID{ ID: "RFX", Company: "REDFOX TECHNOLOGIES INC.", Date: "01/14/2014", }, "REF": PNPID{ ID: "REF", Company: "REFLECTIVITY, INC.", Date: "04/19/2000", }, "REH": PNPID{ ID: "REH", Company: "REHAN ELECTRONICS LTD.", Date: "02/15/2012", }, "RTC": PNPID{ ID: "RTC", Company: "RELIA TECHNOLOGIES", Date: "11/29/1996", }, "REL": PNPID{ ID: "REL", Company: "RELIANCE ELECTRIC IND CORPORATION", Date: "11/29/1996", }, "REN": PNPID{ ID: "REN", Company: "RENESAS TECHNOLOGY CORP.", Date: "06/14/2007", }, "RAT": PNPID{ ID: "RAT", Company: "RENT-A-TECH", Date: "02/22/1999", }, "RED": PNPID{ ID: "RED", Company: "RESEARCH ELECTRONICS DEVELOPMENT INC", Date: "12/02/1997", }, "RMP": PNPID{ ID: "RMP", Company: "RESEARCH MACHINES", Date: "11/29/1996", }, "RES": PNPID{ ID: "RES", Company: "RESMED PTY LTD", Date: "02/21/2000", }, "RET": PNPID{ ID: "RET", Company: "RESONANCE TECHNOLOGY, INC.", Date: "02/09/2011", }, "WTS": PNPID{ ID: "WTS", Company: "RESTEK ELECTRIC COMPANY LTD", Date: "11/29/1996", }, "RVL": PNPID{ ID: "RVL", Company: "REVEAL COMPUTER PROD", Date: "11/29/1996", }, "REV": PNPID{ ID: "REV", Company: "REVOLUTION DISPLAY, INC.", Date: "03/19/2014", }, "RGB": 
PNPID{ ID: "RGB", Company: "RGB SPECTRUM", Date: "11/14/2012", }, "EXN": PNPID{ ID: "EXN", Company: "RGB SYSTEMS, INC. DBA EXTRON ELECTRONICS", Date: "07/06/2008", }, "RIC": PNPID{ ID: "RIC", Company: "RICOH COMPANY, LTD.", Date: "05/13/2010", }, "RHD": PNPID{ ID: "RHD", Company: "RIGHTHAND TECHNOLOGIES", Date: "05/01/2012", }, "RIO": PNPID{ ID: "RIO", Company: "RIOS SYSTEMS COMPANY LTD", Date: "11/29/1996", }, "RIT": PNPID{ ID: "RIT", Company: "RITECH INC", Date: "04/14/1998", }, "RIV": PNPID{ ID: "RIV", Company: "RIVULET COMMUNICATIONS", Date: "07/19/2007", }, "BSG": PNPID{ ID: "BSG", Company: "ROBERT BOSCH GMBH", Date: "05/15/2014", }, "GRY": PNPID{ ID: "GRY", Company: "ROBERT GRAY COMPANY", Date: "03/31/1998", }, "RGL": PNPID{ ID: "RGL", Company: "ROBERTSON GEOLOGGING LTD", Date: "08/10/2000", }, "ROB": PNPID{ ID: "ROB", Company: "ROBUST ELECTRONICS GMBH", Date: "01/18/2008", }, "RAI": PNPID{ ID: "RAI", Company: "ROCKWELL AUTOMATION/INTECOLOR", Date: "03/13/1998", }, "RCO": PNPID{ ID: "RCO", Company: "ROCKWELL COLLINS", Date: "09/10/2010", }, "ASY": PNPID{ ID: "ASY", Company: "ROCKWELL COLLINS / AIRSHOW SYSTEMS", Date: "12/02/2004", }, "COL": PNPID{ ID: "COL", Company: "ROCKWELL COLLINS, INC.", Date: "06/14/2007", }, "ROK": PNPID{ ID: "ROK", Company: "ROCKWELL INTERNATIONAL", Date: "11/29/1996", }, "RSS": PNPID{ ID: "RSS", Company: "ROCKWELL SEMICONDUCTOR SYSTEMS", Date: "11/29/1996", }, "MAX": PNPID{ ID: "MAX", Company: "ROGEN TECH DISTRIBUTION INC", Date: "11/29/1996", }, "ROS": PNPID{ ID: "ROS", Company: "ROHDE & SCHWARZ", Date: "01/20/2012", }, "ROH": PNPID{ ID: "ROH", Company: "ROHM CO., LTD.", Date: "06/16/2004", }, "RHM": PNPID{ ID: "RHM", Company: "ROHM COMPANY LTD", Date: "05/13/1997", }, "RJA": PNPID{ ID: "RJA", Company: "ROLAND CORPORATION", Date: "11/29/1996", }, "RPI": PNPID{ ID: "RPI", Company: "ROOMPRO TECHNOLOGIES", Date: "07/09/2010", }, "ROP": PNPID{ ID: "ROP", Company: "ROPER INTERNATIONAL LTD", Date: "05/16/1999", }, "RMT": PNPID{ ID: "RMT", 
Company: "ROPER MOBILE", Date: "07/02/2010", }, "RSV": PNPID{ ID: "RSV", Company: "ROSS VIDEO LTD", Date: "06/11/2012", }, "TRL": PNPID{ ID: "TRL", Company: "ROYAL INFORMATION", Date: "11/29/1996", }, "RZS": PNPID{ ID: "RZS", Company: "ROZSNYÓ, S.R.O.", Date: "03/24/2014", }, "RVC": PNPID{ ID: "RVC", Company: "RSI SYSTEMS INC", Date: "04/28/1998", }, "RUN": PNPID{ ID: "RUN", Company: "RUNCO INTERNATIONAL", Date: "04/01/2004", }, "SNK": PNPID{ ID: "SNK", Company: "S&K ELECTRONICS", Date: "02/21/2000", }, "SSI": PNPID{ ID: "SSI", Company: "S-S TECHNOLOGY INC", Date: "11/29/1996", }, "TLV": PNPID{ ID: "TLV", Company: "S3 INC", Date: "01/07/1997", }, "SIM": PNPID{ ID: "SIM", Company: "S3 INC", Date: "11/29/1996", }, "SSS": PNPID{ ID: "SSS", Company: "S3 INC", Date: "11/29/1996", }, "SAE": PNPID{ ID: "SAE", Company: "SAAB AEROTECH", Date: "06/14/2007", }, "SAI": PNPID{ ID: "SAI", Company: "SAGE INC", Date: "07/16/1997", }, "SGM": PNPID{ ID: "SGM", Company: "SAGEM", Date: "09/05/2003", }, "SDK": PNPID{ ID: "SDK", Company: "SAIT-DEVLONICS", Date: "11/29/1996", }, "SAK": PNPID{ ID: "SAK", Company: "SAITEK LTD", Date: "05/16/1999", }, "SLT": PNPID{ ID: "SLT", Company: "SALT INTERNATIOINAL CORP.", Date: "09/05/2006", }, "SAM": PNPID{ ID: "SAM", Company: "SAMSUNG ELECTRIC COMPANY", Date: "11/29/1996", }, "SKT": PNPID{ ID: "SKT", Company: "SAMSUNG ELECTRO-MECHANICS COMPANY LTD", Date: "11/29/1996", }, "SSE": PNPID{ ID: "SSE", Company: "SAMSUNG ELECTRONIC CO.", Date: "08/10/2000", }, "STN": PNPID{ ID: "STN", Company: "SAMSUNG ELECTRONICS AMERICA", Date: "08/10/2000", }, "KYK": PNPID{ ID: "KYK", Company: "SAMSUNG ELECTRONICS AMERICA INC", Date: "02/24/1998", }, "SEM": PNPID{ ID: "SEM", Company: "SAMSUNG ELECTRONICS COMPANY LTD", Date: "11/29/1996", }, "SDI": PNPID{ ID: "SDI", Company: "SAMTRON DISPLAYS INC", Date: "11/29/1996", }, "JSK": PNPID{ ID: "JSK", Company: "SANKEN ELECTRIC CO., LTD", Date: "09/13/1999", }, "SSJ": PNPID{ ID: "SSJ", Company: "SANKYO SEIKI MFG.CO., LTD", 
Date: "01/28/2003", }, "SAA": PNPID{ ID: "SAA", Company: "SANRITZ AUTOMATION CO.,LTD.", Date: "02/25/2002", }, "STK": PNPID{ ID: "STK", Company: "SANTAK CORP.", Date: "11/27/1998", }, "SOC": PNPID{ ID: "SOC", Company: "SANTEC CORPORATION", Date: "01/12/2015", }, "SAN": PNPID{ ID: "SAN", Company: "SANYO ELECTRIC CO.,LTD.", Date: "11/08/1999", }, "SCD": PNPID{ ID: "SCD", Company: "SANYO ELECTRIC COMPANY LTD", Date: "11/29/1996", }, "SIB": PNPID{ ID: "SIB", Company: "SANYO ELECTRIC COMPANY LTD", Date: "11/29/1996", }, "TSC": PNPID{ ID: "TSC", Company: "SANYO ELECTRIC COMPANY LTD", Date: "11/29/1996", }, "ICN": PNPID{ ID: "ICN", Company: "SANYO ICON", Date: "11/29/1996", }, "SPN": PNPID{ ID: "SPN", Company: "SAPIENCE CORPORATION", Date: "11/29/1996", }, "SDA": PNPID{ ID: "SDA", Company: "SAT (SOCIETE ANONYME)", Date: "11/29/1996", }, "AVV": PNPID{ ID: "AVV", Company: "SBS TECHNOLOGIES (CANADA), INC. (WAS AVVIDA SYSTEMS, INC.)", Date: "12/17/2002", }, "SBS": PNPID{ ID: "SBS", Company: "SBS-OR INDUSTRIAL COMPUTERS GMBH", Date: "12/28/1998", }, "SGI": PNPID{ ID: "SGI", Company: "SCAN GROUP LTD", Date: "11/29/1996", }, "SCN": PNPID{ ID: "SCN", Company: "SCANPORT, INC.", Date: "08/05/2002", }, "KFC": PNPID{ ID: "KFC", Company: "SCD TECH", Date: "10/23/2002", }, "SPT": PNPID{ ID: "SPT", Company: "SCEPTRE TECH INC", Date: "11/29/1996", }, "SMB": PNPID{ ID: "SMB", Company: "SCHLUMBERGER", Date: "07/16/1999", }, "SCH": PNPID{ ID: "SCH", Company: "SCHLUMBERGER CARDS", Date: "04/28/1998", }, "SLR": PNPID{ ID: "SLR", Company: "SCHLUMBERGER TECHNOLOGY CORPORATE", Date: "08/10/2000", }, "SKD": PNPID{ ID: "SKD", Company: "SCHNEIDER & KOCH", Date: "11/29/1996", }, "PRF": PNPID{ ID: "PRF", Company: "SCHNEIDER ELECTRIC JAPAN HOLDINGS, LTD.", Date: "01/02/2003", }, "MGE": PNPID{ ID: "MGE", Company: "SCHNEIDER ELECTRIC S.A.", Date: "11/29/1996", }, "SLS": PNPID{ ID: "SLS", Company: "SCHNICK-SCHNACK-SYSTEMS GMBH", Date: "05/06/2009", }, "REM": PNPID{ ID: "REM", Company: "SCI SYSTEMS INC.", 
Date: "08/10/2000", }, "SCM": PNPID{ ID: "SCM", Company: "SCM MICROSYSTEMS INC", Date: "11/29/1996", }, "SCP": PNPID{ ID: "SCP", Company: "SCRIPTEL CORPORATION", Date: "06/14/2007", }, "SDR": PNPID{ ID: "SDR", Company: "SDR SYSTEMS", Date: "03/15/2001", }, "STY": PNPID{ ID: "STY", Company: "SDS TECHNOLOGIES", Date: "11/08/1999", }, "SDX": PNPID{ ID: "SDX", Company: "SDX BUSINESS SYSTEMS LTD", Date: "11/29/1996", }, "NIX": PNPID{ ID: "NIX", Company: "SEANIX TECHNOLOGY INC", Date: "04/09/2007", }, "SEA": PNPID{ ID: "SEA", Company: "SEANIX TECHNOLOGY INC.", Date: "02/24/1998", }, "SAG": PNPID{ ID: "SAG", Company: "SEDLBAUER", Date: "11/29/1996", }, "SEE": PNPID{ ID: "SEE", Company: "SEECOLOR CORPORATION", Date: "11/29/1996", }, "SCB": PNPID{ ID: "SCB", Company: "SEECUBIC B.V.", Date: "11/02/2012", }, "SRT": PNPID{ ID: "SRT", Company: "SEEREAL TECHNOLOGIES GMBH", Date: "06/27/2005", }, "SEC": PNPID{ ID: "SEC", Company: "SEIKO EPSON CORPORATION", Date: "11/29/1996", }, "SID": PNPID{ ID: "SID", Company: "SEIKO INSTRUMENTS INFORMATION DEVICES INC", Date: "12/16/1996", }, "SIU": PNPID{ ID: "SIU", Company: "SEIKO INSTRUMENTS USA INC", Date: "11/29/1996", }, "SEI": PNPID{ ID: "SEI", Company: "SEITZ & ASSOCIATES INC", Date: "01/30/1998", }, "SJE": PNPID{ ID: "SJE", Company: "SEJIN ELECTRON INC", Date: "08/20/1997", }, "SXG": PNPID{ ID: "SXG", Company: "SELEX GALILEO", Date: "10/01/2012", }, "STH": PNPID{ ID: "STH", Company: "SEMTECH CORPORATION", Date: "11/30/2001", }, "SEN": PNPID{ ID: "SEN", Company: "SENCORE", Date: "05/23/1997", }, "SET": PNPID{ ID: "SET", Company: "SENDTEK CORPORATION", Date: "11/08/1999", }, "SBT": PNPID{ ID: "SBT", Company: "SENSEBOARD TECHNOLOGIES AB", Date: "09/03/2002", }, "SVR": PNPID{ ID: "SVR", Company: "SENSICS, INC.", Date: "08/27/2015", }, "STU": PNPID{ ID: "STU", Company: "SENTELIC CORPORATION", Date: "07/27/2012", }, "SNC": PNPID{ ID: "SNC", Company: "SENTRONIC INTERNATIONAL CORP.", Date: "10/23/2000", }, "SEO": PNPID{ ID: "SEO", Company: 
"SEOS LTD", Date: "02/20/2003", }, "SEP": PNPID{ ID: "SEP", Company: "SEP ELETRONICA LTDA.", Date: "05/07/2001", }, "SQT": PNPID{ ID: "SQT", Company: "SEQUENT COMPUTER SYSTEMS INC", Date: "11/29/1996", }, "SES": PNPID{ ID: "SES", Company: "SESSION CONTROL LLC", Date: "09/03/2010", }, "SRD": PNPID{ ID: "SRD", Company: "SETRED", Date: "09/05/2006", }, "SVT": PNPID{ ID: "SVT", Company: "SEVIT CO., LTD.", Date: "06/25/2002", }, "SYT": PNPID{ ID: "SYT", Company: "SEYEON TECH COMPANY LTD", Date: "12/02/1997", }, "SVA": PNPID{ ID: "SVA", Company: "SGEG", Date: "02/21/2000", }, "STM": PNPID{ ID: "STM", Company: "SGS THOMSON MICROELECTRONICS", Date: "11/11/1997", }, "OYO": PNPID{ ID: "OYO", Company: "SHADOW SYSTEMS", Date: "11/29/1996", }, "SBC": PNPID{ ID: "SBC", Company: "SHANGHAI BELL TELEPHONE EQUIP MFG CO", Date: "04/30/1998", }, "HYL": PNPID{ ID: "HYL", Company: "SHANGHAI CHAI MING HUANG INFO&TECH CO, LTD", Date: "02/28/2017", }, "SGW": PNPID{ ID: "SGW", Company: "SHANGHAI GUOWEI SCIENCE AND TECHNOLOGY CO., LTD.", Date: "01/28/2011", }, "DPN": PNPID{ ID: "DPN", Company: "SHANGHAI LEXIANG TECHNOLOGY LIMITED", Date: "02/07/2017", }, "XQU": PNPID{ ID: "XQU", Company: "SHANGHAI SVA-DAV ELECTRONICS CO., LTD", Date: "07/24/2003", }, "SWL": PNPID{ ID: "SWL", Company: "SHAREDWARE LTD", Date: "08/11/1998", }, "SMM": PNPID{ ID: "SMM", Company: "SHARK MULTIMEDIA INC", Date: "11/29/1996", }, "DFK": PNPID{ ID: "DFK", Company: "SHARKTEC A/S", Date: "02/14/2006", }, "SHP": PNPID{ ID: "SHP", Company: "SHARP CORPORATION", Date: "11/29/1996", }, "SXT": PNPID{ ID: "SXT", Company: "SHARP TAKAYA ELECTRONIC INDUSTRY CO.,LTD.", Date: "06/24/2010", }, "CZC": PNPID{ ID: "CZC", Company: "SHENZHEN CHUANGZHICHENG TECHNOLOGY CO., LTD.", Date: "10/23/2013", }, "DLO": PNPID{ ID: "DLO", Company: "SHENZHEN DLODLO TECHNOLOGIES CO., LTD.", Date: "04/29/2019", }, "IXN": PNPID{ ID: "IXN", Company: "SHENZHEN INET MOBILE INTERNET TECHNOLOGY CO., LTD", Date: "11/04/2014", }, "SZM": PNPID{ ID: "SZM", 
Company: "SHENZHEN MTC CO., LTD", Date: "08/09/2013", }, "RMS": PNPID{ ID: "RMS", Company: "SHENZHEN RAMOS DIGITAL TECHNOLOGY CO., LTD", Date: "10/29/2014", }, "SSL": PNPID{ ID: "SSL", Company: "SHENZHEN SOUTH-TOP COMPUTER CO., LTD.", Date: "12/06/2013", }, "AZH": PNPID{ ID: "AZH", Company: "SHENZHEN THREE CONNAUGHT INFORMATION TECHNOLOGY CO., LTD. (3NOD GROUP)", Date: "09/17/2013", }, "XYE": PNPID{ ID: "XYE", Company: "SHENZHEN ZHUONA TECHNOLOGY CO., LTD.", Date: "10/01/2013", }, "HTR": PNPID{ ID: "HTR", Company: "SHENZHEN ZHUOYI HENGTONG COMPUTER TECHNOLOGY LIMITED", Date: "12/13/2013", }, "ZWE": PNPID{ ID: "ZWE", Company: "SHENZHEN ZOWEE TECHNOLOGY CO., LTD", Date: "05/26/2015", }, "SDE": PNPID{ ID: "SDE", Company: "SHERWOOD DIGITAL ELECTRONICS CORPORATION", Date: "11/29/1996", }, "SHC": PNPID{ ID: "SHC", Company: "SHIBASOKU CO., LTD.", Date: "05/26/2005", }, "SHT": PNPID{ ID: "SHT", Company: "SHIN HO TECH", Date: "11/29/1996", }, "SLB": PNPID{ ID: "SLB", Company: "SHLUMBERGER LTD", Date: "11/29/1996", }, "SAT": PNPID{ ID: "SAT", Company: "SHUTTLE TECH", Date: "11/29/1996", }, "CHG": PNPID{ ID: "CHG", Company: "SICHUAN CHANGHONG ELECTRIC CO, LTD.", Date: "02/26/2003", }, "CHO": PNPID{ ID: "CHO", Company: "SICHUANG CHANGHONG CORPORATION", Date: "11/30/2001", }, "SIE": PNPID{ ID: "SIE", Company: "SIEMENS", Date: "11/29/1996", }, "SDT": PNPID{ ID: "SDT", Company: "SIEMENS AG", Date: "02/14/2006", }, "SIA": PNPID{ ID: "SIA", Company: "SIEMENS AG", Date: "03/15/2001", }, "SNI": PNPID{ ID: "SNI", Company: "SIEMENS MICRODESIGN GMBH", Date: "11/29/1996", }, "SNP": PNPID{ ID: "SNP", Company: "SIEMENS NIXDORF INFO SYSTEMS", Date: "11/29/1996", }, "SSC": PNPID{ ID: "SSC", Company: "SIERRA SEMICONDUCTOR INC", Date: "11/29/1996", }, "SWI": PNPID{ ID: "SWI", Company: "SIERRA WIRELESS INC.", Date: "07/10/2003", }, "SIG": PNPID{ ID: "SIG", Company: "SIGMA DESIGNS INC", Date: "11/29/1996", }, "SGD": PNPID{ ID: "SGD", Company: "SIGMA DESIGNS, INC.", Date: "02/14/2006", }, "SCL": 
PNPID{ ID: "SCL", Company: "SIGMACOM CO., LTD.", Date: "04/25/2002", }, "STL": PNPID{ ID: "STL", Company: "SIGMATEL INC", Date: "03/03/1997", }, "DXS": PNPID{ ID: "DXS", Company: "SIGNET", Date: "10/23/2000", }, "STE": PNPID{ ID: "STE", Company: "SII IDO-TSUSHIN INC", Date: "04/03/1997", }, "SMT": PNPID{ ID: "SMT", Company: "SILCOM MANUFACTURING TECH INC", Date: "11/29/1996", }, "SXI": PNPID{ ID: "SXI", Company: "SILEX INSIDE", Date: "03/29/2018", }, "SXD": PNPID{ ID: "SXD", Company: "SILEX TECHNOLOGY, INC.", Date: "03/12/2009", }, "SMS": PNPID{ ID: "SMS", Company: "SILICOM MULTIMEDIA SYSTEMS INC", Date: "12/04/1996", }, "SGX": PNPID{ ID: "SGX", Company: "SILICON GRAPHICS INC", Date: "11/29/1996", }, "SII": PNPID{ ID: "SII", Company: "SILICON IMAGE, INC.", Date: "01/13/2000", }, "SIS": PNPID{ ID: "SIS", Company: "SILICON INTEGRATED SYSTEMS CORPORATION", Date: "11/29/1996", }, "SIL": PNPID{ ID: "SIL", Company: "SILICON LABORATORIES, INC", Date: "07/16/1998", }, "SLH": PNPID{ ID: "SLH", Company: "SILICON LIBRARY INC.", Date: "11/01/2008", }, "SOI": PNPID{ ID: "SOI", Company: "SILICON OPTIX CORPORATION", Date: "07/28/2005", }, "SLK": PNPID{ ID: "SLK", Company: "SILITEK CORPORATION", Date: "07/16/1997", }, "SPU": PNPID{ ID: "SPU", Company: "SIM2 MULTIMEDIA S.P.A.", Date: "09/05/2002", }, "SMP": PNPID{ ID: "SMP", Company: "SIMPLE COMPUTING", Date: "11/29/1996", }, "SPX": PNPID{ ID: "SPX", Company: "SIMPLEX TIME RECORDER CO.", Date: "03/15/2001", }, "SIN": PNPID{ ID: "SIN", Company: "SINGULAR TECHNOLOGY CO., LTD.", Date: "11/08/1999", }, "SNO": PNPID{ ID: "SNO", Company: "SINOSUN TECHNOLOGY CO., LTD", Date: "06/27/2005", }, "SIR": PNPID{ ID: "SIR", Company: "SIRIUS TECHNOLOGIES PTY LTD", Date: "03/13/1998", }, "FUN": PNPID{ ID: "FUN", Company: "SISEL MUHENDISLIK", Date: "04/25/2002", }, "STS": PNPID{ ID: "STS", Company: "SITECSYSTEM CO., LTD.", Date: "03/16/2005", }, "SIT": PNPID{ ID: "SIT", Company: "SITINTEL", Date: "11/29/1996", }, "TDG": PNPID{ ID: "TDG", Company: 
"SIX15 TECHNOLOGIES", Date: "09/14/2016", }, "SKY": PNPID{ ID: "SKY", Company: "SKYDATA S.P.A.", Date: "09/19/1997", }, "SKW": PNPID{ ID: "SKW", Company: "SKYWORTH", Date: "07/15/2020", }, "SCT": PNPID{ ID: "SCT", Company: "SMART CARD TECHNOLOGY", Date: "08/10/2000", }, "SMA": PNPID{ ID: "SMA", Company: "SMART MODULAR TECHNOLOGIES", Date: "04/04/1997", }, "SPL": PNPID{ ID: "SPL", Company: "SMART SILICON SYSTEMS PTY LTD", Date: "08/10/2000", }, "STI": PNPID{ ID: "STI", Company: "SMART TECH INC", Date: "11/29/1996", }, "SBI": PNPID{ ID: "SBI", Company: "SMART TECHNOLOGIES INC.", Date: "06/14/2007", }, "SMK": PNPID{ ID: "SMK", Company: "SMK CORPORATION", Date: "02/21/2000", }, "SNW": PNPID{ ID: "SNW", Company: "SNELL & WILCOX", Date: "04/25/2002", }, "MVM": PNPID{ ID: "MVM", Company: "SOBO VISION", Date: "06/14/2007", }, "SCX": PNPID{ ID: "SCX", Company: "SOCIONEXT INC.", Date: "05/14/2015", }, "LAN": PNPID{ ID: "LAN", Company: "SODEMAN LANCOM INC", Date: "11/29/1996", }, "SDF": PNPID{ ID: "SDF", Company: "SODIFF E&T CO., LTD.", Date: "06/01/2007", }, "SHG": PNPID{ ID: "SHG", Company: "SOFT & HARDWARE DEVELOPMENT GOLDAMMER GMBH", Date: "11/29/1996", }, "SBD": PNPID{ ID: "SBD", Company: "SOFTBED - CONSULTING & DEVELOPMENT LTD", Date: "12/23/1997", }, "SWC": PNPID{ ID: "SWC", Company: "SOFTWARE CAFÉ", Date: "11/29/1996", }, "SWT": PNPID{ ID: "SWT", Company: "SOFTWARE TECHNOLOGIES GROUP,INC.", Date: "11/29/2008", }, "SOL": PNPID{ ID: "SOL", Company: "SOLITRON TECHNOLOGIES INC", Date: "11/29/1996", }, "SLM": PNPID{ ID: "SLM", Company: "SOLOMON TECHNOLOGY CORPORATION", Date: "01/16/1998", }, "SXL": PNPID{ ID: "SXL", Company: "SOLUTIONINSIDE", Date: "05/08/2001", }, "ONX": PNPID{ ID: "ONX", Company: "SOMELEC Z.I. DU VERT GALANTA", Date: "11/29/1996", }, "HON": PNPID{ ID: "HON", Company: "SONITRONIX", Date: "02/03/2011", }, "SNX": PNPID{ ID: "SNX", Company: "SONIX COMM. 
LTD", Date: "11/29/1996", }, "SNV": PNPID{ ID: "SNV", Company: "SONOVE GMBH", Date: "03/29/2018", }, "SNY": PNPID{ ID: "SNY", Company: "SONY", Date: "11/29/1996", }, "SON": PNPID{ ID: "SON", Company: "SONY", Date: "11/29/1996", }, "SER": PNPID{ ID: "SER", Company: "SONY ERICSSON MOBILE COMMUNICATIONS INC.", Date: "04/16/2004", }, "SCO": PNPID{ ID: "SCO", Company: "SORCUS COMPUTER GMBH", Date: "01/13/2000", }, "SOR": PNPID{ ID: "SOR", Company: "SORCUS COMPUTER GMBH", Date: "11/29/1996", }, "SCC": PNPID{ ID: "SCC", Company: "SORD COMPUTER CORPORATION", Date: "11/29/1996", }, "SOT": PNPID{ ID: "SOT", Company: "SOTEC COMPANY LTD", Date: "05/21/1997", }, "FRS": PNPID{ ID: "FRS", Company: "SOUTH MOUNTAIN TECHNOLOGIES, LTD", Date: "02/14/2006", }, "SOY": PNPID{ ID: "SOY", Company: "SOYO GROUP, INC", Date: "12/18/2006", }, "SPI": PNPID{ ID: "SPI", Company: "SPACE-I CO., LTD.", Date: "05/11/2005", }, "SMI": PNPID{ ID: "SMI", Company: "SPACELABS MEDICAL INC", Date: "11/29/1996", }, "SPE": PNPID{ ID: "SPE", Company: "SPEA SOFTWARE AG", Date: "11/29/1996", }, "SPK": PNPID{ ID: "SPK", Company: "SPEAKERCRAFT", Date: "04/20/2010", }, "SLX": PNPID{ ID: "SLX", Company: "SPECIALIX", Date: "11/29/1996", }, "SGC": PNPID{ ID: "SGC", Company: "SPECTRAGRAPHICS CORPORATION", Date: "11/29/1996", }, "SSP": PNPID{ ID: "SSP", Company: "SPECTRUM SIGNAL PROECESSING INC", Date: "11/29/1996", }, "SPC": PNPID{ ID: "SPC", Company: "SPINCORE TECHNOLOGIES, INC", Date: "01/01/1994", }, "SRS": PNPID{ ID: "SRS", Company: "SR-SYSTEMS E.K.", Date: "11/19/2012", }, "STA": PNPID{ ID: "STA", Company: "ST ELECTRONICS SYSTEMS ASSEMBLY PTE LTD", Date: "12/28/1998", }, "STX": PNPID{ ID: "STX", Company: "ST-ERICSSON", Date: "12/09/2011", }, "STC": PNPID{ ID: "STC", Company: "STAC ELECTRONICS", Date: "11/29/1996", }, "SMC": PNPID{ ID: "SMC", Company: "STANDARD MICROSYSTEMS CORPORATION", Date: "11/29/1996", }, "STT": PNPID{ ID: "STT", Company: "STAR PAGING TELECOM TECH (SHENZHEN) CO. 
LTD.", Date: "09/23/1998", }, "STF": PNPID{ ID: "STF", Company: "STARFLIGHT ELECTRONICS", Date: "05/23/1997", }, "SGT": PNPID{ ID: "SGT", Company: "STARGATE TECHNOLOGY", Date: "11/29/1996", }, "SLF": PNPID{ ID: "SLF", Company: "STARLEAF", Date: "11/01/2010", }, "STR": PNPID{ ID: "STR", Company: "STARLIGHT NETWORKS INC", Date: "11/29/1996", }, "STW": PNPID{ ID: "STW", Company: "STARWIN INC.", Date: "04/24/2001", }, "SWS": PNPID{ ID: "SWS", Company: "STATIC", Date: "05/16/1999", }, "STB": PNPID{ ID: "STB", Company: "STB SYSTEMS INC", Date: "11/29/1996", }, "STD": PNPID{ ID: "STD", Company: "STD COMPUTER INC", Date: "11/29/1996", }, "STG": PNPID{ ID: "STG", Company: "STEREOGRAPHICS CORP.", Date: "10/02/2001", }, "SMO": PNPID{ ID: "SMO", Company: "STMICROELECTRONICS", Date: "06/14/2007", }, "STO": PNPID{ ID: "STO", Company: "STOLLMANN E+V GMBH", Date: "03/27/1997", }, "SAS": PNPID{ ID: "SAS", Company: "STORES AUTOMATED SYSTEMS INC", Date: "03/19/1997", }, "EZP": PNPID{ ID: "EZP", Company: "STORM TECHNOLOGY", Date: "10/17/1996", }, "STP": PNPID{ ID: "STP", Company: "STREAMPLAY LTD", Date: "02/04/2009", }, "SYK": PNPID{ ID: "SYK", Company: "STRYKER COMMUNICATIONS", Date: "10/10/2005", }, "SUB": PNPID{ ID: "SUB", Company: "SUBSPACE COMM. 
INC", Date: "11/29/1996", }, "SML": PNPID{ ID: "SML", Company: "SUMITOMO METAL INDUSTRIES, LTD.", Date: "09/13/1999", }, "SUM": PNPID{ ID: "SUM", Company: "SUMMAGRAPHICS CORPORATION", Date: "11/29/1996", }, "SCE": PNPID{ ID: "SCE", Company: "SUN CORPORATION", Date: "11/29/1996", }, "SUN": PNPID{ ID: "SUN", Company: "SUN ELECTRONICS CORPORATION", Date: "11/29/1996", }, "SVI": PNPID{ ID: "SVI", Company: "SUN MICROSYSTEMS", Date: "01/13/2003", }, "SNN": PNPID{ ID: "SNN", Company: "SUNNY ELEKTRONIK", Date: "11/14/2014", }, "SDS": PNPID{ ID: "SDS", Company: "SUNRIVER DATA SYSTEM", Date: "11/29/1996", }, "SGL": PNPID{ ID: "SGL", Company: "SUPER GATE TECHNOLOGY COMPANY LTD", Date: "12/30/1997", }, "SNT": PNPID{ ID: "SNT", Company: "SUPERNET INC", Date: "04/23/1998", }, "SUP": PNPID{ ID: "SUP", Company: "SUPRA CORPORATION", Date: "11/29/1996", }, "SUR": PNPID{ ID: "SUR", Company: "SURENAM COMPUTER CORPORATION", Date: "11/29/1996", }, "SRF": PNPID{ ID: "SRF", Company: "SURF COMMUNICATION SOLUTIONS LTD", Date: "03/23/1998", }, "SVD": PNPID{ ID: "SVD", Company: "SVD COMPUTER", Date: "04/14/1998", }, "SVS": PNPID{ ID: "SVS", Company: "SVSI", Date: "08/09/2008", }, "SYE": PNPID{ ID: "SYE", Company: "SY ELECTRONICS LTD", Date: "09/20/2010", }, "SYL": PNPID{ ID: "SYL", Company: "SYLVANIA COMPUTER PRODUCTS", Date: "06/12/1998", }, "SLI": PNPID{ ID: "SLI", Company: "SYMBIOS LOGIC INC", Date: "11/29/1996", }, "ISA": PNPID{ ID: "ISA", Company: "SYMBOL TECHNOLOGIES", Date: "06/02/1997", }, "SYM": PNPID{ ID: "SYM", Company: "SYMICRON COMPUTER COMMUNICATIONS LTD.", Date: "11/29/1996", }, "SYN": PNPID{ ID: "SYN", Company: "SYNAPTICS INC", Date: "11/29/1996", }, "SPS": PNPID{ ID: "SPS", Company: "SYNOPSYS INC", Date: "11/29/1996", }, "SXB": PNPID{ ID: "SXB", Company: "SYNTAX-BRILLIAN", Date: "05/08/2006", }, "STQ": PNPID{ ID: "STQ", Company: "SYNTHETEL CORPORATION", Date: "12/21/2015", }, "SYP": PNPID{ ID: "SYP", Company: "SYPRO CO LTD", Date: "11/27/1998", }, "SYS": PNPID{ ID: "SYS", 
Company: "SYSGRATION LTD", Date: "04/28/1997", }, "SLC": PNPID{ ID: "SLC", Company: "SYSLOGIC DATENTECHNIK AG", Date: "01/20/1999", }, "SME": PNPID{ ID: "SME", Company: "SYSMATE COMPANY", Date: "09/02/1997", }, "SIC": PNPID{ ID: "SIC", Company: "SYSMATE CORPORATION", Date: "05/05/1997", }, "SYC": PNPID{ ID: "SYC", Company: "SYSMIC", Date: "11/29/1996", }, "SGZ": PNPID{ ID: "SGZ", Company: "SYSTEC COMPUTER GMBH", Date: "10/02/1997", }, "SCI": PNPID{ ID: "SCI", Company: "SYSTEM CRAFT", Date: "11/29/1996", }, "SEB": PNPID{ ID: "SEB", Company: "SYSTEM ELEKTRONIK GMBH", Date: "04/19/2000", }, "SLA": PNPID{ ID: "SLA", Company: "SYSTEME LAUER GMBH&CO KG", Date: "03/20/1999", }, "UPS": PNPID{ ID: "UPS", Company: "SYSTEMS ENHANCEMENT", Date: "11/29/1996", }, "SST": PNPID{ ID: "SST", Company: "SYSTEMSOFT CORPORATION", Date: "11/29/1996", }, "SCR": PNPID{ ID: "SCR", Company: "SYSTRAN CORPORATION", Date: "11/29/1996", }, "SYV": PNPID{ ID: "SYV", Company: "SYVAX INC", Date: "11/29/1996", }, "TUA": PNPID{ ID: "TUA", Company: "T+A ELEKTROAKUSTIK GMBH", Date: "01/05/2011", }, "TMT": PNPID{ ID: "TMT", Company: "T-METRICS INC.", Date: "02/21/2000", }, "TCD": PNPID{ ID: "TCD", Company: "TAICOM DATA SYSTEMS CO., LTD.", Date: "10/08/2001", }, "TMR": PNPID{ ID: "TMR", Company: "TAICOM INTERNATIONAL INC", Date: "11/29/1996", }, "TKC": PNPID{ ID: "TKC", Company: "TAIKO ELECTRIC WORKS.LTD", Date: "03/15/2001", }, "TTX": PNPID{ ID: "TTX", Company: "TAITEX CORPORATION", Date: "02/03/2016", }, "TVM": PNPID{ ID: "TVM", Company: "TAIWAN VIDEO & MONITOR CORPORATION", Date: "11/29/1996", }, "KTD": PNPID{ ID: "KTD", Company: "TAKAHATA ELECTRONICS CO.,LTD.", Date: "07/22/2009", }, "TAM": PNPID{ ID: "TAM", Company: "TAMURA SEISAKUSYO LTD", Date: "07/17/1997", }, "TAA": PNPID{ ID: "TAA", Company: "TANDBERG", Date: "10/21/2003", }, "TDD": PNPID{ ID: "TDD", Company: "TANDBERG DATA DISPLAY AS", Date: "11/29/1996", }, "TDM": PNPID{ ID: "TDM", Company: "TANDEM COMPUTER EUROPE INC", Date: "11/29/1996", }, 
"TCC": PNPID{ ID: "TCC", Company: "TANDON CORPORATION", Date: "11/29/1996", }, "TDY": PNPID{ ID: "TDY", Company: "TANDY ELECTRONICS", Date: "11/29/1996", }, "TAS": PNPID{ ID: "TAS", Company: "TASKIT RECHNERTECHNIK GMBH", Date: "12/15/1997", }, "TCS": PNPID{ ID: "TCS", Company: "TATUNG COMPANY OF AMERICA INC", Date: "11/29/1996", }, "VIB": PNPID{ ID: "VIB", Company: "TATUNG UK LTD", Date: "07/16/1999", }, "NRV": PNPID{ ID: "NRV", Company: "TAUGAGREINING HF", Date: "11/29/1996", }, "TAX": PNPID{ ID: "TAX", Company: "TAXAN (EUROPE) LTD", Date: "03/13/1997", }, "TOL": PNPID{ ID: "TOL", Company: "TCL CORPORATION", Date: "03/30/2016", }, "PMD": PNPID{ ID: "PMD", Company: "TDK USA CORPORATION", Date: "11/29/1996", }, "TDT": PNPID{ ID: "TDT", Company: "TDT", Date: "11/29/1996", }, "TDV": PNPID{ ID: "TDV", Company: "TDVISION SYSTEMS, INC.", Date: "01/18/2008", }, "TCJ": PNPID{ ID: "TCJ", Company: "TEAC AMERICA INC", Date: "11/29/1996", }, "TEA": PNPID{ ID: "TEA", Company: "TEAC SYSTEM CORPORATION", Date: "11/29/1996", }, "CET": PNPID{ ID: "CET", Company: "TEC CORPORATION", Date: "07/16/1998", }, "TEZ": PNPID{ ID: "TEZ", Company: "TECH SOURCE INC.", Date: "08/14/2013", }, "TLN": PNPID{ ID: "TLN", Company: "TECHLOGIX NETWORX", Date: "02/28/2017", }, "TMC": PNPID{ ID: "TMC", Company: "TECHMEDIA COMPUTER SYSTEMS CORPORATION", Date: "02/10/1998", }, "TCL": PNPID{ ID: "TCL", Company: "TECHNICAL CONCEPTS LTD", Date: "11/29/1996", }, "TIL": PNPID{ ID: "TIL", Company: "TECHNICAL ILLUSIONS INC.", Date: "02/14/2014", }, "TSD": PNPID{ ID: "TSD", Company: "TECHNISAT DIGITAL GMBH", Date: "07/14/2005", }, "NXS": PNPID{ ID: "NXS", Company: "TECHNOLOGY NEXUS SECURE OPEN SYSTEMS AB", Date: "05/08/1998", }, "TPE": PNPID{ ID: "TPE", Company: "TECHNOLOGY POWER ENTERPRISES INC", Date: "11/29/1996", }, "TTS": PNPID{ ID: "TTS", Company: "TECHNOTREND SYSTEMTECHNIK GMBH", Date: "11/29/1996", }, "TEC": PNPID{ ID: "TEC", Company: "TECMAR INC", Date: "11/29/1996", }, "PIS": PNPID{ ID: "PIS", Company: 
"TECNART CO.,LTD.", Date: "10/22/2019", }, "TCN": PNPID{ ID: "TCN", Company: "TECNETICS (PTY) LTD", Date: "11/29/1996", }, "TNM": PNPID{ ID: "TNM", Company: "TECNIMAGEN SA", Date: "05/02/2005", }, "TVD": PNPID{ ID: "TVD", Company: "TECNOVISION", Date: "03/13/2006", }, "RXT": PNPID{ ID: "RXT", Company: "TECTONA SOFTSOLUTIONS (P) LTD.,", Date: "06/02/2004", }, "TKG": PNPID{ ID: "TKG", Company: "TEK GEAR", Date: "10/16/2015", }, "TKN": PNPID{ ID: "TKN", Company: "TEKNOR MICROSYSTEM INC", Date: "11/29/1996", }, "TRM": PNPID{ ID: "TRM", Company: "TEKRAM TECHNOLOGY COMPANY LTD", Date: "11/29/1996", }, "TEK": PNPID{ ID: "TEK", Company: "TEKTRONIX INC", Date: "05/16/1999", }, "TWX": PNPID{ ID: "TWX", Company: "TEKWORX LIMITED", Date: "12/24/2009", }, "TCT": PNPID{ ID: "TCT", Company: "TELECOM TECHNOLOGY CENTRE CO. LTD.", Date: "07/16/1999", }, "TTC": PNPID{ ID: "TTC", Company: "TELECOMMUNICATIONS TECHNIQUES CORPORATION", Date: "11/29/1996", }, "TLF": PNPID{ ID: "TLF", Company: "TELEFORCE.,CO,LTD", Date: "11/19/2012", }, "TAT": PNPID{ ID: "TAT", Company: "TELELIAISON INC", Date: "04/29/1997", }, "TLK": PNPID{ ID: "TLK", Company: "TELELINK AG", Date: "09/01/1998", }, "TPS": PNPID{ ID: "TPS", Company: "TELEPROCESSING SYSTEME GMBH", Date: "01/24/1997", }, "TAG": PNPID{ ID: "TAG", Company: "TELES AG", Date: "11/29/1996", }, "TLS": PNPID{ ID: "TLS", Company: "TELESTE EDUCATIONAL OY", Date: "11/29/1996", }, "TEV": PNPID{ ID: "TEV", Company: "TELEVÉS, S.A.", Date: "06/22/2017", }, "TCF": PNPID{ ID: "TCF", Company: "TELEVIC CONFERENCE", Date: "02/28/2017", }, "TSI": PNPID{ ID: "TSI", Company: "TELEVIDEO SYSTEMS", Date: "11/29/1996", }, "PFT": PNPID{ ID: "PFT", Company: "TELIA PROSOFT AB", Date: "09/13/1999", }, "TLD": PNPID{ ID: "TLD", Company: "TELINDUS", Date: "11/29/1996", }, "TLX": PNPID{ ID: "TLX", Company: "TELXON CORPORATION", Date: "11/29/1996", }, "TEN": PNPID{ ID: "TEN", Company: "TENCENT", Date: "06/20/2017", }, "TNY": PNPID{ ID: "TNY", Company: "TENNYSON TECH PTY LTD", 
Date: "11/29/1996", }, "TDC": PNPID{ ID: "TDC", Company: "TERADICI", Date: "10/11/2007", }, "TER": PNPID{ ID: "TER", Company: "TERRATEC ELECTRONIC GMBH", Date: "03/21/1997", }, "TET": PNPID{ ID: "TET", Company: "TETRADYNE CO., LTD.", Date: "04/27/2016", }, "TXN": PNPID{ ID: "TXN", Company: "TEXAS INSTURMENTS", Date: "11/29/1996", }, "TMI": PNPID{ ID: "TMI", Company: "TEXAS MICROSYSTEM", Date: "11/29/1996", }, "TXT": PNPID{ ID: "TXT", Company: "TEXTRON DEFENSE SYSTEM", Date: "11/29/1996", }, "TAV": PNPID{ ID: "TAV", Company: "THALES AVIONICS", Date: "11/18/2015", }, "CKC": PNPID{ ID: "CKC", Company: "THE CONCEPT KEYBOARD COMPANY LTD", Date: "06/02/1997", }, "LNX": PNPID{ ID: "LNX", Company: "THE LINUX FOUNDATION", Date: "04/04/2014", }, "PXL": PNPID{ ID: "PXL", Company: "THE MOVING PIXEL COMPANY", Date: "11/24/2003", }, "ITN": PNPID{ ID: "ITN", Company: "THE NTI GROUP", Date: "11/29/1996", }, "TOG": PNPID{ ID: "TOG", Company: "THE OPEN GROUP", Date: "09/13/1999", }, "PAN": PNPID{ ID: "PAN", Company: "THE PANDA PROJECT", Date: "11/29/1996", }, "PRG": PNPID{ ID: "PRG", Company: "THE PHOENIX RESEARCH GROUP INC", Date: "09/19/1997", }, "TSG": PNPID{ ID: "TSG", Company: "THE SOFTWARE GROUP LTD", Date: "11/29/1996", }, "TMX": PNPID{ ID: "TMX", Company: "THERMOTREX CORPORATION", Date: "11/29/1996", }, "TLL": PNPID{ ID: "TLL", Company: "THINKLOGICAL", Date: "06/01/2015", }, "TCO": PNPID{ ID: "TCO", Company: "THOMAS-CONRAD CORPORATION", Date: "11/29/1996", }, "TCR": PNPID{ ID: "TCR", Company: "THOMSON CONSUMER ELECTRONICS", Date: "08/20/1998", }, "TPT": PNPID{ ID: "TPT", Company: "THRUPUT LTD", Date: "06/16/2010", }, "THN": PNPID{ ID: "THN", Company: "THUNDERCOM HOLDINGS SDN. 
BHD.", Date: "03/21/1997", }, "TWA": PNPID{ ID: "TWA", Company: "TIDEWATER ASSOCIATION", Date: "11/29/1996", }, "TMM": PNPID{ ID: "TMM", Company: "TIME MANAGEMENT, INC.", Date: "03/20/1999", }, "TKS": PNPID{ ID: "TKS", Company: "TIMEKEEPING SYSTEMS, INC.", Date: "08/31/1998", }, "TPD": PNPID{ ID: "TPD", Company: "TIMES (SHANGHAI) COMPUTER CO., LTD.", Date: "12/12/2013", }, "TIP": PNPID{ ID: "TIP", Company: "TIPTEL AG", Date: "02/24/1998", }, "TIX": PNPID{ ID: "TIX", Company: "TIXI.COM GMBH", Date: "10/16/1998", }, "TNC": PNPID{ ID: "TNC", Company: "TNC INDUSTRIAL COMPANY LTD", Date: "02/27/1998", }, "TAB": PNPID{ ID: "TAB", Company: "TODOS DATA SYSTEM AB", Date: "08/20/1997", }, "TOE": PNPID{ ID: "TOE", Company: "TOEI ELECTRONICS CO., LTD.", Date: "10/02/2001", }, "TON": PNPID{ ID: "TON", Company: "TONNA", Date: "03/14/2012", }, "TPV": PNPID{ ID: "TPV", Company: "TOP VICTORY ELECTRONICS ( FUJIAN ) COMPANY LTD", Date: "05/16/1999", }, "TPK": PNPID{ ID: "TPK", Company: "TOPRE CORPORATION", Date: "02/13/2009", }, "TPR": PNPID{ ID: "TPR", Company: "TOPRO TECHNOLOGY INC", Date: "05/08/1998", }, "TTA": PNPID{ ID: "TTA", Company: "TOPSON TECHNOLOGY CO., LTD.", Date: "09/23/1998", }, "SFM": PNPID{ ID: "SFM", Company: "TORNADO COMPANY", Date: "04/15/1997", }, "TGS": PNPID{ ID: "TGS", Company: "TORUS SYSTEMS LTD", Date: "11/29/1996", }, "TRS": PNPID{ ID: "TRS", Company: "TORUS SYSTEMS LTD", Date: "11/29/1996", }, "TAI": PNPID{ ID: "TAI", Company: "TOSHIBA AMERICA INFO SYSTEMS INC", Date: "11/29/1996", }, "TSB": PNPID{ ID: "TSB", Company: "TOSHIBA AMERICA INFO SYSTEMS INC", Date: "11/29/1996", }, "TTP": PNPID{ ID: "TTP", Company: "TOSHIBA CORPORATION", Date: "07/07/2015", }, "TGC": PNPID{ ID: "TGC", Company: "TOSHIBA GLOBAL COMMERCE SOLUTIONS, INC.", Date: "06/26/2012", }, "LCD": PNPID{ ID: "LCD", Company: "TOSHIBA MATSUSHITA DISPLAY TECHNOLOGY CO., LTD", Date: "05/24/2000", }, "PCS": PNPID{ ID: "PCS", Company: "TOSHIBA PERSONAL COMPUTER SYSTEM CORPRATION", Date: 
"06/22/2010", }, "TLI": PNPID{ ID: "TLI", Company: "TOSHIBA TELI CORPORATION", Date: "01/18/2008", }, "TVL": PNPID{ ID: "TVL", Company: "TOTAL VISION LTD", Date: "02/07/2017", }, "TTK": PNPID{ ID: "TTK", Company: "TOTOKU ELECTRIC COMPANY LTD", Date: "11/29/1996", }, "TSE": PNPID{ ID: "TSE", Company: "TOTTORI SANYO ELECTRIC", Date: "11/29/1996", }, "TSL": PNPID{ ID: "TSL", Company: "TOTTORI SANYO ELECTRIC CO., LTD.", Date: "11/06/2001", }, "TPC": PNPID{ ID: "TPC", Company: "TOUCH PANEL SYSTEMS CORPORATION", Date: "09/02/1997", }, "TKO": PNPID{ ID: "TKO", Company: "TOUCHKO, INC.", Date: "01/12/2006", }, "TOU": PNPID{ ID: "TOU", Company: "TOUCHSTONE TECHNOLOGY", Date: "05/07/2001", }, "TSY": PNPID{ ID: "TSY", Company: "TOUCHSYSTEMS", Date: "01/18/2008", }, "TWK": PNPID{ ID: "TWK", Company: "TOWITOKO ELECTRONICS GMBH", Date: "04/14/1998", }, "KPT": PNPID{ ID: "KPT", Company: "TPK HOLDING CO., LTD", Date: "08/16/2016", }, "CSB": PNPID{ ID: "CSB", Company: "TRANSTEX SA", Date: "03/15/2001", }, "TST": PNPID{ ID: "TST", Company: "TRANSTREAM INC", Date: "04/29/1997", }, "TSV": PNPID{ ID: "TSV", Company: "TRANSVIDEO", Date: "05/04/2010", }, "TRP": PNPID{ ID: "TRP", Company: "TRAPEZE GROUP", Date: "02/28/2017", }, "TRE": PNPID{ ID: "TRE", Company: "TREMETRICS", Date: "04/24/1997", }, "RDM": PNPID{ ID: "RDM", Company: "TREMON ENTERPRISES COMPANY LTD", Date: "11/29/1996", }, "TTI": PNPID{ ID: "TTI", Company: "TRENTON TERMINALS INC", Date: "11/29/1996", }, "TRX": PNPID{ ID: "TRX", Company: "TREX ENTERPRISES", Date: "02/21/2000", }, "TDS": PNPID{ ID: "TDS", Company: "TRI-DATA SYSTEMS INC", Date: "11/29/1996", }, "OZO": PNPID{ ID: "OZO", Company: "TRIBE COMPUTER WORKS INC", Date: "11/29/1996", }, "TRI": PNPID{ ID: "TRI", Company: "TRICORD SYSTEMS", Date: "11/29/1996", }, "TTY": PNPID{ ID: "TTY", Company: "TRIDELITY DISPLAY SOLUTIONS GMBH", Date: "07/19/2010", }, "TRD": PNPID{ ID: "TRD", Company: "TRIDENT MICROSYSTEM INC", Date: "11/29/1996", }, "TMS": PNPID{ ID: "TMS", Company: 
"TRIDENT MICROSYSTEMS LTD", Date: "07/15/2002", }, "TGI": PNPID{ ID: "TGI", Company: "TRIGEM COMPUTER INC", Date: "11/29/1996", }, "TGM": PNPID{ ID: "TGM", Company: "TRIGEM COMPUTER,INC.", Date: "07/05/2001", }, "TIC": PNPID{ ID: "TIC", Company: "TRIGEM KINFOCOMM", Date: "02/26/2003", }, "TRC": PNPID{ ID: "TRC", Company: "TRIOC AB", Date: "01/13/2000", }, "TBB": PNPID{ ID: "TBB", Company: "TRIPLE S ENGINEERING INC", Date: "09/26/1997", }, "TRT": PNPID{ ID: "TRT", Company: "TRITEC ELECTRONIC AG", Date: "01/11/2012", }, "TRA": PNPID{ ID: "TRA", Company: "TRITECH MICROELECTRONICS INTERNATIONAL", Date: "01/24/1997", }, "TRB": PNPID{ ID: "TRB", Company: "TRIUMPH BOARD A.S.", Date: "09/27/2013", }, "TRV": PNPID{ ID: "TRV", Company: "TRIVISIO PROTOTYPING GMBH", Date: "11/18/2011", }, "TXL": PNPID{ ID: "TXL", Company: "TRIXEL LTD", Date: "08/10/2000", }, "MKV": PNPID{ ID: "MKV", Company: "TRTHEIM TECHNOLOGY", Date: "03/17/1997", }, "TVI": PNPID{ ID: "TVI", Company: "TRUEVISION", Date: "11/29/1996", }, "TTE": PNPID{ ID: "TTE", Company: "TTE, INC.", Date: "01/18/2005", }, "TCI": PNPID{ ID: "TCI", Company: "TULIP COMPUTERS INT'L B.V.", Date: "11/29/1996", }, "TBC": PNPID{ ID: "TBC", Company: "TURBO COMMUNICATION, INC", Date: "09/01/1998", }, "TBS": PNPID{ ID: "TBS", Company: "TURTLE BEACH SYSTEM", Date: "11/29/1996", }, "TUT": PNPID{ ID: "TUT", Company: "TUT SYSTEMS", Date: "08/19/1997", }, "TVR": PNPID{ ID: "TVR", Company: "TV INTERACTIVE CORPORATION", Date: "11/29/1996", }, "TVO": PNPID{ ID: "TVO", Company: "TV ONE LTD", Date: "09/02/2008", }, "TVV": PNPID{ ID: "TVV", Company: "TV1 GMBH", Date: "02/06/2012", }, "TVS": PNPID{ ID: "TVS", Company: "TVS ELECTRONICS LIMITED", Date: "05/20/2008", }, "TWH": PNPID{ ID: "TWH", Company: "TWINHEAD INTERNATIONAL CORPORATION", Date: "11/29/1996", }, "TYN": PNPID{ ID: "TYN", Company: "TYAN COMPUTER CORPORATION", Date: "11/29/1996", }, "USE": PNPID{ ID: "USE", Company: "U. S. 
ELECTRONICS INC.", Date: "10/28/2013", }, "USD": PNPID{ ID: "USD", Company: "U.S. DIGITAL CORPORATION", Date: "11/29/1996", }, "NRL": PNPID{ ID: "NRL", Company: "U.S. NAVAL RESEARCH LAB", Date: "11/29/1996", }, "TSP": PNPID{ ID: "TSP", Company: "U.S. NAVY", Date: "10/17/2002", }, "USR": PNPID{ ID: "USR", Company: "U.S. ROBOTICS INC", Date: "11/29/1996", }, "UBL": PNPID{ ID: "UBL", Company: "UBINETICS LTD.", Date: "05/23/2002", }, "UJR": PNPID{ ID: "UJR", Company: "UEDA JAPAN RADIO CO., LTD.", Date: "07/09/2003", }, "PRP": PNPID{ ID: "PRP", Company: "UEFI FORUM", Date: "02/03/2016", }, "UFO": PNPID{ ID: "UFO", Company: "UFO SYSTEMS INC", Date: "11/29/1996", }, "UAS": PNPID{ ID: "UAS", Company: "ULTIMA ASSOCIATES PTE LTD", Date: "01/02/1997", }, "UEC": PNPID{ ID: "UEC", Company: "ULTIMA ELECTRONICS CORPORATION", Date: "09/01/1998", }, "UMT": PNPID{ ID: "UMT", Company: "ULTIMACHINE", Date: "08/11/2016", }, "ULT": PNPID{ ID: "ULT", Company: "ULTRA NETWORK TECH", Date: "11/29/1996", }, "UMG": PNPID{ ID: "UMG", Company: "UMEZAWA GIKEN CO.,LTD", Date: "04/10/2008", }, "UBI": PNPID{ ID: "UBI", Company: "UNGERMANN-BASS INC", Date: "11/29/1996", }, "WKH": PNPID{ ID: "WKH", Company: "UNI-TAKE INT'L INC.", Date: "06/17/2002", }, "UNY": PNPID{ ID: "UNY", Company: "UNICATE", Date: "07/21/1998", }, "UTC": PNPID{ ID: "UTC", Company: "UNICOMPUTE TECHNOLOGY CO., LTD.", Date: "10/19/2020", }, "UDN": PNPID{ ID: "UDN", Company: "UNIDEN CORPORATION", Date: "10/18/2004", }, "UIC": PNPID{ ID: "UIC", Company: "UNIFORM INDUSTRIAL CORPORATION", Date: "11/29/1996", }, "UNI": PNPID{ ID: "UNI", Company: "UNIFORM INDUSTRY CORP.", Date: "11/06/2001", }, "UFG": PNPID{ ID: "UFG", Company: "UNIGRAF-USA", Date: "10/09/2008", }, "UNB": PNPID{ ID: "UNB", Company: "UNISYS CORPORATION", Date: "11/29/1996", }, "UNC": PNPID{ ID: "UNC", Company: "UNISYS CORPORATION", Date: "11/29/1996", }, "UNM": PNPID{ ID: "UNM", Company: "UNISYS CORPORATION", Date: "11/29/1996", }, "UNO": PNPID{ ID: "UNO", Company: 
"UNISYS CORPORATION", Date: "11/29/1996", }, "UNS": PNPID{ ID: "UNS", Company: "UNISYS CORPORATION", Date: "11/29/1996", }, "UNT": PNPID{ ID: "UNT", Company: "UNISYS CORPORATION", Date: "11/29/1996", }, "UNA": PNPID{ ID: "UNA", Company: "UNISYS DSD", Date: "11/29/1996", }, "UMC": PNPID{ ID: "UMC", Company: "UNITED MICROELECTR CORPORATION", Date: "11/29/1996", }, "UNP": PNPID{ ID: "UNP", Company: "UNITOP", Date: "11/06/2001", }, "UEI": PNPID{ ID: "UEI", Company: "UNIVERSAL ELECTRONICS INC", Date: "08/20/1997", }, "UET": PNPID{ ID: "UET", Company: "UNIVERSAL EMPOWERING TECHNOLOGIES", Date: "09/26/1997", }, "UMM": PNPID{ ID: "UMM", Company: "UNIVERSAL MULTIMEDIA", Date: "10/08/2001", }, "USI": PNPID{ ID: "USI", Company: "UNIVERSAL SCIENTIFIC INDUSTRIAL CO., LTD.", Date: "11/04/2003", }, "JGD": PNPID{ ID: "JGD", Company: "UNIVERSITY COLLEGE", Date: "11/29/1996", }, "UWC": PNPID{ ID: "UWC", Company: "UNIWILL COMPUTER CORP.", Date: "04/16/2004", }, "UTD": PNPID{ ID: "UTD", Company: "UP TO DATE TECH", Date: "11/29/1996", }, "UPP": PNPID{ ID: "UPP", Company: "UPPI", Date: "04/14/1998", }, "RUP": PNPID{ ID: "RUP", Company: "UPS MANUFACTORING S.R.L.", Date: "03/15/2001", }, "ASD": PNPID{ ID: "ASD", Company: "USC INFORMATION SCIENCES INSTITUTE", Date: "04/08/1997", }, "USA": PNPID{ ID: "USA", Company: "UTIMACO SAFEWARE AG", Date: "05/04/1998", }, "VSR": PNPID{ ID: "VSR", Company: "V-STAR ELECTRONICS INC.", Date: "02/21/2000", }, "VAT": PNPID{ ID: "VAT", Company: "VADATECH INC", Date: "07/09/2018", }, "VAD": PNPID{ ID: "VAD", Company: "VADDIO, LLC", Date: "11/30/2012", }, "VDM": PNPID{ ID: "VDM", Company: "VADEM", Date: "11/29/1996", }, "VAI": PNPID{ ID: "VAI", Company: "VAIO CORPORATION", Date: "04/18/2014", }, "VAL": PNPID{ ID: "VAL", Company: "VALENCE COMPUTING CORPORATION", Date: "11/29/1996", }, "VBT": PNPID{ ID: "VBT", Company: "VALLEY BOARD LTDA", Date: "03/15/2001", }, "VLB": PNPID{ ID: "VLB", Company: "VALLEYBOARD LTDA.", Date: "04/05/1998", }, "VLV": PNPID{ ID: 
"VLV", Company: "VALVE CORPORATION", Date: "03/06/2013", }, "ITI": PNPID{ ID: "ITI", Company: "VANERUM GROUP", Date: "10/01/2013", }, "VAR": PNPID{ ID: "VAR", Company: "VARIAN AUSTRALIA PTY LTD", Date: "04/19/2000", }, "VRT": PNPID{ ID: "VRT", Company: "VARJO TECHNOLOGIES", Date: "11/17/2017", }, "VTV": PNPID{ ID: "VTV", Company: "VATIV TECHNOLOGIES", Date: "04/12/2006", }, "VBR": PNPID{ ID: "VBR", Company: "VBRICK SYSTEMS INC.", Date: "08/19/2009", }, "VCX": PNPID{ ID: "VCX", Company: "VCONEX", Date: "06/15/2005", }, "VDC": PNPID{ ID: "VDC", Company: "VDC DISPLAY SYSTEMS", Date: "04/29/2009", }, "VEC": PNPID{ ID: "VEC", Company: "VECTOR INFORMATIK GMBH", Date: "09/10/1997", }, "VCM": PNPID{ ID: "VCM", Company: "VECTOR MAGNETICS, LLC", Date: "04/12/2006", }, "VEK": PNPID{ ID: "VEK", Company: "VEKTREX", Date: "12/13/1996", }, "VFI": PNPID{ ID: "VFI", Company: "VERIFONE INC", Date: "05/29/1998", }, "VMI": PNPID{ ID: "VMI", Company: "VERMONT MICROSYSTEMS", Date: "11/29/1996", }, "VLC": PNPID{ ID: "VLC", Company: "VERSALOGIC CORPORATION", Date: "05/25/2016", }, "VTX": PNPID{ ID: "VTX", Company: "VESTAX CORPORATION", Date: "02/14/2012", }, "VES": PNPID{ ID: "VES", Company: "VESTEL ELEKTRONIK SANAYI VE TICARET A. 
S.", Date: "09/19/1997", }, "VIM": PNPID{ ID: "VIM", Company: "VIA MONS LTD.", Date: "08/29/2012", }, "VIA": PNPID{ ID: "VIA", Company: "VIA TECH INC", Date: "11/29/1996", }, "VCJ": PNPID{ ID: "VCJ", Company: "VICTOR COMPANY OF JAPAN, LIMITED", Date: "02/06/2009", }, "VDA": PNPID{ ID: "VDA", Company: "VICTOR DATA SYSTEMS", Date: "05/24/2000", }, "VIC": PNPID{ ID: "VIC", Company: "VICTRON B.V.", Date: "11/29/1996", }, "VDO": PNPID{ ID: "VDO", Company: "VIDEO & DISPLAY ORIENTED CORPORATION", Date: "11/29/1996", }, "URD": PNPID{ ID: "URD", Company: "VIDEO COMPUTER S.P.A.", Date: "02/24/1998", }, "JWD": PNPID{ ID: "JWD", Company: "VIDEO INTERNATIONAL INC.", Date: "02/21/2000", }, "VPI": PNPID{ ID: "VPI", Company: "VIDEO PRODUCTS INC", Date: "05/04/2010", }, "VLT": PNPID{ ID: "VLT", Company: "VIDEOLAN TECHNOLOGIES", Date: "10/17/1997", }, "VSI": PNPID{ ID: "VSI", Company: "VIDEOSERVER", Date: "06/25/1997", }, "VTB": PNPID{ ID: "VTB", Company: "VIDEOTECHNIK BREITHAUPT", Date: "07/23/2013", }, "VTN": PNPID{ ID: "VTN", Company: "VIDEOTRON CORP.", Date: "05/04/2010", }, "VDS": PNPID{ ID: "VDS", Company: "VIDISYS GMBH & COMPANY", Date: "11/29/1996", }, "VDT": PNPID{ ID: "VDT", Company: "VIDITEC, INC.", Date: "11/08/1999", }, "VSC": PNPID{ ID: "VSC", Company: "VIEWSONIC CORPORATION", Date: "11/29/1996", }, "VTK": PNPID{ ID: "VTK", Company: "VIEWTECK CO., LTD.", Date: "10/08/2001", }, "VIK": PNPID{ ID: "VIK", Company: "VIKING CONNECTORS", Date: "11/29/1996", }, "VNC": PNPID{ ID: "VNC", Company: "VINCA CORPORATION", Date: "11/29/1996", }, "NHT": PNPID{ ID: "NHT", Company: "VINCI LABS", Date: "03/03/2006", }, "VML": PNPID{ ID: "VML", Company: "VINE MICROS LIMITED", Date: "06/16/2004", }, "VIN": PNPID{ ID: "VIN", Company: "VINE MICROS LTD", Date: "04/19/2000", }, "VCC": PNPID{ ID: "VCC", Company: "VIRTUAL COMPUTER CORPORATION", Date: "11/29/1996", }, "VRC": PNPID{ ID: "VRC", Company: "VIRTUAL RESOURCES CORPORATION", Date: "11/29/1996", }, "VQ@": PNPID{ ID: "VQ@", Company: "VISION 
QUEST", Date: "10/26/2009", }, "VSP": PNPID{ ID: "VSP", Company: "VISION SYSTEMS GMBH", Date: "11/29/1996", }, "VIS": PNPID{ ID: "VIS", Company: "VISIONEER", Date: "11/29/1996", }, "VIT": PNPID{ ID: "VIT", Company: "VISITECH AS", Date: "09/05/2006", }, "VLK": PNPID{ ID: "VLK", Company: "VISLINK INTERNATIONAL LTD", Date: "08/27/2012", }, "VCI": PNPID{ ID: "VCI", Company: "VISTACOM INC", Date: "11/29/1996", }, "VIR": PNPID{ ID: "VIR", Company: "VISUAL INTERFACE, INC", Date: "11/27/1998", }, "VTL": PNPID{ ID: "VTL", Company: "VIVID TECHNOLOGY PTE LTD", Date: "11/29/1996", }, "VIZ": PNPID{ ID: "VIZ", Company: "VIZIO, INC", Date: "06/06/2012", }, "VTI": PNPID{ ID: "VTI", Company: "VLSI TECH INC", Date: "11/29/1996", }, "VMW": PNPID{ ID: "VMW", Company: "VMWARE INC.,", Date: "10/18/2011", }, "VTG": PNPID{ ID: "VTG", Company: "VOICE TECHNOLOGIES GROUP INC", Date: "04/24/1997", }, "GDT": PNPID{ ID: "GDT", Company: "VORTEX COMPUTERSYSTEME GMBH", Date: "11/29/1996", }, "VPX": PNPID{ ID: "VPX", Company: "VPIXX TECHNOLOGIES INC.", Date: "12/05/2013", }, "DSJ": PNPID{ ID: "DSJ", Company: "VR TECHNOLOGY HOLDINGS LIMITED", Date: "01/19/2017", }, "VRG": PNPID{ ID: "VRG", Company: "VRGINEERS, INC.", Date: "09/07/2017", }, "VRM": PNPID{ ID: "VRM", Company: "VRMAGIC HOLDING AG", Date: "04/12/2013", }, "TSW": PNPID{ ID: "TSW", Company: "VRSHOW TECHNOLOGY LIMITED", Date: "03/29/2018", }, "VRS": PNPID{ ID: "VRS", Company: "VRSTUDIOS, INC.", Date: "06/22/2017", }, "VTS": PNPID{ ID: "VTS", Company: "VTECH COMPUTERS LTD", Date: "11/29/1996", }, "VTC": PNPID{ ID: "VTC", Company: "VTEL CORPORATION", Date: "11/29/1996", }, "VUT": PNPID{ ID: "VUT", Company: "VUTRIX (UK) LTD", Date: "07/22/2003", }, "VWB": PNPID{ ID: "VWB", Company: "VWEB CORP.", Date: "03/12/2004", }, "WEL ": PNPID{ ID: "WEL ", Company: "W-DEV", Date: "11/01/2010", }, "WAC": PNPID{ ID: "WAC", Company: "WACOM TECH", Date: "11/29/1996", }, "JPW": PNPID{ ID: "JPW", Company: "WALLIS HAMILTON INDUSTRIES", Date: "07/16/1999", }, 
"MLT": PNPID{ ID: "MLT", Company: "WANLIDA GROUP CO., LTD.", Date: "04/24/2014", }, "WAL": PNPID{ ID: "WAL", Company: "WAVE ACCESS", Date: "12/13/1996", }, "AWS": PNPID{ ID: "AWS", Company: "WAVE SYSTEMS", Date: "11/29/1996", }, "WVM": PNPID{ ID: "WVM", Company: "WAVE SYSTEMS CORPORATION", Date: "12/05/1997", }, "WAV": PNPID{ ID: "WAV", Company: "WAVEPHORE", Date: "11/29/1996", }, "SEL": PNPID{ ID: "SEL", Company: "WAY2CALL COMMUNICATIONS", Date: "03/20/1997", }, "WBS": PNPID{ ID: "WBS", Company: "WB SYSTEMTECHNIK GMBH", Date: "09/08/1997", }, "WPI": PNPID{ ID: "WPI", Company: "WEARNES PERIPHERALS INTERNATIONAL (PTE) LTD", Date: "03/31/1998", }, "WTK": PNPID{ ID: "WTK", Company: "WEARNES THAKRAL PTE", Date: "11/29/1996", }, "WEB": PNPID{ ID: "WEB", Company: "WEBGEAR INC", Date: "01/30/1998", }, "WMO": PNPID{ ID: "WMO", Company: "WESTERMO TELEINDUSTRI AB", Date: "01/13/2000", }, "WDC": PNPID{ ID: "WDC", Company: "WESTERN DIGITAL", Date: "11/29/1996", }, "WDE": PNPID{ ID: "WDE", Company: "WESTINGHOUSE DIGITAL ELECTRONICS", Date: "05/23/2003", }, "WEY": PNPID{ ID: "WEY", Company: "WEY DESIGN AG", Date: "10/18/2004", }, "WHI": PNPID{ ID: "WHI", Company: "WHISTLE COMMUNICATIONS", Date: "10/24/1998", }, "WLD": PNPID{ ID: "WLD", Company: "WILDFIRE COMMUNICATIONS INC", Date: "02/13/1997", }, "WNI": PNPID{ ID: "WNI", Company: "WILLNET INC.", Date: "04/19/2000", }, "WEC": PNPID{ ID: "WEC", Company: "WINBOND ELECTRONICS CORPORATION", Date: "11/29/1996", }, "WMT": PNPID{ ID: "WMT", Company: "WINMATE COMMUNICATION INC", Date: "03/15/2001", }, "WNV": PNPID{ ID: "WNV", Company: "WINNOV L.P.", Date: "03/07/1997", }, "WRC": PNPID{ ID: "WRC", Company: "WINRADIO COMMUNICATIONS", Date: "09/11/1997", }, "WIN": PNPID{ ID: "WIN", Company: "WINTOP TECHNOLOGY INC", Date: "12/29/1996", }, "WWP": PNPID{ ID: "WWP", Company: "WIPOTEC WIEGE- UND POSITIONIERSYSTEME GMBH", Date: "04/08/2014", }, "WIL": PNPID{ ID: "WIL", Company: "WIPRO INFORMATION TECHNOLOGY LTD", Date: "11/29/1996", }, "WIP": 
PNPID{ ID: "WIP", Company: "WIPRO INFOTECH", Date: "01/09/2004", }, "WSP": PNPID{ ID: "WSP", Company: "WIRELESS AND SMART PRODUCTS INC.", Date: "03/20/1999", }, "WCI": PNPID{ ID: "WCI", Company: "WISECOM INC", Date: "11/29/1996", }, "WST": PNPID{ ID: "WST", Company: "WISTRON CORPORATION", Date: "09/03/2010", }, "WLF": PNPID{ ID: "WLF", Company: "WOLF ADVANCED TECHNOLOGY", Date: "10/22/2019", }, "WML": PNPID{ ID: "WML", Company: "WOLFSON MICROELECTRONICS LTD", Date: "07/30/1997", }, "WVV": PNPID{ ID: "WVV", Company: "WOLFVISION GMBH", Date: "09/18/2012", }, "WCS": PNPID{ ID: "WCS", Company: "WOODWIND COMMUNICATIONS SYSTEMS INC", Date: "11/29/1996", }, "WYT": PNPID{ ID: "WYT", Company: "WOOYOUNG IMAGE & INFORMATION CO.,LTD.", Date: "01/18/2008", }, "WTI": PNPID{ ID: "WTI", Company: "WORKSTATION TECH", Date: "11/29/1996", }, "WWV": PNPID{ ID: "WWV", Company: "WORLD WIDE VIDEO, INC.", Date: "10/24/1998", }, "WXT": PNPID{ ID: "WXT", Company: "WOXTER TECHNOLOGY CO. LTD", Date: "09/03/2010", }, "WYR": PNPID{ ID: "WYR", Company: "WYRESTORM TECHNOLOGIES LLC", Date: "09/05/2018", }, "XTN": PNPID{ ID: "XTN", Company: "X-10 (USA) INC", Date: "02/24/1997", }, "XTE": PNPID{ ID: "XTE", Company: "X2E GMBH", Date: "09/23/2009", }, "XAC": PNPID{ ID: "XAC", Company: "XAC AUTOMATION CORP", Date: "02/22/1999", }, "XDM": PNPID{ ID: "XDM", Company: "XDM LTD.", Date: "11/22/2010", }, "MAD": PNPID{ ID: "MAD", Company: "XEDIA CORPORATION", Date: "11/29/1996", }, "XLX": PNPID{ ID: "XLX", Company: "XILINX, INC.", Date: "08/01/2007", }, "XIN": PNPID{ ID: "XIN", Company: "XINEX NETWORKS INC", Date: "02/07/1997", }, "XIO": PNPID{ ID: "XIO", Company: "XIOTECH CORPORATION", Date: "05/29/1998", }, "XRC": PNPID{ ID: "XRC", Company: "XIRCOM INC", Date: "11/29/1996", }, "XIR": PNPID{ ID: "XIR", Company: "XIROCM INC", Date: "11/29/1996", }, "XIT": PNPID{ ID: "XIT", Company: "XITEL PTY LTD", Date: "11/29/1996", }, "XNT": PNPID{ ID: "XNT", Company: "XN TECHNOLOGIES, INC.", Date: "07/14/2003", }, "UHB": 
PNPID{ ID: "UHB", Company: "XOCECO", Date: "11/27/1998", }, "XRO": PNPID{ ID: "XRO", Company: "XORO ELECTRONICS (CHENGDU) LIMITED", Date: "05/23/2005", }, "XST": PNPID{ ID: "XST", Company: "XS TECHNOLOGIES INC", Date: "01/20/1998", }, "XSN": PNPID{ ID: "XSN", Company: "XSCREEN AS", Date: "02/14/2006", }, "XSY": PNPID{ ID: "XSY", Company: "XSYS", Date: "04/23/1998", }, "XYC": PNPID{ ID: "XYC", Company: "XYCOTEC COMPUTER GMBH", Date: "09/03/2002", }, "YED": PNPID{ ID: "YED", Company: "Y-E DATA INC", Date: "11/29/1996", }, "YMH": PNPID{ ID: "YMH", Company: "YAMAHA CORPORATION", Date: "11/29/1996", }, "BUF": PNPID{ ID: "BUF", Company: "YASUHIKO SHIRAI MELCO INC", Date: "11/29/1996", }, "YHQ": PNPID{ ID: "YHQ", Company: "YOKOGAWA ELECTRIC CORPORATION", Date: "11/29/1996", }, "TPZ": PNPID{ ID: "TPZ", Company: "YPOAZ SYSTEMS INC", Date: "11/29/1996", }, "ZMZ": PNPID{ ID: "ZMZ", Company: "Z MICROSYSTEMS", Date: "08/10/2005", }, "ZTT": PNPID{ ID: "ZTT", Company: "Z3 TECHNOLOGY", Date: "12/14/2010", }, "ZMT": PNPID{ ID: "ZMT", Company: "ZALMAN TECH CO., LTD.", Date: "05/07/2007", }, "ZAN": PNPID{ ID: "ZAN", Company: "ZANDAR TECHNOLOGIES PLC", Date: "12/03/2003", }, "ZBX": PNPID{ ID: "ZBX", Company: "ZEBAX TECHNOLOGIES", Date: "10/16/2015", }, "ZBR": PNPID{ ID: "ZBR", Company: "ZEBRA TECHNOLOGIES INTERNATIONAL, LLC", Date: "09/15/2003", }, "ZAZ": PNPID{ ID: "ZAZ", Company: "ZEEVEE, INC.", Date: "01/18/2008", }, "ZAX": PNPID{ ID: "ZAX", Company: "ZEFIRO ACOUSTICS", Date: "11/29/1996", }, "ZCT": PNPID{ ID: "ZCT", Company: "ZEITCONTROL CARDSYSTEMS GMBH", Date: "01/20/1999", }, "ZEN": PNPID{ ID: "ZEN", Company: "ZENIC INC.", Date: "04/17/2015", }, "ZDS": PNPID{ ID: "ZDS", Company: "ZENITH DATA SYSTEMS", Date: "11/29/1996", }, "ZGT": PNPID{ ID: "ZGT", Company: "ZENITH DATA SYSTEMS", Date: "11/29/1996", }, "ZSE": PNPID{ ID: "ZSE", Company: "ZENITH DATA SYSTEMS", Date: "11/29/1996", }, "ZNI": PNPID{ ID: "ZNI", Company: "ZETINET INC", Date: "11/29/1996", }, "TLE": PNPID{ ID: "TLE", 
Company: "ZHEJIANG TIANLE DIGITAL ELECTRIC CO., LTD.", Date: "01/17/2014", }, "RSR": PNPID{ ID: "RSR", Company: "ZHONG SHAN CITY RICHSOUND ELECTRONIC INDUSTRIAL LTD.", Date: "01/27/2015", }, "ZNX": PNPID{ ID: "ZNX", Company: "ZNYX ADV. SYSTEMS", Date: "11/29/1996", }, "ZTI": PNPID{ ID: "ZTI", Company: "ZOOM TELEPHONICS INC", Date: "11/29/1996", }, "ZRN": PNPID{ ID: "ZRN", Company: "ZORAN CORPORATION", Date: "03/31/2005", }, "ZOW": PNPID{ ID: "ZOW", Company: "ZOWIE INTERTAINMENT, INC", Date: "02/22/1999", }, "ZTM": PNPID{ ID: "ZTM", Company: "ZT GROUP INT'L INC.", Date: "06/14/2007", }, "ZTE": PNPID{ ID: "ZTE", Company: "ZTE CORPORATION", Date: "09/03/2010", }, "SIX": PNPID{ ID: "SIX", Company: "ZUNIQ DATA CORPORATION", Date: "11/29/1996", }, "ZYD": PNPID{ ID: "ZYD", Company: "ZYDACRON INC", Date: "04/10/1997", }, "ZTC": PNPID{ ID: "ZTC", Company: "ZYDAS TECHNOLOGY CORPORATION", Date: "05/24/2000", }, "ZYP": PNPID{ ID: "ZYP", Company: "ZYPCOM INC", Date: "03/19/1997", }, "ZYT": PNPID{ ID: "ZYT", Company: "ZYTEX COMPUTERS", Date: "11/29/1996", }, "HPA": PNPID{ ID: "HPA", Company: "ZYTOR COMMUNICATIONS", Date: "07/02/2010", }, "ZYX": PNPID{ ID: "ZYX", Company: "ZYXEL", Date: "11/29/1996", }, }
package models

import (
	"database/sql"
	"time"

	"github.com/LiveSocket/bot/service"
)

// ModChat represents a single moderator-chat message posted in a channel.
type ModChat struct {
	ID        uint      `db:"id" json:"id"`
	Channel   string    `db:"channel" json:"channel"`
	Message   string    `db:"message" json:"message"`
	Name      string    `db:"name" json:"name"`
	Timestamp time.Time `db:"timestamp" json:"timestamp"`
}

// NewModChat builds a ModChat for the given channel, message body, and
// author name, stamped with the current wall-clock time.
func NewModChat(channel string, message string, name string) *ModChat {
	return &ModChat{
		Channel:   channel,
		Message:   message,
		Name:      name,
		Timestamp: time.Now(),
	}
}

// FindLatestMessagesForChannel returns up to `limit` of the most recent mod
// chat messages for a channel, skipping the first `offset` rows. The result
// slice is empty (not nil) when no rows match.
func FindLatestMessagesForChannel(service *service.Service, channel string, offset uint64, limit uint64) ([]ModChat, error) {
	results := []ModChat{}
	if err := service.Select(&results, "SELECT * FROM `mod_chat` WHERE `channel`=? ORDER BY `timestamp` DESC LIMIT ?,?", channel, offset, limit); err != nil {
		return results, err
	}
	return results, nil
}

// CreateModChat inserts the given ModChat record into the database and
// returns the driver's execution result.
func CreateModChat(service *service.Service, modChat *ModChat) (sql.Result, error) {
	return service.NamedExec("INSERT INTO `mod_chat` (`channel`, `message`,`name`,`timestamp`) VALUES (:channel,:message,:name,:timestamp)", modChat)
}
// Copyright 2018 The Cockroach Authors.
//
// Use of this software is governed by the Business Source License
// included in the file licenses/BSL.txt.
//
// As of the Change Date specified in that file, in accordance with
// the Business Source License, use of this software will be governed
// by the Apache License, Version 2.0, included in the file
// licenses/APL.txt.

package physical

import (
	"bytes"
	"fmt"
	"regexp"
	"strconv"
	"strings"
	"sync"

	"github.com/cockroachdb/cockroach/pkg/sql/opt"
	"github.com/cockroachdb/cockroach/pkg/sql/opt/props"
	"github.com/cockroachdb/errors"
)

// OrderingChoice defines the set of possible row orderings that are provided or
// required by an operator. An OrderingChoice consists of two parts: an ordered
// sequence of equivalent column groups and a set of optional columns. Together,
// these parts specify a simple pattern that can match one or more candidate
// orderings. Here are some examples:
//
//	+1                  ORDER BY a
//	+1,-2               ORDER BY a,b DESC
//	+(1|2)              ORDER BY a | ORDER BY b
//	+(1|2),+3           ORDER BY a,c | ORDER BY b, c
//	-(3|4),+5 opt(1,2)  ORDER BY c DESC,e | ORDER BY a,d DESC,b DESC,e | ...
//
// Each column in the ordering sequence forms the corresponding column of the
// sort key, from most significant to least significant. Each column has a sort
// direction, either ascending or descending. The relation is ordered by the
// first column; rows that have the same value are then ordered by the second
// column; rows that still have the same value are ordered by the third column,
// and so on.
//
// Sometimes multiple columns in the relation have equivalent values. The
// OrderingChoiceColumn stores these columns in a group; any of the columns in
// the group can be used to form the corresponding column in the sort key. The
// equivalent group columns come from SQL expressions like:
//
//	a=b
//
// The optional column set contains columns that can appear anywhere (or
// nowhere) in the ordering. Optional columns come from SQL expressions like:
//
//	a=1
//
// Another case for optional columns is when we are grouping along a set of
// columns and only care about the intra-group ordering.
//
// The optional columns can be interleaved anywhere in the sequence of ordering
// columns, as they have no effect on the ordering.
type OrderingChoice struct {
	// Optional is the set of columns that can appear at any position in the
	// ordering. Columns in Optional must not appear in the Columns sequence.
	// In addition, if Columns is empty, then Optional must be as well.
	// After initial construction, Optional is immutable. To update, replace
	// with a different set containing the desired columns.
	Optional opt.ColSet

	// Columns is the sequence of equivalent column groups that can be used to
	// form each column in the sort key. Columns must not appear in the Optional
	// set. The array memory is owned by this struct, and should not be copied
	// to another OrderingChoice unless both are kept immutable.
	Columns []OrderingColumnChoice
}

// OrderingColumnChoice specifies the set of columns which can form one of the
// columns in the sort key, as well as the direction of that column (ascending
// or descending).
type OrderingColumnChoice struct {
	// Group is a set of equivalent columns, any of which can be used to form a
	// column in the sort key. After initial construction, Group is immutable.
	// To update, replace with a different set containing the desired columns.
	Group opt.ColSet

	// Descending is true if the sort key column is ordered from highest to
	// lowest. Otherwise, it's ordered from lowest to highest.
	Descending bool
}

// Regular expressions used by ParseOrderingChoice to recognize the textual
// form of an OrderingChoice (see ParseOrderingChoice for example inputs).
const (
	colChoiceRegexStr = `(?:\((\d+(?:\|\d+)*)\))`
	ordColRegexStr    = `^(?:(?:\+|\-)(?:(\d+)|` + colChoiceRegexStr + `))$`
	colListRegexStr   = `(\d+(?:,\d+)*)`
	optRegexStr       = `^\s*([\S]+)?\s*(?:opt\(` + colListRegexStr + `\))?\s*$`
)

// once guards the lazy, one-time compilation of the parsing regexes below.
var once sync.Once
var optRegex, ordColRegex *regexp.Regexp

// ParseOrderingChoice parses the string representation of an OrderingChoice for
// testing purposes. Here are some examples of the string format:
//
//	+1
//	-(1|2),+3
//	+(1|2),+3 opt(5,6)
//
// The input string is expected to be valid; ParseOrderingChoice will panic if
// it is not.
func ParseOrderingChoice(s string) OrderingChoice {
	// Compile the parsing regexes on first use only.
	once.Do(func() {
		optRegex = regexp.MustCompile(optRegexStr)
		ordColRegex = regexp.MustCompile(ordColRegexStr)
	})

	var ordering OrderingChoice

	// Separate string into column sequence and optional column parts:
	//   +(1|2),+3 opt(5,6)
	//     matches[1]: +(1|2),+3
	//     matches[2]: opt(5,6)
	matches := optRegex.FindStringSubmatch(s)
	if matches == nil {
		panic(errors.AssertionFailedf("could not parse ordering choice: %s", s))
	}

	// Handle Any case.
	if len(matches[1]) == 0 {
		return OrderingChoice{}
	}

	// Split column sequence by comma:
	//   +(1|2),+3:
	//     +(1|2)
	//     +3
	for _, ordColStr := range strings.Split(matches[1], ",") {
		// Parse one item in the column sequence:
		//   +(1|2):
		//     matches[1]: <empty>
		//     matches[2]: 1|2
		//
		//   +3:
		//     matches[1]: 3
		//     matches[2]: <empty>
		ordColMatches := ordColRegex.FindStringSubmatch(ordColStr)

		// First character is the direction indicator.
		var colChoice OrderingColumnChoice
		colChoice.Descending = strings.HasPrefix(ordColStr, "-")

		if len(ordColMatches[1]) != 0 {
			// Single column in equivalence group.
			id, _ := strconv.Atoi(ordColMatches[1])
			colChoice.Group.Add(opt.ColumnID(id))
		} else {
			// Split multiple columns in equivalence group by pipe:
			//   1|2:
			//     1
			//     2
			for _, idStr := range strings.Split(ordColMatches[2], "|") {
				id, _ := strconv.Atoi(idStr)
				colChoice.Group.Add(opt.ColumnID(id))
			}
		}

		ordering.Columns = append(ordering.Columns, colChoice)
	}

	// Parse any optional columns by splitting by comma:
	//   opt(5,6):
	//     5
	//     6
	if len(matches[2]) != 0 {
		for _, idStr := range strings.Split(matches[2], ",") {
			id, _ := strconv.Atoi(idStr)
			ordering.Optional.Add(opt.ColumnID(id))
		}
	}

	return ordering
}

// ParseOrdering parses a simple opt.Ordering; for example: "+1,-3".
//
// The input string is expected to be valid; ParseOrdering will panic if it is
// not. In particular, the string must describe exactly one ordering: no
// optional columns and no equivalence groups with more than one column.
func ParseOrdering(str string) opt.Ordering {
	prov := ParseOrderingChoice(str)
	if !prov.Optional.Empty() {
		panic(errors.AssertionFailedf("invalid ordering %s", str))
	}
	for i := range prov.Columns {
		if prov.Columns[i].Group.Len() != 1 {
			panic(errors.AssertionFailedf("invalid ordering %s", str))
		}
	}
	return prov.ToOrdering()
}

// Any is true if this instance allows any ordering (any length, any columns).
func (oc *OrderingChoice) Any() bool {
	return len(oc.Columns) == 0
}

// FromOrdering sets this OrderingChoice to the given opt.Ordering, with one
// single-column group per ordering column and no optional columns.
func (oc *OrderingChoice) FromOrdering(ord opt.Ordering) {
	oc.Optional = opt.ColSet{}
	oc.Columns = make([]OrderingColumnChoice, len(ord))
	for i := range ord {
		oc.Columns[i].Group.Add(ord[i].ID())
		oc.Columns[i].Descending = ord[i].Descending()
	}
}

// FromOrderingWithOptCols sets this OrderingChoice to the given opt.Ordering
// and with the given optional columns. Any optional columns in the given
// ordering are ignored.
func (oc *OrderingChoice) FromOrderingWithOptCols(ord opt.Ordering, optCols opt.ColSet) { oc.Optional = optCols.Copy() oc.Columns = make([]OrderingColumnChoice, 0, len(ord)) for i := range ord { if !oc.Optional.Contains(ord[i].ID()) { oc.Columns = append(oc.Columns, OrderingColumnChoice{ Group: opt.MakeColSet(ord[i].ID()), Descending: ord[i].Descending(), }) } } } // ToOrdering returns an opt.Ordering instance composed of the shortest possible // orderings that this instance allows. If there are several, then one is chosen // arbitrarily. func (oc *OrderingChoice) ToOrdering() opt.Ordering { ordering := make(opt.Ordering, len(oc.Columns)) for i := range oc.Columns { col := &oc.Columns[i] ordering[i] = opt.MakeOrderingColumn(col.AnyID(), col.Descending) } return ordering } // ColSet returns the set of all non-optional columns that are part of this // instance. For example, (1,2,3) will be returned if the OrderingChoice is: // // +1,(2|3) opt(4,5) // func (oc *OrderingChoice) ColSet() opt.ColSet { var cs opt.ColSet for i := range oc.Columns { cs.UnionWith(oc.Columns[i].Group) } return cs } // Implies returns true if any ordering allowed by <oc> is also allowed by <other>. // // In the case of no optional or equivalent columns, Implies returns true when // the given ordering is a prefix of this ordering. 
//
// Examples:
//
//	<empty> implies <empty>
//	+1 implies <empty>              (given set is prefix)
//	+1 implies +1
//	+1,-2 implies +1                (given set is prefix)
//	+1,-2 implies +1,-2
//	+1 implies +1 opt(2)            (unused optional col is ignored)
//	-2,+1 implies +1 opt(2)         (optional col is ignored)
//	+1 implies +(1|2)               (subset of choice)
//	+(1|2) implies +(1|2|3)         (subset of choice)
//	+(1|2),-4 implies +(1|2|3),-(4|5)
//	+(1|2) opt(4) implies +(1|2|3) opt(4)
//
//	<empty> !implies +1
//	+1 !implies -1                  (direction mismatch)
//	+1 !implies +1,-2               (prefix matching not commutative)
//	+1 opt(2) !implies +1           (extra optional cols not allowed)
//	+1 opt(2) !implies +1 opt(3)
//	+(1|2) !implies -(1|2)          (direction mismatch)
//	+(1|2) !implies +(3|4)          (no intersection)
//	+(1|2) !implies +(2|3)          (intersects, but not subset)
//	+(1|2|3) !implies +(1|2)        (subset of choice not commutative)
//	+(1|2) !implies +1 opt(2)
func (oc *OrderingChoice) Implies(other *OrderingChoice) bool {
	if !oc.Optional.SubsetOf(other.Optional) {
		return false
	}

	// Two-pointer walk: advance through both column sequences, requiring
	// every column of <other> to be matched by a column of <oc>.
	for left, right := 0, 0; right < len(other.Columns); {
		if left >= len(oc.Columns) {
			return false
		}

		leftCol, rightCol := &oc.Columns[left], &other.Columns[right]

		switch {
		case leftCol.Descending == rightCol.Descending && leftCol.Group.SubsetOf(rightCol.Group):
			// The columns match.
			left, right = left+1, right+1

		case leftCol.Group.Intersects(other.Optional):
			// Left column is optional in the right set.
			left++

		default:
			return false
		}
	}
	return true
}

// Intersects returns true if there are orderings that satisfy both
// OrderingChoices. See Intersection for more information.
func (oc *OrderingChoice) Intersects(other *OrderingChoice) bool {
	for left, right := 0, 0; left < len(oc.Columns) && right < len(other.Columns); {
		leftCol, rightCol := &oc.Columns[left], &other.Columns[right]
		switch {
		case leftCol.Descending == rightCol.Descending && leftCol.Group.Intersects(rightCol.Group):
			// The columns match.
			left, right = left+1, right+1

		case leftCol.Group.Intersects(other.Optional):
			// Left column is optional in the right set.
			left++

		case rightCol.Group.Intersects(oc.Optional):
			// Right column is optional in the left set.
			right++

		default:
			return false
		}
	}
	return true
}

// Intersection returns an OrderingChoice that Implies both ordering choices.
// Can only be called if Intersects is true. Some examples:
//
//	+1           ∩ <empty> = +1
//	+1           ∩ +1,+2   = +1,+2
//	+1,+2 opt(3) ∩ +1,+3   = +1,+3,+2
//
// In general, OrderingChoice is not expressive enough to represent the
// intersection. In such cases, an OrderingChoice representing a subset of the
// intersection is returned. For example,
//
//	+1 opt(2) ∩ +2 opt(1)
//
// can be either +1,+2 or +2,+1; only one of these is returned. Note that
// the function may not be commutative in this case. In practice, such cases are
// unlikely.
//
// It is guaranteed that if one OrderingChoice Implies the other, it will also
// be the Intersection.
func (oc *OrderingChoice) Intersection(other *OrderingChoice) OrderingChoice {
	// We have to handle Any cases separately because an Any ordering choice has
	// no optional columns (even though semantically it should have all possible
	// columns as optional).
	if oc.Any() {
		return other.Copy()
	}
	if other.Any() {
		return oc.Copy()
	}

	result := make([]OrderingColumnChoice, 0, len(oc.Columns)+len(other.Columns))

	// Two-pointer merge of the two column sequences. Each step either matches
	// a column from both sides, or consumes a column that is optional on the
	// opposite side.
	left, right := 0, 0
	for left < len(oc.Columns) && right < len(other.Columns) {
		leftCol, rightCol := &oc.Columns[left], &other.Columns[right]

		switch {
		case leftCol.Descending == rightCol.Descending && leftCol.Group.Intersects(rightCol.Group):
			// The columns match.
			result = append(result, OrderingColumnChoice{
				Group:      leftCol.Group.Intersection(rightCol.Group),
				Descending: leftCol.Descending,
			})
			left, right = left+1, right+1

		case leftCol.Group.Intersects(other.Optional):
			// Left column is optional in the right set.
			result = append(result, OrderingColumnChoice{
				Group:      leftCol.Group.Intersection(other.Optional),
				Descending: leftCol.Descending,
			})
			left++

		case rightCol.Group.Intersects(oc.Optional):
			// Right column is optional in the left set.
			result = append(result, OrderingColumnChoice{
				Group:      rightCol.Group.Intersection(oc.Optional),
				Descending: rightCol.Descending,
			})
			right++

		default:
			panic(errors.AssertionFailedf("non-intersecting sets"))
		}
	}
	// An ordering matched a prefix of the other. Append the tail of the other
	// ordering.
	for ; left < len(oc.Columns); left++ {
		result = append(result, oc.Columns[left])
	}
	for ; right < len(other.Columns); right++ {
		result = append(result, other.Columns[right])
	}
	return OrderingChoice{
		Optional: oc.Optional.Intersection(other.Optional),
		Columns:  result,
	}
}

// SubsetOfCols is true if the OrderingChoice only references columns in the
// given set.
func (oc *OrderingChoice) SubsetOfCols(cs opt.ColSet) bool {
	if !oc.Optional.SubsetOf(cs) {
		return false
	}
	for i := range oc.Columns {
		if !oc.Columns[i].Group.SubsetOf(cs) {
			return false
		}
	}
	return true
}

// CanProjectCols is true if at least one column in each ordering column group is
// part of the given column set. For example, if the OrderingChoice is:
//
//	+1,-(2|3) opt(4,5)
//
// then CanProjectCols will behave as follows for these input sets:
//
//	(1,2)   => true
//	(1,3)   => true
//	(1,2,4) => true
//	(1)     => false
//	(3,4)   => false
func (oc *OrderingChoice) CanProjectCols(cs opt.ColSet) bool {
	for i := range oc.Columns {
		if !oc.Columns[i].Group.Intersects(cs) {
			return false
		}
	}
	return true
}

// MatchesAt returns true if the ordering column at the given index in this
// instance matches the given column. The column matches if its id is part of
// the equivalence group and if it has the same direction.
func (oc *OrderingChoice) MatchesAt(index int, col opt.OrderingColumn) bool {
	// Optional columns match at any position.
	if oc.Optional.Contains(col.ID()) {
		return true
	}
	choice := &oc.Columns[index]
	if choice.Descending != col.Descending() {
		return false
	}
	if !choice.Group.Contains(col.ID()) {
		return false
	}
	return true
}

// AppendCol adds a new column to the end of the sequence of ordering columns
// maintained by this instance. The new column has the given ID and direction as
// the only ordering choice.
func (oc *OrderingChoice) AppendCol(id opt.ColumnID, descending bool) {
	ordCol := OrderingColumnChoice{Descending: descending}
	ordCol.Group.Add(id)
	// A column may not be both optional and part of the Columns sequence.
	oc.Optional.Remove(id)
	oc.Columns = append(oc.Columns, ordCol)
}

// Copy returns a complete copy of this instance, with a private version of the
// ordering column array.
func (oc *OrderingChoice) Copy() OrderingChoice {
	var other OrderingChoice
	other.Optional = oc.Optional.Copy()
	other.Columns = make([]OrderingColumnChoice, len(oc.Columns))
	copy(other.Columns, oc.Columns)
	return other
}

// CanSimplify returns true if a call to Simplify would result in any changes to
// the OrderingChoice. Changes include additional constant columns, removed
// groups, and additional equivalent columns. This is used to quickly check
// whether Simplify needs to be called without requiring allocations in the
// common case. This logic should be changed in concert with the Simplify logic.
func (oc *OrderingChoice) CanSimplify(fdset *props.FuncDepSet) bool {
	if oc.Any() {
		// Any ordering allowed, so can't simplify further.
		return false
	}

	// Check whether optional columns can be added by the FD set.
	optional := fdset.ComputeClosure(oc.Optional)
	if !optional.Equals(oc.Optional) {
		return true
	}

	closure := optional
	for i := range oc.Columns {
		group := &oc.Columns[i]

		// If group contains an optional column, then group can be simplified
		// or removed entirely.
		if group.Group.Intersects(optional) {
			return true
		}

		// If group is functionally determined by previous groups, then it can
		// be removed entirely.
		if group.Group.SubsetOf(closure) {
			return true
		}

		// Check whether new equivalent columns can be added by the FD set.
		equiv := fdset.ComputeEquivClosure(group.Group)
		if !equiv.Equals(group.Group) {
			return true
		}

		// Add this group's columns and find closure with new columns.
		closure.UnionWith(equiv)
		closure = fdset.ComputeClosure(closure)
	}
	return false
}

// Simplify uses the given FD set to streamline the orderings allowed by this
// instance, and to potentially increase the number of allowed orderings:
//
//  1. Constant columns add additional optional column choices.
//
//  2. Equivalent columns allow additional choices within an ordering column
//     group.
//
//  3. If the columns in a group are functionally determined by columns from
//     previous groups, the group can be dropped. This technique is described
//     in the "Reduce Order" section of this paper:
//
//     Simmen, David & Shekita, Eugene & Malkemus, Timothy. (1996).
//     Fundamental Techniques for Order Optimization.
//     Sigmod Record. Volume 25 Issue 2, June 1996. Pages 57-67.
//     https://cs.uwaterloo.ca/~gweddell/cs798/p57-simmen.pdf
//
// This logic should be changed in concert with the CanSimplify logic.
func (oc *OrderingChoice) Simplify(fdset *props.FuncDepSet) {
	oc.Optional = fdset.ComputeClosure(oc.Optional)

	closure := oc.Optional
	// n tracks the write position for the in-place compaction of Columns.
	n := 0
	for i := range oc.Columns {
		group := &oc.Columns[i]

		// Constant columns from the FD set become optional ordering columns and
		// so can be removed.
		if group.Group.Intersects(oc.Optional) {
			if group.Group.SubsetOf(oc.Optional) {
				continue
			}
			group.Group = group.Group.Difference(oc.Optional)
		}

		// If this group is functionally determined from previous groups, then
		// discard it.
		if group.Group.SubsetOf(closure) {
			continue
		}

		// Expand group with equivalent columns from FD set.
		group.Group = fdset.ComputeEquivClosure(group.Group)

		// Add this group's columns and find closure with the new columns.
		closure = closure.Union(group.Group)
		closure = fdset.ComputeClosure(closure)

		if n != i {
			oc.Columns[n] = oc.Columns[i]
		}
		n++
	}
	oc.Columns = oc.Columns[:n]

	if len(oc.Columns) == 0 {
		// Normalize Any case by dropping any optional columns.
		oc.Optional = opt.ColSet{}
	}
}

// Truncate removes all ordering columns beyond the given index. For example,
// +1,+(2|3),-4 opt(5,6) would be truncated to:
//
//	prefix=0  => opt(5,6)
//	prefix=1  => +1 opt(5,6)
//	prefix=2  => +1,+(2|3) opt(5,6)
//	prefix=3+ => +1,+(2|3),-4 opt(5,6)
func (oc *OrderingChoice) Truncate(prefix int) {
	if prefix < len(oc.Columns) {
		oc.Columns = oc.Columns[:prefix]
		if len(oc.Columns) == 0 {
			// Normalize Any case by dropping any optional columns.
			oc.Optional = opt.ColSet{}
		}
	}
}

// ProjectCols removes any references to columns that are not in the given
// set. This method can only be used when the OrderingChoice can be expressed
// with the given columns; i.e. all groups have at least one column in the set.
func (oc *OrderingChoice) ProjectCols(cols opt.ColSet) {
	if !oc.Optional.SubsetOf(cols) {
		oc.Optional = oc.Optional.Intersection(cols)
	}
	for i := range oc.Columns {
		if !oc.Columns[i].Group.SubsetOf(cols) {
			oc.Columns[i].Group = oc.Columns[i].Group.Intersection(cols)
			if oc.Columns[i].Group.Empty() {
				panic(errors.AssertionFailedf("no columns left from group"))
			}
		}
	}
}

// PrefixIntersection computes an OrderingChoice which:
//   - implies <oc> (this instance), and
//   - implies a "segmented ordering", which is any ordering which starts with a
//     permutation of all columns in <prefix> followed by the <suffix> ordering.
//
// Note that <prefix> and <suffix> cannot have any columns in common.
//
// Such an ordering can be computed via the following rules:
//
//   - if <prefix> and <suffix> are empty: return this instance.
//
//   - if <oc> is empty: generate an arbitrary segmented ordering.
//
//   - if the first column of <oc> is either in <prefix> or is the first column
//     of <suffix> while <prefix> is empty: this column is the first column of
//     the result; calculate the rest recursively.
func (oc OrderingChoice) PrefixIntersection(
	prefix opt.ColSet, suffix []OrderingColumnChoice,
) (_ OrderingChoice, ok bool) {
	var result OrderingChoice
	// Copy the receiver and the prefix set so the loop below can consume them
	// destructively without mutating the caller's data.
	oc = oc.Copy()
	prefix = prefix.Copy()
	for {
		switch {
		case prefix.Empty() && len(suffix) == 0:
			// Any ordering is allowed by <prefix>+<suffix>, so use <oc> directly.
			result.Columns = append(result.Columns, oc.Columns...)
			return result, true

		case len(oc.Columns) == 0:
			// Any ordering is allowed by <oc>, so pick an arbitrary ordering of the
			// columns in <prefix> then append suffix.
			// TODO(justin): investigate picking an order more intelligently here.
			for col, ok := prefix.Next(0); ok; col, ok = prefix.Next(col + 1) {
				result.AppendCol(col, false /* descending */)
			}
			result.Columns = append(result.Columns, suffix...)
			return result, true

		case prefix.Empty() && len(oc.Columns) > 0 && len(suffix) > 0 &&
			oc.Columns[0].Group.Intersects(suffix[0].Group) &&
			oc.Columns[0].Descending == suffix[0].Descending:
			// <prefix> is empty, and <suffix> and <oc> agree on the first column, so
			// emit that column, remove it from both, and loop.
			newCol := oc.Columns[0]
			newCol.Group = oc.Columns[0].Group.Intersection(suffix[0].Group)
			result.Columns = append(result.Columns, newCol)

			oc.Columns = oc.Columns[1:]
			suffix = suffix[1:]

		case len(oc.Columns) > 0 && prefix.Intersects(oc.Columns[0].Group):
			// <prefix> contains the first column in <oc>, so emit it and remove it
			// from both.
			result.Columns = append(result.Columns, oc.Columns[0])

			prefix.DifferenceWith(oc.Columns[0].Group)
			oc.Columns = oc.Columns[1:]

		default:
			// If no rule applied, fail.
			return OrderingChoice{}, false
		}
	}
}

// Equals returns true if the set of orderings matched by this instance is the
// same as the set matched by the given instance.
func (oc *OrderingChoice) Equals(rhs *OrderingChoice) bool { if len(oc.Columns) != len(rhs.Columns) { return false } if !oc.Optional.Equals(rhs.Optional) { return false } for i := range oc.Columns { left := &oc.Columns[i] y := &rhs.Columns[i] if left.Descending != y.Descending { return false } if !left.Group.Equals(y.Group) { return false } } return true } func (oc OrderingChoice) String() string { var buf bytes.Buffer oc.Format(&buf) return buf.String() } // Format writes the OrderingChoice to the given buffer in a human-readable // string representation that can also be parsed by ParseOrderingChoice: // // +1 // +1,-2 // +(1|2) // +(1|2),+3 // -(3|4),+5 opt(1,2) // func (oc OrderingChoice) Format(buf *bytes.Buffer) { for g := range oc.Columns { group := &oc.Columns[g] count := group.Group.Len() if group.Descending { buf.WriteByte('-') } else { buf.WriteByte('+') } // Write equivalence group. if count > 1 { buf.WriteByte('(') } first := true for i, ok := group.Group.Next(0); ok; i, ok = group.Group.Next(i + 1) { if !first { buf.WriteByte('|') } else { first = false } fmt.Fprintf(buf, "%d", i) } if count > 1 { buf.WriteByte(')') } if g+1 != len(oc.Columns) { buf.WriteByte(',') } } // Write set of optional columns. if !oc.Optional.Empty() { if len(oc.Columns) != 0 { buf.WriteByte(' ') } fmt.Fprintf(buf, "opt%s", oc.Optional) } } // AnyID returns the ID of an arbitrary member of the group of equivalent // columns. func (oc *OrderingColumnChoice) AnyID() opt.ColumnID { id, ok := oc.Group.Next(0) if !ok { panic(errors.AssertionFailedf("column choice group should have at least one column id")) } return id }
/* Copyright 2019 Transwarp All rights reserved. */ package v1alpha1 import ( "encoding/json" "fmt" appsv1 "k8s.io/api/apps/v1" v1 "k8s.io/api/core/v1" metav1 "k8s.io/apimachinery/pkg/apis/meta/v1" runtime "k8s.io/apimachinery/pkg/runtime" "k8s.io/apimachinery/pkg/runtime/schema" "k8s.io/apimachinery/pkg/types" "k8s.io/client-go/kubernetes" "k8s.io/klog" transwarpv1alpha1 "transwarp/isomateset-client/pkg/apis/apiextensions.transwarp.io/v1alpha1" clientv1alpha1 "transwarp/isomateset-client/pkg/client/clientset/versioned/typed/apiextensions.transwarp.io/v1alpha1" ) type GroupVersionKindName struct { schema.GroupVersionKind types.NamespacedName } func (gvkn *GroupVersionKindName) Defaults() { if gvkn.Group == "" { gvkn.Group = "apps" } if gvkn.Version == "" { gvkn.Group = "v1" } if gvkn.Kind == "" { gvkn.Kind = "StatefulSet" } if gvkn.Namespace == "" { gvkn.Namespace = "default" } } func (gvkn *GroupVersionKindName) ValidateStatefulSet() error { if gvkn.Group != "apps" { return fmt.Errorf("invalid group %s", gvkn.Group) } if !(gvkn.Version == "v1" || gvkn.Version == "v1beta1" || gvkn.Version == "v1beta2") { return fmt.Errorf("invalid version %s", gvkn.Version) } if gvkn.Kind != "StatefulSet" { return fmt.Errorf("invalid kind %s", gvkn.Kind) } return nil } func (gvkn *GroupVersionKindName) ValidateIsomateSet() error { if gvkn.Group != "apiextensions.transwarp.io" { return fmt.Errorf("invalid group %s", gvkn.Group) } if gvkn.Version != "v1alpha1" { return fmt.Errorf("invalid version %s", gvkn.Version) } if gvkn.Kind != "IsomateSet" { return fmt.Errorf("invalid kind %s", gvkn.Kind) } return nil } func convert_appsv1_StatefulSetSpec_To_v1alpha1_IsomateSetVersionTemplateSpec(in *appsv1.StatefulSetSpec, version string, out *transwarpv1alpha1.IsomateSetSpec) error { if out == nil { return fmt.Errorf("nil isomateset spec") } if out.VersionTemplates == nil { out.VersionTemplates = make(map[string]*transwarpv1alpha1.VersionTemplateSpec) } if _, ok := 
out.VersionTemplates[version]; ok { return fmt.Errorf("version name conflicts, cannot convert to an already existed version '%s' ", version) } vSpec := new(transwarpv1alpha1.VersionTemplateSpec) if in.Replicas != nil { vSpec.Replicas = new(int32) *vSpec.Replicas = *in.Replicas } if vSpec.Labels == nil { vSpec.Labels = make(map[string]string) } vSpec.Labels[transwarpv1alpha1.IsomateSetVersionNameLabel] = version // if err := k8s_api_v1.Convert_v1_PodTemplateSpec_To_core_PodTemplateSpec(&in.Template, &out.Template, s); err != nil { // return err // } podTemplateSpec := new(v1.PodTemplateSpec) in.Template.DeepCopyInto(podTemplateSpec) vSpec.Template = *podTemplateSpec convert_StatefulSet_VolumeClaimTemplates_To_IsomateSet_VolumeClaimTemplates(&in.VolumeClaimTemplates, &out.VolumeClaimTemplates, vSpec) vSpec.UpdateStrategy.Type = transwarpv1alpha1.IsomateSetUpdateStrategyType(in.UpdateStrategy.Type) if in.UpdateStrategy.RollingUpdate != nil { vSpec.UpdateStrategy.RollingUpdate = new(transwarpv1alpha1.RollingUpdateIsomateSetStrategy) vSpec.UpdateStrategy.RollingUpdate.Partition = new(int32) *vSpec.UpdateStrategy.RollingUpdate.Partition = *in.UpdateStrategy.RollingUpdate.Partition } vSpec.ServiceName = in.ServiceName vSpec.PodManagementPolicy = transwarpv1alpha1.PodManagementPolicyType(in.PodManagementPolicy) out.VersionTemplates[version] = new(transwarpv1alpha1.VersionTemplateSpec) vSpec.DeepCopyInto(out.VersionTemplates[version]) return nil } func convert_StatefulSet_VolumeClaimTemplates_To_IsomateSet_VolumeClaimTemplates( in, out *[]v1.PersistentVolumeClaim, vSpec *transwarpv1alpha1.VersionTemplateSpec) { if in != nil { pvcs := *in numVol := len(pvcs) names := make([]string, len(pvcs)) for i := 0; i < numVol; i++ { pvcs[i].Status = v1.PersistentVolumeClaimStatus{} names[i] = pvcs[i].GetName() } vSpec.VolumeStrategy.Names = append(vSpec.VolumeStrategy.Names, names...) *out = append(*out, pvcs...) 
} } func Convert_StatefulSets_To_v1alpha1_IsomateSet(in ...runtime.Object) (*transwarpv1alpha1.IsomateSet, error) { out := new(transwarpv1alpha1.IsomateSet) err := Convert_StatefulSets_Into_v1alpha1_IsomateSet(out, in...) if err != nil { return nil, err } return out, err } // merge multiple in objs into one single out obj func Convert_StatefulSets_Into_v1alpha1_IsomateSet( out *transwarpv1alpha1.IsomateSet, in ...runtime.Object) error { if out == nil { return fmt.Errorf("out object must not be nil") } if in == nil || len(in) == 0 { return nil } for _, obj := range in { b, err := json.Marshal(obj) if err != nil { return err } sts := new(appsv1.StatefulSet) if err := json.Unmarshal(b, sts); err != nil { return err } convert_appsv1_Statefulset_To_v1alpha1_IsomateSet(sts, out) } out.SetGroupVersionKind(transwarpv1alpha1.SchemeGroupVersion.WithKind("IsomateSet")) return nil } func convert_appsv1_StatefulSetMeta_To_v1alpha1_IsomateSetVersionTemplateMeta(in metav1.ObjectMeta, version string, out *metav1.ObjectMeta) error { if out == nil { out = new(metav1.ObjectMeta) } if out.Labels == nil { out.Labels = make(map[string]string) } for k, v := range in.Labels { out.Labels[k] = v } if out.Annotations == nil { out.Annotations = make(map[string]string) } for k, v := range in.Annotations { out.Annotations[k] = v } return nil } func convert_appsv1_Statefulset_To_v1alpha1_IsomateSet(in *appsv1.StatefulSet, out *transwarpv1alpha1.IsomateSet) error { version := in.GetName() if out.GetNamespace() == "" { out.SetNamespace(in.GetNamespace()) } if out.GetName() == "" { out.SetName(in.GetName()) } if out.Spec.Selector == nil { out.Spec.Selector = new(metav1.LabelSelector) } if out.Spec.Selector.MatchLabels == nil { out.Spec.Selector.MatchLabels = make(map[string]string) } out.Spec.Selector.MatchLabels[transwarpv1alpha1.IsomateSetNameLabel] = out.GetName() if err := convert_appsv1_StatefulSetSpec_To_v1alpha1_IsomateSetVersionTemplateSpec(&in.Spec, version, &out.Spec); err != nil { return 
err } if err := convert_appsv1_StatefulSetMeta_To_v1alpha1_IsomateSetVersionTemplateMeta(in.ObjectMeta, version, &out.Spec.VersionTemplates[version].ObjectMeta); err != nil { return err } return nil } func Convert_Incluster_StatefulSet_To_v1alpha1_IsomateSet( k8sClient kubernetes.Interface, imsClient clientv1alpha1.ApiextensionsV1alpha1Interface, out *transwarpv1alpha1.IsomateSet, in ...GroupVersionKindName) error { if out == nil { return fmt.Errorf("out object must not be nil") } for _, gvkn := range in { gvkn.Defaults() if gvkn.Kind != "StatefulSet" { klog.V(4).Infoln("not a StatefulSet resources, skip conversion") return nil } if gvkn.Group != "apps" { klog.V(4).Infof("unsupport group %s", gvkn.Group) return nil } switch gvkn.Version { case "v1": sts, err := k8sClient.AppsV1().StatefulSets(gvkn.Namespace).Get(gvkn.Name, metav1.GetOptions{}) if err != nil { return err } // if sts.Annotations if err = Convert_StatefulSets_Into_v1alpha1_IsomateSet(out, sts); err != nil { return err } case "v1beta1": sts, err := k8sClient.AppsV1beta1().StatefulSets(gvkn.Namespace).Get(gvkn.Name, metav1.GetOptions{}) if err != nil { return err } if err = Convert_StatefulSets_Into_v1alpha1_IsomateSet(out, sts); err != nil { return err } case "v1beta2": sts, err := k8sClient.AppsV1beta2().StatefulSets(gvkn.Namespace).Get(gvkn.Name, metav1.GetOptions{}) if err != nil { return err } if err = Convert_StatefulSets_Into_v1alpha1_IsomateSet(out, sts); err != nil { return err } } } return nil }
package utils

import (
	"fmt"

	"github.com/astaxie/beego/orm"
	_ "github.com/go-sql-driver/mysql"
)

// User is the demo model mapped to the `user` table.
type User struct {
	Id   int
	Name string
	Age  int8
}

func init() {
	// mysql / sqlite3 / postgres drivers are registered by default, so no
	// explicit orm.RegisterDriver call is needed for them.
	//orm.RegisterDriver("mysql", orm.DRMySQL)
	maxIdle := 30
	maxConn := 30
	orm.RegisterDataBase("default", "mysql", "root:root@tcp(127.0.0.1:3306)/beego?charset=utf8", maxIdle, maxConn)
	orm.RegisterModel(new(User))
	// Redundant when already passed to RegisterDataBase above; kept to show
	// the standalone setters.
	orm.SetMaxIdleConns("default", maxIdle)
	orm.SetMaxOpenConns("default", maxConn)
	//orm.RunSyncdb("default", false, true)
}

// doOrm demonstrates basic ORM CRUD operations: insert, update, read, delete.
func doOrm() {
	o := orm.NewOrm()

	user := User{Name: "wanyang", Age: 28}
	id, err := o.Insert(&user)
	fmt.Printf("ID: %d, ERR: %v\n", id, err)

	user.Name = "wanyang3"
	num, err := o.Update(&user, "Name")
	fmt.Printf("NUM: %d, ERR: %v\n", num, err)

	u := User{Id: user.Id}
	err = o.Read(&u)
	fmt.Printf("ERR: %v\n", err)

	num, err = o.Delete(&u)
	fmt.Printf("NUM: %d, ERR: %v\n", num, err)
}

// doSql demonstrates running raw SQL statements through the ORM.
func doSql() {
	o := orm.NewOrm()

	// Exec returns a sql.Result object.
	rs, err := o.Raw("update user set age = ? where id = ?", "30", 5).Exec()
	if err == nil {
		rowsAffected, _ := rs.RowsAffected()
		lastInsertId, _ := rs.LastInsertId()
		fmt.Printf("rs.RowsAffected: %v, rs.LastInsertId: %v\n", rowsAffected, lastInsertId)
	}
	fmt.Println("0: --------------------------------------------")

	// Values returns the result set as key => value maps.
	var value []orm.Params
	num, err := o.Raw("select * from user").Values(&value)
	if err == nil && num > 0 {
		for _, term := range value {
			fmt.Printf("Id: %s, Name: %s, Age: %s\n", term["id"], term["name"], term["age"])
		}
	}
	fmt.Println("1: --------------------------------------------")

	// ValuesList returns the result set as a slice of row slices.
	var lists []orm.ParamsList
	num, err = o.Raw("select * from user where id > ?", 3).ValuesList(&lists)
	if err == nil && num > 0 {
		for _, list := range lists {
			fmt.Printf("User: %v\n", list)
		}
	}
	fmt.Println("2: --------------------------------------------")

	// ValuesFlat returns a single column flattened into one slice.
	var list orm.ParamsList
	num, err = o.Raw("select name from user where id > ?", 2).ValuesFlat(&list)
	if err == nil && num > 0 {
		for _, val := range list {
			fmt.Printf("User.Name: %s\n", val)
		}
	}
	fmt.Println("3: --------------------------------------------")

	// RowsToMap maps the query result into a map.
	res := make(orm.Params)
	num, err = o.Raw("select id, name from user").RowsToMap(&res, "id", "name")
	if err == nil && num > 0 {
		for k, v := range res {
			fmt.Printf("id: %s, name: %s\n", k, v)
		}
	}
	fmt.Println("4: --------------------------------------------")

	// RowsToStruct maps the query result into a struct.
	// BUG FIX: the original passed &res (the map from the previous example)
	// here, so str was never populated and the prints below showed zero values.
	str := new(User)
	num, err = o.Raw("select id, name from user").RowsToStruct(str, "id", "name")
	if err == nil && num > 0 {
		fmt.Printf("Id: %v\n", str.Id)
		fmt.Printf("Name: %v\n", str.Name)
	}
	fmt.Println("5: --------------------------------------------")

	// QueryRow provides high-level SQL mapping into a struct.
	var user User
	//err := o.Raw("select * from user where id = ?", 5).QueryRow(&user.Id, &user.Name, &user.Age)
	err = o.Raw("select * from user where id = ?", 5).QueryRow(&user)
	if err == nil {
		fmt.Printf("Id: %d, Name: %s, Age: %d\n", user.Id, user.Name, user.Age)
	}
	fmt.Println("6: --------------------------------------------")

	// QueryRows provides high-level SQL mapping into slices of columns or
	// a slice of structs.
	var id []int
	var name []string
	var age []int8
	num, err = o.Raw("select * from user where id >= 3").QueryRows(&id, &name, &age)
	fmt.Printf("Total row: %d\n", num)
	if err == nil {
		fmt.Printf("Id: %v, Name: %v, Age: %v\n", id, name, age)
		for index, value := range id {
			fmt.Printf("Row: %d, Id: %d, Name: %s, Age: %d\n", index+1, value, name[index], age[index])
		}
	}

	var users []User
	num1, err := o.Raw("select * from user where id >= 3").QueryRows(&users)
	fmt.Printf("Total row: %d\n", num1)
	if err == nil {
		fmt.Printf("User: %v\n", users)
		for index, u := range users {
			fmt.Printf("Row: %d, Id: %d, Name: %s, Age: %d\n", index+1, u.Id, u.Name, u.Age)
		}
	}
	fmt.Println("7: ---------------------------------------------")

	// Prepare once, Exec many times: speeds up batched statements.
	// BUG FIX: guard against a failed Prepare before calling Exec/Close.
	rp, err := o.Raw("update user set name= ? where id = ?").Prepare()
	if err == nil {
		rs, err = rp.Exec("wanyang33", 4)
		rs, err = rp.Exec("Beyta", 5)
		rp.Close()
	}
}

// doTransaction demonstrates transaction handling: commit on success,
// roll back on error.
func doTransaction() {
	o := orm.NewOrm()
	// BUG FIX: the Begin error was previously ignored; without a started
	// transaction, Rollback/Commit below are meaningless.
	if err := o.Begin(); err != nil {
		fmt.Printf("BEGIN ERR: %v\n", err)
		return
	}
	user := User{Id: 5, Name: "Lucy", Age: 19}
	id, err := o.Insert(&user)
	fmt.Printf("ID: %d, ERR: %v\n", id, err)
	if err != nil {
		o.Rollback()
	} else {
		o.Commit()
	}
}

// doQueryBuilder demonstrates QueryBuilder: ORM suits simple CRUD, while
// QueryBuilder is better for complex queries.
func doQueryBuilder() {
	qb, _ := orm.NewQueryBuilder("mysql")
	qb.Select("id,name,age").
		From("user").
		Where("age > ?").
		And("id < ?").
		OrderBy("age").
		Asc().
		Limit(2).
		Offset(0)
	sql := qb.String()
	fmt.Printf("sql: %s\n", sql)

	var users []User
	o := orm.NewOrm()
	num, err := o.Raw(sql, 20, 100).QueryRows(&users)
	if err == nil && num > 0 {
		for _, user := range users {
			fmt.Printf("Id: %d, Name: %s, Age: %d\n", user.Id, user.Name, user.Age)
		}
	}
}

func main() {
	//doOrm()
	orm.Debug = true
	//doSql()
	//doTransaction()
	doQueryBuilder()
}
package main

import "fmt"

// Student is a simple record with a name, an age, and a grade.
type Student struct {
	name  string
	age   int
	grade int
}

// setName overwrites the student's name in place via the pointer receiver
// argument; the other fields are left untouched.
func setName(s *Student, newName string) {
	s.name = newName
}

func main() {
	student := Student{name: "phj", age: 20, grade: 99}
	fmt.Println("변경전", student)
	setName(&student, "newName")
	fmt.Println("변경후", student)
}
// Package DTO holds the data-transfer objects exchanged over the API.
package DTO

// UsersListDTO is the JSON payload carrying a list of usernames,
// serialized under the "usernames" key.
type UsersListDTO struct {
	Usernames []string `json:"usernames"`
}
package service import "app/dao" func GetUser(id int) (dao.User, error) { user := dao.User{} err := user.GetOne(id) return user, err }
package mocks

import "github.com/stretchr/testify/mock"
import "github.com/Lunchr/luncher-api/db/model"

// RegistrationAccessTokens is a testify mock for the registration access
// token collection. Expectations are configured via the embedded mock.Mock
// (e.g. m.On("Insert", ...).Return(...)).
type RegistrationAccessTokens struct {
	mock.Mock
}

// Insert records the call and returns either the value produced by a
// configured return-function, or the stubbed (*model.RegistrationAccessToken,
// error) pair from the expectation.
func (_m *RegistrationAccessTokens) Insert(_a0 *model.RegistrationAccessToken) (*model.RegistrationAccessToken, error) {
	ret := _m.Called(_a0)

	var r0 *model.RegistrationAccessToken
	// Prefer a dynamic return-function if the expectation supplied one.
	if rf, ok := ret.Get(0).(func(*model.RegistrationAccessToken) *model.RegistrationAccessToken); ok {
		r0 = rf(_a0)
	} else {
		// Guard against a stubbed nil before asserting the concrete type.
		if ret.Get(0) != nil {
			r0 = ret.Get(0).(*model.RegistrationAccessToken)
		}
	}

	var r1 error
	if rf, ok := ret.Get(1).(func(*model.RegistrationAccessToken) error); ok {
		r1 = rf(_a0)
	} else {
		r1 = ret.Error(1)
	}

	return r0, r1
}

// Exists records the call and returns the stubbed (bool, error) pair, or the
// result of a configured return-function.
func (_m *RegistrationAccessTokens) Exists(_a0 model.Token) (bool, error) {
	ret := _m.Called(_a0)

	var r0 bool
	if rf, ok := ret.Get(0).(func(model.Token) bool); ok {
		r0 = rf(_a0)
	} else {
		r0 = ret.Get(0).(bool)
	}

	var r1 error
	if rf, ok := ret.Get(1).(func(model.Token) error); ok {
		r1 = rf(_a0)
	} else {
		r1 = ret.Error(1)
	}

	return r0, r1
}
package lfsapi

import (
	"errors"
	"net/url"
	"path/filepath"
	"testing"
	"time"

	"github.com/stretchr/testify/assert"
	"github.com/stretchr/testify/require"
)

// --- sshCache.Resolve tests -------------------------------------------------
// The cache key format is "<userandhost>//<port>//<path>//<method>". A fake
// resolver backs the cache so each test can distinguish a cache hit ("cache")
// from a fresh resolve ("real").

func TestSSHCacheResolveFromCache(t *testing.T) {
	ssh := newFakeResolver()
	cache := withSSHCache(ssh).(*sshCache)
	cache.endpoints["userandhost//1//path//post"] = &sshAuthResponse{
		Href:      "cache",
		createdAt: time.Now(),
	}
	ssh.responses["userandhost"] = sshAuthResponse{Href: "real"}

	e := Endpoint{
		SshUserAndHost: "userandhost",
		SshPort:        "1",
		SshPath:        "path",
	}

	res, err := cache.Resolve(e, "post")
	assert.Nil(t, err)
	// No expiry info on the entry: the cached response is served.
	assert.Equal(t, "cache", res.Href)
}

func TestSSHCacheResolveFromCacheWithFutureExpiresAt(t *testing.T) {
	ssh := newFakeResolver()
	cache := withSSHCache(ssh).(*sshCache)
	cache.endpoints["userandhost//1//path//post"] = &sshAuthResponse{
		Href:      "cache",
		ExpiresAt: time.Now().Add(time.Duration(1) * time.Hour),
		createdAt: time.Now(),
	}
	ssh.responses["userandhost"] = sshAuthResponse{Href: "real"}

	e := Endpoint{
		SshUserAndHost: "userandhost",
		SshPort:        "1",
		SshPath:        "path",
	}

	res, err := cache.Resolve(e, "post")
	assert.Nil(t, err)
	assert.Equal(t, "cache", res.Href)
}

func TestSSHCacheResolveFromCacheWithFutureExpiresIn(t *testing.T) {
	ssh := newFakeResolver()
	cache := withSSHCache(ssh).(*sshCache)
	cache.endpoints["userandhost//1//path//post"] = &sshAuthResponse{
		Href:      "cache",
		ExpiresIn: 60 * 60,
		createdAt: time.Now(),
	}
	ssh.responses["userandhost"] = sshAuthResponse{Href: "real"}

	e := Endpoint{
		SshUserAndHost: "userandhost",
		SshPort:        "1",
		SshPath:        "path",
	}

	res, err := cache.Resolve(e, "post")
	assert.Nil(t, err)
	assert.Equal(t, "cache", res.Href)
}

func TestSSHCacheResolveFromCacheWithPastExpiresAt(t *testing.T) {
	ssh := newFakeResolver()
	cache := withSSHCache(ssh).(*sshCache)
	cache.endpoints["userandhost//1//path//post"] = &sshAuthResponse{
		Href:      "cache",
		ExpiresAt: time.Now().Add(time.Duration(-1) * time.Hour),
		createdAt: time.Now(),
	}
	ssh.responses["userandhost"] = sshAuthResponse{Href: "real"}

	e := Endpoint{
		SshUserAndHost: "userandhost",
		SshPort:        "1",
		SshPath:        "path",
	}

	res, err := cache.Resolve(e, "post")
	assert.Nil(t, err)
	// Expired entry: the resolver is consulted again.
	assert.Equal(t, "real", res.Href)
}

func TestSSHCacheResolveFromCacheWithPastExpiresIn(t *testing.T) {
	ssh := newFakeResolver()
	cache := withSSHCache(ssh).(*sshCache)
	cache.endpoints["userandhost//1//path//post"] = &sshAuthResponse{
		Href:      "cache",
		ExpiresIn: -60 * 60,
		createdAt: time.Now(),
	}
	ssh.responses["userandhost"] = sshAuthResponse{Href: "real"}

	e := Endpoint{
		SshUserAndHost: "userandhost",
		SshPort:        "1",
		SshPath:        "path",
	}

	res, err := cache.Resolve(e, "post")
	assert.Nil(t, err)
	assert.Equal(t, "real", res.Href)
}

func TestSSHCacheResolveFromCacheWithAmbiguousExpirationInfo(t *testing.T) {
	ssh := newFakeResolver()
	cache := withSSHCache(ssh).(*sshCache)
	// Both fields set and disagreeing: the future ExpiresIn wins over the
	// past ExpiresAt, so the cached entry is still considered valid.
	cache.endpoints["userandhost//1//path//post"] = &sshAuthResponse{
		Href:      "cache",
		ExpiresIn: 60 * 60,
		ExpiresAt: time.Now().Add(-1 * time.Hour),
		createdAt: time.Now(),
	}
	ssh.responses["userandhost"] = sshAuthResponse{Href: "real"}

	e := Endpoint{
		SshUserAndHost: "userandhost",
		SshPort:        "1",
		SshPath:        "path",
	}

	res, err := cache.Resolve(e, "post")
	assert.Nil(t, err)
	assert.Equal(t, "cache", res.Href)
}

func TestSSHCacheResolveWithoutError(t *testing.T) {
	ssh := newFakeResolver()
	cache := withSSHCache(ssh).(*sshCache)
	assert.Equal(t, 0, len(cache.endpoints))

	ssh.responses["userandhost"] = sshAuthResponse{Href: "real"}

	e := Endpoint{
		SshUserAndHost: "userandhost",
		SshPort:        "1",
		SshPath:        "path",
	}

	res, err := cache.Resolve(e, "post")
	assert.Nil(t, err)
	assert.Equal(t, "real", res.Href)

	// A successful resolve is stored under the composite key.
	assert.Equal(t, 1, len(cache.endpoints))
	cacheres, ok := cache.endpoints["userandhost//1//path//post"]
	assert.True(t, ok)
	assert.NotNil(t, cacheres)
	assert.Equal(t, "real", cacheres.Href)

	// Even with the resolver response removed, the cached entry answers.
	delete(ssh.responses, "userandhost")
	res2, err := cache.Resolve(e, "post")
	assert.Nil(t, err)
	assert.Equal(t, "real", res2.Href)
}

func TestSSHCacheResolveWithError(t *testing.T) {
	ssh := newFakeResolver()
	cache := withSSHCache(ssh).(*sshCache)
	assert.Equal(t, 0, len(cache.endpoints))

	ssh.responses["userandhost"] = sshAuthResponse{Message: "resolve error", Href: "real"}

	e := Endpoint{
		SshUserAndHost: "userandhost",
		SshPort:        "1",
		SshPath:        "path",
	}

	res, err := cache.Resolve(e, "post")
	assert.NotNil(t, err)
	assert.Equal(t, "real", res.Href)
	// Failed resolves must not be cached.
	assert.Equal(t, 0, len(cache.endpoints))
	delete(ssh.responses, "userandhost")
	res2, err := cache.Resolve(e, "post")
	assert.Nil(t, err)
	assert.Equal(t, "", res2.Href)
}

func newFakeResolver() *fakeResolver {
	return &fakeResolver{responses: make(map[string]sshAuthResponse)}
}

// fakeResolver is a canned SSHResolver: responses are keyed by the endpoint's
// SshUserAndHost, and a non-empty Message is surfaced as the resolve error.
type fakeResolver struct {
	responses map[string]sshAuthResponse
}

func (r *fakeResolver) Resolve(e Endpoint, method string) (sshAuthResponse, error) {
	res := r.responses[e.SshUserAndHost]
	var err error
	if len(res.Message) > 0 {
		err = errors.New(res.Message)
	}
	res.createdAt = time.Now()
	return res, err
}

// --- sshGetLFSExeAndArgs / sshGetExeAndArgs tests ---------------------------
// These verify the exact executable and argument vector built for plain ssh,
// plink, tortoiseplink, GIT_SSH, and GIT_SSH_COMMAND configurations.

func TestSSHGetLFSExeAndArgs(t *testing.T) {
	cli, err := NewClient(UniqTestEnv(map[string]string{}), nil)
	require.Nil(t, err)

	endpoint := cli.Endpoints.Endpoint("download", "")
	endpoint.SshUserAndHost = "user@foo.com"
	endpoint.SshPath = "user/repo"

	exe, args := sshGetLFSExeAndArgs(cli.OSEnv(), endpoint, "GET")
	assert.Equal(t, "ssh", exe)
	assert.Equal(t, []string{
		"--",
		"user@foo.com",
		"git-lfs-authenticate user/repo download",
	}, args)

	exe, args = sshGetLFSExeAndArgs(cli.OSEnv(), endpoint, "HEAD")
	assert.Equal(t, "ssh", exe)
	assert.Equal(t, []string{
		"--",
		"user@foo.com",
		"git-lfs-authenticate user/repo download",
	}, args)

	// this is going by endpoint.Operation, implicitly set by Endpoint() on L15.
	exe, args = sshGetLFSExeAndArgs(cli.OSEnv(), endpoint, "POST")
	assert.Equal(t, "ssh", exe)
	assert.Equal(t, []string{
		"--",
		"user@foo.com",
		"git-lfs-authenticate user/repo download",
	}, args)

	endpoint.Operation = "upload"
	exe, args = sshGetLFSExeAndArgs(cli.OSEnv(), endpoint, "POST")
	assert.Equal(t, "ssh", exe)
	assert.Equal(t, []string{
		"--",
		"user@foo.com",
		"git-lfs-authenticate user/repo upload",
	}, args)
}

func TestSSHGetExeAndArgsSsh(t *testing.T) {
	cli, err := NewClient(UniqTestEnv(map[string]string{
		"GIT_SSH_COMMAND": "",
		"GIT_SSH":         "",
	}), nil)
	require.Nil(t, err)

	endpoint := cli.Endpoints.Endpoint("download", "")
	endpoint.SshUserAndHost = "user@foo.com"

	exe, args := sshGetExeAndArgs(cli.OSEnv(), endpoint)
	assert.Equal(t, "ssh", exe)
	assert.Equal(t, []string{"--", "user@foo.com"}, args)
}

func TestSSHGetExeAndArgsSshCustomPort(t *testing.T) {
	cli, err := NewClient(UniqTestEnv(map[string]string{
		"GIT_SSH_COMMAND": "",
		"GIT_SSH":         "",
	}), nil)
	require.Nil(t, err)

	endpoint := cli.Endpoints.Endpoint("download", "")
	endpoint.SshUserAndHost = "user@foo.com"
	endpoint.SshPort = "8888"

	exe, args := sshGetExeAndArgs(cli.OSEnv(), endpoint)
	assert.Equal(t, "ssh", exe)
	assert.Equal(t, []string{"-p", "8888", "--", "user@foo.com"}, args)
}

func TestSSHGetExeAndArgsPlink(t *testing.T) {
	plink := filepath.Join("Users", "joebloggs", "bin", "plink.exe")

	cli, err := NewClient(UniqTestEnv(map[string]string{
		"GIT_SSH_COMMAND": "",
		"GIT_SSH":         plink,
	}), nil)
	require.Nil(t, err)

	endpoint := cli.Endpoints.Endpoint("download", "")
	endpoint.SshUserAndHost = "user@foo.com"

	exe, args := sshGetExeAndArgs(cli.OSEnv(), endpoint)
	assert.Equal(t, plink, exe)
	assert.Equal(t, []string{"user@foo.com"}, args)
}

func TestSSHGetExeAndArgsPlinkCustomPort(t *testing.T) {
	plink := filepath.Join("Users", "joebloggs", "bin", "plink")

	cli, err := NewClient(UniqTestEnv(map[string]string{
		"GIT_SSH_COMMAND": "",
		"GIT_SSH":         plink,
	}), nil)
	require.Nil(t, err)

	endpoint := cli.Endpoints.Endpoint("download", "")
	endpoint.SshUserAndHost = "user@foo.com"
	endpoint.SshPort = "8888"

	// plink uses capital -P for the port, unlike ssh's -p.
	exe, args := sshGetExeAndArgs(cli.OSEnv(), endpoint)
	assert.Equal(t, plink, exe)
	assert.Equal(t, []string{"-P", "8888", "user@foo.com"}, args)
}

func TestSSHGetExeAndArgsTortoisePlink(t *testing.T) {
	plink := filepath.Join("Users", "joebloggs", "bin", "tortoiseplink.exe")

	cli, err := NewClient(UniqTestEnv(map[string]string{
		"GIT_SSH_COMMAND": "",
		"GIT_SSH":         plink,
	}), nil)
	require.Nil(t, err)

	endpoint := cli.Endpoints.Endpoint("download", "")
	endpoint.SshUserAndHost = "user@foo.com"

	exe, args := sshGetExeAndArgs(cli.OSEnv(), endpoint)
	assert.Equal(t, plink, exe)
	assert.Equal(t, []string{"-batch", "user@foo.com"}, args)
}

func TestSSHGetExeAndArgsTortoisePlinkCustomPort(t *testing.T) {
	plink := filepath.Join("Users", "joebloggs", "bin", "tortoiseplink")

	cli, err := NewClient(UniqTestEnv(map[string]string{
		"GIT_SSH_COMMAND": "",
		"GIT_SSH":         plink,
	}), nil)
	require.Nil(t, err)

	endpoint := cli.Endpoints.Endpoint("download", "")
	endpoint.SshUserAndHost = "user@foo.com"
	endpoint.SshPort = "8888"

	exe, args := sshGetExeAndArgs(cli.OSEnv(), endpoint)
	assert.Equal(t, plink, exe)
	assert.Equal(t, []string{"-batch", "-P", "8888", "user@foo.com"}, args)
}

func TestSSHGetExeAndArgsSshCommandPrecedence(t *testing.T) {
	// GIT_SSH_COMMAND wins over GIT_SSH when both are set.
	cli, err := NewClient(UniqTestEnv(map[string]string{
		"GIT_SSH_COMMAND": "sshcmd",
		"GIT_SSH":         "bad",
	}), nil)
	require.Nil(t, err)

	endpoint := cli.Endpoints.Endpoint("download", "")
	endpoint.SshUserAndHost = "user@foo.com"

	exe, args := sshGetExeAndArgs(cli.OSEnv(), endpoint)
	assert.Equal(t, "sshcmd", exe)
	assert.Equal(t, []string{"user@foo.com"}, args)
}

func TestSSHGetExeAndArgsSshCommandArgs(t *testing.T) {
	cli, err := NewClient(UniqTestEnv(map[string]string{
		"GIT_SSH_COMMAND": "sshcmd --args 1",
	}), nil)
	require.Nil(t, err)

	endpoint := cli.Endpoints.Endpoint("download", "")
	endpoint.SshUserAndHost = "user@foo.com"

	exe, args := sshGetExeAndArgs(cli.OSEnv(), endpoint)
	assert.Equal(t, "sshcmd", exe)
	assert.Equal(t, []string{"--args", "1", "user@foo.com"}, args)
}

func TestSSHGetExeAndArgsSshCommandArgsWithMixedQuotes(t *testing.T) {
	// Single quotes in GIT_SSH_COMMAND group a double-quoted argument.
	cli, err := NewClient(UniqTestEnv(map[string]string{
		"GIT_SSH_COMMAND": "sshcmd foo 'bar \"baz\"'",
	}), nil)
	require.Nil(t, err)

	endpoint := cli.Endpoints.Endpoint("download", "")
	endpoint.SshUserAndHost = "user@foo.com"

	exe, args := sshGetExeAndArgs(cli.OSEnv(), endpoint)
	assert.Equal(t, "sshcmd", exe)
	assert.Equal(t, []string{"foo", `bar "baz"`, "user@foo.com"}, args)
}

func TestSSHGetExeAndArgsSshCommandCustomPort(t *testing.T) {
	cli, err := NewClient(UniqTestEnv(map[string]string{
		"GIT_SSH_COMMAND": "sshcmd",
	}), nil)
	require.Nil(t, err)

	endpoint := cli.Endpoints.Endpoint("download", "")
	endpoint.SshUserAndHost = "user@foo.com"
	endpoint.SshPort = "8888"

	exe, args := sshGetExeAndArgs(cli.OSEnv(), endpoint)
	assert.Equal(t, "sshcmd", exe)
	assert.Equal(t, []string{"-p", "8888", "user@foo.com"}, args)
}

func TestSSHGetLFSExeAndArgsWithCustomSSH(t *testing.T) {
	cli, err := NewClient(UniqTestEnv(map[string]string{
		"GIT_SSH": "not-ssh",
	}), nil)
	require.Nil(t, err)

	u, err := url.Parse("ssh://git@host.com:12345/repo")
	require.Nil(t, err)

	e := endpointFromSshUrl(u)
	t.Logf("ENDPOINT: %+v", e)
	assert.Equal(t, "12345", e.SshPort)
	assert.Equal(t, "git@host.com", e.SshUserAndHost)
	assert.Equal(t, "repo", e.SshPath)

	exe, args := sshGetLFSExeAndArgs(cli.OSEnv(), e, "GET")
	assert.Equal(t, "not-ssh", exe)
	assert.Equal(t, []string{"-p", "12345", "git@host.com", "git-lfs-authenticate repo download"}, args)
}

// The "InvalidOptionsAsHost/Path" tests guard against option-injection via a
// host or path that starts with a dash (e.g. -oProxyCommand=...).

func TestSSHGetLFSExeAndArgsInvalidOptionsAsHost(t *testing.T) {
	cli, err := NewClient(nil, nil)
	require.Nil(t, err)

	u, err := url.Parse("ssh://-oProxyCommand=gnome-calculator/repo")
	require.Nil(t, err)
	assert.Equal(t, "-oProxyCommand=gnome-calculator", u.Host)

	e := endpointFromSshUrl(u)
	t.Logf("ENDPOINT: %+v", e)
	assert.Equal(t, "-oProxyCommand=gnome-calculator", e.SshUserAndHost)
	assert.Equal(t, "repo", e.SshPath)

	// "--" terminates option parsing so the host cannot be read as an option.
	exe, args := sshGetLFSExeAndArgs(cli.OSEnv(), e, "GET")
	assert.Equal(t, "ssh", exe)
	assert.Equal(t, []string{"--", "-oProxyCommand=gnome-calculator", "git-lfs-authenticate repo download"}, args)
}

func TestSSHGetLFSExeAndArgsInvalidOptionsAsHostWithCustomSSH(t *testing.T) {
	cli, err := NewClient(UniqTestEnv(map[string]string{
		"GIT_SSH": "not-ssh",
	}), nil)
	require.Nil(t, err)

	u, err := url.Parse("ssh://--oProxyCommand=gnome-calculator/repo")
	require.Nil(t, err)
	assert.Equal(t, "--oProxyCommand=gnome-calculator", u.Host)

	e := endpointFromSshUrl(u)
	t.Logf("ENDPOINT: %+v", e)
	assert.Equal(t, "--oProxyCommand=gnome-calculator", e.SshUserAndHost)
	assert.Equal(t, "repo", e.SshPath)

	// With a custom SSH the leading dashes are stripped instead.
	exe, args := sshGetLFSExeAndArgs(cli.OSEnv(), e, "GET")
	assert.Equal(t, "not-ssh", exe)
	assert.Equal(t, []string{"oProxyCommand=gnome-calculator", "git-lfs-authenticate repo download"}, args)
}

func TestSSHGetExeAndArgsInvalidOptionsAsHost(t *testing.T) {
	cli, err := NewClient(nil, nil)
	require.Nil(t, err)

	u, err := url.Parse("ssh://-oProxyCommand=gnome-calculator")
	require.Nil(t, err)
	assert.Equal(t, "-oProxyCommand=gnome-calculator", u.Host)

	e := endpointFromSshUrl(u)
	t.Logf("ENDPOINT: %+v", e)
	assert.Equal(t, "-oProxyCommand=gnome-calculator", e.SshUserAndHost)
	assert.Equal(t, "", e.SshPath)

	exe, args := sshGetExeAndArgs(cli.OSEnv(), e)
	assert.Equal(t, "ssh", exe)
	assert.Equal(t, []string{"--", "-oProxyCommand=gnome-calculator"}, args)
}

func TestSSHGetExeAndArgsInvalidOptionsAsPath(t *testing.T) {
	cli, err := NewClient(nil, nil)
	require.Nil(t, err)

	u, err := url.Parse("ssh://git@git-host.com/-oProxyCommand=gnome-calculator")
	require.Nil(t, err)
	assert.Equal(t, "git-host.com", u.Host)

	e := endpointFromSshUrl(u)
	t.Logf("ENDPOINT: %+v", e)
	assert.Equal(t, "git@git-host.com", e.SshUserAndHost)
	assert.Equal(t, "-oProxyCommand=gnome-calculator", e.SshPath)

	exe, args := sshGetExeAndArgs(cli.OSEnv(), e)
	assert.Equal(t, "ssh", exe)
	assert.Equal(t, []string{"--", "git@git-host.com"}, args)
}

func TestParseBareSSHUrl(t *testing.T) {
	// scp-style "host:path" parses; "host/path" and bare options do not.
	e := endpointFromBareSshUrl("git@git-host.com:repo.git")
	t.Logf("endpoint: %+v", e)
	assert.Equal(t, "git@git-host.com", e.SshUserAndHost)
	assert.Equal(t, "repo.git", e.SshPath)

	e = endpointFromBareSshUrl("git@git-host.com/should-be-a-colon.git")
	t.Logf("endpoint: %+v", e)
	assert.Equal(t, "", e.SshUserAndHost)
	assert.Equal(t, "", e.SshPath)

	e = endpointFromBareSshUrl("-oProxyCommand=gnome-calculator")
	t.Logf("endpoint: %+v", e)
	assert.Equal(t, "", e.SshUserAndHost)
	assert.Equal(t, "", e.SshPath)

	e = endpointFromBareSshUrl("git@git-host.com:-oProxyCommand=gnome-calculator")
	t.Logf("endpoint: %+v", e)
	assert.Equal(t, "git@git-host.com", e.SshUserAndHost)
	assert.Equal(t, "-oProxyCommand=gnome-calculator", e.SshPath)
}

func TestSSHGetExeAndArgsPlinkCommand(t *testing.T) {
	plink := filepath.Join("Users", "joebloggs", "bin", "plink.exe")

	cli, err := NewClient(UniqTestEnv(map[string]string{
		"GIT_SSH_COMMAND": plink,
	}), nil)
	require.Nil(t, err)

	endpoint := cli.Endpoints.Endpoint("download", "")
	endpoint.SshUserAndHost = "user@foo.com"

	exe, args := sshGetExeAndArgs(cli.OSEnv(), endpoint)
	assert.Equal(t, plink, exe)
	assert.Equal(t, []string{"user@foo.com"}, args)
}

func TestSSHGetExeAndArgsPlinkCommandCustomPort(t *testing.T) {
	plink := filepath.Join("Users", "joebloggs", "bin", "plink")

	cli, err := NewClient(UniqTestEnv(map[string]string{
		"GIT_SSH_COMMAND": plink,
	}), nil)
	require.Nil(t, err)

	endpoint := cli.Endpoints.Endpoint("download", "")
	endpoint.SshUserAndHost = "user@foo.com"
	endpoint.SshPort = "8888"

	exe, args := sshGetExeAndArgs(cli.OSEnv(), endpoint)
	assert.Equal(t, plink, exe)
	assert.Equal(t, []string{"-P", "8888", "user@foo.com"}, args)
}

func TestSSHGetExeAndArgsTortoisePlinkCommand(t *testing.T) {
	plink := filepath.Join("Users", "joebloggs", "bin", "tortoiseplink.exe")

	cli, err := NewClient(UniqTestEnv(map[string]string{
		"GIT_SSH_COMMAND": plink,
	}), nil)
	require.Nil(t, err)

	endpoint := cli.Endpoints.Endpoint("download", "")
	endpoint.SshUserAndHost = "user@foo.com"

	exe, args := sshGetExeAndArgs(cli.OSEnv(), endpoint)
	assert.Equal(t, plink, exe)
	assert.Equal(t, []string{"-batch", "user@foo.com"}, args)
}

func TestSSHGetExeAndArgsTortoisePlinkCommandCustomPort(t *testing.T) {
	plink := filepath.Join("Users", "joebloggs", "bin", "tortoiseplink")

	cli, err := NewClient(UniqTestEnv(map[string]string{
		"GIT_SSH_COMMAND": plink,
	}), nil)
	require.Nil(t, err)

	endpoint := cli.Endpoints.Endpoint("download", "")
	endpoint.SshUserAndHost = "user@foo.com"
	endpoint.SshPort = "8888"

	exe, args := sshGetExeAndArgs(cli.OSEnv(), endpoint)
	assert.Equal(t, plink, exe)
	assert.Equal(t, []string{"-batch", "-P", "8888", "user@foo.com"}, args)
}
// Copyright 2021 The ChromiumOS Authors // Use of this source code is governed by a BSD-style license that can be // found in the LICENSE file. // Package bluetooth contains helpers to interact with the system's bluetooth // adapters. package bluetooth import ( "context" "fmt" "chromiumos/tast/common/testexec" "chromiumos/tast/errors" ) // LogVerbosity indicates whether or not to enable verbose logging for the different bluetooth modules. type LogVerbosity struct { Bluez bool Kernel bool } // SetDebugLogLevels sets the logging level for Bluetooth debug logs. func SetDebugLogLevels(ctx context.Context, levels LogVerbosity) error { btoi := map[bool]int{ false: 0, true: 1, } if err := testexec.CommandContext(ctx, "dbus-send", "--system", "--print-reply", "--dest=org.bluez", "/org/chromium/Bluetooth", "org.chromium.Bluetooth.Debug.SetLevels", fmt.Sprintf("byte:%v", btoi[levels.Bluez]), fmt.Sprintf("byte:%v", btoi[levels.Kernel]), ).Run(testexec.DumpLogOnError); err != nil { return errors.Wrap(err, "failed to set bluetooth log levels") } return nil } // StartBTSnoopLogging starts capturing Bluetooth HCI "btsnoop" logs in a file at the specified path. // Call Start on the returned command to start log collection, and call Kill when finished to end btmon. func StartBTSnoopLogging(ctx context.Context, path string) *testexec.Cmd { return testexec.CommandContext(ctx, "/usr/bin/btmon", "-w", path) }
package indexer

import (
	"fmt"
	"strings"

	v1 "k8s.io/api/core/v1"
	metav1 "k8s.io/apimachinery/pkg/apis/meta/v1"
	"k8s.io/client-go/tools/cache"
)

// UsersIndexFunc indexes a pod under each user listed in its comma-separated
// "users" annotation.
func UsersIndexFunc(obj interface{}) ([]string, error) {
	// BUG FIX: the unchecked type assertion here used to panic on any
	// non-Pod object handed to the indexer.
	pod, ok := obj.(*v1.Pod)
	if !ok {
		return nil, fmt.Errorf("expected *v1.Pod but got %T", obj)
	}
	usersString := pod.Annotations["users"]
	if usersString == "" {
		// No annotation: index under no keys rather than under "".
		return nil, nil
	}
	return strings.Split(usersString, ","), nil
}

// Indexer demonstrates a client-go cache indexed by the "users" annotation:
// it adds three pods and prints the names of the pods used by "ernie".
func Indexer() error {
	index := cache.NewIndexer(cache.MetaNamespaceKeyFunc, cache.Indexers{"byUsers": UsersIndexFunc})

	pod1 := &v1.Pod{ObjectMeta: metav1.ObjectMeta{Name: "one", Annotations: map[string]string{"users": "ernie,bert"}}}
	pod2 := &v1.Pod{ObjectMeta: metav1.ObjectMeta{Name: "two", Annotations: map[string]string{"users": "bert,oscar"}}}
	pod3 := &v1.Pod{ObjectMeta: metav1.ObjectMeta{Name: "three", Annotations: map[string]string{"users": "ernie,elmo"}}}

	// BUG FIX: Add errors were previously dropped on the floor.
	for _, pod := range []*v1.Pod{pod1, pod2, pod3} {
		if err := index.Add(pod); err != nil {
			return err
		}
	}

	erniePods, err := index.ByIndex("byUsers", "ernie")
	if err != nil {
		return err
	}
	for _, val := range erniePods {
		fmt.Println(val.(*v1.Pod).Name)
	}
	return nil
}
package utils

import (
	"fmt"
	"hash/fnv"
	"log"
	"os"
	"os/exec"
	"path/filepath"
	"regexp"
	"strings"
	"time"

	"github.com/jwalton/gchalk"
)

// PatternsInPath Checks if any of the provided patterns is found in the path.
// Invalid patterns abort the program via log.Fatal.
func PatternsInPath(patterns []string, path string) bool {
	for _, pattern := range patterns {
		match, err := regexp.MatchString(pattern, path)
		if err != nil {
			log.Fatal(err)
		}
		if match {
			return true
		}
	}
	return false
}

// GetFileHash returns the FNV-1a 64-bit hash of the file's contents.
func GetFileHash(path string) (uint64, error) {
	var fileHash uint64 = 0
	// this reads the whole file in memory
	contents, err := os.ReadFile(path)
	if err != nil {
		return fileHash, err
	}
	h := fnv.New64a()
	h.Write(contents) // fnv's Write never returns an error
	fileHash = h.Sum64()
	return fileHash, nil
}

// FilesHash maps a file path to the last known hash of its contents.
type FilesHash map[string]uint64

// ComputeChanges walks the configured root, updates filesHash in place, and
// returns the paths whose contents changed since the previous call.
func ComputeChanges(filesHash FilesHash, config *Config) ([]string, error) {
	var changedFiles []string
	err := filepath.Walk(config.RootPath, func(path string, info os.FileInfo, err error) error {
		// BUG FIX: propagate walk errors first; when err != nil, info may
		// be nil and the IsDir call below would panic.
		if err != nil {
			return err
		}
		// Ignore directories, excluded patterns and patterns not present in the include filter
		if info.IsDir() || PatternsInPath(config.ExcludePatterns, path) || !PatternsInPath(config.IncludePatterns, path) {
			return nil
		}
		hash, err := GetFileHash(path)
		if err != nil {
			return fmt.Errorf("error occurred while trying to get hash for %s: %w", path, err)
		}
		elem, ok := filesHash[path]
		// if file is not in the hash map or the hashes don't match => register a file change
		if !ok || elem != hash {
			changedFiles = append(changedFiles, path)
			// update hash for changed file only.
			// there is no need to update hash for all files
			filesHash[path] = hash
		}
		return nil
	})
	return changedFiles, err
}

// Styles applied to command/error lines in the log output.
var CommandStyle = gchalk.WithBold().Green
var ErrorStyle = gchalk.WithBold().Red

// ExecuteCommands runs each configured command, substituting the changed-file
// list for the placeholder. Execution stops at the first failing command.
func ExecuteCommands(config *Config, changedFiles []string) {
	log.Println(gchalk.WithBold().Blue("Running commands..."))
	for _, cmd := range config.Commands {
		files := strings.Join(changedFiles, " ")
		cmd = strings.ReplaceAll(cmd, config.FilesPlaceholder, files)

		// split the command into parts (a part is any whitespace separated chain of chars)
		command := strings.Fields(cmd)
		if len(command) == 0 {
			// BUG FIX: a blank/whitespace-only command used to panic on
			// command[0]; skip it instead.
			continue
		}
		executable, args := command[0], command[1:]

		out, err := exec.Command(executable, args...).CombinedOutput()

		commandLine := fmt.Sprintf("%s %s:", config.Delimiter, cmd)
		// BUG FIX: use Print, not Printf — cmd may contain '%' which
		// Printf would misinterpret as format verbs (go vet: printf).
		log.Print(CommandStyle(commandLine))
		if len(out) > 0 {
			log.Printf("\n%s\n", out)
		} else {
			log.Println(CommandStyle("...ok"))
		}

		if err != nil {
			log.Println(err)
			log.Print(ErrorStyle(fmt.Sprintf("Error while executing: `%s`\n", cmd)))
			log.Print(ErrorStyle("Interrupting further execution.\n\n"))
			break
		}
	}
}

// getSleepDuration clamps the configured polling interval to defaultInterval
// as a lower bound.
func getSleepDuration(configInterval int, defaultInterval time.Duration) time.Duration {
	// Compute sleep duration in seconds. Minimum sleep is 1s.
	sleepDuration := time.Duration(configInterval) * time.Second
	if sleepDuration < defaultInterval {
		sleepDuration = defaultInterval
	}
	return sleepDuration
}

// ChangedFilesPlaceholder is the default token replaced by the changed-file list.
const ChangedFilesPlaceholder = "<files>"

// Run polls the filesystem forever, executing the configured commands
// whenever watched files change.
func Run(config *Config) {
	// Renamed from FilesHash: the local used to shadow the type name.
	filesHash := make(FilesHash)
	sleepDuration := getSleepDuration(config.Interval, time.Second)

	// Set default placeholder for changed files if not provided
	if config.FilesPlaceholder == "" {
		config.FilesPlaceholder = ChangedFilesPlaceholder
	}

	// the first iteration of the loop will mark all files as changed
	// since we are just building the file hash. This will lead to all
	// commands being executed even though no actual file changes have happened.
	// Disable command execution on startup iteration.
	isStartup := true
	for {
		// todo check for keypresses and exit gracefully
		time.Sleep(sleepDuration)
		changedFiles, err := ComputeChanges(filesHash, config)
		if err != nil {
			log.Fatal(err)
		}
		if isStartup {
			isStartup = false
			continue
		}
		if len(changedFiles) == 0 {
			continue
		}
		ExecuteCommands(config, changedFiles)
	}
}
package pkg

import "fmt"

const (
	// HTTP methods
	Get    = "GET"
	Post   = "POST"
	Put    = "PUT"
	Delete = "DELETE"

	// Product endpoints
	Product           = "/entity/product"
	ProductDeleteList = Product + "/delete"
	ProductAttribute  = Product + "/metadata/attributes/"
)

// TakeProduct returns the method and endpoint for fetching a single product.
func TakeProduct(id string) (string, string) {
	return Get, fmt.Sprintf("%s/%s", Product, id)
}

// TakeProductList returns the method and endpoint for fetching all products.
func TakeProductList() (string, string) {
	return Get, Product
}

// TakeProductAttribute returns the method and endpoint for fetching a single
// product attribute.
// BUG FIX: ProductAttribute already ends with "/", so the previous
// fmt.Sprintf("%s/%s", ...) produced a double slash ("...attributes//<id>").
func TakeProductAttribute(id string) (string, string) {
	return Get, ProductAttribute + id
}

// CreateProduct returns the method and endpoint for creating a product.
func CreateProduct() (string, string) {
	return Post, Product
}

// DeleteProduct returns the method and endpoint for deleting a single product.
func DeleteProduct(id string) (string, string) {
	return Delete, fmt.Sprintf("%s/%s", Product, id)
}

// DeleteProductList returns the method and endpoint for deleting several
// products in one request.
func DeleteProductList() (string, string) {
	return Post, ProductDeleteList
}

// UpdateProduct returns the method and endpoint for updating a product.
func UpdateProduct(id string) (string, string) {
	return Put, fmt.Sprintf("%s/%s", Product, id)
}