prompt
large_stringlengths
70
991k
completion
large_stringlengths
0
1.02k
<|file_name|>User.java<|end_file_name|><|fim▁begin|>package ruboweb.pushetta.back.model; import java.security.MessageDigest; import java.security.NoSuchAlgorithmException; import java.security.SecureRandom; import javax.persistence.Column; import javax.persistence.Entity; import javax.persistence.Table; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.springframework.data.jpa.domain.AbstractPersistable; @Entity @Table(name = "user") public class User extends AbstractPersistable<Long> { private static final Logger logger = LoggerFactory.getLogger(User.class); private static final long serialVersionUID = 6088280461151862299L; @Column(nullable = false) private String name; @Column(nullable = false) private String token; public User() { } public User(String name) { this.name = name; this.token = this.generateToken(); } /** * @return the name */ public String getName() { return name; } /** * @param name * the name to set */ public void setName(String name) { this.name = name; } /** * @return the token */ public String getToken() { return token; } /** * @param token * the token to set */ public void setToken(String token) { this.token = token; } private String generateToken() { try { SecureRandom prng = SecureRandom.getInstance("SHA1PRNG"); String randomNum = new Integer(prng.nextInt()).toString(); MessageDigest sha = MessageDigest.getInstance("SHA-1"); byte[] bytes = sha.digest(randomNum.getBytes()); StringBuilder result = new StringBuilder(); <|fim▁hole|> for (int idx = 0; idx < bytes.length; ++idx) { b = bytes[idx]; result.append(digits[(b & 240) >> 4]); result.append(digits[b & 15]); } return result.toString(); } catch (NoSuchAlgorithmException ex) { logger.error("generateToken() -- " + ex.getMessage()); } return null; } }<|fim▁end|>
char[] digits = { '0', '1', '2', '3', '4', '5', '6', '7', '8', '9', 'a', 'b', 'c', 'd', 'e', 'f' }; byte b;
<|file_name|>main_menu.py<|end_file_name|><|fim▁begin|># Copyright 2008 Owen Taylor # # This file is part of Reinteract and distributed under the terms # of the BSD license. See the file COPYING in the Reinteract # distribution for full details. # ######################################################################## import glib from about_dialog import AboutDialog from application import application from reinteract.native_main_menu import NativeMainMenu class MainMenu(NativeMainMenu): """This class is an interface to OS X main menu. (The interface of this class could likely be used for other types of main menu if the need ever arises.) The actual heavy-lifting is done in the NativeMainMenu superclass which is implemented in the native-code wrapper application. Here we just forward activated menu items in one direction, and enable/disabling of menu items in the other direction. """ def __init__(self): NativeMainMenu.__init__(self) self.__active_window = None self.__action_to_method_name = {} for action_name in self.get_action_names(): method_name = 'on_' + action_name.replace('-', '_') self.__action_to_method_name[action_name] = method_name self.__update_sensitivity() def run_action(self, action_name): method_name = self.__action_to_method_name[action_name]<|fim▁hole|> else: print action_name def do_action(self, action_name): # Recursing the main loop (which we do for various messages, etc), is a bad thing # to do out of a Quartz menu callback, so defer the real work to the next run of # the main loop glib.idle_add(self.run_action, action_name, priority=glib.PRIORITY_HIGH) def on_about(self): application.show_about_dialog() def on_new_notebook(self): application.create_notebook_dialog() def on_open_notebook(self): application.open_notebook_dialog() def on_quit(self): application.quit() def window_activated(self, window): if window != self.__active_window: self.__active_window = window self.__update_sensitivity() def window_deactivated(self, window): if window == 
self.__active_window: self.__active_window = None self.__update_sensitivity() def __update_sensitivity(self): for action_name, method_name in self.__action_to_method_name.iteritems(): if hasattr(self, method_name): pass # always active elif self.__active_window and hasattr(self.__active_window, method_name): self.enable_action(action_name) else: self.disable_action(action_name) if self.__active_window: self.__active_window.update_sensitivity() main_menu = MainMenu()<|fim▁end|>
if self.__active_window and hasattr(self.__active_window, method_name): getattr(self.__active_window, method_name)(None) elif hasattr(self, method_name): getattr(self, method_name)()
<|file_name|>cbicqc_incoming.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python3 """ Rename and organize Horos QC exported data in <BIDS Root>/incoming and place in <BIDS Root>/sourcedata AUTHOR ---- Mike Tyszka, Ph.D. MIT License Copyright (c) 2019 Mike Tyszka Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
""" import os import sys from glob import glob import argparse from pathlib import Path import pydicom from shutil import rmtree def main(): parser = argparse.ArgumentParser(description='Fix subject and session directory naming in Horos output') parser.add_argument('-d', '--dataset', default='.', help='BIDS dataset directory containing sourcedata subdirectory') # Parse command line arguments args = parser.parse_args() dataset_dir = os.path.realpath(args.dataset) incoming_dir = os.path.join(dataset_dir, 'incoming') sourcedata_dir = os.path.join(dataset_dir, 'sourcedata') qc_dir = os.path.join(sourcedata_dir, 'QC') # Create single QC subject print("Checking that QC subject exists in sourcedata") if os.path.isdir(qc_dir): print(" It does - continuning") else: print(" QC subject does not exist - creating QC subject in sourcedata") os.makedirs(qc_dir, exist_ok=True) # Loop over all Qc study directories in sourcedata # Expect subject/session directory names in the form "Qc_<session ID>_*/<session dir>/" # Move session subdirectories from Qc_*/<session dir> to Qc/<ScanDate> print("Scanning for incoming QC studies") for inc_qc_dir in glob(os.path.join(incoming_dir, 'Qc*')): print("") print(" Processing {}".format(inc_qc_dir)) # There should be only one session subdirectory dlist = list(glob(os.path.join(inc_qc_dir, '*'))) if len(dlist) > 0: ses_dir = dlist[0] # Get first DICOM file in ses_dir at any level first_dcm = str(list(Path(ses_dir).rglob("*.dcm"))[0]) # Get acquisition date from DICOM header acq_date = acquisition_date(first_dcm) # Destination session directory name in QC subject folder dest_dir = os.path.join(qc_dir, acq_date) # Move and rename session subdirectory print(' Moving %s to %s' % (ses_dir, dest_dir)) os.rename(ses_dir, dest_dir) # Delete incoming Qc_* directory print(' Deleting %s' % inc_qc_dir) rmtree(inc_qc_dir) def acquisition_date(dcm_fname): """ Extract acquisition date from DICOM header :param dcm_fname: DICOM filename :return acq_date: str, 
acquisition date (YYYYMMDD) """ # Default return date acq_date = '19010101' if not os.path.isfile(dcm_fname): print('* File not found - %s' % dcm_fname) try: ds = pydicom.read_file(dcm_fname, force=True) except IOError: print("* Problem opening %s" % dcm_fname) raise<|fim▁hole|> if ds: acq_date = ds.AcquisitionDate else: print('* DICOM header problem - returning %s' % acq_date) return acq_date if 'main' in __name__: main()<|fim▁end|>
except AttributeError: print("* Problem opening %s" % dcm_fname) raise
<|file_name|>types.rs<|end_file_name|><|fim▁begin|>pub type Tempo = f64; pub const DEFAULT_TEMPO: Tempo = 120 as Tempo; pub type SampleRate = f64; <|fim▁hole|><|fim▁end|>
pub static DEFAULT_SAMPLE_RATE: SampleRate = 44100 as SampleRate;
<|file_name|>listcreator.py<|end_file_name|><|fim▁begin|>#!/usr/bin/python import itertools class ListCreator(): current_list = None def fcstDiracGammasInAlldimsAllIndicesContracted(self): self.current_list = set( list(itertools.permutations(['dirac_gamma<1>', 'dirac_gamma<1>'])) + list(itertools.permutations(['dirac_gamma<1>', 'dirac_gamma<1>', 'dirac_gamma<2>', 'dirac_gamma<2>'])) + list(itertools.permutations(['dirac_gamma<1>', 'dirac_gamma<1>', 'dirac_gamma<2>', 'dirac_gamma<2>', 'dirac_gamma<3>', 'dirac_gamma<3>'])) + list(itertools.permutations(['dirac_gamma<1>', 'dirac_gamma<1>', 'dirac_gamma<2>', 'dirac_gamma<2>', 'dirac_gamma<3>', 'dirac_gamma<3>', 'dirac_gamma<4>', 'dirac_gamma<4>']))) def fcstDiracGammasInAlldimsOneG5AllIndicesContracted(self): self.current_list = set( list(itertools.permutations(['dirac_gamma<1>', 'dirac_gamma<1>', 'dirac_gamma5()'])) + list(itertools.permutations(['dirac_gamma<1>', 'dirac_gamma<1>', 'dirac_gamma<2>', 'dirac_gamma<2>', 'dirac_gamma5()'])) + list(itertools.permutations(['dirac_gamma<1>', 'dirac_gamma<1>', 'dirac_gamma<2>', 'dirac_gamma<2>', 'dirac_gamma<3>', 'dirac_gamma<3>', 'dirac_gamma5()'])) + list(itertools.permutations(['dirac_gamma<1>', 'dirac_gamma<1>', 'dirac_gamma<2>', 'dirac_gamma<2>', 'dirac_gamma<3>', 'dirac_gamma<3>', 'dirac_gamma<4>', 'dirac_gamma<4>', 'dirac_gamma5()']))) def fcstDiracGammasInAlldimsOneG6AllIndicesContracted(self): self.current_list = set( list(itertools.permutations(['dirac_gamma<1>', 'dirac_gamma<1>', 'dirac_gammaR()'])) + list(itertools.permutations(['dirac_gamma<1>', 'dirac_gamma<1>', 'dirac_gamma<2>', 'dirac_gamma<2>', 'dirac_gammaR()'])) + list(itertools.permutations(['dirac_gamma<1>', 'dirac_gamma<1>', 'dirac_gamma<2>', 'dirac_gamma<2>', 'dirac_gamma<3>', 'dirac_gamma<3>', 'dirac_gammaR()'])) + list(itertools.permutations(['dirac_gamma<1>', 'dirac_gamma<1>', 'dirac_gamma<2>', 'dirac_gamma<2>', 'dirac_gamma<3>', 'dirac_gamma<3>', 'dirac_gamma<4>', 'dirac_gamma<4>', 
'dirac_gammaR()']))) def fcstDiracGammasInAlldimsOneG7AllIndicesContracted(self): self.current_list = set( list(itertools.permutations(['dirac_gamma<1>', 'dirac_gamma<1>', 'dirac_gammaL()'])) + list(itertools.permutations(['dirac_gamma<1>', 'dirac_gamma<1>', 'dirac_gamma<2>', 'dirac_gamma<2>', 'dirac_gammaL()'])) + list(itertools.permutations(['dirac_gamma<1>', 'dirac_gamma<1>', 'dirac_gamma<2>', 'dirac_gamma<2>', 'dirac_gamma<3>', 'dirac_gamma<3>', 'dirac_gammaL()'])) + list(itertools.permutations(['dirac_gamma<1>', 'dirac_gamma<1>', 'dirac_gamma<2>', 'dirac_gamma<2>', 'dirac_gamma<3>', 'dirac_gamma<3>', 'dirac_gamma<4>', 'dirac_gamma<4>', 'dirac_gammaL()']))) def fcstDiracGammasInAlldimsTwoG5AllIndicesContracted(self): self.current_list = set( list(itertools.permutations(['dirac_gamma<1>', 'dirac_gamma<1>', 'dirac_gamma5()', 'dirac_gamma5()'])) + list(itertools.permutations(['dirac_gamma<1>', 'dirac_gamma<1>', 'dirac_gamma<2>', 'dirac_gamma<2>', 'dirac_gamma5()', 'dirac_gamma5()'])) + list(itertools.permutations(['dirac_gamma<1>', 'dirac_gamma<1>', 'dirac_gamma<2>', 'dirac_gamma<2>', 'dirac_gamma<3>', 'dirac_gamma<3>', 'dirac_gamma5()', 'dirac_gamma5()']))) def fcstDiracGammasInAlldimsTwoG6AllIndicesContracted(self): self.current_list = set( list(itertools.permutations(['dirac_gamma<1>', 'dirac_gamma<1>', 'dirac_gammaR()', 'dirac_gammaR()'])) + list(itertools.permutations(['dirac_gamma<1>', 'dirac_gamma<1>', 'dirac_gamma<2>', 'dirac_gamma<2>', 'dirac_gammaR()', 'dirac_gammaR()'])) + list(itertools.permutations(['dirac_gamma<1>', 'dirac_gamma<1>', 'dirac_gamma<2>', 'dirac_gamma<2>', 'dirac_gamma<3>', 'dirac_gamma<3>', 'dirac_gammaR()', 'dirac_gammaR()']))) def fcstDiracGammasInAlldimsTwoG7AllIndicesContracted(self): self.current_list = set( list(itertools.permutations(['dirac_gamma<1>', 'dirac_gamma<1>', 'dirac_gammaL()', 'dirac_gammaL()'])) + list(itertools.permutations(['dirac_gamma<1>', 'dirac_gamma<1>', 'dirac_gamma<2>', 'dirac_gamma<2>', 'dirac_gammaL()', 
'dirac_gammaL()'])) + list(itertools.permutations(['dirac_gamma<1>', 'dirac_gamma<1>', 'dirac_gamma<2>', 'dirac_gamma<2>', 'dirac_gamma<3>', 'dirac_gamma<3>', 'dirac_gammaL()', 'dirac_gammaL()']))) def fcstDiracGammasInAlldimsOneG5OneG6AllIndicesContracted(self): self.current_list = set( list(itertools.permutations(['dirac_gamma<1>', 'dirac_gamma<1>', 'dirac_gamma5()', 'dirac_gammaR()'])) + list(itertools.permutations(['dirac_gamma<1>', 'dirac_gamma<1>', 'dirac_gamma<2>', 'dirac_gamma<2>', 'dirac_gamma5()', 'dirac_gammaR()'])) + list(itertools.permutations(['dirac_gamma<1>', 'dirac_gamma<1>', 'dirac_gamma<2>', 'dirac_gamma<2>', 'dirac_gamma<3>', 'dirac_gamma<3>', 'dirac_gamma5()', 'dirac_gammaR()']))) def fcstDiracGammasInAlldimsOneG5OneG7AllIndicesContracted(self): self.current_list = set( list(itertools.permutations(['dirac_gamma<1>', 'dirac_gamma<1>', 'dirac_gamma5()', 'dirac_gammaL()'])) + list(itertools.permutations(['dirac_gamma<1>', 'dirac_gamma<1>', 'dirac_gamma<2>', 'dirac_gamma<2>', 'dirac_gamma5()', 'dirac_gammaL()'])) + list(itertools.permutations(['dirac_gamma<1>', 'dirac_gamma<1>', 'dirac_gamma<2>', 'dirac_gamma<2>', 'dirac_gamma<3>', 'dirac_gamma<3>', 'dirac_gamma5()', 'dirac_gammaL()']))) def fcstDiracGammasInAlldimsOneG6OneG7AllIndicesContracted(self): self.current_list = set( list(itertools.permutations(['dirac_gamma<1>', 'dirac_gamma<1>', 'dirac_gammaR()', 'dirac_gammaL()'])) + list(itertools.permutations(['dirac_gamma<1>', 'dirac_gamma<1>', 'dirac_gamma<2>', 'dirac_gamma<2>', 'dirac_gammaR()', 'dirac_gammaL()'])) + list(itertools.permutations(['dirac_gamma<1>', 'dirac_gamma<1>', 'dirac_gamma<2>', 'dirac_gamma<2>', 'dirac_gamma<3>', 'dirac_gamma<3>', 'dirac_gammaR()', 'dirac_gammaL()']))) # ---------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- # 
---------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- # ---------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- def fcstDiracGammasInAlldimsOneIndexFree(self): self.current_list = set( list(itertools.permutations(['dirac_gamma<1>', 'dirac_gamma<1>', 'dirac_gamma<2>'])) + list(itertools.permutations(['dirac_gamma<1>', 'dirac_gamma<1>', 'dirac_gamma<2>', 'dirac_gamma<2>', 'dirac_gamma<3>'])) + list(itertools.permutations(['dirac_gamma<1>', 'dirac_gamma<1>', 'dirac_gamma<2>', 'dirac_gamma<2>', 'dirac_gamma<3>', 'dirac_gamma<3>', 'dirac_gamma<4>'])) + list(itertools.permutations(['dirac_gamma<1>', 'dirac_gamma<1>', 'dirac_gamma<2>', 'dirac_gamma<2>', 'dirac_gamma<3>', 'dirac_gamma<3>', 'dirac_gamma<4>', 'dirac_gamma<4>', 'dirac_gamma<5>']))) def fcstDiracGammasInAlldimsOneG5OneIndexFree(self): self.current_list = set( list(itertools.permutations(['dirac_gamma<1>', 'dirac_gamma<1>', 'dirac_gamma<2>', 'dirac_gamma5()'])) + list(itertools.permutations(['dirac_gamma<1>', 'dirac_gamma<1>', 'dirac_gamma<2>', 'dirac_gamma<2>', 'dirac_gamma<3>', 'dirac_gamma5()'])) + list(itertools.permutations(['dirac_gamma<1>', 'dirac_gamma<1>', 'dirac_gamma<2>', 'dirac_gamma<2>', 'dirac_gamma<3>', 'dirac_gamma<3>', 'dirac_gamma<4>', 'dirac_gamma5()']))) def fcstDiracGammasInAlldimsOneG6OneIndexFree(self): self.current_list = set( list(itertools.permutations(['dirac_gamma<1>', 'dirac_gamma<1>', 'dirac_gamma<2>', 'dirac_gammaR()'])) + list(itertools.permutations(['dirac_gamma<1>', 'dirac_gamma<1>', 'dirac_gamma<2>', 'dirac_gamma<2>', 'dirac_gamma<3>', 'dirac_gammaR()'])) + list(itertools.permutations(['dirac_gamma<1>', 'dirac_gamma<1>', 'dirac_gamma<2>', 'dirac_gamma<2>', 'dirac_gamma<3>', 
'dirac_gamma<3>', 'dirac_gamma<4>', 'dirac_gammaR()']))) def fcstDiracGammasInAlldimsOneG7OneIndexFree(self): self.current_list = set( list(itertools.permutations(['dirac_gamma<1>', 'dirac_gamma<1>', 'dirac_gamma<2>', 'dirac_gammaL()'])) + list(itertools.permutations(['dirac_gamma<1>', 'dirac_gamma<1>', 'dirac_gamma<2>', 'dirac_gamma<2>', 'dirac_gamma<3>', 'dirac_gammaL()'])) + list(itertools.permutations(['dirac_gamma<1>', 'dirac_gamma<1>', 'dirac_gamma<2>', 'dirac_gamma<2>', 'dirac_gamma<3>', 'dirac_gamma<3>', 'dirac_gamma<4>', 'dirac_gammaL()']))) def fcstDiracGammasInAlldimsTwoG5OneIndexFree(self): self.current_list = set( list(itertools.permutations(['dirac_gamma<1>', 'dirac_gamma<1>', 'dirac_gamma<2>', 'dirac_gamma5()', 'dirac_gamma5()'])) + list(itertools.permutations(['dirac_gamma<1>', 'dirac_gamma<1>', 'dirac_gamma<2>', 'dirac_gamma<2>', 'dirac_gamma<3>', 'dirac_gamma5()', 'dirac_gamma5()']))) def fcstDiracGammasInAlldimsTwoG6OneIndexFree(self): self.current_list = set( list(itertools.permutations(['dirac_gamma<1>', 'dirac_gamma<1>', 'dirac_gamma<2>', 'dirac_gammaR()', 'dirac_gammaR()'])) + list(itertools.permutations(['dirac_gamma<1>', 'dirac_gamma<1>', 'dirac_gamma<2>', 'dirac_gamma<2>', 'dirac_gamma<3>', 'dirac_gammaR()', 'dirac_gammaR()']))) def fcstDiracGammasInAlldimsTwoG7OneIndexFree(self): self.current_list = set( list(itertools.permutations(['dirac_gamma<1>', 'dirac_gamma<1>', 'dirac_gamma<2>', 'dirac_gammaL()', 'dirac_gammaL()'])) + list(itertools.permutations(['dirac_gamma<1>', 'dirac_gamma<1>', 'dirac_gamma<2>', 'dirac_gamma<2>', 'dirac_gamma<3>', 'dirac_gammaL()', 'dirac_gammaL()']))) def fcstDiracGammasInAlldimsOneG5OneG6OneIndexFree(self): self.current_list = set( list(itertools.permutations(['dirac_gamma<1>', 'dirac_gamma<1>', 'dirac_gamma<2>', 'dirac_gamma5()', 'dirac_gammaR()'])) + list(itertools.permutations(['dirac_gamma<1>', 'dirac_gamma<1>', 'dirac_gamma<2>', 'dirac_gamma<2>', 'dirac_gamma<3>', 'dirac_gamma5()', 'dirac_gammaR()']))) 
def fcstDiracGammasInAlldimsOneG5OneG7OneIndexFree(self): self.current_list = set( list(itertools.permutations(['dirac_gamma<1>', 'dirac_gamma<1>', 'dirac_gamma<2>', 'dirac_gamma5()', 'dirac_gammaL()'])) + list(itertools.permutations(['dirac_gamma<1>', 'dirac_gamma<1>', 'dirac_gamma<2>', 'dirac_gamma<2>', 'dirac_gamma<3>', 'dirac_gamma5()', 'dirac_gammaL()']))) def fcstDiracGammasInAlldimsOneG6OneG7OneIndexFree(self): self.current_list = set( list(itertools.permutations(['dirac_gamma<1>', 'dirac_gamma<1>', 'dirac_gamma<2>', 'dirac_gammaR()', 'dirac_gammaL()'])) + list(itertools.permutations(['dirac_gamma<1>', 'dirac_gamma<1>', 'dirac_gamma<2>', 'dirac_gamma<2>', 'dirac_gamma<3>', 'dirac_gammaR()', 'dirac_gammaL()']))) # ---------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- # ---------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- # ---------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- def fcstDiracGammasInAlldimsTwoIndicesFree(self): self.current_list = set( list(itertools.permutations(['dirac_gamma<1>', 'dirac_gamma<1>', 'dirac_gamma<2>', 'dirac_gamma<3>'])) + list(itertools.permutations(['dirac_gamma<1>', 'dirac_gamma<1>', 'dirac_gamma<2>', 'dirac_gamma<2>', 'dirac_gamma<3>', 'dirac_gamma<4>']))) def fcstDiracGammasInAlldimsOneG5TwoIndicesFree(self): self.current_list = set( list(itertools.permutations(['dirac_gamma<1>', 'dirac_gamma<1>', 'dirac_gamma<2>', 'dirac_gamma<3>', 'dirac_gamma5()'])) + list(itertools.permutations(['dirac_gamma<1>', 'dirac_gamma<1>', 'dirac_gamma<2>', 'dirac_gamma<2>', 
'dirac_gamma<3>', 'dirac_gamma<4>', 'dirac_gamma5()']))) def fcstDiracGammasInAlldimsOneG6TwoIndicesFree(self): self.current_list = set( list(itertools.permutations(['dirac_gamma<1>', 'dirac_gamma<1>', 'dirac_gamma<2>', 'dirac_gamma<3>', 'dirac_gammaR()'])) + list(itertools.permutations(['dirac_gamma<1>', 'dirac_gamma<1>', 'dirac_gamma<2>', 'dirac_gamma<2>', 'dirac_gamma<3>', 'dirac_gamma<4>', 'dirac_gammaR()']))) def fcstDiracGammasInAlldimsOneG7TwoIndicesFree(self): self.current_list = set( list(itertools.permutations(['dirac_gamma<1>', 'dirac_gamma<1>', 'dirac_gamma<2>', 'dirac_gamma<3>', 'dirac_gammaL()'])) + list(itertools.permutations(['dirac_gamma<1>', 'dirac_gamma<1>', 'dirac_gamma<2>', 'dirac_gamma<2>', 'dirac_gamma<3>', 'dirac_gamma<4>', 'dirac_gammaL()']))) def fcstDiracGammasInAlldimsTwoG5TwoIndicesFree(self): self.current_list = set( list(itertools.permutations(['dirac_gamma<1>', 'dirac_gamma<1>', 'dirac_gamma<2>', 'dirac_gamma<3>', 'dirac_gamma5()', 'dirac_gamma5()'])) + list(itertools.permutations(['dirac_gamma<1>', 'dirac_gamma<1>', 'dirac_gamma<2>', 'dirac_gamma<2>', 'dirac_gamma<3>', 'dirac_gamma<4>', 'dirac_gamma5()', 'dirac_gamma5()']))) def fcstDiracGammasInAlldimsTwoG6TwoIndicesFree(self): self.current_list = set( list(itertools.permutations(['dirac_gamma<1>', 'dirac_gamma<1>', 'dirac_gamma<2>', 'dirac_gamma<3>', 'dirac_gammaR()', 'dirac_gammaR()'])) + list(itertools.permutations(['dirac_gamma<1>', 'dirac_gamma<1>', 'dirac_gamma<2>', 'dirac_gamma<2>', 'dirac_gamma<3>', 'dirac_gamma<4>', 'dirac_gammaR()', 'dirac_gammaR()']))) def fcstDiracGammasInAlldimsTwoG7TwoIndicesFree(self): self.current_list = set( list(itertools.permutations(['dirac_gamma<1>', 'dirac_gamma<1>', 'dirac_gamma<2>', 'dirac_gamma<3>', 'dirac_gammaL()', 'dirac_gammaL()'])) + list(itertools.permutations(['dirac_gamma<1>', 'dirac_gamma<1>', 'dirac_gamma<2>', 'dirac_gamma<2>', 'dirac_gamma<3>', 'dirac_gamma<4>', 'dirac_gammaL()', 'dirac_gammaL()']))) def 
fcstDiracGammasInAlldimsOneG5OneG6TwoIndicesFree(self): self.current_list = set( list(itertools.permutations(['dirac_gamma<1>', 'dirac_gamma<1>', 'dirac_gamma<2>', 'dirac_gamma<3>', 'dirac_gamma5()', 'dirac_gammaR()']))) def fcstDiracGammasInAlldimsOneG5OneG7TwoIndicesFree(self): self.current_list = set( list(itertools.permutations(['dirac_gamma<1>', 'dirac_gamma<1>', 'dirac_gamma<2>', 'dirac_gamma<3>', 'dirac_gamma5()', 'dirac_gammaL()']))) def fcstDiracGammasInAlldimsOneG6OneG7TwoIndicesFree(self): self.current_list = set( list(itertools.permutations(['dirac_gamma<1>', 'dirac_gamma<1>', 'dirac_gamma<2>', 'dirac_gamma<3>', 'dirac_gammaR()', 'dirac_gammaL()']))) # ---------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- # ---------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- # ---------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- def fcstDiracGammasInAlldimsThreeIndicesFree(self): self.current_list = set( list(itertools.permutations(['dirac_gamma<1>', 'dirac_gamma<1>', 'dirac_gamma<2>', 'dirac_gamma<3>', 'dirac_gamma<4>'])) + list(itertools.permutations(['dirac_gamma<1>', 'dirac_gamma<1>', 'dirac_gamma<2>', 'dirac_gamma<2>', 'dirac_gamma<3>', 'dirac_gamma<4>', 'dirac_gamma<5>']))) def fcstDiracGammasInAlldimsOneG5ThreeIndicesFree(self): self.current_list = set( list(itertools.permutations(['dirac_gamma<1>', 'dirac_gamma<1>', 'dirac_gamma<2>', 'dirac_gamma<3>', 'dirac_gamma<4>', 'dirac_gamma5()' ])) + list(itertools.permutations(['dirac_gamma<1>', 'dirac_gamma<1>', 'dirac_gamma<2>', 'dirac_gamma<2>', 
'dirac_gamma<3>', 'dirac_gamma<4>', 'dirac_gamma<5>', 'dirac_gamma5()']))) def fcstDiracGammasInAlldimsOneG6ThreeIndicesFree(self): self.current_list = set( list(itertools.permutations(['dirac_gamma<1>', 'dirac_gamma<1>', 'dirac_gamma<2>', 'dirac_gamma<3>', 'dirac_gamma<4>', 'dirac_gammaR()' ])) + list(itertools.permutations(['dirac_gamma<1>', 'dirac_gamma<1>', 'dirac_gamma<2>', 'dirac_gamma<2>', 'dirac_gamma<3>', 'dirac_gamma<4>', 'dirac_gamma<5>', 'dirac_gammaR()']))) def fcstDiracGammasInAlldimsOneG7ThreeIndicesFree(self): self.current_list = set( list(itertools.permutations(['dirac_gamma<1>', 'dirac_gamma<1>', 'dirac_gamma<2>', 'dirac_gamma<3>', 'dirac_gamma<4>', 'dirac_gammaL()' ])) + list(itertools.permutations(['dirac_gamma<1>', 'dirac_gamma<1>', 'dirac_gamma<2>', 'dirac_gamma<2>', 'dirac_gamma<3>', 'dirac_gamma<4>', 'dirac_gamma<5>', 'dirac_gammaL()']))) def fcstDiracGammasInAlldimsTwoG5ThreeIndicesFree(self): self.current_list = set( list(itertools.permutations(['dirac_gamma<1>', 'dirac_gamma<1>', 'dirac_gamma<2>', 'dirac_gamma<3>', 'dirac_gamma<4>', 'dirac_gamma5()', 'dirac_gamma5()']))) def fcstDiracGammasInAlldimsTwoG6ThreeIndicesFree(self): self.current_list = set( list(itertools.permutations(['dirac_gamma<1>', 'dirac_gamma<1>', 'dirac_gamma<2>', 'dirac_gamma<3>', 'dirac_gamma<4>', 'dirac_gammaR()', 'dirac_gammaR()']))) def fcstDiracGammasInAlldimsTwoG7ThreeIndicesFree(self): self.current_list = set( list(itertools.permutations(['dirac_gamma<1>', 'dirac_gamma<1>', 'dirac_gamma<2>', 'dirac_gamma<3>', 'dirac_gamma<4>', 'dirac_gammaL()', 'dirac_gammaL()']))) def fcstDiracGammasInAlldimsOneG5OneG6ThreeIndicesFree(self): self.current_list = set( list(itertools.permutations(['dirac_gamma<1>', 'dirac_gamma<1>', 'dirac_gamma<2>', 'dirac_gamma<3>', 'dirac_gamma<4>', 'dirac_gamma5()', 'dirac_gammaR()']))) def fcstDiracGammasInAlldimsOneG5OneG7ThreeIndicesFree(self): self.current_list = set( list(itertools.permutations(['dirac_gamma<1>', 'dirac_gamma<1>', 
'dirac_gamma<2>', 'dirac_gamma<3>', 'dirac_gamma<4>', 'dirac_gamma5()', 'dirac_gammaL()']))) def fcstDiracGammasInAlldimsOneG6OneG7ThreeIndicesFree(self): self.current_list = set( list(itertools.permutations(['dirac_gamma<1>', 'dirac_gamma<1>', 'dirac_gamma<2>', 'dirac_gamma<3>', 'dirac_gamma<4>', 'dirac_gammaR()', 'dirac_gammaL()']))) # ---------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- # ---------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- # ---------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- def fcstDiracGammasInAlldimsFourIndicesFree(self): self.current_list = set( list(itertools.permutations(['dirac_gamma<1>', 'dirac_gamma<1>', 'dirac_gamma<2>', 'dirac_gamma<3>', 'dirac_gamma<4>', 'dirac_gamma<5>' ]))) def fcstDiracGammasInAlldimsOneG5FourIndicesFree(self): self.current_list = set( list(itertools.permutations(['dirac_gamma<1>', 'dirac_gamma<1>', 'dirac_gamma<2>', 'dirac_gamma<3>', 'dirac_gamma<4>', 'dirac_gamma<5>', 'dirac_gamma5()']))) def fcstDiracGammasInAlldimsOneG6FourIndicesFree(self): self.current_list = set( list(itertools.permutations(['dirac_gamma<1>', 'dirac_gamma<1>', 'dirac_gamma<2>', 'dirac_gamma<3>', 'dirac_gamma<4>', 'dirac_gamma<5>', 'dirac_gammaR()']))) def fcstDiracGammasInAlldimsOneG7FourIndicesFree(self): self.current_list = set( list(itertools.permutations(['dirac_gamma<1>', 'dirac_gamma<1>', 'dirac_gamma<2>', 'dirac_gamma<3>', 'dirac_gamma<4>', 'dirac_gamma<5>', 'dirac_gammaL()']))) def fcstDiracGammasInAlldimsTwoG5FourIndicesFree(self): self.current_list = set( 
list(itertools.permutations(['dirac_gamma<1>', 'dirac_gamma<1>', 'dirac_gamma<2>', 'dirac_gamma<3>', 'dirac_gamma<4>', 'dirac_gamma<5>', 'dirac_gamma5()', 'dirac_gamma5()']))) def fcstDiracGammasInAlldimsTwoG6FourIndicesFree(self): self.current_list = set( list(itertools.permutations(['dirac_gamma<1>', 'dirac_gamma<1>', 'dirac_gamma<2>', 'dirac_gamma<3>', 'dirac_gamma<4>', 'dirac_gamma<5>', 'dirac_gammaR()', 'dirac_gammaR()']))) def fcstDiracGammasInAlldimsTwoG7FourIndicesFree(self): self.current_list = set( list(itertools.permutations(['dirac_gamma<1>', 'dirac_gamma<1>', 'dirac_gamma<2>', 'dirac_gamma<3>', 'dirac_gamma<4>', 'dirac_gamma<5>', 'dirac_gammaL()', 'dirac_gammaL()']))) def fcstDiracGammasInAlldimsOneG5OneG6FourIndicesFree(self): self.current_list = set( list(itertools.permutations(['dirac_gamma<1>', 'dirac_gamma<1>', 'dirac_gamma<2>', 'dirac_gamma<3>', 'dirac_gamma<4>', 'dirac_gamma<5>', 'dirac_gamma5()', 'dirac_gammaR()']))) def fcstDiracGammasInAlldimsOneG5OneG7FourIndicesFree(self): self.current_list = set( list(itertools.permutations(['dirac_gamma<1>', 'dirac_gamma<1>', 'dirac_gamma<2>', 'dirac_gamma<3>', 'dirac_gamma<4>', 'dirac_gamma<5>', 'dirac_gamma5()', 'dirac_gammaL()']))) def fcstDiracGammasInAlldimsOneG6OneG7FourIndicesFree(self): self.current_list = set( list(itertools.permutations(['dirac_gamma<1>', 'dirac_gamma<1>', 'dirac_gamma<2>', 'dirac_gamma<3>', 'dirac_gamma<4>', 'dirac_gamma<5>', 'dirac_gammaR()', 'dirac_gammaL()']))) # ---------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- # ---------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- # 
---------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- # ---------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- # ---------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- # ---------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- # ---------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- # ---------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- # ---------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- def fcstDiracGammasInAlldimsOneSlashAllIndicesContracted(self): self.current_list = set( list(itertools.permutations(['dirac_gamma<1>', 'dirac_gamma<1>', 'dirac_slash(p1 comma dims)'])) + list(itertools.permutations(['dirac_gamma<1>', 'dirac_gamma<1>', 'dirac_gamma<2>', 'dirac_gamma<2>', 'dirac_slash(p1 comma dims)'])) + list(itertools.permutations(['dirac_gamma<1>', 'dirac_gamma<1>', 'dirac_gamma<2>', 'dirac_gamma<2>', 'dirac_gamma<3>', 'dirac_gamma<3>', 'dirac_slash(p1 comma dims)'])) + 
list(itertools.permutations(['dirac_gamma<1>', 'dirac_gamma<1>', 'dirac_gamma<2>', 'dirac_gamma<2>', 'dirac_gamma<3>', 'dirac_gamma<3>', 'dirac_gamma<4>', 'dirac_gamma<4>', 'dirac_slash(p1 comma dims)']))) def fcstDiracGammasInAlldimsOneG5OneSlashAllIndicesContracted(self): self.current_list = set( list(itertools.permutations(['dirac_gamma<1>', 'dirac_gamma<1>', 'dirac_slash(p1 comma dims)', 'dirac_gamma5()'])) + list(itertools.permutations(['dirac_gamma<1>', 'dirac_gamma<1>', 'dirac_gamma<2>', 'dirac_gamma<2>', 'dirac_slash(p1 comma dims)', 'dirac_gamma5()'])) + list(itertools.permutations(['dirac_gamma<1>', 'dirac_gamma<1>', 'dirac_gamma<2>', 'dirac_gamma<2>', 'dirac_gamma<3>', 'dirac_gamma<3>', 'dirac_slash(p1 comma dims)', 'dirac_gamma5()']))) def fcstDiracGammasInAlldimsOneG6OneSlashAllIndicesContracted(self): self.current_list = set( list(itertools.permutations(['dirac_gamma<1>', 'dirac_gamma<1>', 'dirac_slash(p1 comma dims)', 'dirac_gammaR()'])) + list(itertools.permutations(['dirac_gamma<1>', 'dirac_gamma<1>', 'dirac_gamma<2>', 'dirac_gamma<2>', 'dirac_slash(p1 comma dims)', 'dirac_gammaR()'])) + list(itertools.permutations(['dirac_gamma<1>', 'dirac_gamma<1>', 'dirac_gamma<2>', 'dirac_gamma<2>', 'dirac_gamma<3>', 'dirac_gamma<3>', 'dirac_slash(p1 comma dims)', 'dirac_gammaR()']))) def fcstDiracGammasInAlldimsOneG7OneSlashAllIndicesContracted(self): self.current_list = set( list(itertools.permutations(['dirac_gamma<1>', 'dirac_gamma<1>', 'dirac_slash(p1 comma dims)', 'dirac_gammaL()'])) +<|fim▁hole|> list(itertools.permutations(['dirac_gamma<1>', 'dirac_gamma<1>', 'dirac_gamma<2>', 'dirac_gamma<2>', 'dirac_slash(p1 comma dims)', 'dirac_gammaL()'])) + list(itertools.permutations(['dirac_gamma<1>', 'dirac_gamma<1>', 'dirac_gamma<2>', 'dirac_gamma<2>', 'dirac_gamma<3>', 'dirac_gamma<3>', 'dirac_slash(p1 comma dims)', 'dirac_gammaL()']))) def fcstDiracGammasInAlldimsTwoG5OneSlashAllIndicesContracted(self): self.current_list = set( 
list(itertools.permutations(['dirac_gamma<1>', 'dirac_gamma<1>', 'dirac_slash(p1 comma dims)', 'dirac_gamma5()', 'dirac_gamma5()'])) + list(itertools.permutations(['dirac_gamma<1>', 'dirac_gamma<1>', 'dirac_gamma<2>', 'dirac_gamma<2>', 'dirac_slash(p1 comma dims)', 'dirac_gamma5()', 'dirac_gamma5()']))) def fcstDiracGammasInAlldimsTwoG6OneSlashAllIndicesContracted(self): self.current_list = set( list(itertools.permutations(['dirac_gamma<1>', 'dirac_gamma<1>', 'dirac_slash(p1 comma dims)', 'dirac_gammaR()', 'dirac_gammaR()'])) + list(itertools.permutations(['dirac_gamma<1>', 'dirac_gamma<1>', 'dirac_gamma<2>', 'dirac_gamma<2>', 'dirac_slash(p1 comma dims)', 'dirac_gammaR()', 'dirac_gammaR()']))) def fcstDiracGammasInAlldimsTwoG7OneSlashAllIndicesContracted(self): self.current_list = set( list(itertools.permutations(['dirac_gamma<1>', 'dirac_gamma<1>', 'dirac_slash(p1 comma dims)', 'dirac_gammaL()', 'dirac_gammaL()'])) + list(itertools.permutations(['dirac_gamma<1>', 'dirac_gamma<1>', 'dirac_gamma<2>', 'dirac_gamma<2>', 'dirac_slash(p1 comma dims)', 'dirac_gammaL()', 'dirac_gammaL()']))) def fcstDiracGammasInAlldimsOneG5OneG6OneSlashAllIndicesContracted(self): self.current_list = set( list(itertools.permutations(['dirac_gamma<1>', 'dirac_gamma<1>', 'dirac_slash(p1 comma dims)', 'dirac_gamma5()', 'dirac_gammaR()'])) + list(itertools.permutations(['dirac_gamma<1>', 'dirac_gamma<1>', 'dirac_gamma<2>', 'dirac_gamma<2>', 'dirac_slash(p1 comma dims)', 'dirac_gamma5()', 'dirac_gammaR()']))) def fcstDiracGammasInAlldimsOneG5OneG7OneSlashAllIndicesContracted(self): self.current_list = set( list(itertools.permutations(['dirac_gamma<1>', 'dirac_gamma<1>', 'dirac_slash(p1 comma dims)', 'dirac_gamma5()', 'dirac_gammaL()'])) + list(itertools.permutations(['dirac_gamma<1>', 'dirac_gamma<1>', 'dirac_gamma<2>', 'dirac_gamma<2>', 'dirac_slash(p1 comma dims)', 'dirac_gamma5()', 'dirac_gammaL()']))) def fcstDiracGammasInAlldimsOneG6OneG7OneSlashAllIndicesContracted(self): 
self.current_list = set( list(itertools.permutations(['dirac_gamma<1>', 'dirac_gamma<1>', 'dirac_slash(p1 comma dims)', 'dirac_gammaR()', 'dirac_gammaL()'])) + list(itertools.permutations(['dirac_gamma<1>', 'dirac_gamma<1>', 'dirac_gamma<2>', 'dirac_gamma<2>', 'dirac_slash(p1 comma dims)', 'dirac_gammaR()', 'dirac_gammaL()']))) # ---------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- # ---------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- # ---------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- def fcstDiracGammasInAlldimsTwoSlashesAllIndicesContracted(self): self.current_list = set( list(itertools.permutations(['dirac_gamma<1>', 'dirac_gamma<1>', 'dirac_slash(p1 comma dims)', 'dirac_slash(p1 comma dims)'])) + list(itertools.permutations(['dirac_gamma<1>', 'dirac_gamma<1>', 'dirac_slash(p1 comma dims)', 'dirac_slash(p2 comma dims)'])) + list(itertools.permutations(['dirac_gamma<1>', 'dirac_gamma<1>', 'dirac_gamma<2>', 'dirac_gamma<2>', 'dirac_slash(p1 comma dims)', 'dirac_slash(p1 comma dims)'])) + list(itertools.permutations(['dirac_gamma<1>', 'dirac_gamma<1>', 'dirac_gamma<2>', 'dirac_gamma<2>', 'dirac_slash(p1 comma dims)', 'dirac_slash(p2 comma dims)']))) def fcstDiracGammasInAlldimsOneG5TwoSlashesAllIndicesContracted(self): self.current_list = set( list(itertools.permutations(['dirac_gamma<1>', 'dirac_gamma<1>', 'dirac_slash(p1 comma dims)', 'dirac_slash(p1 comma dims)', 'dirac_gamma5()'])) + list(itertools.permutations(['dirac_gamma<1>', 'dirac_gamma<1>', 'dirac_slash(p1 comma dims)', 'dirac_slash(p2 
comma dims)', 'dirac_gamma5()'])) + list(itertools.permutations(['dirac_gamma<1>', 'dirac_gamma<1>', 'dirac_gamma<2>', 'dirac_gamma<2>', 'dirac_slash(p1 comma dims)', 'dirac_slash(p1 comma dims)', 'dirac_gamma5()'])) + list(itertools.permutations(['dirac_gamma<1>', 'dirac_gamma<1>', 'dirac_gamma<2>', 'dirac_gamma<2>', 'dirac_slash(p1 comma dims)', 'dirac_slash(p2 comma dims)', 'dirac_gamma5()']))) def fcstDiracGammasInAlldimsOneG6TwoSlashesAllIndicesContracted(self): self.current_list = set( list(itertools.permutations(['dirac_gamma<1>', 'dirac_gamma<1>', 'dirac_slash(p1 comma dims)', 'dirac_slash(p1 comma dims)', 'dirac_gammaR()'])) + list(itertools.permutations(['dirac_gamma<1>', 'dirac_gamma<1>', 'dirac_slash(p1 comma dims)', 'dirac_slash(p2 comma dims)', 'dirac_gammaR()'])) + list(itertools.permutations(['dirac_gamma<1>', 'dirac_gamma<1>', 'dirac_gamma<2>', 'dirac_gamma<2>', 'dirac_slash(p1 comma dims)', 'dirac_slash(p1 comma dims)', 'dirac_gammaR()'])) + list(itertools.permutations(['dirac_gamma<1>', 'dirac_gamma<1>', 'dirac_gamma<2>', 'dirac_gamma<2>', 'dirac_slash(p1 comma dims)', 'dirac_slash(p2 comma dims)', 'dirac_gammaR()']))) def fcstDiracGammasInAlldimsOneG7TwoSlashesAllIndicesContracted(self): self.current_list = set( list(itertools.permutations(['dirac_gamma<1>', 'dirac_gamma<1>', 'dirac_slash(p1 comma dims)', 'dirac_slash(p1 comma dims)', 'dirac_gammaL()'])) + list(itertools.permutations(['dirac_gamma<1>', 'dirac_gamma<1>', 'dirac_slash(p1 comma dims)', 'dirac_slash(p2 comma dims)', 'dirac_gammaL()'])) + list(itertools.permutations(['dirac_gamma<1>', 'dirac_gamma<1>', 'dirac_gamma<2>', 'dirac_gamma<2>', 'dirac_slash(p1 comma dims)', 'dirac_slash(p1 comma dims)', 'dirac_gammaL()'])) + list(itertools.permutations(['dirac_gamma<1>', 'dirac_gamma<1>', 'dirac_gamma<2>', 'dirac_gamma<2>', 'dirac_slash(p1 comma dims)', 'dirac_slash(p2 comma dims)', 'dirac_gammaL()']))) def fcstDiracGammasInAlldimsTwoG5TwoSlashesAllIndicesContracted(self): self.current_list 
= set( list(itertools.permutations(['dirac_gamma<1>', 'dirac_gamma<1>', 'dirac_slash(p1 comma dims)', 'dirac_slash(p1 comma dims)', 'dirac_gamma5()', 'dirac_gamma5()'])) + list(itertools.permutations(['dirac_gamma<1>', 'dirac_gamma<1>', 'dirac_slash(p1 comma dims)', 'dirac_slash(p2 comma dims)', 'dirac_gamma5()', 'dirac_gamma5()']))) def fcstDiracGammasInAlldimsTwoG6TwoSlashesAllIndicesContracted(self): self.current_list = set( list(itertools.permutations(['dirac_gamma<1>', 'dirac_gamma<1>', 'dirac_slash(p1 comma dims)', 'dirac_slash(p1 comma dims)', 'dirac_gammaR()', 'dirac_gammaR()'])) + list(itertools.permutations(['dirac_gamma<1>', 'dirac_gamma<1>', 'dirac_slash(p1 comma dims)', 'dirac_slash(p2 comma dims)', 'dirac_gammaR()', 'dirac_gammaR()']))) def fcstDiracGammasInAlldimsTwoG7TwoSlashesAllIndicesContracted(self): self.current_list = set( list(itertools.permutations(['dirac_gamma<1>', 'dirac_gamma<1>', 'dirac_slash(p1 comma dims)', 'dirac_slash(p1 comma dims)', 'dirac_gammaL()', 'dirac_gammaL()'])) + list(itertools.permutations(['dirac_gamma<1>', 'dirac_gamma<1>', 'dirac_slash(p1 comma dims)', 'dirac_slash(p2 comma dims)', 'dirac_gammaL()', 'dirac_gammaL()']))) def fcstDiracGammasInAlldimsOneG5OneG6TwoSlashesAllIndicesContracted(self): self.current_list = set( list(itertools.permutations(['dirac_gamma<1>', 'dirac_gamma<1>', 'dirac_slash(p1 comma dims)', 'dirac_slash(p1 comma dims)', 'dirac_gamma5()', 'dirac_gammaR()'])) + list(itertools.permutations(['dirac_gamma<1>', 'dirac_gamma<1>', 'dirac_slash(p1 comma dims)', 'dirac_slash(p2 comma dims)', 'dirac_gamma5()', 'dirac_gammaR()']))) def fcstDiracGammasInAlldimsOneG5OneG7TwoSlashesAllIndicesContracted(self): self.current_list = set( list(itertools.permutations(['dirac_gamma<1>', 'dirac_gamma<1>', 'dirac_slash(p1 comma dims)', 'dirac_slash(p1 comma dims)', 'dirac_gamma5()', 'dirac_gammaL()'])) + list(itertools.permutations(['dirac_gamma<1>', 'dirac_gamma<1>', 'dirac_slash(p1 comma dims)', 'dirac_slash(p2 comma 
dims)', 'dirac_gamma5()', 'dirac_gammaL()']))) def fcstDiracGammasInAlldimsOneG6OneG7TwoSlashesAllIndicesContracted(self): self.current_list = set( list(itertools.permutations(['dirac_gamma<1>', 'dirac_gamma<1>', 'dirac_slash(p1 comma dims)', 'dirac_slash(p1 comma dims)', 'dirac_gammaR()', 'dirac_gammaL()'])) + list(itertools.permutations(['dirac_gamma<1>', 'dirac_gamma<1>', 'dirac_slash(p1 comma dims)', 'dirac_slash(p2 comma dims)', 'dirac_gammaR()', 'dirac_gammaL()']))) # ---------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- # ---------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- # ---------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- def fcstDiracGammasInAlldimsThreeSlashesAllIndicesContracted(self): self.current_list = set( list(itertools.permutations(['dirac_gamma<1>', 'dirac_gamma<1>', 'dirac_slash(p1 comma dims)', 'dirac_slash(p2 comma dims)', 'dirac_slash(p3 comma dims)'])) + list(itertools.permutations(['dirac_gamma<1>', 'dirac_gamma<1>', 'dirac_slash(p1 comma dims)', 'dirac_slash(p2 comma dims)', 'dirac_slash(p1 comma dims)'])) + list(itertools.permutations(['dirac_gamma<1>', 'dirac_gamma<1>', 'dirac_slash(p1 comma dims)', 'dirac_slash(p1 comma dims)', 'dirac_slash(p1 comma dims)']))) def fcstDiracGammasInAlldimsOneG5ThreeSlashesAllIndicesContracted(self): self.current_list = set( list(itertools.permutations(['dirac_gamma<1>', 'dirac_gamma<1>', 'dirac_slash(p1 comma dims)', 'dirac_slash(p2 comma dims)', 'dirac_slash(p3 comma dims)', 'dirac_gamma5()'])) + 
list(itertools.permutations(['dirac_gamma<1>', 'dirac_gamma<1>', 'dirac_slash(p1 comma dims)', 'dirac_slash(p2 comma dims)', 'dirac_slash(p1 comma dims)', 'dirac_gamma5()'])) + list(itertools.permutations(['dirac_gamma<1>', 'dirac_gamma<1>', 'dirac_slash(p1 comma dims)', 'dirac_slash(p1 comma dims)', 'dirac_slash(p1 comma dims)', 'dirac_gamma5()']))) def fcstDiracGammasInAlldimsOneG6ThreeSlashesAllIndicesContracted(self): self.current_list = set( list(itertools.permutations(['dirac_gamma<1>', 'dirac_gamma<1>', 'dirac_slash(p1 comma dims)', 'dirac_slash(p2 comma dims)', 'dirac_slash(p3 comma dims)', 'dirac_gammaR()'])) + list(itertools.permutations(['dirac_gamma<1>', 'dirac_gamma<1>', 'dirac_slash(p1 comma dims)', 'dirac_slash(p2 comma dims)', 'dirac_slash(p1 comma dims)', 'dirac_gammaR()'])) + list(itertools.permutations(['dirac_gamma<1>', 'dirac_gamma<1>', 'dirac_slash(p1 comma dims)', 'dirac_slash(p1 comma dims)', 'dirac_slash(p1 comma dims)', 'dirac_gammaR()']))) def fcstDiracGammasInAlldimsOneG7ThreeSlashesAllIndicesContracted(self): self.current_list = set( list(itertools.permutations(['dirac_gamma<1>', 'dirac_gamma<1>', 'dirac_slash(p1 comma dims)', 'dirac_slash(p2 comma dims)', 'dirac_slash(p3 comma dims)', 'dirac_gammaL()'])) + list(itertools.permutations(['dirac_gamma<1>', 'dirac_gamma<1>', 'dirac_slash(p1 comma dims)', 'dirac_slash(p2 comma dims)', 'dirac_slash(p1 comma dims)', 'dirac_gammaL()'])) + list(itertools.permutations(['dirac_gamma<1>', 'dirac_gamma<1>', 'dirac_slash(p1 comma dims)', 'dirac_slash(p1 comma dims)', 'dirac_slash(p1 comma dims)', 'dirac_gammaL()']))) def fcstDiracGammasInAlldimsTwoG5ThreeSlashesAllIndicesContracted(self): self.current_list = set( list(itertools.permutations(['dirac_gamma<1>', 'dirac_gamma<1>', 'dirac_slash(p1 comma dims)', 'dirac_slash(p2 comma dims)', 'dirac_slash(p3 comma dims)', 'dirac_gamma5()', 'dirac_gamma5()']))[0:600] + list(itertools.permutations(['dirac_gamma<1>', 'dirac_gamma<1>', 'dirac_slash(p1 comma 
dims)', 'dirac_slash(p2 comma dims)', 'dirac_slash(p1 comma dims)', 'dirac_gamma5()', 'dirac_gamma5()']))[0:600] + list(itertools.permutations(['dirac_gamma<1>', 'dirac_gamma<1>', 'dirac_slash(p1 comma dims)', 'dirac_slash(p1 comma dims)', 'dirac_slash(p1 comma dims)', 'dirac_gamma5()', 'dirac_gamma5()']))[0:600]) def fcstDiracGammasInAlldimsTwoG6ThreeSlashesAllIndicesContracted(self): self.current_list = set( list(itertools.permutations(['dirac_gamma<1>', 'dirac_gamma<1>', 'dirac_slash(p1 comma dims)', 'dirac_slash(p2 comma dims)', 'dirac_slash(p3 comma dims)', 'dirac_gammaR()', 'dirac_gammaR()']))[0:600] + list(itertools.permutations(['dirac_gamma<1>', 'dirac_gamma<1>', 'dirac_slash(p1 comma dims)', 'dirac_slash(p2 comma dims)', 'dirac_slash(p1 comma dims)', 'dirac_gammaR()', 'dirac_gammaR()']))[0:600] + list(itertools.permutations(['dirac_gamma<1>', 'dirac_gamma<1>', 'dirac_slash(p1 comma dims)', 'dirac_slash(p1 comma dims)', 'dirac_slash(p1 comma dims)', 'dirac_gammaR()', 'dirac_gammaR()']))[0:600]) def fcstDiracGammasInAlldimsTwoG7ThreeSlashesAllIndicesContracted(self): self.current_list = set( list(itertools.permutations(['dirac_gamma<1>', 'dirac_gamma<1>', 'dirac_slash(p1 comma dims)', 'dirac_slash(p2 comma dims)', 'dirac_slash(p3 comma dims)', 'dirac_gammaL()', 'dirac_gammaL()']))[0:600] + list(itertools.permutations(['dirac_gamma<1>', 'dirac_gamma<1>', 'dirac_slash(p1 comma dims)', 'dirac_slash(p2 comma dims)', 'dirac_slash(p1 comma dims)', 'dirac_gammaL()', 'dirac_gammaL()']))[0:600] + list(itertools.permutations(['dirac_gamma<1>', 'dirac_gamma<1>', 'dirac_slash(p1 comma dims)', 'dirac_slash(p1 comma dims)', 'dirac_slash(p1 comma dims)', 'dirac_gammaL()', 'dirac_gammaL()']))[0:600]) def fcstDiracGammasInAlldimsOneG5OneG6ThreeSlashesAllIndicesContracted(self): self.current_list = set( list(itertools.permutations(['dirac_gamma<1>', 'dirac_gamma<1>', 'dirac_slash(p1 comma dims)', 'dirac_slash(p2 comma dims)', 'dirac_slash(p3 comma dims)', 'dirac_gamma5()', 
'dirac_gammaR()']))[0:600] + list(itertools.permutations(['dirac_gamma<1>', 'dirac_gamma<1>', 'dirac_slash(p1 comma dims)', 'dirac_slash(p2 comma dims)', 'dirac_slash(p1 comma dims)', 'dirac_gamma5()', 'dirac_gammaR()']))[0:600] + list(itertools.permutations(['dirac_gamma<1>', 'dirac_gamma<1>', 'dirac_slash(p1 comma dims)', 'dirac_slash(p1 comma dims)', 'dirac_slash(p1 comma dims)', 'dirac_gamma5()', 'dirac_gammaR()']))[0:600]) def fcstDiracGammasInAlldimsOneG5OneG7ThreeSlashesAllIndicesContracted(self): self.current_list = set( list(itertools.permutations(['dirac_gamma<1>', 'dirac_gamma<1>', 'dirac_slash(p1 comma dims)', 'dirac_slash(p2 comma dims)', 'dirac_slash(p3 comma dims)', 'dirac_gamma5()', 'dirac_gammaL()']))[0:600] + list(itertools.permutations(['dirac_gamma<1>', 'dirac_gamma<1>', 'dirac_slash(p1 comma dims)', 'dirac_slash(p2 comma dims)', 'dirac_slash(p1 comma dims)', 'dirac_gamma5()', 'dirac_gammaL()']))[0:600] + list(itertools.permutations(['dirac_gamma<1>', 'dirac_gamma<1>', 'dirac_slash(p1 comma dims)', 'dirac_slash(p1 comma dims)', 'dirac_slash(p1 comma dims)', 'dirac_gamma5()', 'dirac_gammaL()']))[0:600]) def fcstDiracGammasInAlldimsOneG6OneG7ThreeSlashesAllIndicesContracted(self): self.current_list = set( list(itertools.permutations(['dirac_gamma<1>', 'dirac_gamma<1>', 'dirac_slash(p1 comma dims)', 'dirac_slash(p2 comma dims)', 'dirac_slash(p3 comma dims)', 'dirac_gammaR()', 'dirac_gammaL()']))[0:600] + list(itertools.permutations(['dirac_gamma<1>', 'dirac_gamma<1>', 'dirac_slash(p1 comma dims)', 'dirac_slash(p2 comma dims)', 'dirac_slash(p1 comma dims)', 'dirac_gammaR()', 'dirac_gammaL()']))[0:600] + list(itertools.permutations(['dirac_gamma<1>', 'dirac_gamma<1>', 'dirac_slash(p1 comma dims)', 'dirac_slash(p1 comma dims)', 'dirac_slash(p1 comma dims)', 'dirac_gammaR()', 'dirac_gammaL()']))[0:600])<|fim▁end|>
<|file_name|>cron_trigger.py<|end_file_name|><|fim▁begin|># coding=utf-8 ''' cron trigger @author: Huiyugeng ''' import datetime import trigger class CronTrigger(trigger.Trigger): def __init__(self, cron): trigger.Trigger.__init__(self, 0, 1); self.cron = cron def _is_match(self): parser = CronParser(self.cron) _date = datetime.date.today() _time = datetime.datetime.now() return parser._is_match(_date, _time) class CronParser(): def __init__(self, cron): cron_item = cron.split(' ') if len(cron_item) == 6 or len(cron_item) == 7: self.second_set = self._parse_integer(cron_item[0], 0, 59) self.minute_set = self._parse_integer(cron_item[1], 0, 59) self.hour_set = self._parse_integer(cron_item[2], 0, 23) self.day_of_month_set = self._parse_integer(cron_item[3], 1, 31) self.month_set = self._parse_month(cron_item[4]) self.day_of_week_set = self._parse_day_of_week(cron_item[5]) if len(cron_item) == 7: self.year_set = self._parse_integer(cron_item[6], 1970, 2100) def _parse_integer(self, value, min_val, max_val): result = [] range_items = [] if ',' in value: range_items = value.split(',') else: range_items.append(value) for range_item in range_items: temp_result = [] interval = 1 if '/' in range_item: temp = range_item.split('/') range_item = temp[0] interval = int(temp[1]) if interval < 1: interval = 1 if '*' in range_item: temp_result.extend(self._add_to_set(min_val, max_val)) elif '-' in range_item: item = range_item.split('-') temp_result.extend(self._add_to_set(int(item[0]), int(item[1]))) else: temp_result.append(int(range_item)) count = 0 for item in temp_result: if count % interval == 0: result.append(item) count = count + 1 return result def _add_to_set(self, start, end): result = [i for i in range(start, end + 1)] return result <|fim▁hole|> for i in range(0, 12): value = value.replace(months[i], str(i + 1)) return self._parse_integer(value, 1, 12); def _parse_day_of_week(self, value): day_of_weeks = ["MON", "TUE", "WED", "THU", "FRI", "SAT", "SUN"] for i in 
range(0, 7): value = value.replace(day_of_weeks[i], str(i + 1)); return self._parse_integer(value, 1, 7); def _is_match(self, _date, _time): # In Python datetime's weekday Monday is 0 and Sunday is 6 day_of_week = _date.weekday() + 1 result = True and \ _time.second in self.second_set and \ _time.minute in self.minute_set and \ _time.hour in self.hour_set and \ _date.day in self.day_of_month_set and \ _date.month in self.month_set and \ _date.year in self.year_set and \ day_of_week in self.day_of_week_set return result<|fim▁end|>
def _parse_month(self, value): months = ["JAN", "FEB", "MAR", "APR", "MAY", "JUN", "JUL", "AUG", "SEP", "OCT", "NOV", "DEC"]
<|file_name|>init_lib.py<|end_file_name|><|fim▁begin|># # init_lib.py # # functions for initialization # from aws_lib import SpinupError import base64 from boto import vpc, ec2 from os import environ from pprint import pprint import re import sys import time from yaml_lib import yaml_attr def read_user_data( fn ): """ Given a filename, returns the file's contents in a string. """ r = '' with open( fn ) as fh: r = fh.read() fh.close() return r def get_tags( ec, r_id ): """ Takes EC2Connection object and resource ID. Returns tags associated with that resource. """ return ec.get_all_tags(filters={ "resource-id": r_id }) def get_tag( ec, obj, tag ): """ Get the value of a tag associated with the given resource object. Returns None if the tag is not set. Warning: EC2 tags are case-sensitive. """ tags = get_tags( ec, obj.id ) found = 0 for t in tags: if t.name == tag: found = 1 break if found: return t else: return None def update_tag( obj, tag, val ): """ Given an EC2 resource object, a tag and a value, updates the given tag to val. """ for x in range(0, 5): error = False try: obj.add_tag( tag, val ) except: error = True e = sys.exc_info()[0] print "Huh, trying again ({})".format(e) time.sleep(5) if not error: print "Object {} successfully tagged.".format(obj) break return None def init_region( r ): """ Takes a region string. Connects to that region. Returns EC2Connection and VPCConnection objects in a tuple. """ # connect to region c = vpc.connect_to_region( r ) ec = ec2.connect_to_region( r ) return ( c, ec ) def init_vpc( c, cidr ): """ Takes VPCConnection object (which is actually a connection to a particular region) and a CIDR block string. Looks for our VPC in that region. Returns the boto.vpc.vpc.VPC object corresponding to our VPC. 
See: http://boto.readthedocs.org/en/latest/ref/vpc.html#boto.vpc.vpc.VPC """ # look for our VPC all_vpcs = c.get_all_vpcs() found = 0 our_vpc = None for v in all_vpcs: if v.cidr_block == cidr: our_vpc = v found = 1 break if not found: raise SpinupError( "VPC {} not found".format(cidr) ) return our_vpc def init_subnet( c, vpc_id, cidr ): """ Takes VPCConnection object, which is actually a connection to a region, and a CIDR block string. Looks for our subnet in that region. If subnet does not exist, creates it. Returns the subnet resource object on success, raises exception on failure. """ # look for our VPC all_subnets = c.get_all_subnets() found = False our_subnet = None for s in all_subnets: if s.cidr_block == cidr: #print "Found subnet {}".format(cidr) our_subnet = s found = True break if not found: our_subnet = c.create_subnet( vpc_id, cidr ) return our_subnet def set_subnet_map_public_ip( ec, subnet_id ): """ Takes ECConnection object and SubnetId string. Attempts to set the MapPublicIpOnLaunch attribute to True. FIXME: give credit to source """ orig_api_version = ec.APIVersion ec.APIVersion = '2014-06-15' ec.get_status( 'ModifySubnetAttribute', {'SubnetId': subnet_id, 'MapPublicIpOnLaunch.Value': 'true'}, verb='POST' ) ec.APIVersion = orig_api_version return None def derive_ip_address( cidr_block, delegate, final8 ): """ Given a CIDR block string, a delegate number, and an integer representing the final 8 bits of the IP address, construct and return the IP address derived from this values. For example, if cidr_block is 10.0.0.0/16, the delegate number is 10, and the final8 is 8, the derived IP address will be 10.0.10.8. 
""" result = '' match = re.match( r'\d+\.\d+', cidr_block ) if match: result = '{}.{}.{}'.format( match.group(0), delegate, final8 ) else: raise SpinupError( "{} passed to derive_ip_address() is not a CIDR block!".format(cidr_block) ) return result def get_master_instance( ec2_conn, subnet_id ): """ Given EC2Connection object and Master Subnet id, check that there is just one instance running in that subnet - this is the Master. Raise exception if the number of instances is != 0. Return the Master instance object. """ instances = ec2_conn.get_only_instances( filters={ "subnet-id": subnet_id } ) if 1 > len(instances): raise SpinupError( "There are no instances in the master subnet" ) if 1 < len(instances): raise SpinupError( "There are too many instances in the master subnet" ) return instances[0] def template_token_subst( buf, key, val ): """ Given a string (buf), a key (e.g. '@@MASTER_IP@@') and val, replace all occurrences of key in buf with val. Return the new string. """ targetre = re.compile( re.escape( key ) ) return re.sub( targetre, str(val), buf ) def process_user_data( fn, vars = [] ): """ Given filename of user-data file and a list of environment variable names, replaces @@...@@ tokens with the values of the environment variables. Returns the user-data string on success raises exception on failure. """ # Get user_data string. buf = read_user_data( fn ) for e in vars: if not e in environ: raise SpinupError( "Missing environment variable {}!".format( e ) ) buf = template_token_subst( buf, '@@'+e+'@@', environ[e] ) return buf def count_instances_in_subnet( ec, subnet_id ): """ Given EC2Connection object and subnet ID, count number of instances in that subnet and return it. 
""" instance_list = ec.get_only_instances( filters={ "subnet-id": subnet_id } ) return len(instance_list) def make_reservation( ec, ami_id, **kwargs ): """ Given EC2Connection object, delegate number, AMI ID, as well as all the kwargs referred to below, make a reservation for an instance and return the registration object. """ # extract arguments to be passed to ec.run_instances() our_kwargs = { "key_name": kwargs['key_name'], "subnet_id": kwargs['subnet_id'], "instance_type": kwargs['instance_type'], "private_ip_address": kwargs['private_ip_address'] } # Master or minion? if kwargs['master']: our_kwargs['user_data'] = kwargs['user_data'] else: # perform token substitution in user-data string u = kwargs['user_data'] u = template_token_subst( u, '@@MASTER_IP@@', kwargs['master_ip'] ) u = template_token_subst( u, '@@DELEGATE@@', kwargs['delegate_no'] ) u = template_token_subst( u, '@@ROLE@@', kwargs['role'] ) u = template_token_subst( u, '@@NODE_NO@@', kwargs['node_no'] ) our_kwargs['user_data'] = u # Make the reservation. reservation = ec.run_instances( ami_id, **our_kwargs ) # Return the reservation object. return reservation def wait_for_running( ec2_conn, instance_id ): """ Given an instance id, wait for its state to change to "running". """ print "Waiting for {} running state".format( instance_id ) while True: instances = ec2_conn.get_only_instances( instance_ids=[ instance_id ] ) print "Current state is {}".format( instances[0].state ) if instances[0].state != 'running': print "Sleeping for 5 seconds" time.sleep(5) else: print "Waiting another 5 seconds for good measure" time.sleep(5) break def wait_for_available( ec2_conn, volume_id ): """ Given a volume id, wait for its state to change to "available". 
""" print "Waiting for {} available state".format( volume_id ) while True: volumes = ec2_conn.get_all_volumes( volume_ids=[ volume_id ] ) print "Current status is {}".format( volumes[0].status ) if volumes[0].status != 'available': print "Sleeping for 5 seconds" time.sleep(5) else: break def wait_for_detachment( ec2_conn, v_id, i_id ): """ Given a volume ID and an instance ID, wait for volume to become detached. """ print "Waiting for volume {} to be detached from instnace {}".format(v_id, i_id) while True: attached_vol = ec2_conn.get_all_volumes( filters={ "volume-id": v_id, "attachment.instance-id": i_id,<|fim▁hole|> "attachment.device": "/dev/sdb" } ) print "attached_vol == {}".format(attached_vol) if attached_vol is None or len(attached_vol) == 0: print "Detached!" break else: time.sleep(5) print "Still attached."<|fim▁end|>
<|file_name|>video.js<|end_file_name|><|fim▁begin|>var dogVideos = [ "http://media.giphy.com/media/l2JHZ7CDZa6jp1rAQ/giphy.mp4", "http://media.giphy.com/media/26tnmOjq7uQ98qxZC/giphy.mp4", "http://media.giphy.com/media/26tnazn9Fm4V3VUMU/giphy.mp4", "http://media.giphy.com/media/26tnhrpR1B6iOnUgo/giphy.mp4", "http://media.giphy.com/media/26tn2A11Cgd3xvIqc/giphy.mp4", "http://media.giphy.com/media/l2JHTMc51UdCYCn2o/giphy.mp4", "http://media.giphy.com/media/l2JI9XQlqkRLYIWCA/giphy.mp4", "http://media.giphy.com/media/26tn4dAvXHkHcHncQ/giphy.mp4", "http://media.giphy.com/media/26tn956W5qLmbO9YQ/giphy.mp4", "http://media.giphy.com/media/l2JHQGY0SoanXpvck/giphy.mp4", "http://media.giphy.com/media/l2JIcb3CvvjMZ9akU/giphy.mp4", "http://media.giphy.com/media/26tmZGflCf82PGbjq/giphy.mp4", "http://media.giphy.com/media/26tn5ZBO276MhXjiw/giphy.mp4", "http://media.giphy.com/media/26tncxLvZXWff0FlS/giphy.mp4", "http://media.giphy.com/media/l2JHSUA0chBuwY63e/giphy.mp4", "http://media.giphy.com/media/l2JIkLzS2M9c5XD0I/giphy.mp4" ]; function displayRandomVideo() { var randomIndex = Math.floor((Math.random() * dogVideos.length));<|fim▁hole|> $("#video")[0].load(); } $(document).ready(function(){ displayRandomVideo(); });<|fim▁end|>
$("#dog-video").attr("src", dogVideos[randomIndex]);
<|file_name|>version.py<|end_file_name|><|fim▁begin|><|fim▁hole|>VERSION = '2017.3.10'<|fim▁end|>
NAME = 'pokerserver' DESCRIPTION = 'Poker server for our Python workshop at TNG Technology Consulting.'
<|file_name|>scatter-matrix.js<|end_file_name|><|fim▁begin|>// Heavily influenced by Mike Bostock's Scatter Matrix example // http://mbostock.github.io/d3/talk/20111116/iris-splom.html // ScatterMatrix = function(url, data, dom_id) { this.__url = url; if (data === undefined || data === null) { this.__data = undefined; } else { this.__data = data; } this.__cell_size = 80; if (dom_id === undefined) { this.__dom_id = 'body'; } else { this.__dom_id = dom_id; } }; ScatterMatrix.prototype.cellSize = function(n) { this.__cell_size = n; return this; }; ScatterMatrix.prototype.onData = function(cb) { if (this.__data) { cb(); return; } var self = this; d3.csv(self.__url, function(data) { //self.__data = data; cb(); }); }; ScatterMatrix.prototype.render = function() { var self = this; var container = d3.select(this.__dom_id).append('div') .attr('class', 'scatter-matrix-container'); var control = container.append('div') .attr('class', 'scatter-matrix-control toggled') .style('background-color', '#ffffff'); var svg = container.append('div') .attr('class', 'scatter-matrix-svg') .html('<em>Loading data...</em>'); this.onData(function() { var data = self.__data; //// NOTE: passing raw data to local data // Divide variables into string and numeric variables var original_numeric_variables = []; self.__numeric_variables = []; for (k in data[0]) { var is_numeric = true; data.forEach(function(d) { var v = d[k]; if (isNaN(+v)) is_numeric = false; }); if (is_numeric) { self.__numeric_variables.push(k); original_numeric_variables.push(k); } } var scid = 0; data.forEach(function(d) { d.scid = scid; scid += 1; }); //console.log(string_variables);//------------------------------------------------------------------ // Add controls on the left var size_control = control.append('div').attr('class', 'scatter-matrix-size-control'); var color_control = control.append('div').attr('class', 'scatter-matrix-color-control'); var filter_control = control.append('div').attr('class', 
'scatter-matrix-filter-control'); var variable_control = control.append('div').attr('class', 'scatter-matrix-variable-control'); var drill_control = control.append('div').attr('class', 'scatter-matrix-drill-control'); // shared control states var to_include = self.__numeric_variables.slice(-3, -1); var color_variable = undefined; var selected_colors = undefined; var drill_variables = []; function set_filter(variable) { filter_control.selectAll('*').remove(); if (variable) { // Get unique values for this variable var values = []; data.forEach(function(d) { var v = d[variable]; if (values.indexOf(v) < 0) { values.push(v); } }); selected_colors = values.slice(0, 5); var filter_li = filter_control .append('p').text('Filter by ' + variable + ': ') .append('ul') .selectAll('li') .data(values) .enter().append('li'); filter_li.append('input') .attr('type', 'checkbox') .attr('checked', function(d, i) { if (selected_colors.indexOf(d) >= 0) return 'checked'; return null; }) .on('click', function(d, i) { var new_selected_colors = []; for (var j in selected_colors) { var v = selected_colors[j]; if (v !== d || this.checked) { new_selected_colors.push(v); } } if (this.checked) { new_selected_colors.push(d); } selected_colors = new_selected_colors; self.__draw(self.__cell_size, svg, color_variable, selected_colors, to_include, drill_variables); }); filter_li.append('label') .html(function(d) { return d; }); } } // -----------------------------------Here starts the control menu list-------------------------------------------------------// size_a = size_control.append('div').attr('class', 'btn-group col-xs-12').style('margin-bottom', '20px'); var orgSize = self.__cell_size; size_a.append('a') .attr('class', 'btn btn-default btn-xs') .attr('href', 'javascript:void(0);') .html('-') .on('click', function() { self.__cell_size -= 50; self.__draw(self.__cell_size, svg, color_variable, selected_colors, to_include, drill_variables); }); size_a.append('a') .attr('class', 'btn btn-default 
btn-xs') .attr('href', 'javascript:void(0);') .html('Change plot size') .on('click', function() { self.__cell_size = orgSize; self.__draw(self.__cell_size, svg, color_variable, selected_colors, to_include, drill_variables); }); size_a.append('a') .attr('class', 'btn btn-default btn-xs') .attr('href', 'javascript:void(0);') .html('+') .on('click', function() { self.__cell_size += 50; self.__draw(self.__cell_size, svg, color_variable, selected_colors, to_include, drill_variables); }); //set to full screen mode;------------------------------------------------------------------------------------------------- var isRightChartFullScreenToggled = false; var windowHeight = window.innerHeight; var originalCellSize = self.__cell_size; $("#scatter-fullscreen-toggle").click(function(e) { isRightChartFullScreenToggled = !isRightChartFullScreenToggled; if (isRightChartFullScreenToggled) { self.__cell_size = (windowHeight - 250) / 2; d3.selectAll(".cell circle").attr("r", "3"); } else { self.__cell_size = originalCellSize; d3.selectAll(".cell circle").attr("r", "1"); } self.__draw(self.__cell_size, svg, color_variable, selected_colors, to_include, drill_variables); }); var variable_li = variable_control .append('p').text('Vertical Variables: ') .append('ul') .selectAll('li') .data(self.__numeric_variables) .enter().append('li'); variable_li.append('input') .attr('type', 'checkbox') .attr('checked', function(d, i) { if (to_include.indexOf(d) >= 0) return "checked"; return null; }) .on('click', function(d, i) { var new_to_include = []; for (var j in to_include) { var v = to_include[j]; if (v !== d || this.checked) { new_to_include.push(v); } } if (this.checked) { new_to_include.push(d); } to_include = new_to_include; self.__draw(self.__cell_size, svg, color_variable, selected_colors, to_include, drill_variables); }); variable_li.append('label') .html(function(d) { var i = self.__numeric_variables.indexOf(d) + 1; return '' + i + ': ' + d; }); // drill_li = // drill_control // 
.append('p').text('Horizontal Variable:') // .append('ul') // .selectAll('li') // .data(original_numeric_variables) // .enter().append('li'); // // drill_li.append('input') // .attr('type', 'checkbox') // .on('click', function(d, i) { // var new_drill_variables = []; // for (var j in drill_variables) { // var v = drill_variables[j]; // if (v !== d || this.checked) { // new_drill_variables.push(v); // } // } // if (this.checked) { // new_drill_variables.push(d); // } // drill_variables = new_drill_variables; // self.__draw(self.__cell_size, svg, color_variable, selected_colors, to_include, drill_variables); // }); // drill_li.append('label') // .html(function(d) { // return d; // }); self.__draw(self.__cell_size, svg, color_variable, selected_colors, to_include, drill_variables); }); }; // NOTE: unique id for each Circle function getCircleID(d) { var id = "scatter_" + d.scid; return id; } ScatterMatrix.prototype.__draw = function(cell_size, container_el, color_variable, selected_colors, to_include, drill_variables) { var self = this; this.onData(function() { var data = self.__data; // filter data by selected colors if (color_variable && selected_colors) { data = []; self.__data.forEach(function(d) { if (selected_colors.indexOf(d[color_variable]) >= 0) { data.push(d); } }); } container_el.selectAll('*').remove(); // If no data, don't do anything if (data.length === 0) { return; } // Parse headers from first row of data var variables_to_draw = to_include.slice(0); // Get values of the string variable var colors = []; if (color_variable) {<|fim▁hole|> // Using self.__data (all data), instead of data (data to be drawn), so // our css classes are consistent when we filter by value. self.__data.forEach(function(d) { var s = d[color_variable]; if (colors.indexOf(s) < 0) { colors.push(s); } }); } function color_class(d) { var c = d; if (color_variable && d[color_variable]) { c = d[color_variable]; } return colors.length > 0 ? 
'color-' + colors.indexOf(c) : 'color-2'; } // Size parameters var size = cell_size, padding = 10, axis_width = 20, axis_height = 15, legend_width = 0, label_height = 15; // Get x and y scales for each numeric variable var x = {}, y = {}; variables_to_draw.forEach(function(trait) { // Coerce values to numbers. data.forEach(function(d) { d[trait] = +d[trait]; }); var value = function(d) { return d[trait]; }, domain = [d3.min(data, value), d3.max(data, value)], range_x = [padding / 2, size - padding / 2], range_y = [padding / 2, size - padding / 2]; x[trait] = d3.scale.linear().domain(domain).range(range_x); y[trait] = d3.scale.linear().domain(domain).range(range_y.reverse()); }); // When drilling, user select one or more variables. The first drilled // variable becomes the x-axis variable for all columns, and each column // contains only data points that match specific values for each of the // drilled variables other than the first. var drill_values = []; var drill_degrees = [] drill_variables.forEach(function(variable) { // Skip first one, since that's just the x axis if (drill_values.length == 0) { drill_values.push([]); drill_degrees.push(1); } else { var values = []; data.forEach(function(d) { var v = d[variable]; if (v !== undefined && values.indexOf(v) < 0) { values.push(v); } }); values.sort(); drill_values.push(values); drill_degrees.push(values.length); } }); var total_columns = 1; drill_degrees.forEach(function(d) { total_columns *= d; }); // Pick out stuff to draw on horizontal and vertical dimensions if (drill_variables.length > 0) { // First drill is now the x-axis variable for all columns x_variables = []; for (var i = 0; i < total_columns; i++) { x_variables.push(drill_variables[0]); } } else x_variables = variables_to_draw.slice(0); if (drill_variables.length > 0) { // Don't draw any of the "drilled" variables in vertical dimension y_variables = []; variables_to_draw.forEach(function(variable) { if (drill_variables.indexOf(variable) < 0) { 
y_variables.push(variable); } }); } else y_variables = variables_to_draw.slice(0); y_variables = y_variables.reverse(); var filter_descriptions = 0; if (drill_variables.length > 1) { filter_descriptions = drill_variables.length - 1; } // Formatting for axis var intf = d3.format('d'); var fltf = d3.format('.f'); var scif = d3.format('.1e'); // Brush - for highlighting regions of data var brush = d3.svg.brush() .on("brushstart", brushstart) .on("brush", brush) .on("brushend", brushend); // Root panel var svg = container_el.append("svg:svg") .attr("width", label_height + size * x_variables.length + axis_width + padding + legend_width) .attr("height", size * y_variables.length + axis_height + label_height + label_height * filter_descriptions) .append("svg:g") .attr("transform", "translate(" + label_height + ",0)"); // Push legend to the side var legend = svg.selectAll("g.legend") .data(colors) .enter().append("svg:g") .attr("class", "legend") .attr("transform", function(d, i) { return "translate(" + (label_height + size * x_variables.length + padding) + "," + (i * 20 + 10) + ")"; }); legend.append("svg:text") .attr("x", 12) .attr("dy", ".31em") .text(function(d) { return d; }); var shorten = function(s) { if (s.length > 16) return s.slice(0, 12) + '...' 
+ s.slice(s.length - 8, s.length); return s; }; var reshape_axis = function(axis, k) { axis.ticks(5) .tickFormat(function(d) { if (Math.abs(+d) > 10000 || (Math.abs(d) < 0.001 && Math.abs(d) != 0)) { return scif(d); } if (parseInt(d) == +d) { return intf(d); } return fltf(d); }); return axis; }; // Draw X-axis svg.selectAll("g.x.axis") .data(x_variables) .enter().append("svg:g") .attr("class", "x axis") .attr("transform", function(d, i) { return "translate(" + i * size + ",0)"; }) .each(function(k) { var axis = reshape_axis(d3.svg.axis(), k); axis.tickSize(size * y_variables.length); d3.select(this).call(axis.scale(x[k]).orient('bottom')); }); // Draw Y-axis svg.selectAll("g.y.axis") .data(y_variables) .enter().append("svg:g") .attr("class", "y axis") .attr("transform", function(d, i) { return "translate(0," + i * size + ")"; }) .each(function(k) { var axis = reshape_axis(d3.svg.axis(), k); axis.tickSize(size * x_variables.length); d3.select(this).call(axis.scale(y[k]).orient('right')); }); // Draw scatter plot var cell = svg.selectAll("g.cell") .data(cross(x_variables, y_variables)) .enter().append("svg:g") .attr("class", "cell") .attr("transform", function(d) { return "translate(" + d.i * size + "," + d.j * size + ")"; }) .each(plot); // Add titles for y variables cell.filter(function(d) { return d.i == 0; }).append("svg:text") .attr("x", padding - size) .attr("y", -label_height) .attr("dy", ".71em") .attr("transform", function(d) { return "rotate(-90)"; }) .text(function(d) { var s = self.__numeric_variables.indexOf(d.y) + 1; s = '' + s + ': ' + d.y; return shorten(s); }); function plot(p) { var data_to_draw = data; // If drilling, compute what values of the drill variables correspond to // this column. 
// var filter = {}; if (drill_variables.length > 1) { var column = p.i; var cap = 1; for (var i = drill_variables.length - 1; i > 0; i--) { var var_name = drill_variables[i]; var var_value = undefined; if (i == drill_variables.length - 1) { // for the last drill variable, we index by % var_value = drill_values[i][column % drill_degrees[i]]; } else { // otherwise divide by capacity of subsequent variables to get value array index var_value = drill_values[i][parseInt(column / cap)]; } filter[var_name] = var_value; cap *= drill_degrees[i]; } data_to_draw = []; data.forEach(function(d) { var pass = true; for (k in filter) { if (d[k] != filter[k]) { pass = false; break; } } if (pass === true) { data_to_draw.push(d); } }); } var cell = d3.select(this); // Frame cell.append("svg:rect") .attr("class", "frame") .attr("x", padding / 2) .attr("y", padding / 2) .attr("width", size - padding) .attr("height", size - padding); // Scatter plot dots cell.selectAll("circle") .data(data_to_draw) .enter().append("svg:circle") .attr("id", function(d) { //console.log(d); return getCircleID(d); }) // .attr("class", "selected") .attr("cx", function(d) { return x[p.x](d[p.x]); }) .attr("cy", function(d) { return y[p.y](d[p.y]); }) .attr("r",function(d){ return isRightChartFullScreenToggled?2:1; }); // Add titles for x variables and drill variable values if (p.j == y_variables.length - 1) { cell.append("svg:text") .attr("x", padding) .attr("y", size + axis_height) .attr("dy", ".71em") .text(function(d) { var s = self.__numeric_variables.indexOf(d.x) + 1; s = '' + s + ': ' + d.x; return shorten(s); }); if (drill_variables.length > 1) { var i = 0; for (k in filter) { i += 1; cell.append("svg:text") .attr("x", padding) .attr("y", size + axis_height + label_height * i) .attr("dy", ".71em") .text(function(d) { return shorten(filter[k] + ': ' + k); }); } } } // Brush cell.call(brush.x(x[p.x]).y(y[p.y])); } // Clear the previously-active brush, if any function brushstart(p) { if (brush.data !== p) 
{ cell.call(brush.clear()); brush.x(x[p.x]).y(y[p.y]).data = p; } } // Highlight selected circles function brush(p) { var e = brush.extent(); svg.selectAll(".cell circle").classed("faded", function(d) { return e[0][0] <= d[p.x] && d[p.x] <= e[1][0] && e[0][1] <= d[p.y] && d[p.y] <= e[1][1] ? false : true; }); } // If brush is empty, select all circles function brushend() { if (brush.empty()) svg.selectAll(".scatter-matrix-svg .cell circle").classed("faded", true); } function cross(a, b) { var c = [], n = a.length, m = b.length, i, j; for (i = -1; ++i < n;) for (j = -1; ++j < m;) c.push({ x: a[i], i: i, y: b[j], j: j }); return c; } }); updateScatterChart(); //for selected data highlightScatterDot();//for highlighted Data update_sc_colorsOnly(); };<|fim▁end|>
<|file_name|>plugin.go<|end_file_name|><|fim▁begin|>package custom_commands import ( "github.com/jmoiron/sqlx" "github.com/sgt-kabukiman/kabukibot/bot" ) type pluginStruct struct { db *sqlx.DB } func NewPlugin() *pluginStruct { return &pluginStruct{} } func (self *pluginStruct) Name() string { return "custom_commands" } func (self *pluginStruct) Setup(bot *bot.Kabukibot) { self.db = bot.Database() } func (self *pluginStruct) CreateWorker(channel bot.Channel) bot.PluginWorker { return &worker{ channel: channel, acl: channel.ACL(), db: self.db, }<|fim▁hole|><|fim▁end|>
}
<|file_name|>primality_trial_div.rs<|end_file_name|><|fim▁begin|>//Implements http://rosettacode.org/wiki/Primality_by_Trial_Division use std::iter::range_step; fn is_prime(nb: int) -> bool { if nb%2 == 0 { return false; } else { for i in range_step(3,(nb as f32).sqrt() as int + 1, 2) { if nb%i == 0 { return false; } } } true } <|fim▁hole|> println!("{:b}", is_prime(62773913)); // The product of the 1000th and 1001st primes. }<|fim▁end|>
fn main() { println!("{:b}", is_prime(15485863)); // The 1 000 000th prime.
<|file_name|>basic.rs<|end_file_name|><|fim▁begin|>extern crate lichen; use lichen::parse::Parser; use lichen::var::Var; use lichen::eval::Evaluator; fn main() { //load the lichen source file as a string let bytes = include_bytes!("basic.ls"); let mut src = String::from_utf8_lossy(bytes); let mut env = Parser::parse_blocks(src.to_mut()).expect("ERROR: Unable to parse source").into_env(); //parse the source and build the environment let mut ev = Evaluator::new(&mut env); // build the evaluator based on the environment<|fim▁hole|> match var { Var::String(s) => { println!("{:}", s); }, // print out the emitted variables _ => {}, } } // if we wanted to we could look at the next_node returned, // and advance it manually if needed // you'd do this if the node were an await-style node, // which either advances or continues on during the next step // see the contrived example to see this } }<|fim▁end|>
while let Some((vars, _next_node)) = ev.next() { // here we loop through the evaluator steps for var in vars {
<|file_name|>simple-remote.py<|end_file_name|><|fim▁begin|># run scripts/jobslave-nodatabase.py import os os.environ["SEAMLESS_COMMUNION_ID"] = "simple-remote" os.environ["SEAMLESS_COMMUNION_INCOMING"] = "localhost:8602" import seamless seamless.set_ncores(0) from seamless import communion_server communion_server.configure_master( buffer=True, transformation_job=True, transformation_status=True, ) from seamless.core import context, cell, transformer, unilink ctx = context(toplevel=True) ctx.cell1 = cell().set(1) ctx.cell2 = cell().set(2) ctx.result = cell() ctx.tf = transformer({ "a": "input", "b": "input", "c": "output" }) ctx.cell1_unilink = unilink(ctx.cell1) ctx.cell1_unilink.connect(ctx.tf.a) ctx.cell2.connect(ctx.tf.b) ctx.code = cell("transformer").set("c = a + b") ctx.code.connect(ctx.tf.code) ctx.result_unilink = unilink(ctx.result) ctx.tf.c.connect(ctx.result_unilink) ctx.result_copy = cell() ctx.result.connect(ctx.result_copy) ctx.compute(0.1) print(ctx.cell1.value) print(ctx.code.value) ctx.compute() print(ctx.result.value, ctx.status) print(ctx.tf.exception) ctx.cell1.set(10) ctx.compute() print(ctx.result.value, ctx.status) ctx.code.set("c = a + b + 1000") ctx.compute() print(ctx.result.value, ctx.status) print("Introduce delay...") ctx.code.set("import time; time.sleep(2); c = -(a + b)") ctx.compute(1.0)<|fim▁hole|>print(ctx.result.value, ctx.status)<|fim▁end|>
print("after 1.0 sec...") print(ctx.result.value, ctx.status) print("...") ctx.compute()
<|file_name|>server.py<|end_file_name|><|fim▁begin|>############################################################################### # # # Peekaboo Extended Email Attachment Behavior Observation Owl # # # # server.py # ############################################################################### # # # Copyright (C) 2016-2020 science + computing ag # # # # This program is free software: you can redistribute it and/or modify # # it under the terms of the GNU General Public License as published by # # the Free Software Foundation, either version 3 of the License, or (at # # your option) any later version. # # # # This program is distributed in the hope that it will be useful, but # # WITHOUT ANY WARRANTY; without even the implied warranty of # # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU # # General Public License for more details. # # # # You should have received a copy of the GNU General Public License # # along with this program. If not, see <http://www.gnu.org/licenses/>. # # # ############################################################################### """ This module implements the Peekaboo server, i.e. the frontend to the client. """ import asyncio import email.utils import logging import urllib.parse import sanic import sanic.headers import sanic.response from peekaboo.db import PeekabooDatabaseError logger = logging.getLogger(__name__) class PeekabooServer: """ A class wrapping the server components of Peekaboo. """ def __init__(self, host, port, job_queue, sample_factory, request_queue_size, db_con): """ Initialise a new server and start it. All error conditions are returned as exceptions. @param host: The local address to bind the socket to. @type host: String @param port: The local port to listen on for client connections. @type port: int @param job_queue: A reference to the job queue for submission of samples. @type job_queue: JobQueue @param sample_factory: A reference to a sample factory for creating new samples. 
@type sample_factory: SampleFactory @param request_queue_size: Number of requests that may be pending on the socket. @type request_queue_size: int """ logger.debug('Starting up server.') self.app = sanic.Sanic("PeekabooAV", configure_logging=False) self.app.config.FALLBACK_ERROR_FORMAT = "json" # silence sanic to a reasonable amount logging.getLogger('sanic.root').setLevel(logging.WARNING) logging.getLogger('sanic.access').setLevel(logging.WARNING) self.loop = asyncio.get_event_loop() self.server_coroutine = self.app.create_server( host=host, port=port, return_asyncio_server=True, backlog=request_queue_size, asyncio_server_kwargs=dict(start_serving=False)) self.server = None self.job_queue = job_queue self.sample_factory = sample_factory self.db_con = db_con # remember for diagnostics self.host = host self.port = port self.app.add_route(self.hello, '/') self.app.add_route(self.ping, '/ping') self.app.add_route(self.scan, "/v1/scan", methods=['POST']) self.app.add_route( self.report, '/v1/report/<job_id:int>', methods=['GET']) async def hello(self, _): """ hello endpoint as fallback and catch all @returns: hello world json response """ return sanic.response.json({'hello': 'PeekabooAV'}) async def ping(self, _): """ ping endpoint for diagnostics @returns: pong json response """ return sanic.response.json({'answer': 'pong'}) async def scan(self, request): """ scan endpoint for job submission @param request: sanic request object @type request: sanic.Request @returns: json response containing ID of newly created job """ # this is sanic's multipart/form-data parser in a version that knows # that our file field contains binary data. This allows transferring # files without a filename. 
The generic parser would treat those as # text fields and try to decode them using the form charset or UTF-8 as # a fallback and cause errors such as: UnicodeDecodeError: 'utf-8' # codec can't decode byte 0xc0 in position 1: invalid start byte content_type, parameters = sanic.headers.parse_content_header( request.content_type) # application/x-www-form-urlencoded is inefficient at transporting # binary data. Also it needs a separate field to transfer the filename. # Make clear here that we do not support that format (yet). if content_type != 'multipart/form-data': logger.error('Invalid content type %s', content_type) return sanic.response.json( {'message': 'Invalid content type, use multipart/form-data'}, 400) boundary = parameters["boundary"].encode("utf-8") form_parts = request.body.split(boundary) # split above leaves preamble in form_parts[0] and epilogue in # form_parts[2] num_fields = len(form_parts) - 2 if num_fields <= 0: logger.error('Invalid MIME structure in request, no fields ' 'or preamble or epilogue missing') return sanic.response.json( {'message': 'Invalid MIME structure in request'}, 400) if num_fields != 1: logger.error('Invalid number of fields in form: %d', num_fields) return sanic.response.json( {'message': 'Invalid number of fields in form, we accept ' 'only one field "file"'}, 400) form_part = form_parts[1] file_name = None content_type = None field_name = None line_index = 2 line_end_index = 0 while line_end_index != -1: line_end_index = form_part.find(b'\r\n', line_index) # this constitutes a hard requirement for the multipart headers # (and filenames therein) to be UTF-8-encoded. There are some # obscure provisions for transferring an encoding in RFC7578 # section 5.1.2 for HTML forms which don't apply here so its # fallback to UTF-8 applies. This is no problem for our field name # (ASCII) and file names in RFC2231 encoding. For HTML5-style # percent-encoded filenames it means that whatever isn't # percent-encoded needs to be UTF-8 encoded. 
There are no rules in # HTML5 currently to percent-encode any UTF-8 byte sequences. form_line = form_part[line_index:line_end_index].decode('utf-8') line_index = line_end_index + 2 if not form_line: break colon_index = form_line.index(':') idx = colon_index + 2 form_header_field = form_line[0:colon_index].lower() # parse_content_header() reverts some of the percent encoding as # per HTML5 WHATWG spec. As it is a "living standard" (i.e. moving # target), it has changed over the years. There used to be # backslash doubling and explicit control sequence encoding. As of # this writing this has been changed to escaping only newline, # linefeed and double quote. Sanic only supports the double quote # part of that: %22 are reverted back to %. Luckily this interacts # reasonably well with RFC2231 decoding below since that would do # the same. # # There is no way to tell what version of the standard (or draft # thereof) the client was following when encoding. It seems accepted # practice in the browser world to just require current versions of # everything so their behaviour hopefully converges eventually. # This is also the reason why we do not try to improve upon it here # because it's bound to become outdated. # # NOTE: Since we fork the sanic code here we need to keep track of # its changes, particularly how it interacts with RFC2231 encoding # if escaping of the escape character %25 is ever added to the # HTML5 WHATWG spec. In that case parse_content_header() would # start breaking the RFC2231 encoding which would explain why its # use is forbidden in RFC7578 section 4.2 via RFC5987. 
form_header_value, form_parameters = sanic.headers.parse_content_header( form_line[idx:] ) if form_header_field == 'content-disposition': field_name = form_parameters.get('name') file_name = form_parameters.get('filename') # non-ASCII filenames in RFC2231, "filename*" format if file_name is None and form_parameters.get('filename*'): encoding, _, value = email.utils.decode_rfc2231( form_parameters['filename*'] ) file_name = urllib.parse.unquote(value, encoding=encoding) elif form_header_field == 'content-type': content_type = form_header_value if field_name != 'file': logger.error('Field file missing from request') return sanic.response.json( {'message': 'Field "file" missing from request'}, 400) file_content = form_part[line_index:-4] content_disposition = request.headers.get('x-content-disposition') sample = self.sample_factory.make_sample( file_content, file_name, content_type, content_disposition) try: await self.db_con.analysis_add(sample) except PeekabooDatabaseError as dberr: logger.error('Failed to add analysis to database: %s', dberr) return sanic.response.json( {'message': 'Failed to add analysis to database'}, 500) if not self.job_queue.submit(sample): logger.error('Error submitting sample to job queue') return sanic.response.json( {'message': 'Error submitting sample to job queue'}, 500) # send answer to client return sanic.response.json({'job_id': sample.id}, 200) async def report(self, _, job_id): """ report endpoint for report retrieval by job ID @param request: sanic request object @type request: sanic.Request @param job_id: job ID extracted from endpoint path @type job_id: int @returns: report json response """ if not job_id: return sanic.response.json( {'message': 'job ID missing from request'}, 400) try: job_info = await self.db_con.analysis_retrieve(job_id) except PeekabooDatabaseError as dberr: logger.error('Failed to retrieve analysis result from ' 'database: %s', dberr) return sanic.response.json( {'message': 'Failed to retrieve analysis 
result ' 'from database'}, 500) if job_info is None: logger.debug('No analysis result yet for job %d', job_id) return sanic.response.json( {'message': 'No analysis result yet for job %d' % job_id}, 404) reason, result = job_info return sanic.response.json({ 'result': result.name, 'reason': reason, # FIXME: depends on saving the report to the database # 'report': report, }, 200) def serve(self): """ Serves requests until shutdown is requested from the outside. """ self.server = self.loop.run_until_complete(self.server_coroutine) # sanic 21.9 introduced an explicit startup that finalizes the app, # particularly the request routing. So we need to run it if present. if hasattr(self.server, 'startup'): self.loop.run_until_complete(self.server.startup()) self.loop.run_until_complete(self.server.start_serving())<|fim▁hole|> logger.debug('Server shut down.') def shut_down(self): """ Triggers a shutdown of the server, used by the signal handler and potentially other components to cause the main loop to exit. """ logger.debug('Server shutdown requested.') if self.server is not None: self.server.close()<|fim▁end|>
logger.info('Peekaboo server is now listening on %s:%d', self.host, self.port) self.loop.run_until_complete(self.server.wait_closed())
<|file_name|>sessions_api.py<|end_file_name|><|fim▁begin|>import requests from flask import session, Blueprint, redirect from flask import request from grano import authz from grano.lib.exc import BadRequest from grano.lib.serialisation import jsonify from grano.views.cache import validate_cache from grano.core import db, url_for, app from grano.providers import github, twitter, facebook from grano.model import Account from grano.logic import accounts blueprint = Blueprint('sessions_api', __name__) @blueprint.route('/api/1/sessions', methods=['GET']) def status(): permissions = {} if authz.logged_in():<|fim▁hole|> for permission in request.account.permissions: permissions[permission.project.slug] = { 'reader': permission.reader, 'editor': permission.editor, 'admin': permission.admin } keys = { 'p': repr(permissions), 'i': request.account.id if authz.logged_in() else None } validate_cache(keys=keys) return jsonify({ 'logged_in': authz.logged_in(), 'api_key': request.account.api_key if authz.logged_in() else None, 'account': request.account if request.account else None, 'permissions': permissions }) def provider_not_enabled(name): return jsonify({ 'status': 501, 'name': 'Provider not configured: %s' % name, 'message': 'There are no OAuth credentials given for %s' % name, }, status=501) @blueprint.route('/api/1/sessions/logout', methods=['GET']) def logout(): #authz.require(authz.logged_in()) session.clear() return redirect(request.args.get('next_url', '/')) @blueprint.route('/api/1/sessions/login/github', methods=['GET']) def github_login(): if not app.config.get('GITHUB_CLIENT_ID'): return provider_not_enabled('github') callback=url_for('sessions_api.github_authorized') session.clear() if not request.args.get('next_url'): raise BadRequest("No 'next_url' is specified.") session['next_url'] = request.args.get('next_url') return github.authorize(callback=callback) @blueprint.route('/api/1/sessions/callback/github', methods=['GET']) @github.authorized_handler def 
github_authorized(resp): next_url = session.get('next_url', '/') if resp is None or not 'access_token' in resp: return redirect(next_url) access_token = resp['access_token'] session['access_token'] = access_token, '' res = requests.get('https://api.github.com/user?access_token=%s' % access_token, verify=False) data = res.json() account = Account.by_github_id(data.get('id')) data_ = { 'full_name': data.get('name'), 'login': data.get('login'), 'email': data.get('email'), 'github_id': data.get('id') } account = accounts.save(data_, account=account) db.session.commit() session['id'] = account.id return redirect(next_url) @blueprint.route('/api/1/sessions/login/twitter', methods=['GET']) def twitter_login(): if not app.config.get('TWITTER_API_KEY'): return provider_not_enabled('twitter') callback=url_for('sessions_api.twitter_authorized') session.clear() if not request.args.get('next_url'): raise BadRequest("No 'next_url' is specified.") session['next_url'] = request.args.get('next_url') return twitter.authorize(callback=callback) @blueprint.route('/api/1/sessions/callback/twitter', methods=['GET']) @twitter.authorized_handler def twitter_authorized(resp): next_url = session.get('next_url', '/') if resp is None or not 'oauth_token' in resp: return redirect(next_url) session['twitter_token'] = (resp['oauth_token'], resp['oauth_token_secret']) res = twitter.get('users/show.json?user_id=%s' % resp.get('user_id')) account = Account.by_twitter_id(res.data.get('id')) data_ = { 'full_name': res.data.get('name'), 'login': res.data.get('screen_name'), 'twitter_id': res.data.get('id') } account = accounts.save(data_, account=account) db.session.commit() session['id'] = account.id return redirect(next_url) @blueprint.route('/api/1/sessions/login/facebook', methods=['GET']) def facebook_login(): if not app.config.get('FACEBOOK_APP_ID'): return provider_not_enabled('facebook') callback=url_for('sessions_api.facebook_authorized') session.clear() if not request.args.get('next_url'): 
raise BadRequest("No 'next_url' is specified.") session['next_url'] = request.args.get('next_url') return facebook.authorize(callback=callback) @blueprint.route('/api/1/sessions/callback/facebook', methods=['GET']) @facebook.authorized_handler def facebook_authorized(resp): next_url = session.get('next_url', '/') if resp is None or not 'access_token' in resp: return redirect(next_url) session['facebook_token'] = (resp.get('access_token'), '') data = facebook.get('/me').data account = Account.by_facebook_id(data.get('id')) data_ = { 'full_name': data.get('name'), 'login': data.get('username'), 'email': data.get('email'), 'facebook_id': data.get('id') } account = accounts.save(data_, account=account) db.session.commit() session['id'] = account.id return redirect(next_url)<|fim▁end|>
<|file_name|>push.rs<|end_file_name|><|fim▁begin|>// Copyright (c) 2016-2017 Chef Software Inc. and/or applicable contributors // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and<|fim▁hole|>//! The push thread. //! //! This is the thread for distributing rumors to members. It distributes to `FANOUT` members, no //! more often than `Timing::GOSSIP_PERIOD_DEFAULT_MS`. use std::sync::atomic::Ordering; use std::thread; use std::time::Duration; use protobuf::Message; use time::SteadyTime; use zmq; use ZMQ_CONTEXT; use message::swim::{Member as ProtoMember, Membership as ProtoMembership, Rumor as ProtoRumor, Rumor_Type as ProtoRumor_Type}; use rumor::RumorKey; use member::Member; use server::Server; use server::timing::Timing; use trace::TraceKind; const FANOUT: usize = 5; /// The Push server #[derive(Debug)] pub struct Push { pub server: Server, pub timing: Timing, } impl Push { /// Creates a new Push instance from a Server and Timing pub fn new(server: Server, timing: Timing) -> Push { Push { server: server, timing: timing, } } /// Executes the Push thread. Gets a list of members to talk to that are not Confirmed; then /// proceeds to process the list in `FANOUT` sized chunks. If we finish sending the messages to /// all FANOUT targets faster than `Timing::GOSSIP_PERIOD_DEFAULT_MS`, we will block until we /// exceed that time. 
pub fn run(&mut self) { 'send: loop { if self.server.pause.load(Ordering::Relaxed) { thread::sleep(Duration::from_millis(100)); continue; } self.server.update_gossip_round(); let mut check_list = self.server.member_list.check_list(self.server.member_id()); let long_wait = self.timing.gossip_timeout(); 'fanout: loop { let mut thread_list = Vec::with_capacity(FANOUT); if check_list.len() == 0 { break 'fanout; } let drain_length = if check_list.len() >= FANOUT { FANOUT } else { check_list.len() }; let next_gossip = self.timing.gossip_timeout(); for member in check_list.drain(0..drain_length) { if self.server.check_blacklist(member.get_id()) { debug!( "Not sending rumors to {} - it is blacklisted", member.get_id() ); continue; } // Unlike the SWIM mechanism, we don't actually want to send gossip traffic to // persistent members that are confirmed dead. When the failure detector thread // finds them alive again, we'll go ahead and get back to the business at hand. if self.server.member_list.pingable(&member) && !self.server.member_list.persistent_and_confirmed(&member) { let rumors = self.server.rumor_heat.currently_hot_rumors(member.get_id()); if rumors.len() > 0 { let sc = self.server.clone(); let guard = match thread::Builder::new() .name(String::from("push-worker")) .spawn(move || { PushWorker::new(sc).send_rumors(member, rumors); }) { Ok(guard) => guard, Err(e) => { error!("Could not spawn thread: {}", e); continue; } }; thread_list.push(guard); } } } let num_threads = thread_list.len(); for guard in thread_list.drain(0..num_threads) { let _ = guard .join() .map_err(|e| println!("Push worker died: {:?}", e)); } if SteadyTime::now() < next_gossip { let wait_time = (next_gossip - SteadyTime::now()).num_milliseconds(); if wait_time > 0 { thread::sleep(Duration::from_millis(wait_time as u64)); } } } if SteadyTime::now() < long_wait { let wait_time = (long_wait - SteadyTime::now()).num_milliseconds(); if wait_time > 0 { thread::sleep(Duration::from_millis(wait_time as 
u64)); } } } } } /// A worker thread for pushing messages to a target struct PushWorker { pub server: Server, } impl PushWorker { /// Create a new PushWorker. pub fn new(server: Server) -> PushWorker { PushWorker { server: server } } /// Send the list of rumors to a given member. This method creates an outbound socket and then /// closes the connection as soon as we are done sending rumors. ZeroMQ may choose to keep the /// connection and socket open for 1 second longer - so it is possible, but unlikely, that this /// method can loose messages. fn send_rumors(&self, member: Member, rumors: Vec<RumorKey>) { let socket = (**ZMQ_CONTEXT) .as_mut() .socket(zmq::PUSH) .expect("Failure to create the ZMQ push socket"); socket .set_linger(1000) .expect("Failure to set the ZMQ push socket to not linger"); socket .set_tcp_keepalive(0) .expect("Failure to set the ZMQ push socket to not use keepalive"); socket .set_immediate(true) .expect("Failure to set the ZMQ push socket to immediate"); socket .set_sndhwm(1000) .expect("Failure to set the ZMQ push socket hwm"); socket .set_sndtimeo(500) .expect("Failure to set the ZMQ send timeout"); let to_addr = format!("{}:{}", member.get_address(), member.get_gossip_port()); match socket.connect(&format!("tcp://{}", to_addr)) { Ok(()) => debug!("Connected push socket to {:?}", member), Err(e) => { println!("Cannot connect push socket to {:?}: {:?}", member, e); return; } } 'rumorlist: for ref rumor_key in rumors.iter() { let rumor_as_bytes = match rumor_key.kind { ProtoRumor_Type::Member => { let send_rumor = match self.create_member_rumor(&rumor_key) { Some(rumor) => rumor, None => continue 'rumorlist, }; trace_it!( GOSSIP: &self.server, TraceKind::SendRumor, member.get_id(), &send_rumor); match send_rumor.write_to_bytes() { Ok(bytes) => bytes, Err(e) => { println!( "Could not write our own rumor to bytes; abandoning \ sending rumor: {:?}", e ); continue 'rumorlist; } } } ProtoRumor_Type::Service => { // trace_it!(GOSSIP: &self.server, 
// TraceKind::SendRumor, // member.get_id(), // &send_rumor); match self.server .service_store .write_to_bytes(&rumor_key.key, &rumor_key.id) { Ok(bytes) => bytes, Err(e) => { println!( "Could not write our own rumor to bytes; abandoning \ sending rumor: {:?}", e ); continue 'rumorlist; } } } ProtoRumor_Type::ServiceConfig => { // trace_it!(GOSSIP: &self.server, // TraceKind::SendRumor, // member.get_id(), // &send_rumor); match self.server .service_config_store .write_to_bytes(&rumor_key.key, &rumor_key.id) { Ok(bytes) => bytes, Err(e) => { println!( "Could not write our own rumor to bytes; abandoning \ sending rumor: {:?}", e ); continue 'rumorlist; } } } ProtoRumor_Type::ServiceFile => { // trace_it!(GOSSIP: &self.server, // TraceKind::SendRumor, // member.get_id(), // &send_rumor); match self.server .service_file_store .write_to_bytes(&rumor_key.key, &rumor_key.id) { Ok(bytes) => bytes, Err(e) => { println!( "Could not write our own rumor to bytes; abandoning \ sending rumor: {:?}", e ); continue 'rumorlist; } } } ProtoRumor_Type::Departure => match self.server .departure_store .write_to_bytes(&rumor_key.key, &rumor_key.id) { Ok(bytes) => bytes, Err(e) => { println!( "Could not write our own rumor to bytes; abandoning \ sending rumor: {:?}", e ); continue 'rumorlist; } }, ProtoRumor_Type::Election => { // trace_it!(GOSSIP: &self.server, // TraceKind::SendRumor, // member.get_id(), // &send_rumor); match self.server .election_store .write_to_bytes(&rumor_key.key, &rumor_key.id) { Ok(bytes) => bytes, Err(e) => { println!( "Could not write our own rumor to bytes; abandoning \ sending rumor: {:?}", e ); continue 'rumorlist; } } } ProtoRumor_Type::ElectionUpdate => match self.server .update_store .write_to_bytes(&rumor_key.key, &rumor_key.id) { Ok(bytes) => bytes, Err(e) => { println!( "Could not write our own rumor to bytes; abandoning sending \ rumor: {:?}", e ); continue 'rumorlist; } }, ProtoRumor_Type::Fake | ProtoRumor_Type::Fake2 => { debug!("You have fake 
rumors; how odd!"); continue 'rumorlist; } }; let payload = match self.server.generate_wire(rumor_as_bytes) { Ok(payload) => payload, Err(e) => { error!("Generating protobuf failed: {}", e); continue 'rumorlist; } }; match socket.send(&payload, 0) { Ok(()) => debug!("Sent rumor {:?} to {:?}", rumor_key, member), Err(e) => println!( "Could not send rumor to {:?} @ {:?}; ZMQ said: {:?}", member.get_id(), to_addr, e ), } } self.server.rumor_heat.cool_rumors(member.get_id(), &rumors); } /// Given a rumorkey, creates a protobuf rumor for sharing. fn create_member_rumor(&self, rumor_key: &RumorKey) -> Option<ProtoRumor> { let mut member: Option<ProtoMember> = None; self.server.member_list.with_member(&rumor_key.key(), |m| { if let Some(m) = m { member = Some(m.proto.clone()); } }); if member.is_none() { return None; } let mut membership = ProtoMembership::new(); membership.set_member(member.unwrap()); membership.set_health( self.server .member_list .health_of_by_id(&rumor_key.key()) .unwrap() .into(), ); let mut rumor = ProtoRumor::new(); rumor.set_field_type(ProtoRumor_Type::Member); rumor.set_member(membership); rumor.set_from_id(String::from(self.server.member_id())); Some(rumor) } }<|fim▁end|>
// limitations under the License.
<|file_name|>base64.js<|end_file_name|><|fim▁begin|>// Base64 encoder/decoder with UTF-8 support // // Copyright (c) 2011 Vitaly Puzrin // Copyright (c) 2011 Aleksey V Zapparov // // Author: Aleksey V Zapparov AKA ixti (http://www.ixti.net/) // // Permission is hereby granted, free of charge, to any person obtaining a copy // of this software and associated documentation files (the "Software"), to deal // in the Software without restriction, including without limitation the rights // to use, copy, modify, merge, publish, distribute, sublicense, and/or sell<|fim▁hole|>// all copies or substantial portions of the Software. // // THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR // IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, // FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE // AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER // LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, // OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN // THE SOFTWARE. 
// Based on original artworks of base64 encoder/decoder by [Mozilla][1] // [1]: http://lxr.mozilla.org/mozilla/source/extensions/xml-rpc/src/nsXmlRpcClient.js 'use strict'; /* eslint-env browser */ /* eslint-disable no-bitwise */ function noop() {} var logger = { warn: noop, error: noop }, padding = '=', chrTable = 'ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz' + '0123456789+/', binTable = [ -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 62, -1, -1, -1, 63, 52, 53, 54, 55, 56, 57, 58, 59, 60, 61, -1, -1, -1, 0, -1, -1, -1, 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, -1, -1, -1, -1, -1, -1, 26, 27, 28, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, 39, 40, 41, 42, 43, 44, 45, 46, 47, 48, 49, 50, 51, -1, -1, -1, -1, -1 ]; if (window.console) { logger = window.console; logger.warn = logger.warn || logger.error || logger.log || noop; logger.error = logger.error || logger.warn || logger.log || noop; } // internal helpers ////////////////////////////////////////////////////////// function utf8Encode(str) { var bytes = [], offset = 0, length, char; str = encodeURI(str); length = str.length; while (offset < length) { char = str.charAt(offset); offset += 1; if (char !== '%') { bytes.push(char.charCodeAt(0)); } else { char = str.charAt(offset) + str.charAt(offset + 1); bytes.push(parseInt(char, 16)); offset += 2; } } return bytes; } function utf8Decode(bytes) { var chars = [], offset = 0, length = bytes.length, c1, c2, c3; while (offset < length) { c1 = bytes[offset]; c2 = bytes[offset + 1]; c3 = bytes[offset + 2]; if (c1 < 128) { chars.push(String.fromCharCode(c1)); offset += 1; } else if (191 < c1 && c1 < 224) { chars.push(String.fromCharCode(((c1 & 31) << 6) | (c2 & 63))); offset += 2; } else { chars.push(String.fromCharCode(((c1 & 15) << 12) | ((c2 & 63) << 6) | (c3 & 63))); offset += 3; } } return 
chars.join(''); } // public api //////////////////////////////////////////////////////////////// function encode(str) { var result = '', bytes = utf8Encode(str), length = bytes.length, i; // Convert every three bytes to 4 ascii characters. for (i = 0; i < (length - 2); i += 3) { result += chrTable[bytes[i] >> 2]; result += chrTable[((bytes[i] & 0x03) << 4) + (bytes[i + 1] >> 4)]; result += chrTable[((bytes[i + 1] & 0x0f) << 2) + (bytes[i + 2] >> 6)]; result += chrTable[bytes[i + 2] & 0x3f]; } // Convert the remaining 1 or 2 bytes, pad out to 4 characters. if (length % 3) { i = length - (length % 3); result += chrTable[bytes[i] >> 2]; if ((length % 3) === 2) { result += chrTable[((bytes[i] & 0x03) << 4) + (bytes[i + 1] >> 4)]; result += chrTable[(bytes[i + 1] & 0x0f) << 2]; result += padding; } else { result += chrTable[(bytes[i] & 0x03) << 4]; result += padding + padding; } } return result; } function decode(data) { var value, code, idx = 0, bytes = [], leftbits = 0, // number of bits decoded, but yet to be appended leftdata = 0; // bits decoded, but yet to be appended // Convert one by one. for (idx = 0; idx < data.length; idx += 1) { code = data.charCodeAt(idx); value = binTable[code & 0x7F]; if (value === -1) { // Skip illegal characters and whitespace logger.warn('Illegal characters (code=' + code + ') in position ' + idx); } else { // Collect data into leftdata, update bitcount leftdata = (leftdata << 6) | value; leftbits += 6; // If we have 8 or more bits, append 8 bits to the result if (leftbits >= 8) { leftbits -= 8; // Append if not padding. if (padding !== data.charAt(idx)) { bytes.push((leftdata >> leftbits) & 0xFF); } leftdata &= (1 << leftbits) - 1; } } } // If there are any bits left, the base64 string was corrupted if (leftbits) { logger.error('Corrupted base64 string'); return null; } return utf8Decode(bytes); } exports.encode = encode; exports.decode = decode;<|fim▁end|>
// copies of the Software, and to permit persons to whom the Software is // furnished to do so, subject to the following conditions: // // The above copyright notice and this permission notice shall be included in
<|file_name|>shepherd.js<|end_file_name|><|fim▁begin|>(function() { var ATTACHMENT, Evented, Shepherd, Step, addClass, createFromHTML, extend, matchesSelector, parseShorthand, removeClass, uniqueId, _ref, __bind = function(fn, me){ return function(){ return fn.apply(me, arguments); }; }, __hasProp = {}.hasOwnProperty, __extends = function(child, parent) { for (var key in parent) { if (__hasProp.call(parent, key)) child[key] = parent[key]; } function ctor() { this.constructor = child; } ctor.prototype = parent.prototype; child.prototype = new ctor(); child.__super__ = parent.prototype; return child; }; _ref = Tether.Utils, extend = _ref.extend, removeClass = _ref.removeClass, addClass = _ref.addClass, Evented = _ref.Evented; ATTACHMENT = { 'top': 'top center', 'left': 'middle right', 'right': 'middle left', 'bottom': 'bottom center' }; uniqueId = (function() { var id; id = 0; return function() { return id++; }; })(); createFromHTML = function(html) { var el; el = document.createElement('div'); el.innerHTML = html; return el.children[0]; }; matchesSelector = function(el, sel) { var matches, _ref1, _ref2, _ref3, _ref4; matches = (_ref1 = (_ref2 = (_ref3 = (_ref4 = el.matches) != null ? _ref4 : el.matchesSelector) != null ? _ref3 : el.webkitMatchesSelector) != null ? _ref2 : el.mozMatchesSelector) != null ? 
_ref1 : el.oMatchesSelector; return matches.call(el, sel); }; parseShorthand = function(obj, props) { var i, out, prop, vals, _i, _len; if (obj == null) { return obj; } else if (typeof obj === 'object') { return obj; } else { vals = obj.split(' '); if (vals.length > props.length) { vals[0] = vals.slice(0, +(vals.length - props.length) + 1 || 9e9).join(' '); vals.splice(1, vals.length - props.length); } out = {}; for (i = _i = 0, _len = props.length; _i < _len; i = ++_i) { prop = props[i]; out[prop] = vals[i]; } return out; } }; Step = (function(_super) { __extends(Step, _super); function Step(shepherd, options) { this.shepherd = shepherd; this.destroy = __bind(this.destroy, this); this.scrollTo = __bind(this.scrollTo, this); this.complete = __bind(this.complete, this); this.cancel = __bind(this.cancel, this); this.hide = __bind(this.hide, this); this.show = __bind(this.show, this); this.setOptions(options); } Step.prototype.setOptions = function(options) { var event, handler, _base, _ref1; this.options = options != null ? options : {}; this.destroy(); this.id = this.options.id || this.id || ("step-" + (uniqueId())); if (this.options.when) { _ref1 = this.options.when; for (event in _ref1) { handler = _ref1[event]; this.on(event, handler, this); } } return (_base = this.options).buttons != null ? 
(_base = this.options).buttons : _base.buttons = [ { text: 'Next', action: this.shepherd.next } ]; }; Step.prototype.bindAdvance = function() { var event, handler, selector, _ref1, _this = this; _ref1 = parseShorthand(this.options.advanceOn, ['event', 'selector']), event = _ref1.event, selector = _ref1.selector; handler = function(e) { if (selector != null) { if (matchesSelector(e.target, selector)) { return _this.shepherd.advance(); } } else { if (_this.el && e.target === _this.el) { return _this.shepherd.advance(); } } }; document.body.addEventListener(event, handler); return this.on('destroy', function() { return document.body.removeEventListener(event, handler); }); }; Step.prototype.getAttachTo = function() { var opts; opts = parseShorthand(this.options.attachTo, ['element', 'on']); if (opts == null) { opts = {}; } if (typeof opts.element === 'string') { opts.element = document.querySelector(opts.element); if (opts.element == null) { throw new Error("Shepherd step's attachTo was not found in the page"); } } return opts; }; Step.prototype.setupTether = function() { var attachment, opts, tetherOpts; if (typeof Tether === "undefined" || Tether === null) { throw new Error("Using the attachment feature of Shepherd requires the Tether library"); } opts = this.getAttachTo(); attachment = ATTACHMENT[opts.on || 'right']; if (opts.element == null) { opts.element = 'viewport'; attachment = 'middle center'; }<|fim▁hole|> element: this.el, constraints: [ { to: 'window', pin: true, attachment: 'together' } ], target: opts.element, offset: opts.offset || '0 0', attachment: attachment }; return this.tether = new Tether(extend(tetherOpts, this.options.tetherOptions)); }; Step.prototype.show = function() { var _ref1, _this = this; if (this.el == null) { this.render(); } addClass(this.el, 'shepherd-open'); if ((_ref1 = this.tether) != null) { _ref1.enable(); } if (this.options.scrollTo) { setTimeout(function() { return _this.scrollTo(); }); } return this.trigger('show'); }; 
Step.prototype.hide = function() { var _ref1; removeClass(this.el, 'shepherd-open'); if ((_ref1 = this.tether) != null) { _ref1.disable(); } return this.trigger('hide'); }; Step.prototype.cancel = function() { this.hide(); return this.trigger('cancel'); }; Step.prototype.complete = function() { this.hide(); return this.trigger('complete'); }; Step.prototype.scrollTo = function() { var $attachTo, elHeight, elLeft, elTop, element, height, left, offset, top, _ref1; element = this.getAttachTo().element; if (element == null) { return; } $attachTo = jQuery(element); _ref1 = $attachTo.offset(), top = _ref1.top, left = _ref1.left; height = $attachTo.outerHeight(); offset = $(this.el).offset(); elTop = offset.top; elLeft = offset.left; elHeight = $(this.el).outerHeight(); if (top < pageYOffset || elTop < pageYOffset) { return jQuery(document.body).scrollTop(Math.min(top, elTop) - 10); } else if ((top + height) > (pageYOffset + innerHeight) || (elTop + elHeight) > (pageYOffset + innerHeight)) { return jQuery(document.body).scrollTop(Math.max(top + height, elTop + elHeight) - innerHeight + 10); } }; Step.prototype.destroy = function() { var _ref1; if (this.el != null) { document.body.removeChild(this.el); delete this.el; } if ((_ref1 = this.tether) != null) { _ref1.destroy(); } return this.trigger('destroy'); }; Step.prototype.render = function() { var button, buttons, cfg, content, footer, header, paragraph, paragraphs, text, _i, _j, _len, _len1, _ref1, _ref2, _ref3; if (this.el != null) { this.destroy(); } this.el = createFromHTML("<div class='shepherd-step " + ((_ref1 = this.options.classes) != null ? 
_ref1 : '') + "' data-id='" + this.id + "'></div>"); content = document.createElement('div'); content.className = 'shepherd-content'; this.el.appendChild(content); if (this.options.title != null) { header = document.createElement('header'); header.innerHTML = "<h3 class='shepherd-title'>" + this.options.title + "</h3>"; this.el.className += ' shepherd-has-title'; content.appendChild(header); } if (this.options.text != null) { text = createFromHTML("<div class='shepherd-text'></div>"); paragraphs = this.options.text; if (typeof paragraphs === 'string') { paragraphs = [paragraphs]; } for (_i = 0, _len = paragraphs.length; _i < _len; _i++) { paragraph = paragraphs[_i]; text.innerHTML += "<p>" + paragraph + "</p>"; } content.appendChild(text); } footer = document.createElement('footer'); if (this.options.buttons) { buttons = createFromHTML("<ul class='shepherd-buttons'></ul>"); _ref2 = this.options.buttons; for (_j = 0, _len1 = _ref2.length; _j < _len1; _j++) { cfg = _ref2[_j]; button = createFromHTML("<li><a class='shepherd-button " + ((_ref3 = cfg.classes) != null ? 
_ref3 : '') + "'>" + cfg.text + "</a>"); buttons.appendChild(button); this.bindButtonEvents(cfg, button.querySelector('a')); } footer.appendChild(buttons); } content.appendChild(footer); document.body.appendChild(this.el); this.setupTether(); if (this.options.advanceOn) { return this.bindAdvance(); } }; Step.prototype.bindButtonEvents = function(cfg, el) { var event, handler, page, _ref1, _this = this; if (cfg.events == null) { cfg.events = {}; } if (cfg.action != null) { cfg.events.click = cfg.action; } _ref1 = cfg.events; for (event in _ref1) { handler = _ref1[event]; if (typeof handler === 'string') { page = handler; handler = function() { return _this.shepherd.show(page); }; } el.addEventListener(event, handler); } return this.on('destroy', function() { var _ref2, _results; _ref2 = cfg.events; _results = []; for (event in _ref2) { handler = _ref2[event]; _results.push(el.removeEventListener(event, handler)); } return _results; }); }; return Step; })(Evented); Shepherd = (function(_super) { __extends(Shepherd, _super); function Shepherd(options) { var _ref1; this.options = options != null ? options : {}; this.hide = __bind(this.hide, this); this.cancel = __bind(this.cancel, this); this.back = __bind(this.back, this); this.next = __bind(this.next, this); this.steps = (_ref1 = this.options.steps) != null ? 
_ref1 : []; } Shepherd.prototype.addStep = function(name, step) { if (step == null) { step = name; } else { step.id = name; } step = extend({}, this.options.defaults, step); return this.steps.push(new Step(this, step)); }; Shepherd.prototype.getById = function(id) { var step, _i, _len, _ref1; _ref1 = this.steps; for (_i = 0, _len = _ref1.length; _i < _len; _i++) { step = _ref1[_i]; if (step.id === id) { return step; } } }; Shepherd.prototype.next = function() { var index; index = this.steps.indexOf(this.currentStep); if (index === this.steps.length - 1) { this.hide(index); return this.trigger('complete'); } else { return this.show(index + 1); } }; Shepherd.prototype.back = function() { var index; index = this.steps.indexOf(this.currentStep); return this.show(index - 1); }; Shepherd.prototype.cancel = function() { var _ref1; if ((_ref1 = this.currentStep) != null) { _ref1.cancel(); } return this.trigger('cancel'); }; Shepherd.prototype.hide = function() { var _ref1; if ((_ref1 = this.currentStep) != null) { _ref1.hide(); } return this.trigger('hide'); }; Shepherd.prototype.show = function(key) { var next; if (key == null) { key = 0; } if (this.currentStep) { this.currentStep.hide(); } if (typeof key === 'string') { next = this.getById(key); } else { next = this.steps[key]; } if (next) { this.trigger('shown', { step: next, previous: this.currentStep }); this.currentStep = next; return next.show(); } }; Shepherd.prototype.start = function() { this.currentStep = null; return this.next(); }; return Shepherd; })(Evented); window.Shepherd = Shepherd; }).call(this);<|fim▁end|>
tetherOpts = { classPrefix: 'shepherd',
<|file_name|>test_nova.py<|end_file_name|><|fim▁begin|># Copyright 2013 IBM Corp. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. from cinder.compute import nova from cinder import context from cinder import test<|fim▁hole|> def __getattr__(self, item): return None def __init__(self): self.volumes = self.Volumes() def create_volume_snapshot(self, *args, **kwargs): pass def delete_volume_snapshot(self, *args, **kwargs): pass class NovaApiTestCase(test.TestCase): def setUp(self): super(NovaApiTestCase, self).setUp() self.api = nova.API() self.novaclient = FakeNovaClient() self.ctx = context.get_admin_context() self.mox.StubOutWithMock(nova, 'novaclient') def test_update_server_volume(self): nova.novaclient(self.ctx).AndReturn(self.novaclient) self.mox.StubOutWithMock(self.novaclient.volumes, 'update_server_volume') self.novaclient.volumes.update_server_volume('server_id', 'attach_id', 'new_volume_id') self.mox.ReplayAll() self.api.update_server_volume(self.ctx, 'server_id', 'attach_id', 'new_volume_id')<|fim▁end|>
class FakeNovaClient(object): class Volumes(object):
<|file_name|>pipeline.rs<|end_file_name|><|fim▁begin|>/* This Source Code Form is subject to the terms of the Mozilla Public * License, v. 2.0. If a copy of the MPL was not distributed with this * file, You can obtain one at http://mozilla.org/MPL/2.0/. */ use CompositorProxy; use compositor_thread; use compositor_thread::Msg as CompositorMsg; use devtools_traits::{DevtoolsControlMsg, ScriptToDevtoolsControlMsg}; use euclid::scale_factor::ScaleFactor; use euclid::size::TypedSize2D; use gfx::font_cache_thread::FontCacheThread; use gfx::paint_thread::{ChromeToPaintMsg, LayoutToPaintMsg, PaintThread}; use gfx_traits::PaintMsg; use ipc_channel::ipc::{self, IpcReceiver, IpcSender}; use ipc_channel::router::ROUTER; use layers::geometry::DevicePixel; use layout_traits::{LayoutControlChan, LayoutThreadFactory}; use msg::constellation_msg::{ConstellationChan, Failure, FrameId, PipelineId, SubpageId}; use msg::constellation_msg::{LoadData, WindowSizeData}; use msg::constellation_msg::{PipelineNamespaceId}; use net_traits::ResourceThread; use net_traits::image_cache_thread::ImageCacheThread; use net_traits::storage_thread::StorageThread; use profile_traits::mem as profile_mem;<|fim▁hole|>use script_traits::{ScriptToCompositorMsg, ScriptThreadFactory, TimerEventRequest}; use std::mem; use std::sync::mpsc::{Receiver, Sender, channel}; use std::thread; use url::Url; use util; use util::geometry::{PagePx, ViewportPx}; use util::ipc::OptionalIpcSender; use util::opts::{self, Opts}; use util::prefs; use webrender_traits; /// A uniquely-identifiable pipeline of script thread, layout thread, and paint thread. pub struct Pipeline { pub id: PipelineId, pub parent_info: Option<(PipelineId, SubpageId)>, pub script_chan: IpcSender<ConstellationControlMsg>, /// A channel to layout, for performing reflows and shutdown. pub layout_chan: LayoutControlChan, /// A channel to the compositor. 
pub compositor_proxy: Box<CompositorProxy + 'static + Send>, pub chrome_to_paint_chan: Sender<ChromeToPaintMsg>, pub layout_shutdown_port: IpcReceiver<()>, pub paint_shutdown_port: IpcReceiver<()>, /// URL corresponding to the most recently-loaded page. pub url: Url, /// The title of the most recently-loaded page. pub title: Option<String>, pub size: Option<TypedSize2D<PagePx, f32>>, /// Whether this pipeline is currently running animations. Pipelines that are running /// animations cause composites to be continually scheduled. pub running_animations: bool, pub children: Vec<FrameId>, } /// The subset of the pipeline that is needed for layer composition. #[derive(Clone)] pub struct CompositionPipeline { pub id: PipelineId, pub script_chan: IpcSender<ConstellationControlMsg>, pub layout_chan: LayoutControlChan, pub chrome_to_paint_chan: Sender<ChromeToPaintMsg>, } /// Initial setup data needed to construct a pipeline. /// /// *DO NOT* add any Senders to this unless you absolutely know what you're doing, or pcwalton will /// have to rewrite your code. Use IPC senders instead. pub struct InitialPipelineState { /// The ID of the pipeline to create. pub id: PipelineId, /// The subpage ID of this pipeline to create in its pipeline parent. /// If `None`, this is the root. pub parent_info: Option<(PipelineId, SubpageId)>, /// A channel to the associated constellation. pub constellation_chan: ConstellationChan<ScriptMsg>, /// A channel for the layout thread to send messages to the constellation. pub layout_to_constellation_chan: ConstellationChan<LayoutMsg>, /// A channel to the associated paint thread. pub painter_chan: ConstellationChan<PaintMsg>, /// A channel to schedule timer events. pub scheduler_chan: IpcSender<TimerEventRequest>, /// A channel to the compositor. pub compositor_proxy: Box<CompositorProxy + 'static + Send>, /// A channel to the developer tools, if applicable. 
pub devtools_chan: Option<Sender<DevtoolsControlMsg>>, /// A channel to the image cache thread. pub image_cache_thread: ImageCacheThread, /// A channel to the font cache thread. pub font_cache_thread: FontCacheThread, /// A channel to the resource thread. pub resource_thread: ResourceThread, /// A channel to the storage thread. pub storage_thread: StorageThread, /// A channel to the time profiler thread. pub time_profiler_chan: time::ProfilerChan, /// A channel to the memory profiler thread. pub mem_profiler_chan: profile_mem::ProfilerChan, /// Information about the initial window size. pub window_size: Option<TypedSize2D<PagePx, f32>>, /// Information about the device pixel ratio. pub device_pixel_ratio: ScaleFactor<ViewportPx, DevicePixel, f32>, /// A channel to the script thread, if applicable. If this is `Some`, /// then `parent_info` must also be `Some`. pub script_chan: Option<IpcSender<ConstellationControlMsg>>, /// Information about the page to load. pub load_data: LoadData, /// The ID of the pipeline namespace for this script thread. pub pipeline_namespace_id: PipelineNamespaceId, /// Optional webrender api (if enabled). pub webrender_api_sender: Option<webrender_traits::RenderApiSender>, } impl Pipeline { /// Starts a paint thread, layout thread, and possibly a script thread. /// Returns the channels wrapped in a struct. 
pub fn create<LTF, STF>(state: InitialPipelineState) -> (Pipeline, UnprivilegedPipelineContent, PrivilegedPipelineContent) where LTF: LayoutThreadFactory, STF: ScriptThreadFactory { let (layout_to_paint_chan, layout_to_paint_port) = util::ipc::optional_ipc_channel(); let (chrome_to_paint_chan, chrome_to_paint_port) = channel(); let (paint_shutdown_chan, paint_shutdown_port) = ipc::channel().unwrap(); let (layout_shutdown_chan, layout_shutdown_port) = ipc::channel().unwrap(); let (pipeline_chan, pipeline_port) = ipc::channel().unwrap(); let (script_to_compositor_chan, script_to_compositor_port) = ipc::channel().unwrap(); let mut pipeline_port = Some(pipeline_port); let failure = Failure { pipeline_id: state.id, parent_info: state.parent_info, }; let window_size = state.window_size.map(|size| { WindowSizeData { visible_viewport: size, initial_viewport: size * ScaleFactor::new(1.0), device_pixel_ratio: state.device_pixel_ratio, } }); // Route messages coming from content to devtools as appropriate. 
let script_to_devtools_chan = state.devtools_chan.as_ref().map(|devtools_chan| { let (script_to_devtools_chan, script_to_devtools_port) = ipc::channel().unwrap(); let devtools_chan = (*devtools_chan).clone(); ROUTER.add_route(script_to_devtools_port.to_opaque(), box move |message| { let message: ScriptToDevtoolsControlMsg = message.to().unwrap(); devtools_chan.send(DevtoolsControlMsg::FromScript(message)).unwrap() }); script_to_devtools_chan }); let (layout_content_process_shutdown_chan, layout_content_process_shutdown_port) = ipc::channel().unwrap(); let (script_chan, script_port) = match state.script_chan { Some(script_chan) => { let (containing_pipeline_id, subpage_id) = state.parent_info.expect("script_pipeline != None but subpage_id == None"); let new_layout_info = NewLayoutInfo { containing_pipeline_id: containing_pipeline_id, new_pipeline_id: state.id, subpage_id: subpage_id, load_data: state.load_data.clone(), paint_chan: layout_to_paint_chan.clone().to_opaque(), failure: failure, pipeline_port: mem::replace(&mut pipeline_port, None).unwrap(), layout_shutdown_chan: layout_shutdown_chan.clone(), content_process_shutdown_chan: layout_content_process_shutdown_chan.clone(), }; script_chan.send(ConstellationControlMsg::AttachLayout(new_layout_info)) .unwrap(); (script_chan, None) } None => { let (script_chan, script_port) = ipc::channel().unwrap(); (script_chan, Some(script_port)) } }; let (script_content_process_shutdown_chan, script_content_process_shutdown_port) = ipc::channel().unwrap(); let pipeline = Pipeline::new(state.id, state.parent_info, script_chan.clone(), LayoutControlChan(pipeline_chan), state.compositor_proxy.clone_compositor_proxy(), chrome_to_paint_chan.clone(), layout_shutdown_port, paint_shutdown_port, state.load_data.url.clone(), state.window_size); let unprivileged_pipeline_content = UnprivilegedPipelineContent { id: state.id, parent_info: state.parent_info, constellation_chan: state.constellation_chan, scheduler_chan: state.scheduler_chan, 
devtools_chan: script_to_devtools_chan, image_cache_thread: state.image_cache_thread, font_cache_thread: state.font_cache_thread.clone(), resource_thread: state.resource_thread, storage_thread: state.storage_thread, time_profiler_chan: state.time_profiler_chan.clone(), mem_profiler_chan: state.mem_profiler_chan.clone(), window_size: window_size, layout_to_constellation_chan: state.layout_to_constellation_chan, script_chan: script_chan, load_data: state.load_data.clone(), failure: failure, script_port: script_port, opts: (*opts::get()).clone(), layout_to_paint_chan: layout_to_paint_chan, pipeline_port: pipeline_port, layout_shutdown_chan: layout_shutdown_chan, paint_shutdown_chan: paint_shutdown_chan.clone(), script_to_compositor_chan: script_to_compositor_chan, pipeline_namespace_id: state.pipeline_namespace_id, layout_content_process_shutdown_chan: layout_content_process_shutdown_chan, layout_content_process_shutdown_port: layout_content_process_shutdown_port, script_content_process_shutdown_chan: script_content_process_shutdown_chan, script_content_process_shutdown_port: script_content_process_shutdown_port, webrender_api_sender: state.webrender_api_sender, }; let privileged_pipeline_content = PrivilegedPipelineContent { id: state.id, painter_chan: state.painter_chan, compositor_proxy: state.compositor_proxy, font_cache_thread: state.font_cache_thread, time_profiler_chan: state.time_profiler_chan, mem_profiler_chan: state.mem_profiler_chan, load_data: state.load_data, failure: failure, layout_to_paint_port: layout_to_paint_port, chrome_to_paint_chan: chrome_to_paint_chan, chrome_to_paint_port: chrome_to_paint_port, paint_shutdown_chan: paint_shutdown_chan, script_to_compositor_port: script_to_compositor_port, }; (pipeline, unprivileged_pipeline_content, privileged_pipeline_content) } pub fn new(id: PipelineId, parent_info: Option<(PipelineId, SubpageId)>, script_chan: IpcSender<ConstellationControlMsg>, layout_chan: LayoutControlChan, compositor_proxy: 
Box<CompositorProxy + 'static + Send>, chrome_to_paint_chan: Sender<ChromeToPaintMsg>, layout_shutdown_port: IpcReceiver<()>, paint_shutdown_port: IpcReceiver<()>, url: Url, size: Option<TypedSize2D<PagePx, f32>>) -> Pipeline { Pipeline { id: id, parent_info: parent_info, script_chan: script_chan, layout_chan: layout_chan, compositor_proxy: compositor_proxy, chrome_to_paint_chan: chrome_to_paint_chan, layout_shutdown_port: layout_shutdown_port, paint_shutdown_port: paint_shutdown_port, url: url, title: None, children: vec!(), size: size, running_animations: false, } } pub fn grant_paint_permission(&self) { let _ = self.chrome_to_paint_chan.send(ChromeToPaintMsg::PaintPermissionGranted); } pub fn revoke_paint_permission(&self) { debug!("pipeline revoking paint channel paint permission"); let _ = self.chrome_to_paint_chan.send(ChromeToPaintMsg::PaintPermissionRevoked); } pub fn exit(&self) { debug!("pipeline {:?} exiting", self.id); // Script thread handles shutting down layout, and layout handles shutting down the painter. // For now, if the script thread has failed, we give up on clean shutdown. if self.script_chan .send(ConstellationControlMsg::ExitPipeline(self.id)) .is_ok() { // Wait until all slave threads have terminated and run destructors // NOTE: We don't wait for script thread as we don't always own it let _ = self.paint_shutdown_port.recv(); let _ = self.layout_shutdown_port.recv(); } // The compositor wants to know when pipelines shut down too. 
self.compositor_proxy.send(CompositorMsg::PipelineExited(self.id)) } pub fn freeze(&self) { let _ = self.script_chan.send(ConstellationControlMsg::Freeze(self.id)).unwrap(); } pub fn thaw(&self) { let _ = self.script_chan.send(ConstellationControlMsg::Thaw(self.id)).unwrap(); } pub fn force_exit(&self) { let _ = self.script_chan.send(ConstellationControlMsg::ExitPipeline(self.id)).unwrap(); let _ = self.chrome_to_paint_chan.send(ChromeToPaintMsg::Exit); let LayoutControlChan(ref layout_channel) = self.layout_chan; let _ = layout_channel.send(LayoutControlMsg::ExitNow).unwrap(); } pub fn to_sendable(&self) -> CompositionPipeline { CompositionPipeline { id: self.id.clone(), script_chan: self.script_chan.clone(), layout_chan: self.layout_chan.clone(), chrome_to_paint_chan: self.chrome_to_paint_chan.clone(), } } pub fn add_child(&mut self, frame_id: FrameId) { self.children.push(frame_id); } pub fn remove_child(&mut self, frame_id: FrameId) { let index = self.children.iter().position(|id| *id == frame_id).unwrap(); self.children.remove(index); } pub fn trigger_mozbrowser_event(&self, subpage_id: SubpageId, event: MozBrowserEvent) { assert!(prefs::get_pref("dom.mozbrowser.enabled").as_boolean().unwrap_or(false)); let event = ConstellationControlMsg::MozBrowserEvent(self.id, subpage_id, event); self.script_chan.send(event).unwrap(); } } #[derive(Deserialize, Serialize)] pub struct UnprivilegedPipelineContent { id: PipelineId, parent_info: Option<(PipelineId, SubpageId)>, constellation_chan: ConstellationChan<ScriptMsg>, layout_to_constellation_chan: ConstellationChan<LayoutMsg>, scheduler_chan: IpcSender<TimerEventRequest>, devtools_chan: Option<IpcSender<ScriptToDevtoolsControlMsg>>, script_to_compositor_chan: IpcSender<ScriptToCompositorMsg>, image_cache_thread: ImageCacheThread, font_cache_thread: FontCacheThread, resource_thread: ResourceThread, storage_thread: StorageThread, time_profiler_chan: time::ProfilerChan, mem_profiler_chan: profile_mem::ProfilerChan, 
window_size: Option<WindowSizeData>, script_chan: IpcSender<ConstellationControlMsg>, load_data: LoadData, failure: Failure, script_port: Option<IpcReceiver<ConstellationControlMsg>>, layout_to_paint_chan: OptionalIpcSender<LayoutToPaintMsg>, opts: Opts, paint_shutdown_chan: IpcSender<()>, pipeline_port: Option<IpcReceiver<LayoutControlMsg>>, pipeline_namespace_id: PipelineNamespaceId, layout_shutdown_chan: IpcSender<()>, layout_content_process_shutdown_chan: IpcSender<()>, layout_content_process_shutdown_port: IpcReceiver<()>, script_content_process_shutdown_chan: IpcSender<()>, script_content_process_shutdown_port: IpcReceiver<()>, webrender_api_sender: Option<webrender_traits::RenderApiSender>, } impl UnprivilegedPipelineContent { pub fn start_all<LTF, STF>(mut self, wait_for_completion: bool) where LTF: LayoutThreadFactory, STF: ScriptThreadFactory { let layout_pair = ScriptThreadFactory::create_layout_channel(None::<&mut STF>); ScriptThreadFactory::create(None::<&mut STF>, InitialScriptState { id: self.id, parent_info: self.parent_info, compositor: self.script_to_compositor_chan, control_chan: self.script_chan.clone(), control_port: mem::replace(&mut self.script_port, None).unwrap(), constellation_chan: self.constellation_chan.clone(), layout_to_constellation_chan: self.layout_to_constellation_chan.clone(), scheduler_chan: self.scheduler_chan.clone(), failure_info: self.failure.clone(), resource_thread: self.resource_thread, storage_thread: self.storage_thread.clone(), image_cache_thread: self.image_cache_thread.clone(), time_profiler_chan: self.time_profiler_chan.clone(), mem_profiler_chan: self.mem_profiler_chan.clone(), devtools_chan: self.devtools_chan, window_size: self.window_size, pipeline_namespace_id: self.pipeline_namespace_id, content_process_shutdown_chan: self.script_content_process_shutdown_chan.clone(), }, &layout_pair, self.load_data.clone()); LayoutThreadFactory::create(None::<&mut LTF>, self.id, self.load_data.url.clone(), 
self.parent_info.is_some(), layout_pair, self.pipeline_port.unwrap(), self.layout_to_constellation_chan, self.failure, self.script_chan.clone(), self.layout_to_paint_chan.clone(), self.image_cache_thread, self.font_cache_thread, self.time_profiler_chan, self.mem_profiler_chan, self.layout_shutdown_chan, self.layout_content_process_shutdown_chan.clone(), self.webrender_api_sender); if wait_for_completion { self.script_content_process_shutdown_port.recv().unwrap(); self.layout_content_process_shutdown_port.recv().unwrap(); } } pub fn opts(&self) -> Opts { self.opts.clone() } } pub struct PrivilegedPipelineContent { id: PipelineId, painter_chan: ConstellationChan<PaintMsg>, compositor_proxy: Box<CompositorProxy + Send + 'static>, script_to_compositor_port: IpcReceiver<ScriptToCompositorMsg>, font_cache_thread: FontCacheThread, time_profiler_chan: time::ProfilerChan, mem_profiler_chan: profile_mem::ProfilerChan, load_data: LoadData, failure: Failure, layout_to_paint_port: Receiver<LayoutToPaintMsg>, chrome_to_paint_chan: Sender<ChromeToPaintMsg>, chrome_to_paint_port: Receiver<ChromeToPaintMsg>, paint_shutdown_chan: IpcSender<()>, } impl PrivilegedPipelineContent { pub fn start_all(self) { PaintThread::create(self.id, self.load_data.url, self.chrome_to_paint_chan, self.layout_to_paint_port, self.chrome_to_paint_port, self.compositor_proxy.clone_compositor_proxy(), self.painter_chan, self.font_cache_thread, self.failure, self.time_profiler_chan, self.mem_profiler_chan, self.paint_shutdown_chan); let compositor_proxy_for_script_listener_thread = self.compositor_proxy.clone_compositor_proxy(); let script_to_compositor_port = self.script_to_compositor_port; thread::spawn(move || { compositor_thread::run_script_listener_thread( compositor_proxy_for_script_listener_thread, script_to_compositor_port) }); } pub fn start_paint_thread(self) { PaintThread::create(self.id, self.load_data.url, self.chrome_to_paint_chan, self.layout_to_paint_port, self.chrome_to_paint_port, 
self.compositor_proxy, self.painter_chan, self.font_cache_thread, self.failure, self.time_profiler_chan, self.mem_profiler_chan, self.paint_shutdown_chan); } }<|fim▁end|>
use profile_traits::time; use script_traits::{ConstellationControlMsg, InitialScriptState, MozBrowserEvent}; use script_traits::{LayoutControlMsg, LayoutMsg, NewLayoutInfo, ScriptMsg};
<|file_name|>git_requirements.py<|end_file_name|><|fim▁begin|>#!/usr/bin/python # -*- coding: utf-8 -*- from ansible.module_utils.basic import AnsibleModule import git import itertools import multiprocessing import os import signal import time DOCUMENTATION = """ --- module: git_requirements short_description: Module to run a multithreaded git clone options: repo_info: description: - List of repo information dictionaries containing at a minimum a key entry "src" with the source git URL to clone for each repo. In these dictionaries, one can further specify: "path" - destination clone location "version" - git version to checkout "refspec" - git refspec to checkout "depth" - clone depth level "force" - require git clone uses "--force" default_path: description: Default git clone path (str) in case not specified on an individual repo basis in repo_info. Defaults to "master". Not required. default_version: description: Default git version (str) in case not specified on an individual repo basis in repo_info. Defaults to "master". Not required. default_refspec: description: Default git repo refspec (str) in case not specified on an individual repo basis in repo_info. Defaults to "". Not required. default_depth: description: Default clone depth (int) in case not specified on an individual repo basis. Defaults to 10. Not required. retries: description: Integer number of retries allowed in case of git clone failure. Defaults to 1. Not required. delay: description: Integer time delay (seconds) between git clone retries in case of failure. Defaults to 0. Not required. force: description: Boolean. Apply --force flags to git clones wherever possible. Defaults to False. Not required. core_multiplier: description: Integer multiplier on the number of cores present on the machine to use for multithreading. For example, on a 2 core machine, a multiplier of 4 would use 8 threads. Defaults to 4. Not required. 
""" EXAMPLES = r""" - name: Clone repos git_requirements: repo_info: "[{'src':'https://github.com/ansible/', 'name': 'ansible' 'dest': '/etc/opt/ansible'}]" """ def init_signal(): signal.signal(signal.SIGINT, signal.SIG_IGN) def check_out_version(repo, version, pull=False, force=False, refspec=None, tag=False, depth=10): try: repo.git.fetch(tags=tag, force=force, refspec=refspec, depth=depth) except Exception as e: return ["Failed to fetch %s\n%s" % (repo.working_dir, str(e))] try: repo.git.checkout(version, force=force) except Exception as e: return [ "Failed to check out version %s for %s\n%s" % (version, repo.working_dir, str(e))] if repo.is_dirty(untracked_files=True) and force: try: repo.git.clean(force=force) except Exception as e: return [ "Failed to clean up repository% s\n%s" % (repo.working_dir, str(e))] if pull: try: repo.git.pull(force=force, refspec=refspec, depth=depth) except Exception as e: return ["Failed to pull repo %s\n%s" % (repo.working_dir, str(e))] return [] def pull_wrapper(info): role_info = info retries = info[1]["retries"] delay = info[1]["delay"] for i in range(retries): success = pull_role(role_info) if success: return True else: time.sleep(delay) info[2].append(["Role {0} failed after {1} retries\n".format(role_info[0], retries)]) return False def pull_role(info): role, config, failures = info required_version = role["version"] version_hash = False if 'version' in role: # If the version is the length of a hash then treat is as one if len(required_version) == 40: version_hash = True def get_repo(dest): try: return git.Repo(dest) except Exception: failtxt = "Role in {0} is broken/not a git repo.".format( role["dest"]) failtxt += "Please delete or fix it manually" failures.append(failtxt) return False # if repo exists if os.path.exists(role["dest"]): repo = get_repo(role["dest"]) if not repo: return False # go to next role repo_url = list(repo.remote().urls)[0] if repo_url != role["src"]: repo.remote().set_url(role["src"]) # if they want 
master then fetch, checkout and pull to stay at latest # master if required_version == "master": fail = check_out_version(repo, required_version, pull=True, force=config["force"], refspec=role["refspec"], depth=role["depth"]) # If we have a hash then reset it to elif version_hash: fail = check_out_version(repo, required_version, force=config["force"], refspec=role["refspec"], depth=role["depth"]) else: # describe can fail in some cases so be careful: try: current_version = repo.git.describe(tags=True) except Exception: current_version = "" if current_version == required_version and not config["force"]: fail = [] pass else: fail = check_out_version(repo, required_version, force=config["force"], refspec=role["refspec"], depth=role["depth"], tag=True) else: try: # If we have a hash id then treat this a little differently if version_hash: git.Repo.clone_from(role["src"], role["dest"], branch='master', no_single_branch=True, depth=role["depth"]) repo = get_repo(role["dest"]) if not repo: return False # go to next role fail = check_out_version(repo, required_version, force=config["force"], refspec=role["refspec"], depth=role["depth"]) else: git.Repo.clone_from(role["src"], role["dest"], branch=required_version, depth=role["depth"], no_single_branch=True) fail = [] except Exception as e: fail = ('Failed cloning repo %s\n%s' % (role["dest"], str(e))) if fail == []: return True else: failures.append(fail) return False def set_default(dictionary, key, defaults): if key not in dictionary.keys(): dictionary[key] = defaults[key] def main(): # Define variables failures = multiprocessing.Manager().list() # Data we can pass in to the module fields = { "repo_info": {"required": True, "type": "list"}, "default_path": {"required": True, "type": "str"}, "default_version": {"required": False, "type": "str", "default": "master"}, "default_refspec": {"required": False, "type": "str", "default": None}, "default_depth": {"required": False, "type": "int", "default": 10}, "retries": 
{"required": False, "type": "int", "default": 1}, "delay": {"required": False, "type": "int", "default": 0}, "force": {"required": False, "type": "bool", "default": False}, "core_multiplier": {"required": False, "type": "int", "default": 4}, } <|fim▁hole|> defaults = { "path": module.params["default_path"], "depth": module.params["default_depth"], "version": module.params["default_version"], "refspec": module.params["default_refspec"] } config = { "retries": module.params["retries"], "delay": module.params["delay"], "force": module.params["force"], "core_multiplier": module.params["core_multiplier"] } # Set up defaults for repo in git_repos: for key in ["path", "refspec", "version", "depth"]: set_default(repo, key, defaults) if "name" not in repo.keys(): repo["name"] = os.path.basename(repo["src"]) repo["dest"] = os.path.join(repo["path"], repo["name"]) # Define varibles failures = multiprocessing.Manager().list() core_count = multiprocessing.cpu_count() * config["core_multiplier"] # Load up process and pass in interrupt and core process count p = multiprocessing.Pool(core_count, init_signal) clone_success = p.map(pull_wrapper, zip(git_repos, itertools.repeat(config), itertools.repeat(failures)), chunksize=1) p.close() success = all(i for i in clone_success) if success: module.exit_json(msg=str(git_repos), changed=True) else: module.fail_json(msg=("Module failed"), meta=failures) if __name__ == '__main__': main()<|fim▁end|>
# Pull in module fields and pass into variables module = AnsibleModule(argument_spec=fields) git_repos = module.params['repo_info']
<|file_name|>Core.UI.Datepicker.js<|end_file_name|><|fim▁begin|>// -- // Copyright (C) 2001-2016 OTRS AG, http://otrs.com/ // -- // This software comes with ABSOLUTELY NO WARRANTY. For details, see // the enclosed file COPYING for license information (AGPL). If you // did not receive this file, see http://www.gnu.org/licenses/agpl.txt. // -- "use strict"; var Core = Core || {}; Core.UI = Core.UI || {}; /** * @namespace Core.UI.Datepicker * @memberof Core.UI * @author OTRS AG * @description * This namespace contains the datepicker functions. */ Core.UI.Datepicker = (function (TargetNS) { /** * @private * @name VacationDays * @memberof Core.UI.Datepicker * @member {Object} * @description * Vacation days, defined in SysConfig. */ var VacationDays, /** * @private * @name VacationDaysOneTime * @memberof Core.UI.Datepicker * @member {Object} * @description * One time vacations, defined in SysConfig. */ VacationDaysOneTime, /** * @private * @name LocalizationData * @memberof Core.UI.Datepicker * @member {Object} * @description * Translations. */ LocalizationData, /** * @private * @name DatepickerCount * @memberof Core.UI.Datepicker * @member {Number} * @description * Number of initialized datepicker. */ DatepickerCount = 0; if (!Core.Debug.CheckDependency('Core.UI.Datepicker', '$([]).datepicker', 'jQuery UI datepicker')) { return false; } /** * @private * @name CheckDate * @memberof Core.UI.Datepicker * @function * @returns {Array} First element is always true, second element contains the name of a CSS class, third element a description for the date. * @param {DateObject} DateObject - A JS date object to check. * @description * Check if date is on of the defined vacation days. 
*/ function CheckDate(DateObject) { var DayDescription = '', DayClass = ''; // Get defined days from Config, if not done already if (typeof VacationDays === 'undefined') { VacationDays = Core.Config.Get('Datepicker.VacationDays').TimeVacationDays; } if (typeof VacationDaysOneTime === 'undefined') { VacationDaysOneTime = Core.Config.Get('Datepicker.VacationDays').TimeVacationDaysOneTime; } // Check if date is one of the vacation days if (typeof VacationDays[DateObject.getMonth() + 1] !== 'undefined' && typeof VacationDays[DateObject.getMonth() + 1][DateObject.getDate()] !== 'undefined') { DayDescription += VacationDays[DateObject.getMonth() + 1][DateObject.getDate()]; DayClass = 'Highlight '; } // Check if date is one of the one time vacation days if (typeof VacationDaysOneTime[DateObject.getFullYear()] !== 'undefined' && typeof VacationDaysOneTime[DateObject.getFullYear()][DateObject.getMonth() + 1] !== 'undefined' && typeof VacationDaysOneTime[DateObject.getFullYear()][DateObject.getMonth() + 1][DateObject.getDate()] !== 'undefined') { DayDescription += VacationDaysOneTime[DateObject.getFullYear()][DateObject.getMonth() + 1][DateObject.getDate()]; DayClass = 'Highlight '; } if (DayClass.length) { return [true, DayClass, DayDescription]; } else { return [true, '']; } } /** * @name Init * @memberof Core.UI.Datepicker * @function * @returns {Boolean} false, if Parameter Element is not of the correct type. * @param {jQueryObject|Object} Element - The jQuery object of a text input field which should get a datepicker. * Or a hash with the Keys 'Year', 'Month' and 'Day' and as values the jQueryObjects of the select drop downs. * @description * This function initializes the datepicker on the defined elements. 
*/ TargetNS.Init = function (Element) { var $DatepickerElement, HasDateSelectBoxes = false, Options, ErrorMessage; if (typeof Element.VacationDays === 'object') { Core.Config.Set('Datepicker.VacationDays', Element.VacationDays); } /** * @private * @name LeadingZero * @memberof Core.UI.Datepicker.Init * @function * @returns {String} A number with leading zero, if needed. * @param {Number} Number - A number to convert. * @description * Converts a one digit number to a string with leading zero. */ function LeadingZero(Number) { if (Number.toString().length === 1) { return '0' + Number; } else { return Number; } } if (typeof LocalizationData === 'undefined') { LocalizationData = Core.Config.Get('Datepicker.Localization'); if (typeof LocalizationData === 'undefined') { throw new Core.Exception.ApplicationError('Datepicker localization data could not be found!', 'InternalError'); } } // Increment number of initialized datepickers on this site DatepickerCount++; // Check, if datepicker is used with three input element or with three select boxes if (typeof Element === 'object' && typeof Element.Day !== 'undefined' && typeof Element.Month !== 'undefined' && typeof Element.Year !== 'undefined' && isJQueryObject(Element.Day, Element.Month, Element.Year) && // Sometimes it can happen that BuildDateSelection was called without placing the full date selection. // Ignore in this case. 
Element.Day.length ) { $DatepickerElement = $('<input>').attr('type', 'hidden').attr('id', 'Datepicker' + DatepickerCount); Element.Year.after($DatepickerElement); if (Element.Day.is('select') && Element.Month.is('select') && Element.Year.is('select')) { HasDateSelectBoxes = true; } } else { return false; } // Define options hash Options = { beforeShowDay: function (DateObject) { return CheckDate(DateObject); }, showOn: 'focus', prevText: LocalizationData.PrevText, nextText: LocalizationData.NextText, firstDay: Element.WeekDayStart, showMonthAfterYear: 0, monthNames: LocalizationData.MonthNames, monthNamesShort: LocalizationData.MonthNamesShort, dayNames: LocalizationData.DayNames, dayNamesShort: LocalizationData.DayNamesShort, dayNamesMin: LocalizationData.DayNamesMin, isRTL: LocalizationData.IsRTL }; Options.onSelect = function (DateText, Instance) { var Year = Instance.selectedYear, Month = Instance.selectedMonth + 1, Day = Instance.selectedDay; // Update the three select boxes if (HasDateSelectBoxes) { Element.Year.find('option[value=' + Year + ']').prop('selected', true); Element.Month.find('option[value=' + Month + ']').prop('selected', true); Element.Day.find('option[value=' + Day + ']').prop('selected', true); }<|fim▁hole|> Element.Year.val(Year); Element.Month.val(LeadingZero(Month)); Element.Day.val(LeadingZero(Day)); } }; Options.beforeShow = function (Input) { $(Input).val(''); return { defaultDate: new Date(Element.Year.val(), Element.Month.val() - 1, Element.Day.val()) }; }; $DatepickerElement.datepicker(Options); // Add some DOM notes to the datepicker, but only if it was not initialized previously. // Check if one additional DOM node is already present. 
if (!$('#' + Core.App.EscapeSelector(Element.Day.attr('id')) + 'DatepickerIcon').length) { // add datepicker icon and click event $DatepickerElement.after('<a href="#" class="DatepickerIcon" id="' + Element.Day.attr('id') + 'DatepickerIcon" title="' + LocalizationData.IconText + '"><i class="fa fa-calendar"></i></a>'); if (Element.DateInFuture) { ErrorMessage = Core.Config.Get('Datepicker.ErrorMessageDateInFuture'); } else if (Element.DateNotInFuture) { ErrorMessage = Core.Config.Get('Datepicker.ErrorMessageDateNotInFuture'); } else { ErrorMessage = Core.Config.Get('Datepicker.ErrorMessage'); } // Add validation error messages for all dateselection elements Element.Year .after('<div id="' + Element.Day.attr('id') + 'Error" class="TooltipErrorMessage"><p>' + ErrorMessage + '</p></div>') .after('<div id="' + Element.Month.attr('id') + 'Error" class="TooltipErrorMessage"><p>' + ErrorMessage + '</p></div>') .after('<div id="' + Element.Year.attr('id') + 'Error" class="TooltipErrorMessage"><p>' + ErrorMessage + '</p></div>'); // only insert time element error messages if time elements are present if (Element.Hour && Element.Hour.length) { Element.Hour .after('<div id="' + Element.Hour.attr('id') + 'Error" class="TooltipErrorMessage"><p>' + ErrorMessage + '</p></div>') .after('<div id="' + Element.Minute.attr('id') + 'Error" class="TooltipErrorMessage"><p>' + ErrorMessage + '</p></div>'); } } $('#' + Core.App.EscapeSelector(Element.Day.attr('id')) + 'DatepickerIcon').unbind('click.Datepicker').bind('click.Datepicker', function () { $DatepickerElement.datepicker('show'); return false; }); // do not show the datepicker container div. $('#ui-datepicker-div').hide(); }; return TargetNS; }(Core.UI.Datepicker || {}));<|fim▁end|>
else {
<|file_name|>loggerConfig.py<|end_file_name|><|fim▁begin|>import logging def initLogger():<|fim▁hole|> logger.setLevel(logging.DEBUG) # create file handler which logs even debug messages file_handler = logging.FileHandler('cam.log') file_handler.setLevel(logging.DEBUG) # create console handler with a higher log level console_handler = logging.StreamHandler() console_handler.setLevel(logging.DEBUG) # create formatter and add it to the handlers formatter = logging.Formatter('%(asctime)s - %(levelname)s - %(message)s') file_handler.setFormatter(formatter) console_handler.setFormatter(formatter) # add the handlers to the logger logger.addHandler(file_handler) logger.addHandler(console_handler) def getLogger(): logger = logging.getLogger('cam') return logger<|fim▁end|>
# create logger logger = logging.getLogger('cam')
<|file_name|>purchase_order_line.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*- # Copyright 2019 OpenSynergy Indonesia # License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl.html). from openerp import models, fields, api class PurchaseOrderLine(models.Model): _inherit = "purchase.order.line" @api.multi @api.depends( "product_id", ) def _compute_allowed_purchase_uom_ids(self): obj_product_uom =\ self.env["product.uom"] for document in self: uom_po = document.product_id.uom_po_id if document.product_id: if document.product_id.limit_product_uom_selection: allowed_purchase_uom_ids =\ document.product_id.allowed_purchase_uom_ids.ids if uom_po.id not in allowed_purchase_uom_ids: allowed_purchase_uom_ids.append(uom_po.id) document.allowed_purchase_uom_ids =\ allowed_purchase_uom_ids else: category_id =\ uom_po.category_id.id criteria = [ ("category_id", "=", category_id) ] document.allowed_purchase_uom_ids =\ obj_product_uom.search(criteria) allowed_purchase_uom_ids = fields.Many2many( string="Allowed Invoices", comodel_name="product.uom", compute="_compute_allowed_purchase_uom_ids", store=False, ) @api.onchange( "product_id", "product_uom", "product_qty", ) @api.depends( "order_id.pricelist_id", "product_id",<|fim▁hole|> "order_id.date_order", "order_id.fiscal_position", "date_planned", "name", "order_id.state", ) def onchange_product_id_new_api(self): _super = super(PurchaseOrderLine, self) result = _super.onchange_product_id( self.order_id.pricelist_id.id, self.product_id.id, self.product_qty, self.product_uom.id, self.order_id.partner_id.id, self.order_id.date_order, self.order_id.fiscal_position.id, self.date_planned, self.name, False, self.order_id.state, ) if type(result) is dict and "value" in result: for field, value in result.get('value').items(): if hasattr(self, field): setattr(self, field, value)<|fim▁end|>
"product_qty", "product_uom", "order_id.partner_id",
<|file_name|>multibyte.rs<|end_file_name|><|fim▁begin|>//! Beginnings of a Emacs-encoded string handling library. //! //! Emacs Lisp strings (and by extension, most strings handled by the //! Emacs C API) are encoded in one of two ways: //! //! * "unibyte" strings are just sequences of 8-bit bytes that don't //! carry encoding information. Their interpretation is governed //! by runtime settings (`set-language-environment'). //! //! * "multibyte" strings are sequences of characters from an extended //! set of character codes, encoded in a fashion similar to UTF-8. //! //! The uniqueness of the Multibyte encoding is due to these features: //! //! * Codepoints up to 0x10FFFF coindice with Unicode. However, the //! maximum codepoint is 0x3FFFFF. The additional codepoints are //! used for "characters not unified with Unicode" and for 8-bit //! bytes, see below. //! //! * "Raw 8-bit" bytes, e.g. used when opening a file which is not //! properly encoded in a single encoding, are supported. //! //! Raw 8-bit bytes are represented by codepoints 0x3FFF80 to //! 0x3FFFFF. However, in the UTF-8 like encoding, where they //! should be represented by a 5-byte sequence starting with 0xF8, //! they are instead represented by a 2-byte sequence starting with //! 0xC0 or 0xC1. These 2-byte sequences are disallowed in UTF-8, //! because they would form a duplicate encoding for the the 1-byte //! ASCII range. //! //! Due to these specialties, we cannot treat Emacs strings as Rust //! `&str`, and this module regrettably contains adapted copies of //! stretches of `std::str` functions. 
use libc::{c_char, c_int, c_uchar, c_uint, ptrdiff_t}; use std::ptr; use std::slice; use remacs_sys::{EmacsInt, Lisp_String, CHARACTERBITS, CHAR_CTL, CHAR_MODIFIER_MASK, CHAR_SHIFT}; use remacs_sys::emacs_abort; use lisp::ExternalPtr; pub type LispStringRef = ExternalPtr<Lisp_String>; // cannot use `char`, it takes values out of its range pub type Codepoint = u32; /// Maximum character code pub const MAX_CHAR: Codepoint = (1 << CHARACTERBITS) - 1; /// Maximum character codes for several encoded lengths pub const MAX_1_BYTE_CHAR: Codepoint = 0x7F; pub const MAX_2_BYTE_CHAR: Codepoint = 0x7FF; pub const MAX_3_BYTE_CHAR: Codepoint = 0xFFFF; pub const MAX_4_BYTE_CHAR: Codepoint = 0x1F_FFFF; pub const MAX_5_BYTE_CHAR: Codepoint = 0x3F_FF7F; /// Maximum length of a single encoded codepoint pub const MAX_MULTIBYTE_LENGTH: usize = 5; impl LispStringRef { /// Return the string's len in bytes. pub fn len_bytes(&self) -> ptrdiff_t { if self.size_byte < 0 { self.size } else { self.size_byte } } /// Return the string's length in characters. Differs from /// `len_bytes` for multibyte strings. 
pub fn len_chars(&self) -> ptrdiff_t { self.size } pub fn is_multibyte(&self) -> bool { self.size_byte >= 0 } pub fn data_ptr(&mut self) -> *mut c_uchar { self.data as *mut c_uchar } pub fn sdata_ptr(&mut self) -> *mut c_char { self.data as *mut c_char } pub fn const_data_ptr(&self) -> *const c_uchar { self.data as *const c_uchar } pub fn const_sdata_ptr(&self) -> *const c_char { self.data as *const c_char } #[inline] pub fn as_slice(&self) -> &[u8] { unsafe { slice::from_raw_parts(self.data as *const u8, self.len_bytes() as usize) } } #[inline] pub fn as_mut_slice(&self) -> &mut [u8] { unsafe { slice::from_raw_parts_mut(self.data as *mut u8, self.len_bytes() as usize) } } #[inline] pub fn byte_at(&self, index: ptrdiff_t) -> u8 { unsafe { *self.const_data_ptr().offset(index) } } } pub struct LispStringRefIterator<'a> { string_ref: &'a LispStringRef, cur: usize, } pub struct LispStringRefCharIterator<'a>(LispStringRefIterator<'a>); // Substitute for FETCH_STRING_CHAR_ADVANCE impl<'a> Iterator for LispStringRefIterator<'a> { type Item = (usize, Codepoint); fn next(&mut self) -> Option<(usize, Codepoint)> { if self.cur < self.string_ref.len_bytes() as usize { let codepoint: Codepoint; let old_index = self.cur; let ref_slice = self.string_ref.as_slice(); if self.string_ref.is_multibyte() { let (cp, advance) = multibyte_char_at(&ref_slice[self.cur..]); codepoint = cp; self.cur += advance; } else { codepoint = ref_slice[self.cur] as Codepoint; self.cur += 1; } Some((old_index, codepoint)) } else { None } } } impl<'a> Iterator for LispStringRefCharIterator<'a> { type Item = Codepoint; fn next(&mut self) -> Option<Codepoint> { self.0.next().map(|result| result.1) } } impl LispStringRef { pub fn char_indices(&self) -> LispStringRefIterator { LispStringRefIterator { string_ref: self, cur: 0, } } pub fn chars(&self) -> LispStringRefCharIterator { LispStringRefCharIterator(self.char_indices()) } } fn string_overflow() -> ! 
{ error!("Maximum string size exceeded") } /// Parse unibyte string at STR of LEN bytes, and return the number of /// bytes it may occupy when converted to multibyte string by /// `str_to_multibyte`. #[no_mangle] pub fn count_size_as_multibyte(ptr: *const c_uchar, len: ptrdiff_t) -> ptrdiff_t { let slice = unsafe { slice::from_raw_parts(ptr, len as usize) }; slice.iter().fold(0, |total, &byte| { let n = if byte < 0x80 { 1 } else { 2 }; total.checked_add(n).unwrap_or_else(|| string_overflow()) }) } /// Same as the `BYTE8_TO_CHAR` macro. #[inline] pub fn raw_byte_codepoint(byte: c_uchar) -> Codepoint { if byte >= 0x80 { byte as Codepoint + 0x3F_FF00 } else { byte as Codepoint } } /// Same as the `CHAR_TO_BYTE8` macro. #[inline] pub fn raw_byte_from_codepoint(cp: Codepoint) -> c_uchar { (cp - 0x3F_FF00) as c_uchar } /// Same as the `CHAR_TO_BYTE_SAFE` macro. /// Return the raw 8-bit byte for character CP, /// or -1 if CP doesn't correspond to a byte. #[inline] pub fn raw_byte_from_codepoint_safe(cp: Codepoint) -> EmacsInt { if cp < 0x80 { cp as EmacsInt } else if cp > MAX_5_BYTE_CHAR { raw_byte_from_codepoint(cp) as EmacsInt } else { -1 } } /// `UNIBYTE_TO_CHAR` macro #[inline] pub fn unibyte_to_char(cp: Codepoint) -> Codepoint { if cp < 0x80 { cp } else { raw_byte_codepoint(cp as c_uchar) } } /// `MAKE_CHAR_MULTIBYTE` macro #[inline] pub fn make_char_multibyte(cp: Codepoint) -> Codepoint { debug_assert!(cp < 256); unibyte_to_char(cp) } /// Same as the `CHAR_STRING` macro. 
#[inline] fn write_codepoint(to: &mut [c_uchar], cp: Codepoint) -> usize { if cp <= MAX_1_BYTE_CHAR { to[0] = cp as c_uchar; 1 } else if cp <= MAX_2_BYTE_CHAR { // note: setting later bytes first to avoid multiple bound checks to[1] = 0x80 | (cp & 0x3F) as c_uchar; to[0] = 0xC0 | (cp >> 6) as c_uchar; 2 } else if cp <= MAX_3_BYTE_CHAR { to[2] = 0x80 | (cp & 0x3F) as c_uchar; to[1] = 0x80 | ((cp >> 6) & 0x3F) as c_uchar; to[0] = 0xE0 | (cp >> 12) as c_uchar; 3 } else if cp <= MAX_4_BYTE_CHAR { to[3] = 0x80 | (cp & 0x3F) as c_uchar; to[2] = 0x80 | ((cp >> 6) & 0x3F) as c_uchar; to[1] = 0x80 | ((cp >> 12) & 0x3F) as c_uchar; to[0] = 0xF0 | (cp >> 18) as c_uchar; 4 } else if cp <= MAX_5_BYTE_CHAR { to[4] = 0x80 | (cp & 0x3F) as c_uchar; to[3] = 0x80 | ((cp >> 6) & 0x3F) as c_uchar; to[2] = 0x80 | ((cp >> 12) & 0x3F) as c_uchar; to[1] = 0x80 | ((cp >> 18) & 0x0F) as c_uchar; to[0] = 0xF8; 5 } else if cp <= MAX_CHAR { let b = raw_byte_from_codepoint(cp); to[1] = 0x80 | (b & 0x3F); to[0] = 0xC0 | ((b >> 6) & 1); 2 } else { error!("Invalid character: {:#x}", cp) } } /// If character code C has modifier masks, reflect them to the /// character code if possible. Return the resulting code. #[no_mangle] pub fn char_resolve_modifier_mask(ch: EmacsInt) -> EmacsInt { let mut cp = ch as Codepoint; // A non-ASCII character can't reflect modifier bits to the code. if (cp & !CHAR_MODIFIER_MASK) >= 0x80 { return cp as EmacsInt; } let ascii = (cp & 0x7F) as u8; // For Meta, Shift, and Control modifiers, we need special care. if cp & CHAR_SHIFT != 0 { let unshifted = cp & !CHAR_SHIFT; // Shift modifier is valid only with [A-Za-z]. if ascii >= b'A' && ascii <= b'Z' { cp = unshifted; } else if ascii >= b'a' && ascii <= b'z' { cp = unshifted & !0x20; } else if ascii <= b' ' { // Shift modifier for control characters and SPC is ignored. cp = unshifted; } } // Simulate the code in lread.c. if cp & CHAR_CTL != 0 { // Allow `\C- ' and `\C-?'. 
if ascii == b' ' { cp &= !0x7F & !CHAR_CTL; } else if ascii == b'?' { cp = 0x7F | (cp & !0x7F & !CHAR_CTL); } else if ascii >= b'@' && ascii <= b'_' { // ASCII control chars are made from letters (both cases), // as well as the non-letters within 0o100...0o137. cp &= 0x1F | (!0x7F & !CHAR_CTL); } } cp as EmacsInt<|fim▁hole|>/// handle them appropriately. #[no_mangle] pub fn char_string(mut cp: c_uint, to: *mut c_uchar) -> c_int { if cp & CHAR_MODIFIER_MASK != 0 { cp = char_resolve_modifier_mask(cp as EmacsInt) as Codepoint; cp &= !CHAR_MODIFIER_MASK; } write_codepoint( unsafe { slice::from_raw_parts_mut(to, MAX_MULTIBYTE_LENGTH) }, cp, ) as c_int } /// Convert unibyte text at STR of BYTES bytes to a multibyte text /// that contains the same single-byte characters. It actually /// converts all 8-bit characters to multibyte forms. It is assured /// that we can use LEN bytes at STR as a work area and that is /// enough. Returns the byte length of the multibyte string. #[no_mangle] pub fn str_to_multibyte(ptr: *mut c_uchar, len: ptrdiff_t, bytes: ptrdiff_t) -> ptrdiff_t { // slice covers the whole work area to be able to write back let slice = unsafe { slice::from_raw_parts_mut(ptr, len as usize) }; // first, search ASCII-only prefix that we can skip processing let mut start = 0; for (idx, &byte) in slice.iter().enumerate() { if byte >= 0x80 { start = idx; break; } // whole string is ASCII-only, done! if idx as ptrdiff_t == bytes - 1 { return bytes; } } // copy the rest to the end of the work area, which is guaranteed to be // large enough, so we can read from there while writing the output let offset = (len - bytes) as usize; let slice = &mut slice[start..]; unsafe { ptr::copy( slice.as_mut_ptr(), slice[offset..].as_mut_ptr(), bytes as usize - start, ); } let mut to = 0; for from in offset..slice.len() { let byte = slice[from]; to += write_codepoint(&mut slice[to..], raw_byte_codepoint(byte)); } (start + to) as ptrdiff_t } /// Same as `MULTIBYTE_LENGTH` macro in C. 
fn multibyte_length(slice: &[c_uchar], allow_encoded_raw: bool) -> Option<usize> { let len = slice.len(); if len < 1 { None } else if slice[0] & 0x80 == 0 { Some(1) } else if len < 2 || slice[1] & 0xC0 != 0x80 { None } else if !allow_encoded_raw && slice[0] & 0xFE == 0xC0 { None } else if slice[0] & 0xE0 == 0xC0 { Some(2) } else if len < 3 || slice[2] & 0xC0 != 0x80 { None } else if slice[0] & 0xF0 == 0xE0 { Some(3) } else if len < 4 || slice[3] & 0xC0 != 0x80 { None } else if slice[0] & 0xF8 == 0xF0 { Some(4) } else if len < 5 || slice[4] & 0xC0 != 0x80 { None } else if slice[0] == 0xF8 && slice[1] & 0xF0 == 0x80 { Some(5) } else { None } } /// Same as the `STRING_CHAR_ADVANCE` macro. #[inline] pub fn multibyte_char_at(slice: &[c_uchar]) -> (Codepoint, usize) { let head = slice[0] as Codepoint; if head & 0x80 == 0 { (head, 1) } else if head & 0x20 == 0 { let cp = ((head & 0x1F) << 6) | (slice[1] as Codepoint & 0x3F); if head < 0xC2 { (cp | 0x3F_FF80, 2) } else { (cp, 2) } } else if head & 0x10 == 0 { ( ((head & 0x0F) << 12) | ((slice[1] as Codepoint & 0x3F) << 6) | (slice[2] as Codepoint & 0x3F), 3, ) } else if head & 0x08 == 0 { ( ((head & 0x07) << 18) | ((slice[1] as Codepoint & 0x3F) << 12) | ((slice[2] as Codepoint & 0x3F) << 6) | (slice[3] as Codepoint & 0x3F), 4, ) } else { // the relevant bytes of "head" are always zero ( ((slice[1] as Codepoint & 0x3F) << 18) | ((slice[2] as Codepoint & 0x3F) << 12) | ((slice[3] as Codepoint & 0x3F) << 6) | (slice[4] as Codepoint & 0x3F), 5, ) } } /// Same as `BYTES_BY_CHAR_HEAD` macro in C. fn multibyte_length_by_head(byte: c_uchar) -> usize { if byte & 0x80 == 0 { 1 } else if byte & 0x20 == 0 { 2 } else if byte & 0x10 == 0 { 3 } else if byte & 0x08 == 0 { 4 } else { 5 } } /// Return the number of characters in the NBYTES bytes at PTR. /// This works by looking at the contents and checking for multibyte /// sequences while assuming that there's no invalid sequence. It /// ignores enable-multibyte-characters. 
#[no_mangle] pub fn multibyte_chars_in_text(ptr: *const c_uchar, nbytes: ptrdiff_t) -> ptrdiff_t { let slice = unsafe { slice::from_raw_parts(ptr, nbytes as usize) }; let len = slice.len(); let mut idx = 0; let mut chars = 0; // TODO: make this an iterator? while idx < len { idx += multibyte_length(&slice[idx..], true).unwrap_or_else(|| unsafe { emacs_abort() }); chars += 1; } chars as ptrdiff_t } /// Parse unibyte text at STR of LEN bytes as a multibyte text, count /// characters and bytes in it, and store them in *NCHARS and *NBYTES /// respectively. On counting bytes, pay attention to that 8-bit /// characters not constructing a valid multibyte sequence are /// represented by 2-byte in a multibyte text. #[no_mangle] pub fn parse_str_as_multibyte( ptr: *const c_uchar, len: ptrdiff_t, nchars: *mut ptrdiff_t, nbytes: *mut ptrdiff_t, ) { let slice = unsafe { slice::from_raw_parts(ptr, len as usize) }; let len = slice.len(); let mut chars = 0; let mut bytes = 0; let mut idx = 0; // XXX: in the original, there is an "unchecked" version of multibyte_length // called while the remaining length is >= MAX_MULTIBYTE_LENGTH. while idx < len { chars += 1; match multibyte_length(&slice[idx..], false) { None => { // This is either an invalid multibyte sequence, or // one that would encode a raw 8-bit byte, which we // only use internally when the string is *already* // multibyte. idx += 1; bytes += 2; } Some(n) => { idx += n; bytes += n as ptrdiff_t; } } } unsafe { *nchars = chars; *nbytes = bytes; } } /// Arrange unibyte text at STR of NBYTES bytes as a multibyte text. /// It actually converts only such 8-bit characters that don't construct /// a multibyte sequence to multibyte forms of Latin-1 characters. If /// NCHARS is nonzero, set *NCHARS to the number of characters in the /// text. It is assured that we can use LEN bytes at STR as a work /// area and that is enough. Return the number of bytes of the /// resulting text. 
#[no_mangle] pub fn str_as_multibyte( ptr: *mut c_uchar, len: ptrdiff_t, mut nbytes: ptrdiff_t, nchars: *mut ptrdiff_t, ) -> ptrdiff_t { // slice covers the whole work area to be able to write back let slice = unsafe { slice::from_raw_parts_mut(ptr, len as usize) }; // first, search ASCII-only prefix that we can skip processing let mut start = None; let mut chars = 0; let mut idx = 0; while idx < nbytes as usize { match multibyte_length(&slice[idx..], false) { None => { start = Some(idx); break; } Some(n) => { idx += n; chars += 1; } } } if let Some(start) = start { // copy the rest to the end of the work area, which is guaranteed to be // large enough, so we can read from there while writing the output let offset = (len - nbytes) as usize; let slice = &mut slice[start..]; unsafe { ptr::copy( slice.as_mut_ptr(), slice[offset..].as_mut_ptr(), nbytes as usize - start, ); } let mut to = 0; let mut from = offset; while from < slice.len() { chars += 1; match multibyte_length(&slice[from..], false) { Some(n) => for _ in 0..n { slice[to] = slice[from]; from += 1; to += 1; }, None => { let byte = slice[from]; to += write_codepoint(&mut slice[to..], raw_byte_codepoint(byte)); from += 1; } } } nbytes = (start + to) as ptrdiff_t; } if !nchars.is_null() { unsafe { *nchars = chars; } } nbytes } /// Arrange multibyte text at STR of LEN bytes as a unibyte text. It /// actually converts characters in the range 0x80..0xFF to unibyte. 
#[no_mangle] pub fn str_as_unibyte(ptr: *mut c_uchar, bytes: ptrdiff_t) -> ptrdiff_t { let slice = unsafe { slice::from_raw_parts_mut(ptr, bytes as usize) }; let mut from = 0; while from < bytes as usize { let byte = slice[from]; match multibyte_length_by_head(byte) { 2 if byte & 0xFE == 0xC0 => break, n => from += n, } } let mut to = from; while from < bytes as usize { let byte = slice[from]; match multibyte_length_by_head(byte) { 2 if byte & 0xFE == 0xC0 => { let newbyte = 0x80 | ((byte & 1) << 6) | (slice[from + 1] & 0x3F); slice[to] = newbyte; from += 2; to += 1; } n => for _ in 0..n { slice[to] = slice[from]; from += 1; to += 1; }, } } to as ptrdiff_t } /// Return a character whose multibyte form is at P. If LEN is not /// NULL, it must be a pointer to integer. In that case, set *LEN to /// the byte length of the multibyte form. If ADVANCED is not NULL, it /// must be a pointer to unsigned char. In that case, set *ADVANCED to /// the ending address (i.e., the starting address of the next /// character) of the multibyte form. #[no_mangle] pub fn string_char(ptr: *const c_uchar, advanced: *mut *const c_uchar, len: *mut c_int) -> c_int { let slice = unsafe { slice::from_raw_parts(ptr, MAX_MULTIBYTE_LENGTH) }; let (cp, cplen) = multibyte_char_at(slice); if !len.is_null() { unsafe { *len = cplen as c_int; } } if !advanced.is_null() { unsafe { *advanced = ptr.offset(cplen as isize); } } cp as c_int } /// Convert eight-bit chars in SRC (in multibyte form) to the /// corresponding byte and store in DST. CHARS is the number of /// characters in SRC. The value is the number of bytes stored in DST. /// Usually, the value is the same as CHARS, but is less than it if SRC /// contains a non-ASCII, non-eight-bit character. 
#[no_mangle] pub fn str_to_unibyte(src: *const c_uchar, dst: *mut c_uchar, chars: ptrdiff_t) -> ptrdiff_t { let mut srcslice = unsafe { slice::from_raw_parts(src, chars as usize) }; let dstslice = unsafe { slice::from_raw_parts_mut(dst, chars as usize) }; for i in 0..chars { let (cp, cplen) = multibyte_char_at(srcslice); srcslice = &srcslice[cplen..]; dstslice[i as usize] = if cp > MAX_5_BYTE_CHAR { raw_byte_from_codepoint(cp) } else if cp >= 0x80 { return i; } else { cp as c_uchar }; } chars }<|fim▁end|>
} /// Store multibyte form of character CP at TO. If CP has modifier bits,
<|file_name|>weapons_data.py<|end_file_name|><|fim▁begin|># This file is generated from pydcs_export.lua class Weapons: AB_250_2___144_x_SD_2__250kg_CBU_with_HE_submunitions = {"clsid": "{AB_250_2_SD_2}", "name": "AB 250-2 - 144 x SD-2, 250kg CBU with HE submunitions", "weight": 280} AB_250_2___17_x_SD_10A__250kg_CBU_with_10kg_Frag_HE_submunitions = {"clsid": "{AB_250_2_SD_10A}", "name": "AB 250-2 - 17 x SD-10A, 250kg CBU with 10kg Frag/HE submunitions", "weight": 220} AB_500_1___34_x_SD_10A__500kg_CBU_with_10kg_Frag_HE_submunitions = {"clsid": "{AB_500_1_SD_10A}", "name": "AB 500-1 - 34 x SD-10A, 500kg CBU with 10kg Frag/HE submunitions", "weight": 470} ADEN_GUNPOD = {"clsid": "{ADEN_GUNPOD}", "name": "ADEN GUNPOD", "weight": 87} ADM_141A = {"clsid": "{BRU42_ADM141}", "name": "ADM_141A", "weight": 308} ADM_141A_ = {"clsid": "{BRU3242_ADM141}", "name": "ADM_141A", "weight": 365.38} ADM_141A_TALD = {"clsid": "{ADM_141A}", "name": "ADM-141A TALD", "weight": 180} ADM_141B_TALD = {"clsid": "{ADM_141B}", "name": "ADM-141B TALD", "weight": 180} AERO_1D_300_Gallons_Fuel_Tank_ = {"clsid": "{AV8BNA_AERO1D}", "name": "AERO 1D 300 Gallons Fuel Tank ", "weight": 1002.439} AERO_1D_300_Gallons_Fuel_Tank__Empty_ = {"clsid": "{AV8BNA_AERO1D_EMPTY}", "name": "AERO 1D 300 Gallons Fuel Tank (Empty)", "weight": 93.89362} AGM114x2_OH_58 = {"clsid": "AGM114x2_OH_58", "name": "AGM-114K * 2", "weight": 250} AGM_114K = {"clsid": "{ee368869-c35a-486a-afe7-284beb7c5d52}", "name": "AGM-114K", "weight": 65} AGM_114K___4 = {"clsid": "{88D18A5E-99C8-4B04-B40B-1C02F2018B6E}", "name": "AGM-114K * 4", "weight": 250} AGM_119B_Penguin_ASM = {"clsid": "{7B8DCEB4-820B-4015-9B48-1028A4195692}", "name": "AGM-119B Penguin ASM", "weight": 300} AGM_122_Sidearm = {"clsid": "{AGM_122_SIDEARM}", "name": "AGM-122 Sidearm", "weight": 92} AGM_122_Sidearm_ = {"clsid": "{LAU_7_AGM_122_SIDEARM}", "name": "AGM-122 Sidearm", "weight": 107} AGM_122_Sidearm___light_ARM = {"clsid": "{AGM_122}", "name": "AGM-122 Sidearm - 
light ARM", "weight": 88} AGM_154A___JSOW_CEB__CBU_type_ = {"clsid": "{AGM-154A}", "name": "AGM-154A - JSOW CEB (CBU-type)", "weight": 485} AGM_154B___JSOW_Anti_Armour = {"clsid": "{AGM-154B}", "name": "AGM-154B - JSOW Anti-Armour", "weight": 485} AGM_154C___JSOW_Unitary_BROACH = {"clsid": "{9BCC2A2B-5708-4860-B1F1-053A18442067}", "name": "AGM-154C - JSOW Unitary BROACH", "weight": 484} AGM_45A_Shrike_ARM = {"clsid": "{AGM_45A}", "name": "AGM-45A Shrike ARM", "weight": 177} AGM_45B_Shrike_ARM__Imp_ = {"clsid": "{3E6B632D-65EB-44D2-9501-1C2D04515404}", "name": "AGM-45B Shrike ARM (Imp)", "weight": 177} AGM_62_Walleye_II___Guided_Weapon_Mk_5__TV_Guided_ = {"clsid": "{C40A1E3A-DD05-40D9-85A4-217729E37FAE}", "name": "AGM-62 Walleye II - Guided Weapon Mk 5 (TV Guided)", "weight": 1061} AGM_65D___Maverick_D__IIR_ASM_ = {"clsid": "{444BA8AE-82A7-4345-842E-76154EFCCA47}", "name": "AGM-65D - Maverick D (IIR ASM)", "weight": 218} AGM_65E___Maverick_E__Laser_ASM___Lg_Whd_ = {"clsid": "{F16A4DE0-116C-4A71-97F0-2CF85B0313EF}", "name": "AGM-65E - Maverick E (Laser ASM - Lg Whd)", "weight": 286} AGM_65K___Maverick_K__CCD_Imp_ASM_ = {"clsid": "{69DC8AE7-8F77-427B-B8AA-B19D3F478B65}", "name": "AGM-65K - Maverick K (CCD Imp ASM)", "weight": 360} AGM_84 = {"clsid": "AGM_84", "name": "AGM-84 HARPOON", "weight": None} AGM_84A_Harpoon_ASM = {"clsid": "{8B7CADF9-4954-46B3-8CFB-93F2F5B90B03}", "name": "AGM-84A Harpoon ASM", "weight": 661.5} AGM_84D_Harpoon_AShM = {"clsid": "{AGM_84D}", "name": "AGM-84D Harpoon AShM", "weight": 540} AGM_84E_Harpoon_SLAM__Stand_Off_Land_Attack_Missile_ = {"clsid": "{AF42E6DF-9A60-46D8-A9A0-1708B241AADB}", "name": "AGM-84E Harpoon/SLAM (Stand-Off Land-Attack Missile)", "weight": 628} AGM_84E_Harpoon_SLAM__Stand_Off_Land_Attack_Missile__ = {"clsid": "{AGM_84E}", "name": "AGM-84E Harpoon/SLAM (Stand-Off Land-Attack Missile)", "weight": 628} AGM_84H_SLAM_ER__Expanded_Response_ = {"clsid": "{AGM_84H}", "name": "AGM-84H SLAM-ER (Expanded Response)", "weight": 
675} AGM_86C_ALCM = {"clsid": "{769A15DF-6AFB-439F-9B24-5B7A45C59D16}", "name": "AGM-86C ALCM", "weight": 1950} AGM_88C_HARM___High_Speed_Anti_Radiation_Missile = {"clsid": "{B06DD79A-F21E-4EB9-BD9D-AB3844618C9C}", "name": "AGM-88C HARM - High Speed Anti-Radiation Missile", "weight": 361} AGM_88C_HARM___High_Speed_Anti_Radiation_Missile_ = {"clsid": "{B06DD79A-F21E-4EB9-BD9D-AB3844618C93}", "name": "AGM-88C HARM - High Speed Anti-Radiation Missile", "weight": 406.4} AIM_120B_AMRAAM___Active_Rdr_AAM = {"clsid": "{C8E06185-7CD6-4C90-959F-044679E90751}", "name": "AIM-120B AMRAAM - Active Rdr AAM", "weight": 156} AIM_120C_5_AMRAAM___Active_Rdr_AAM = {"clsid": "{40EF17B7-F508-45de-8566-6FFECC0C1AB8}", "name": "AIM-120C-5 AMRAAM - Active Rdr AAM", "weight": 161.5} AIM_54A_Mk47 = {"clsid": "{AIM_54A_Mk47}", "name": "AIM-54A-Mk47", "weight": 444} AIM_54A_Mk47_ = {"clsid": "{SHOULDER AIM_54A_Mk47 L}", "name": "AIM-54A-Mk47", "weight": 489.36} AIM_54A_Mk47__ = {"clsid": "{SHOULDER AIM_54A_Mk47 R}", "name": "AIM-54A-Mk47", "weight": 489.36} AIM_54A_Mk60 = {"clsid": "{AIM_54A_Mk60}", "name": "AIM-54A-Mk60", "weight": 471.7} AIM_54A_Mk60_ = {"clsid": "{SHOULDER AIM_54A_Mk60 L}", "name": "AIM-54A-Mk60", "weight": 517.06} AIM_54A_Mk60__ = {"clsid": "{SHOULDER AIM_54A_Mk60 R}", "name": "AIM-54A-Mk60", "weight": 517.06} AIM_54C_Mk47 = {"clsid": "{AIM_54C_Mk47}", "name": "AIM-54C-Mk47", "weight": 465.6} AIM_54C_Mk47_ = {"clsid": "{SHOULDER AIM_54C_Mk47 L}", "name": "AIM-54C-Mk47", "weight": 510.96} AIM_54C_Mk47_Phoenix_IN__Semi_Active_Radar = {"clsid": "{7575BA0B-7294-4844-857B-031A144B2595}", "name": "AIM-54C-Mk47 Phoenix IN & Semi-Active Radar", "weight": 463} AIM_54C_Mk47__ = {"clsid": "{SHOULDER AIM_54C_Mk47 R}", "name": "AIM-54C-Mk47", "weight": 510.96} AIM_7E_Sparrow_Semi_Active_Radar = {"clsid": "{AIM-7E}", "name": "AIM-7E Sparrow Semi-Active Radar", "weight": 230} AIM_7F = {"clsid": "{SHOULDER AIM-7F}", "name": "AIM-7F", "weight": 284.4} AIM_7F_ = {"clsid": "{BELLY AIM-7F}", 
"name": "AIM-7F", "weight": 230} AIM_7F_Sparrow_Semi_Active_Radar = {"clsid": "{AIM-7F}", "name": "AIM-7F Sparrow Semi-Active Radar", "weight": 231} AIM_7M = {"clsid": "{SHOULDER AIM-7M}", "name": "AIM-7M", "weight": 284.4} AIM_7MH = {"clsid": "{SHOULDER AIM-7MH}", "name": "AIM-7MH", "weight": 284.4} AIM_7MH_ = {"clsid": "{BELLY AIM-7MH}", "name": "AIM-7MH", "weight": 230} AIM_7MH_Sparrow_Semi_Active_Radar = {"clsid": "{AIM-7H}", "name": "AIM-7MH Sparrow Semi-Active Radar", "weight": 231} AIM_7M_ = {"clsid": "{BELLY AIM-7M}", "name": "AIM-7M", "weight": 230} AIM_7M_Sparrow_Semi_Active_Radar = {"clsid": "{8D399DDA-FF81-4F14-904D-099B34FE7918}", "name": "AIM-7M Sparrow Semi-Active Radar", "weight": 231.1} AIM_9B_Sidewinder_IR_AAM = {"clsid": "{AIM-9B}", "name": "AIM-9B Sidewinder IR AAM", "weight": 74.39} AIM_9L_Sidewinder_IR_AAM = {"clsid": "{AIM-9L}", "name": "AIM-9L Sidewinder IR AAM", "weight": 85.73} AIM_9M_Sidewinder_IR_AAM = {"clsid": "{6CEB49FC-DED8-4DED-B053-E1F033FF72D3}", "name": "AIM-9M Sidewinder IR AAM", "weight": 85.73} AIM_9P5_Sidewinder_IR_AAM = {"clsid": "{AIM-9P5}", "name": "AIM-9P5 Sidewinder IR AAM", "weight": 85.5} AIM_9P_Sidewinder_IR_AAM = {"clsid": "{9BFD8C90-F7AE-4e90-833B-BFD0CED0E536}", "name": "AIM-9P Sidewinder IR AAM", "weight": 86.18} AIM_9X_Sidewinder_IR_AAM = {"clsid": "{5CE2FF2A-645A-4197-B48D-8720AC69394F}", "name": "AIM-9X Sidewinder IR AAM", "weight": 84.46} AJS_External_tank_1013kg_fuel = {"clsid": "{VIGGEN_X-TANK}", "name": "AJS External-tank 1013kg fuel", "weight": 1208} AKAN_M_55_Gunpod__150_rnds_MINGR55_HE = {"clsid": "{AKAN}", "name": "AKAN M/55 Gunpod, 150 rnds MINGR55-HE", "weight": 276} ALARM = {"clsid": "{E6747967-B1F0-4C77-977B-AB2E6EB0C102}", "name": "ALARM", "weight": 268} ALQ_131___ECM_Pod = {"clsid": "{6D21ECEA-F85B-4E8D-9D51-31DC9B8AA4EF}", "name": "ALQ-131 - ECM Pod", "weight": 305} ALQ_184 = {"clsid": "ALQ_184", "name": "ALQ-184 - ECM Pod", "weight": 215} ALQ_184_Long = {"clsid": "ALQ_184_Long", "name": "ALQ-184 
Long - ECM Pod", "weight": 286} AN_AAQ_28_LITENING___Targeting_Pod = {"clsid": "{A111396E-D3E8-4b9c-8AC9-2432489304D5}", "name": "AN/AAQ-28 LITENING - Targeting Pod", "weight": 208} AN_AAQ_28_LITENING___Targeting_Pod_ = {"clsid": "{AAQ-28_LEFT}", "name": "AN/AAQ-28 LITENING - Targeting Pod", "weight": 208} AN_ALQ_164_DECM_Pod = {"clsid": "{ALQ_164_RF_Jammer}", "name": "AN/ALQ-164 DECM Pod", "weight": 143.789} AN_ASQ_173_Laser_Spot_Tracker_Strike_CAMera__LST_SCAM_ = {"clsid": "{1C2B16EB-8EB0-43de-8788-8EBB2D70B8BC}", "name": "AN/ASQ-173 Laser Spot Tracker/Strike CAMera (LST/SCAM)", "weight": 250} AN_ASQ_213_HTS___HARM_Targeting_System = {"clsid": "{AN_ASQ_213}", "name": "AN/ASQ-213 HTS - HARM Targeting System", "weight": 57.2} AN_ASQ_228_ATFLIR___Targeting_Pod = {"clsid": "{AN_ASQ_228}", "name": "AN/ASQ-228 ATFLIR - Targeting Pod", "weight": 195} AN_ASQ_T50_TCTS_Pod___ACMI_Pod = {"clsid": "{AIS_ASQ_T50}", "name": "AN/ASQ-T50 TCTS Pod - ACMI Pod", "weight": 62.6} AN_M30A1___100lb_GP_Bomb_LD = {"clsid": "{AN_M30A1}", "name": "AN-M30A1 - 100lb GP Bomb LD", "weight": 45.8} AN_M3___2_Browning_Machine_Guns_12_7mm = {"clsid": "{AN-M3}", "name": "AN-M3 - 2*Browning Machine Guns 12.7mm", "weight": 218} AN_M57___250lb_GP_Bomb_LD = {"clsid": "{AN_M57}", "name": "AN-M57 - 250lb GP Bomb LD", "weight": 113} AN_M64___500lb_GP_Bomb_LD = {"clsid": "{AN-M64}", "name": "AN-M64 - 500lb GP Bomb LD", "weight": 227} AN_M64___500lb_GP_Bomb_LD_ = {"clsid": "{F86ANM64}", "name": "AN-M64 - 500lb GP Bomb LD", "weight": 227} AN_M65___1000lb_GP_Bomb_LD = {"clsid": "{AN_M65}", "name": "AN-M65 - 1000lb GP Bomb LD", "weight": 475} AN_M66___2000lb_GP_Bomb_LD = {"clsid": "{AN_M66}", "name": "AN-M66 - 2000lb GP Bomb LD", "weight": 977} APU_60_1M_with_R_60M__AA_8_Aphid____Infra_Red = {"clsid": "{APU-60-1_R_60M}", "name": "APU-60-1M with R-60M (AA-8 Aphid) - Infra Red", "weight": 76} APU_60_2M_with_2_x_R_60M__AA_8_Aphid____Infra_Red = {"clsid": "{B0DBC591-0F52-4F7D-AD7B-51E67725FB81}", "name": 
"APU-60-2M with 2 x R-60M (AA-8 Aphid) - Infra Red", "weight": 148} APU_60_2M_with_2_x_R_60M__AA_8_Aphid____Infra_Red_ = {"clsid": "{275A2855-4A79-4B2D-B082-91EA2ADF4691}", "name": "APU-60-2M with 2 x R-60M (AA-8 Aphid) - Infra Red", "weight": 148} APU_68___S_24B = {"clsid": "{APU_68_S-24}", "name": "APU-68 - S-24B", "weight": 273.5} APU_6___6_9A4172_Vikhr = {"clsid": "{A6FD14D3-6D30-4C85-88A7-8D17BEE120E2}", "name": "APU-6 - 6 9A4172 Vikhr", "weight": 330} APU_8___8_9A4172_Vikhr = {"clsid": "{F789E86A-EE2E-4E6B-B81E-D5E5F903B6ED}", "name": "APU-8 - 8 9A4172 Vikhr", "weight": 404} ARAK_M_70B_AP_6x_135mm_UnGd_Rkts__Pshu70_HEAT = {"clsid": "{ARAKM70BAP}", "name": "ARAK M/70B AP 6x 135mm UnGd Rkts, Pshu70 HEAT", "weight": 372.2} ARAK_M_70B_HE_6x_135mm_UnGd_Rkts__Shu70_HE_FRAG = {"clsid": "{ARAKM70BHE}", "name": "ARAK M/70B HE 6x 135mm UnGd Rkts, Shu70 HE/FRAG", "weight": 372.2} ASO_2___countermeasures_pod = {"clsid": "{ASO-2}", "name": "ASO-2 - countermeasures pod", "weight": 22} AUF2_BLG_66_AC_x_2 = {"clsid": "{M2KC_RAFAUT_BLG66}", "name": "AUF2 BLG-66-AC x 2", "weight": 685} AUF2_GBU_12_x_2 = {"clsid": "{M2KC_RAFAUT_GBU12}", "name": "AUF2 GBU-12 x 2", "weight": 621} AUF2_MK_82_Air_x_2 = {"clsid": "{M2KC_RAFAUT_MK82A}", "name": "AUF2 MK-82 Air x 2", "weight": 525} AUF2_MK_82_Snakeyes_x_2 = {"clsid": "{M2KC_RAFAUT_MK82S}", "name": "AUF2 MK-82 Snakeyes x 2", "weight": 525} AUF2_MK_82_x_2 = {"clsid": "{M2KC_RAFAUT_MK82}", "name": "AUF2 MK-82 x 2", "weight": 525} AUF2_ROCKEYE_x_2 = {"clsid": "{M2KC_RAFAUT_ROCKEYE}", "name": "AUF2 ROCKEYE x 2", "weight": 525} AWW_13_DATALINK_POD = {"clsid": "{AWW-13}", "name": "AWW-13 DATALINK POD", "weight": 200} A_A_Training = {"clsid": "{M2KC_AAF}", "name": "A/A Training", "weight": 0} A_G_Training = {"clsid": "{M2KC_AGF}", "name": "A/G Training", "weight": 0} BAP_100_Anti_Runway = {"clsid": "{BAP_100}", "name": "BAP-100 Anti-Runway", "weight": None} BAP_100_x_12 = {"clsid": "{M2KC_BAP100_12_RACK}", "name": "BAP-100 x 12", "weight": 
465} BAP_100_x_18 = {"clsid": "{M2KC_BAP100_18_RACK}", "name": "BAP-100 x 18", "weight": 660} BAP_100_x_6 = {"clsid": "{M2KC_BAP100_6_RACK}", "name": "BAP-100 x 6", "weight": 270} BDU_33___25lb_Practice_Bomb_LD = {"clsid": "{BDU-33}", "name": "BDU-33 - 25lb Practice Bomb LD", "weight": 11} BDU_45 = {"clsid": "{BDU_45}", "name": "BDU-45", "weight": 232} BDU_45B = {"clsid": "{BDU_45B}", "name": "BDU-45B", "weight": 232} BDU_45B_ = {"clsid": "{BRU-32 BDU-45B}", "name": "BDU-45B", "weight": 298.38} BDU_45_ = {"clsid": "{BRU-32 BDU-45}", "name": "BDU-45", "weight": 298.38} BDU_45_LG = {"clsid": "{BDU_45LG}", "name": "BDU-45 LG", "weight": 277} BDU_50HD___500lb_Inert_Practice_Bomb_HD = {"clsid": "{BDU-50HD}", "name": "BDU-50HD - 500lb Inert Practice Bomb HD", "weight": 232} BDU_50LD___500lb_Inert_Practice_Bomb_LD = {"clsid": "{BDU-50LD}", "name": "BDU-50LD - 500lb Inert Practice Bomb LD", "weight": 232} BDU_50LGB___500lb_Laser_Guided_Inert_Practice_Bomb_LD = {"clsid": "{BDU-50LGB}", "name": "BDU-50LGB - 500lb Laser Guided Inert Practice Bomb LD", "weight": 280} BETAB_500M___479_kg__bomb__penetrating = {"clsid": "{BETAB-500M}", "name": "BETAB-500M - 479 kg, bomb, penetrating", "weight": 479} BETAB_500S___425_kg__bomb__penetrating = {"clsid": "{BETAB-500S}", "name": "BETAB-500S - 425 kg, bomb, penetrating", "weight": 425} Beer_Bomb = {"clsid": "{BEER_BOMB}", "name": "\"Beer Bomb\"", "weight": 100} Beer_Bomb__D__on_LH_Spitfire_Wing_Carrier = {"clsid": "Beer_Bomb_(D)_on_LH_Spitfire_Wing_Carrier", "name": "\"Beer Bomb\" (Bitter Ale)", "weight": 104} Beer_Bomb__D__on_RH_Spitfire_Wing_Carrier = {"clsid": "Beer_Bomb_(D)_on_RH_Spitfire_Wing_Carrier", "name": "\"Beer Bomb\" (Bitter Ale)", "weight": 104} Beer_Bomb__L__on_LH_Spitfire_Wing_Carrier = {"clsid": "Beer_Bomb_(L)_on_LH_Spitfire_Wing_Carrier", "name": "\"Beer Bomb\" (Pale Ale)", "weight": 104} Beer_Bomb__L__on_RH_Spitfire_Wing_Carrier = {"clsid": "Beer_Bomb_(L)_on_RH_Spitfire_Wing_Carrier", "name": "\"Beer Bomb\" (Pale 
Ale)", "weight": 104} Belouga = {"clsid": "{BLG66_BELOUGA}", "name": "Belouga", "weight": 290} BetAB_500ShP___500kg_Concrete_Piercing_HD_w_booster_Bomb = {"clsid": "{BD289E34-DF84-4C5E-9220-4B14C346E79D}", "name": "BetAB-500ShP - 500kg Concrete Piercing HD w booster Bomb", "weight": 424} BetAB_500___500kg_Concrete_Piercing_Bomb_LD = {"clsid": "{35B698AC-9FEF-4EC4-AD29-484A0085F62B}", "name": "BetAB-500 - 500kg Concrete Piercing Bomb LD", "weight": 430} BF109K_4_FUEL_TANK = {"clsid": "BF109K_4_FUEL_TANK", "name": "300 liter Fuel Tank", "weight": 266} BGM_109 = {"clsid": "BGM_109", "name": "BGM-109B Tomahawk", "weight": None} BGM_109B = {"clsid": "BGM-109B", "name": "BGM-109B", "weight": None} BIN_200 = {"clsid": "BIN_200", "name": "BIN-200 - 200kg Napalm Incendiary Bomb", "weight": 200} BKF___12_x_AO_2_5RT = {"clsid": "{BKF_AO2_5RT}", "name": "BKF - 12 x AO-2.5RT", "weight": 76} BKF___12_x_PTAB_2_5KO = {"clsid": "{BKF_PTAB2_5KO}", "name": "BKF - 12 x PTAB-2.5KO", "weight": 63.2} BK_90_MJ12__12x_MJ2_HEAT___36x_MJ1_HE_FRAG_Bomblets_ = {"clsid": "{BK90}", "name": "BK-90 MJ1+2 (12x MJ2 HEAT / 36x MJ1 HE-FRAG Bomblets)", "weight": 605} BK_90_MJ1__72_x_MJ1_HE_FRAG_Bomblets_ = {"clsid": "{BK90MJ1}", "name": "BK-90 MJ1 (72 x MJ1 HE-FRAG Bomblets)", "weight": 605} BK_90_MJ2__24_x_MJ2_HEAT_Bomblets_ = {"clsid": "{BK90MJ2}", "name": "BK-90 MJ2 (24 x MJ2 HEAT Bomblets)", "weight": 605} BLG_66_AC_Belouga = {"clsid": "{BLG66_BELOUGA_AC}", "name": "BLG-66-AC Belouga", "weight": 305} BLG_66_Belouga___290kg_CBU__151_Frag_Pen_bomblets = {"clsid": "{BLG66_AC}", "name": "BLG-66 Belouga - 290kg CBU, 151 Frag/Pen bomblets", "weight": 305} BLU_107___440lb_Anti_Runway_Penetrator_Bomb = {"clsid": "{752B9781-F962-11d5-9190-00A0249B6F00}", "name": "BLU-107 - 440lb Anti-Runway Penetrator Bomb", "weight": 185} BL_755_CBU___450kg__147_Frag_Pen_bomblets = {"clsid": "{08164777-5E9C-4B08-B48E-5AA7AFB246E2}", "name": "BL-755 CBU - 450kg, 147 Frag/Pen bomblets", "weight": 264} 
# Single-store BRU-33 rack loadouts and the BOZ-107 dispenser; each record is
# the generated {clsid, name, weight} triple for one pylon option.
BOZ_107___Countermeasure_Dispenser = dict(clsid="{8C3F26A1-FA0F-11d5-9190-00A0249B6F00}", name="BOZ-107 - Countermeasure Dispenser", weight=200)
BRU_33_with_1_x_LAU_10_pod___4_x_127mm_ZUNI__UnGd_Rkts_Mk71__HE_FRAG = dict(clsid="{BRU33_LAU10}", name="BRU-33 with 1 x LAU-10 pod - 4 x 127mm ZUNI, UnGd Rkts Mk71, HE/FRAG", weight=407.6)
BRU_33_with_1_x_LAU_61_pod___19_x_2_75_Hydra__UnGd_Rkts_M151__HE = dict(clsid="{BRU33_LAU61}", name="BRU-33 with 1 x LAU-61 pod - 19 x 2.75\" Hydra, UnGd Rkts M151, HE", weight=364.4)
BRU_33_with_1_x_LAU_61_pod___19_x_2_75_Hydra__UnGd_Rkts_M282__HEDP = dict(clsid="{BRU33_LAU61_M282}", name="BRU-33 with 1 x LAU-61 pod - 19 x 2.75\" Hydra, UnGd Rkts M282, HEDP", weight=400.88)
BRU_33_with_1_x_LAU_68_pod___7_x_2_75_Hydra__UnGd_Rkts_M151__HE = dict(clsid="{BRU33_LAU68}", name="BRU-33 with 1 x LAU-68 pod - 7 x 2.75\" Hydra, UnGd Rkts M151, HE", weight=204.9)
BRU_33_with_1_x_LAU_68_pod___7_x_2_75_Hydra__UnGd_Rkts_M282__HEDP = dict(clsid="{BRU33_LAU68_M282}", name="BRU-33 with 1 x LAU-68 pod - 7 x 2.75\" Hydra, UnGd Rkts M282, HEDP", weight=218.34)
BRU_33_with_1_x_LAU_68_pod___7_x_2_75_Hydra__UnGd_Rkts_Mk5__HEAT = dict(clsid="{BRU33_LAU68_MK5}", name="BRU-33 with 1 x LAU-68 pod - 7 x 2.75\" Hydra, UnGd Rkts Mk5, HEAT", weight=193.1)
BRU_33_with_2_x_BDU_45B___500lb_Practice_Bomb = dict(clsid="{BRU33_2X_BDU-45B}", name="BRU-33 with 2 x BDU-45B - 500lb Practice Bomb", weight=555)
BRU_33_with_2_x_BDU_45_LG_500lb_Practice_Laser_Guided_Bomb = dict(clsid="{BRU33_2X_BDU_45LG}", name="BRU-33 with 2 x BDU-45 LG 500lb Practice Laser Guided Bomb", weight=645)
BRU_33_with_2_x_BDU_45___500lb_Practice_Bomb = dict(clsid="{BRU33_2X_BDU-45}", name="BRU-33 with 2 x BDU-45 - 500lb Practice Bomb", weight=555)
BRU_33_with_2_x_CBU_99___490lbs__247_x_HEAT_Bomblets = dict(clsid="{BRU33_2X_CBU-99}", name="BRU-33 with 2 x CBU-99 - 490lbs, 247 x HEAT Bomblets", weight=535)
# Dual-store BRU-33 loadouts, BRU-41A six-packs and BRU-42/LAU-131 triples;
# generated {clsid, name, weight} records, one per pylon option.
BRU_33_with_2_x_GBU_12___500lb_Laser_Guided_Bomb = dict(clsid="{BRU33_2X_GBU-12}", name="BRU-33 with 2 x GBU-12 - 500lb Laser Guided Bomb", weight=645)
BRU_33_with_2_x_GBU_16___1000lb_Laser_Guided_Bomb = dict(clsid="{BRU33_2X_GBU-16}", name="BRU-33 with 2 x GBU-16 - 1000lb Laser Guided Bomb", weight=1117)
BRU_33_with_2_x_LAU_10_pod___4_x_127mm_ZUNI__UnGd_Rkts_Mk71__HE_FRAG = dict(clsid="{BRU33_2*LAU10}", name="BRU-33 with 2 x LAU-10 pod - 4 x 127mm ZUNI, UnGd Rkts Mk71, HE/FRAG", weight=724.2)
BRU_33_with_2_x_LAU_61_pod___19_x_2_75_Hydra__UnGd_Rkts_M151__HE = dict(clsid="{BRU33_2*LAU61}", name="BRU-33 with 2 x LAU-61 pod - 19 x 2.75\" Hydra, UnGd Rkts M151, HE", weight=637.8)
BRU_33_with_2_x_LAU_61_pod___19_x_2_75_Hydra__UnGd_Rkts_M282__HEDP = dict(clsid="{BRU33_2*LAU61_M282}", name="BRU-33 with 2 x LAU-61 pod - 19 x 2.75\" Hydra, UnGd Rkts M282, HEDP", weight=710.76)
BRU_33_with_2_x_LAU_68_pod___7_x_2_75_Hydra__UnGd_Rkts_M151__HE = dict(clsid="{BRU33_2*LAU68}", name="BRU-33 with 2 x LAU-68 pod - 7 x 2.75\" Hydra, UnGd Rkts M151, HE", weight=318.8)
BRU_33_with_2_x_LAU_68_pod___7_x_2_75_Hydra__UnGd_Rkts_M282__HEDP = dict(clsid="{BRU33_2*LAU68_M282}", name="BRU-33 with 2 x LAU-68 pod - 7 x 2.75\" Hydra, UnGd Rkts M282, HEDP", weight=345.68)
BRU_33_with_2_x_LAU_68_pod___7_x_2_75_Hydra__UnGd_Rkts_Mk5__HEAT = dict(clsid="{BRU33_2*LAU68_MK5}", name="BRU-33 with 2 x LAU-68 pod - 7 x 2.75\" Hydra, UnGd Rkts Mk5, HEAT", weight=295.2)
BRU_33_with_2_x_Mk_20_Rockeye___490lbs_CBU__247_x_HEAT_Bomblets = dict(clsid="{BRU33_2X_ROCKEYE}", name="BRU-33 with 2 x Mk-20 Rockeye - 490lbs CBU, 247 x HEAT Bomblets", weight=535)
BRU_33_with_2_x_Mk_82Y___500lb_GP_Chute_Retarded_HD = dict(clsid="{BRU33_2X_MK-82Y}", name="BRU-33 with 2 x Mk-82Y - 500lb GP Chute Retarded HD", weight=555)
BRU_33_with_2_x_Mk_82_Snakeye___500lb_GP_Bomb_HD = dict(clsid="{BRU33_2X_MK-82_Snakeye}", name="BRU-33 with 2 x Mk-82 Snakeye - 500lb GP Bomb HD", weight=590)
BRU_33_with_2_x_Mk_82___500lb_GP_Bomb_LD = dict(clsid="{BRU33_2X_MK-82}", name="BRU-33 with 2 x Mk-82 - 500lb GP Bomb LD", weight=547)
BRU_33_with_2_x_Mk_83___1000lb_GP_Bomb_LD = dict(clsid="{BRU33_2X_MK-83}", name="BRU-33 with 2 x Mk-83 - 1000lb GP Bomb LD", weight=999)
BRU_41A_with_6_x_BDU_33___25lb_Practice_Bomb_LD = dict(clsid="{BRU41_6X_BDU-33}", name="BRU-41A with 6 x BDU-33 - 25lb Practice Bomb LD", weight=195.713)
BRU_41A_with_6_x_Mk_82___500lb_GP_Bomb_LD = dict(clsid="{BRU41_6X_MK-82}", name="BRU-41A with 6 x Mk-82 - 500lb GP Bomb LD", weight=1495.913)
BRU_42_3_BDU_33 = dict(clsid="BRU-42_3*BDU-33", name="BRU-42 with 3 x BDU-33 - 25lb Practice Bombs LD", weight=90.15)
BRU_42_3_GBU_12 = dict(clsid="BRU-42_3*GBU-12", name="BRU-42 with 3 x GBU-12 - 500lb Laser Guided Bombs", weight=887.25)
BRU_42_LS = dict(clsid="BRU-42_LS", name="BRU-42 - Triple Ejector Rack (TER)", weight=56.25)
BRU_42_with_2_x_GBU_10___2000lb_Laser_Guided_Bombs = dict(clsid="{62BE78B1-9258-48AE-B882-279534C0D278}", name="BRU-42 with 2 x GBU-10 - 2000lb Laser Guided Bombs", weight=1974.25)
BRU_42_with_2_x_GBU_27___2000lb_Laser_Guided_Penetrator_Bombs = dict(clsid="{EB969276-1922-4ED1-A5CB-18590F45D7FE}", name="BRU-42 with 2 x GBU-27 - 2000lb Laser Guided Penetrator Bombs", weight=2038.25)
BRU_42_with_3_x_GBU_16___1000lb_Laser_Guided_Bombs = dict(clsid="{88D49E04-78DF-4F08-B47E-B81247A9E3C5}", name="BRU-42 with 3 x GBU-16 - 1000lb Laser Guided Bombs", weight=1595.25)
BRU_42_with_3_x_LAU_131_pods___7_x_2_75_Hydra__Laser_Guided_Rkts_M151__HE_APKWS = dict(clsid="{LAU-131x3 - 7 AGR-20A}", name="BRU-42 with 3 x LAU-131 pods - 7 x 2.75\" Hydra, Laser Guided Rkts M151, HE APKWS", weight=454.3)
BRU_42_with_3_x_LAU_131_pods___7_x_2_75_Hydra__Laser_Guided_Rkts_M282__MPP_APKWS = dict(clsid="{LAU-131x3 - 7 AGR-20 M282}", name="BRU-42 with 3 x LAU-131 pods - 7 x 2.75\" Hydra, Laser Guided Rkts M282, MPP APKWS", weight=496.3)
# BRU-42 triple rocket-pod/bomb loadouts, BRU-55/BRU-57 smart racks, BR-series
# and British WWII bombs, plus B-13L/B-8 rocket pods; generated
# {clsid, name, weight} records.
BRU_42_with_3_x_LAU_68_pods___21_x_2_75_Hydra__UnGd_Rkts_M151__HE = dict(clsid="{64329ED9-B14C-4c0b-A923-A3C911DA1527}", name="BRU-42 with 3 x LAU-68 pods - 21 x 2.75\" Hydra, UnGd Rkts M151, HE", weight=397.95)
BRU_42_with_3_x_LAU_68_pods___21_x_2_75_Hydra__UnGd_Rkts_M156__Wht_Phos = dict(clsid="{C2593383-3CA8-4b18-B73D-0E750BCA1C85}", name="BRU-42 with 3 x LAU-68 pods - 21 x 2.75\" Hydra, UnGd Rkts M156, Wht Phos", weight=399.63)
BRU_42_with_3_x_LAU_68_pods___21_x_2_75_Hydra__UnGd_Rkts_M257__Para_Illum = dict(clsid="{E6966004-A525-4f47-AF94-BCFEDF8FDBDA}", name="BRU-42 with 3 x LAU-68 pods - 21 x 2.75\" Hydra, UnGd Rkts M257, Para Illum", weight=412.65)
BRU_42_with_3_x_LAU_68_pods___21_x_2_75_Hydra__UnGd_Rkts_M274__Practice_Smk = dict(clsid="{4C044B08-886B-46c8-9B1F-AB05B3ED9C1D}", name="BRU-42 with 3 x LAU-68 pods - 21 x 2.75\" Hydra, UnGd Rkts M274, Practice Smk", weight=395.85)
BRU_42_with_3_x_LAU_68_pods___21_x_2_75_Hydra__UnGd_Rkts_Mk1__Practice = dict(clsid="{443364AE-D557-488e-9499-45EDB3BA6730}", name="BRU-42 with 3 x LAU-68 pods - 21 x 2.75\" Hydra, UnGd Rkts Mk1, Practice", weight=368.76)
BRU_42_with_3_x_LAU_68_pods___21_x_2_75_Hydra__UnGd_Rkts_Mk5__HEAT = dict(clsid="{9BC82B3D-FE70-4910-B2B7-3E54EFE73262}", name="BRU-42 with 3 x LAU-68 pods - 21 x 2.75\" Hydra, UnGd Rkts Mk5, HEAT", weight=362.46)
BRU_42_with_3_x_LAU_68_pods___21_x_2_75_Hydra__UnGd_Rkts_Mk61__Practice = dict(clsid="{C0FA251E-B645-4ce5-926B-F4BC20822F8B}", name="BRU-42 with 3 x LAU-68 pods - 21 x 2.75\" Hydra, UnGd Rkts Mk61, Practice", weight=368.76)
BRU_42_with_3_x_LAU_68_pods___21_x_2_75_Hydra__UnGd_Rkts_WTU_1_B__Practice = dict(clsid="{A1853B38-2160-4ffe-B7E9-9BF81E6C3D77}", name="BRU-42 with 3 x LAU-68 pods - 21 x 2.75\" Hydra, UnGd Rkts WTU-1/B, Practice", weight=395.85)
BRU_42_with_3_x_LAU_68_pod___7_x_2_75_Hydra__UnGd_Rkts_M282__HEDP = dict(clsid="{BRU_42_3xLAU68_M282}", name="BRU-42 with 3 x LAU-68 pod - 7 x 2.75\" Hydra, UnGd Rkts M282, HEDP", weight=438.27)
BRU_42_with_3_x_Mk_20_Rockeye___490lbs_CBUs__247_x_HEAT_Bomblets = dict(clsid="{B83CB620-5BBE-4BEA-910C-EB605A327EF9}", name="BRU-42 with 3 x Mk-20 Rockeye - 490lbs CBUs, 247 x HEAT Bomblets", weight=722.25)
BRU_42_with_3_x_Mk_81___250lb_GP_Bombs_LD = dict(clsid="{7B34E0BB-E427-4C2A-A61A-8407CE18B54D}", name="BRU-42 with 3 x Mk-81 - 250lb GP Bombs LD", weight=396.45)
BRU_42_with_3_x_Mk_82_AIR_Ballute___500lb_GP_Bombs_HD = dict(clsid="{BRU-42_3*Mk-82AIR}", name="BRU-42 with 3 x Mk-82 AIR Ballute - 500lb GP Bombs HD", weight=782.25)
BRU_42_with_3_x_Mk_82___500lb_GP_Bombs_LD = dict(clsid="{60CC734F-0AFA-4E2E-82B8-93B941AB11CF}", name="BRU-42 with 3 x Mk-82 - 500lb GP Bombs LD", weight=740.25)
BRU_42_with_3_x_SUU_25_x_8_LUU_2___Target_Marker_Flares = dict(clsid="{BRU-42_LS_3*SUU-25_8*LUU-2}", name="BRU-42 with 3 x SUU-25 x 8 LUU-2 - Target Marker Flares", weight=736.65)
BRU_55_with_2_x_AGM_154A___JSOW_CEB__CBU_type_ = dict(clsid="{BRU55_2*AGM-154A}", name="BRU-55 with 2 x AGM-154A - JSOW CEB (CBU-type)", weight=1057.5)
BRU_55_with_2_x_AGM_154C___JSOW_Unitary_BROACH = dict(clsid="{BRU55_2*AGM-154C}", name="BRU-55 with 2 x AGM-154C - JSOW Unitary BROACH", weight=1055.5)
BRU_55_with_2_x_GBU_38___JDAM__500lb_GPS_Guided_Bomb = dict(clsid="{BRU55_2*GBU-38}", name="BRU-55 with 2 x GBU-38 - JDAM, 500lb GPS Guided Bomb", weight=573)
BRU_57_with_2_x_AGM_154A___JSOW_CEB__CBU_type_ = dict(clsid="{BRU57_2*AGM-154A}", name="BRU-57 with 2 x AGM-154A - JSOW CEB (CBU-type)", weight=1082)
BRU_57_with_2_x_AGM_154B___JSOW_Anti_Armour = dict(clsid="{BRU57_2*AGM-154B}", name="BRU-57 with 2 x AGM-154B - JSOW Anti-Armour", weight=1082)
BRU_57_with_2_x_CBU_103___202_x_CEM__CBU_with_WCMD = dict(clsid="{BRU57_2*CBU-103}", name="BRU-57 with 2 x CBU-103 - 202 x CEM, CBU with WCMD", weight=951)
BRU_57_with_2_x_CBU_105___10_x_SFW__CBU_with_WCMD = dict(clsid="{BRU57_2*CBU-105}", name="BRU-57 with 2 x CBU-105 - 10 x SFW, CBU with WCMD", weight=925)
BRU_57_with_2_x_GBU_38___JDAM__500lb_GPS_Guided_Bomb = dict(clsid="{BRU57_2*GBU-38}", name="BRU-57 with 2 x GBU-38 - JDAM, 500lb GPS Guided Bomb", weight=573)
BR_250 = dict(clsid="BR_250", name="BR-250 - 250kg GP Bomb LD", weight=250)
BR_500 = dict(clsid="BR_500", name="BR-500 - 500kg GP Bomb LD", weight=500)
British_GP_250LBS_Bomb_MK4_on_LH_Spitfire_Wing_Carrier = dict(clsid="British_GP_250LBS_Bomb_MK4_on_LH_Spitfire_Wing_Carrier", name="250 lb GP Mk.I", weight=108.326)
British_GP_250LBS_Bomb_MK4_on_RH_Spitfire_Wing_Carrier = dict(clsid="British_GP_250LBS_Bomb_MK4_on_RH_Spitfire_Wing_Carrier", name="250 lb GP Mk.I", weight=108.326)
British_GP_500LBS_Bomb_MK4_on_British_UniversalBC_MK3 = dict(clsid="British_GP_500LBS_Bomb_MK4_on_British_UniversalBC_MK3", name="500 lb GP Mk.I", weight=225.188)
B_13L_pod___5_x_S_13_OF__122mm_UnGd_Rkts__Blast_Frag = dict(clsid="{FC56DF80-9B09-44C5-8976-DCFAFF219062}", name="B-13L pod - 5 x S-13-OF, 122mm UnGd Rkts, Blast/Frag", weight=510)
B_1B_Mk_84_8 = dict(clsid="B-1B_Mk-84*8", name="8 x Mk-84 - 2000lb GP Bombs LD", weight=7152)
B_8M1_pod___20_x_S_8KOM__80mm_UnGd_Rkts__HEAT_AP = dict(clsid="{F72F47E5-C83A-4B85-96ED-D3E46671EE9A}", name="B-8M1 pod - 20 x S-8KOM, 80mm UnGd Rkts, HEAT/AP", weight=363.5)
B_8M1_pod___20_x_S_8TsM__80mm_UnGd_Rkts__Smk = dict(clsid="{3DFB7320-AB0E-11d7-9897-000476191836}", name="B-8M1 pod - 20 x S-8TsM, 80mm UnGd Rkts, Smk", weight=359.5)
B_8M1___20_S_8OFP2 = dict(clsid="B-8M1 - 20 S-8OFP2", name="B-8M1 pod - 20 x S-8OFP2, 80mm UnGd Rkts, HE/Frag/AP", weight=471.5)
B_8V20A_CM = dict(clsid="B_8V20A_CM", name="B-8V20A pod - 20 x S-8TsM, 80mm UnGd Rkts, Smk, OG", weight=345)
B_8V20A_CM_BU = dict(clsid="B_8V20A_CM_BU", name="B-8V20A pod - 20 x S-8TsM, 80mm UnGd Rkts, Smk, BU", weight=345)
B_8V20A_CM_GN = dict(clsid="B_8V20A_CM_GN", name="B-8V20A pod - 20 x S-8TsM, 80mm UnGd Rkts, Smk, GN", weight=345)
# Remaining B-8V20A smoke variants, CBU family, and the PLA (DIS_*) stores up
# to the PL-12 missile; generated {clsid, name, weight} records.
B_8V20A_CM_RD = dict(clsid="B_8V20A_CM_RD", name="B-8V20A pod - 20 x S-8TsM, 80mm UnGd Rkts, Smk, RD", weight=345)
B_8V20A_CM_VT = dict(clsid="B_8V20A_CM_VT", name="B-8V20A pod - 20 x S-8TsM, 80mm UnGd Rkts, Smk, VT", weight=345)
B_8V20A_CM_WH = dict(clsid="B_8V20A_CM_WH", name="B-8V20A pod - 20 x S-8TsM, 80mm UnGd Rkts, Smk, WH", weight=345)
B_8V20A_CM_YE = dict(clsid="B_8V20A_CM_YE", name="B-8V20A pod - 20 x S-8TsM, 80mm UnGd Rkts, Smk, YE", weight=345)
B_8V20A_OFP2 = dict(clsid="B_8V20A_OFP2", name="B-8V20A pod - 20 x S-8OFP2, 80mm UnGd Rkts, HE/Frag/AP", weight=457)
B_8V20A_OM = dict(clsid="B_8V20A_OM", name="B-8V20A pod - 20 x S-8OM, 80mm UnGd Rkts, Illum", weight=365)
B_8V20A_pod___20_x_S_8KOM__80mm_UnGd_Rkts__HEAT_AP = dict(clsid="{6A4B9E69-64FE-439a-9163-3A87FB6A4D81}", name="B-8V20A pod - 20 x S-8KOM, 80mm UnGd Rkts, HEAT/AP", weight=349)
CATM_9M = dict(clsid="CATM-9M", name="Captive AIM-9M for ACM", weight=85.73)
CBLS_200 = dict(clsid="CBLS-200", name="4*BDU-33 - AF/B37K Rack with 4*25lb Practice Bomb LD", weight=84.4)
CBU87_10 = dict(clsid="CBU87*10", name="10 x CBU-87 - 202 x CEM Cluster Bombs", weight=4300)
CBU97_10 = dict(clsid="CBU97*10", name="10 x CBU-97 - 10 x SFW Cluster Bombs", weight=4170)
CBU_103___202_x_CEM__CBU_with_WCMD = dict(clsid="{CBU_103}", name="CBU-103 - 202 x CEM, CBU with WCMD", weight=430)
CBU_105___10_x_SFW__CBU_with_WCMD = dict(clsid="{CBU_105}", name="CBU-105 - 10 x SFW, CBU with WCMD", weight=417)
CBU_52B___220_x_HE_Frag_bomblets = dict(clsid="{CBU-52B}", name="CBU-52B - 220 x HE/Frag bomblets", weight=356)
CBU_87___202_x_CEM_Cluster_Bomb = dict(clsid="{CBU-87}", name="CBU-87 - 202 x CEM Cluster Bomb", weight=430)
CBU_97___10_x_SFW_Cluster_Bomb = dict(clsid="{5335D97A-35A5-4643-9D9B-026C75961E52}", name="CBU-97 - 10 x SFW Cluster Bomb", weight=417)
CBU_99___490lbs__247_x_HEAT_Bomblets = dict(clsid="{CBU_99}", name="CBU-99 - 490lbs, 247 x HEAT Bomblets", weight=222)
CM_802AKG = dict(clsid="{CM_802AKG}", name="CM-802AKG", weight=None)
C_802AK = dict(clsid="{C_802AK}", name="C-802AK", weight=600)
DEFA_553___30mm_Revolver_Cannon = dict(clsid="{C-101-DEFA553}", name="DEFA-553 - 30mm Revolver Cannon", weight=218)
DIS_AKD_10 = dict(clsid="DIS_AKD-10", name="AKD-10", weight=58)
DIS_AKG_DLPOD = dict(clsid="DIS_AKG_DLPOD", name="DATA-LINK POD", weight=295)
DIS_BOMB_250_2 = dict(clsid="DIS_BOMB_250_2", name="250-2 - 250kg GP Bombs HD", weight=250)
DIS_BOMB_250_3 = dict(clsid="DIS_BOMB_250_3", name="250-3 - 250kg GP Bombs LD", weight=250)
DIS_BRM1_90 = dict(clsid="DIS_BRM1_90", name="BRM-1_90MM", weight=462.5)
DIS_CM_802AKG = dict(clsid="DIS_CM-802AKG", name="CM-802AKG", weight=765)
DIS_C_701IR = dict(clsid="DIS_C-701IR", name="C-701IR", weight=170)
DIS_C_701T = dict(clsid="DIS_C-701T", name="C-701T", weight=170)
DIS_C_802AK = dict(clsid="DIS_C-802AK", name="C-802AK", weight=765)
DIS_DF4A_KD20 = dict(clsid="DIS_DF4A_KD20", name="KD-20", weight=1750)
DIS_DF4B_YJ12 = dict(clsid="DIS_DF4B_YJ12", name="YJ-12", weight=2550)
DIS_GB6 = dict(clsid="DIS_GB6", name="GB-6", weight=672)
DIS_GB6_HE = dict(clsid="DIS_GB6_HE", name="GB-6-HE", weight=672)
DIS_GB6_TSP = dict(clsid="DIS_GB6_TSP", name="GB-6-SFW", weight=672)
DIS_GBU_10 = dict(clsid="DIS_GBU_10", name="GBU-10", weight=1162)
DIS_GBU_12 = dict(clsid="DIS_GBU_12", name="GBU-12", weight=275)
DIS_GBU_12_DUAL_GDJ_II19_L = dict(clsid="DIS_GBU_12_DUAL_GDJ_II19_L", name="GDJ-II19 - 2 x GBU-12", weight=629)
DIS_GBU_12_DUAL_GDJ_II19_R = dict(clsid="DIS_GBU_12_DUAL_GDJ_II19_R", name="GDJ-II19 - 2 x GBU-12", weight=629)
DIS_GBU_16 = dict(clsid="DIS_GBU_16", name="GBU-16", weight=564)
DIS_GDJ_KD63 = dict(clsid="DIS_GDJ_KD63", name="KD-63", weight=2050)
DIS_GDJ_KD63B = dict(clsid="DIS_GDJ_KD63B", name="KD-63B", weight=2050)
DIS_GDJ_YJ83K = dict(clsid="DIS_GDJ_YJ83K", name="YJ-83K", weight=765)
DIS_H6_250_2_N12 = dict(clsid="DIS_H6_250_2_N12", name="12 x 250-2 - 250kg GP Bombs HD", weight=3000)
DIS_H6_250_2_N24 = dict(clsid="DIS_H6_250_2_N24", name="24 x 250-2 - 250kg GP Bombs HD", weight=6000)
DIS_KD20 = dict(clsid="DIS_KD20", name="KD-20", weight=1700)
DIS_KD63 = dict(clsid="DIS_KD63", name="KD-63", weight=2000)
DIS_KD63B = dict(clsid="DIS_KD63B", name="KD-63B", weight=2000)
DIS_LAU68_MK5_DUAL_GDJ_II19_L = dict(clsid="DIS_LAU68_MK5_DUAL_GDJ_II19_L", name="GDJ-II19 - 2 x LAU68 MK5", weight=261.06)
DIS_LAU68_MK5_DUAL_GDJ_II19_R = dict(clsid="DIS_LAU68_MK5_DUAL_GDJ_II19_R", name="GDJ-II19 - 2 x LAU68 MK5", weight=261.06)
DIS_LD_10 = dict(clsid="DIS_LD-10", name="LD-10", weight=289)
DIS_LD_10_DUAL_L = dict(clsid="DIS_LD-10_DUAL_L", name="LD-10 x 2", weight=558)
DIS_LD_10_DUAL_R = dict(clsid="DIS_LD-10_DUAL_R", name="LD-10 x 2", weight=558)
DIS_LS_6_500 = dict(clsid="DIS_LS_6_500", name="LS-6-500", weight=570)
DIS_MER6_250_3_N6 = dict(clsid="DIS_MER6_250_3_N6", name="MER6 - 6 x 250-3 - 250kg GP Bombs LD", weight=1550)
DIS_MK_20 = dict(clsid="DIS_MK_20", name="Mk-20", weight=222)
DIS_MK_20_DUAL_GDJ_II19_L = dict(clsid="DIS_MK_20_DUAL_GDJ_II19_L", name="GDJ-II19 - 2 x Mk-20", weight=523)
DIS_MK_20_DUAL_GDJ_II19_R = dict(clsid="DIS_MK_20_DUAL_GDJ_II19_R", name="GDJ-II19 - 2 x Mk-20", weight=523)
DIS_MK_82S_DUAL_GDJ_II19_L = dict(clsid="DIS_MK_82S_DUAL_GDJ_II19_L", name="GDJ-II19 - 2 x Mk-82 SnakeEye", weight=543)
DIS_MK_82S_DUAL_GDJ_II19_R = dict(clsid="DIS_MK_82S_DUAL_GDJ_II19_R", name="GDJ-II19 - 2 x Mk-82 SnakeEye", weight=543)
DIS_MK_82_DUAL_GDJ_II19_L = dict(clsid="DIS_MK_82_DUAL_GDJ_II19_L", name="GDJ-II19 - 2 x Mk-82", weight=561)
DIS_MK_82_DUAL_GDJ_II19_R = dict(clsid="DIS_MK_82_DUAL_GDJ_II19_R", name="GDJ-II19 - 2 x Mk-82", weight=561)
DIS_PL_12 = dict(clsid="DIS_PL-12", name="PL-12", weight=199)
# PLA stores (PL-series, smoke generators, tanks), DWS39, FAB bombs, fuel
# tanks, GAU-12 gunpods, GBU family, GUV gunpods, HVAR rockets, KAB bombs and
# Kh-series missiles through the left L005 Sorbtsiya pod; generated
# {clsid, name, weight} records.
DIS_PL_5EII = dict(clsid="DIS_PL-5EII", name="PL-5EII", weight=153)
DIS_PL_8A = dict(clsid="DIS_PL-8A", name="PL-8A", weight=115)
DIS_PL_8B = dict(clsid="DIS_PL-8B", name="PL-8B", weight=115)
DIS_RKT_90_UG = dict(clsid="DIS_RKT_90_UG", name="UG_90MM", weight=382.5)
DIS_SD_10 = dict(clsid="DIS_SD-10", name="SD-10", weight=289)
DIS_SD_10_DUAL_L = dict(clsid="DIS_SD-10_DUAL_L", name="SD-10 x 2", weight=558)
DIS_SD_10_DUAL_R = dict(clsid="DIS_SD-10_DUAL_R", name="SD-10 x 2", weight=558)
DIS_SMOKE_GENERATOR_B = dict(clsid="DIS_SMOKE_GENERATOR_B", name="Smoke Generator - blue", weight=0)
DIS_SMOKE_GENERATOR_G = dict(clsid="DIS_SMOKE_GENERATOR_G", name="Smoke Generator - green", weight=0)
DIS_SMOKE_GENERATOR_O = dict(clsid="DIS_SMOKE_GENERATOR_O", name="Smoke Generator - orange", weight=0)
DIS_SMOKE_GENERATOR_R = dict(clsid="DIS_SMOKE_GENERATOR_R", name="Smoke Generator - red", weight=0)
DIS_SMOKE_GENERATOR_W = dict(clsid="DIS_SMOKE_GENERATOR_W", name="Smoke Generator - white", weight=0)
DIS_SMOKE_GENERATOR_Y = dict(clsid="DIS_SMOKE_GENERATOR_Y", name="Smoke Generator - yellow", weight=0)
DIS_SPJ_POD = dict(clsid="DIS_SPJ_POD", name="KG-600", weight=270)
DIS_TANK1100 = dict(clsid="DIS_TANK1100", name="1100L Tank", weight=1064)
DIS_TANK1100_EMPTY = dict(clsid="DIS_TANK1100_EMPTY", name="1100L Tank Empty", weight=75)
DIS_TANK800 = dict(clsid="DIS_TANK800", name="800L Tank", weight=730)
DIS_TANK800_EMPTY = dict(clsid="DIS_TANK800_EMPTY", name="800L Tank Empty", weight=45)
DIS_TYPE200 = dict(clsid="DIS_TYPE200", name="TYPE-200A", weight=200)
DIS_TYPE200_DUAL_L = dict(clsid="DIS_TYPE200_DUAL_L", name="TYPE-200A Dual", weight=400)
DIS_TYPE200_DUAL_R = dict(clsid="DIS_TYPE200_DUAL_R", name="TYPE-200A Dual", weight=400)
DIS_WMD7 = dict(clsid="DIS_WMD7", name="WMD7 POD", weight=295)
DIS_YJ12 = dict(clsid="DIS_YJ12", name="YJ-12", weight=2500)
DIS_YJ83K = dict(clsid="DIS_YJ83K", name="YJ-83K", weight=715)
DWS39_MJ1 = dict(clsid="{DWS39_MJ1}", name="DWS39 MJ1", weight=605)
DWS39_MJ1_MJ2 = dict(clsid="{DWS39_MJ1_MJ2}", name="DWS39 MJ1-MJ2", weight=605)
DWS39_MJ2 = dict(clsid="{DWS39_MJ2}", name="DWS39 MJ2", weight=605)
Eclair = dict(clsid="{Eclair}", name="Eclair", weight=20)
ER_4_SC50 = dict(clsid="ER_4_SC50", name="4 x SC 50 - 50kg GP Bomb LD", weight=220)
ETHER = dict(clsid="{0519A261-0AB6-11d6-9193-00A0249B6F00}", name="ETHER", weight=200)
FAB_100M = dict(clsid="FAB_100M", name="FAB-100M - 100kg GP Bomb LD", weight=100)
FAB_100M_ = dict(clsid="FAB_100M", name="FAB-100M", weight=100)
FAB_100_x_4 = dict(clsid="{FAB-100-4}", name="FAB-100 x 4", weight=465)
FAB_100___100kg_GP_Bomb_LD = dict(clsid="{FB3CE165-BF07-4979-887C-92B87F13276B}", name="FAB-100 - 100kg GP Bomb LD", weight=100)
FAB_1500_M_54___1500kg_GP_Bomb_LD = dict(clsid="{40AA4ABE-D6EB-4CD6-AEFE-A1A0477B24AB}", name="FAB-1500 M-54 - 1500kg GP Bomb LD", weight=1392)
FAB_250_M54_TU___235_kg__bomb__parachute = dict(clsid="{FAB-250-M54-TU}", name="FAB-250 M54 TU - 235 kg, bomb, parachute", weight=235)
FAB_250_M54___235_kg__bomb__parachute = dict(clsid="{FAB-250-M54}", name="FAB-250 M54 - 235 kg, bomb, parachute", weight=235)
FAB_250_M62___250kg_GP_Bomb_LD = dict(clsid="{FAB_250_M62}", name="FAB-250-M62 - 250kg GP Bomb LD", weight=227)
FAB_250___250kg_GP_Bomb_LD = dict(clsid="{3C612111-C7AD-476E-8A8E-2485812F4E5C}", name="FAB-250 - 250kg GP Bomb LD", weight=250)
FAB_50 = dict(clsid="FAB_50", name="FAB-50 - 50kg GP Bomb LD", weight=50)
FAB_500_M54_TU___480_kg__bomb__parachute = dict(clsid="{FAB-500-M54-TU}", name="FAB-500 M54 TU - 480 kg, bomb, parachute", weight=480)
FAB_500_M54___474_kg__bomb__free_fall = dict(clsid="{FAB-500-M54}", name="FAB-500 M54 - 474 kg, bomb, free-fall", weight=474)
FAB_500_M_62___500kg_GP_Bomb_LD = dict(clsid="{37DCC01E-9E02-432F-B61D-10C166CA2798}", name="FAB-500 M-62 - 500kg GP Bomb LD", weight=506)
FAB_500_SL___515_kg__bomb__parachute = dict(clsid="{FAB-500-SL}", name="FAB-500 SL - 515 kg, bomb, parachute", weight=515)
FAB_500_TA___477_kg__bomb__free_fall = dict(clsid="{FAB-500-TA}", name="FAB-500 TA - 477 kg, bomb, free-fall", weight=477)
FAB_50_ = dict(clsid="FAB_50", name="FAB-50", weight=50)
FIM_92 = dict(clsid="FIM_92", name="STINGER", weight=None)
FPU_8A_Fuel_Tank_330_gallons = dict(clsid="{FPU_8A_FUEL_TANK}", name="FPU-8A Fuel Tank 330 gallons", weight=1150)
Fuel_Tank_120_gallons = dict(clsid="{PTB_120_F86F35}", name="Fuel Tank 120 gallons", weight=413.36)
Fuel_Tank_150_liters = dict(clsid="{PTB_150L_L39}", name="Fuel Tank 150 liters", weight=124.25)
Fuel_Tank_200_gallons = dict(clsid="{PTB_200_F86F35}", name="Fuel Tank 200 gallons", weight=675.6)
Fuel_Tank_350_liters = dict(clsid="{PTB_350L_L39}", name="Fuel Tank 350 liters", weight=283.25)
Fuel_Tank_490_L_Central__21_ = dict(clsid="{PTB_490C_MIG21}", name="Fuel Tank 490 L Central (21)", weight=434)
Fuel_Tank_490_L__21_ = dict(clsid="{PTB_490_MIG21}", name="Fuel Tank 490 L (21)", weight=434)
Fuel_Tank_800_L__21_ = dict(clsid="{PTB_800_MIG21}", name="Fuel Tank 800 L (21)", weight=682)
Fuel_Tank_FT600 = dict(clsid="Fuel_Tank_FT600", name="Fuel tank FT600", weight=1925)
Fuel_tank_1150L = dict(clsid="{414E383A-59EB-41BC-8566-2B5E0788ED1F}", name="Fuel tank 1150L", weight=975.25)
Fuel_tank_1150L_MiG_29 = dict(clsid="{C0FF4842-FBAC-11d5-9190-00A0249B6F00}", name="Fuel tank 1150L MiG-29", weight=975.25)
Fuel_tank_1400L = dict(clsid="{2BEC576B-CDF5-4B7F-961F-B0FA4312B841}", name="Fuel tank 1400L", weight=1262.5)
Fuel_tank_2000L = dict(clsid="{16602053-4A12-40A2-B214-AB60D481B20E}", name="Fuel tank 2000L", weight=1700)
Fuel_tank_3000L = dict(clsid="{7D7EC917-05F6-49D4-8045-61FC587DD019}", name="Fuel tank 3000L", weight=2550)
Fuel_tank_300_gal = dict(clsid="{8A0BE8AE-58D4-4572-9263-3144C0D06364}", name="Fuel tank 300 gal", weight=1083.5076415)
Fuel_tank_300_gal_ = dict(clsid="{F14-300gal}", name="Fuel tank 300 gal", weight=958.4)
Fuel_tank_300_gal__empty_ = dict(clsid="{F14-300gal-empty}", name="Fuel tank 300 gal (empty)", weight=70)
Fuel_tank_330_gal = dict(clsid="{EFEC8200-B922-11d7-9897-000476191836}", name="Fuel tank 330 gal", weight=1067.750921)
Fuel_tank_330_gal_ = dict(clsid="{EFEC8201-B922-11d7-9897-000476191836}", name="Fuel tank 330 gal", weight=1067.750921)
Fuel_tank_367_gal = dict(clsid="{82364E69-5564-4043-A866-E13032926C3E}", name="Fuel tank 367 gal", weight=1181.8623879)
Fuel_tank_370_gal = dict(clsid="{F376DBEE-4CAE-41BA-ADD9-B2910AC95DEC}", name="Fuel tank 370 gal", weight=1338.1101068)
Fuel_tank_5000L = dict(clsid="{0855A3A1-FA50-4C89-BDBB-5D5360ABA071}", name="Fuel tank 5000L", weight=4420)
Fuel_tank_610_gal = dict(clsid="{E1F29B21-F291-4589-9FD8-3272EEC69506}", name="Fuel tank 610 gal", weight=2010.8766885)
Fuel_tank_800L = dict(clsid="{A5BAEAB7-6FAF-4236-AF72-0FD900F493F9}", name="Fuel tank 800L", weight=680)
Fuel_tank_800L_Wing = dict(clsid="{E8D4652F-FD48-45B7-BA5B-2AE05BB5A9CF}", name="Fuel tank 800L Wing", weight=760)
Fuel_tank_PTB_450 = dict(clsid="{B99EE8A8-99BC-4a8d-89AC-A26831920DCE}", name="Fuel tank PTB-450", weight=550)
Fuel_tank_PTB_450_ = dict(clsid="{PTB_450}", name="Fuel tank PTB-450", weight=550)
Fuel_tank_S_3 = dict(clsid="{A504D93B-4E80-4B4F-A533-0D9B65F2C55F}", name="Fuel tank S-3", weight=964)
FW109_FUEL_TANK = dict(clsid="FW109_FUEL_TANK", name="300 liter Fuel Tank Type E2", weight=266)
F_4_Fuel_tank_C = dict(clsid="{8B9E3FD0-F034-4A07-B6CE-C269884CC71B}", name="F-4 Fuel tank-C", weight=2345)
F_4_Fuel_tank_W = dict(clsid="{7B4B122D-C12C-4DB4-834E-4D8BB4D863A8}", name="F-4 Fuel tank-W", weight=1420)
F_5_150Gal_Fuel_tank = dict(clsid="{PTB-150GAL}", name="F-5 150Gal Fuel tank", weight=509)
F_5_275Gal_Fuel_tank = dict(clsid="{0395076D-2F77-4420-9D33-087A4398130B}", name="F-5 275Gal Fuel tank", weight=909)
GAU_12_Gunpod_w_AP_M79 = dict(clsid="{GAU_12_Equalizer_AP}", name="GAU 12 Gunpod w/AP M79", weight=283.9)
GAU_12_Gunpod_w_HE_M792 = dict(clsid="{GAU_12_Equalizer_HE}", name="GAU 12 Gunpod w/HE M792", weight=283.9)
GAU_12_Gunpod_w_SAPHEI_T = dict(clsid="{GAU_12_Equalizer}", name="GAU 12 Gunpod w/SAPHEI-T", weight=283.9)
GBU_10 = dict(clsid="{BRU-32 GBU-10}", name="GBU-10", weight=997.38)
GBU_10___2000lb_Laser_Guided_Bomb = dict(clsid="{51F9AAE5-964F-4D21-83FB-502E3BFE5F8A}", name="GBU-10 - 2000lb Laser Guided Bomb", weight=959)
GBU_12 = dict(clsid="{BRU-32 GBU-12}", name="GBU-12", weight=332.38)
GBU_12___500lb_Laser_Guided_Bomb = dict(clsid="{DB769D48-67D7-42ED-A2BE-108D566C8B1E}", name="GBU-12 - 500lb Laser Guided Bomb", weight=277)
GBU_16 = dict(clsid="{BRU-32 GBU-16}", name="GBU-16", weight=621.38)
GBU_16___1000lb_Laser_Guided_Bomb = dict(clsid="{0D33DDAE-524F-4A4E-B5B8-621754FE3ADE}", name="GBU-16 - 1000lb Laser Guided Bomb", weight=513)
GBU_24 = dict(clsid="{BRU-32 GBU-24}", name="GBU-24", weight=1107.38)
GBU_24_Paveway_III___2000lb_Laser_Guided_Bomb = dict(clsid="{34759BBC-AF1E-4AEE-A581-498FF7A6EBCE}", name="GBU-24 Paveway III - 2000lb Laser Guided Bomb", weight=1087)
GBU_24_Paveway_III___2000lb_Laser_Guided_Bomb_ = dict(clsid="{GBU-24}", name="GBU-24 Paveway III - 2000lb Laser Guided Bomb", weight=1087)
GBU_27___2000lb_Laser_Guided_Penetrator_Bomb = dict(clsid="{EF0A9419-01D6-473B-99A3-BEBDB923B14D}", name="GBU-27 - 2000lb Laser Guided Penetrator Bomb", weight=1200)
GBU_28___5000lb_Laser_Guided_Penetrator_Bomb = dict(clsid="{F06B775B-FC70-44B5-8A9F-5B5E2EB839C7}", name="GBU-28 - 5000lb Laser Guided Penetrator Bomb", weight=2130)
GBU_31V3B_8 = dict(clsid="GBU-31V3B*8", name="8 x GBU-31(V)3/B - JDAM, 2000lb GPS Guided Penetrator Bombs", weight=7848)
GBU_31_8 = dict(clsid="GBU-31*8", name="8 x GBU-31(V)1/B - JDAM, 2000lb GPS Guided Bombs", weight=7152)
GBU_31_V_1_B___JDAM__2000lb_GPS_Guided_Bomb = dict(clsid="{GBU-31}", name="GBU-31(V)1/B - JDAM, 2000lb GPS Guided Bomb", weight=934)
GBU_31_V_2_B___JDAM__2000lb_GPS_Guided_Bomb = dict(clsid="{GBU_31_V_2B}", name="GBU-31(V)2/B - JDAM, 2000lb GPS Guided Bomb", weight=934)
GBU_31_V_3_B___JDAM__2000lb_GPS_Guided_Penetrator_Bomb = dict(clsid="{GBU-31V3B}", name="GBU-31(V)3/B - JDAM, 2000lb GPS Guided Penetrator Bomb", weight=981)
GBU_31_V_4_B___JDAM__2000lb_GPS_Guided_Penetrator_Bomb = dict(clsid="{GBU_31_V_4B}", name="GBU-31(V)4/B - JDAM, 2000lb GPS Guided Penetrator Bomb", weight=970)
GBU_32_V_2_B___JDAM__1000lb_GPS_Guided_Bomb = dict(clsid="{GBU_32_V_2B}", name="GBU-32(V)2/B - JDAM, 1000lb GPS Guided Bomb", weight=467)
GBU_38_16 = dict(clsid="GBU-38*16", name="16 x GBU-38 - JDAM, 500lb GPS Guided Bombs", weight=3856)
GBU_38___JDAM__500lb_GPS_Guided_Bomb = dict(clsid="{GBU-38}", name="GBU-38 - JDAM, 500lb GPS Guided Bomb", weight=241)
GBU_54B___LJDAM__500lb_Laser__GPS_Guided_Bomb_LD = dict(clsid="{GBU_54_V_1B}", name="GBU-54B - LJDAM, 500lb Laser & GPS Guided Bomb LD", weight=253)
GUV_VOG = dict(clsid="GUV_VOG", name="GUV-8700 w AP-30 - 30mm Grenade Launcher", weight=274)
GUV_YakB_GSHP = dict(clsid="GUV_YakB_GSHP", name="GUV-8700 w 1x12.7 mm & 2x7.62 mm Rotary HMG", weight=452)
HOT3 = dict(clsid="{HOT3G}", name="HOT3", weight=32)
HOT3_ = dict(clsid="{HOT3D}", name="HOT3", weight=32)
HSAB_with_9_x_Mk_20_Rockeye___490lbs_CBUs__247_x_HEAT_Bomblets = dict(clsid="{4CD2BB0F-5493-44EF-A927-9760350F7BA1}", name="HSAB with 9 x Mk-20 Rockeye - 490lbs CBUs, 247 x HEAT Bomblets", weight=2050)
HSAB_with_9_x_Mk_83___1000lb_GP_Bombs_LD = dict(clsid="{696CFFC4-0BDE-42A8-BE4B-0BE3D9DD723C}", name="HSAB with 9 x Mk-83 - 1000lb GP Bombs LD", weight=8100)
HVAR_SMOKE__UnGd_Rkt = dict(clsid="{HVAR_SMOKE_2}", name="HVAR SMOKE, UnGd Rkt", weight=100)
HVAR_Smoke_Generator = dict(clsid="{HVAR_SMOKE_GENERATOR}", name="HVAR Smoke Generator", weight=64)
HVAR__UnGd_Rkt = dict(clsid="{HVAR}", name="HVAR, UnGd Rkt", weight=64)
I16_DROP_FUEL_TANK = dict(clsid="I16_DROP_FUEL_TANK", name="I-16 External Fuel Tank", weight=73)
I16_FAB_100SV = dict(clsid="I16_FAB_100SV", name="FAB-100SV", weight=100)
I16_RS_82 = dict(clsid="I16_RS_82", name="RS-82", weight=9.7)
IAB_500___470_kg__bomb__free_fall = dict(clsid="{IAB-500}", name="IAB-500 - 470 kg, bomb, free fall", weight=470)
IR_Deflector = dict(clsid="{IR_Deflector}", name="IR Deflector", weight=5)
KAB_1500Kr___1500kg_TV_Guided_Bomb = dict(clsid="{KAB_1500Kr_LOADOUT}", name="KAB-1500Kr - 1500kg TV Guided Bomb", weight=1525)
KAB_1500LG_Pr___1500kg_Laser_Guided_Penetrator_Bomb = dict(clsid="{KAB_1500LG_LOADOUT}", name="KAB-1500LG-Pr - 1500kg Laser Guided Penetrator Bomb", weight=1525)
KAB_1500L___1500kg_Laser_Guided_Bomb = dict(clsid="{39821727-F6E2-45B3-B1F0-490CC8921D1E}", name="KAB-1500L - 1500kg Laser Guided Bomb", weight=1560)
KAB_500Kr___500kg_TV_Guided_Bomb = dict(clsid="{E2C426E3-8B10-4E09-B733-9CDC26520F48}", name="KAB-500Kr - 500kg TV Guided Bomb", weight=560)
KAB_500LG___500kg_Laser_Guided_Bomb = dict(clsid="{BA565F89-2373-4A84-9502-A0E017D3A44A}", name="KAB-500LG - 500kg Laser Guided Bomb", weight=534)
KAB_500S___500kg_GPS_Guided_Bomb = dict(clsid="{KAB_500S_LOADOUT}", name="KAB-500S - 500kg GPS Guided Bomb", weight=500)
KB_Flare_Chaff_dispenser_pod = dict(clsid="{KB}", name="KB Flare/Chaff dispenser pod", weight=296)
Kh_22__AS_4_Kitchen____1000kg__AShM__IN__Act_Pas_Rdr = dict(clsid="{12429ECF-03F0-4DF6-BCBD-5D38B6343DE1}", name="Kh-22 (AS-4 Kitchen) - 1000kg, AShM, IN & Act/Pas Rdr", weight=6800)
Kh_23L_Grom__AS_7_Kerry____286kg__ASM__Laser_Guided = dict(clsid="{9F390892-E6F9-42C9-B84E-1136A881DCB2}", name="Kh-23L Grom (AS-7 Kerry) - 286kg, ASM, Laser Guided", weight=288)
Kh_25ML__AS_10_Karen____300kg__ASM__Semi_Act_Laser = dict(clsid="{6DADF342-D4BA-4D8A-B081-BA928C4AF86D}", name="Kh-25ML (AS-10 Karen) - 300kg, ASM, Semi-Act Laser", weight=360)
Kh_25ML__AS_10_Karen____300kg__ASM__Semi_Act_Laser_ = dict(clsid="{79D73885-0801-45a9-917F-C90FE1CE3DFC}", name="Kh-25ML (AS-10 Karen) - 300kg, ASM, Semi-Act Laser", weight=360)
Kh_25ML__AS_10_Karen____300kg__ASM__Semi_Act_Laser__ = dict(clsid="{X-25ML}", name="Kh-25ML (AS-10 Karen) - 300kg, ASM, Semi-Act Laser", weight=360)
Kh_25MPU__Updated_AS_12_Kegler____320kg__ARM__IN__Pas_Rdr = dict(clsid="{E86C5AA5-6D49-4F00-AD2E-79A62D6DDE26}", name="Kh-25MPU (Updated AS-12 Kegler) - 320kg, ARM, IN & Pas Rdr", weight=370)
Kh_25MPU__Updated_AS_12_Kegler____320kg__ARM__IN__Pas_Rdr_ = dict(clsid="{752AF1D2-EBCC-4bd7-A1E7-2357F5601C70}", name="Kh-25MPU (Updated AS-12 Kegler) - 320kg, ARM, IN & Pas Rdr", weight=370)
Kh_25MPU__Updated_AS_12_Kegler____320kg__ARM__IN__Pas_Rdr__ = dict(clsid="{X-25MPU}", name="Kh-25MPU (Updated AS-12 Kegler) - 320kg, ARM, IN & Pas Rdr", weight=370)
Kh_25MP__AS_12_Kegler____320kg__ARM__Pas_Rdr = dict(clsid="{Kh-25MP}", name="Kh-25MP (AS-12 Kegler) - 320kg, ARM, Pas Rdr", weight=355)
Kh_25MR__AS_10_Karen____300kg__ASM__10km__RC_Guided = dict(clsid="{292960BB-6518-41AC-BADA-210D65D5073C}", name="Kh-25MR (AS-10 Karen) - 300kg, ASM, 10km, RC Guided", weight=360)
Kh_25MR__AS_10_Karen____300kg__ASM__RC_Guided = dict(clsid="{X-25MR}", name="Kh-25MR (AS-10 Karen) - 300kg, ASM, RC Guided", weight=360)
Kh_28__AS_9_Kyle____720kg__ARM__Pas_Rdr = dict(clsid="{Kh-28}", name="Kh-28 (AS-9 Kyle) - 720kg, ARM, Pas Rdr", weight=715)
Kh_29L__AS_14_Kedge____657kg__ASM__Semi_Act_Laser = dict(clsid="{3468C652-E830-4E73-AFA9-B5F260AB7C3D}", name="Kh-29L (AS-14 Kedge) - 657kg, ASM, Semi-Act Laser", weight=747)
Kh_29L__AS_14_Kedge____657kg__ASM__Semi_Act_Laser_ = dict(clsid="{D4A8D9B9-5C45-42e7-BBD2-0E54F8308432}", name="Kh-29L (AS-14 Kedge) - 657kg, ASM, Semi-Act Laser", weight=747)
Kh_29L__AS_14_Kedge____657kg__ASM__Semi_Act_Laser__ = dict(clsid="{X-29L}", name="Kh-29L (AS-14 Kedge) - 657kg, ASM, Semi-Act Laser", weight=747)
Kh_29T__AS_14_Kedge____670kg__ASM__TV_Guided = dict(clsid="{B4FC81C9-B861-4E87-BBDC-A1158E648EBF}", name="Kh-29T (AS-14 Kedge) - 670kg, ASM, TV Guided", weight=760)
Kh_29T__AS_14_Kedge____670kg__ASM__TV_Guided_ = dict(clsid="{601C99F7-9AF3-4ed7-A565-F8B8EC0D7AAC}", name="Kh-29T (AS-14 Kedge) - 670kg, ASM, TV Guided", weight=760)
Kh_29T__AS_14_Kedge____670kg__ASM__TV_Guided__ = dict(clsid="{X-29T}", name="Kh-29T (AS-14 Kedge) - 670kg, ASM, TV Guided", weight=760)
Kh_31A__AS_17_Krypton____610kg__AShM__IN__Act_Rdr = dict(clsid="{4D13E282-DF46-4B23-864A-A9423DFDE504}", name="Kh-31A (AS-17 Krypton) - 610kg, AShM, IN & Act Rdr", weight=690)
Kh_31A__AS_17_Krypton____610kg__AShM__IN__Act_Rdr_ = dict(clsid="{4D13E282-DF46-4B23-864A-A9423DFDE50A}", name="Kh-31A (AS-17 Krypton) - 610kg, AShM, IN & Act Rdr", weight=690)
Kh_31A__AS_17_Krypton____610kg__AShM__IN__Act_Rdr__ = dict(clsid="{X-31A}", name="Kh-31A (AS-17 Krypton) - 610kg, AShM, IN & Act Rdr", weight=690)
Kh_31P__AS_17_Krypton____600kg__ARM__IN__Pas_Rdr = dict(clsid="{D8F2C90B-887B-4B9E-9FE2-996BC9E9AF03}", name="Kh-31P (AS-17 Krypton) - 600kg, ARM, IN & Pas Rdr", weight=690)
Kh_31P__AS_17_Krypton____600kg__ARM__IN__Pas_Rdr_ = dict(clsid="{D8F2C90B-887B-4B9E-9FE2-996BC9E9AF0A}", name="Kh-31P (AS-17 Krypton) - 600kg, ARM, IN & Pas Rdr", weight=690)
Kh_31P__AS_17_Krypton____600kg__ARM__IN__Pas_Rdr__ = dict(clsid="{X-31P}", name="Kh-31P (AS-17 Krypton) - 600kg, ARM, IN & Pas Rdr", weight=690)
Kh_35__AS_20_Kayak____520kg__AShM__IN__Act_Rdr = dict(clsid="{2234F529-1D57-4496-8BB0-0150F9BDBBD2}", name="Kh-35 (AS-20 Kayak) - 520kg, AShM, IN & Act Rdr", weight=520)
Kh_35__AS_20_Kayak____520kg__AShM__IN__Act_Rdr_ = dict(clsid="{2234F529-1D57-4496-8BB0-0150F9BDBBD3}", name="Kh-35 (AS-20 Kayak) - 520kg, AShM, IN & Act Rdr", weight=570)
Kh_41__SS_N_22_Sunburn____4500kg__AShM__IN__Act_Rdr = dict(clsid="{3F26D9C5-5CC3-4E42-BC79-82FAA54E9F26}", name="Kh-41 (SS-N-22-Sunburn) - 4500kg, AShM, IN & Act Rdr", weight=4500)
Kh_58U__AS_11_Kilter____640kg__ARM__IN__Pas_Rdr = dict(clsid="{FE382A68-8620-4AC0-BDF5-709BFE3977D7}", name="Kh-58U (AS-11 Kilter) - 640kg, ARM, IN & Pas Rdr", weight=730)
Kh_58U__AS_11_Kilter____640kg__ARM__IN__Pas_Rdr_ = dict(clsid="{B5CA9846-776E-4230-B4FD-8BCC9BFB1676}", name="Kh-58U (AS-11 Kilter) - 640kg, ARM, IN & Pas Rdr", weight=730)
Kh_59M__AS_18_Kazoo____930kg__ASM__IN = dict(clsid="{40AB87E8-BEFB-4D85-90D9-B2753ACF9514}", name="Kh-59M (AS-18 Kazoo) - 930kg, ASM, IN", weight=1115)
Kh_65__AS_15B_Kent____1250kg__ASM__IN__MCC = dict(clsid="{BADAF2DE-68B5-472A-8AAC-35BAEFF6B4A1}", name="Kh-65 (AS-15B Kent) - 1250kg, ASM, IN & MCC", weight=1250)
Kh_66_Grom__21____AGM__radar_guided_APU_68 = dict(clsid="{Kh-66_Grom}", name="Kh-66 Grom (21) - AGM, radar guided APU-68", weight=300)
KMGU_2___96_x_AO_2_5RT_Dispenser__CBU__HE_Frag = dict(clsid="{96A7F676-F956-404A-AD04-F33FB2C74884}", name="KMGU-2 - 96 x AO-2.5RT Dispenser (CBU) HE/Frag", weight=778)
KMGU_2___96_x_PTAB_2_5KO_Dispenser__CBU__HEAT_AP = dict(clsid="{96A7F676-F956-404A-AD04-F33FB2C74881}", name="KMGU-2 - 96 x PTAB-2.5KO Dispenser (CBU) HEAT/AP", weight=675.6)
KORD_12_7 = dict(clsid="KORD_12_7", name="Kord 12.7mm HMG", weight=95)
Kopyo_radar_pod = dict(clsid="{F4920E62-A99A-11d8-9897-000476191836}", name="Kopyo radar pod", weight=115)
Kormoran___ASM = dict(clsid="{7210496B-7B81-4B52-80D6-8529ECF847CD}", name="Kormoran - ASM", weight=660)
K_13A = dict(clsid="{K-13A}", name="K-13A", weight=90)
L005_Sorbtsiya_ECM_pod__left_ = dict(clsid="{44EE8698-89F9-48EE-AF36-5FD31896A82F}", name="L005 Sorbtsiya ECM pod (left)", weight=150)
L005_Sorbtsiya_ECM_pod__right_ = {"clsid": "{44EE8698-89F9-48EE-AF36-5FD31896A82A}", "name": "L005 Sorbtsiya ECM pod (right)", "weight": 150} L175V_Khibiny_ECM_pod = {"clsid": "{ECM_POD_L_175V}", "name": "L175V Khibiny ECM pod", "weight": 150} LANTIRN_Targeting_Pod = {"clsid": "{F14-LANTIRN-TP}", "name": "LANTIRN Targeting Pod", "weight": 342} LAU3_HE151 = {"clsid": "LAU3_HE151", "name": "LAU-3 pod - 19 x 2.75\" Hydra, UnGd Rkts M151, HE", "weight": 234} LAU3_HE5 = {"clsid": "LAU3_HE5", "name": "LAU-3 pod - 19 x 2.75\" Hydra, UnGd Rkts Mk5, HEAT", "weight": 234} LAU3_WP156 = {"clsid": "LAU3_WP156", "name": "LAU-3 pod - 19 x 2.75\" Hydra, UnGd Rkts M156, Wht Phos", "weight": 234} LAU3_WP1B = {"clsid": "LAU3_WP1B", "name": "LAU-3 pod - 19 x 2.75\" Hydra, UnGd Rkts WTU-1/B, Practice", "weight": 234} LAU3_WP61 = {"clsid": "LAU3_WP61", "name": "LAU-3 pod - 19 x 2.75\" Hydra, UnGd Rkts Mk61, Practice", "weight": 234} LAU_105 = {"clsid": "LAU-105", "name": "LAU-105", "weight": 18} LAU_105_1_AIM_9L_L = {"clsid": "LAU-105_1*AIM-9L_L", "name": "LAU-105 with 1 x AIM-9L Sidewinder IR AAM", "weight": 115.73} LAU_105_1_AIM_9L_R = {"clsid": "LAU-105_1*AIM-9L_R", "name": "LAU-105 with 1 x AIM-9L Sidewinder IR AAM", "weight": 115.73} LAU_105_1_AIM_9M_L = {"clsid": "LAU-105_1*AIM-9M_L", "name": "LAU-105 with 1 x AIM-9M Sidewinder IR AAM", "weight": 115.73} LAU_105_1_AIM_9M_R = {"clsid": "LAU-105_1*AIM-9M_R", "name": "LAU-105 with 1 x AIM-9M Sidewinder IR AAM", "weight": 115.73} LAU_105_1_CATM_9M_L = {"clsid": "LAU-105_1*CATM-9M_L", "name": "LAU-105 with 1 x Captive AIM-9M for ACM", "weight": 115.73} LAU_105_1_CATM_9M_R = {"clsid": "LAU-105_1*CATM-9M_R", "name": "LAU-105 with 1 x Captive AIM-9M for ACM", "weight": 115.73} LAU_105_2_AIM_9L = {"clsid": "LAU-105_2*AIM-9L", "name": "LAU-105 with 2 x AIM-9L Sidewinder IR AAM", "weight": 201.46} LAU_105_2_AIM_9P5 = {"clsid": "LAU-105_2*AIM-9P5", "name": "LAU-105 with 2 x AIM-9P5 Sidewinder IR AAM", "weight": 201} LAU_105_2_CATM_9M = 
{"clsid": "LAU-105_2*CATM-9M", "name": "LAU-105 with 2 x Captive AIM-9M for ACM", "weight": 201.46} LAU_105_AIS_ASQ_T50_L = {"clsid": "LAU-105_AIS_ASQ_T50_L", "name": "LAU-105 with 1 x AN/ASQ-T50 TCTS Pod - ACMI Pod", "weight": 92.6} LAU_105_AIS_ASQ_T50_R = {"clsid": "LAU-105_AIS_ASQ_T50_R", "name": "LAU-105 with 1 x AN/ASQ-T50 TCTS Pod - ACMI Pod", "weight": 92.6} LAU_105_with_2_x_AIM_9M_Sidewinder_IR_AAM = {"clsid": "{DB434044-F5D0-4F1F-9BA9-B73027E18DD3}", "name": "LAU-105 with 2 x AIM-9M Sidewinder IR AAM", "weight": 201.46} LAU_105_with_2_x_AIM_9P_Sidewinder_IR_AAM = {"clsid": "{3C0745ED-8B0B-42eb-B907-5BD5C1717447}", "name": "LAU-105 with 2 x AIM-9P Sidewinder IR AAM", "weight": 202.36} LAU_10R_pod___4_x_127mm_ZUNI__UnGd_Rkts_Mk71__HE_FRAG = {"clsid": "{LAU_10R}", "name": "LAU-10R pod - 4 x 127mm ZUNI, UnGd Rkts Mk71, HE/FRAG", "weight": 316.6} LAU_10_pod___4_x_127mm_ZUNI__UnGd_Rkts_Mk71__HE_FRAG = {"clsid": "{F3EFE0AB-E91A-42D8-9CA2-B63C91ED570A}", "name": "LAU-10 pod - 4 x 127mm ZUNI, UnGd Rkts Mk71, HE/FRAG", "weight": 316.6} LAU_10___4_ZUNI_MK_71 = {"clsid": "{BRU42_LAU10}", "name": "LAU-10 - 4 ZUNI MK 71", "weight": 568} LAU_10___4_ZUNI_MK_71_ = {"clsid": "{BRU3242_LAU10}", "name": "LAU-10 - 4 ZUNI MK 71", "weight": 625.38} LAU_115C_with_AIM_7E_Sparrow_Semi_Active_Radar = {"clsid": "{LAU-115 - AIM-7E}", "name": "LAU-115C with AIM-7E Sparrow Semi-Active Radar", "weight": 284.4} LAU_115C_with_AIM_7F_Sparrow_Semi_Active_Radar = {"clsid": "{LAU-115 - AIM-7F}", "name": "LAU-115C with AIM-7F Sparrow Semi-Active Radar", "weight": 285.4} LAU_115C_with_AIM_7MH_Sparrow_Semi_Active_Radar = {"clsid": "{LAU-115 - AIM-7H}", "name": "LAU-115C with AIM-7MH Sparrow Semi-Active Radar", "weight": 285.4} LAU_115_2_LAU_127_AIM_120B = {"clsid": "LAU-115_2*LAU-127_AIM-120B", "name": "LAU-115 with 2 x LAU-127 AIM-120B AMRAAM - Active Rdr AAM", "weight": 457} LAU_115_2_LAU_127_AIM_120C = {"clsid": "LAU-115_2*LAU-127_AIM-120C", "name": "LAU-115 with 2 x LAU-127 AIM-120C-5 AMRAAM 
- Active Rdr AAM", "weight": 468} LAU_115_2_LAU_127_AIM_9L = {"clsid": "LAU-115_2*LAU-127_AIM-9L", "name": "LAU-115 with 2 x LAU-127 AIM-9L Sidewinder IR AAM", "weight": 316.46} LAU_115_2_LAU_127_AIM_9M = {"clsid": "LAU-115_2*LAU-127_AIM-9M", "name": "LAU-115 with 2 x LAU-127 AIM-9M Sidewinder IR AAM", "weight": 316.46} LAU_115_2_LAU_127_AIM_9X = {"clsid": "LAU-115_2*LAU-127_AIM-9X", "name": "LAU-115 with 2 x LAU-127 AIM-9X Sidewinder IR AAM", "weight": 313.92} LAU_115_2_LAU_127_CATM_9M = {"clsid": "LAU-115_2*LAU-127_CATM-9M", "name": "LAU-115 with 2 x LAU-127 Captive AIM-9M for ACM", "weight": 316.46} LAU_115_LAU_127_AIM_9L = {"clsid": "LAU-115_LAU-127_AIM-9L", "name": "LAU-115 with 1 x LAU-127 AIM-9L Sidewinder IR AAM", "weight": 230.73} LAU_115_LAU_127_AIM_9L_R = {"clsid": "LAU-115_LAU-127_AIM-9L_R", "name": "LAU-115 with 1 x LAU-127 AIM-9L Sidewinder IR AAM", "weight": 230.73} LAU_115_LAU_127_AIM_9M = {"clsid": "LAU-115_LAU-127_AIM-9M", "name": "LAU-115 with 1 x LAU-127 AIM-9M Sidewinder IR AAM", "weight": 230.73} LAU_115_LAU_127_AIM_9M_R = {"clsid": "LAU-115_LAU-127_AIM-9M_R", "name": "LAU-115 with 1 x LAU-127 AIM-9M Sidewinder IR AAM", "weight": 230.73} LAU_115_LAU_127_AIM_9X = {"clsid": "LAU-115_LAU-127_AIM-9X", "name": "LAU-115 with 1 x LAU-127 AIM-9X Sidewinder IR AAM", "weight": 229.46} LAU_115_LAU_127_AIM_9X_R = {"clsid": "LAU-115_LAU-127_AIM-9X_R", "name": "LAU-115 with 1 x LAU-127 AIM-9X Sidewinder IR AAM", "weight": 229.46} LAU_115_LAU_127_CATM_9M = {"clsid": "LAU-115_LAU-127_CATM-9M", "name": "LAU-115 with 1 x LAU-127 Captive AIM-9M for ACM", "weight": 230.73} LAU_115_LAU_127_CATM_9M_R = {"clsid": "LAU-115_LAU-127_CATM-9M_R", "name": "LAU-115 with 1 x LAU-127 Captive AIM-9M for ACM", "weight": 230.73} LAU_115_with_1_x_LAU_127_AIM_120B_AMRAAM___Active_Rdr_AAM = {"clsid": "{LAU-115 - AIM-120B}", "name": "LAU-115 with 1 x LAU-127 AIM-120B AMRAAM - Active Rdr AAM", "weight": 301} LAU_115_with_1_x_LAU_127_AIM_120B_AMRAAM___Active_Rdr_AAM_ = {"clsid": 
"{LAU-115 - AIM-120B_R}", "name": "LAU-115 with 1 x LAU-127 AIM-120B AMRAAM - Active Rdr AAM", "weight": 301} LAU_115_with_1_x_LAU_127_AIM_120C_5_AMRAAM___Active_Rdr_AAM = {"clsid": "{LAU-115 - AIM-120C}", "name": "LAU-115 with 1 x LAU-127 AIM-120C-5 AMRAAM - Active Rdr AAM", "weight": 306.5} LAU_115_with_1_x_LAU_127_AIM_120C_5_AMRAAM___Active_Rdr_AAM_ = {"clsid": "{LAU-115 - AIM-120C_R}", "name": "LAU-115 with 1 x LAU-127 AIM-120C-5 AMRAAM - Active Rdr AAM", "weight": 306.5} LAU_115_with_AIM_7M_Sparrow_Semi_Active_Radar = {"clsid": "{LAU-115 - AIM-7M}", "name": "LAU-115 with AIM-7M Sparrow Semi-Active Radar", "weight": 285.5} LAU_117_AGM_65A = {"clsid": "LAU_117_AGM_65A", "name": "LAU-117 with AGM-65A - Maverick A (TV Guided)", "weight": 269.5} LAU_117_AGM_65B = {"clsid": "LAU_117_AGM_65B", "name": "LAU-117 with AGM-65B - Maverick B (TV Guided)", "weight": 269.5} LAU_117_AGM_65F = {"clsid": "LAU_117_AGM_65F", "name": "LAU-117 with AGM-65F - Maverick F (IIR ASM)", "weight": 360} LAU_117_AGM_65G = {"clsid": "LAU_117_AGM_65G", "name": "LAU-117 with AGM-65G - Maverick G (IIR ASM - Lg Whd)", "weight": 360} LAU_117_AGM_65H = {"clsid": "LAU_117_AGM_65H", "name": "LAU-117 with AGM-65H - Maverick H (CCD Imp ASM)", "weight": 267} LAU_117_AGM_65L = {"clsid": "LAU_117_AGM_65L", "name": "LAU-117 with AGM-65L - Maverick E2/L (CCD Laser ASM)", "weight": 351} LAU_117_CATM_65K = {"clsid": "LAU_117_CATM_65K", "name": "LAU-117 with CATM-65K - Captive Trg Round for Mav K (CCD)", "weight": 356} LAU_117_TGM_65D = {"clsid": "LAU_117_TGM_65D", "name": "LAU-117 with TGM-65D - Trg Round for Mav D (IIR)", "weight": 277} LAU_117_TGM_65G = {"clsid": "LAU_117_TGM_65G", "name": "LAU-117 with TGM-65G - Trg Round for Mav G (IIR)", "weight": 360} LAU_117_TGM_65H = {"clsid": "LAU_117_TGM_65H", "name": "LAU-117 with TGM-65H - Trg Round for Mav H (CCD)", "weight": 267} LAU_117_with_AGM_65D___Maverick_D__IIR_ASM_ = {"clsid": "{444BA8AE-82A7-4345-842E-76154EFCCA46}", "name": "LAU-117 with AGM-65D - 
Maverick D (IIR ASM)", "weight": 277} LAU_117_with_AGM_65E___Maverick_E__Laser_ASM___Lg_Whd_ = {"clsid": "{F16A4DE0-116C-4A71-97F0-2CF85B0313EC}", "name": "LAU-117 with AGM-65E - Maverick E (Laser ASM - Lg Whd)", "weight": 345} LAU_117_with_AGM_65K___Maverick_K__CCD_Imp_ASM_ = {"clsid": "{69DC8AE7-8F77-427B-B8AA-B19D3F478B66}", "name": "LAU-117 with AGM-65K - Maverick K (CCD Imp ASM)", "weight": 356} LAU_118a_with_AGM_45B_Shrike_ARM__Imp_ = {"clsid": "{3E6B632D-65EB-44D2-9501-1C2D04515405}", "name": "LAU-118a with AGM-45B Shrike ARM (Imp)", "weight": 222.4} LAU_127_AIM_9L = {"clsid": "LAU-127_AIM-9L", "name": "LAU-127 AIM-9L Sidewinder IR AAM", "weight": 131.03} LAU_127_AIM_9M = {"clsid": "LAU-127_AIM-9M", "name": "LAU-127 AIM-9M Sidewinder IR AAM", "weight": 131.03} LAU_127_AIM_9X = {"clsid": "LAU-127_AIM-9X", "name": "LAU-127 AIM-9X Sidewinder IR AAM", "weight": 129.76} LAU_127_CATM_9M = {"clsid": "LAU-127_CATM-9M", "name": "LAU-127 Captive AIM-9M for ACM", "weight": 131.03} LAU_131x3_HYDRA_70_M151 = {"clsid": "LAU_131x3_HYDRA_70_M151", "name": "BRU-42 with 3 x LAU-131 pods - 21 x 2.75\" Hydra, UnGd Rkts M151, HE", "weight": 406.65} LAU_131x3_HYDRA_70_M156 = {"clsid": "LAU_131x3_HYDRA_70_M156", "name": "BRU-42 with 3 x LAU-131 pods - 21 x 2.75\" Hydra, UnGd Rkts M156, Wht Phos", "weight": 410.43} LAU_131x3_HYDRA_70_M257 = {"clsid": "LAU_131x3_HYDRA_70_M257", "name": "BRU-42 with 3 x LAU-131 pods - 21 x 2.75\" Hydra, UnGd Rkts M257, Para Illum", "weight": 423.45} LAU_131x3_HYDRA_70_M274 = {"clsid": "LAU_131x3_HYDRA_70_M274", "name": "BRU-42 with 3 x LAU-131 pods - 21 x 2.75\" Hydra, UnGd Rkts M274, Practice Smk", "weight": 406.65} LAU_131x3_HYDRA_70_MK1 = {"clsid": "LAU_131x3_HYDRA_70_MK1", "name": "BRU-42 with 3 x LAU-131 pods - 21 x 2.75\" Hydra, UnGd Rkts Mk1, Practice", "weight": 379.56} LAU_131x3_HYDRA_70_MK5 = {"clsid": "LAU_131x3_HYDRA_70_MK5", "name": "BRU-42 with 3 x LAU-131 pods - 21 x 2.75\" Hydra, UnGd Rkts Mk5, HEAT", "weight": 373.26} 
LAU_131x3_HYDRA_70_MK61 = {"clsid": "LAU_131x3_HYDRA_70_MK61", "name": "BRU-42 with 3 x LAU-131 pods - 21 x 2.75\" Hydra, UnGd Rkts Mk61, Practice", "weight": 379.56} LAU_131x3_HYDRA_70_WTU1B = {"clsid": "LAU_131x3_HYDRA_70_WTU1B", "name": "BRU-42 with 3 x LAU-131 pods - 21 x 2.75\" Hydra, UnGd Rkts WTU-1/B, Practice", "weight": 406.65} LAU_131_pod___7_x_2_75_Hydra__Laser_Guided_Rkts_M151__HE_APKWS = {"clsid": "{LAU-131 - 7 AGR-20A}", "name": "LAU-131 pod - 7 x 2.75\" Hydra, Laser Guided Rkts M151, HE APKWS", "weight": 134.5} LAU_131_pod___7_x_2_75_Hydra__Laser_Guided_Rkts_M282__MPP_APKWS = {"clsid": "{LAU-131 - 7 AGR-20 M282}", "name": "LAU-131 pod - 7 x 2.75\" Hydra, Laser Guided Rkts M282, MPP APKWS", "weight": 148.5} LAU_131_pod___7_x_2_75_Hydra__UnGd_Rkts_M151__HE = {"clsid": "{69926055-0DA8-4530-9F2F-C86B157EA9F6}", "name": "LAU-131 pod - 7 x 2.75\" Hydra, UnGd Rkts M151, HE", "weight": 102.3} LAU_131_pod___7_x_2_75_Hydra__UnGd_Rkts_M156__Wht_Phos = {"clsid": "{2AF2EC3F-9065-4de5-93E1-1739C9A71EF7}", "name": "LAU-131 pod - 7 x 2.75\" Hydra, UnGd Rkts M156, Wht Phos", "weight": 103.56} LAU_131_pod___7_x_2_75_Hydra__UnGd_Rkts_M257__Para_Illum = {"clsid": "{DAD45FE5-CFF0-4a2b-99D4-5D044D3BC22F}", "name": "LAU-131 pod - 7 x 2.75\" Hydra, UnGd Rkts M257, Para Illum", "weight": 107.9} LAU_131_pod___7_x_2_75_Hydra__UnGd_Rkts_M274__Practice_Smk = {"clsid": "{6D6D5C07-2A90-4a68-9A74-C5D0CFFB05D9}", "name": "LAU-131 pod - 7 x 2.75\" Hydra, UnGd Rkts M274, Practice Smk", "weight": 102.3} LAU_131_pod___7_x_2_75_Hydra__UnGd_Rkts_Mk1__Practice = {"clsid": "{D22C2D63-E5C9-4247-94FB-5E8F3DE22B71}", "name": "LAU-131 pod - 7 x 2.75\" Hydra, UnGd Rkts Mk1, Practice", "weight": 93.27} LAU_131_pod___7_x_2_75_Hydra__UnGd_Rkts_Mk5__HEAT = {"clsid": "{319293F2-392C-4617-8315-7C88C22AF7C4}", "name": "LAU-131 pod - 7 x 2.75\" Hydra, UnGd Rkts Mk5, HEAT", "weight": 91.17} LAU_131_pod___7_x_2_75_Hydra__UnGd_Rkts_Mk61__Practice = {"clsid": "{1CA5E00B-D545-4ff9-9B53-5970E292F14D}", 
"name": "LAU-131 pod - 7 x 2.75\" Hydra, UnGd Rkts Mk61, Practice", "weight": 93.27} LAU_131_pod___7_x_2_75_Hydra__UnGd_Rkts_WTU_1_B__Practice = {"clsid": "{DDCE7D70-5313-4181-8977-F11018681662}", "name": "LAU-131 pod - 7 x 2.75\" Hydra, UnGd Rkts WTU-1/B, Practice", "weight": 102.3} LAU_138_AIM_9L = {"clsid": "{LAU-138 wtip - AIM-9L}", "name": "LAU-138 AIM-9L", "weight": 85.5} LAU_138_AIM_9M = {"clsid": "{LAU-138 wtip - AIM-9M}", "name": "LAU-138 AIM-9M", "weight": 86.64} LAU_3_pod___19_x_2_75_FFAR__UnGd_Rkts_M156__Wht_Phos = {"clsid": "{LAU3_FFAR_WP156}", "name": "LAU-3 pod - 19 x 2.75\" FFAR, UnGd Rkts M156, Wht Phos", "weight": 312.8707256} LAU_3_pod___19_x_2_75_FFAR__UnGd_Rkts_Mk1__HE = {"clsid": "{LAU3_FFAR_MK1HE}", "name": "LAU-3 pod - 19 x 2.75\" FFAR, UnGd Rkts Mk1, HE", "weight": 285.292332} LAU_3_pod___19_x_2_75_FFAR__UnGd_Rkts_Mk5__HEAT = {"clsid": "{LAU3_FFAR_MK5HEAT}", "name": "LAU-3 pod - 19 x 2.75\" FFAR, UnGd Rkts Mk5, HEAT", "weight": 286.1541568} LAU_61R_pod___19_x_2_75_Hydra__UnGd_Rkts_M151__HE = {"clsid": "{LAU_61R}", "name": "LAU-61R pod - 19 x 2.75\" Hydra, UnGd Rkts M151, HE", "weight": 271.5} LAU_61_pod___19_x_2_75_Hydra__UnGd_Rkts_M151__HE = {"clsid": "{FD90A1DC-9147-49FA-BF56-CB83EF0BD32B}", "name": "LAU-61 pod - 19 x 2.75\" Hydra, UnGd Rkts M151, HE", "weight": 273.4} LAU_61_pod___19_x_2_75_Hydra__UnGd_Rkts_M156__Wht_Phos = {"clsid": "{3DFB7321-AB0E-11d7-9897-000476191836}", "name": "LAU-61 pod - 19 x 2.75\" Hydra, UnGd Rkts M156, Wht Phos", "weight": 274.92} LAU_61_pod___19_x_2_75_Hydra__UnGd_Rkts_M282__HEDP = {"clsid": "{LAU_61_M282}", "name": "LAU-61 pod - 19 x 2.75\" Hydra, UnGd Rkts M282, HEDP", "weight": 309.88} LAU_68_pod___7_x_2_75_FFAR__UnGd_Rkts_M156__Wht_Phos = {"clsid": "{LAU68_FFAR_WP156}", "name": "LAU-68 pod - 7 x 2.75\" FFAR, UnGd Rkts M156, Wht Phos", "weight": 120.1560568} LAU_68_pod___7_x_2_75_FFAR__UnGd_Rkts_Mk1__HE = {"clsid": "{LAU68_FFAR_MK1HE}", "name": "LAU-68 pod - 7 x 2.75\" FFAR, UnGd Rkts Mk1, HE", "weight": 
109.995596} LAU_68_pod___7_x_2_75_FFAR__UnGd_Rkts_Mk5__HEAT = {"clsid": "{LAU68_FFAR_MK5HEAT}", "name": "LAU-68 pod - 7 x 2.75\" FFAR, UnGd Rkts Mk5, HEAT", "weight": 110.3131104} LAU_68_pod___7_x_2_75_Hydra__UnGd_Rkts_M151__HE = {"clsid": "{A021F29D-18AB-4d3e-985C-FC9C60E35E9E}", "name": "LAU-68 pod - 7 x 2.75\" Hydra, UnGd Rkts M151, HE", "weight": 113.9} LAU_68_pod___7_x_2_75_Hydra__UnGd_Rkts_M156__Wht_Phos = {"clsid": "{4F977A2A-CD25-44df-90EF-164BFA2AE72F}", "name": "LAU-68 pod - 7 x 2.75\" Hydra, UnGd Rkts M156, Wht Phos", "weight": 114.46} LAU_68_pod___7_x_2_75_Hydra__UnGd_Rkts_M257__Para_Illum = {"clsid": "{647C5F26-BDD1-41e6-A371-8DE1E4CC0E94}", "name": "LAU-68 pod - 7 x 2.75\" Hydra, UnGd Rkts M257, Para Illum", "weight": 118.8} LAU_68_pod___7_x_2_75_Hydra__UnGd_Rkts_M274__Practice_Smk = {"clsid": "{0877B74B-5A00-4e61-BA8A-A56450BA9E27}", "name": "LAU-68 pod - 7 x 2.75\" Hydra, UnGd Rkts M274, Practice Smk", "weight": 113.2} LAU_68_pod___7_x_2_75_Hydra__UnGd_Rkts_M282__HEDP = {"clsid": "{LAU_68_M282}", "name": "LAU-68 pod - 7 x 2.75\" Hydra, UnGd Rkts M282, HEDP", "weight": 127.34} LAU_68_pod___7_x_2_75_Hydra__UnGd_Rkts_Mk1__Practice = {"clsid": "{FC85D2ED-501A-48ce-9863-49D468DDD5FC}", "name": "LAU-68 pod - 7 x 2.75\" Hydra, UnGd Rkts Mk1, Practice", "weight": 104.17} LAU_68_pod___7_x_2_75_Hydra__UnGd_Rkts_Mk5__HEAT = {"clsid": "{174C6E6D-0C3D-42ff-BCB3-0853CB371F5C}", "name": "LAU-68 pod - 7 x 2.75\" Hydra, UnGd Rkts Mk5, HEAT", "weight": 102.07} LAU_68_pod___7_x_2_75_Hydra__UnGd_Rkts_Mk61__Practice = {"clsid": "{65396399-9F5C-4ec3-A7D2-5A8F4C1D90C4}", "name": "LAU-68 pod - 7 x 2.75\" Hydra, UnGd Rkts Mk61, Practice", "weight": 104.17} LAU_68_pod___7_x_2_75_Hydra__UnGd_Rkts_WTU_1_B__Practice = {"clsid": "{1F7136CB-8120-4e77-B97B-945FF01FB67C}", "name": "LAU-68 pod - 7 x 2.75\" Hydra, UnGd Rkts WTU-1/B, Practice", "weight": 113.2} LAU_7_AIM_9L = {"clsid": "{LAU-7 - AIM-9L}", "name": "LAU-7 AIM-9L", "weight": 100.5} LAU_7_AIM_9M = {"clsid": "{LAU-7 - 
AIM-9M}", "name": "LAU-7 AIM-9M", "weight": 101.64} LAU_7_with_2_x_AIM_9B_Sidewinder_IR_AAM = {"clsid": "{F4-2-AIM9B}", "name": "LAU-7 with 2 x AIM-9B Sidewinder IR AAM", "weight": 178.78} LAU_7_with_2_x_AIM_9L_Sidewinder_IR_AAM = {"clsid": "{F4-2-AIM9L}", "name": "LAU-7 with 2 x AIM-9L Sidewinder IR AAM", "weight": 201.46} LAU_7_with_2_x_AIM_9M_Sidewinder_IR_AAM = {"clsid": "{9DDF5297-94B9-42FC-A45E-6E316121CD85}", "name": "LAU-7 with 2 x AIM-9M Sidewinder IR AAM", "weight": 201.46} LAU_7_with_2_x_AIM_9P5_Sidewinder_IR_AAM = {"clsid": "{F4-2-AIM9P5}", "name": "LAU-7 with 2 x AIM-9P5 Sidewinder IR AAM", "weight": 201} LAU_7_with_2_x_AIM_9P_Sidewinder_IR_AAM = {"clsid": "{773675AB-7C29-422f-AFD8-32844A7B7F17}", "name": "LAU-7 with 2 x AIM-9P Sidewinder IR AAM", "weight": 202.36} LAU_7_with_AIM_9B_Sidewinder_IR_AAM = {"clsid": "{GAR-8}", "name": "LAU-7 with AIM-9B Sidewinder IR AAM", "weight": 89.39} LAU_7_with_AIM_9M_Sidewinder_IR_AAM = {"clsid": "{AIM-9M-ON-ADAPTER}", "name": "LAU-7 with AIM-9M Sidewinder IR AAM", "weight": 100.73} LAU_7_with_AIM_9P5_Sidewinder_IR_AAM = {"clsid": "{AIM-9P5-ON-ADAPTER}", "name": "LAU-7 with AIM-9P5 Sidewinder IR AAM", "weight": 100.5} LAU_7_with_AIM_9P_Sidewinder_IR_AAM = {"clsid": "{AIM-9P-ON-ADAPTER}", "name": "LAU-7 with AIM-9P Sidewinder IR AAM", "weight": 101.18} LAU_7_with_AIM_9X_Sidewinder_IR_AAM = {"clsid": "{AIM-9X-ON-ADAPTER}", "name": "LAU-7 with AIM-9X Sidewinder IR AAM", "weight": 99.46} LAU_7_with_AN_ASQ_T50_TCTS_Pod___ACMI_Pod = {"clsid": "{LAU-7_AIS_ASQ_T50}", "name": "LAU-7 with AN/ASQ-T50 TCTS Pod - ACMI Pod", "weight": 92.6} LAU_88_AGM_65D_ONE = {"clsid": "LAU_88_AGM_65D_ONE", "name": "LAU-88 with 1 x AGM-65D - Maverick D (IIR ASM)", "weight": 429} LAU_88_AGM_65H = {"clsid": "LAU_88_AGM_65H", "name": "LAU-88 with 1 x AGM-65H - Maverick H (CCD Imp ASM)", "weight": 419} LAU_88_AGM_65H_2_L = {"clsid": "LAU_88_AGM_65H_2_L", "name": "LAU-88 with 2 x AGM-65H - Maverick H (CCD Imp ASM)", "weight": 627} LAU_88_AGM_65H_2_R 
= {"clsid": "LAU_88_AGM_65H_2_R", "name": "LAU-88 with 2 x AGM-65H - Maverick H (CCD Imp ASM)", "weight": 627} LAU_88_AGM_65H_3 = {"clsid": "LAU_88_AGM_65H_3", "name": "LAU-88 with 3 x AGM-65H - Maverick H (CCD Imp ASM)", "weight": 835} LAU_88_with_2_x_AGM_65D___Maverick_D__IIR_ASM_ = {"clsid": "{E6A6262A-CA08-4B3D-B030-E1A993B98452}", "name": "LAU-88 with 2 x AGM-65D - Maverick D (IIR ASM)", "weight": 647} LAU_88_with_2_x_AGM_65D___Maverick_D__IIR_ASM__ = {"clsid": "{E6A6262A-CA08-4B3D-B030-E1A993B98453}", "name": "LAU-88 with 2 x AGM-65D - Maverick D (IIR ASM)", "weight": 647} LAU_88_with_2_x_AGM_65E___Maverick_E__Laser_ASM___Lg_Whd_ = {"clsid": "{2CC29C7A-E863-411C-8A6E-BD6F0E730548}", "name": "LAU-88 with 2 x AGM-65E - Maverick E (Laser ASM - Lg Whd)", "weight": 783} LAU_88_with_2_x_AGM_65E___Maverick_E__Laser_ASM___Lg_Whd__ = {"clsid": "{2CC29C7A-E863-411C-8A6E-BD6F0E730547}", "name": "LAU-88 with 2 x AGM-65E - Maverick E (Laser ASM - Lg Whd)", "weight": 783} LAU_88_with_2_x_AGM_65K___Maverick_K__CCD_Imp_ASM_ = {"clsid": "{D7670BC7-881B-4094-906C-73879CF7EB28}", "name": "LAU-88 with 2 x AGM-65K - Maverick K (CCD Imp ASM)", "weight": 805} LAU_88_with_2_x_AGM_65K___Maverick_K__CCD_Imp_ASM__ = {"clsid": "{D7670BC7-881B-4094-906C-73879CF7EB27}", "name": "LAU-88 with 2 x AGM-65K - Maverick K (CCD Imp ASM)", "weight": 805} LAU_88_with_3_x_AGM_65D___Maverick_D__IIR_ASM_ = {"clsid": "{DAC53A2F-79CA-42FF-A77A-F5649B601308}", "name": "LAU-88 with 3 x AGM-65D - Maverick D (IIR ASM)", "weight": 865} LAU_88_with_3_x_AGM_65E___Maverick_E__Laser_ASM___Lg_Whd_ = {"clsid": "{71AAB9B8-81C1-4925-BE50-1EF8E9899271}", "name": "LAU-88 with 3 x AGM-65E - Maverick E (Laser ASM - Lg Whd)", "weight": 1069} LAU_88_with_3_x_AGM_65K___Maverick_K__CCD_Imp_ASM_ = {"clsid": "{907D835F-E650-4154-BAFD-C656882555C0}", "name": "LAU-88 with 3 x AGM-65K - Maverick K (CCD Imp ASM)", "weight": 1102} LAU_SNEB68G___8xSNEB68_EAP = {"clsid": "{LAU_SNEB68G}", "name": "LAU_SNEB68G - 8xSNEB68_EAP", 
"weight": 50.08} LAU_SNEB68G___8xSNEB68_WP = {"clsid": "{LAU_SNEB68_WP}", "name": "LAU_SNEB68G - 8xSNEB68_WP", "weight": 50.08} Lantirn_F_16 = {"clsid": "{CAAC1CFD-6745-416B-AFA4-CB57414856D0}", "name": "Lantirn F-16", "weight": 445} Lantirn_Target_Pod = {"clsid": "{D1744B93-2A8A-4C4D-B004-7A09CD8C8F3F}", "name": "Lantirn Target Pod", "weight": 200} LR_25___25_x_ARF_8_M3_API = {"clsid": "{LR25_ARF8M3_API}", "name": "LR-25 - 25 x ARF-8/M3 API", "weight": 141} LR_25___25_x_ARF_8_M3_HEI = {"clsid": "{LR25_ARF8M3_HEI}", "name": "LR-25 - 25 x ARF-8/M3 HEI", "weight": 161} LR_25___25_x_ARF_8_M3_TP_SM = {"clsid": "{LR25_ARF8M3_TPSM}", "name": "LR-25 - 25 x ARF-8/M3 TP-SM", "weight": 141} L_081_Fantasmagoria_ELINT_pod = {"clsid": "{0519A264-0AB6-11d6-9193-00A0249B6F00}", "name": "L-081 Fantasmagoria ELINT pod", "weight": 300} M10_Smoke_Tank___blue = {"clsid": "{US_M10_SMOKE_TANK_BLUE}", "name": "M10 Smoke Tank - blue", "weight": 266.7} M10_Smoke_Tank___green = {"clsid": "{US_M10_SMOKE_TANK_GREEN}", "name": "M10 Smoke Tank - green", "weight": 266.7} M10_Smoke_Tank___orange = {"clsid": "{US_M10_SMOKE_TANK_ORANGE}", "name": "M10 Smoke Tank - orange", "weight": 266.7} M10_Smoke_Tank___red = {"clsid": "{US_M10_SMOKE_TANK_RED}", "name": "M10 Smoke Tank - red", "weight": 266.7} M10_Smoke_Tank___white = {"clsid": "{US_M10_SMOKE_TANK_WHITE}", "name": "M10 Smoke Tank - white", "weight": 266.7} M10_Smoke_Tank___yellow = {"clsid": "{US_M10_SMOKE_TANK_YELLOW}", "name": "M10 Smoke Tank - yellow", "weight": 266.7} M117___750lb_GP_Bomb_LD = {"clsid": "{00F5DAC4-0466-4122-998F-B1A298E34113}", "name": "M117 - 750lb GP Bomb LD", "weight": 340} M134_L = {"clsid": "M134_L", "name": "M134 - 6 x 7.62mm MiniGun left", "weight": 146.4} M134_R = {"clsid": "M134_R", "name": "M134 - 6 x 7.62mm MiniGun right", "weight": 146.4} M134_SIDE_L = {"clsid": "M134_SIDE_L", "name": "M134 - 6 x 7.62mm MiniGun left door", "weight": 270.4} M134_SIDE_R = {"clsid": "M134_SIDE_R", "name": "M134 - 6 x 7.62mm MiniGun 
right door", "weight": 270.4} M2000_Fuel_tank = {"clsid": "{414DA830-B61A-4F9E-B71B-C2F6832E1D7A}", "name": "M2000 Fuel tank", "weight": 1050} M260_HYDRA = {"clsid": "M260_HYDRA", "name": "M260 pod - 7 x 2.75\" Hydra, UnGd Rkts Mk5, HEAT", "weight": 112} M260_HYDRA_WP = {"clsid": "M260_HYDRA_WP", "name": "M260 pod - 7 x 2.75\" Hydra, UnGd Rkts M156, Wht Phos", "weight": 112} M261_MK151 = {"clsid": "M261_MK151", "name": "M261 pod - 19 x 2.75\" Hydra, UnGd Rkts M151, HE", "weight": 234} M261_MK156 = {"clsid": "M261_MK156", "name": "M261 pod - 19 x 2.75\" Hydra, UnGd Rkts M156, Wht Phos", "weight": 234} M60_SIDE_L = {"clsid": "M60_SIDE_L", "name": "M60 - 7.62mm MG left door", "weight": 141.4} M60_SIDE_R = {"clsid": "M60_SIDE_R", "name": "M60 - 7.62mm MG right door", "weight": 141.4} MAK79_2_MK_20 = {"clsid": "{MAK79_MK20 2L}", "name": "MAK79 2 MK-20", "weight": 464} MAK79_2_MK_20_ = {"clsid": "{MAK79_MK20 2R}", "name": "MAK79 2 MK-20", "weight": 464} MAK79_3_BDU_33 = {"clsid": "{MAK79_BDU33 3L}", "name": "MAK79 3 BDU-33", "weight": 63} MAK79_3_BDU_33_ = {"clsid": "{MAK79_BDU33 3R}", "name": "MAK79 3 BDU-33", "weight": 63} MAK79_3_BDU_45 = {"clsid": "{MAK79_BDU45 3L}", "name": "MAK79 3 BDU-45", "weight": 726} MAK79_3_BDU_45B = {"clsid": "{MAK79_BDU45B 3L}", "name": "MAK79 3 BDU-45B", "weight": 726} MAK79_3_BDU_45B_ = {"clsid": "{MAK79_BDU45B 3R}", "name": "MAK79 3 BDU-45B", "weight": 726} MAK79_3_BDU_45_ = {"clsid": "{MAK79_BDU45 3R}", "name": "MAK79 3 BDU-45", "weight": 726} MAK79_3_Mk_81 = {"clsid": "{MAK79_MK81 3L}", "name": "MAK79 3 Mk-81", "weight": 384} MAK79_3_Mk_81_ = {"clsid": "{MAK79_MK81 3R}", "name": "MAK79 3 Mk-81", "weight": 384} MAK79_3_Mk_82 = {"clsid": "{MAK79_MK82 3L}", "name": "MAK79 3 Mk-82", "weight": 753} MAK79_3_Mk_82AIR = {"clsid": "{MAK79_MK82AIR 3L}", "name": "MAK79 3 Mk-82AIR", "weight": 753} MAK79_3_Mk_82AIR_ = {"clsid": "{MAK79_MK82AIR 3R}", "name": "MAK79 3 Mk-82AIR", "weight": 753} MAK79_3_Mk_82_ = {"clsid": "{MAK79_MK82 3R}", "name": 
"MAK79 3 Mk-82", "weight": 753} MAK79_3_Mk_82_SnakeEye = {"clsid": "{MAK79_MK82SE 3L}", "name": "MAK79 3 Mk-82 SnakeEye", "weight": 753} MAK79_3_Mk_82_SnakeEye_ = {"clsid": "{MAK79_MK82SE 3R}", "name": "MAK79 3 Mk-82 SnakeEye", "weight": 753} MAK79_3_Mk_83 = {"clsid": "{MAK79_MK83 3L}", "name": "MAK79 3 Mk-83", "weight": 1371} MAK79_3_Mk_83_ = {"clsid": "{MAK79_MK83 3R}", "name": "MAK79 3 Mk-83", "weight": 1371} MAK79_4_BDU_33 = {"clsid": "{MAK79_BDU33 4}", "name": "MAK79 4 BDU-33", "weight": 84} MAK79_4_BDU_45 = {"clsid": "{MAK79_BDU45 4}", "name": "MAK79 4 BDU-45", "weight": 968} MAK79_4_BDU_45B = {"clsid": "{MAK79_BDU45B 4}", "name": "MAK79 4 BDU-45B", "weight": 968} MAK79_4_Mk_81 = {"clsid": "{MAK79_MK81 4}", "name": "MAK79 4 Mk-81", "weight": 512} MAK79_4_Mk_82 = {"clsid": "{MAK79_MK82 4}", "name": "MAK79 4 Mk-82", "weight": 1004} MAK79_4_Mk_82AIR = {"clsid": "{MAK79_MK82AIR 4}", "name": "MAK79 4 Mk-82AIR", "weight": 1004} MAK79_4_Mk_82_SnakeEye = {"clsid": "{MAK79_MK82SE 4}", "name": "MAK79 4 Mk-82 SnakeEye", "weight": 1004} MAK79_MK_20 = {"clsid": "{MAK79_MK20 1R}", "name": "MAK79 MK-20", "weight": 232} MAK79_MK_20_ = {"clsid": "{MAK79_MK20 1L}", "name": "MAK79 MK-20", "weight": 232} MAK79_Mk_83 = {"clsid": "{MAK79_MK83 1R}", "name": "MAK79 Mk-83", "weight": 457} MAK79_Mk_83_ = {"clsid": "{MAK79_MK83 1L}", "name": "MAK79 Mk-83", "weight": 457} Matra_Magic_II = {"clsid": "{MMagicII}", "name": "Matra Magic II", "weight": 85} Matra_Super_530D = {"clsid": "{Matra_S530D}", "name": "Matra Super 530D", "weight": 350} Matra_Type_155_Rocket_Pod = {"clsid": "{Matra155RocketPod}", "name": "Matra Type 155 Rocket Pod", "weight": 190} MBD2_67U_with_4_x_FAB_100___100kg_GP_Bombs_LD = {"clsid": "{5A1AC2B4-CA4B-4D09-A1AF-AC52FBC4B60B}", "name": "MBD2-67U with 4 x FAB-100 - 100kg GP Bombs LD", "weight": 465} MBD2_67U_with_4_x_FAB_100___100kg_GP_Bombs_LD_ = {"clsid": "{29A828E2-C6BB-11d8-9897-000476191836}", "name": "MBD2-67U with 4 x FAB-100 - 100kg GP Bombs LD", "weight": 
465} MBD3_U2T_with_2_x_FAB_1500_M_54___1500kg_GP_Bombs_LD = {"clsid": "{7C5F0F5F-0A0B-46E8-937C-8922303E39A8}", "name": "MBD3-U2T with 2 x FAB-1500 M-54 - 1500kg GP Bombs LD", "weight": 3100} MBD3_U4T_with_4_x_FAB_250___250kg_GP_Bombs_LD = {"clsid": "{6A367BB4-327F-4A04-8D9E-6D86BDC98E7E}", "name": "MBD3-U4T with 4 x FAB-250 - 250kg GP Bombs LD", "weight": 1060} MBD3_U4T_with_4_x_RBK_250___42_x_PTAB_2_5M__250kg_CBUs_Medium_HEAT_AP = {"clsid": "{02B81892-7E24-4795-84F9-B8110C641AF0}", "name": "MBD3-U4T with 4 x RBK-250 - 42 x PTAB-2.5M, 250kg CBUs Medium HEAT/AP", "weight": 1126.4} MBD3_U6_68_with_2_x_FAB_250___250kg_GP_Bombs_LD = {"clsid": "{E659C4BE-2CD8-4472-8C08-3F28ACB61A8A}", "name": "MBD3-U6-68 with 2 x FAB-250 - 250kg GP Bombs LD", "weight": 550} MBD3_U6_68_with_3_x_FAB_250___250kg_GP_Bombs_LD = {"clsid": "{MBD3_U6_3*FAB-250_fwd}", "name": "MBD3-U6-68 with 3 x FAB-250 - 250kg GP Bombs LD", "weight": 810} MBD3_U6_68_with_4_x_FAB_250___250kg_GP_Bombs_LD = {"clsid": "{3E35F8C1-052D-11d6-9191-00A0249B6F00}", "name": "MBD3-U6-68 with 4 x FAB-250 - 250kg GP Bombs LD", "weight": 1060} MBD3_U6_68_with_4_x_FAB_250___250kg_GP_Bombs_LD_ = {"clsid": "{MBD3_U6_4*FAB-250_fwd}", "name": "MBD3-U6-68 with 4 x FAB-250 - 250kg GP Bombs LD", "weight": 1060} MBD3_U6_68_with_5_x_FAB_250___250kg_GP_Bombs_LD = {"clsid": "{MBD3_U6_5*FAB-250}", "name": "MBD3-U6-68 with 5 x FAB-250 - 250kg GP Bombs LD", "weight": 1310} MBD3_U6_68_with_6_x_BetAB_500ShP___500kg_Concrete_Piercing_HD_w_booster_Bombs = {"clsid": "{E96E1EDD-FF3F-47CF-A959-576C3B682955}", "name": "MBD3-U6-68 with 6 x BetAB-500ShP - 500kg Concrete Piercing HD w booster Bombs", "weight": 3060} MBD3_U6_68_with_6_x_BetAB_500___500kg_Concrete_Piercing_Bombs_LD = {"clsid": "{436C6FB9-8BF2-46B6-9DC4-F55ABF3CD1EC}", "name": "MBD3-U6-68 with 6 x BetAB-500 - 500kg Concrete Piercing Bombs LD", "weight": 3060} MBD3_U6_68_with_6_x_FAB_100___100kg_GP_Bombs_LD = {"clsid": "{F99BEC1A-869D-4AC7-9730-FBA0E3B1F5FC}", "name": "MBD3-U6-68 with 6 
x FAB-100 - 100kg GP Bombs LD", "weight": 660} MBD3_U6_68_with_6_x_FAB_250___250kg_GP_Bombs_LD = {"clsid": "{53BE25A4-C86C-4571-9BC0-47D668349595}", "name": "MBD3-U6-68 with 6 x FAB-250 - 250kg GP Bombs LD", "weight": 1560} MBD3_U6_68_with_6_x_FAB_500_M_62___500kg_GP_Bombs_LD = {"clsid": "{FA673F4C-D9E4-4993-AA7A-019A92F3C005}", "name": "MBD3-U6-68 with 6 x FAB-500 M-62 - 500kg GP Bombs LD", "weight": 3060} MBD3_U6_68_with_6_x_FAB_500_M_62___500kg_GP_Bombs_LD_ = {"clsid": "{0D945D78-542C-4E9B-9A17-9B5008CC8D39}", "name": "MBD3-U6-68 with 6 x FAB-500 M-62 - 500kg GP Bombs LD", "weight": 3060} MBD3_U6_68_with_6_x_RBK_500_255___30_x_PTAB_10_5__500kg_CBUs_Heavy_HEAT_AP = {"clsid": "{F503C276-FE15-4C54-B310-17B50B735A84}", "name": "MBD3-U6-68 with 6 x RBK-500-255 - 30 x PTAB-10-5, 500kg CBUs Heavy HEAT/AP", "weight": 3060} MBD3_U6_68_with_6_x_RBK_500_255___30_x_PTAB_10_5__500kg_CBUs_Heavy_HEAT_AP_ = {"clsid": "{4D459A95-59C0-462F-8A57-34E80697F38B}", "name": "MBD3-U6-68 with 6 x RBK-500-255 - 30 x PTAB-10-5, 500kg CBUs Heavy HEAT/AP", "weight": 3060} MBD3_U9M_with_9_x_FAB_100___100kg_GP_Bombs_LD = {"clsid": "{5F1C54C0-0ABD-4868-A883-B52FF9FCB422}", "name": "MBD3-U9M with 9 x FAB-100 - 100kg GP Bombs LD", "weight": 960} MBD3_U9M_with_9_x_FAB_250___250kg_GP_Bombs_LD = {"clsid": "{E1AAE713-5FC3-4CAA-9FF5-3FDCFB899E33}", "name": "MBD3-U9M with 9 x FAB-250 - 250kg GP Bombs LD", "weight": 2310} MBD3_U9M_with_9_x_RBK_250___42_x_PTAB_2_5M__250kg_CBUs_Medium_HEAT_AP = {"clsid": "{BF83E8FD-E7A2-40D2-9608-42E13AFE2193}", "name": "MBD3-U9M with 9 x RBK-250 - 42 x PTAB-2.5M, 250kg CBUs Medium HEAT/AP", "weight": 2535} MBD3_with_3_x_BetAB_500___500kg_Concrete_Piercing_Bombs_LD = {"clsid": "{005E70F5-C3EA-4E95-A148-C1044C42D845}", "name": "MBD3 with 3 x BetAB-500 - 500kg Concrete Piercing Bombs LD", "weight": 1566} MBD3_with_3_x_FAB_100___100kg_GP_Bombs_LD = {"clsid": "{CEE04106-B9AA-46B4-9CD1-CD3FDCF0CE78}", "name": "MBD3 with 3 x FAB-100 - 100kg GP Bombs LD", "weight": 360} 
# MBD3 / MER multiple-ejector-rack payload definitions (generated data).
# Each constant maps a rack+store combination to a dict with:
#   "clsid"  - the sim's class-id string for this store (matched verbatim by the engine),
#   "name"   - human-readable display label,
#   "weight" - presumably total store mass in kg; None elsewhere in this table
#              means "not defined" -- TODO confirm units against the generator.
# Values are upstream data: do not edit by hand.
MBD3_with_3_x_FAB_250___250kg_GP_Bombs_LD = {"clsid": "{D109EE9C-A1B7-4F1C-8D87-631C293A1D26}", "name": "MBD3 with 3 x FAB-250 - 250kg GP Bombs LD", "weight": 810}
MBD3_with_3_x_FAB_500_M_62___500kg_GP_Bombs_LD = {"clsid": "{A1E85991-B58E-4E92-AE91-DED6DC85B2E7}", "name": "MBD3 with 3 x FAB-500 M-62 - 500kg GP Bombs LD", "weight": 1560}
MBD3_with_3_x_RBK_250___42_x_PTAB_2_5M__250kg_CBUs_Medium_HEAT_AP = {"clsid": "{EAD9B2C1-F3BA-4A7B-A2A5-84E2AF8A1975}", "name": "MBD3 with 3 x RBK-250 - 42 x PTAB 2.5M, 250kg CBUs Medium HEAT/AP", "weight": 885}
MBD3_with_3_x_RBK_500_255___30_x_PTAB_10_5__500kg_CBUs_Heavy_HEAT_AP = {"clsid": "{919CE839-9390-4629-BAF7-229DE19B8523}", "name": "MBD3 with 3 x RBK-500-255 - 30 x PTAB-10-5, 500kg CBUs Heavy HEAT/AP", "weight": 1560}
MER12_with_12_x_M117___750lb_GP_Bombs_LD = {"clsid": "{574EDEDF-20DE-4942-B2A2-B2EDFD621562}", "name": "MER12 with 12 x M117 - 750lb GP Bombs LD", "weight": 4250}
MER12_with_12_x_Mk_82___500lb_GP_Bombs_LD = {"clsid": "{585D626E-7F42-4073-AB70-41E728C333E2}", "name": "MER12 with 12 x Mk-82 - 500lb GP Bombs LD", "weight": 3000}
MER2_with_2_x_Mk_20_Rockeye___490lbs_CBUs__247_x_HEAT_Bomblets = {"clsid": "{0B9ABA77-93B8-45FC-9C63-82AFB2CB50A4}", "name": "MER2 with 2 x Mk-20 Rockeye - 490lbs CBUs, 247 x HEAT Bomblets", "weight": 553}
MER2_with_2_x_Mk_82___500lb_GP_Bombs_LD = {"clsid": "{D5D51E24-348C-4702-96AF-97A714E72697}", "name": "MER2 with 2 x Mk-82 - 500lb GP Bombs LD", "weight": 565}
MER2_with_2_x_Mk_83___1000lb_GP_Bombs_LD = {"clsid": "{18617C93-78E7-4359-A8CE-D754103EDF63}", "name": "MER2 with 2 x Mk-83 - 1000lb GP Bombs LD", "weight": 1017}
MER3_with_3_x_M117___750lb_GP_Bombs_LD = {"clsid": "{82F90BEC-0E2E-4CE5-A66E-1E4ADA2B5D1E}", "name": "MER3 with 3 x M117 - 750lb GP Bombs LD", "weight": 1060}
MER6_with_6_x_BLU_107___440lb_Anti_Runway_Penetrator_Bombs = {"clsid": "{752B9782-F962-11d5-9190-00A0249B6F00}", "name": "MER6 with 6 x BLU-107 - 440lb Anti-Runway Penetrator Bombs", "weight": 1800}
# Store/weapon definitions, MER6 racks through R-24T missiles (generated data).
# Same schema as the rest of this table: "clsid" is the sim's class-id string,
# "name" the display label, "weight" presumably the store mass in kg (None where
# the generator supplied no value, e.g. MIM_104, REFLEX_9M119) -- TODO confirm.
# Trailing-underscore variants (Mistral_, Mk_83_, MPS_410_, ...) are distinct
# clsids that share a display name; underscores disambiguate the Python names.
# Values are upstream data: do not edit by hand.
MER6_with_6_x_M117___750lb_GP_Bombs_LD = {"clsid": "{6CDB6B36-7165-47D0-889F-6625FB333561}", "name": "MER6 with 6 x M117 - 750lb GP Bombs LD", "weight": 2100}
MER6_with_6_x_Mk_20_Rockeye___490lbs_CBUs__247_x_HEAT_Bomblets = {"clsid": "{3C7CD675-7D39-41C5-8735-0F4F537818A8}", "name": "MER6 with 6 x Mk-20 Rockeye - 490lbs CBUs, 247 x HEAT Bomblets", "weight": 1392}
MER6_with_6_x_Mk_82___500lb_GP_Bombs_LD = {"clsid": "{1C97B4A0-AA3B-43A8-8EE7-D11071457185}", "name": "MER6 with 6 x Mk-82 - 500lb GP Bombs LD", "weight": 1506}
Mercury_LLTV_Pod = {"clsid": "{B1EF6B0E-3D91-4047-A7A5-A99E7D8B4A8B}", "name": "Mercury LLTV Pod", "weight": 230}
MICA_IR = {"clsid": "{0DA03783-61E4-40B2-8FAE-6AEE0A5C5AAE}", "name": "MICA IR", "weight": 110}
MICA_RF = {"clsid": "{6D778860-7BB8-4ACB-9E95-BA772C6BBC2C}", "name": "MICA RF", "weight": 110}
MIM_104 = {"clsid": "MIM_104", "name": "M901 PATRIOT", "weight": None}
MIM_72 = {"clsid": "MIM_72", "name": "M48 CHAPARRAL", "weight": None}
Mistral = {"clsid": "{MBDA_MistralG}", "name": "Mistral", "weight": 27.2}
Mistral_ = {"clsid": "{MBDA_MistralD}", "name": "Mistral", "weight": 27.2}
MK_82_28 = {"clsid": "MK_82*28", "name": "28 x Mk-82 - 500lb GP Bombs LD", "weight": 6748}
Mk_20 = {"clsid": "{BRU-32 MK-20}", "name": "Mk-20", "weight": 279.38}
Mk_20_18 = {"clsid": "{ACADB374-6D6C-45A0-BA7C-B22B2E108AE4}", "name": "Mk 20*18", "weight": 3996}
Mk_20_Rockeye___490lbs_CBU__247_x_HEAT_Bomblets = {"clsid": "{ADD3FAE1-EBF6-4EF9-8EFC-B36B5DDF1E6B}", "name": "Mk-20 Rockeye - 490lbs CBU, 247 x HEAT Bomblets", "weight": 222}
Mk_81___250lb_GP_Bomb_LD = {"clsid": "{90321C8E-7ED1-47D4-A160-E074D5ABD902}", "name": "Mk-81 - 250lb GP Bomb LD", "weight": 118}
Mk_82 = {"clsid": "{BRU-32 MK-82}", "name": "Mk-82", "weight": 298.38}
Mk_82AIR = {"clsid": "{BRU-32 MK-82AIR}", "name": "Mk-82AIR", "weight": 298.38}
Mk_82Y___500lb_GP_Chute_Retarded_HD = {"clsid": "{Mk_82Y}", "name": "Mk-82Y - 500lb GP Chute Retarded HD", "weight": 232}
Mk_82_AIR_Ballute___500lb_GP_Bomb_HD = {"clsid": "{Mk82AIR}", "name": "Mk-82 AIR Ballute - 500lb GP Bomb HD", "weight": 242}
Mk_82_SnakeEye = {"clsid": "{BRU-32 MK-82SE}", "name": "Mk-82 SnakeEye", "weight": 298.38}
Mk_82_Snakeye___500lb_GP_Bomb_HD = {"clsid": "{Mk82SNAKEYE}", "name": "Mk-82 Snakeye - 500lb GP Bomb HD", "weight": 249.5}
Mk_82___500lb_GP_Bomb_LD = {"clsid": "{BCE4E030-38E9-423E-98ED-24BE3DA87C32}", "name": "Mk-82 - 500lb GP Bomb LD", "weight": 228}
Mk_83 = {"clsid": "{BRU-32 MK-83}", "name": "Mk-83", "weight": 504.38}
Mk_83CT = {"clsid": "{Mk_83CT}", "name": "Mk-83CT", "weight": 454}
Mk_83_ = {"clsid": "{BRU42_MK83 RS}", "name": "Mk-83", "weight": 575}
Mk_83__ = {"clsid": "{BRU3242_MK83 RS}", "name": "Mk-83", "weight": 632.38}
Mk_83___ = {"clsid": "{PHXBRU3242_MK83 RS}", "name": "Mk-83", "weight": 632.38}
Mk_83___1000lb_GP_Bomb_LD = {"clsid": "{7A44FF09-527C-4B7E-B42B-3F111CFE50FB}", "name": "Mk-83 - 1000lb GP Bomb LD", "weight": 454}
Mk_83____ = {"clsid": "{BRU42_MK83 LS}", "name": "Mk-83", "weight": 575}
Mk_83_____ = {"clsid": "{BRU3242_MK83 LS}", "name": "Mk-83", "weight": 632.38}
Mk_83______ = {"clsid": "{PHXBRU3242_MK83 LS}", "name": "Mk-83", "weight": 632.38}
Mk_84 = {"clsid": "{BRU-32 MK-84}", "name": "Mk-84", "weight": 951.38}
Mk_84_18 = {"clsid": "{F092B80C-BB54-477E-9408-66DEEF740008}", "name": "Mk 84*18", "weight": 16092}
Mk_84_28 = {"clsid": "{D3ABF208-FA56-4D56-BB31-E0D931D57AE3}", "name": "Mk 84*28", "weight": 25032}
Mk_84___2000lb_GP_Bomb_LD = {"clsid": "{AB8B8299-F1CC-4359-89B5-2172E0CF4A5A}", "name": "Mk-84 - 2000lb GP Bomb LD", "weight": 894}
MPS_410 = {"clsid": "{44EE8698-89F9-48EE-AF36-5FD31896A82D}", "name": "MPS-410", "weight": 150}
MPS_410_ = {"clsid": "{44EE8698-89F9-48EE-AF36-5FD31896A82C}", "name": "MPS-410", "weight": 150}
MXU_648_TP = {"clsid": "MXU-648-TP", "name": "MXU-648 Travel Pod", "weight": 300}
ODAB_500PM___525_kg__bomb__parachute__simulated_aerosol = {"clsid": "{ODAB-500PM}", "name": "ODAB-500PM - 525 kg, bomb, parachute, simulated aerosol", "weight": 520}
OFAB_100_120_TU_x_4 = {"clsid": "{OFAB-100-120-TU}", "name": "OFAB-100-120-TU x 4", "weight": 557}
OFAB_100_Jupiter___100kg_GP_Bomb_LD = {"clsid": "{OFAB_100_Jupiter}", "name": "OFAB-100 Jupiter - 100kg GP Bomb LD", "weight": 121}
ORO_57K___S_5M1_HE_FRAG_FFAR_x_8 = {"clsid": "{ORO57K_S5M1_HEFRAG}", "name": "ORO-57K - S-5M1 HE-FRAG FFAR x 8", "weight": 63.88}
ORO_57K___S_5MO_HE_FRAG_FFAR_x_8 = {"clsid": "{ORO57K_S5MO_HEFRAG}", "name": "ORO-57K - S-5MO HE-FRAG FFAR x 8", "weight": 63.88}
ORO_57K___S_5M_x_8 = {"clsid": "{ORO57K_S5M_HEFRAG}", "name": "ORO-57K - S-5M x 8", "weight": 64.92}
oh_58_brauning = {"clsid": "oh-58-brauning", "name": "OH-58D Brauning", "weight": 290}
Pavetack_F_111 = {"clsid": "{199D6D51-1764-497E-9AE5-7D07C8D4D87E}", "name": "Pavetack F-111", "weight": 200}
PKT_7_62 = {"clsid": "PKT_7_62", "name": "PKT 7.62mm MMG", "weight": 90}
PK_3___7_62mm_GPMG = {"clsid": "{PK-3}", "name": "PK-3 - 7.62mm GPMG", "weight": 218}
PTB300_MIG15 = {"clsid": "PTB300_MIG15", "name": "Fuel Tank 300 liters", "weight": 271}
PTB400_MIG15 = {"clsid": "PTB400_MIG15", "name": "Fuel Tank 400 liters", "weight": 364}
PTB400_MIG19 = {"clsid": "PTB400_MIG19", "name": "Fuel Tank 400 liters", "weight": 364}
PTB600_MIG15 = {"clsid": "PTB600_MIG15", "name": "Fuel Tank 600 liters", "weight": 531}
PTB760_MIG19 = {"clsid": "PTB760_MIG19", "name": "Fuel Tank 760 liters", "weight": 663.8}
P_50T___50kg_Practice_Bomb_LD = {"clsid": "{P-50T}", "name": "P-50T - 50kg Practice Bomb LD", "weight": 50}
RBK_250_275___150_x_AO_1SCh__250kg_CBU_HE_Frag = {"clsid": "{RBK_250_275_AO_1SCH}", "name": "RBK-250-275 - 150 x AO-1SCh, 250kg CBU HE/Frag", "weight": 244.6}
RBK_250___42_x_PTAB_2_5M__250kg_CBU_Medium_HEAT_AP = {"clsid": "{4203753F-8198-4E85-9924-6F8FF679F9FF}", "name": "RBK-250 - 42 x PTAB-2.5M, 250kg CBU Medium HEAT/AP", "weight": 244.6}
RBK_500U___126_x_OAB_2_5RT__500kg_CBU_HE_Frag = {"clsid": "{RBK_500U_OAB_2_5RT}", "name": "RBK-500U - 126 x OAB-2.5RT, 500kg CBU HE/Frag", "weight": 427}
RBK_500_255___30_x_PTAB_10_5__500kg_CBU_Heavy_HEAT_AP = {"clsid": "{D5435F26-F120-4FA3-9867-34ACE562EF1B}", "name": "RBK-500-255 - 30 x PTAB-10-5, 500kg CBU Heavy HEAT/AP", "weight": 253}
RBK_500___268_x_PTAB_1M__500kg_CBU_Light_HEAT_AP = {"clsid": "{7AEC222D-C523-425e-B714-719C0D1EB14D}", "name": "RBK-500 - 268 x PTAB-1M, 500kg CBU Light HEAT/AP", "weight": 427}
RB_04E__for_A_I___with_launcher = {"clsid": "{Rb04AI}", "name": "RB-04E (for A.I.) with launcher", "weight": 661}
RB_15F__for_A_I___with_launcher = {"clsid": "{Rb15AI}", "name": "RB-15F (for A.I.) with launcher", "weight": 610}
Rb_04E_Anti_ship_Missile = {"clsid": "{Rb04}", "name": "Rb-04E Anti-ship Missile", "weight": 661}
Rb_05A_MCLOS_ASM_AShM_AAM = {"clsid": "{Robot05}", "name": "Rb-05A MCLOS ASM/AShM/AAM", "weight": 341}
Rb_15F_Programmable_Anti_ship_Missile = {"clsid": "{Rb15}", "name": "Rb-15F Programmable Anti-ship Missile", "weight": 610}
Rb_24J__AIM_9P__Sidewinder_IR_AAM = {"clsid": "{Robot24J}", "name": "Rb-24J (AIM-9P) Sidewinder IR AAM", "weight": 140}
Rb_24__AIM_9B__Sidewinder_IR_AAM = {"clsid": "{Robot24}", "name": "Rb-24 (AIM-9B) Sidewinder IR AAM", "weight": 132}
Rb_74__AIM_9L__Sidewinder_IR_AAM = {"clsid": "{Robot74}", "name": "Rb-74 (AIM-9L) Sidewinder IR AAM", "weight": 144}
Rb_75A__AGM_65A_Maverick___TV_ASM_ = {"clsid": "{RB75}", "name": "Rb-75A (AGM-65A Maverick) (TV ASM)", "weight": 269.5}
Rb_75B__AGM_65B_Maverick___TV_ASM_ = {"clsid": "{RB75B}", "name": "Rb-75B (AGM-65B Maverick) (TV ASM)", "weight": 269.5}
Rb_75T__AGM_65A_Maverick___TV_ASM_Lg_HE_Whd_ = {"clsid": "{RB75T}", "name": "Rb-75T (AGM-65A Maverick) (TV ASM Lg HE Whd)", "weight": 354}
REFLEX_9M119 = {"clsid": "REFLEX_9M119", "name": "AT-11 SNIPER (Reflex)", "weight": None}
RKL609_ECM_Pod__Left_ = {"clsid": "{RKL609_L}", "name": "RKL609 ECM Pod (Left)", "weight": 150}
RKL609_ECM_Pod__Right_ = {"clsid": "{RKL609_R}", "name": "RKL609 ECM Pod (Right)", "weight": 150}
RN_24___470kg__nuclear_bomb__free_fall = {"clsid": "{RN-24}", "name": "RN-24 - 470kg, nuclear bomb, free fall", "weight": 470}
RN_28___260_kg__nuclear_bomb__free_fall = {"clsid": "{RN-28}", "name": "RN-28 - 260 kg, nuclear bomb, free fall", "weight": 260}
ROLAND = {"clsid": "ROLAND", "name": "ROLAND", "weight": None}
RPL_522_1300_liters_Fuel_Tank = {"clsid": "{M2KC_RPL_522}", "name": "RPL 522 1300 liters Fuel Tank", "weight": 1170}
RPL_522_1300_liters_Fuel_Tank__Empty_ = {"clsid": "{M2KC_RPL_522_EMPTY}", "name": "RPL 522 1300 liters Fuel Tank (Empty)", "weight": 180}
RPL_541_2000_liters_Fuel_Tank_ = {"clsid": "{M2KC_02_RPL541}", "name": "RPL 541 2000 liters Fuel Tank ", "weight": 1837}
RPL_541_2000_liters_Fuel_Tank__ = {"clsid": "{M2KC_08_RPL541}", "name": "RPL 541 2000 liters Fuel Tank ", "weight": 1837}
RPL_541_2000_liters_Fuel_Tank__Empty_ = {"clsid": "{M2KC_02_RPL541_EMPTY}", "name": "RPL 541 2000 liters Fuel Tank (Empty)", "weight": 257}
RPL_541_2000_liters_Fuel_Tank__Empty__ = {"clsid": "{M2KC_08_RPL541_EMPTY}", "name": "RPL 541 2000 liters Fuel Tank (Empty)", "weight": 257}
RP_3_25lb_AP_Mk_I = {"clsid": "{British_AP_25LBNo1_3INCHNo1}", "name": "RP-3 25lb AP Mk.I", "weight": 22}
RP_3_60lb_F_No1_Mk_I = {"clsid": "{British_HE_60LBFNo1_3INCHNo1}", "name": "RP-3 60lb F No1 Mk.I", "weight": 31.6}
RP_3_60lb_SAP_No2_Mk_I = {"clsid": "{British_HE_60LBSAPNo2_3INCHNo1}", "name": "RP-3 60lb SAP No2 Mk.I", "weight": 38.1}
RS2US___AAM__beam_rider = {"clsid": "{RS-2US}", "name": "RS2US - AAM, beam-rider", "weight": 105.2}
R_13M1___AAM__IR_guided = {"clsid": "{R-13M1}", "name": "R-13M1 - AAM, IR guided", "weight": 122.4}
R_13M___AAM__IR_guided = {"clsid": "{R-13M}", "name": "R-13M - AAM, IR guided", "weight": 119.7}
R_24R__AA_7_Apex_SA____Semi_Act_Rdr = {"clsid": "{CCF898C9-5BC7-49A4-9D1E-C3ED3D5166A1}", "name": "R-24R (AA-7 Apex SA) - Semi-Act Rdr", "weight": 215}
R_24T__AA_7_Apex_IR____Infra_Red = {"clsid": "{6980735A-44CC-4BB9-A1B5-591532F1DC69}", "name": "R-24T (AA-7 Apex IR) - Infra Red", "weight": 215}
R_27ER__AA_10_Alamo_C____Semi_Act_Extended_Range = {"clsid": "{E8069896-8435-4B90-95C0-01A03AE6E400}", "name": "R-27ER (AA-10 Alamo C) - Semi-Act Extended Range", "weight": 350} R_27ET__AA_10_Alamo_D____IR_Extended_Range = {"clsid": "{B79C379A-9E87-4E50-A1EE-7F7E29C2E87A}", "name": "R-27ET (AA-10 Alamo D) - IR Extended Range", "weight": 343} R_27R__AA_10_Alamo_A____Semi_Act_Rdr = {"clsid": "{9B25D316-0434-4954-868F-D51DB1A38DF0}", "name": "R-27R (AA-10 Alamo A) - Semi-Act Rdr", "weight": 253} R_27T__AA_10_Alamo_B____Infra_Red = {"clsid": "{88DAC840-9F75-4531-8689-B46E64E42E53}", "name": "R-27T (AA-10 Alamo B) - Infra Red", "weight": 254} R_33__AA_9_Amos____Semi_Act_Rdr = {"clsid": "{F1243568-8EF0-49D4-9CB5-4DA90D92BC1D}", "name": "R-33 (AA-9 Amos) - Semi-Act Rdr", "weight": 490} R_3R___AAM__radar_guided = {"clsid": "{R-3R}", "name": "R-3R - AAM, radar guided", "weight": 111.5} R_3S___AAM__IR_guided = {"clsid": "{R-3S}", "name": "R-3S - AAM, IR guided", "weight": 103.3} R_40R__AA_6_Acrid____Semi_Act_Rdr = {"clsid": "{4EDBA993-2E34-444C-95FB-549300BF7CAF}", "name": "R-40R (AA-6 Acrid) - Semi-Act Rdr", "weight": 475} R_40T__AA_6_Acrid____Infra_Red = {"clsid": "{5F26DBC2-FB43-4153-92DE-6BBCE26CB0FF}", "name": "R-40T (AA-6 Acrid) - Infra Red", "weight": 475} R_550_Magic_2 = {"clsid": "{FC23864E-3B80-48E3-9C03-4DA8B1D7497B}", "name": "R.550 Magic 2", "weight": 89} R_55___AAM__IR_guided = {"clsid": "{R-55}", "name": "R-55 - AAM, IR guided", "weight": 113} R_60 = {"clsid": "{R-60}", "name": "R-60", "weight": 58.5} R_60M = {"clsid": "{R-60M}", "name": "R-60M", "weight": 58.5} R_60M_x_2 = {"clsid": "{R-60M 2L}", "name": "R-60M x 2", "weight": 122} R_60M_x_2_ = {"clsid": "{R-60M 2R}", "name": "R-60M x 2", "weight": 122} R_60M__AA_8_Aphid____Infra_Red = {"clsid": "{682A481F-0CB5-4693-A382-D00DD4A156D7}", "name": "R-60M (AA-8 Aphid) - Infra Red", "weight": 44} R_60_x_2 = {"clsid": "{R-60 2L}", "name": "R-60 x 2", "weight": 122} R_60_x_2_ = {"clsid": "{R-60 2R}", "name": "R-60 x 
2", "weight": 122} R_73__AA_11_Archer____Infra_Red = {"clsid": "{FBC29BFE-3D24-4C64-B81D-941239D12249}", "name": "R-73 (AA-11 Archer) - Infra Red", "weight": 110} R_73__AA_11_Archer____Infra_Red_ = {"clsid": "{CBC29BFE-3D24-4C64-B81D-941239D12249}", "name": "R-73 (AA-11 Archer) - Infra Red", "weight": 110} R_77__AA_12_Adder____Active_Rdr = {"clsid": "{B4C01D60-A8A3-4237-BD72-CA7655BC0FE9}", "name": "R-77 (AA-12 Adder) - Active Rdr", "weight": 175} R_77__AA_12_Adder____Active_Rdr_ = {"clsid": "{B4C01D60-A8A3-4237-BD72-CA7655BC0FEC}", "name": "R-77 (AA-12 Adder) - Active Rdr", "weight": 250} SAB_100___100kg_flare_illumination_Bomb = {"clsid": "{0511E528-EA28-4caf-A212-00D1408DF10A}", "name": "SAB-100 - 100kg flare/illumination Bomb", "weight": 100} Sand_Filter = {"clsid": "{FAS}", "name": "Sand Filter", "weight": 15} SC_250_Type_1_L2___250kg_GP_Bomb_LD = {"clsid": "{SC_250_T1_L2}", "name": "SC 250 Type 1 L2 - 250kg GP Bomb LD", "weight": 250} SC_250_Type_3_J___250kg_GP_Bomb_LD = {"clsid": "{Schloss500XIIC1_SC_250_T3_J}", "name": "SC 250 Type 3 J - 250kg GP Bomb LD", "weight": 270} SC_500_L2___500kg_GP_Bomb_LD = {"clsid": "{SC_500_L2}", "name": "SC 500 L2 - 500kg GP Bomb LD", "weight": 500} SC_501_SC250 = {"clsid": "SC_501_SC250", "name": "SC 250 Type 3 J - 250kg GP Bomb LD", "weight": 250} SC_501_SC500 = {"clsid": "SC_501_SC500", "name": "SC 500 J - 500kg GP Bomb LD", "weight": 500} SC_50___50kg_GP_Bomb_LD = {"clsid": "{SC_50}", "name": "SC 50 - 50kg GP Bomb LD", "weight": 50} SD_250_Stg___250kg_GP_Bomb_LD = {"clsid": "{SD_250_Stg}", "name": "SD 250 Stg - 250kg GP Bomb LD", "weight": 250} SD_500_A___500kg_GP_Bomb_LD = {"clsid": "{SD_500_A}", "name": "SD 500 A - 500kg GP Bomb LD", "weight": 500} SEASPARROW = {"clsid": "SEASPARROW", "name": "SEASPARROW", "weight": None} Sea_Eagle___ASM = {"clsid": "{1461CD18-429A-42A9-A21F-4C621ECD4573}", "name": "Sea Eagle - ASM", "weight": 600} Shpil_2_Laser_Recon__Intel_Pod = {"clsid": "{0519A263-0AB6-11d6-9193-00A0249B6F00}", 
"name": "Shpil-2 Laser Recon & Intel Pod", "weight": 200} Sky_Shadow_ECM_Pod = {"clsid": "{8C3F26A2-FA0F-11d5-9190-00A0249B6F00}", "name": "Sky-Shadow ECM Pod", "weight": 200} SM2 = {"clsid": "SM2", "name": "SM2", "weight": None} Smokewinder___blue = {"clsid": "{A4BCC903-06C8-47bb-9937-A30FEDB4E743}", "name": "Smokewinder - blue", "weight": 200} Smokewinder___green = {"clsid": "{A4BCC903-06C8-47bb-9937-A30FEDB4E742}", "name": "Smokewinder - green", "weight": 200} Smokewinder___orange = {"clsid": "{A4BCC903-06C8-47bb-9937-A30FEDB4E746}", "name": "Smokewinder - orange", "weight": 200} Smokewinder___red = {"clsid": "{A4BCC903-06C8-47bb-9937-A30FEDB4E741}", "name": "Smokewinder - red", "weight": 200} Smokewinder___white = {"clsid": "{A4BCC903-06C8-47bb-9937-A30FEDB4E744}", "name": "Smokewinder - white", "weight": 200} Smokewinder___yellow = {"clsid": "{A4BCC903-06C8-47bb-9937-A30FEDB4E745}", "name": "Smokewinder - yellow", "weight": 200} Smoke_for_Christen_Eagle_II__white = {"clsid": "{CE2_SMOKE_WHITE}", "name": "Smoke for Christen Eagle II, white", "weight": 7} Smoke_Generator___blue = {"clsid": "{D3F65166-1AB8-490f-AF2F-2FB6E22568B3}", "name": "Smoke Generator - blue", "weight": 220} Smoke_Generator___blue_ = {"clsid": "{INV-SMOKE-BLUE}", "name": "Smoke Generator - blue", "weight": 0} Smoke_Generator___green = {"clsid": "{D3F65166-1AB8-490f-AF2F-2FB6E22568B2}", "name": "Smoke Generator - green", "weight": 220} Smoke_Generator___green_ = {"clsid": "{INV-SMOKE-GREEN}", "name": "Smoke Generator - green", "weight": 0} Smoke_Generator___orange = {"clsid": "{D3F65166-1AB8-490f-AF2F-2FB6E22568B6}", "name": "Smoke Generator - orange", "weight": 220} Smoke_Generator___orange_ = {"clsid": "{INV-SMOKE-ORANGE}", "name": "Smoke Generator - orange", "weight": 0} Smoke_Generator___red = {"clsid": "{D3F65166-1AB8-490f-AF2F-2FB6E22568B1}", "name": "Smoke Generator - red", "weight": 220} Smoke_Generator___red_ = {"clsid": "{INV-SMOKE-RED}", "name": "Smoke Generator - red", "weight": 
0} Smoke_Generator___white = {"clsid": "{D3F65166-1AB8-490f-AF2F-2FB6E22568B4}", "name": "Smoke Generator - white", "weight": 220} Smoke_Generator___white_ = {"clsid": "{INV-SMOKE-WHITE}", "name": "Smoke Generator - white", "weight": 0} Smoke_Generator___yellow = {"clsid": "{D3F65166-1AB8-490f-AF2F-2FB6E22568B5}", "name": "Smoke Generator - yellow", "weight": 220} Smoke_Generator___yellow_ = {"clsid": "{INV-SMOKE-YELLOW}", "name": "Smoke Generator - yellow", "weight": 0} Smoke_System_red_colorant = {"clsid": "{SMOKE-RED-AVIOJET}", "name": "Smoke System red colorant", "weight": 32.6} Smoke_System_yellow_colorant = {"clsid": "{SMOKE-YELLOW-AVIOJET}", "name": "Smoke System yellow colorant", "weight": 32.6} Smoke_System__White_Smoke_ = {"clsid": "{SMOKE-SYSTEM-AVIOJET}", "name": "Smoke System (White Smoke)", "weight": 1} Smoke___red___21__t = {"clsid": "{MIG21_SMOKE_RED}", "name": "Smoke - red - 21 /t", "weight": 30} Smoke___white___21 = {"clsid": "{SMOKE_WHITE}", "name": "Smoke - white - 21", "weight": 30} Smoke___white___21_ = {"clsid": "{MIG21_SMOKE_WHITE}", "name": "Smoke - white - 21", "weight": 30} SPITFIRE_45GAL_SLIPPER_TANK = {"clsid": "SPITFIRE_45GAL_SLIPPER_TANK", "name": "45 gal. Slipper Tank", "weight": 138.647} SPITFIRE_45GAL_TORPEDO_TANK = {"clsid": "SPITFIRE_45GAL_TORPEDO_TANK", "name": "45 gal. 
Torpedo Tank", "weight": 144.647} SPPU_22_1___2_x_23mm__GSh_23L_Autocannon_Pod = {"clsid": "{E92CBFE5-C153-11d8-9897-000476191836}", "name": "SPPU-22-1 - 2 x 23mm GSh-23L Autocannon Pod", "weight": 290} SPRD_99_takeoff_rocket = {"clsid": "{SPRD}", "name": "SPRD-99 takeoff rocket", "weight": 500} SPS_141_100__21____jamming_and_countermeasures_pod = {"clsid": "{SPS-141-100}", "name": "SPS-141-100 (21) - jamming and countermeasures pod", "weight": 150} SPS_141___ECM_Jamming_Pod = {"clsid": "{F75187EF-1D9E-4DA9-84B4-1A1A14A3973A}", "name": "SPS-141 - ECM Jamming Pod", "weight": 150} SUU_25_x_8_LUU_2___Target_Marker_Flares = {"clsid": "{CAE48299-A294-4bad-8EE6-89EFC5DCDF00}", "name": "SUU-25 x 8 LUU-2 - Target Marker Flares", "weight": 130} SUU_25___8_LUU_2 = {"clsid": "{BRU42_SUU25}", "name": "SUU-25 * 8 LUU-2", "weight": 258} SUU_25___8_LUU_2_ = {"clsid": "{BRU3242_SUU25}", "name": "SUU-25 * 8 LUU-2", "weight": 315.38} Super_530D = {"clsid": "{FD21B13E-57F3-4C2A-9F78-C522D0B5BCE1}", "name": "Super 530D", "weight": 270} SVIR_9M119 = {"clsid": "SVIR_9M119", "name": "AT-11 SNIPER (Svir')", "weight": None} S_24A__21____180_kg__cumulative_unguided_rocket = {"clsid": "{S-24A}", "name": "S-24A (21) - 180 kg, cumulative unguided rocket", "weight": 235} S_24B__21____180_kg__fragmented_unguided_rocket = {"clsid": "{S-24B}", "name": "S-24B (21) - 180 kg, fragmented unguided rocket", "weight": 235} S_24B___240mm_UnGd_Rkt__235kg__HE_Frag___Low_Smk_ = {"clsid": "{1FA14DEA-8CDB-45AD-88A8-EC068DF1E65A}", "name": "S-24B - 240mm UnGd Rkt, 235kg, HE/Frag, (Low Smk)", "weight": 235} S_24B___240mm_UnGd_Rkt__235kg__HE_Frag___Low_Smk__ = {"clsid": "{3858707D-F5D5-4bbb-BDD8-ABB0530EBC7C}", "name": "S-24B - 240mm UnGd Rkt, 235kg, HE/Frag, (Low Smk)", "weight": 295} S_25L___320Kg__340mm_Laser_Guided_Rkt = {"clsid": "{0180F983-C14A-11d8-9897-000476191836}", "name": "S-25L - 320Kg, 340mm Laser Guided Rkt", "weight": 500} S_25_OFM___340mm_UnGd_Rkt__480kg_Penetrator = {"clsid": 
"{A0648264-4BC0-4EE8-A543-D119F6BA4257}", "name": "S-25-OFM - 340mm UnGd Rkt, 480kg Penetrator", "weight": 495} S_25_O___420mm_UnGd_Rkt__380kg_Frag = {"clsid": "{S_25_O}", "name": "S-25-O - 420mm UnGd Rkt, 380kg Frag", "weight": 445} Tangazh_ELINT_pod = {"clsid": "{0519A262-0AB6-11d6-9193-00A0249B6F00}", "name": "Tangazh ELINT pod", "weight": 200} TER_9A_with_2_x_CBU_87___202_x_CEM_Cluster_Bomb = {"clsid": "{TER_9A_2L*CBU-87}", "name": "TER-9A with 2 x CBU-87 - 202 x CEM Cluster Bomb", "weight": 913} TER_9A_with_2_x_CBU_87___202_x_CEM_Cluster_Bomb_ = {"clsid": "{TER_9A_2R*CBU-87}", "name": "TER-9A with 2 x CBU-87 - 202 x CEM Cluster Bomb", "weight": 913} TER_9A_with_2_x_CBU_97___10_x_SFW_Cluster_Bomb = {"clsid": "{TER_9A_2L*CBU-97}", "name": "TER-9A with 2 x CBU-97 - 10 x SFW Cluster Bomb", "weight": 887} TER_9A_with_2_x_CBU_97___10_x_SFW_Cluster_Bomb_ = {"clsid": "{TER_9A_2R*CBU-97}", "name": "TER-9A with 2 x CBU-97 - 10 x SFW Cluster Bomb", "weight": 887} TER_9A_with_2_x_GBU_12___500lb_Laser_Guided_Bomb = {"clsid": "{TER_9A_2L*GBU-12}", "name": "TER-9A with 2 x GBU-12 - 500lb Laser Guided Bomb", "weight": 607} TER_9A_with_2_x_GBU_12___500lb_Laser_Guided_Bomb_ = {"clsid": "{TER_9A_2R*GBU-12}", "name": "TER-9A with 2 x GBU-12 - 500lb Laser Guided Bomb", "weight": 607} TER_9A_with_2_x_Mk_82_AIR_Ballute___500lb_GP_Bomb_HD = {"clsid": "{TER_9A_2L*MK-82AIR}", "name": "TER-9A with 2 x Mk-82 AIR Ballute - 500lb GP Bomb HD", "weight": 537} TER_9A_with_2_x_Mk_82_AIR_Ballute___500lb_GP_Bomb_HD_ = {"clsid": "{TER_9A_2R*MK-82AIR}", "name": "TER-9A with 2 x Mk-82 AIR Ballute - 500lb GP Bomb HD", "weight": 537} TER_9A_with_2_x_Mk_82_Snakeye___500lb_GP_Bomb_HD = {"clsid": "{TER_9A_2L*MK-82_Snakeye}", "name": "TER-9A with 2 x Mk-82 Snakeye - 500lb GP Bomb HD", "weight": 552} TER_9A_with_2_x_Mk_82_Snakeye___500lb_GP_Bomb_HD_ = {"clsid": "{TER_9A_2R*MK-82_Snakeye}", "name": "TER-9A with 2 x Mk-82 Snakeye - 500lb GP Bomb HD", "weight": 552} TER_9A_with_2_x_Mk_82___500lb_GP_Bomb_LD = 
{"clsid": "{TER_9A_2L*MK-82}", "name": "TER-9A with 2 x Mk-82 - 500lb GP Bomb LD", "weight": 509} TER_9A_with_2_x_Mk_82___500lb_GP_Bomb_LD_ = {"clsid": "{TER_9A_2R*MK-82}", "name": "TER-9A with 2 x Mk-82 - 500lb GP Bomb LD", "weight": 509} TER_9A_with_3_x_BDU_33___25lb_Practice_Bomb_LD = {"clsid": "{TER_9A_3*BDU-33}", "name": "TER-9A with 3 x BDU-33 - 25lb Practice Bomb LD", "weight": 86.9} TER_9A_with_3_x_CBU_87___202_x_CEM_Cluster_Bomb = {"clsid": "{TER_9A_3*CBU-87}", "name": "TER-9A with 3 x CBU-87 - 202 x CEM Cluster Bomb", "weight": 1343} TER_9A_with_3_x_CBU_97___10_x_SFW_Cluster_Bomb = {"clsid": "{TER_9A_3*CBU-97}", "name": "TER-9A with 3 x CBU-97 - 10 x SFW Cluster Bomb", "weight": 1304} TER_9A_with_3_x_Mk_82_AIR_Ballute___500lb_GP_Bomb_HD = {"clsid": "{TER_9A_3*MK-82AIR}", "name": "TER-9A with 3 x Mk-82 AIR Ballute - 500lb GP Bomb HD", "weight": 779} TER_9A_with_3_x_Mk_82_Snakeye___500lb_GP_Bomb_HD = {"clsid": "{TER_9A_3*MK-82_Snakeye}", "name": "TER-9A with 3 x Mk-82 Snakeye - 500lb GP Bomb HD", "weight": 801.5} TER_9A_with_3_x_Mk_82___500lb_GP_Bomb_LD = {"clsid": "{TER_9A_3*MK-82}", "name": "TER-9A with 3 x Mk-82 - 500lb GP Bomb LD", "weight": 737} TEST_ROTARY_LAUNCHER_MK82 = {"clsid": "TEST_ROTARY_LAUNCHER_MK82", "name": "TEST ROTARY LAUNCHER MK82", "weight": 6748} TGM_65H = {"clsid": "TGM_65H", "name": "TGM-65H - Trg Round for Mav H (CCD)", "weight": 208} TORNADO_Fuel_tank = {"clsid": "{EF124821-F9BB-4314-A153-E0E2FE1162C4}", "name": "TORNADO Fuel tank", "weight": 1275} TOW = {"clsid": "TOW", "name": "BGM-71D TOW ATGM", "weight": None} U22_A_Jammer = {"clsid": "{U22A}", "name": "U22/A Jammer", "weight": 348} UB_16UM_pod___16_x_S_5KO__57mm_UnGd_Rkts__HEAT_Frag = {"clsid": "{UB-16-57UMP}", "name": "UB-16UM pod - 16 x S-5KO, 57mm UnGd Rkts, HEAT/Frag", "weight": 138} UB_16UM___16_S_5M = {"clsid": "{UB-16_S5M}", "name": "UB-16UM - 16 S-5M", "weight": 119.76} UB_32A_24_pod___32_x_S_5KO = {"clsid": "{UB-32A-24}", "name": "UB-32A-24 pod - 32 x S-5KO", 
"weight": 275} UB_32A_pod___32_x_S_5KO__57mm_UnGd_Rkts__HEAT_Frag = {"clsid": "{637334E4-AB5A-47C0-83A6-51B7F1DF3CD5}", "name": "UB-32A pod - 32 x S-5KO, 57mm UnGd Rkts, HEAT/Frag", "weight": 275} UB_32M___32_S_5M = {"clsid": "{UB-32_S5M}", "name": "UB-32M - 32 S-5M", "weight": 228.52} UPK_23_250___2_x_23mm__GSh_23L_Autocannon_Pod = {"clsid": "{05544F1A-C39C-466b-BC37-5BD1D52E57BB}", "name": "UPK-23-250 - 2 x 23mm GSh-23L Autocannon Pod", "weight": 218} UPK_23_250___gun_pod = {"clsid": "{UPK-23-250 MiG-21}", "name": "UPK-23-250 - gun pod", "weight": 218} U_22_Jammer_pod = {"clsid": "{U22}", "name": "U/22 Jammer pod", "weight": 348} Werfer_Granate_21___21_cm_UnGd_air_to_air_rocket = {"clsid": "{WGr21}", "name": "Werfer-Granate 21 - 21 cm UnGd air-to-air rocket", "weight": 121} XM158_M151 = {"clsid": "XM158_M151", "name": "XM158 pod - 7 x 2.75\" Hydra, UnGd Rkts M151, HE", "weight": 112} XM158_M156 = {"clsid": "XM158_M156", "name": "XM158 pod - 7 x 2.75\" Hydra, UnGd Rkts M156, Wht Phos", "weight": 112} XM158_M257 = {"clsid": "XM158_M257", "name": "XM158 pod - 7 x 2.75\" Hydra, UnGd Rkts M257, Para Illum", "weight": 112} XM158_M274 = {"clsid": "XM158_M274", "name": "XM158 pod - 7 x 2.75\" Hydra, UnGd Rkts M274, Practice Smk", "weight": 112} XM158_MK1 = {"clsid": "XM158_MK1", "name": "XM158 pod - 7 x 2.75\" Hydra, UnGd Rkts Mk1, Practice", "weight": 112} XM158_MK5 = {"clsid": "XM158_MK5", "name": "XM158 pod - 7 x 2.75\" Hydra, UnGd Rkts Mk5, HEAT", "weight": 112} _100_gal__Drop_Tank = {"clsid": "{MOSQUITO_100GAL_SLIPPER_TANK}", "name": "100 gal. Drop Tank", "weight": 375.3} _108_US_gal__Paper_Fuel_Tank = {"clsid": "{US_108GAL_PAPER_FUEL_TANK}", "name": "108 US gal. Paper Fuel Tank", "weight": 319} _110_US_gal__Fuel_Tank = {"clsid": "{US_110GAL_FUEL_TANK}", "name": "110 US gal. 
Fuel Tank", "weight": 349} _12_AN_M64___500lb_GP_Bomb_LD = {"clsid": "{12xM64}", "name": "12 AN-M64 - 500lb GP Bomb LD", "weight": 2744} _12_x_BetAB_500___500kg_Concrete_Piercing_Bombs_LD = {"clsid": "{D6A0441E-6794-4FEB-87F7-E68E2290DFAB}", "name": "12 x BetAB-500 - 500kg Concrete Piercing Bombs LD", "weight": 478} _12_x_FAB_500_M_62___500kg_GP_Bombs_LD = {"clsid": "{E70446B7-C7E6-4B95-B685-DEA10CAD1A0E}", "name": "12 x FAB-500 M-62 - 500kg GP Bombs LD", "weight": 6000} _13_R4M_3_2kg_UnGd_air_to_air_rocket = {"clsid": "{FW_190_R4M_LEFT_WING}", "name": "13 R4M 3.2kg UnGd air-to-air rocket", "weight": 70.05} _13_R4M_3_2kg_UnGd_air_to_air_rocket_ = {"clsid": "{FW_190_R4M_RGHT_WING}", "name": "13 R4M 3.2kg UnGd air-to-air rocket", "weight": 70.05} _150_US_gal__Fuel_Tank = {"clsid": "{US_150GAL_FUEL_TANK}", "name": "150 US gal. Fuel Tank", "weight": 458.8} _20_x_AGM_86C_ALCM = {"clsid": "{22906569-A97F-404B-BA4F-D96DBF94D05E}", "name": "20 x AGM-86C ALCM", "weight": 39000} _24_x_FAB_250___250kg_GP_Bombs_LD = {"clsid": "{B0241BD2-5628-47E0-954C-A8675B7E698E}", "name": "24 x FAB-250 - 250kg GP Bombs LD", "weight": 6000} _250_lb_GP_Mk_I = {"clsid": "{British_GP_250LB_Bomb_Mk1}", "name": "250 lb GP Mk.I", "weight": 104.326} _250_lb_GP_Mk_IV = {"clsid": "{British_GP_250LB_Bomb_Mk4}", "name": "250 lb GP Mk.IV", "weight": 104.326} _250_lb_GP_Mk_IV_ = {"clsid": "{British_GP_250LB_Bomb_Mk4_on_Handley_Page_Type_B_Cut_Bar}", "name": "250 lb GP Mk.IV", "weight": 109.626} _250_lb_GP_Mk_V = {"clsid": "{British_GP_250LB_Bomb_Mk5}", "name": "250 lb GP Mk.V", "weight": 104.326} _250_lb_GP_Mk_V_ = {"clsid": "{British_GP_250LB_Bomb_Mk5_on_Handley_Page_Type_B_Cut_Bar}", "name": "250 lb GP Mk.V", "weight": 109.626} _250_lb_MC_Mk_I = {"clsid": "{British_MC_250LB_Bomb_Mk1}", "name": "250 lb MC Mk.I", "weight": 102} _250_lb_MC_Mk_II = {"clsid": "{British_MC_250LB_Bomb_Mk2}", "name": "250 lb MC Mk.II", "weight": 102} _250_lb_MC_Mk_II_ = {"clsid": 
"{British_MC_250LB_Bomb_Mk2_on_Handley_Page_Type_B_Cut_Bar}", "name": "250 lb MC Mk.II", "weight": 107.3} _250_lb_MC_Mk_I_ = {"clsid": "{British_MC_250LB_Bomb_Mk1_on_Handley_Page_Type_B_Cut_Bar}", "name": "250 lb MC Mk.I", "weight": 107.3} _250_lb_S_A_P_ = {"clsid": "{British_SAP_250LB_Bomb_Mk5}", "name": "250 lb S.A.P.", "weight": 111.13} _250_lb_S_A_P__ = {"clsid": "{British_SAP_250LB_Bomb_Mk5_on_Handley_Page_Type_B_Cut_Bar}", "name": "250 lb S.A.P.", "weight": 116.43} _27_x_M117___750lb_GP_Bombs_LD = {"clsid": "{B58F99BA-5480-4572-8602-28B0449F5260}", "name": "27 x M117 - 750lb GP Bombs LD", "weight": 9180} _27_x_Mk_82___500lb_GP_Bombs_LD = {"clsid": "{6C47D097-83FF-4FB2-9496-EAB36DDF0B05}", "name": "27 x Mk-82 - 500lb GP Bombs LD", "weight": 6507} _2xGBU_12___500lb_Laser_Guided_Bomb = {"clsid": "{89D000B0-0360-461A-AD83-FB727E2ABA98}", "name": "2xGBU-12 - 500lb Laser Guided Bomb", "weight": 610.25} _2xGBU_12___500lb_Laser_Guided_Bomb_ = {"clsid": "{BRU-42_2xGBU-12_right}", "name": "2xGBU-12 - 500lb Laser Guided Bomb", "weight": 610.25} _2x_80kg_LYSB_71_Illumination_Bomb = {"clsid": "{LYSBOMB}", "name": "2x 80kg LYSB-71 Illumination Bomb", "weight": 220} _2_BDU_45 = {"clsid": "{BRU42_2*BDU45 RS}", "name": "2 BDU-45", "weight": 592} _2_BDU_45B = {"clsid": "{BRU42_2*BDU45B RS}", "name": "2 BDU-45B", "weight": 592} _2_BDU_45B_ = {"clsid": "{BRU3242_2*BDU45B RS}", "name": "2 BDU-45B", "weight": 649.38} _2_BDU_45B__ = {"clsid": "{PHXBRU3242_2*BDU45B RS}", "name": "2 BDU-45B", "weight": 649.38} _2_BDU_45B___ = {"clsid": "{BRU42_2*BDU45B LS}", "name": "2 BDU-45B", "weight": 592} _2_BDU_45B____ = {"clsid": "{BRU3242_2*BDU45B LS}", "name": "2 BDU-45B", "weight": 649.38} _2_BDU_45B_____ = {"clsid": "{PHXBRU3242_2*BDU45B LS}", "name": "2 BDU-45B", "weight": 649.38} _2_BDU_45_ = {"clsid": "{BRU3242_2*BDU45 RS}", "name": "2 BDU-45", "weight": 649.38} _2_BDU_45__ = {"clsid": "{PHXBRU3242_2*BDU45 RS}", "name": "2 BDU-45", "weight": 649.38} _2_BDU_45___ = {"clsid": 
"{BRU42_2*BDU45 LS}", "name": "2 BDU-45", "weight": 592} _2_BDU_45____ = {"clsid": "{BRU3242_2*BDU45 LS}", "name": "2 BDU-45", "weight": 649.38} _2_BDU_45_____ = {"clsid": "{PHXBRU3242_2*BDU45 LS}", "name": "2 BDU-45", "weight": 649.38} _2_CBU_99 = {"clsid": "{BRU-70_2*CBU-99_LEFT}", "name": "2 CBU-99", "weight": 541} _2_CBU_99_ = {"clsid": "{BRU-70_2*CBU-99_RIGHT}", "name": "2 CBU-99", "weight": 541} _2_GBU_12 = {"clsid": "{BRU-42_2*GBU-12_LEFT}", "name": "2 GBU-12", "weight": 547} _2_GBU_12_ = {"clsid": "{BRU-42_2*GBU-12_RIGHT}", "name": "2 GBU-12", "weight": 547} _2_GBU_16 = {"clsid": "{BRU-42_2*GBU-16_LEFT}", "name": "2 GBU-16", "weight": 1005} _2_GBU_16_ = {"clsid": "{BRU-42_2*GBU-16_RIGHT}", "name": "2 GBU-16", "weight": 1005} _2_GBU_38 = {"clsid": "{BRU-42_2*GBU-38_LEFT}", "name": "2 GBU-38", "weight": 579} _2_GBU_38_ = {"clsid": "{BRU-42_2*GBU-38_RIGHT}", "name": "2 GBU-38", "weight": 579} _2_GBU_54_V_1_B = {"clsid": "{BRU-70A_2*GBU-54_LEFT}", "name": "2 GBU-54(V)1/B", "weight": 603} _2_GBU_54_V_1_B_ = {"clsid": "{BRU-70A_2*GBU-54_RIGHT}", "name": "2 GBU-54(V)1/B", "weight": 603} _2_LAU_10___4_ZUNI_MK_71 = {"clsid": "{BRU42_2*LAU10 L}", "name": "2 LAU-10 - 4 ZUNI MK 71", "weight": 1008} _2_LAU_10___4_ZUNI_MK_71_ = {"clsid": "{BRU3242_2*LAU10 L}", "name": "2 LAU-10 - 4 ZUNI MK 71", "weight": 1065.38} _2_LAU_10___4_ZUNI_MK_71__ = {"clsid": "{BRU42_2*LAU10 R}", "name": "2 LAU-10 - 4 ZUNI MK 71", "weight": 1008} _2_LAU_10___4_ZUNI_MK_71___ = {"clsid": "{BRU3242_2*LAU10 R}", "name": "2 LAU-10 - 4 ZUNI MK 71", "weight": 1065.38} _2_LAU_10___4_ZUNI_MK_71____ = {"clsid": "{BRU42_2*LAU10 RS}", "name": "2 LAU-10 - 4 ZUNI MK 71", "weight": 1008} _2_LAU_10___4_ZUNI_MK_71_____ = {"clsid": "{BRU3242_2*LAU10 RS}", "name": "2 LAU-10 - 4 ZUNI MK 71", "weight": 1065.38} _2_LAU_10___4_ZUNI_MK_71______ = {"clsid": "{PHXBRU3242_2*LAU10 RS}", "name": "2 LAU-10 - 4 ZUNI MK 71", "weight": 1065.38} _2_LAU_10___4_ZUNI_MK_71_______ = {"clsid": "{BRU42_2*LAU10 LS}", "name": "2 LAU-10 
- 4 ZUNI MK 71", "weight": 1008} _2_LAU_10___4_ZUNI_MK_71________ = {"clsid": "{BRU3242_2*LAU10 LS}", "name": "2 LAU-10 - 4 ZUNI MK 71", "weight": 1065.38} _2_LAU_10___4_ZUNI_MK_71_________ = {"clsid": "{PHXBRU3242_2*LAU10 LS}", "name": "2 LAU-10 - 4 ZUNI MK 71", "weight": 1065.38} _2_LUU_2 = {"clsid": "{BRU42_2*LUU2 R}", "name": "2 LUU-2", "weight": 155.2} _2_LUU_2_ = {"clsid": "{BRU3242_2*LUU2 R}", "name": "2 LUU-2", "weight": 212.58} _2_LUU_2__ = {"clsid": "{BRU42_2*LUU2 L}", "name": "2 LUU-2", "weight": 155.2} _2_LUU_2___ = {"clsid": "{BRU3242_2*LUU2 L}", "name": "2 LUU-2", "weight": 212.58} _2_MK_20 = {"clsid": "{BRU42_2*MK20 RS}", "name": "2 MK-20", "weight": 572} _2_MK_20_ = {"clsid": "{BRU3242_2*MK20 RS}", "name": "2 MK-20", "weight": 629.38} _2_MK_20__ = {"clsid": "{PHXBRU3242_2*MK20 RS}", "name": "2 MK-20", "weight": 629.38} _2_MK_20___ = {"clsid": "{BRU42_2*MK20 LS}", "name": "2 MK-20", "weight": 572} _2_MK_20____ = {"clsid": "{BRU3242_2*MK20 LS}", "name": "2 MK-20", "weight": 629.38} _2_MK_20_____ = {"clsid": "{PHXBRU3242_2*MK20 LS}", "name": "2 MK-20", "weight": 629.38} _2_Mk_20_Rockeye = {"clsid": "{BRU-42_2*MK-20_LEFT}", "name": "2 Mk-20 Rockeye", "weight": 541} _2_Mk_20_Rockeye_ = {"clsid": "{BRU-42_2*MK-20_RIGHT}", "name": "2 Mk-20 Rockeye", "weight": 541} _2_Mk_81 = {"clsid": "{BRU42_2*MK81 RS}", "name": "2 Mk-81", "weight": 364} _2_Mk_81_ = {"clsid": "{BRU3242_2*MK81 RS}", "name": "2 Mk-81", "weight": 421.38} _2_Mk_81__ = {"clsid": "{PHXBRU3242_2*MK81 RS}", "name": "2 Mk-81", "weight": 421.38} _2_Mk_81___ = {"clsid": "{BRU42_2*MK81 LS}", "name": "2 Mk-81", "weight": 364} _2_Mk_81____ = {"clsid": "{BRU3242_2*MK81 LS}", "name": "2 Mk-81", "weight": 421.38} _2_Mk_81_____ = {"clsid": "{PHXBRU3242_2*MK81 LS}", "name": "2 Mk-81", "weight": 421.38} _2_Mk_82 = {"clsid": "{BRU-42_2*Mk-82_LEFT}", "name": "2 Mk-82", "weight": 579} _2_Mk_82AIR = {"clsid": "{BRU42_2*MK82AIR RS}", "name": "2 Mk-82AIR", "weight": 610} _2_Mk_82AIR_ = {"clsid": 
"{BRU3242_2*MK82AIR RS}", "name": "2 Mk-82AIR", "weight": 667.38} _2_Mk_82AIR__ = {"clsid": "{PHXBRU3242_2*MK82AIR RS}", "name": "2 Mk-82AIR", "weight": 667.38} _2_Mk_82AIR___ = {"clsid": "{BRU42_2*MK82AIR LS}", "name": "2 Mk-82AIR", "weight": 610} _2_Mk_82AIR____ = {"clsid": "{BRU3242_2*MK82AIR LS}", "name": "2 Mk-82AIR", "weight": 667.38} _2_Mk_82AIR_____ = {"clsid": "{PHXBRU3242_2*MK82AIR LS}", "name": "2 Mk-82AIR", "weight": 667.38} _2_Mk_82_ = {"clsid": "{BRU-42_2*Mk-82_RIGHT}", "name": "2 Mk-82", "weight": 579} _2_Mk_82_AIR = {"clsid": "{BRU-42_2*Mk-82AIR_LEFT}", "name": "2 Mk-82 AIR", "weight": 579} _2_Mk_82_AIR_ = {"clsid": "{BRU-42_2*Mk-82AIR_RIGHT}", "name": "2 Mk-82 AIR", "weight": 579} _2_Mk_82_SnakeEye = {"clsid": "{BRU42_2*MK82SE RS}", "name": "2 Mk-82 SnakeEye", "weight": 610} _2_Mk_82_SnakeEye_ = {"clsid": "{BRU3242_2*MK82SE RS}", "name": "2 Mk-82 SnakeEye", "weight": 667.38} _2_Mk_82_SnakeEye__ = {"clsid": "{PHXBRU3242_2*MK82SE RS}", "name": "2 Mk-82 SnakeEye", "weight": 667.38} _2_Mk_82_SnakeEye___ = {"clsid": "{BRU42_2*MK82SE LS}", "name": "2 Mk-82 SnakeEye", "weight": 610} _2_Mk_82_SnakeEye____ = {"clsid": "{BRU3242_2*MK82SE LS}", "name": "2 Mk-82 SnakeEye", "weight": 667.38} _2_Mk_82_SnakeEye_____ = {"clsid": "{PHXBRU3242_2*MK82SE LS}", "name": "2 Mk-82 SnakeEye", "weight": 667.38} _2_Mk_82_Snakeye = {"clsid": "{BRU-42_2*Mk-82SNAKEYE_LEFT}", "name": "2 Mk-82 Snakeye", "weight": 579} _2_Mk_82_Snakeye_ = {"clsid": "{BRU-42_2*Mk-82SNAKEYE_RIGHT}", "name": "2 Mk-82 Snakeye", "weight": 579} _2_Mk_82__ = {"clsid": "{BRU42_2*MK82 RS}", "name": "2 Mk-82", "weight": 610} _2_Mk_82___ = {"clsid": "{BRU3242_2*MK82 RS}", "name": "2 Mk-82", "weight": 667.38} _2_Mk_82____ = {"clsid": "{PHXBRU3242_2*MK82 RS}", "name": "2 Mk-82", "weight": 667.38} _2_Mk_82_____ = {"clsid": "{BRU42_2*MK82 LS}", "name": "2 Mk-82", "weight": 610} _2_Mk_82______ = {"clsid": "{BRU3242_2*MK82 LS}", "name": "2 Mk-82", "weight": 667.38} _2_Mk_82_______ = {"clsid": "{PHXBRU3242_2*MK82 
LS}", "name": "2 Mk-82", "weight": 667.38} _2_Mk_83 = {"clsid": "{BRU-42_2*Mk-83_LEFT}", "name": "2 Mk-83", "weight": 991} _2_Mk_83_ = {"clsid": "{BRU-42_2*Mk-83_RIGHT}", "name": "2 Mk-83", "weight": 991} _2_SUU_25___8_LUU_2 = {"clsid": "{BRU42_2*SUU25 L}", "name": "2 SUU-25 * 8 LUU-2", "weight": 388} _2_SUU_25___8_LUU_2_ = {"clsid": "{BRU3242_2*SUU25 L}", "name": "2 SUU-25 * 8 LUU-2", "weight": 445.38} _2_SUU_25___8_LUU_2__ = {"clsid": "{BRU42_2*SUU25 R}", "name": "2 SUU-25 * 8 LUU-2", "weight": 388} _2_SUU_25___8_LUU_2___ = {"clsid": "{BRU3242_2*SUU25 R}", "name": "2 SUU-25 * 8 LUU-2", "weight": 445.38} _2_x_9M120F_Ataka__AT_9_Spiral_2____AGM__SACLOS__HE = {"clsid": "{2x9M120F_Ataka_V}", "name": "2 x 9M120F Ataka (AT-9 Spiral-2) - AGM, SACLOS, HE", "weight": 112} _2_x_9M120_Ataka__AT_9_Spiral_2____ATGM__SACLOS__Tandem_HEAT = {"clsid": "{2x9M120_Ataka_V}", "name": "2 x 9M120 Ataka (AT-9 Spiral-2) - ATGM, SACLOS, Tandem HEAT", "weight": 112} _2_x_9M220O_Ataka__AT_9_Spiral_2____AAM__SACLOS__Frag = {"clsid": "{2x9M220_Ataka_V}", "name": "2 x 9M220O Ataka (AT-9 Spiral-2) - AAM, SACLOS, Frag", "weight": 112} _2_x_ALARM = {"clsid": "{07BE2D19-0E48-4B0B-91DA-5F6C8F9E3C75}", "name": "2 x ALARM", "weight": 530} _2_x_BL_755_CBUs___450kg__147_Frag_Pen_bomblets = {"clsid": "{C535596E-F7D2-4301-8BB4-B1658BB87ED7}", "name": "2 x BL-755 CBUs - 450kg, 147 Frag/Pen bomblets", "weight": 554} _2_x_B_13L_pods___10_x_S_13_OF__122mm_UnGd_Rkts__Blast_Frag = {"clsid": "{TWIN_B13L_5OF}", "name": "2 x B-13L pods - 10 x S-13-OF, 122mm UnGd Rkts, Blast/Frag", "weight": 1042} _2_x_B_13L___5_S_13_OF = {"clsid": "{B13_5_S13OF_DUAL_L}", "name": "2 x B-13L - 5 S-13 OF", "weight": 1042} _2_x_B_13L___5_S_13_OF_ = {"clsid": "{B13_5_S13OF_DUAL_R}", "name": "2 x B-13L - 5 S-13 OF", "weight": 1042} _2_x_B_8M1_pods___40_x_S_8KOM__80mm_UnGd_Rkts__HEAT_AP = {"clsid": "{TWIN_B_8M1_S_8KOM}", "name": "2 x B-8M1 pods - 40 x S-8KOM, 80mm UnGd Rkts, HEAT/AP", "weight": 759} _2_x_B_8M1___20_S_8KOM = {"clsid": 
"{B8M1_20_S8KOM_DUAL_L}", "name": "2 x B-8M1 - 20 S-8KOM", "weight": 975} _2_x_B_8M1___20_S_8KOM_ = {"clsid": "{B8M1_20_S8KOM_DUAL_R}", "name": "2 x B-8M1 - 20 S-8KOM", "weight": 975} _2_x_B_8M1___20_S_8OFP2 = {"clsid": "{B8M1_20_S8OFP2_DUAL_L}", "name": "2 x B-8M1 - 20 S-8OFP2", "weight": 975} _2_x_B_8M1___20_S_8OFP2_ = {"clsid": "{B8M1_20_S8OFP2_DUAL_R}", "name": "2 x B-8M1 - 20 S-8OFP2", "weight": 975} _2_x_B_8M1___20_S_8TsM = {"clsid": "{B8M1_20_S8TsM_DUAL_L}", "name": "2 x B-8M1 - 20 S-8TsM", "weight": 751} _2_x_B_8M1___20_S_8TsM_ = {"clsid": "{B8M1_20_S8TsM_DUAL_R}", "name": "2 x B-8M1 - 20 S-8TsM", "weight": 751} _2_x_B_8V20A_pods___40_x_S_8OFP2__80mm_UnGd_Rkts__HE_Frag_AP = {"clsid": "{TWIN_B_8M1_S_8_OFP2}", "name": "2 x B-8V20A pods - 40 x S-8OFP2, 80mm UnGd Rkts, HE/Frag/AP", "weight": 975} _2_x_B_8V20A_pods___40_x_S_8TsM__80mm_UnGd_Rkts__Smk = {"clsid": "{TWIN_B_8M1_S_8TsM}", "name": "2 x B-8V20A pods - 40 x S-8TsM, 80mm UnGd Rkts, Smk", "weight": 751} _2_x_FAB_250 = {"clsid": "{FAB_250_DUAL_L}", "name": "2 x FAB-250", "weight": 532} _2_x_FAB_250_ = {"clsid": "{FAB_250_DUAL_R}", "name": "2 x FAB-250", "weight": 532} _2_x_FAB_500 = {"clsid": "{FAB_500_DUAL_L}", "name": "2 x FAB-500", "weight": 1044} _2_x_FAB_500_ = {"clsid": "{FAB_500_DUAL_R}", "name": "2 x FAB-500", "weight": 1044} _2_x_HVAR__UnGd_Rkts = {"clsid": "{HVARx2}", "name": "2 x HVAR, UnGd Rkts", "weight": 128} _2_x_OFAB_100_Jupiter___100kg_GP_Bombs_LD = {"clsid": "{FAB-100x2}", "name": "2 x OFAB-100 Jupiter - 100kg GP Bombs LD", "weight": 342} _2_x_RBK_250_PTAB_2_5M = {"clsid": "{RBK_250_PTAB25M_DUAL_L}", "name": "2 x RBK-250 PTAB-2.5M", "weight": 578} _2_x_RBK_250_PTAB_2_5M_ = {"clsid": "{RBK_250_PTAB25M_DUAL_R}", "name": "2 x RBK-250 PTAB-2.5M", "weight": 578} _2_x_RBK_500_255_PTAB_10_5 = {"clsid": "{RBK_500_PTAB105_DUAL_L}", "name": "2 x RBK-500-255 PTAB-10-5", "weight": 538} _2_x_RBK_500_255_PTAB_10_5_ = {"clsid": "{RBK_500_PTAB105_DUAL_R}", "name": "2 x RBK-500-255 PTAB-10-5", "weight": 
538} _2_x_RP_3_25lb_AP_Mk_I = {"clsid": "{MOSSIE_2_British_AP_25LBNo1_3INCHNo1_ON_LEFT_WING_RAILS}", "name": "2 x RP-3 25lb AP Mk.I", "weight": 174} _2_x_RP_3_25lb_AP_Mk_I_ = {"clsid": "{MOSSIE_2_British_AP_25LBNo1_3INCHNo1_ON_RIGHT_WING_RAILS}", "name": "2 x RP-3 25lb AP Mk.I", "weight": 174} _2_x_RP_3_60lb_F_No1_Mk_I = {"clsid": "{MOSSIE_2_British_HE_60LBFNo1_3INCHNo1_ON_LEFT_WING_RAILS}", "name": "2 x RP-3 60lb F No1 Mk.I", "weight": 193.2} _2_x_RP_3_60lb_F_No1_Mk_I_ = {"clsid": "{MOSSIE_2_British_HE_60LBFNo1_3INCHNo1_ON_RIGHT_WING_RAILS}", "name": "2 x RP-3 60lb F No1 Mk.I", "weight": 193.2} _2_x_RP_3_60lb_SAP_No2_Mk_I = {"clsid": "{MOSSIE_2_British_HE_60LBSAPNo2_3INCHNo1_ON_LEFT_WING_RAILS}", "name": "2 x RP-3 60lb SAP No2 Mk.I", "weight": 206.2} _2_x_RP_3_60lb_SAP_No2_Mk_I_ = {"clsid": "{MOSSIE_2_British_HE_60LBSAPNo2_3INCHNo1_ON_RIGHT_WING_RAILS}", "name": "2 x RP-3 60lb SAP No2 Mk.I", "weight": 206.2} _2_x_S_25 = {"clsid": "{S25_DUAL_L}", "name": "2 x S-25", "weight": 902} _2_x_S_25_ = {"clsid": "{S25_DUAL_R}", "name": "2 x S-25", "weight": 902} _2_x_S_25_OFM___340mm_UnGdrocket__480kg_Penetrator = {"clsid": "{TWIN_S25}", "name": "2 x S-25-OFM - 340mm UnGdrocket, 480kg Penetrator", "weight": 902} _2_x_S_25_O___420mm_UnGd_Rkt__380kg_Frag = {"clsid": "{TWIN_S25_O}", "name": "2 x S-25-O - 420mm UnGd Rkt, 380kg Frag", "weight": 922} _33_x_FAB_250___250kg_GP_Bombs_LD = {"clsid": "{BDAD04AA-4D4A-4E51-B958-180A89F963CF}", "name": "33 x FAB-250 - 250kg GP Bombs LD", "weight": 8250} _33_x_FAB_500_M_62___500kg_GP_Bombs_LD = {"clsid": "{AD5E5863-08FC-4283-B92C-162E2B2BD3FF}", "name": "33 x FAB-500 M-62 - 500kg GP Bombs LD", "weight": 16500} _3M45 = {"clsid": "3M45", "name": "SS-N-19 SHIPWRECK", "weight": None} _3_BDU_33 = {"clsid": "{BRU42_3*BDU33}", "name": "3 BDU-33", "weight": 161} _3_BDU_33_ = {"clsid": "{BRU3242_3*BDU33}", "name": "3 BDU-33", "weight": 218.38} _3_BDU_33__ = {"clsid": "{BRU42_3*BDU33_N}", "name": "3 BDU-33", "weight": 161} _3_BDU_33___ = {"clsid": 
"{BRU3242_3*BDU33_N}", "name": "3 BDU-33", "weight": 218.38} _3_BDU_33____ = {"clsid": "{PHXBRU3242_BDU33}", "name": "3 BDU-33", "weight": 218.38} _3_GBU_12 = {"clsid": "{BRU-42A_3*GBU-12}", "name": "3 GBU-12", "weight": 772} _3_GBU_16 = {"clsid": "{BRU-42A_3*GBU-16}", "name": "3 GBU-16", "weight": 1459} _3_GBU_38 = {"clsid": "{BRU-42_3*GBU-38}", "name": "3 GBU-38", "weight": 820} _3_GBU_54_V_1_B = {"clsid": "{BRU-70A_3*GBU-54}", "name": "3 GBU-54(V)1/B", "weight": 856} _3_Mk_81 = {"clsid": "{BRU-42_3*Mk-81LD}", "name": "3 Mk-81", "weight": 451} _3_Mk_82 = {"clsid": "{BRU-42_3*Mk-82LD}", "name": "3 Mk-82", "weight": 820} _3_Mk_82_AIR = {"clsid": "{BRU-42_3_MK82AIR}", "name": "3 Mk-82 AIR", "weight": 820} _3_Mk_82_Snakeye = {"clsid": "{BRU-42_3*Mk-82SNAKEYE}", "name": "3 Mk-82 Snakeye", "weight": 820} _3_Mk_83 = {"clsid": "{BRU-42_3*Mk-83}", "name": "3 Mk-83", "weight": 1438} _3_x_4_5_inch_M8_UnGd_Rocket = {"clsid": "{3xM8_ROCKETS_IN_TUBES}", "name": "3 x 4.5 inch M8 UnGd Rocket", "weight": 71.72} _3_x_FAB_1500_M_54___1500kg_GP_Bombs_LD = {"clsid": "{639DB5DD-CB7E-4E42-AC75-2112BC397B97}", "name": "3 x FAB-1500 M-54 - 1500kg GP Bombs LD", "weight": 4500} _3_x_LAU_61_pods___57_x_2_75_Hydra__UnGd_Rkts_M151__HE = {"clsid": "{A76344EB-32D2-4532-8FA2-0C1BDC00747E}", "name": "3 x LAU-61 pods - 57 x 2.75\" Hydra, UnGd Rkts M151, HE", "weight": 876.45} _48N6E2 = {"clsid": "48N6E2", "name": "48N6E2 S-300F (SA-N-6 Grumble)", "weight": None} _4M80 = {"clsid": "_4M80", "name": "SS-N-12 SANDBOX", "weight": None} _4x_SB_M_71_120kg_GP_Bomb_High_drag = {"clsid": "{M71BOMBD}", "name": "4x SB M/71 120kg GP Bomb High-drag", "weight": 609} _4x_SB_M_71_120kg_GP_Bomb_Low_drag = {"clsid": "{M71BOMB}", "name": "4x SB M/71 120kg GP Bomb Low-drag", "weight": 609} _4_x_AGM_154C___JSOW_Unitary_BROACH = {"clsid": "{AABA1A14-78A1-4E85-94DD-463CF75BD9E4}", "name": "4 x AGM-154C - JSOW Unitary BROACH", "weight": 2560} _4_x_AN_M64___500lb_GP_Bomb_LD = {"clsid": 
"{4xAN-M64_on_InvCountedAttachmentPoints}", "name": "4 x AN-M64 - 500lb GP Bomb LD", "weight": 908} _4_x_BGM_71D_TOW_ATGM = {"clsid": "{3EA17AB0-A805-4D9E-8732-4CE00CB00F17}", "name": "4 x BGM-71D TOW ATGM", "weight": 250} _4_x_GBU_27___2000lb_Laser_Guided_Penetrator_Bombs = {"clsid": "{B8C99F40-E486-4040-B547-6639172A5D57}", "name": "4 x GBU-27 - 2000lb Laser Guided Penetrator Bombs", "weight": 3936} _4_x_RP_3_25lb_AP_Mk_I = {"clsid": "{MOSSIE_4_British_AP_25LBNo1_3INCHNo1_ON_LEFT_WING_RAILS}", "name": "4 x RP-3 25lb AP Mk.I", "weight": 218} _4_x_RP_3_25lb_AP_Mk_I_ = {"clsid": "{MOSSIE_4_British_AP_25LBNo1_3INCHNo1_ON_RIGHT_WING_RAILS}", "name": "4 x RP-3 25lb AP Mk.I", "weight": 218} _4_x_RP_3_60lb_F_No1_Mk_I = {"clsid": "{MOSSIE_4_British_HE_60LBFNo1_3INCHNo1_ON_LEFT_WING_RAILS}", "name": "4 x RP-3 60lb F No1 Mk.I", "weight": 256.4} _4_x_RP_3_60lb_F_No1_Mk_I_ = {"clsid": "{MOSSIE_4_British_HE_60LBFNo1_3INCHNo1_ON_RIGHT_WING_RAILS}", "name": "4 x RP-3 60lb F No1 Mk.I", "weight": 256.4} _4_x_RP_3_60lb_SAP_No2_Mk_I = {"clsid": "{MOSSIE_4_British_HE_60LBSAPNo2_3INCHNo1_ON_LEFT_WING_RAILS}", "name": "4 x RP-3 60lb SAP No2 Mk.I", "weight": 282.4} _4_x_RP_3_60lb_SAP_No2_Mk_I_ = {"clsid": "{MOSSIE_4_British_HE_60LBSAPNo2_3INCHNo1_ON_RIGHT_WING_RAILS}", "name": "4 x RP-3 60lb SAP No2 Mk.I", "weight": 282.4} _500_lb_GP_Mk_I = {"clsid": "{British_GP_500LB_Bomb_Mk1}", "name": "500 lb GP Mk.I", "weight": 213.188} _500_lb_GP_Mk_IV = {"clsid": "{British_GP_500LB_Bomb_Mk4}", "name": "500 lb GP Mk.IV", "weight": 213.188} _500_lb_GP_Mk_V = {"clsid": "{British_GP_500LB_Bomb_Mk5}", "name": "500 lb GP Mk.V", "weight": 213.188} _500_lb_GP_Short_tail = {"clsid": "{British_GP_500LB_Bomb_Mk4_Short}", "name": "500 lb GP Short tail", "weight": 207.7} _500_lb_GP_Short_tail_ = {"clsid": "{British_GP_500LB_Bomb_Mk4_Short_on_Handley_Page_Type_B_Cut_Bar}", "name": "500 lb GP Short tail", "weight": 213} _500_lb_MC_Mk_II = {"clsid": "{British_MC_500LB_Bomb_Mk2}", "name": "500 lb MC Mk.II", 
"weight": 231.8} _500_lb_MC_Short_tail = {"clsid": "{British_MC_500LB_Bomb_Mk1_Short}", "name": "500 lb MC Short tail", "weight": 226.3} _500_lb_MC_Short_tail_ = {"clsid": "{British_MC_500LB_Bomb_Mk1_Short_on_Handley_Page_Type_B_Cut_Bar}", "name": "500 lb MC Short tail", "weight": 231.6} _500_lb_S_A_P_ = {"clsid": "{British_SAP_500LB_Bomb_Mk5}", "name": "500 lb S.A.P.", "weight": 222.26} _50_gal__Drop_Tank = {"clsid": "{MOSQUITO_50GAL_SLIPPER_TANK}", "name": "50 gal. Drop Tank", "weight": 187.7} _51_x_M117___750lb_GP_Bombs_LD = {"clsid": "{72CAC282-AE18-490B-BD4D-35E7EE969E73}", "name": "51 x M117 - 750lb GP Bombs LD", "weight": 17340} _51_x_Mk_82___500lb_GP_Bombs_LD = {"clsid": "{B84DFE16-6AC7-4854-8F6D-34137892E166}", "name": "51 x Mk-82 - 500lb GP Bombs LD", "weight": 12291} _5V55 = {"clsid": "5V55", "name": "5V55 S-300PS (SA-10B Grumble)", "weight": None} _5_x_HVAR__UnGd_Rkt = {"clsid": "{P47_5_HVARS_ON_LEFT_WING_RAILS}", "name": "5 x HVAR, UnGd Rkt", "weight": 330} _5_x_HVAR__UnGd_Rkt_ = {"clsid": "{P47_5_HVARS_ON_RIGHT_WING_RAILS}", "name": "5 x HVAR, UnGd Rkt", "weight": 330} _5_x_Mk_82_Snakeye___500lb_GP_Bomb_HD = {"clsid": "{MER-5E_Mk82SNAKEYEx5}", "name": "5 x Mk-82 Snakeye - 500lb GP Bomb HD", "weight": 1250.7} _5_x_Mk_82___500lb_GP_Bombs_LD = {"clsid": "{MER-5E_MK82x5}", "name": "5 x Mk-82 - 500lb GP Bombs LD", "weight": 1295.7} _6_x_AGM_86C_ALCM_on_MER = {"clsid": "{45447F82-01B5-4029-A572-9AAD28AF0275}", "name": "6 x AGM-86C ALCM on MER", "weight": 11760} _6_x_BetAB_500___500kg_Concrete_Piercing_Bombs_LD = {"clsid": "{2B7BDB38-4F45-43F9-BE02-E7B3141F3D24}", "name": "6 x BetAB-500 - 500kg Concrete Piercing Bombs LD", "weight": 2868} _6_x_FAB_1500_M_54___1500kg_GP_Bombs_LD = {"clsid": "{D9179118-E42F-47DE-A483-A6C2EA7B4F38}", "name": "6 x FAB-1500 M-54 - 1500kg GP Bombs LD", "weight": 9000} _6_x_FAB_500_M_62___500kg_GP_Bombs_LD = {"clsid": "{26D2AF37-B0DF-4AB6-9D61-A150FF58A37B}", "name": "6 x FAB-500 M-62 - 500kg GP Bombs LD", "weight": 3000} 
_6_x_Kh_35__AS_20_Kayak____520kg__AShM__IN__Act_Rdr = {"clsid": "{C42EE4C3-355C-4B83-8B22-B39430B8F4AE}", "name": "6 x Kh-35 (AS-20 Kayak) - 520kg, AShM, IN & Act Rdr", "weight": 2880} _6_x_Kh_65__AS_15B_Kent____1250kg__ASM__IN__MCC = {"clsid": "{0290F5DE-014A-4BB1-9843-D717749B1DED}", "name": "6 x Kh-65 (AS-15B Kent) - 1250kg, ASM, IN & MCC", "weight": 7500} _6_x_Mk_20_Rockeye___490lbs_CBUs__247_x_HEAT_Bomblets = {"clsid": "{E79759F7-C622-4AA4-B1EF-37639A34D924}", "name": "6 x Mk-20 Rockeye - 490lbs CBUs, 247 x HEAT Bomblets", "weight": 1332} _6_x_Mk_82___500lb_GP_Bombs_LD = {"clsid": "{027563C9-D87E-4A85-B317-597B510E3F03}", "name": "6 x Mk-82 - 500lb GP Bombs LD", "weight": 1446} _75_US_gal__Fuel_Tank = {"clsid": "{DT75GAL}", "name": "75 US gal. Fuel Tank", "weight": 227.048087675} _8_x_AGM_84A_Harpoon_ASM = {"clsid": "{46ACDCF8-5451-4E26-BDDB-E78D5830E93C}", "name": "8 x AGM-84A Harpoon ASM", "weight": 5292} _8_x_AGM_86C_ALCM = {"clsid": "{8DCAF3A3-7FCF-41B8-BB88-58DEDA878EDE}", "name": "8 x AGM-86C ALCM", "weight": 15600} _8_x_Kh_65__AS_15B_Kent____1250kg__ASM__IN__MCC = {"clsid": "{CD9417DF-455F-4176-A5A2-8C58D61AA00B}", "name": "8 x Kh-65 (AS-15B Kent) - 1250kg, ASM, IN & MCC", "weight": 10000} _9M111 = {"clsid": "_9M111", "name": "AT-4 SPIGOT", "weight": None} _9M114_Shturm_V_2_Rack = {"clsid": "{9M114 Shturm-V-2 Rack}", "name": "9M114 Shturm-V-2 Rack", "weight": 13} _9M114_Shturm_V_2__AT_6_Spiral____ATGM__SACLOS = {"clsid": "{B919B0F4-7C25-455E-9A02-CEA51DB895E3}", "name": "9M114 Shturm-V-2 (AT-6 Spiral) - ATGM, SACLOS", "weight": 105} _9M114_Shturm_V_8__AT_6_Spiral____ATGM__SACLOS = {"clsid": "{57232979-8B0F-4db7-8D9A-55197E06B0F5}", "name": "9M114 Shturm-V-8 (AT-6 Spiral) - ATGM, SACLOS", "weight": 422} _9M117 = {"clsid": "_9M117", "name": "AT-10 SABBER", "weight": None} _9M133 = {"clsid": "9M133", "name": "AT-14 KORNET", "weight": None} _9M14 = {"clsid": "9M14", "name": "AT-3 SAGGER", "weight": None} _9M31 = {"clsid": "9M31", "name": "SA-9 GASKIN", 
"weight": None} _9M311 = {"clsid": "9M311", "name": "SA-19 GRISON", "weight": None} _9M33 = {"clsid": "9M33", "name": "SA-8 GECKO", "weight": None} _9M331 = {"clsid": "_9M331", "name": "SA-15 GAUNTLET", "weight": None} _9M37 = {"clsid": "_9M37", "name": "SA-13 GOPHER", "weight": None} _9M38 = {"clsid": "_9M38", "name": "SA-11 GADFLY", "weight": None} _9M39 = {"clsid": "_9M39", "name": "SA-18 GROUSE", "weight": None} _9S846_Strelets___2_x_Igla = {"clsid": "{9S846_2xIGLA}", "name": "9S846 Strelets - 2 x Igla", "weight": 71} _NiteHawk_FLIR = {"clsid": "_NiteHawk_FLIR", "name": "AN/AAS-38 \"Nite hawk\" FLIR, Laser designator & Laser spot tracker pod", "weight": 200} weapon_ids = { "{AB_250_2_SD_2}": Weapons.AB_250_2___144_x_SD_2__250kg_CBU_with_HE_submunitions, "{AB_250_2_SD_10A}": Weapons.AB_250_2___17_x_SD_10A__250kg_CBU_with_10kg_Frag_HE_submunitions, "{AB_500_1_SD_10A}": Weapons.AB_500_1___34_x_SD_10A__500kg_CBU_with_10kg_Frag_HE_submunitions, "{ADEN_GUNPOD}": Weapons.ADEN_GUNPOD, "{BRU42_ADM141}": Weapons.ADM_141A, "{BRU3242_ADM141}": Weapons.ADM_141A_, "{ADM_141A}": Weapons.ADM_141A_TALD, "{ADM_141B}": Weapons.ADM_141B_TALD, "{AV8BNA_AERO1D}": Weapons.AERO_1D_300_Gallons_Fuel_Tank_, "{AV8BNA_AERO1D_EMPTY}": Weapons.AERO_1D_300_Gallons_Fuel_Tank__Empty_, "AGM114x2_OH_58": Weapons.AGM114x2_OH_58, "{ee368869-c35a-486a-afe7-284beb7c5d52}": Weapons.AGM_114K, "{88D18A5E-99C8-4B04-B40B-1C02F2018B6E}": Weapons.AGM_114K___4, "{7B8DCEB4-820B-4015-9B48-1028A4195692}": Weapons.AGM_119B_Penguin_ASM, "{AGM_122_SIDEARM}": Weapons.AGM_122_Sidearm, "{LAU_7_AGM_122_SIDEARM}": Weapons.AGM_122_Sidearm_, "{AGM_122}": Weapons.AGM_122_Sidearm___light_ARM, "{AGM-154A}": Weapons.AGM_154A___JSOW_CEB__CBU_type_, "{AGM-154B}": Weapons.AGM_154B___JSOW_Anti_Armour, "{9BCC2A2B-5708-4860-B1F1-053A18442067}": Weapons.AGM_154C___JSOW_Unitary_BROACH, "{AGM_45A}": Weapons.AGM_45A_Shrike_ARM, "{3E6B632D-65EB-44D2-9501-1C2D04515404}": Weapons.AGM_45B_Shrike_ARM__Imp_, 
"{C40A1E3A-DD05-40D9-85A4-217729E37FAE}": Weapons.AGM_62_Walleye_II___Guided_Weapon_Mk_5__TV_Guided_, "{444BA8AE-82A7-4345-842E-76154EFCCA47}": Weapons.AGM_65D___Maverick_D__IIR_ASM_, "{F16A4DE0-116C-4A71-97F0-2CF85B0313EF}": Weapons.AGM_65E___Maverick_E__Laser_ASM___Lg_Whd_, "{69DC8AE7-8F77-427B-B8AA-B19D3F478B65}": Weapons.AGM_65K___Maverick_K__CCD_Imp_ASM_, "AGM_84": Weapons.AGM_84, "{8B7CADF9-4954-46B3-8CFB-93F2F5B90B03}": Weapons.AGM_84A_Harpoon_ASM, "{AGM_84D}": Weapons.AGM_84D_Harpoon_AShM, "{AF42E6DF-9A60-46D8-A9A0-1708B241AADB}": Weapons.AGM_84E_Harpoon_SLAM__Stand_Off_Land_Attack_Missile_, "{AGM_84E}": Weapons.AGM_84E_Harpoon_SLAM__Stand_Off_Land_Attack_Missile__, "{AGM_84H}": Weapons.AGM_84H_SLAM_ER__Expanded_Response_, "{769A15DF-6AFB-439F-9B24-5B7A45C59D16}": Weapons.AGM_86C_ALCM, "{B06DD79A-F21E-4EB9-BD9D-AB3844618C9C}": Weapons.AGM_88C_HARM___High_Speed_Anti_Radiation_Missile, "{B06DD79A-F21E-4EB9-BD9D-AB3844618C93}": Weapons.AGM_88C_HARM___High_Speed_Anti_Radiation_Missile_, "{C8E06185-7CD6-4C90-959F-044679E90751}": Weapons.AIM_120B_AMRAAM___Active_Rdr_AAM, "{40EF17B7-F508-45de-8566-6FFECC0C1AB8}": Weapons.AIM_120C_5_AMRAAM___Active_Rdr_AAM, "{AIM_54A_Mk47}": Weapons.AIM_54A_Mk47, "{SHOULDER AIM_54A_Mk47 L}": Weapons.AIM_54A_Mk47_, "{SHOULDER AIM_54A_Mk47 R}": Weapons.AIM_54A_Mk47__, "{AIM_54A_Mk60}": Weapons.AIM_54A_Mk60, "{SHOULDER AIM_54A_Mk60 L}": Weapons.AIM_54A_Mk60_, "{SHOULDER AIM_54A_Mk60 R}": Weapons.AIM_54A_Mk60__, "{AIM_54C_Mk47}": Weapons.AIM_54C_Mk47, "{SHOULDER AIM_54C_Mk47 L}": Weapons.AIM_54C_Mk47_, "{7575BA0B-7294-4844-857B-031A144B2595}": Weapons.AIM_54C_Mk47_Phoenix_IN__Semi_Active_Radar, "{SHOULDER AIM_54C_Mk47 R}": Weapons.AIM_54C_Mk47__, "{AIM-7E}": Weapons.AIM_7E_Sparrow_Semi_Active_Radar, "{SHOULDER AIM-7F}": Weapons.AIM_7F, "{BELLY AIM-7F}": Weapons.AIM_7F_, "{AIM-7F}": Weapons.AIM_7F_Sparrow_Semi_Active_Radar, "{SHOULDER AIM-7M}": Weapons.AIM_7M, "{SHOULDER AIM-7MH}": Weapons.AIM_7MH, "{BELLY AIM-7MH}": Weapons.AIM_7MH_, 
"{AIM-7H}": Weapons.AIM_7MH_Sparrow_Semi_Active_Radar, "{BELLY AIM-7M}": Weapons.AIM_7M_, "{8D399DDA-FF81-4F14-904D-099B34FE7918}": Weapons.AIM_7M_Sparrow_Semi_Active_Radar, "{AIM-9B}": Weapons.AIM_9B_Sidewinder_IR_AAM, "{AIM-9L}": Weapons.AIM_9L_Sidewinder_IR_AAM, "{6CEB49FC-DED8-4DED-B053-E1F033FF72D3}": Weapons.AIM_9M_Sidewinder_IR_AAM, "{AIM-9P5}": Weapons.AIM_9P5_Sidewinder_IR_AAM, "{9BFD8C90-F7AE-4e90-833B-BFD0CED0E536}": Weapons.AIM_9P_Sidewinder_IR_AAM, "{5CE2FF2A-645A-4197-B48D-8720AC69394F}": Weapons.AIM_9X_Sidewinder_IR_AAM, "{VIGGEN_X-TANK}": Weapons.AJS_External_tank_1013kg_fuel, "{AKAN}": Weapons.AKAN_M_55_Gunpod__150_rnds_MINGR55_HE, "{E6747967-B1F0-4C77-977B-AB2E6EB0C102}": Weapons.ALARM, "{6D21ECEA-F85B-4E8D-9D51-31DC9B8AA4EF}": Weapons.ALQ_131___ECM_Pod, "ALQ_184": Weapons.ALQ_184, "ALQ_184_Long": Weapons.ALQ_184_Long, "{A111396E-D3E8-4b9c-8AC9-2432489304D5}": Weapons.AN_AAQ_28_LITENING___Targeting_Pod, "{AAQ-28_LEFT}": Weapons.AN_AAQ_28_LITENING___Targeting_Pod_, "{ALQ_164_RF_Jammer}": Weapons.AN_ALQ_164_DECM_Pod, "{1C2B16EB-8EB0-43de-8788-8EBB2D70B8BC}": Weapons.AN_ASQ_173_Laser_Spot_Tracker_Strike_CAMera__LST_SCAM_, "{AN_ASQ_213}": Weapons.AN_ASQ_213_HTS___HARM_Targeting_System, "{AN_ASQ_228}": Weapons.AN_ASQ_228_ATFLIR___Targeting_Pod, "{AIS_ASQ_T50}": Weapons.AN_ASQ_T50_TCTS_Pod___ACMI_Pod, "{AN_M30A1}": Weapons.AN_M30A1___100lb_GP_Bomb_LD, "{AN-M3}": Weapons.AN_M3___2_Browning_Machine_Guns_12_7mm, "{AN_M57}": Weapons.AN_M57___250lb_GP_Bomb_LD, "{AN-M64}": Weapons.AN_M64___500lb_GP_Bomb_LD, "{F86ANM64}": Weapons.AN_M64___500lb_GP_Bomb_LD_, "{AN_M65}": Weapons.AN_M65___1000lb_GP_Bomb_LD, "{AN_M66}": Weapons.AN_M66___2000lb_GP_Bomb_LD, "{APU-60-1_R_60M}": Weapons.APU_60_1M_with_R_60M__AA_8_Aphid____Infra_Red, "{B0DBC591-0F52-4F7D-AD7B-51E67725FB81}": Weapons.APU_60_2M_with_2_x_R_60M__AA_8_Aphid____Infra_Red, "{275A2855-4A79-4B2D-B082-91EA2ADF4691}": Weapons.APU_60_2M_with_2_x_R_60M__AA_8_Aphid____Infra_Red_, "{APU_68_S-24}": 
Weapons.APU_68___S_24B, "{A6FD14D3-6D30-4C85-88A7-8D17BEE120E2}": Weapons.APU_6___6_9A4172_Vikhr, "{F789E86A-EE2E-4E6B-B81E-D5E5F903B6ED}": Weapons.APU_8___8_9A4172_Vikhr, "{ARAKM70BAP}": Weapons.ARAK_M_70B_AP_6x_135mm_UnGd_Rkts__Pshu70_HEAT,
Weapons.BetAB_500ShP___500kg_Concrete_Piercing_HD_w_booster_Bomb, "{35B698AC-9FEF-4EC4-AD29-484A0085F62B}": Weapons.BetAB_500___500kg_Concrete_Piercing_Bomb_LD, "BF109K_4_FUEL_TANK": Weapons.BF109K_4_FUEL_TANK, "BGM_109": Weapons.BGM_109, "BGM-109B": Weapons.BGM_109B, "BIN_200": Weapons.BIN_200, "{BKF_AO2_5RT}": Weapons.BKF___12_x_AO_2_5RT, "{BKF_PTAB2_5KO}": Weapons.BKF___12_x_PTAB_2_5KO, "{BK90}": Weapons.BK_90_MJ12__12x_MJ2_HEAT___36x_MJ1_HE_FRAG_Bomblets_, "{BK90MJ1}": Weapons.BK_90_MJ1__72_x_MJ1_HE_FRAG_Bomblets_, "{BK90MJ2}": Weapons.BK_90_MJ2__24_x_MJ2_HEAT_Bomblets_, "{BLG66_BELOUGA_AC}": Weapons.BLG_66_AC_Belouga, "{BLG66_AC}": Weapons.BLG_66_Belouga___290kg_CBU__151_Frag_Pen_bomblets, "{752B9781-F962-11d5-9190-00A0249B6F00}": Weapons.BLU_107___440lb_Anti_Runway_Penetrator_Bomb, "{08164777-5E9C-4B08-B48E-5AA7AFB246E2}": Weapons.BL_755_CBU___450kg__147_Frag_Pen_bomblets, "{8C3F26A1-FA0F-11d5-9190-00A0249B6F00}": Weapons.BOZ_107___Countermeasure_Dispenser, "{BRU33_LAU10}": Weapons.BRU_33_with_1_x_LAU_10_pod___4_x_127mm_ZUNI__UnGd_Rkts_Mk71__HE_FRAG, "{BRU33_LAU61}": Weapons.BRU_33_with_1_x_LAU_61_pod___19_x_2_75_Hydra__UnGd_Rkts_M151__HE, "{BRU33_LAU61_M282}": Weapons.BRU_33_with_1_x_LAU_61_pod___19_x_2_75_Hydra__UnGd_Rkts_M282__HEDP, "{BRU33_LAU68}": Weapons.BRU_33_with_1_x_LAU_68_pod___7_x_2_75_Hydra__UnGd_Rkts_M151__HE, "{BRU33_LAU68_M282}": Weapons.BRU_33_with_1_x_LAU_68_pod___7_x_2_75_Hydra__UnGd_Rkts_M282__HEDP, "{BRU33_LAU68_MK5}": Weapons.BRU_33_with_1_x_LAU_68_pod___7_x_2_75_Hydra__UnGd_Rkts_Mk5__HEAT, "{BRU33_2X_BDU-45B}": Weapons.BRU_33_with_2_x_BDU_45B___500lb_Practice_Bomb, "{BRU33_2X_BDU_45LG}": Weapons.BRU_33_with_2_x_BDU_45_LG_500lb_Practice_Laser_Guided_Bomb, "{BRU33_2X_BDU-45}": Weapons.BRU_33_with_2_x_BDU_45___500lb_Practice_Bomb, "{BRU33_2X_CBU-99}": Weapons.BRU_33_with_2_x_CBU_99___490lbs__247_x_HEAT_Bomblets, "{BRU33_2X_GBU-12}": Weapons.BRU_33_with_2_x_GBU_12___500lb_Laser_Guided_Bomb, "{BRU33_2X_GBU-16}": 
Weapons.BRU_33_with_2_x_GBU_16___1000lb_Laser_Guided_Bomb, "{BRU33_2*LAU10}": Weapons.BRU_33_with_2_x_LAU_10_pod___4_x_127mm_ZUNI__UnGd_Rkts_Mk71__HE_FRAG, "{BRU33_2*LAU61}": Weapons.BRU_33_with_2_x_LAU_61_pod___19_x_2_75_Hydra__UnGd_Rkts_M151__HE, "{BRU33_2*LAU61_M282}": Weapons.BRU_33_with_2_x_LAU_61_pod___19_x_2_75_Hydra__UnGd_Rkts_M282__HEDP, "{BRU33_2*LAU68}": Weapons.BRU_33_with_2_x_LAU_68_pod___7_x_2_75_Hydra__UnGd_Rkts_M151__HE, "{BRU33_2*LAU68_M282}": Weapons.BRU_33_with_2_x_LAU_68_pod___7_x_2_75_Hydra__UnGd_Rkts_M282__HEDP, "{BRU33_2*LAU68_MK5}": Weapons.BRU_33_with_2_x_LAU_68_pod___7_x_2_75_Hydra__UnGd_Rkts_Mk5__HEAT, "{BRU33_2X_ROCKEYE}": Weapons.BRU_33_with_2_x_Mk_20_Rockeye___490lbs_CBU__247_x_HEAT_Bomblets, "{BRU33_2X_MK-82Y}": Weapons.BRU_33_with_2_x_Mk_82Y___500lb_GP_Chute_Retarded_HD, "{BRU33_2X_MK-82_Snakeye}": Weapons.BRU_33_with_2_x_Mk_82_Snakeye___500lb_GP_Bomb_HD, "{BRU33_2X_MK-82}": Weapons.BRU_33_with_2_x_Mk_82___500lb_GP_Bomb_LD, "{BRU33_2X_MK-83}": Weapons.BRU_33_with_2_x_Mk_83___1000lb_GP_Bomb_LD, "{BRU41_6X_BDU-33}": Weapons.BRU_41A_with_6_x_BDU_33___25lb_Practice_Bomb_LD, "{BRU41_6X_MK-82}": Weapons.BRU_41A_with_6_x_Mk_82___500lb_GP_Bomb_LD, "BRU-42_3*BDU-33": Weapons.BRU_42_3_BDU_33, "BRU-42_3*GBU-12": Weapons.BRU_42_3_GBU_12, "BRU-42_LS": Weapons.BRU_42_LS, "{62BE78B1-9258-48AE-B882-279534C0D278}": Weapons.BRU_42_with_2_x_GBU_10___2000lb_Laser_Guided_Bombs, "{EB969276-1922-4ED1-A5CB-18590F45D7FE}": Weapons.BRU_42_with_2_x_GBU_27___2000lb_Laser_Guided_Penetrator_Bombs, "{88D49E04-78DF-4F08-B47E-B81247A9E3C5}": Weapons.BRU_42_with_3_x_GBU_16___1000lb_Laser_Guided_Bombs, "{LAU-131x3 - 7 AGR-20A}": Weapons.BRU_42_with_3_x_LAU_131_pods___7_x_2_75_Hydra__Laser_Guided_Rkts_M151__HE_APKWS, "{LAU-131x3 - 7 AGR-20 M282}": Weapons.BRU_42_with_3_x_LAU_131_pods___7_x_2_75_Hydra__Laser_Guided_Rkts_M282__MPP_APKWS, "{64329ED9-B14C-4c0b-A923-A3C911DA1527}": Weapons.BRU_42_with_3_x_LAU_68_pods___21_x_2_75_Hydra__UnGd_Rkts_M151__HE, 
"{C2593383-3CA8-4b18-B73D-0E750BCA1C85}": Weapons.BRU_42_with_3_x_LAU_68_pods___21_x_2_75_Hydra__UnGd_Rkts_M156__Wht_Phos, "{E6966004-A525-4f47-AF94-BCFEDF8FDBDA}": Weapons.BRU_42_with_3_x_LAU_68_pods___21_x_2_75_Hydra__UnGd_Rkts_M257__Para_Illum, "{4C044B08-886B-46c8-9B1F-AB05B3ED9C1D}": Weapons.BRU_42_with_3_x_LAU_68_pods___21_x_2_75_Hydra__UnGd_Rkts_M274__Practice_Smk, "{443364AE-D557-488e-9499-45EDB3BA6730}": Weapons.BRU_42_with_3_x_LAU_68_pods___21_x_2_75_Hydra__UnGd_Rkts_Mk1__Practice, "{9BC82B3D-FE70-4910-B2B7-3E54EFE73262}": Weapons.BRU_42_with_3_x_LAU_68_pods___21_x_2_75_Hydra__UnGd_Rkts_Mk5__HEAT, "{C0FA251E-B645-4ce5-926B-F4BC20822F8B}": Weapons.BRU_42_with_3_x_LAU_68_pods___21_x_2_75_Hydra__UnGd_Rkts_Mk61__Practice, "{A1853B38-2160-4ffe-B7E9-9BF81E6C3D77}": Weapons.BRU_42_with_3_x_LAU_68_pods___21_x_2_75_Hydra__UnGd_Rkts_WTU_1_B__Practice, "{BRU_42_3xLAU68_M282}": Weapons.BRU_42_with_3_x_LAU_68_pod___7_x_2_75_Hydra__UnGd_Rkts_M282__HEDP, "{B83CB620-5BBE-4BEA-910C-EB605A327EF9}": Weapons.BRU_42_with_3_x_Mk_20_Rockeye___490lbs_CBUs__247_x_HEAT_Bomblets, "{7B34E0BB-E427-4C2A-A61A-8407CE18B54D}": Weapons.BRU_42_with_3_x_Mk_81___250lb_GP_Bombs_LD, "{BRU-42_3*Mk-82AIR}": Weapons.BRU_42_with_3_x_Mk_82_AIR_Ballute___500lb_GP_Bombs_HD, "{60CC734F-0AFA-4E2E-82B8-93B941AB11CF}": Weapons.BRU_42_with_3_x_Mk_82___500lb_GP_Bombs_LD, "{BRU-42_LS_3*SUU-25_8*LUU-2}": Weapons.BRU_42_with_3_x_SUU_25_x_8_LUU_2___Target_Marker_Flares, "{BRU55_2*AGM-154A}": Weapons.BRU_55_with_2_x_AGM_154A___JSOW_CEB__CBU_type_, "{BRU55_2*AGM-154C}": Weapons.BRU_55_with_2_x_AGM_154C___JSOW_Unitary_BROACH, "{BRU55_2*GBU-38}": Weapons.BRU_55_with_2_x_GBU_38___JDAM__500lb_GPS_Guided_Bomb, "{BRU57_2*AGM-154A}": Weapons.BRU_57_with_2_x_AGM_154A___JSOW_CEB__CBU_type_, "{BRU57_2*AGM-154B}": Weapons.BRU_57_with_2_x_AGM_154B___JSOW_Anti_Armour, "{BRU57_2*CBU-103}": Weapons.BRU_57_with_2_x_CBU_103___202_x_CEM__CBU_with_WCMD, "{BRU57_2*CBU-105}": 
Weapons.BRU_57_with_2_x_CBU_105___10_x_SFW__CBU_with_WCMD, "{BRU57_2*GBU-38}": Weapons.BRU_57_with_2_x_GBU_38___JDAM__500lb_GPS_Guided_Bomb, "BR_250": Weapons.BR_250, "BR_500": Weapons.BR_500, "British_GP_250LBS_Bomb_MK4_on_LH_Spitfire_Wing_Carrier": Weapons.British_GP_250LBS_Bomb_MK4_on_LH_Spitfire_Wing_Carrier, "British_GP_250LBS_Bomb_MK4_on_RH_Spitfire_Wing_Carrier": Weapons.British_GP_250LBS_Bomb_MK4_on_RH_Spitfire_Wing_Carrier, "British_GP_500LBS_Bomb_MK4_on_British_UniversalBC_MK3": Weapons.British_GP_500LBS_Bomb_MK4_on_British_UniversalBC_MK3, "{FC56DF80-9B09-44C5-8976-DCFAFF219062}": Weapons.B_13L_pod___5_x_S_13_OF__122mm_UnGd_Rkts__Blast_Frag, "B-1B_Mk-84*8": Weapons.B_1B_Mk_84_8, "{F72F47E5-C83A-4B85-96ED-D3E46671EE9A}": Weapons.B_8M1_pod___20_x_S_8KOM__80mm_UnGd_Rkts__HEAT_AP, "{3DFB7320-AB0E-11d7-9897-000476191836}": Weapons.B_8M1_pod___20_x_S_8TsM__80mm_UnGd_Rkts__Smk, "B-8M1 - 20 S-8OFP2": Weapons.B_8M1___20_S_8OFP2, "B_8V20A_CM": Weapons.B_8V20A_CM, "B_8V20A_CM_BU": Weapons.B_8V20A_CM_BU, "B_8V20A_CM_GN": Weapons.B_8V20A_CM_GN, "B_8V20A_CM_RD": Weapons.B_8V20A_CM_RD, "B_8V20A_CM_VT": Weapons.B_8V20A_CM_VT, "B_8V20A_CM_WH": Weapons.B_8V20A_CM_WH, "B_8V20A_CM_YE": Weapons.B_8V20A_CM_YE, "B_8V20A_OFP2": Weapons.B_8V20A_OFP2, "B_8V20A_OM": Weapons.B_8V20A_OM, "{6A4B9E69-64FE-439a-9163-3A87FB6A4D81}": Weapons.B_8V20A_pod___20_x_S_8KOM__80mm_UnGd_Rkts__HEAT_AP, "CATM-9M": Weapons.CATM_9M, "CBLS-200": Weapons.CBLS_200, "CBU87*10": Weapons.CBU87_10, "CBU97*10": Weapons.CBU97_10, "{CBU_103}": Weapons.CBU_103___202_x_CEM__CBU_with_WCMD, "{CBU_105}": Weapons.CBU_105___10_x_SFW__CBU_with_WCMD, "{CBU-52B}": Weapons.CBU_52B___220_x_HE_Frag_bomblets, "{CBU-87}": Weapons.CBU_87___202_x_CEM_Cluster_Bomb, "{5335D97A-35A5-4643-9D9B-026C75961E52}": Weapons.CBU_97___10_x_SFW_Cluster_Bomb, "{CBU_99}": Weapons.CBU_99___490lbs__247_x_HEAT_Bomblets, "{CM_802AKG}": Weapons.CM_802AKG, "{C_802AK}": Weapons.C_802AK, "{C-101-DEFA553}": Weapons.DEFA_553___30mm_Revolver_Cannon, 
"DIS_AKD-10": Weapons.DIS_AKD_10, "DIS_AKG_DLPOD": Weapons.DIS_AKG_DLPOD, "DIS_BOMB_250_2": Weapons.DIS_BOMB_250_2, "DIS_BOMB_250_3": Weapons.DIS_BOMB_250_3, "DIS_BRM1_90": Weapons.DIS_BRM1_90, "DIS_CM-802AKG": Weapons.DIS_CM_802AKG, "DIS_C-701IR": Weapons.DIS_C_701IR, "DIS_C-701T": Weapons.DIS_C_701T, "DIS_C-802AK": Weapons.DIS_C_802AK, "DIS_DF4A_KD20": Weapons.DIS_DF4A_KD20, "DIS_DF4B_YJ12": Weapons.DIS_DF4B_YJ12, "DIS_GB6": Weapons.DIS_GB6, "DIS_GB6_HE": Weapons.DIS_GB6_HE, "DIS_GB6_TSP": Weapons.DIS_GB6_TSP, "DIS_GBU_10": Weapons.DIS_GBU_10, "DIS_GBU_12": Weapons.DIS_GBU_12, "DIS_GBU_12_DUAL_GDJ_II19_L": Weapons.DIS_GBU_12_DUAL_GDJ_II19_L, "DIS_GBU_12_DUAL_GDJ_II19_R": Weapons.DIS_GBU_12_DUAL_GDJ_II19_R, "DIS_GBU_16": Weapons.DIS_GBU_16, "DIS_GDJ_KD63": Weapons.DIS_GDJ_KD63, "DIS_GDJ_KD63B": Weapons.DIS_GDJ_KD63B, "DIS_GDJ_YJ83K": Weapons.DIS_GDJ_YJ83K, "DIS_H6_250_2_N12": Weapons.DIS_H6_250_2_N12, "DIS_H6_250_2_N24": Weapons.DIS_H6_250_2_N24, "DIS_KD20": Weapons.DIS_KD20, "DIS_KD63": Weapons.DIS_KD63, "DIS_KD63B": Weapons.DIS_KD63B, "DIS_LAU68_MK5_DUAL_GDJ_II19_L": Weapons.DIS_LAU68_MK5_DUAL_GDJ_II19_L, "DIS_LAU68_MK5_DUAL_GDJ_II19_R": Weapons.DIS_LAU68_MK5_DUAL_GDJ_II19_R, "DIS_LD-10": Weapons.DIS_LD_10, "DIS_LD-10_DUAL_L": Weapons.DIS_LD_10_DUAL_L, "DIS_LD-10_DUAL_R": Weapons.DIS_LD_10_DUAL_R, "DIS_LS_6_500": Weapons.DIS_LS_6_500, "DIS_MER6_250_3_N6": Weapons.DIS_MER6_250_3_N6, "DIS_MK_20": Weapons.DIS_MK_20, "DIS_MK_20_DUAL_GDJ_II19_L": Weapons.DIS_MK_20_DUAL_GDJ_II19_L, "DIS_MK_20_DUAL_GDJ_II19_R": Weapons.DIS_MK_20_DUAL_GDJ_II19_R, "DIS_MK_82S_DUAL_GDJ_II19_L": Weapons.DIS_MK_82S_DUAL_GDJ_II19_L, "DIS_MK_82S_DUAL_GDJ_II19_R": Weapons.DIS_MK_82S_DUAL_GDJ_II19_R, "DIS_MK_82_DUAL_GDJ_II19_L": Weapons.DIS_MK_82_DUAL_GDJ_II19_L, "DIS_MK_82_DUAL_GDJ_II19_R": Weapons.DIS_MK_82_DUAL_GDJ_II19_R, "DIS_PL-12": Weapons.DIS_PL_12, "DIS_PL-5EII": Weapons.DIS_PL_5EII, "DIS_PL-8A": Weapons.DIS_PL_8A, "DIS_PL-8B": Weapons.DIS_PL_8B, "DIS_RKT_90_UG": Weapons.DIS_RKT_90_UG, 
"DIS_SD-10": Weapons.DIS_SD_10, "DIS_SD-10_DUAL_L": Weapons.DIS_SD_10_DUAL_L, "DIS_SD-10_DUAL_R": Weapons.DIS_SD_10_DUAL_R, "DIS_SMOKE_GENERATOR_B": Weapons.DIS_SMOKE_GENERATOR_B, "DIS_SMOKE_GENERATOR_G": Weapons.DIS_SMOKE_GENERATOR_G, "DIS_SMOKE_GENERATOR_O": Weapons.DIS_SMOKE_GENERATOR_O, "DIS_SMOKE_GENERATOR_R": Weapons.DIS_SMOKE_GENERATOR_R, "DIS_SMOKE_GENERATOR_W": Weapons.DIS_SMOKE_GENERATOR_W, "DIS_SMOKE_GENERATOR_Y": Weapons.DIS_SMOKE_GENERATOR_Y, "DIS_SPJ_POD": Weapons.DIS_SPJ_POD, "DIS_TANK1100": Weapons.DIS_TANK1100, "DIS_TANK1100_EMPTY": Weapons.DIS_TANK1100_EMPTY, "DIS_TANK800": Weapons.DIS_TANK800, "DIS_TANK800_EMPTY": Weapons.DIS_TANK800_EMPTY, "DIS_TYPE200": Weapons.DIS_TYPE200, "DIS_TYPE200_DUAL_L": Weapons.DIS_TYPE200_DUAL_L, "DIS_TYPE200_DUAL_R": Weapons.DIS_TYPE200_DUAL_R, "DIS_WMD7": Weapons.DIS_WMD7, "DIS_YJ12": Weapons.DIS_YJ12, "DIS_YJ83K": Weapons.DIS_YJ83K, "{DWS39_MJ1}": Weapons.DWS39_MJ1, "{DWS39_MJ1_MJ2}": Weapons.DWS39_MJ1_MJ2, "{DWS39_MJ2}": Weapons.DWS39_MJ2, "{Eclair}": Weapons.Eclair, "ER_4_SC50": Weapons.ER_4_SC50, "{0519A261-0AB6-11d6-9193-00A0249B6F00}": Weapons.ETHER, "FAB_100M": Weapons.FAB_100M, "FAB_100M": Weapons.FAB_100M_, "{FAB-100-4}": Weapons.FAB_100_x_4, "{FB3CE165-BF07-4979-887C-92B87F13276B}": Weapons.FAB_100___100kg_GP_Bomb_LD, "{40AA4ABE-D6EB-4CD6-AEFE-A1A0477B24AB}": Weapons.FAB_1500_M_54___1500kg_GP_Bomb_LD, "{FAB-250-M54-TU}": Weapons.FAB_250_M54_TU___235_kg__bomb__parachute, "{FAB-250-M54}": Weapons.FAB_250_M54___235_kg__bomb__parachute, "{FAB_250_M62}": Weapons.FAB_250_M62___250kg_GP_Bomb_LD, "{3C612111-C7AD-476E-8A8E-2485812F4E5C}": Weapons.FAB_250___250kg_GP_Bomb_LD, "FAB_50": Weapons.FAB_50, "{FAB-500-M54-TU}": Weapons.FAB_500_M54_TU___480_kg__bomb__parachute, "{FAB-500-M54}": Weapons.FAB_500_M54___474_kg__bomb__free_fall, "{37DCC01E-9E02-432F-B61D-10C166CA2798}": Weapons.FAB_500_M_62___500kg_GP_Bomb_LD, "{FAB-500-SL}": Weapons.FAB_500_SL___515_kg__bomb__parachute, "{FAB-500-TA}": 
Weapons.FAB_500_TA___477_kg__bomb__free_fall, "FAB_50": Weapons.FAB_50_, "FIM_92": Weapons.FIM_92, "{FPU_8A_FUEL_TANK}": Weapons.FPU_8A_Fuel_Tank_330_gallons, "{PTB_120_F86F35}": Weapons.Fuel_Tank_120_gallons, "{PTB_150L_L39}": Weapons.Fuel_Tank_150_liters, "{PTB_200_F86F35}": Weapons.Fuel_Tank_200_gallons, "{PTB_350L_L39}": Weapons.Fuel_Tank_350_liters, "{PTB_490C_MIG21}": Weapons.Fuel_Tank_490_L_Central__21_, "{PTB_490_MIG21}": Weapons.Fuel_Tank_490_L__21_, "{PTB_800_MIG21}": Weapons.Fuel_Tank_800_L__21_, "Fuel_Tank_FT600": Weapons.Fuel_Tank_FT600, "{414E383A-59EB-41BC-8566-2B5E0788ED1F}": Weapons.Fuel_tank_1150L, "{C0FF4842-FBAC-11d5-9190-00A0249B6F00}": Weapons.Fuel_tank_1150L_MiG_29, "{2BEC576B-CDF5-4B7F-961F-B0FA4312B841}": Weapons.Fuel_tank_1400L, "{16602053-4A12-40A2-B214-AB60D481B20E}": Weapons.Fuel_tank_2000L, "{7D7EC917-05F6-49D4-8045-61FC587DD019}": Weapons.Fuel_tank_3000L, "{8A0BE8AE-58D4-4572-9263-3144C0D06364}": Weapons.Fuel_tank_300_gal, "{F14-300gal}": Weapons.Fuel_tank_300_gal_, "{F14-300gal-empty}": Weapons.Fuel_tank_300_gal__empty_, "{EFEC8200-B922-11d7-9897-000476191836}": Weapons.Fuel_tank_330_gal, "{EFEC8201-B922-11d7-9897-000476191836}": Weapons.Fuel_tank_330_gal_, "{82364E69-5564-4043-A866-E13032926C3E}": Weapons.Fuel_tank_367_gal, "{F376DBEE-4CAE-41BA-ADD9-B2910AC95DEC}": Weapons.Fuel_tank_370_gal, "{0855A3A1-FA50-4C89-BDBB-5D5360ABA071}": Weapons.Fuel_tank_5000L, "{E1F29B21-F291-4589-9FD8-3272EEC69506}": Weapons.Fuel_tank_610_gal, "{A5BAEAB7-6FAF-4236-AF72-0FD900F493F9}": Weapons.Fuel_tank_800L, "{E8D4652F-FD48-45B7-BA5B-2AE05BB5A9CF}": Weapons.Fuel_tank_800L_Wing, "{B99EE8A8-99BC-4a8d-89AC-A26831920DCE}": Weapons.Fuel_tank_PTB_450, "{PTB_450}": Weapons.Fuel_tank_PTB_450_, "{A504D93B-4E80-4B4F-A533-0D9B65F2C55F}": Weapons.Fuel_tank_S_3, "FW109_FUEL_TANK": Weapons.FW109_FUEL_TANK, "{8B9E3FD0-F034-4A07-B6CE-C269884CC71B}": Weapons.F_4_Fuel_tank_C, "{7B4B122D-C12C-4DB4-834E-4D8BB4D863A8}": Weapons.F_4_Fuel_tank_W, "{PTB-150GAL}": 
Weapons.F_5_150Gal_Fuel_tank, "{0395076D-2F77-4420-9D33-087A4398130B}": Weapons.F_5_275Gal_Fuel_tank, "{GAU_12_Equalizer_AP}": Weapons.GAU_12_Gunpod_w_AP_M79, "{GAU_12_Equalizer_HE}": Weapons.GAU_12_Gunpod_w_HE_M792, "{GAU_12_Equalizer}": Weapons.GAU_12_Gunpod_w_SAPHEI_T, "{BRU-32 GBU-10}": Weapons.GBU_10, "{51F9AAE5-964F-4D21-83FB-502E3BFE5F8A}": Weapons.GBU_10___2000lb_Laser_Guided_Bomb, "{BRU-32 GBU-12}": Weapons.GBU_12, "{DB769D48-67D7-42ED-A2BE-108D566C8B1E}": Weapons.GBU_12___500lb_Laser_Guided_Bomb, "{BRU-32 GBU-16}": Weapons.GBU_16, "{0D33DDAE-524F-4A4E-B5B8-621754FE3ADE}": Weapons.GBU_16___1000lb_Laser_Guided_Bomb, "{BRU-32 GBU-24}": Weapons.GBU_24, "{34759BBC-AF1E-4AEE-A581-498FF7A6EBCE}": Weapons.GBU_24_Paveway_III___2000lb_Laser_Guided_Bomb, "{GBU-24}": Weapons.GBU_24_Paveway_III___2000lb_Laser_Guided_Bomb_, "{EF0A9419-01D6-473B-99A3-BEBDB923B14D}": Weapons.GBU_27___2000lb_Laser_Guided_Penetrator_Bomb, "{F06B775B-FC70-44B5-8A9F-5B5E2EB839C7}": Weapons.GBU_28___5000lb_Laser_Guided_Penetrator_Bomb, "GBU-31V3B*8": Weapons.GBU_31V3B_8, "GBU-31*8": Weapons.GBU_31_8, "{GBU-31}": Weapons.GBU_31_V_1_B___JDAM__2000lb_GPS_Guided_Bomb, "{GBU_31_V_2B}": Weapons.GBU_31_V_2_B___JDAM__2000lb_GPS_Guided_Bomb, "{GBU-31V3B}": Weapons.GBU_31_V_3_B___JDAM__2000lb_GPS_Guided_Penetrator_Bomb, "{GBU_31_V_4B}": Weapons.GBU_31_V_4_B___JDAM__2000lb_GPS_Guided_Penetrator_Bomb, "{GBU_32_V_2B}": Weapons.GBU_32_V_2_B___JDAM__1000lb_GPS_Guided_Bomb, "GBU-38*16": Weapons.GBU_38_16, "{GBU-38}": Weapons.GBU_38___JDAM__500lb_GPS_Guided_Bomb, "{GBU_54_V_1B}": Weapons.GBU_54B___LJDAM__500lb_Laser__GPS_Guided_Bomb_LD, "GUV_VOG": Weapons.GUV_VOG, "GUV_YakB_GSHP": Weapons.GUV_YakB_GSHP, "{HOT3G}": Weapons.HOT3, "{HOT3D}": Weapons.HOT3_, "{4CD2BB0F-5493-44EF-A927-9760350F7BA1}": Weapons.HSAB_with_9_x_Mk_20_Rockeye___490lbs_CBUs__247_x_HEAT_Bomblets, "{696CFFC4-0BDE-42A8-BE4B-0BE3D9DD723C}": Weapons.HSAB_with_9_x_Mk_83___1000lb_GP_Bombs_LD, "{HVAR_SMOKE_2}": Weapons.HVAR_SMOKE__UnGd_Rkt, 
"{HVAR_SMOKE_GENERATOR}": Weapons.HVAR_Smoke_Generator, "{HVAR}": Weapons.HVAR__UnGd_Rkt, "I16_DROP_FUEL_TANK": Weapons.I16_DROP_FUEL_TANK, "I16_FAB_100SV": Weapons.I16_FAB_100SV, "I16_RS_82": Weapons.I16_RS_82, "{IAB-500}": Weapons.IAB_500___470_kg__bomb__free_fall, "{IR_Deflector}": Weapons.IR_Deflector, "{KAB_1500Kr_LOADOUT}": Weapons.KAB_1500Kr___1500kg_TV_Guided_Bomb, "{KAB_1500LG_LOADOUT}": Weapons.KAB_1500LG_Pr___1500kg_Laser_Guided_Penetrator_Bomb, "{39821727-F6E2-45B3-B1F0-490CC8921D1E}": Weapons.KAB_1500L___1500kg_Laser_Guided_Bomb, "{E2C426E3-8B10-4E09-B733-9CDC26520F48}": Weapons.KAB_500Kr___500kg_TV_Guided_Bomb, "{BA565F89-2373-4A84-9502-A0E017D3A44A}": Weapons.KAB_500LG___500kg_Laser_Guided_Bomb, "{KAB_500S_LOADOUT}": Weapons.KAB_500S___500kg_GPS_Guided_Bomb, "{KB}": Weapons.KB_Flare_Chaff_dispenser_pod, "{12429ECF-03F0-4DF6-BCBD-5D38B6343DE1}": Weapons.Kh_22__AS_4_Kitchen____1000kg__AShM__IN__Act_Pas_Rdr, "{9F390892-E6F9-42C9-B84E-1136A881DCB2}": Weapons.Kh_23L_Grom__AS_7_Kerry____286kg__ASM__Laser_Guided, "{6DADF342-D4BA-4D8A-B081-BA928C4AF86D}": Weapons.Kh_25ML__AS_10_Karen____300kg__ASM__Semi_Act_Laser, "{79D73885-0801-45a9-917F-C90FE1CE3DFC}": Weapons.Kh_25ML__AS_10_Karen____300kg__ASM__Semi_Act_Laser_, "{X-25ML}": Weapons.Kh_25ML__AS_10_Karen____300kg__ASM__Semi_Act_Laser__, "{E86C5AA5-6D49-4F00-AD2E-79A62D6DDE26}": Weapons.Kh_25MPU__Updated_AS_12_Kegler____320kg__ARM__IN__Pas_Rdr, "{752AF1D2-EBCC-4bd7-A1E7-2357F5601C70}": Weapons.Kh_25MPU__Updated_AS_12_Kegler____320kg__ARM__IN__Pas_Rdr_, "{X-25MPU}": Weapons.Kh_25MPU__Updated_AS_12_Kegler____320kg__ARM__IN__Pas_Rdr__, "{Kh-25MP}": Weapons.Kh_25MP__AS_12_Kegler____320kg__ARM__Pas_Rdr, "{292960BB-6518-41AC-BADA-210D65D5073C}": Weapons.Kh_25MR__AS_10_Karen____300kg__ASM__10km__RC_Guided, "{X-25MR}": Weapons.Kh_25MR__AS_10_Karen____300kg__ASM__RC_Guided, "{Kh-28}": Weapons.Kh_28__AS_9_Kyle____720kg__ARM__Pas_Rdr, "{3468C652-E830-4E73-AFA9-B5F260AB7C3D}": 
Weapons.Kh_29L__AS_14_Kedge____657kg__ASM__Semi_Act_Laser, "{D4A8D9B9-5C45-42e7-BBD2-0E54F8308432}": Weapons.Kh_29L__AS_14_Kedge____657kg__ASM__Semi_Act_Laser_, "{X-29L}": Weapons.Kh_29L__AS_14_Kedge____657kg__ASM__Semi_Act_Laser__, "{B4FC81C9-B861-4E87-BBDC-A1158E648EBF}": Weapons.Kh_29T__AS_14_Kedge____670kg__ASM__TV_Guided, "{601C99F7-9AF3-4ed7-A565-F8B8EC0D7AAC}": Weapons.Kh_29T__AS_14_Kedge____670kg__ASM__TV_Guided_, "{X-29T}": Weapons.Kh_29T__AS_14_Kedge____670kg__ASM__TV_Guided__, "{4D13E282-DF46-4B23-864A-A9423DFDE504}": Weapons.Kh_31A__AS_17_Krypton____610kg__AShM__IN__Act_Rdr, "{4D13E282-DF46-4B23-864A-A9423DFDE50A}": Weapons.Kh_31A__AS_17_Krypton____610kg__AShM__IN__Act_Rdr_, "{X-31A}": Weapons.Kh_31A__AS_17_Krypton____610kg__AShM__IN__Act_Rdr__, "{D8F2C90B-887B-4B9E-9FE2-996BC9E9AF03}": Weapons.Kh_31P__AS_17_Krypton____600kg__ARM__IN__Pas_Rdr, "{D8F2C90B-887B-4B9E-9FE2-996BC9E9AF0A}": Weapons.Kh_31P__AS_17_Krypton____600kg__ARM__IN__Pas_Rdr_, "{X-31P}": Weapons.Kh_31P__AS_17_Krypton____600kg__ARM__IN__Pas_Rdr__, "{2234F529-1D57-4496-8BB0-0150F9BDBBD2}": Weapons.Kh_35__AS_20_Kayak____520kg__AShM__IN__Act_Rdr, "{2234F529-1D57-4496-8BB0-0150F9BDBBD3}": Weapons.Kh_35__AS_20_Kayak____520kg__AShM__IN__Act_Rdr_, "{3F26D9C5-5CC3-4E42-BC79-82FAA54E9F26}": Weapons.Kh_41__SS_N_22_Sunburn____4500kg__AShM__IN__Act_Rdr, "{FE382A68-8620-4AC0-BDF5-709BFE3977D7}": Weapons.Kh_58U__AS_11_Kilter____640kg__ARM__IN__Pas_Rdr, "{B5CA9846-776E-4230-B4FD-8BCC9BFB1676}": Weapons.Kh_58U__AS_11_Kilter____640kg__ARM__IN__Pas_Rdr_, "{40AB87E8-BEFB-4D85-90D9-B2753ACF9514}": Weapons.Kh_59M__AS_18_Kazoo____930kg__ASM__IN, "{BADAF2DE-68B5-472A-8AAC-35BAEFF6B4A1}": Weapons.Kh_65__AS_15B_Kent____1250kg__ASM__IN__MCC, "{Kh-66_Grom}": Weapons.Kh_66_Grom__21____AGM__radar_guided_APU_68, "{96A7F676-F956-404A-AD04-F33FB2C74884}": Weapons.KMGU_2___96_x_AO_2_5RT_Dispenser__CBU__HE_Frag, "{96A7F676-F956-404A-AD04-F33FB2C74881}": Weapons.KMGU_2___96_x_PTAB_2_5KO_Dispenser__CBU__HEAT_AP, 
"KORD_12_7": Weapons.KORD_12_7, "{F4920E62-A99A-11d8-9897-000476191836}": Weapons.Kopyo_radar_pod, "{7210496B-7B81-4B52-80D6-8529ECF847CD}": Weapons.Kormoran___ASM, "{K-13A}": Weapons.K_13A, "{44EE8698-89F9-48EE-AF36-5FD31896A82F}": Weapons.L005_Sorbtsiya_ECM_pod__left_, "{44EE8698-89F9-48EE-AF36-5FD31896A82A}": Weapons.L005_Sorbtsiya_ECM_pod__right_, "{ECM_POD_L_175V}": Weapons.L175V_Khibiny_ECM_pod, "{F14-LANTIRN-TP}": Weapons.LANTIRN_Targeting_Pod, "LAU3_HE151": Weapons.LAU3_HE151, "LAU3_HE5": Weapons.LAU3_HE5, "LAU3_WP156": Weapons.LAU3_WP156, "LAU3_WP1B": Weapons.LAU3_WP1B, "LAU3_WP61": Weapons.LAU3_WP61, "LAU-105": Weapons.LAU_105, "LAU-105_1*AIM-9L_L": Weapons.LAU_105_1_AIM_9L_L, "LAU-105_1*AIM-9L_R": Weapons.LAU_105_1_AIM_9L_R, "LAU-105_1*AIM-9M_L": Weapons.LAU_105_1_AIM_9M_L, "LAU-105_1*AIM-9M_R": Weapons.LAU_105_1_AIM_9M_R, "LAU-105_1*CATM-9M_L": Weapons.LAU_105_1_CATM_9M_L, "LAU-105_1*CATM-9M_R": Weapons.LAU_105_1_CATM_9M_R, "LAU-105_2*AIM-9L": Weapons.LAU_105_2_AIM_9L, "LAU-105_2*AIM-9P5": Weapons.LAU_105_2_AIM_9P5, "LAU-105_2*CATM-9M": Weapons.LAU_105_2_CATM_9M, "LAU-105_AIS_ASQ_T50_L": Weapons.LAU_105_AIS_ASQ_T50_L, "LAU-105_AIS_ASQ_T50_R": Weapons.LAU_105_AIS_ASQ_T50_R, "{DB434044-F5D0-4F1F-9BA9-B73027E18DD3}": Weapons.LAU_105_with_2_x_AIM_9M_Sidewinder_IR_AAM, "{3C0745ED-8B0B-42eb-B907-5BD5C1717447}": Weapons.LAU_105_with_2_x_AIM_9P_Sidewinder_IR_AAM, "{LAU_10R}": Weapons.LAU_10R_pod___4_x_127mm_ZUNI__UnGd_Rkts_Mk71__HE_FRAG, "{F3EFE0AB-E91A-42D8-9CA2-B63C91ED570A}": Weapons.LAU_10_pod___4_x_127mm_ZUNI__UnGd_Rkts_Mk71__HE_FRAG, "{BRU42_LAU10}": Weapons.LAU_10___4_ZUNI_MK_71, "{BRU3242_LAU10}": Weapons.LAU_10___4_ZUNI_MK_71_, "{LAU-115 - AIM-7E}": Weapons.LAU_115C_with_AIM_7E_Sparrow_Semi_Active_Radar, "{LAU-115 - AIM-7F}": Weapons.LAU_115C_with_AIM_7F_Sparrow_Semi_Active_Radar, "{LAU-115 - AIM-7H}": Weapons.LAU_115C_with_AIM_7MH_Sparrow_Semi_Active_Radar, "LAU-115_2*LAU-127_AIM-120B": Weapons.LAU_115_2_LAU_127_AIM_120B, "LAU-115_2*LAU-127_AIM-120C": 
Weapons.LAU_115_2_LAU_127_AIM_120C, "LAU-115_2*LAU-127_AIM-9L": Weapons.LAU_115_2_LAU_127_AIM_9L, "LAU-115_2*LAU-127_AIM-9M": Weapons.LAU_115_2_LAU_127_AIM_9M, "LAU-115_2*LAU-127_AIM-9X": Weapons.LAU_115_2_LAU_127_AIM_9X, "LAU-115_2*LAU-127_CATM-9M": Weapons.LAU_115_2_LAU_127_CATM_9M, "LAU-115_LAU-127_AIM-9L": Weapons.LAU_115_LAU_127_AIM_9L, "LAU-115_LAU-127_AIM-9L_R": Weapons.LAU_115_LAU_127_AIM_9L_R, "LAU-115_LAU-127_AIM-9M": Weapons.LAU_115_LAU_127_AIM_9M, "LAU-115_LAU-127_AIM-9M_R": Weapons.LAU_115_LAU_127_AIM_9M_R, "LAU-115_LAU-127_AIM-9X": Weapons.LAU_115_LAU_127_AIM_9X, "LAU-115_LAU-127_AIM-9X_R": Weapons.LAU_115_LAU_127_AIM_9X_R, "LAU-115_LAU-127_CATM-9M": Weapons.LAU_115_LAU_127_CATM_9M, "LAU-115_LAU-127_CATM-9M_R": Weapons.LAU_115_LAU_127_CATM_9M_R, "{LAU-115 - AIM-120B}": Weapons.LAU_115_with_1_x_LAU_127_AIM_120B_AMRAAM___Active_Rdr_AAM, "{LAU-115 - AIM-120B_R}": Weapons.LAU_115_with_1_x_LAU_127_AIM_120B_AMRAAM___Active_Rdr_AAM_, "{LAU-115 - AIM-120C}": Weapons.LAU_115_with_1_x_LAU_127_AIM_120C_5_AMRAAM___Active_Rdr_AAM, "{LAU-115 - AIM-120C_R}": Weapons.LAU_115_with_1_x_LAU_127_AIM_120C_5_AMRAAM___Active_Rdr_AAM_, "{LAU-115 - AIM-7M}": Weapons.LAU_115_with_AIM_7M_Sparrow_Semi_Active_Radar, "LAU_117_AGM_65A": Weapons.LAU_117_AGM_65A, "LAU_117_AGM_65B": Weapons.LAU_117_AGM_65B, "LAU_117_AGM_65F": Weapons.LAU_117_AGM_65F, "LAU_117_AGM_65G": Weapons.LAU_117_AGM_65G, "LAU_117_AGM_65H": Weapons.LAU_117_AGM_65H, "LAU_117_AGM_65L": Weapons.LAU_117_AGM_65L, "LAU_117_CATM_65K": Weapons.LAU_117_CATM_65K, "LAU_117_TGM_65D": Weapons.LAU_117_TGM_65D, "LAU_117_TGM_65G": Weapons.LAU_117_TGM_65G, "LAU_117_TGM_65H": Weapons.LAU_117_TGM_65H, "{444BA8AE-82A7-4345-842E-76154EFCCA46}": Weapons.LAU_117_with_AGM_65D___Maverick_D__IIR_ASM_, "{F16A4DE0-116C-4A71-97F0-2CF85B0313EC}": Weapons.LAU_117_with_AGM_65E___Maverick_E__Laser_ASM___Lg_Whd_, "{69DC8AE7-8F77-427B-B8AA-B19D3F478B66}": Weapons.LAU_117_with_AGM_65K___Maverick_K__CCD_Imp_ASM_, 
"{3E6B632D-65EB-44D2-9501-1C2D04515405}": Weapons.LAU_118a_with_AGM_45B_Shrike_ARM__Imp_, "LAU-127_AIM-9L": Weapons.LAU_127_AIM_9L, "LAU-127_AIM-9M": Weapons.LAU_127_AIM_9M, "LAU-127_AIM-9X": Weapons.LAU_127_AIM_9X, "LAU-127_CATM-9M": Weapons.LAU_127_CATM_9M, "LAU_131x3_HYDRA_70_M151": Weapons.LAU_131x3_HYDRA_70_M151, "LAU_131x3_HYDRA_70_M156": Weapons.LAU_131x3_HYDRA_70_M156, "LAU_131x3_HYDRA_70_M257": Weapons.LAU_131x3_HYDRA_70_M257, "LAU_131x3_HYDRA_70_M274": Weapons.LAU_131x3_HYDRA_70_M274, "LAU_131x3_HYDRA_70_MK1": Weapons.LAU_131x3_HYDRA_70_MK1, "LAU_131x3_HYDRA_70_MK5": Weapons.LAU_131x3_HYDRA_70_MK5, "LAU_131x3_HYDRA_70_MK61": Weapons.LAU_131x3_HYDRA_70_MK61, "LAU_131x3_HYDRA_70_WTU1B": Weapons.LAU_131x3_HYDRA_70_WTU1B, "{LAU-131 - 7 AGR-20A}": Weapons.LAU_131_pod___7_x_2_75_Hydra__Laser_Guided_Rkts_M151__HE_APKWS, "{LAU-131 - 7 AGR-20 M282}": Weapons.LAU_131_pod___7_x_2_75_Hydra__Laser_Guided_Rkts_M282__MPP_APKWS, "{69926055-0DA8-4530-9F2F-C86B157EA9F6}": Weapons.LAU_131_pod___7_x_2_75_Hydra__UnGd_Rkts_M151__HE, "{2AF2EC3F-9065-4de5-93E1-1739C9A71EF7}": Weapons.LAU_131_pod___7_x_2_75_Hydra__UnGd_Rkts_M156__Wht_Phos, "{DAD45FE5-CFF0-4a2b-99D4-5D044D3BC22F}": Weapons.LAU_131_pod___7_x_2_75_Hydra__UnGd_Rkts_M257__Para_Illum, "{6D6D5C07-2A90-4a68-9A74-C5D0CFFB05D9}": Weapons.LAU_131_pod___7_x_2_75_Hydra__UnGd_Rkts_M274__Practice_Smk, "{D22C2D63-E5C9-4247-94FB-5E8F3DE22B71}": Weapons.LAU_131_pod___7_x_2_75_Hydra__UnGd_Rkts_Mk1__Practice, "{319293F2-392C-4617-8315-7C88C22AF7C4}": Weapons.LAU_131_pod___7_x_2_75_Hydra__UnGd_Rkts_Mk5__HEAT, "{1CA5E00B-D545-4ff9-9B53-5970E292F14D}": Weapons.LAU_131_pod___7_x_2_75_Hydra__UnGd_Rkts_Mk61__Practice, "{DDCE7D70-5313-4181-8977-F11018681662}": Weapons.LAU_131_pod___7_x_2_75_Hydra__UnGd_Rkts_WTU_1_B__Practice, "{LAU-138 wtip - AIM-9L}": Weapons.LAU_138_AIM_9L, "{LAU-138 wtip - AIM-9M}": Weapons.LAU_138_AIM_9M, "{LAU3_FFAR_WP156}": Weapons.LAU_3_pod___19_x_2_75_FFAR__UnGd_Rkts_M156__Wht_Phos, "{LAU3_FFAR_MK1HE}": 
Weapons.LAU_3_pod___19_x_2_75_FFAR__UnGd_Rkts_Mk1__HE, "{LAU3_FFAR_MK5HEAT}": Weapons.LAU_3_pod___19_x_2_75_FFAR__UnGd_Rkts_Mk5__HEAT, "{LAU_61R}": Weapons.LAU_61R_pod___19_x_2_75_Hydra__UnGd_Rkts_M151__HE, "{FD90A1DC-9147-49FA-BF56-CB83EF0BD32B}": Weapons.LAU_61_pod___19_x_2_75_Hydra__UnGd_Rkts_M151__HE, "{3DFB7321-AB0E-11d7-9897-000476191836}": Weapons.LAU_61_pod___19_x_2_75_Hydra__UnGd_Rkts_M156__Wht_Phos, "{LAU_61_M282}": Weapons.LAU_61_pod___19_x_2_75_Hydra__UnGd_Rkts_M282__HEDP, "{LAU68_FFAR_WP156}": Weapons.LAU_68_pod___7_x_2_75_FFAR__UnGd_Rkts_M156__Wht_Phos, "{LAU68_FFAR_MK1HE}": Weapons.LAU_68_pod___7_x_2_75_FFAR__UnGd_Rkts_Mk1__HE, "{LAU68_FFAR_MK5HEAT}": Weapons.LAU_68_pod___7_x_2_75_FFAR__UnGd_Rkts_Mk5__HEAT, "{A021F29D-18AB-4d3e-985C-FC9C60E35E9E}": Weapons.LAU_68_pod___7_x_2_75_Hydra__UnGd_Rkts_M151__HE, "{4F977A2A-CD25-44df-90EF-164BFA2AE72F}": Weapons.LAU_68_pod___7_x_2_75_Hydra__UnGd_Rkts_M156__Wht_Phos, "{647C5F26-BDD1-41e6-A371-8DE1E4CC0E94}": Weapons.LAU_68_pod___7_x_2_75_Hydra__UnGd_Rkts_M257__Para_Illum, "{0877B74B-5A00-4e61-BA8A-A56450BA9E27}": Weapons.LAU_68_pod___7_x_2_75_Hydra__UnGd_Rkts_M274__Practice_Smk, "{LAU_68_M282}": Weapons.LAU_68_pod___7_x_2_75_Hydra__UnGd_Rkts_M282__HEDP, "{FC85D2ED-501A-48ce-9863-49D468DDD5FC}": Weapons.LAU_68_pod___7_x_2_75_Hydra__UnGd_Rkts_Mk1__Practice, "{174C6E6D-0C3D-42ff-BCB3-0853CB371F5C}": Weapons.LAU_68_pod___7_x_2_75_Hydra__UnGd_Rkts_Mk5__HEAT, "{65396399-9F5C-4ec3-A7D2-5A8F4C1D90C4}": Weapons.LAU_68_pod___7_x_2_75_Hydra__UnGd_Rkts_Mk61__Practice, "{1F7136CB-8120-4e77-B97B-945FF01FB67C}": Weapons.LAU_68_pod___7_x_2_75_Hydra__UnGd_Rkts_WTU_1_B__Practice, "{LAU-7 - AIM-9L}": Weapons.LAU_7_AIM_9L, "{LAU-7 - AIM-9M}": Weapons.LAU_7_AIM_9M, "{F4-2-AIM9B}": Weapons.LAU_7_with_2_x_AIM_9B_Sidewinder_IR_AAM, "{F4-2-AIM9L}": Weapons.LAU_7_with_2_x_AIM_9L_Sidewinder_IR_AAM, "{9DDF5297-94B9-42FC-A45E-6E316121CD85}": Weapons.LAU_7_with_2_x_AIM_9M_Sidewinder_IR_AAM, "{F4-2-AIM9P5}": 
Weapons.LAU_7_with_2_x_AIM_9P5_Sidewinder_IR_AAM, "{773675AB-7C29-422f-AFD8-32844A7B7F17}": Weapons.LAU_7_with_2_x_AIM_9P_Sidewinder_IR_AAM, "{GAR-8}": Weapons.LAU_7_with_AIM_9B_Sidewinder_IR_AAM, "{AIM-9M-ON-ADAPTER}": Weapons.LAU_7_with_AIM_9M_Sidewinder_IR_AAM, "{AIM-9P5-ON-ADAPTER}": Weapons.LAU_7_with_AIM_9P5_Sidewinder_IR_AAM, "{AIM-9P-ON-ADAPTER}": Weapons.LAU_7_with_AIM_9P_Sidewinder_IR_AAM, "{AIM-9X-ON-ADAPTER}": Weapons.LAU_7_with_AIM_9X_Sidewinder_IR_AAM, "{LAU-7_AIS_ASQ_T50}": Weapons.LAU_7_with_AN_ASQ_T50_TCTS_Pod___ACMI_Pod, "LAU_88_AGM_65D_ONE": Weapons.LAU_88_AGM_65D_ONE, "LAU_88_AGM_65H": Weapons.LAU_88_AGM_65H, "LAU_88_AGM_65H_2_L": Weapons.LAU_88_AGM_65H_2_L, "LAU_88_AGM_65H_2_R": Weapons.LAU_88_AGM_65H_2_R, "LAU_88_AGM_65H_3": Weapons.LAU_88_AGM_65H_3, "{E6A6262A-CA08-4B3D-B030-E1A993B98452}": Weapons.LAU_88_with_2_x_AGM_65D___Maverick_D__IIR_ASM_, "{E6A6262A-CA08-4B3D-B030-E1A993B98453}": Weapons.LAU_88_with_2_x_AGM_65D___Maverick_D__IIR_ASM__, "{2CC29C7A-E863-411C-8A6E-BD6F0E730548}": Weapons.LAU_88_with_2_x_AGM_65E___Maverick_E__Laser_ASM___Lg_Whd_, "{2CC29C7A-E863-411C-8A6E-BD6F0E730547}": Weapons.LAU_88_with_2_x_AGM_65E___Maverick_E__Laser_ASM___Lg_Whd__, "{D7670BC7-881B-4094-906C-73879CF7EB28}": Weapons.LAU_88_with_2_x_AGM_65K___Maverick_K__CCD_Imp_ASM_, "{D7670BC7-881B-4094-906C-73879CF7EB27}": Weapons.LAU_88_with_2_x_AGM_65K___Maverick_K__CCD_Imp_ASM__, "{DAC53A2F-79CA-42FF-A77A-F5649B601308}": Weapons.LAU_88_with_3_x_AGM_65D___Maverick_D__IIR_ASM_, "{71AAB9B8-81C1-4925-BE50-1EF8E9899271}": Weapons.LAU_88_with_3_x_AGM_65E___Maverick_E__Laser_ASM___Lg_Whd_, "{907D835F-E650-4154-BAFD-C656882555C0}": Weapons.LAU_88_with_3_x_AGM_65K___Maverick_K__CCD_Imp_ASM_, "{LAU_SNEB68G}": Weapons.LAU_SNEB68G___8xSNEB68_EAP, "{LAU_SNEB68_WP}": Weapons.LAU_SNEB68G___8xSNEB68_WP, "{CAAC1CFD-6745-416B-AFA4-CB57414856D0}": Weapons.Lantirn_F_16, "{D1744B93-2A8A-4C4D-B004-7A09CD8C8F3F}": Weapons.Lantirn_Target_Pod, "{LR25_ARF8M3_API}": 
Weapons.LR_25___25_x_ARF_8_M3_API, "{LR25_ARF8M3_HEI}": Weapons.LR_25___25_x_ARF_8_M3_HEI, "{LR25_ARF8M3_TPSM}": Weapons.LR_25___25_x_ARF_8_M3_TP_SM, "{0519A264-0AB6-11d6-9193-00A0249B6F00}": Weapons.L_081_Fantasmagoria_ELINT_pod, "{US_M10_SMOKE_TANK_BLUE}": Weapons.M10_Smoke_Tank___blue, "{US_M10_SMOKE_TANK_GREEN}": Weapons.M10_Smoke_Tank___green, "{US_M10_SMOKE_TANK_ORANGE}": Weapons.M10_Smoke_Tank___orange, "{US_M10_SMOKE_TANK_RED}": Weapons.M10_Smoke_Tank___red, "{US_M10_SMOKE_TANK_WHITE}": Weapons.M10_Smoke_Tank___white, "{US_M10_SMOKE_TANK_YELLOW}": Weapons.M10_Smoke_Tank___yellow, "{00F5DAC4-0466-4122-998F-B1A298E34113}": Weapons.M117___750lb_GP_Bomb_LD, "M134_L": Weapons.M134_L, "M134_R": Weapons.M134_R, "M134_SIDE_L": Weapons.M134_SIDE_L, "M134_SIDE_R": Weapons.M134_SIDE_R, "{414DA830-B61A-4F9E-B71B-C2F6832E1D7A}": Weapons.M2000_Fuel_tank, "M260_HYDRA": Weapons.M260_HYDRA, "M260_HYDRA_WP": Weapons.M260_HYDRA_WP, "M261_MK151": Weapons.M261_MK151, "M261_MK156": Weapons.M261_MK156, "M60_SIDE_L": Weapons.M60_SIDE_L, "M60_SIDE_R": Weapons.M60_SIDE_R, "{MAK79_MK20 2L}": Weapons.MAK79_2_MK_20, "{MAK79_MK20 2R}": Weapons.MAK79_2_MK_20_, "{MAK79_BDU33 3L}": Weapons.MAK79_3_BDU_33, "{MAK79_BDU33 3R}": Weapons.MAK79_3_BDU_33_, "{MAK79_BDU45 3L}": Weapons.MAK79_3_BDU_45, "{MAK79_BDU45B 3L}": Weapons.MAK79_3_BDU_45B, "{MAK79_BDU45B 3R}": Weapons.MAK79_3_BDU_45B_, "{MAK79_BDU45 3R}": Weapons.MAK79_3_BDU_45_, "{MAK79_MK81 3L}": Weapons.MAK79_3_Mk_81, "{MAK79_MK81 3R}": Weapons.MAK79_3_Mk_81_, "{MAK79_MK82 3L}": Weapons.MAK79_3_Mk_82, "{MAK79_MK82AIR 3L}": Weapons.MAK79_3_Mk_82AIR, "{MAK79_MK82AIR 3R}": Weapons.MAK79_3_Mk_82AIR_, "{MAK79_MK82 3R}": Weapons.MAK79_3_Mk_82_, "{MAK79_MK82SE 3L}": Weapons.MAK79_3_Mk_82_SnakeEye, "{MAK79_MK82SE 3R}": Weapons.MAK79_3_Mk_82_SnakeEye_, "{MAK79_MK83 3L}": Weapons.MAK79_3_Mk_83, "{MAK79_MK83 3R}": Weapons.MAK79_3_Mk_83_, "{MAK79_BDU33 4}": Weapons.MAK79_4_BDU_33, "{MAK79_BDU45 4}": Weapons.MAK79_4_BDU_45, "{MAK79_BDU45B 4}": 
Weapons.MAK79_4_BDU_45B, "{MAK79_MK81 4}": Weapons.MAK79_4_Mk_81, "{MAK79_MK82 4}": Weapons.MAK79_4_Mk_82, "{MAK79_MK82AIR 4}": Weapons.MAK79_4_Mk_82AIR, "{MAK79_MK82SE 4}": Weapons.MAK79_4_Mk_82_SnakeEye, "{MAK79_MK20 1R}": Weapons.MAK79_MK_20, "{MAK79_MK20 1L}": Weapons.MAK79_MK_20_, "{MAK79_MK83 1R}": Weapons.MAK79_Mk_83, "{MAK79_MK83 1L}": Weapons.MAK79_Mk_83_, "{MMagicII}": Weapons.Matra_Magic_II, "{Matra_S530D}": Weapons.Matra_Super_530D, "{Matra155RocketPod}": Weapons.Matra_Type_155_Rocket_Pod, "{5A1AC2B4-CA4B-4D09-A1AF-AC52FBC4B60B}": Weapons.MBD2_67U_with_4_x_FAB_100___100kg_GP_Bombs_LD, "{29A828E2-C6BB-11d8-9897-000476191836}": Weapons.MBD2_67U_with_4_x_FAB_100___100kg_GP_Bombs_LD_, "{7C5F0F5F-0A0B-46E8-937C-8922303E39A8}": Weapons.MBD3_U2T_with_2_x_FAB_1500_M_54___1500kg_GP_Bombs_LD, "{6A367BB4-327F-4A04-8D9E-6D86BDC98E7E}": Weapons.MBD3_U4T_with_4_x_FAB_250___250kg_GP_Bombs_LD, "{02B81892-7E24-4795-84F9-B8110C641AF0}": Weapons.MBD3_U4T_with_4_x_RBK_250___42_x_PTAB_2_5M__250kg_CBUs_Medium_HEAT_AP, "{E659C4BE-2CD8-4472-8C08-3F28ACB61A8A}": Weapons.MBD3_U6_68_with_2_x_FAB_250___250kg_GP_Bombs_LD, "{MBD3_U6_3*FAB-250_fwd}": Weapons.MBD3_U6_68_with_3_x_FAB_250___250kg_GP_Bombs_LD, "{3E35F8C1-052D-11d6-9191-00A0249B6F00}": Weapons.MBD3_U6_68_with_4_x_FAB_250___250kg_GP_Bombs_LD, "{MBD3_U6_4*FAB-250_fwd}": Weapons.MBD3_U6_68_with_4_x_FAB_250___250kg_GP_Bombs_LD_, "{MBD3_U6_5*FAB-250}": Weapons.MBD3_U6_68_with_5_x_FAB_250___250kg_GP_Bombs_LD, "{E96E1EDD-FF3F-47CF-A959-576C3B682955}": Weapons.MBD3_U6_68_with_6_x_BetAB_500ShP___500kg_Concrete_Piercing_HD_w_booster_Bombs, "{436C6FB9-8BF2-46B6-9DC4-F55ABF3CD1EC}": Weapons.MBD3_U6_68_with_6_x_BetAB_500___500kg_Concrete_Piercing_Bombs_LD, "{F99BEC1A-869D-4AC7-9730-FBA0E3B1F5FC}": Weapons.MBD3_U6_68_with_6_x_FAB_100___100kg_GP_Bombs_LD, "{53BE25A4-C86C-4571-9BC0-47D668349595}": Weapons.MBD3_U6_68_with_6_x_FAB_250___250kg_GP_Bombs_LD, "{FA673F4C-D9E4-4993-AA7A-019A92F3C005}": 
Weapons.MBD3_U6_68_with_6_x_FAB_500_M_62___500kg_GP_Bombs_LD, "{0D945D78-542C-4E9B-9A17-9B5008CC8D39}": Weapons.MBD3_U6_68_with_6_x_FAB_500_M_62___500kg_GP_Bombs_LD_, "{F503C276-FE15-4C54-B310-17B50B735A84}": Weapons.MBD3_U6_68_with_6_x_RBK_500_255___30_x_PTAB_10_5__500kg_CBUs_Heavy_HEAT_AP, "{4D459A95-59C0-462F-8A57-34E80697F38B}": Weapons.MBD3_U6_68_with_6_x_RBK_500_255___30_x_PTAB_10_5__500kg_CBUs_Heavy_HEAT_AP_, "{5F1C54C0-0ABD-4868-A883-B52FF9FCB422}": Weapons.MBD3_U9M_with_9_x_FAB_100___100kg_GP_Bombs_LD, "{E1AAE713-5FC3-4CAA-9FF5-3FDCFB899E33}": Weapons.MBD3_U9M_with_9_x_FAB_250___250kg_GP_Bombs_LD, "{BF83E8FD-E7A2-40D2-9608-42E13AFE2193}": Weapons.MBD3_U9M_with_9_x_RBK_250___42_x_PTAB_2_5M__250kg_CBUs_Medium_HEAT_AP, "{005E70F5-C3EA-4E95-A148-C1044C42D845}": Weapons.MBD3_with_3_x_BetAB_500___500kg_Concrete_Piercing_Bombs_LD, "{CEE04106-B9AA-46B4-9CD1-CD3FDCF0CE78}": Weapons.MBD3_with_3_x_FAB_100___100kg_GP_Bombs_LD, "{D109EE9C-A1B7-4F1C-8D87-631C293A1D26}": Weapons.MBD3_with_3_x_FAB_250___250kg_GP_Bombs_LD, "{A1E85991-B58E-4E92-AE91-DED6DC85B2E7}": Weapons.MBD3_with_3_x_FAB_500_M_62___500kg_GP_Bombs_LD, "{EAD9B2C1-F3BA-4A7B-A2A5-84E2AF8A1975}": Weapons.MBD3_with_3_x_RBK_250___42_x_PTAB_2_5M__250kg_CBUs_Medium_HEAT_AP, "{919CE839-9390-4629-BAF7-229DE19B8523}": Weapons.MBD3_with_3_x_RBK_500_255___30_x_PTAB_10_5__500kg_CBUs_Heavy_HEAT_AP, "{574EDEDF-20DE-4942-B2A2-B2EDFD621562}": Weapons.MER12_with_12_x_M117___750lb_GP_Bombs_LD, "{585D626E-7F42-4073-AB70-41E728C333E2}": Weapons.MER12_with_12_x_Mk_82___500lb_GP_Bombs_LD, "{0B9ABA77-93B8-45FC-9C63-82AFB2CB50A4}": Weapons.MER2_with_2_x_Mk_20_Rockeye___490lbs_CBUs__247_x_HEAT_Bomblets, "{D5D51E24-348C-4702-96AF-97A714E72697}": Weapons.MER2_with_2_x_Mk_82___500lb_GP_Bombs_LD, "{18617C93-78E7-4359-A8CE-D754103EDF63}": Weapons.MER2_with_2_x_Mk_83___1000lb_GP_Bombs_LD, "{82F90BEC-0E2E-4CE5-A66E-1E4ADA2B5D1E}": Weapons.MER3_with_3_x_M117___750lb_GP_Bombs_LD, "{752B9782-F962-11d5-9190-00A0249B6F00}": 
Weapons.MER6_with_6_x_BLU_107___440lb_Anti_Runway_Penetrator_Bombs, "{6CDB6B36-7165-47D0-889F-6625FB333561}": Weapons.MER6_with_6_x_M117___750lb_GP_Bombs_LD, "{3C7CD675-7D39-41C5-8735-0F4F537818A8}": Weapons.MER6_with_6_x_Mk_20_Rockeye___490lbs_CBUs__247_x_HEAT_Bomblets, "{1C97B4A0-AA3B-43A8-8EE7-D11071457185}": Weapons.MER6_with_6_x_Mk_82___500lb_GP_Bombs_LD, "{B1EF6B0E-3D91-4047-A7A5-A99E7D8B4A8B}": Weapons.Mercury_LLTV_Pod, "{0DA03783-61E4-40B2-8FAE-6AEE0A5C5AAE}": Weapons.MICA_IR, "{6D778860-7BB8-4ACB-9E95-BA772C6BBC2C}": Weapons.MICA_RF, "MIM_104": Weapons.MIM_104, "MIM_72": Weapons.MIM_72, "{MBDA_MistralG}": Weapons.Mistral, "{MBDA_MistralD}": Weapons.Mistral_, "MK_82*28": Weapons.MK_82_28, "{BRU-32 MK-20}": Weapons.Mk_20, "{ACADB374-6D6C-45A0-BA7C-B22B2E108AE4}": Weapons.Mk_20_18, "{ADD3FAE1-EBF6-4EF9-8EFC-B36B5DDF1E6B}": Weapons.Mk_20_Rockeye___490lbs_CBU__247_x_HEAT_Bomblets, "{90321C8E-7ED1-47D4-A160-E074D5ABD902}": Weapons.Mk_81___250lb_GP_Bomb_LD, "{BRU-32 MK-82}": Weapons.Mk_82, "{BRU-32 MK-82AIR}": Weapons.Mk_82AIR, "{Mk_82Y}": Weapons.Mk_82Y___500lb_GP_Chute_Retarded_HD, "{Mk82AIR}": Weapons.Mk_82_AIR_Ballute___500lb_GP_Bomb_HD, "{BRU-32 MK-82SE}": Weapons.Mk_82_SnakeEye, "{Mk82SNAKEYE}": Weapons.Mk_82_Snakeye___500lb_GP_Bomb_HD, "{BCE4E030-38E9-423E-98ED-24BE3DA87C32}": Weapons.Mk_82___500lb_GP_Bomb_LD, "{BRU-32 MK-83}": Weapons.Mk_83, "{Mk_83CT}": Weapons.Mk_83CT, "{BRU42_MK83 RS}": Weapons.Mk_83_, "{BRU3242_MK83 RS}": Weapons.Mk_83__, "{PHXBRU3242_MK83 RS}": Weapons.Mk_83___, "{7A44FF09-527C-4B7E-B42B-3F111CFE50FB}": Weapons.Mk_83___1000lb_GP_Bomb_LD, "{BRU42_MK83 LS}": Weapons.Mk_83____, "{BRU3242_MK83 LS}": Weapons.Mk_83_____, "{PHXBRU3242_MK83 LS}": Weapons.Mk_83______, "{BRU-32 MK-84}": Weapons.Mk_84, "{F092B80C-BB54-477E-9408-66DEEF740008}": Weapons.Mk_84_18, "{D3ABF208-FA56-4D56-BB31-E0D931D57AE3}": Weapons.Mk_84_28, "{AB8B8299-F1CC-4359-89B5-2172E0CF4A5A}": Weapons.Mk_84___2000lb_GP_Bomb_LD, "{44EE8698-89F9-48EE-AF36-5FD31896A82D}": 
Weapons.MPS_410, "{44EE8698-89F9-48EE-AF36-5FD31896A82C}": Weapons.MPS_410_, "MXU-648-TP": Weapons.MXU_648_TP, "{ODAB-500PM}": Weapons.ODAB_500PM___525_kg__bomb__parachute__simulated_aerosol, "{OFAB-100-120-TU}": Weapons.OFAB_100_120_TU_x_4, "{OFAB_100_Jupiter}": Weapons.OFAB_100_Jupiter___100kg_GP_Bomb_LD, "{ORO57K_S5M1_HEFRAG}": Weapons.ORO_57K___S_5M1_HE_FRAG_FFAR_x_8, "{ORO57K_S5MO_HEFRAG}": Weapons.ORO_57K___S_5MO_HE_FRAG_FFAR_x_8, "{ORO57K_S5M_HEFRAG}": Weapons.ORO_57K___S_5M_x_8, "oh-58-brauning": Weapons.oh_58_brauning, "{199D6D51-1764-497E-9AE5-7D07C8D4D87E}": Weapons.Pavetack_F_111, "PKT_7_62": Weapons.PKT_7_62, "{PK-3}": Weapons.PK_3___7_62mm_GPMG, "PTB300_MIG15": Weapons.PTB300_MIG15, "PTB400_MIG15": Weapons.PTB400_MIG15, "PTB400_MIG19": Weapons.PTB400_MIG19, "PTB600_MIG15": Weapons.PTB600_MIG15, "PTB760_MIG19": Weapons.PTB760_MIG19, "{P-50T}": Weapons.P_50T___50kg_Practice_Bomb_LD, "{RBK_250_275_AO_1SCH}": Weapons.RBK_250_275___150_x_AO_1SCh__250kg_CBU_HE_Frag, "{4203753F-8198-4E85-9924-6F8FF679F9FF}": Weapons.RBK_250___42_x_PTAB_2_5M__250kg_CBU_Medium_HEAT_AP, "{RBK_500U_OAB_2_5RT}": Weapons.RBK_500U___126_x_OAB_2_5RT__500kg_CBU_HE_Frag, "{D5435F26-F120-4FA3-9867-34ACE562EF1B}": Weapons.RBK_500_255___30_x_PTAB_10_5__500kg_CBU_Heavy_HEAT_AP, "{7AEC222D-C523-425e-B714-719C0D1EB14D}": Weapons.RBK_500___268_x_PTAB_1M__500kg_CBU_Light_HEAT_AP, "{Rb04AI}": Weapons.RB_04E__for_A_I___with_launcher, "{Rb15AI}": Weapons.RB_15F__for_A_I___with_launcher, "{Rb04}": Weapons.Rb_04E_Anti_ship_Missile, "{Robot05}": Weapons.Rb_05A_MCLOS_ASM_AShM_AAM, "{Rb15}": Weapons.Rb_15F_Programmable_Anti_ship_Missile, "{Robot24J}": Weapons.Rb_24J__AIM_9P__Sidewinder_IR_AAM, "{Robot24}": Weapons.Rb_24__AIM_9B__Sidewinder_IR_AAM, "{Robot74}": Weapons.Rb_74__AIM_9L__Sidewinder_IR_AAM, "{RB75}": Weapons.Rb_75A__AGM_65A_Maverick___TV_ASM_, "{RB75B}": Weapons.Rb_75B__AGM_65B_Maverick___TV_ASM_, "{RB75T}": Weapons.Rb_75T__AGM_65A_Maverick___TV_ASM_Lg_HE_Whd_, "REFLEX_9M119": 
Weapons.REFLEX_9M119, "{RKL609_L}": Weapons.RKL609_ECM_Pod__Left_, "{RKL609_R}": Weapons.RKL609_ECM_Pod__Right_, "{RN-24}": Weapons.RN_24___470kg__nuclear_bomb__free_fall, "{RN-28}": Weapons.RN_28___260_kg__nuclear_bomb__free_fall, "ROLAND": Weapons.ROLAND, "{M2KC_RPL_522}": Weapons.RPL_522_1300_liters_Fuel_Tank, "{M2KC_RPL_522_EMPTY}": Weapons.RPL_522_1300_liters_Fuel_Tank__Empty_, "{M2KC_02_RPL541}": Weapons.RPL_541_2000_liters_Fuel_Tank_, "{M2KC_08_RPL541}": Weapons.RPL_541_2000_liters_Fuel_Tank__, "{M2KC_02_RPL541_EMPTY}": Weapons.RPL_541_2000_liters_Fuel_Tank__Empty_, "{M2KC_08_RPL541_EMPTY}": Weapons.RPL_541_2000_liters_Fuel_Tank__Empty__, "{British_AP_25LBNo1_3INCHNo1}": Weapons.RP_3_25lb_AP_Mk_I, "{British_HE_60LBFNo1_3INCHNo1}": Weapons.RP_3_60lb_F_No1_Mk_I, "{British_HE_60LBSAPNo2_3INCHNo1}": Weapons.RP_3_60lb_SAP_No2_Mk_I, "{RS-2US}": Weapons.RS2US___AAM__beam_rider, "{R-13M1}": Weapons.R_13M1___AAM__IR_guided, "{R-13M}": Weapons.R_13M___AAM__IR_guided, "{CCF898C9-5BC7-49A4-9D1E-C3ED3D5166A1}": Weapons.R_24R__AA_7_Apex_SA____Semi_Act_Rdr, "{6980735A-44CC-4BB9-A1B5-591532F1DC69}": Weapons.R_24T__AA_7_Apex_IR____Infra_Red, "{E8069896-8435-4B90-95C0-01A03AE6E400}": Weapons.R_27ER__AA_10_Alamo_C____Semi_Act_Extended_Range, "{B79C379A-9E87-4E50-A1EE-7F7E29C2E87A}": Weapons.R_27ET__AA_10_Alamo_D____IR_Extended_Range, "{9B25D316-0434-4954-868F-D51DB1A38DF0}": Weapons.R_27R__AA_10_Alamo_A____Semi_Act_Rdr, "{88DAC840-9F75-4531-8689-B46E64E42E53}": Weapons.R_27T__AA_10_Alamo_B____Infra_Red, "{F1243568-8EF0-49D4-9CB5-4DA90D92BC1D}": Weapons.R_33__AA_9_Amos____Semi_Act_Rdr, "{R-3R}": Weapons.R_3R___AAM__radar_guided, "{R-3S}": Weapons.R_3S___AAM__IR_guided, "{4EDBA993-2E34-444C-95FB-549300BF7CAF}": Weapons.R_40R__AA_6_Acrid____Semi_Act_Rdr, "{5F26DBC2-FB43-4153-92DE-6BBCE26CB0FF}": Weapons.R_40T__AA_6_Acrid____Infra_Red, "{FC23864E-3B80-48E3-9C03-4DA8B1D7497B}": Weapons.R_550_Magic_2, "{R-55}": Weapons.R_55___AAM__IR_guided, "{R-60}": Weapons.R_60, "{R-60M}": 
Weapons.R_60M, "{R-60M 2L}": Weapons.R_60M_x_2, "{R-60M 2R}": Weapons.R_60M_x_2_, "{682A481F-0CB5-4693-A382-D00DD4A156D7}": Weapons.R_60M__AA_8_Aphid____Infra_Red, "{R-60 2L}": Weapons.R_60_x_2, "{R-60 2R}": Weapons.R_60_x_2_, "{FBC29BFE-3D24-4C64-B81D-941239D12249}": Weapons.R_73__AA_11_Archer____Infra_Red, "{CBC29BFE-3D24-4C64-B81D-941239D12249}": Weapons.R_73__AA_11_Archer____Infra_Red_, "{B4C01D60-A8A3-4237-BD72-CA7655BC0FE9}": Weapons.R_77__AA_12_Adder____Active_Rdr, "{B4C01D60-A8A3-4237-BD72-CA7655BC0FEC}": Weapons.R_77__AA_12_Adder____Active_Rdr_, "{0511E528-EA28-4caf-A212-00D1408DF10A}": Weapons.SAB_100___100kg_flare_illumination_Bomb, "{FAS}": Weapons.Sand_Filter, "{SC_250_T1_L2}": Weapons.SC_250_Type_1_L2___250kg_GP_Bomb_LD, "{Schloss500XIIC1_SC_250_T3_J}": Weapons.SC_250_Type_3_J___250kg_GP_Bomb_LD, "{SC_500_L2}": Weapons.SC_500_L2___500kg_GP_Bomb_LD, "SC_501_SC250": Weapons.SC_501_SC250, "SC_501_SC500": Weapons.SC_501_SC500, "{SC_50}": Weapons.SC_50___50kg_GP_Bomb_LD, "{SD_250_Stg}": Weapons.SD_250_Stg___250kg_GP_Bomb_LD, "{SD_500_A}": Weapons.SD_500_A___500kg_GP_Bomb_LD, "SEASPARROW": Weapons.SEASPARROW, "{1461CD18-429A-42A9-A21F-4C621ECD4573}": Weapons.Sea_Eagle___ASM, "{0519A263-0AB6-11d6-9193-00A0249B6F00}": Weapons.Shpil_2_Laser_Recon__Intel_Pod, "{8C3F26A2-FA0F-11d5-9190-00A0249B6F00}": Weapons.Sky_Shadow_ECM_Pod, "SM2": Weapons.SM2, "{A4BCC903-06C8-47bb-9937-A30FEDB4E743}": Weapons.Smokewinder___blue, "{A4BCC903-06C8-47bb-9937-A30FEDB4E742}": Weapons.Smokewinder___green, "{A4BCC903-06C8-47bb-9937-A30FEDB4E746}": Weapons.Smokewinder___orange, "{A4BCC903-06C8-47bb-9937-A30FEDB4E741}": Weapons.Smokewinder___red, "{A4BCC903-06C8-47bb-9937-A30FEDB4E744}": Weapons.Smokewinder___white, "{A4BCC903-06C8-47bb-9937-A30FEDB4E745}": Weapons.Smokewinder___yellow, "{CE2_SMOKE_WHITE}": Weapons.Smoke_for_Christen_Eagle_II__white, "{D3F65166-1AB8-490f-AF2F-2FB6E22568B3}": Weapons.Smoke_Generator___blue, "{INV-SMOKE-BLUE}": Weapons.Smoke_Generator___blue_, 
"{D3F65166-1AB8-490f-AF2F-2FB6E22568B2}": Weapons.Smoke_Generator___green, "{INV-SMOKE-GREEN}": Weapons.Smoke_Generator___green_, "{D3F65166-1AB8-490f-AF2F-2FB6E22568B6}": Weapons.Smoke_Generator___orange, "{INV-SMOKE-ORANGE}": Weapons.Smoke_Generator___orange_, "{D3F65166-1AB8-490f-AF2F-2FB6E22568B1}": Weapons.Smoke_Generator___red, "{INV-SMOKE-RED}": Weapons.Smoke_Generator___red_, "{D3F65166-1AB8-490f-AF2F-2FB6E22568B4}": Weapons.Smoke_Generator___white, "{INV-SMOKE-WHITE}": Weapons.Smoke_Generator___white_, "{D3F65166-1AB8-490f-AF2F-2FB6E22568B5}": Weapons.Smoke_Generator___yellow, "{INV-SMOKE-YELLOW}": Weapons.Smoke_Generator___yellow_, "{SMOKE-RED-AVIOJET}": Weapons.Smoke_System_red_colorant, "{SMOKE-YELLOW-AVIOJET}": Weapons.Smoke_System_yellow_colorant, "{SMOKE-SYSTEM-AVIOJET}": Weapons.Smoke_System__White_Smoke_, "{MIG21_SMOKE_RED}": Weapons.Smoke___red___21__t, "{SMOKE_WHITE}": Weapons.Smoke___white___21, "{MIG21_SMOKE_WHITE}": Weapons.Smoke___white___21_, "SPITFIRE_45GAL_SLIPPER_TANK": Weapons.SPITFIRE_45GAL_SLIPPER_TANK, "SPITFIRE_45GAL_TORPEDO_TANK": Weapons.SPITFIRE_45GAL_TORPEDO_TANK, "{E92CBFE5-C153-11d8-9897-000476191836}": Weapons.SPPU_22_1___2_x_23mm__GSh_23L_Autocannon_Pod, "{SPRD}": Weapons.SPRD_99_takeoff_rocket, "{SPS-141-100}": Weapons.SPS_141_100__21____jamming_and_countermeasures_pod, "{F75187EF-1D9E-4DA9-84B4-1A1A14A3973A}": Weapons.SPS_141___ECM_Jamming_Pod, "{CAE48299-A294-4bad-8EE6-89EFC5DCDF00}": Weapons.SUU_25_x_8_LUU_2___Target_Marker_Flares, "{BRU42_SUU25}": Weapons.SUU_25___8_LUU_2, "{BRU3242_SUU25}": Weapons.SUU_25___8_LUU_2_, "{FD21B13E-57F3-4C2A-9F78-C522D0B5BCE1}": Weapons.Super_530D, "SVIR_9M119": Weapons.SVIR_9M119, "{S-24A}": Weapons.S_24A__21____180_kg__cumulative_unguided_rocket, "{S-24B}": Weapons.S_24B__21____180_kg__fragmented_unguided_rocket, "{1FA14DEA-8CDB-45AD-88A8-EC068DF1E65A}": Weapons.S_24B___240mm_UnGd_Rkt__235kg__HE_Frag___Low_Smk_, "{3858707D-F5D5-4bbb-BDD8-ABB0530EBC7C}": 
Weapons.S_24B___240mm_UnGd_Rkt__235kg__HE_Frag___Low_Smk__, "{0180F983-C14A-11d8-9897-000476191836}": Weapons.S_25L___320Kg__340mm_Laser_Guided_Rkt, "{A0648264-4BC0-4EE8-A543-D119F6BA4257}": Weapons.S_25_OFM___340mm_UnGd_Rkt__480kg_Penetrator, "{S_25_O}": Weapons.S_25_O___420mm_UnGd_Rkt__380kg_Frag, "{0519A262-0AB6-11d6-9193-00A0249B6F00}": Weapons.Tangazh_ELINT_pod, "{TER_9A_2L*CBU-87}": Weapons.TER_9A_with_2_x_CBU_87___202_x_CEM_Cluster_Bomb, "{TER_9A_2R*CBU-87}": Weapons.TER_9A_with_2_x_CBU_87___202_x_CEM_Cluster_Bomb_, "{TER_9A_2L*CBU-97}": Weapons.TER_9A_with_2_x_CBU_97___10_x_SFW_Cluster_Bomb, "{TER_9A_2R*CBU-97}": Weapons.TER_9A_with_2_x_CBU_97___10_x_SFW_Cluster_Bomb_, "{TER_9A_2L*GBU-12}": Weapons.TER_9A_with_2_x_GBU_12___500lb_Laser_Guided_Bomb, "{TER_9A_2R*GBU-12}": Weapons.TER_9A_with_2_x_GBU_12___500lb_Laser_Guided_Bomb_, "{TER_9A_2L*MK-82AIR}": Weapons.TER_9A_with_2_x_Mk_82_AIR_Ballute___500lb_GP_Bomb_HD, "{TER_9A_2R*MK-82AIR}": Weapons.TER_9A_with_2_x_Mk_82_AIR_Ballute___500lb_GP_Bomb_HD_, "{TER_9A_2L*MK-82_Snakeye}": Weapons.TER_9A_with_2_x_Mk_82_Snakeye___500lb_GP_Bomb_HD, "{TER_9A_2R*MK-82_Snakeye}": Weapons.TER_9A_with_2_x_Mk_82_Snakeye___500lb_GP_Bomb_HD_, "{TER_9A_2L*MK-82}": Weapons.TER_9A_with_2_x_Mk_82___500lb_GP_Bomb_LD, "{TER_9A_2R*MK-82}": Weapons.TER_9A_with_2_x_Mk_82___500lb_GP_Bomb_LD_, "{TER_9A_3*BDU-33}": Weapons.TER_9A_with_3_x_BDU_33___25lb_Practice_Bomb_LD, "{TER_9A_3*CBU-87}": Weapons.TER_9A_with_3_x_CBU_87___202_x_CEM_Cluster_Bomb, "{TER_9A_3*CBU-97}": Weapons.TER_9A_with_3_x_CBU_97___10_x_SFW_Cluster_Bomb, "{TER_9A_3*MK-82AIR}": Weapons.TER_9A_with_3_x_Mk_82_AIR_Ballute___500lb_GP_Bomb_HD, "{TER_9A_3*MK-82_Snakeye}": Weapons.TER_9A_with_3_x_Mk_82_Snakeye___500lb_GP_Bomb_HD, "{TER_9A_3*MK-82}": Weapons.TER_9A_with_3_x_Mk_82___500lb_GP_Bomb_LD, "TEST_ROTARY_LAUNCHER_MK82": Weapons.TEST_ROTARY_LAUNCHER_MK82, "TGM_65H": Weapons.TGM_65H, "{EF124821-F9BB-4314-A153-E0E2FE1162C4}": Weapons.TORNADO_Fuel_tank, "TOW": Weapons.TOW, 
"{U22A}": Weapons.U22_A_Jammer, "{UB-16-57UMP}": Weapons.UB_16UM_pod___16_x_S_5KO__57mm_UnGd_Rkts__HEAT_Frag, "{UB-16_S5M}": Weapons.UB_16UM___16_S_5M, "{UB-32A-24}": Weapons.UB_32A_24_pod___32_x_S_5KO, "{637334E4-AB5A-47C0-83A6-51B7F1DF3CD5}": Weapons.UB_32A_pod___32_x_S_5KO__57mm_UnGd_Rkts__HEAT_Frag, "{UB-32_S5M}": Weapons.UB_32M___32_S_5M, "{05544F1A-C39C-466b-BC37-5BD1D52E57BB}": Weapons.UPK_23_250___2_x_23mm__GSh_23L_Autocannon_Pod, "{UPK-23-250 MiG-21}": Weapons.UPK_23_250___gun_pod, "{U22}": Weapons.U_22_Jammer_pod, "{WGr21}": Weapons.Werfer_Granate_21___21_cm_UnGd_air_to_air_rocket, "XM158_M151": Weapons.XM158_M151, "XM158_M156": Weapons.XM158_M156, "XM158_M257": Weapons.XM158_M257, "XM158_M274": Weapons.XM158_M274, "XM158_MK1": Weapons.XM158_MK1, "XM158_MK5": Weapons.XM158_MK5, "{MOSQUITO_100GAL_SLIPPER_TANK}": Weapons._100_gal__Drop_Tank, "{US_108GAL_PAPER_FUEL_TANK}": Weapons._108_US_gal__Paper_Fuel_Tank, "{US_110GAL_FUEL_TANK}": Weapons._110_US_gal__Fuel_Tank, "{12xM64}": Weapons._12_AN_M64___500lb_GP_Bomb_LD, "{D6A0441E-6794-4FEB-87F7-E68E2290DFAB}": Weapons._12_x_BetAB_500___500kg_Concrete_Piercing_Bombs_LD, "{E70446B7-C7E6-4B95-B685-DEA10CAD1A0E}": Weapons._12_x_FAB_500_M_62___500kg_GP_Bombs_LD, "{FW_190_R4M_LEFT_WING}": Weapons._13_R4M_3_2kg_UnGd_air_to_air_rocket, "{FW_190_R4M_RGHT_WING}": Weapons._13_R4M_3_2kg_UnGd_air_to_air_rocket_, "{US_150GAL_FUEL_TANK}": Weapons._150_US_gal__Fuel_Tank, "{22906569-A97F-404B-BA4F-D96DBF94D05E}": Weapons._20_x_AGM_86C_ALCM, "{B0241BD2-5628-47E0-954C-A8675B7E698E}": Weapons._24_x_FAB_250___250kg_GP_Bombs_LD, "{British_GP_250LB_Bomb_Mk1}": Weapons._250_lb_GP_Mk_I, "{British_GP_250LB_Bomb_Mk4}": Weapons._250_lb_GP_Mk_IV, "{British_GP_250LB_Bomb_Mk4_on_Handley_Page_Type_B_Cut_Bar}": Weapons._250_lb_GP_Mk_IV_, "{British_GP_250LB_Bomb_Mk5}": Weapons._250_lb_GP_Mk_V, "{British_GP_250LB_Bomb_Mk5_on_Handley_Page_Type_B_Cut_Bar}": Weapons._250_lb_GP_Mk_V_, "{British_MC_250LB_Bomb_Mk1}": Weapons._250_lb_MC_Mk_I, 
"{British_MC_250LB_Bomb_Mk2}": Weapons._250_lb_MC_Mk_II, "{British_MC_250LB_Bomb_Mk2_on_Handley_Page_Type_B_Cut_Bar}": Weapons._250_lb_MC_Mk_II_, "{British_MC_250LB_Bomb_Mk1_on_Handley_Page_Type_B_Cut_Bar}": Weapons._250_lb_MC_Mk_I_, "{British_SAP_250LB_Bomb_Mk5}": Weapons._250_lb_S_A_P_, "{British_SAP_250LB_Bomb_Mk5_on_Handley_Page_Type_B_Cut_Bar}": Weapons._250_lb_S_A_P__, "{B58F99BA-5480-4572-8602-28B0449F5260}": Weapons._27_x_M117___750lb_GP_Bombs_LD, "{6C47D097-83FF-4FB2-9496-EAB36DDF0B05}": Weapons._27_x_Mk_82___500lb_GP_Bombs_LD, "{89D000B0-0360-461A-AD83-FB727E2ABA98}": Weapons._2xGBU_12___500lb_Laser_Guided_Bomb, "{BRU-42_2xGBU-12_right}": Weapons._2xGBU_12___500lb_Laser_Guided_Bomb_, "{LYSBOMB}": Weapons._2x_80kg_LYSB_71_Illumination_Bomb, "{BRU42_2*BDU45 RS}": Weapons._2_BDU_45, "{BRU42_2*BDU45B RS}": Weapons._2_BDU_45B, "{BRU3242_2*BDU45B RS}": Weapons._2_BDU_45B_, "{PHXBRU3242_2*BDU45B RS}": Weapons._2_BDU_45B__, "{BRU42_2*BDU45B LS}": Weapons._2_BDU_45B___, "{BRU3242_2*BDU45B LS}": Weapons._2_BDU_45B____, "{PHXBRU3242_2*BDU45B LS}": Weapons._2_BDU_45B_____, "{BRU3242_2*BDU45 RS}": Weapons._2_BDU_45_, "{PHXBRU3242_2*BDU45 RS}": Weapons._2_BDU_45__, "{BRU42_2*BDU45 LS}": Weapons._2_BDU_45___, "{BRU3242_2*BDU45 LS}": Weapons._2_BDU_45____, "{PHXBRU3242_2*BDU45 LS}": Weapons._2_BDU_45_____, "{BRU-70_2*CBU-99_LEFT}": Weapons._2_CBU_99, "{BRU-70_2*CBU-99_RIGHT}": Weapons._2_CBU_99_, "{BRU-42_2*GBU-12_LEFT}": Weapons._2_GBU_12, "{BRU-42_2*GBU-12_RIGHT}": Weapons._2_GBU_12_, "{BRU-42_2*GBU-16_LEFT}": Weapons._2_GBU_16, "{BRU-42_2*GBU-16_RIGHT}": Weapons._2_GBU_16_, "{BRU-42_2*GBU-38_LEFT}": Weapons._2_GBU_38, "{BRU-42_2*GBU-38_RIGHT}": Weapons._2_GBU_38_, "{BRU-70A_2*GBU-54_LEFT}": Weapons._2_GBU_54_V_1_B, "{BRU-70A_2*GBU-54_RIGHT}": Weapons._2_GBU_54_V_1_B_, "{BRU42_2*LAU10 L}": Weapons._2_LAU_10___4_ZUNI_MK_71, "{BRU3242_2*LAU10 L}": Weapons._2_LAU_10___4_ZUNI_MK_71_, "{BRU42_2*LAU10 R}": Weapons._2_LAU_10___4_ZUNI_MK_71__, "{BRU3242_2*LAU10 R}": 
Weapons._2_LAU_10___4_ZUNI_MK_71___, "{BRU42_2*LAU10 RS}": Weapons._2_LAU_10___4_ZUNI_MK_71____, "{BRU3242_2*LAU10 RS}": Weapons._2_LAU_10___4_ZUNI_MK_71_____, "{PHXBRU3242_2*LAU10 RS}": Weapons._2_LAU_10___4_ZUNI_MK_71______, "{BRU42_2*LAU10 LS}": Weapons._2_LAU_10___4_ZUNI_MK_71_______, "{BRU3242_2*LAU10 LS}": Weapons._2_LAU_10___4_ZUNI_MK_71________, "{PHXBRU3242_2*LAU10 LS}": Weapons._2_LAU_10___4_ZUNI_MK_71_________, "{BRU42_2*LUU2 R}": Weapons._2_LUU_2, "{BRU3242_2*LUU2 R}": Weapons._2_LUU_2_, "{BRU42_2*LUU2 L}": Weapons._2_LUU_2__, "{BRU3242_2*LUU2 L}": Weapons._2_LUU_2___, "{BRU42_2*MK20 RS}": Weapons._2_MK_20, "{BRU3242_2*MK20 RS}": Weapons._2_MK_20_, "{PHXBRU3242_2*MK20 RS}": Weapons._2_MK_20__, "{BRU42_2*MK20 LS}": Weapons._2_MK_20___, "{BRU3242_2*MK20 LS}": Weapons._2_MK_20____, "{PHXBRU3242_2*MK20 LS}": Weapons._2_MK_20_____, "{BRU-42_2*MK-20_LEFT}": Weapons._2_Mk_20_Rockeye, "{BRU-42_2*MK-20_RIGHT}": Weapons._2_Mk_20_Rockeye_, "{BRU42_2*MK81 RS}": Weapons._2_Mk_81, "{BRU3242_2*MK81 RS}": Weapons._2_Mk_81_, "{PHXBRU3242_2*MK81 RS}": Weapons._2_Mk_81__, "{BRU42_2*MK81 LS}": Weapons._2_Mk_81___, "{BRU3242_2*MK81 LS}": Weapons._2_Mk_81____, "{PHXBRU3242_2*MK81 LS}": Weapons._2_Mk_81_____, "{BRU-42_2*Mk-82_LEFT}": Weapons._2_Mk_82, "{BRU42_2*MK82AIR RS}": Weapons._2_Mk_82AIR, "{BRU3242_2*MK82AIR RS}": Weapons._2_Mk_82AIR_, "{PHXBRU3242_2*MK82AIR RS}": Weapons._2_Mk_82AIR__, "{BRU42_2*MK82AIR LS}": Weapons._2_Mk_82AIR___, "{BRU3242_2*MK82AIR LS}": Weapons._2_Mk_82AIR____, "{PHXBRU3242_2*MK82AIR LS}": Weapons._2_Mk_82AIR_____, "{BRU-42_2*Mk-82_RIGHT}": Weapons._2_Mk_82_, "{BRU-42_2*Mk-82AIR_LEFT}": Weapons._2_Mk_82_AIR, "{BRU-42_2*Mk-82AIR_RIGHT}": Weapons._2_Mk_82_AIR_, "{BRU42_2*MK82SE RS}": Weapons._2_Mk_82_SnakeEye, "{BRU3242_2*MK82SE RS}": Weapons._2_Mk_82_SnakeEye_, "{PHXBRU3242_2*MK82SE RS}": Weapons._2_Mk_82_SnakeEye__, "{BRU42_2*MK82SE LS}": Weapons._2_Mk_82_SnakeEye___, "{BRU3242_2*MK82SE LS}": Weapons._2_Mk_82_SnakeEye____, "{PHXBRU3242_2*MK82SE 
LS}": Weapons._2_Mk_82_SnakeEye_____, "{BRU-42_2*Mk-82SNAKEYE_LEFT}": Weapons._2_Mk_82_Snakeye, "{BRU-42_2*Mk-82SNAKEYE_RIGHT}": Weapons._2_Mk_82_Snakeye_, "{BRU42_2*MK82 RS}": Weapons._2_Mk_82__, "{BRU3242_2*MK82 RS}": Weapons._2_Mk_82___, "{PHXBRU3242_2*MK82 RS}": Weapons._2_Mk_82____, "{BRU42_2*MK82 LS}": Weapons._2_Mk_82_____, "{BRU3242_2*MK82 LS}": Weapons._2_Mk_82______, "{PHXBRU3242_2*MK82 LS}": Weapons._2_Mk_82_______, "{BRU-42_2*Mk-83_LEFT}": Weapons._2_Mk_83, "{BRU-42_2*Mk-83_RIGHT}": Weapons._2_Mk_83_, "{BRU42_2*SUU25 L}": Weapons._2_SUU_25___8_LUU_2, "{BRU3242_2*SUU25 L}": Weapons._2_SUU_25___8_LUU_2_, "{BRU42_2*SUU25 R}": Weapons._2_SUU_25___8_LUU_2__, "{BRU3242_2*SUU25 R}": Weapons._2_SUU_25___8_LUU_2___, "{2x9M120F_Ataka_V}": Weapons._2_x_9M120F_Ataka__AT_9_Spiral_2____AGM__SACLOS__HE, "{2x9M120_Ataka_V}": Weapons._2_x_9M120_Ataka__AT_9_Spiral_2____ATGM__SACLOS__Tandem_HEAT, "{2x9M220_Ataka_V}": Weapons._2_x_9M220O_Ataka__AT_9_Spiral_2____AAM__SACLOS__Frag, "{07BE2D19-0E48-4B0B-91DA-5F6C8F9E3C75}": Weapons._2_x_ALARM, "{C535596E-F7D2-4301-8BB4-B1658BB87ED7}": Weapons._2_x_BL_755_CBUs___450kg__147_Frag_Pen_bomblets, "{TWIN_B13L_5OF}": Weapons._2_x_B_13L_pods___10_x_S_13_OF__122mm_UnGd_Rkts__Blast_Frag, "{B13_5_S13OF_DUAL_L}": Weapons._2_x_B_13L___5_S_13_OF, "{B13_5_S13OF_DUAL_R}": Weapons._2_x_B_13L___5_S_13_OF_, "{TWIN_B_8M1_S_8KOM}": Weapons._2_x_B_8M1_pods___40_x_S_8KOM__80mm_UnGd_Rkts__HEAT_AP, "{B8M1_20_S8KOM_DUAL_L}": Weapons._2_x_B_8M1___20_S_8KOM, "{B8M1_20_S8KOM_DUAL_R}": Weapons._2_x_B_8M1___20_S_8KOM_, "{B8M1_20_S8OFP2_DUAL_L}": Weapons._2_x_B_8M1___20_S_8OFP2, "{B8M1_20_S8OFP2_DUAL_R}": Weapons._2_x_B_8M1___20_S_8OFP2_, "{B8M1_20_S8TsM_DUAL_L}": Weapons._2_x_B_8M1___20_S_8TsM, "{B8M1_20_S8TsM_DUAL_R}": Weapons._2_x_B_8M1___20_S_8TsM_, "{TWIN_B_8M1_S_8_OFP2}": Weapons._2_x_B_8V20A_pods___40_x_S_8OFP2__80mm_UnGd_Rkts__HE_Frag_AP, "{TWIN_B_8M1_S_8TsM}": Weapons._2_x_B_8V20A_pods___40_x_S_8TsM__80mm_UnGd_Rkts__Smk, "{FAB_250_DUAL_L}": 
Weapons._2_x_FAB_250, "{FAB_250_DUAL_R}": Weapons._2_x_FAB_250_, "{FAB_500_DUAL_L}": Weapons._2_x_FAB_500, "{FAB_500_DUAL_R}": Weapons._2_x_FAB_500_, "{HVARx2}": Weapons._2_x_HVAR__UnGd_Rkts, "{FAB-100x2}": Weapons._2_x_OFAB_100_Jupiter___100kg_GP_Bombs_LD, "{RBK_250_PTAB25M_DUAL_L}": Weapons._2_x_RBK_250_PTAB_2_5M, "{RBK_250_PTAB25M_DUAL_R}": Weapons._2_x_RBK_250_PTAB_2_5M_, "{RBK_500_PTAB105_DUAL_L}": Weapons._2_x_RBK_500_255_PTAB_10_5, "{RBK_500_PTAB105_DUAL_R}": Weapons._2_x_RBK_500_255_PTAB_10_5_, "{MOSSIE_2_British_AP_25LBNo1_3INCHNo1_ON_LEFT_WING_RAILS}": Weapons._2_x_RP_3_25lb_AP_Mk_I, "{MOSSIE_2_British_AP_25LBNo1_3INCHNo1_ON_RIGHT_WING_RAILS}": Weapons._2_x_RP_3_25lb_AP_Mk_I_, "{MOSSIE_2_British_HE_60LBFNo1_3INCHNo1_ON_LEFT_WING_RAILS}": Weapons._2_x_RP_3_60lb_F_No1_Mk_I, "{MOSSIE_2_British_HE_60LBFNo1_3INCHNo1_ON_RIGHT_WING_RAILS}": Weapons._2_x_RP_3_60lb_F_No1_Mk_I_, "{MOSSIE_2_British_HE_60LBSAPNo2_3INCHNo1_ON_LEFT_WING_RAILS}": Weapons._2_x_RP_3_60lb_SAP_No2_Mk_I, "{MOSSIE_2_British_HE_60LBSAPNo2_3INCHNo1_ON_RIGHT_WING_RAILS}": Weapons._2_x_RP_3_60lb_SAP_No2_Mk_I_, "{S25_DUAL_L}": Weapons._2_x_S_25, "{S25_DUAL_R}": Weapons._2_x_S_25_, "{TWIN_S25}": Weapons._2_x_S_25_OFM___340mm_UnGdrocket__480kg_Penetrator, "{TWIN_S25_O}": Weapons._2_x_S_25_O___420mm_UnGd_Rkt__380kg_Frag, "{BDAD04AA-4D4A-4E51-B958-180A89F963CF}": Weapons._33_x_FAB_250___250kg_GP_Bombs_LD, "{AD5E5863-08FC-4283-B92C-162E2B2BD3FF}": Weapons._33_x_FAB_500_M_62___500kg_GP_Bombs_LD, "3M45": Weapons._3M45, "{BRU42_3*BDU33}": Weapons._3_BDU_33, "{BRU3242_3*BDU33}": Weapons._3_BDU_33_, "{BRU42_3*BDU33_N}": Weapons._3_BDU_33__, "{BRU3242_3*BDU33_N}": Weapons._3_BDU_33___, "{PHXBRU3242_BDU33}": Weapons._3_BDU_33____, "{BRU-42A_3*GBU-12}": Weapons._3_GBU_12, "{BRU-42A_3*GBU-16}": Weapons._3_GBU_16, "{BRU-42_3*GBU-38}": Weapons._3_GBU_38, "{BRU-70A_3*GBU-54}": Weapons._3_GBU_54_V_1_B, "{BRU-42_3*Mk-81LD}": Weapons._3_Mk_81, "{BRU-42_3*Mk-82LD}": Weapons._3_Mk_82, "{BRU-42_3_MK82AIR}": 
Weapons._3_Mk_82_AIR, "{BRU-42_3*Mk-82SNAKEYE}": Weapons._3_Mk_82_Snakeye, "{BRU-42_3*Mk-83}": Weapons._3_Mk_83, "{3xM8_ROCKETS_IN_TUBES}": Weapons._3_x_4_5_inch_M8_UnGd_Rocket, "{639DB5DD-CB7E-4E42-AC75-2112BC397B97}": Weapons._3_x_FAB_1500_M_54___1500kg_GP_Bombs_LD, "{A76344EB-32D2-4532-8FA2-0C1BDC00747E}": Weapons._3_x_LAU_61_pods___57_x_2_75_Hydra__UnGd_Rkts_M151__HE, "48N6E2": Weapons._48N6E2, "_4M80": Weapons._4M80, "{M71BOMBD}": Weapons._4x_SB_M_71_120kg_GP_Bomb_High_drag, "{M71BOMB}": Weapons._4x_SB_M_71_120kg_GP_Bomb_Low_drag, "{AABA1A14-78A1-4E85-94DD-463CF75BD9E4}": Weapons._4_x_AGM_154C___JSOW_Unitary_BROACH, "{4xAN-M64_on_InvCountedAttachmentPoints}": Weapons._4_x_AN_M64___500lb_GP_Bomb_LD, "{3EA17AB0-A805-4D9E-8732-4CE00CB00F17}": Weapons._4_x_BGM_71D_TOW_ATGM, "{B8C99F40-E486-4040-B547-6639172A5D57}": Weapons._4_x_GBU_27___2000lb_Laser_Guided_Penetrator_Bombs, "{MOSSIE_4_British_AP_25LBNo1_3INCHNo1_ON_LEFT_WING_RAILS}": Weapons._4_x_RP_3_25lb_AP_Mk_I, "{MOSSIE_4_British_AP_25LBNo1_3INCHNo1_ON_RIGHT_WING_RAILS}": Weapons._4_x_RP_3_25lb_AP_Mk_I_, "{MOSSIE_4_British_HE_60LBFNo1_3INCHNo1_ON_LEFT_WING_RAILS}": Weapons._4_x_RP_3_60lb_F_No1_Mk_I, "{MOSSIE_4_British_HE_60LBFNo1_3INCHNo1_ON_RIGHT_WING_RAILS}": Weapons._4_x_RP_3_60lb_F_No1_Mk_I_, "{MOSSIE_4_British_HE_60LBSAPNo2_3INCHNo1_ON_LEFT_WING_RAILS}": Weapons._4_x_RP_3_60lb_SAP_No2_Mk_I, "{MOSSIE_4_British_HE_60LBSAPNo2_3INCHNo1_ON_RIGHT_WING_RAILS}": Weapons._4_x_RP_3_60lb_SAP_No2_Mk_I_, "{British_GP_500LB_Bomb_Mk1}": Weapons._500_lb_GP_Mk_I, "{British_GP_500LB_Bomb_Mk4}": Weapons._500_lb_GP_Mk_IV, "{British_GP_500LB_Bomb_Mk5}": Weapons._500_lb_GP_Mk_V, "{British_GP_500LB_Bomb_Mk4_Short}": Weapons._500_lb_GP_Short_tail, "{British_GP_500LB_Bomb_Mk4_Short_on_Handley_Page_Type_B_Cut_Bar}": Weapons._500_lb_GP_Short_tail_, "{British_MC_500LB_Bomb_Mk2}": Weapons._500_lb_MC_Mk_II, "{British_MC_500LB_Bomb_Mk1_Short}": Weapons._500_lb_MC_Short_tail, 
"{British_MC_500LB_Bomb_Mk1_Short_on_Handley_Page_Type_B_Cut_Bar}": Weapons._500_lb_MC_Short_tail_, "{British_SAP_500LB_Bomb_Mk5}": Weapons._500_lb_S_A_P_, "{MOSQUITO_50GAL_SLIPPER_TANK}": Weapons._50_gal__Drop_Tank, "{72CAC282-AE18-490B-BD4D-35E7EE969E73}": Weapons._51_x_M117___750lb_GP_Bombs_LD, "{B84DFE16-6AC7-4854-8F6D-34137892E166}": Weapons._51_x_Mk_82___500lb_GP_Bombs_LD, "5V55": Weapons._5V55, "{P47_5_HVARS_ON_LEFT_WING_RAILS}": Weapons._5_x_HVAR__UnGd_Rkt, "{P47_5_HVARS_ON_RIGHT_WING_RAILS}": Weapons._5_x_HVAR__UnGd_Rkt_, "{MER-5E_Mk82SNAKEYEx5}": Weapons._5_x_Mk_82_Snakeye___500lb_GP_Bomb_HD, "{MER-5E_MK82x5}": Weapons._5_x_Mk_82___500lb_GP_Bombs_LD, "{45447F82-01B5-4029-A572-9AAD28AF0275}": Weapons._6_x_AGM_86C_ALCM_on_MER, "{2B7BDB38-4F45-43F9-BE02-E7B3141F3D24}": Weapons._6_x_BetAB_500___500kg_Concrete_Piercing_Bombs_LD, "{D9179118-E42F-47DE-A483-A6C2EA7B4F38}": Weapons._6_x_FAB_1500_M_54___1500kg_GP_Bombs_LD, "{26D2AF37-B0DF-4AB6-9D61-A150FF58A37B}": Weapons._6_x_FAB_500_M_62___500kg_GP_Bombs_LD, "{C42EE4C3-355C-4B83-8B22-B39430B8F4AE}": Weapons._6_x_Kh_35__AS_20_Kayak____520kg__AShM__IN__Act_Rdr, "{0290F5DE-014A-4BB1-9843-D717749B1DED}": Weapons._6_x_Kh_65__AS_15B_Kent____1250kg__ASM__IN__MCC, "{E79759F7-C622-4AA4-B1EF-37639A34D924}": Weapons._6_x_Mk_20_Rockeye___490lbs_CBUs__247_x_HEAT_Bomblets, "{027563C9-D87E-4A85-B317-597B510E3F03}": Weapons._6_x_Mk_82___500lb_GP_Bombs_LD, "{DT75GAL}": Weapons._75_US_gal__Fuel_Tank, "{46ACDCF8-5451-4E26-BDDB-E78D5830E93C}": Weapons._8_x_AGM_84A_Harpoon_ASM, "{8DCAF3A3-7FCF-41B8-BB88-58DEDA878EDE}": Weapons._8_x_AGM_86C_ALCM, "{CD9417DF-455F-4176-A5A2-8C58D61AA00B}": Weapons._8_x_Kh_65__AS_15B_Kent____1250kg__ASM__IN__MCC, "_9M111": Weapons._9M111, "{9M114 Shturm-V-2 Rack}": Weapons._9M114_Shturm_V_2_Rack, "{B919B0F4-7C25-455E-9A02-CEA51DB895E3}": Weapons._9M114_Shturm_V_2__AT_6_Spiral____ATGM__SACLOS, "{57232979-8B0F-4db7-8D9A-55197E06B0F5}": Weapons._9M114_Shturm_V_8__AT_6_Spiral____ATGM__SACLOS, "_9M117": 
Weapons._9M117, "9M133": Weapons._9M133, "9M14": Weapons._9M14, "9M31": Weapons._9M31, "9M311": Weapons._9M311, "9M33": Weapons._9M33, "_9M331": Weapons._9M331, "_9M37": Weapons._9M37, "_9M38": Weapons._9M38, "_9M39": Weapons._9M39, "{9S846_2xIGLA}": Weapons._9S846_Strelets___2_x_Igla, "_NiteHawk_FLIR": Weapons._NiteHawk_FLIR }<|fim▁end|>
"{ARAKM70BHE}": Weapons.ARAK_M_70B_HE_6x_135mm_UnGd_Rkts__Shu70_HE_FRAG,
# -*- coding: utf-8 -*-
"""Admin request handlers for user management (webapp2 / Google App Engine).

Reassembled from a corrupted dump: the ``except ValueError`` branch of
``Edit.get_or_404`` was split out of the file by infilling markers and has
been spliced back into place.
"""
import webapp2

from boilerplate import models
from boilerplate import forms
from boilerplate.handlers import BaseHandler
from google.appengine.datastore.datastore_query import Cursor
from google.appengine.ext import ndb
from google.appengine.api import users as googleusers
from collections import OrderedDict, Counter
from wtforms import fields


class Logout(BaseHandler):
    """Log the current Google user out and return to the home page."""

    def get(self):
        self.redirect(googleusers.create_logout_url(dest_url=self.uri_for('home')))


class Geochart(BaseHandler):
    """Render a per-country user count for a geochart visualization."""

    def get(self):
        # Projection query: only the `country` property is fetched.
        users = models.User.query().fetch(projection=['country'])

        users_by_country = Counter()
        for user in users:
            if user.country:
                users_by_country[user.country] += 1

        params = {
            "data": users_by_country.items()
        }
        return self.render_template('admin/geochart.html', **params)


class EditProfileForm(forms.EditProfileForm):
    """Profile form extended with an admin-only `activated` flag."""
    activated = fields.BooleanField('Activated')


class List(BaseHandler):
    """Paginated, searchable user list.

    Query-string parameters:
        q: optional search term matched against last_name, email or username.
        c: urlsafe datastore cursor for the current page.
        p: 'prev' when paging backwards; anything else pages forward.
    """

    def get(self):
        p = self.request.get('p')
        q = self.request.get('q')
        c = self.request.get('c')
        forward = p != 'prev'
        cursor = Cursor(urlsafe=c)

        if q:
            qry = models.User.query(ndb.OR(models.User.last_name == q,
                                           models.User.email == q,
                                           models.User.username == q))
        else:
            qry = models.User.query()

        PAGE_SIZE = 5
        if forward:
            users, next_cursor, more = qry.order(models.User.key).fetch_page(
                PAGE_SIZE, start_cursor=cursor)
            if next_cursor and more:
                self.view.next_cursor = next_cursor
            if c:
                self.view.prev_cursor = cursor.reversed()
        else:
            # Paging backwards: query in reverse key order, then flip the
            # page so it renders in the same order as a forward page.
            users, next_cursor, more = qry.order(-models.User.key).fetch_page(
                PAGE_SIZE, start_cursor=cursor)
            users = list(reversed(users))
            if next_cursor and more:
                self.view.prev_cursor = next_cursor
            self.view.next_cursor = cursor.reversed()

        def pager_url(p, cursor):
            """Build a user-list URL preserving search term and direction."""
            params = OrderedDict()
            if q:
                params['q'] = q
            if p in ['prev']:
                params['p'] = p
            if cursor:
                params['c'] = cursor.urlsafe()
            return self.uri_for('user-list', **params)

        self.view.pager_url = pager_url
        self.view.q = q

        params = {
            "list_columns": [('username', 'Username'),
                             ('last_name', 'Last Name'),
                             ('email', 'E-Mail'),
                             ('country', 'Country')],
            "users": users,
            "count": qry.count()
        }
        # FIXME: admin_user should probably go into BaseHandler
        params['admin_user'] = googleusers.is_current_user_admin()
        return self.render_template('admin/users.html', **params)


class Edit(BaseHandler):
    """Edit a single user identified by its numeric datastore id."""

    def get_or_404(self, user_id):
        """Return the User entity for `user_id`, or abort with HTTP 404.

        A non-numeric id raises ValueError from long(); that is treated the
        same as a missing entity.
        """
        try:
            user = models.User.get_by_id(long(user_id))
            if user:
                return user
        except ValueError:
            pass
        self.abort(404)

    def edit(self, user_id):
        """GET: show the edit form. POST: validate and persist changes."""
        if self.request.POST:
            user = self.get_or_404(user_id)
            if self.form.validate():
                self.form.populate_obj(user)
                user.put()
                self.add_message("Changes saved!", 'success')
                return self.redirect_to("user-edit", user_id=user_id)
            else:
                self.add_message("Could not save changes!", 'error')
        else:
            user = self.get_or_404(user_id)
            self.form.process(obj=user)

        params = {
            'user': user
        }
        return self.render_template('admin/edituser.html', **params)

    @webapp2.cached_property
    def form(self):
        # Cached per request; bound to this handler for CSRF/session data.
        return EditProfileForm(self)
"""Validators for optional database driver modules.

Reassembled from a corrupted dump: the ``import sys`` line was split out of
the file by infilling markers and has been restored to the top of the module.
"""
import sys

from healthcareai.common.healthcareai_error import HealthcareAIError


def validate_pyodbc_is_loaded():
    """Raise HealthcareAIError unless pyodbc has been imported.

    pyodbc is an optional dependency, so this is checked lazily at call time
    rather than at package import.
    """
    if 'pyodbc' not in sys.modules:
        raise HealthcareAIError('Using this function requires installation of pyodbc.')


def validate_sqlite3_is_loaded():
    """Raise HealthcareAIError unless sqlite3 has been imported.

    sqlite3 ships with CPython but may be absent from some distributions,
    so it is treated as optional here as well.
    """
    if 'sqlite3' not in sys.modules:
        raise HealthcareAIError('Using this function requires installation of sqlite3.')
"""Support for the MAX! Cube LAN Gateway."""
import logging
from socket import timeout
from threading import Lock
import time

from maxcube.cube import MaxCube
import voluptuous as vol

from homeassistant.const import CONF_HOST, CONF_PORT, CONF_SCAN_INTERVAL
import homeassistant.helpers.config_validation as cv
from homeassistant.helpers.discovery import load_platform
from homeassistant.util.dt import now

_LOGGER = logging.getLogger(__name__)

DEFAULT_PORT = 62910
DOMAIN = "maxcube"

DATA_KEY = "maxcube"

NOTIFICATION_ID = "maxcube_notification"
NOTIFICATION_TITLE = "Max!Cube gateway setup"

CONF_GATEWAYS = "gateways"

# Schema for one gateway entry under `gateways:`.
CONFIG_GATEWAY = vol.Schema(
    {
        vol.Required(CONF_HOST): cv.string,
        vol.Optional(CONF_PORT, default=DEFAULT_PORT): cv.port,
        vol.Optional(CONF_SCAN_INTERVAL, default=300): cv.time_period,
    }
)

CONFIG_SCHEMA = vol.Schema(
    {
        DOMAIN: vol.Schema(
            {
                vol.Required(CONF_GATEWAYS, default={}): vol.All(
                    cv.ensure_list, [CONFIG_GATEWAY]
                )
            }
        )
    },
    extra=vol.ALLOW_EXTRA,
)


def setup(hass, config):
    """Establish connection to MAX! Cube.

    Connects every configured gateway; returns False only when no gateway
    could be reached (including the degenerate case of zero configured
    gateways), otherwise loads the climate and binary_sensor platforms.
    """
    if DATA_KEY not in hass.data:
        hass.data[DATA_KEY] = {}

    connection_failed = 0
    gateways = config[DOMAIN][CONF_GATEWAYS]
    for gateway in gateways:
        host = gateway[CONF_HOST]
        port = gateway[CONF_PORT]
        scan_interval = gateway[CONF_SCAN_INTERVAL].total_seconds()

        try:
            cube = MaxCube(host, port, now=now)
            hass.data[DATA_KEY][host] = MaxCubeHandle(cube, scan_interval)
        except timeout as ex:
            _LOGGER.error("Unable to connect to Max!Cube gateway: %s", str(ex))
            hass.components.persistent_notification.create(
                f"Error: {ex}<br />You will need to restart Home Assistant after fixing.",
                title=NOTIFICATION_TITLE,
                notification_id=NOTIFICATION_ID,
            )
            connection_failed += 1

    if connection_failed >= len(gateways):
        return False

    load_platform(hass, "climate", DOMAIN, {}, config)
    load_platform(hass, "binary_sensor", DOMAIN, {}, config)

    return True


class MaxCubeHandle:
    """Keep the cube instance in one place and centralize the update."""

    def __init__(self, cube, scan_interval):
        """Initialize the Cube Handle."""
        self.cube = cube
        # Short scan intervals keep the TCP connection open between polls.
        self.cube.use_persistent_connection = scan_interval <= 300  # seconds

        self.scan_interval = scan_interval
        self.mutex = Lock()
        self._updatets = time.monotonic()

    def update(self):
        """Pull the latest data from the MAX! Cube."""
        # Acquire mutex to prevent simultaneous update from multiple threads
        with self.mutex:
            # Only update every scan_interval; entity updates in between
            # are served from the cached cube state.
            if (time.monotonic() - self._updatets) >= self.scan_interval:
                _LOGGER.debug("Updating")

                try:
                    self.cube.update()
                except timeout:
                    _LOGGER.error("Max!Cube connection failed")
                    return False

                self._updatets = time.monotonic()
            else:
                _LOGGER.debug("Skipping update")
return f.read()
<|file_name|>html.py<|end_file_name|><|fim▁begin|>"""HTML utilities suitable for global use.""" from __future__ import unicode_literals import re from django.utils.encoding import force_text, force_str from django.utils.functional import allow_lazy from django.utils.safestring import SafeData, mark_safe from django.utils import six from django.utils.six.moves.urllib.parse import quote, unquote, urlsplit, urlunsplit from django.utils.text import normalize_newlines from .html_parser import HTMLParser, HTMLParseError # Configuration for urlize() function. TRAILING_PUNCTUATION = ['.', ',', ':', ';', '.)', '"', '\''] WRAPPING_PUNCTUATION = [('(', ')'), ('<', '>'), ('[', ']'), ('&lt;', '&gt;'), ('"', '"'), ('\'', '\'')] # List of possible strings used for bullets in bulleted lists. DOTS = ['&middot;', '*', '\u2022', '&#149;', '&bull;', '&#8226;'] unencoded_ampersands_re = re.compile(r'&(?!(\w+|#\d+);)') word_split_re = re.compile(r'(\s+)') simple_url_re = re.compile(r'^https?://\[?\w', re.IGNORECASE) simple_url_2_re = re.compile(r'^www\.|^(?!http)\w[^@]+\.(com|edu|gov|int|mil|net|org)$', re.IGNORECASE) simple_email_re = re.compile(r'^\S+@\S+\.\S+$') link_target_attribute_re = re.compile(r'(<a [^>]*?)target=[^\s>]+') html_gunk_re = re.compile(r'(?:<br clear="all">|<i><\/i>|<b><\/b>|<em><\/em>|<strong><\/strong>|<\/?smallcaps>|<\/?uppercase>)', re.IGNORECASE) hard_coded_bullets_re = re.compile(r'((?:<p>(?:%s).*?[a-zA-Z].*?</p>\s*)+)' % '|'.join(re.escape(x) for x in DOTS), re.DOTALL) trailing_empty_content_re = re.compile(r'(?:<p>(?:&nbsp;|\s|<br \/>)*?</p>\s*)+\Z') def escape(text): """ Returns the given text with ampersands, quotes and angle brackets encoded for use in HTML. 
""" return mark_safe(force_text(text).replace('&', '&amp;').replace('<', '&lt;').replace('>', '&gt;').replace('"', '&quot;').replace("'", '&#39;')) escape = allow_lazy(escape, six.text_type) _js_escapes = { ord('\\'): '\\u005C', ord('\''): '\\u0027', ord('"'): '\\u0022', ord('>'): '\\u003E', ord('<'): '\\u003C', ord('&'): '\\u0026', ord('='): '\\u003D', ord('-'): '\\u002D', ord(';'): '\\u003B', ord('\u2028'): '\\u2028', ord('\u2029'): '\\u2029' } # Escape every ASCII character with a value less than 32. _js_escapes.update((ord('%c' % z), '\\u%04X' % z) for z in range(32)) def escapejs(value): """Hex encodes characters for use in JavaScript strings.""" return mark_safe(force_text(value).translate(_js_escapes)) escapejs = allow_lazy(escapejs, six.text_type) def conditional_escape(text): """ Similar to escape(), except that it doesn't operate on pre-escaped strings. """ if hasattr(text, '__html__'): return text.__html__() else: return escape(text) def format_html(format_string, *args, **kwargs): """ Similar to str.format, but passes all arguments through conditional_escape, and calls 'mark_safe' on the result. This function should be used instead of str.format or % interpolation to build up small HTML fragments. """ args_safe = map(conditional_escape, args) kwargs_safe = dict((k, conditional_escape(v)) for (k, v) in six.iteritems(kwargs)) return mark_safe(format_string.format(*args_safe, **kwargs_safe)) def format_html_join(sep, format_string, args_generator): """ A wrapper of format_html, for the common case of a group of arguments that need to be formatted using the same format string, and then joined using 'sep'. 'sep' is also passed through conditional_escape. 'args_generator' should be an iterator that returns the sequence of 'args' that will be passed to format_html. 
Example: format_html_join('\n', "<li>{0} {1}</li>", ((u.first_name, u.last_name) for u in users)) """ return mark_safe(conditional_escape(sep).join( format_html(format_string, *tuple(args)) for args in args_generator)) def linebreaks(value, autoescape=False): """Converts newlines into <p> and <br />s.""" value = normalize_newlines(value) paras = re.split('\n{2,}', value) if autoescape: paras = ['<p>%s</p>' % escape(p).replace('\n', '<br />') for p in paras] else: paras = ['<p>%s</p>' % p.replace('\n', '<br />') for p in paras] return '\n\n'.join(paras) linebreaks = allow_lazy(linebreaks, six.text_type) class MLStripper(HTMLParser): def __init__(self): HTMLParser.__init__(self) self.reset() self.fed = [] def handle_data(self, d): self.fed.append(d) def handle_entityref(self, name): self.fed.append('&%s;' % name) def handle_charref(self, name): self.fed.append('&#%s;' % name) def get_data(self): return ''.join(self.fed) def strip_tags(value): """Returns the given HTML with all tags stripped.""" s = MLStripper() try: s.feed(value) s.close() except HTMLParseError: return value else: return s.get_data() strip_tags = allow_lazy(strip_tags) def remove_tags(html, tags): """Returns the given HTML with given tags removed.""" tags = [re.escape(tag) for tag in tags.split()] tags_re = '(%s)' % '|'.join(tags) starttag_re = re.compile(r'<%s(/?>|(\s+[^>]*>))' % tags_re, re.U) endtag_re = re.compile('</%s>' % tags_re) html = starttag_re.sub('', html) html = endtag_re.sub('', html) return html remove_tags = allow_lazy(remove_tags, six.text_type)<|fim▁hole|> def strip_spaces_between_tags(value): """Returns the given HTML with spaces between tags removed.""" return re.sub(r'>\s+<', '><', force_text(value)) strip_spaces_between_tags = allow_lazy(strip_spaces_between_tags, six.text_type) def strip_entities(value): """Returns the given HTML with all entities (&something;) stripped.""" return re.sub(r'&(?:\w+|#\d+);', '', force_text(value)) strip_entities = allow_lazy(strip_entities, 
six.text_type) def smart_urlquote(url): "Quotes a URL if it isn't already quoted." # Handle IDN before quoting. try: scheme, netloc, path, query, fragment = urlsplit(url) try: netloc = netloc.encode('idna').decode('ascii') # IDN -> ACE except UnicodeError: # invalid domain part pass else: url = urlunsplit((scheme, netloc, path, query, fragment)) except ValueError: # invalid IPv6 URL (normally square brackets in hostname part). pass url = unquote(force_str(url)) # See http://bugs.python.org/issue2637 url = quote(url, safe=b'!*\'();:@&=+$,/?#[]~') return force_text(url) def urlize(text, trim_url_limit=None, nofollow=False, autoescape=False): """ Converts any URLs in text into clickable links. Works on http://, https://, www. links, and also on links ending in one of the original seven gTLDs (.com, .edu, .gov, .int, .mil, .net, and .org). Links can have trailing punctuation (periods, commas, close-parens) and leading punctuation (opening parens) and it'll still do the right thing. If trim_url_limit is not None, the URLs in the link text longer than this limit will be truncated to trim_url_limit-3 characters and appended with an ellipsis. If nofollow is True, the links will get a rel="nofollow" attribute. If autoescape is True, the link text and URLs will be autoescaped. """ def trim_url(x, limit=trim_url_limit): if limit is None or len(x) <= limit: return x return '%s...' % x[:max(0, limit - 3)] safe_input = isinstance(text, SafeData) words = word_split_re.split(force_text(text)) for i, word in enumerate(words): if '.' in word or '@' in word or ':' in word: # Deal with punctuation. lead, middle, trail = '', word, '' for punctuation in TRAILING_PUNCTUATION: if middle.endswith(punctuation): middle = middle[:-len(punctuation)] trail = punctuation + trail for opening, closing in WRAPPING_PUNCTUATION: if middle.startswith(opening): middle = middle[len(opening):] lead = lead + opening # Keep parentheses at the end only if they're balanced. 
if (middle.endswith(closing) and middle.count(closing) == middle.count(opening) + 1): middle = middle[:-len(closing)] trail = closing + trail # Make URL we want to point to. url = None nofollow_attr = ' rel="nofollow"' if nofollow else '' if simple_url_re.match(middle): url = smart_urlquote(middle) elif simple_url_2_re.match(middle): url = smart_urlquote('http://%s' % middle) elif not ':' in middle and simple_email_re.match(middle): local, domain = middle.rsplit('@', 1) try: domain = domain.encode('idna').decode('ascii') except UnicodeError: continue url = 'mailto:%s@%s' % (local, domain) nofollow_attr = '' # Make link. if url: trimmed = trim_url(middle) if autoescape and not safe_input: lead, trail = escape(lead), escape(trail) url, trimmed = escape(url), escape(trimmed) middle = '<a href="%s"%s>%s</a>' % (url, nofollow_attr, trimmed) words[i] = mark_safe('%s%s%s' % (lead, middle, trail)) else: if safe_input: words[i] = mark_safe(word) elif autoescape: words[i] = escape(word) elif safe_input: words[i] = mark_safe(word) elif autoescape: words[i] = escape(word) return ''.join(words) urlize = allow_lazy(urlize, six.text_type) def avoid_wrapping(value): """ Avoid text wrapping in the middle of a phrase by adding non-breaking spaces where there previously were normal spaces. """ return value.replace(" ", "\xa0")<|fim▁end|>
<|file_name|>enum_set.rs<|end_file_name|><|fim▁begin|>// Copyright 2012 The Rust Project Developers. See the COPYRIGHT // file at the top-level directory of this distribution and at // http://rust-lang.org/COPYRIGHT. // // Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or // http://www.apache.org/licenses/LICENSE-2.0> or the MIT license // <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your // option. This file may not be copied, modified, or distributed // except according to those terms. //! A structure for holding a set of enum variants. //! //! This module defines a container which uses an efficient bit mask //! representation to hold C-like enum variants. use core::prelude::*; use core::fmt; use core::num::Int; // FIXME(contentions): implement union family of methods? (general design may be wrong here) #[deriving(Clone, PartialEq, Eq, PartialOrd, Ord, Hash)] /// A specialized `Set` implementation to use enum types. pub struct EnumSet<E> { // We must maintain the invariant that no bits are set // for which no variant exists bits: uint } impl<E:CLike+fmt::Show> fmt::Show for EnumSet<E> { fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result { try!(write!(fmt, "{{")); let mut first = true; for e in self.iter() { if !first { try!(write!(fmt, ", ")); } try!(write!(fmt, "{}", e)); first = false; } write!(fmt, "}}") } } /** An interface for casting C-like enum to uint and back. A typically implementation is as below. ```{rust,ignore} #[repr(uint)] enum Foo { A, B, C } impl CLike for Foo { fn to_uint(&self) -> uint { *self as uint } fn from_uint(v: uint) -> Foo { unsafe { mem::transmute(v) } } } ``` */ pub trait CLike { /// Converts a C-like enum to a `uint`. fn to_uint(&self) -> uint; /// Converts a `uint` to a C-like enum. 
fn from_uint(uint) -> Self; } fn bit<E:CLike>(e: &E) -> uint { use core::uint; let value = e.to_uint(); assert!(value < uint::BITS, "EnumSet only supports up to {} variants.", uint::BITS - 1); 1 << value } impl<E:CLike> EnumSet<E> { /// Deprecated: Renamed to `new`. #[deprecated = "Renamed to `new`"] pub fn empty() -> EnumSet<E> { EnumSet::new() } /// Returns an empty `EnumSet`. #[unstable = "matches collection reform specification, waiting for dust to settle"] pub fn new() -> EnumSet<E> { EnumSet {bits: 0} } /// Returns the number of elements in the given `EnumSet`. #[unstable = "matches collection reform specification, waiting for dust to settle"] pub fn len(&self) -> uint { self.bits.count_ones() } /// Returns true if the `EnumSet` is empty. #[unstable = "matches collection reform specification, waiting for dust to settle"] pub fn is_empty(&self) -> bool { self.bits == 0 } pub fn clear(&mut self) { self.bits = 0; } /// Returns `true` if the `EnumSet` contains any enum of the given `EnumSet`. /// Deprecated: Use `is_disjoint`. #[deprecated = "Use `is_disjoint`"] pub fn intersects(&self, e: EnumSet<E>) -> bool { !self.is_disjoint(&e) } /// Returns `false` if the `EnumSet` contains any enum of the given `EnumSet`. #[unstable = "matches collection reform specification, waiting for dust to settle"] pub fn is_disjoint(&self, other: &EnumSet<E>) -> bool { (self.bits & other.bits) == 0 } /// Returns `true` if a given `EnumSet` is included in this `EnumSet`. #[unstable = "matches collection reform specification, waiting for dust to settle"] pub fn is_superset(&self, other: &EnumSet<E>) -> bool { (self.bits & other.bits) == other.bits } /// Returns `true` if this `EnumSet` is included in the given `EnumSet`. #[unstable = "matches collection reform specification, waiting for dust to settle"] pub fn is_subset(&self, other: &EnumSet<E>) -> bool { other.is_superset(self) } /// Returns the union of both `EnumSets`. 
pub fn union(&self, e: EnumSet<E>) -> EnumSet<E> { EnumSet {bits: self.bits | e.bits} } /// Returns the intersection of both `EnumSets`. pub fn intersection(&self, e: EnumSet<E>) -> EnumSet<E> { EnumSet {bits: self.bits & e.bits} } /// Deprecated: Use `insert`. #[deprecated = "Use `insert`"] pub fn add(&mut self, e: E) { self.insert(e); } /// Adds an enum to the `EnumSet`, and returns `true` if it wasn't there before #[unstable = "matches collection reform specification, waiting for dust to settle"] pub fn insert(&mut self, e: E) -> bool { let result = !self.contains(&e); self.bits |= bit(&e); result } /// Removes an enum from the EnumSet #[unstable = "matches collection reform specification, waiting for dust to settle"] pub fn remove(&mut self, e: &E) -> bool { let result = self.contains(e); self.bits &= !bit(e); result } /// Deprecated: use `contains`. #[deprecated = "use `contains"] pub fn contains_elem(&self, e: E) -> bool { self.contains(&e) } /// Returns `true` if an `EnumSet` contains a given enum. #[unstable = "matches collection reform specification, waiting for dust to settle"] pub fn contains(&self, e: &E) -> bool { (self.bits & bit(e)) != 0 } /// Returns an iterator over an `EnumSet`. 
#[unstable = "matches collection reform specification, waiting for dust to settle"] pub fn iter(&self) -> Items<E> { Items::new(self.bits) } } impl<E:CLike> Sub<EnumSet<E>, EnumSet<E>> for EnumSet<E> { fn sub(&self, e: &EnumSet<E>) -> EnumSet<E> { EnumSet {bits: self.bits & !e.bits} } } impl<E:CLike> BitOr<EnumSet<E>, EnumSet<E>> for EnumSet<E> { fn bitor(&self, e: &EnumSet<E>) -> EnumSet<E> { EnumSet {bits: self.bits | e.bits} } } impl<E:CLike> BitAnd<EnumSet<E>, EnumSet<E>> for EnumSet<E> { fn bitand(&self, e: &EnumSet<E>) -> EnumSet<E> { EnumSet {bits: self.bits & e.bits} } } impl<E:CLike> BitXor<EnumSet<E>, EnumSet<E>> for EnumSet<E> { fn bitxor(&self, e: &EnumSet<E>) -> EnumSet<E> { EnumSet {bits: self.bits ^ e.bits} } } /// An iterator over an EnumSet pub struct Items<E> { index: uint, bits: uint, } impl<E:CLike> Items<E> { fn new(bits: uint) -> Items<E> { Items { index: 0, bits: bits } } } impl<E:CLike> Iterator<E> for Items<E> { fn next(&mut self) -> Option<E> { if self.bits == 0 { return None; } while (self.bits & 1) == 0 { self.index += 1; self.bits >>= 1; } let elem = CLike::from_uint(self.index); self.index += 1; self.bits >>= 1; Some(elem) } fn size_hint(&self) -> (uint, Option<uint>) { let exact = self.bits.count_ones(); (exact, Some(exact)) } } impl<E:CLike> FromIterator<E> for EnumSet<E> { fn from_iter<I:Iterator<E>>(iterator: I) -> EnumSet<E> { let mut ret = EnumSet::new(); ret.extend(iterator); ret } } impl<E:CLike> Extend<E> for EnumSet<E> { fn extend<I: Iterator<E>>(&mut self, mut iterator: I) { for element in iterator { self.insert(element); } } } #[cfg(test)] mod test { use std::prelude::*; use std::mem; use super::{EnumSet, CLike}; #[deriving(PartialEq, Show)] #[repr(uint)] enum Foo {<|fim▁hole|> A, B, C } impl CLike for Foo { fn to_uint(&self) -> uint { *self as uint } fn from_uint(v: uint) -> Foo { unsafe { mem::transmute(v) } } } #[test] fn test_new() { let e: EnumSet<Foo> = EnumSet::new(); assert!(e.is_empty()); } #[test] fn test_show() { 
let mut e = EnumSet::new(); assert_eq!("{}", e.to_string().as_slice()); e.insert(A); assert_eq!("{A}", e.to_string().as_slice()); e.insert(C); assert_eq!("{A, C}", e.to_string().as_slice()); } #[test] fn test_len() { let mut e = EnumSet::new(); assert_eq!(e.len(), 0); e.insert(A); e.insert(B); e.insert(C); assert_eq!(e.len(), 3); e.remove(&A); assert_eq!(e.len(), 2); e.clear(); assert_eq!(e.len(), 0); } /////////////////////////////////////////////////////////////////////////// // intersect #[test] fn test_two_empties_do_not_intersect() { let e1: EnumSet<Foo> = EnumSet::new(); let e2: EnumSet<Foo> = EnumSet::new(); assert!(e1.is_disjoint(&e2)); } #[test] fn test_empty_does_not_intersect_with_full() { let e1: EnumSet<Foo> = EnumSet::new(); let mut e2: EnumSet<Foo> = EnumSet::new(); e2.insert(A); e2.insert(B); e2.insert(C); assert!(e1.is_disjoint(&e2)); } #[test] fn test_disjoint_intersects() { let mut e1: EnumSet<Foo> = EnumSet::new(); e1.insert(A); let mut e2: EnumSet<Foo> = EnumSet::new(); e2.insert(B); assert!(e1.is_disjoint(&e2)); } #[test] fn test_overlapping_intersects() { let mut e1: EnumSet<Foo> = EnumSet::new(); e1.insert(A); let mut e2: EnumSet<Foo> = EnumSet::new(); e2.insert(A); e2.insert(B); assert!(!e1.is_disjoint(&e2)); } /////////////////////////////////////////////////////////////////////////// // contains and contains_elem #[test] fn test_superset() { let mut e1: EnumSet<Foo> = EnumSet::new(); e1.insert(A); let mut e2: EnumSet<Foo> = EnumSet::new(); e2.insert(A); e2.insert(B); let mut e3: EnumSet<Foo> = EnumSet::new(); e3.insert(C); assert!(e1.is_subset(&e2)); assert!(e2.is_superset(&e1)); assert!(!e3.is_superset(&e2)) assert!(!e2.is_superset(&e3)) } #[test] fn test_contains() { let mut e1: EnumSet<Foo> = EnumSet::new(); e1.insert(A); assert!(e1.contains(&A)); assert!(!e1.contains(&B)); assert!(!e1.contains(&C)); e1.insert(A); e1.insert(B); assert!(e1.contains(&A)); assert!(e1.contains(&B)); assert!(!e1.contains(&C)); } 
/////////////////////////////////////////////////////////////////////////// // iter #[test] fn test_iterator() { let mut e1: EnumSet<Foo> = EnumSet::new(); let elems: Vec<Foo> = e1.iter().collect(); assert!(elems.is_empty()) e1.insert(A); let elems = e1.iter().collect(); assert_eq!(vec![A], elems) e1.insert(C); let elems = e1.iter().collect(); assert_eq!(vec![A,C], elems) e1.insert(C); let elems = e1.iter().collect(); assert_eq!(vec![A,C], elems) e1.insert(B); let elems = e1.iter().collect(); assert_eq!(vec![A,B,C], elems) } /////////////////////////////////////////////////////////////////////////// // operators #[test] fn test_operators() { let mut e1: EnumSet<Foo> = EnumSet::new(); e1.insert(A); e1.insert(C); let mut e2: EnumSet<Foo> = EnumSet::new(); e2.insert(B); e2.insert(C); let e_union = e1 | e2; let elems = e_union.iter().collect(); assert_eq!(vec![A,B,C], elems) let e_intersection = e1 & e2; let elems = e_intersection.iter().collect(); assert_eq!(vec![C], elems) // Another way to express intersection let e_intersection = e1 - (e1 - e2); let elems = e_intersection.iter().collect(); assert_eq!(vec![C], elems) let e_subtract = e1 - e2; let elems = e_subtract.iter().collect(); assert_eq!(vec![A], elems) // Bitwise XOR of two sets, aka symmetric difference let e_symmetric_diff = e1 ^ e2; let elems = e_symmetric_diff.iter().collect(); assert_eq!(vec![A,B], elems) // Another way to express symmetric difference let e_symmetric_diff = (e1 - e2) | (e2 - e1); let elems = e_symmetric_diff.iter().collect(); assert_eq!(vec![A,B], elems) // Yet another way to express symmetric difference let e_symmetric_diff = (e1 | e2) - (e1 & e2); let elems = e_symmetric_diff.iter().collect(); assert_eq!(vec![A,B], elems) } #[test] #[should_fail] fn test_overflow() { #[allow(dead_code)] #[repr(uint)] enum Bar { V00, V01, V02, V03, V04, V05, V06, V07, V08, V09, V10, V11, V12, V13, V14, V15, V16, V17, V18, V19, V20, V21, V22, V23, V24, V25, V26, V27, V28, V29, V30, V31, V32, V33, V34, 
V35, V36, V37, V38, V39, V40, V41, V42, V43, V44, V45, V46, V47, V48, V49, V50, V51, V52, V53, V54, V55, V56, V57, V58, V59, V60, V61, V62, V63, V64, V65, V66, V67, V68, V69, } impl CLike for Bar { fn to_uint(&self) -> uint { *self as uint } fn from_uint(v: uint) -> Bar { unsafe { mem::transmute(v) } } } let mut set = EnumSet::new(); set.insert(V64); } }<|fim▁end|>
<|file_name|>resultados_1.rs<|end_file_name|><|fim▁begin|><|fim▁hole|><|fim▁end|>
plataformatcc.resultados
<|file_name|>tool-bar.component.ts<|end_file_name|><|fim▁begin|>import {Component, OnInit} from '@angular/core' @Component({<|fim▁hole|>}) export class ToolBarComponent implements OnInit { mockLines: [ { name: 'S1' }, { name: 'S2' }, { name: 'S3' } ] selectedLine: any constructor() { } ngOnInit() { } }<|fim▁end|>
selector: 'app-tool-bar', templateUrl: './tool-bar.component.html', styleUrls: ['./tool-bar.component.scss']
<|file_name|>pydevd_xml.py<|end_file_name|><|fim▁begin|>import pydev_log import traceback import pydevd_resolver from pydevd_constants import * #@UnusedWildImport from types import * #@UnusedWildImport try: from urllib import quote except: from urllib.parse import quote #@UnresolvedImport try: from xml.sax.saxutils import escape def makeValidXmlValue(s): return escape(s, {'"': '&quot;'}) except: #Simple replacement if it's not there. def makeValidXmlValue(s): return s.replace('<', '&lt;').replace('>', '&gt;').replace('"', '&quot;') class ExceptionOnEvaluate: def __init__(self, result): self.result = result #------------------------------------------------------------------------------------------------------ resolvers in map if not sys.platform.startswith("java"): typeMap = [ #None means that it should not be treated as a compound variable #isintance does not accept a tuple on some versions of python, so, we must declare it expanded (type(None), None,), (int, None), (float, None), (complex, None), (str, None), (tuple, pydevd_resolver.tupleResolver), (list, pydevd_resolver.tupleResolver), (dict, pydevd_resolver.dictResolver), ] try: typeMap.append((long, None)) except: pass #not available on all python versions try: typeMap.append((unicode, None)) except: pass #not available on all python versions try: typeMap.append((set, pydevd_resolver.setResolver)) except: pass #not available on all python versions try: typeMap.append((frozenset, pydevd_resolver.setResolver)) except: pass #not available on all python versions else: #platform is java from org.python import core #@UnresolvedImport typeMap = [ (core.PyNone, None), (core.PyInteger, None), (core.PyLong, None), (core.PyFloat, None), (core.PyComplex, None), (core.PyString, None), (core.PyTuple, pydevd_resolver.tupleResolver), (core.PyList, pydevd_resolver.tupleResolver), (core.PyDictionary, pydevd_resolver.dictResolver), (core.PyStringMap, pydevd_resolver.dictResolver), ] if hasattr(core, 'PyJavaInstance'): #Jython 
2.5b3 removed it. typeMap.append((core.PyJavaInstance, pydevd_resolver.instanceResolver)) def getType(o): """ returns a triple (typeObject, typeString, resolver resolver != None means that variable is a container, and should be displayed as a hierarchy. Use the resolver to get its attributes. All container objects should have a resolver. """ try: type_object = type(o) type_name = type_object.__name__ except: #This happens for org.python.core.InitModule return 'Unable to get Type', 'Unable to get Type', None try: if type_name == 'org.python.core.PyJavaInstance': return type_object, type_name, pydevd_resolver.instanceResolver if type_name == 'org.python.core.PyArray': return type_object, type_name, pydevd_resolver.jyArrayResolver for t in typeMap: if isinstance(o, t[0]): return type_object, type_name, t[1] except: traceback.print_exc() #no match return default return type_object, type_name, pydevd_resolver.defaultResolver def frameVarsToXML(frame_f_locals): """ dumps frame variables to XML <var name="var_name" scope="local" type="type" value="value"/> """ xml = "" keys = frame_f_locals.keys() if hasattr(keys, 'sort'): keys.sort() #Python 3.0 does not have it else: keys = sorted(keys) #Jython 2.1 does not have it for k in keys: try: v = frame_f_locals[k] xml += varToXML(v, str(k)) except Exception: traceback.print_exc() pydev_log.error("Unexpected error, recovered safely.\n") return xml def varToXML(val, name, doTrim=True): """ single variable or dictionary to xml representation """ is_exception_on_eval = isinstance(val, ExceptionOnEvaluate) if is_exception_on_eval: v = val.result else: v = val type, typeName, resolver = getType(v) try: if hasattr(v, '__class__'): try: cName = str(v.__class__) if cName.find('.') != -1: cName = cName.split('.')[-1] elif cName.find("'") != -1: #does not have '.' 
(could be something like <type 'int'>) cName = cName[cName.index("'") + 1:] if cName.endswith("'>"): cName = cName[:-2] except: cName = str(v.__class__) value = '%s: %s' % (cName, v) else: value = str(v) except: try: value = repr(v) except: value = 'Unable to get repr for %s' % v.__class__ try: name = quote(name, '/>_= ') #TODO: Fix PY-5834 without using quote except: pass xml = '<var name="%s" type="%s"' % (makeValidXmlValue(name), makeValidXmlValue(typeName)) if value: #cannot be too big... communication may not handle it. if len(value) > MAXIMUM_VARIABLE_REPRESENTATION_SIZE and doTrim: value = value[0:MAXIMUM_VARIABLE_REPRESENTATION_SIZE] value += '...' #fix to work with unicode values try: if not IS_PY3K: if isinstance(value, unicode): value = value.encode('utf-8')<|fim▁hole|> value = value.encode('utf-8') except TypeError: #in java, unicode is a function pass xmlValue = ' value="%s"' % (makeValidXmlValue(quote(value, '/>_= '))) else: xmlValue = '' if is_exception_on_eval: xmlCont = ' isErrorOnEval="True"' else: if resolver is not None: xmlCont = ' isContainer="True"' else: xmlCont = '' return ''.join((xml, xmlValue, xmlCont, ' />\n'))<|fim▁end|>
else: if isinstance(value, bytes):
<|file_name|>todo-editor.component.js<|end_file_name|><|fim▁begin|>(function(){ angular.module('nbTodos') .component('todoEditor', { templateUrl: '/app/todos/todo-editor/todo-editor.component.html', controller: TodoEditorController, controllerAs: 'vm' }); <|fim▁hole|> this.text = ''; this.create = function () { if( this.text.trim().length === 0) { this.error = true; return; } todosStore.create(this.text); this.text = ''; } } })();<|fim▁end|>
TodoEditorController.$inject = ['todosStore']; function TodoEditorController(todosStore) {
<|file_name|>order_material_form.py<|end_file_name|><|fim▁begin|>############################################################################## # # Copyright (C) 2018 Compassion CH (http://www.compassion.ch) # @author: Emanuel Cino <ecino@compassion.ch> # # The licence is in the file __manifest__.py # ############################################################################## import logging from base64 import b64decode, b64encode from werkzeug.datastructures import FileStorage from odoo import models, fields, _ _logger = logging.getLogger(__name__) try: import magic except ImportError: _logger.warning("Please install magic in order to use Muskathlon module") class OrderMaterialForm(models.AbstractModel): _name = "cms.form.order.material.mixin" _inherit = "cms.form" _form_model = "crm.lead" _form_model_fields = ["partner_id", "description"] _form_required_fields = ["flyer_german", "flyer_french"] partner_id = fields.Many2one("res.partner", readonly=False) event_id = fields.Many2one("crm.event.compassion", readonly=False) form_id = fields.Char() flyers_select = [(i, str(i)) for i in (0, 5, 10, 15, 20, 30)] flyer_german = fields.Selection(flyers_select, string="Number of flyers in german", default=0) flyer_french = fields.Selection(flyers_select, string="Number of flyers in french", default=0) @property def _form_fieldsets(self): return [ {"id": "flyers", "fields": ["flyer_german", "flyer_french", "form_id"]}, ] @property def form_msg_success_created(self): return _( "Thank you for your request. You will hear back from us " "within the next days." 
) @property def form_widgets(self): # Hide fields res = super(OrderMaterialForm, self).form_widgets res.update( { "form_id": "cms_form_compassion.form.widget.hidden", "partner_id": "cms_form_compassion.form.widget.hidden", "event_id": "cms_form_compassion.form.widget.hidden", "description": "cms_form_compassion.form.widget.hidden", } ) return res<|fim▁hole|> def create_description(material, values, languages=["french", "german"]): lines = [] for lang in languages: if int(values[f'flyer_{lang}']) > 0: lines.append(f"<li>{values[f'flyer_{lang}']} <b>{material}</b> in {lang}</li>") description = f"<ul>{''.join(lines)}</ul>" return description def form_init(self, request, main_object=None, **kw): form = super(OrderMaterialForm, self).form_init(request, main_object, **kw) # Set default values registration = kw.get("registration") form.partner_id = registration and registration.partner_id form.event_id = registration and registration.compassion_event_id return form def form_before_create_or_update(self, values, extra_values): """ Dismiss any pending status message, to avoid multiple messages when multiple forms are present on same page. 
""" super(OrderMaterialForm, self).form_before_create_or_update( values, extra_values ) self.o_request.website.get_status_message() staff_id = ( self.env["res.config.settings"] .sudo() .get_param("muskathlon_order_notify_id") ) values.update( { "name": f"Muskathlon flyer order - {self.partner_id.name}", "description": self.create_description("flyer", extra_values), "user_id": staff_id, "event_ids": [(4, self.event_id.id, None)], "partner_id": self.partner_id.id, } ) def form_check_empty_value(self, fname, field, value, **req_values): """Invalidate the form if they order 0 flyers""" is_valid = super().form_check_empty_value(fname, field, value, **req_values) is_valid |= int(req_values["flyer_french"]) + int(req_values["flyer_german"]) <= 0 return is_valid def _form_create(self, values): """ Run as Muskathlon user to authorize lead creation, and prevents default mail notification to staff (a better one is sent just after).""" uid = self.env.ref("muskathlon.user_muskathlon_portal").id self.main_object = self.form_model\ .sudo(uid).with_context(tracking_disable=True).create(values.copy()) def form_after_create_or_update(self, values, extra_values): super(OrderMaterialForm, self).form_after_create_or_update( values, extra_values ) # Update contact fields on lead self.main_object._onchange_partner_id() # Send mail email_template = self.env.ref("muskathlon.order_material_mail_template") email_template.sudo().send_mail( self.main_object.id, raise_exception=False, force_send=True, email_values={ "attachments": [("picture.jpg", self.main_object.partner_id.image)], "email_to": self.main_object.user_email, }, ) return True class OrderMaterialFormFlyer(models.AbstractModel): _name = "cms.form.order.material" _inherit = "cms.form.order.material.mixin" form_id = fields.Char(default="order_material") class OrderMaterialFormChildpack(models.AbstractModel): _name = "cms.form.order.muskathlon.childpack" _inherit = "cms.form.order.material.mixin" form_id = 
fields.Char(default="muskathlon_childpack") flyer_german = fields.Selection(string="Number of childpacks in german", default=0) flyer_french = fields.Selection(string="Number of childpacks in french", default=0) def form_before_create_or_update(self, values, extra_values): super(OrderMaterialFormChildpack, self).form_before_create_or_update( values, extra_values ) values.update( { "name": f"Muskathlon childpack order - {self.partner_id.name}", "description": self.create_description("childpack", extra_values), } )<|fim▁end|>
@staticmethod
<|file_name|>sw.js<|end_file_name|><|fim▁begin|>var dataCacheName = 'Jblog-v1'; var cacheName = 'Jblog-1'; var filesToCache = [ '/', '/index.html' ]; self.addEventListener('install', function(e) { console.log('[ServiceWorker] Install'); e.waitUntil( caches.open(cacheName).then(function(cache) { console.log('[ServiceWorker] Caching app shell'); return cache.addAll(filesToCache); }) );<|fim▁hole|> e.waitUntil( caches.keys().then(function(keyList) { return Promise.all(keyList.map(function(key) { if (key !== cacheName && key !== dataCacheName) { console.log('[ServiceWorker] Removing old cache', key); return caches.delete(key); } })); }) ); return self.clients.claim(); }); self.addEventListener('fetch', function(e) { console.log('[Service Worker] Fetch', e.request.url); e.respondWith( caches.match(e.request).then(function(response) { return response || fetch(e.request); }) ); });<|fim▁end|>
}); self.addEventListener('activate', function(e) { console.log('[ServiceWorker] Activate');
<|file_name|>factory.go<|end_file_name|><|fim▁begin|>/* Copyright 2016 The Kubernetes Authors. Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. */ package podsecuritypolicy import ( "fmt"<|fim▁hole|> "k8s.io/kubernetes/pkg/security/podsecuritypolicy/apparmor" "k8s.io/kubernetes/pkg/security/podsecuritypolicy/capabilities" "k8s.io/kubernetes/pkg/security/podsecuritypolicy/group" "k8s.io/kubernetes/pkg/security/podsecuritypolicy/selinux" "k8s.io/kubernetes/pkg/security/podsecuritypolicy/sysctl" "k8s.io/kubernetes/pkg/security/podsecuritypolicy/user" "k8s.io/kubernetes/pkg/util/errors" ) type simpleStrategyFactory struct{} var _ StrategyFactory = &simpleStrategyFactory{} func NewSimpleStrategyFactory() StrategyFactory { return &simpleStrategyFactory{} } func (f *simpleStrategyFactory) CreateStrategies(psp *extensions.PodSecurityPolicy, namespace string) (*ProviderStrategies, error) { errs := []error{} userStrat, err := createUserStrategy(&psp.Spec.RunAsUser) if err != nil { errs = append(errs, err) } seLinuxStrat, err := createSELinuxStrategy(&psp.Spec.SELinux) if err != nil { errs = append(errs, err) } appArmorStrat, err := createAppArmorStrategy(psp) if err != nil { errs = append(errs, err) } fsGroupStrat, err := createFSGroupStrategy(&psp.Spec.FSGroup) if err != nil { errs = append(errs, err) } supGroupStrat, err := createSupplementalGroupStrategy(&psp.Spec.SupplementalGroups) if err != nil { errs = append(errs, err) } capStrat, err := createCapabilitiesStrategy(psp.Spec.DefaultAddCapabilities, 
psp.Spec.RequiredDropCapabilities, psp.Spec.AllowedCapabilities) if err != nil { errs = append(errs, err) } var unsafeSysctls []string if ann, found := psp.Annotations[extensions.SysctlsPodSecurityPolicyAnnotationKey]; found { var err error unsafeSysctls, err = extensions.SysctlsFromPodSecurityPolicyAnnotation(ann) if err != nil { errs = append(errs, err) } } sysctlsStrat := createSysctlsStrategy(unsafeSysctls) if len(errs) > 0 { return nil, errors.NewAggregate(errs) } strategies := &ProviderStrategies{ RunAsUserStrategy: userStrat, SELinuxStrategy: seLinuxStrat, AppArmorStrategy: appArmorStrat, FSGroupStrategy: fsGroupStrat, SupplementalGroupStrategy: supGroupStrat, CapabilitiesStrategy: capStrat, SysctlsStrategy: sysctlsStrat, } return strategies, nil } // createUserStrategy creates a new user strategy. func createUserStrategy(opts *extensions.RunAsUserStrategyOptions) (user.RunAsUserStrategy, error) { switch opts.Rule { case extensions.RunAsUserStrategyMustRunAs: return user.NewMustRunAs(opts) case extensions.RunAsUserStrategyMustRunAsNonRoot: return user.NewRunAsNonRoot(opts) case extensions.RunAsUserStrategyRunAsAny: return user.NewRunAsAny(opts) default: return nil, fmt.Errorf("Unrecognized RunAsUser strategy type %s", opts.Rule) } } // createSELinuxStrategy creates a new selinux strategy. func createSELinuxStrategy(opts *extensions.SELinuxStrategyOptions) (selinux.SELinuxStrategy, error) { switch opts.Rule { case extensions.SELinuxStrategyMustRunAs: return selinux.NewMustRunAs(opts) case extensions.SELinuxStrategyRunAsAny: return selinux.NewRunAsAny(opts) default: return nil, fmt.Errorf("Unrecognized SELinuxContext strategy type %s", opts.Rule) } } // createAppArmorStrategy creates a new AppArmor strategy. 
func createAppArmorStrategy(psp *extensions.PodSecurityPolicy) (apparmor.Strategy, error) { return apparmor.NewStrategy(psp.Annotations), nil } // createFSGroupStrategy creates a new fsgroup strategy func createFSGroupStrategy(opts *extensions.FSGroupStrategyOptions) (group.GroupStrategy, error) { switch opts.Rule { case extensions.FSGroupStrategyRunAsAny: return group.NewRunAsAny() case extensions.FSGroupStrategyMustRunAs: return group.NewMustRunAs(opts.Ranges, fsGroupField) default: return nil, fmt.Errorf("Unrecognized FSGroup strategy type %s", opts.Rule) } } // createSupplementalGroupStrategy creates a new supplemental group strategy func createSupplementalGroupStrategy(opts *extensions.SupplementalGroupsStrategyOptions) (group.GroupStrategy, error) { switch opts.Rule { case extensions.SupplementalGroupsStrategyRunAsAny: return group.NewRunAsAny() case extensions.SupplementalGroupsStrategyMustRunAs: return group.NewMustRunAs(opts.Ranges, supplementalGroupsField) default: return nil, fmt.Errorf("Unrecognized SupplementalGroups strategy type %s", opts.Rule) } } // createCapabilitiesStrategy creates a new capabilities strategy. func createCapabilitiesStrategy(defaultAddCaps, requiredDropCaps, allowedCaps []api.Capability) (capabilities.Strategy, error) { return capabilities.NewDefaultCapabilities(defaultAddCaps, requiredDropCaps, allowedCaps) } // createSysctlsStrategy creates a new unsafe sysctls strategy. func createSysctlsStrategy(sysctlsPatterns []string) sysctl.SysctlsStrategy { return sysctl.NewMustMatchPatterns(sysctlsPatterns) }<|fim▁end|>
"k8s.io/kubernetes/pkg/api" "k8s.io/kubernetes/pkg/apis/extensions"
<|file_name|>remakerings.py<|end_file_name|><|fim▁begin|>#!/home/mjwtom/install/python/bin/python # -*- coding: utf-8 -*- import os import subprocess from nodes import storage_nodes as ips def generate_rings(): print (os.environ["PATH"]) os.environ["PATH"] = '/home/mjwtom/install/python/bin' + ":" + os.environ["PATH"] print (os.environ["PATH"]) dev = 'sdb1' ETC_SWIFT='/etc/swift' if not os.path.exists(ETC_SWIFT): os.makedirs(ETC_SWIFT) if os.path.exists(ETC_SWIFT+'/backups'): cmd = ['rm', '-rf', '%s/backups' % ETC_SWIFT] subprocess.call(cmd) print 'current work path:%s' % os.getcwd() os.chdir(ETC_SWIFT) print 'change work path to:%s' % os.getcwd() files = os.listdir(ETC_SWIFT) for file in files: path = ETC_SWIFT + '/' + file if os.path.isdir(path): continue shotname, extentsion = os.path.splitext(file) if (extentsion == '.builder') or (extentsion == '.gz'): try: os.remove(path) except Exception as e: print e for builder, port in [('object.builder', 6000), ('object-1.builder', 6000), ('object-2.builder', 6000), ('container.builder', 6001), ('account.builder', 6002)]: cmd = ['swift-ring-builder', '%s' % builder, 'create', '10', '3', '1'] subprocess.call(cmd) i = 1 for ip in ips: cmd = ['swift-ring-builder', '%s' % builder,<|fim▁hole|> 'add', 'r%dz%d-%s:%d/%s' % (i, i, ip, port, dev), '1'] subprocess.call(cmd) i += 1 cmd = ['swift-ring-builder', '%s' % builder, 'rebalance'] subprocess.call(cmd) if __name__ == '__main__': generate_rings()<|fim▁end|>
<|file_name|>test_zplsc_c_echogram.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python import os from mi.logging import log from mi.dataset.parser.zplsc_c import ZplscCParser from mi.dataset.dataset_parser import DataSetDriverConfigKeys from mi.dataset.driver.zplsc_c.resource import RESOURCE_PATH __author__ = 'Rene Gelinas' <|fim▁hole|>CLASS_NAME = 'ZplscCRecoveredDataParticle' config = { DataSetDriverConfigKeys.PARTICLE_MODULE: MODULE_NAME, DataSetDriverConfigKeys.PARTICLE_CLASS: CLASS_NAME } def create_zplsc_c_parser(file_handle): """ This function creates a zplsc-c parser for recovered data. @param file_handle - File handle of the ZPLSC_C raw data. """ return ZplscCParser(config, file_handle, rec_exception_callback) def file_path(filename): log.debug('resource path = %s, file name = %s', RESOURCE_PATH, filename) return os.path.join(RESOURCE_PATH, filename) def rec_exception_callback(exception): """ Call back method to for exceptions @param exception - Exception that occurred """ log.info("Exception occurred: %s", exception.message) def zplsc_c_echogram_test(): with open(file_path('160501.01A')) as in_file: parser = create_zplsc_c_parser(in_file) parser.create_echogram() if __name__ == '__main__': zplsc_c_echogram_test()<|fim▁end|>
MODULE_NAME = 'mi.dataset.parser.zplsc_c'
<|file_name|>DifferentiationFailedException.java<|end_file_name|><|fim▁begin|>package filediff.myers;<|fim▁hole|> private static final long serialVersionUID = 1L; public DifferentiationFailedException() { } public DifferentiationFailedException(String msg) { super(msg); } }<|fim▁end|>
public class DifferentiationFailedException extends DiffException {
<|file_name|>application.go<|end_file_name|><|fim▁begin|>package gtka import ( "github.com/gotk3/gotk3/gtk" "github.com/coyim/gotk3adapter/gliba" "github.com/coyim/gotk3adapter/gtki" ) type application struct { *gliba.Application<|fim▁hole|> func wrapApplicationSimple(v *gtk.Application) *application { if v == nil { return nil } return &application{gliba.WrapApplicationSimple(&v.Application), v} } func wrapApplication(v *gtk.Application, e error) (*application, error) { return wrapApplicationSimple(v), e } func unwrapApplication(v gtki.Application) *gtk.Application { if v == nil { return nil } return v.(*application).internal } func (v *application) GetActiveWindow() gtki.Window { ret := wrapWindowSimple(v.internal.GetActiveWindow()) if ret == nil { return nil } return ret }<|fim▁end|>
internal *gtk.Application }
<|file_name|>DeviceModel.js<|end_file_name|><|fim▁begin|>/** * Model for Devices */ Ext.define('FHEM.model.DeviceModel', { extend: 'Ext.data.Model',<|fim▁hole|> } ] });<|fim▁end|>
fields: [ { name: 'DEVICE', type: 'text'
<|file_name|>perf.rs<|end_file_name|><|fim▁begin|>//~ #include <assert.h> //~ #include <string.h> //~ #include <stdio.h> //~ #include <stdlib.h> //~ #include <math.h> //~ #include <uv.h> //~ #include "cassandra.h" //~ /* //~ * Use this example with caution. It's just used as a scratch example for debugging and //~ * roughly analyzing performance. //~ */ //~ #define NUM_THREADS 1 //~ #define NUM_IO_WORKER_THREADS 4 //~ #define NUM_CONCURRENT_REQUESTS 10000 //~ #define NUM_SAMPLES 1000 //~ #define USE_PREPARED 1 //~ const char* big_string = "0123456701234567012345670123456701234567012345670123456701234567" //~ "0123456701234567012345670123456701234567012345670123456701234567" //~ "0123456701234567012345670123456701234567012345670123456701234567" //~ "0123456701234567012345670123456701234567012345670123456701234567" //~ "0123456701234567012345670123456701234567012345670123456701234567" //~ "0123456701234567012345670123456701234567012345670123456701234567" //~ "0123456701234567012345670123456701234567012345670123456701234567"; //~ CassUuidGen* uuid_gen; //~ typedef struct ThreadStats_ { //~ long count; //~ double total_averages; //~ double samples[NUM_SAMPLES]; //~ } ThreadStats; //~ void print_error(CassFuture* future) { //~ CassString message = cass_future_error_message(future); //~ fprintf(stderr, "Error: %.*s\n", (int)message.length, message.data); //~ } //~ CassCluster* create_cluster() { //~ CassCluster* cluster = cass_cluster_new(); //~ cass_cluster_set_contact_points(cluster, "127.0.0.1"); //~ cass_cluster_set_credentials(cluster, "cassandra", "cassandra"); //~ cass_cluster_set_num_threads_io(cluster, NUM_IO_WORKER_THREADS); //~ cass_cluster_set_queue_size_io(cluster, 10000); //~ cass_cluster_set_pending_requests_low_water_mark(cluster, 5000); //~ cass_cluster_set_pending_requests_high_water_mark(cluster, 10000); //~ cass_cluster_set_core_connections_per_host(cluster, 1); //~ cass_cluster_set_max_connections_per_host(cluster, 2); //~ return cluster; //~ } //~ 
CassError connect_session(CassSession* session, const CassCluster* cluster) { //~ CassError rc = CASS_OK; //~ CassFuture* future = cass_session_connect_keyspace(session, cluster, "examples"); //~ cass_future_wait(future); //~ rc = cass_future_error_code(future); //~ if (rc != CASS_OK) { //~ print_error(future); //~ } //~ cass_future_free(future); //~ return rc; //~ } //~ CassError execute_query(CassSession* session, const char* query) { //~ CassError rc = CASS_OK; //~ CassFuture* future = NULL; //~ CassStatement* statement = cass_statement_new(cass_string_init(query), 0); //~ future = cass_session_execute(session, statement); //~ cass_future_wait(future); //~ rc = cass_future_error_code(future); //~ if (rc != CASS_OK) { //~ print_error(future); //~ } //~ cass_future_free(future); //~ cass_statement_free(statement); //~ return rc; //~ } //~ CassError prepare_query(CassSession* session, CassString query, const CassPrepared** prepared) { //~ CassError rc = CASS_OK; //~ CassFuture* future = NULL; //~ future = cass_session_prepare(session, query); //~ cass_future_wait(future); //~ rc = cass_future_error_code(future); //~ if (rc != CASS_OK) { //~ print_error(future); //~ } else { //~ *prepared = cass_future_get_prepared(future); //~ } //~ cass_future_free(future); //~ return rc; //~ } //~ int compare_dbl(const void* d1, const void* d2) { //~ if (*((double*)d1) < *((double*)d2)) { //~ return -1; //~ } else if (*((double*)d1) > *((double*)d2)) { //~ return 1; //~ } else { //~ return 0; //~ } //~ } //~ void print_thread_stats(ThreadStats* thread_stats) { //~ double throughput_avg = 0.0; //~ double throughput_min = 0.0; //~ double throughput_median = 0.0; //~ double throughput_max = 0.0; //~ int index_median = ceil(0.5 * NUM_SAMPLES); //~ qsort(thread_stats->samples, NUM_SAMPLES, sizeof(double), compare_dbl); //~ throughput_avg = thread_stats->total_averages / thread_stats->count; //~ throughput_min = thread_stats->samples[0]; //~ throughput_median = 
thread_stats->samples[index_median]; //~ throughput_max = thread_stats->samples[NUM_SAMPLES - 1]; //~ printf("%d IO threads, %d requests/batch:\navg: %f\nmin: %f\nmedian: %f\nmax: %f\n", //~ NUM_IO_WORKER_THREADS, //~ NUM_CONCURRENT_REQUESTS, //~ throughput_avg, //~ throughput_min, //~ throughput_median, //~ throughput_max); //~ } //~ void insert_into_perf(CassSession* session, CassString query, const CassPrepared* prepared, //~ ThreadStats* thread_stats) { //~ int i; //~ double elapsed, throughput; //~ uint64_t start; //~ int num_requests = 0; //~ CassFuture* futures[NUM_CONCURRENT_REQUESTS]; //~ unsigned long thread_id = uv_thread_self(); //~ CassCollection* collection = cass_collection_new(CASS_COLLECTION_TYPE_SET, 2); //~ cass_collection_append_string(collection, cass_string_init("jazz")); //~ cass_collection_append_string(collection, cass_string_init("2013")); //~ start = uv_hrtime(); //~ for (i = 0; i < NUM_CONCURRENT_REQUESTS; ++i) { //~ CassUuid id; //~ CassStatement* statement; //~ if (prepared != NULL) { //~ statement = cass_prepared_bind(prepared); //~ } else { //~ statement = cass_statement_new(query, 5); //~ } //~ cass_uuid_gen_time(uuid_gen, &id); //~ cass_statement_bind_uuid(statement, 0, id); //~ cass_statement_bind_string(statement, 1, cass_string_init(big_string)); //~ cass_statement_bind_string(statement, 2, cass_string_init(big_string)); //~ cass_statement_bind_string(statement, 3, cass_string_init(big_string)); //~ cass_statement_bind_collection(statement, 4, collection); //~ futures[i] = cass_session_execute(session, statement); //~ cass_statement_free(statement);<|fim▁hole|>//~ CassFuture* future = futures[i]; //~ CassError rc = cass_future_error_code(future); //~ if (rc != CASS_OK) { //~ print_error(future); //~ } else { //~ num_requests++; //~ } //~ cass_future_free(future); //~ } //~ elapsed = (double)(uv_hrtime() - start) / 1000000000.0; //~ throughput = (double)num_requests / elapsed; //~ thread_stats->samples[thread_stats->count++] = 
throughput; //~ thread_stats->total_averages += throughput; //~ printf("%ld: average %f inserts/sec (%d, %f)\n", thread_id, thread_stats->total_averages / thread_stats->count, num_requests, elapsed); //~ cass_collection_free(collection); //~ } //~ void run_insert_queries(void* data) { //~ int i; //~ CassSession* session = (CassSession*)data; //~ const CassPrepared* insert_prepared = NULL; //~ CassString insert_query = cass_string_init("INSERT INTO songs (id, title, album, artist, tags) VALUES (?, ?, ?, ?, ?);"); //~ ThreadStats thread_stats; //~ thread_stats.count = 0; //~ thread_stats.total_averages = 0.0; //~ #if USE_PREPARED //~ if (prepare_query(session, insert_query, &insert_prepared) == CASS_OK) { //~ #endif //~ for (i = 0; i < NUM_SAMPLES; ++i) { //~ insert_into_perf(session, insert_query, insert_prepared, &thread_stats); //~ } //~ #if USE_PREPARED //~ cass_prepared_free(insert_prepared); //~ } //~ #endif //~ print_thread_stats(&thread_stats); //~ } //~ void select_from_perf(CassSession* session, CassString query, const CassPrepared* prepared, //~ ThreadStats* thread_stats) { //~ int i; //~ double elapsed, throughput; //~ uint64_t start; //~ int num_requests = 0; //~ CassFuture* futures[NUM_CONCURRENT_REQUESTS]; //~ unsigned long thread_id = uv_thread_self(); //~ start = uv_hrtime(); //~ for (i = 0; i < NUM_CONCURRENT_REQUESTS; ++i) { //~ CassStatement* statement; //~ if (prepared != NULL) { //~ statement = cass_prepared_bind(prepared); //~ } else { //~ statement = cass_statement_new(query, 0); //~ } //~ futures[i] = cass_session_execute(session, statement); //~ cass_statement_free(statement); //~ } //~ for (i = 0; i < NUM_CONCURRENT_REQUESTS; ++i) { //~ CassFuture* future = futures[i]; //~ CassError rc = cass_future_error_code(future); //~ if (rc != CASS_OK) { //~ print_error(future); //~ } else { //~ const CassResult* result = cass_future_get_result(future); //~ assert(cass_result_column_count(result) == 6); //~ cass_result_free(result); //~ 
num_requests++; //~ } //~ cass_future_free(future); //~ } //~ elapsed = (double)(uv_hrtime() - start) / 1000000000.0; //~ throughput = (double)num_requests / elapsed; //~ thread_stats->samples[thread_stats->count++] = throughput; //~ thread_stats->total_averages += throughput; //~ printf("%ld: average %f selects/sec (%d, %f)\n", thread_id, thread_stats->total_averages / thread_stats->count, num_requests, elapsed); //~ } //~ void run_select_queries(void* data) { //~ int i; //~ CassSession* session = (CassSession*)data; //~ const CassPrepared* select_prepared = NULL; //~ CassString select_query = cass_string_init("SELECT * FROM songs WHERE id = a98d21b2-1900-11e4-b97b-e5e358e71e0d"); //~ ThreadStats thread_stats; //~ thread_stats.count = 0; //~ thread_stats.total_averages = 0.0; //~ #if USE_PREPARED //~ if (prepare_query(session, select_query, &select_prepared) == CASS_OK) { //~ #endif //~ for (i = 0; i < NUM_SAMPLES; ++i) { //~ select_from_perf(session, select_query, select_prepared, &thread_stats); //~ } //~ #if USE_PREPARED //~ cass_prepared_free(select_prepared); //~ } //~ #endif //~ print_thread_stats(&thread_stats); //~ } //~ int main() { //~ int i; //~ uv_thread_t threads[NUM_THREADS]; //~ CassCluster* cluster = NULL; //~ CassSession* session = NULL; //~ CassFuture* close_future = NULL; //~ cass_log_set_level(CASS_LOG_INFO); //~ cluster = create_cluster(); //~ uuid_gen = cass_uuid_gen_new(); //~ session = cass_session_new(); //~ if (connect_session(session, cluster) != CASS_OK) { //~ cass_cluster_free(cluster); //~ cass_session_free(session); //~ return -1; //~ } //~ execute_query(session, //~ "INSERT INTO songs (id, title, album, artist, tags) VALUES " //~ "(a98d21b2-1900-11e4-b97b-e5e358e71e0d, " //~ "'La Petite Tonkinoise', 'Bye Bye Blackbird', 'Joséphine Baker', { 'jazz', '2013' });"); //~ #define DO_SELECTS //~ for (i = 0; i < NUM_THREADS; ++i) { //~ #ifdef DO_INSERTS //~ uv_thread_create(&threads[i], run_insert_queries, (void*)session); //~ #endif //~ 
#ifdef DO_SELECTS //~ uv_thread_create(&threads[i], run_select_queries, (void*)session); //~ #endif //~ } //~ for (i = 0; i < NUM_THREADS; ++i) { //~ uv_thread_join(&threads[i]); //~ } //~ close_future = cass_session_close(session); //~ cass_future_wait(close_future); //~ cass_future_free(close_future); //~ cass_cluster_free(cluster); //~ cass_uuid_gen_free(uuid_gen); //~ return 0; //~ }<|fim▁end|>
//~ } //~ for (i = 0; i < NUM_CONCURRENT_REQUESTS; ++i) {
<|file_name|>StatefulTabPanel.js<|end_file_name|><|fim▁begin|>/* global Ext, ViewStateManager, App */<|fim▁hole|> initComponent: function() { this.iconCls = this.iconCls || this.itemId; this.on( 'afterrender', this.onAfterRender, this); this.callParent( arguments); }, setActiveTab: function( tab, dontUpdateHistory) { this.callParent( arguments); if (!dontUpdateHistory) { ViewStateManager.change( tab.itemId); } }, addViewState: function( tab, path) { var i, item, newPath; for (i = 0; i < tab.items.length; i++) { item = tab.items.get( i); newPath = Ext.Array.clone( path); newPath.push( item); ViewStateManager.add( item.itemId, newPath); if (item instanceof App.ux.StatefulTabPanel) { this.addViewState( item, newPath); } } tab.viewStateDone = true; }, onAfterRender: function( tab) { if (!tab.viewStateDone) { if (tab.ownerCt instanceof App.ux.StatefulTabPanel) { tab = tab.ownerCt; } this.addViewState( tab, []); } } });<|fim▁end|>
Ext.define( 'App.ux.StatefulTabPanel', { extend: 'Ext.tab.Panel', alias: 'widget.statefultabpanel',
<|file_name|>subscription.py<|end_file_name|><|fim▁begin|>import json import re from trac.admin import IAdminCommandProvider from trac.attachment import Attachment, IAttachmentChangeListener from trac.core import Component, implements from trac.versioncontrol import ( RepositoryManager, NoSuchChangeset, IRepositoryChangeListener) from trac.web.api import HTTPNotFound, IRequestHandler, ITemplateStreamFilter from genshi.builder import tag from genshi.filters import Transformer from code_comments.api import ICodeCommentChangeListener from code_comments.comments import Comments class Subscription(object): """ Representation of a code comment subscription. """ id = 0 user = '' type = '' path = '' rev = '' repos = '' notify = True def __init__(self, env, data=None): if isinstance(data, dict): self.__dict__ = data self.env = env def __str__(self): """ Returns a user friendly string representation. """ template = "{0} for {1} {2}" if self.type == "changeset": _identifier = self.rev elif self.type == "browser": _identifier = "{0} @ {1}".format(self.path, self.rev) else: _identifier = self.path return template.format(self.user, self.type, _identifier) @classmethod def select(cls, env, args={}, notify=None): """ Retrieve existing subscription(s). """ select = 'SELECT * FROM code_comments_subscriptions' if notify: args['notify'] = bool(notify) if len(args) > 0: select += ' WHERE ' criteria = [] for key, value in args.iteritems(): template = '{0}={1}' if isinstance(value, basestring): template = '{0}=\'{1}\'' if (isinstance(value, tuple) or isinstance(value, list)): template = '{0} IN (\'{1}\')' value = '\',\''.join(value) if isinstance(value, bool): value = int(value) criteria.append(template.format(key, value)) select += ' AND '.join(criteria) cursor = env.get_read_db().cursor() cursor.execute(select) for row in cursor: yield cls._from_row(env, row) def insert(self, db=None): """ Insert a new subscription. Returns bool to indicate success. 
""" if self.id > 0: # Already has an id, don't insert return False else: with self.env.db_transaction as db: cursor = db.cursor() insert = ("INSERT INTO code_comments_subscriptions " "(user, type, path, repos, rev, notify) " "VALUES (%s, %s, %s, %s, %s, %s)") values = (self.user, self.type, self.path, self.repos, self.rev, self.notify) cursor.execute(insert, values) self.id = db.get_last_id(cursor, 'code_comments_subscriptions') return True def update(self, db=None): """ Update an existing subscription. Returns bool to indicate success. """ if self.id == 0: # Doesn't have a valid id, don't update return False else: with self.env.db_transaction as db: cursor = db.cursor() update = ("UPDATE code_comments_subscriptions SET " "user=%s, type=%s, path=%s, repos=%s, rev=%s, " "notify=%s WHERE id=%s") values = (self.user, self.type, self.path, self.repos, self.rev, self.notify, self.id) try: cursor.execute(update, values) except db.IntegrityError: self.env.log.warning("Subscription update failed.") return False return True def delete(self): """ Delete an existing subscription. """ if self.id > 0: with self.env.db_transaction as db: cursor = db.cursor() delete = ("DELETE FROM code_comments_subscriptions WHERE " "id=%s") cursor.execute(delete, (self.id,)) @classmethod def _from_row(cls, env, row): """ Creates a subscription from a list (representing a database row). """ try: subscription = cls(env) subscription.id = int(row[0]) subscription.user = row[1] subscription.type = row[2] subscription.path = row[3] subscription.repos = row[4] subscription.rev = row[5] subscription.notify = bool(row[6]) return subscription except IndexError: # Invalid row return None @classmethod def _from_dict(cls, env, dict_, create=True): """ Retrieves or (optionally) creates a subscription from a dict. 
""" subscription = None # Look for existing subscriptions args = { 'user': dict_['user'], 'type': dict_['type'], 'path': dict_['path'], 'repos': dict_['repos'], 'rev': dict_['rev'], } subscriptions = cls.select(env, args) # Only return the first one for _subscription in subscriptions: if subscription is None: subscription = _subscription env.log.info('Subscription found: [%d] %s', subscription.id, subscription) else: # The unique constraint on the table should prevent this ever # occurring env.log.warning('Multiple subscriptions found: [%d] %s', subscription.id, subscription) # (Optionally) create a new subscription if we didn't find one if subscription is None and create: subscription = cls(env, dict_) subscription.insert() env.log.info('Subscription created: [%d] %s', subscription.id, subscription) return subscription @classmethod def from_attachment(cls, env, attachment, user=None, notify=True): """ Creates a subscription from an Attachment object. """ _path = "/{0}/{1}/{2}".format(attachment.parent_realm, attachment.parent_id, attachment.filename) sub = { 'user': user or attachment.author, 'type': 'attachment', 'path': _path, 'repos': '', 'rev': '', 'notify': notify, } return cls._from_dict(env, sub) @classmethod def from_changeset(cls, env, changeset, user=None, notify=True): """ Creates a subscription from a Changeset object. """ sub = { 'user': user or changeset.author, 'type': 'changeset', 'path': '', 'repos': changeset.repos.reponame, 'rev': changeset.rev, 'notify': notify, } return cls._from_dict(env, sub) @classmethod def from_comment(cls, env, comment, user=None, notify=True): """ Creates a subscription from a Comment object. 
""" sub = { 'user': user or comment.author, 'type': comment.type, 'notify': notify, } # Munge attachments if comment.type == 'attachment': sub['path'] = comment.path.split(':')[1] sub['repos'] = '' sub['rev'] = '' # Munge changesets and browser if comment.type in ('changeset', 'browser'): if comment.type == 'browser': sub['path'] = comment.path else: sub['path'] = '' repo = RepositoryManager(env).get_repository(None) try: sub['repos'] = repo.reponame try: _cs = repo.get_changeset(comment.revision) sub['rev'] = _cs.rev except NoSuchChangeset: # Invalid changeset<|fim▁hole|> return cls._from_dict(env, sub) @classmethod def for_attachment(cls, env, attachment, path=None, notify=None): """ Returns all subscriptions for an attachment. The path can be overridden. """ path_template = "/{0}/{1}/{2}" _path = path or path_template.format(attachment.parent_realm, attachment.parent_id, attachment.filename) args = { 'type': 'attachment', 'path': _path, } return cls.select(env, args, notify) @classmethod def for_changeset(cls, env, changeset, notify=None): """ Returns all subscriptions for an changeset. """ args = { 'type': 'changeset', 'repos': changeset.repos.reponame, 'rev': changeset.rev, } return cls.select(env, args, notify) @classmethod def for_comment(cls, env, comment, notify=None): """ Return all subscriptions for a comment. """ args = {} if comment.type == 'attachment': args['type'] = comment.type args['path'] = comment.path.split(':')[1] if comment.type == 'changeset': args['type'] = comment.type args['rev'] = str(comment.revision) if comment.type == 'browser': args['type'] = ('browser', 'changeset') args['path'] = (comment.path, '') args['rev'] = str(comment.revision) return cls.select(env, args, notify) @classmethod def for_request(cls, env, req, create=False): """ Return a **single** subscription for a HTTP request. 
""" reponame = req.args.get('reponame') rm = RepositoryManager(env) repos = rm.get_repository(reponame) path = req.args.get('path') or '' rev = req.args.get('rev') or repos.youngest_rev dict_ = { 'user': req.authname, 'type': req.args.get('realm'), 'path': '', 'rev': '', 'repos': '', } if dict_['type'] == 'attachment': dict_['path'] = path if dict_['type'] == 'changeset': dict_['rev'] = path[1:] dict_['repos'] = repos.reponame if dict_['type'] == 'browser': if len(path) == 0: dict_['path'] = '/' else: dict_['path'] = path[1:] dict_['rev'] = rev dict_['repos'] = repos.reponame return cls._from_dict(env, dict_, create=create) class SubscriptionJSONEncoder(json.JSONEncoder): """ JSON Encoder for a Subscription object. """ def default(self, o): data = o.__dict__.copy() del data['env'] return data class SubscriptionAdmin(Component): """ trac-admin command provider for subscription administration. """ implements(IAdminCommandProvider) # IAdminCommandProvider methods def get_admin_commands(self): yield ('subscription seed', '', """Seeds subscriptions for existing attachments, changesets, and comments. 
""", None, self._do_seed) def _do_seed(self): # Create a subscription for all existing attachments cursor = self.env.get_read_db().cursor() cursor.execute("SELECT DISTINCT type, id FROM attachment") rows = cursor.fetchall() for row in rows: for attachment in Attachment.select(self.env, row[0], row[1]): Subscription.from_attachment(self.env, attachment) # Create a subscription for all existing revisions rm = RepositoryManager(self.env) repos = rm.get_real_repositories() for repo in repos: _rev = repo.get_oldest_rev() while _rev: try: _cs = repo.get_changeset(_rev) Subscription.from_changeset(self.env, _cs) except NoSuchChangeset: pass _rev = repo.next_rev(_rev) # Create a subscription for all existing comments comments = Comments(None, self.env).all() for comment in comments: Subscription.from_comment(self.env, comment) class SubscriptionListeners(Component): """ Automatically creates subscriptions for attachments, changesets, and comments. """ implements(IAttachmentChangeListener, IRepositoryChangeListener, ICodeCommentChangeListener) # IAttachmentChangeListener methods def attachment_added(self, attachment): Subscription.from_attachment(self.env, attachment) def attachment_deleted(self, attachment): for subscription in Subscription.for_attachment(self.env, attachment): subscription.delete() def attachment_reparented(self, attachment, old_parent_realm, old_parent_id): path_template = "/{0}/{1}/{2}" old_path = path_template.format(old_parent_realm, old_parent_id, attachment.filename) new_path = path_template.format(attachment.parent_realm, attachment.parent_id, attachment.filename) for subscription in Subscription.for_attachment(self.env, attachment, old_path): subscription.path = new_path subscription.update() # IRepositoryChangeListener methods def changeset_added(self, repos, changeset): Subscription.from_changeset(self.env, changeset) def changeset_modified(self, repos, changeset, old_changeset): if changeset.author != old_changeset.author: # Create a new author 
subscription Subscription.from_changeset(self.env, changeset) # ICodeCommentChangeListener methods def comment_created(self, comment): Subscription.from_comment(self.env, comment) class SubscriptionModule(Component): implements(IRequestHandler, ITemplateStreamFilter) # IRequestHandler methods def match_request(self, req): match = re.match(r'\/subscription\/(\w+)(\/?.*)$', req.path_info) if match: if match.group(1): req.args['realm'] = match.group(1) if match.group(2): req.args['path'] = match.group(2) return True def process_request(self, req): if req.method == 'POST': return self._do_POST(req) elif req.method == 'PUT': return self._do_PUT(req) return self._do_GET(req) # ITemplateStreamFilter methods def filter_stream(self, req, method, filename, stream, data): if re.match(r'^/(changeset|browser|attachment).*', req.path_info): filter = Transformer('//h1') stream |= filter.before(self._subscription_button(req.path_info)) return stream # Internal methods def _do_GET(self, req): subscription = Subscription.for_request(self.env, req) if subscription is None: req.send('', 'application/json', 204) req.send(json.dumps(subscription, cls=SubscriptionJSONEncoder), 'application/json') def _do_POST(self, req): subscription = Subscription.for_request(self.env, req, create=True) status = 201 req.send(json.dumps(subscription, cls=SubscriptionJSONEncoder), 'application/json', status) def _do_PUT(self, req): subscription = Subscription.for_request(self.env, req) if subscription is None: raise HTTPNotFound('Subscription to /%s%s for %s not found', req.args.get('realm'), req.args.get('path'), req.authname) content = req.read() if len(content) > 0: data = json.loads(content) subscription.notify = data['notify'] subscription.update() req.send(json.dumps(subscription, cls=SubscriptionJSONEncoder), 'application/json') def _subscription_button(self, path): """ Generates a (disabled) button to connect JavaScript to. 
""" return tag.button('Subscribe', id_='subscribe', disabled=True, title=('Code comment subscriptions require ' 'JavaScript to be enabled'), data_base_url=self.env.project_url or self.env.abs_href(), data_path=path)<|fim▁end|>
return None finally: repo.close()
<|file_name|>test_views.py<|end_file_name|><|fim▁begin|>""" Tests suite for the views of the private messages app. """ from django.test import TestCase, Client from django.conf import settings from django.core.urlresolvers import reverse from django.contrib.auth import get_user_model from django.utils import timezone from ..models import (PrivateMessage, BlockedUser) class NotificationsViewsTestCase(TestCase): """ Tests suite for the views.<|fim▁hole|> def setUp(self): """ Create a new user named "johndoe" with password "illpassword". """ self.user1 = get_user_model().objects.create_user(username='johndoe1', password='johndoe1', email='john.doe@example.com') self.user2 = get_user_model().objects.create_user(username='johndoe2', password='johndoe2', email='john.doe@example.com') self.user3 = get_user_model().objects.create_user(username='johndoe3', password='johndoe3', email='john.doe@example.com') self.msg1 = PrivateMessage.objects.create(sender=self.user1, recipient=self.user2, subject='Test message 1', body='Test message') self.msg2 = PrivateMessage.objects.create(sender=self.user1, recipient=self.user2, read_at=timezone.now(), subject='Test message 2', body='Test message') self.msg3 = PrivateMessage.objects.create(sender=self.user1, recipient=self.user2, recipient_deleted_at=timezone.now(), subject='Test message 3', body='Test message') self.msg4 = PrivateMessage.objects.create(sender=self.user1, recipient=self.user2, recipient_permanently_deleted=True, subject='Test message 4', body='Test message') self.msg5 = PrivateMessage.objects.create(sender=self.user2, recipient=self.user3, subject='Test message 5', body='Test message') self.block1 = BlockedUser.objects.create(user=self.user1, blocked_user=self.user2) def test_private_msg_list_view_available(self): """ Test the availability of the "private messages list" view. 
""" client = Client() client.login(username='johndoe2', password='johndoe2') response = client.get(reverse('privatemsg:inbox')) self.assertEqual(response.status_code, 200) self.assertTemplateUsed(response, 'privatemsg/inbox.html') self.assertIn('private_messages', response.context) self.assertQuerysetEqual(response.context['private_messages'], ['<PrivateMessage: Test message 2>', '<PrivateMessage: Test message 1>']) def test_read_private_msg_list_view_available(self): """ Test the availability of the "read private messages list" view. """ client = Client() client.login(username='johndoe2', password='johndoe2') response = client.get(reverse('privatemsg:inbox_read')) self.assertEqual(response.status_code, 200) self.assertTemplateUsed(response, 'privatemsg/inbox.html') self.assertIn('private_messages', response.context) self.assertQuerysetEqual(response.context['private_messages'], ['<PrivateMessage: Test message 2>']) def test_unread_private_msg_list_view_available(self): """ Test the availability of the "unread private messages list" view. """ client = Client() client.login(username='johndoe2', password='johndoe2') response = client.get(reverse('privatemsg:inbox_unread')) self.assertEqual(response.status_code, 200) self.assertTemplateUsed(response, 'privatemsg/inbox.html') self.assertIn('private_messages', response.context) self.assertQuerysetEqual(response.context['private_messages'], ['<PrivateMessage: Test message 1>']) def test_private_msg_list_view_redirect_not_login(self): """ Test the redirection of the "private messages list" view when not logged-in. """ client = Client() privatemsg_list_url = reverse('privatemsg:inbox') response = client.get(privatemsg_list_url) self.assertRedirects(response, '%s?next=%s' % (settings.LOGIN_URL, privatemsg_list_url)) def test_mark_all_private_msg_as_read_view_available(self): """ Test the availability of the "mark all private messages as read" view. 
""" client = Client() client.login(username='johndoe2', password='johndoe2') response = client.get(reverse('privatemsg:inbox_mark_all_as_read')) self.assertEqual(response.status_code, 200) self.assertTemplateUsed(response, 'privatemsg/mark_all_as_read.html') def test_mark_all_private_msg_as_read_view_redirect_not_login(self): """ Test the redirection of the "mark all private messages as read" view when not logged-in. """ client = Client() mark_all_as_read_url = reverse('privatemsg:inbox_mark_all_as_read') response = client.get(mark_all_as_read_url) self.assertRedirects(response, '%s?next=%s' % (settings.LOGIN_URL, mark_all_as_read_url)) def test_outbox_list_view_available(self): """ Test the availability of the "sent private messages list" view. """ client = Client() client.login(username='johndoe1', password='johndoe1') response = client.get(reverse('privatemsg:outbox')) self.assertEqual(response.status_code, 200) self.assertTemplateUsed(response, 'privatemsg/outbox.html') self.assertIn('private_messages', response.context) self.assertQuerysetEqual(response.context['private_messages'], ['<PrivateMessage: Test message 4>', '<PrivateMessage: Test message 3>', '<PrivateMessage: Test message 2>', '<PrivateMessage: Test message 1>']) def test_outbox_list_view_redirect_not_login(self): """ Test the redirection of the "sent private messages list" view when not logged-in. """ client = Client() outbox_url = reverse('privatemsg:outbox') response = client.get(outbox_url) self.assertRedirects(response, '%s?next=%s' % (settings.LOGIN_URL, outbox_url)) def test_trashbox_list_view_available(self): """ Test the availability of the "deleted private messages list" view. 
""" client = Client() client.login(username='johndoe2', password='johndoe2') response = client.get(reverse('privatemsg:trash')) self.assertEqual(response.status_code, 200) self.assertTemplateUsed(response, 'privatemsg/trashbox.html') self.assertIn('private_messages', response.context) self.assertQuerysetEqual(response.context['private_messages'], ['<PrivateMessage: Test message 3>']) def test_trashbox_list_view_redirect_not_login(self): """ Test the redirection of the "deleted private messages list" view when not logged-in. """ client = Client() trashbox_url = reverse('privatemsg:trash') response = client.get(trashbox_url) self.assertRedirects(response, '%s?next=%s' % (settings.LOGIN_URL, trashbox_url)) def test_delete_all_deleted_msg_permanently_view_available(self): """ Test the availability of the "empty trash" view. """ client = Client() client.login(username='johndoe2', password='johndoe2') response = client.get(reverse('privatemsg:delete_all_deleted_msg_permanently')) self.assertEqual(response.status_code, 200) self.assertTemplateUsed(response, 'privatemsg/trashbox_cleanup.html') def test_delete_all_deleted_msg_permanently_view_redirect_not_login(self): """ Test the redirection of the "empty trash" view when not logged-in. """ client = Client() delete_all_deleted_msg_url = reverse('privatemsg:delete_all_deleted_msg_permanently') response = client.get(delete_all_deleted_msg_url) self.assertRedirects(response, '%s?next=%s' % (settings.LOGIN_URL, delete_all_deleted_msg_url)) def test_msg_compose_view_available(self): """ Test the availability of the "compose message" view. """ client = Client() client.login(username='johndoe1', password='johndoe1') response = client.get(reverse('privatemsg:compose')) self.assertEqual(response.status_code, 200) self.assertTemplateUsed(response, 'privatemsg/msg_compose.html') def test_msg_compose_to_view_available(self): """ Test the availability of the "compose to message" view. 
""" client = Client() client.login(username='johndoe1', password='johndoe1') response = client.get(reverse('privatemsg:compose_to', kwargs={'recipient': 'johndoe2'})) self.assertEqual(response.status_code, 200) self.assertTemplateUsed(response, 'privatemsg/msg_compose.html') def test_msg_compose_view_redirect_not_login(self): """ Test the redirection of the "compose message" view when not logged-in. """ client = Client() compose_msg_url = reverse('privatemsg:compose') response = client.get(compose_msg_url) self.assertRedirects(response, '%s?next=%s' % (settings.LOGIN_URL, compose_msg_url)) def test_msg_detail_view_available_as_sender(self): """ Test the availability of the "message detail" view as sender. """ client = Client() client.login(username='johndoe1', password='johndoe1') response = client.get(reverse('privatemsg:msg_detail', kwargs={'pk': self.msg1.pk})) self.assertEqual(response.status_code, 200) self.assertTemplateUsed(response, 'privatemsg/msg_detail.html') self.assertIn('message', response.context) self.assertEqual(response.context['message'], self.msg1) self.assertIn('is_sender', response.context) self.assertTrue(response.context['is_sender']) self.assertIn('is_recipient', response.context) self.assertFalse(response.context['is_recipient']) def test_msg_detail_view_available_as_recipient(self): """ Test the availability of the "message detail" view as recipient. 
""" client = Client() client.login(username='johndoe2', password='johndoe2') response = client.get(reverse('privatemsg:msg_detail', kwargs={'pk': self.msg1.pk})) self.assertEqual(response.status_code, 200) self.assertTemplateUsed(response, 'privatemsg/msg_detail.html') self.assertIn('message', response.context) self.assertEqual(response.context['message'], self.msg1) self.assertIn('is_recipient', response.context) self.assertTrue(response.context['is_recipient']) self.assertIn('is_sender', response.context) self.assertFalse(response.context['is_sender']) def test_msg_detail_view_not_available_as_thirdparty(self): """ Test the UN-availability of the "message detail" view as a third party. """ client = Client() client.login(username='johndoe3', password='johndoe3') response = client.get(reverse('privatemsg:msg_detail', kwargs={'pk': self.msg1.pk})) self.assertEqual(response.status_code, 404) self.assertTemplateUsed(response, '404.html') def test_msg_detail_view_with_unknown_msg(self): """ Test the UN-availability of the "message detail" view with an unknown message PK. """ client = Client() client.login(username='johndoe1', password='johndoe1') response = client.get(reverse('privatemsg:msg_detail', kwargs={'pk': '1337'})) self.assertEqual(response.status_code, 404) self.assertTemplateUsed(response, '404.html') def test_msg_detail_view_redirect_not_login(self): """ Test the redirection of the "message detail" view when not logged-in. """ client = Client() msg_details_url = reverse('privatemsg:msg_detail', kwargs={'pk': self.msg1.pk}) response = client.get(msg_details_url) self.assertRedirects(response, '%s?next=%s' % (settings.LOGIN_URL, msg_details_url)) def test_msg_reply_view_available_as_sender(self): """ Test the availability of the "message reply" view as sender. 
""" client = Client() client.login(username='johndoe1', password='johndoe1') response = client.get(reverse('privatemsg:msg_reply', kwargs={'parent_pk': self.msg1.pk})) self.assertEqual(response.status_code, 200) self.assertTemplateUsed(response, 'privatemsg/msg_reply.html') self.assertIn('parent_msg', response.context) self.assertEqual(response.context['parent_msg'], self.msg1) def test_msg_reply_view_available_as_recipient(self): """ Test the availability of the "message reply" view as sender. """ client = Client() client.login(username='johndoe2', password='johndoe2') response = client.get(reverse('privatemsg:msg_reply', kwargs={'parent_pk': self.msg1.pk})) self.assertEqual(response.status_code, 200) self.assertTemplateUsed(response, 'privatemsg/msg_reply.html') self.assertIn('parent_msg', response.context) self.assertEqual(response.context['parent_msg'], self.msg1) def test_msg_reply_view_not_available_as_thirdparty(self): """ Test the UN-availability of the "message reply" view as a third party. """ client = Client() client.login(username='johndoe3', password='johndoe3') response = client.get(reverse('privatemsg:msg_reply', kwargs={'parent_pk': self.msg1.pk})) self.assertEqual(response.status_code, 404) self.assertTemplateUsed(response, '404.html') def test_msg_reply_view_with_unknown_msg(self): """ Test the UN-availability of the "message reply" view with an unknown message PK. """ client = Client() client.login(username='johndoe1', password='johndoe1') response = client.get(reverse('privatemsg:msg_reply', kwargs={'parent_pk': '1337'})) self.assertEqual(response.status_code, 404) self.assertTemplateUsed(response, '404.html') def test_msg_reply_view_redirect_not_login(self): """ Test the redirection of the "message reply" view when not logged-in. 
""" client = Client() msg_reply_url = reverse('privatemsg:msg_reply', kwargs={'parent_pk': self.msg1.pk}) response = client.get(msg_reply_url) self.assertRedirects(response, '%s?next=%s' % (settings.LOGIN_URL, msg_reply_url)) def test_my_account_view_redirect_not_login(self): """ Test the redirection of the "my account" view when not logged-in. """ client = Client() myaccount_url = reverse('privatemsg:myaccount') response = client.get(myaccount_url) self.assertRedirects(response, '%s?next=%s' % (settings.LOGIN_URL, myaccount_url)) def test_my_account_view_available(self): """ Test the availability of the "my account" view when logged-in. """ client = Client() client.login(username='johndoe1', password='johndoe1') response = client.get(reverse('privatemsg:myaccount')) self.assertEqual(response.status_code, 200) self.assertTemplateUsed(response, 'privatemsg/my_account.html') def test_msg_delete_view_available_as_sender(self): """ Test the availability of the "delete message" view as sender. """ client = Client() client.login(username='johndoe1', password='johndoe1') response = client.get(reverse('privatemsg:msg_delete', kwargs={'pk': self.msg1.pk})) self.assertEqual(response.status_code, 200) self.assertTemplateUsed(response, 'privatemsg/msg_delete_confirm.html') self.assertIn('message', response.context) self.assertEqual(response.context['message'], self.msg1) def test_msg_delete_view_available_as_recipient(self): """ Test the availability of the "delete message" view as sender. 
""" client = Client() client.login(username='johndoe2', password='johndoe2') response = client.get(reverse('privatemsg:msg_delete', kwargs={'pk': self.msg1.pk})) self.assertEqual(response.status_code, 200) self.assertTemplateUsed(response, 'privatemsg/msg_delete_confirm.html') self.assertIn('message', response.context) self.assertEqual(response.context['message'], self.msg1) def test_msg_delete_view_not_available_as_thirdparty(self): """ Test the UN-availability of the "delete message" view as a third party. """ client = Client() client.login(username='johndoe3', password='johndoe3') response = client.get(reverse('privatemsg:msg_delete', kwargs={'pk': self.msg1.pk})) self.assertEqual(response.status_code, 404) self.assertTemplateUsed(response, '404.html') def test_msg_delete_view_with_unknown_msg(self): """ Test the UN-availability of the "delete message" view with an unknown message PK. """ client = Client() client.login(username='johndoe1', password='johndoe1') response = client.get(reverse('privatemsg:msg_delete', kwargs={'pk': '1337'})) self.assertEqual(response.status_code, 404) self.assertTemplateUsed(response, '404.html') def test_msg_delete_view_redirect_not_login(self): """ Test the redirection of the "delete message" view when not logged-in. """ client = Client() msg_delete_url = reverse('privatemsg:msg_delete', kwargs={'pk': self.msg1.pk}) response = client.get(msg_delete_url) self.assertRedirects(response, '%s?next=%s' % (settings.LOGIN_URL, msg_delete_url)) def test_msg_delete_permanent_view_available_as_sender(self): """ Test the availability of the "permanently delete message" view as sender. 
""" client = Client() client.login(username='johndoe1', password='johndoe1') response = client.get(reverse('privatemsg:msg_delete_permanent', kwargs={'pk': self.msg1.pk})) self.assertEqual(response.status_code, 200) self.assertTemplateUsed(response, 'privatemsg/msg_delete_permanent_confirm.html') self.assertIn('message', response.context) self.assertEqual(response.context['message'], self.msg1) def test_msg_delete_permanent_view_available_as_recipient(self): """ Test the availability of the "permanently delete message" view as sender. """ client = Client() client.login(username='johndoe2', password='johndoe2') response = client.get(reverse('privatemsg:msg_delete_permanent', kwargs={'pk': self.msg1.pk})) self.assertEqual(response.status_code, 200) self.assertTemplateUsed(response, 'privatemsg/msg_delete_permanent_confirm.html') self.assertIn('message', response.context) self.assertEqual(response.context['message'], self.msg1) def test_msg_delete_permanent_view_not_available_as_thirdparty(self): """ Test the UN-availability of the "permanently delete message" view as a third party. """ client = Client() client.login(username='johndoe3', password='johndoe3') response = client.get(reverse('privatemsg:msg_delete_permanent', kwargs={'pk': self.msg1.pk})) self.assertEqual(response.status_code, 404) self.assertTemplateUsed(response, '404.html') def test_msg_delete_permanent_view_with_unknown_msg(self): """ Test the UN-availability of the "permanently delete message" view with an unknown message PK. """ client = Client() client.login(username='johndoe1', password='johndoe1') response = client.get(reverse('privatemsg:msg_delete_permanent', kwargs={'pk': '1337'})) self.assertEqual(response.status_code, 404) self.assertTemplateUsed(response, '404.html') def test_msg_delete_permanent_view_redirect_not_login(self): """ Test the redirection of the "permanently delete message" view when not logged-in. 
""" client = Client() msg_reply_url = reverse('privatemsg:msg_delete_permanent', kwargs={'pk': self.msg1.pk}) response = client.get(msg_reply_url) self.assertRedirects(response, '%s?next=%s' % (settings.LOGIN_URL, msg_reply_url)) def test_msg_undelete_view_available_as_sender(self): """ Test the availability of the "undelete message" view as sender. """ client = Client() client.login(username='johndoe1', password='johndoe1') response = client.get(reverse('privatemsg:msg_undelete', kwargs={'pk': self.msg1.pk})) self.assertEqual(response.status_code, 200) self.assertTemplateUsed(response, 'privatemsg/msg_undelete_confirm.html') self.assertIn('message', response.context) self.assertEqual(response.context['message'], self.msg1) def test_msg_undelete_view_available_as_recipient(self): """ Test the availability of the "undelete message" view as sender. """ client = Client() client.login(username='johndoe2', password='johndoe2') response = client.get(reverse('privatemsg:msg_undelete', kwargs={'pk': self.msg1.pk})) self.assertEqual(response.status_code, 200) self.assertTemplateUsed(response, 'privatemsg/msg_undelete_confirm.html') self.assertIn('message', response.context) self.assertEqual(response.context['message'], self.msg1) def test_msg_undelete_view_not_available_as_thirdparty(self): """ Test the UN-availability of the "undelete message" view as a third party. """ client = Client() client.login(username='johndoe3', password='johndoe3') response = client.get(reverse('privatemsg:msg_undelete', kwargs={'pk': self.msg1.pk})) self.assertEqual(response.status_code, 404) self.assertTemplateUsed(response, '404.html') def test_msg_undelete_view_with_unknown_msg(self): """ Test the UN-availability of the "undelete message" view with an unknown message PK. 
""" client = Client() client.login(username='johndoe1', password='johndoe1') response = client.get(reverse('privatemsg:msg_undelete', kwargs={'pk': '1337'})) self.assertEqual(response.status_code, 404) self.assertTemplateUsed(response, '404.html') def test_msg_undelete_view_with_permanently_deleted_msg(self): """ Test the UN-availability of the "undelete message" view with an already permanently deleted message PK. """ client = Client() client.login(username='johndoe2', password='johndoe2') response = client.get(reverse('privatemsg:msg_undelete', kwargs={'pk': self.msg4.pk})) self.assertEqual(response.status_code, 404) self.assertTemplateUsed(response, '404.html') def test_msg_undelete_view_redirect_not_login(self): """ Test the redirection of the "undelete message" view when not logged-in. """ client = Client() msg_undelete_url = reverse('privatemsg:msg_undelete', kwargs={'pk': self.msg1.pk}) response = client.get(msg_undelete_url) self.assertRedirects(response, '%s?next=%s' % (settings.LOGIN_URL, msg_undelete_url)) def test_blocked_user_list_view_available(self): """ Test the availability of the "blocked user list" view. """ client = Client() client.login(username='johndoe1', password='johndoe1') response = client.get(reverse('privatemsg:blocked_users')) self.assertEqual(response.status_code, 200) self.assertTemplateUsed(response, 'privatemsg/blocked_user_list.html') self.assertIn('blocked_users', response.context) self.assertQuerysetEqual(response.context['blocked_users'], ['<BlockedUser: User "johndoe1" blocking "johndoe2">']) def test_blocked_user_list_view_redirect_not_login(self): """ Test the redirection of the "undelete message" view when not logged-in. """ client = Client() msg_undelete_url = reverse('privatemsg:blocked_users') response = client.get(msg_undelete_url) self.assertRedirects(response, '%s?next=%s' % (settings.LOGIN_URL, msg_undelete_url)) def test_block_user_view_available(self): """ Test the availability of the "block user" view. 
""" client = Client() client.login(username='johndoe1', password='johndoe1') response = client.get(reverse('privatemsg:block_user', kwargs={'username': self.user2})) self.assertEqual(response.status_code, 200) self.assertTemplateUsed(response, 'privatemsg/block_user.html') self.assertIn('blocked_user', response.context) self.assertEqual(response.context['blocked_user'], self.user2) self.assertIn('trying_self_block', response.context) self.assertFalse(response.context['trying_self_block']) def test_block_user_view_available_self_block(self): """ Test the availability of the "block user" view when trying to block himself. """ client = Client() client.login(username='johndoe1', password='johndoe1') response = client.get(reverse('privatemsg:block_user', kwargs={'username': self.user1})) self.assertEqual(response.status_code, 200) self.assertTemplateUsed(response, 'privatemsg/block_user.html') self.assertIn('blocked_user', response.context) self.assertEqual(response.context['blocked_user'], self.user1) self.assertIn('trying_self_block', response.context) self.assertTrue(response.context['trying_self_block']) def test_block_user_view_available_staff_block(self): """ Test the availability of the "block user" view when trying to block an admin. """ client = Client() client.login(username='johndoe1', password='johndoe1') self.user3.is_staff = True self.user3.save() response = client.get(reverse('privatemsg:block_user', kwargs={'username': self.user3})) self.assertEqual(response.status_code, 200) self.assertTemplateUsed(response, 'privatemsg/block_user.html') self.assertIn('blocked_user', response.context) self.assertEqual(response.context['blocked_user'], self.user3) self.assertIn('trying_block_staff', response.context) self.assertTrue(response.context['trying_block_staff']) def test_block_user_view_with_unknown_nickname(self): """ Test the UN-availability of the "block user" view with an unknown user name. 
""" client = Client() client.login(username='johndoe2', password='johndoe2') response = client.get(reverse('privatemsg:block_user', kwargs={'username': 'unknown'})) self.assertEqual(response.status_code, 404) self.assertTemplateUsed(response, '404.html') def test_block_user_view_redirect_not_login(self): """ Test the redirection of the "undelete message" view when not logged-in. """ client = Client() msg_undelete_url = reverse('privatemsg:block_user', kwargs={'username': self.user1}) response = client.get(msg_undelete_url) self.assertRedirects(response, '%s?next=%s' % (settings.LOGIN_URL, msg_undelete_url)) def test_unblock_user_view_available(self): """ Test the availability of the "unblock user" view. """ client = Client() client.login(username='johndoe1', password='johndoe1') response = client.get(reverse('privatemsg:unblock_user', kwargs={'username': self.user2})) self.assertEqual(response.status_code, 200) self.assertTemplateUsed(response, 'privatemsg/unblock_user.html') self.assertIn('blocked_user', response.context) self.assertEqual(response.context['blocked_user'], self.user2) def test_unblock_user_view_with_unknown_nickname(self): """ Test the UN-availability of the "block user" view with an unknown user name. """ client = Client() client.login(username='johndoe2', password='johndoe2') response = client.get(reverse('privatemsg:unblock_user', kwargs={'username': 'unknown'})) self.assertEqual(response.status_code, 404) self.assertTemplateUsed(response, '404.html') def test_unblock_user_view_redirect_not_login(self): """ Test the redirection of the "undelete message" view when not logged-in. """ client = Client() msg_undelete_url = reverse('privatemsg:unblock_user', kwargs={'username': self.user1}) response = client.get(msg_undelete_url) self.assertRedirects(response, '%s?next=%s' % (settings.LOGIN_URL, msg_undelete_url))<|fim▁end|>
"""
<|file_name|>TestServer.java<|end_file_name|><|fim▁begin|>package org.jsoup.integration; import org.eclipse.jetty.server.Server; import org.eclipse.jetty.server.ServerConnector; import org.eclipse.jetty.servlet.ServletHandler; import org.jsoup.integration.servlets.BaseServlet; import java.util.concurrent.atomic.AtomicInteger; public class TestServer { private static final Server jetty = new Server(0); private static final ServletHandler handler = new ServletHandler(); private static AtomicInteger latch = new AtomicInteger(0); static { jetty.setHandler(handler); } private TestServer() { } public static void start() { synchronized (jetty) { int count = latch.getAndIncrement(); if (count == 0) { try { jetty.start(); } catch (Exception e) { throw new IllegalStateException(e); } }<|fim▁hole|> } public static void stop() { synchronized (jetty) { int count = latch.decrementAndGet(); if (count == 0) { try { jetty.stop(); } catch (Exception e) { throw new IllegalStateException(e); } } } } public static String map(Class<? extends BaseServlet> servletClass) { synchronized (jetty) { if (!jetty.isStarted()) start(); // if running out of the test cases String path = "/" + servletClass.getSimpleName(); handler.addServletWithMapping(servletClass, path + "/*"); int port = ((ServerConnector) jetty.getConnectors()[0]).getLocalPort(); return "http://localhost:" + port + path; } } }<|fim▁end|>
}
<|file_name|>decoratorMetadataForMethodWithNoReturnTypeAnnotation01.js<|end_file_name|><|fim▁begin|>//// [decoratorMetadataForMethodWithNoReturnTypeAnnotation01.ts] declare var decorator: any; class MyClass { constructor(test: string, test2: number) { } @decorator doSomething() { } } //// [decoratorMetadataForMethodWithNoReturnTypeAnnotation01.js] var MyClass = (function () { function MyClass(test, test2) { } MyClass.prototype.doSomething = function () { }; Object.defineProperty(MyClass.prototype, "doSomething", __decorate([ <|fim▁hole|> __metadata('design:paramtypes', []), __metadata('design:returntype', void 0) ], MyClass.prototype, "doSomething", Object.getOwnPropertyDescriptor(MyClass.prototype, "doSomething"))); return MyClass; })();<|fim▁end|>
decorator, __metadata('design:type', Function),
<|file_name|>assert_equal.py<|end_file_name|><|fim▁begin|># Copyright (c) 2016-2017 Dustin Doloff # Licensed under Apache License v2.0 import argparse import difflib import hashlib import os import subprocess import zipfile # Resets color formatting COLOR_END = '\33[0m' # Modifies characters or color COLOR_BOLD = '\33[1m' COLOR_DISABLED = '\33[02m' # Mostly just means darker # Sets the text color COLOR_GREEN = '\33[32m' COLOR_YELLOW = '\33[33m' COLOR_RED = '\33[31m' def parse_args(): parser = argparse.ArgumentParser(description='Asserts files are the same') parser.add_argument('--stamp', type=argparse.FileType('w+'), required=True, help='Stamp file to record action completed') parser.add_argument('--files', type=str, nargs='+', required=True) return parser.parse_args() def bytes_to_str(bytes): return bytes.decode('utf-8', 'backslashreplace') def color_diff(text_a, text_b): """ Compares two pieces of text and returns a tuple The first value is a colorized diff of the texts. The second value is a boolean, True if there was a diff, False if there wasn't. 
""" sequence_matcher = difflib.SequenceMatcher(None, text_a, text_b) colorized_diff = '' diff = False for opcode, a0, a1, b0, b1 in sequence_matcher.get_opcodes(): if opcode == 'equal': colorized_diff += bytes_to_str(sequence_matcher.a[a0:a1]) elif opcode == 'insert': colorized_diff += COLOR_BOLD + COLOR_GREEN + bytes_to_str(sequence_matcher.b[b0:b1]) + COLOR_END diff = True elif opcode == 'delete': colorized_diff += COLOR_BOLD + COLOR_RED + bytes_to_str(sequence_matcher.a[a0:a1]) + COLOR_END diff = True elif opcode == 'replace': colorized_diff += (COLOR_BOLD + COLOR_YELLOW + bytes_to_str(sequence_matcher.a[a0:a1]) + COLOR_DISABLED + bytes_to_str(sequence_matcher.b[b0:b1]) + COLOR_END) diff = True else: raise RuntimeError('unexpected opcode ' + opcode) return colorized_diff, diff def hash_file(file): """ Computes the SHA-256 hash of the file file - The file to hash """ hasher = hashlib.sha256() with open(file, 'rb') as f: for block in iter(lambda: f.read(1024), b''): hasher.update(block) return hasher.digest() def summarize(file): """ Summarizes a file via it's metadata to provide structured text for diffing """ summary = None if zipfile.is_zipfile(file): with zipfile.ZipFile(file) as zf: summary = '' for info in zf.infolist(): summary += 'Entry: (' summary += ', '.join(s + ': ' + repr(getattr(info, s)) for s in info.__slots__) summary += ') ' + os.linesep assert summary is not None, 'Unable to summarize %s' % file return summary def main(): args = parse_args() files = args.files assert len(files) >= 2, 'There must be at least two files to compare' files_hashes = set() max_file_size = 0 for file in files: files_hashes.add(hash_file(file)) max_file_size = max(max_file_size, os.stat(file).st_size) # Check hashes first if len(files_hashes) != 1: for i in range(len(files) - 1): file_a = files[i] file_b = files[i + 1] file_a_contents = None file_b_contents = None if max_file_size > 1024 * 1024: file_a_contents = summarize(file_a) file_b_contents = summarize(file_b) 
else: with open(file_a, 'rb') as a:<|fim▁hole|> diff, problem = color_diff(file_a_contents, file_b_contents) assert not problem, 'File {a} does not match {b}:{newline}{diff}'.format( a = file_a, b = file_b, newline = os.linesep, diff = diff) assert False, 'File hashes don\'t match.' with args.stamp as stamp_file: stamp_file.write(str(args)) if __name__ == '__main__': main()<|fim▁end|>
file_a_contents = a.read() with open(file_b, 'rb') as b: file_b_contents = b.read()
<|file_name|>gamepaddb.go<|end_file_name|><|fim▁begin|>// Copyright 2021 The Ebiten Authors // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. // gamecontrollerdb.txt is downloaded at https://github.com/gabomdq/SDL_GameControllerDB. // To update the database file, run: // // curl --location --remote-name https://raw.githubusercontent.com/gabomdq/SDL_GameControllerDB/master/gamecontrollerdb.txt //go:generate file2byteslice -package gamepaddb -input=./gamecontrollerdb.txt -output=./gamecontrollerdb.txt.go -var=gamecontrollerdbTxt package gamepaddb import ( "bufio" "bytes" "fmt" "io" "runtime" "strconv" "strings" "sync" ) type platform int const ( platformUnknown platform = iota platformWindows platformMacOS platformUnix platformAndroid platformIOS ) var currentPlatform platform func init() { if runtime.GOOS == "windows" { currentPlatform = platformWindows return } if runtime.GOOS == "aix" || runtime.GOOS == "dragonfly" || runtime.GOOS == "freebsd" || runtime.GOOS == "hurd" || runtime.GOOS == "illumos" || runtime.GOOS == "linux" || runtime.GOOS == "netbsd" || runtime.GOOS == "openbsd" || runtime.GOOS == "solaris" { currentPlatform = platformUnix return } if runtime.GOOS == "android" { currentPlatform = platformAndroid return } if isIOS { currentPlatform = platformIOS return } if runtime.GOOS == "darwin" { currentPlatform = platformMacOS return } } var additionalGLFWGamepads = []byte(` 78696e70757401000000000000000000,XInput Gamepad 
(GLFW),platform:Windows,a:b0,b:b1,x:b2,y:b3,leftshoulder:b4,rightshoulder:b5,back:b6,start:b7,leftstick:b8,rightstick:b9,leftx:a0,lefty:a1,rightx:a2,righty:a3,lefttrigger:a4,righttrigger:a5,dpup:h0.1,dpright:h0.2,dpdown:h0.4,dpleft:h0.8, 78696e70757402000000000000000000,XInput Wheel (GLFW),platform:Windows,a:b0,b:b1,x:b2,y:b3,leftshoulder:b4,rightshoulder:b5,back:b6,start:b7,leftstick:b8,rightstick:b9,leftx:a0,lefty:a1,rightx:a2,righty:a3,lefttrigger:a4,righttrigger:a5,dpup:h0.1,dpright:h0.2,dpdown:h0.4,dpleft:h0.8, 78696e70757403000000000000000000,XInput Arcade Stick (GLFW),platform:Windows,a:b0,b:b1,x:b2,y:b3,leftshoulder:b4,rightshoulder:b5,back:b6,start:b7,leftstick:b8,rightstick:b9,leftx:a0,lefty:a1,rightx:a2,righty:a3,lefttrigger:a4,righttrigger:a5,dpup:h0.1,dpright:h0.2,dpdown:h0.4,dpleft:h0.8, 78696e70757404000000000000000000,XInput Flight Stick (GLFW),platform:Windows,a:b0,b:b1,x:b2,y:b3,leftshoulder:b4,rightshoulder:b5,back:b6,start:b7,leftstick:b8,rightstick:b9,leftx:a0,lefty:a1,rightx:a2,righty:a3,lefttrigger:a4,righttrigger:a5,dpup:h0.1,dpright:h0.2,dpdown:h0.4,dpleft:h0.8, 78696e70757405000000000000000000,XInput Dance Pad (GLFW),platform:Windows,a:b0,b:b1,x:b2,y:b3,leftshoulder:b4,rightshoulder:b5,back:b6,start:b7,leftstick:b8,rightstick:b9,leftx:a0,lefty:a1,rightx:a2,righty:a3,lefttrigger:a4,righttrigger:a5,dpup:h0.1,dpright:h0.2,dpdown:h0.4,dpleft:h0.8, 78696e70757406000000000000000000,XInput Guitar (GLFW),platform:Windows,a:b0,b:b1,x:b2,y:b3,leftshoulder:b4,rightshoulder:b5,back:b6,start:b7,leftstick:b8,rightstick:b9,leftx:a0,lefty:a1,rightx:a2,righty:a3,lefttrigger:a4,righttrigger:a5,dpup:h0.1,dpright:h0.2,dpdown:h0.4,dpleft:h0.8, 78696e70757408000000000000000000,XInput Drum Kit (GLFW),platform:Windows,a:b0,b:b1,x:b2,y:b3,leftshoulder:b4,rightshoulder:b5,back:b6,start:b7,leftstick:b8,rightstick:b9,leftx:a0,lefty:a1,rightx:a2,righty:a3,lefttrigger:a4,righttrigger:a5,dpup:h0.1,dpright:h0.2,dpdown:h0.4,dpleft:h0.8, `) func init() { if _, err := 
Update(gamecontrollerdbTxt); err != nil { panic(err) } if _, err := Update(additionalGLFWGamepads); err != nil { panic(err) } } type mappingType int const ( mappingTypeButton mappingType = iota mappingTypeAxis mappingTypeHat ) const ( HatUp = 1 HatRight = 2 HatDown = 4 HatLeft = 8 ) type mapping struct { Type mappingType Index int AxisScale int AxisOffset int HatState int } var ( gamepadNames = map[string]string{} gamepadButtonMappings = map[string]map[StandardButton]*mapping{} gamepadAxisMappings = map[string]map[StandardAxis]*mapping{} mappingsM sync.RWMutex ) func processLine(line string, platform platform) error { line = strings.TrimSpace(line) if len(line) == 0 { return nil } if line[0] == '#' { return nil } tokens := strings.Split(line, ",") id := tokens[0] for _, token := range tokens[2:] { if len(token) == 0 { continue } tks := strings.Split(token, ":") // Note that the platform part is listed in the definition of SDL_GetPlatform. if tks[0] == "platform" { switch tks[1] { case "Windows": if platform != platformWindows { return nil } case "Mac OS X": if platform != platformMacOS { return nil } case "Linux": if platform != platformUnix { return nil } case "Android": if platform != platformAndroid { return nil } case "iOS": if platform != platformIOS { return nil } case "": // Allow any platforms default: return fmt.Errorf("gamepaddb: unexpected platform: %s", tks[1]) } continue } gb, err := parseMappingElement(tks[1]) if err != nil { return err } if b, ok := toStandardGamepadButton(tks[0]); ok { m, ok := gamepadButtonMappings[id] if !ok { m = map[StandardButton]*mapping{} gamepadButtonMappings[id] = m } m[b] = gb continue } if a, ok := toStandardGamepadAxis(tks[0]); ok { m, ok := gamepadAxisMappings[id] if !ok { m = map[StandardAxis]*mapping{} gamepadAxisMappings[id] = m } m[a] = gb continue } // The buttons like "misc1" are ignored so far. // There is no corresponding button in the Web standard gamepad layout. 
} gamepadNames[id] = tokens[1] return nil } func parseMappingElement(str string) (*mapping, error) { switch { case str[0] == 'a' || strings.HasPrefix(str, "+a") || strings.HasPrefix(str, "-a"): var tilda bool if str[len(str)-1] == '~' { str = str[:len(str)-1] tilda = true } min := -1 max := 1 numstr := str[1:] if str[0] == '+' { numstr = str[2:] min = 0 } else if str[0] == '-' { numstr = str[2:] max = 0 } scale := 2 / (max - min) offset := -(max + min) if tilda { scale = -scale offset = -offset } index, err := strconv.Atoi(numstr) if err != nil { return nil, err } return &mapping{ Type: mappingTypeAxis, Index: index, AxisScale: scale, AxisOffset: offset, }, nil case str[0] == 'b': index, err := strconv.Atoi(str[1:]) if err != nil { return nil, err } return &mapping{ Type: mappingTypeButton, Index: index, }, nil case str[0] == 'h': tokens := strings.Split(str[1:], ".") if len(tokens) < 2 { return nil, fmt.Errorf("gamepaddb: unexpected hat: %s", str) } index, err := strconv.Atoi(tokens[0]) if err != nil { return nil, err } hat, err := strconv.Atoi(tokens[1]) if err != nil { return nil, err } return &mapping{ Type: mappingTypeHat, Index: index, HatState: hat, }, nil } return nil, fmt.Errorf("gamepaddb: unepxected mapping: %s", str) } func toStandardGamepadButton(str string) (StandardButton, bool) { switch str { case "a": return StandardButtonRightBottom, true case "b": return StandardButtonRightRight, true case "x": return StandardButtonRightLeft, true case "y": return StandardButtonRightTop, true case "back": return StandardButtonCenterLeft, true case "start": return StandardButtonCenterRight, true case "guide": return StandardButtonCenterCenter, true case "leftshoulder": return StandardButtonFrontTopLeft, true case "rightshoulder": return StandardButtonFrontTopRight, true case "leftstick": return StandardButtonLeftStick, true case "rightstick": return StandardButtonRightStick, true case "dpup": return StandardButtonLeftTop, true case "dpright": return 
StandardButtonLeftRight, true case "dpdown": return StandardButtonLeftBottom, true case "dpleft": return StandardButtonLeftLeft, true case "lefttrigger": return StandardButtonFrontBottomLeft, true case "righttrigger": return StandardButtonFrontBottomRight, true default: return 0, false } } func toStandardGamepadAxis(str string) (StandardAxis, bool) { switch str { case "leftx": return StandardAxisLeftStickHorizontal, true case "lefty": return StandardAxisLeftStickVertical, true case "rightx": return StandardAxisRightStickHorizontal, true case "righty": return StandardAxisRightStickVertical, true default: return 0, false } } func buttonMappings(id string) map[StandardButton]*mapping { if m, ok := gamepadButtonMappings[id]; ok { return m } if currentPlatform == platformAndroid { // If the gamepad is not an HID API, use the default mapping on Android. if id[14] != 'h' { if addAndroidDefaultMappings(id) { return gamepadButtonMappings[id] } } } return nil } func axisMappings(id string) map[StandardAxis]*mapping { if m, ok := gamepadAxisMappings[id]; ok { return m } if currentPlatform == platformAndroid { // If the gamepad is not an HID API, use the default mapping on Android. 
if id[14] != 'h' { if addAndroidDefaultMappings(id) { return gamepadAxisMappings[id] } } } return nil } func HasStandardLayoutMapping(id string) bool { mappingsM.RLock() defer mappingsM.RUnlock() return buttonMappings(id) != nil || axisMappings(id) != nil } type GamepadState interface { Axis(index int) float64 Button(index int) bool Hat(index int) int } func Name(id string) string { mappingsM.RLock() defer mappingsM.RUnlock() return gamepadNames[id] } func AxisValue(id string, axis StandardAxis, state GamepadState) float64 { mappingsM.RLock() defer mappingsM.RUnlock() mappings := axisMappings(id) if mappings == nil { return 0 } mapping := mappings[axis] if mapping == nil { return 0 } switch mapping.Type { case mappingTypeAxis: v := state.Axis(mapping.Index)*float64(mapping.AxisScale) + float64(mapping.AxisOffset) if v > 1 { return 1 } else if v < -1 { return -1 } return v case mappingTypeButton: if state.Button(mapping.Index) { return 1 } else { return -1 } case mappingTypeHat: if state.Hat(mapping.Index)&mapping.HatState != 0 { return 1 } else { return -1 } } return 0 } func ButtonValue(id string, button StandardButton, state GamepadState) float64 { mappingsM.RLock() defer mappingsM.RUnlock() return buttonValue(id, button, state) } func buttonValue(id string, button StandardButton, state GamepadState) float64 { mappings := buttonMappings(id) if mappings == nil { return 0 } mapping := mappings[button] if mapping == nil { return 0 } switch mapping.Type { case mappingTypeAxis: v := state.Axis(mapping.Index)*float64(mapping.AxisScale) + float64(mapping.AxisOffset) if v > 1 { v = 1 } else if v < -1 { v = -1 } // Adjust [-1, 1] to [0, 1] return (v + 1) / 2 case mappingTypeButton: if state.Button(mapping.Index) { return 1 } return 0 case mappingTypeHat: if state.Hat(mapping.Index)&mapping.HatState != 0 { return 1 } return 0 } return 0 } func IsButtonPressed(id string, button StandardButton, state GamepadState) bool { // Use XInput's trigger dead zone. 
// See https://source.chromium.org/chromium/chromium/src/+/main:device/gamepad/public/cpp/gamepad.h;l=22-23;drc=6997f8a177359bb99598988ed5e900841984d242 const threshold = 30.0 / 255.0 mappingsM.RLock() defer mappingsM.RUnlock() mappings, ok := gamepadButtonMappings[id] if !ok { return false } mapping := mappings[button] if mapping == nil { return false } switch mapping.Type { case mappingTypeAxis: v := buttonValue(id, button, state) return v > threshold case mappingTypeButton: return state.Button(mapping.Index) case mappingTypeHat: return state.Hat(mapping.Index)&mapping.HatState != 0 } return false } // Update adds new gamepad mappings. // The string must be in the format of SDL_GameControllerDB. func Update(mapping []byte) (bool, error) { mappingsM.Lock() defer mappingsM.Unlock() buf := bytes.NewBuffer(mapping) r := bufio.NewReader(buf) for { line, err := r.ReadString('\n') if err != nil && err != io.EOF { return false, err } if err := processLine(line, currentPlatform); err != nil { return false, err } if err == io.EOF { break } } return true, nil } func addAndroidDefaultMappings(id string) bool { // See https://github.com/libsdl-org/SDL/blob/120c76c84bbce4c1bfed4e9eb74e10678bd83120/include/SDL_gamecontroller.h#L655-L680 const ( SDLControllerButtonA = 0 SDLControllerButtonB = 1 SDLControllerButtonX = 2 SDLControllerButtonY = 3 SDLControllerButtonBack = 4 SDLControllerButtonGuide = 5 SDLControllerButtonStart = 6 SDLControllerButtonLeftStick = 7 SDLControllerButtonRightStick = 8 SDLControllerButtonLeftShoulder = 9 SDLControllerButtonRightShoulder = 10 SDLControllerButtonDpadUp = 11 SDLControllerButtonDpadDown = 12 SDLControllerButtonDpadLeft = 13 SDLControllerButtonDpadRight = 14 ) // See https://github.com/libsdl-org/SDL/blob/120c76c84bbce4c1bfed4e9eb74e10678bd83120/include/SDL_gamecontroller.h#L550-L560 const ( SDLControllerAxisLeftX = 0 SDLControllerAxisLeftY = 1 SDLControllerAxisRightX = 2 SDLControllerAxisRightY = 3 SDLControllerAxisTriggerLeft = 4 
SDLControllerAxisTriggerRight = 5 ) // See https://github.com/libsdl-org/SDL/blob/120c76c84bbce4c1bfed4e9eb74e10678bd83120/src/joystick/SDL_gamecontroller.c#L468-L568 const faceButtonMask = ((1 << SDLControllerButtonA) | (1 << SDLControllerButtonB) | (1 << SDLControllerButtonX) | (1 << SDLControllerButtonY)) buttonMask := uint16(id[12]) | (uint16(id[13]) << 8) axisMask := uint16(id[14]) | (uint16(id[15]) << 8) if buttonMask == 0 && axisMask == 0 { return false } if buttonMask&faceButtonMask == 0 { return false } <|fim▁hole|> gamepadButtonMappings[id] = map[StandardButton]*mapping{} if buttonMask&(1<<SDLControllerButtonA) != 0 { gamepadButtonMappings[id][StandardButtonRightBottom] = &mapping{ Type: mappingTypeButton, Index: SDLControllerButtonA, } } if buttonMask&(1<<SDLControllerButtonB) != 0 { gamepadButtonMappings[id][StandardButtonRightRight] = &mapping{ Type: mappingTypeButton, Index: SDLControllerButtonB, } } else { // Use the back button as "B" for easy UI navigation with TV remotes. 
gamepadButtonMappings[id][StandardButtonRightRight] = &mapping{ Type: mappingTypeButton, Index: SDLControllerButtonBack, } buttonMask &^= uint16(1) << SDLControllerButtonBack } if buttonMask&(1<<SDLControllerButtonX) != 0 { gamepadButtonMappings[id][StandardButtonRightLeft] = &mapping{ Type: mappingTypeButton, Index: SDLControllerButtonX, } } if buttonMask&(1<<SDLControllerButtonY) != 0 { gamepadButtonMappings[id][StandardButtonRightTop] = &mapping{ Type: mappingTypeButton, Index: SDLControllerButtonY, } } if buttonMask&(1<<SDLControllerButtonBack) != 0 { gamepadButtonMappings[id][StandardButtonCenterLeft] = &mapping{ Type: mappingTypeButton, Index: SDLControllerButtonBack, } } if buttonMask&(1<<SDLControllerButtonGuide) != 0 { // TODO: If SDKVersion >= 30, add this code: // // gamepadButtonMappings[id][StandardButtonCenterCenter] = &mapping{ // Type: mappingTypeButton, // Index: SDLControllerButtonGuide, // } } if buttonMask&(1<<SDLControllerButtonStart) != 0 { gamepadButtonMappings[id][StandardButtonCenterRight] = &mapping{ Type: mappingTypeButton, Index: SDLControllerButtonStart, } } if buttonMask&(1<<SDLControllerButtonLeftStick) != 0 { gamepadButtonMappings[id][StandardButtonLeftStick] = &mapping{ Type: mappingTypeButton, Index: SDLControllerButtonLeftStick, } } if buttonMask&(1<<SDLControllerButtonRightStick) != 0 { gamepadButtonMappings[id][StandardButtonRightStick] = &mapping{ Type: mappingTypeButton, Index: SDLControllerButtonRightStick, } } if buttonMask&(1<<SDLControllerButtonLeftShoulder) != 0 { gamepadButtonMappings[id][StandardButtonFrontTopLeft] = &mapping{ Type: mappingTypeButton, Index: SDLControllerButtonLeftShoulder, } } if buttonMask&(1<<SDLControllerButtonRightShoulder) != 0 { gamepadButtonMappings[id][StandardButtonFrontTopRight] = &mapping{ Type: mappingTypeButton, Index: SDLControllerButtonRightShoulder, } } if buttonMask&(1<<SDLControllerButtonDpadUp) != 0 { gamepadButtonMappings[id][StandardButtonLeftTop] = &mapping{ Type: 
mappingTypeButton, Index: SDLControllerButtonDpadUp, } } if buttonMask&(1<<SDLControllerButtonDpadDown) != 0 { gamepadButtonMappings[id][StandardButtonLeftBottom] = &mapping{ Type: mappingTypeButton, Index: SDLControllerButtonDpadDown, } } if buttonMask&(1<<SDLControllerButtonDpadLeft) != 0 { gamepadButtonMappings[id][StandardButtonLeftLeft] = &mapping{ Type: mappingTypeButton, Index: SDLControllerButtonDpadLeft, } } if buttonMask&(1<<SDLControllerButtonDpadRight) != 0 { gamepadButtonMappings[id][StandardButtonLeftRight] = &mapping{ Type: mappingTypeButton, Index: SDLControllerButtonDpadRight, } } if axisMask&(1<<SDLControllerAxisLeftX) != 0 { gamepadAxisMappings[id][StandardAxisLeftStickHorizontal] = &mapping{ Type: mappingTypeAxis, Index: SDLControllerAxisLeftX, AxisScale: 1, AxisOffset: 0, } } if axisMask&(1<<SDLControllerAxisLeftY) != 0 { gamepadAxisMappings[id][StandardAxisLeftStickVertical] = &mapping{ Type: mappingTypeAxis, Index: SDLControllerAxisLeftY, AxisScale: 1, AxisOffset: 0, } } if axisMask&(1<<SDLControllerAxisRightX) != 0 { gamepadAxisMappings[id][StandardAxisRightStickHorizontal] = &mapping{ Type: mappingTypeAxis, Index: SDLControllerAxisRightX, AxisScale: 1, AxisOffset: 0, } } if axisMask&(1<<SDLControllerAxisRightY) != 0 { gamepadAxisMappings[id][StandardAxisRightStickVertical] = &mapping{ Type: mappingTypeAxis, Index: SDLControllerAxisRightY, AxisScale: 1, AxisOffset: 0, } } if axisMask&(1<<SDLControllerAxisTriggerLeft) != 0 { gamepadButtonMappings[id][StandardButtonFrontBottomLeft] = &mapping{ Type: mappingTypeAxis, Index: SDLControllerAxisTriggerLeft, AxisScale: 1, AxisOffset: 0, } } if axisMask&(1<<SDLControllerAxisTriggerRight) != 0 { gamepadButtonMappings[id][StandardButtonFrontBottomRight] = &mapping{ Type: mappingTypeAxis, Index: SDLControllerAxisTriggerRight, AxisScale: 1, AxisOffset: 0, } } return true }<|fim▁end|>
<|file_name|>csrf_test.py<|end_file_name|><|fim▁begin|>"""Tests for the CSRF helper.""" import unittest import mock import webapp2 import webtest from ctc.helpers import csrf from ctc.testing import testutil MOCKED_TIME = 123 # Tests don't need docstrings, so pylint: disable=C0111 # Tests can test protected members, so pylint: disable=W0212 class CsrfTests(testutil.CtcTestCase): # Helpers class TestHandler(csrf.CsrfHandler): """A handler for testing whether or not requests are CSRF protected.""" def get(self): self.response.write('CSRF Token:%s' % self.csrf_token)<|fim▁hole|> def put(self): pass def delete(self): pass def setUp(self): super(CsrfTests, self).setUp() # The CSRF library uses the time, so we mock it out. self.time_mock = mock.Mock() csrf.time = self.time_mock self.time_mock.time = mock.Mock(return_value=MOCKED_TIME) # The handler tests need a WSGIApplication. app = webapp2.WSGIApplication([('/', self.TestHandler)]) self.testapp = webtest.TestApp(app) def test_get_secret_key(self): first_key = csrf._get_secret_key() self.assertEqual(len(first_key), 32) second_key = csrf._get_secret_key() self.assertEqual(first_key, second_key) def test_tokens_are_equal(self): # It should fail if the tokens aren't equal length. self.assertFalse(csrf._tokens_are_equal('a', 'ab')) # It should fail if the tokens are different. self.assertFalse(csrf._tokens_are_equal('abcde', 'abcdf')) # It should succeed if the tokens are the same. self.assertTrue(csrf._tokens_are_equal('abcde', 'abcde')) # Make Token def test_make_token_includes_time(self): self.login() # It should get the current time. token1 = csrf.make_token() self.assertEqual(token1.split()[-1], str(MOCKED_TIME)) # It should use the provided time. token2 = csrf.make_token(token_time='456') self.assertEqual(token2.split()[-1], '456') # Different time should cause the digest to be different. 
self.assertNotEqual(token1.split()[0], token2.split()[0]) token3 = csrf.make_token(token_time='456') self.assertEqual(token2, token3) def test_make_token_requires_login(self): token1 = csrf.make_token() self.assertIsNone(token1) self.login() token2 = csrf.make_token() self.assertIsNotNone(token2) def test_make_token_includes_path(self): self.login() # It should get the current path. self.testbed.setup_env(PATH_INFO='/action/1', overwrite=True) token1 = csrf.make_token(token_time='123') self.testbed.setup_env(PATH_INFO='/action/23', overwrite=True) token2 = csrf.make_token(token_time='123') token3 = csrf.make_token(token_time='123') self.assertNotEqual(token1, token2) self.assertEqual(token2, token3) # It should let the client pass in a path. token4 = csrf.make_token(path='/action/4', token_time='123') token5 = csrf.make_token(path='/action/56', token_time='123') token6 = csrf.make_token(path='/action/56', token_time='123') self.assertNotEqual(token4, token5) self.assertEqual(token5, token6) # Token Is Valid def test_token_is_valid(self): self.login() # Token is required. self.assertFalse(csrf.token_is_valid(None)) # Token needs to have a timestamp on it. self.assertFalse(csrf.token_is_valid('hello')) # The timestamp needs to be within the current date range. self.time_mock.time = mock.Mock(return_value=9999999999999) self.assertFalse(csrf.token_is_valid('hello 123')) # The user needs to be logged in. token = csrf.make_token() self.logout() self.assertFalse(csrf.token_is_valid(token)) self.login() # Modifying the token should break everything. modified_token = '0' + token[1:] if token == modified_token: modified_token = '1' + token[1:] self.assertFalse(csrf.token_is_valid(modified_token)) # The original token that we got should work. 
self.assertTrue(csrf.token_is_valid(token)) def test_get_has_csrf_token(self): self.login() response = self.testapp.get('/', status=200).body self.assertIn('CSRF Token:', response) self.assertEqual(response.split(':')[-1], csrf.make_token()) def test_mutators_require_csrf_token(self): self.login() self.testapp.put('/', status=403) self.testapp.post('/', status=403) self.testapp.delete('/', status=403) csrf_param = 'csrf_token=' + csrf.make_token(path='/') self.testapp.put('/', params=csrf_param, status=200) self.testapp.post('/', params=csrf_param, status=200) # Though the spec allows DELETE to have a body, it tends to be ignored # by servers (http://stackoverflow.com/questions/299628), and webapp2 # ignores it as well, so we have to put the params in the URL. self.testapp.delete('/?' + csrf_param, status=200) if __name__ == '__main__': unittest.main()<|fim▁end|>
def post(self): pass
<|file_name|>assessments.go<|end_file_name|><|fim▁begin|>package migrate // Copyright (c) Microsoft and contributors. All rights reserved. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // // See the License for the specific language governing permissions and // limitations under the License. // // Code generated by Microsoft (R) AutoRest Code Generator. // Changes may cause incorrect behavior and will be lost if the code is regenerated. import ( "context" "github.com/Azure/go-autorest/autorest" "github.com/Azure/go-autorest/autorest/azure" "github.com/Azure/go-autorest/autorest/validation" "net/http" ) // AssessmentsClient is the move your workloads to Azure. type AssessmentsClient struct { BaseClient } // NewAssessmentsClient creates an instance of the AssessmentsClient client. func NewAssessmentsClient(subscriptionID string, acceptLanguage string) AssessmentsClient { return NewAssessmentsClientWithBaseURI(DefaultBaseURI, subscriptionID, acceptLanguage) } // NewAssessmentsClientWithBaseURI creates an instance of the AssessmentsClient client. func NewAssessmentsClientWithBaseURI(baseURI string, subscriptionID string, acceptLanguage string) AssessmentsClient { return AssessmentsClient{NewWithBaseURI(baseURI, subscriptionID, acceptLanguage)} } // Create create a new assessment with the given name and the specified settings. Since name of an assessment in a // project is a unique identiefier, if an assessment with the name provided already exists, then the existing // assessment is updated. 
// // Any PUT operation, resulting in either create or update on an assessment, will cause the assessment to go in a // "InProgress" state. This will be indicated by the field 'computationState' on the Assessment object. During this // time no other PUT operation will be allowed on that assessment object, nor will a Delete operation. Once the // computation for the assessment is complete, the field 'computationState' will be updated to 'Ready', and then other // PUT or DELETE operations can happen on the assessment. // // When assessment is under computation, any PUT will lead to a 400 - Bad Request error. // Parameters: // resourceGroupName - name of the Azure Resource Group that project is part of. // projectName - name of the Azure Migrate project. // groupName - unique name of a group within a project. // assessmentName - unique name of an assessment within a project. // assessment - new or Updated Assessment object. func (client AssessmentsClient) Create(ctx context.Context, resourceGroupName string, projectName string, groupName string, assessmentName string, assessment *Assessment) (result Assessment, err error) { if err := validation.Validate([]validation.Validation{ {TargetValue: assessment, Constraints: []validation.Constraint{{Target: "assessment", Name: validation.Null, Rule: false, Chain: []validation.Constraint{{Target: "assessment.AssessmentProperties", Name: validation.Null, Rule: true, Chain: []validation.Constraint{{Target: "assessment.AssessmentProperties.ScalingFactor", Name: validation.Null, Rule: true, Chain: nil}, {Target: "assessment.AssessmentProperties.DiscountPercentage", Name: validation.Null, Rule: true, Chain: nil}, }}, }}}}}); err != nil { return result, validation.NewError("migrate.AssessmentsClient", "Create", err.Error()) } req, err := client.CreatePreparer(ctx, resourceGroupName, projectName, groupName, assessmentName, assessment) if err != nil { err = autorest.NewErrorWithError(err, "migrate.AssessmentsClient", "Create", nil, 
"Failure preparing request") return } resp, err := client.CreateSender(req) if err != nil { result.Response = autorest.Response{Response: resp} err = autorest.NewErrorWithError(err, "migrate.AssessmentsClient", "Create", resp, "Failure sending request") return } result, err = client.CreateResponder(resp) if err != nil { err = autorest.NewErrorWithError(err, "migrate.AssessmentsClient", "Create", resp, "Failure responding to request") } return } // CreatePreparer prepares the Create request. func (client AssessmentsClient) CreatePreparer(ctx context.Context, resourceGroupName string, projectName string, groupName string, assessmentName string, assessment *Assessment) (*http.Request, error) { pathParameters := map[string]interface{}{ "assessmentName": autorest.Encode("path", assessmentName), "groupName": autorest.Encode("path", groupName), "projectName": autorest.Encode("path", projectName), "resourceGroupName": autorest.Encode("path", resourceGroupName), "subscriptionId": autorest.Encode("path", client.SubscriptionID), } const APIVersion = "2018-02-02" queryParameters := map[string]interface{}{ "api-version": APIVersion, } preparer := autorest.CreatePreparer( autorest.AsContentType("application/json; charset=utf-8"), autorest.AsPut(), autorest.WithBaseURL(client.BaseURI), autorest.WithPathParameters("/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Migrate/projects/{projectName}/groups/{groupName}/assessments/{assessmentName}", pathParameters), autorest.WithQueryParameters(queryParameters)) if assessment != nil { preparer = autorest.DecoratePreparer(preparer, autorest.WithJSON(assessment)) } if len(client.AcceptLanguage) > 0 { preparer = autorest.DecoratePreparer(preparer, autorest.WithHeader("Accept-Language", autorest.String(client.AcceptLanguage))) } return preparer.Prepare((&http.Request{}).WithContext(ctx)) } // CreateSender sends the Create request. The method will close the // http.Response Body if it receives an error. 
func (client AssessmentsClient) CreateSender(req *http.Request) (*http.Response, error) { return autorest.SendWithSender(client, req, azure.DoRetryWithRegistration(client.Client)) } // CreateResponder handles the response to the Create request. The method always // closes the http.Response Body. func (client AssessmentsClient) CreateResponder(resp *http.Response) (result Assessment, err error) { err = autorest.Respond( resp, client.ByInspecting(), azure.WithErrorUnlessStatusCode(http.StatusOK, http.StatusCreated), autorest.ByUnmarshallingJSON(&result), autorest.ByClosing()) result.Response = autorest.Response{Response: resp} return } // Delete delete an assessment from the project. The machines remain in the assessment. Deleting a non-existent // assessment results in a no-operation. // // When an assessment is under computation, as indicated by the 'computationState' field, it cannot be deleted. Any // such attempt will return a 400 - Bad Request. // Parameters: // resourceGroupName - name of the Azure Resource Group that project is part of. // projectName - name of the Azure Migrate project. // groupName - unique name of a group within a project. // assessmentName - unique name of an assessment within a project. 
func (client AssessmentsClient) Delete(ctx context.Context, resourceGroupName string, projectName string, groupName string, assessmentName string) (result autorest.Response, err error) { req, err := client.DeletePreparer(ctx, resourceGroupName, projectName, groupName, assessmentName) if err != nil { err = autorest.NewErrorWithError(err, "migrate.AssessmentsClient", "Delete", nil, "Failure preparing request") return } resp, err := client.DeleteSender(req) if err != nil { result.Response = resp err = autorest.NewErrorWithError(err, "migrate.AssessmentsClient", "Delete", resp, "Failure sending request") return } result, err = client.DeleteResponder(resp) if err != nil { err = autorest.NewErrorWithError(err, "migrate.AssessmentsClient", "Delete", resp, "Failure responding to request") } return } // DeletePreparer prepares the Delete request. func (client AssessmentsClient) DeletePreparer(ctx context.Context, resourceGroupName string, projectName string, groupName string, assessmentName string) (*http.Request, error) { pathParameters := map[string]interface{}{ "assessmentName": autorest.Encode("path", assessmentName), "groupName": autorest.Encode("path", groupName), "projectName": autorest.Encode("path", projectName), "resourceGroupName": autorest.Encode("path", resourceGroupName), "subscriptionId": autorest.Encode("path", client.SubscriptionID), } const APIVersion = "2018-02-02" queryParameters := map[string]interface{}{ "api-version": APIVersion, } preparer := autorest.CreatePreparer( autorest.AsDelete(), autorest.WithBaseURL(client.BaseURI), autorest.WithPathParameters("/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Migrate/projects/{projectName}/groups/{groupName}/assessments/{assessmentName}", pathParameters), autorest.WithQueryParameters(queryParameters)) if len(client.AcceptLanguage) > 0 { preparer = autorest.DecoratePreparer(preparer, autorest.WithHeader("Accept-Language", autorest.String(client.AcceptLanguage))) } return 
preparer.Prepare((&http.Request{}).WithContext(ctx)) } // DeleteSender sends the Delete request. The method will close the // http.Response Body if it receives an error. func (client AssessmentsClient) DeleteSender(req *http.Request) (*http.Response, error) { return autorest.SendWithSender(client, req, azure.DoRetryWithRegistration(client.Client)) } // DeleteResponder handles the response to the Delete request. The method always // closes the http.Response Body. func (client AssessmentsClient) DeleteResponder(resp *http.Response) (result autorest.Response, err error) { err = autorest.Respond( resp, client.ByInspecting(), azure.WithErrorUnlessStatusCode(http.StatusOK), autorest.ByClosing()) result.Response = resp return } // Get get an existing assessment with the specified name. Returns a json object of type 'assessment' as specified in // Models section. // Parameters: // resourceGroupName - name of the Azure Resource Group that project is part of. // projectName - name of the Azure Migrate project. // groupName - unique name of a group within a project. // assessmentName - unique name of an assessment within a project. func (client AssessmentsClient) Get(ctx context.Context, resourceGroupName string, projectName string, groupName string, assessmentName string) (result Assessment, err error) { req, err := client.GetPreparer(ctx, resourceGroupName, projectName, groupName, assessmentName) if err != nil { err = autorest.NewErrorWithError(err, "migrate.AssessmentsClient", "Get", nil, "Failure preparing request") return } resp, err := client.GetSender(req) if err != nil { result.Response = autorest.Response{Response: resp} err = autorest.NewErrorWithError(err, "migrate.AssessmentsClient", "Get", resp, "Failure sending request") return } result, err = client.GetResponder(resp) if err != nil { err = autorest.NewErrorWithError(err, "migrate.AssessmentsClient", "Get", resp, "Failure responding to request") } return } // GetPreparer prepares the Get request. 
func (client AssessmentsClient) GetPreparer(ctx context.Context, resourceGroupName string, projectName string, groupName string, assessmentName string) (*http.Request, error) { pathParameters := map[string]interface{}{ "assessmentName": autorest.Encode("path", assessmentName), "groupName": autorest.Encode("path", groupName), "projectName": autorest.Encode("path", projectName), "resourceGroupName": autorest.Encode("path", resourceGroupName), "subscriptionId": autorest.Encode("path", client.SubscriptionID), } const APIVersion = "2018-02-02" queryParameters := map[string]interface{}{ "api-version": APIVersion, } preparer := autorest.CreatePreparer( autorest.AsGet(), autorest.WithBaseURL(client.BaseURI), autorest.WithPathParameters("/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Migrate/projects/{projectName}/groups/{groupName}/assessments/{assessmentName}", pathParameters), autorest.WithQueryParameters(queryParameters)) if len(client.AcceptLanguage) > 0 { preparer = autorest.DecoratePreparer(preparer, autorest.WithHeader("Accept-Language", autorest.String(client.AcceptLanguage))) } return preparer.Prepare((&http.Request{}).WithContext(ctx)) } // GetSender sends the Get request. The method will close the // http.Response Body if it receives an error. func (client AssessmentsClient) GetSender(req *http.Request) (*http.Response, error) { return autorest.SendWithSender(client, req, azure.DoRetryWithRegistration(client.Client)) } // GetResponder handles the response to the Get request. The method always // closes the http.Response Body. 
func (client AssessmentsClient) GetResponder(resp *http.Response) (result Assessment, err error) { err = autorest.Respond( resp, client.ByInspecting(), azure.WithErrorUnlessStatusCode(http.StatusOK), autorest.ByUnmarshallingJSON(&result), autorest.ByClosing()) result.Response = autorest.Response{Response: resp} return } // GetReportDownloadURL get the URL for downloading the assessment in a report format. // Parameters: // resourceGroupName - name of the Azure Resource Group that project is part of. // projectName - name of the Azure Migrate project. // groupName - unique name of a group within a project. // assessmentName - unique name of an assessment within a project. func (client AssessmentsClient) GetReportDownloadURL(ctx context.Context, resourceGroupName string, projectName string, groupName string, assessmentName string) (result DownloadURL, err error) { req, err := client.GetReportDownloadURLPreparer(ctx, resourceGroupName, projectName, groupName, assessmentName) if err != nil { err = autorest.NewErrorWithError(err, "migrate.AssessmentsClient", "GetReportDownloadURL", nil, "Failure preparing request") return } resp, err := client.GetReportDownloadURLSender(req) if err != nil { result.Response = autorest.Response{Response: resp} err = autorest.NewErrorWithError(err, "migrate.AssessmentsClient", "GetReportDownloadURL", resp, "Failure sending request") return } result, err = client.GetReportDownloadURLResponder(resp) if err != nil { err = autorest.NewErrorWithError(err, "migrate.AssessmentsClient", "GetReportDownloadURL", resp, "Failure responding to request") } return } // GetReportDownloadURLPreparer prepares the GetReportDownloadURL request. 
func (client AssessmentsClient) GetReportDownloadURLPreparer(ctx context.Context, resourceGroupName string, projectName string, groupName string, assessmentName string) (*http.Request, error) { pathParameters := map[string]interface{}{ "assessmentName": autorest.Encode("path", assessmentName), "groupName": autorest.Encode("path", groupName), "projectName": autorest.Encode("path", projectName), "resourceGroupName": autorest.Encode("path", resourceGroupName), "subscriptionId": autorest.Encode("path", client.SubscriptionID), } const APIVersion = "2018-02-02" queryParameters := map[string]interface{}{ "api-version": APIVersion, } preparer := autorest.CreatePreparer( autorest.AsPost(), autorest.WithBaseURL(client.BaseURI), autorest.WithPathParameters("/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Migrate/projects/{projectName}/groups/{groupName}/assessments/{assessmentName}/downloadUrl", pathParameters), autorest.WithQueryParameters(queryParameters)) if len(client.AcceptLanguage) > 0 { preparer = autorest.DecoratePreparer(preparer, autorest.WithHeader("Accept-Language", autorest.String(client.AcceptLanguage))) } return preparer.Prepare((&http.Request{}).WithContext(ctx)) } // GetReportDownloadURLSender sends the GetReportDownloadURL request. The method will close the // http.Response Body if it receives an error. func (client AssessmentsClient) GetReportDownloadURLSender(req *http.Request) (*http.Response, error) { return autorest.SendWithSender(client, req, azure.DoRetryWithRegistration(client.Client)) } // GetReportDownloadURLResponder handles the response to the GetReportDownloadURL request. The method always // closes the http.Response Body. 
func (client AssessmentsClient) GetReportDownloadURLResponder(resp *http.Response) (result DownloadURL, err error) { err = autorest.Respond( resp, client.ByInspecting(), azure.WithErrorUnlessStatusCode(http.StatusOK), autorest.ByUnmarshallingJSON(&result), autorest.ByClosing()) result.Response = autorest.Response{Response: resp} return } // ListByGroup get all assessments created for the specified group. // // Returns a json array of objects of type 'assessment' as specified in Models section. // Parameters: // resourceGroupName - name of the Azure Resource Group that project is part of. // projectName - name of the Azure Migrate project. // groupName - unique name of a group within a project. func (client AssessmentsClient) ListByGroup(ctx context.Context, resourceGroupName string, projectName string, groupName string) (result AssessmentResultList, err error) { req, err := client.ListByGroupPreparer(ctx, resourceGroupName, projectName, groupName) if err != nil { err = autorest.NewErrorWithError(err, "migrate.AssessmentsClient", "ListByGroup", nil, "Failure preparing request") return } resp, err := client.ListByGroupSender(req) if err != nil { result.Response = autorest.Response{Response: resp} err = autorest.NewErrorWithError(err, "migrate.AssessmentsClient", "ListByGroup", resp, "Failure sending request") return } result, err = client.ListByGroupResponder(resp) if err != nil { err = autorest.NewErrorWithError(err, "migrate.AssessmentsClient", "ListByGroup", resp, "Failure responding to request") } return } // ListByGroupPreparer prepares the ListByGroup request. 
func (client AssessmentsClient) ListByGroupPreparer(ctx context.Context, resourceGroupName string, projectName string, groupName string) (*http.Request, error) { pathParameters := map[string]interface{}{ "groupName": autorest.Encode("path", groupName), "projectName": autorest.Encode("path", projectName), "resourceGroupName": autorest.Encode("path", resourceGroupName), "subscriptionId": autorest.Encode("path", client.SubscriptionID), } const APIVersion = "2018-02-02" queryParameters := map[string]interface{}{ "api-version": APIVersion, } preparer := autorest.CreatePreparer( autorest.AsGet(), autorest.WithBaseURL(client.BaseURI), autorest.WithPathParameters("/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Migrate/projects/{projectName}/groups/{groupName}/assessments", pathParameters), autorest.WithQueryParameters(queryParameters)) if len(client.AcceptLanguage) > 0 { preparer = autorest.DecoratePreparer(preparer, autorest.WithHeader("Accept-Language", autorest.String(client.AcceptLanguage))) } return preparer.Prepare((&http.Request{}).WithContext(ctx)) } // ListByGroupSender sends the ListByGroup request. The method will close the // http.Response Body if it receives an error. func (client AssessmentsClient) ListByGroupSender(req *http.Request) (*http.Response, error) {<|fim▁hole|> return autorest.SendWithSender(client, req, azure.DoRetryWithRegistration(client.Client)) } // ListByGroupResponder handles the response to the ListByGroup request. The method always // closes the http.Response Body. func (client AssessmentsClient) ListByGroupResponder(resp *http.Response) (result AssessmentResultList, err error) { err = autorest.Respond( resp, client.ByInspecting(), azure.WithErrorUnlessStatusCode(http.StatusOK), autorest.ByUnmarshallingJSON(&result), autorest.ByClosing()) result.Response = autorest.Response{Response: resp} return } // ListByProject get all assessments created in the project. 
// // Returns a json array of objects of type 'assessment' as specified in Models section. // Parameters: // resourceGroupName - name of the Azure Resource Group that project is part of. // projectName - name of the Azure Migrate project. func (client AssessmentsClient) ListByProject(ctx context.Context, resourceGroupName string, projectName string) (result AssessmentResultList, err error) { req, err := client.ListByProjectPreparer(ctx, resourceGroupName, projectName) if err != nil { err = autorest.NewErrorWithError(err, "migrate.AssessmentsClient", "ListByProject", nil, "Failure preparing request") return } resp, err := client.ListByProjectSender(req) if err != nil { result.Response = autorest.Response{Response: resp} err = autorest.NewErrorWithError(err, "migrate.AssessmentsClient", "ListByProject", resp, "Failure sending request") return } result, err = client.ListByProjectResponder(resp) if err != nil { err = autorest.NewErrorWithError(err, "migrate.AssessmentsClient", "ListByProject", resp, "Failure responding to request") } return } // ListByProjectPreparer prepares the ListByProject request. 
func (client AssessmentsClient) ListByProjectPreparer(ctx context.Context, resourceGroupName string, projectName string) (*http.Request, error) { pathParameters := map[string]interface{}{ "projectName": autorest.Encode("path", projectName), "resourceGroupName": autorest.Encode("path", resourceGroupName), "subscriptionId": autorest.Encode("path", client.SubscriptionID), } const APIVersion = "2018-02-02" queryParameters := map[string]interface{}{ "api-version": APIVersion, } preparer := autorest.CreatePreparer( autorest.AsGet(), autorest.WithBaseURL(client.BaseURI), autorest.WithPathParameters("/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Migrate/projects/{projectName}/assessments", pathParameters), autorest.WithQueryParameters(queryParameters)) if len(client.AcceptLanguage) > 0 { preparer = autorest.DecoratePreparer(preparer, autorest.WithHeader("Accept-Language", autorest.String(client.AcceptLanguage))) } return preparer.Prepare((&http.Request{}).WithContext(ctx)) } // ListByProjectSender sends the ListByProject request. The method will close the // http.Response Body if it receives an error. func (client AssessmentsClient) ListByProjectSender(req *http.Request) (*http.Response, error) { return autorest.SendWithSender(client, req, azure.DoRetryWithRegistration(client.Client)) } // ListByProjectResponder handles the response to the ListByProject request. The method always // closes the http.Response Body. func (client AssessmentsClient) ListByProjectResponder(resp *http.Response) (result AssessmentResultList, err error) { err = autorest.Respond( resp, client.ByInspecting(), azure.WithErrorUnlessStatusCode(http.StatusOK), autorest.ByUnmarshallingJSON(&result), autorest.ByClosing()) result.Response = autorest.Response{Response: resp} return }<|fim▁end|>
<|file_name|>templates.py<|end_file_name|><|fim▁begin|>import cgi import errno import io import mimetypes import os import posixpath import re import shutil import stat import sys import tempfile from os import path import django from django.conf import settings from django.core.management.base import BaseCommand, CommandError from django.core.management.utils import handle_extensions from django.template import Context, Engine from django.utils import archive, six from django.utils.six.moves.urllib.request import urlretrieve from django.utils.version import get_docs_version _drive_re = re.compile('^([a-z]):', re.I) _url_drive_re = re.compile('^([a-z])[:|]', re.I) class TemplateCommand(BaseCommand): """ Copies either a Django application layout template or a Django project layout template into the specified directory. :param style: A color style object (see django.core.management.color). :param app_or_project: The string 'app' or 'project'. :param name: The name of the application or project. :param directory: The directory to which the template should be copied. :param options: The additional variables passed to project or app templates """ requires_system_checks = False # The supported URL schemes url_schemes = ['http', 'https', 'ftp'] # Can't perform any active locale changes during this command, because # setting might not be available at all. leave_locale_alone = True # Rewrite the following suffixes when determining the target filename. rewrite_template_suffixes = ( # Allow shipping invalid .py files without byte-compilation. 
('.py-tpl', '.py'), ) def add_arguments(self, parser): parser.add_argument('name', help='Name of the application or project.') parser.add_argument('directory', nargs='?', help='Optional destination directory') parser.add_argument('--template', help='The path or URL to load the template from.') parser.add_argument( '--extension', '-e', dest='extensions', action='append', default=['py'], help='The file extension(s) to render (default: "py"). ' 'Separate multiple extensions with commas, or use ' '-e multiple times.' ) parser.add_argument( '--name', '-n', dest='files', action='append', default=[], help='The file name(s) to render. Separate multiple extensions ' 'with commas, or use -n multiple times.' ) def handle(self, app_or_project, name, target=None, **options): self.app_or_project = app_or_project self.paths_to_remove = [] self.verbosity = options['verbosity'] self.validate_name(name, app_or_project) # if some directory is given, make sure it's nicely expanded if target is None:<|fim▁hole|> top_dir = path.join(os.getcwd(), name) try: os.makedirs(top_dir) except OSError as e: if e.errno == errno.EEXIST: message = "'%s' already exists" % top_dir else: message = e raise CommandError(message) else: top_dir = os.path.abspath(path.expanduser(target)) if not os.path.exists(top_dir): raise CommandError("Destination directory '%s' does not " "exist, please create it first." 
% top_dir) extensions = tuple(handle_extensions(options['extensions'])) extra_files = [] for file in options['files']: extra_files.extend(map(lambda x: x.strip(), file.split(','))) if self.verbosity >= 2: self.stdout.write("Rendering %s template files with " "extensions: %s\n" % (app_or_project, ', '.join(extensions))) self.stdout.write("Rendering %s template files with " "filenames: %s\n" % (app_or_project, ', '.join(extra_files))) base_name = '%s_name' % app_or_project base_subdir = '%s_template' % app_or_project base_directory = '%s_directory' % app_or_project camel_case_name = 'camel_case_%s_name' % app_or_project camel_case_value = ''.join(x for x in name.title() if x != '_') context = Context(dict(options, **{ base_name: name, base_directory: top_dir, camel_case_name: camel_case_value, 'docs_version': get_docs_version(), 'django_version': django.__version__, 'unicode_literals': '' if six.PY3 else '# -*- coding: utf-8 -*-\n' 'from __future__ import unicode_literals\n\n', }), autoescape=False) # Setup a stub settings environment for template rendering if not settings.configured: settings.configure() django.setup() template_dir = self.handle_template(options['template'], base_subdir) prefix_length = len(template_dir) + 1 for root, dirs, files in os.walk(template_dir): path_rest = root[prefix_length:] relative_dir = path_rest.replace(base_name, name) if relative_dir: target_dir = path.join(top_dir, relative_dir) if not path.exists(target_dir): os.mkdir(target_dir) for dirname in dirs[:]: if dirname.startswith('.') or dirname == '__pycache__': dirs.remove(dirname) for filename in files: if filename.endswith(('.pyo', '.pyc', '.py.class')): # Ignore some files as they cause various breakages. 
continue old_path = path.join(root, filename) new_path = path.join(top_dir, relative_dir, filename.replace(base_name, name)) for old_suffix, new_suffix in self.rewrite_template_suffixes: if new_path.endswith(old_suffix): new_path = new_path[:-len(old_suffix)] + new_suffix break # Only rewrite once if path.exists(new_path): raise CommandError("%s already exists, overlaying a " "project or app into an existing " "directory won't replace conflicting " "files" % new_path) # Only render the Python files, as we don't want to # accidentally render Django templates files if new_path.endswith(extensions) or filename in extra_files: with io.open(old_path, 'r', encoding='utf-8') as template_file: content = template_file.read() template = Engine().from_string(content) content = template.render(context) with io.open(new_path, 'w', encoding='utf-8') as new_file: new_file.write(content) else: shutil.copyfile(old_path, new_path) if self.verbosity >= 2: self.stdout.write("Creating %s\n" % new_path) try: shutil.copymode(old_path, new_path) self.make_writeable(new_path) except OSError: self.stderr.write( "Notice: Couldn't set permission bits on %s. You're " "probably using an uncommon filesystem setup. No " "problem." % new_path, self.style.NOTICE) if self.paths_to_remove: if self.verbosity >= 2: self.stdout.write("Cleaning up temporary files.\n") for path_to_remove in self.paths_to_remove: if path.isfile(path_to_remove): os.remove(path_to_remove) else: shutil.rmtree(path_to_remove) def handle_template(self, template, subdir): """ Determines where the app or project templates are. Use django.__path__[0] as the default because we don't know into which directory Django has been installed. 
""" if template is None: return path.join(django.__path__[0], 'conf', subdir) else: if template.startswith('file://'): template = template[7:] expanded_template = path.expanduser(template) expanded_template = path.normpath(expanded_template) if path.isdir(expanded_template): return expanded_template if self.is_url(template): # downloads the file and returns the path absolute_path = self.download(template) else: absolute_path = path.abspath(expanded_template) if path.exists(absolute_path): return self.extract(absolute_path) raise CommandError("couldn't handle %s template %s." % (self.app_or_project, template)) def validate_name(self, name, app_or_project): if name is None: raise CommandError("you must provide %s %s name" % ( "an" if app_or_project == "app" else "a", app_or_project)) # If it's not a valid directory name. if six.PY2: if not re.search(r'^[_a-zA-Z]\w*$', name): # Provide a smart error message, depending on the error. if not re.search(r'^[_a-zA-Z]', name): message = 'make sure the name begins with a letter or underscore' else: message = 'use only numbers, letters and underscores' raise CommandError("%r is not a valid %s name. Please %s." % (name, app_or_project, message)) else: if not name.isidentifier(): raise CommandError( "%r is not a valid %s name. Please make sure the name is " "a valid identifier." % (name, app_or_project) ) def download(self, url): """ Downloads the given URL and returns the file name. 
""" def cleanup_url(url): tmp = url.rstrip('/') filename = tmp.split('/')[-1] if url.endswith('/'): display_url = tmp + '/' else: display_url = url return filename, display_url prefix = 'django_%s_template_' % self.app_or_project tempdir = tempfile.mkdtemp(prefix=prefix, suffix='_download') self.paths_to_remove.append(tempdir) filename, display_url = cleanup_url(url) if self.verbosity >= 2: self.stdout.write("Downloading %s\n" % display_url) try: the_path, info = urlretrieve(url, path.join(tempdir, filename)) except IOError as e: raise CommandError("couldn't download URL %s to %s: %s" % (url, filename, e)) used_name = the_path.split('/')[-1] # Trying to get better name from response headers content_disposition = info.get('content-disposition') if content_disposition: _, params = cgi.parse_header(content_disposition) guessed_filename = params.get('filename') or used_name else: guessed_filename = used_name # Falling back to content type guessing ext = self.splitext(guessed_filename)[1] content_type = info.get('content-type') if not ext and content_type: ext = mimetypes.guess_extension(content_type) if ext: guessed_filename += ext # Move the temporary file to a filename that has better # chances of being recognized by the archive utils if used_name != guessed_filename: guessed_path = path.join(tempdir, guessed_filename) shutil.move(the_path, guessed_path) return guessed_path # Giving up return the_path def splitext(self, the_path): """ Like os.path.splitext, but takes off .tar, too """ base, ext = posixpath.splitext(the_path) if base.lower().endswith('.tar'): ext = base[-4:] + ext base = base[:-4] return base, ext def extract(self, filename): """ Extracts the given file to a temporarily and returns the path of the directory with the extracted content. 
""" prefix = 'django_%s_template_' % self.app_or_project tempdir = tempfile.mkdtemp(prefix=prefix, suffix='_extract') self.paths_to_remove.append(tempdir) if self.verbosity >= 2: self.stdout.write("Extracting %s\n" % filename) try: archive.extract(filename, tempdir) return tempdir except (archive.ArchiveException, IOError) as e: raise CommandError("couldn't extract file %s to %s: %s" % (filename, tempdir, e)) def is_url(self, template): """ Returns True if the name looks like a URL """ if ':' not in template: return False scheme = template.split(':', 1)[0].lower() return scheme in self.url_schemes def make_writeable(self, filename): """ Make sure that the file is writeable. Useful if our source is read-only. """ if sys.platform.startswith('java'): # On Jython there is no os.access() return if not os.access(filename, os.W_OK): st = os.stat(filename) new_permissions = stat.S_IMODE(st.st_mode) | stat.S_IWUSR os.chmod(filename, new_permissions)<|fim▁end|>
<|file_name|>vs9to10.py<|end_file_name|><|fim▁begin|>#Run this file after automatic conversion of the VisualStudio 2008 solution by VisualStudio 2010. #This can be done whenever the 2008 solution changes. #It will make the necessary cleanup and updates to the vcxproj files #the .props files need to be maintained by hand if the .vsprops files change from __future__ import with_statement import sys import os import os.path def vs9to10(src, dest): for name in os.listdir(src): path, ext = os.path.splitext(name) if ext.lower() not in ('.vcxproj',): continue filename = os.path.normpath(os.path.join(src, name)) destname = os.path.normpath(os.path.join(dest, name)) print("%s -> %s" % (filename, destname)) lines = [] lastline = b"" importgroup = False with open(filename, 'rb') as fin: for line in fin: #remove redundant linker output info if b"<OutputLine>" in line: continue if b"<ProgramDatabaseFile>" in line: continue if b"<ImportLibrary>" in line and b"</ImportLibrary>" in line: continue #add new property sheet to the pythoncore if importgroup and "pythoncore" in name.lower(): if b"</ImportGroup>" in line: if b"debug.props" in lastline: lines.append(b' <Import Project="pythoncore_d.props" />\r\n') <|fim▁hole|> if b"<ImportGroup Condition" in line: importgroup = True elif b"</ImportGroup>" in line: importgroup = False lines.append(line) lastline = line with open(destname, 'wb') as fout: for line in lines: fout.write(line) if __name__ == "__main__": src = "." if len(sys.argv) < 2 else sys.argv[1] name = os.path.basename(os.path.abspath(src)) dest = os.path.abspath(os.path.join(src, "..", name + "Upd")) os.makedirs(dest) vs9to10(src, dest)<|fim▁end|>
elif b"pythoncore" not in lastline: lines.append(b' <Import Project="pythoncore.props" />\r\n')
<|file_name|>dsntool.py<|end_file_name|><|fim▁begin|>import collections import re import urlparse class DSN(collections.MutableMapping): ''' Hold the results of a parsed dsn. This is very similar to urlparse.ParseResult tuple. http://docs.python.org/2/library/urlparse.html#results-of-urlparse-and-urlsplit It exposes the following attributes: scheme schemes -- if your scheme has +'s in it, then this will contain a list of schemes split by + path paths -- the path segment split by /, so "/foo/bar" would be ["foo", "bar"] host -- same as hostname (I just like host better) hostname hostloc -- host:port username password netloc query -- a dict of the query string query_str -- the raw query string port fragment ''' DSN_REGEXP = re.compile(r'^\S+://\S+') FIELDS = ('scheme', 'netloc', 'path', 'params', 'query', 'fragment') def __init__(self, dsn, **defaults): ''' Parse a dsn to parts similar to urlparse. This is a nuts function that can serve as a good basis to parsing a custom dsn :param dsn: the dsn to parse :type dsn: str :param defaults: any values you want to have defaults for if they aren't in the dsn :type defaults: dict ''' assert self.DSN_REGEXP.match(dsn), \ "{} is invalid, only full dsn urls (scheme://host...) 
allowed".format(dsn) first_colon = dsn.find(':') scheme = dsn[0:first_colon] dsn_url = dsn[first_colon+1:] url = urlparse.urlparse(dsn_url) options = {}<|fim▁hole|> for k, kv in urlparse.parse_qs(url.query, True, True).iteritems(): if len(kv) > 1: options[k] = kv else: options[k] = kv[0] self.scheme = scheme self.hostname = url.hostname self.path = url.path self.params = url.params self.query = options self.fragment = url.fragment self.username = url.username self.password = url.password self.port = url.port self.query_str = url.query for k, v in defaults.iteritems(): self.set_default(k, v) def __iter__(self): for f in self.FIELDS: yield getattr(self, f, '') def __len__(self): return len(iter(self)) def __getitem__(self, field): return getattr(self, field, None) def __setitem__(self, field, value): setattr(self, field, value) def __delitem__(self, field): delattr(self, field) @property def schemes(self): '''the scheme, split by plus signs''' return self.scheme.split('+') @property def netloc(self): '''return username:password@hostname:port''' s = '' prefix = '' if self.username: s += self.username prefix = '@' if self.password: s += ":{}".format(self.password) prefix = '@' s += "{}{}".format(prefix, self.hostloc) return s @property def paths(self): '''the path attribute split by /''' return filter(None, self.path.split('/')) @property def host(self): '''the hostname, but I like host better''' return self.hostname @property def hostloc(self): '''return host:port''' hostloc = self.hostname if self.port: hostloc = '{}:{}'.format(hostloc, self.port) return hostloc def set_default(self, key, value): ''' Set a default value for key. This is different than dict's setdefault because it will set default either if the key doesn't exist, or if the value at the key evaluates to False, so an empty string or a None will value will be updated. 
:param key: the item to update :type key: str :param value: the items new value if key has a current value that evaluates to False ''' if not getattr(self, key, None): setattr(self, key, value) def get_url(self): '''return the dsn back into url form''' return urlparse.urlunparse(( self.scheme, self.netloc, self.path, self.params, self.query_str, self.fragment, )) def copy(self): return DSN(self.get_url()) def __str__(self): return self.get_url()<|fim▁end|>
if url.query:
<|file_name|>utils.test.ts<|end_file_name|><|fim▁begin|>import polarToCartesian from '../src/utils/polarToCartesian'; describe('GridUtils', () => { describe('polarToCartesian', () => { const config = { radius: 20,<|fim▁hole|> x: config.radius * Math.cos(config.angle), y: config.radius * Math.sin(config.angle), }; expect(polarToCartesian(config)).toEqual(expected); }); }); });<|fim▁end|>
angle: 20, }; it('should return cartesian output for the given polar input config', () => { const expected = {
<|file_name|>merge_h5.py<|end_file_name|><|fim▁begin|># encoding: utf-8 from __future__ import absolute_import, division, print_function import numpy as np import tables from liam2.data import merge_arrays, get_fields, index_table_light, merge_array_records from liam2.utils import timed, loop_wh_progress, merge_items __version__ = "0.4" def get_group_fields(node): if node is None: return {} # noinspection PyProtectedMember return {table._v_name: get_fields(table) for table in node._f_iter_nodes()} def merge_group(parent1, parent2, name, output_file, index_col): print() print(name) print('=' * len(name)) group1 = getattr(parent1, name, None) group2 = getattr(parent2, name, None) if group1 is None and group2 is None: print("node not found in either input files, skipped") return output_group = output_file.create_group("/", name) fields1 = get_group_fields(group1) fields2 = get_group_fields(group2) ent_names1 = set(fields1.keys()) ent_names2 = set(fields2.keys()) for ent_name in sorted(ent_names1 | ent_names2): print() print(ent_name) ent_fields1 = fields1.get(ent_name, []) ent_fields2 = fields2.get(ent_name, []) output_fields = merge_items(ent_fields1, ent_fields2) output_table = output_file.create_table(output_group, ent_name, np.dtype(output_fields)) if ent_name in ent_names1: table1 = getattr(group1, ent_name) # noinspection PyProtectedMember print(" * indexing table from %s ..." % group1._v_file.filename, end=' ') input1_rows = index_table_light(table1, index_col) print("done.") else: table1 = None input1_rows = {} if ent_name in ent_names2: table2 = getattr(group2, ent_name) # noinspection PyProtectedMember print(" * indexing table from %s ..." 
% group2._v_file.filename, end=' ') input2_rows = index_table_light(table2, index_col) print("done.") else: table2 = None input2_rows = {} print(" * merging: ", end=' ') input1_periods = set(input1_rows.keys()) input2_periods = set(input2_rows.keys()) output_periods = sorted(input1_periods | input2_periods) # noinspection PyUnusedLocal def merge_period(period_idx, period): if ent_name in ent_names1: start, stop = input1_rows.get(period, (0, 0)) input1_array = table1.read(start, stop) else: input1_array = None if ent_name in ent_names2: start, stop = input2_rows.get(period, (0, 0)) input2_array = table2.read(start, stop) else: input2_array = None if ent_name in ent_names1 and ent_name in ent_names2: if 'id' in input1_array.dtype.names: assert 'id' in input2_array.dtype.names output_array, _ = merge_arrays(input1_array, input2_array) else: output_array = merge_array_records(input1_array, input2_array) elif ent_name in ent_names1: output_array = input1_array elif ent_name in ent_names2: output_array = input2_array else: raise Exception("this shouldn't have happened") output_table.append(output_array) output_table.flush() loop_wh_progress(merge_period, output_periods) print(" done.") def merge_h5(input1_path, input2_path, output_path): input1_file = tables.open_file(input1_path) input2_file = tables.open_file(input2_path) output_file = tables.open_file(output_path, mode="w") input1root = input1_file.root input2root = input2_file.root <|fim▁hole|> merge_group(input1root, input2root, 'entities', output_file, 'period') input1_file.close() input2_file.close() output_file.close() if __name__ == '__main__': import sys import platform print("LIAM HDF5 merge %s using Python %s (%s)\n" % (__version__, platform.python_version(), platform.architecture()[0])) args = sys.argv if len(args) < 4: print("Usage: %s inputpath1 inputpath2 outputpath" % args[0]) sys.exit() timed(merge_h5, args[1], args[2], args[3])<|fim▁end|>
merge_group(input1root, input2root, 'globals', output_file, 'PERIOD')
<|file_name|>handlers.py<|end_file_name|><|fim▁begin|>""" Handlers for OpenID Connect provider. """ from django.conf import settings from django.core.cache import cache from courseware.access import has_access from openedx.core.djangoapps.content.course_overviews.models import CourseOverview from openedx.core.djangoapps.lang_pref import LANGUAGE_KEY from openedx.core.djangoapps.user_api.models import UserPreference from student.models import anonymous_id_for_user from student.models import UserProfile from student.roles import GlobalStaff, CourseStaffRole, CourseInstructorRole class OpenIDHandler(object): """ Basic OpenID Connect scope handler. """ def scope_openid(self, _data): """ Only override the sub (subject) claim. """ return ['sub'] def claim_sub(self, data): """ Return the value of the sub (subject) claim. The value should be unique for each user. """ # Use the anonymous ID without any course as unique identifier. # Note that this ID is derived using the value of the `SECRET_KEY` # setting, this means that users will have different sub # values for different deployments. value = anonymous_id_for_user(data['user'], None) return value class PermissionsHandler(object): """ Permissions scope handler """ def scope_permissions(self, _data): return ['administrator'] def claim_administrator(self, data): """ Return boolean indicating user's administrator status. For our purposes an administrator is any user with is_staff set to True. """ return data['user'].is_staff class ProfileHandler(object): """ Basic OpenID Connect `profile` scope handler with `locale` claim. """ def scope_profile(self, _data): """ Add specialized claims. """ return ['name', 'locale'] def claim_name(self, data): """ User displayable full name. """ user = data['user'] profile = UserProfile.objects.get(user=user) return profile.name def claim_locale(self, data): """ Return the locale for the users based on their preferences. 
Does not return a value if the users have not set their locale preferences. """ # Calling UserPreference directly because it is not clear which user made the request. language = UserPreference.get_value(data['user'], LANGUAGE_KEY)<|fim▁hole|> # If the user has no language specified, return the default one. if not language: language = settings.LANGUAGE_CODE return language class CourseAccessHandler(object): """ Defines two new scopes: `course_instructor` and `course_staff`. Each one is valid only if the user is instructor or staff of at least one course. Each new scope has a corresponding claim: `instructor_courses` and `staff_courses` that lists the course_ids for which the user has instructor or staff privileges. The claims support claim request values: if there is no claim request, the value of the claim is the list all the courses for which the user has the corresponding privileges. If a claim request is used, then the value of the claim the list of courses from the requested values that have the corresponding privileges. For example, if the user is staff of course_a and course_b but not course_c, the claim corresponding to the scope request: scope = openid course_staff has the value: {staff_courses: [course_a, course_b] } For the claim request: claims = {userinfo: {staff_courses: {values=[course_b, course_d]}}} the corresponding claim will have the value: {staff_courses: [course_b] }. This is useful to quickly determine if a user has the right privileges for a given course. For a description of the function naming and arguments, see: `edx_oauth2_provider/oidc/handlers.py` """ COURSE_CACHE_TIMEOUT = getattr(settings, 'OIDC_COURSE_HANDLER_CACHE_TIMEOUT', 60) # In seconds. def __init__(self, *_args, **_kwargs): self._course_cache = {} def scope_course_instructor(self, data): """ Scope `course_instructor` valid only if the user is an instructor of at least one course. 
""" # TODO: unfortunately there is not a faster and still correct way to # check if a user is instructor of at least one course other than # checking the access type against all known courses. course_ids = self.find_courses(data['user'], CourseInstructorRole.ROLE) return ['instructor_courses'] if course_ids else None def scope_course_staff(self, data): """ Scope `course_staff` valid only if the user is an instructor of at least one course. """ # TODO: see :method:CourseAccessHandler.scope_course_instructor course_ids = self.find_courses(data['user'], CourseStaffRole.ROLE) return ['staff_courses'] if course_ids else None def claim_instructor_courses(self, data): """ Claim `instructor_courses` with list of course_ids for which the user has instructor privileges. """ return self.find_courses(data['user'], CourseInstructorRole.ROLE, data.get('values')) def claim_staff_courses(self, data): """ Claim `staff_courses` with list of course_ids for which the user has staff privileges. """ return self.find_courses(data['user'], CourseStaffRole.ROLE, data.get('values')) def find_courses(self, user, access_type, values=None): """ Find all courses for which the user has the specified access type. If `values` is specified, check only the courses from `values`. """ # Check the instance cache and update if not present. The instance # cache is useful since there are multiple scope and claims calls in the # same request. key = (user.id, access_type) if key in self._course_cache: course_ids = self._course_cache[key] else: course_ids = self._get_courses_with_access_type(user, access_type) self._course_cache[key] = course_ids # If values was specified, filter out other courses. if values is not None: course_ids = list(set(course_ids) & set(values)) return course_ids # pylint: disable=missing-docstring def _get_courses_with_access_type(self, user, access_type): # Check the application cache and update if not present. 
The application # cache is useful since there are calls to different endpoints in close # succession, for example the id_token and user_info endpoints. key = '-'.join([str(self.__class__), str(user.id), access_type]) course_ids = cache.get(key) if not course_ids: course_keys = CourseOverview.get_all_course_keys() # Global staff have access to all courses. Filter courses for non-global staff. if not GlobalStaff().has_user(user): course_keys = [course_key for course_key in course_keys if has_access(user, access_type, course_key)] course_ids = [unicode(course_key) for course_key in course_keys] cache.set(key, course_ids, self.COURSE_CACHE_TIMEOUT) return course_ids class IDTokenHandler(OpenIDHandler, ProfileHandler, CourseAccessHandler, PermissionsHandler): """ Configure the ID Token handler for the LMS. """ def claim_instructor_courses(self, data): # Don't return list of courses unless they are requested as essential. if data.get('essential'): return super(IDTokenHandler, self).claim_instructor_courses(data) else: return None def claim_staff_courses(self, data): # Don't return list of courses unless they are requested as essential. if data.get('essential'): return super(IDTokenHandler, self).claim_staff_courses(data) else: return None class UserInfoHandler(OpenIDHandler, ProfileHandler, CourseAccessHandler, PermissionsHandler): """ Configure the UserInfo handler for the LMS. """ pass<|fim▁end|>
<|file_name|>ifdef.py<|end_file_name|><|fim▁begin|>#! /usr/bin/env python # Selectively preprocess #ifdef / #ifndef statements. # Usage: # ifdef [-Dname] ... [-Uname] ... [file] ... # # This scans the file(s), looking for #ifdef and #ifndef preprocessor # commands that test for one of the names mentioned in the -D and -U # options. On standard output it writes a copy of the input file(s) # minus those code sections that are suppressed by the selected # combination of defined/undefined symbols. The #if(n)def/#else/#else # lines themselfs (if the #if(n)def tests for one of the mentioned # names) are removed as well. # Features: Arbitrary nesting of recognized and unrecognized # preprocesor statements works correctly. Unrecognized #if* commands # are left in place, so it will never remove too much, only too # little. It does accept whitespace around the '#' character. # Restrictions: There should be no comments or other symbols on the # #if(n)def lines. The effect of #define/#undef commands in the input # file or in included files is not taken into account. Tests using # #if and the defined() pseudo function are not recognized. The #elif # command is not recognized. Improperly nesting is not detected. # Lines that look like preprocessor commands but which are actually # part of comments or string literals will be mistaken for # preprocessor commands. 
import sys import getopt defs = [] undefs = [] def main(): opts, args = getopt.getopt(sys.argv[1:], 'D:U:') for o, a in opts: if o == '-D': defs.append(a) if o == '-U': undefs.append(a) if not args: args = ['-'] for filename in args: if filename == '-': process(sys.stdin, sys.stdout) else: f = open(filename, 'r') process(f, sys.stdout) f.close() def process(fpi, fpo): keywords = ('if', 'ifdef', 'ifndef', 'else', 'endif') ok = 1 stack = [] while 1: line = fpi.readline() if not line: break while line[-2:] == '\\\n': nextline = fpi.readline() if not nextline: break line = line + nextline tmp = line.strip() if tmp[:1] != '#': if ok: fpo.write(line) continue tmp = tmp[1:].strip() words = tmp.split() keyword = words[0] if keyword not in keywords: if ok: fpo.write(line) continue if keyword in ('ifdef', 'ifndef') and len(words) == 2: if keyword == 'ifdef': ko = 1 else: ko = 0 word = words[1] if word in defs: stack.append((ok, ko, word)) if not ko: ok = 0 elif word in undefs: stack.append((ok, not ko, word)) <|fim▁hole|> stack.append((ok, -1, word)) if ok: fpo.write(line) elif keyword == 'if': stack.append((ok, -1, '')) if ok: fpo.write(line) elif keyword == 'else' and stack: s_ok, s_ko, s_word = stack[-1] if s_ko < 0: if ok: fpo.write(line) else: s_ko = not s_ko ok = s_ok if not s_ko: ok = 0 stack[-1] = s_ok, s_ko, s_word elif keyword == 'endif' and stack: s_ok, s_ko, s_word = stack[-1] if s_ko < 0: if ok: fpo.write(line) del stack[-1] ok = s_ok else: sys.stderr.write('Unknown keyword %s\n' % keyword) if stack: sys.stderr.write('stack: %s\n' % stack) if __name__ == '__main__': main()<|fim▁end|>
if ko: ok = 0 else:
<|file_name|>state.py<|end_file_name|><|fim▁begin|>"""Helpers that help with state related things.""" import asyncio import datetime as dt import json import logging from collections import defaultdict from types import TracebackType from typing import ( # noqa: F401 pylint: disable=unused-import Awaitable, Dict, Iterable, List, Optional, Tuple, Type, Union) from homeassistant.loader import bind_hass import homeassistant.util.dt as dt_util from homeassistant.components.notify import ( ATTR_MESSAGE, SERVICE_NOTIFY) from homeassistant.components.sun import ( STATE_ABOVE_HORIZON, STATE_BELOW_HORIZON) from homeassistant.components.mysensors.switch import ( ATTR_IR_CODE, SERVICE_SEND_IR_CODE) from homeassistant.components.cover import ( ATTR_POSITION, ATTR_TILT_POSITION) from homeassistant.const import ( ATTR_ENTITY_ID, ATTR_OPTION, SERVICE_ALARM_ARM_AWAY, SERVICE_ALARM_ARM_HOME, SERVICE_ALARM_DISARM, SERVICE_ALARM_TRIGGER, SERVICE_LOCK, SERVICE_TURN_OFF, SERVICE_TURN_ON, SERVICE_UNLOCK, SERVICE_OPEN_COVER, SERVICE_CLOSE_COVER, SERVICE_SET_COVER_POSITION, SERVICE_SET_COVER_TILT_POSITION, STATE_ALARM_ARMED_AWAY, STATE_ALARM_ARMED_HOME, STATE_ALARM_DISARMED, STATE_ALARM_TRIGGERED, STATE_CLOSED, STATE_HOME, STATE_LOCKED, STATE_NOT_HOME, STATE_OFF, STATE_ON, STATE_OPEN, STATE_UNKNOWN, STATE_UNLOCKED, SERVICE_SELECT_OPTION) from homeassistant.core import ( Context, State, DOMAIN as HASS_DOMAIN) from homeassistant.util.async_ import run_coroutine_threadsafe from .typing import HomeAssistantType _LOGGER = logging.getLogger(__name__) GROUP_DOMAIN = 'group' # Update this dict of lists when new services are added to HA. # Each item is a service with a list of required attributes. SERVICE_ATTRIBUTES = { SERVICE_NOTIFY: [ATTR_MESSAGE], SERVICE_SEND_IR_CODE: [ATTR_IR_CODE], SERVICE_SELECT_OPTION: [ATTR_OPTION], SERVICE_SET_COVER_POSITION: [ATTR_POSITION], SERVICE_SET_COVER_TILT_POSITION: [ATTR_TILT_POSITION] } # Update this dict when new services are added to HA. 
# Each item is a service with a corresponding state. SERVICE_TO_STATE = { SERVICE_TURN_ON: STATE_ON, SERVICE_TURN_OFF: STATE_OFF, SERVICE_ALARM_ARM_AWAY: STATE_ALARM_ARMED_AWAY, SERVICE_ALARM_ARM_HOME: STATE_ALARM_ARMED_HOME, SERVICE_ALARM_DISARM: STATE_ALARM_DISARMED, SERVICE_ALARM_TRIGGER: STATE_ALARM_TRIGGERED, SERVICE_LOCK: STATE_LOCKED, SERVICE_UNLOCK: STATE_UNLOCKED, SERVICE_OPEN_COVER: STATE_OPEN, SERVICE_CLOSE_COVER: STATE_CLOSED } class AsyncTrackStates: """ Record the time when the with-block is entered. Add all states that have changed since the start time to the return list when with-block is exited. Must be run within the event loop. """ def __init__(self, hass: HomeAssistantType) -> None: """Initialize a TrackStates block.""" self.hass = hass self.states = [] # type: List[State] # pylint: disable=attribute-defined-outside-init def __enter__(self) -> List[State]: """Record time from which to track changes.""" self.now = dt_util.utcnow() return self.states def __exit__(self, exc_type: Optional[Type[BaseException]], exc_value: Optional[BaseException], traceback: Optional[TracebackType]) -> None: """Add changes states to changes list.""" self.states.extend(get_changed_since(self.hass.states.async_all(), self.now)) def get_changed_since(states: Iterable[State], utc_point_in_time: dt.datetime) -> List[State]: """Return list of states that have been changed since utc_point_in_time.""" return [state for state in states if state.last_updated >= utc_point_in_time] @bind_hass def reproduce_state(hass: HomeAssistantType, states: Union[State, Iterable[State]], blocking: bool = False) -> None: """Reproduce given state.""" return run_coroutine_threadsafe( # type: ignore async_reproduce_state(hass, states, blocking), hass.loop).result() @bind_hass async def async_reproduce_state( hass: HomeAssistantType, states: Union[State, Iterable[State]], blocking: bool = False, context: Optional[Context] = None) -> None: """Reproduce a list of states on multiple domains.""" if 
isinstance(states, State): states = [states] to_call = defaultdict(list) # type: Dict[str, List[State]] for state in states: to_call[state.domain].append(state) async def worker(domain: str, data: List[State]) -> None: component = getattr(hass.components, domain) if hasattr(component, 'async_reproduce_states'): await component.async_reproduce_states( data, context=context) else: await async_reproduce_state_legacy( hass, domain, data, blocking=blocking, context=context) if to_call: # run all domains in parallel await asyncio.gather(*[<|fim▁hole|> @bind_hass async def async_reproduce_state_legacy( hass: HomeAssistantType, domain: str, states: Iterable[State], blocking: bool = False, context: Optional[Context] = None) -> None: """Reproduce given state.""" to_call = defaultdict(list) # type: Dict[Tuple[str, str], List[str]] if domain == GROUP_DOMAIN: service_domain = HASS_DOMAIN else: service_domain = domain for state in states: if hass.states.get(state.entity_id) is None: _LOGGER.warning("reproduce_state: Unable to find entity %s", state.entity_id) continue domain_services = hass.services.async_services().get(service_domain) if not domain_services: _LOGGER.warning( "reproduce_state: Unable to reproduce state %s (1)", state) continue service = None for _service in domain_services.keys(): if (_service in SERVICE_ATTRIBUTES and all(attr in state.attributes for attr in SERVICE_ATTRIBUTES[_service]) or _service in SERVICE_TO_STATE and SERVICE_TO_STATE[_service] == state.state): service = _service if (_service in SERVICE_TO_STATE and SERVICE_TO_STATE[_service] == state.state): break if not service: _LOGGER.warning( "reproduce_state: Unable to reproduce state %s (2)", state) continue # We group service calls for entities by service call # json used to create a hashable version of dict with maybe lists in it key = (service, json.dumps(dict(state.attributes), sort_keys=True)) to_call[key].append(state.entity_id) domain_tasks = [] # type: List[Awaitable[Optional[bool]]] for 
(service, service_data), entity_ids in to_call.items(): data = json.loads(service_data) data[ATTR_ENTITY_ID] = entity_ids domain_tasks.append( hass.services.async_call(service_domain, service, data, blocking, context) ) if domain_tasks: await asyncio.wait(domain_tasks, loop=hass.loop) def state_as_number(state: State) -> float: """ Try to coerce our state to a number. Raises ValueError if this is not possible. """ from homeassistant.components.climate import ( STATE_HEAT, STATE_COOL, STATE_IDLE) if state.state in (STATE_ON, STATE_LOCKED, STATE_ABOVE_HORIZON, STATE_OPEN, STATE_HOME, STATE_HEAT, STATE_COOL): return 1 if state.state in (STATE_OFF, STATE_UNLOCKED, STATE_UNKNOWN, STATE_BELOW_HORIZON, STATE_CLOSED, STATE_NOT_HOME, STATE_IDLE): return 0 return float(state.state)<|fim▁end|>
worker(domain, data) for domain, data in to_call.items() ])
<|file_name|>device_tracker.py<|end_file_name|><|fim▁begin|>"""Device tracker for Synology SRM routers.""" from __future__ import annotations import logging import synology_srm import voluptuous as vol from homeassistant.components.device_tracker import ( DOMAIN, PLATFORM_SCHEMA as DEVICE_TRACKER_PLATFORM_SCHEMA, DeviceScanner, ) from homeassistant.const import ( CONF_HOST, CONF_PASSWORD, CONF_PORT, CONF_SSL, CONF_USERNAME, CONF_VERIFY_SSL, ) from homeassistant.core import HomeAssistant import homeassistant.helpers.config_validation as cv from homeassistant.helpers.typing import ConfigType _LOGGER = logging.getLogger(__name__) DEFAULT_USERNAME = "admin" DEFAULT_PORT = 8001 DEFAULT_SSL = True DEFAULT_VERIFY_SSL = False PLATFORM_SCHEMA = DEVICE_TRACKER_PLATFORM_SCHEMA.extend( { vol.Required(CONF_HOST): cv.string, vol.Required(CONF_USERNAME, default=DEFAULT_USERNAME): cv.string, vol.Required(CONF_PASSWORD): cv.string, vol.Optional(CONF_PORT, default=DEFAULT_PORT): cv.port, vol.Optional(CONF_SSL, default=DEFAULT_SSL): cv.boolean, vol.Optional(CONF_VERIFY_SSL, default=DEFAULT_VERIFY_SSL): cv.boolean, } ) ATTRIBUTE_ALIAS = { "band": None, "connection": None, "current_rate": None, "dev_type": None, "hostname": None, "ip6_addr": None, "ip_addr": None,<|fim▁hole|> "is_high_qos": None, "is_low_qos": None, "is_manual_dev_type": None, "is_manual_hostname": None, "is_online": None, "is_parental_controled": "is_parental_controlled", "is_qos": None, "is_wireless": None, "mac": None, "max_rate": None, "mesh_node_id": None, "rate_quality": None, "signalstrength": "signal_strength", "transferRXRate": "transfer_rx_rate", "transferTXRate": "transfer_tx_rate", } def get_scanner(hass: HomeAssistant, config: ConfigType) -> DeviceScanner | None: """Validate the configuration and return Synology SRM scanner.""" scanner = SynologySrmDeviceScanner(config[DOMAIN]) return scanner if scanner.success_init else None class SynologySrmDeviceScanner(DeviceScanner): """This class scans for devices 
connected to a Synology SRM router.""" def __init__(self, config): """Initialize the scanner.""" self.client = synology_srm.Client( host=config[CONF_HOST], port=config[CONF_PORT], username=config[CONF_USERNAME], password=config[CONF_PASSWORD], https=config[CONF_SSL], ) if not config[CONF_VERIFY_SSL]: self.client.http.disable_https_verify() self.devices = [] self.success_init = self._update_info() _LOGGER.info("Synology SRM scanner initialized") def scan_devices(self): """Scan for new devices and return a list with found device IDs.""" self._update_info() return [device["mac"] for device in self.devices] def get_extra_attributes(self, device) -> dict: """Get the extra attributes of a device.""" device = next( (result for result in self.devices if result["mac"] == device), None ) filtered_attributes: dict[str, str] = {} if not device: return filtered_attributes for attribute, alias in ATTRIBUTE_ALIAS.items(): if (value := device.get(attribute)) is None: continue attr = alias or attribute filtered_attributes[attr] = value return filtered_attributes def get_device_name(self, device): """Return the name of the given device or None if we don't know.""" filter_named = [ result["hostname"] for result in self.devices if result["mac"] == device ] if filter_named: return filter_named[0] return None def _update_info(self): """Check the router for connected devices.""" _LOGGER.debug("Scanning for connected devices") try: self.devices = self.client.core.get_network_nsm_device({"is_online": True}) except synology_srm.http.SynologyException as ex: _LOGGER.error("Error with the Synology SRM: %s", ex) return False _LOGGER.debug("Found %d device(s) connected to the router", len(self.devices)) return True<|fim▁end|>
"is_baned": "is_banned", "is_beamforming_on": None, "is_guest": None,
<|file_name|>0006_course_coursesection.py<|end_file_name|><|fim▁begin|># Generated by Django 3.1.6 on 2022-01-26 20:48 import common.mixins from django.conf import settings import django.core.validators from django.db import migrations, models import django.db.models.deletion class Migration(migrations.Migration): dependencies = [ migrations.swappable_dependency(settings.AUTH_USER_MODEL), ("Scholarship", "0005_change_on_delete"), ] operations = [ migrations.CreateModel( name="Course", fields=[ (<|fim▁hole|> auto_created=True, primary_key=True, serialize=False, verbose_name="ID", ), ), ( "catalog_code", models.CharField( max_length=10, unique=True, validators=[ django.core.validators.RegexValidator(regex="[A-Z]+[0-9]+") ], ), ), ("title", models.CharField(max_length=100)), ], bases=(common.mixins.ModelMixin, models.Model), ), migrations.CreateModel( name="CourseSection", fields=[ ( "id", models.AutoField( auto_created=True, primary_key=True, serialize=False, verbose_name="ID", ), ), ( "term", models.CharField( choices=[ ("A", "A"), ("B", "B"), ("C", "C"), ("D", "D"), ("E", "E"), ("S", "S"), ("F", "F"), ], default="A", max_length=1, ), ), ( "year", models.PositiveIntegerField( validators=[django.core.validators.MaxValueValidator(99)] ), ), ("professor", models.CharField(max_length=100)), ( "catalog_course", models.ForeignKey( on_delete=django.db.models.deletion.CASCADE, to="Scholarship.course", to_field="catalog_code", ), ), ("participants", models.ManyToManyField(to=settings.AUTH_USER_MODEL)), ], bases=(common.mixins.ModelMixin, models.Model), ), ]<|fim▁end|>
"id", models.AutoField(
<|file_name|>parser.py<|end_file_name|><|fim▁begin|>""" functions for evaluating spreadsheet functions primary function is parse, which the rest revolves around evaluate should be called with the full string by a parent program A note on exec: This uses the exec function repeatedly, and where possible, use of it should be minimized, but the intention of this is only meant to be run on trusted spreadsheets. Future development of this may focus on it being more secure, but the primary goal is simply to evaluate the most common functions, regardless the ability for code to be injected. Another note: this whole thing could stand to be redone """ # import spreadsheet mirroring functions import eval.functions as functions import eval.translate as translate import eval.storage as global_file # historical reasons for name __author__ = 'user0' def evaluate(s, reference_dictionary=None): # if included, reference dictionary is a dictionary of relevant # cell references. # alternatively, if reference_dictionary is None, it is presumed # that it is not needed to replace references with values in the # formula. 
The reference_type arg, if none, defaults to 'sheet' if s[0] == '=': # get rid of the equals sign at the beginning of the formula s = s[1:] # send reference dictionary to storage global_file.formulas = reference_dictionary # I feel like I'm forgetting something else here return parse(s) def parse(s, function=None): # returns evaluation of formula via recursive function; # before this function is run, dependencies should be # identified and evaluated replace = {} it = 0 level = 0 # replace references with cell values s = s.lower() # for formula in global_file.formulas: # if formula in s: # s = s.replace(formula, str( # global_file.formulas[formula].return_value())) # replace values with python equivalents # ('^' with '**' for example) s = translate.spreadsheet_replace(s) # evaluate formula for char in s: if char == '(': level += 1 if level == 1: parent_start = it if char == ')': level -= 1 if level == 0: parent_close = it<|fim▁hole|> body = s[parent_start + 1: parent_close] formula = '{}({})'.format(prefix, body) replace[formula] = str(parse(prefix, body)) verbose('replacing {} with {}'.format(formula, replace[formula])) it += 1 # replace strings for entry in replace: s = s.replace(entry, replace[entry]) # depending on the presence of a function, either simply evaluate, # or use a function from functions if function: # if function is in the replacement dictionary, # replace it with that entry if function in functions.function_replace: function = functions.function_replace[function] else: print('function %s was not in function dictionary') % function # function just stopped sounding like a word # insert the formula in a python-readable format body_strings = s.split(',') # this is used below exec_string = '%s(body_strings)' % function else: # replace references with values and find result s = s.lower() for reference in global_file.formulas: while reference.lower() in s: replacement_cell = global_file.formulas[reference] if replacement_cell.data_type == 'string' and \ 
not replacement_cell.script: replacement = '\'%s\'' % replacement_cell.text else: replacement = replacement_cell.value s = s.replace(reference.lower(), replacement) exec_string = s exec_string = eval_append(exec_string) verbose(exec_string) exec(exec_string) return global_file.returned def get_prefix(formula_string, start): alpha = 'abcdefghijklmnopqrstuvwxyz' number = '.0123456789' prefix = '' string_position = start - 1 while True: character = formula_string[string_position] if string_position >= 0: if character in alpha or character in number: prefix = character + prefix else: return prefix else: return prefix string_position -= 1 def eval_append(s): prefix = 'global_file.returned = ' return prefix + s def verbose(s): # if verbose setting, print s if global_file.verbose: print(s)<|fim▁end|>
prefix = get_prefix(s, parent_start)
<|file_name|>mam.ts<|end_file_name|><|fim▁begin|>import { Agent } from '../'; import { mergeFields } from '../helpers/DataForms'; import * as JID from '../JID'; import { NS_MAM_2 } from '../Namespaces'; import { DataForm, DataFormField, IQ, MAMFin, MAMPrefs, MAMQuery, MAMResult, Message, ReceivedMessage } from '../protocol';<|fim▁hole|> declare module '../' { export interface Agent { getHistorySearchForm(jid: string): Promise<DataForm>; getHistoryPreferences(): Promise<MAMPrefs>; setHistoryPreferences(opts: Partial<MAMPrefs>): Promise<IQ>; searchHistory(opts: Partial<MAMQueryOptions>): Promise<MAMFin>; searchHistory(jid: string, opts: Partial<MAMQueryOptions>): Promise<MAMFin>; } export interface AgentEvents { 'mam:item': ReceivedMessage; } } export interface MAMQueryOptions extends MAMQuery { with?: string; start?: Date; end?: Date; } export default function (client: Agent): void { client.getHistorySearchForm = async (jid: string, opts: MAMQuery = {}) => { const res = await client.sendIQ<{ archive: MAMQuery }>({ archive: { type: 'query', version: opts.version }, to: jid, type: 'get' }); return res.archive.form!; }; client.searchHistory = async ( jidOrOpts: string | (Partial<MAMQueryOptions> & { to?: string }), opts: Partial<MAMQueryOptions> = {} ) => { const queryid = client.nextId(); let jid = ''; if (typeof jidOrOpts === 'string') { jid = jidOrOpts; } else { jid = jidOrOpts.to || ''; opts = jidOrOpts; } opts.queryId = queryid; const form: DataForm = opts.form || {}; form.type = 'submit'; const fields = form.fields || []; const defaultFields: DataFormField[] = [ { name: 'FORM_TYPE', type: 'hidden', value: NS_MAM_2 } ]; if (opts.with) { defaultFields.push({ name: 'with', type: 'text-single', value: opts.with }); } if (opts.start) { defaultFields.push({ name: 'start', type: 'text-single', value: opts.start.toISOString() }); } if (opts.end) { defaultFields.push({ name: 'end', type: 'text-single', value: opts.end.toISOString() }); } form.fields = 
mergeFields(defaultFields, fields); opts.form = form; const allowed = JID.allowedResponders(client.jid, jid); const results: MAMResult[] = []; const collector = (msg: Message) => { if (allowed.has(msg.from) && msg.archive && msg.archive.queryId === queryid) { results.push(msg.archive); } }; client.on('mam:item', collector); try { const resp = await client.sendIQ<IQ & { archive: MAMQuery }, IQ & { archive: MAMFin }>({ archive: opts, id: queryid, to: jid, type: 'set' }); return { ...resp.archive, results }; } finally { client.off('mam:item', collector); } }; client.getHistoryPreferences = async () => { const resp = await client.sendIQ({ archive: { type: 'preferences' }, type: 'get' }); return resp.archive; }; client.setHistoryPreferences = (opts: Partial<MAMPrefs>) => { return client.sendIQ({ archive: { type: 'preferences', ...opts }, type: 'set' }); }; client.on('message', msg => { if (msg.archive) { client.emit('mam:item', msg); } }); }<|fim▁end|>
<|file_name|>api.go<|end_file_name|><|fim▁begin|>package fsrateio import ( "io" "github.com/Symantec/Dominator/lib/rateio" "github.com/Symantec/tricorder/go/tricorder" "github.com/Symantec/tricorder/go/tricorder/units" ) type ReaderContext struct { maxBytesPerSecond uint64 maxBlocksPerSecond uint64 ctx *rateio.ReaderContext } func NewReaderContext(maxBytesPerSecond uint64, maxBlocksPerSecond uint64, speedPercent uint64) *ReaderContext { return newReaderContext(maxBytesPerSecond, maxBlocksPerSecond, speedPercent) } func (ctx *ReaderContext) GetContext() *rateio.ReaderContext { return ctx.ctx } func (ctx *ReaderContext) NewReader(rd io.Reader) *rateio.Reader { return ctx.ctx.NewReader(rd) } func (ctx *ReaderContext) RegisterMetrics(dir *tricorder.DirectorySpec) error { if ctx.maxBlocksPerSecond > 0 {<|fim▁hole|> return ctx.ctx.RegisterMetrics(dir, units.None, "file-system speed in blocks per second") } return ctx.ctx.RegisterMetrics(dir, units.BytePerSecond, "file-system speed") } func (ctx *ReaderContext) String() string { return ctx.format() }<|fim▁end|>
<|file_name|>training_stsbenchmark_continue_training.py<|end_file_name|><|fim▁begin|>""" This example loads the pre-trained SentenceTransformer model 'nli-distilroberta-base-v2' from the server. It then fine-tunes this model for some epochs on the STS benchmark dataset. Note: In this example, you must specify a SentenceTransformer model. If you want to fine-tune a huggingface/transformers model like bert-base-uncased, see training_nli.py and training_stsbenchmark.py """ from torch.utils.data import DataLoader import math from sentence_transformers import SentenceTransformer, LoggingHandler, losses, util, InputExample from sentence_transformers.evaluation import EmbeddingSimilarityEvaluator import logging from datetime import datetime import os import gzip import csv #### Just some code to print debug information to stdout logging.basicConfig(format='%(asctime)s - %(message)s', datefmt='%Y-%m-%d %H:%M:%S', level=logging.INFO, handlers=[LoggingHandler()]) #### /print debug information to stdout #Check if dataset exsist. If not, download and extract it sts_dataset_path = 'datasets/stsbenchmark.tsv.gz' if not os.path.exists(sts_dataset_path): util.http_get('https://sbert.net/datasets/stsbenchmark.tsv.gz', sts_dataset_path) # Read the dataset model_name = 'nli-distilroberta-base-v2' train_batch_size = 16 num_epochs = 4 model_save_path = 'output/training_stsbenchmark_continue_training-'+model_name+'-'+datetime.now().strftime("%Y-%m-%d_%H-%M-%S") # Load a pre-trained sentence transformer model model = SentenceTransformer(model_name) # Convert the dataset to a DataLoader ready for training logging.info("Read STSbenchmark train dataset") train_samples = [] dev_samples = [] test_samples = [] with gzip.open(sts_dataset_path, 'rt', encoding='utf8') as fIn: reader = csv.DictReader(fIn, delimiter='\t', quoting=csv.QUOTE_NONE) for row in reader: score = float(row['score']) / 5.0 # Normalize score to range 0 ... 
1 inp_example = InputExample(texts=[row['sentence1'], row['sentence2']], label=score) if row['split'] == 'dev': dev_samples.append(inp_example) elif row['split'] == 'test': test_samples.append(inp_example) else: train_samples.append(inp_example) train_dataloader = DataLoader(train_samples, shuffle=True, batch_size=train_batch_size) train_loss = losses.CosineSimilarityLoss(model=model) # Development set: Measure correlation between cosine score and gold labels logging.info("Read STSbenchmark dev dataset") evaluator = EmbeddingSimilarityEvaluator.from_input_examples(dev_samples, name='sts-dev') # Configure the training. We skip evaluation in this example warmup_steps = math.ceil(len(train_dataloader) * num_epochs * 0.1) #10% of train data for warm-up logging.info("Warmup-steps: {}".format(warmup_steps)) # Train the model model.fit(train_objectives=[(train_dataloader, train_loss)], evaluator=evaluator, epochs=num_epochs, evaluation_steps=1000, warmup_steps=warmup_steps, output_path=model_save_path) <|fim▁hole|>############################################################################## model = SentenceTransformer(model_save_path) test_evaluator = EmbeddingSimilarityEvaluator.from_input_examples(test_samples, name='sts-test') test_evaluator(model, output_path=model_save_path)<|fim▁end|>
############################################################################## # # Load the stored model and evaluate its performance on STS benchmark dataset #
<|file_name|>method-on-enum.rs<|end_file_name|><|fim▁begin|>// Copyright 2013-2014 The Rust Project Developers. See the COPYRIGHT<|fim▁hole|>// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license // <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your // option. This file may not be copied, modified, or distributed // except according to those terms. // ignore-tidy-linelength // min-lldb-version: 310 // ignore-test // Test temporarily ignored due to debuginfo tests being disabled, see PR 47155 // compile-flags:-g // === GDB TESTS =================================================================================== // gdb-command:run // STACK BY REF // gdb-command:print *self // gdbg-check:$1 = {{RUST$ENUM$DISR = Variant2, [...]}, {RUST$ENUM$DISR = Variant2, __0 = 117901063}} // gdbr-check:$1 = method_on_enum::Enum::Variant2(117901063) // gdb-command:print arg1 // gdb-check:$2 = -1 // gdb-command:print arg2 // gdb-check:$3 = -2 // gdb-command:continue // STACK BY VAL // gdb-command:print self // gdbg-check:$4 = {{RUST$ENUM$DISR = Variant2, [...]}, {RUST$ENUM$DISR = Variant2, __0 = 117901063}} // gdbr-check:$4 = method_on_enum::Enum::Variant2(117901063) // gdb-command:print arg1 // gdb-check:$5 = -3 // gdb-command:print arg2 // gdb-check:$6 = -4 // gdb-command:continue // OWNED BY REF // gdb-command:print *self // gdbg-check:$7 = {{RUST$ENUM$DISR = Variant1, x = 1799, y = 1799}, {RUST$ENUM$DISR = Variant1, [...]}} // gdbr-check:$7 = method_on_enum::Enum::Variant1{x: 1799, y: 1799} // gdb-command:print arg1 // gdb-check:$8 = -5 // gdb-command:print arg2 // gdb-check:$9 = -6 // gdb-command:continue // OWNED BY VAL // gdb-command:print self // gdbg-check:$10 = {{RUST$ENUM$DISR = Variant1, x = 1799, y = 1799}, {RUST$ENUM$DISR = Variant1, [...]}} // gdbr-check:$10 = method_on_enum::Enum::Variant1{x: 1799, y: 1799} // gdb-command:print arg1 // gdb-check:$11 = -7 // gdb-command:print arg2 // gdb-check:$12 = -8 // gdb-command:continue // OWNED MOVED // 
gdb-command:print *self // gdbg-check:$13 = {{RUST$ENUM$DISR = Variant1, x = 1799, y = 1799}, {RUST$ENUM$DISR = Variant1, [...]}} // gdbr-check:$13 = method_on_enum::Enum::Variant1{x: 1799, y: 1799} // gdb-command:print arg1 // gdb-check:$14 = -9 // gdb-command:print arg2 // gdb-check:$15 = -10 // gdb-command:continue // === LLDB TESTS ================================================================================== // lldb-command:run // STACK BY REF // lldb-command:print *self // lldb-check:[...]$0 = Variant2(117901063) // lldb-command:print arg1 // lldb-check:[...]$1 = -1 // lldb-command:print arg2 // lldb-check:[...]$2 = -2 // lldb-command:continue // STACK BY VAL // lldb-command:print self // lldb-check:[...]$3 = Variant2(117901063) // lldb-command:print arg1 // lldb-check:[...]$4 = -3 // lldb-command:print arg2 // lldb-check:[...]$5 = -4 // lldb-command:continue // OWNED BY REF // lldb-command:print *self // lldb-check:[...]$6 = Variant1 { x: 1799, y: 1799 } // lldb-command:print arg1 // lldb-check:[...]$7 = -5 // lldb-command:print arg2 // lldb-check:[...]$8 = -6 // lldb-command:continue // OWNED BY VAL // lldb-command:print self // lldb-check:[...]$9 = Variant1 { x: 1799, y: 1799 } // lldb-command:print arg1 // lldb-check:[...]$10 = -7 // lldb-command:print arg2 // lldb-check:[...]$11 = -8 // lldb-command:continue // OWNED MOVED // lldb-command:print *self // lldb-check:[...]$12 = Variant1 { x: 1799, y: 1799 } // lldb-command:print arg1 // lldb-check:[...]$13 = -9 // lldb-command:print arg2 // lldb-check:[...]$14 = -10 // lldb-command:continue #![feature(box_syntax)] #![feature(omit_gdb_pretty_printer_section)] #![omit_gdb_pretty_printer_section] #[derive(Copy, Clone)] enum Enum { Variant1 { x: u16, y: u16 }, Variant2 (u32) } impl Enum { fn self_by_ref(&self, arg1: isize, arg2: isize) -> isize { zzz(); // #break arg1 + arg2 } fn self_by_val(self, arg1: isize, arg2: isize) -> isize { zzz(); // #break arg1 + arg2 } fn self_owned(self: Box<Enum>, arg1: isize, 
arg2: isize) -> isize { zzz(); // #break arg1 + arg2 } } fn main() { let stack = Enum::Variant2(117901063); let _ = stack.self_by_ref(-1, -2); let _ = stack.self_by_val(-3, -4); let owned: Box<_> = box Enum::Variant1{ x: 1799, y: 1799 }; let _ = owned.self_by_ref(-5, -6); let _ = owned.self_by_val(-7, -8); let _ = owned.self_owned(-9, -10); } fn zzz() {()}<|fim▁end|>
// file at the top-level directory of this distribution and at // http://rust-lang.org/COPYRIGHT. // // Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
<|file_name|>test-polyfill.ts<|end_file_name|><|fim▁begin|>/** * Tests for polyfill function. * * Copyright (C) 2016 Martin Poelstra<|fim▁hole|> import "source-map-support/register"; import { expect } from "chai"; import polyfill from "../lib/polyfill"; import tsPromise from "../lib/Promise"; import * as util from "../lib/util"; declare var Promise: any; class FakePromise { public then(): FakePromise { return new FakePromise(); } } describe("polyfill", () => { let restore: any; beforeEach(() => { restore = global.Promise; delete global["Promise"]; // tslint:disable-line:no-string-literal }); afterEach(() => { global.Promise = restore; }); it("doesn't polyfill by default when another Promise implementation is present", () => { global.Promise = FakePromise; expect(polyfill()).to.equal(false); expect(Promise).to.equal(FakePromise); }); it("does polyfill when forced when another Promise implementation is present", () => { global.Promise = FakePromise; expect(polyfill(true)).to.equal(true); expect(Promise).to.equal(tsPromise); }); it("polyfills by default when Promise implementation is not present", () => { expect(global.Promise).to.equal(undefined); expect(polyfill()).to.equal(true); expect(Promise).to.equal(tsPromise); }); it("doesn't polyfill when global can't be determined", () => { const oldGetGlobal = util.getGlobal; try { (<any>util).getGlobal = (): any => undefined; expect(polyfill()).to.equal(false); } finally { (<any>util).getGlobal = oldGetGlobal; } expect(global.Promise).to.equal(undefined); }); });<|fim▁end|>
* License: MIT */
<|file_name|>SKLearn3KMOutlier.py<|end_file_name|><|fim▁begin|># coding = utf-8 """ 3.8 将 KMeans 用于离群点检测 http://git.oschina.net/wizardforcel/sklearn-cb/blob/master/3.md """ # 生成 100 个点的单个数据块,然后识别 5 个离形心最远的点 import numpy as np from sklearn.datasets import make_blobs from sklearn.cluster import KMeans (x, labels) = make_blobs(100, centers=1) kms = KMeans(n_clusters=1) kms.fit(x) # 识别 5 个最远的点 dist = kms.transform(x) sortedIdx = np.argsort(dist.ravel())[::-1][:5] # 移除这些点 nx = np.delete(x, sortedIdx, axis=0) # 形心位置变化了 nkms = KMeans(n_clusters=1) nkms.fit(nx)<|fim▁hole|>from matplotlib import pyplot as plt plt.style.use("ggplot") (fig, ax) = plt.subplots(figsize=(6, 5)) ax.scatter(x[:, 0], x[:, 1], s=10, label="点") ax.scatter(kms.cluster_centers_[:, 0], kms.cluster_centers_[:, 1], label="形心", s=50, alpha=0.7) ax.scatter(x[sortedIdx][:, 0], x[sortedIdx][:, 1], label="极值", s=100, alpha=0.7) ax.scatter(nkms.cluster_centers_[:, 0], nkms.cluster_centers_[:, 1], label="新的形心", s=50, alpha=0.7) ax.set_title("单点簇集") ax.legend(loc="best") fig.tight_layout() fig.show() plt.show()<|fim▁end|>
<|file_name|>guest.py<|end_file_name|><|fim▁begin|># Copyright 2010 United States Government as represented by the # Administrator of the National Aeronautics and Space Administration. # All Rights Reserved. # Copyright (c) 2010 Citrix Systems, Inc. # Copyright (c) 2011 Piston Cloud Computing, Inc # Copyright (c) 2012 University Of Minho # Copyright (c) 2013 Hewlett-Packard Development Company, L.P. # Copyright (c) 2015 Red Hat, Inc # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. """ Manages information about the guest. This class encapsulates libvirt domain provides certain higher level APIs around the raw libvirt API. 
These APIs are then used by all the other libvirt related classes """ from lxml import etree from oslo_log import log as logging from oslo_utils import encodeutils from oslo_utils import excutils from oslo_utils import importutils from nova.i18n import _LE from nova import utils from nova.virt.libvirt import config as vconfig libvirt = None LOG = logging.getLogger(__name__) class Guest(object): def __init__(self, domain): global libvirt if libvirt is None: libvirt = importutils.import_module('libvirt') self._domain = domain def __repr__(self): return "<Guest %(id)d %(name)s %(uuid)s>" % { 'id': self.id, 'name': self.name, 'uuid': self.uuid } @property def id(self): return self._domain.ID() @property def uuid(self): return self._domain.UUIDString() @property def name(self): return self._domain.name() @property def _encoded_xml(self): return encodeutils.safe_decode(self._domain.XMLDesc(0)) @classmethod def create(cls, xml, host): """Create a new Guest :param xml: XML definition of the domain to create :param host: host.Host connection to define the guest on :returns guest.Guest: Guest ready to be launched """ try: # TODO(sahid): Host.write_instance_config should return # an instance of Guest domain = host.write_instance_config(xml) except Exception: with excutils.save_and_reraise_exception(): LOG.error(_LE('Error defining a domain with XML: %s') % encodeutils.safe_decode(xml)) return cls(domain) def launch(self, pause=False): """Starts a created guest. 
:param pause: Indicates whether to start and pause the guest """ flags = pause and libvirt.VIR_DOMAIN_START_PAUSED or 0 try: return self._domain.createWithFlags(flags) except Exception: with excutils.save_and_reraise_exception(): LOG.error(_LE('Error launching a defined domain ' 'with XML: %s') % self._encoded_xml, errors='ignore') def poweroff(self): """Stops a running guest.""" self._domain.destroy() def resume(self): """Resumes a suspended guest.""" self._domain.resume() def enable_hairpin(self): """Enables hairpin mode for this guest.""" interfaces = self.get_interfaces() try: for interface in interfaces: utils.execute( 'tee', '/sys/class/net/%s/brport/hairpin_mode' % interface, process_input='1', run_as_root=True, check_exit_code=[0, 1]) except Exception: with excutils.save_and_reraise_exception(): LOG.error(_LE('Error enabling hairpin mode with XML: %s') % self._encoded_xml, errors='ignore') def get_interfaces(self): """Returns a list of all network interfaces for this domain.""" doc = None try: doc = etree.fromstring(self._encoded_xml) except Exception: return [] interfaces = [] nodes = doc.findall('./devices/interface/target') for target in nodes: interfaces.append(target.get('dev')) return interfaces def get_vcpus_info(self): """Returns virtual cpus information of guest. :returns: guest.VCPUInfo """ vcpus = self._domain.vcpus() if vcpus is not None: for vcpu in vcpus[0]: yield VCPUInfo( id=vcpu[0], cpu=vcpu[3], state=vcpu[1], time=vcpu[2]) <|fim▁hole|> libvirt.VIR_DOMAIN_UNDEFINE_MANAGED_SAVE) except libvirt.libvirtError: LOG.debug("Error from libvirt during undefineFlags. 
%d" "Retrying with undefine", self.id) self._domain.undefine() except AttributeError: # Older versions of libvirt don't support undefine flags, # trying to remove managed image try: if self._domain.hasManagedSaveImage(0): self._domain.managedSaveRemove(0) except AttributeError: pass self._domain.undefine() def has_persistent_configuration(self): """Whether domain config is persistently stored on the host.""" return self._domain.isPersistent() def attach_device(self, conf, persistent=False, live=False): """Attaches device to the guest. :param conf: A LibvirtConfigObject of the device to attach :param persistent: A bool to indicate whether the change is persistent or not :param live: A bool to indicate whether it affect the guest in running state """ flags = persistent and libvirt.VIR_DOMAIN_AFFECT_CONFIG or 0 flags |= live and libvirt.VIR_DOMAIN_AFFECT_LIVE or 0 self._domain.attachDeviceFlags(conf.to_xml(), flags=flags) def get_disk(self, device): """Returns the disk mounted at device :returns LivirtConfigGuestDisk: mounted at device or None """ try: doc = etree.fromstring(self._domain.XMLDesc(0)) except Exception: return None node = doc.find("./devices/disk/target[@dev='%s'].." % device) if node is not None: conf = vconfig.LibvirtConfigGuestDisk() conf.parse_dom(node) return conf def detach_device(self, conf, persistent=False, live=False): """Detaches device to the guest. :param conf: A LibvirtConfigObject of the device to detach :param persistent: A bool to indicate whether the change is persistent or not :param live: A bool to indicate whether it affect the guest in running state """ flags = persistent and libvirt.VIR_DOMAIN_AFFECT_CONFIG or 0 flags |= live and libvirt.VIR_DOMAIN_AFFECT_LIVE or 0 self._domain.detachDeviceFlags(conf.to_xml(), flags=flags) def get_xml_desc(self, dump_inactive=False, dump_sensitive=False, dump_migratable=False): """Returns xml description of guest. 
:param dump_inactive: Dump inactive domain information :param dump_sensitive: Dump security sensitive information :param dump_migratable: Dump XML suitable for migration :returns string: XML description of the guest """ flags = dump_inactive and libvirt.VIR_DOMAIN_XML_INACTIVE or 0 flags |= dump_sensitive and libvirt.VIR_DOMAIN_XML_SECURE or 0 flags |= dump_migratable and libvirt.VIR_DOMAIN_XML_MIGRATABLE or 0 return self._domain.XMLDesc(flags=flags) def save_memory_state(self): """Saves the domain's memory state. Requires running domain. raises: raises libvirtError on error """ self._domain.managedSave(0) class VCPUInfo(object): def __init__(self, id, cpu, state, time): """Structure for information about guest vcpus. :param id: The virtual cpu number :param cpu: The host cpu currently associated :param state: The running state of the vcpu (0 offline, 1 running, 2 blocked on resource) :param time: The cpu time used in nanoseconds """ self.id = id self.cpu = cpu self.state = state self.time = time<|fim▁end|>
def delete_configuration(self): """Undefines a domain from hypervisor.""" try: self._domain.undefineFlags(
<|file_name|>eos_user.py<|end_file_name|><|fim▁begin|>#!/usr/bin/python # # This file is part of Ansible # # Ansible is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # # Ansible is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with Ansible. If not, see <http://www.gnu.org/licenses/>. # ANSIBLE_METADATA = {'metadata_version': '1.0', 'status': ['preview'], 'supported_by': 'community'} DOCUMENTATION = """ --- module: eos_user version_added: "2.3" author: "Peter Sprygada (@privateip)" short_description: Manage the collection of local users on EOS devices description: - This module provides declarative management of the local usernames configured on Arista EOS devices. It allows playbooks to manage either individual usernames or the collection of usernames in the current running config. It also supports purging usernames from the configuration that are not explicitly defined. extends_documentation_fragment: eos options: users: description: - The set of username objects to be configured on the remote Arista EOS device. The list entries can either be the username or a hash of username and properties. This argument is mutually exclusive with the C(username) argument. required: false default: null username: description: - The username to be configured on the remote Arista EOS device. This argument accepts a stringv value and is mutually exclusive with the C(users) argument. 
required: false default: null update_password: description: - Since passwords are encrypted in the device running config, this argument will instruct the module when to change the password. When set to C(always), the password will always be updated in the device and when set to C(on_create) the password will be updated only if the username is created. required: false default: always choices: ['on_create', 'always'] privilege: description: - The C(privilege) argument configures the privilege level of the user when logged into the system. This argument accepts integer values in the range of 1 to 15. required: false default: null role: description: - The C(role) argument configures the role for the username in the device running configuration. The argument accepts a string value defining the role name. This argument does not check if the role has been configured on the device. required: false default: null sshkey: description: - The C(sshkey) argument defines the SSH public key to configure for the username. This argument accepts a valid SSH key value. required: false default: null nopassword: description: - The C(nopassword) argument defines the username without assigning a password. This will allow the user to login to the system without being authenticated by a password. This argument accepts boolean values. required: false default: null choices: ['true', 'false'] purge: description: - The C(purge) argument instructs the module to consider the resource definition absolute. It will remove any previously configured usernames on the device with the exception of the `admin` user which cannot be deleted per EOS constraints. required: false default: false state: description: - The C(state) argument configures the state of the uername definition as it relates to the device operational configuration. 
When set to I(present), the username(s) should be configured in the device active configuration and when set to I(absent) the username(s) should not be in the device active configuration required: false<|fim▁hole|>EXAMPLES = """ - name: create a new user eos_user: username: ansible sshkey: "{{ lookup('file', '~/.ssh/id_rsa.pub') }}" state: present - name: remove all users except admin eos_user: purge: yes - name: set multiple users to privilege level users: - username: netop - username: netend privilege: 15 state: present """ RETURN = """ commands: description: The list of configuration mode commands to send to the device returned: always type: list sample: - username ansible secret password - username admin secret admin session_name: description: The EOS config session name used to load the configuration returned: when changed is True type: str sample: ansible_1479315771 """ import re from functools import partial from ansible.module_utils.basic import AnsibleModule from ansible.module_utils.eos import get_config, load_config from ansible.module_utils.six import iteritems from ansible.module_utils.eos import eos_argument_spec, check_args def validate_privilege(value, module): if not 1 <= value <= 15: module.fail_json(msg='privilege must be between 1 and 15, got %s' % value) def map_obj_to_commands(updates, module): commands = list() state = module.params['state'] update_password = module.params['update_password'] for update in updates: want, have = update needs_update = lambda x: want.get(x) and (want.get(x) != have.get(x)) add = lambda x: commands.append('username %s %s' % (want['username'], x)) if want['state'] == 'absent': commands.append('no username %s' % want['username']) continue if needs_update('role'): add('role %s' % want['role']) if needs_update('privilege'): add('privilege %s' % want['privilege']) if needs_update('password'): if update_password == 'always' or not have: add('secret %s' % want['password']) if needs_update('sshkey'): add('sshkey %s' % 
want['sshkey']) if needs_update('nopassword'): if want['nopassword']: add('nopassword') else: add('no username %s nopassword' % want['username']) return commands def parse_role(data): match = re.search(r'role (\S+)', data, re.M) if match: return match.group(1) def parse_sshkey(data): match = re.search(r'sshkey (.+)$', data, re.M) if match: return match.group(1) def parse_privilege(data): match = re.search(r'privilege (\S+)', data, re.M) if match: return int(match.group(1)) def map_config_to_obj(module): data = get_config(module, flags=['section username']) match = re.findall(r'^username (\S+)', data, re.M) if not match: return list() instances = list() for user in set(match): regex = r'username %s .+$' % user cfg = re.findall(r'username %s .+$' % user, data, re.M) cfg = '\n'.join(cfg) obj = { 'username': user, 'state': 'present', 'nopassword': 'nopassword' in cfg, 'password': None, 'sshkey': parse_sshkey(cfg), 'privilege': parse_privilege(cfg), 'role': parse_role(cfg) } instances.append(obj) return instances def get_param_value(key, item, module): # if key doesn't exist in the item, get it from module.params if not item.get(key): value = module.params[key] # if key does exist, do a type check on it to validate it else: value_type = module.argument_spec[key].get('type', 'str') type_checker = module._CHECK_ARGUMENT_TYPES_DISPATCHER[value_type] type_checker(item[key]) value = item[key] # validate the param value (if validator func exists) validator = globals().get('validate_%s' % key) if all((value, validator)): validator(value, module) return value def map_params_to_obj(module): users = module.params['users'] if not users: if not module.params['username'] and module.params['purge']: return list() elif not module.params['username']: module.fail_json(msg='username is required') else: collection = [{'username': module.params['username']}] else: collection = list() for item in users: if not isinstance(item, dict): collection.append({'username': item}) elif 'username' not 
in item: module.fail_json(msg='username is required') else: collection.append(item) objects = list() for item in collection: get_value = partial(get_param_value, item=item, module=module) item['password'] = get_value('password') item['nopassword'] = get_value('nopassword') item['privilege'] = get_value('privilege') item['role'] = get_value('role') item['sshkey'] = get_value('sshkey') item['state'] = get_value('state') objects.append(item) return objects def update_objects(want, have): updates = list() for entry in want: item = next((i for i in have if i['username'] == entry['username']), None) if all((item is None, entry['state'] == 'present')): updates.append((entry, {})) elif item: for key, value in iteritems(entry): if value and value != item[key]: updates.append((entry, item)) return updates def main(): """ main entry point for module execution """ argument_spec = dict( users=dict(type='list', no_log=True), username=dict(), password=dict(no_log=True), nopassword=dict(type='bool'), update_password=dict(default='always', choices=['on_create', 'always']), privilege=dict(type='int'), role=dict(), sshkey=dict(), purge=dict(type='bool', default=False), state=dict(default='present', choices=['present', 'absent']) ) argument_spec.update(eos_argument_spec) mutually_exclusive = [('username', 'users')] module = AnsibleModule(argument_spec=argument_spec, mutually_exclusive=mutually_exclusive, supports_check_mode=True) warnings = list() check_args(module, warnings) result = {'changed': False} if warnings: result['warnings'] = warnings want = map_params_to_obj(module) have = map_config_to_obj(module) commands = map_obj_to_commands(update_objects(want, have), module) if module.params['purge']: want_users = [x['username'] for x in want] have_users = [x['username'] for x in have] for item in set(have_users).difference(want_users): if item != 'admin': commands.append('no username %s' % item) result['commands'] = commands # the eos cli prevents this by rule so capture it and 
display # a nice failure message if 'no username admin' in commands: module.fail_json(msg='cannot delete the `admin` account') if commands: commit = not module.check_mode response = load_config(module, commands, commit=commit) if response.get('diff') and module._diff: result['diff'] = {'prepared': response.get('diff')} result['session_name'] = response.get('session') result['changed'] = True module.exit_json(**result) if __name__ == '__main__': main()<|fim▁end|>
default: present choices: ['present', 'absent'] """
<|file_name|>level.go<|end_file_name|><|fim▁begin|>// Copyright 2015 Garrett D'Amore // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use file except in compliance with the License. // You may obtain a copy of the license at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. package main import ( "math/rand" "time" "github.com/gdamore/tcell" "github.com/gdamore/tcell/views" ) type Level struct { name string title string width int height int startx int starty int game *Game layer int maxlayer int view *views.ViewPort maxtime time.Duration begin time.Time expired bool gravity float64 clock AlarmClock stopped bool manager *SpriteManager terrain *Sprite data *LevelData win bool started bool } func NewLevelFromData(d *LevelData) *Level { lvl := &Level{data: d} lvl.name = d.Name lvl.title = d.Properties.PropString("title", lvl.name) timer := d.Properties.PropInt("timer", 300) lvl.gravity = d.Properties.PropFloat64("gravity", 0.0) lvl.width = d.Properties.PropInt("width", 0) lvl.height = d.Properties.PropInt("height", 0) if lvl.width == 0 || lvl.height == 0 { panic("level has no dimensions!") } lvl.maxtime = time.Duration(timer) * time.Second lvl.manager = NewSpriteManager(lvl.width, lvl.height) lvl.clock = NewAlarmClock() return lvl } func (l *Level) Name() string { return l.name } func (l *Level) Title() string { return l.title } func (l *Level) randomViewXY() (int, int) { x1, y1, x2, y2 := l.view.GetVisible() return rand.Intn(x2-x1+1) + x1, rand.Intn(y2-y1+1) + y1 } func (l *Level) Update(now time.Time) { l.clock.Tick(now) l.manager.Update(now) if l.gravity != 0 { l.HandleEvent(&EventGravity{when: now, accel: 
l.gravity}) } if l.GetTimer() == 0 && !l.expired && !l.win && l.started { l.HandleEvent(&EventTimesUp{}) l.expired = true } } func (l *Level) SetView(v *views.ViewPort) { l.view = v v.SetContentSize(l.width, l.height, true) l.manager.SetView(v) } func (l *Level) Draw(v *views.ViewPort) { l.manager.Draw(v) } func (l *Level) Reset() { l.manager.Reset() l.clock.Reset() for name, plist := range l.data.Objects { props := GameObjectProps{} props["label"] = name for k, v := range plist { props[k] = v } MakeGameObject(l, props["class"], props) } l.begin = time.Now() l.stopped = false l.started = false l.win = false l.expired = false } func (l *Level) GetTimer() time.Duration { if l.win || !l.started { return l.maxtime } d := time.Now().Sub(l.begin) d = l.maxtime - d if d < 0 { d = 0 } return d } func (l *Level) SetGame(g *Game) { l.game = g } func (l *Level) Layer() int { return LayerTerrain } func (l *Level) Size() (int, int) { return l.width, l.height } func (l *Level) MakeVisible(x, y int) { if l.view != nil { l.view.MakeVisible(x, y) } } func (l *Level) Center(x, y int) { if l.view != nil { l.view.Center(x, y) } } func (l *Level) Start() { l.started = true l.begin = time.Now() l.HandleEvent(&EventLevelStart{}) } func (l *Level) ShowPress() { x1, y1, x2, y2 := l.view.GetVisible() sprite := GetSprite("PressSpace") sprite.SetLayer(LayerDialog) sprite.SetFrame("F0") sprite.SetPosition(x1+(x2-x1)/2, y1+(y2-y1)/2+2) l.manager.AddSprite(sprite) } func (l *Level) ShowComplete() { x1, y1, x2, y2 := l.view.GetVisible() sprite := GetSprite("LevelComplete") sprite.SetLayer(LayerDialog) sprite.SetFrame("F0") sprite.SetPosition(x1+(x2-x1)/2, y1+(y2-y1)/2) l.manager.AddSprite(sprite) } func (l *Level) HandleEvent(ev tcell.Event) bool { switch ev := ev.(type) { case *tcell.EventKey: switch ev.Key() { case tcell.KeyCtrlR: // secret reset button l.Reset() } case *tcell.EventMouse: offx, offy, _, _ := l.view.GetVisible() px1, py1, px2, py2 := l.view.GetPhysical() mx, my := ev.Position() 
if mx < px1 || mx > px2 || my < py1 || my > py2 { // outside our view return false } if ev.Buttons()&tcell.Button1 != 0 { l.stopped = true l.view.Center(offx+mx-px1, offy+my-py1) } else if ev.Buttons()&tcell.Button2 != 0 { l.Reset() } else if ev.Buttons()&tcell.Button3 != 0 { l.Reset() } return true case *EventPlayerDeath: l.game.HandleEvent(ev) l.ShowPress() return true case *EventLevelComplete: l.win = true l.game.HandleEvent(ev) l.ShowComplete() l.ShowPress() return true case *EventGameOver: x1, y1, x2, y2 := l.view.GetVisible()<|fim▁hole|> sprite.SetPosition(x1+(x2-x1)/2, y1+(y2-y1)/2) l.manager.AddSprite(sprite) case *EventTimesUp: bw := GetSprite("Blastwave") bw.Resize(l.width, l.height) bw.ScheduleFrame("0", time.Now().Add(2*time.Second)) l.AddSprite(bw) dur := time.Duration(0) for i := 0; i < 100; i++ { x, y := l.randomViewXY() sprite := GetSprite("Explosion") sprite.ScheduleFrame("0", time.Now().Add(dur)) sprite.SetPosition(x, y) l.AddSprite(sprite) dur += time.Millisecond * 5 for j := 0; j < 4; j++ { x, y = l.randomViewXY() sprite = GetSprite("SmallExplosion") sprite.SetPosition(x, y) sprite.ScheduleFrame("0", time.Now().Add(dur)) l.AddSprite(sprite) dur += time.Millisecond * 50 } } } if l.stopped { return false } return l.manager.HandleEvent(ev) } func (l *Level) AddSprite(sprite *Sprite) { l.manager.AddSprite(sprite) } func (l *Level) RemoveSprite(sprite *Sprite) { l.manager.RemoveSprite(sprite) } func (l *Level) AddAlarm(d time.Duration, h EventHandler) *Alarm { return l.clock.Schedule(d, h) } func (l *Level) RemoveAlarm(a *Alarm) { l.clock.Cancel(a) }<|fim▁end|>
sprite := GetSprite("GameOver") sprite.SetLayer(LayerDialog) sprite.SetFrame("F0")
<|file_name|>index.js<|end_file_name|><|fim▁begin|>import MPopper from '../m-popper.vue';<|fim▁hole|> */ export const UIconTooltip = { name: 'u-icon-tooltip', props: { type: { type: String, default: 'info' }, // 按钮名称 size: { type: String, default: 'normal' }, // 提示大小 content: String, trigger: { type: String, default: 'hover' }, placement: { type: String, default: 'bottom' }, ...pick(MPopper.props, [ 'opened', 'reference', 'hideDelay', 'boundariesElement', 'followCursor', 'offset', 'disabled', ]), }, }; export default UIconTooltip;<|fim▁end|>
import pick from 'lodash/pick'; /** * 默认显示一个按钮,hover 上去有提示
<|file_name|>BasicInfo.js<|end_file_name|><|fim▁begin|>import React from 'react'; import { FlexRow, FlexCell } from '../common'; import CharacterName from './CharacterName'; import InfoBlock from './InfoBlock'; const PersonalInfo = ({ character }) => ( <FlexRow> <FlexCell columns={3}> <CharacterName character={character} /> </FlexCell><|fim▁hole|> </FlexCell> </FlexRow> ); export default PersonalInfo;<|fim▁end|>
<FlexCell columns={9}> <InfoBlock character={character} />
<|file_name|>ConfigDB_Longest_8.py<|end_file_name|><|fim▁begin|>HOST = "wfSciwoncWiki:enw1989@172.31.29.101:27001,172.31.29.102:27001,172.31.29.103:27001,172.31.29.104:27001,172.31.29.105:27001,172.31.29.106:27001,172.31.29.107:27001,172.31.29.108:27001,172.31.29.109:27001/?authSource=admin" PORT = "" USER = "" PASSWORD = "" DATABASE = "wiki" READ_PREFERENCE = "primary" COLLECTION_INPUT = "user_sessions" <|fim▁hole|>SORT = ["duration", "end time"] OPERATION_TYPE = "GROUP_BY_FIXED_WINDOW" COLUMN = "end time" VALUE = [(1236381526, 1238973525),(1238973526, 1241565525),(1241565526, 1244157525),(1244157526, 1246749525),(1246749526, 1249341525),(1249341526, 1251933525),(1251933526, 1254525525),(1254525526, 1257113925),(1257113926, 1259705925),(1259705926, 1262297925),(1262297926, 1264889925),(1264889926, 1265098299)] INPUT_FILE = "user_info.csv" OUTPUT_FILE = "top_sessions_8.csv"<|fim▁end|>
COLLECTION_OUTPUT = "top_sessions" PREFIX_COLUMN = "w_" ATTRIBUTES = ["duration", "start time", "end time", "contributor_username", "edition_counts"]
<|file_name|>jano.py<|end_file_name|><|fim▁begin|>import sys from services.spawn import MobileTemplate from services.spawn import WeaponTemplate from resources.datatables import WeaponType from resources.datatables import Difficulty from resources.datatables import Options from java.util import Vector def addTemplate(core): mobileTemplate = MobileTemplate() mobileTemplate.setCreatureName('tatooine_opening_jano') mobileTemplate.setLevel(1) mobileTemplate.setDifficulty(Difficulty.NORMAL) mobileTemplate.setSocialGroup("township") mobileTemplate.setOptionsBitmask(Options.INVULNERABLE | Options.CONVERSABLE) templates = Vector() templates.add('object/mobile/shared_dressed_tatooine_opening_jano.iff') mobileTemplate.setTemplates(templates) weaponTemplates = Vector() weapontemplate = WeaponTemplate('object/weapon/melee/unarmed/shared_unarmed_default.iff', WeaponType.UNARMED, 1.0, 6, 'kinetic') weaponTemplates.add(weapontemplate) mobileTemplate.setWeaponTemplateVector(weaponTemplates)<|fim▁hole|> attacks = Vector() mobileTemplate.setDefaultAttack('creatureMeleeAttack') mobileTemplate.setAttacks(attacks) core.spawnService.addMobileTemplate('jano', mobileTemplate) return<|fim▁end|>
<|file_name|>MapKeyLoaderUtil.java<|end_file_name|><|fim▁begin|>/* * Copyright (c) 2008-2017, Hazelcast, Inc. All Rights Reserved. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.hazelcast.map.impl; import com.hazelcast.config.MaxSizeConfig; import com.hazelcast.core.IFunction; import com.hazelcast.nio.serialization.Data; import com.hazelcast.nio.serialization.SerializableByConvention; import com.hazelcast.spi.partition.IPartitionService; import com.hazelcast.util.CollectionUtil; import com.hazelcast.util.UnmodifiableIterator; import java.util.Iterator; import java.util.List; import java.util.Map; import java.util.Map.Entry; import java.util.NoSuchElementException; import static com.hazelcast.config.MaxSizeConfig.MaxSizePolicy.PER_NODE; import static com.hazelcast.util.MapUtil.createHashMap; import static com.hazelcast.util.Preconditions.checkNotNull; public final class MapKeyLoaderUtil { private MapKeyLoaderUtil() { } /** * Returns the role for the map key loader based on the passed parameters. * The partition owner of the map name partition is the sender. * The first replica of the map name partition is the sender backup. * Other partition owners are receivers and other partition replicas do * not have a role. 
* * @param isPartitionOwner if this is the partition owner * @param isMapNamePartition if this is the partition containing the map name * @param isMapNamePartitionFirstReplica if this is the first replica for the partition * containing the map name * @return the map key loader role */ static MapKeyLoader.Role assignRole(boolean isPartitionOwner, boolean isMapNamePartition, boolean isMapNamePartitionFirstReplica) { if (isMapNamePartition) { if (isPartitionOwner) { // map-name partition owner is the SENDER return MapKeyLoader.Role.SENDER; } else { if (isMapNamePartitionFirstReplica) { // first replica of the map-name partition is the SENDER_BACKUP return MapKeyLoader.Role.SENDER_BACKUP; } else { // other replicas of the map-name partition do not have a role return MapKeyLoader.Role.NONE; } } } else { // ordinary partition owners are RECEIVERs, otherwise no role return isPartitionOwner ? MapKeyLoader.Role.RECEIVER : MapKeyLoader.Role.NONE; } } /** * Transforms an iterator of entries to an iterator of entry batches * where each batch is represented as a map from entry key to * list of entry values. * The maximum size of the entry value list in any batch is * determined by the {@code maxBatch} parameter. Only one * entry value list may have the {@code maxBatch} size, other * lists will be smaller. * * @param entries the entries to be batched * @param maxBatch the maximum size of an entry group in a single batch * @return an iterator with entry batches */ static Iterator<Map<Integer, List<Data>>> toBatches(final Iterator<Entry<Integer, Data>> entries, final int maxBatch) { return new UnmodifiableIterator<Map<Integer, List<Data>>>() { @Override<|fim▁hole|> public boolean hasNext() { return entries.hasNext(); } @Override public Map<Integer, List<Data>> next() { if (!entries.hasNext()) { throw new NoSuchElementException(); } return nextBatch(entries, maxBatch); } }; } /** * Groups entries by the entry key. 
The entries will be grouped * until at least one group has up to {@code maxBatch} * entries or until the {@code entries} have been exhausted. * * @param entries the entries to be grouped by key * @param maxBatch the maximum size of a group * @return the grouped entries by entry key */ private static Map<Integer, List<Data>> nextBatch(Iterator<Entry<Integer, Data>> entries, int maxBatch) { Map<Integer, List<Data>> batch = createHashMap(maxBatch); while (entries.hasNext()) { Entry<Integer, Data> e = entries.next(); List<Data> partitionKeys = CollectionUtil.addToValueList(batch, e.getKey(), e.getValue()); if (partitionKeys.size() >= maxBatch) { break; } } return batch; } /** * Returns the configured maximum entry count per node if the max * size policy is {@link MaxSizeConfig.MaxSizePolicy#PER_NODE} * and is not the default, otherwise returns {@code -1}. * * @param maxSizeConfig the max size configuration * @return the max size per node or {@code -1} if not configured or is the default * @see MaxSizeConfig#getMaxSizePolicy() * @see MaxSizeConfig#getSize() */ public static int getMaxSizePerNode(MaxSizeConfig maxSizeConfig) { // max size or -1 if policy is different or not set double maxSizePerNode = maxSizeConfig.getMaxSizePolicy() == PER_NODE ? maxSizeConfig.getSize() : -1D; if (maxSizePerNode == MaxSizeConfig.DEFAULT_MAX_SIZE) { // unlimited return -1; } return (int) maxSizePerNode; } /** * Returns a {@link IFunction} that transforms a {@link Data} * parameter to an map entry where the key is the partition ID * and the value is the provided parameter. 
* * @param partitionService the partition service */ static IFunction<Data, Entry<Integer, Data>> toPartition(final IPartitionService partitionService) { return new DataToEntry(partitionService); } @SerializableByConvention private static class DataToEntry implements IFunction<Data, Entry<Integer, Data>> { private final IPartitionService partitionService; public DataToEntry(IPartitionService partitionService) { this.partitionService = partitionService; } @Override public Entry<Integer, Data> apply(Data input) { // Null-pointer here, in case of null key loaded by MapLoader checkNotNull(input, "Key loaded by a MapLoader cannot be null."); Integer partition = partitionService.getPartitionId(input); return new MapEntrySimple<Integer, Data>(partition, input); } } }<|fim▁end|>
<|file_name|>EditStudyLogActionTest.java<|end_file_name|><|fim▁begin|>/** * Copyright 5AM Solutions Inc, ESAC, ScenPro & SAIC * * Distributed under the OSI-approved BSD 3-Clause License. * See http://ncip.github.com/caintegrator/LICENSE.txt for details. */ package gov.nih.nci.caintegrator.web.action.study.management; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertFalse; import static org.junit.Assert.assertTrue; import gov.nih.nci.caintegrator.application.study.LogEntry; import gov.nih.nci.caintegrator.application.study.StudyConfiguration; import gov.nih.nci.caintegrator.application.study.StudyManagementServiceStub; import gov.nih.nci.caintegrator.web.action.AbstractSessionBasedTest; import gov.nih.nci.caintegrator.web.action.study.management.EditStudyLogAction; import java.util.Date; import org.junit.Before; import org.junit.Test; import com.opensymphony.xwork2.Action; /** * */ public class EditStudyLogActionTest extends AbstractSessionBasedTest { private EditStudyLogAction action = new EditStudyLogAction(); private StudyManagementServiceStub studyManagementService; private LogEntry logEntry1; private LogEntry logEntry2; @Override @Before public void setUp() throws Exception { super.setUp(); action.setStudyConfiguration(new StudyConfiguration()); action = new EditStudyLogAction(); studyManagementService = new StudyManagementServiceStub(); action.setStudyManagementService(studyManagementService); action.setWorkspaceService(workspaceService); logEntry1 = new LogEntry(); logEntry1.setTrimSystemLogMessage("message"); logEntry1.setLogDate(new Date()); logEntry1.setTrimDescription("desc"); try { Thread.sleep(20l); } catch (InterruptedException e) { } logEntry2 = new LogEntry(); logEntry2.setSystemLogMessage("message2"); logEntry2.setLogDate(new Date()); logEntry2.setDescription("desc"); action.getStudyConfiguration().getLogEntries().add(logEntry1); action.getStudyConfiguration().getLogEntries().add(logEntry2); } @Test public void 
testPrepare() throws InterruptedException { action.prepare(); <|fim▁hole|> // Verify the most recent ones are sorted first. assertEquals(logEntry2, action.getDisplayableLogEntries().get(0).getLogEntry()); assertEquals(logEntry1, action.getDisplayableLogEntries().get(1).getLogEntry()); assertTrue(studyManagementService.getRefreshedStudyEntityCalled); } @Test public void testSave() { action.prepare(); // logEntry2 is first action.getDisplayableLogEntries().get(0).setDescription("new"); action.getDisplayableLogEntries().get(0).setUpdateDescription(true); // logEntry1 will have a new description, but the checkbox will be false. action.getDisplayableLogEntries().get(1).setDescription("new"); action.getDisplayableLogEntries().get(1).setUpdateDescription(false); assertEquals(Action.SUCCESS, action.save()); assertEquals("new", logEntry2.getDescription()); assertEquals("desc", logEntry1.getDescription()); assertTrue(studyManagementService.saveCalled); } @Test public void testAcceptableParameterName() { assertTrue(action.acceptableParameterName(null)); assertTrue(action.acceptableParameterName("ABC")); assertFalse(action.acceptableParameterName("123")); assertFalse(action.acceptableParameterName("d-123-e")); } }<|fim▁end|>
<|file_name|>options.rs<|end_file_name|><|fim▁begin|>//! The user-configurable portion of the type checker. use std::str; use std::ascii::AsciiExt; use std::path::{Path, PathBuf, MAIN_SEPARATOR}; use kailua_env::{Spanned, WithLoc}; use kailua_diag::{Report, Stop}; use kailua_syntax::Chunk; /// Options for customizing the type checker. /// /// All of those methods return `Err(None)` if the error occurred and has not been reported, /// or `Err(Some(Stop))` if the error occurred and the caller wants to bail out. /// If the implementation has a handle to the reporter and wants to do its own reporting, /// it can report and return `Ok` (recovery) or `Err(Some(Stop))` (error propagation). pub trait Options { /// Called when `package.path` gets assigned to a string literal type. /// /// Does nothing by default. fn set_package_path(&mut self, _path: Spanned<&[u8]>, _report: &Report) -> Result<(), Option<Stop>> { Ok(()) } /// Called when `package.cpath` gets assigned to a string literal type. /// /// Does nothing by default. fn set_package_cpath(&mut self, _path: Spanned<&[u8]>, _report: &Report) -> Result<(), Option<Stop>> { Ok(()) } /// Called when `require` is called with a string literal type. /// /// Errors by default; the checker will use its own error message. fn require_chunk(&mut self, _path: Spanned<&[u8]>, _report: &Report) -> Result<Chunk, Option<Stop>> { Err(None) } } /// Checker options that are tailored to loading from the file system. /// /// Follows the same error conventions as `Options`. pub trait FsSource { /// Should try to load a given fully resolved path and return a chunk or `None`. /// /// `None` here means that the path doesn't exist and the search should continue. /// Other error cases can be reported as `Err(..)` as with `Options`. fn chunk_from_path(&self, resolved_path: Spanned<&Path>, report: &Report) -> Result<Option<Chunk>, Option<Stop>>; /// Should try to parse a byte string containing a relative path to a path buffer. 
/// /// Used to delegate the encoding decision to the user. /// This will only parse ASCII paths by default, in order to avoid the confusion. fn to_path_buf(&self, path: Spanned<&[u8]>, _report: &Report) -> Result<PathBuf, Option<Stop>> { if path.is_ascii() { Ok(Path::new(str::from_utf8(&path).unwrap()).to_owned()) } else { Err(None) } } } /// An implementation of `Options` that loads from the file system. /// /// The user should provide `FsSource`, which provides a simpler interface for this use case. pub struct FsOptions<S> { source: S, root: PathBuf, package_path: Vec<Vec<u8>>, package_cpath: Vec<Vec<u8>>, } impl<S: FsSource> FsOptions<S> { pub fn new(source: S, root: PathBuf) -> FsOptions<S> { FsOptions { source: source, root: root, // by default, local files only package_path: vec![b"?.lua".to_vec()], package_cpath: vec![], } } fn search_file(&self, path: Spanned<&[u8]>, search_paths: &[Vec<u8>], suffix: &[u8], report: &Report) -> Result<Option<Chunk>, Option<Stop>> { for template in search_paths { let mut newpath = Vec::new(); let mut newpathdot = Vec::new(); for (i, e) in template.split(|&b| b == b'?').enumerate() { if i > 0 { newpath.extend(path.iter().map(|&b| { if b == b'.' { MAIN_SEPARATOR as u8 } else { b } })); newpathdot.extend(path.iter().cloned()); } newpath.extend_from_slice(e); newpathdot.extend_from_slice(e); } newpath.extend_from_slice(suffix); newpathdot.extend_from_slice(suffix); let newpath = (&newpath[..]).with_loc(path); let resolved_path = self.root.join(self.source.to_path_buf(newpath, report)?); let resolved_path = (&*resolved_path).with_loc(path); trace!("trying to load {:?}", resolved_path); if let Some(chunk) = self.source.chunk_from_path(resolved_path, report)? 
{ return Ok(Some(chunk)); } // also try to load a dotted path let newpathdot = (&newpathdot[..]).with_loc(path); let resolved_path = self.root.join(self.source.to_path_buf(newpathdot, report)?); let resolved_path = (&*resolved_path).with_loc(path); trace!("trying to load {:?}", resolved_path); if let Some(chunk) = self.source.chunk_from_path(resolved_path, report)? { return Ok(Some(chunk)); } } Ok(None) } } impl<S: FsSource> Options for FsOptions<S> { fn set_package_path(&mut self, path: Spanned<&[u8]>, _report: &Report) -> Result<(), Option<Stop>> { self.package_path = path.split(|&b| b == b';').map(|s| s.to_owned()).collect(); Ok(()) } fn set_package_cpath(&mut self, path: Spanned<&[u8]>, _report: &Report) -> Result<(), Option<Stop>> { self.package_cpath = path.split(|&b| b == b';').map(|s| s.to_owned()).collect(); Ok(()) } fn require_chunk(&mut self, path: Spanned<&[u8]>, report: &Report) -> Result<Chunk, Option<Stop>> { if let Some(chunk) = self.search_file(path, &self.package_path, b".kailua", report)? { return Ok(chunk); } if let Some(chunk) = self.search_file(path, &self.package_path, b"", report)? { return Ok(chunk); }<|fim▁hole|> if let Some(chunk) = self.search_file(path, &self.package_cpath, b".kailua", report)? { return Ok(chunk); } // avoid loading the native libraries as is Err(None) } }<|fim▁end|>
<|file_name|>block_args.rs<|end_file_name|><|fim▁begin|>// rustfmt-indent_style: Block // Function arguments layout fn lorem() {} fn lorem(ipsum: usize) {} fn lorem(ipsum: usize, dolor: usize, sit: usize, amet: usize, consectetur: usize, adipiscing: usize, elit: usize) { // body }<|fim▁hole|> pub fn GetConsoleHistoryInfo(console_history_info: *mut ConsoleHistoryInfo) -> Boooooooooooooool; } // rustfmt should not add trailing comma for variadic function. See #1623. extern "C" { pub fn variadic_fn(first_parameter: FirstParameterType, second_parameter: SecondParameterType, ...); } // #1652 fn deconstruct(foo: Bar) -> (SocketAddr, Header, Method, RequestUri, HttpVersion, AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA) { }<|fim▁end|>
// #1441 extern "system" {
<|file_name|>pattern-white.go<|end_file_name|><|fim▁begin|>package opc // White // Set all pixels to white. import ( "github.com/longears/pixelslinger/colorutils" "github.com/longears/pixelslinger/config" "github.com/longears/pixelslinger/midi" ) func MakePatternWhite(locations []float64) ByteThread { return func(bytesIn chan []byte, bytesOut chan []byte, midiState *midi.MidiState) { for bytes := range bytesIn { H := float64(midiState.ControllerValues[config.HUE_KNOB]) / 127.0 FADE_TO_WHITE := float64(midiState.ControllerValues[config.MORPH_KNOB]) / 127.0 r, g, b := colorutils.HslToRgb(H, 1.0, 0.5) r = r*(1-FADE_TO_WHITE) + 1*FADE_TO_WHITE g = g*(1-FADE_TO_WHITE) + 1*FADE_TO_WHITE b = b*(1-FADE_TO_WHITE) + 1*FADE_TO_WHITE n_pixels := len(bytes) / 3 for ii := 0; ii < n_pixels; ii++ { bytes[ii*3+0] = colorutils.FloatToByte(r) bytes[ii*3+1] = colorutils.FloatToByte(g) bytes[ii*3+2] = colorutils.FloatToByte(b) } bytesOut <- bytes }<|fim▁hole|><|fim▁end|>
} }
<|file_name|>snapshots.py<|end_file_name|><|fim▁begin|># Copyright 2011 Justin Santa Barbara # All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. """The volumes snapshots api."""<|fim▁hole|>from webob import exc from cinder.api import common from cinder.api.openstack import wsgi from cinder.api.v1 import volumes from cinder.api import xmlutil from cinder import exception from cinder.openstack.common import log as logging from cinder.openstack.common import strutils from cinder import utils from cinder import volume LOG = logging.getLogger(__name__) def _translate_snapshot_detail_view(context, snapshot): """Maps keys for snapshots details view.""" d = _translate_snapshot_summary_view(context, snapshot) # NOTE(gagupta): No additional data / lookups at the moment return d def _translate_snapshot_summary_view(context, snapshot): """Maps keys for snapshots summary view.""" d = {} d['id'] = snapshot['id'] d['created_at'] = snapshot['created_at'] d['display_name'] = snapshot['display_name'] d['display_description'] = snapshot['display_description'] d['volume_id'] = snapshot['volume_id'] d['status'] = snapshot['status'] d['size'] = snapshot['volume_size'] if snapshot.get('snapshot_metadata'): metadata = snapshot.get('snapshot_metadata') d['metadata'] = dict((item['key'], item['value']) for item in metadata) # avoid circular ref when vol is a Volume instance elif snapshot.get('metadata') and isinstance(snapshot.get('metadata'), dict): d['metadata'] = snapshot['metadata'] 
else: d['metadata'] = {} return d def make_snapshot(elem): elem.set('id') elem.set('status') elem.set('size') elem.set('created_at') elem.set('display_name') elem.set('display_description') elem.set('volume_id') elem.append(common.MetadataTemplate()) class SnapshotTemplate(xmlutil.TemplateBuilder): def construct(self): root = xmlutil.TemplateElement('snapshot', selector='snapshot') make_snapshot(root) return xmlutil.MasterTemplate(root, 1) class SnapshotsTemplate(xmlutil.TemplateBuilder): def construct(self): root = xmlutil.TemplateElement('snapshots') elem = xmlutil.SubTemplateElement(root, 'snapshot', selector='snapshots') make_snapshot(elem) return xmlutil.MasterTemplate(root, 1) class SnapshotsController(wsgi.Controller): """The Volumes API controller for the OpenStack API.""" def __init__(self, ext_mgr=None): self.volume_api = volume.API() self.ext_mgr = ext_mgr super(SnapshotsController, self).__init__() @wsgi.serializers(xml=SnapshotTemplate) def show(self, req, id): """Return data about the given snapshot.""" context = req.environ['cinder.context'] try: vol = self.volume_api.get_snapshot(context, id) req.cache_resource(vol) except exception.NotFound: raise exc.HTTPNotFound() return {'snapshot': _translate_snapshot_detail_view(context, vol)} def delete(self, req, id): """Delete a snapshot.""" context = req.environ['cinder.context'] LOG.audit(_("Delete snapshot with id: %s"), id, context=context) try: snapshot = self.volume_api.get_snapshot(context, id) self.volume_api.delete_snapshot(context, snapshot) except exception.NotFound: raise exc.HTTPNotFound() return webob.Response(status_int=202) @wsgi.serializers(xml=SnapshotsTemplate) def index(self, req): """Returns a summary list of snapshots.""" return self._items(req, entity_maker=_translate_snapshot_summary_view) @wsgi.serializers(xml=SnapshotsTemplate) def detail(self, req): """Returns a detailed list of snapshots.""" return self._items(req, entity_maker=_translate_snapshot_detail_view) def _items(self, 
req, entity_maker): """Returns a list of snapshots, transformed through entity_maker.""" context = req.environ['cinder.context'] #pop out limit and offset , they are not search_opts search_opts = req.GET.copy() search_opts.pop('limit', None) search_opts.pop('offset', None) #filter out invalid option allowed_search_options = ('status', 'volume_id', 'display_name') volumes.remove_invalid_options(context, search_opts, allowed_search_options) snapshots = self.volume_api.get_all_snapshots(context, search_opts=search_opts) limited_list = common.limited(snapshots, req) req.cache_resource(limited_list) res = [entity_maker(context, snapshot) for snapshot in limited_list] return {'snapshots': res} @wsgi.serializers(xml=SnapshotTemplate) def create(self, req, body): """Creates a new snapshot.""" kwargs = {} context = req.environ['cinder.context'] if not self.is_valid_body(body, 'snapshot'): raise exc.HTTPUnprocessableEntity() snapshot = body['snapshot'] kwargs['metadata'] = snapshot.get('metadata', None) try: volume_id = snapshot['volume_id'] except KeyError: msg = _("'volume_id' must be specified") raise exc.HTTPBadRequest(explanation=msg) try: volume = self.volume_api.get(context, volume_id) except exception.NotFound: raise exc.HTTPNotFound() force = snapshot.get('force', False) msg = _("Create snapshot from volume %s") LOG.audit(msg, volume_id, context=context) if not utils.is_valid_boolstr(force): msg = _("Invalid value '%s' for force. 
") % force raise exception.InvalidParameterValue(err=msg) if strutils.bool_from_string(force): new_snapshot = self.volume_api.create_snapshot_force( context, volume, snapshot.get('display_name'), snapshot.get('display_description'), **kwargs) else: new_snapshot = self.volume_api.create_snapshot( context, volume, snapshot.get('display_name'), snapshot.get('display_description'), **kwargs) retval = _translate_snapshot_detail_view(context, new_snapshot) return {'snapshot': retval} @wsgi.serializers(xml=SnapshotTemplate) def update(self, req, id, body): """Update a snapshot.""" context = req.environ['cinder.context'] if not body: raise exc.HTTPUnprocessableEntity() if 'snapshot' not in body: raise exc.HTTPUnprocessableEntity() snapshot = body['snapshot'] update_dict = {} valid_update_keys = ( 'display_name', 'display_description', ) for key in valid_update_keys: if key in snapshot: update_dict[key] = snapshot[key] try: snapshot = self.volume_api.get_snapshot(context, id) self.volume_api.update_snapshot(context, snapshot, update_dict) except exception.NotFound: raise exc.HTTPNotFound() snapshot.update(update_dict) return {'snapshot': _translate_snapshot_detail_view(context, snapshot)} def create_resource(ext_mgr): return wsgi.Resource(SnapshotsController(ext_mgr))<|fim▁end|>
import webob
<|file_name|>BlockMicrowave.java<|end_file_name|><|fim▁begin|>package us.mcsw.minerad.blocks; import net.minecraft.block.ITileEntityProvider; import net.minecraft.block.material.Material; import net.minecraft.client.renderer.texture.IIconRegister; import net.minecraft.tileentity.TileEntity; import net.minecraft.util.IIcon; import net.minecraft.world.IBlockAccess; import net.minecraft.world.World; import us.mcsw.core.BlockMR; import us.mcsw.minerad.ref.TextureReference; import us.mcsw.minerad.tiles.TileMicrowave; public class BlockMicrowave extends BlockMR implements ITileEntityProvider { IIcon active = null; public BlockMicrowave() { super(Material.iron, "microwave"); <|fim▁hole|> } @Override public TileEntity createNewTileEntity(World w, int m) { return new TileMicrowave(); } @Override public void registerBlockIcons(IIconRegister reg) { super.registerBlockIcons(reg); active = reg.registerIcon(TextureReference.RESOURCE_PREFIX + "microwaveOn"); } @Override public IIcon getIcon(IBlockAccess ba, int x, int y, int z, int m) { TileEntity te = ba.getTileEntity(x, y, z); if (te != null && te instanceof TileMicrowave) { TileMicrowave tm = (TileMicrowave) te; if (tm.isRunning()) { return active; } } return super.getIcon(ba, x, y, z, m); } }<|fim▁end|>
setHardness(4.0f); isBlockContainer = true;
<|file_name|>webpack.config.js<|end_file_name|><|fim▁begin|>const path = require('path') module.exports = { context: __dirname, entry: './js/ClientApp.js', devtool: 'eval', output: { path: path.join(__dirname, '/public'), publicPath: '/public/', filename: 'bundle.js' }, devServer: { publicPath: '/public/', historyApiFallback: true }, resolve: { extensions: ['.js', '.json'] }, stats: { colors: true, reasons: true, chunks: true }, module: { rules: [ { enforce: 'pre', test: /\.js$/, loader: 'eslint-loader', exclude: /node_modlues/ }, { test: /\.json$/, loader: 'json-loader' }, { include: path.resolve(__dirname, 'js'), test: /\.js$/, loader: 'babel-loader' }, { test: /\.css$/, use: [ 'style-loader', { loader: 'css-loader', options: { url: false } } ] }, { test: /\.scss$/, loader: 'style-loader!css-loader!autoprefixer-loader!sass-loader'<|fim▁hole|> } ] } }<|fim▁end|>
}, { test: /\.otf$/, loader: 'file-loader?name=fonts/[name].[ext]'
<|file_name|>Solution.java<|end_file_name|><|fim▁begin|>package com.javarush.test.level07.lesson12.home01; import java.io.BufferedReader; import java.io.IOException; import java.io.InputStreamReader; import java.util.ArrayList; import java.util.List; /* Вывести числа в обратном порядке Ввести с клавиатуры 10 чисел и заполнить ими список. Вывести их в обратном порядке. Использовать только цикл for. */ public class Solution { public static void main(String[] args) throws IOException { BufferedReader reader = new BufferedReader(new InputStreamReader(System.in)); ArrayList<Integer> integers = new ArrayList<>(); for (int i = 0; i < 10; i++) { integers.add(Integer.parseInt(reader.readLine())); } for (int i = 0; i < integers.size(); i++) {<|fim▁hole|> System.out.println(integers.get(integers.size() - i - 1)); } } }<|fim▁end|>
<|file_name|>text_visual.py<|end_file_name|><|fim▁begin|>import numpy as np import os from galry import log_debug, log_info, log_warn, get_color from fontmaps import load_font from visual import Visual __all__ = ['TextVisual'] VS = """ gl_Position.x += (offset - text_width / 2) * spacing.x / window_size.x; gl_Position.y -= index * spacing.y / window_size.y; gl_Position.xy = gl_Position.xy + posoffset / window_size; gl_PointSize = point_size; flat_text_map = text_map; """ def FS(background_transparent=True): if background_transparent: background_transparent_shader = "letter_alpha" else: background_transparent_shader = "1." fs = """ // relative coordinates of the pixel within the sprite (in [0,1]) float x = gl_PointCoord.x; float y = gl_PointCoord.y; // size of the corresponding character float w = flat_text_map.z; float h = flat_text_map.w; // display the character at the left of the sprite float delta = h / w; x = delta * x; if ((x >= 0) && (x <= 1)) { // coordinates of the character in the font atlas vec2 coord = flat_text_map.xy + vec2(w * x, h * y); float letter_alpha = texture2D(tex_sampler, coord).a; out_color = color * letter_alpha; out_color.a = %s; } else out_color = vec4(0, 0, 0, 0); """ % background_transparent_shader return fs class TextVisual(Visual): """Template for displaying short text on a single line. It uses the following technique: each character is rendered as a sprite, i.e. a pixel with a large point size, and a single texture for every point. The texture contains a font atlas, i.e. all characters in a given font. Every point comes with coordinates that indicate which small portion of the font atlas to display (that portion corresponds to the character). This is all done automatically, thanks to a font atlas stored in the `fontmaps` folder. There needs to be one font atlas per font and per font size. Also, there is a configuration text file with the coordinates and size of every character. 
The software used to generate font maps is AngelCode Bitmap Font Generator. For now, there is only the Segoe font. """ def position_compound(self, coordinates=None): """Compound variable with the position of the text. All characters are at the exact same position, and are then shifted in the vertex shader.""" if coordinates is None: coordinates = (0., 0.) if type(coordinates) == tuple: coordinates = [coordinates] coordinates = np.array(coordinates) position = np.repeat(coordinates, self.textsizes, axis=0) return dict(position=position) def text_compound(self, text): """Compound variable for the text string. It changes the text map, the character position, and the text width.""" coordinates = self.coordinates if "\n" in text: text = text.split("\n") if type(text) == list: self.textsizes = [len(t) for t in text] text = "".join(text) if type(coordinates) != list: coordinates = [coordinates] * len(self.textsizes) index = np.repeat(np.arange(len(self.textsizes)), self.textsizes) text_map = self.get_map(text) # offset for all characters in the merging of all texts offset = np.hstack((0., np.cumsum(text_map[:, 2])[:-1])) # for each text, the cumsum of the length of all texts strictly # before d = np.hstack(([0], np.cumsum(self.textsizes)[:-1])) # compensate the offsets for the length of each text offset -= np.repeat(offset[d], self.textsizes) text_width = 0. 
else: self.textsizes = len(text) text_map = self.get_map(text) offset = np.hstack((0., np.cumsum(text_map[:, 2])[:-1])) text_width = offset[-1] index = np.zeros(len(text)) self.size = len(text) d = dict(text_map=text_map, offset=offset, text_width=text_width, index=index) <|fim▁hole|> def initialize_font(self, font, fontsize): """Initialize the specified font at a given size.""" self.texture, self.matrix, self.get_map = load_font(font, fontsize) def initialize(self, text, coordinates=(0., 0.), font='segoe', fontsize=24, color=None, letter_spacing=None, interline=0., autocolor=None, background_transparent=True, prevent_constrain=False, depth=None, posoffset=None): """Initialize the text template.""" if prevent_constrain: self.constrain_ratio = False if autocolor is not None: color = get_color(autocolor) if color is None: color = self.default_color self.size = len(text) self.primitive_type = 'POINTS' self.interline = interline text_length = self.size self.initialize_font(font, fontsize) self.coordinates = coordinates point_size = float(self.matrix[:,4].max() * self.texture.shape[1]) # template attributes and varyings self.add_attribute("position", vartype="float", ndim=2, data=np.zeros((self.size, 2))) self.add_attribute("offset", vartype="float", ndim=1) self.add_attribute("index", vartype="float", ndim=1) self.add_attribute("text_map", vartype="float", ndim=4) self.add_varying("flat_text_map", vartype="float", flat=True, ndim=4) if posoffset is None: posoffset = (0., 0.) self.add_uniform('posoffset', vartype='float', ndim=2, data=posoffset) # texture self.add_texture("tex_sampler", size=self.texture.shape[:2], ndim=2, ncomponents=self.texture.shape[2], data=self.texture) # pure heuristic (probably bogus) if letter_spacing is None: letter_spacing = (100 + 17. 
* fontsize) self.add_uniform("spacing", vartype="float", ndim=2, data=(letter_spacing, interline)) self.add_uniform("point_size", vartype="float", ndim=1, data=point_size) # one color per if isinstance(color, np.ndarray) and color.ndim > 1: self.add_attribute('color0', vartype="float", ndim=4, data=color) self.add_varying('color', vartype="float", ndim=4) self.add_vertex_main('color = color0;') else: self.add_uniform("color", vartype="float", ndim=4, data=color) self.add_uniform("text_width", vartype="float", ndim=1) # compound variables self.add_compound("text", fun=self.text_compound, data=text) self.add_compound("coordinates", fun=self.position_compound, data=coordinates) # vertex shader self.add_vertex_main(VS, after='viewport') # fragment shader self.add_fragment_main(FS(background_transparent)) self.depth = depth<|fim▁end|>
d.update(self.position_compound(coordinates)) return d
<|file_name|>tests.py<|end_file_name|><|fim▁begin|><|fim▁hole|>Created on Jun 6, 2013 @author: dmitchell ''' from xmodule.modulestore.tests.factories import CourseFactory, ItemFactory from student.tests.factories import AdminFactory from xmodule.modulestore.tests.django_utils import ModuleStoreTestCase import xmodule_modifiers import datetime from pytz import UTC from xmodule.modulestore.tests import factories class TestXmoduleModfiers(ModuleStoreTestCase): # FIXME disabled b/c start date inheritance is not occuring and render_... in get_html is failing due # to middleware.lookup['main'] not being defined def _test_add_histogram(self): instructor = AdminFactory.create() self.client.login(username=instructor.username, password='test') course = CourseFactory.create(org='test', number='313', display_name='histogram test') section = ItemFactory.create( parent_location=course.location, display_name='chapter hist', category='chapter') problem = ItemFactory.create( parent_location=section.location, display_name='problem hist 1', category='problem') problem.has_score = False # don't trip trying to retrieve db data late_problem = ItemFactory.create( parent_location=section.location, display_name='problem hist 2', category='problem') late_problem.start = datetime.datetime.now(UTC) + datetime.timedelta(days=32) late_problem.has_score = False problem_module = factories.get_test_xmodule_for_descriptor(problem) problem_module.get_html = xmodule_modifiers.add_histogram(lambda:'', problem_module, instructor) self.assertRegexpMatches( problem_module.get_html(), r'.*<font color=\'green\'>Not yet</font>.*') problem_module = factories.get_test_xmodule_for_descriptor(late_problem) problem_module.get_html = xmodule_modifiers.add_histogram(lambda: '', problem_module, instructor) self.assertRegexpMatches( problem_module.get_html(), r'.*<font color=\'red\'>Yes!</font>.*')<|fim▁end|>
'''
<|file_name|>orderables.py<|end_file_name|><|fim▁begin|>from collections import namedtuple from datetime import date, datetime, timedelta from cms.models import CMSPlugin from django.db import models from django.utils.formats import date_format, time_format from django.utils.functional import cached_property from django.utils.translation import ugettext_lazy as _ from ..conf import settings from .agegroup import AgeGroup from .department import Department from .roles import Leader from .schoolyear import SchoolYear from .subjects import Subject, SubjectDiscount, SubjectGroup, SubjectRegistration, SubjectType from .targetgroup import TargetGroup from .utils import PaymentStatus, copy_related_objects class Orderable(Subject): duration = models.DurationField(_("duration"), help_text=_("Format: HH:MM:SS")) due_from_days = models.IntegerField( _("number of days to send the payment request before event date"), blank=True, null=True, help_text=_( "If set, payment request will be sent this number of days before event date. " "If not set, payment request will be sent when registration is approved." 
), ) due_date_days = models.IntegerField(_("number of days to due date before event date"), default=0) class Meta: app_label = "leprikon" ordering = ("code", "name") verbose_name = _("orderable event") verbose_name_plural = _("orderable events") @property def inactive_registrations(self): return self.registrations.filter(canceled__isnull=False) def get_times_list(self): return self.duration get_times_list.short_description = _("duration") def copy_to_school_year(old, school_year): new = Orderable.objects.get(id=old.id) new.id, new.pk = None, None new.school_year = school_year new.public = False new.evaluation = "" new.note = "" new.save() new.groups.set(old.groups.all()) new.age_groups.set(old.age_groups.all()) new.target_groups.set(old.target_groups.all()) for leader in old.all_leaders: school_year.leaders.add(leader) new.leaders.set(old.all_leaders) new.questions.set(old.questions.all()) copy_related_objects( new, attachments=old.attachments, times=old.times, variants=old.variants, ) return new class OrderableRegistration(SubjectRegistration): subject_type = SubjectType.ORDERABLE start_date = models.DateField(_("start date")) start_time = models.TimeField(_("start time"), blank=True, null=True) class Meta: app_label = "leprikon" verbose_name = _("orderable event registration") verbose_name_plural = _("orderable event registrations") def get_payment_status(self, d=None): return PaymentStatus( price=self.price, discount=self.get_discounted(d), explanation=",\n".join( discount.explanation.strip() for discount in self.all_discounts if (d is None or discount.accounted.date() <= d) and discount.explanation.strip() ), received=self.get_received(d), returned=self.get_returned(d), current_date=d or date.today(), due_from=self.payment_requested and ( self.payment_requested.date() if self.subject.orderable.due_from_days is None else max( self.start_date - timedelta(days=self.subject.orderable.due_from_days), self.payment_requested.date(), ) ), 
due_date=self.payment_requested and max( self.start_date - timedelta(days=self.subject.orderable.due_date_days), self.payment_requested.date() + timedelta(days=self.subject.min_due_date_days), ), ) @cached_property def end_date(self): if self.start_time: return (datetime.combine(self.start_date, self.start_time) + self.subject.orderable.duration).date() else: return self.start_date + self.subject.orderable.duration @cached_property def end_time(self): if self.start_time: return (datetime.combine(self.start_date, self.start_time) + self.subject.orderable.duration).time() def event_date(self): return "{start}{separator}{end}".format( start=( date_format(datetime.combine(self.start_date, self.start_time), "SHORT_DATETIME_FORMAT") if self.start_time else date_format(self.start_date, "SHORT_DATE_FORMAT") ), separator=" - " if self.start_date != self.end_date or self.end_time is not None else "", end=( (time_format(self.end_time, "TIME_FORMAT") if self.end_time else "") if self.start_date == self.end_date else ( date_format(datetime.combine(self.end_date, self.end_time), "SHORT_DATETIME_FORMAT") if self.end_time else date_format(self.end_date, "SHORT_DATE_FORMAT") ) ), ) event_date.admin_order_field = "start_date" event_date.short_description = _("event date") class OrderableDiscount(SubjectDiscount): registration = models.ForeignKey( OrderableRegistration, on_delete=models.CASCADE, related_name="discounts", verbose_name=_("registration") ) class Meta: app_label = "leprikon" verbose_name = _("orderable event discount") verbose_name_plural = _("orderable event discounts") ordering = ("accounted",) class OrderablePlugin(CMSPlugin): event = models.ForeignKey(Orderable, on_delete=models.CASCADE, related_name="+", verbose_name=_("event")) template = models.CharField( _("template"), max_length=100, choices=settings.LEPRIKON_ORDERABLE_TEMPLATES, default=settings.LEPRIKON_ORDERABLE_TEMPLATES[0][0], help_text=_("The template used to render plugin."), ) class Meta: app_label = 
"leprikon" class OrderableListPlugin(CMSPlugin): school_year = models.ForeignKey( SchoolYear, blank=True, null=True, on_delete=models.CASCADE, related_name="+", verbose_name=_("school year") ) departments = models.ManyToManyField( Department, blank=True, related_name="+", verbose_name=_("departments"), help_text=_("Keep empty to skip searching by departments."), ) event_types = models.ManyToManyField( SubjectType, blank=True, limit_choices_to={"subject_type": SubjectType.ORDERABLE}, related_name="+", verbose_name=_("event types"), help_text=_("Keep empty to skip searching by event types."), ) age_groups = models.ManyToManyField( AgeGroup, blank=True, related_name="+", verbose_name=_("age groups"), help_text=_("Keep empty to skip searching by age groups."), ) target_groups = models.ManyToManyField( TargetGroup, blank=True, related_name="+", verbose_name=_("target groups"), help_text=_("Keep empty to skip searching by target groups."), ) groups = models.ManyToManyField( SubjectGroup, blank=True, related_name="+", verbose_name=_("event groups"), help_text=_("Keep empty to skip searching by groups."), ) leaders = models.ManyToManyField( Leader, verbose_name=_("leaders"), blank=True, related_name="+", help_text=_("Keep empty to skip searching by leaders."), ) template = models.CharField( _("template"), max_length=100, choices=settings.LEPRIKON_ORDERABLELIST_TEMPLATES, default=settings.LEPRIKON_ORDERABLELIST_TEMPLATES[0][0],<|fim▁hole|> ) class Meta: app_label = "leprikon" def copy_relations(self, oldinstance): self.departments.set(oldinstance.departments.all()) self.event_types.set(oldinstance.event_types.all()) self.groups.set(oldinstance.groups.all()) self.age_groups.set(oldinstance.age_groups.all()) self.target_groups.set(oldinstance.age_groups.all()) self.leaders.set(oldinstance.leaders.all()) @cached_property def all_departments(self): return list(self.departments.all()) @cached_property def all_event_types(self): return list(self.event_types.all()) 
@cached_property def all_groups(self): return list(self.groups.all()) @cached_property def all_age_groups(self): return list(self.age_groups.all()) @cached_property def all_target_groups(self): return list(self.target_groups.all()) @cached_property def all_leaders(self): return list(self.leaders.all()) Group = namedtuple("Group", ("group", "objects")) def render(self, context): school_year = ( self.school_year or getattr(context.get("request"), "school_year") or SchoolYear.objects.get_current() ) events = Orderable.objects.filter(school_year=school_year, public=True).distinct() if self.all_departments: events = events.filter(department__in=self.all_departments) if self.all_event_types: events = events.filter(subject_type__in=self.all_event_types) if self.all_age_groups: events = events.filter(age_groups__in=self.all_age_groups) if self.all_target_groups: events = events.filter(target_groups__in=self.all_target_groups) if self.all_leaders: events = events.filter(leaders__in=self.all_leaders) if self.all_groups: events = events.filter(groups__in=self.all_groups) groups = self.all_groups elif self.all_event_types: groups = SubjectGroup.objects.filter(subject_types__in=self.all_event_types) else: groups = SubjectGroup.objects.all() context.update( { "school_year": school_year, "events": events, "groups": (self.Group(group=group, objects=events.filter(groups=group)) for group in groups), } ) return context class FilteredOrderableListPlugin(CMSPlugin): school_year = models.ForeignKey( SchoolYear, blank=True, null=True, on_delete=models.CASCADE, related_name="+", verbose_name=_("school year") ) event_types = models.ManyToManyField( SubjectType, limit_choices_to={"subject_type": SubjectType.ORDERABLE}, related_name="+", verbose_name=_("event types"), ) class Meta: app_label = "leprikon" def copy_relations(self, oldinstance): self.event_types = oldinstance.event_types.all() @cached_property def all_event_types(self): return list(self.event_types.all()) def render(self, 
context): school_year = ( self.school_year or getattr(context.get("request"), "school_year") or SchoolYear.objects.get_current() ) from ..forms.subjects import SubjectFilterForm form = SubjectFilterForm( subject_type_type=SubjectType.ORDERABLE, subject_types=self.all_event_types, school_year=school_year, is_staff=context["request"].user.is_staff, data=context["request"].GET, ) context.update( { "school_year": school_year, "form": form, "events": form.get_queryset(), } ) return context<|fim▁end|>
help_text=_("The template used to render plugin."),
<|file_name|>test_chart_chartarea02.py<|end_file_name|><|fim▁begin|>############################################################################### # # Tests for XlsxWriter. # # Copyright (c), 2013-2016, John McNamara, jmcnamara@cpan.org #<|fim▁hole|> class TestCompareXLSXFiles(ExcelComparisonTest): """ Test file created by XlsxWriter against a file created by Excel. """ def setUp(self): self.maxDiff = None filename = 'chart_chartarea01.xlsx' test_dir = 'xlsxwriter/test/comparison/' self.got_filename = test_dir + '_test_2_' + filename self.exp_filename = test_dir + 'xlsx_files/' + filename self.ignore_files = [] self.ignore_elements = {} def test_create_file(self): """Test XlsxWriter chartarea properties.""" workbook = Workbook(self.got_filename) worksheet = workbook.add_worksheet() chart = workbook.add_chart({'type': 'column'}) chart.axis_ids = [82933248, 82952960] data = [ [1, 2, 3, 4, 5], [2, 4, 6, 8, 10], [3, 6, 9, 12, 15], ] worksheet.write_column('A1', data[0]) worksheet.write_column('B1', data[1]) worksheet.write_column('C1', data[2]) chart.add_series({'values': '=Sheet1!$A$1:$A$5'}) chart.add_series({'values': '=Sheet1!$B$1:$B$5'}) chart.add_series({'values': '=Sheet1!$C$1:$C$5'}) chart.set_chartarea({ 'border': {'none': 1}, 'fill': {'color': 'red'} }) chart.set_plotarea({ 'border': {'color': 'yellow', 'width': 1, 'dash_type': 'dash'}, 'fill': {'color': '#92D050'} }) worksheet.insert_chart('E9', chart) workbook.close() self.assertExcelEqual()<|fim▁end|>
from ..excel_comparsion_test import ExcelComparisonTest from ...workbook import Workbook
<|file_name|>period.py<|end_file_name|><|fim▁begin|># # Copyright 2013 Quantopian, Inc. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. import functools import logbook import math import numpy as np import numpy.linalg as la from six import iteritems from zipline.finance import trading import pandas as pd from . import risk from . risk import ( alpha, check_entry, information_ratio, sharpe_ratio, sortino_ratio, ) log = logbook.Logger('Risk Period') choose_treasury = functools.partial(risk.choose_treasury, risk.select_treasury_duration) class RiskMetricsPeriod(object): def __init__(self, start_date, end_date, returns, benchmark_returns=None): treasury_curves = trading.environment.treasury_curves if treasury_curves.index[-1] >= start_date: mask = ((treasury_curves.index >= start_date) & (treasury_curves.index <= end_date)) self.treasury_curves = treasury_curves[mask] else: # our test is beyond the treasury curve history # so we'll use the last available treasury curve self.treasury_curves = treasury_curves[-1:] self.start_date = start_date self.end_date = end_date if benchmark_returns is None: br = trading.environment.benchmark_returns benchmark_returns = br[(br.index >= returns.index[0]) & (br.index <= returns.index[-1])] self.algorithm_returns = self.mask_returns_to_period(returns) self.benchmark_returns = self.mask_returns_to_period(benchmark_returns) self.calculate_metrics() def calculate_metrics(self): self.benchmark_period_returns = \ 
self.calculate_period_returns(self.benchmark_returns) self.algorithm_period_returns = \ self.calculate_period_returns(self.algorithm_returns) if not self.algorithm_returns.index.equals( self.benchmark_returns.index ): message = "Mismatch between benchmark_returns ({bm_count}) and \ algorithm_returns ({algo_count}) in range {start} : {end}" message = message.format( bm_count=len(self.benchmark_returns), algo_count=len(self.algorithm_returns), start=self.start_date, end=self.end_date ) raise Exception(message) self.num_trading_days = len(self.benchmark_returns) self.benchmark_volatility = self.calculate_volatility( self.benchmark_returns) self.algorithm_volatility = self.calculate_volatility( self.algorithm_returns) self.treasury_period_return = choose_treasury( self.treasury_curves, self.start_date, self.end_date ) self.sharpe = self.calculate_sharpe() self.sortino = self.calculate_sortino() self.information = self.calculate_information() self.beta, self.algorithm_covariance, self.benchmark_variance, \ self.condition_number, self.eigen_values = self.calculate_beta() self.alpha = self.calculate_alpha() self.excess_return = self.algorithm_period_returns - \ self.treasury_period_return self.max_drawdown = self.calculate_max_drawdown() def to_dict(self): """ Creates a dictionary representing the state of the risk report. 
Returns a dict object of the form: """ period_label = self.end_date.strftime("%Y-%m") rval = { 'trading_days': self.num_trading_days, 'benchmark_volatility': self.benchmark_volatility, 'algo_volatility': self.algorithm_volatility, 'treasury_period_return': self.treasury_period_return, 'algorithm_period_return': self.algorithm_period_returns, 'benchmark_period_return': self.benchmark_period_returns, 'sharpe': self.sharpe, 'sortino': self.sortino, 'information': self.information, 'beta': self.beta, 'alpha': self.alpha, 'excess_return': self.excess_return, 'max_drawdown': self.max_drawdown, 'period_label': period_label } return {k: None if check_entry(k, v) else v for k, v in iteritems(rval)} def __repr__(self): statements = [] metrics = [ "algorithm_period_returns", "benchmark_period_returns", "excess_return", "num_trading_days", "benchmark_volatility", "algorithm_volatility", "sharpe", "sortino", "information", "algorithm_covariance", "benchmark_variance", "beta", "alpha", "max_drawdown", "algorithm_returns", "benchmark_returns", "condition_number", "eigen_values" ] for metric in metrics: value = getattr(self, metric) statements.append("{m}:{v}".format(m=metric, v=value)) return '\n'.join(statements) def mask_returns_to_period(self, daily_returns): if isinstance(daily_returns, list): returns = pd.Series([x.returns for x in daily_returns], index=[x.date for x in daily_returns]) else: # otherwise we're receiving an index already returns = daily_returns trade_days = trading.environment.trading_days trade_day_mask = returns.index.normalize().isin(trade_days) mask = ((returns.index >= self.start_date) & (returns.index <= self.end_date) & trade_day_mask) returns = returns[mask] return returns def calculate_period_returns(self, returns): period_returns = (1. 
+ returns).prod() - 1 return period_returns def calculate_volatility(self, daily_returns): return np.std(daily_returns, ddof=1) * math.sqrt(self.num_trading_days) def calculate_sharpe(self): """ http://en.wikipedia.org/wiki/Sharpe_ratio """ return sharpe_ratio(self.algorithm_volatility, self.algorithm_period_returns, self.treasury_period_return) def calculate_sortino(self, mar=None): """ http://en.wikipedia.org/wiki/Sortino_ratio """ if mar is None: mar = self.treasury_period_return <|fim▁hole|> self.algorithm_period_returns, mar) def calculate_information(self): """ http://en.wikipedia.org/wiki/Information_ratio """ return information_ratio(self.algorithm_returns, self.benchmark_returns) def calculate_beta(self): """ .. math:: \\beta_a = \\frac{\mathrm{Cov}(r_a,r_p)}{\mathrm{Var}(r_p)} http://en.wikipedia.org/wiki/Beta_(finance) """ # it doesn't make much sense to calculate beta for less than two days, # so return none. if len(self.algorithm_returns) < 2: return 0.0, 0.0, 0.0, 0.0, [] returns_matrix = np.vstack([self.algorithm_returns, self.benchmark_returns]) C = np.cov(returns_matrix, ddof=1) eigen_values = la.eigvals(C) condition_number = max(eigen_values) / min(eigen_values) algorithm_covariance = C[0][1] benchmark_variance = C[1][1] beta = algorithm_covariance / benchmark_variance return ( beta, algorithm_covariance, benchmark_variance, condition_number, eigen_values ) def calculate_alpha(self): """ http://en.wikipedia.org/wiki/Alpha_(investment) """ return alpha(self.algorithm_period_returns, self.treasury_period_return, self.benchmark_period_returns, self.beta) def calculate_max_drawdown(self): compounded_returns = [] cur_return = 0.0 for r in self.algorithm_returns: try: cur_return += math.log(1.0 + r) # this is a guard for a single day returning -100% except ValueError: log.debug("{cur} return, zeroing the returns".format( cur=cur_return)) cur_return = 0.0 # BUG? Shouldn't this be set to log(1.0 + 0) ? 
compounded_returns.append(cur_return) cur_max = None max_drawdown = None for cur in compounded_returns: if cur_max is None or cur > cur_max: cur_max = cur drawdown = (cur - cur_max) if max_drawdown is None or drawdown < max_drawdown: max_drawdown = drawdown if max_drawdown is None: return 0.0 return 1.0 - math.exp(max_drawdown)<|fim▁end|>
return sortino_ratio(self.algorithm_returns,
<|file_name|>RenameModulesScript.py<|end_file_name|><|fim▁begin|># -*- encoding: utf-8 -*- import os from abjad.tools import documentationtools from abjad.tools import systemtools from abjad.tools.developerscripttools.DeveloperScript import DeveloperScript from abjad.tools.developerscripttools.ReplaceInFilesScript \ import ReplaceInFilesScript class RenameModulesScript(DeveloperScript): r'''Renames classes and functions. Handle renaming the module and package, as well as any tests, documentation or mentions of the class throughout the Abjad codebase: .. shell:: ajv rename --help ''' ### PUBLIC PROPERTIES ### @property def alias(self): r'''Alias of script. Returns ``'rename'``. ''' return 'rename' @property def long_description(self): r'''Long description of script. Returns string or none. ''' return None @property def scripting_group(self): r'''Scripting group of script. Returns none. ''' return None @property def short_description(self): r'''Short description of script. Returns string. ''' return 'Rename public modules.' @property def version(self): r'''Version of script. Returns float. ''' return 1.0 ### PRIVATE METHODS ### def _codebase_name_to_codebase_docs_path(self, codebase): from abjad import abjad_configuration if codebase == 'mainline': return os.path.join( abjad_configuration.abjad_directory, 'docs', 'source', 'api', 'tools', ) elif codebase == 'experimental': return os.path.join( abjad_configuration.abjad_experimental_directory, 'docs', 'source', 'tools', ) message = 'bad codebase name: {!r}.' message = message.format(codebase) raise Exception(message) def _codebase_name_to_codebase_tools_path(self, codebase): from abjad import abjad_configuration if codebase == 'mainline': return os.path.join( abjad_configuration.abjad_directory, 'tools') elif codebase == 'experimental': return os.path.join( abjad_configuration.abjad_experimental_directory, 'tools') message = 'bad codebase name: {!r}.' 
message = message.format(codebase) raise Exception(message) def _confirm_name_changes(self, old_codebase, old_tools_package_name, old_module_name, new_codebase, new_tools_package_name, new_module_name, ): max_codebase = max(len(old_codebase), len(new_codebase)) old_codebase = old_codebase.ljust(max_codebase) new_codebase = new_codebase.ljust(max_codebase) print('') print('Is ...') print('') print(' [{}] {}.{}()'.format( old_codebase, old_tools_package_name, old_module_name)) print(' ===>') print(' [{}] {}.{}()'.format( new_codebase, new_tools_package_name, new_module_name)) print('') string = raw_input('... correct [yes, no, abort]? ').lower() print('') if string in ('y', 'yes'): return True elif string in ('a', 'abort', 'q', 'quit'): raise SystemExit elif string in ('n', 'no'): return False def _get_object_names(self, kind, codebase, tools_package_name): assert kind in ('class', 'function') tools_path = self._codebase_name_to_codebase_tools_path(codebase) path = os.path.join(tools_path, tools_package_name) if kind == 'class': generator = documentationtools.yield_all_classes( code_root=path, include_private_objects=True, ) elif kind == 'function': generator = documentationtools.yield_all_functions( code_root=path, include_private_objects=True, ) return tuple(sorted(generator, key=lambda x: x.__name__)) def _get_tools_package_names(self, codebase): tools_path = self._codebase_name_to_codebase_tools_path(codebase) names = [] for x in os.listdir(tools_path): if os.path.isdir(os.path.join(tools_path, x)): if not x.startswith(('_', '.')): names.append(x) return tuple(sorted(names)) def _parse_tools_package_path(self, path): from abjad import abjad_configuration if '.' 
not in path: raise SystemExit tools_package_name, module_name = path.split('.') mainline_tools_directory = os.path.join( abjad_configuration.abjad_directory, 'tools', ) for directory_name in os.listdir(mainline_tools_directory): directory = os.path.join( mainline_tools_directory, directory_name) if not os.path.isdir(directory): continue elif directory_name != tools_package_name: continue return 'mainline', tools_package_name, module_name experimental_tools_directory = os.path.join( abjad_configuration.abjad_experimental_directory, 'tools', ) for directory_name in os.listdir(mainline_tools_directory): directory = os.path.join( experimental_tools_directory, directory_name) if not os.path.isdir(directory): continue elif directory_name != tools_package_name: continue return 'experimental', tools_package_name, module_name raise SystemExit def _rename_old_api_page(self, old_codebase, old_tools_package_name, old_module_name, new_codebase, new_tools_package_name, new_module_name, ): print('Renaming old API page ...') old_docs_path = self._codebase_name_to_codebase_docs_path(old_codebase) new_docs_path = self._codebase_name_to_codebase_docs_path(new_codebase) old_rst_file_name = old_module_name + '.rst' new_rst_file_name = new_module_name + '.rst' old_api_path = os.path.join( old_docs_path, old_tools_package_name, old_rst_file_name) new_api_path = os.path.join( new_docs_path, new_tools_package_name, new_rst_file_name) command = 'mv {} {}'.format( old_api_path, new_api_path) systemtools.IOManager.spawn_subprocess(command) print('') def _rename_old_module(self, old_codebase, old_tools_package_name, old_module_name, new_codebase, new_tools_package_name, new_module_name, ): print('Renaming old module ...') old_tools_path = self._codebase_name_to_codebase_tools_path( old_codebase) new_tools_path = self._codebase_name_to_codebase_tools_path( new_codebase) old_module = old_module_name + '.py' old_path = os.path.join( old_tools_path, old_tools_package_name, old_module) new_module = 
new_module_name + '.py' new_path = os.path.join( new_tools_path, new_tools_package_name, new_module) command = 'git mv -f {} {}'.format( old_path, new_path) systemtools.IOManager.spawn_subprocess(command) print('') def _rename_old_test_files(self, old_codebase, old_tools_package_name, old_module_name, new_codebase, new_tools_package_name, new_module_name, ): print('Renaming old test file(s) ...') old_tools_path = self._codebase_name_to_codebase_tools_path( old_codebase) old_test_path = os.path.join( old_tools_path, old_tools_package_name, 'test') if not os.path.exists(old_test_path): return new_tools_path = self._codebase_name_to_codebase_tools_path( new_codebase) new_test_path = os.path.join( new_tools_path, new_tools_package_name, 'test') old_test_file_prefix = 'test_{}_{}'.format( old_tools_package_name, old_module_name) old_test_file_names = [x for x in os.listdir(old_test_path) if x.startswith(old_test_file_prefix) and x.endswith('.py')] for old_test_file_name in old_test_file_names: old_test_file_path = os.path.join( old_test_path, old_test_file_name) old_test_file_suffix = old_test_file_name[ len(old_test_file_prefix):] new_test_file_name = 'test_{}_{}{}'.format( new_tools_package_name, new_module_name, old_test_file_suffix) new_test_file_path = os.path.join( new_test_path, new_test_file_name) command = 'git mv -f {} {}'.format( old_test_file_path, new_test_file_path) systemtools.IOManager.spawn_subprocess(command) print('') def _update_codebase(self, old_codebase, old_tools_package_name, old_module_name, new_codebase, new_tools_package_name, new_module_name, ): from abjad import abjad_configuration without_dirs = ['--without-dirs', 'build', '--without-dirs', '_build'] directory = abjad_configuration.abjad_root_directory print('Updating codebase ...') print('') old_text = '{}.{}'.format(old_tools_package_name, old_module_name) new_text = '{}.{}'.format(new_tools_package_name, new_module_name) command = [ directory, old_text, new_text, '--force', 
'--whole-words-only', #'--verbose', ] command.extend(without_dirs) ReplaceInFilesScript()(command) print('') old_text = 'test_{}_{}_'.format( old_tools_package_name, old_module_name) new_text = 'test_{}_{}_'.format( new_tools_package_name, new_module_name) command = [directory, old_text, new_text, '--force', '--verbose'] command.extend(without_dirs) ReplaceInFilesScript()(command) print('') old_text = old_module_name new_text = new_module_name command = [ directory, old_text, new_text, '--force', '--whole-words-only', #'--verbose', ] command.extend(without_dirs) ReplaceInFilesScript()(command) print('') ### PUBLIC METHODS ### def process_args(self, args): r'''Processes `args`. Returns none. ''' systemtools.IOManager.clear_terminal() # Handle source path: old_codebase, old_tools_package_name, old_module_name = \ self._parse_tools_package_path(args.source) old_codebase_tools_path = self._codebase_name_to_codebase_tools_path( old_codebase) old_module_path = os.path.join( old_codebase_tools_path, old_tools_package_name, old_module_name + '.py', ) if not os.path.exists(old_module_path): message = 'source does not exist: {}' message = message.format(old_module_path) raise SystemExit(message) # Handle destination path: new_codebase, new_tools_package_name, new_module_name = \ self._parse_tools_package_path(args.destination) new_codebase_tools_path = self._codebase_name_to_codebase_tools_path( new_codebase) new_module_path = os.path.join( new_codebase_tools_path, new_tools_package_name, new_module_name + '.py', ) if os.path.exists(new_module_path): message = 'destination already exists: {}' message = message.format(old_module_path) raise SystemExit(message) # Process changes: new_args = ( old_codebase, old_tools_package_name, old_module_name, new_codebase, new_tools_package_name, new_module_name, ) if not self._confirm_name_changes(*new_args): raise SystemExit self._rename_old_test_files(*new_args) self._rename_old_api_page(*new_args)<|fim▁hole|> 
self._update_codebase(*new_args) raise SystemExit def setup_argument_parser(self, parser): r'''Sets up argument `parser`. Returns none. ''' parser.add_argument( 'source', help='toolspackage path of source module', ) parser.add_argument( 'destination', help='toolspackage path of destination module', )<|fim▁end|>
self._rename_old_module(*new_args)
<|file_name|>setup.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*- from distutils.core import setup<|fim▁hole|> setup( name='Harmony', version='0.1', author='H. Gökhan Sarı', author_email='th0th@returnfalse.net', packages=['harmony'], scripts=['bin/harmony'], url='https://github.com/th0th/harmony/', license='LICENSE.txt', description='Music folder organizer.', long_description=open('README.asciidoc').read(), )<|fim▁end|>
<|file_name|>DoublePeerAvatar.js<|end_file_name|><|fim▁begin|>/** * Copyright 2017 dialog LLC <info@dlg.im> * @flow */ import type { PeerInfo } from '@dlghq/dialog-types'; import type { AvatarSize } from '../Avatar/getAvatarSize'; import type { Gradient } from '../Avatar/getAvatarColor'; import React, { PureComponent } from 'react'; import classNames from 'classnames'; import getAvatarSize from '../Avatar/getAvatarSize'; import getAvatarText from '../Avatar/getAvatarText'; import getAvatarColor from '../Avatar/getAvatarColor'; import createSequence from '../../utils/createSequence'; import styles from '../PeerAvatar/PeerAvatar.css'; export type Props = { className?: string, peerBig: PeerInfo, peerSmall: PeerInfo, size: AvatarSize, onClick?: (event: SyntheticMouseEvent) => any }; type DefaultProps = { size: AvatarSize }; const seq = createSequence(); class DoublePeerAvatar extends PureComponent<DefaultProps, Props, void> { id: string; ids: { big: string, clip: string, small: string }; static defaultProps = { size: 'medium' }; constructor(props: Props) { super(props); this.id = 'double_peer_avatar_' + seq.next(); this.ids = { big: `${this.id}_big`, clip: `${this.id}_big_clip`, small: `${this.id}_small` }; } getAvatarSize(): number { return getAvatarSize(this.props.size); } renderDefsBig(): React.Element<any> { if (this.props.peerBig.avatar) { return ( <pattern id={this.ids.big} width="100%" height="100%" patternUnits="userSpaceOnUse"> <image x="0" y="0" width="100px" height="100px" xlinkHref={this.props.peerBig.avatar} /> </pattern> ); } const colors: Gradient = getAvatarColor(this.props.peerBig.placeholder); return ( <linearGradient id={this.ids.big} gradientUnits="userSpaceOnUse" x1="6.79%" y1="105.31%" x2="93.21%" y2="-5.31%" > <stop stopColor={colors.payload.from} /><|fim▁hole|> </linearGradient> ); } renderClipMaskBig(): React.Element<any> { return ( <clipPath id={this.ids.clip}> <path // eslint-disable-next-line d="M58.2070074,99.3297063 C55.5367715,99.7706374 
52.795171,100 50,100 C22.3857625,100 0,77.6142375 0,50 C0,22.3857625 22.3857625,0 50,0 C77.6142375,0 100,22.3857625 100,50 C100,52.795171 99.7706374,55.5367715 99.3297063,58.2070074 C94.8434182,55.5348957 89.6009561,54 84,54 C67.4314575,54 54,67.4314575 54,84 C54,89.6009561 55.5348957,94.8434182 58.2070074,99.3297063 Z" /> </clipPath> ); } renderDefsSmall(): React.Element<any> { if (this.props.peerSmall.avatar) { return ( <pattern id={this.ids.small} width="100%" height="100%" x="58" y="58" patternUnits="userSpaceOnUse" > <image x="0" y="0" width="100px" height="100px" xlinkHref={this.props.peerSmall.avatar} transform="scale(0.507046569,0.507046569)" /> </pattern> ); } const colors: Gradient = getAvatarColor(this.props.peerSmall.placeholder); return ( <linearGradient id={this.ids.small} gradientUnits="userSpaceOnUse" x1="6.79%" y1="105.31%" x2="93.21%" y2="-5.31%" > <stop stopColor={colors.payload.from} /> <stop offset="1" stopColor={colors.payload.to} /> </linearGradient> ); } renderSmallAvatar(): React.Element<any> { return ( <circle cx="84" cy="84" r="25" fill={`url(#${this.ids.small})`} /> ); } renderBigAvatar(): React.Element<any> { return ( <path // eslint-disable-next-line d="M58.2070074,99.3297063 C55.5367715,99.7706374 52.795171,100 50,100 C22.3857625,100 0,77.6142375 0,50 C0,22.3857625 22.3857625,0 50,0 C77.6142375,0 100,22.3857625 100,50 C100,52.795171 99.7706374,55.5367715 99.3297063,58.2070074 C94.8434182,55.5348957 89.6009561,54 84,54 C67.4314575,54 54,67.4314575 54,84 C54,89.6009561 55.5348957,94.8434182 58.2070074,99.3297063 Z" fill={`url(#${this.ids.big})`} /> ); } renderPeerSmallText(): ?React.Element<any> { if (this.props.peerSmall.avatar) { return null; } const size = this.getAvatarSize(); const text = size >= 20 ? getAvatarText(this.props.peerSmall.title) : null; const twoChars = Boolean(text && text.length !== 1); const textStyles = { fontSize: twoChars ? 
20 : 24 }; return ( <text className={styles.text} x="84" y="84" textAnchor="middle" alignmentBaseline="central" dominantBaseline="central" style={textStyles} > {text} </text> ); } renderPeerBigText(): ?React.Element<any> { if (this.props.peerBig.avatar) { return null; } const size = this.getAvatarSize(); const text = size >= 20 ? getAvatarText(this.props.peerBig.title) : null; const twoChars = Boolean(text && text.length !== 1); const textStyles = { fontSize: twoChars ? 38 : 48 }; return ( <text className={styles.text} x="50" y="50" textAnchor="middle" alignmentBaseline="central" dominantBaseline="central" style={textStyles} clipPath={`url(#${this.ids.clip})`} > {text} </text> ); } render(): React.Element<any> { const className = classNames(styles.container, { [styles.clickable]: this.props.onClick }, this.props.className); const size = this.getAvatarSize(); return ( <svg viewBox="0 0 109 109" width={size} height={size} className={className} onClick={this.props.onClick} > <defs> {this.renderDefsBig()} {this.renderClipMaskBig()} {this.renderDefsSmall()} </defs> {this.renderBigAvatar()} {this.renderSmallAvatar()} {this.renderPeerBigText()} {this.renderPeerSmallText()} </svg> ); } } export default DoublePeerAvatar;<|fim▁end|>
<stop offset="1" stopColor={colors.payload.to} />
<|file_name|>mod.rs<|end_file_name|><|fim▁begin|>mod buffer; mod buffered; <|fim▁hole|> pub use self::buffered::*; pub use self::buffer::*;<|fim▁end|>
#[cfg(test)] mod tests;
<|file_name|>odp.js<|end_file_name|><|fim▁begin|>'use strict' // Module dependencies. var request = require('request') var querystring = require('querystring') var userAgent = require('random-useragent') // Root for all endpoints. var _baseUrl = 'http://data.europa.eu/euodp/data/api/action' // Infrastructure prevents requests from original user agent of requestee. var headers = { 'User-Agent': userAgent.getRandom(), 'Accept': 'application/json', 'Content-Type': 'application/x-www-form-urlencoded' } /** * Calls the service and return the data in a promise, but with POST. * @function * @private * @name _sendRequest * @param {object} options - The request set of options. * @param {string} options.endpoint - Resource endpoint, without any slashes. * @param {object} options.query - The query parameters for the request. * @param {object} options.body - The body if POST, PUT * @param {string} options.method - The method to be used. * @returns {Promise} The response in a promise. */ function _sendRequest (options) { return new Promise((resolve, reject) => { var query = querystring.stringify(options.query) var bodyData = JSON.stringify(options.body) request({ url: _baseUrl + `/${options.endpoint}?${query}`, headers: headers, method: options.method, body: bodyData }, (error, response, body) => { if (error) { reject(error) } resolve(body) }) }) } /** * Get a list of the datasets in JSON. * @param {object} options - The request set of options. * @param {number} options.query.limit - Limit the number of items returned. * @param {number} options.query.offset - Acts like pagination when limited results. */ module.exports.getDatasets = (options) => { return _sendRequest({ method: 'GET', endpoint: 'package_list', query: (options !== undefined ? options.query : '') }) } /** * Return a list of the site's tags. * @param {object} options - The request set of options. * @param {object} options.query - The query parameters. 
* @param {string} options.query.vocabulary_id - The id or name of a vocabulary. * If given only tags that belong to this vocabulary will be returned. * @param {boolean} options.query.all_fields - Whether to include all fields. */ module.exports.getTags = (options) => { return _sendRequest({<|fim▁hole|> }) } /** * Return a list of the site's tags. * @param {object} options - The request set of options. * @param {object} options.query - The query parameters. * @param {string} options.body.id - The id of the data set. * For example: {"id": "dgt-translation-memory"} */ module.exports.getDataset = (options) => { return _sendRequest({ method: 'POST', endpoint: 'package_show', query: (options !== undefined ? options.query : ''), body: (options !== undefined ? options.body : {}) }) } /** * Searches for packages satisfying a given search criteria. * This action accepts solr search query parameters. * @see http://wiki.apache.org/solr/CommonQueryParameters * @param {object} options - The request set of options. * @param {object} options.query - The query parameters. * @param {object} options.body - The body parameter. * This accepts the solr tags to filter results. */ module.exports.datasetSearch = (options) => { return _sendRequest({ method: 'POST', endpoint: 'package_search', query: (options !== undefined ? options.query : ''), body: (options !== undefined ? options.body : {}) }) }<|fim▁end|>
method: 'GET', endpoint: 'tag_list', query: (options !== undefined ? options.query : '')
<|file_name|>__manifest__.py<|end_file_name|><|fim▁begin|># Copyright 2021 Tecnativa - Sergio Teruel # License AGPL-3.0 or later (http://www.gnu.org/licenses/lgpl). { "name": "Account Invoice Margin Sale Delivered Sync", "summary": "Sync invoice margin between invoices and sale orders", "version": "12.0.1.0.1", "development_status": "Beta", "maintainers": ["sergio-teruel"], "category": "Account", "website": "https://github.com/OCA/margin-analysis",<|fim▁hole|> "license": "AGPL-3", "application": False, "installable": True, "depends": [ "sale_margin_delivered", "account_invoice_margin_sale", ], }<|fim▁end|>
"author": "Tecnativa, " "Odoo Community Association (OCA)",
<|file_name|>work-item-link.reducer.ts<|end_file_name|><|fim▁begin|>import { ActionReducer, State } from '@ngrx/store'; import * as WorkItemLinkActions from './../actions/work-item-link.actions'; import { initialState, WorkItemLinkState } from './../states/work-item-link.state'; export type Action = WorkItemLinkActions.All; export const WorkItemLinkReducer: ActionReducer<WorkItemLinkState> = (state = initialState, action: Action) => { switch (action.type) { case WorkItemLinkActions.GET_SUCCESS: { return action.payload; } case WorkItemLinkActions.GET_ERROR: { return state; } case WorkItemLinkActions.ADD_SUCCESS: {<|fim▁hole|> return [action.payload, ...state]; } case WorkItemLinkActions.ADD_ERROR: { return state; } case WorkItemLinkActions.DELETE_SUCCESS: { let linkIndex = state.findIndex(l => l.id === action.payload.id); if (linkIndex > -1) { state.splice(linkIndex, 1); } return [...state]; } case WorkItemLinkActions.DELETE_ERROR: { return state; } default: { return state; } } };<|fim▁end|>