input
stringlengths 0
1.96k
| context
stringlengths 1.23k
257k
| answers
listlengths 1
5
| length
int32 399
40.5k
| dataset
stringclasses 10
values | language
stringclasses 5
values | all_classes
listlengths | _id
stringlengths 48
48
|
|---|---|---|---|---|---|---|---|
# This file is part of Scapy
# See http://www.secdev.org/projects/scapy for more information
# Copyright (C) Andreas Korb <andreas.d.korb@gmail.com>
# Copyright (C) Nils Weiss <nils@we155.de>
# This program is published under a GPLv2 license
from scapy.fields import StrFixedLenField, BitEnumField, BitField, \
ScalingField, FlagsField, XByteEnumField, ShortField
from scapy.contrib.automotive.obd.packet import OBD_Packet
# See https://en.wikipedia.org/wiki/OBD-II_PIDs for further information
# PID = Parameter IDentification
class OBD_PID00(OBD_Packet):
    # Bit mask telling which of PIDs 0x01..0x20 this ECU supports.
    name = "PID_00_PIDsSupported"
    fields_desc = [
        # MSB = PID20 down to LSB = PID01; generated instead of
        # spelling out all 32 names by hand.
        FlagsField('supported_pids', b'', 32,
                   ['PID%02X' % pid for pid in range(0x20, 0x00, -1)])
    ]
class OBD_PID01(OBD_Packet):
    # Monitor status since DTCs were cleared: MIL state, DTC count and
    # the readiness/support bits of the on-board diagnostic tests.
    name = "PID_01_MonitorStatusSinceDtcsCleared"

    # Shared enum for single on/off bits.
    onOff = {
        0: 'off',
        1: 'on'
    }
    fields_desc = [
        # Malfunction indicator lamp ("check engine" light).
        BitEnumField('mil', 0, 1, onOff),
        # Number of confirmed emission-related DTCs.
        BitField('dtc_count', 0, 7),
        BitField('reserved1', 0, 1),
        FlagsField('continuous_tests_ready', b'', 3, [
            'misfire',
            'fuelSystem',
            'components'
        ]),
        BitField('reserved2', 0, 1),
        # NOTE(review): 'fuel_system' here vs 'fuelSystem' in the *_ready
        # field above is inconsistent; renaming would change the
        # user-visible flag names, so it is only flagged, not changed.
        FlagsField('continuous_tests_supported', b'', 3, [
            'misfire',
            'fuel_system',
            'components'
        ]),
        FlagsField('once_per_trip_tests_supported', b'', 8, [
            'egr',
            'oxygenSensorHeater',
            'oxygenSensor',
            'acSystemRefrigerant',
            'secondaryAirSystem',
            'evaporativeSystem',
            'heatedCatalyst',
            'catalyst'
        ]),
        FlagsField('once_per_trip_tests_ready', b'', 8, [
            'egr',
            'oxygenSensorHeater',
            'oxygenSensor',
            'acSystemRefrigerant',
            'secondaryAirSystem',
            'evaporativeSystem',
            'heatedCatalyst',
            'catalyst'
        ])
    ]
class OBD_PID02(OBD_Packet):
    # DTC that caused the freeze frame to be stored (raw 2-byte value).
    name = "PID_02_FreezeDtc"
    fields_desc = [
        ShortField('data', 0)
    ]


class OBD_PID03(OBD_Packet):
    # Fuel system status, one status byte per fuel system.
    name = "PID_03_FuelSystemStatus"
    loopStates = {
        0x00: 'OpenLoopInsufficientEngineTemperature',
        0x02: 'ClosedLoop',
        0x04: 'OpenLoopEngineLoadOrFuelCut',
        0x08: 'OpenLoopDueSystemFailure',
        0x10: 'ClosedLoopWithFault'
    }
    fields_desc = [
        XByteEnumField('fuel_system1', 0, loopStates),
        XByteEnumField('fuel_system2', 0, loopStates)
    ]
class OBD_PID04(OBD_Packet):
    # Calculated engine load, 0..100 %.
    name = "PID_04_CalculatedEngineLoad"
    fields_desc = [
        ScalingField('data', 0, scaling=100 / 255., unit="%")
    ]


class OBD_PID05(OBD_Packet):
    # Engine coolant temperature, -40..215 deg. C.
    name = "PID_05_EngineCoolantTemperature"
    fields_desc = [
        ScalingField('data', 0, unit="deg. C", offset=-40.0)
    ]


class OBD_PID06(OBD_Packet):
    # Short term fuel trim, bank 1: -100 % (lean) .. +99.2 % (rich).
    name = "PID_06_ShortTermFuelTrimBank1"
    fields_desc = [
        ScalingField('data', 0, scaling=100 / 128.,
                     unit="%", offset=-100.0)
    ]


class OBD_PID07(OBD_Packet):
    # Long term fuel trim, bank 1: -100 % .. +99.2 %.
    name = "PID_07_LongTermFuelTrimBank1"
    fields_desc = [
        ScalingField('data', 0, scaling=100 / 128.,
                     unit="%", offset=-100.0)
    ]


class OBD_PID08(OBD_Packet):
    # Short term fuel trim, bank 2: -100 % .. +99.2 %.
    name = "PID_08_ShortTermFuelTrimBank2"
    fields_desc = [
        ScalingField('data', 0, scaling=100 / 128.,
                     unit="%", offset=-100.0)
    ]


class OBD_PID09(OBD_Packet):
    # Long term fuel trim, bank 2: -100 % .. +99.2 %.
    name = "PID_09_LongTermFuelTrimBank2"
    fields_desc = [
        ScalingField('data', 0, scaling=100 / 128.,
                     unit="%", offset=-100.0)
    ]


class OBD_PID0A(OBD_Packet):
    # Fuel pressure (gauge), 0..765 kPa in 3 kPa steps.
    name = "PID_0A_FuelPressure"
    fields_desc = [
        ScalingField('data', 0, scaling=3, unit="kPa")
    ]


class OBD_PID0B(OBD_Packet):
    # Intake manifold absolute pressure, 0..255 kPa.
    name = "PID_0B_IntakeManifoldAbsolutePressure"
    fields_desc = [
        ScalingField('data', 0, scaling=1, unit="kPa")
    ]


class OBD_PID0C(OBD_Packet):
    # Engine RPM, 2 bytes, quarter-rpm resolution (0..16383.75).
    name = "PID_0C_EngineRpm"
    fields_desc = [
        ScalingField('data', 0, scaling=1 / 4., unit="min-1", fmt="H")
    ]


class OBD_PID0D(OBD_Packet):
    # Vehicle speed, 0..255 km/h.
    name = "PID_0D_VehicleSpeed"
    fields_desc = [
        ScalingField('data', 0, unit="km/h")
    ]


class OBD_PID0E(OBD_Packet):
    # Timing advance before TDC, -64..63.5 deg.
    name = "PID_0E_TimingAdvance"
    fields_desc = [
        ScalingField('data', 0, scaling=1 / 2., unit="deg.", offset=-64.0)
    ]


class OBD_PID0F(OBD_Packet):
    # Intake air temperature, -40..215 deg. C.
    name = "PID_0F_IntakeAirTemperature"
    fields_desc = [
        ScalingField('data', 0, scaling=1, unit="deg. C", offset=-40.0)
    ]
class OBD_PID10(OBD_Packet):
    # MAF air flow rate, 0..655.35 g/s.
    name = "PID_10_MafAirFlowRate"
    fields_desc = [
        # Fixed: PID 0x10 carries two data bytes ((256*A+B)/100 g/s);
        # without fmt="H" the field decoded only one byte, capping the
        # value at 2.55 g/s.  Matches the fmt="H" used by PID 0x0C.
        ScalingField('data', 0, scaling=1 / 100., unit="g/s", fmt="H")
    ]
class OBD_PID11(OBD_Packet):
    # Absolute throttle position, 0..100 %.
    name = "PID_11_ThrottlePosition"
    fields_desc = [
        ScalingField('data', 0, scaling=100 / 255., unit="%")
    ]


class OBD_PID12(OBD_Packet):
    # Commanded secondary air status (one state byte).
    name = "PID_12_CommandedSecondaryAirStatus"
    states = {
        0x00: 'upstream',
        0x02: 'downstreamCatalyticConverter',
        0x04: 'outsideAtmosphereOrOff',
        0x08: 'pumpCommanded'
    }
    fields_desc = [
        XByteEnumField('data', 0, states)
    ]


class OBD_PID13(OBD_Packet):
    # Bit mask of installed oxygen sensors, kept as one raw byte.
    name = "PID_13_OxygenSensorsPresent"
    fields_desc = [
        StrFixedLenField('data', b'', 1)
    ]
class _OBD_PID14_1B(OBD_Packet):
    # Shared layout for PIDs 0x14..0x1B (oxygen sensors 1-8):
    # byte A = sensor voltage (0..1.275 V, 5 mV steps),
    # byte B = short term fuel trim (-100..+99.2 %).
    fields_desc = [
        ScalingField('outputVoltage', 0, scaling=0.005, unit="V"),
        # offset written as a float for consistency with the other
        # fuel-trim fields in this file (value is unchanged).
        ScalingField('trim', 0, scaling=100 / 128.,
                     unit="%", offset=-100.0)
    ]
# PIDs 0x14..0x1B all share the oxygen-sensor layout defined by
# _OBD_PID14_1B; only the sensor number in the name differs.
class OBD_PID14(_OBD_PID14_1B):
    name = "PID_14_OxygenSensor1"


class OBD_PID15(_OBD_PID14_1B):
    name = "PID_15_OxygenSensor2"


class OBD_PID16(_OBD_PID14_1B):
    name = "PID_16_OxygenSensor3"


class OBD_PID17(_OBD_PID14_1B):
    name = "PID_17_OxygenSensor4"


class OBD_PID18(_OBD_PID14_1B):
    name = "PID_18_OxygenSensor5"


class OBD_PID19(_OBD_PID14_1B):
    name = "PID_19_OxygenSensor6"


class OBD_PID1A(_OBD_PID14_1B):
    name = "PID_1A_OxygenSensor7"


class OBD_PID1B(_OBD_PID14_1B):
    name = "PID_1B_OxygenSensor8"
class OBD_PID1C(OBD_Packet):
name = "PID_1C_ObdStandardsThisVehicleConformsTo"
obdStandards = {
0x01: 'OBD-II as defined by the CARB',
0x02: 'OBD as defined by the EPA',
0x03: 'OBD and OBD-II ',
0x04: 'OBD-I ',
0x05: 'Not OBD compliant',
|
[
" 0x06: 'EOBD (Europe) ',"
] | 557
|
lcc
|
python
| null |
bfeb58c9afcb4b7dfc22c93238798de6b1c61704eb37134a
|
|
#!/usr/bin/env python
#
# Copyright 2011 Stef Walter
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License as published
# by the Free Software Foundation; either version 2 of the licence or (at
# your option) any later version.
#
# See the included COPYING file for more information.
#
import getopt
import os
import sys
import time
import unittest
import aes
import dh
import hkdf
import dbus
import dbus.service
import dbus.glib
import gobject
# Object-path prefix under which collections are exported.
COLLECTION_PREFIX = "/org/freedesktop/secrets/collection/"
# Well-known bus name claimed by the mock service.
bus_name = 'org.freedesktop.Secret.MockService'
# File descriptor used by listen(); presumably a pipe to signal
# readiness to the parent test process — confirm against listen().
ready_pipe = -1
# Maps every exported D-Bus object path to its Python object.
objects = { }
# D-Bus exceptions raised by the mock service.  Each one maps a Python
# exception onto a well-known D-Bus error name so clients can match it.
class NotSupported(dbus.exceptions.DBusException):
    def __init__(self, msg):
        dbus.exceptions.DBusException.__init__(self, msg, name="org.freedesktop.DBus.Error.NotSupported")


class InvalidArgs(dbus.exceptions.DBusException):
    def __init__(self, msg):
        dbus.exceptions.DBusException.__init__(self, msg, name="org.freedesktop.DBus.Error.InvalidArgs")


class IsLocked(dbus.exceptions.DBusException):
    def __init__(self, msg):
        dbus.exceptions.DBusException.__init__(self, msg, name="org.freedesktop.Secret.Error.IsLocked")


class NoSuchObject(dbus.exceptions.DBusException):
    def __init__(self, msg):
        dbus.exceptions.DBusException.__init__(self, msg, name="org.freedesktop.Secret.Error.NoSuchObject")
# Monotonically increasing counter backing next_identifier().
unique_identifier = 111


def next_identifier(prefix=''):
    """Return a fresh identifier: ``prefix`` followed by a unique integer."""
    global unique_identifier
    unique_identifier += 1
    return "{0}{1}".format(prefix, unique_identifier)
def encode_identifier(value):
    """Encode ``value`` for use as a D-Bus object-path element.

    ASCII letters and digits are kept as-is; every other UTF-8 byte
    becomes ``_xx`` (two lowercase hex digits).

    Fixed: iterating ``value.encode('utf-8')`` yields ints on Python 3,
    so ``c.isalpha()`` crashed there; the bytes are normalized through
    bytearray() and checked explicitly, which also matches the Python 2
    ASCII-only ``str.isalpha``/``isdigit`` behavior.
    """
    encoded = []
    for byte in bytearray(value.encode('utf-8')):
        char = chr(byte)
        if ('a' <= char <= 'z') or ('A' <= char <= 'Z') or ('0' <= char <= '9'):
            encoded.append(char)
        else:
            encoded.append("_%02x" % byte)
    return "".join(encoded)
def hex_encode(string):
    """Render each character of ``string`` as two lowercase hex digits."""
    return "".join("%02x" % ord(character) for character in string)
def alias_path(name):
    """Build the D-Bus object path for the collection alias ``name``."""
    return "/org/freedesktop/secrets/aliases/" + name
class PlainAlgorithm():
    """The 'plain' session algorithm: secrets travel unencrypted."""
    def negotiate(self, service, sender, param):
        # The plain algorithm takes an empty string as its input.
        if type(param) != dbus.String:
            raise InvalidArgs("invalid argument passed to OpenSession")
        session = SecretSession(service, sender, self, None)
        return (dbus.String("", variant_level=1), session)
    def encrypt(self, key, data):
        # No encryption: empty parameters, data passed through.
        return ("", data)
    def decrypt(self, key, param, data):
        # Fixed: SecretSession.decode_secret() calls
        # decrypt(self.key, params, data); the old two-argument version
        # raised TypeError, referenced an undefined name 'params', and
        # rejected the empty parameter it should accept.  A plain
        # session must have empty secret parameters.
        if param != "":
            raise InvalidArgs("invalid secret plain parameter")
        return data
class AesAlgorithm():
    # Implements the dh-ietf1024-sha256-aes128-cbc-pkcs7 session
    # algorithm: DH key agreement, HKDF key derivation, AES-128-CBC
    # transport.  Python 2 code: map(ord, ...) / chr() joins assume
    # byte strings.
    def negotiate(self, service, sender, param):
        if type (param) != dbus.ByteArray:
            raise InvalidArgs("invalid argument passed to OpenSession")
        # Generate our own DH pair and combine it with the peer's
        # public key supplied in the OpenSession argument.
        privat, publi = dh.generate_pair()
        peer = dh.bytes_to_number(param)
        # print "mock publi: ", hex(publi)
        # print " mock peer: ", hex(peer)
        ikm = dh.derive_key(privat, peer)
        # print " mock ikm: ", hex_encode(ikm)
        # Derive a 128-bit AES key from the shared secret.
        key = hkdf.hkdf(ikm, 16)
        # print " mock key: ", hex_encode(key)
        session = SecretSession(service, sender, self, key)
        return (dbus.ByteArray(dh.number_to_bytes(publi), variant_level=1), session)
    def encrypt(self, key, data):
        # PKCS#7-pad and encrypt with a fresh random IV; the IV is
        # returned as the secret's parameters.
        key = map(ord, key)
        data = aes.append_PKCS7_padding(data)
        keysize = len(key)
        iv = [ord(i) for i in os.urandom(16)]
        mode = aes.AESModeOfOperation.modeOfOperation["CBC"]
        moo = aes.AESModeOfOperation()
        (mode, length, ciph) = moo.encrypt(data, mode, key, keysize, iv)
        return ("".join([chr(i) for i in iv]),
                "".join([chr(i) for i in ciph]))
    def decrypt(self, key, param, data):
        # 'param' carries the 16-byte IV produced by encrypt().
        key = map(ord, key)
        keysize = len(key)
        iv = map(ord, param[:16])
        data = map(ord, data)
        moo = aes.AESModeOfOperation()
        mode = aes.AESModeOfOperation.modeOfOperation["CBC"]
        decr = moo.decrypt(data, None, mode, key, keysize, iv)
        return aes.strip_PKCS7_padding(decr)
class SecretPrompt(dbus.service.Object):
    # A mock prompt object.  Prompt() completes asynchronously after
    # 'delay' seconds; Dismiss() completes immediately.
    def __init__(self, service, sender, prompt_name=None, delay=0,
                 dismiss=False, action=None):
        self.sender = sender
        self.service = service
        # Fixed: 'delay' and 'dismiss' were previously ignored and
        # hard-coded to 0 / False, so callers could never configure a
        # delayed or dismissed prompt.
        self.delay = delay
        self.dismiss = dismiss
        self.result = dbus.String("", variant_level=1)
        # Optional callable run by Prompt(); its return value becomes
        # the prompt result passed to Completed.
        self.action = action
        self.completed = False
        if prompt_name:
            self.path = "/org/freedesktop/secrets/prompts/%s" % prompt_name
        else:
            self.path = "/org/freedesktop/secrets/prompts/%s" % next_identifier('p')
        dbus.service.Object.__init__(self, service.bus_name, self.path)
        service.add_prompt(self)
        assert self.path not in objects
        objects[self.path] = self
    def _complete(self):
        # Emit Completed exactly once, then drop off the bus.
        if self.completed:
            return
        self.completed = True
        self.Completed(self.dismiss, self.result)
        self.remove_from_connection()
    @dbus.service.method('org.freedesktop.Secret.Prompt')
    def Prompt(self, window_id):
        if self.action:
            self.result = self.action()
        # Complete asynchronously after the configured delay.
        gobject.timeout_add(self.delay * 1000, self._complete)
    @dbus.service.method('org.freedesktop.Secret.Prompt')
    def Dismiss(self):
        self._complete()
    @dbus.service.signal(dbus_interface='org.freedesktop.Secret.Prompt', signature='bv')
    def Completed(self, dismiss, result):
        pass
class SecretSession(dbus.service.Object):
    # A transfer session opened by a client; owns the negotiated
    # algorithm and (for AES) the derived key.
    def __init__(self, service, sender, algorithm, key):
        self.sender = sender
        self.service = service
        self.algorithm = algorithm
        self.key = key
        self.path = "/org/freedesktop/secrets/sessions/%s" % next_identifier('s')
        dbus.service.Object.__init__(self, service.bus_name, self.path)
        service.add_session(self)
        objects[self.path] = self
    def encode_secret(self, secret, content_type):
        # Wrap a plaintext secret into the (session, params, value,
        # content_type) struct defined by the Secret Service spec.
        (params, data) = self.algorithm.encrypt(self.key, secret)
        # print " mock iv: ", hex_encode(params)
        # print " mock ciph: ", hex_encode(data)
        return dbus.Struct((dbus.ObjectPath(self.path), dbus.ByteArray(params),
                            dbus.ByteArray(data), dbus.String(content_type)),
                           signature="oayays")
    def decode_secret(self, value):
        # value[1] = algorithm parameters (IV), value[2] = ciphertext,
        # value[3] = content type.
        plain = self.algorithm.decrypt(self.key, value[1], value[2])
        return (plain, value[3])
    @dbus.service.method('org.freedesktop.Secret.Session')
    def Close(self):
        self.remove_from_connection()
        self.service.remove_session(self)
class SecretItem(dbus.service.Object):
    # A single stored secret plus its lookup attributes.  Implements
    # org.freedesktop.Secret.Item and the D-Bus properties interface.
    SUPPORTS_MULTIPLE_OBJECT_PATHS = True

    # NOTE(review): the mutable default for 'attributes' is shared
    # between calls; it is only read here, but any in-place mutation
    # would leak state between items — consider defaulting to None.
    def __init__(self, collection, identifier=None, label="Item", attributes={ },
                 secret="", confirm=False, content_type="text/plain", type=None):
        if identifier is None:
            identifier = next_identifier()
        # Escape characters not valid in a D-Bus object path.
        identifier = encode_identifier(identifier)
        self.collection = collection
        self.identifier = identifier
        self.label = label or "Unnamed item"
        self.secret = secret
        # 'Type' is a libgnome-keyring compatibility extension, not spec.
        self.type = type or "org.freedesktop.Secret.Generic"
        self.attributes = attributes
        self.content_type = content_type
        self.path = "%s/%s" % (collection.path, identifier)
        # When confirm is set, Delete() goes through a prompt object.
        self.confirm = confirm
        self.created = self.modified = time.time()
        dbus.service.Object.__init__(self, collection.service.bus_name, self.path)
        self.collection.add_item(self)
        objects[self.path] = self
    def add_alias(self, name):
        # Also export this item underneath the aliased collection path.
        path = "%s/%s" % (alias_path(name), self.identifier)
        objects[path] = self
        self.add_to_connection(self.connection, path)
    def remove_alias(self, name):
        path = "%s/%s" % (alias_path(name), self.identifier)
        del objects[path]
        self.remove_from_connection(self.connection, path)
    def match_attributes(self, attributes):
        # True when every requested attribute matches exactly.
        for (key, value) in attributes.items():
            if not self.attributes.get(key) == value:
                return False
        return True
    def get_locked(self):
        # Items share their collection's lock state.
        return self.collection.locked
    def perform_xlock(self, lock):
        return self.collection.perform_xlock(lock)
    def perform_delete(self):
        self.collection.remove_item(self)
        del objects[self.path]
        self.remove_from_connection()
    @dbus.service.method('org.freedesktop.Secret.Item', sender_keyword='sender')
    def GetSecret(self, session_path, sender=None):
        # The session must exist and belong to the calling client.
        session = objects.get(session_path, None)
        if not session or session.sender != sender:
            raise InvalidArgs("session invalid: %s" % session_path)
        if self.get_locked():
            raise IsLocked("secret is locked: %s" % self.path)
        return session.encode_secret(self.secret, self.content_type)
    @dbus.service.method('org.freedesktop.Secret.Item', sender_keyword='sender', byte_arrays=True)
    def SetSecret(self, secret, sender=None):
        # secret[0] is the session path of the (session, params, value,
        # content_type) struct.
        session = objects.get(secret[0], None)
        if not session or session.sender != sender:
            raise InvalidArgs("session invalid: %s" % secret[0])
        if self.get_locked():
            raise IsLocked("secret is locked: %s" % self.path)
        (self.secret, self.content_type) = session.decode_secret(secret)
    @dbus.service.method('org.freedesktop.Secret.Item', sender_keyword='sender')
    def Delete(self, sender=None):
        item = self
        def prompt_callback():
            item.perform_delete()
            return dbus.String("", variant_level=1)
        if self.confirm:
            # Deletion must be confirmed; hand back the prompt's path.
            prompt = SecretPrompt(self.collection.service, sender,
                                  dismiss=False, action=prompt_callback)
            return dbus.ObjectPath(prompt.path)
        else:
            # No confirmation needed: delete now, "/" means "no prompt".
            self.perform_delete()
            return dbus.ObjectPath("/")
    @dbus.service.method(dbus.PROPERTIES_IFACE, in_signature='ss', out_signature='v')
    def Get(self, interface_name, property_name):
        return self.GetAll(interface_name)[property_name]
    @dbus.service.method(dbus.PROPERTIES_IFACE, in_signature='s', out_signature='a{sv}')
    def GetAll(self, interface_name):
        if interface_name == 'org.freedesktop.Secret.Item':
            return {
                'Locked': self.get_locked(),
                'Attributes': dbus.Dictionary(self.attributes, signature='ss', variant_level=1),
                'Label': self.label,
                'Created': dbus.UInt64(self.created),
                'Modified': dbus.UInt64(self.modified),
                # For compatibility with libgnome-keyring, not part of spec
                'Type': self.type
            }
        else:
            raise InvalidArgs('Unknown %s interface' % interface_name)
    @dbus.service.method(dbus.PROPERTIES_IFACE, in_signature='ssv')
    def Set(self, interface_name, property_name, new_value):
        if interface_name != 'org.freedesktop.Secret.Item':
            raise InvalidArgs('Unknown %s interface' % interface_name)
        if property_name == "Label":
            self.label = str(new_value)
        elif property_name == "Attributes":
            self.attributes = dict(new_value)
        # For compatibility with libgnome-keyring, not part of spec
        elif property_name == "Type":
            self.type = str(new_value)
        else:
            raise InvalidArgs('Not writable %s property' % property_name)
        self.PropertiesChanged(interface_name, { property_name: new_value }, [])
    @dbus.service.signal(dbus.PROPERTIES_IFACE, signature='sa{sv}as')
    def PropertiesChanged(self, interface_name, changed_properties, invalidated_properties):
        # Emitting the signal also bumps the modification timestamp.
        self.modified = time.time()
class SecretCollection(dbus.service.Object):
    # A keyring holding SecretItems, optionally reachable through one
    # or more alias paths (e.g. 'default', 'session').
    SUPPORTS_MULTIPLE_OBJECT_PATHS = True

    def __init__(self, service, identifier=None, label="Collection", locked=False,
                 confirm=False, master=None):
        if identifier is None:
            identifier = next_identifier(label)
        # Escape characters not valid in a D-Bus object path.
        identifier = encode_identifier(identifier)
        self.service = service
        self.identifier = identifier
        self.label = label or "Unnamed collection"
        self.locked = locked
        self.items = { }
        # When confirm is set, Delete() goes through a prompt object.
        self.confirm = confirm
        # NOTE(review): the 'master' argument is ignored and the
        # attribute hard-coded to None, preserving original behavior.
        self.master = None
        self.created = self.modified = time.time()
        self.aliased = set()
        self.path = "%s%s" % (COLLECTION_PREFIX, identifier)
        dbus.service.Object.__init__(self, service.bus_name, self.path)
        self.service.add_collection(self)
        objects[self.path] = self
    def add_item(self, item):
        self.items[item.path] = item
        # New items must also appear under every alias path.
        for alias in self.aliased:
            item.add_alias(alias)
    def remove_item(self, item):
        for alias in self.aliased:
            item.remove_alias(alias)
        del self.items[item.path]
    def add_alias(self, name):
        # Export the collection (and all its items) under the alias path.
        if name in self.aliased:
            return
        self.aliased.add(name)
        for item in self.items.values():
            item.add_alias(name)
        path = alias_path(name)
        objects[path] = self
        self.add_to_connection(self.connection, path)
    def remove_alias(self, name):
        if name not in self.aliased:
            return
        path = alias_path(name)
        self.aliased.remove(name)
        del objects[path]
        self.remove_from_connection(self.connection, path)
        for item in self.items.values():
            item.remove_alias(name)
    def search_items(self, attributes):
        # Return all items whose attributes match the given ones exactly.
        results = []
        for item in self.items.values():
            if item.match_attributes(attributes):
                results.append(item)
        return results
    def get_locked(self):
        return self.locked
    def perform_xlock(self, lock):
        # Lock state lives on the collection; notify each item, then
        # the collection itself.
        self.locked = lock
        for item in self.items.values():
            # Fixed: the Item-interface notification used to be emitted
            # on the collection object (self.PropertiesChanged), leaving
            # the loop variable unused and the items silent.
            item.PropertiesChanged('org.freedesktop.Secret.Item', { "Locked" : lock }, [])
        self.PropertiesChanged('org.freedesktop.Secret.Collection', { "Locked" : lock }, [])
    def perform_delete(self):
        # Fixed: copy the values first — item.perform_delete() removes
        # entries from self.items while we iterate (only safe on
        # Python 2 where values() returned a list).
        for item in list(self.items.values()):
            item.perform_delete()
        del objects[self.path]
        self.service.remove_collection(self)
        for alias in list(self.aliased):
            self.remove_alias(alias)
        self.remove_from_connection()
    @dbus.service.method('org.freedesktop.Secret.Collection', byte_arrays=True, sender_keyword='sender')
    def CreateItem(self, properties, value, replace, sender=None):
        # 'value' is the (session, params, value, content_type) struct.
        session_path = value[0]
        session = objects.get(session_path, None)
        if not session or session.sender != sender:
            raise InvalidArgs("session invalid: %s" % session_path)
        if self.locked:
            raise IsLocked("collection is locked: %s" % self.path)
        attributes = properties.get("org.freedesktop.Secret.Item.Attributes", { })
        label = properties.get("org.freedesktop.Secret.Item.Label", None)
        (secret, content_type) = session.decode_secret(value)
        item = None
        # This is done for compatibility with libgnome-keyring, not part of spec
        type = properties.get("org.freedesktop.Secret.Item.Type", None)
        # With replace set, update an existing matching item in place.
        if replace:
            items = self.search_items(attributes)
            if items:
                item = items[0]
        if item is None:
            item = SecretItem(self, next_identifier(), label, attributes, type=type,
                              secret=secret, confirm=False, content_type=content_type)
        else:
            item.label = label
            item.type = type
            item.secret = secret
            item.attributes = attributes
            item.content_type = content_type
        # "/" means creation completed without a prompt.
        return (dbus.ObjectPath(item.path), dbus.ObjectPath("/"))
    @dbus.service.method('org.freedesktop.Secret.Collection')
    def SearchItems(self, attributes):
        items = self.search_items(attributes)
        return (dbus.Array([item.path for item in items], "o"))
    @dbus.service.method('org.freedesktop.Secret.Collection', sender_keyword='sender')
    def Delete(self, sender=None):
        collection = self
        def prompt_callback():
            collection.perform_delete()
            return dbus.String("", variant_level=1)
        if self.confirm:
            # Deletion must be confirmed; hand back the prompt's path.
            prompt = SecretPrompt(self.service, sender, dismiss=False,
                                  action=prompt_callback)
            return dbus.ObjectPath(prompt.path)
        else:
            self.perform_delete()
            return dbus.ObjectPath("/")
    @dbus.service.method(dbus.PROPERTIES_IFACE, in_signature='ss', out_signature='v')
    def Get(self, interface_name, property_name):
        return self.GetAll(interface_name)[property_name]
    @dbus.service.method(dbus.PROPERTIES_IFACE, in_signature='s', out_signature='a{sv}')
    def GetAll(self, interface_name):
        if interface_name == 'org.freedesktop.Secret.Collection':
            return {
                'Locked': self.get_locked(),
                'Label': self.label,
                'Created': dbus.UInt64(self.created),
                'Modified': dbus.UInt64(self.modified),
                'Items': dbus.Array([dbus.ObjectPath(i.path) for i in self.items.values()], signature='o', variant_level=1)
            }
        else:
            raise InvalidArgs('Unknown %s interface' % interface_name)
    @dbus.service.method(dbus.PROPERTIES_IFACE, in_signature='ssv')
    def Set(self, interface_name, property_name, new_value):
        if interface_name != 'org.freedesktop.Secret.Collection':
            raise InvalidArgs('Unknown %s interface' % interface_name)
        if property_name == "Label":
            self.label = str(new_value)
        else:
            raise InvalidArgs('Not a writable property %s' % property_name)
        self.PropertiesChanged(interface_name, { property_name: new_value }, [])
    @dbus.service.signal(dbus.PROPERTIES_IFACE, signature='sa{sv}as')
    def PropertiesChanged(self, interface_name, changed_properties, invalidated_properties):
        # Emitting the signal also bumps the modification timestamp.
        self.modified = time.time()
class SecretService(dbus.service.Object):
algorithms = {
'plain': PlainAlgorithm(),
"dh-ietf1024-sha256-aes128-cbc-pkcs7": AesAlgorithm(),
}
def __init__(self, name=None):
if name == None:
name = bus_name
bus = dbus.SessionBus()
self.bus_name = dbus.service.BusName(name, allow_replacement=True, replace_existing=True)
dbus.service.Object.__init__(self, self.bus_name, '/org/freedesktop/secrets')
self.sessions = { }
self.prompts = { }
self.collections = { }
self.aliases = { }
self.aliased = { }
def on_name_owner_changed(owned, old_owner, new_owner):
if not new_owner:
for session in list(self.sessions.get(old_owner, [])):
session.Close()
bus.add_signal_receiver(on_name_owner_changed,
'NameOwnerChanged',
'org.freedesktop.DBus')
def add_standard_objects(self):
collection = SecretCollection(self, "english", label="Collection One", locked=False)
SecretItem(collection, "1", label="Item One", secret="111",
attributes={ "number": "1", "string": "one", "even": "false", "xdg:schema": "org.mock.Schema" })
SecretItem(collection, "2", label="Item Two", secret="222",
attributes={ "number": "2", "string": "two", "even": "true", "xdg:schema": "org.mock.Schema" })
SecretItem(collection, "3", label="Item Three", secret="333",
attributes={ "number": "3", "string": "three", "even": "false", "xdg:schema": "org.mock.Schema" })
self.set_alias('default', collection)
collection = SecretCollection(self, "spanish", locked=True)
SecretItem(collection, "10", secret="111",
attributes={ "number": "1", "string": "uno", "even": "false", "xdg:schema": "org.mock.Schema" })
SecretItem(collection, "20", secret="222",
attributes={ "number": "2", "string": "dos", "even": "true", "xdg:schema": "org.mock.Schema" })
SecretItem(collection, "30", secret="3333",
attributes={ "number": "3", "string": "tres", "even": "false", "xdg:schema": "org.mock.Schema" })
collection = SecretCollection(self, "german", locked=True)
SecretItem(collection, "300", secret="333",
attributes={ "number": "3", "string": "drei", "prime": "true", "xdg:schema": "org.mock.Primes" })
SecretItem(collection, "400", secret="444",
attributes={ "number": "4", "string": "vier", "prime": "false", "xdg:schema": "org.mock.Primes" })
SecretItem(collection, "500", secret="555",
attributes={ "number": "5", "string": "fuenf", "prime": "true", "xdg:schema": "org.mock.Primes" })
SecretItem(collection, "600", secret="666",
attributes={ "number": "6", "string": "sechs", "prime": "false", "xdg:schema": "org.mock.Primes" })
collection = SecretCollection(self, "empty", locked=False)
collection = SecretCollection(self, "session", label="Session Keyring", locked=False)
self.set_alias('session', collection)
def listen(self):
global ready_pipe
|
[
"\t\tloop = gobject.MainLoop()"
] | 1,595
|
lcc
|
python
| null |
fe11c4d3c4d9b1392973aed3ba28df9d9cf6a13123cc8e59
|
|
using System;
using Server.Items;
using Server.Spells;
namespace Server.Engines.Craft
{
public class DefInscription : CraftSystem
{
public override SkillName MainSkill
{
    // Inscription crafting is driven by the Inscribe skill.
    get { return SkillName.Inscribe; }
}
public override int GumpTitleNumber
{
    // 1044009 = <CENTER>INSCRIPTION MENU</CENTER>
    get { return 1044009; }
}
// Lazily constructed singleton instance of this craft system.
private static CraftSystem m_CraftSystem;
public static CraftSystem CraftSystem
{
    get
    {
        // NOTE(review): lazy init is not thread-safe; acceptable if
        // crafting only runs on the single game thread — confirm.
        if (m_CraftSystem == null)
            m_CraftSystem = new DefInscription();
        return m_CraftSystem;
    }
}
// Success chance when the crafter is exactly at minimum skill:
// inscription attempts at minimum skill never succeed.
public override double GetChanceAtMin(CraftItem item)
{
    return 0.0; // 0%
}
// Private: instances are only created through the CraftSystem property.
// Base arguments are (minCraftEffect, maxCraftEffect, delay).
private DefInscription()
    : base(1, 1, 1.25)// base( 1, 1, 3.0 )
{
}
// Validates that 'from' may craft 'typeItem' with 'tool'.
// Returns 0 on success, otherwise a cliloc number describing why not.
public override int CanCraft(Mobile from, BaseTool tool, Type typeItem)
{
    if (tool == null || tool.Deleted || tool.UsesRemaining < 0)
        return 1044038; // You have worn out your tool!
    else if (!BaseTool.CheckAccessible(tool, from))
        return 1044263; // The tool must be on your person to use.
    if (typeItem != null)
    {
        // Instantiate the item only to inspect it; every instance
        // created here is deleted again so nothing leaks into the world.
        object o = Activator.CreateInstance(typeItem);
        if (o is SpellScroll)
        {
            // Scrolls can only be inscribed when the crafter has the
            // spell in one of their spellbooks.
            SpellScroll scroll = (SpellScroll)o;
            Spellbook book = Spellbook.Find(from, scroll.SpellID);
            bool hasSpell = (book != null && book.HasSpell(scroll.SpellID));
            scroll.Delete();
            return (hasSpell ? 0 : 1042404); // null : You don't have that spell!
        }
        else if (o is Item)
        {
            ((Item)o).Delete();
        }
    }
    return 0;
}
// Sound effect played while inscribing (0x249 = scribing noise).
public override void PlayCraftEffect(Mobile from)
{
    from.PlaySound(0x249);
}
// Cached for the scroll check in PlayEndingEffect.
private static readonly Type typeofSpellScroll = typeof(SpellScroll);
// Returns the cliloc describing the craft outcome.  Scrolls use the
// dedicated inscription messages; everything else uses the generic
// craft success/failure/quality messages.
public override int PlayEndingEffect(Mobile from, bool failed, bool lostMaterial, bool toolBroken, int quality, bool makersMark, CraftItem item)
{
    if (toolBroken)
        from.SendLocalizedMessage(1044038); // You have worn out your tool
    if (!typeofSpellScroll.IsAssignableFrom(item.ItemType)) // not a scroll
    {
        if (failed)
        {
            if (lostMaterial)
                return 1044043; // You failed to create the item, and some of your materials are lost.
            else
                return 1044157; // You failed to create the item, but no materials were lost.
        }
        else
        {
            // quality: 0 = below average, 2 = exceptional.
            if (quality == 0)
                return 502785; // You were barely able to make this item. It's quality is below average.
            else if (makersMark && quality == 2)
                return 1044156; // You create an exceptional quality item and affix your maker's mark.
            else if (quality == 2)
                return 1044155; // You create an exceptional quality item.
            else
                return 1044154; // You create the item.
        }
    }
    else
    {
        if (failed)
            return 501630; // You fail to inscribe the scroll, and the scroll is ruined.
        else
            return 501629; // You inscribe the spell and put the scroll in your backpack.
    }
}
// Current magery circle and mana cost; set by InitCraftList() before
// each batch of AddSpell() calls.
private int m_Circle, m_Mana;
// Reagent indices: the first eight are the magery reagents, followed
// by necromancy and other special reagents.
private enum Reg { BlackPearl, Bloodmoss, Garlic, Ginseng, MandrakeRoot, Nightshade, SulfurousAsh, SpidersSilk, BatWing, GraveDust, DaemonBlood, NoxCrystal, PigIron, Bone, DragonBlood, FertileDirt, DaemonBone }
// Reagent item types indexed by the Reg enum above; order must match.
private readonly Type[] m_RegTypes = new Type[]
{
    typeof( BlackPearl ),
    typeof( Bloodmoss ),
    typeof( Garlic ),
    typeof( Ginseng ),
    typeof( MandrakeRoot ),
    typeof( Nightshade ),
    typeof( SulfurousAsh ),
    typeof( SpidersSilk ),
    typeof( BatWing ),
    typeof( GraveDust ),
    typeof( DaemonBlood ),
    typeof( NoxCrystal ),
    typeof( PigIron ),
    typeof( Bone ),
    typeof( DragonBlood ),
    typeof( FertileDirt ),
    typeof( DaemonBone )
};
// Running offset into the sequential 1044381+ spell-name clilocs
// consumed by AddSpell().
private int m_Index;
// Registers one magery spell-scroll craft using the current m_Circle
// and m_Mana.  Skill range depends on the circle; the category cliloc
// groups circles in pairs (1st/2nd, 3rd/4th, ...).
private void AddSpell(Type type, params Reg[] regs)
{
    double minSkill, maxSkill;
    int cliloc;
    switch (m_Circle)
    {
        default:
        case 0: minSkill = -25.0; maxSkill = 25.0; cliloc = 1111691; break;
        case 1: minSkill = -10.8; maxSkill = 39.2; cliloc = 1111691; break;
        case 2: minSkill = 03.5; maxSkill = 53.5; cliloc = 1111692; break;
        case 3: minSkill = 17.8; maxSkill = 67.8; cliloc = 1111692; break;
        case 4: minSkill = 32.1; maxSkill = 82.1; cliloc = 1111693; break;
        case 5: minSkill = 46.4; maxSkill = 96.4; cliloc = 1111693; break;
        case 6: minSkill = 60.7; maxSkill = 110.7; cliloc = 1111694; break;
        case 7: minSkill = 75.0; maxSkill = 125.0; cliloc = 1111694; break;
    }
    // 1044381 + m_Index = sequential spell-name cliloc; the 1044353 and
    // 1044361 blocks hold reagent names and "you don't have" messages.
    int index = AddCraft(type, cliloc, 1044381 + m_Index++, minSkill, maxSkill, m_RegTypes[(int)regs[0]], 1044353 + (int)regs[0], 1, 1044361 + (int)regs[0]);
    for (int i = 1; i < regs.Length; ++i)
        AddRes(index, m_RegTypes[(int)regs[i]], 1044353 + (int)regs[i], 1, 1044361 + (int)regs[i]);
    AddRes(index, typeof(BlankScroll), 1044377, 1, 1044378);
    SetManaReq(index, m_Mana);
}
// Registers a necromancy scroll craft.  1060509 + spell is the spell
// name cliloc; each additional reagent gets its own AddRes entry.
private void AddNecroSpell(int spell, int mana, double minSkill, Type type, params Reg[] regs)
{
    int id = GetRegLocalization(regs[0]);
    int index = AddCraft(type, 1061677, 1060509 + spell, minSkill, minSkill + 1.0, m_RegTypes[(int)regs[0]], id, 1, 501627);
    for (int i = 1; i < regs.Length; ++i)
    {
        id = GetRegLocalization(regs[i]);
        // Fixed: previously indexed m_RegTypes with regs[0] while using
        // regs[i]'s localization, so every extra reagent requirement
        // silently duplicated the first reagent.
        AddRes(index, m_RegTypes[(int)regs[i]], id, 1, 501627);
    }
    AddRes(index, typeof(BlankScroll), 1044377, 1, 1044378);
    SetManaReq(index, mana);
}
// Registers a mysticism scroll craft; 'id' is the spell-name cliloc.
private void AddMysticSpell(int id, int mana, double minSkill, Type type, params Reg[] regs)
{
    int index = AddCraft(type, 1111671, id, minSkill, minSkill + 1.0, m_RegTypes[(int)regs[0]], GetRegLocalization(regs[0]), 1, 501627); //Yes, on OSI it's only 1.0 skill diff'. Don't blame me, blame OSI.
    for (int i = 1; i < regs.Length; ++i)
    {
        // Fixed: previously indexed m_RegTypes with regs[0] while using
        // regs[i]'s localization, duplicating the first reagent.
        AddRes(index, m_RegTypes[(int)regs[i]], GetRegLocalization(regs[i]), 1, 501627);
    }
    AddRes(index, typeof(BlankScroll), 1044377, 1, 1044378);
    SetManaReq(index, mana);
}
// Returns the item-name cliloc for a reagent.  Necromancy reagents
// have dedicated clilocs; the eight magery reagents fall back to the
// sequential 1044353 block.
private int GetRegLocalization(Reg reg)
{
    switch (reg)
    {
        case Reg.BatWing:
            return 1023960;
        case Reg.GraveDust:
            return 1023983;
        case Reg.DaemonBlood:
            return 1023965;
        case Reg.NoxCrystal:
            return 1023982;
        case Reg.PigIron:
            return 1023978;
        case Reg.Bone:
            return 1023966;
        case Reg.DragonBlood:
            return 1023970;
        case Reg.FertileDirt:
            return 1023969;
        case Reg.DaemonBone:
            return 1023968;
        default:
            return 1044353 + (int)reg;
    }
}
public override void InitCraftList()
{
m_Circle = 0;
m_Mana = 4;
AddSpell( typeof( ReactiveArmorScroll ), Reg.Garlic, Reg.SpidersSilk, Reg.SulfurousAsh );
AddSpell( typeof( ClumsyScroll ), Reg.Bloodmoss, Reg.Nightshade );
AddSpell( typeof( CreateFoodScroll ), Reg.Garlic, Reg.Ginseng, Reg.MandrakeRoot );
AddSpell( typeof( FeeblemindScroll ), Reg.Nightshade, Reg.Ginseng );
AddSpell( typeof( HealScroll ), Reg.Garlic, Reg.Ginseng, Reg.SpidersSilk );
AddSpell( typeof( MagicArrowScroll ), Reg.SulfurousAsh );
AddSpell( typeof( NightSightScroll ), Reg.SpidersSilk, Reg.SulfurousAsh );
AddSpell( typeof( WeakenScroll ), Reg.Garlic, Reg.Nightshade );
m_Circle = 1;
m_Mana = 6;
AddSpell( typeof( AgilityScroll ), Reg.Bloodmoss, Reg.MandrakeRoot );
AddSpell( typeof( CunningScroll ), Reg.Nightshade, Reg.MandrakeRoot );
AddSpell( typeof( CureScroll ), Reg.Garlic, Reg.Ginseng );
AddSpell( typeof( HarmScroll ), Reg.Nightshade, Reg.SpidersSilk );
AddSpell( typeof( MagicTrapScroll ), Reg.Garlic, Reg.SpidersSilk, Reg.SulfurousAsh );
AddSpell( typeof( MagicUnTrapScroll ), Reg.Bloodmoss, Reg.SulfurousAsh );
AddSpell( typeof( ProtectionScroll ), Reg.Garlic, Reg.Ginseng, Reg.SulfurousAsh );
AddSpell( typeof( StrengthScroll ), Reg.Nightshade, Reg.MandrakeRoot );
m_Circle = 2;
m_Mana = 9;
AddSpell( typeof( BlessScroll ), Reg.Garlic, Reg.MandrakeRoot );
AddSpell( typeof( FireballScroll ), Reg.BlackPearl );
AddSpell( typeof( MagicLockScroll ), Reg.Bloodmoss, Reg.Garlic, Reg.SulfurousAsh );
AddSpell( typeof( PoisonScroll ), Reg.Nightshade );
AddSpell( typeof( TelekinisisScroll ), Reg.Bloodmoss, Reg.MandrakeRoot );
AddSpell( typeof( TeleportScroll ), Reg.Bloodmoss, Reg.MandrakeRoot );
AddSpell( typeof( UnlockScroll ), Reg.Bloodmoss, Reg.SulfurousAsh );
AddSpell( typeof( WallOfStoneScroll ), Reg.Bloodmoss, Reg.Garlic );
m_Circle = 3;
m_Mana = 11;
AddSpell( typeof( ArchCureScroll ), Reg.Garlic, Reg.Ginseng, Reg.MandrakeRoot );
AddSpell( typeof( ArchProtectionScroll ), Reg.Garlic, Reg.Ginseng, Reg.MandrakeRoot, Reg.SulfurousAsh );
AddSpell( typeof( CurseScroll ), Reg.Garlic, Reg.Nightshade, Reg.SulfurousAsh );
AddSpell( typeof( FireFieldScroll ), Reg.BlackPearl, Reg.SpidersSilk, Reg.SulfurousAsh );
AddSpell( typeof( GreaterHealScroll ), Reg.Garlic, Reg.SpidersSilk, Reg.MandrakeRoot, Reg.Ginseng );
AddSpell( typeof( LightningScroll ), Reg.MandrakeRoot, Reg.SulfurousAsh );
AddSpell( typeof( ManaDrainScroll ), Reg.BlackPearl, Reg.SpidersSilk, Reg.MandrakeRoot );
AddSpell( typeof( RecallScroll ), Reg.BlackPearl, Reg.Bloodmoss, Reg.MandrakeRoot );
m_Circle = 4;
m_Mana = 14;
AddSpell( typeof( BladeSpiritsScroll ), Reg.BlackPearl, Reg.Nightshade, Reg.MandrakeRoot );
AddSpell( typeof( DispelFieldScroll ), Reg.BlackPearl, Reg.Garlic, Reg.SpidersSilk, Reg.SulfurousAsh );
AddSpell( typeof( IncognitoScroll ), Reg.Bloodmoss, Reg.Garlic, Reg.Nightshade );
AddSpell( typeof( MagicReflectScroll ), Reg.Garlic, Reg.MandrakeRoot, Reg.SpidersSilk );
AddSpell( typeof( MindBlastScroll ), Reg.BlackPearl, Reg.MandrakeRoot, Reg.Nightshade, Reg.SulfurousAsh );
AddSpell( typeof( ParalyzeScroll ), Reg.Garlic, Reg.MandrakeRoot, Reg.SpidersSilk );
AddSpell( typeof( PoisonFieldScroll ), Reg.BlackPearl, Reg.Nightshade, Reg.SpidersSilk );
AddSpell( typeof( SummonCreatureScroll ), Reg.Bloodmoss, Reg.MandrakeRoot, Reg.SpidersSilk );
m_Circle = 5;
m_Mana = 20;
AddSpell( typeof( DispelScroll ), Reg.Garlic, Reg.MandrakeRoot, Reg.SulfurousAsh );
AddSpell( typeof( EnergyBoltScroll ), Reg.BlackPearl, Reg.Nightshade );
AddSpell( typeof( ExplosionScroll ), Reg.Bloodmoss, Reg.MandrakeRoot );
AddSpell( typeof( InvisibilityScroll ), Reg.Bloodmoss, Reg.Nightshade );
AddSpell( typeof( MarkScroll ), Reg.Bloodmoss, Reg.BlackPearl, Reg.MandrakeRoot );
AddSpell( typeof( MassCurseScroll ), Reg.Garlic, Reg.MandrakeRoot, Reg.Nightshade, Reg.SulfurousAsh );
AddSpell( typeof( ParalyzeFieldScroll ), Reg.BlackPearl, Reg.Ginseng, Reg.SpidersSilk );
AddSpell( typeof( RevealScroll ), Reg.Bloodmoss, Reg.SulfurousAsh );
m_Circle = 6;
m_Mana = 40;
AddSpell( typeof( ChainLightningScroll ), Reg.BlackPearl, Reg.Bloodmoss, Reg.MandrakeRoot, Reg.SulfurousAsh );
AddSpell( typeof( EnergyFieldScroll ), Reg.BlackPearl, Reg.MandrakeRoot, Reg.SpidersSilk, Reg.SulfurousAsh );
AddSpell( typeof( FlamestrikeScroll ), Reg.SpidersSilk, Reg.SulfurousAsh );
AddSpell( typeof( GateTravelScroll ), Reg.BlackPearl, Reg.MandrakeRoot, Reg.SulfurousAsh );
AddSpell( typeof( ManaVampireScroll ), Reg.BlackPearl, Reg.Bloodmoss, Reg.MandrakeRoot, Reg.SpidersSilk );
AddSpell( typeof( MassDispelScroll ), Reg.BlackPearl, Reg.Garlic, Reg.MandrakeRoot, Reg.SulfurousAsh );
AddSpell( typeof( MeteorSwarmScroll ), Reg.Bloodmoss, Reg.MandrakeRoot, Reg.SulfurousAsh, Reg.SpidersSilk );
AddSpell( typeof( PolymorphScroll ), Reg.Bloodmoss, Reg.MandrakeRoot, Reg.SpidersSilk );
m_Circle = 7;
m_Mana = 50;
AddSpell( typeof( EarthquakeScroll ), Reg.Bloodmoss, Reg.MandrakeRoot, Reg.Ginseng, Reg.SulfurousAsh );
AddSpell( typeof( EnergyVortexScroll ), Reg.BlackPearl, Reg.Bloodmoss, Reg.MandrakeRoot, Reg.Nightshade );
AddSpell( typeof( ResurrectionScroll ), Reg.Bloodmoss, Reg.Garlic, Reg.Ginseng );
AddSpell( typeof( SummonAirElementalScroll ), Reg.Bloodmoss, Reg.MandrakeRoot, Reg.SpidersSilk );
AddSpell( typeof( SummonDaemonScroll ), Reg.Bloodmoss, Reg.MandrakeRoot, Reg.SpidersSilk, Reg.SulfurousAsh );
AddSpell( typeof( SummonEarthElementalScroll ), Reg.Bloodmoss, Reg.MandrakeRoot, Reg.SpidersSilk );
AddSpell( typeof( SummonFireElementalScroll ), Reg.Bloodmoss, Reg.MandrakeRoot, Reg.SpidersSilk, Reg.SulfurousAsh );
AddSpell( typeof( SummonWaterElementalScroll ), Reg.Bloodmoss, Reg.MandrakeRoot, Reg.SpidersSilk );
if ( Core.SE )
{
AddNecroSpell( 0, 23, 39.6, typeof( AnimateDeadScroll ), Reg.GraveDust, Reg.DaemonBlood );
AddNecroSpell( 1, 13, 19.6, typeof( BloodOathScroll ), Reg.DaemonBlood );
AddNecroSpell( 2, 11, 19.6, typeof( CorpseSkinScroll ), Reg.BatWing, Reg.GraveDust );
AddNecroSpell( 3, 7, 19.6, typeof( CurseWeaponScroll ), Reg.PigIron );
AddNecroSpell( 4, 11, 19.6, typeof( EvilOmenScroll ), Reg.BatWing, Reg.NoxCrystal );
AddNecroSpell( 5, 11, 39.6, typeof( HorrificBeastScroll ), Reg.BatWing, Reg.DaemonBlood );
AddNecroSpell( 6, 23, 69.6, typeof( LichFormScroll ), Reg.GraveDust, Reg.DaemonBlood, Reg.NoxCrystal );
AddNecroSpell( 7, 17, 29.6, typeof( MindRotScroll ), Reg.BatWing, Reg.DaemonBlood, Reg.PigIron );
AddNecroSpell( 8, 5, 19.6, typeof( PainSpikeScroll ), Reg.GraveDust, Reg.PigIron );
AddNecroSpell( 9, 17, 49.6, typeof( PoisonStrikeScroll ), Reg.NoxCrystal );
AddNecroSpell( 10, 29, 64.6, typeof( StrangleScroll ), Reg.DaemonBlood, Reg.NoxCrystal );
AddNecroSpell( 11, 17, 29.6, typeof( SummonFamiliarScroll ), Reg.BatWing, Reg.GraveDust, Reg.DaemonBlood );
AddNecroSpell( 12, 23, 98.6, typeof( VampiricEmbraceScroll ), Reg.BatWing, Reg.NoxCrystal, Reg.PigIron );
AddNecroSpell( 13, 41, 79.6, typeof( VengefulSpiritScroll ), Reg.BatWing, Reg.GraveDust, Reg.PigIron );
AddNecroSpell( 14, 23, 59.6, typeof( WitherScroll ), Reg.GraveDust, Reg.NoxCrystal, Reg.PigIron );
AddNecroSpell( 15, 17, 79.6, typeof( WraithFormScroll ), Reg.NoxCrystal, Reg.PigIron );
AddNecroSpell( 16, 40, 79.6, typeof( ExorcismScroll ), Reg.NoxCrystal, Reg.GraveDust );
}
int index;
if (Core.ML)
{
index = this.AddCraft(typeof(EnchantedSwitch), 1044294, 1072893, 45.0, 95.0, typeof(BlankScroll), 1044377, 1, 1044378);
this.AddRes(index, typeof(SpidersSilk), 1044360, 1, 1044253);
this.AddRes(index, typeof(BlackPearl), 1044353, 1, 1044253);
this.AddRes(index, typeof(SwitchItem), 1073464, 1, 1044253);
this.ForceNonExceptional(index);
this.SetNeededExpansion(index, Expansion.ML);
index = this.AddCraft(typeof(RunedPrism), 1044294, 1073465, 45.0, 95.0, typeof(BlankScroll), 1044377, 1, 1044378);
this.AddRes(index, typeof(SpidersSilk), 1044360, 1, 1044253);
|
[
" this.AddRes(index, typeof(BlackPearl), 1044353, 1, 1044253);"
] | 1,615
|
lcc
|
csharp
| null |
faf59e122b5cf0c3acab0447bcd6a53f208c972065c8414a
|
|
# -*- coding: utf-8 -*-
"""
flask.ctx
~~~~~~~~~
Implements the objects required to keep the context.
:copyright: (c) 2015 by Armin Ronacher.
:license: BSD, see LICENSE for more details.
"""
from __future__ import with_statement
import sys
from functools import update_wrapper
from werkzeug.exceptions import HTTPException
from .globals import _request_ctx_stack, _app_ctx_stack
from .signals import appcontext_pushed, appcontext_popped
from ._compat import BROKEN_PYPY_CTXMGR_EXIT, reraise
class _AppCtxGlobals(object):
"""A plain object."""
def get(self, name, default=None):
return self.__dict__.get(name, default)
def __contains__(self, item):
return item in self.__dict__
def __iter__(self):
return iter(self.__dict__)
def __repr__(self):
top = _app_ctx_stack.top
if top is not None:
return '<flask.g of %r>' % top.app.name
return object.__repr__(self)
def after_this_request(f):
    """Executes a function after this request.  This is useful to modify
    response objects.  The function is passed the response object and has
    to return the same or a new one.

    Example::

        @app.route('/')
        def index():
            @after_this_request
            def add_header(response):
                response.headers['X-Foo'] = 'Parachute'
                return response
            return 'Hello World!'

    This is more useful if a function other than the view function wants to
    modify a response.  For instance think of a decorator that wants to add
    some headers without converting the return value into a response object.

    Requires an active request context: with no context pushed,
    ``_request_ctx_stack.top`` is ``None`` and the attribute access below
    raises ``AttributeError``.

    .. versionadded:: 0.9
    """
    # The callback is stored on the request context; RequestContext.__init__
    # creates the _after_request_functions list.
    _request_ctx_stack.top._after_request_functions.append(f)
    return f
def copy_current_request_context(f):
    """Decorate ``f`` so that it runs inside a copy of the request context
    that is current at decoration time.  Useful with greenlets: the copy is
    taken immediately and pushed whenever the decorated function is called.

    Example::

        import gevent
        from flask import copy_current_request_context

        @app.route('/')
        def index():
            @copy_current_request_context
            def do_some_work():
                # do some work here, it can access flask.request like you
                # would otherwise in the view function.
                ...
            gevent.spawn(do_some_work)
            return 'Regular response'

    .. versionadded:: 0.10
    """
    ctx = _request_ctx_stack.top

    if ctx is None:
        raise RuntimeError('This decorator can only be used at local scopes '
            'when a request context is on the stack. For instance within '
            'view functions.')

    ctx = ctx.copy()

    def wrapper(*args, **kwargs):
        with ctx:
            return f(*args, **kwargs)

    return update_wrapper(wrapper, f)
def has_request_context():
    """Return ``True`` when a request context is currently on the stack.

    Use this to take advantage of request information when it is available
    but fail silently otherwise, e.g.::

        class User(db.Model):
            def __init__(self, username, remote_addr=None):
                self.username = username
                if remote_addr is None and has_request_context():
                    remote_addr = request.remote_addr
                self.remote_addr = remote_addr

    Alternatively the context-bound objects themselves (:class:`request`,
    :class:`g`) can be tested for truthness directly.

    .. versionadded:: 0.7
    """
    top = _request_ctx_stack.top
    return top is not None
def has_app_context():
    """Return ``True`` when an application context is currently on the
    stack.  Works like :func:`has_request_context` but for the application
    context; a boolean check on :data:`current_app` is an alternative.

    .. versionadded:: 0.9
    """
    top = _app_ctx_stack.top
    return top is not None
class AppContext(object):
    """The application context binds an application object implicitly
    to the current thread or greenlet, similar to how the
    :class:`RequestContext` binds request information.  The application
    context is also implicitly created if a request context is created
    but the application is not on top of the individual application
    context.
    """
    def __init__(self, app):
        self.app = app
        # URL adapter without a bound request - usable for url_for outside
        # of a request.
        self.url_adapter = app.create_url_adapter(None)
        # The object exposed as ``flask.g``.
        self.g = app.app_ctx_globals_class()
        # Like request context, app contexts can be pushed multiple times
        # but there a basic "refcount" is enough to track them.
        self._refcnt = 0
    def push(self):
        """Binds the app context to the current context."""
        self._refcnt += 1
        if hasattr(sys, 'exc_clear'):
            # Python 2 only: clear lingering exception state before this
            # context becomes current.
            sys.exc_clear()
        _app_ctx_stack.push(self)
        appcontext_pushed.send(self.app)
    def pop(self, exc=None):
        """Pops the app context.

        Teardown callbacks only run when the outermost of possibly nested
        pushes is popped (refcount reaches zero); the stack pop and the
        ``appcontext_popped`` signal fire on every call.
        """
        self._refcnt -= 1
        if self._refcnt <= 0:
            if exc is None:
                exc = sys.exc_info()[1]
            self.app.do_teardown_appcontext(exc)
        rv = _app_ctx_stack.pop()
        assert rv is self, 'Popped wrong app context. (%r instead of %r)' \
            % (rv, self)
        appcontext_popped.send(self.app)
    def __enter__(self):
        self.push()
        return self
    def __exit__(self, exc_type, exc_value, tb):
        self.pop(exc_value)
        if BROKEN_PYPY_CTXMGR_EXIT and exc_type is not None:
            # PyPy workaround: re-raise manually so the exception is not
            # swallowed by the broken context-manager exit.
            reraise(exc_type, exc_value, tb)
class RequestContext(object):
"""The request context contains all request relevant information. It is
created at the beginning of the request and pushed to the
`_request_ctx_stack` and removed at the end of it. It will create the
URL adapter and request object for the WSGI environment provided.
Do not attempt to use this class directly, instead use
:meth:`~flask.Flask.test_request_context` and
:meth:`~flask.Flask.request_context` to create this object.
When the request context is popped, it will evaluate all the
functions registered on the application for teardown execution
(:meth:`~flask.Flask.teardown_request`).
The request context is automatically popped at the end of the request
for you. In debug mode the request context is kept around if
exceptions happen so that interactive debuggers have a chance to
introspect the data. With 0.4 this can also be forced for requests
that did not fail and outside of ``DEBUG`` mode. By setting
``'flask._preserve_context'`` to ``True`` on the WSGI environment the
context will not pop itself at the end of the request. This is used by
the :meth:`~flask.Flask.test_client` for example to implement the
deferred cleanup functionality.
You might find this helpful for unittests where you need the
information from the context local around for a little longer. Make
sure to properly :meth:`~werkzeug.LocalStack.pop` the stack yourself in
that situation, otherwise your unittests will leak memory.
"""
    def __init__(self, app, environ, request=None):
        """Creates the context for a single request.  ``request`` may be
        passed in explicitly (as done by :meth:`copy`); otherwise one is
        built from the WSGI ``environ`` via ``app.request_class``.
        """
        self.app = app
        if request is None:
            request = app.request_class(environ)
        self.request = request
        self.url_adapter = app.create_url_adapter(self.request)
        self.flashes = None
        self.session = None

        # Request contexts can be pushed multiple times and interleaved with
        # other request contexts.  Now only if the last level is popped we
        # get rid of them.  Additionally if an application context is missing
        # one is created implicitly so for each level we add this information
        self._implicit_app_ctx_stack = []

        # indicator if the context was preserved.  Next time another context
        # is pushed the preserved context is popped.
        self.preserved = False

        # remembers the exception for pop if there is one in case the context
        # preservation kicks in.
        self._preserved_exc = None

        # Functions that should be executed after the request on the response
        # object.  These will be called before the regular "after_request"
        # functions.
        self._after_request_functions = []

        # URL matching happens eagerly, so routing exceptions are recorded
        # on the request before the context is even pushed.
        self.match_request()
    # ``ctx.g`` proxies to the *application* context's globals object rather
    # than storing anything on the request context itself.
    def _get_g(self):
        return _app_ctx_stack.top.g
    def _set_g(self, value):
        _app_ctx_stack.top.g = value
    g = property(_get_g, _set_g)
    # The helper functions are deleted so they do not remain as public
    # methods on the class.
    del _get_g, _set_g
def copy(self):
"""Creates a copy of this request context with the same request object.
This can be used to move a request context to a different greenlet.
Because the actual request object is the same this cannot be used to
move a request context to a different thread unless access to the
request object is locked.
.. versionadded:: 0.10
"""
return self.__class__(self.app,
environ=self.request.environ,
request=self.request
)
def match_request(self):
"""Can be overridden by a subclass to hook into the matching
of the request.
"""
try:
url_rule, self.request.view_args = \
self.url_adapter.match(return_rule=True)
self.request.url_rule = url_rule
except HTTPException as e:
self.request.routing_exception = e
    def push(self):
        """Binds the request context to the current context.

        Also pops any leftover preserved context, pushes an implicit
        application context when needed, and opens the session once the
        request context is available.
        """
        # If an exception occurs in debug mode or if context preservation is
        # activated under exception situations exactly one context stays
        # on the stack.  The rationale is that you want to access that
        # information under debug situations.  However if someone forgets to
        # pop that context again we want to make sure that on the next push
        # it's invalidated, otherwise we run at risk that something leaks
        # memory.  This is usually only a problem in test suite since this
        # functionality is not active in production environments.
        top = _request_ctx_stack.top
        if top is not None and top.preserved:
            top.pop(top._preserved_exc)

        # Before we push the request context we have to ensure that there
        # is an application context.
        app_ctx = _app_ctx_stack.top
        if app_ctx is None or app_ctx.app != self.app:
            # Remember the implicitly created app context so that pop()
            # knows it must pop it as well; otherwise record None.
            app_ctx = self.app.app_context()
            app_ctx.push()
            self._implicit_app_ctx_stack.append(app_ctx)
        else:
            self._implicit_app_ctx_stack.append(None)

        if hasattr(sys, 'exc_clear'):
            # Python 2 only: clear lingering exception state.
            sys.exc_clear()

        _request_ctx_stack.push(self)

        # Open the session at the moment that the request context is
        # available.  This allows a custom open_session method to use the
        # request context (e.g. code that access database information
        # stored on `g` instead of the appcontext).
        self.session = self.app.open_session(self.request)
        if self.session is None:
            self.session = self.app.make_null_session()
def pop(self, exc=None):
"""Pops the request context and unbinds it by doing that. This will
also trigger the execution of functions registered by the
:meth:`~flask.Flask.teardown_request` decorator.
.. versionchanged:: 0.9
Added the `exc` argument.
"""
|
[
" app_ctx = self._implicit_app_ctx_stack.pop()"
] | 1,358
|
lcc
|
python
| null |
5cba5adbc6ed3521940b6a136fc48a061a432b5260ca8c1f
|
|
# -*- coding: utf-8 -*-
"""
Copyright (C) 2011 Dariusz Suchojad <dsuch at zato.io>
Licensed under LGPLv3, see LICENSE.txt for terms and conditions.
"""
from __future__ import absolute_import, division, print_function, unicode_literals
# stdlib
import logging
from functools import wraps
# SQLAlchemy
from sqlalchemy import func, not_
from sqlalchemy.orm import aliased
from sqlalchemy.sql.expression import case
# Zato
from zato.common import DEFAULT_HTTP_PING_METHOD, DEFAULT_HTTP_POOL_SIZE, HTTP_SOAP_SERIALIZATION_TYPE, PARAMS_PRIORITY, \
URL_PARAMS_PRIORITY
from zato.common.odb.model import AWSS3, APIKeySecurity, AWSSecurity, CassandraConn, CassandraQuery, ChannelAMQP, \
ChannelSTOMP, ChannelWebSocket, ChannelWMQ, ChannelZMQ, Cluster, ConnDefAMQP, ConnDefWMQ, CronStyleJob, \
DeliveryDefinitionBase, Delivery, DeliveryHistory, DeliveryPayload, ElasticSearch, HTTPBasicAuth, HTTPSOAP, HTTSOAPAudit, \
IMAP, IntervalBasedJob, Job, JSONPointer, JWT, MsgNamespace, NotificationOpenStackSwift as NotifOSS, \
NotificationSQL as NotifSQL, NTLM, OAuth, OutgoingOdoo, OpenStackSecurity, OpenStackSwift, OutgoingAMQP, OutgoingFTP, \
OutgoingSTOMP, OutgoingWMQ, OutgoingZMQ, PubSubConsumer, PubSubProducer, PubSubTopic, RBACClientRole, RBACPermission, \
RBACRole, RBACRolePermission, SecurityBase, Server, Service, SMTP, Solr, SQLConnectionPool, TechnicalAccount, TLSCACert, \
TLSChannelSecurity, TLSKeyCertSecurity, WebSocketClient, WebSocketSubscription, WSSDefinition, VaultConnection, \
XPath, XPathSecurity
# ################################################################################################################################
logger = logging.getLogger(__name__)
# ################################################################################################################################
_no_page_limit = 2 ** 24 # ~16.7 million results, tops
# ################################################################################################################################
class _SearchResult(object):
def __init__(self, q, result, columns, total):
self.q = q
self.result = result
self.total = total
self.columns = columns
self.num_pages = 0
self.cur_page = 0
self.prev_page = 0
self.next_page = 0
self.has_prev_page = False
self.has_next_page = False
def __iter__(self):
return iter(self.result)
def __repr__(self):
# To avoice circular imports - this is OK because we very rarely repr(self) anyway
from zato.common.util import make_repr
return make_repr(self)
class _SearchWrapper(object):
    """ Wraps results in pagination and/or filters out objects by their name or other attributes.
    """
    def __init__(self, q, default_page_size=_no_page_limit, **config):

        # Apply WHERE conditions - each (column, criterion) pair adds an
        # AND-ed 'column CONTAINS criterion' clause, i.e. a row must match
        # every criterion in every column listed in 'filter_by'.
        for filter_by in config.get('filter_by', []):
            for criterion in config.get('query', []):
                q = q.filter(filter_by.contains(criterion))

        # Total number of results - computed from the filtered query
        # before pagination is applied, with any ORDER BY stripped.
        total_q = q.statement.with_only_columns([func.count()]).order_by(None)
        self.total = q.session.execute(total_q).scalar()

        # Pagination - cur_page is zero-based; with no page_size given the
        # ~16.7M _no_page_limit effectively disables paging.
        page_size = config.get('page_size', default_page_size)
        cur_page = config.get('cur_page', 0)

        slice_from = cur_page * page_size
        slice_to = slice_from + page_size

        self.q = q.slice(slice_from, slice_to)
# ################################################################################################################################
def query_wrapper(func):
    """ A decorator for queries which works out whether a given query function should return the result only
    or a column list retrieved in addition to the result. This is useful because some callers prefer the former
    and some need the latter. Also, paginates the results if requested to by the caller.
    """
    # NOTE(review): the 'func' parameter shadows SQLAlchemy's 'func' imported
    # at module level - harmless inside this scope, but easy to trip over.
    @wraps(func)
    def inner(*args, **kwargs):
        # needs_columns is always the last argument
        # so we don't have to look it up using the 'inspect' module or anything like that.
        needs_columns = args[-1]

        # Wrap the query in filtering/pagination, then materialize the page
        # together with its total count and column metadata.
        tool = _SearchWrapper(func(*args), **kwargs)
        result = _SearchResult(tool.q, tool.q.all(), tool.q.statement.columns, tool.total)

        if needs_columns:
            return result, result.columns

        return result
    return inner
# ################################################################################################################################
def internal_channel_list(session, cluster_id):
    """ All the HTTP/SOAP channels that point to internal services.

    Returns (soap_action, service name) pairs for channels in the given
    cluster whose target service is marked internal.
    """
    return session.query(
        HTTPSOAP.soap_action, Service.name).\
        filter(HTTPSOAP.cluster_id==Cluster.id).\
        filter(HTTPSOAP.service_id==Service.id).filter(Service.is_internal==True).filter(Cluster.id==cluster_id).filter(Cluster.id==HTTPSOAP.cluster_id) # noqa
# ################################################################################################################################
def _job(session, cluster_id):
    # Base query for scheduler jobs. Interval-based and cron-style details
    # are outer-joined because a job has at most one of the two; the missing
    # kind's columns come back as NULL.
    return session.query(
        Job.id, Job.name, Job.is_active,
        Job.job_type, Job.start_date, Job.extra,
        Service.name.label('service_name'), Service.impl_name.label('service_impl_name'),
        Service.id.label('service_id'),
        IntervalBasedJob.weeks, IntervalBasedJob.days,
        IntervalBasedJob.hours, IntervalBasedJob.minutes,
        IntervalBasedJob.seconds, IntervalBasedJob.repeats,
        CronStyleJob.cron_definition).\
        outerjoin(IntervalBasedJob, Job.id==IntervalBasedJob.job_id).\
        outerjoin(CronStyleJob, Job.id==CronStyleJob.job_id).\
        filter(Job.cluster_id==Cluster.id).\
        filter(Job.service_id==Service.id).\
        filter(Cluster.id==cluster_id).\
        order_by('job.name')

@query_wrapper
def job_list(session, cluster_id, needs_columns=False):
    """ All the scheduler's jobs defined in the ODB.
    """
    return _job(session, cluster_id)

def job_by_name(session, cluster_id, name):
    """ A scheduler's job fetched by its name.

    Raises if no such job exists (.one()).
    """
    return _job(session, cluster_id).\
        filter(Job.name==name).\
        one()
# ################################################################################################################################
@query_wrapper
def apikey_security_list(session, cluster_id, needs_columns=False):
    """ All the API keys.
    """
    # Joined to SecurityBase so results can be ordered by the shared
    # 'sec_base.name' column, like the other security list queries below.
    return session.query(
        APIKeySecurity.id, APIKeySecurity.name,
        APIKeySecurity.is_active,
        APIKeySecurity.username,
        APIKeySecurity.password, APIKeySecurity.sec_type).\
        filter(Cluster.id==cluster_id).\
        filter(Cluster.id==APIKeySecurity.cluster_id).\
        filter(SecurityBase.id==APIKeySecurity.id).\
        order_by('sec_base.name')

@query_wrapper
def aws_security_list(session, cluster_id, needs_columns=False):
    """ All the Amazon security definitions.
    """
    return session.query(
        AWSSecurity.id, AWSSecurity.name,
        AWSSecurity.is_active,
        AWSSecurity.username,
        AWSSecurity.password, AWSSecurity.sec_type).\
        filter(Cluster.id==cluster_id).\
        filter(Cluster.id==AWSSecurity.cluster_id).\
        filter(SecurityBase.id==AWSSecurity.id).\
        order_by('sec_base.name')
@query_wrapper
def basic_auth_list(session, cluster_id, cluster_name, needs_columns=False):
    """ All the HTTP Basic Auth definitions.
    """
    q = session.query(
        HTTPBasicAuth.id, HTTPBasicAuth.name,
        HTTPBasicAuth.is_active,
        HTTPBasicAuth.username, HTTPBasicAuth.realm,
        HTTPBasicAuth.password, HTTPBasicAuth.sec_type,
        HTTPBasicAuth.password_type,
        Cluster.id.label('cluster_id'), Cluster.name.label('cluster_name')).\
        filter(Cluster.id==HTTPBasicAuth.cluster_id)

    # The cluster may be pointed to either by ID or by name; a truthy ID wins.
    if cluster_id:
        q = q.filter(Cluster.id==cluster_id)
    else:
        q = q.filter(Cluster.name==cluster_name)

    q = q.filter(SecurityBase.id==HTTPBasicAuth.id).\
        order_by('sec_base.name')

    return q
def _jwt(session, cluster_id, cluster_name, needs_columns=False):
    """ All the JWT definitions.

    The cluster may be pointed to either by ID or by name; a truthy ID wins.
    """
    q = session.query(
        JWT.id, JWT.name, JWT.is_active, JWT.username, JWT.password,
        JWT.ttl, JWT.sec_type, JWT.password_type,
        Cluster.id.label('cluster_id'),
        Cluster.name.label('cluster_name')).\
        filter(Cluster.id==JWT.cluster_id)

    if cluster_id:
        q = q.filter(Cluster.id==cluster_id)
    else:
        q = q.filter(Cluster.name==cluster_name)

    q = q.filter(SecurityBase.id==JWT.id).\
        order_by('sec_base.name')

    return q

@query_wrapper
def jwt_list(*args, **kwargs):
    return _jwt(*args, **kwargs)

def jwt_by_username(session, cluster_id, username, needs_columns=False):
    """ An individual JWT definition by its username.

    Raises if no such definition exists (.one()).
    """
    return _jwt(session, cluster_id, None, needs_columns).\
        filter(JWT.username==username).\
        one()
@query_wrapper
def ntlm_list(session, cluster_id, needs_columns=False):
    """ All the NTLM definitions.
    """
    return session.query(
        NTLM.id, NTLM.name,
        NTLM.is_active,
        NTLM.username,
        NTLM.password, NTLM.sec_type,
        NTLM.password_type).\
        filter(Cluster.id==cluster_id).\
        filter(Cluster.id==NTLM.cluster_id).\
        filter(SecurityBase.id==NTLM.id).\
        order_by('sec_base.name')

@query_wrapper
def oauth_list(session, cluster_id, needs_columns=False):
    """ All the OAuth definitions.
    """
    return session.query(
        OAuth.id, OAuth.name,
        OAuth.is_active,
        OAuth.username, OAuth.password,
        OAuth.proto_version, OAuth.sig_method,
        OAuth.max_nonce_log, OAuth.sec_type).\
        filter(Cluster.id==cluster_id).\
        filter(Cluster.id==OAuth.cluster_id).\
        filter(SecurityBase.id==OAuth.id).\
        order_by('sec_base.name')
@query_wrapper
def openstack_security_list(session, cluster_id, needs_columns=False):
    """ All the OpenStackSecurity definitions.
    """
    return session.query(
        OpenStackSecurity.id, OpenStackSecurity.name, OpenStackSecurity.is_active,
        OpenStackSecurity.username, OpenStackSecurity.sec_type).\
        filter(Cluster.id==cluster_id).\
        filter(Cluster.id==OpenStackSecurity.cluster_id).\
        filter(SecurityBase.id==OpenStackSecurity.id).\
        order_by('sec_base.name')
@query_wrapper
def tech_acc_list(session, cluster_id, needs_columns=False):
    """ All the technical accounts.
    """
    # Fix: a stray order_by(TechnicalAccount.name) used to precede the
    # filters, duplicating the ordering below; no sibling security query
    # has it, so it is dropped for consistency.
    return session.query(
        TechnicalAccount.id, TechnicalAccount.name,
        TechnicalAccount.is_active,
        TechnicalAccount.password, TechnicalAccount.salt,
        TechnicalAccount.sec_type, TechnicalAccount.password_type).\
        filter(Cluster.id==cluster_id).\
        filter(Cluster.id==TechnicalAccount.cluster_id).\
        filter(SecurityBase.id==TechnicalAccount.id).\
        order_by('sec_base.name')
@query_wrapper
def tls_ca_cert_list(session, cluster_id, needs_columns=False):
    """ TLS CA certs.

    Note: returns full TLSCACert entities, unlike the column-level queries
    around it.
    """
    return session.query(TLSCACert).\
        filter(Cluster.id==cluster_id).\
        filter(Cluster.id==TLSCACert.cluster_id).\
        order_by('sec_tls_ca_cert.name')

@query_wrapper
def tls_channel_sec_list(session, cluster_id, needs_columns=False):
    """ TLS-based channel security.
    """
    return session.query(
        TLSChannelSecurity.id, TLSChannelSecurity.name,
        TLSChannelSecurity.is_active, TLSChannelSecurity.value,
        TLSChannelSecurity.sec_type).\
        filter(Cluster.id==cluster_id).\
        filter(Cluster.id==TLSChannelSecurity.cluster_id).\
        filter(SecurityBase.id==TLSChannelSecurity.id).\
        order_by('sec_base.name')

@query_wrapper
def tls_key_cert_list(session, cluster_id, needs_columns=False):
    """ TLS key/cert pairs.
    """
    return session.query(
        TLSKeyCertSecurity.id, TLSKeyCertSecurity.name,
        TLSKeyCertSecurity.is_active, TLSKeyCertSecurity.info,
        TLSKeyCertSecurity.value, TLSKeyCertSecurity.sec_type).\
        filter(Cluster.id==cluster_id).\
        filter(Cluster.id==TLSKeyCertSecurity.cluster_id).\
        filter(SecurityBase.id==TLSKeyCertSecurity.id).\
        order_by('sec_base.name')
@query_wrapper
def wss_list(session, cluster_id, needs_columns=False):
    """ All the WS-Security definitions.
    """
    return session.query(
        WSSDefinition.id, WSSDefinition.name, WSSDefinition.is_active,
        WSSDefinition.username, WSSDefinition.password, WSSDefinition.password_type,
        WSSDefinition.reject_empty_nonce_creat, WSSDefinition.reject_stale_tokens,
        WSSDefinition.reject_expiry_limit, WSSDefinition.nonce_freshness_time,
        WSSDefinition.sec_type).\
        filter(Cluster.id==cluster_id).\
        filter(Cluster.id==WSSDefinition.cluster_id).\
        filter(SecurityBase.id==WSSDefinition.id).\
        order_by('sec_base.name')

@query_wrapper
def xpath_sec_list(session, cluster_id, needs_columns=False):
    """ All the XPath security definitions.
    """
    return session.query(
        XPathSecurity.id, XPathSecurity.name, XPathSecurity.is_active, XPathSecurity.username, XPathSecurity.username_expr,
        XPathSecurity.password_expr, XPathSecurity.password, XPathSecurity.sec_type).\
        filter(Cluster.id==cluster_id).\
        filter(Cluster.id==XPathSecurity.cluster_id).\
        filter(SecurityBase.id==XPathSecurity.id).\
        order_by('sec_base.name')
# ################################################################################################################################
def _def_amqp(session, cluster_id):
    # Base query for AMQP connection definitions within one cluster.
    return session.query(
        ConnDefAMQP.name, ConnDefAMQP.id, ConnDefAMQP.host,
        ConnDefAMQP.port, ConnDefAMQP.vhost, ConnDefAMQP.username,
        ConnDefAMQP.frame_max, ConnDefAMQP.heartbeat, ConnDefAMQP.password).\
        filter(ConnDefAMQP.def_type=='amqp').\
        filter(Cluster.id==ConnDefAMQP.cluster_id).\
        filter(Cluster.id==cluster_id).\
        order_by(ConnDefAMQP.name)

def def_amqp(session, cluster_id, id):
    """ A particular AMQP definition

    Raises if no such definition exists (.one()).
    """
    return _def_amqp(session, cluster_id).\
        filter(ConnDefAMQP.id==id).\
        one()

@query_wrapper
def def_amqp_list(session, cluster_id, needs_columns=False):
    """ AMQP connection definitions.
    """
    return _def_amqp(session, cluster_id)
# ################################################################################################################################
def _def_jms_wmq(session, cluster_id):
    # Base query for JMS WebSphere MQ connection definitions within one cluster.
    return session.query(
        ConnDefWMQ.id, ConnDefWMQ.name, ConnDefWMQ.host,
        ConnDefWMQ.port, ConnDefWMQ.queue_manager, ConnDefWMQ.channel,
        ConnDefWMQ.cache_open_send_queues, ConnDefWMQ.cache_open_receive_queues,
        ConnDefWMQ.use_shared_connections, ConnDefWMQ.ssl, ConnDefWMQ.ssl_cipher_spec,
        ConnDefWMQ.ssl_key_repository, ConnDefWMQ.needs_mcd, ConnDefWMQ.max_chars_printed).\
        filter(Cluster.id==ConnDefWMQ.cluster_id).\
        filter(Cluster.id==cluster_id).\
        order_by(ConnDefWMQ.name)

def def_jms_wmq(session, cluster_id, id):
    """ A particular JMS WebSphere MQ definition

    Raises if no such definition exists (.one()).
    """
    return _def_jms_wmq(session, cluster_id).\
        filter(ConnDefWMQ.id==id).\
        one()

@query_wrapper
def def_jms_wmq_list(session, cluster_id, needs_columns=False):
    """ JMS WebSphere MQ connection definitions.
    """
    return _def_jms_wmq(session, cluster_id)
# ################################################################################################################################
def _out_amqp(session, cluster_id):
    """ Base query for outgoing AMQP connections, joined to their connection
    definitions, in a given cluster.
    """
    # Fixed: the join predicate OutgoingAMQP.def_id==ConnDefAMQP.id used to be
    # stated twice (once in each direction); the duplicate is removed.
    return session.query(
        OutgoingAMQP.id, OutgoingAMQP.name, OutgoingAMQP.is_active,
        OutgoingAMQP.delivery_mode, OutgoingAMQP.priority, OutgoingAMQP.content_type,
        OutgoingAMQP.content_encoding, OutgoingAMQP.expiration, OutgoingAMQP.user_id,
        OutgoingAMQP.app_id, ConnDefAMQP.name.label('def_name'), OutgoingAMQP.def_id).\
        filter(OutgoingAMQP.def_id==ConnDefAMQP.id).\
        filter(Cluster.id==ConnDefAMQP.cluster_id).\
        filter(Cluster.id==cluster_id).\
        order_by(OutgoingAMQP.name)

def out_amqp(session, cluster_id, id):
    """ An outgoing AMQP connection.
    """
    return _out_amqp(session, cluster_id).\
        filter(OutgoingAMQP.id==id).\
        one()

@query_wrapper
def out_amqp_list(session, cluster_id, needs_columns=False):
    """ Outgoing AMQP connections.
    """
    return _out_amqp(session, cluster_id)
# ################################################################################################################################
def _out_jms_wmq(session, cluster_id):
    """ Base query for outgoing JMS WebSphere MQ connections, joined to their
    connection definitions, in a given cluster.
    """
    # Fixed: the join predicate OutgoingWMQ.def_id==ConnDefWMQ.id used to be
    # stated twice (once in each direction); the duplicate is removed.
    return session.query(
        OutgoingWMQ.id, OutgoingWMQ.name, OutgoingWMQ.is_active,
        OutgoingWMQ.delivery_mode, OutgoingWMQ.priority, OutgoingWMQ.expiration,
        ConnDefWMQ.name.label('def_name'), OutgoingWMQ.def_id).\
        filter(OutgoingWMQ.def_id==ConnDefWMQ.id).\
        filter(Cluster.id==ConnDefWMQ.cluster_id).\
        filter(Cluster.id==cluster_id).\
        order_by(OutgoingWMQ.name)

def out_jms_wmq(session, cluster_id, id):
    """ An outgoing JMS WebSphere MQ connection (by ID).
    """
    return _out_jms_wmq(session, cluster_id).\
        filter(OutgoingWMQ.id==id).\
        one()

def out_jms_wmq_by_name(session, cluster_id, name):
    """ An outgoing JMS WebSphere MQ connection (by name).
    """
    return _out_jms_wmq(session, cluster_id).\
        filter(OutgoingWMQ.name==name).\
        first()

@query_wrapper
def out_jms_wmq_list(session, cluster_id, needs_columns=False):
    """ Outgoing JMS WebSphere MQ connections.
    """
    return _out_jms_wmq(session, cluster_id)
# ################################################################################################################################
def _channel_amqp(session, cluster_id):
    # Base query: AMQP channels joined to their connection definition
    # and the service each channel invokes.
    return session.query(
        ChannelAMQP.id, ChannelAMQP.name, ChannelAMQP.is_active,
        ChannelAMQP.queue, ChannelAMQP.consumer_tag_prefix,
        ConnDefAMQP.name.label('def_name'), ChannelAMQP.def_id,
        ChannelAMQP.data_format,
        Service.name.label('service_name'),
        Service.impl_name.label('service_impl_name')).\
        filter(ChannelAMQP.def_id==ConnDefAMQP.id).\
        filter(ChannelAMQP.service_id==Service.id).\
        filter(Cluster.id==ConnDefAMQP.cluster_id).\
        filter(Cluster.id==cluster_id).\
        order_by(ChannelAMQP.name)

def channel_amqp(session, cluster_id, id):
    """ A particular AMQP channel.
    """
    return _channel_amqp(session, cluster_id).\
        filter(ChannelAMQP.id==id).\
        one()

@query_wrapper
def channel_amqp_list(session, cluster_id, needs_columns=False):
    """ AMQP channels.
    """
    return _channel_amqp(session, cluster_id)

# ################################################################################################################################

def _channel_stomp(session, cluster_id):
    # Base query: STOMP channels joined to the service each one invokes.
    return session.query(
        ChannelSTOMP.id, ChannelSTOMP.name, ChannelSTOMP.is_active, ChannelSTOMP.username,
        ChannelSTOMP.password, ChannelSTOMP.address, ChannelSTOMP.proto_version,
        ChannelSTOMP.timeout, ChannelSTOMP.sub_to, ChannelSTOMP.service_id,
        Service.name.label('service_name')).\
        filter(Service.id==ChannelSTOMP.service_id).\
        filter(Cluster.id==ChannelSTOMP.cluster_id).\
        filter(Cluster.id==cluster_id).\
        order_by(ChannelSTOMP.name)

def channel_stomp(session, cluster_id, id):
    """ A STOMP channel.
    """
    return _channel_stomp(session, cluster_id).\
        filter(ChannelSTOMP.id==id).\
        one()

@query_wrapper
def channel_stomp_list(session, cluster_id, needs_columns=False):
    """ A list of STOMP channels.
    """
    return _channel_stomp(session, cluster_id)

# ################################################################################################################################

def _channel_jms_wmq(session, cluster_id):
    # Base query: JMS WebSphere MQ channels joined to their connection
    # definition and the service each channel invokes.
    return session.query(
        ChannelWMQ.id, ChannelWMQ.name, ChannelWMQ.is_active,
        ChannelWMQ.queue, ConnDefWMQ.name.label('def_name'), ChannelWMQ.def_id,
        ChannelWMQ.data_format, Service.name.label('service_name'),
        Service.impl_name.label('service_impl_name')).\
        filter(ChannelWMQ.def_id==ConnDefWMQ.id).\
        filter(ChannelWMQ.service_id==Service.id).\
        filter(Cluster.id==ConnDefWMQ.cluster_id).\
        filter(Cluster.id==cluster_id).\
        order_by(ChannelWMQ.name)

def channel_jms_wmq(session, cluster_id, id):
    """ A particular JMS WebSphere MQ channel.
    """
    return _channel_jms_wmq(session, cluster_id).\
        filter(ChannelWMQ.id==id).\
        one()

@query_wrapper
def channel_jms_wmq_list(session, cluster_id, needs_columns=False):
    """ JMS WebSphere MQ channels.
    """
    return _channel_jms_wmq(session, cluster_id)
# ################################################################################################################################
def _out_stomp(session, cluster_id):
    """ Base query for outgoing STOMP connections in a given cluster.
    """
    return session.query(OutgoingSTOMP).\
        filter(Cluster.id==OutgoingSTOMP.cluster_id).\
        filter(Cluster.id==cluster_id).\
        order_by(OutgoingSTOMP.name)

def out_stomp(session, cluster_id, id):
    """ An outgoing STOMP connection.
    """
    # Fixed: this used to call _out_zmq by mistake, querying the wrong model
    # and filtering OutgoingSTOMP.id against a ZeroMQ result set.
    return _out_stomp(session, cluster_id).\
        filter(OutgoingSTOMP.id==id).\
        one()

@query_wrapper
def out_stomp_list(session, cluster_id, needs_columns=False):
    """ Outgoing STOMP connections.
    """
    return _out_stomp(session, cluster_id)
# ################################################################################################################################
def _out_zmq(session, cluster_id):
    """ Base query for outgoing ZeroMQ connections in a given cluster.
    """
    q = session.query(
        OutgoingZMQ.id, OutgoingZMQ.name, OutgoingZMQ.is_active,
        OutgoingZMQ.address, OutgoingZMQ.socket_type)
    q = q.filter(Cluster.id==OutgoingZMQ.cluster_id)
    q = q.filter(Cluster.id==cluster_id)
    return q.order_by(OutgoingZMQ.name)

def out_zmq(session, cluster_id, id):
    """ An outgoing ZeroMQ connection.
    """
    q = _out_zmq(session, cluster_id)
    return q.filter(OutgoingZMQ.id==id).one()

@query_wrapper
def out_zmq_list(session, cluster_id, needs_columns=False):
    """ Outgoing ZeroMQ connections.
    """
    return _out_zmq(session, cluster_id)
# ################################################################################################################################
def _channel_zmq(session, cluster_id):
    # Base query: incoming ZeroMQ connections joined to the service each one invokes.
    return session.query(
        ChannelZMQ.id, ChannelZMQ.name, ChannelZMQ.is_active,
        ChannelZMQ.address, ChannelZMQ.socket_type, ChannelZMQ.socket_method, ChannelZMQ.sub_key,
        ChannelZMQ.pool_strategy, ChannelZMQ.service_source, ChannelZMQ.data_format,
        Service.name.label('service_name'), Service.impl_name.label('service_impl_name')).\
        filter(Service.id==ChannelZMQ.service_id).\
        filter(Cluster.id==ChannelZMQ.cluster_id).\
        filter(Cluster.id==cluster_id).\
        order_by(ChannelZMQ.name)

def channel_zmq(session, cluster_id, id):
    """ An incoming ZeroMQ connection.
    """
    return _channel_zmq(session, cluster_id).\
        filter(ChannelZMQ.id==id).\
        one()

@query_wrapper
def channel_zmq_list(session, cluster_id, needs_columns=False):
    """ Incoming ZeroMQ connections.
    """
    return _channel_zmq(session, cluster_id)

# ################################################################################################################################

def _http_soap(session, cluster_id):
    # Base query: HTTP/SOAP objects - channels and outgoing connections alike -
    # with their optional service, TLS CA cert and security definition
    # (all three are outer joins, hence optional).
    return session.query(
        HTTPSOAP.id, HTTPSOAP.name, HTTPSOAP.is_active,
        HTTPSOAP.is_internal, HTTPSOAP.transport, HTTPSOAP.host,
        HTTPSOAP.url_path, HTTPSOAP.method, HTTPSOAP.soap_action,
        HTTPSOAP.soap_version, HTTPSOAP.data_format, HTTPSOAP.security_id,
        HTTPSOAP.has_rbac,
        HTTPSOAP.connection, HTTPSOAP.content_type,
        # The case() expressions below substitute defaults in SQL for columns that are NULL.
        case([(HTTPSOAP.ping_method != None, HTTPSOAP.ping_method)], else_=DEFAULT_HTTP_PING_METHOD).label('ping_method'), # noqa
        case([(HTTPSOAP.pool_size != None, HTTPSOAP.pool_size)], else_=DEFAULT_HTTP_POOL_SIZE).label('pool_size'),
        case([(HTTPSOAP.merge_url_params_req != None, HTTPSOAP.merge_url_params_req)], else_=True).label('merge_url_params_req'),
        case([(HTTPSOAP.url_params_pri != None, HTTPSOAP.url_params_pri)], else_=URL_PARAMS_PRIORITY.DEFAULT).label('url_params_pri'),
        case([(HTTPSOAP.params_pri != None, HTTPSOAP.params_pri)], else_=PARAMS_PRIORITY.DEFAULT).label('params_pri'),
        case([(
            HTTPSOAP.serialization_type != None, HTTPSOAP.serialization_type)],
            else_=HTTP_SOAP_SERIALIZATION_TYPE.DEFAULT.id).label('serialization_type'),
        HTTPSOAP.audit_enabled,
        HTTPSOAP.audit_back_log,
        HTTPSOAP.audit_max_payload,
        HTTPSOAP.audit_repl_patt_type,
        HTTPSOAP.timeout,
        HTTPSOAP.sec_tls_ca_cert_id,
        HTTPSOAP.sec_use_rbac,
        TLSCACert.name.label('sec_tls_ca_cert_name'),
        SecurityBase.sec_type,
        Service.name.label('service_name'),
        Service.id.label('service_id'),
        Service.impl_name.label('service_impl_name'),
        SecurityBase.name.label('security_name'),
        SecurityBase.username.label('username'),
        SecurityBase.password.label('password'),
        SecurityBase.password_type.label('password_type'),).\
        outerjoin(Service, Service.id==HTTPSOAP.service_id).\
        outerjoin(TLSCACert, TLSCACert.id==HTTPSOAP.sec_tls_ca_cert_id).\
        outerjoin(SecurityBase, HTTPSOAP.security_id==SecurityBase.id).\
        filter(Cluster.id==HTTPSOAP.cluster_id).\
        filter(Cluster.id==cluster_id).\
        order_by(HTTPSOAP.name)
def http_soap_security_list(session, cluster_id, connection=None):
    """ HTTP/SOAP security definitions.
    """
    q = _http_soap(session, cluster_id)
    return q.filter(HTTPSOAP.connection==connection) if connection else q

def http_soap(session, cluster_id, id):
    """ An HTTP/SOAP connection.
    """
    q = _http_soap(session, cluster_id)
    return q.filter(HTTPSOAP.id==id).one()

@query_wrapper
def http_soap_list(session, cluster_id, connection=None, transport=None, return_internal=True, needs_columns=False, **kwargs):
    """ HTTP/SOAP connections, both channels and outgoing ones.
    """
    q = _http_soap(session, cluster_id)
    if connection:
        q = q.filter(HTTPSOAP.connection==connection)
    if transport:
        q = q.filter(HTTPSOAP.transport==transport)
    if not return_internal:
        # Internal objects are the ones whose name starts with 'zato'.
        q = q.filter(not_(HTTPSOAP.name.startswith('zato')))
    return q
# ################################################################################################################################
def _out_sql(session, cluster_id):
    """ Base query for outgoing SQL connection pools in a given cluster.
    """
    q = session.query(SQLConnectionPool)
    q = q.filter(Cluster.id==SQLConnectionPool.cluster_id)
    q = q.filter(Cluster.id==cluster_id)
    return q.order_by(SQLConnectionPool.name)

def out_sql(session, cluster_id, id):
    """ An outgoing SQL connection.
    """
    q = _out_sql(session, cluster_id)
    return q.filter(SQLConnectionPool.id==id).one()

@query_wrapper
def out_sql_list(session, cluster_id, needs_columns=False):
    """ Outgoing SQL connections.
    """
    return _out_sql(session, cluster_id)
# ################################################################################################################################
def _out_ftp(session, cluster_id):
    """ Base query for outgoing FTP connections in a given cluster.
    """
    q = session.query(
        OutgoingFTP.id, OutgoingFTP.name, OutgoingFTP.is_active,
        OutgoingFTP.host, OutgoingFTP.port, OutgoingFTP.user, OutgoingFTP.password,
        OutgoingFTP.acct, OutgoingFTP.timeout, OutgoingFTP.dircache)
    q = q.filter(Cluster.id==OutgoingFTP.cluster_id)
    q = q.filter(Cluster.id==cluster_id)
    return q.order_by(OutgoingFTP.name)

def out_ftp(session, cluster_id, id):
    """ An outgoing FTP connection.
    """
    q = _out_ftp(session, cluster_id)
    return q.filter(OutgoingFTP.id==id).one()

@query_wrapper
def out_ftp_list(session, cluster_id, needs_columns=False):
    """ Outgoing FTP connections.
    """
    return _out_ftp(session, cluster_id)
# ################################################################################################################################
def _service(session, cluster_id):
    """ Base query for services deployed on a given cluster.
    """
    q = session.query(
        Service.id, Service.name, Service.is_active,
        Service.impl_name, Service.is_internal, Service.slow_threshold)
    q = q.filter(Cluster.id==Service.cluster_id)
    q = q.filter(Cluster.id==cluster_id)
    return q.order_by(Service.name)

def service(session, cluster_id, id):
    """ A service.
    """
    q = _service(session, cluster_id)
    return q.filter(Service.id==id).one()

@query_wrapper
def service_list(session, cluster_id, return_internal=True, needs_columns=False):
    """ All services.
    """
    q = _service(session, cluster_id)
    if not return_internal:
        # Internal services are the ones whose name starts with 'zato'.
        q = q.filter(not_(Service.name.startswith('zato')))
    return q
# ################################################################################################################################
def _delivery_definition(session, cluster_id):
    """ Base query for delivery definitions in a given cluster.
    """
    q = session.query(DeliveryDefinitionBase)
    q = q.filter(Cluster.id==DeliveryDefinitionBase.cluster_id)
    q = q.filter(Cluster.id==cluster_id)
    return q.order_by(DeliveryDefinitionBase.name)

def delivery_definition_list(session, cluster_id, target_type=None):
    """ Returns a list of delivery definitions for a given target type.
    """
    q = _delivery_definition(session, cluster_id)
    if target_type:
        q = q.filter(DeliveryDefinitionBase.target_type==target_type)
    return q
# ################################################################################################################################
def delivery_count_by_state(session, def_id):
    # Returns (state, count) pairs for all deliveries of a given definition.
    return session.query(Delivery.state, func.count(Delivery.state)).\
        filter(Delivery.definition_id==def_id).\
        group_by(Delivery.state)

def delivery_list(session, cluster_id, def_name, state, start=None, stop=None, needs_payload=False):
    # Deliveries of a given definition in any of the requested states,
    # optionally limited to a last-used time window, newest first.
    columns = [
        DeliveryDefinitionBase.name.label('def_name'),
        DeliveryDefinitionBase.target_type,
        Delivery.task_id,
        Delivery.creation_time.label('creation_time_utc'),
        Delivery.last_used.label('last_used_utc'),
        Delivery.source_count,
        Delivery.target_count,
        Delivery.resubmit_count,
        Delivery.state,
        DeliveryDefinitionBase.retry_repeats,
        DeliveryDefinitionBase.check_after,
        DeliveryDefinitionBase.retry_seconds
    ]
    # Payloads are fetched only on request since they may be large.
    if needs_payload:
        columns.extend([DeliveryPayload.payload, Delivery.args, Delivery.kwargs])
    q = session.query(*columns).\
        filter(DeliveryDefinitionBase.id==Delivery.definition_id).\
        filter(DeliveryDefinitionBase.cluster_id==cluster_id).\
        filter(DeliveryDefinitionBase.name==def_name).\
        filter(Delivery.state.in_(state))
    if needs_payload:
        q = q.filter(DeliveryPayload.task_id==Delivery.task_id)
    if start:
        q = q.filter(Delivery.last_used >= start)
    if stop:
        q = q.filter(Delivery.last_used <= stop)
    q = q.order_by(Delivery.last_used.desc())
    return q

def delivery(session, task_id, target_def_class):
    # A single delivery by task ID, including its payload, for a concrete
    # delivery-definition class given by the caller.
    return session.query(
        target_def_class.name.label('def_name'),
        target_def_class.target_type,
        Delivery.task_id,
        Delivery.creation_time.label('creation_time_utc'),
        Delivery.last_used.label('last_used_utc'),
        Delivery.source_count,
        Delivery.target_count,
        Delivery.resubmit_count,
        Delivery.state,
        target_def_class.retry_repeats,
        target_def_class.check_after,
        target_def_class.retry_seconds,
        DeliveryPayload.payload,
        Delivery.args,
        Delivery.kwargs,
        target_def_class.target,
        ).\
        filter(target_def_class.id==Delivery.definition_id).\
        filter(Delivery.task_id==task_id).\
        filter(DeliveryPayload.task_id==Delivery.task_id)

@query_wrapper
def delivery_history_list(session, task_id, needs_columns=True):
    # History entries for one delivery task, newest first.
    return session.query(
        DeliveryHistory.entry_type,
        DeliveryHistory.entry_time,
        DeliveryHistory.entry_ctx,
        DeliveryHistory.resubmit_count).\
        filter(DeliveryHistory.task_id==task_id).\
        order_by(DeliveryHistory.entry_time.desc())
# ################################################################################################################################
def _msg_list(class_, order_by, session, cluster_id, needs_columns=False):
    """ Base query for a list of message elements of a given type - namespaces,
    XPaths or JSON Pointers. (Note: needs_columns is currently unused here.)
    """
    return session.query(
        class_.id, class_.name,
        class_.value).\
        filter(Cluster.id==cluster_id).\
        filter(Cluster.id==class_.cluster_id).\
        order_by(order_by)
@query_wrapper
def namespace_list(session, cluster_id, needs_columns=False):
    """ All the namespaces.
    """
    # Fixed: the last argument used to be the query_wrapper decorator object
    # itself instead of the needs_columns flag.
    return _msg_list(MsgNamespace, 'msg_ns.name', session, cluster_id, needs_columns)

@query_wrapper
def xpath_list(session, cluster_id, needs_columns=False):
    """ All the XPaths.
    """
    return _msg_list(XPath, 'msg_xpath.name', session, cluster_id, needs_columns)

@query_wrapper
def json_pointer_list(session, cluster_id, needs_columns=False):
    """ All the JSON Pointers.
    """
    return _msg_list(JSONPointer, 'msg_json_pointer.name', session, cluster_id, needs_columns)
# ################################################################################################################################
def _http_soap_audit(session, cluster_id, conn_id=None, start=None, stop=None, query=None, id=None, needs_req_payload=False):
    """ Base query for HTTP/SOAP audit items, optionally filtered by connection,
    request-time range, a LIKE pattern or a particular item's ID.
    """
    columns = [
        HTTSOAPAudit.id,
        HTTSOAPAudit.name.label('conn_name'),
        HTTSOAPAudit.cid,
        HTTSOAPAudit.transport,
        HTTSOAPAudit.connection,
        HTTSOAPAudit.req_time.label('req_time_utc'),
        HTTSOAPAudit.resp_time.label('resp_time_utc'),
        HTTSOAPAudit.user_token,
        HTTSOAPAudit.invoke_ok,
        HTTSOAPAudit.auth_ok,
        HTTSOAPAudit.remote_addr,
    ]

    # Headers and payloads may be large so they are only fetched when requested.
    if needs_req_payload:
        columns.extend([
            HTTSOAPAudit.req_headers, HTTSOAPAudit.req_payload, HTTSOAPAudit.resp_headers, HTTSOAPAudit.resp_payload
        ])

    q = session.query(*columns)

    if query:
        # Substring search across CID, headers and payloads.
        query = '%{}%'.format(query)
        q = q.filter(
            HTTSOAPAudit.cid.ilike(query) |
            HTTSOAPAudit.req_headers.ilike(query) | HTTSOAPAudit.req_payload.ilike(query) |
            HTTSOAPAudit.resp_headers.ilike(query) | HTTSOAPAudit.resp_payload.ilike(query)
        )

    if id:
        q = q.filter(HTTSOAPAudit.id == id)

    if conn_id:
        q = q.filter(HTTSOAPAudit.conn_id == conn_id)

    if start:
        q = q.filter(HTTSOAPAudit.req_time >= start)

    if stop:
        # Fixed: the upper bound used to compare against `start` instead of `stop`,
        # which returned no rows for any non-degenerate time window.
        q = q.filter(HTTSOAPAudit.req_time <= stop)

    q = q.order_by(HTTSOAPAudit.req_time.desc())

    return q
@query_wrapper
def http_soap_audit_item_list(session, cluster_id, conn_id, start, stop, query, needs_req_payload, needs_columns=False):
    """ A list of HTTP/SOAP audit items.
    """
    # Fixed: needs_req_payload was accepted but never forwarded to the base query,
    # so payloads were never included even when requested.
    return _http_soap_audit(session, cluster_id, conn_id, start, stop, query, needs_req_payload=needs_req_payload)

@query_wrapper
def http_soap_audit_item(session, cluster_id, id, needs_columns=False):
    """ A single HTTP/SOAP audit item, always including headers and payloads.
    """
    return _http_soap_audit(session, cluster_id, id=id, needs_req_payload=True)
# ################################################################################################################################
def _cloud_openstack_swift(session, cluster_id):
    """ Base query for OpenStack Swift connections in a given cluster.
    """
    q = session.query(OpenStackSwift)
    q = q.filter(Cluster.id==cluster_id)
    q = q.filter(Cluster.id==OpenStackSwift.cluster_id)
    return q.order_by(OpenStackSwift.name)

def cloud_openstack_swift(session, cluster_id, id):
    """ An OpenStack Swift connection.
    """
    q = _cloud_openstack_swift(session, cluster_id)
    return q.filter(OpenStackSwift.id==id).one()

@query_wrapper
def cloud_openstack_swift_list(session, cluster_id, needs_columns=False):
    """ OpenStack Swift connections.
    """
    return _cloud_openstack_swift(session, cluster_id)
# ################################################################################################################################
def _cloud_aws_s3(session, cluster_id):
    # Base query: AWS S3 connections with credentials taken from the linked
    # security definition.
    # NOTE(review): unlike sibling queries there is no AWSS3.cluster_id==Cluster.id
    # join here - presumably the security definition scopes results to the cluster;
    # confirm against the model definitions.
    return session.query(
        AWSS3.id, AWSS3.name, AWSS3.is_active, AWSS3.pool_size, AWSS3.address, AWSS3.debug_level, AWSS3.suppr_cons_slashes,
        AWSS3.content_type, AWSS3.metadata_, AWSS3.security_id, AWSS3.bucket, AWSS3.encrypt_at_rest, AWSS3.storage_class,
        SecurityBase.username, SecurityBase.password).\
        filter(Cluster.id==cluster_id).\
        filter(AWSS3.security_id==SecurityBase.id).\
        order_by(AWSS3.name)

def cloud_aws_s3(session, cluster_id, id):
    """ An AWS S3 connection.
    """
    return _cloud_aws_s3(session, cluster_id).\
        filter(AWSS3.id==id).\
        one()

@query_wrapper
def cloud_aws_s3_list(session, cluster_id, needs_columns=False):
    """ AWS S3 connections.
    """
    return _cloud_aws_s3(session, cluster_id)

# ################################################################################################################################

def _pubsub_topic(session, cluster_id):
    # Base query: pub/sub topics in a given cluster.
    return session.query(PubSubTopic.id, PubSubTopic.name, PubSubTopic.is_active, PubSubTopic.max_depth).\
        filter(Cluster.id==PubSubTopic.cluster_id).\
        filter(Cluster.id==cluster_id).\
        order_by(PubSubTopic.name)

def pubsub_topic(session, cluster_id, id):
    """ A pub/sub topic.
    """
    return _pubsub_topic(session, cluster_id).\
        filter(PubSubTopic.id==id).\
        one()

@query_wrapper
def pubsub_topic_list(session, cluster_id, needs_columns=False):
    """ All pub/sub topics.
    """
    return _pubsub_topic(session, cluster_id)

def pubsub_default_client(session, cluster_id, name):
    """ Returns a client ID of a given name used internally for pub/sub.
    """
    return session.query(HTTPBasicAuth.id, HTTPBasicAuth.name).\
        filter(Cluster.id==cluster_id).\
        filter(Cluster.id==HTTPBasicAuth.cluster_id).\
        filter(HTTPBasicAuth.name==name).\
        first()
# ################################################################################################################################
def _pubsub_producer(session, cluster_id, needs_columns=False):
    """ Base query for pub/sub producers along with their security definitions
    and topics.
    """
    return session.query(
        PubSubProducer.id,
        PubSubProducer.is_active,
        SecurityBase.id.label('client_id'),
        SecurityBase.name,
        SecurityBase.sec_type,
        PubSubTopic.name.label('topic_name')).\
        filter(Cluster.id==cluster_id).\
        filter(PubSubProducer.topic_id==PubSubTopic.id).\
        filter(PubSubProducer.cluster_id==Cluster.id).\
        filter(PubSubProducer.sec_def_id==SecurityBase.id).\
        order_by(SecurityBase.sec_type, SecurityBase.name)

@query_wrapper
def pubsub_producer_list(session, cluster_id, topic_name, needs_columns=False):
    """ All pub/sub producers.
    """
    # Fixed: the second argument used to be the query_wrapper decorator object
    # itself instead of the needs_columns flag.
    response = _pubsub_producer(session, cluster_id, needs_columns)
    if topic_name:
        response = response.filter(PubSubTopic.name==topic_name)
    return response
# ################################################################################################################################
def _pubsub_consumer(session, cluster_id, needs_columns=False):
    # Base query: pub/sub consumers with their security definitions, topics
    # and, if any, the HTTP/SOAP callbacks messages are delivered to
    # (the callback is an outer join, hence optional).
    return session.query(
        PubSubConsumer.id,
        PubSubConsumer.is_active,
        PubSubConsumer.max_depth,
        PubSubConsumer.sub_key,
        PubSubConsumer.delivery_mode,
        PubSubConsumer.callback_id,
        PubSubConsumer.callback_type,
        HTTPSOAP.name.label('callback_name'),
        HTTPSOAP.soap_version,
        SecurityBase.id.label('client_id'),
        SecurityBase.name,
        SecurityBase.sec_type,
        PubSubTopic.name.label('topic_name')).\
        outerjoin(HTTPSOAP, HTTPSOAP.id==PubSubConsumer.callback_id).\
        filter(Cluster.id==cluster_id).\
        filter(PubSubConsumer.topic_id==PubSubTopic.id).\
        filter(PubSubConsumer.cluster_id==Cluster.id).\
        filter(PubSubConsumer.sec_def_id==SecurityBase.id).\
        order_by(SecurityBase.sec_type, SecurityBase.name)
@query_wrapper
def pubsub_consumer_list(session, cluster_id, topic_name, needs_columns=False):
""" All pub/sub consumers.
"""
|
[
" response = _pubsub_consumer(session, cluster_id, query_wrapper)"
] | 2,251
|
lcc
|
python
| null |
9621a523cfd2361541d82c5585397ca2326e223496d5585c
|
|
/*
* AMW - Automated Middleware allows you to manage the configurations of
* your Java EE applications on an unlimited number of different environments
* with various versions, including the automated deployment of those apps.
* Copyright (C) 2013-2016 by Puzzle ITC
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU Affero General Public License as
* published by the Free Software Foundation, either version 3 of the
* License, or (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Affero General Public License for more details.
*
* You should have received a copy of the GNU Affero General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
package ch.puzzle.itc.mobiliar.business.generator.control.extracted;
import ch.puzzle.itc.mobiliar.business.releasing.boundary.ReleaseLocator;
import ch.puzzle.itc.mobiliar.business.releasing.entity.ReleaseEntity;
import ch.puzzle.itc.mobiliar.business.resourcegroup.boundary.ResourceGroupLocator;
import ch.puzzle.itc.mobiliar.business.resourcegroup.control.ResourceReleaseComparator;
import ch.puzzle.itc.mobiliar.business.resourcegroup.entity.ResourceEntity;
import ch.puzzle.itc.mobiliar.business.resourcegroup.entity.ResourceGroupEntity;
import ch.puzzle.itc.mobiliar.business.resourcerelation.entity.ConsumedResourceRelationEntity;
import ch.puzzle.itc.mobiliar.business.resourcerelation.entity.ProvidedResourceRelationEntity;
import ch.puzzle.itc.mobiliar.common.util.DefaultResourceTypeDefinition;
import javax.ejb.Stateless;
import javax.inject.Inject;
import javax.validation.constraints.NotNull;
import java.util.*;
/**
* This service contains the logic of
*
*/
@Stateless
public class ResourceDependencyResolverService {
@Inject
ResourceReleaseComparator resourceReleaseComparator;
@Inject
ReleaseLocator releaseLocator;
@Inject
ResourceGroupLocator resourceGroupLocator;
    // Orders releases by their installation-in-production date.
    // A null release, or one with no installation date, sorts before any dated
    // release; two such undated/null releases compare as equal.
    static class ReleaseComparator implements Comparator<ReleaseEntity> {
        @Override
        public int compare(ReleaseEntity arg0, ReleaseEntity arg1) {
            if (arg0 == null || arg0.getInstallationInProductionAt() == null) {
                return arg1 == null || arg1.getInstallationInProductionAt() == null ? 0 : -1;
            }
            return arg1 == null || arg1.getInstallationInProductionAt() == null ? 1 : arg0.getInstallationInProductionAt().compareTo(arg1.getInstallationInProductionAt());
        }
    }
public Set<ConsumedResourceRelationEntity> getConsumedMasterRelationsForRelease(ResourceEntity resource, ReleaseEntity release) {
Set<ConsumedResourceRelationEntity> relations = resource.getConsumedMasterRelations();
Set<ConsumedResourceRelationEntity> result = new HashSet<ConsumedResourceRelationEntity>();
if (relations != null) {
for (ConsumedResourceRelationEntity r : relations) {
if (isBestResource(r.getSlaveResource(), release)) {
result.add(r);
}
}
}
return result;
}
public Set<ProvidedResourceRelationEntity> getProvidedSlaveRelationsForRelease(ResourceEntity resource, ReleaseEntity release) {
Set<ProvidedResourceRelationEntity> relations = resource.getProvidedSlaveRelations();
Set<ProvidedResourceRelationEntity> result = new HashSet<ProvidedResourceRelationEntity>();
for (ProvidedResourceRelationEntity r : relations) {
if (isBestResource(r.getMasterResource(), release)) {
result.add(r);
}
}
return result;
}
public Set<ProvidedResourceRelationEntity> getProvidedMasterRelationsForRelease(ResourceEntity resource, ReleaseEntity release) {
Set<ProvidedResourceRelationEntity> relations = resource.getProvidedMasterRelations();
Set<ProvidedResourceRelationEntity> result = new HashSet<ProvidedResourceRelationEntity>();
for (ProvidedResourceRelationEntity r : relations) {
if (isBestResource(r.getSlaveResource(), release)) {
result.add(r);
}
}
return result;
}
    /**
     * Returns best-matching Release. 1. Priority nearest in future 2. Priority nearest in past
     *
     * @param releases Sorted set of Releases
     * @param currentDate reference date; the current time is used when null
     * @return Returns ReleaseEntity
     */
    public ReleaseEntity findMostRelevantRelease(SortedSet<ReleaseEntity> releases, Date currentDate) {
        return findMostRelevantRelease(releases, currentDate, true);
    }

    /**
     * Returns best-matching Release. (nearest in past)
     *
     * @param releases Sorted set of Releases
     * @param currentDate reference date; the current time is used when null
     * @return Returns ReleaseEntity
     */
    public ReleaseEntity findExactOrClosestPastRelease(SortedSet<ReleaseEntity> releases, Date currentDate) {
        return findMostRelevantRelease(releases, currentDate, false);
    }

    /**
     * Walks over all releases keeping the best match so far - when includingFuture
     * is true, future releases may win (see isBestMatchingFutureReleaseTime),
     * otherwise only the closest past release is considered.
     *
     * @param releases the candidate releases
     * @param currentDate reference date; the current time is used when null
     * @param includingFuture whether future releases may be selected
     * @return the best-matching release, or null if none matches
     */
    private ReleaseEntity findMostRelevantRelease(SortedSet<ReleaseEntity> releases, Date currentDate, boolean includingFuture) {
        ReleaseEntity bestMatch = null;
        // Fall back to "now" when no reference date is given.
        long currentTime = currentDate != null ? currentDate.getTime() : (new Date()).getTime();
        for (ReleaseEntity releaseEntity : releases) {
            long releaseInstallationTime = releaseEntity.getInstallationInProductionAt().getTime();
            // Both checks below compare against the best match as it stood
            // before this iteration.
            Long bestMatchingReleaseTime = bestMatch != null ? bestMatch.getInstallationInProductionAt().getTime() : null;
            if (includingFuture && isBestMatchingFutureReleaseTime(bestMatchingReleaseTime, releaseInstallationTime, currentTime)) {
                bestMatch = releaseEntity;
            }
            if (isBestMatchingPastReleaseTime(bestMatchingReleaseTime, releaseInstallationTime, currentTime)) {
                bestMatch = releaseEntity;
            }
        }
        return bestMatch;
    }
public boolean isBestMatchingPastReleaseTime(Long bestMatchingReleaseTime, long releaseInstallationTime, long currentTime) {
boolean isMatchingPastRelease = false;
if (releaseInstallationTime <= currentTime) {
// past release found
if (bestMatchingReleaseTime == null) {
// take it, it is the only one so far
isMatchingPastRelease = true;
} else if ((bestMatchingReleaseTime <= currentTime) && (releaseInstallationTime >= bestMatchingReleaseTime)) {
// take it, the existing bestMatch was an earlier date
isMatchingPastRelease = true;
}
}
return isMatchingPastRelease;
}
public Boolean isBestMatchingFutureReleaseTime(Long bestMatchingReleaseTime, long releaseInstallationTime, long currentTime) {
boolean isMatchingFutureRelease = false;
if (releaseInstallationTime >= currentTime) {
// future release found
if (bestMatchingReleaseTime == null) {
// take it, it is the only one so far
isMatchingFutureRelease = true;
} else if (bestMatchingReleaseTime < currentTime) {
// take it, the existing bestMatch was from past
isMatchingFutureRelease = true;
} else if (releaseInstallationTime < bestMatchingReleaseTime) {
// take it, the existing bestMatch was a later date
isMatchingFutureRelease = true;
}
}
return isMatchingFutureRelease;
}
    /**
     * Picks from the given resources the one whose release is most relevant for
     * the given date (see findMostRelevantRelease), or null if either argument
     * is null or no release matches.
     */
    public ResourceEntity findMostRelevantResource(List<ResourceEntity> resources, Date relevantDate) {
        if (resources == null || relevantDate == null) {
            return null;
        }
        List<ResourceEntity> allReleaseResourcesOrderedByRelease = new ArrayList<>(resources);
        Collections.sort(allReleaseResourcesOrderedByRelease, resourceReleaseComparator);
        // Collect the distinct releases of all resources into a sorted set.
        SortedSet<ReleaseEntity> releases = new TreeSet<>();
        for (ResourceEntity resourceEntity : allReleaseResourcesOrderedByRelease) {
            releases.add(resourceEntity.getRelease());
        }
        ReleaseEntity mostRelevantRelease = findMostRelevantRelease(releases, relevantDate);
        if (mostRelevantRelease != null) {
            // Return the first resource carrying the winning release.
            for (ResourceEntity resourceEntity : allReleaseResourcesOrderedByRelease) {
                if (mostRelevantRelease.equals(resourceEntity.getRelease())) {
                    return resourceEntity;
                }
            }
        }
        return null;
    }
/**
* @param resources
* @param limit
* @return all Resources that are linked to a Release which is after or equal the given limit
*/
public List<ResourceEntity> getAllFutureReleases(Set<ResourceEntity> resources, ReleaseEntity limit) {
List<ResourceEntity> allReleaseResourcesOrderedByRelease = new ArrayList<>(resources);
Collections.sort(allReleaseResourcesOrderedByRelease, resourceReleaseComparator);
List<ResourceEntity> resourcesBefore = new ArrayList<>();
for (ResourceEntity resourceEntity : allReleaseResourcesOrderedByRelease) {
if (limit != null && limit.getInstallationInProductionAt() != null
&& !limit.getInstallationInProductionAt().after(resourceEntity.getRelease().getInstallationInProductionAt())) {
resourcesBefore.add(resourceEntity);
}
}
return resourcesBefore;
}
    /**
     * analyzes if the given resource is the best matching for the given release. returns true if so, false otherwise.
     */
    private boolean isBestResource(@NotNull ResourceEntity resource, @NotNull ReleaseEntity release) {
        return resource.equals(getResourceEntityForRelease(resource.getResourceGroup(), release));
    }

    /** Convenience overload resolving the best matching resource among the group's resources. */
    public ResourceEntity getResourceEntityForRelease(@NotNull ResourceGroupEntity resourceGroup, @NotNull ReleaseEntity release) {
        return getResourceEntityForRelease(resourceGroup.getResources(), release);
    }

    /**
     * Used by Angular-Rest
     * @param resourceGroupId
     * @param releaseId
     * @return the best matching resource of the group for the given release
     */
    public ResourceEntity getResourceEntityForRelease(@NotNull Integer resourceGroupId, @NotNull Integer releaseId) {
        ResourceGroupEntity resourceGroup = resourceGroupLocator.getResourceGroupForCreateDeploy(resourceGroupId);
        return getResourceEntityForRelease(resourceGroup.getResources(), releaseLocator.getReleaseById(releaseId));
    }

    /**
     * Picks from the given resources the one whose release best matches the requested
     * release: an exact release match wins, otherwise the closest earlier release.
     */
    public ResourceEntity getResourceEntityForRelease(@NotNull Collection<ResourceEntity> resources, @NotNull ReleaseEntity release) {
        ReleaseComparator comparator = new ReleaseComparator();
        ResourceEntity bestResource = null;
        for (ResourceEntity resource : resources) {
            int compareValue = comparator.compare(resource.getRelease(), release);
            //If the resource group contains a matching release, this is the one we would like to use
            if (compareValue == 0) {
                return resource;
            }
            //Otherwise, we're only interested in earlier releases than the requested one
            else if (compareValue < 0) {
                if (comparator.compare(resource.getRelease(), bestResource == null ? null : bestResource.getRelease()) > 0) {
                    //If the release date of the current resource is later than the best release we've found yet, it is better suited and is our new "best resource"
                    bestResource = resource;
                }
            }
        }
        return bestResource;
    }
/**
* Expects a set of resource entities which possibly contains multiple instances for one resource group.
* Returns a subset of the given list of resource entities by extracting the best matching resource entity dependent on the given release
*
* @param resourceEntities
* @param release
* @return
*/
public Set<ResourceEntity> getResourceEntitiesByRelease(Collection<ResourceEntity> resourceEntities, ReleaseEntity release) {
Set<ResourceGroupEntity> handledResourceGroups = new HashSet<ResourceGroupEntity>();
Set<ResourceEntity> result = new HashSet<ResourceEntity>();
if (resourceEntities != null) {
for (ResourceEntity r : resourceEntities) {
if (!handledResourceGroups.contains(r.getResourceGroup())) {
ResourceEntity resourceForRelease = getResourceEntityForRelease(r.getResourceGroup(), release);
if (resourceForRelease != null) {
result.add(resourceForRelease);
}
handledResourceGroups.add(r.getResourceGroup());
}
}
}
return result;
}
//TODO extract logic from the resource entity and place it here
public Set<ResourceEntity> getConsumedRelatedResourcesByResourceType(ResourceEntity resource, DefaultResourceTypeDefinition defaultResourceTypeDefinition, ReleaseEntity release) {
List<ResourceEntity> resources = resource.getConsumedRelatedResourcesByResourceType(defaultResourceTypeDefinition);
if (resources == null) {
return null;
}
Set<ResourceEntity> result = new LinkedHashSet<ResourceEntity>();
for (ResourceEntity r : resources) {
ResourceEntity resourceEntityForRelease = getResourceEntityForRelease(r.getResourceGroup(), release);
|
[
" if (resourceEntityForRelease != null) {"
] | 1,149
|
lcc
|
java
| null |
693b0b0207d00086cfcee5f5943068055d8630c9db62e018
|
|
# Copyright 2014-2016 The ODL development group
#
# This file is part of ODL.
#
# ODL is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# ODL is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with ODL. If not, see <http://www.gnu.org/licenses/>.
# Imports for common Python 2/3 codebase
from __future__ import print_function, division, absolute_import
from future import standard_library
standard_library.install_aliases()
import numpy as np
import scipy as sp
from odl.discr import ResizingOperator
from odl.trafos import FourierTransform, PYFFTW_AVAILABLE
__all__ = ('fbp_op', 'fbp_filter_op', 'tam_danielson_window',
'parker_weighting')
def _axis_in_detector(geometry):
"""A vector in the detector plane that points along the rotation axis."""
du = geometry.det_init_axes[0]
dv = geometry.det_init_axes[1]
axis = geometry.axis
c = np.array([np.vdot(axis, du), np.vdot(axis, dv)])
cnorm = np.linalg.norm(c)
# Check for numerical errors
assert cnorm != 0
return c / cnorm
def _rotation_direction_in_detector(geometry):
"""A vector in the detector plane that points in the rotation direction."""
du = geometry.det_init_axes[0]
dv = geometry.det_init_axes[1]
axis = geometry.axis
det_normal = np.cross(du, dv)
rot_dir = np.cross(axis, det_normal)
c = np.array([np.vdot(rot_dir, du), np.vdot(rot_dir, dv)])
cnorm = np.linalg.norm(c)
# Check for numerical errors
assert cnorm != 0
return c / cnorm
def _fbp_filter(norm_freq, filter_type, frequency_scaling):
"""Create a smoothing filter for FBP.
Parameters
----------
norm_freq : `array-like`
Frequencies normalized to lie in the interval [0, 1].
filter_type : {'Ram-Lak', 'Shepp-Logan', 'Cosine', 'Hamming', 'Hann'}
The type of filter to be used.
frequency_scaling : float
Scaling of the frequencies for the filter. All frequencies are scaled
by this number, any relative frequency above ``frequency_scaling`` is
set to 0.
Returns
-------
smoothing_filter : `numpy.ndarray`
Examples
--------
Create an FBP filter
>>> norm_freq = np.linspace(0, 1, 10)
>>> filt = _fbp_filter(norm_freq,
... filter_type='Hann',
... frequency_scaling=0.8)
"""
if filter_type == 'Ram-Lak':
filt = 1
elif filter_type == 'Shepp-Logan':
filt = np.sinc(norm_freq / (2 * frequency_scaling))
elif filter_type == 'Cosine':
filt = np.cos(norm_freq * np.pi / (2 * frequency_scaling))
elif filter_type == 'Hamming':
filt = 0.54 + 0.46 * np.cos(norm_freq * np.pi / (frequency_scaling))
elif filter_type == 'Hann':
filt = np.cos(norm_freq * np.pi / (2 * frequency_scaling)) ** 2
else:
raise ValueError('unknown `filter_type` ({})'
''.format(filter_type))
indicator = (norm_freq <= frequency_scaling)
return indicator * filt
def tam_danielson_window(ray_trafo, smoothing_width=0.05, n_half_rot=1):
"""Create Tam-Danielson window from a `RayTransform`.
The Tam-Danielson window is an indicator function on the minimal set of
data needed to reconstruct a volume from given data. It is useful in
analytic reconstruction methods such as FBP to give a more accurate
reconstruction.
See TAM1998_ for more information.
Parameters
----------
ray_trafo : `RayTransform`
The ray transform for which to compute the window.
smoothing_width : positive float, optional
Width of the smoothing applied to the window's edges given as a
fraction of the width of the full window.
n_half_rot : odd int
Total number of half rotations to include in the window. Values larger
than 1 should be used if the pitch is much smaller than the detector
height.
Returns
-------
tam_danielson_window : ``ray_trafo.range`` element
See Also
--------
fbp_op : Filtered back-projection operator from `RayTransform`
tam_danielson_window : Weighting for short scan data
HelicalConeFlatGeometry : The primary use case for this window function.
References
----------
.. _TAM1998: http://iopscience.iop.org/article/10.1088/0031-9155/43/4/028
"""
# Extract parameters
src_radius = ray_trafo.geometry.src_radius
det_radius = ray_trafo.geometry.det_radius
pitch = ray_trafo.geometry.pitch
if pitch == 0:
raise ValueError('Tam-Danielson window is only defined with '
'`pitch!=0`')
smoothing_width = float(smoothing_width)
if smoothing_width < 0:
raise ValueError('`smoothing_width` should be a positive float')
if n_half_rot % 2 != 1:
raise ValueError('`n_half_rot` must be odd, got {}'.format(n_half_rot))
# Find projection of axis on detector
axis_proj = _axis_in_detector(ray_trafo.geometry)
rot_dir = _rotation_direction_in_detector(ray_trafo.geometry)
# Find distance from projection of rotation axis for each pixel
dx = (rot_dir[0] * ray_trafo.range.meshgrid[1] +
rot_dir[1] * ray_trafo.range.meshgrid[2])
# Compute angles
phi = np.arctan(dx / (src_radius + det_radius))
theta = phi * 2
# Compute lower and upper bound
source_to_line_distance = src_radius + src_radius * np.cos(theta)
scale = (src_radius + det_radius) / source_to_line_distance
source_to_line_lower = pitch * (theta - n_half_rot * np.pi) / (2 * np.pi)
source_to_line_upper = pitch * (theta + n_half_rot * np.pi) / (2 * np.pi)
lower_proj = source_to_line_lower * scale
upper_proj = source_to_line_upper * scale
# Compute a smoothed width
interval = (upper_proj - lower_proj)
width = interval * smoothing_width / np.sqrt(2)
# Create window function
def window_fcn(x):
x_along_axis = axis_proj[0] * x[1] + axis_proj[1] * x[2]
if smoothing_width != 0:
lower_wndw = 0.5 * (
1 + sp.special.erf((x_along_axis - lower_proj) / width))
upper_wndw = 0.5 * (
1 + sp.special.erf((upper_proj - x_along_axis) / width))
else:
lower_wndw = (x_along_axis >= lower_proj)
upper_wndw = (x_along_axis <= upper_proj)
return lower_wndw * upper_wndw
return ray_trafo.range.element(window_fcn) / n_half_rot
def parker_weighting(ray_trafo, q=0.25):
"""Create parker weighting for a `RayTransform`.
Parker weighting is a weighting function that ensures that oversampled
fan/cone beam data are weighted such that each line has unit weight. It is
useful in analytic reconstruction methods such as FBP to give a more
accurate result and can improve convergence rates for iterative methods.
See the article `Parker weights revisited`_ for more information.
Parameters
----------
ray_trafo : `RayTransform`
The ray transform for which to compute the weights.
q : float
Parameter controlling the speed of the roll-off at the edges of the
weighting. 1.0 gives the classical Parker weighting, while smaller
values in general lead to lower noise but stronger discretization
artifacts.
Returns
-------
parker_weighting : ``ray_trafo.range`` element
See Also
--------
fbp_op : Filtered back-projection operator from `RayTransform`
tam_danielson_window : Indicator function for helical data
FanFlatGeometry : Use case in 2d
CircularConeFlatGeometry : Use case in 3d
References
----------
.. _Parker weights revisited: https://www.ncbi.nlm.nih.gov/pubmed/11929021
"""
# Note: Parameter names taken from WES2002
# Extract parameters
src_radius = ray_trafo.geometry.src_radius
det_radius = ray_trafo.geometry.det_radius
ndim = ray_trafo.geometry.ndim
angles = ray_trafo.range.meshgrid[0]
min_rot_angle = ray_trafo.geometry.motion_partition.min_pt
alen = ray_trafo.geometry.motion_params.length
# Parker weightings are not defined for helical geometries
if ray_trafo.geometry.ndim != 2:
pitch = ray_trafo.geometry.pitch
if pitch != 0:
raise ValueError('Parker weighting window is only defined with '
'`pitch==0`')
# Find distance from projection of rotation axis for each pixel
if ndim == 2:
|
[
" dx = ray_trafo.range.meshgrid[1]"
] | 1,057
|
lcc
|
python
| null |
6515bcd028819c4ca6bc104bc8610724275c7ed4d429f30c
|
|
# coding=utf-8
# Author: Nic Wolfe <nic@wolfeden.ca>
# URL: http://code.google.com/p/sickbeard/
#
# This file is part of SickRage.
#
# SickRage is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# SickRage is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with SickRage. If not, see <http://www.gnu.org/licenses/>.
from __future__ import with_statement
import datetime
import os
import re
import itertools
import urllib
import sickbeard
import requests
from sickbeard import helpers, classes, logger, db
from sickbeard.common import MULTI_EP_RESULT, SEASON_RESULT, USER_AGENT
from sickbeard import tvcache
from sickbeard import encodingKludge as ek
from sickbeard.exceptions import ex
from sickbeard.name_parser.parser import NameParser, InvalidNameException, InvalidShowException
from sickbeard.common import Quality
from hachoir_parser import createParser
from base64 import b16encode, b32decode
class GenericProvider:
NZB = "nzb"
TORRENT = "torrent"
def __init__(self, name):
# these need to be set in the subclass
self.providerType = None
self.name = name
self.proxy = ProviderProxy()
self.urls = {}
self.url = ''
self.show = None
self.supportsBacklog = False
self.supportsAbsoluteNumbering = False
self.anime_only = False
self.search_mode = None
self.search_fallback = False
self.enable_daily = False
self.enable_backlog = False
self.cache = tvcache.TVCache(self)
self.session = requests.session()
self.headers = {'User-Agent': USER_AGENT}
def getID(self):
return GenericProvider.makeID(self.name)
@staticmethod
def makeID(name):
return re.sub("[^\w\d_]", "_", name.strip().lower())
def imageName(self):
return self.getID() + '.png'
def _checkAuth(self):
return True
def _doLogin(self):
return True
def isActive(self):
if self.providerType == GenericProvider.NZB and sickbeard.USE_NZBS:
return self.isEnabled()
elif self.providerType == GenericProvider.TORRENT and sickbeard.USE_TORRENTS:
return self.isEnabled()
else:
return False
def isEnabled(self):
"""
This should be overridden and should return the config setting eg. sickbeard.MYPROVIDER
"""
return False
def getResult(self, episodes):
"""
Returns a result of the correct type for this provider
"""
if self.providerType == GenericProvider.NZB:
result = classes.NZBSearchResult(episodes)
elif self.providerType == GenericProvider.TORRENT:
result = classes.TorrentSearchResult(episodes)
else:
result = classes.SearchResult(episodes)
result.provider = self
return result
def getURL(self, url, post_data=None, params=None, timeout=30, json=False):
"""
By default this is just a simple urlopen call but this method should be overridden
for providers with special URL requirements (like cookies)
"""
# check for auth
if not self._doLogin():
return
if self.proxy.isEnabled():
self.headers.update({'Referer': self.proxy.getProxyURL()})
return helpers.getURL(self.proxy._buildURL(url), post_data=post_data, params=params, headers=self.headers, timeout=timeout,
session=self.session, json=json)
def downloadResult(self, result):
"""
Save the result to disk.
"""
# check for auth
if not self._doLogin():
return False
if self.providerType == GenericProvider.TORRENT:
try:
torrent_hash = re.findall('urn:btih:([\w]{32,40})', result.url)[0].upper()
if len(torrent_hash) == 32:
torrent_hash = b16encode(b32decode(torrent_hash)).lower()
if not torrent_hash:
logger.log("Unable to extract torrent hash from link: " + ex(result.url), logger.ERROR)
return False
urls = [
'http://torcache.net/torrent/' + torrent_hash + '.torrent',
'http://torrage.com/torrent/' + torrent_hash + '.torrent',
'http://zoink.it/torrent/' + torrent_hash + '.torrent',
]
except:
urls = [result.url]
filename = ek.ek(os.path.join, sickbeard.TORRENT_DIR,
helpers.sanitizeFileName(result.name) + '.' + self.providerType)
elif self.providerType == GenericProvider.NZB:
urls = [result.url]
filename = ek.ek(os.path.join, sickbeard.NZB_DIR,
helpers.sanitizeFileName(result.name) + '.' + self.providerType)
else:
return
for url in urls:
if helpers.download_file(url, filename, session=self.session):
logger.log(u"Downloading a result from " + self.name + " at " + url)
if self.providerType == GenericProvider.TORRENT:
logger.log(u"Saved magnet link to " + filename, logger.INFO)
else:
logger.log(u"Saved result to " + filename, logger.INFO)
if self._verify_download(filename):
return True
logger.log(u"Failed to download result", logger.WARNING)
return False
def _verify_download(self, file_name=None):
"""
Checks the saved file to see if it was actually valid, if not then consider the download a failure.
"""
# primitive verification of torrents, just make sure we didn't get a text file or something
if self.providerType == GenericProvider.TORRENT:
try:
parser = createParser(file_name)
if parser:
mime_type = parser._getMimeType()
try:
parser.stream._input.close()
except:
pass
if mime_type == 'application/x-bittorrent':
return True
except Exception as e:
logger.log(u"Failed to validate torrent file: " + ex(e), logger.DEBUG)
logger.log(u"Result is not a valid torrent file", logger.WARNING)
return False
return True
def searchRSS(self, episodes):
return self.cache.findNeededEpisodes(episodes)
def getQuality(self, item, anime=False):
"""
Figures out the quality of the given RSS item node
item: An elementtree.ElementTree element representing the <item> tag of the RSS feed
Returns a Quality value obtained from the node's data
"""
(title, url) = self._get_title_and_url(item)
quality = Quality.sceneQuality(title, anime)
return quality
def _doSearch(self, search_params, search_mode='eponly', epcount=0, age=0):
return []
def _get_season_search_strings(self, episode):
return []
def _get_episode_search_strings(self, eb_obj, add_string=''):
return []
def _get_title_and_url(self, item):
"""
Retrieves the title and URL data from the item XML node
item: An elementtree.ElementTree element representing the <item> tag of the RSS feed
Returns: A tuple containing two strings representing title and URL respectively
"""
title = item.get('title')
if title:
title = u'' + title.replace(' ', '.')
url = item.get('link')
if url:
url = url.replace('&', '&')
return title, url
def findSearchResults(self, show, episodes, search_mode, manualSearch=False):
self._checkAuth()
self.show = show
results = {}
itemList = []
searched_scene_season = None
for epObj in episodes:
# search cache for episode result
cacheResult = self.cache.searchCache(epObj, manualSearch)
if cacheResult:
if epObj.episode not in results:
results[epObj.episode] = cacheResult
else:
results[epObj.episode].extend(cacheResult)
# found result, search next episode
continue
# skip if season already searched
if len(episodes) > 1 and searched_scene_season == epObj.scene_season:
continue
# mark season searched for season pack searches so we can skip later on
searched_scene_season = epObj.scene_season
if len(episodes) > 1:
# get season search results
for curString in self._get_season_search_strings(epObj):
itemList += self._doSearch(curString, search_mode, len(episodes))
else:
# get single episode search results
for curString in self._get_episode_search_strings(epObj):
itemList += self._doSearch(curString, 'eponly', len(episodes))
# if we found what we needed already from cache then return results and exit
if len(results) == len(episodes):
return results
# sort list by quality
if len(itemList):
items = {}
itemsUnknown = []
for item in itemList:
quality = self.getQuality(item, anime=show.is_anime)
if quality == Quality.UNKNOWN:
itemsUnknown += [item]
else:
if quality not in items:
items[quality] = [item]
else:
items[quality].append(item)
itemList = list(itertools.chain(*[v for (k, v) in sorted(items.items(), reverse=True)]))
itemList += itemsUnknown if itemsUnknown else []
# filter results
cl = []
for item in itemList:
(title, url) = self._get_title_and_url(item)
# parse the file name
try:
myParser = NameParser(False, convert=True)
parse_result = myParser.parse(title)
except InvalidNameException:
logger.log(u"Unable to parse the filename " + title + " into a valid episode", logger.DEBUG)
continue
except InvalidShowException:
logger.log(u"Unable to parse the filename " + title + " into a valid show", logger.DEBUG)
continue
showObj = parse_result.show
quality = parse_result.quality
release_group = parse_result.release_group
version = parse_result.version
addCacheEntry = False
if not (showObj.air_by_date or showObj.sports):
if search_mode == 'sponly':
if len(parse_result.episode_numbers):
logger.log(
u"This is supposed to be a season pack search but the result " + title + " is not a valid season pack, skipping it",
logger.DEBUG)
addCacheEntry = True
if len(parse_result.episode_numbers) and (
parse_result.season_number not in set([ep.season for ep in episodes]) or not [ep for ep in episodes if
ep.scene_episode in parse_result.episode_numbers]):
logger.log(
u"The result " + title + " doesn't seem to be a valid episode that we are trying to snatch, ignoring",
logger.DEBUG)
addCacheEntry = True
else:
if not len(parse_result.episode_numbers) and parse_result.season_number and not [ep for ep in
episodes if
ep.season == parse_result.season_number and ep.episode in parse_result.episode_numbers]:
logger.log(
u"The result " + title + " doesn't seem to be a valid season that we are trying to snatch, ignoring",
logger.DEBUG)
addCacheEntry = True
elif len(parse_result.episode_numbers) and not [ep for ep in episodes if
ep.season == parse_result.season_number and ep.episode in parse_result.episode_numbers]:
logger.log(
u"The result " + title + " doesn't seem to be a valid episode that we are trying to snatch, ignoring",
logger.DEBUG)
addCacheEntry = True
if not addCacheEntry:
# we just use the existing info for normal searches
actual_season = parse_result.season_number
actual_episodes = parse_result.episode_numbers
else:
if not (parse_result.is_air_by_date):
logger.log(
u"This is supposed to be a date search but the result " + title + " didn't parse as one, skipping it",
logger.DEBUG)
addCacheEntry = True
else:
airdate = parse_result.air_date.toordinal()
myDB = db.DBConnection()
sql_results = myDB.select(
"SELECT season, episode FROM tv_episodes WHERE showid = ? AND airdate = ?",
[showObj.indexerid, airdate])
if len(sql_results) != 1:
logger.log(
u"Tried to look up the date for the episode " + title + " but the database didn't give proper results, skipping it",
logger.WARNING)
addCacheEntry = True
if not addCacheEntry:
actual_season = int(sql_results[0]["season"])
actual_episodes = [int(sql_results[0]["episode"])]
# add parsed result to cache for usage later on
if addCacheEntry:
logger.log(u"Adding item from search to cache: " + title, logger.DEBUG)
ci = self.cache._addCacheEntry(title, url, parse_result=parse_result)
if ci is not None:
cl.append(ci)
continue
# make sure we want the episode
wantEp = True
for epNo in actual_episodes:
if not showObj.wantEpisode(actual_season, epNo, quality, manualSearch):
wantEp = False
break
if not wantEp:
logger.log(
u"Ignoring result " + title + " because we don't want an episode that is " +
Quality.qualityStrings[
quality], logger.DEBUG)
continue
logger.log(u"Found result " + title + " at " + url, logger.DEBUG)
# make a result object
epObj = []
for curEp in actual_episodes:
epObj.append(showObj.getEpisode(actual_season, curEp))
result = self.getResult(epObj)
result.show = showObj
result.url = url
result.name = title
result.quality = quality
result.release_group = release_group
result.content = None
result.version = version
if len(epObj) == 1:
epNum = epObj[0].episode
logger.log(u"Single episode result.", logger.DEBUG)
elif len(epObj) > 1:
epNum = MULTI_EP_RESULT
logger.log(u"Separating multi-episode result to check for later - result contains episodes: " + str(
parse_result.episode_numbers), logger.DEBUG)
elif len(epObj) == 0:
epNum = SEASON_RESULT
logger.log(u"Separating full season result to check for later", logger.DEBUG)
if epNum not in results:
results[epNum] = [result]
else:
results[epNum].append(result)
# check if we have items to add to cache
if len(cl) > 0:
myDB = self.cache._getDB()
myDB.mass_action(cl)
return results
def findPropers(self, search_date=None):
results = self.cache.listPropers(search_date)
return [classes.Proper(x['name'], x['url'], datetime.datetime.fromtimestamp(x['time']), self.show) for x in
results]
def seedRatio(self):
'''
Provider should override this value if custom seed ratio enabled
It should return the value of the provider seed ratio
'''
return ''
class NZBProvider(GenericProvider):
def __init__(self, name):
GenericProvider.__init__(self, name)
self.providerType = GenericProvider.NZB
class TorrentProvider(GenericProvider):
def __init__(self, name):
GenericProvider.__init__(self, name)
self.providerType = GenericProvider.TORRENT
class ProviderProxy:
def __init__(self):
self.Type = 'GlypeProxy'
self.param = 'browse.php?u='
self.option = '&b=32&f=norefer'
self.enabled = False
self.url = None
self.urls = {
'getprivate.eu (NL)': 'http://getprivate.eu/',
'hideme.nl (NL)': 'http://hideme.nl/',
'proxite.eu (DE)': 'http://proxite.eu/',
'interproxy.net (EU)': 'http://interproxy.net/',
}
def isEnabled(self):
""" Return True if we Choose to call TPB via Proxy """
return self.enabled
def getProxyURL(self):
""" Return the Proxy URL Choosen via Provider Setting """
return str(self.url)
def _buildURL(self, url):
""" Return the Proxyfied URL of the page """
if self.isEnabled():
url = self.getProxyURL() + self.param + urllib.quote_plus(url) + self.option
logger.log(u"Proxified URL: " + url, logger.DEBUG)
return url
def _buildRE(self, regx):
""" Return the Proxyfied RE string """
if self.isEnabled():
regx = re.sub('//1', self.option, regx).replace('&', '&')
|
[
" logger.log(u\"Proxified REGEX: \" + regx, logger.DEBUG)"
] | 1,751
|
lcc
|
python
| null |
73869275419cb7e29c3d7b53bed42f41e39223637ced7d1d
|
|
//
// System.Web.UI.WebControls.FontUnit.cs
//
// Authors:
// Miguel de Icaza (miguel@novell.com)
// Ben Maurer (bmaurer@ximian.com).
//
// Copyright (C) 2005-2010 Novell, Inc (http://www.novell.com)
//
// Permission is hereby granted, free of charge, to any person obtaining
// a copy of this software and associated documentation files (the
// "Software"), to deal in the Software without restriction, including
// without limitation the rights to use, copy, modify, merge, publish,
// distribute, sublicense, and/or sell copies of the Software, and to
// permit persons to whom the Software is furnished to do so, subject to
// the following conditions:
//
// The above copyright notice and this permission notice shall be
// included in all copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
// EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
// NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
// LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
// OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
// WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
//
using System.Threading;
using System.Globalization;
using System.ComponentModel;
using System.Security.Permissions;
using System.Web.Util;
namespace System.Web.UI.WebControls
{
[TypeConverter (typeof (FontUnitConverter))]
[Serializable]
public struct FontUnit
{
FontSize type;
Unit unit;
public static readonly FontUnit Empty;
public static readonly FontUnit Smaller = new FontUnit (FontSize.Smaller);
public static readonly FontUnit Larger = new FontUnit (FontSize.Larger);
public static readonly FontUnit XXSmall = new FontUnit (FontSize.XXSmall);
public static readonly FontUnit XSmall = new FontUnit (FontSize.XSmall);
public static readonly FontUnit Small = new FontUnit (FontSize.Small);
public static readonly FontUnit Medium = new FontUnit (FontSize.Medium);
public static readonly FontUnit Large = new FontUnit (FontSize.Large);
public static readonly FontUnit XLarge = new FontUnit (FontSize.XLarge);
public static readonly FontUnit XXLarge = new FontUnit (FontSize.XXLarge);
static string [] font_size_names = new string [] {null, null, "Smaller", "Larger", "XX-Small", "X-Small", "Small",
"Medium", "Large", "X-Large", "XX-Large" };
public FontUnit (FontSize type)
{
int t = (int) type;
if (t < 0 || t > (int)FontSize.XXLarge)
throw new ArgumentOutOfRangeException ("type");
this.type = type;
if (type == FontSize.AsUnit)
unit = new Unit (10, UnitType.Point);
else
unit = Unit.Empty;
}
public FontUnit (int value) : this (new Unit (value, UnitType.Point))
{
}
public FontUnit (double value) : this (new Unit (value, UnitType.Point))
{
}
public FontUnit (double value, UnitType type) : this (new Unit (value, type))
{
}
public FontUnit (Unit value)
{
type = FontSize.AsUnit;
unit = value;
}
public FontUnit (string value) : this (value, Thread.CurrentThread.CurrentCulture)
{}
public FontUnit (string value, CultureInfo culture)
{
if (String.IsNullOrEmpty (value)) {
type = FontSize.NotSet;
unit = Unit.Empty;
return;
}
switch (value.ToLower (Helpers.InvariantCulture)) {
case "smaller":
type = FontSize.Smaller;
break;
case "larger":
type = FontSize.Larger;
break;
case "xxsmall":
type = FontSize.XXSmall;
break;
case "xx-small":
type = FontSize.XXSmall;
break;
case "xsmall":
type = FontSize.XSmall;
break;
case "x-small":
type = FontSize.XSmall;
break;
case "small":
type = FontSize.Small;
break;
case "medium":
type = FontSize.Medium;
break;
case "large":
type = FontSize.Large;
break;
case "xlarge":
type = FontSize.XLarge;
break;
case "x-large":
type = FontSize.XLarge;
break;
case "xxlarge":
type = FontSize.XXLarge;
break;
case "xx-large":
type = FontSize.XXLarge;
break;
default:
type = FontSize.AsUnit;
unit = new Unit (value, culture);
return;
}
unit = Unit.Empty;
}
public bool IsEmpty {
get { return type == FontSize.NotSet; }
}
public FontSize Type {
get { return type; }
}
public Unit Unit {
get { return unit; }
}
public static FontUnit Parse (string s)
{
return new FontUnit (s);
}
public static FontUnit Parse (string s, CultureInfo culture)
{
return new FontUnit (s, culture);
}
public static FontUnit Point (int n)
{
return new FontUnit (n);
}
public override bool Equals (object obj)
{
if (obj is FontUnit) {
FontUnit other = (FontUnit) obj;
return (other.type == type && other.unit == unit);
}
return false;
}
public override int GetHashCode ()
{
return type.GetHashCode () ^ unit.GetHashCode ();
}
public static bool operator == (FontUnit left, FontUnit right)
{
return left.type == right.type && left.unit == right.unit;
}
public static bool operator != (FontUnit left, FontUnit right)
{
return left.type != right.type || left.unit != right.unit;
}
public static implicit operator FontUnit (int n)
{
return new FontUnit (n);
}
public string ToString (IFormatProvider fmt)
{
|
[
"\t\t\tif (type == FontSize.NotSet)"
] | 726
|
lcc
|
csharp
| null |
a1362f2667219e18121103e3f58abc48d5b819038a41374c
|
|
using System;
using System.Collections;
using Nequeo.Cryptography.Key.Crypto.Parameters;
using Nequeo.Cryptography.Key.Math;
using Nequeo.Cryptography.Key.Utilities;
namespace Nequeo.Cryptography.Key.Crypto.Engines
{
/**
* NaccacheStern Engine. For details on this cipher, please see
* http://www.gemplus.com/smart/rd/publications/pdf/NS98pkcs.pdf
*/
public class NaccacheSternEngine
: IAsymmetricBlockCipher
{
private bool forEncryption;
private NaccacheSternKeyParameters key;
private IList[] lookup = null;
private bool debug = false;
public string AlgorithmName
{
get { return "NaccacheStern"; }
}
/**
* Initializes this algorithm. Must be called before all other Functions.
*
* @see Nequeo.Cryptography.Key.crypto.AsymmetricBlockCipher#init(bool,
* Nequeo.Cryptography.Key.crypto.CipherParameters)
*/
public void Init(
bool forEncryption,
ICipherParameters parameters)
{
this.forEncryption = forEncryption;
if (parameters is ParametersWithRandom)
{
parameters = ((ParametersWithRandom) parameters).Parameters;
}
key = (NaccacheSternKeyParameters)parameters;
// construct lookup table for faster decryption if necessary
if (!this.forEncryption)
{
if (debug)
{
Console.WriteLine("Constructing lookup Array");
}
NaccacheSternPrivateKeyParameters priv = (NaccacheSternPrivateKeyParameters)key;
IList primes = priv.SmallPrimesList;
lookup = new IList[primes.Count];
for (int i = 0; i < primes.Count; i++)
{
BigInteger actualPrime = (BigInteger) primes[i];
int actualPrimeValue = actualPrime.IntValue;
lookup[i] = Platform.CreateArrayList(actualPrimeValue);
lookup[i].Add(BigInteger.One);
if (debug)
{
Console.WriteLine("Constructing lookup ArrayList for " + actualPrimeValue);
}
BigInteger accJ = BigInteger.Zero;
for (int j = 1; j < actualPrimeValue; j++)
{
// BigInteger bigJ = BigInteger.ValueOf(j);
// accJ = priv.PhiN.Multiply(bigJ);
accJ = accJ.Add(priv.PhiN);
BigInteger comp = accJ.Divide(actualPrime);
lookup[i].Add(priv.G.ModPow(comp, priv.Modulus));
}
}
}
}
public bool Debug
{
set { this.debug = value; }
}
/**
* Returns the input block size of this algorithm.
*
* @see Nequeo.Cryptography.Key.crypto.AsymmetricBlockCipher#GetInputBlockSize()
*/
public int GetInputBlockSize()
{
if (forEncryption)
{
// We can only encrypt values up to lowerSigmaBound
return (key.LowerSigmaBound + 7) / 8 - 1;
}
else
{
// We pad to modulus-size bytes for easier decryption.
// return key.Modulus.ToByteArray().Length;
return key.Modulus.BitLength / 8 + 1;
}
}
/**
* Returns the output block size of this algorithm.
*
* @see Nequeo.Cryptography.Key.crypto.AsymmetricBlockCipher#GetOutputBlockSize()
*/
public int GetOutputBlockSize()
{
if (forEncryption)
{
// encrypted Data is always padded up to modulus size
// return key.Modulus.ToByteArray().Length;
return key.Modulus.BitLength / 8 + 1;
}
else
{
// decrypted Data has upper limit lowerSigmaBound
return (key.LowerSigmaBound + 7) / 8 - 1;
}
}
/**
* Process a single Block using the Naccache-Stern algorithm.
*
* @see Nequeo.Cryptography.Key.crypto.AsymmetricBlockCipher#ProcessBlock(byte[],
* int, int)
*/
public byte[] ProcessBlock(
byte[] inBytes,
int inOff,
int length)
{
if (key == null)
throw new InvalidOperationException("NaccacheStern engine not initialised");
if (length > (GetInputBlockSize() + 1))
throw new DataLengthException("input too large for Naccache-Stern cipher.\n");
if (!forEncryption)
{
// At decryption make sure that we receive padded data blocks
if (length < GetInputBlockSize())
{
throw new InvalidCipherTextException("BlockLength does not match modulus for Naccache-Stern cipher.\n");
}
}
// transform input into BigInteger
BigInteger input = new BigInteger(1, inBytes, inOff, length);
if (debug)
{
Console.WriteLine("input as BigInteger: " + input);
}
byte[] output;
if (forEncryption)
{
output = Encrypt(input);
}
else
{
IList plain = Platform.CreateArrayList();
NaccacheSternPrivateKeyParameters priv = (NaccacheSternPrivateKeyParameters)key;
IList primes = priv.SmallPrimesList;
// Get Chinese Remainders of CipherText
for (int i = 0; i < primes.Count; i++)
{
BigInteger exp = input.ModPow(priv.PhiN.Divide((BigInteger)primes[i]), priv.Modulus);
IList al = lookup[i];
if (lookup[i].Count != ((BigInteger)primes[i]).IntValue)
{
if (debug)
{
Console.WriteLine("Prime is " + primes[i] + ", lookup table has size " + al.Count);
}
throw new InvalidCipherTextException("Error in lookup Array for "
+ ((BigInteger)primes[i]).IntValue
+ ": Size mismatch. Expected ArrayList with length "
+ ((BigInteger)primes[i]).IntValue + " but found ArrayList of length "
+ lookup[i].Count);
}
int lookedup = al.IndexOf(exp);
if (lookedup == -1)
{
if (debug)
{
Console.WriteLine("Actual prime is " + primes[i]);
Console.WriteLine("Decrypted value is " + exp);
Console.WriteLine("LookupList for " + primes[i] + " with size " + lookup[i].Count
+ " is: ");
for (int j = 0; j < lookup[i].Count; j++)
{
Console.WriteLine(lookup[i][j]);
}
}
throw new InvalidCipherTextException("Lookup failed");
}
plain.Add(BigInteger.ValueOf(lookedup));
}
BigInteger test = chineseRemainder(plain, primes);
// Should not be used as an oracle, so reencrypt output to see
// if it corresponds to input
// this breaks probabilisic encryption, so disable it. Anyway, we do
// use the first n primes for key generation, so it is pretty easy
// to guess them. But as stated in the paper, this is not a security
// breach. So we can just work with the correct sigma.
// if (debug) {
// Console.WriteLine("Decryption is " + test);
// }
// if ((key.G.ModPow(test, key.Modulus)).Equals(input)) {
// output = test.ToByteArray();
// } else {
// if(debug){
// Console.WriteLine("Engine seems to be used as an oracle,
// returning null");
// }
// output = null;
// }
output = test.ToByteArray();
}
return output;
}
/**
* Encrypts a BigInteger aka Plaintext with the public key.
*
* @param plain
* The BigInteger to encrypt
* @return The byte[] representation of the encrypted BigInteger (i.e.
* crypted.toByteArray())
*/
public byte[] Encrypt(
BigInteger plain)
{
// Always return modulus size values 0-padded at the beginning
// 0-padding at the beginning is correctly parsed by BigInteger :)
// byte[] output = key.Modulus.ToByteArray();
// Array.Clear(output, 0, output.Length);
byte[] output = new byte[key.Modulus.BitLength / 8 + 1];
byte[] tmp = key.G.ModPow(plain, key.Modulus).ToByteArray();
Array.Copy(tmp, 0, output, output.Length - tmp.Length, tmp.Length);
if (debug)
{
Console.WriteLine("Encrypted value is: " + new BigInteger(output));
}
return output;
}
/**
* Adds the contents of two encrypted blocks mod sigma
*
* @param block1
* the first encrypted block
* @param block2
* the second encrypted block
* @return encrypt((block1 + block2) mod sigma)
* @throws InvalidCipherTextException
*/
public byte[] AddCryptedBlocks(
byte[] block1,
byte[] block2)
{
// check for correct blocksize
if (forEncryption)
{
if ((block1.Length > GetOutputBlockSize())
|| (block2.Length > GetOutputBlockSize()))
{
throw new InvalidCipherTextException(
"BlockLength too large for simple addition.\n");
}
}
else
{
if ((block1.Length > GetInputBlockSize())
|
[
"\t\t\t\t\t\t|| (block2.Length > GetInputBlockSize()))"
] | 924
|
lcc
|
csharp
| null |
dc4f68ed20529e95f1739fef515a496692b2f641f11bc128
|
|
using NUnit.Framework;
using System;
using NConfiguration.Json.Parsing;
using System.Linq;
namespace NConfiguration.Json
{
[TestFixture]
public class ParsingTests
{
[TestCase(@"[""Unclosed array""")]
[TestCase(@"{unquoted_key: ""keys must be quoted""}")]
[TestCase(@"[""extra comma"",]")]
[TestCase(@"[""double extra comma"",,]")]
[TestCase(@"[ , ""<-- missing value""]")]
[TestCase(@"[""Comma after the close""],")]
[TestCase(@"[""Extra close""]]")]
[TestCase(@"{""Extra comma"": true,}")]
[TestCase(@"{""Extra value after close"": true} ""misplaced quoted value""")]
[TestCase(@"{""Illegal expression"": 1 + 2}")]
[TestCase(@"{""Illegal invocation"": alert()}")]
[TestCase(@"{""Numbers cannot have leading zeroes"": 013}")]
[TestCase(@"{""Numbers cannot be hex"": 0x14}")]
[TestCase(@"[""Illegal backslash escape: \x15""]")]
[TestCase(@"[\naked]")]
[TestCase(@"[""Illegal backslash escape: \017""]")]
[TestCase(@"{""Missing colon"" null}")]
[TestCase(@"{""Double colon"":: null}")]
[TestCase(@"{""Comma instead of colon"", null}")]
[TestCase(@"[""Colon instead of comma"": false]")]
[TestCase(@"[""Bad value"", truth]")]
[TestCase(@"['single quote']")]
[TestCase(@"["" tab character in string ""]")]
[TestCase(@"[""tab\ character\ in\ string\ ""]")]
[TestCase(@"[""line
break""]")]
[TestCase(@"[""line\
break""]")]
[TestCase(@"[0e]")]
[TestCase(@"[0e+]")]
[TestCase(@"[0e+-1]")]
[TestCase(@"{""Comma instead if closing brace"": true,")]
[TestCase(@"[""mismatch""}")]
public void BadParse(string text)
{
Assert.Throws<FormatException>(() => JValue.Parse(text));
}
string _text1 = @"[
""JSON Test Pattern pass1"",
{""object with 1 member"":[""array with 1 element""]},
{},
[],
-42,
true,
false,
null,
{
""integer"": 1234567890,
""real"": -9876.543210,
""e"": 0.123456789e-12,
""E"": 1.234567890E+34,
"""": 23456789012E66,
""zero"": 0,
""one"": 1,
""space"": "" "",
""quote"": ""\"""",
""backslash"": ""\\"",
""controls"": ""\b\f\n\r\t"",
""slash"": ""/ & \/"",
""alpha"": ""abcdefghijklmnopqrstuvwyz"",
""ALPHA"": ""ABCDEFGHIJKLMNOPQRSTUVWYZ"",
""digit"": ""0123456789"",
""0123456789"": ""digit"",
""special"": ""`1~!@#$%^&*()_+-={':[,]}|;.</>?"",
""hex"": ""\u0123\u4567\u89AB\uCDEF\uabcd\uef4A"",
""true"": true,
""false"": false,
""null"": null,
""array"":[ ],
""object"":{ },
""address"": ""50 St. James Street"",
""url"": ""http://www.JSON.org/"",
""comment"": ""// /* <!-- --"",
""# -- --> */"": "" "",
"" s p a c e d "" :[1,2 , 3
,
4 , 5 , 6 ,7 ],""compact"":[1,2,3,4,5,6,7],
""jsontext"": ""{\""object with 1 member\"":[\""array with 1 element\""]}"",
""quotes"": """ \u0022 %22 0x22 034 """,
""\/\\\""\uCAFE\uBABE\uAB98\uFCDE\ubcda\uef4A\b\f\n\r\t`1~!@#$%^&*()_+-=[]{}|;:',./<>?""
: ""A key can be any string""
},
0.5 ,98.6
,
99.44
,
1066,
1e1,
0.1e1,
1e-1,
1e00,2e+00,2e-00
,""rosebud""]";
[Test]
public void SuccessParse1()
{
var rootArr = (JArray)JValue.Parse(_text1);
Assert.That(rootArr.Items[0].ToString(), Is.EqualTo("JSON Test Pattern pass1"));
Assert.That(((JArray)(((JObject)rootArr.Items[1])["object with 1 member"])).Items[0].ToString(),
Is.EqualTo("array with 1 element"));
Assert.That(((JObject)rootArr.Items[2]).Properties, Is.Empty);
Assert.That(((JArray)rootArr.Items[3]).Items, Is.Empty);
Assert.That(rootArr.Items[4].ToString(), Is.EqualTo("-42"));
Assert.That(rootArr.Items[5].ToString(), Is.EqualTo("true"));
Assert.That(rootArr.Items[6].ToString(), Is.EqualTo("false"));
Assert.That(rootArr.Items[7].ToString(), Is.EqualTo("null"));
var o8 = (JObject)rootArr.Items[8];
Assert.That(o8["integer"].ToString(), Is.EqualTo("1234567890"));
Assert.That(o8["real"].ToString(), Is.EqualTo("-9876.543210"));
Assert.That(o8["e"].ToString(), Is.EqualTo("0.123456789e-12"));
Assert.That(o8["E"].ToString(), Is.EqualTo("1.234567890E+34"));
Assert.That(o8[""].ToString(), Is.EqualTo("23456789012E66"));
Assert.That(o8["zero"].ToString(), Is.EqualTo("0"));
Assert.That(o8["one"].ToString(), Is.EqualTo("1"));
Assert.That(o8["space"].ToString(), Is.EqualTo(" "));
Assert.That(o8["quote"].ToString(), Is.EqualTo("\""));
Assert.That(o8["backslash"].ToString(), Is.EqualTo("\\"));
Assert.That(o8["controls"].ToString(), Is.EqualTo("\b\f\n\r\t"));
Assert.That(o8["slash"].ToString(), Is.EqualTo("/ & /"));
Assert.That(o8["alpha"].ToString(), Is.EqualTo("abcdefghijklmnopqrstuvwyz"));
Assert.That(o8["ALPHA"].ToString(), Is.EqualTo("ABCDEFGHIJKLMNOPQRSTUVWYZ"));
Assert.That(o8["digit"].ToString(), Is.EqualTo("0123456789"));
Assert.That(o8["0123456789"].ToString(), Is.EqualTo("digit"));
Assert.That(o8["special"].ToString(), Is.EqualTo("`1~!@#$%^&*()_+-={':[,]}|;.</>?"));
Assert.That(o8["hex"].ToString(), Is.EqualTo("\u0123\u4567\u89AB\uCDEF\uabcd\uef4A"));
Assert.That(o8["true"].ToString(), Is.EqualTo("true"));
Assert.That(o8["false"].ToString(), Is.EqualTo("false"));
Assert.That(o8["null"].ToString(), Is.EqualTo("null"));
Assert.That(o8["array"], Is.InstanceOf<JArray>());
Assert.That(o8["object"], Is.InstanceOf<JObject>());
Assert.That(o8["address"].ToString(), Is.EqualTo("50 St. James Street"));
Assert.That(o8["url"].ToString(), Is.EqualTo("http://www.JSON.org/"));
Assert.That(o8["comment"].ToString(), Is.EqualTo("// /* <!-- --"));
Assert.That(o8["# -- --> */"].ToString(), Is.EqualTo(" "));
Assert.That(((JArray)o8[" s p a c e d "]).Items.Select(i => i.ToString()), Is.EquivalentTo(Enumerable.Range(1, 7).Select(i => i.ToString())));
Assert.That(((JArray)o8["compact"]).Items.Select(i => i.ToString()), Is.EquivalentTo(Enumerable.Range(1, 7).Select(i => i.ToString())));
Assert.That(o8["jsontext"].ToString(), Is.EqualTo(@"{""object with 1 member"":[""array with 1 element""]}"));
Assert.That(o8["quotes"].ToString(), Is.EqualTo("" \u0022 %22 0x22 034 ""));
Assert.That(o8["/\\\"\uCAFE\uBABE\uAB98\uFCDE\ubcda\uef4A\b\f\n\r\t`1~!@#$%^&*()_+-=[]{}|;:',./<>?"].ToString(), Is.EqualTo("A key can be any string"));
Assert.That(rootArr.Items[9].ToString(), Is.EqualTo("0.5"));
Assert.That(rootArr.Items[10].ToString(), Is.EqualTo("98.6"));
Assert.That(rootArr.Items[11].ToString(), Is.EqualTo("99.44"));
Assert.That(rootArr.Items[12].ToString(), Is.EqualTo("1066"));
Assert.That(rootArr.Items[13].ToString(), Is.EqualTo("1e1"));
Assert.That(rootArr.Items[14].ToString(), Is.EqualTo("0.1e1"));
Assert.That(rootArr.Items[15].ToString(), Is.EqualTo("1e-1"));
Assert.That(rootArr.Items[16].ToString(), Is.EqualTo("1e00"));
Assert.That(rootArr.Items[17].ToString(), Is.EqualTo("2e+00"));
Assert.That(rootArr.Items[18].ToString(), Is.EqualTo("2e-00"));
Assert.That(rootArr.Items[19].ToString(), Is.EqualTo("rosebud"));
}
string _text2 = @"[[[[[[[[[[[[[[[[[[[""Too deep""]]]]]]]]]]]]]]]]]]]";
[Test]
public void SuccessParse2()
{
var rootVal = JValue.Parse(_text2);
Assert.That(rootVal.Type, Is.EqualTo(TokenType.Array));
var arr = (JArray)rootVal;
for(int i=0;i<18; i++)
{
Assert.That(arr.Items.Count, Is.EqualTo(1));
arr = (JArray)arr.Items[0];
}
Assert.That(arr.Items.Count, Is.EqualTo(1));
Assert.That(arr.Items[0].Type, Is.EqualTo(TokenType.String));
Assert.That(((JString)arr.Items[0]).Value, Is.EqualTo("Too deep"));
}
string _text3 = @"{
""JSON Test Pattern pass3"": {
""The outermost value"": ""must be an object or array."",
""In this test"": ""It is an object.""
}
}";
[Test]
public void SuccessParse3()
{
var rootVal = JValue.Parse(_text3);
Assert.That(rootVal.Type, Is.EqualTo(TokenType.Object));
var obj = (JObject)rootVal;
Assert.That(obj.Properties[0].Key, Is.EqualTo("JSON Test Pattern pass3"));
Assert.That(obj.Properties[0].Value.Type, Is.EqualTo(TokenType.Object));
|
[
"\t\t\tobj = (JObject)obj.Properties[0].Value;"
] | 537
|
lcc
|
csharp
| null |
05a84c831d09b24c15be892934f316b53ccabf77bfe3253f
|
|
/*
* WANDORA Knowledge Extraction, Management, and Publishing Application
* http://wandora.org
*
* Copyright (C) 2004-2016 Wandora Team
*
* This program is free software: you can redistribute it and/or modify it under
* the terms of the GNU General Public License as published by the Free Software
* Foundation, either version 3 of the License, or (at your option) any later
* version.
*
* This program is distributed in the hope that it will be useful, but WITHOUT
* ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
* FOR A PARTICULAR PURPOSE. See the GNU General Public License for more
* details.
*
* You should have received a copy of the GNU General Public License along with
* this program. If not, see <http://www.gnu.org/licenses/>.
*
*
* CylinderGenerator.java
*
* Created on 2012-05-11
*
*/
package org.wandora.application.tools.generators;
import org.wandora.application.tools.*;
import org.wandora.topicmap.*;
import org.wandora.topicmap.layered.*;
import org.wandora.application.contexts.*;
import org.wandora.application.*;
import java.io.*;
import java.util.*;
import org.wandora.application.gui.WandoraOptionPane;
import static org.wandora.utils.Tuples.T2;
import org.wandora.utils.swing.GuiTools;
/**
*
* http://en.wikipedia.org/wiki/Tiling_by_regular_polygons
*
* @author elehtonen
*/
public class CylinderGenerator extends AbstractGenerator implements WandoraTool {
public static String globalSiPattern = "";
public static String globalBasenamePattern = "";
public static boolean connectWithWandoraClass = true;
/**
* Creates a new instance of Cylinder Generator
*/
public CylinderGenerator() {
}
@Override
public String getName() {
return "Cylinder graph generator";
}
@Override
public String getDescription() {
return "Generates cylinder graph topic maps";
}
@Override
public void execute(Wandora wandora, Context context) throws TopicMapException {
TopicMap topicmap = solveContextTopicMap(wandora, context);
GenericOptionsDialog god = new GenericOptionsDialog(wandora,
"Cylinder graph generator",
"Cylinder graph generator creates simple graphs that resemble cylinders created with regular polygons. "+
"Created cylinders consist of topics and associations. Topics can be thought as cylinder vertices and "+
"associations as cylinder edges. Select the type and size of created tiling below. Optionally you "+
"can set the name and subject identifier patterns for vertex topics as well as the assocation type and "+
"roles of cylinder graph edges. Connecting topics with Wandora class creates some additional topics and "+
"associations that link the cylinder graph with Wandora class topic.",
true, new String[][]{
new String[]{"Create a cylinder with square tiling", "boolean"},
new String[]{"Create a cylinder with triangular tiling", "boolean"},
new String[]{"Create a cylinder with hexagonal tiling", "boolean"},
new String[]{"Width of cylinder", "string"},
new String[]{"Height of cylinder", "string"},
new String[]{"Toroid", "boolean"},
new String[]{"---3","separator"},
new String[]{"Subject identifier pattern","string",globalSiPattern,"Subject identifier patterns for the created node topics. Part __n__ in patterns is replaced with vertex identifier."},
new String[]{"Basename pattern","string",globalBasenamePattern,"Basename patterns for the created node topics. Part __n__ in patterns is replaced with vertex identifier."},
new String[]{"Connect topics with Wandora class","boolean", connectWithWandoraClass ? "true" : "false","Create additional topics and associations that connect created topics with the Wandora class." },
new String[]{"Association type topic","topic",null,"Optional association type for graph edges."},
new String[]{"First role topic","topic",null,"Optional role topic for graph edges."},
new String[]{"Second role topic","topic",null,"Optional role topic for graph edges."},
},
wandora);
god.setSize(700, 620);
GuiTools.centerWindow(god,wandora);
god.setVisible(true);
if (god.wasCancelled()) {
return;
}
Map<String, String> values = god.getValues();
try {
globalSiPattern = values.get("Subject identifier pattern");
if(globalSiPattern != null && globalSiPattern.trim().length() > 0) {
if(!globalSiPattern.contains("__n__")) {
int a = WandoraOptionPane.showConfirmDialog(Wandora.getWandora(), "Subject identifier pattern doesn't contain part for topic counter '__n__'. This causes all generated topics to merge. Do you want to use it?", "Missing topic counter part", WandoraOptionPane.WARNING_MESSAGE);
if(a != WandoraOptionPane.YES_OPTION) globalSiPattern = null;
}
}
globalBasenamePattern = values.get("Basename pattern");
if(globalBasenamePattern != null && globalBasenamePattern.trim().length() > 0) {
if(!globalBasenamePattern.contains("__n__")) {
int a = WandoraOptionPane.showConfirmDialog(Wandora.getWandora(), "Basename pattern doesn't contain part for topic counter '__n__'. This causes all generated topics to merge. Do you want to use it?", "Missing topic counter part", WandoraOptionPane.WARNING_MESSAGE);
if(a != WandoraOptionPane.YES_OPTION) globalBasenamePattern = null;
}
}
connectWithWandoraClass = "true".equalsIgnoreCase(values.get("Connect topics with Wandora class"));
}
catch(Exception e) {
log(e);
}
ArrayList<Cylinder> cylinders = new ArrayList<>();
int progress = 0;
int width = 0;
int height = 0;
boolean toggleToroid = false;
try {
toggleToroid = "true".equals(values.get("Toroid"));
width = Integer.parseInt(values.get("Width of cylinder"));
height = Integer.parseInt(values.get("Height of cylinder"));
if ("true".equals(values.get("Create a cylinder with square tiling"))) {
cylinders.add(new SquareCylinder(width, height, toggleToroid));
}
if ("true".equals(values.get("Create a cylinder with triangular tiling"))) {
cylinders.add(new TriangularCylinder(width, height, toggleToroid));
}
if ("true".equals(values.get("Create a cylinder with hexagonal tiling"))) {
cylinders.add(new HexagonalCylinder(width, height, toggleToroid));
}
}
catch (Exception e) {
singleLog(e);
return;
}
setDefaultLogger();
setLogTitle("Cylinder graph generator");
for (Cylinder cylinder : cylinders) {
Collection<T2> edges = cylinder.getEdges();
log("Creating " + cylinder.getName() + " graph");
Topic atype = cylinder.getAssociationTypeTopic(topicmap,values);
Topic role1 = cylinder.getRole1Topic(topicmap,values);
Topic role2 = cylinder.getRole2Topic(topicmap,values);
Association a = null;
Topic node1 = null;
Topic node2 = null;
if (edges.size() > 0) {
setProgressMax(edges.size());
for (T2<String,String> edge : edges) {
if (edge != null) {
node1 = cylinder.getVertexTopic(edge.e1, topicmap, values);
node2 = cylinder.getVertexTopic(edge.e2, topicmap, values);
if (node1 != null && node2 != null) {
a = topicmap.createAssociation(atype);
a.addPlayer(node1, role1);
a.addPlayer(node2, role2);
}
setProgress(progress++);
}
}
if(connectWithWandoraClass) {
log("You'll find created topics under the '"+cylinder.getName()+" graph' topic.");
}
else {
String searchWord = cylinder.getName();
if(globalBasenamePattern != null && globalBasenamePattern.trim().length() > 0) {
searchWord = globalBasenamePattern.replaceAll("__n__", "");
searchWord = searchWord.trim();
}
log("You'll find created topics by searching with a '"+searchWord+"'.");
}
}
else {
log("Number of cylinder edges is zero. Cylinder has no vertices neithers.");
}
}
if(cylinders.isEmpty()) {
log("No cylinder selected.");
}
log("Ready.");
setState(WAIT);
}
// -------------------------------------------------------------------------
// ----------------------------------------------------------- CYLINDERS ---
// -------------------------------------------------------------------------
public interface Cylinder {
public String getSIPrefix();
public String getName();
public int getSize();
public Collection<T2> getEdges();
public Collection<String> getVertices();
public Topic getVertexTopic(String vertex, TopicMap topicmap, Map<String,String> optionsValues);
public Topic getAssociationTypeTopic(TopicMap topicmap, Map<String,String> optionsValues);
public Topic getRole1Topic(TopicMap topicmap, Map<String,String> optionsValues);
public Topic getRole2Topic(TopicMap topicmap, Map<String,String> optionsValues);
}
public abstract class AbstractCylinder implements Cylinder {
@Override
public Topic getVertexTopic(String vertex, TopicMap topicmap, Map<String,String> optionsValues) {
String newBasename = getName()+" vertex "+vertex;
if(globalBasenamePattern != null && globalBasenamePattern.trim().length() > 0) {
newBasename = globalBasenamePattern.replaceAll("__n__", vertex);
}
String newSubjectIdentifier = getSIPrefix()+"vertex-"+vertex;
if(globalSiPattern != null && globalSiPattern.trim().length() > 0) {
newSubjectIdentifier = globalSiPattern.replaceAll("__n__", vertex);
}
Topic t = getOrCreateTopic(topicmap, newSubjectIdentifier, newBasename);
if(connectWithWandoraClass) {
try {
Topic graphTopic = getOrCreateTopic(topicmap, getSIPrefix(), getName()+" graph");
Topic wandoraClass = getOrCreateTopic(topicmap, TMBox.WANDORACLASS_SI);
makeSuperclassSubclass(topicmap, wandoraClass, graphTopic);
t.addType(graphTopic);
}
catch(Exception e) {
e.printStackTrace();
}
}
return t;
}
@Override
public Topic getAssociationTypeTopic(TopicMap topicmap, Map<String,String> optionsValues) {
String atypeStr = null;
Topic atype = null;
if(optionsValues != null) {
atypeStr = optionsValues.get("Association type topic");
}
if(atypeStr != null) {
try {
atype = topicmap.getTopic(atypeStr);
}
catch(Exception e) {
e.printStackTrace();
}
}
if(atype == null) {
atype = getOrCreateTopic(topicmap, getSIPrefix()+"edge", getName()+" edge");
}
return atype;
}
@Override
public Topic getRole1Topic(TopicMap topicmap, Map<String,String> optionsValues) {
String roleStr = null;
Topic role = null;
if(optionsValues != null) {
roleStr = optionsValues.get("First role topic");
}
if(roleStr != null) {
try {
role = topicmap.getTopic(roleStr);
}
catch(Exception e) {
e.printStackTrace();
}
}
if(role == null) {
role = getOrCreateTopic(topicmap, getSIPrefix()+"role-1", "role 1");
}
return role;
}
@Override
public Topic getRole2Topic(TopicMap topicmap, Map<String,String> optionsValues) {
String roleStr = null;
Topic role = null;
if(optionsValues != null) {
roleStr = optionsValues.get("Second role topic");
}
if(roleStr != null) {
try {
role = topicmap.getTopic(roleStr);
}
catch(Exception e) {
e.printStackTrace();
}
}
if(role == null) {
role = getOrCreateTopic(topicmap, getSIPrefix()+"role-2", "role 2");
}
return role;
}
}
// -------------------------------------------------------------------------
public class SquareCylinder extends AbstractCylinder implements Cylinder {
private int size = 0;
private int width = 0;
private int height = 0;
private boolean isToroid = false;
public SquareCylinder(int w, int h, boolean toroid) {
this.width = w;
this.height = h;
this.size = w * h;
this.isToroid = toroid;
}
@Override
public String getSIPrefix() {
return "http://wandora.org/si/cylinder/square/";
}
@Override
public String getName() {
return "Square-cylinder";
}
@Override
public int getSize() {
return size;
}
@Override
public Collection<T2> getEdges() {
ArrayList<T2> edges = new ArrayList<>();
for (int h = 0; h < height; h++) {
|
[
" for (int w = 0; w < width; w++) {"
] | 1,282
|
lcc
|
java
| null |
74db7236c3c75feb5089de045fc0919ca9061c171999a049
|
|
import pathlib
import warnings
import numpy as np
import dclab
from dclab import isoelastics as iso
from dclab.features import emodulus
from dclab.features.emodulus import pxcorr
from helper_methods import example_data_dict
def get_isofile(name="example_isoelastics.txt"):
thisdir = pathlib.Path(__file__).parent
return thisdir / "data" / name
def test_bad_isoelastic():
i1 = iso.Isoelastics([get_isofile()])
try:
i1.get(col1="deform",
col2="area_ratio",
lut_identifier="test-LE-2D-ana-18",
channel_width=20,
flow_rate=0.04,
viscosity=15,
add_px_err=False,
px_um=None)
except KeyError:
pass
else:
assert False, "features should not work"
def test_bad_isoelastic_2():
i1 = iso.Isoelastics([get_isofile()])
try:
i1.get(col1="deform",
col2="area_um",
lut_identifier="LE-2D-FEM-19",
channel_width=20,
flow_rate=0.04,
viscosity=15,
add_px_err=False,
px_um=None)
except KeyError:
pass
else:
assert False, "only analytical should not work with this set"
def test_bad_isoelastic_3():
i1 = iso.Isoelastics([get_isofile()])
try:
i1.get(col1="deform",
col2="bad_feature",
lut_identifier="LE-2D-FEM-19",
channel_width=20,
flow_rate=0.04,
viscosity=15,
add_px_err=False,
px_um=None)
except ValueError:
pass
else:
assert False, "bad feature does not work"
def test_bad_isoelastic_4():
i1 = iso.Isoelastics([get_isofile()])
try:
i1.get(col1="deform",
col2="area_um",
lut_identifier="LE-2D-FEM-99-nonexistent",
channel_width=20,
flow_rate=0.04,
viscosity=15,
add_px_err=False,
px_um=None)
except KeyError:
pass
else:
assert False, "bad lut_identifier does not work"
def test_circ():
i1 = iso.Isoelastics([get_isofile()])
iso1 = i1._data["test-LE-2D-ana-18"]["area_um"]["deform"]["isoelastics"]
iso2 = i1._data["test-LE-2D-ana-18"]["area_um"]["circ"]["isoelastics"]
assert np.allclose(iso1[0][:, 1], 1 - iso2[0][:, 1])
def test_circ_get():
i1 = iso.Isoelastics([get_isofile()])
iso_circ = i1.get(col1="area_um",
col2="circ",
lut_identifier="test-LE-2D-ana-18",
channel_width=15,
flow_rate=0.04,
viscosity=15)
iso_deform = i1.get(col1="area_um",
col2="deform",
lut_identifier="test-LE-2D-ana-18",
channel_width=15,
flow_rate=0.04,
viscosity=15)
for ii in range(len(iso_circ)):
isc = iso_circ[ii]
isd = iso_deform[ii]
assert np.allclose(isc[:, 0], isd[:, 0])
assert np.allclose(isc[:, 1], 1 - isd[:, 1])
def test_convert():
i1 = iso.Isoelastics([get_isofile()])
isoel = i1._data["test-LE-2D-ana-18"]["area_um"]["deform"]["isoelastics"]
isoel15 = i1.convert(isoel=isoel,
col1="area_um",
col2="deform",
channel_width_in=20,
channel_width_out=15,
flow_rate_in=0.04,
flow_rate_out=0.04,
viscosity_in=15,
viscosity_out=15)
# These values were taken from previous isoelasticity files
# used in Shape-Out.
assert np.allclose(isoel15[0][:, 2], 7.11111111e-01)
assert np.allclose(isoel15[1][:, 2], 9.48148148e-01)
# area_um
assert np.allclose(isoel15[0][1, 0], 2.245995843750000276e+00)
assert np.allclose(isoel15[0][9, 0], 9.954733499999999680e+00)
assert np.allclose(isoel15[1][1, 0], 2.247747243750000123e+00)
# deform
assert np.allclose(isoel15[0][1, 1], 5.164055600000000065e-03)
assert np.allclose(isoel15[0][9, 1], 2.311524599999999902e-02)
assert np.allclose(isoel15[1][1, 1], 2.904264599999999922e-03)
def test_convert_error():
i1 = iso.Isoelastics([get_isofile()])
isoel = i1.get(col1="area_um",
col2="deform",
lut_identifier="test-LE-2D-ana-18",
channel_width=15)
kwargs = dict(channel_width_in=15,
channel_width_out=20,
flow_rate_in=.12,
flow_rate_out=.08,
viscosity_in=15,
viscosity_out=15)
try:
i1.convert(isoel=isoel,
col1="deform",
col2="area_ratio",
**kwargs)
except KeyError:
pass
except BaseException:
raise
else:
assert False, "undefined column volume"
def test_data_slicing():
i1 = iso.Isoelastics([get_isofile()])
iso1 = i1._data["test-LE-2D-ana-18"]["area_um"]["deform"]["isoelastics"]
iso2 = i1._data["test-LE-2D-ana-18"]["deform"]["area_um"]["isoelastics"]
for ii in range(len(iso1)):
assert np.all(iso1[ii][:, 2] == iso2[ii][:, 2])
assert np.all(iso1[ii][:, 0] == iso2[ii][:, 1])
assert np.all(iso1[ii][:, 1] == iso2[ii][:, 0])
def test_data_structure():
i1 = iso.Isoelastics([get_isofile()])
# basic import
assert "test-LE-2D-ana-18" in i1._data
assert "deform" in i1._data["test-LE-2D-ana-18"]
assert "area_um" in i1._data["test-LE-2D-ana-18"]["deform"]
assert "area_um" in i1._data["test-LE-2D-ana-18"]
assert "deform" in i1._data["test-LE-2D-ana-18"]["area_um"]
# circularity
assert "circ" in i1._data["test-LE-2D-ana-18"]
assert "area_um" in i1._data["test-LE-2D-ana-18"]["circ"]
assert "area_um" in i1._data["test-LE-2D-ana-18"]
assert "circ" in i1._data["test-LE-2D-ana-18"]["area_um"]
# metadata
meta1 = i1._data["test-LE-2D-ana-18"]["area_um"]["deform"]["meta"]
meta2 = i1._data["test-LE-2D-ana-18"]["deform"]["area_um"]["meta"]
assert meta1 == meta2
def test_get():
i1 = iso.Isoelastics([get_isofile()])
data = i1.get(col1="area_um",
col2="deform",
channel_width=20,
flow_rate=0.04,
viscosity=15,
lut_identifier="test-LE-2D-ana-18")
refd = i1._data["test-LE-2D-ana-18"]["area_um"]["deform"]["isoelastics"]
for a, b in zip(data, refd):
assert np.all(a == b)
def test_pixel_err():
i1 = iso.Isoelastics([get_isofile()])
isoel = i1._data["test-LE-2D-ana-18"]["area_um"]["deform"]["isoelastics"]
px_um = .10
# add the error
isoel_err = i1.add_px_err(isoel=isoel,
col1="area_um",
col2="deform",
px_um=px_um,
inplace=False)
# remove the error manually
isoel_corr = []
for iss in isoel_err:
iss = iss.copy()
iss[:, 1] -= pxcorr.corr_deform_with_area_um(area_um=iss[:, 0],
px_um=px_um)
isoel_corr.append(iss)
for ii in range(len(isoel)):
assert not np.allclose(isoel[ii], isoel_err[ii])
assert np.allclose(isoel[ii], isoel_corr[ii])
try:
i1.add_px_err(isoel=isoel,
col1="deform",
col2="deform",
px_um=px_um,
inplace=False)
except ValueError:
pass
else:
assert False, "identical columns"
try:
i1.add_px_err(isoel=isoel,
col1="deform",
col2="circ",
px_um=px_um,
inplace=False)
except KeyError:
pass
except BaseException:
raise
else:
assert False, "area_um required"
def test_volume_basic():
"""Reproduce exact data from simulation result"""
i1 = iso.get_default()
data = i1.get(col1="volume",
col2="deform",
channel_width=20,
flow_rate=0.04,
viscosity=15,
lut_identifier="LE-2D-FEM-19",
add_px_err=False,
px_um=None)
assert np.allclose(data[0][0], [1.61819e+02, 4.18005e-02, 1.08000e+00])
assert np.allclose(data[0][-1], [5.90127e+02, 1.47449e-01, 1.08000e+00])
assert np.allclose(data[1][0], [1.61819e+02, 2.52114e-02, 1.36000e+00])
assert np.allclose(data[-1][-1], [3.16212e+03, 1.26408e-02, 1.08400e+01])
def test_volume_pxcorr():
"""Deformation is pixelation-corrected using volume"""
i1 = iso.get_default()
data = i1.get(col1="volume",
col2="deform",
channel_width=20,
flow_rate=None,
viscosity=None,
lut_identifier="LE-2D-FEM-19",
add_px_err=True,
px_um=0.34)
ddelt = pxcorr.corr_deform_with_volume(1.61819e+02, px_um=0.34)
assert np.allclose(data[0][0], [1.61819e+02,
4.18005e-02 + ddelt,
1.08000e+00])
def test_volume_scale():
"""Simple volume scale"""
i1 = iso.get_default()
data = i1.get(col1="volume",
col2="deform",
channel_width=25,
flow_rate=0.04,
viscosity=15,
lut_identifier="LE-2D-FEM-19",
add_px_err=False,
px_um=None)
assert np.allclose(data[0][0], [1.61819e+02 * (25 / 20)**3,
4.18005e-02,
1.08000e+00 * (20 / 25)**3])
def test_volume_scale_2():
"""The default values are used if set to None"""
i1 = iso.get_default()
data = i1.get(col1="volume",
col2="deform",
channel_width=25,
flow_rate=None,
viscosity=None,
lut_identifier="LE-2D-FEM-19",
add_px_err=False,
px_um=None)
assert np.allclose(data[0][0], [1.61819e+02 * (25 / 20)**3,
4.18005e-02,
1.08000e+00 * (20 / 25)**3])
def test_volume_switch():
"""Switch the columns"""
i1 = iso.get_default()
data = i1.get(col1="deform",
col2="volume",
channel_width=20,
flow_rate=0.04,
viscosity=15,
lut_identifier="LE-2D-FEM-19",
add_px_err=False,
px_um=None)
assert np.allclose(data[0][0], [4.18005e-02, 1.61819e+02, 1.08000e+00])
assert np.allclose(data[-1][-1], [1.26408e-02, 3.16212e+03, 1.08400e+01])
def test_volume_switch_scale():
"""Switch the columns and change the scale"""
i1 = iso.get_default()
data = i1.get(col1="deform",
col2="volume",
channel_width=25,
flow_rate=0.04,
viscosity=15,
lut_identifier="LE-2D-FEM-19",
add_px_err=False,
px_um=None)
assert np.allclose(data[0][0], [4.18005e-02,
1.61819e+02 * (25 / 20)**3,
1.08000e+00 * (20 / 25)**3])
assert np.allclose(data[-1][-1], [1.26408e-02,
3.16212e+03 * (25 / 20)**3,
1.08400e+01 * (20 / 25)**3])
def test_with_rtdc():
keys = ["area_um", "deform"]
ddict = example_data_dict(size=8472, keys=keys)
# legacy
ds = dclab.new_dataset(ddict)
ds.config["setup"]["flow rate"] = 0.16
ds.config["setup"]["channel width"] = 30
ds.config["setup"]["temperature"] = 23.0
ds.config["setup"]["medium"] = "CellCarrier"
ds.config["imaging"]["pixel size"] = .34
|
[
" i1 = iso.get_default()"
] | 761
|
lcc
|
python
| null |
815f132424cf8772d1a660e02d231ddd3309d07fe636e616
|
|
/*
Copyright (C) SYSTAP, LLC DBA Blazegraph 2006-2016. All rights reserved.
Contact:
SYSTAP, LLC DBA Blazegraph
2501 Calvert ST NW #106
Washington, DC 20008
licenses@blazegraph.com
This program is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; version 2 of the License.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program; if not, write to the Free Software
Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
*/
/*
* Created on Nov 14, 2008
*/
package com.bigdata.rdf.store;
import java.io.File;
import java.io.FileNotFoundException;
import java.util.Properties;
import java.util.concurrent.Callable;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.FutureTask;
import org.apache.log4j.Logger;
import org.openrdf.model.Statement;
import com.bigdata.journal.Journal;
import com.bigdata.journal.TimestampUtility;
import com.bigdata.rdf.axioms.Axioms;
import com.bigdata.rdf.axioms.NoAxioms;
import com.bigdata.rdf.internal.IV;
import com.bigdata.rdf.model.BigdataStatement;
import com.bigdata.rdf.rio.AbstractStatementBuffer.StatementBuffer2;
import com.bigdata.rdf.rio.StatementBuffer;
import com.bigdata.rdf.rules.BackchainAccessPath;
import com.bigdata.rdf.spo.ISPO;
import com.bigdata.rdf.spo.SPO;
import com.bigdata.rdf.store.AbstractTripleStore.Options;
import com.bigdata.relation.accesspath.BlockingBuffer;
import com.bigdata.relation.accesspath.IAccessPath;
import com.bigdata.striterator.IChunkedOrderedIterator;
import cutthecrap.utils.striterators.ICloseableIterator;
/**
* Utility class for comparing graphs for equality, bulk export, etc.
*
* @author <a href="mailto:thompsonbry@users.sourceforge.net">Bryan Thompson</a>
* @version $Id$
*/
public class TripleStoreUtility {
protected static final Logger log = Logger.getLogger(TripleStoreUtility.class);
/**
* Compares two RDF graphs for equality (same statements).
* <p>
* Note: This does NOT handle bnodes, which much be treated as variables for
* RDF semantics.
* <p>
* Note: Comparison is performed in terms of the externalized RDF
* {@link Statement}s rather than {@link SPO}s since different graphs use
* different lexicons.
* <p>
* Note: If the graphs differ in which entailments they are storing in their
* data and which entailments are backchained then you MUST make them
* consistent in this regard. You can do this by exporting one or both using
* {@link #bulkExport(AbstractTripleStore)}, which will cause all
* entailments to be materialized in the returned {@link TempTripleStore}.
*
* @param expected
* One graph.
*
* @param actual
* Another graph <strong>with a consistent policy for forward and
* backchained entailments</strong>.
*
* @return true if all statements in the expected graph are in the actual
* graph and if the actual graph does not contain any statements
* that are not also in the expected graph.
*/
public static boolean modelsEqual(AbstractTripleStore expected,
AbstractTripleStore actual) throws Exception {
// int actualSize = 0;
int notExpecting = 0;
int expecting = 0;
boolean sameStatements1 = true;
{
final ICloseableIterator<BigdataStatement> it = notFoundInTarget(actual, expected);
try {
while (it.hasNext()) {
final BigdataStatement stmt = it.next();
sameStatements1 = false;
log("Not expecting: " + stmt);
notExpecting++;
// actualSize++; // count #of statements actually visited.
}
} finally {
it.close();
}
log("all the statements in actual in expected? " + sameStatements1);
}
// int expectedSize = 0;
boolean sameStatements2 = true;
{
final ICloseableIterator<BigdataStatement> it = notFoundInTarget(expected, actual);
try {
while (it.hasNext()) {
final BigdataStatement stmt = it.next();
sameStatements2 = false;
log(" Expecting: " + stmt);
expecting++;
// expectedSize++; // counts statements actually visited.
}
} finally {
it.close();
}
// BigdataStatementIterator it = expected.asStatementIterator(expected
// .getInferenceEngine().backchainIterator(
// expected.getAccessPath(NULL, NULL, NULL)));
//
// try {
//
// while(it.hasNext()) {
//
// BigdataStatement stmt = it.next();
//
// if (!hasStatement(actual,//
// (Resource)actual.getValueFactory().asValue(stmt.getSubject()),//
// (URI)actual.getValueFactory().asValue(stmt.getPredicate()),//
// (Value)actual.getValueFactory().asValue(stmt.getObject()))//
// ) {
//
// sameStatements2 = false;
//
// log(" Expecting: " + stmt);
//
// expecting++;
//
// }
//
// expectedSize++; // counts statements actually visited.
//
// }
//
// } finally {
//
// it.close();
//
// }
log("all the statements in expected in actual? " + sameStatements2);
}
// final boolean sameSize = expectedSize == actualSize;
//
// log("size of 'expected' repository: " + expectedSize);
//
// log("size of 'actual' repository: " + actualSize);
log("# expected but not found: " + expecting);
log("# not expected but found: " + notExpecting);
return /*sameSize &&*/sameStatements1 && sameStatements2;
}
public static void log(final String s) {
if(log.isInfoEnabled())
log.info(s);
}
/**
* Visits <i>expected</i> {@link BigdataStatement}s not found in <i>actual</i>.
*
* @param expected
* @param actual
*
* @return An iterator visiting {@link BigdataStatement}s present in
* <i>expected</i> but not found in <i>actual</i>.
*
* @throws ExecutionException
* @throws InterruptedException
*/
public static ICloseableIterator<BigdataStatement> notFoundInTarget(//
final AbstractTripleStore expected,//
final AbstractTripleStore actual //
) throws InterruptedException, ExecutionException {
/*
* The source access path is a full scan of the SPO index.
*/
final IAccessPath<ISPO> expectedAccessPath = expected.getAccessPath(
(IV) null, (IV) null, (IV) null);
/*
* Efficiently convert SPOs to BigdataStatements (externalizes
* statements).
*/
final BigdataStatementIterator itr2 = expected
.asStatementIterator(expectedAccessPath.iterator());
final int capacity = 100000;
final BlockingBuffer<BigdataStatement> buffer = new BlockingBuffer<BigdataStatement>(
capacity);
final StatementBuffer2<Statement, BigdataStatement> sb = new StatementBuffer2<Statement, BigdataStatement>(
actual, true/* readOnly */, capacity) {
/**
* Statements not found in [actual] are written on the
* BlockingBuffer.
*
* @return The #of statements that were not found.
*/
@Override
protected int handleProcessedStatements(final BigdataStatement[] a) {
if (log.isInfoEnabled())
log.info("Given " + a.length + " statements");
// bulk filter for statements not present in [actual].
final IChunkedOrderedIterator<ISPO> notFoundItr = actual
.bulkFilterStatements(a, a.length, false/* present */);
int nnotFound = 0;
try {
while (notFoundItr.hasNext()) {
final ISPO notFoundStmt = notFoundItr.next();
if (log.isInfoEnabled())
log.info("Not found: " + notFoundStmt);
buffer.add((BigdataStatement) notFoundStmt);
nnotFound++;
}
} finally {
notFoundItr.close();
}
if (log.isInfoEnabled())
log.info("Given " + a.length + " statements, " + nnotFound
+ " of them were not found");
return nnotFound;
}
};
/**
* Run task. The task consumes externalized statements from [expected]
* and writes statements not found in [actual] onto the blocking buffer.
*/
final Callable<Void> myTask = new Callable<Void>() {
public Void call() throws Exception {
try {
while (itr2.hasNext()) {
// a statement from the source db.
final BigdataStatement stmt = itr2.next();
// if (log.isInfoEnabled()) log.info("Source: "
// + stmt);
// add to the buffer.
sb.add(stmt);
}
} finally {
itr2.close();
}
/*
* Flush everything in the StatementBuffer so that it
* shows up in the BlockingBuffer's iterator().
*/
final long nnotFound = sb.flush();
if (log.isInfoEnabled())
log.info("Flushed: #notFound=" + nnotFound);
return null;
}
};
/**
* @see <a href="https://sourceforge.net/apps/trac/bigdata/ticket/707">
* BlockingBuffer.close() does not unblock threads </a>
*/
// Wrap computation as FutureTask.
final FutureTask<Void> ft = new FutureTask<Void>(myTask);
// Set Future on BlockingBuffer.
buffer.setFuture(ft);
// Submit computation for evaluation.
actual.getExecutorService().submit(ft);
/*
* Return iterator reading "not found" statements from the blocking
* buffer.
*/
return buffer.iterator();
}
/**
* Exports all statements found in the data and all backchained entailments
* for the <i>db</i> into a {@link TempTripleStore}. This may be used to
* compare graphs purely in their data by pre-generation of all backchained
* entailments.
* <p>
* Note: This is not a general purpose bulk export as it uses only a single
* access path, does not store justifications, and does retain the
* {@link Axioms} model of the source graph. This method is specifically
* designed to export "just the triples", e.g., for purposes of comparison.
*
* @param db
* The source database.
*
* @return The {@link TempTripleStore}.
*/
static public TempTripleStore bulkExport(final AbstractTripleStore db) {
final Properties properties = new Properties();
properties.setProperty(Options.ONE_ACCESS_PATH, "true");
properties.setProperty(Options.JUSTIFY, "false");
properties.setProperty(Options.AXIOMS_CLASS,
NoAxioms.class.getName());
properties.setProperty(Options.STATEMENT_IDENTIFIERS,
"" + db.isStatementIdentifiers());
final TempTripleStore tmp = new TempTripleStore(properties);
try {
final StatementBuffer<Statement> sb = new StatementBuffer<Statement>(tmp, 100000/* capacity */,
10/* queueCapacity */);
final IV NULL = null;
final IChunkedOrderedIterator<ISPO> itr1 = new BackchainAccessPath(
db, db.getAccessPath(NULL, NULL, NULL)).iterator();
final BigdataStatementIterator itr2 = db.asStatementIterator(itr1);
try {
while (itr2.hasNext()) {
final BigdataStatement stmt = itr2.next();
sb.add(stmt);
}
} finally {
itr2.close();
}
sb.flush();
} catch (Throwable t) {
tmp.close();
throw new RuntimeException(t);
}
return tmp;
}
/**
* Compares two {@link LocalTripleStore}s
*
* @param args
* filename filename (namespace)
*
* @throws Exception
*
* @todo namespace for each, could be the same file, and timestamp for each.
*
* @todo handle other database modes.
*/
public static void main(String[] args) throws Exception {
|
[
" if (args.length < 2 || args.length > 3) {"
] | 1,331
|
lcc
|
java
| null |
0ab3fa6fa37f02b5f789515e8bd2a7357fa959242a06e4e3
|
|
#!/usr/bin/env python
"""
Test alerts
"""
import unittest
import datetime
from dateutil.tz import tzutc
from spotbot import alert
def isclose(a, b, rel_tol=1e-09, abs_tol=0.0):
""" Borrow isclose from Python 3.5 """
return abs(a-b) <= max(rel_tol * max(abs(a), abs(b)), abs_tol)
class AlertTest(unittest.TestCase):
def setUp(self):
pass
def tearDown(self):
pass
def test_check_for_alert_empty_history_and_subscriptions(self):
""" Test that we handle no history and no subscriptions in a sane way."""
assert alert.check_for_alert([],None) is None, "Alerts should have been an empty list."
def test_check_for_alert_over_under(self):
""" Test that we can match an alert description against relevant history. """
history = [ {u'Timestamp': datetime.datetime(2015, 12, 31, 22, 13, 43,
tzinfo=tzutc()),
u'ProductDescription': 'Windows',
u'InstanceType': 'g2.2xlarge',
u'SpotPrice': '0.105200',
u'AvailabilityZone': 'us-east-1b'},
{u'Timestamp': datetime.datetime(2015, 12, 31, 21, 56, 18,
tzinfo=tzutc()),
u'ProductDescription': 'Windows',
u'InstanceType': 'g2.2xlarge',
u'SpotPrice': '0.104400',
u'AvailabilityZone': 'us-east-1d'},
{u'Timestamp': datetime.datetime(2015, 12, 31, 21, 56, 18,
tzinfo=tzutc()),
u'ProductDescription': 'Windows',
u'InstanceType': 'g2.2xlarge',
u'SpotPrice': '0.106300',
u'AvailabilityZone': 'us-east-1c'},
{u'Timestamp': datetime.datetime(2015, 12, 31, 21, 31, 6,
tzinfo=tzutc()),
u'ProductDescription': 'Windows',
u'InstanceType': 'g2.2xlarge',
u'SpotPrice': '0.767100',
u'AvailabilityZone': 'us-east-1e'},
{u'Timestamp': datetime.datetime(2015, 12, 31, 21, 29, 47,
tzinfo=tzutc()),
u'ProductDescription': 'Windows',
u'InstanceType': 'g2.2xlarge',
u'SpotPrice': '0.105300',
u'AvailabilityZone': 'us-east-1b'},
]
virginia_under_a_nickle = {'name': 'Virginia Under A Nickle',
'threshold':'0.05', 'region':'us-east-1', 'zone':
'us-east-1b', 'instance_type':'g2.2xlarge',
'product':'Windows', 'user':'1', 'last_alert':'Under'}
virginia_over_twenty = {'name': 'Virginia Over Twenty',
'threshold':'0.2', 'region':'us-east-1', 'zone':
'us-east-1b', 'instance_type':'g2.2xlarge',
'product':'Windows', 'user':'1', 'last_alert':'Over'}
assert not alert.check_for_alert(history, virginia_under_a_nickle) is None, "Should see an alert for Virginia Under A Nickle"
assert not alert.check_for_alert(history, virginia_over_twenty) is None, "Should see an alert for Virginia Over Twenty"
dublin_under_twenty = {'name': 'Dublin Under Twenty',
'threshold':'0.2', 'region':'eu-west-1', 'zone': 'eu-west-1b',
'instance_type':'g2.2xlarge', 'product':'Windows', 'user':'1',
'last_alert':'Under'}
virginia_under_twenty = {'name': 'Virginia Under Twenty',
'threshold':'0.2', 'region':'us-east-1', 'zone': 'us-east-1b',
'instance_type':'g2.2xlarge', 'product':'Windows', 'user':'1',
'last_alert':'Under'}
assert alert.check_for_alert(history, dublin_under_twenty) is None, "Should not see an alert for Dublin Under Twenty"
assert alert.check_for_alert(history, virginia_under_twenty) is None, "Should not see an alert for Virginia Under Twenty"
def test_check_for_alert_with_no_matched_zones(self):
"""Alerts are only valid if the availability zone in the history matches an availability zone in the subscription"""
history = [{u'Timestamp': datetime.datetime(2015, 12, 31, 22, 13, 43,
tzinfo=tzutc()),
u'ProductDescription': 'Windows',
u'InstanceType': 'g2.2xlarge',
u'SpotPrice': '0.105200',
u'AvailabilityZone': 'us-east-1d'},
{u'Timestamp': datetime.datetime(2015, 12, 31, 21, 56, 18,
tzinfo=tzutc()),
u'ProductDescription': 'Windows',
u'InstanceType': 'g2.2xlarge',
u'SpotPrice': '0.104400',
u'AvailabilityZone': 'us-east-1d'}]
just_1a = {'name': 'Just 1a',
'threshold':'0.05',
'region':'us-east-1',
'zone': 'us-east-1a',
'instance_type':'g2.2xlarge',
'product':'Windows',
'user':'1',
'last_alert':'Under'}
result = alert.check_for_alert(history, just_1a)
assert result is None, 'There should not be an alert for Just 1a'
def test_check_that_alert_matches_zone(self):
"""When we match a zone and all other criteria, we should create an alert."""
history = [{u'Timestamp': datetime.datetime(2015, 12, 31, 22, 13, 43,
tzinfo=tzutc()),
u'ProductDescription': 'Windows',
u'InstanceType': 'g2.2xlarge',
u'SpotPrice': '0.105200',
u'AvailabilityZone': 'us-east-1d'},
{u'Timestamp': datetime.datetime(2015, 12, 31, 21, 56, 18,
tzinfo=tzutc()),
u'ProductDescription': 'Windows',
u'InstanceType': 'g2.2xlarge',
u'SpotPrice': '0.104400',
u'AvailabilityZone': 'us-east-1d'}]
match_1d = {'name': 'Sub for just 1d',
'threshold':'0.05',
'region':'us-east-1',
'zone': 'us-east-1d',
'instance_type':'g2.2xlarge',
'product':'Windows',
'user':'1',
'last_alert':'Under'}
assert not alert.check_for_alert(history, match_1d) is None, "There should be an alert from match_1d"
match_1q = {'name': 'Sub for 1q',
'threshold':'0.05',
'region':'us-east-1',
'zone': 'us-east-1q',
'instance_type':'g2.2xlarge',
'product':'Windows',
'user':'1',
'last_alert':'Under'}
assert alert.check_for_alert(history, match_1q) is None, "There should not be any alerts for us_east-1q"
def test_check_for_alert_sets_last_alert(self):
"""check_for_alert should set the last_alert attribute of the alert to indication the type of the alert."""
history = [ {u'Timestamp': datetime.datetime(2015, 12, 31, 22, 13, 43,
tzinfo=tzutc()),
u'ProductDescription': 'Windows',
u'InstanceType': 'g2.2xlarge',
u'SpotPrice': '0.105200',
u'AvailabilityZone': 'us-east-1b'}]
subscription = {'name': 'Sub for 1b',
'threshold':'0.05',
'region':'us-east-1',
'zone': 'us-east-1b',
'instance_type':'g2.2xlarge',
'product':'Windows',
'user':'1',
'last_alert':'Under'}
result = alert.check_for_alert(history, subscription)
assert not result is None, "There should be an alert for us_east-1b"
assert result['last_alert'] == 'Over'
def test_check_for_alert_sets_spotprice(self):
"""check_for_alert should set the last_alert attribute of the alert to indication the type of the alert."""
history = [ {u'Timestamp': datetime.datetime(2015, 12, 31, 22, 13, 43,
tzinfo=tzutc()),
u'ProductDescription': 'Windows',
u'InstanceType': 'g2.2xlarge',
u'SpotPrice': '0.105200',
u'AvailabilityZone': 'us-east-1b'}]
subscription = {'name': 'Sub for 1b',
'threshold':'0.05',
'region':'us-east-1',
'zone': 'us-east-1b',
'instance_type':'g2.2xlarge',
'product':'Windows',
'user':'1',
'last_alert':'Under'}
result = alert.check_for_alert(history, subscription)
assert not result is None, "There should be an alert for us_east-1b"
assert result['spot_price'] == 0.105200
def test_lowest_spotprice(self):
"""We should find the lowest spotprice for a given zone or return None."""
history = [ {u'Timestamp': datetime.datetime(2015, 12, 31, 22, 13, 43,
tzinfo=tzutc()),
u'ProductDescription': 'Windows',
u'InstanceType': 'g2.2xlarge',
u'SpotPrice': '0.105200',
u'AvailabilityZone': 'us-east-1b'},
{u'Timestamp': datetime.datetime(2015, 12, 31, 21, 56, 18,
tzinfo=tzutc()),
u'ProductDescription': 'Windows',
u'InstanceType': 'g2.2xlarge',
u'SpotPrice': '0.104400',
u'AvailabilityZone': 'us-east-1d'},
{u'Timestamp': datetime.datetime(2015, 12, 31, 21, 56, 18,
tzinfo=tzutc()),
u'ProductDescription': 'Windows',
u'InstanceType': 'g2.2xlarge',
u'SpotPrice': '0.106300',
u'AvailabilityZone': 'us-east-1c'},
{u'Timestamp': datetime.datetime(2015, 12, 31, 21, 31, 6,
tzinfo=tzutc()),
u'ProductDescription': 'Windows',
u'InstanceType': 'g2.2xlarge',
u'SpotPrice': '0.767100',
u'AvailabilityZone': 'us-east-1e'},
|
[
" {u'Timestamp': datetime.datetime(2015, 12, 31, 21, 29, 47,"
] | 706
|
lcc
|
python
| null |
ec079a446c9b8d37501282c4d7ea9038eeec3621948ada9e
|
|
package fr.inria.arles.yarta.desktop.library.util;
import java.io.BufferedReader;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileOutputStream;
import java.io.FileReader;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.io.PrintWriter;
import java.io.StringWriter;
import java.net.URL;
import java.net.URLConnection;
import javax.swing.JOptionPane;
import fr.inria.arles.yarta.desktop.library.DownloaderDialog;
import fr.inria.arles.yarta.desktop.library.RMIUtil;
import fr.inria.arles.yarta.desktop.library.Service;
/**
* Helper class which permits (un)installing & updating the application.
*/
public class Installer {
public static final String InstallPath = System.getProperty("user.home")
+ "/.yarta/";
public static final String FilesPath = InstallPath + "res/";
private static final String[] files = { "mse-1.2.rdf", "policies" };
private String currentJarPath;
private String installedJarPath;
private Exception error;
public Installer() {
String jarFile = System.getProperty("java.class.path");
if (!jarFile.endsWith("jar")) {
jarFile = "yarta.jar";
}
currentJarPath = new File(jarFile).getAbsolutePath();
installedJarPath = InstallPath + "yarta.jar";
}
/**
* Checks whether Yarta is installed on the current machine.
*
* @return
*/
public boolean isInstalled() {
return checkFilesConsistency();
}
/**
* Checks if Yarta Service is running.
*
* @return
*/
public boolean isRunning() {
Service service = RMIUtil.getObject(Service.Name);
boolean running = service != null;
service = null;
return running;
}
/**
* Runs a jar file with the specified arguments;
*
* @param jarPath
* @param args
* @return
*/
private Process runJar(String jarPath, String... args) {
String command = "java -jar " + jarPath;
if (isWindows()) {
command = "javaw -jar " + jarPath;
}
for (String arg : args) {
command += " " + arg;
}
try {
return Runtime.getRuntime().exec(command);
} catch (Exception ex) {
}
return null;
}
/**
* Launches the application.
*
* @return
*/
public boolean launchApp() {
return runJar(installedJarPath) != null;
}
/**
* Returns the timestamp of yarta.jar from Internet.
*
* @return
*/
private long getLastModifiedRemote() {
long lastModified = 0;
try {
URL url = new URL(Strings.DownloaderYartaLink);
URLConnection conn = url.openConnection();
lastModified = conn.getLastModified();
} catch (Exception ex) {
}
return lastModified;
}
/**
* Checks for updates, and if there are any, asks users and update. Returns
* false otherwise.
*
* @return true/false
*/
public boolean checkAndUpdate() {
long lastModifiedLocal = new File(installedJarPath).lastModified();
long lastModifiedRemote = getLastModifiedRemote();
if (lastModifiedRemote > lastModifiedLocal) {
int option = 0;
try {
option = JOptionPane.showConfirmDialog(null,
Strings.InstallerUpdatePrompt,
Strings.InstallerUpdateTitle,
JOptionPane.OK_CANCEL_OPTION,
JOptionPane.INFORMATION_MESSAGE);
} catch (Exception ex) {
// system does not have UI
option = JOptionPane.OK_OPTION;
}
if (option == JOptionPane.OK_OPTION) {
String downloadedJarFile = performDownload();
if (downloadedJarFile != null) {
return performInstallerLaunch(downloadedJarFile);
}
}
}
return false;
}
public boolean launchService() {
return runJar(installedJarPath, "/start") != null;
}
public boolean stopService() {
if (!new File(installedJarPath).exists()) {
return true;
}
try {
Process process = runJar(installedJarPath, "/stop");
process.waitFor();
return true;
} catch (Exception ex) {
return false;
}
}
public boolean install() {
boolean hasUI = true;
try {
int selection = JOptionPane.showConfirmDialog(null,
Strings.InstallerPrompt, Strings.InstallerPromptTitle,
JOptionPane.OK_CANCEL_OPTION,
JOptionPane.INFORMATION_MESSAGE);
if (selection == JOptionPane.OK_OPTION) {
return performInstallation();
}
} catch (Exception ex) {
hasUI = false;
}
if (!hasUI) {
return performInstallation();
}
return false;
}
/**
* This should download and install Yarta.
*
* When the function returns true Yarta will be installed.
*
* @return true/false
*/
public boolean downloadAndInstall() {
int selection = JOptionPane.showConfirmDialog(null,
Strings.InstallerDownloadPrompt,
Strings.InstallerDownloadTitle, JOptionPane.OK_CANCEL_OPTION,
JOptionPane.INFORMATION_MESSAGE);
if (selection == JOptionPane.OK_OPTION) {
|
[
"\t\t\tString downloadedJarFile = performDownload();"
] | 519
|
lcc
|
java
| null |
2048127690b0643c5439f0746443d4a8223e39ac290d9891
|
|
#region Copyright & License Information
/*
* Copyright 2007-2019 The OpenRA Developers (see AUTHORS)
* This file is part of OpenRA, which is free software. It is made
* available to you under the terms of the GNU General Public License
* as published by the Free Software Foundation, either version 3 of
* the License, or (at your option) any later version. For more
* information, see COPYING.
*/
#endregion
using System;
using OpenRA.Graphics;
using OpenRA.Primitives;
using SDL2;
namespace OpenRA.Platforms.Default
{
sealed class Sdl2GraphicsContext : ThreadAffine, IGraphicsContext
{
readonly Sdl2PlatformWindow window;
bool disposed;
IntPtr context;
public Sdl2GraphicsContext(Sdl2PlatformWindow window)
{
this.window = window;
}
internal void InitializeOpenGL()
{
SetThreadAffinity();
context = SDL.SDL_GL_CreateContext(window.Window);
if (context == IntPtr.Zero || SDL.SDL_GL_MakeCurrent(window.Window, context) < 0)
throw new InvalidOperationException("Can not create OpenGL context. (Error: {0})".F(SDL.SDL_GetError()));
OpenGL.Initialize();
uint vao;
OpenGL.CheckGLError();
OpenGL.glGenVertexArrays(1, out vao);
OpenGL.CheckGLError();
OpenGL.glBindVertexArray(vao);
OpenGL.CheckGLError();
OpenGL.glEnableVertexAttribArray(Shader.VertexPosAttributeIndex);
OpenGL.CheckGLError();
OpenGL.glEnableVertexAttribArray(Shader.TexCoordAttributeIndex);
OpenGL.CheckGLError();
OpenGL.glEnableVertexAttribArray(Shader.TexMetadataAttributeIndex);
OpenGL.CheckGLError();
}
public IVertexBuffer<Vertex> CreateVertexBuffer(int size)
{
VerifyThreadAffinity();
return new VertexBuffer<Vertex>(size);
}
public ITexture CreateTexture()
{
VerifyThreadAffinity();
return new Texture();
}
public IFrameBuffer CreateFrameBuffer(Size s)
{
VerifyThreadAffinity();
return new FrameBuffer(s, new Texture(), Color.FromArgb(0));
}
public IFrameBuffer CreateFrameBuffer(Size s, Color clearColor)
{
VerifyThreadAffinity();
return new FrameBuffer(s, new Texture(), clearColor);
}
public IFrameBuffer CreateFrameBuffer(Size s, ITextureInternal texture, Color clearColor)
{
VerifyThreadAffinity();
return new FrameBuffer(s, texture, clearColor);
}
public IShader CreateShader(string name)
{
VerifyThreadAffinity();
return new Shader(name);
}
public void EnableScissor(int x, int y, int width, int height)
{
VerifyThreadAffinity();
if (width < 0)
width = 0;
if (height < 0)
height = 0;
var windowSize = window.WindowSize;
var windowScale = window.WindowScale;
var surfaceSize = window.SurfaceSize;
if (windowSize != surfaceSize)
{
x = (int)Math.Round(windowScale * x);
y = (int)Math.Round(windowScale * y);
width = (int)Math.Round(windowScale * width);
height = (int)Math.Round(windowScale * height);
}
OpenGL.glScissor(x, y, width, height);
OpenGL.CheckGLError();
OpenGL.glEnable(OpenGL.GL_SCISSOR_TEST);
OpenGL.CheckGLError();
}
public void DisableScissor()
{
VerifyThreadAffinity();
OpenGL.glDisable(OpenGL.GL_SCISSOR_TEST);
OpenGL.CheckGLError();
}
public void Present()
{
VerifyThreadAffinity();
SDL.SDL_GL_SwapWindow(window.Window);
}
static int ModeFromPrimitiveType(PrimitiveType pt)
{
switch (pt)
{
case PrimitiveType.PointList: return OpenGL.GL_POINTS;
case PrimitiveType.LineList: return OpenGL.GL_LINES;
case PrimitiveType.TriangleList: return OpenGL.GL_TRIANGLES;
}
throw new NotImplementedException();
}
public void DrawPrimitives(PrimitiveType pt, int firstVertex, int numVertices)
{
VerifyThreadAffinity();
OpenGL.glDrawArrays(ModeFromPrimitiveType(pt), firstVertex, numVertices);
OpenGL.CheckGLError();
}
public void Clear()
{
VerifyThreadAffinity();
OpenGL.glClearColor(0, 0, 0, 1);
OpenGL.CheckGLError();
OpenGL.glClear(OpenGL.GL_COLOR_BUFFER_BIT | OpenGL.GL_DEPTH_BUFFER_BIT);
OpenGL.CheckGLError();
}
public void EnableDepthBuffer()
{
VerifyThreadAffinity();
OpenGL.glClear(OpenGL.GL_DEPTH_BUFFER_BIT);
OpenGL.CheckGLError();
OpenGL.glEnable(OpenGL.GL_DEPTH_TEST);
OpenGL.CheckGLError();
OpenGL.glDepthFunc(OpenGL.GL_LEQUAL);
OpenGL.CheckGLError();
}
public void DisableDepthBuffer()
{
VerifyThreadAffinity();
OpenGL.glDisable(OpenGL.GL_DEPTH_TEST);
OpenGL.CheckGLError();
}
public void ClearDepthBuffer()
{
VerifyThreadAffinity();
OpenGL.glClear(OpenGL.GL_DEPTH_BUFFER_BIT);
OpenGL.CheckGLError();
}
public void SetBlendMode(BlendMode mode)
{
VerifyThreadAffinity();
OpenGL.glBlendEquation(OpenGL.GL_FUNC_ADD);
OpenGL.CheckGLError();
switch (mode)
{
case BlendMode.None:
OpenGL.glDisable(OpenGL.GL_BLEND);
break;
case BlendMode.Alpha:
OpenGL.glEnable(OpenGL.GL_BLEND);
OpenGL.CheckGLError();
OpenGL.glBlendFunc(OpenGL.GL_ONE, OpenGL.GL_ONE_MINUS_SRC_ALPHA);
break;
case BlendMode.Additive:
case BlendMode.Subtractive:
OpenGL.glEnable(OpenGL.GL_BLEND);
OpenGL.CheckGLError();
OpenGL.glBlendFunc(OpenGL.GL_ONE, OpenGL.GL_ONE);
if (mode == BlendMode.Subtractive)
{
OpenGL.CheckGLError();
OpenGL.glBlendEquationSeparate(OpenGL.GL_FUNC_REVERSE_SUBTRACT, OpenGL.GL_FUNC_ADD);
}
break;
case BlendMode.Multiply:
OpenGL.glEnable(OpenGL.GL_BLEND);
OpenGL.CheckGLError();
OpenGL.glBlendFunc(OpenGL.GL_DST_COLOR, OpenGL.GL_ONE_MINUS_SRC_ALPHA);
OpenGL.CheckGLError();
break;
case BlendMode.Multiplicative:
OpenGL.glEnable(OpenGL.GL_BLEND);
OpenGL.CheckGLError();
OpenGL.glBlendFunc(OpenGL.GL_ZERO, OpenGL.GL_SRC_COLOR);
break;
case BlendMode.DoubleMultiplicative:
OpenGL.glEnable(OpenGL.GL_BLEND);
OpenGL.CheckGLError();
OpenGL.glBlendFunc(OpenGL.GL_DST_COLOR, OpenGL.GL_SRC_COLOR);
break;
}
OpenGL.CheckGLError();
}
public void Dispose()
{
if (disposed)
return;
disposed = true;
|
[
"\t\t\tif (context != IntPtr.Zero)"
] | 469
|
lcc
|
csharp
| null |
704587a1d393bef058060b716bb0ae02a9df9635baed95b0
|
|
try:
import typing # help IDEs with type-hinting inside docstrings
except ImportError:
pass
import numpy # help IDEs with type-hinting inside docstrings
from collections import OrderedDict
from copy import deepcopy
import numpy as np
from ...core.error import CovMat
from ...tools import print_dict_as_table
from .._base import FitException, FitBase, DataContainerBase, ModelFunctionBase
from .container import XYContainer
from .cost import XYCostFunction_Chi2, STRING_TO_COST_FUNCTION
from .model import XYParametricModel
from .plot import XYPlotAdapter
from ..util import function_library, add_in_quadrature, invert_matrix
__all__ = ['XYFit', 'XYFitException']
class XYFitException(FitException):
pass
class XYFit(FitBase):
CONTAINER_TYPE = XYContainer
MODEL_TYPE = XYParametricModel
MODEL_FUNCTION_TYPE = ModelFunctionBase
PLOT_ADAPTER_TYPE = XYPlotAdapter
EXCEPTION_TYPE = XYFitException
RESERVED_NODE_NAMES = {'y_data', 'y_model', 'cost',
'x_error', 'y_data_error', 'y_model_error', 'total_error',
'x_cov_mat', 'y_data_cov_mat', 'y_model_cov_mat', 'total_cov_mat',
'x_cor_mat', 'y_data_cor_mat', 'y_model_cor_mat', 'total_cor_mat',
'x_cov_mat_inverse', 'y_data_cov_mat_inverse', 'y_model_cov_mat_inverse', 'total_cor_mat_inverse'
'x_data_cov_mat'}
_BASIC_ERROR_NAMES = {
'x_data_error', 'x_model_error', 'x_data_cov_mat', 'x_model_cov_mat',
'y_data_error', 'y_model_error', 'y_data_cov_mat', 'y_model_cov_mat'
}
X_ERROR_ALGORITHMS = ('iterative linear', 'nonlinear')
_STRING_TO_COST_FUNCTION = STRING_TO_COST_FUNCTION
_AXES = (None, "x", "y")
_MODEL_NAME = "y_model"
_MODEL_ERROR_NODE_NAMES = ["y_model_error", "y_model_cov_mat"]
_PROJECTED_NODE_NAMES = ["total_error", "total_cov_mat"]
def __init__(self,
xy_data,
model_function=function_library.linear_model,
cost_function=XYCostFunction_Chi2(
axes_to_use='xy', errors_to_use='covariance'),
minimizer=None, minimizer_kwargs=None,
dynamic_error_algorithm="nonlinear"):
"""Construct a fit of a model to *xy* data.
:param xy_data: A :py:obj:`~.XYContainer` or a raw 2D array of shape ``(2, N)``
containing the measurement data.
:type xy_data: XYContainer or typing.Sequence
:param model_function: The model function as a native Python function where the first
argument denotes the independent *x* variable or an already defined
:py:class:`~kafe2.fit.xy.XYModelFunction` object.
:type model_function: typing.Callable
:param cost_function: The cost function this fit uses to find the best parameters.
:type cost_function: str or typing.Callable
:param minimizer: The minimizer to use for fitting. Either :py:obj:`None`, ``"iminuit"``,
``"tminuit"``, or ``"scipy"``.
:type minimizer: str or None
:param minimizer_kwargs: Dictionary with kwargs for the minimizer.
:type minimizer_kwargs: dict
"""
super(XYFit, self).__init__(
data=xy_data, model_function=model_function, cost_function=cost_function,
minimizer=minimizer, minimizer_kwargs=minimizer_kwargs,
dynamic_error_algorithm=dynamic_error_algorithm)
# -- private methods
def _init_nexus(self):
super(XYFit, self)._init_nexus()
self._nexus.add_function(
func=self._project_cov_mat,
func_name="total_cov_mat",
par_names=[
"x_total_cov_mat",
"y_total_cov_mat",
"x_model",
"parameter_values"
],
existing_behavior="replace"
)
self._nexus.add_function(
func=self._project_error,
func_name="total_error",
par_names=[
"x_total_error",
"y_total_error",
"x_model",
"parameter_values"
],
existing_behavior="replace"
)
self._nexus.add_dependency(
'y_model',
depends_on=(
'x_model',
'parameter_values'
)
)
self._nexus.add_dependency(
'x_model',
depends_on=(
'x_data',
)
)
def _set_new_data(self, new_data):
    """Replace the fit data with *new_data*.

    Accepts either an :py:obj:`~.XYContainer` (copied) or a raw sequence
    interpreted as ``(x_values, y_values)``. Other container types are
    rejected. Afterwards the error-change callback is re-attached and the
    nexus data nodes are invalidated.
    """
    if isinstance(new_data, self.CONTAINER_TYPE):
        self._data_container = deepcopy(new_data)
    else:
        if isinstance(new_data, DataContainerBase):
            raise XYFitException("Incompatible container type '%s' (expected '%s')"
                                 % (type(new_data), self.CONTAINER_TYPE))
        self._data_container = XYContainer(new_data[0], new_data[1], dtype=float)
    self._data_container._on_error_change_callback = self._on_error_change
    # invalidate the cached nexus data nodes
    for _node_name in ('x_data', 'y_data'):
        self._nexus.get(_node_name).mark_for_update()
def _set_new_parametric_model(self):
    # Rebuild the parametric model from the current x values, model function
    # and parameter values.
    self._param_model = XYParametricModel(
        self.x_model,
        self._model_function,
        self.parameter_values
    )
def _report_data(self, output_stream, indent, indentation_level):
    """Write a formatted report of the *x* and *y* data (values and, if
    defined, errors and correlation matrices) to *output_stream*."""
    output_stream.write(indent * indentation_level + '########\n')
    output_stream.write(indent * indentation_level + '# Data #\n')
    output_stream.write(indent * indentation_level + '########\n\n')
    _data_table_dict = OrderedDict()
    _data_table_dict['X Data'] = self.x_data
    # error rows are only shown when x uncertainties have been defined
    if self._data_container.has_x_errors:
        _data_table_dict['X Data Error'] = self.x_data_error
        _data_table_dict['X Data Correlation Matrix'] = self.x_data_cor_mat
    print_dict_as_table(_data_table_dict, output_stream=output_stream, indent_level=indentation_level + 1)
    output_stream.write('\n')
    _data_table_dict = OrderedDict()
    _data_table_dict['Y Data'] = self.y_data
    if self._data_container.has_y_errors:
        _data_table_dict['Y Data Error'] = self.y_data_error
        _data_table_dict['Y Data Correlation Matrix'] = self.y_data_cor_mat
    print_dict_as_table(_data_table_dict, output_stream=output_stream, indent_level=indentation_level + 1)
    output_stream.write('\n')
def _report_model(self, output_stream, indent, indentation_level):
    """Write a formatted report of the *x* and *y* model values (and their
    errors/correlations, if defined) to *output_stream*."""
    # call base method to show header and model function
    super(XYFit, self)._report_model(output_stream, indent, indentation_level)
    _model_table_dict = OrderedDict()
    _model_table_dict['X Model'] = self.x_model
    if self._param_model.has_x_errors:
        _model_table_dict['X Model Error'] = self.x_model_error
        _model_table_dict['X Model Correlation Matrix'] = self.x_model_cor_mat
    print_dict_as_table(_model_table_dict, output_stream=output_stream, indent_level=indentation_level + 1)
    output_stream.write('\n')
    _model_table_dict = OrderedDict()
    _model_table_dict['Y Model'] = self.y_model
    if self._param_model.has_y_errors:
        _model_table_dict['Y Model Error'] = self.y_model_error
        _model_table_dict['Y Model Correlation Matrix'] = self.y_model_cor_mat
    print_dict_as_table(_model_table_dict, output_stream=output_stream, indent_level=indentation_level + 1)
    output_stream.write('\n')
    # note when relative y-model errors were evaluated against model values
    if self._param_model.get_matching_errors({"relative": True, "axis": 1}):
        output_stream.write(indent * (indentation_level + 1))
        output_stream.write(
            "y model covariance matrix was calculated dynamically relative to y model values.\n"
        )
        output_stream.write("\n")
def _project_cov_mat(self, x_cov_mat, y_cov_mat, x_model, parameter_values):
    # First-order propagation of the x covariance onto the y axis:
    # cov_total = cov_y + cov_x ⊙ outer(dy/dx, dy/dx)
    # (elementwise product with the outer product of pointwise derivatives).
    _derivatives = self._param_model.eval_model_function_derivative_by_x(
        x=x_model,
        # numerical derivative step: 1% of the pointwise x error
        dx=0.01 * np.sqrt(np.diag(x_cov_mat)),
        model_parameters=parameter_values
    )
    return y_cov_mat + x_cov_mat * np.outer(_derivatives, _derivatives)
def _project_error(self, x_error, y_error, x_model, parameter_values):
    # Pointwise first-order propagation of x errors onto the y axis:
    # err_total = sqrt(err_y^2 + (err_x * dy/dx)^2)
    _derivatives = self._param_model.eval_model_function_derivative_by_x(
        x=x_model,
        # numerical derivative step: 1% of the pointwise x error
        dx=0.01 * x_error,
        model_parameters=parameter_values
    )
    return np.sqrt(np.square(y_error) + np.square(x_error * _derivatives))
def _set_data_as_model_ref(self):
    """Point all relative *y* model errors at the data *y* values.

    Returns a list of ``(error, previous_reference)`` pairs so the original
    references can be restored afterwards.
    """
    _result = []
    _matching_errors = self._param_model.get_matching_errors(
        {"relative": True, "axis": 1})
    for _error in _matching_errors.values():
        # capture the old reference before redirecting it to the data
        _result.append((_error, _error.reference))
        _error.reference = self._data_container.y
    return _result
def _iterative_fits_needed(self):
    """Whether the fit must be repeated iteratively (only for the
    ``"iterative"`` dynamic-error algorithm with relative y errors or any
    x errors present)."""
    _has_relative_y_errors = bool(
        self._param_model.get_matching_errors({"relative": True, "axis": 1}))
    return ((_has_relative_y_errors or self.has_x_errors)
            and self._dynamic_error_algorithm == "iterative")
def _second_fit_needed(self):
    """Whether a second fit is required (``"nonlinear"`` algorithm with
    relative y errors defined on the model)."""
    _has_relative_y_errors = bool(
        self._param_model.get_matching_errors({"relative": True, "axis": 1}))
    return _has_relative_y_errors and self._dynamic_error_algorithm == "nonlinear"
def _get_node_names_to_freeze(self, first_fit):
    """Return the nexus node names to freeze during fitting; the projected
    nodes are additionally frozen unless x errors must be re-projected
    (i.e. unless x errors exist and the algorithm is not ``"iterative"``)."""
    _base_names = super(XYFit, self)._get_node_names_to_freeze(first_fit)
    if self.has_x_errors and self._dynamic_error_algorithm != "iterative":
        return _base_names
    return self._PROJECTED_NODE_NAMES + _base_names
# -- public properties

@property
def has_x_errors(self):
    """:py:obj:`True` if at least one *x* uncertainty source has been defined.

    :rtype: bool
    """
    return self._data_container.has_x_errors or self._param_model.has_x_errors

@property
def has_y_errors(self):
    """:py:obj:`True` if at least one *y* uncertainty source has been defined.

    :rtype: bool
    """
    return self._data_container.has_y_errors or self._param_model.has_y_errors
@property
def x_data(self):
    """1D array containing the measurement *x* values.

    :rtype: numpy.ndarray[float]
    """
    return self._data_container.x

@property
def x_model(self):
    """1D array containing the model *x* values. The same as :py:obj:`.x_data` for an
    :py:obj:`~.XYFit`.

    :rtype: numpy.ndarray[float]
    """
    return self.x_data

@property
def y_data(self):
    """1D array containing the measurement *y* values.

    :rtype: numpy.ndarray[float]
    """
    return self._data_container.y

@property
def model(self):
    """2D array of shape ``(2, N)`` containing the *x* and *y* model values.

    :rtype: numpy.ndarray
    """
    return self._param_model.data
@property
def x_data_error(self):
    """1D array containing the pointwise *x* data uncertainties.

    :rtype: numpy.ndarray[float]
    """
    return self._data_container.x_err

@property
def y_data_error(self):
    """1D array containing the pointwise *y* data uncertainties.

    :rtype: numpy.ndarray[float]
    """
    return self._data_container.y_err

@property
def data_error(self):
    """1D array containing the pointwise *xy* uncertainties projected onto the *y* axis.

    :rtype: numpy.ndarray[float]
    """
    # projection is evaluated at the current parameter values on each access
    return self._project_error(
        self.x_data_error, self.y_data_error, self.x_model, self.parameter_values)
@property
def x_data_cov_mat(self):
    """2D array of shape ``(N, N)`` containing the data *x* covariance matrix.

    :rtype: numpy.ndarray
    """
    return self._data_container.x_cov_mat

@property
def y_data_cov_mat(self):
    """2D array of shape ``(N, N)`` containing the data *y* covariance matrix.

    :rtype: numpy.ndarray
    """
    return self._data_container.y_cov_mat

@property
def data_cov_mat(self):
    """2D array of shape ``(N, N)`` containing the data *xy* covariance matrix (projected
    onto the *y* axis).

    :rtype: numpy.ndarray
    """
    # projection is evaluated at the current parameter values on each access
    return self._project_cov_mat(
        self.x_data_cov_mat, self.y_data_cov_mat, self.x_model, self.parameter_values)

@property
def x_data_cov_mat_inverse(self):
    """2D array of shape ``(N, N)`` containing the inverse of the data *x* covariance matrix or
    :py:obj:`None` if singular.

    :rtype: numpy.ndarray or None
    """
    return self._data_container.x_cov_mat_inverse

@property
def y_data_cov_mat_inverse(self):
    """2D array of shape ``(N, N)`` containing the inverse of the data *y* covariance matrix or
    :py:obj:`None` if singular.

    :rtype: numpy.ndarray or None
    """
    return self._data_container.y_cov_mat_inverse
@property
def data_cov_mat_inverse(self):
"""2D array of shape ``(N, N)`` containing the inverse of the data *xy* covariance matrix
|
[
" projected onto the *y* axis. :py:obj:`None` if singular."
] | 985
|
lcc
|
python
| null |
846340ba2c3c023a2837435e9ce17bc793645f33a20dcb42
|
|
/*
* This library is part of OpenCms -
* the Open Source Content Management System
*
* Copyright (c) Alkacon Software GmbH & Co. KG (http://www.alkacon.com)
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Lesser General Public
* License as published by the Free Software Foundation; either
* version 2.1 of the License, or (at your option) any later version.
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Lesser General Public License for more details.
*
* For further information about Alkacon Software, please see the
* company website: http://www.alkacon.com
*
* For further information about OpenCms, please see the
* project website: http://www.opencms.org
*
* You should have received a copy of the GNU Lesser General Public
* License along with this library; if not, write to the Free Software
* Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
*/
package org.opencms.relations;
import org.opencms.util.CmsUUID;

import java.util.Objects;

import org.apache.commons.lang3.builder.HashCodeBuilder;
import org.apache.commons.lang3.builder.ReflectionToStringBuilder;
import org.apache.commons.lang3.builder.ToStringStyle;
/**
* Immutable bean representing most of the information in a CmsLink.
*
*/
public class CmsLinkInfo {
/** Empty link. */
public static final CmsLinkInfo EMPTY = new CmsLinkInfo(CmsUUID.getNullUUID(), null, null, null, null, true);

/** The anchor. */
private String m_anchor;

/** Cached hash code, precomputed in the constructor (excludes the relation type). */
private transient int m_hashCode;

/** Indicates whether the link is internal or not. */
private boolean m_internal;

/** The query. */
private String m_query;

/** The structure id. */
private CmsUUID m_structureId;

/** The link target. */
private String m_target;

/** Cached toString() result. */
private transient String m_toStringRepr;

/** The relation type (not part of equals()/hashCode()). */
private CmsRelationType m_type;
/**
 * Creates a new instance.<p>
 *
 * The hash code is precomputed here; the relation type is deliberately
 * excluded from it (and from equals()).
 *
 * @param structureId the structure id
 * @param target the link target
 * @param query the query
 * @param anchor the anchor
 * @param type the type
 * @param internal true if the link is internal
 */
public CmsLinkInfo(
    CmsUUID structureId,
    String target,
    String query,
    String anchor,
    CmsRelationType type,
    boolean internal) {

    m_structureId = structureId;
    m_target = target;
    m_query = query;
    m_anchor = anchor;
    m_type = type;
    m_internal = internal;
    HashCodeBuilder hashCodeBuilder = new HashCodeBuilder();
    // don't use the type in the hash code
    m_hashCode = hashCodeBuilder.append(m_structureId).append(m_target).append(m_query).append(m_anchor).append(
        m_internal).toHashCode();
}
/**
 * @see java.lang.Object#equals(java.lang.Object)
 */
@Override
public boolean equals(Object obj) {

    // Replaces the verbose auto-generated null-check chains with
    // java.util.Objects.equals (identical semantics, far shorter).
    // Deliberately does *not* compare m_type, consistent with hashCode().
    if (this == obj) {
        return true;
    }
    if ((obj == null) || (getClass() != obj.getClass())) {
        return false;
    }
    CmsLinkInfo other = (CmsLinkInfo)obj;
    return (m_internal == other.m_internal)
        && Objects.equals(m_anchor, other.m_anchor)
        && Objects.equals(m_query, other.m_query)
        && Objects.equals(m_structureId, other.m_structureId)
        && Objects.equals(m_target, other.m_target);
}
/**
 * Gets the anchor.<p>
 *
 * @return the anchor
 */
public String getAnchor() {

    return m_anchor;
}

/**
 * Gets the query.<p>
 *
 * @return the query
 */
public String getQuery() {

    return m_query;
}

/**
 * Gets the structure id.<p>
 *
 * @return the structure id
 */
public CmsUUID getStructureId() {

    return m_structureId;
}

/**
 * Gets the target.<p>
 *
 * @return the target
 */
public String getTarget() {

    return m_target;
}

/**
 * Gets the relation type.<p>
 *
 * @return the type
 */
public CmsRelationType getType() {

    return m_type;
}

/**
 * @see java.lang.Object#hashCode()
 */
@Override
public int hashCode() {

    // returns the hash code precomputed in the constructor
    return m_hashCode;
}

/**
 * Checks whether the link is internal.<p>
 *
 * @return true if this is an internal link
 */
public boolean isInternal() {

    return m_internal;
}
/**
* Converts this to a CmsLink.
*
* @return a new CmsLink instance with the information from this bean
*/
public CmsLink toLink() {
|
[
" if (this == EMPTY) {"
] | 703
|
lcc
|
java
| null |
1915494441abd4076b5a0f96dc9ffb2c41a828a4c1abde95
|
|
/*************************************************************************
* Copyright 2009-2015 Eucalyptus Systems, Inc.
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation; version 3 of the License.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program. If not, see http://www.gnu.org/licenses/.
*
* Please contact Eucalyptus Systems, Inc., 6755 Hollister Ave., Goleta
* CA 93117, USA or visit http://www.eucalyptus.com/licenses/ if you need
* additional information or have any questions.
*
* This file may incorporate work covered under the following copyright
* and permission notice:
*
* Software License Agreement (BSD License)
*
* Copyright (c) 2008, Regents of the University of California
* All rights reserved.
*
* Redistribution and use of this software in source and binary forms,
* with or without modification, are permitted provided that the
* following conditions are met:
*
* Redistributions of source code must retain the above copyright
* notice, this list of conditions and the following disclaimer.
*
* Redistributions in binary form must reproduce the above copyright
* notice, this list of conditions and the following disclaimer
* in the documentation and/or other materials provided with the
* distribution.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
* "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
* LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS
* FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE
* COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
* INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
* BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
* LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
* CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
* LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN
* ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
* POSSIBILITY OF SUCH DAMAGE. USERS OF THIS SOFTWARE ACKNOWLEDGE
* THE POSSIBLE PRESENCE OF OTHER OPEN SOURCE LICENSED MATERIAL,
* COPYRIGHTED MATERIAL OR PATENTED MATERIAL IN THIS SOFTWARE,
* AND IF ANY SUCH MATERIAL IS DISCOVERED THE PARTY DISCOVERING
* IT MAY INFORM DR. RICH WOLSKI AT THE UNIVERSITY OF CALIFORNIA,
* SANTA BARBARA WHO WILL THEN ASCERTAIN THE MOST APPROPRIATE REMEDY,
* WHICH IN THE REGENTS' DISCRETION MAY INCLUDE, WITHOUT LIMITATION,
* REPLACEMENT OF THE CODE SO IDENTIFIED, LICENSING OF THE CODE SO
* IDENTIFIED, OR WITHDRAWAL OF THE CODE CAPABILITY TO THE EXTENT
* NEEDED TO COMPLY WITH ANY SUCH LICENSES OR RIGHTS.
************************************************************************/
package com.eucalyptus.objectstorage.entities.upgrade;
import static com.eucalyptus.upgrade.Upgrades.Version.v4_0_0;
import java.util.ArrayList;
import java.util.Date;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.UUID;
import javax.annotation.Nonnull;
import javax.annotation.Nullable;
import javax.persistence.EntityTransaction;
import org.apache.commons.lang.StringUtils;
import org.apache.log4j.Logger;
import com.eucalyptus.auth.Accounts;
import com.eucalyptus.auth.AuthException;
import com.eucalyptus.auth.euare.persist.entities.AccountEntity;
import com.eucalyptus.auth.principal.AccountIdentifiers;
import com.eucalyptus.auth.principal.User;
import com.eucalyptus.auth.util.SystemAccountProvider;
import com.eucalyptus.entities.Entities;
import com.eucalyptus.entities.Transactions;
import com.eucalyptus.objectstorage.BucketState;
import com.eucalyptus.objectstorage.ObjectState;
import com.eucalyptus.objectstorage.ObjectStorage;
import com.eucalyptus.objectstorage.entities.Bucket;
import com.eucalyptus.objectstorage.entities.ObjectEntity;
import com.eucalyptus.objectstorage.util.ObjectStorageProperties;
import com.eucalyptus.objectstorage.util.ObjectStorageProperties.VersioningStatus;
import com.eucalyptus.storage.msgs.s3.AccessControlList;
import com.eucalyptus.storage.msgs.s3.AccessControlPolicy;
import com.eucalyptus.storage.msgs.s3.CanonicalUser;
import com.eucalyptus.storage.msgs.s3.Grant;
import com.eucalyptus.storage.msgs.s3.Grantee;
import com.eucalyptus.storage.msgs.s3.Group;
import com.eucalyptus.upgrade.Upgrades.EntityUpgrade;
import com.eucalyptus.util.Exceptions;
import com.eucalyptus.walrus.entities.BucketInfo;
import com.eucalyptus.walrus.entities.GrantInfo;
import com.eucalyptus.walrus.entities.ImageCacheInfo;
import com.eucalyptus.walrus.entities.ObjectInfo;
import com.eucalyptus.walrus.entities.WalrusSnapshotInfo;
import com.eucalyptus.walrus.util.WalrusProperties;
import com.google.common.base.Function;
import com.google.common.base.Predicate;
import com.google.common.collect.Iterators;
import com.google.common.collect.Lists;
import com.google.common.collect.Maps;
import com.google.common.collect.Sets;
/**
* Upgrade process for transferring information from Walrus to OSG, and modifying Walrus entities to work with OSG. The upgrade is broken down in to
* well defined ordered stages. A failing stage will halt the upgrade process and the subsequent stages won't be processed.
*
* @author Swathi Gangisetty
*/
public class ObjectStorage400Upgrade {
private static Logger LOG = Logger.getLogger(ObjectStorage400Upgrade.class);
private static Map<String, AccountIdentifiers> accountIdAccountMap = Maps.newHashMap(); // Cache account ID -> account info
private static Map<String, User> accountIdAdminMap = Maps.newHashMap(); // Cache account ID -> admin user info
private static Map<String, User> userIdUserMap = Maps.newHashMap(); // Cache user ID -> user info
private static Set<String> deletedAccountIds = Sets.newHashSet(); // Cache deleted account IDs
private static Set<String> deletedUserIds = Sets.newHashSet(); // Cache deleted user IDs
private static Set<String> deletedAdminAccountIds = Sets.newHashSet(); // Cache account IDs whose admin is deleted
private static Set<String> noCanonicalIdAccountIds = Sets.newHashSet(); // Cache account IDs without any canonical IDs
private static Map<String, Bucket> bucketMap = Maps.newHashMap(); // Cache bucket name -> bucket object
private static Set<String> walrusSnapshotBuckets = Sets.newHashSet(); // Cache all snapshot buckets
private static Set<String> walrusSnapshotObjects = Sets.newHashSet(); // Cache all snapshot objects
private static AccountIdentifiers eucalyptusAccount = null; // lazily resolved, see getEucalyptusAccount()
private static User eucalyptusAdmin = null; // lazily resolved, see getEucalyptusAdmin()
private static AccountIdentifiers blockStorageAccount = null; // lazily resolved, see getBlockStorageAccount()
private static User blockStorageAdmin = null; // lazily resolved, see getBlockStorageAdmin()

/** A single stage of the upgrade; stages run in order and the first failure aborts the upgrade. */
public interface UpgradeTask {

    public void apply() throws Exception;
}

// Ordered list of upgrade stages executed by OSGUpgrade.
private static final ArrayList<? extends UpgradeTask> upgrades = Lists.newArrayList(Setup.INSTANCE, CopyBucketsToOSG.INSTANCE,
    CopyObjectsToOSG.INSTANCE, ModifyWalrusBuckets.INSTANCE, ModifyWalrusObjects.INSTANCE, FlushImageCache.INSTANCE);
@EntityUpgrade(entities = {ObjectEntity.class}, since = v4_0_0, value = ObjectStorage.class)
public static enum OSGUpgrade implements Predicate<Class> {
    INSTANCE;

    /**
     * Runs all upgrade tasks in order; the first failing task aborts the
     * whole upgrade by throwing an undeclared exception.
     */
    @Override
    public boolean apply(@Nullable Class arg0) {
        // Iterate through each upgrade task, using Iterators.all to bail out on the first failure
        return Iterators.all(upgrades.iterator(), new Predicate<UpgradeTask>() {

            @Override
            public boolean apply(UpgradeTask task) {
                try {
                    LOG.info("Executing objectstorage upgrade task: " + task.getClass().getSimpleName());
                    task.apply();
                    return true;
                } catch (Exception e) {
                    LOG.error("Upgrade task failed: " + task.getClass().getSimpleName());
                    // Returning false does not seem to halt the upgrade and cause a rollback, must throw an exception
                    throw Exceptions.toUndeclared("Objectstorage upgrade failed due to an error in upgrade task: " + task.getClass().getSimpleName(), e);
                }
            }
        });
    }
}
/**
* Setup stage for configuring the prerequisites before performing the upgrade
*
* <li>Initialize the Accounts library</li>
*
* <li>Setup a blockstorage account</li>
*
* <li>Assign canonical IDs to accounts that don't have it</li>
*
*/
public enum Setup implements UpgradeTask {
    INSTANCE;

    /** Prepares accounts needed by the later stages. */
    @Override
    public void apply() throws Exception {
        // Setup the blockstorage account
        createBlockStorageAccount();
        // Generate canonical IDs for accounts that don't have them
        generateCanonicaIDs();
    }
}
/**
* Transform Walrus bucket entities to OSG bucket entities and persist them. A transformation function is used for converting a Walrus bucket entity
* to OSG bucket entity
*
*/
public enum CopyBucketsToOSG implements UpgradeTask {
    INSTANCE;

    /**
     * Copies every Walrus bucket into the (empty) OSG bucket table.
     * Runs the OSG and Walrus work in two nested transactions; a failure in
     * either rolls back and aborts the stage.
     */
    @Override
    public void apply() throws Exception {
        EntityTransaction osgTran = Entities.get(Bucket.class);
        try {
            List<Bucket> osgBuckets = Entities.query(new Bucket());
            if (osgBuckets != null && osgBuckets.isEmpty()) { // Perform the upgrade only if osg entities are empty
                EntityTransaction walrusTran = Entities.get(BucketInfo.class);
                try {
                    List<BucketInfo> walrusBuckets = Entities.query(new BucketInfo(), Boolean.TRUE);
                    if (walrusBuckets != null && !walrusBuckets.isEmpty()) { // Check if there are any walrus objects to upgrade
                        // Populate the caches of snapshot buckets and snapshot objects
                        populateSnapshotBucketsAndObjects();
                        // Create an OSG bucket for the corresponding walrus Bucket and persist it
                        for (Bucket osgBucket : Lists.transform(walrusBuckets, bucketTransformationFunction())) {
                            Entities.persist(osgBucket);
                        }
                    } else {
                        // no buckets in walrus, nothing to do here
                    }
                    walrusTran.commit();
                } catch (Exception e) {
                    walrusTran.rollback();
                    throw e;
                } finally {
                    // safety net: only fires if neither commit() nor rollback() ran
                    if (walrusTran.isActive()) {
                        walrusTran.commit();
                    }
                }
            } else {
                // nothing to do here since buckets might already be there
            }
            osgTran.commit();
        } catch (Exception e) {
            osgTran.rollback();
            throw e;
        } finally {
            if (osgTran.isActive()) {
                osgTran.commit();
            }
        }
    }
}
/**
* Transform Walrus object entities to OSG object entities and persist them. A transformation function is used for converting a Walrus object entity
* to OSG object entity
*
*/
public enum CopyObjectsToOSG implements UpgradeTask {
    INSTANCE;

    /**
     * Copies every Walrus object into the (empty) OSG object table using the
     * same nested-transaction pattern as CopyBucketsToOSG.
     */
    @Override
    public void apply() throws Exception {
        EntityTransaction osgTran = Entities.get(ObjectEntity.class);
        try {
            List<ObjectEntity> osgObjects = Entities.query(new ObjectEntity());
            if (osgObjects != null && osgObjects.isEmpty()) { // Perform the upgrade only if osg entities are empty
                EntityTransaction walrusTran = Entities.get(ObjectInfo.class);
                try {
                    List<ObjectInfo> walrusObjects = Entities.query(new ObjectInfo(), Boolean.TRUE);
                    if (walrusObjects != null && !walrusObjects.isEmpty()) { // Check if there are any walrus objects to upgrade
                        // Lists.transform() is a lazy operation, so all elements are iterated through only once
                        for (ObjectEntity osgObject : Lists.transform(walrusObjects, objectTransformationFunction())) {
                            Entities.persist(osgObject);
                        }
                    } else {
                        // no objects in walrus, nothing to do here
                    }
                    walrusTran.commit();
                } catch (Exception e) {
                    walrusTran.rollback();
                    throw e;
                } finally {
                    // safety net: only fires if neither commit() nor rollback() ran
                    if (walrusTran.isActive()) {
                        walrusTran.commit();
                    }
                }
            } else {
                // nothing to do here since objects might already be there
            }
            osgTran.commit();
        } catch (Exception e) {
            osgTran.rollback();
            throw e;
        } finally {
            if (osgTran.isActive()) {
                osgTran.commit();
            }
        }
    }
}
/**
* Modify Walrus buckets to work better with OSG
*
* <li>Reset the ownership of every bucket to Eucalyptus account</li>
*
* <li>Reset the ACLs on every bucket and set it to private (FULL_CONTROL for the bucket owner)</li>
*
* <li>Disable versioning entirely (even if its suspended)</li>
*/
public enum ModifyWalrusBuckets implements UpgradeTask {
    INSTANCE;

    /**
     * Rewrites every Walrus bucket in a single transaction: ownership goes to
     * the Eucalyptus admin, ACLs become owner-only FULL_CONTROL, and
     * versioning is disabled.
     */
    @Override
    public void apply() throws Exception {
        EntityTransaction tran = Entities.get(BucketInfo.class);
        try {
            List<BucketInfo> walrusBuckets = Entities.query(new BucketInfo());
            if (walrusBuckets != null && !walrusBuckets.isEmpty()) { // Check if there are any walrus buckets to upgrade
                for (BucketInfo walrusBucket : walrusBuckets) {
                    try {
                        // Reset the ownership and assign it to Eucalyptus admin account and user
                        walrusBucket.setOwnerId(getEucalyptusAccount().getAccountNumber());
                        walrusBucket.setUserId(getEucalyptusAdmin().getUserId());
                        // Reset the ACLs and assign the owner full control
                        walrusBucket.resetGlobalGrants();
                        List<GrantInfo> grantInfos = new ArrayList<GrantInfo>();
                        GrantInfo.setFullControl(walrusBucket.getOwnerId(), grantInfos);
                        walrusBucket.setGrants(grantInfos);
                        // Disable versioning, could probably suspend it but that might not entirely stop walrus from doing versioning related tasks
                        if (walrusBucket.getVersioning() != null
                            && (WalrusProperties.VersioningStatus.Enabled.toString().equals(walrusBucket.getVersioning()) || WalrusProperties.VersioningStatus.Suspended
                                .toString().equals(walrusBucket.getVersioning()))) {
                            walrusBucket.setVersioning(WalrusProperties.VersioningStatus.Disabled.toString());
                        }
                    } catch (Exception e) {
                        LOG.error("Failed to modify Walrus bucket " + walrusBucket.getBucketName(), e);
                        throw e;
                    }
                }
            } else {
                // no buckets in walrus, nothing to do here
            }
            tran.commit();
        } catch (Exception e) {
            tran.rollback();
            throw e;
        } finally {
            // safety net: only fires if neither commit() nor rollback() ran
            if (tran.isActive()) {
                tran.commit();
            }
        }
    }
}
/**
* Modify Walrus objects to work with OSG
*
* <li>Remove delete markers since versioning is entirely handled by OSG</li>
*
* <li>Overwrite objectKey with the objectName, this is the same as the objectUuid in OSG and will be used by the OSG to refer to the object</li>
*
* <li>Overwrite the version ID with the string "null" as Walrus no longer keeps track of versions</li>
*
* <li>Mark the object as the latest since all the objects are unique to Walrus after changing the object key</li>
*
* <li>Reset the ownership of every object to Eucalyptus account</li>
*
* <li>Reset the ACLs on every object and set it to private (FULL_CONTROL for the object owner)</li>
*/
public enum ModifyWalrusObjects implements UpgradeTask {
    INSTANCE;

    /**
     * Rewrites every Walrus object in a single transaction: delete markers
     * are removed, keys/versions are normalized for OSG, and ownership/ACLs
     * are reset to the Eucalyptus admin.
     */
    @Override
    public void apply() throws Exception {
        EntityTransaction tran = Entities.get(ObjectInfo.class);
        try {
            List<ObjectInfo> walrusObjects = Entities.query(new ObjectInfo());
            if (walrusObjects != null && !walrusObjects.isEmpty()) { // Check if there are any walrus objects to upgrade
                for (ObjectInfo walrusObject : walrusObjects) {
                    try {
                        // Check and remove the record if its a delete marker
                        if (walrusObject.getDeleted() != null && walrusObject.getDeleted()) {
                            LOG.info("Removing delete marker from Walrus for object " + walrusObject.getObjectKey() + " in bucket "
                                + walrusObject.getBucketName() + " with version ID " + walrusObject.getVersionId());
                            Entities.delete(walrusObject);
                            continue;
                        }
                        // Copy object name to object key since thats the reference used by OSG
                        walrusObject.setObjectKey(walrusObject.getObjectName());
                        // Change the version ID to null
                        walrusObject.setVersionId(WalrusProperties.NULL_VERSION_ID);
                        // Mark the object as latest
                        walrusObject.setLast(Boolean.TRUE);
                        // Reset the ownership and assign it to Eucalyptus admin account
                        walrusObject.setOwnerId(getEucalyptusAccount().getAccountNumber());
                        // Reset the ACLs and assign the owner full control
                        walrusObject.resetGlobalGrants();
                        List<GrantInfo> grantInfos = new ArrayList<GrantInfo>();
                        GrantInfo.setFullControl(walrusObject.getOwnerId(), grantInfos);
                        walrusObject.setGrants(grantInfos);
                    } catch (Exception e) {
                        LOG.error("Failed to modify Walrus object " + walrusObject.getObjectKey(), e);
                        throw e;
                    }
                }
            } else {
                // no objects in walrus, nothing to do here
            }
            tran.commit();
        } catch (Exception e) {
            tran.rollback();
            throw e;
        } finally {
            // safety net: only fires if neither commit() nor rollback() ran
            if (tran.isActive()) {
                tran.commit();
            }
        }
    }
}
/**
* Add cached images as objects to walrus and OSG and mark them for deletion in OSG. When the OSG boots up, it'll start deleting the objects
*
*/
public enum FlushImageCache implements UpgradeTask {
    INSTANCE;

    /**
     * Converts each cached image into a Walrus object plus an OSG object
     * (marked for deletion) and removes the cache record. Failures here are
     * logged but do NOT abort the upgrade — cached images can be flushed
     * manually afterwards.
     */
    @Override
    public void apply() throws Exception {
        EntityTransaction walrusImageTran = Entities.get(ImageCacheInfo.class);
        try {
            List<ImageCacheInfo> images = Entities.query(new ImageCacheInfo());
            if (images != null && !images.isEmpty()) { // Check if there are any cached images to delete
                EntityTransaction osgObjectTran = Entities.get(ObjectEntity.class);
                EntityTransaction walrusObjectTran = Entities.get(ObjectInfo.class);
                try {
                    for (ImageCacheInfo image : images) {
                        Entities.persist(imageToOSGObjectTransformation().apply(image)); // Persist a new OSG object
                        Entities.persist(imageToWalrusObjectTransformation().apply(image));
                        Entities.delete(image); // Delete the cached image from database
                    }
                    osgObjectTran.commit();
                    walrusObjectTran.commit();
                } catch (Exception e) {
                    osgObjectTran.rollback();
                    walrusObjectTran.rollback();
                    throw e;
                } finally {
                    // safety net: only fires if neither commit() nor rollback() ran
                    if (osgObjectTran.isActive()) {
                        osgObjectTran.commit();
                    }
                    if (walrusObjectTran.isActive()) {
                        walrusObjectTran.commit();
                    }
                }
            } else {
                // no images in walrus, nothing to do here
            }
            walrusImageTran.commit();
        } catch (Exception e) {
            walrusImageTran.rollback();
            // Exceptions here should not halt the upgrade process, the cached images can be flushed manually
            LOG.warn("Cannot flush cached images in Walrus due to an error. May have to be flushed manually");
        } finally {
            if (walrusImageTran.isActive()) {
                walrusImageTran.commit();
            }
        }
    }
}
/**
 * Lazily resolves and caches the Eucalyptus (system) account identifiers.
 *
 * @return the Eucalyptus account identifiers
 * @throws Exception if the account lookup fails
 */
private static AccountIdentifiers getEucalyptusAccount() throws Exception {
    if (eucalyptusAccount != null) {
        return eucalyptusAccount;
    }
    eucalyptusAccount = Accounts.lookupAccountIdentifiersByAlias(AccountIdentifiers.SYSTEM_ACCOUNT);
    return eucalyptusAccount;
}
/**
 * Lazily resolves and caches the admin principal of the Eucalyptus account.
 *
 * @return the Eucalyptus admin user
 * @throws Exception if the account or principal lookup fails
 */
private static User getEucalyptusAdmin() throws Exception {
    if (eucalyptusAdmin != null) {
        return eucalyptusAdmin;
    }
    eucalyptusAdmin = Accounts.lookupPrincipalByAccountNumber(getEucalyptusAccount().getAccountNumber());
    return eucalyptusAdmin;
}
/**
 * Initializes the blockstorage system account via its provider, which is
 * loaded reflectively to avoid a compile-time dependency on the
 * blockstorage module.
 *
 * @throws Exception if the provider class cannot be loaded or initialized
 */
private static void createBlockStorageAccount () throws Exception {
    SystemAccountProvider.Init.initialize( (SystemAccountProvider)
        Class.forName( "com.eucalyptus.blockstorage.BlockStorageSystemAccountProvider" ).newInstance( ) );
}
/**
 * Lazily resolves and caches the blockstorage system account identifiers,
 * creating the account first.
 *
 * @return the blockstorage account identifiers
 * @throws Exception if the account creation or lookup fails
 */
private static AccountIdentifiers getBlockStorageAccount() throws Exception {
    if (blockStorageAccount != null) {
        return blockStorageAccount;
    }
    createBlockStorageAccount();
    blockStorageAccount = Accounts.lookupAccountIdentifiersByAlias(AccountIdentifiers.BLOCKSTORAGE_SYSTEM_ACCOUNT);
    return blockStorageAccount;
}
/**
 * Lazily resolves and caches the admin principal of the blockstorage account.
 *
 * @return the blockstorage admin user
 * @throws Exception if the account or principal lookup fails
 */
private static User getBlockStorageAdmin() throws Exception {
    if (blockStorageAdmin != null) {
        return blockStorageAdmin;
    }
    blockStorageAdmin = Accounts.lookupPrincipalByAccountNumber(getBlockStorageAccount().getAccountNumber());
    return blockStorageAdmin;
}
/**
 * Fills the static caches of snapshot bucket names and snapshot IDs from
 * the Walrus snapshot records (read-only transaction).
 */
private static void populateSnapshotBucketsAndObjects() {
    EntityTransaction tran = Entities.get(WalrusSnapshotInfo.class);
    try {
        List<WalrusSnapshotInfo> walrusSnapshots = Entities.query(new WalrusSnapshotInfo(), Boolean.TRUE);
        for (WalrusSnapshotInfo walrusSnapshot : walrusSnapshots) {
            walrusSnapshotBuckets.add(walrusSnapshot.getSnapshotBucket());
            walrusSnapshotObjects.add(walrusSnapshot.getSnapshotId());
        }
        tran.commit();
    } catch (Exception e) {
        LOG.error("Failed to lookup snapshots stored in Walrus", e);
        tran.rollback();
        throw e;
    } finally {
        // safety net: only fires if neither commit() nor rollback() ran
        if (tran.isActive()) {
            tran.commit();
        }
    }
}
/**
 * Assigns a canonical ID to every account that does not have one yet.
 * (Name typo "CanonicaIDs" kept to avoid breaking callers.)
 *
 * NOTE(review): assumes queried entities are managed by the transaction so
 * populateCanonicalId() is flushed on commit — confirm against Entities docs.
 *
 * @throws Exception if the account query or update fails
 */
private static void generateCanonicaIDs() throws Exception {
    EntityTransaction tran = Entities.get(AccountEntity.class);
    try {
        List<AccountEntity> accounts = Entities.query(new AccountEntity());
        if (accounts != null && accounts.size() > 0) {
            for (AccountEntity account : accounts) {
                if (account.getCanonicalId() == null || account.getCanonicalId().equals("")) {
                    account.populateCanonicalId();
                    LOG.debug("Assigning canonical id " + account.getCanonicalId() + " for account " + account.getAccountNumber());
                }
            }
        }
        tran.commit();
    } catch (Exception e) {
        LOG.error("Failed to generate and assign canonical ids", e);
        tran.rollback();
        throw e;
    } finally {
        // safety net: only fires if neither commit() nor rollback() ran
        if (tran.isActive()) {
            tran.commit();
        }
    }
}
private static ArrayList<Grant> getBucketGrants(BucketInfo walrusBucket) throws Exception {
ArrayList<Grant> grants = new ArrayList<Grant>();
walrusBucket.readPermissions(grants); // Add global grants
grants = convertGrantInfosToGrants(grants, walrusBucket.getGrants()); // Add account/group specific grant
return grants;
}
private static ArrayList<Grant> getObjectGrants(ObjectInfo walrusObject) throws Exception {
ArrayList<Grant> grants = new ArrayList<Grant>();
walrusObject.readPermissions(grants); // Add global grants
grants = convertGrantInfosToGrants(grants, walrusObject.getGrants()); // Add account/group specific grant
return grants;
}
private static ArrayList<Grant> convertGrantInfosToGrants(ArrayList<Grant> grants, List<GrantInfo> grantInfos) throws Exception {
if (grants == null) {
grants = new ArrayList<Grant>();
}
if (grantInfos == null) {
// nothing to do here
return grants;
}
for (GrantInfo grantInfo : grantInfos) {
if (grantInfo.getGrantGroup() != null) {
// Add it as a group
Group group = new Group(grantInfo.getGrantGroup());
transferPermissions(grants, grantInfo, new Grantee(group));
} else {
// Assume it's a user/account
AccountIdentifiers account = null;
if (accountIdAccountMap.containsKey(grantInfo.getUserId())) {
account = accountIdAccountMap.get(grantInfo.getUserId());
} else if (deletedAccountIds.contains(grantInfo.getUserId())) {// In case the account is deleted, skip the grant
LOG.warn("Account ID " + grantInfo.getUserId() + " does not not exist. Skipping this grant");
continue;
} else if (noCanonicalIdAccountIds.contains(grantInfo.getUserId())) { // If canonical ID is missing, use the eucalyptus admin account
LOG.warn("Account ID " + grantInfo.getUserId() + " does not not have a canonical ID. Skipping this grant");
continue;
} else {
try {
// Lookup owning account
account = Accounts.lookupAccountIdentifiersById( grantInfo.getUserId() );
if (StringUtils.isBlank(grantInfo.getUserId())) { // If canonical ID is missing, use the eucalyptus admin account
LOG.warn("Account ID " + grantInfo.getUserId() + " does not not have a canonical ID. Skipping this grant");
noCanonicalIdAccountIds.add(grantInfo.getUserId());
continue;
} else {
// Add it to the map
accountIdAccountMap.put(grantInfo.getUserId(), account);
}
} catch (Exception e) { // In case the account is deleted, skip the grant
LOG.warn("Account ID " + grantInfo.getUserId() + " does not not exist. Skipping this grant");
deletedAccountIds.add(grantInfo.getUserId());
continue;
}
}
CanonicalUser user = new CanonicalUser(account.getCanonicalId(), account.getAccountAlias());
transferPermissions(grants, grantInfo, new Grantee(user));
}
}
return grants;
}
private static void transferPermissions(List<Grant> grants, GrantInfo grantInfo, Grantee grantee) {
if (grantInfo.canRead() && grantInfo.canWrite() && grantInfo.canReadACP() && grantInfo.canWriteACP()) {
grants.add(new Grant(grantee, ObjectStorageProperties.Permission.FULL_CONTROL.toString()));
return;
}
if (grantInfo.canRead()) {
grants.add(new Grant(grantee, ObjectStorageProperties.Permission.READ.toString()));
}
if (grantInfo.canWrite()) {
grants.add(new Grant(grantee, ObjectStorageProperties.Permission.WRITE.toString()));
}
if (grantInfo.canReadACP()) {
grants.add(new Grant(grantee, ObjectStorageProperties.Permission.READ_ACP.toString()));
}
if (grantInfo.canWriteACP()) {
grants.add(new Grant(grantee, ObjectStorageProperties.Permission.WRITE_ACP.toString()));
}
}
/**
* This method transforms a Walrus bucket to an OSG bucket. While the appropriate fields are copied over from the Walrus entity to OSG entity when
* available, the process includes the following additional steps
*
* <li>Copy the bucketName in Walrus entity to bucketName and bucketUuid of the OSG entity</li>
*
* <li>If any account information is missing due to unavailable/deleted accounts, transfer the ownership of the bucket to the Eucalyptus account</li>
*
* <li>If the user associated with the bucket is unavailable, transfer the IAM ownership to either the admin of the owning account if available or
* the Eucalyptus account admin</li>
*
* <li>Skip the grant if the grant owner cannot be retrieved</li>
*
* <li>Transfer the ownership of Snapshot buckets to the blockstorage system account and configure the ACL to private</li>
*/
public static Function<BucketInfo, Bucket> bucketTransformationFunction() {
return new Function<BucketInfo, Bucket>() {
@Override
@Nullable
public Bucket apply(@Nonnull BucketInfo walrusBucket) {
Bucket osgBucket = null;
try {
AccountIdentifiers owningAccount = null;
User owningUser = null;
// Get the owning account
if (walrusSnapshotBuckets.contains(walrusBucket.getBucketName())) { // If its a snapshot bucket, set the owner to blockstorage account
LOG.warn("Changing the ownership of snapshot bucket " + walrusBucket.getBucketName() + " to blockstorage system account");
owningAccount = getBlockStorageAccount();
owningUser = getBlockStorageAdmin();
} else if (accountIdAccountMap.containsKey(walrusBucket.getOwnerId())) { // If account was previously looked up, get it from the map
owningAccount = accountIdAccountMap.get(walrusBucket.getOwnerId());
} else if (deletedAccountIds.contains(walrusBucket.getOwnerId())) { // If the account is deleted, use the eucalyptus admin account
LOG.warn("Account ID " + walrusBucket.getOwnerId() + " does not not exist. Changing the ownership of bucket "
+ walrusBucket.getBucketName() + " to eucalyptus admin account");
owningAccount = getEucalyptusAccount();
owningUser = getEucalyptusAdmin();
} else if (noCanonicalIdAccountIds.contains(walrusBucket.getOwnerId())) { // If canonical ID is missing, use eucalyptus admin account
LOG.warn("Account ID " + walrusBucket.getOwnerId() + " does not have a canonical ID. Changing the ownership of bucket "
+ walrusBucket.getBucketName() + " to eucalyptus admin account");
owningAccount = getEucalyptusAccount();
owningUser = getEucalyptusAdmin();
} else { // If none of the above conditions match, lookup for the account
try {
owningAccount = Accounts.lookupAccountIdentifiersById( walrusBucket.getOwnerId() );
if (StringUtils.isBlank(owningAccount.getCanonicalId())) { // If canonical ID is missing, use eucalyptus admin account
LOG.warn("Account ID " + walrusBucket.getOwnerId() + " does not have a canonical ID. Changing the ownership of bucket "
+ walrusBucket.getBucketName() + " to eucalyptus admin account");
owningAccount = getEucalyptusAccount();
owningUser = getEucalyptusAdmin();
noCanonicalIdAccountIds.add(walrusBucket.getOwnerId());
} else {
accountIdAccountMap.put(walrusBucket.getOwnerId(), owningAccount);
}
} catch (AuthException e) { // In case the account is deleted, transfer the ownership to eucalyptus admin
LOG.warn("Account ID " + walrusBucket.getOwnerId() + " does not not exist. Changing the ownership of bucket "
+ walrusBucket.getBucketName() + " to eucalyptus admin account");
owningAccount = getEucalyptusAccount();
owningUser = getEucalyptusAdmin();
deletedAccountIds.add(walrusBucket.getOwnerId());
deletedUserIds.add(walrusBucket.getUserId());
}
}
// Get the owning user if its not already set
if (owningUser == null) {
if (userIdUserMap.containsKey(walrusBucket.getUserId())) { // If the user was previously looked up, get it from the map
owningUser = userIdUserMap.get(walrusBucket.getUserId());
} else if (deletedUserIds.contains(walrusBucket.getUserId()) && accountIdAdminMap.containsKey(walrusBucket.getOwnerId())) {
// If the user was deleted and the admin for the account was previously looked up, get it from the map
LOG.warn("User ID " + walrusBucket.getUserId() + " does not exist. Changing the IAM ownership of bucket "
+ walrusBucket.getBucketName() + " to the account admin");
owningUser = accountIdAdminMap.get(walrusBucket.getOwnerId());
} else if (deletedUserIds.contains(walrusBucket.getUserId()) && deletedAdminAccountIds.contains(walrusBucket.getOwnerId())) {
// If the user was deleted and the account was also deleted, transfer the IAM ownership to eucalyptus admin
LOG.warn("User ID " + walrusBucket.getUserId() + " and the account admin do not exist. Changing the IAM ownership of bucket "
+ walrusBucket.getBucketName() + " to the eucalyptus account admin");
owningUser = getEucalyptusAdmin();
} else { // If none of the above conditions match, lookup for the user
if (walrusBucket.getUserId() != null) {
try {
owningUser = Accounts.lookupPrincipalByUserId( walrusBucket.getUserId() );
userIdUserMap.put(walrusBucket.getUserId(), owningUser);
} catch (AuthException e) { // User is deleted, lookup for the account admin
deletedUserIds.add(walrusBucket.getUserId());
try {
owningUser = Accounts.lookupPrincipalByAccountNumber( owningAccount.getAccountNumber( ) );
accountIdAdminMap.put(walrusBucket.getOwnerId(), owningUser);
LOG.warn("User ID " + walrusBucket.getUserId() + " does not exist. Changing the IAM ownership of bucket "
+ walrusBucket.getBucketName() + " to the account admin");
} catch (AuthException ie) { // User and admin are both deleted, transfer the IAM ownership to the eucalyptus admin
LOG.warn("User ID " + walrusBucket.getUserId() + " and the account admin do not exist. Changing the IAM ownership of bucket "
+ walrusBucket.getBucketName() + " to the eucalyptus account admin");
owningUser = getEucalyptusAdmin();
deletedAdminAccountIds.add(walrusBucket.getOwnerId());
}
}
} else { // If no owner ID was found for the bucket, set user to account admin or eucalyptus admin.
// This is to avoid insert null IDs into cached sets/maps
if (accountIdAdminMap.containsKey(walrusBucket.getOwnerId())) {
// If the admin to the account was looked up previously, get it from the map
LOG.warn("No user ID listed for bucket " + walrusBucket.getBucketName()
+ ". Changing the IAM ownership of bucket to the account admin");
owningUser = accountIdAdminMap.get(walrusBucket.getBucketName());
} else { // Lookup up the admin if its not available in the map
try {
owningUser = Accounts.lookupPrincipalByAccountNumber( owningAccount.getAccountNumber( ) );
accountIdAdminMap.put(walrusBucket.getOwnerId(), owningUser);
LOG.warn("No user ID listed for bucket " + walrusBucket.getBucketName()
+ ". Changing the IAM ownership of bucket to the account admin");
} catch (AuthException ie) {// User and admin are both deleted, transfer the IAM ownership to the eucalyptus admin
LOG.warn("No user ID listed for bucket " + walrusBucket.getBucketName()
+ " and account admin does not exist. Changing the IAM ownership of bucket to the eucalyptus account admin");
owningUser = getEucalyptusAdmin();
}
}
}
}
}
// Create a new instance of osg bucket and popluate all the fields
osgBucket = new Bucket();
osgBucket.setBucketName(walrusBucket.getBucketName());
osgBucket.withUuid(walrusBucket.getBucketName());
osgBucket.setBucketSize(walrusBucket.getBucketSize());
osgBucket.setLocation(walrusBucket.getLocation());
osgBucket.setLoggingEnabled(walrusBucket.getLoggingEnabled());
osgBucket.setState(BucketState.extant);
osgBucket.setLastState(BucketState.creating); // Set the last state after setting the current state
osgBucket.setTargetBucket(walrusBucket.getTargetBucket());
osgBucket.setTargetPrefix(walrusBucket.getTargetPrefix());
osgBucket.setVersioning(VersioningStatus.valueOf(walrusBucket.getVersioning()));
// Set the owner and IAM user fields
osgBucket.setOwnerCanonicalId(owningAccount.getCanonicalId());
osgBucket.setOwnerDisplayName(owningAccount.getAccountAlias());
osgBucket.setOwnerIamUserId(owningUser.getUserId());
osgBucket.setOwnerIamUserDisplayName(owningUser.getName());
// Generate access control policy
AccessControlList acl = new AccessControlList();
if (walrusSnapshotBuckets.contains(walrusBucket.getBucketName())) { // Dont set any grants for a snapshot bucket
acl.setGrants(new ArrayList<Grant>());
} else {
acl.setGrants(getBucketGrants(walrusBucket));
}
AccessControlPolicy acp = new AccessControlPolicy(new CanonicalUser(owningAccount.getCanonicalId(), owningAccount.getAccountAlias()), acl);
osgBucket.setAcl(acp);
} catch (Exception e) {
LOG.error("Failed to transform Walrus bucket " + walrusBucket.getBucketName() + " to objectstorage bucket", e);
Exceptions.toUndeclared("Failed to transform Walrus bucket " + walrusBucket.getBucketName() + " to objectstorage bucket", e);
}
return osgBucket;
}
};
}
/**
* This method transforms a Walrus object to an OSG object. While the appropriate fields are copied over from the Walrus entity to OSG entity when
* available, the process includes the following additional steps
*
* <li>For delete markers, generate the objectUuid, set the ownership to bucket owner and the leave the grants empty</li>
*
* <li>OSG refers to the backend object using the objectUuid. Use objectName of Walrus entity as the objectUuid in OSG entity. Second part of this
* step is to overwrite the objectKey with the objectName in the Walrus entity. This is executed in the {@code ModifyWalrusBuckets} stage</li>
*
* <li>If any account information is missing due to unavailable/deleted accounts, transfer the ownership of the object to the Eucalyptus account</li>
*
* <li>Since Walrus does not keep track of the user that created the object, transfer the IAM ownership to either the admin of the owning account if
* available or the Eucalyptus account admin</li>
*
* <li>Skip the grant if the grant owner cannot be retrieved</li>
*
* <li>Transfer the ownership of Snapshot objects to the blockstorage system account and configure the ACL to private</li>
*/
public static Function<ObjectInfo, ObjectEntity> objectTransformationFunction() {
return new Function<ObjectInfo, ObjectEntity>() {
@Override
@Nullable
public ObjectEntity apply(@Nonnull ObjectInfo walrusObject) {
ObjectEntity osgObject = null;
try {
Bucket osgBucket = null;
if (bucketMap.containsKey(walrusObject.getBucketName())) {
osgBucket = bucketMap.get(walrusObject.getBucketName());
} else {
osgBucket = Transactions.find(new Bucket(walrusObject.getBucketName()));
bucketMap.put(walrusObject.getBucketName(), osgBucket);
}
osgObject = new ObjectEntity(osgBucket, walrusObject.getObjectKey(), walrusObject.getVersionId());
if (walrusObject.getDeleted() != null && walrusObject.getDeleted()) { // delete marker
osgObject.setObjectUuid(UUID.randomUUID().toString());
osgObject.setStorageClass(ObjectStorageProperties.STORAGE_CLASS.STANDARD.toString());
osgObject.setObjectModifiedTimestamp(walrusObject.getLastModified());
osgObject.setIsDeleteMarker(Boolean.TRUE);
osgObject.setSize(0L);
osgObject.setIsLatest(walrusObject.getLast());
osgObject.setState(ObjectState.extant);
// Set the ownership to bucket owner as the bucket owning account/user
osgObject.setOwnerCanonicalId(osgBucket.getOwnerCanonicalId());
osgObject.setOwnerDisplayName(osgBucket.getOwnerDisplayName());
osgObject.setOwnerIamUserId(osgBucket.getOwnerIamUserId());
osgObject.setOwnerIamUserDisplayName(osgBucket.getOwnerIamUserDisplayName());
// Generate empty access control policy, OSG should set it to private acl for the owner
AccessControlList acl = new AccessControlList();
acl.setGrants(new ArrayList<Grant>());
AccessControlPolicy acp =
new AccessControlPolicy(new CanonicalUser(osgBucket.getOwnerCanonicalId(), osgBucket.getOwnerDisplayName()), acl);
osgObject.setAcl(acp);
} else { // not a delete marker
AccountIdentifiers owningAccount = null;
User adminUser = null;
// Get the owning account
if (walrusSnapshotObjects.contains(walrusObject.getObjectKey())) {// If its a snapshot object, set the owner to blockstorage account
LOG.warn("Changing the ownership of snapshot object " + walrusObject.getObjectKey() + " to blockstorage system account");
owningAccount = getBlockStorageAccount();
adminUser = getBlockStorageAdmin();
} else if (accountIdAccountMap.containsKey(walrusObject.getOwnerId())) { // If account was previously looked up, get it from the map
owningAccount = accountIdAccountMap.get(walrusObject.getOwnerId());
} else if (deletedAccountIds.contains(walrusObject.getOwnerId())) { // If the account is deleted, use the eucalyptus admin account
// Account is deleted, transfer the entire ownership to eucalyptus account admin
LOG.warn("Account ID " + walrusObject.getOwnerId() + " does not not exist. Changing the ownership of object "
+ walrusObject.getObjectKey() + " in bucket " + walrusObject.getBucketName() + " to eucalyptus admin account");
owningAccount = getEucalyptusAccount();
adminUser = getEucalyptusAdmin();
} else if (noCanonicalIdAccountIds.contains(walrusObject.getOwnerId())) { // If canonical ID is missing, use eucalyptus admin account
LOG.warn("Account ID " + walrusObject.getOwnerId() + " does not have a canonical ID. Changing the ownership of object "
+ walrusObject.getObjectKey() + " in bucket " + walrusObject.getBucketName() + " to eucalyptus admin account");
owningAccount = getEucalyptusAccount();
adminUser = getEucalyptusAdmin();
} else { // If none of the above conditions match, lookup for the account
try {
owningAccount = Accounts.lookupAccountIdentifiersById( walrusObject.getOwnerId() );
if (StringUtils.isBlank(owningAccount.getCanonicalId())) {
LOG.warn("Account ID " + walrusObject.getOwnerId() + " does not have a canonical ID. Changing the ownership of object "
+ walrusObject.getObjectKey() + " in bucket " + walrusObject.getBucketName() + " to eucalyptus admin account");
owningAccount = getEucalyptusAccount();
|
[
" adminUser = getEucalyptusAdmin();"
] | 4,345
|
lcc
|
java
| null |
cf6d3d1f0f07aeb719a37db4e37a1cd437cb894df0b12bdd
|
|
#region License
// Copyright (c) 2013, ClearCanvas Inc.
// All rights reserved.
// http://www.clearcanvas.ca
//
// This file is part of the ClearCanvas RIS/PACS open source project.
//
// The ClearCanvas RIS/PACS open source project is free software: you can
// redistribute it and/or modify it under the terms of the GNU General Public
// License as published by the Free Software Foundation, either version 3 of the
// License, or (at your option) any later version.
//
// The ClearCanvas RIS/PACS open source project is distributed in the hope that it
// will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General
// Public License for more details.
//
// You should have received a copy of the GNU General Public License along with
// the ClearCanvas RIS/PACS open source project. If not, see
// <http://www.gnu.org/licenses/>.
#endregion
using System;
using ClearCanvas.Common;
using ClearCanvas.Common.Utilities;
using ClearCanvas.Desktop;
using ClearCanvas.Desktop.Trees;
using ClearCanvas.Desktop.Tables;
using ClearCanvas.Desktop.Actions;
namespace ClearCanvas.Ris.Client
{
/// <summary>
/// Extension point for views onto <see cref="FolderExplorerComponent"/>
/// </summary>
[ExtensionPoint]
public class FolderExplorerComponentViewExtensionPoint : ExtensionPoint<IApplicationComponentView>
{
}
/// <summary>
/// WorklistExplorerComponent class
/// </summary>
[AssociateView(typeof(FolderExplorerComponentViewExtensionPoint))]
public class FolderExplorerComponent : ApplicationComponent, IFolderExplorerComponent
{
enum InitializationState
{
NotInitialized,
Initializing,
Initialized
}
private readonly FolderTreeRoot _folderTreeRoot;
private FolderTreeNode _selectedTreeNode;
private event EventHandler _selectedFolderChanged;
private event EventHandler _intialized;
private InitializationState _initializationState;
private readonly IFolderSystem _folderSystem;
private Timer _folderInvalidateTimer;
private readonly FolderExplorerGroupComponent _owner;
/// <summary>
/// Constructor
/// </summary>
public FolderExplorerComponent(IFolderSystem folderSystem, FolderExplorerGroupComponent owner)
{
_folderTreeRoot = new FolderTreeRoot(this);
_folderSystem = folderSystem;
_owner = owner;
}
#region IFolderExplorerComponent implementation
/// <summary>
/// Gets a value indicating whether this folder explorer has already been initialized.
/// </summary>
bool IFolderExplorerComponent.IsInitialized
{
get { return IsInitialized; }
}
/// <summary>
/// Instructs the folder explorer to initialize (build the folder system).
/// </summary>
void IFolderExplorerComponent.Initialize()
{
Initialize();
}
/// <summary>
/// Occurs when asynchronous initialization of this folder system has completed.
/// </summary>
event EventHandler IFolderExplorerComponent.Initialized
{
add { _intialized += value; }
remove { _intialized -= value; }
}
/// <summary>
/// Gets or sets the currently selected folder.
/// </summary>
IFolder IFolderExplorerComponent.SelectedFolder
{
get { return this.SelectedFolder; }
set
{
this.SelectedFolder = value;
}
}
/// <summary>
/// Invalidates all folders.
/// </summary>
void IFolderExplorerComponent.InvalidateFolders()
{
// check initialized
if (!IsInitialized)
return;
// invalidate all folders, and update starting at the root
_folderSystem.InvalidateFolders();
}
/// <summary>
/// Gets the underlying folder system associated with this folder explorer.
/// </summary>
IFolderSystem IFolderExplorerComponent.FolderSystem
{
get { return _folderSystem; }
}
/// <summary>
/// Occurs when the selected folder changes.
/// </summary>
event EventHandler IFolderExplorerComponent.SelectedFolderChanged
{
add { _selectedFolderChanged += value; }
remove { _selectedFolderChanged -= value; }
}
/// <summary>
/// Executes a search on this folder system.
/// </summary>
/// <param name="searchParams"></param>
void IFolderExplorerComponent.ExecuteSearch(SearchParams searchParams)
{
// check initialized
if (!IsInitialized)
return;
if (_folderSystem.SearchEnabled)
_folderSystem.ExecuteSearch(searchParams);
}
void IFolderExplorerComponent.LaunchAdvancedSearchComponent()
{
_folderSystem.LaunchSearchComponent();
}
/// <summary>
/// Gets the application component that displays the content of a folder for this folder system.
/// </summary>
/// <returns></returns>
IApplicationComponent IFolderExplorerComponent.GetContentComponent()
{
return _folderSystem.GetContentComponent();
}
#endregion
#region Application Component overrides
public override void Start()
{
// if the folder system needs immediate initialization, do that now
if(!_folderSystem.LazyInitialize)
{
Initialize();
}
base.Start();
}
public override void Stop()
{
if (_folderInvalidateTimer != null)
{
_folderInvalidateTimer.Stop();
_folderInvalidateTimer.Dispose();
}
// un-subscribe to events (important because the folderSystem object may be re-used by another explorer)
_folderSystem.Folders.ItemAdded -= FolderAddedEventHandler;
_folderSystem.Folders.ItemRemoved -= FolderRemovedEventHandler;
_folderSystem.FoldersChanged -= FoldersChangedEventHandler;
_folderSystem.FoldersInvalidated -= FoldersInvalidatedEventHandler;
_folderSystem.FolderPropertiesChanged -= FolderPropertiesChangedEventHandler;
_folderSystem.Dispose();
base.Stop();
}
public override IActionSet ExportedActions
{
get
{
return _folderSystem.FolderTools == null
? new ActionSet()
: _folderSystem.FolderTools.Actions;
}
}
#endregion
#region Presentation Model
public ITree FolderTree
{
get { return _folderTreeRoot.GetSubTree(); }
}
public ISelection SelectedFolderTreeNode
{
get { return new Selection(_selectedTreeNode); }
set
{
var nodeToSelect = (FolderTreeNode)value.Item;
SelectFolder(nodeToSelect);
}
}
public ITable FolderContentsTable
{
get { return _selectedTreeNode == null ? null : _selectedTreeNode.Folder.ItemsTable; }
}
public event EventHandler SelectedFolderChanged
{
|
[
" add { _selectedFolderChanged += value; }"
] | 666
|
lcc
|
csharp
| null |
2a81205ef1f5abf727ff63850ffd1162fadd475e92b2d3ca
|
|
package org.ovirt.engine.core.bll;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertNull;
import static org.mockito.Matchers.any;
import static org.mockito.Matchers.anyList;
import static org.mockito.Matchers.eq;
import static org.mockito.Mockito.doReturn;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.spy;
import static org.mockito.Mockito.when;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Set;
import javax.validation.ConstraintViolation;
import org.junit.Test;
import org.mockito.Mockito;
import org.ovirt.engine.core.bll.context.EngineContext;
import org.ovirt.engine.core.bll.network.macpoolmanager.MacPoolManagerStrategy;
import org.ovirt.engine.core.common.action.ImportVmTemplateParameters;
import org.ovirt.engine.core.common.businessentities.BusinessEntitiesDefinitions;
import org.ovirt.engine.core.common.businessentities.StorageDomain;
import org.ovirt.engine.core.common.businessentities.StorageDomainStatic;
import org.ovirt.engine.core.common.businessentities.StorageDomainStatus;
import org.ovirt.engine.core.common.businessentities.StorageDomainType;
import org.ovirt.engine.core.common.businessentities.StoragePool;
import org.ovirt.engine.core.common.businessentities.VDSGroup;
import org.ovirt.engine.core.common.businessentities.VmDevice;
import org.ovirt.engine.core.common.businessentities.VmTemplate;
import org.ovirt.engine.core.common.businessentities.storage.DiskImage;
import org.ovirt.engine.core.common.businessentities.storage.StorageType;
import org.ovirt.engine.core.common.businessentities.storage.VolumeFormat;
import org.ovirt.engine.core.common.businessentities.storage.VolumeType;
import org.ovirt.engine.core.common.errors.EngineMessage;
import org.ovirt.engine.core.common.queries.VdcQueryParametersBase;
import org.ovirt.engine.core.common.queries.VdcQueryReturnValue;
import org.ovirt.engine.core.common.queries.VdcQueryType;
import org.ovirt.engine.core.common.utils.ValidationUtils;
import org.ovirt.engine.core.compat.Guid;
import org.ovirt.engine.core.dao.StorageDomainDao;
import org.ovirt.engine.core.dao.StorageDomainStaticDao;
import org.ovirt.engine.core.dao.StoragePoolDao;
import org.ovirt.engine.core.dao.VmTemplateDao;
import org.springframework.util.Assert;
public class ImportVmTemplateCommandTest {
@Test
public void insufficientDiskSpace() {
// The following is enough since the validation is mocked out anyway. Just want to make sure the flow in CDA is correct.
// Full test for the scenarios is done in the inherited class.
final ImportVmTemplateCommand command = setupVolumeFormatAndTypeTest(VolumeFormat.RAW, VolumeType.Preallocated, StorageType.NFS);
doReturn(false).when(command).validateSpaceRequirements(anyList());
assertFalse(command.canDoAction());
}
@Test
public void validVolumeFormatAndTypeCombinations() throws Exception {
assertValidVolumeInfoCombination(VolumeFormat.RAW, VolumeType.Preallocated, StorageType.NFS);
assertValidVolumeInfoCombination(VolumeFormat.RAW, VolumeType.Sparse, StorageType.NFS);
assertValidVolumeInfoCombination(VolumeFormat.COW, VolumeType.Sparse, StorageType.NFS);
assertValidVolumeInfoCombination(VolumeFormat.RAW, VolumeType.Preallocated, StorageType.ISCSI);
assertValidVolumeInfoCombination(VolumeFormat.COW, VolumeType.Sparse, StorageType.ISCSI);
assertValidVolumeInfoCombination(VolumeFormat.RAW, VolumeType.Sparse, StorageType.ISCSI);
assertValidVolumeInfoCombination(VolumeFormat.RAW, VolumeType.Preallocated, StorageType.FCP);
assertValidVolumeInfoCombination(VolumeFormat.COW, VolumeType.Sparse, StorageType.FCP);
assertValidVolumeInfoCombination(VolumeFormat.RAW, VolumeType.Sparse, StorageType.FCP);
assertValidVolumeInfoCombination(VolumeFormat.RAW, VolumeType.Preallocated, StorageType.LOCALFS);
assertValidVolumeInfoCombination(VolumeFormat.RAW, VolumeType.Sparse, StorageType.LOCALFS);
assertValidVolumeInfoCombination(VolumeFormat.COW, VolumeType.Sparse, StorageType.LOCALFS);
}
@Test
public void invalidVolumeFormatAndTypeCombinations() throws Exception {
assertInvalidVolumeInfoCombination(VolumeFormat.COW, VolumeType.Preallocated, StorageType.NFS);
assertInvalidVolumeInfoCombination(VolumeFormat.COW, VolumeType.Preallocated, StorageType.ISCSI);
assertInvalidVolumeInfoCombination(VolumeFormat.COW, VolumeType.Preallocated, StorageType.FCP);
assertInvalidVolumeInfoCombination(VolumeFormat.COW, VolumeType.Preallocated, StorageType.LOCALFS);
assertInvalidVolumeInfoCombination(VolumeFormat.RAW, VolumeType.Unassigned, StorageType.NFS);
assertInvalidVolumeInfoCombination(VolumeFormat.RAW, VolumeType.Unassigned, StorageType.ISCSI);
assertInvalidVolumeInfoCombination(VolumeFormat.RAW, VolumeType.Unassigned, StorageType.FCP);
assertInvalidVolumeInfoCombination(VolumeFormat.RAW, VolumeType.Unassigned, StorageType.LOCALFS);
assertInvalidVolumeInfoCombination(VolumeFormat.Unassigned, VolumeType.Preallocated, StorageType.NFS);
assertInvalidVolumeInfoCombination(VolumeFormat.Unassigned, VolumeType.Preallocated, StorageType.ISCSI);
assertInvalidVolumeInfoCombination(VolumeFormat.Unassigned, VolumeType.Preallocated, StorageType.FCP);
assertInvalidVolumeInfoCombination(VolumeFormat.Unassigned, VolumeType.Preallocated, StorageType.LOCALFS);
}
public void testValidateUniqueTemplateNameInDC() {
ImportVmTemplateCommand command =
setupVolumeFormatAndTypeTest(VolumeFormat.RAW, VolumeType.Preallocated, StorageType.NFS);
doReturn(true).when(command).isVmTemplateWithSameNameExist();
CanDoActionTestUtils.runAndAssertCanDoActionFailure(command,
EngineMessage.VM_CANNOT_IMPORT_TEMPLATE_NAME_EXISTS);
}
private void assertValidVolumeInfoCombination(VolumeFormat volumeFormat,
VolumeType volumeType,
StorageType storageType) {
CanDoActionTestUtils.runAndAssertCanDoActionSuccess(
setupVolumeFormatAndTypeTest(volumeFormat, volumeType, storageType));
}
private void assertInvalidVolumeInfoCombination(VolumeFormat volumeFormat,
VolumeType volumeType,
StorageType storageType) {
CanDoActionTestUtils.runAndAssertCanDoActionFailure(
setupVolumeFormatAndTypeTest(volumeFormat, volumeType, storageType),
EngineMessage.ACTION_TYPE_FAILED_DISK_CONFIGURATION_NOT_SUPPORTED);
}
/**
* Prepare a command for testing the given volume format and type combination.
*
* @param volumeFormat
* The volume format of the "imported" image.
* @param volumeType
* The volume type of the "imported" image.
* @param storageType
* The target domain's storage type.
* @return The command which can be called to test the given combination.
*/
private ImportVmTemplateCommand setupVolumeFormatAndTypeTest(
VolumeFormat volumeFormat,
VolumeType volumeType,
StorageType storageType) {
ImportVmTemplateCommand command = spy(new ImportVmTemplateCommand(createParameters()){
@Override
public VDSGroup getVdsGroup() {
return null;
}
});
Backend backend = mock(Backend.class);
doReturn(backend).when(command).getBackend();
doReturn(false).when(command).isVmTemplateWithSameNameExist();
doReturn(true).when(command).isVDSGroupCompatible();
doReturn(true).when(command).validateNoDuplicateDiskImages(any(Iterable.class));
mockGetTemplatesFromExportDomainQuery(volumeFormat, volumeType, command);
mockStorageDomainStatic(command, storageType);
doReturn(mock(VmTemplateDao.class)).when(command).getVmTemplateDao();
doReturn(Mockito.mock(MacPoolManagerStrategy.class)).when(command).getMacPool();
mockStoragePool(command);
mockStorageDomains(command);
doReturn(true).when(command).setAndValidateDiskProfiles();
doReturn(true).when(command).setAndValidateCpuProfile();
doReturn(true).when(command).validateSpaceRequirements(anyList());
return command;
}
private static void mockStorageDomains(ImportVmTemplateCommand command) {
final ImportVmTemplateParameters parameters = command.getParameters();
final StorageDomainDao dao = mock(StorageDomainDao.class);
final StorageDomain srcDomain = new StorageDomain();
srcDomain.setStorageDomainType(StorageDomainType.ImportExport);
srcDomain.setStatus(StorageDomainStatus.Active);
when(dao.getForStoragePool(parameters.getSourceDomainId(), parameters.getStoragePoolId()))
.thenReturn(srcDomain);
final StorageDomain destDomain = new StorageDomain();
destDomain.setStorageDomainType(StorageDomainType.Data);
destDomain.setUsedDiskSize(0);
destDomain.setAvailableDiskSize(1000);
destDomain.setStatus(StorageDomainStatus.Active);
when(dao.getForStoragePool(parameters.getDestDomainId(), parameters.getStoragePoolId()))
.thenReturn(destDomain);
doReturn(dao).when(command).getStorageDomainDao();
}
private static void mockStoragePool(ImportVmTemplateCommand command) {
final StoragePoolDao dao = mock(StoragePoolDao.class);
final StoragePool pool = new StoragePool();
pool.setId(command.getParameters().getStoragePoolId());
when(dao.get(any(Guid.class))).thenReturn(pool);
doReturn(dao).when(command).getStoragePoolDao();
}
private static void mockGetTemplatesFromExportDomainQuery(VolumeFormat volumeFormat,
VolumeType volumeType,
ImportVmTemplateCommand command) {
final VdcQueryReturnValue result = new VdcQueryReturnValue();
Map<VmTemplate, List<DiskImage>> resultMap = new HashMap<VmTemplate, List<DiskImage>>();
DiskImage image = new DiskImage();
image.setActualSizeInBytes(2);
image.setvolumeFormat(volumeFormat);
image.setVolumeType(volumeType);
resultMap.put(new VmTemplate(), Arrays.asList(image));
result.setReturnValue(resultMap);
result.setSucceeded(true);
when(command.getBackend().runInternalQuery(eq(VdcQueryType.GetTemplatesFromExportDomain),
any(VdcQueryParametersBase.class), any(EngineContext.class))).thenReturn(result);
}
private static void mockStorageDomainStatic(
ImportVmTemplateCommand command,
StorageType storageType) {
final StorageDomainStaticDao dao = mock(StorageDomainStaticDao.class);
final StorageDomainStatic domain = new StorageDomainStatic();
domain.setStorageType(storageType);
when(dao.get(any(Guid.class))).thenReturn(domain);
doReturn(dao).when(command).getStorageDomainStaticDao();
}
protected ImportVmTemplateParameters createParameters() {
VmTemplate t = new VmTemplate();
t.setName("testTemplate");
final ImportVmTemplateParameters p =
new ImportVmTemplateParameters(Guid.newGuid(), Guid.newGuid(), Guid.newGuid(), Guid.newGuid(), t);
return p;
}
    // A 100-character name (ten copies of "0987654321") used to exercise
    // template-name length checking below.
    private final String string100 = "0987654321" +
            "0987654321" +
            "0987654321" +
            "0987654321" +
            "0987654321" +
            "0987654321" +
            "0987654321" +
            "0987654321" +
            "0987654321" +
            "0987654321";

    // The four tests below drive checkTemplateName() with and without
    // import-as-cloned, using an over-long name and a name containing
    // special characters.  (checkTemplateName's body is defined further
    // down in this class.)
    @Test
    public void testValidateNameSizeImportAsCloned() {
        checkTemplateName(true, string100);
    }

    @Test
    public void testDoNotValidateNameSizeImport() {
        checkTemplateName(false, string100);
    }

    @Test
    public void testValidateNameSpecialCharImportAsCloned() {
        checkTemplateName(true, "vm_$%$#%#$");
    }

    @Test
    public void testDoNotValidateNameSpecialCharImport() {
        checkTemplateName(false, "vm_$%$#%#$");
    }
private void checkTemplateName(boolean isImportAsNewEntity, String name) {
|
[
" ImportVmTemplateParameters parameters = createParameters();"
] | 616
|
lcc
|
java
| null |
b8a0388946b2d3e0cf2a7f1db681ccb8c70cc44275b2f173
|
|
// This file has been generated by the GUI designer. Do not modify.
namespace BlinkStickClient
{
public partial class CpuEditorWidget
{
		// Widget fields assigned by Build(); this file is produced by the
		// Stetic GUI designer and is not meant to be edited by hand.
		private global::Gtk.VBox vbox2;
		private global::Gtk.Frame frame1;
		private global::Gtk.Alignment GtkAlignment;
		private global::Gtk.VBox vbox3;
		private global::Gtk.RadioButton radiobuttonMonitor;
		private global::Gtk.Label labelMonitorHint;
		private global::Gtk.RadioButton radiobuttonAlert;
		private global::Gtk.Label labelAlertHint;
		private global::Gtk.Alignment alignment2;
		private global::Gtk.Table table1;
		private global::Gtk.ComboBox comboboxTriggerType;
		private global::Gtk.Label labelCheck;
		private global::Gtk.Label labelMinutes;
		private global::Gtk.Label labelPercent;
		private global::Gtk.Label labelWhen;
		private global::Gtk.SpinButton spinbuttonCheckPeriod;
		private global::Gtk.SpinButton spinbuttonCpuPercent;
		private global::Gtk.Label GtkLabel2;
		private global::Gtk.Frame frame3;
		private global::Gtk.Alignment GtkAlignment1;
		private global::Gtk.HBox hbox1;
		private global::Gtk.Label labelCurrentValue;
		private global::Gtk.Button buttonRefresh;
		private global::Gtk.Label GtkLabel3;
protected virtual void Build ()
{
global::Stetic.Gui.Initialize (this);
// Widget BlinkStickClient.CpuEditorWidget
global::Stetic.BinContainer.Attach (this);
this.Name = "BlinkStickClient.CpuEditorWidget";
// Container child BlinkStickClient.CpuEditorWidget.Gtk.Container+ContainerChild
this.vbox2 = new global::Gtk.VBox ();
this.vbox2.Name = "vbox2";
this.vbox2.Spacing = 6;
// Container child vbox2.Gtk.Box+BoxChild
this.frame1 = new global::Gtk.Frame ();
this.frame1.Name = "frame1";
this.frame1.ShadowType = ((global::Gtk.ShadowType)(0));
// Container child frame1.Gtk.Container+ContainerChild
this.GtkAlignment = new global::Gtk.Alignment (0F, 0F, 1F, 1F);
this.GtkAlignment.Name = "GtkAlignment";
this.GtkAlignment.LeftPadding = ((uint)(12));
this.GtkAlignment.TopPadding = ((uint)(12));
// Container child GtkAlignment.Gtk.Container+ContainerChild
this.vbox3 = new global::Gtk.VBox ();
this.vbox3.Name = "vbox3";
this.vbox3.Spacing = 6;
// Container child vbox3.Gtk.Box+BoxChild
this.radiobuttonMonitor = new global::Gtk.RadioButton (global::Mono.Unix.Catalog.GetString ("Monitor"));
this.radiobuttonMonitor.CanFocus = true;
this.radiobuttonMonitor.Name = "radiobuttonMonitor";
this.radiobuttonMonitor.DrawIndicator = true;
this.radiobuttonMonitor.UseUnderline = true;
this.radiobuttonMonitor.Group = new global::GLib.SList (global::System.IntPtr.Zero);
this.vbox3.Add (this.radiobuttonMonitor);
global::Gtk.Box.BoxChild w1 = ((global::Gtk.Box.BoxChild)(this.vbox3 [this.radiobuttonMonitor]));
w1.Position = 0;
w1.Expand = false;
w1.Fill = false;
// Container child vbox3.Gtk.Box+BoxChild
this.labelMonitorHint = new global::Gtk.Label ();
this.labelMonitorHint.Name = "labelMonitorHint";
this.labelMonitorHint.Xpad = 20;
this.labelMonitorHint.Xalign = 0F;
this.labelMonitorHint.LabelProp = global::Mono.Unix.Catalog.GetString ("<i>Uses pattern\'s first animation color to display 0% and second to transition to" +
" 100%. Define a pattern with two Set Color animations for this to take effect</i" +
">");
this.labelMonitorHint.UseMarkup = true;
this.labelMonitorHint.Wrap = true;
this.vbox3.Add (this.labelMonitorHint);
global::Gtk.Box.BoxChild w2 = ((global::Gtk.Box.BoxChild)(this.vbox3 [this.labelMonitorHint]));
w2.Position = 1;
w2.Expand = false;
w2.Fill = false;
// Container child vbox3.Gtk.Box+BoxChild
this.radiobuttonAlert = new global::Gtk.RadioButton (global::Mono.Unix.Catalog.GetString ("Alert"));
this.radiobuttonAlert.CanFocus = true;
this.radiobuttonAlert.Name = "radiobuttonAlert";
this.radiobuttonAlert.DrawIndicator = true;
this.radiobuttonAlert.UseUnderline = true;
this.radiobuttonAlert.Group = this.radiobuttonMonitor.Group;
this.vbox3.Add (this.radiobuttonAlert);
global::Gtk.Box.BoxChild w3 = ((global::Gtk.Box.BoxChild)(this.vbox3 [this.radiobuttonAlert]));
w3.Position = 2;
w3.Expand = false;
w3.Fill = false;
// Container child vbox3.Gtk.Box+BoxChild
this.labelAlertHint = new global::Gtk.Label ();
this.labelAlertHint.Name = "labelAlertHint";
this.labelAlertHint.Xpad = 20;
this.labelAlertHint.Xalign = 0F;
this.labelAlertHint.LabelProp = global::Mono.Unix.Catalog.GetString ("<i>When event occurs triggers pattern playback</i>");
this.labelAlertHint.UseMarkup = true;
this.labelAlertHint.Wrap = true;
this.vbox3.Add (this.labelAlertHint);
global::Gtk.Box.BoxChild w4 = ((global::Gtk.Box.BoxChild)(this.vbox3 [this.labelAlertHint]));
w4.Position = 3;
w4.Expand = false;
w4.Fill = false;
// Container child vbox3.Gtk.Box+BoxChild
this.alignment2 = new global::Gtk.Alignment (0.5F, 0.5F, 1F, 1F);
this.alignment2.Name = "alignment2";
this.alignment2.LeftPadding = ((uint)(40));
// Container child alignment2.Gtk.Container+ContainerChild
this.table1 = new global::Gtk.Table (((uint)(2)), ((uint)(5)), false);
this.table1.Name = "table1";
this.table1.RowSpacing = ((uint)(6));
this.table1.ColumnSpacing = ((uint)(6));
// Container child table1.Gtk.Table+TableChild
this.comboboxTriggerType = global::Gtk.ComboBox.NewText ();
this.comboboxTriggerType.AppendText (global::Mono.Unix.Catalog.GetString ("increases above"));
this.comboboxTriggerType.AppendText (global::Mono.Unix.Catalog.GetString ("drops below"));
this.comboboxTriggerType.Name = "comboboxTriggerType";
this.table1.Add (this.comboboxTriggerType);
global::Gtk.Table.TableChild w5 = ((global::Gtk.Table.TableChild)(this.table1 [this.comboboxTriggerType]));
w5.LeftAttach = ((uint)(1));
w5.RightAttach = ((uint)(2));
w5.XOptions = ((global::Gtk.AttachOptions)(4));
w5.YOptions = ((global::Gtk.AttachOptions)(4));
// Container child table1.Gtk.Table+TableChild
this.labelCheck = new global::Gtk.Label ();
this.labelCheck.Name = "labelCheck";
this.labelCheck.Xalign = 1F;
this.labelCheck.LabelProp = global::Mono.Unix.Catalog.GetString ("Check every");
this.table1.Add (this.labelCheck);
global::Gtk.Table.TableChild w6 = ((global::Gtk.Table.TableChild)(this.table1 [this.labelCheck]));
w6.TopAttach = ((uint)(1));
w6.BottomAttach = ((uint)(2));
w6.XOptions = ((global::Gtk.AttachOptions)(4));
w6.YOptions = ((global::Gtk.AttachOptions)(4));
// Container child table1.Gtk.Table+TableChild
this.labelMinutes = new global::Gtk.Label ();
this.labelMinutes.Name = "labelMinutes";
this.labelMinutes.Xalign = 0F;
this.labelMinutes.LabelProp = global::Mono.Unix.Catalog.GetString ("min");
this.table1.Add (this.labelMinutes);
global::Gtk.Table.TableChild w7 = ((global::Gtk.Table.TableChild)(this.table1 [this.labelMinutes]));
w7.TopAttach = ((uint)(1));
w7.BottomAttach = ((uint)(2));
w7.LeftAttach = ((uint)(3));
w7.RightAttach = ((uint)(4));
w7.XOptions = ((global::Gtk.AttachOptions)(4));
w7.YOptions = ((global::Gtk.AttachOptions)(4));
// Container child table1.Gtk.Table+TableChild
this.labelPercent = new global::Gtk.Label ();
this.labelPercent.Name = "labelPercent";
this.labelPercent.Xalign = 0F;
this.labelPercent.LabelProp = global::Mono.Unix.Catalog.GetString ("%");
this.table1.Add (this.labelPercent);
global::Gtk.Table.TableChild w8 = ((global::Gtk.Table.TableChild)(this.table1 [this.labelPercent]));
w8.LeftAttach = ((uint)(3));
w8.RightAttach = ((uint)(4));
w8.XOptions = ((global::Gtk.AttachOptions)(4));
w8.YOptions = ((global::Gtk.AttachOptions)(4));
// Container child table1.Gtk.Table+TableChild
this.labelWhen = new global::Gtk.Label ();
this.labelWhen.Name = "labelWhen";
this.labelWhen.Xalign = 1F;
this.labelWhen.LabelProp = global::Mono.Unix.Catalog.GetString ("When");
this.table1.Add (this.labelWhen);
global::Gtk.Table.TableChild w9 = ((global::Gtk.Table.TableChild)(this.table1 [this.labelWhen]));
w9.XOptions = ((global::Gtk.AttachOptions)(4));
w9.YOptions = ((global::Gtk.AttachOptions)(4));
// Container child table1.Gtk.Table+TableChild
this.spinbuttonCheckPeriod = new global::Gtk.SpinButton (1D, 120D, 1D);
this.spinbuttonCheckPeriod.CanFocus = true;
this.spinbuttonCheckPeriod.Name = "spinbuttonCheckPeriod";
this.spinbuttonCheckPeriod.Adjustment.PageIncrement = 10D;
this.spinbuttonCheckPeriod.ClimbRate = 1D;
this.spinbuttonCheckPeriod.Numeric = true;
this.spinbuttonCheckPeriod.Value = 1D;
this.table1.Add (this.spinbuttonCheckPeriod);
global::Gtk.Table.TableChild w10 = ((global::Gtk.Table.TableChild)(this.table1 [this.spinbuttonCheckPeriod]));
|
[
"\t\t\tw10.TopAttach = ((uint)(1));"
] | 650
|
lcc
|
csharp
| null |
6261fc22209c7ae19b5098f7004853c527e546aedd74f793
|
|
# -*- encoding: utf-8 -*-
#
# A scripting wrapper for NZBGet's Post Processing Scripting
#
# Copyright (C) 2014 Chris Caron <lead2gold@gmail.com>
#
# This program is free software; you can redistribute it and/or modify it
# under the terms of the GNU Lesser General Public License as published by
# the Free Software Foundation; either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Lesser General Public License for more details.
#
"""
This class was intended to make writing NZBGet Scripts easier to manage and
write by handling the common error handling and provide the most reused code
in a re-usable container. It was initially written to work with NZBGet v13
but provides most backwards compatibility.
It was designed to be inherited as a base class, requiring you to only write
the main() function, which should perform the task you are intending.
It looks after fetching all of the environment variables and will parse
the meta information out of the NZB-File.
It allows you to set variables that other scripts can access if they need to
using the set() and get() variables. This is done through a simply self
maintained hash table type structure within a sqlite database. All the
wrapper functions are already written. If you call set('MYKEY', 1)
you can call get('MYKEY') in another script and continue working
push() functions are written to pass information back to NZBGet using its
processing engine.
all exceptions are now automatically handled and logging can be easily
changed from stdout, to stderr or to a file.
Test suite built in (using python-nose) to ensure old global variables
will still work as well as make them easier to access and manipulate.
Some inline documentation was based on content provided at:
- http://nzbget.net/Extension_scripts
############################################################################
Post Process Script Usage/Example
############################################################################
#############################################################################
### NZBGET POST-PROCESSING SCRIPT ###
#
# Describe your Post-Process Script here
# Author: Chris Caron <lead2gold@gmail.com>
#
############################################################################
### OPTIONS ###
#
# Enable NZBGet debug logging (yes, no)
# Debug=no
#
### NZBGET POST-PROCESSING SCRIPT ###
#############################################################################
from nzbget import PostProcessScript
# Now define your class while inheriting the rest
class MyPostProcessScript(PostProcessScript):
def main(self, *args, **kwargs):
# Version Checking, Environment Variables Present, etc
if not self.validate():
# No need to document a failure, validate will do that
# on the reason it failed anyway
return False
# write all of your code here you would have otherwise put in the
# script
# All system environment variables (NZBOP_.*) as well as Post
# Process script specific content (NZBPP_.*)
# following dictionary (without the NZBOP_ or NZBPP_ prefix):
print('TEMPDIR (directory is: %s' % self.get('TEMPDIR'))
        print('DIRECTORY %s' % self.get('DIRECTORY'))
        print('NZBNAME %s' % self.get('NZBNAME'))
        print('NZBFILENAME %s' % self.get('NZBFILENAME'))
        print('CATEGORY %s' % self.get('CATEGORY'))
        print('TOTALSTATUS %s' % self.get('TOTALSTATUS'))
        print('STATUS %s' % self.get('STATUS'))
        print('SCRIPTSTATUS %s' % self.get('SCRIPTSTATUS'))
# Set any variable you want by any key. Note that if you use
# keys that were defined by the system (such as CATEGORY, DIRECTORY,
# etc, you may have some undesirable results. Try to avoid reusing
# system variables already defined (identified above):
self.set('MY_KEY', 'MY_VALUE')
# You can fetch it back; this will also set an entry in the
# sqlite database for each hash references that can be pulled from
# another script that simply calls self.get('MY_KEY')
print(self.get('MY_KEY')) # prints MY_VALUE
# You can also use push() which is similar to set()
# except that it interacts with the NZBGet Server and does not use
# the sqlite database. This can only be reached across other
# scripts if the calling application is NZBGet itself
self.push('ANOTHER_KEY', 'ANOTHER_VALUE')
# You can still however locally retrieve what you set using push()
# with the get() function
print(self.get('ANOTHER_KEY')) # prints ANOTHER_VALUE
# Your script configuration files (NZBPP_.*) are here in this
# dictionary (again without the NZBPP_ prefix):
# assume you defined `Debug=no` in the first 10K of your
# PostProcessScript NZBGet translates this to `NZBPP_DEBUG` which can
# be retrieved as follows:
        print('DEBUG %s' % self.get('DEBUG'))
# Returns have been made easy. Just return:
# * True if everything was successful
# * False if there was a problem
        # * None if you want to report that you've just gracefully
        #   skipped processing (this is better than False)
        #   in some circumstances. This is neither a failure nor a
        #   success status.
# Feel free to use the actual exit codes as well defined by
# NZBGet on their website. They have also been defined here
# from nzbget import EXIT_CODE
return True
# Call your script as follows:
if __name__ == "__main__":
from sys import exit
# Create an instance of your Script
myscript = MyPostProcessScript()
# call run() and exit() using it's returned value
exit(myscript.run())
"""
import re
import six
from os import chdir
from os import environ
from os.path import isdir
from os.path import join
from os.path import splitext
from os.path import basename
from os.path import abspath
from socket import error as SocketError
# Relative Includes
from .ScriptBase import ScriptBase
from .ScriptBase import Health
from .ScriptBase import SCRIPT_MODE
from .ScriptBase import NZBGET_BOOL_FALSE
from .Utils import os_path_split as split
from .PostProcessCommon import OBFUSCATED_PATH_RE
from .PostProcessCommon import OBFUSCATED_FILE_RE
from .PostProcessCommon import PAR_STATUS
from .PostProcessCommon import UNPACK_STATUS
class TOTAL_STATUS(object):
    """Cumulative (Total) Status of NZB Processing

    Possible values of the total status reported by NZBGet (presumably the
    NZBPP_TOTALSTATUS variable read by PostProcessScript -- TODO confirm).
    """
    # everything OK
    SUCCESS = 'SUCCESS'
    # download is damaged but probably can be repaired; user intervention is
    # required;
    WARNING = 'WARNING'
    # download has failed or a serious error occurred during
    # post-processing (unpack, par);
    FAILURE = 'FAILURE'
    # download was deleted; post-processing scripts are usually not called in
    # this case; however it's possible to force calling scripts with command
    # "post-process again".
    DELETED = 'DELETED'
# Environment variable prefix applied to all NZBGet options being passed into
# scripts with respect to the NZB-File (used in Post Processing Scripts)
POSTPROC_ENVIRO_ID = 'NZBPP_'
# Precompiled regular expression (compiled once for speed) matching
# environment variables that carry the post-processing prefix above,
# e.g. NZBPP_DIRECTORY.
POSTPROC_OPTS_RE = re.compile('^%s([A-Z0-9_]+)$' % POSTPROC_ENVIRO_ID)
class PostProcessScript(ScriptBase):
"""POST PROCESS mode is called after the unpack stage
"""
def __init__(self, *args, **kwargs):
# =-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-
# Multi-Script Support
# =-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-
if not hasattr(self, 'script_dict'):
# Only define once
self.script_dict = {}
self.script_dict[SCRIPT_MODE.POSTPROCESSING] = self
# =-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-
# Initialize Parent
# =-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-
super(PostProcessScript, self).__init__(*args, **kwargs)
def postprocess_init(self, *args, **kwargs):
# =-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-
# Fetch Script Specific Arguments
# =-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-
directory = kwargs.get('directory')
nzbname = kwargs.get('nzbname')
nzbfilename = kwargs.get('nzbfilename')
category = kwargs.get('category')
totalstatus = kwargs.get('totalstatus')
status = kwargs.get('status')
scriptstatus = kwargs.get('scriptstatus')
parse_nzbfile = kwargs.get('parse_nzbfile', True)
use_database = kwargs.get('use_database', True)
# Support Depricated Variables
parstatus = kwargs.get('parstatus')
unpackstatus = kwargs.get('unpackstatus')
# Fetch/Load Post Process Script Configuration
script_config = \
dict([(POSTPROC_OPTS_RE.match(k).group(1), v.strip())
for (k, v) in environ.items() if POSTPROC_OPTS_RE.match(k)])
if self.vvdebug:
# =-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-
# Print Global Script Varables to help debugging process
# =-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-
for k, v in script_config.items():
self.logger.vvdebug('%s%s=%s' % (POSTPROC_ENVIRO_ID, k, v))
# Merge Script Configuration With System Config
script_config.update(self.system)
self.system = script_config
# self.directory
# This is the path to the destination directory for downloaded files.
if directory is None:
self.directory = environ.get(
'%sDIRECTORY' % POSTPROC_ENVIRO_ID,
)
_final_directory = environ.get(
'%sFINALDIR' % POSTPROC_ENVIRO_ID,
)
if self.directory and not isdir(self.directory):
if _final_directory and isdir(_final_directory):
# adjust path
self.directory = _final_directory
else:
self.directory = directory
if self.directory:
self.directory = abspath(self.directory)
# self.nzbname
# User-friendly name of processed nzb-file as it is displayed by the
# program. The file path and extension are removed. If download was
# renamed, this parameter reflects the new name.
if nzbname is None:
self.nzbname = environ.get(
'%sNZBNAME' % POSTPROC_ENVIRO_ID,
)
else:
self.nzbname = nzbname
# self.nzbfilename
# Name of processed nzb-file. If the file was added from incoming
# nzb-directory, this is a full file name, including path and
# extension. If the file was added from web-interface, it's only the
# file name with extension. If the file was added via RPC-API (method
# append), this can be any string but the use of actual file name is
# recommended for developers.
if nzbfilename is None:
self.nzbfilename = environ.get(
'%sNZBFILENAME' % POSTPROC_ENVIRO_ID,
)
else:
self.nzbfilename = nzbfilename
# self.category
# Category assigned to nzb-file (can be empty string).
if category is None:
self.category = environ.get(
'%sCATEGORY' % POSTPROC_ENVIRO_ID,
)
else:
self.category = category
# self.totalstatus
# Total status of the processing of the NZB-File. This value
# includes the result from previous scripts that may have ran
# before this one.
if totalstatus is None:
self.totalstatus = environ.get(
'%sTOTALSTATUS' % POSTPROC_ENVIRO_ID,
)
else:
self.totalstatus = totalstatus
# self.status
# Complete status info for nzb-file: it consists of total status and
# status detail separated with slash. There are many combinations.
# Just few examples:
# FAILURE/HEALTH
# FAILURE/PAR
# FAILURE/UNPACK
# WARNING/REPAIRABLE
# WARNING/SPACE
# WARNING/PASSWORD
# SUCCESS/ALL
# SUCCESS/UNPACK
#
# For the complete list see description of method history in RPC API
# reference: http://nzbget.net/RPC_API_reference
if status is None:
self.status = Health(environ.get(
'%sSTATUS' % POSTPROC_ENVIRO_ID,
))
else:
self.status = Health(status)
# self.scriptstatus
# Summary status of the scripts executed before the current one
if scriptstatus is None:
self.scriptstatus = environ.get(
'%sSCRIPTSTATUS' % POSTPROC_ENVIRO_ID,
)
else:
self.scriptstatus = scriptstatus
# self.parstatus (NZBGet < v13) - Depreciated
# Result of par-check
if parstatus is None:
self.parstatus = environ.get(
'%sPARSTATUS' % POSTPROC_ENVIRO_ID,
# Default
PAR_STATUS.SKIPPED,
)
else:
self.parstatus = parstatus
# self.unpackstatus (NZBGet < v13) - Depreciated
# Result of unpack
if unpackstatus is None:
self.unpackstatus = environ.get(
'%sUNPACKSTATUS' % POSTPROC_ENVIRO_ID,
# Default
UNPACK_STATUS.SKIPPED,
)
else:
self.unpackstatus = unpackstatus
# =-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-
# Error Handling
# =-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-
if self.nzbfilename:
# absolute path names
self.nzbfilename = abspath(self.nzbfilename)
if parse_nzbfile:
# Initialize information fetched from NZB-File
# We intentionally allow existing nzbheaders to over-ride
# any found in the nzbfile
self.nzbheaders = \
self.parse_nzbfile(self.nzbfilename, check_queued=True)
self.nzbheaders.update(self.pull_dnzb())
if self.directory:
# absolute path names
self.directory = abspath(self.directory)
if not (self.directory and isdir(self.directory)):
self.logger.debug(
'Process directory is missing: %s' % self.directory)
else:
try:
chdir(self.directory)
except OSError:
self.logger.debug(
'Process directory is not accessible: %s' % self.directory)
# Total Status
if not isinstance(self.totalstatus, six.string_types):
self.totalstatus = TOTAL_STATUS.SUCCESS
# Par Status
if not isinstance(self.parstatus, int):
try:
self.parstatus = int(self.parstatus)
except:
self.parstatus = PAR_STATUS.SKIPPED
# Unpack Status
if not isinstance(self.unpackstatus, int):
try:
self.unpackstatus = int(self.unpackstatus)
except:
self.unpackstatus = UNPACK_STATUS.SKIPPED
# =-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-
# Enforce system/global variables for script processing
# =-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-
self.system['DIRECTORY'] = self.directory
if self.directory is not None:
environ['%sDIRECTORY' % POSTPROC_ENVIRO_ID] = \
self.directory
self.system['NZBNAME'] = self.nzbname
if self.nzbname is not None:
environ['%sNZBNAME' % POSTPROC_ENVIRO_ID] = \
self.nzbname
self.system['NZBFILENAME'] = self.nzbfilename
if self.nzbfilename is not None:
environ['%sNZBFILENAME' % POSTPROC_ENVIRO_ID] = \
self.nzbfilename
self.system['CATEGORY'] = self.category
if self.category is not None:
|
[
" environ['%sCATEGORY' % POSTPROC_ENVIRO_ID] = \\"
] | 1,764
|
lcc
|
python
| null |
a5e1e43a793649afc91e7bd083f0d034f0df3e24509c448e
|
|
/*
Copyright (C) 2008-2011 Jeroen Frijters
This software is provided 'as-is', without any express or implied
warranty. In no event will the authors be held liable for any damages
arising from the use of this software.
Permission is granted to anyone to use this software for any purpose,
including commercial applications, and to alter it and redistribute it
freely, subject to the following restrictions:
1. The origin of this software must not be misrepresented; you must not
claim that you wrote the original software. If you use this software
in a product, an acknowledgment in the product documentation would be
appreciated but is not required.
2. Altered source versions must be plainly marked as such, and must not be
misrepresented as being the original software.
3. This notice may not be removed or altered from any source distribution.
Jeroen Frijters
jeroen@frijters.net
*/
using System;
using System.Collections.Generic;
using System.Diagnostics;
using System.Runtime.InteropServices;
using IKVM.Reflection.Impl;
using IKVM.Reflection.Metadata;
using IKVM.Reflection.Writer;
namespace IKVM.Reflection.Emit
{
	/// <summary>
	/// Represents a generic type parameter being defined on a TypeBuilder
	/// (ELEMENT_TYPE_VAR) or on a MethodBuilder (ELEMENT_TYPE_MVAR).
	/// Construction immediately adds a record to the GenericParam metadata
	/// table; constraints and attributes are written/patched into the
	/// metadata tables as they are supplied.
	/// </summary>
	public sealed class GenericTypeParameterBuilder : TypeInfo
	{
		private readonly string name;
		// Exactly one of 'type'/'method' is non-null and identifies the owner.
		private readonly TypeBuilder type;
		private readonly MethodBuilder method;
		// Pseudo (pre-fixup) row index of this parameter in the GenericParam table.
		private readonly int paramPseudoIndex;
		private readonly int position;
		// Lazily allocated TypeSpec token (see GetModuleBuilderToken).
		private int typeToken;
		private Type baseType;
		private GenericParameterAttributes attr;

		internal GenericTypeParameterBuilder(string name, TypeBuilder type, int position)
			: this(name, type, null, position, Signature.ELEMENT_TYPE_VAR)
		{
		}

		internal GenericTypeParameterBuilder(string name, MethodBuilder method, int position)
			: this(name, null, method, position, Signature.ELEMENT_TYPE_MVAR)
		{
		}

		private GenericTypeParameterBuilder(string name, TypeBuilder type, MethodBuilder method, int position, byte sigElementType)
			: base(sigElementType)
		{
			this.name = name;
			this.type = type;
			this.method = method;
			this.position = position;
			// Eagerly emit the GenericParam row for this parameter.
			GenericParamTable.Record rec = new GenericParamTable.Record();
			rec.Number = (short)position;
			rec.Flags = 0;
			rec.Owner = type != null ? type.MetadataToken : method.MetadataToken;
			rec.Name = this.ModuleBuilder.Strings.Add(name);
			this.paramPseudoIndex = this.ModuleBuilder.GenericParam.AddRecord(rec);
		}

		public override string AssemblyQualifiedName
		{
			get { return null; }
		}

		public override bool IsValueType
		{
			// A type parameter is a value type iff it carries the 'struct' constraint.
			get { return (this.GenericParameterAttributes & GenericParameterAttributes.NotNullableValueTypeConstraint) != 0; }
		}

		public override Type BaseType
		{
			get { return baseType; }
		}

		public override Type[] __GetDeclaredInterfaces()
		{
			throw new NotImplementedException();
		}

		public override TypeAttributes Attributes
		{
			get { return TypeAttributes.Public; }
		}

		public override string Namespace
		{
			get { return DeclaringType.Namespace; }
		}

		public override string Name
		{
			get { return name; }
		}

		public override string FullName
		{
			get { return null; }
		}

		public override string ToString()
		{
			return this.Name;
		}

		// The module that owns this parameter, via whichever owner is non-null.
		private ModuleBuilder ModuleBuilder
		{
			get { return type != null ? type.ModuleBuilder : method.ModuleBuilder; }
		}

		public override Module Module
		{
			get { return ModuleBuilder; }
		}

		public override int GenericParameterPosition
		{
			get { return position; }
		}

		public override Type DeclaringType
		{
			get { return type; }
		}

		public override MethodBase DeclaringMethod
		{
			get { return method; }
		}

		public override Type[] GetGenericParameterConstraints()
		{
			throw new NotImplementedException();
		}

		public override GenericParameterAttributes GenericParameterAttributes
		{
			get
			{
				CheckBaked();
				return attr;
			}
		}

		internal override void CheckBaked()
		{
			// Delegate to whichever owner (type or method) this parameter belongs to.
			if (type != null)
			{
				type.CheckBaked();
			}
			else
			{
				method.CheckBaked();
			}
		}

		// Appends a GenericParamConstraint row tying 'type' to this parameter.
		private void AddConstraint(Type type)
		{
			GenericParamConstraintTable.Record rec = new GenericParamConstraintTable.Record();
			rec.Owner = paramPseudoIndex;
			rec.Constraint = this.ModuleBuilder.GetTypeTokenForMemberRef(type);
			this.ModuleBuilder.GenericParamConstraint.AddRecord(rec);
		}

		public void SetBaseTypeConstraint(Type baseTypeConstraint)
		{
			this.baseType = baseTypeConstraint;
			AddConstraint(baseTypeConstraint);
		}

		public void SetInterfaceConstraints(params Type[] interfaceConstraints)
		{
			foreach (Type type in interfaceConstraints)
			{
				AddConstraint(type);
			}
		}

		public void SetGenericParameterAttributes(GenericParameterAttributes genericParameterAttributes)
		{
			this.attr = genericParameterAttributes;
			// for now we'll back patch the table
			this.ModuleBuilder.GenericParam.PatchAttribute(paramPseudoIndex, genericParameterAttributes);
		}

		public void SetCustomAttribute(CustomAttributeBuilder customBuilder)
		{
			// Token = table index in the high byte, pseudo row index in the low bytes.
			this.ModuleBuilder.SetCustomAttribute((GenericParamTable.Index << 24) | paramPseudoIndex, customBuilder);
		}

		public void SetCustomAttribute(ConstructorInfo con, byte[] binaryAttribute)
		{
			SetCustomAttribute(new CustomAttributeBuilder(con, binaryAttribute));
		}

		public override int MetadataToken
		{
			get
			{
				CheckBaked();
				return (GenericParamTable.Index << 24) | paramPseudoIndex;
			}
		}

		internal override int GetModuleBuilderToken()
		{
			// Lazily create a TypeSpec (0x1B) record holding this parameter's signature.
			if (typeToken == 0)
			{
				ByteBuffer spec = new ByteBuffer(5);
				Signature.WriteTypeSpec(this.ModuleBuilder, spec, this);
				typeToken = 0x1B000000 | this.ModuleBuilder.TypeSpec.AddRecord(this.ModuleBuilder.Blobs.Add(spec));
			}
			return typeToken;
		}

		internal override Type BindTypeParameters(IGenericBinder binder)
		{
			if (type != null)
			{
				return binder.BindTypeParameter(this);
			}
			else
			{
				return binder.BindMethodParameter(this);
			}
		}

		internal override int GetCurrentToken()
		{
			if (this.ModuleBuilder.IsSaved)
			{
				// After save, translate the pseudo index through the fixup map to the
				// final (sorted, 1-based) row number.
				return (GenericParamTable.Index << 24) | this.Module.GenericParam.GetIndexFixup()[paramPseudoIndex - 1] + 1;
			}
			else
			{
				return (GenericParamTable.Index << 24) | paramPseudoIndex;
			}
		}

		internal override bool IsBaked
		{
			// Baked-ness follows the owner (type or method).
			get { return ((MemberInfo)type ?? method).IsBaked; }
		}
	}
public sealed class TypeBuilder : TypeInfo, ITypeOwner
{
public const int UnspecifiedTypeSize = 0;
private readonly ITypeOwner owner;
private readonly int token;
private int extends;
private Type lazyBaseType; // (lazyBaseType == null && attribs & TypeAttributes.Interface) == 0) => BaseType == System.Object
private readonly int typeName;
private readonly int typeNameSpace;
private readonly string ns;
private readonly string name;
private readonly List<MethodBuilder> methods = new List<MethodBuilder>();
private readonly List<FieldBuilder> fields = new List<FieldBuilder>();
private List<PropertyBuilder> properties;
private List<EventBuilder> events;
private TypeAttributes attribs;
private GenericTypeParameterBuilder[] gtpb;
private List<CustomAttributeBuilder> declarativeSecurity;
private List<Type> interfaces;
private int size;
private short pack;
private bool hasLayout;
		internal TypeBuilder(ITypeOwner owner, string ns, string name)
		{
			// Reserve a TypeDef token and intern the namespace/name strings up front.
			this.owner = owner;
			this.token = this.ModuleBuilder.TypeDef.AllocToken();
			this.ns = ns;
			this.name = name;
			this.typeNameSpace = ns == null ? 0 : this.ModuleBuilder.Strings.Add(ns);
			this.typeName = this.ModuleBuilder.Strings.Add(name);
			MarkKnownType(ns, name);
		}
public ConstructorBuilder DefineDefaultConstructor(MethodAttributes attributes)
{
ConstructorBuilder cb = DefineConstructor(attributes, CallingConventions.Standard, Type.EmptyTypes);
ILGenerator ilgen = cb.GetILGenerator();
ilgen.Emit(OpCodes.Ldarg_0);
ilgen.Emit(OpCodes.Call, BaseType.GetConstructor(BindingFlags.Instance | BindingFlags.Public | BindingFlags.NonPublic, null, Type.EmptyTypes, null));
ilgen.Emit(OpCodes.Ret);
return cb;
}
public ConstructorBuilder DefineConstructor(MethodAttributes attribs, CallingConventions callConv, Type[] parameterTypes)
{
return DefineConstructor(attribs, callConv, parameterTypes, null, null);
}
public ConstructorBuilder DefineConstructor(MethodAttributes attribs, CallingConventions callingConvention, Type[] parameterTypes, Type[][] requiredCustomModifiers, Type[][] optionalCustomModifiers)
{
attribs |= MethodAttributes.RTSpecialName | MethodAttributes.SpecialName;
string name = (attribs & MethodAttributes.Static) == 0 ? ConstructorInfo.ConstructorName : ConstructorInfo.TypeConstructorName;
MethodBuilder mb = DefineMethod(name, attribs, callingConvention, null, null, null, parameterTypes, requiredCustomModifiers, optionalCustomModifiers);
return new ConstructorBuilder(mb);
}
public ConstructorBuilder DefineTypeInitializer()
{
MethodBuilder mb = DefineMethod(ConstructorInfo.TypeConstructorName, MethodAttributes.Private | MethodAttributes.Static | MethodAttributes.RTSpecialName | MethodAttributes.SpecialName, null, Type.EmptyTypes);
|
[
"\t\t\treturn new ConstructorBuilder(mb);"
] | 910
|
lcc
|
csharp
| null |
6b78cc0ec714622bb22d89aa972b1eae59e59788b357955a
|
|
// Generated by ProtoGen, Version=2.3.0.277, Culture=neutral, PublicKeyToken=17b3b1f090c3ea48. DO NOT EDIT!
#pragma warning disable 1591
#region Designer generated code
using pb = global::Google.ProtocolBuffers;
using pbc = global::Google.ProtocolBuffers.Collections;
using pbd = global::Google.ProtocolBuffers.Descriptors;
using scg = global::System.Collections.Generic;
namespace bnet.protocol.channel_invitation {
[global::System.Diagnostics.DebuggerNonUserCodeAttribute()]
[global::System.Runtime.CompilerServices.CompilerGeneratedAttribute()]
[global::System.CodeDom.Compiler.GeneratedCodeAttribute("ProtoGen", "2.3.0.277")]
public static partial class ChannelInvitationTypes {
#region Extension registration
public static void RegisterAllExtensions(pb::ExtensionRegistry registry) {
registry.Add(global::bnet.protocol.channel_invitation.Invitation.ChannelInvitation);
registry.Add(global::bnet.protocol.channel_invitation.SendInvitationRequest.ChannelInvitation);
}
#endregion
#region Static variables
internal static pbd::MessageDescriptor internal__static_bnet_protocol_channel_invitation_Invitation__Descriptor;
internal static pb::FieldAccess.FieldAccessorTable<global::bnet.protocol.channel_invitation.Invitation, global::bnet.protocol.channel_invitation.Invitation.Builder> internal__static_bnet_protocol_channel_invitation_Invitation__FieldAccessorTable;
internal static pbd::MessageDescriptor internal__static_bnet_protocol_channel_invitation_SendInvitationRequest__Descriptor;
internal static pb::FieldAccess.FieldAccessorTable<global::bnet.protocol.channel_invitation.SendInvitationRequest, global::bnet.protocol.channel_invitation.SendInvitationRequest.Builder> internal__static_bnet_protocol_channel_invitation_SendInvitationRequest__FieldAccessorTable;
internal static pbd::MessageDescriptor internal__static_bnet_protocol_channel_invitation_InvitationCollection__Descriptor;
internal static pb::FieldAccess.FieldAccessorTable<global::bnet.protocol.channel_invitation.InvitationCollection, global::bnet.protocol.channel_invitation.InvitationCollection.Builder> internal__static_bnet_protocol_channel_invitation_InvitationCollection__FieldAccessorTable;
#endregion
#region Descriptor
public static pbd::FileDescriptor Descriptor {
get { return descriptor; }
}
private static pbd::FileDescriptor descriptor;
static ChannelInvitationTypes() {
byte[] descriptorData = global::System.Convert.FromBase64String(
"CjlzZXJ2aWNlL2NoYW5uZWxfaW52aXRhdGlvbi9jaGFubmVsX2ludml0YXRp" +
"b25fdHlwZXMucHJvdG8SIGJuZXQucHJvdG9jb2wuY2hhbm5lbF9pbnZpdGF0" +
"aW9uGh1saWIvcHJvdG9jb2wvaW52aXRhdGlvbi5wcm90bxoZbGliL3Byb3Rv" +
"Y29sL2VudGl0eS5wcm90bxojc2VydmljZS9jaGFubmVsL2NoYW5uZWxfdHlw" +
"ZXMucHJvdG8iigIKCkludml0YXRpb24SRgoTY2hhbm5lbF9kZXNjcmlwdGlv" +
"bhgBIAIoCzIpLmJuZXQucHJvdG9jb2wuY2hhbm5lbC5DaGFubmVsRGVzY3Jp" +
"cHRpb24SFwoIcmVzZXJ2ZWQYAiABKAg6BWZhbHNlEhUKBnJlam9pbhgDIAEo" +
"CDoFZmFsc2USFAoMc2VydmljZV90eXBlGAQgASgNMm4KEmNoYW5uZWxfaW52" +
"aXRhdGlvbhIkLmJuZXQucHJvdG9jb2wuaW52aXRhdGlvbi5JbnZpdGF0aW9u" +
"GGkgASgLMiwuYm5ldC5wcm90b2NvbC5jaGFubmVsX2ludml0YXRpb24uSW52" +
"aXRhdGlvbiKDAgoVU2VuZEludml0YXRpb25SZXF1ZXN0EisKCmNoYW5uZWxf" +
"aWQYASABKAsyFy5ibmV0LnByb3RvY29sLkVudGl0eUlkEhAKCHJlc2VydmVk" +
"GAIgASgIEg4KBnJlam9pbhgDIAEoCBIUCgxzZXJ2aWNlX3R5cGUYBCABKA0y" +
"hAEKEmNoYW5uZWxfaW52aXRhdGlvbhIvLmJuZXQucHJvdG9jb2wuaW52aXRh" +
"dGlvbi5TZW5kSW52aXRhdGlvblJlcXVlc3QYaSABKAsyNy5ibmV0LnByb3Rv" +
"Y29sLmNoYW5uZWxfaW52aXRhdGlvbi5TZW5kSW52aXRhdGlvblJlcXVlc3Qi" +
"pAEKFEludml0YXRpb25Db2xsZWN0aW9uEhQKDHNlcnZpY2VfdHlwZRgBIAEo" +
"DRIgChhtYXhfcmVjZWl2ZWRfaW52aXRhdGlvbnMYAiABKA0SEQoJb2JqZWN0" +
"X2lkGAMgASgEEkEKE3JlY2VpdmVkX2ludml0YXRpb24YBCADKAsyJC5ibmV0" +
"LnByb3RvY29sLmludml0YXRpb24uSW52aXRhdGlvbg==");
pbd::FileDescriptor.InternalDescriptorAssigner assigner = delegate(pbd::FileDescriptor root) {
descriptor = root;
internal__static_bnet_protocol_channel_invitation_Invitation__Descriptor = Descriptor.MessageTypes[0];
internal__static_bnet_protocol_channel_invitation_Invitation__FieldAccessorTable =
new pb::FieldAccess.FieldAccessorTable<global::bnet.protocol.channel_invitation.Invitation, global::bnet.protocol.channel_invitation.Invitation.Builder>(internal__static_bnet_protocol_channel_invitation_Invitation__Descriptor,
new string[] { "ChannelDescription", "Reserved", "Rejoin", "ServiceType", });
global::bnet.protocol.channel_invitation.Invitation.ChannelInvitation = pb::GeneratedSingleExtension<global::bnet.protocol.channel_invitation.Invitation>.CreateInstance(global::bnet.protocol.channel_invitation.Invitation.Descriptor.Extensions[0]);
internal__static_bnet_protocol_channel_invitation_SendInvitationRequest__Descriptor = Descriptor.MessageTypes[1];
internal__static_bnet_protocol_channel_invitation_SendInvitationRequest__FieldAccessorTable =
new pb::FieldAccess.FieldAccessorTable<global::bnet.protocol.channel_invitation.SendInvitationRequest, global::bnet.protocol.channel_invitation.SendInvitationRequest.Builder>(internal__static_bnet_protocol_channel_invitation_SendInvitationRequest__Descriptor,
new string[] { "ChannelId", "Reserved", "Rejoin", "ServiceType", });
global::bnet.protocol.channel_invitation.SendInvitationRequest.ChannelInvitation = pb::GeneratedSingleExtension<global::bnet.protocol.channel_invitation.SendInvitationRequest>.CreateInstance(global::bnet.protocol.channel_invitation.SendInvitationRequest.Descriptor.Extensions[0]);
internal__static_bnet_protocol_channel_invitation_InvitationCollection__Descriptor = Descriptor.MessageTypes[2];
internal__static_bnet_protocol_channel_invitation_InvitationCollection__FieldAccessorTable =
new pb::FieldAccess.FieldAccessorTable<global::bnet.protocol.channel_invitation.InvitationCollection, global::bnet.protocol.channel_invitation.InvitationCollection.Builder>(internal__static_bnet_protocol_channel_invitation_InvitationCollection__Descriptor,
new string[] { "ServiceType", "MaxReceivedInvitations", "ObjectId", "ReceivedInvitation", });
return null;
};
pbd::FileDescriptor.InternalBuildGeneratedFileFrom(descriptorData,
new pbd::FileDescriptor[] {
global::bnet.protocol.invitation.Proto.Invitation.Descriptor,
global::bnet.protocol.Entity.Descriptor,
global::bnet.protocol.channel.ChannelTypes.Descriptor,
}, assigner);
}
#endregion
}
#region Messages
[global::System.Diagnostics.DebuggerNonUserCodeAttribute()]
[global::System.Runtime.CompilerServices.CompilerGeneratedAttribute()]
[global::System.CodeDom.Compiler.GeneratedCodeAttribute("ProtoGen", "2.3.0.277")]
public sealed partial class Invitation : pb::GeneratedMessage<Invitation, Invitation.Builder> {
private static readonly Invitation defaultInstance = new Builder().BuildPartial();
private static readonly string[] _invitationFieldNames = new string[] { "channel_description", "rejoin", "reserved", "service_type" };
private static readonly uint[] _invitationFieldTags = new uint[] { 10, 24, 16, 32 };
public static Invitation DefaultInstance {
get { return defaultInstance; }
}
public override Invitation DefaultInstanceForType {
get { return defaultInstance; }
}
protected override Invitation ThisMessage {
get { return this; }
}
public static pbd::MessageDescriptor Descriptor {
get { return global::bnet.protocol.channel_invitation.ChannelInvitationTypes.internal__static_bnet_protocol_channel_invitation_Invitation__Descriptor; }
}
protected override pb::FieldAccess.FieldAccessorTable<Invitation, Invitation.Builder> InternalFieldAccessors {
get { return global::bnet.protocol.channel_invitation.ChannelInvitationTypes.internal__static_bnet_protocol_channel_invitation_Invitation__FieldAccessorTable; }
}
public const int ChannelInvitationFieldNumber = 105;
public static pb::GeneratedExtensionBase<global::bnet.protocol.channel_invitation.Invitation> ChannelInvitation;
public const int ChannelDescriptionFieldNumber = 1;
private bool hasChannelDescription;
private global::bnet.protocol.channel.ChannelDescription channelDescription_ = global::bnet.protocol.channel.ChannelDescription.DefaultInstance;
public bool HasChannelDescription {
get { return hasChannelDescription; }
}
public global::bnet.protocol.channel.ChannelDescription ChannelDescription {
get { return channelDescription_; }
}
public const int ReservedFieldNumber = 2;
private bool hasReserved;
private bool reserved_;
public bool HasReserved {
get { return hasReserved; }
}
public bool Reserved {
get { return reserved_; }
}
public const int RejoinFieldNumber = 3;
private bool hasRejoin;
private bool rejoin_;
public bool HasRejoin {
get { return hasRejoin; }
}
public bool Rejoin {
get { return rejoin_; }
}
public const int ServiceTypeFieldNumber = 4;
private bool hasServiceType;
private uint serviceType_;
public bool HasServiceType {
get { return hasServiceType; }
}
public uint ServiceType {
get { return serviceType_; }
}
public override bool IsInitialized {
get {
if (!hasChannelDescription) return false;
if (!ChannelDescription.IsInitialized) return false;
return true;
}
}
public override void WriteTo(pb::ICodedOutputStream output) {
int size = SerializedSize;
string[] field_names = _invitationFieldNames;
if (hasChannelDescription) {
output.WriteMessage(1, field_names[0], ChannelDescription);
}
if (hasReserved) {
output.WriteBool(2, field_names[2], Reserved);
}
if (hasRejoin) {
output.WriteBool(3, field_names[1], Rejoin);
}
if (hasServiceType) {
output.WriteUInt32(4, field_names[3], ServiceType);
}
UnknownFields.WriteTo(output);
}
private int memoizedSerializedSize = -1;
public override int SerializedSize {
get {
int size = memoizedSerializedSize;
if (size != -1) return size;
size = 0;
if (hasChannelDescription) {
size += pb::CodedOutputStream.ComputeMessageSize(1, ChannelDescription);
}
if (hasReserved) {
size += pb::CodedOutputStream.ComputeBoolSize(2, Reserved);
}
if (hasRejoin) {
size += pb::CodedOutputStream.ComputeBoolSize(3, Rejoin);
}
if (hasServiceType) {
size += pb::CodedOutputStream.ComputeUInt32Size(4, ServiceType);
}
size += UnknownFields.SerializedSize;
memoizedSerializedSize = size;
return size;
}
}
public static Invitation ParseFrom(pb::ByteString data) {
return ((Builder) CreateBuilder().MergeFrom(data)).BuildParsed();
}
public static Invitation ParseFrom(pb::ByteString data, pb::ExtensionRegistry extensionRegistry) {
return ((Builder) CreateBuilder().MergeFrom(data, extensionRegistry)).BuildParsed();
}
public static Invitation ParseFrom(byte[] data) {
return ((Builder) CreateBuilder().MergeFrom(data)).BuildParsed();
}
public static Invitation ParseFrom(byte[] data, pb::ExtensionRegistry extensionRegistry) {
return ((Builder) CreateBuilder().MergeFrom(data, extensionRegistry)).BuildParsed();
}
public static Invitation ParseFrom(global::System.IO.Stream input) {
return ((Builder) CreateBuilder().MergeFrom(input)).BuildParsed();
}
public static Invitation ParseFrom(global::System.IO.Stream input, pb::ExtensionRegistry extensionRegistry) {
return ((Builder) CreateBuilder().MergeFrom(input, extensionRegistry)).BuildParsed();
}
public static Invitation ParseDelimitedFrom(global::System.IO.Stream input) {
return CreateBuilder().MergeDelimitedFrom(input).BuildParsed();
}
public static Invitation ParseDelimitedFrom(global::System.IO.Stream input, pb::ExtensionRegistry extensionRegistry) {
return CreateBuilder().MergeDelimitedFrom(input, extensionRegistry).BuildParsed();
}
public static Invitation ParseFrom(pb::ICodedInputStream input) {
return ((Builder) CreateBuilder().MergeFrom(input)).BuildParsed();
}
public static Invitation ParseFrom(pb::ICodedInputStream input, pb::ExtensionRegistry extensionRegistry) {
return ((Builder) CreateBuilder().MergeFrom(input, extensionRegistry)).BuildParsed();
}
public static Builder CreateBuilder() { return new Builder(); }
public override Builder ToBuilder() { return CreateBuilder(this); }
public override Builder CreateBuilderForType() { return new Builder(); }
public static Builder CreateBuilder(Invitation prototype) {
return (Builder) new Builder().MergeFrom(prototype);
}
[global::System.Diagnostics.DebuggerNonUserCodeAttribute()]
[global::System.Runtime.CompilerServices.CompilerGeneratedAttribute()]
[global::System.CodeDom.Compiler.GeneratedCodeAttribute("ProtoGen", "2.3.0.277")]
public sealed partial class Builder : pb::GeneratedBuilder<Invitation, Builder> {
protected override Builder ThisBuilder {
get { return this; }
}
public Builder() {}
Invitation result = new Invitation();
protected override Invitation MessageBeingBuilt {
get { return result; }
}
public override Builder Clear() {
result = new Invitation();
return this;
}
public override Builder Clone() {
return new Builder().MergeFrom(result);
}
public override pbd::MessageDescriptor DescriptorForType {
get { return global::bnet.protocol.channel_invitation.Invitation.Descriptor; }
}
public override Invitation DefaultInstanceForType {
get { return global::bnet.protocol.channel_invitation.Invitation.DefaultInstance; }
}
public override Invitation BuildPartial() {
if (result == null) {
throw new global::System.InvalidOperationException("build() has already been called on this Builder");
}
Invitation returnMe = result;
result = null;
return returnMe;
}
public override Builder MergeFrom(pb::IMessage other) {
if (other is Invitation) {
return MergeFrom((Invitation) other);
} else {
base.MergeFrom(other);
return this;
}
}
public override Builder MergeFrom(Invitation other) {
if (other == global::bnet.protocol.channel_invitation.Invitation.DefaultInstance) return this;
if (other.HasChannelDescription) {
MergeChannelDescription(other.ChannelDescription);
}
if (other.HasReserved) {
Reserved = other.Reserved;
}
if (other.HasRejoin) {
Rejoin = other.Rejoin;
}
if (other.HasServiceType) {
ServiceType = other.ServiceType;
}
this.MergeUnknownFields(other.UnknownFields);
return this;
}
public override Builder MergeFrom(pb::ICodedInputStream input) {
return MergeFrom(input, pb::ExtensionRegistry.Empty);
}
public override Builder MergeFrom(pb::ICodedInputStream input, pb::ExtensionRegistry extensionRegistry) {
pb::UnknownFieldSet.Builder unknownFields = null;
uint tag;
string field_name;
while (input.ReadTag(out tag, out field_name)) {
if(tag == 0 && field_name != null) {
int field_ordinal = global::System.Array.BinarySearch(_invitationFieldNames, field_name, global::System.StringComparer.Ordinal);
if(field_ordinal >= 0)
tag = _invitationFieldTags[field_ordinal];
else {
if (unknownFields == null) {
unknownFields = pb::UnknownFieldSet.CreateBuilder(this.UnknownFields);
}
ParseUnknownField(input, unknownFields, extensionRegistry, tag, field_name);
continue;
}
}
switch (tag) {
case 0: {
throw pb::InvalidProtocolBufferException.InvalidTag();
}
default: {
if (pb::WireFormat.IsEndGroupTag(tag)) {
if (unknownFields != null) {
this.UnknownFields = unknownFields.Build();
}
return this;
}
if (unknownFields == null) {
unknownFields = pb::UnknownFieldSet.CreateBuilder(this.UnknownFields);
}
ParseUnknownField(input, unknownFields, extensionRegistry, tag, field_name);
break;
}
case 10: {
global::bnet.protocol.channel.ChannelDescription.Builder subBuilder = global::bnet.protocol.channel.ChannelDescription.CreateBuilder();
if (result.hasChannelDescription) {
subBuilder.MergeFrom(ChannelDescription);
}
input.ReadMessage(subBuilder, extensionRegistry);
ChannelDescription = subBuilder.BuildPartial();
break;
}
case 16: {
result.hasReserved = input.ReadBool(ref result.reserved_);
break;
}
case 24: {
result.hasRejoin = input.ReadBool(ref result.rejoin_);
break;
}
case 32: {
result.hasServiceType = input.ReadUInt32(ref result.serviceType_);
break;
}
}
}
if (unknownFields != null) {
this.UnknownFields = unknownFields.Build();
}
return this;
}
public bool HasChannelDescription {
get { return result.hasChannelDescription; }
}
public global::bnet.protocol.channel.ChannelDescription ChannelDescription {
get { return result.ChannelDescription; }
set { SetChannelDescription(value); }
}
public Builder SetChannelDescription(global::bnet.protocol.channel.ChannelDescription value) {
pb::ThrowHelper.ThrowIfNull(value, "value");
result.hasChannelDescription = true;
result.channelDescription_ = value;
return this;
}
public Builder SetChannelDescription(global::bnet.protocol.channel.ChannelDescription.Builder builderForValue) {
pb::ThrowHelper.ThrowIfNull(builderForValue, "builderForValue");
result.hasChannelDescription = true;
result.channelDescription_ = builderForValue.Build();
return this;
}
public Builder MergeChannelDescription(global::bnet.protocol.channel.ChannelDescription value) {
pb::ThrowHelper.ThrowIfNull(value, "value");
if (result.hasChannelDescription &&
result.channelDescription_ != global::bnet.protocol.channel.ChannelDescription.DefaultInstance) {
result.channelDescription_ = global::bnet.protocol.channel.ChannelDescription.CreateBuilder(result.channelDescription_).MergeFrom(value).BuildPartial();
} else {
result.channelDescription_ = value;
}
result.hasChannelDescription = true;
return this;
}
public Builder ClearChannelDescription() {
result.hasChannelDescription = false;
result.channelDescription_ = global::bnet.protocol.channel.ChannelDescription.DefaultInstance;
return this;
}
public bool HasReserved {
get { return result.hasReserved; }
}
public bool Reserved {
get { return result.Reserved; }
set { SetReserved(value); }
}
public Builder SetReserved(bool value) {
result.hasReserved = true;
result.reserved_ = value;
return this;
}
public Builder ClearReserved() {
result.hasReserved = false;
result.reserved_ = false;
return this;
}
public bool HasRejoin {
get { return result.hasRejoin; }
}
public bool Rejoin {
get { return result.Rejoin; }
set { SetRejoin(value); }
}
public Builder SetRejoin(bool value) {
result.hasRejoin = true;
result.rejoin_ = value;
return this;
}
public Builder ClearRejoin() {
result.hasRejoin = false;
result.rejoin_ = false;
return this;
}
public bool HasServiceType {
get { return result.hasServiceType; }
}
public uint ServiceType {
get { return result.ServiceType; }
set { SetServiceType(value); }
}
public Builder SetServiceType(uint value) {
result.hasServiceType = true;
result.serviceType_ = value;
return this;
}
public Builder ClearServiceType() {
result.hasServiceType = false;
result.serviceType_ = 0;
return this;
}
}
static Invitation() {
object.ReferenceEquals(global::bnet.protocol.channel_invitation.ChannelInvitationTypes.Descriptor, null);
}
}
[global::System.Diagnostics.DebuggerNonUserCodeAttribute()]
[global::System.Runtime.CompilerServices.CompilerGeneratedAttribute()]
[global::System.CodeDom.Compiler.GeneratedCodeAttribute("ProtoGen", "2.3.0.277")]
public sealed partial class SendInvitationRequest : pb::GeneratedMessage<SendInvitationRequest, SendInvitationRequest.Builder> {
private static readonly SendInvitationRequest defaultInstance = new Builder().BuildPartial();
private static readonly string[] _sendInvitationRequestFieldNames = new string[] { "channel_id", "rejoin", "reserved", "service_type" };
private static readonly uint[] _sendInvitationRequestFieldTags = new uint[] { 10, 24, 16, 32 };
public static SendInvitationRequest DefaultInstance {
get { return defaultInstance; }
}
public override SendInvitationRequest DefaultInstanceForType {
get { return defaultInstance; }
}
protected override SendInvitationRequest ThisMessage {
get { return this; }
}
public static pbd::MessageDescriptor Descriptor {
get { return global::bnet.protocol.channel_invitation.ChannelInvitationTypes.internal__static_bnet_protocol_channel_invitation_SendInvitationRequest__Descriptor; }
}
protected override pb::FieldAccess.FieldAccessorTable<SendInvitationRequest, SendInvitationRequest.Builder> InternalFieldAccessors {
get { return global::bnet.protocol.channel_invitation.ChannelInvitationTypes.internal__static_bnet_protocol_channel_invitation_SendInvitationRequest__FieldAccessorTable; }
}
public const int ChannelInvitationFieldNumber = 105;
public static pb::GeneratedExtensionBase<global::bnet.protocol.channel_invitation.SendInvitationRequest> ChannelInvitation;
public const int ChannelIdFieldNumber = 1;
private bool hasChannelId;
private global::bnet.protocol.EntityId channelId_ = global::bnet.protocol.EntityId.DefaultInstance;
public bool HasChannelId {
get { return hasChannelId; }
}
public global::bnet.protocol.EntityId ChannelId {
get { return channelId_; }
}
public const int ReservedFieldNumber = 2;
private bool hasReserved;
private bool reserved_;
public bool HasReserved {
get { return hasReserved; }
}
public bool Reserved {
get { return reserved_; }
}
public const int RejoinFieldNumber = 3;
private bool hasRejoin;
private bool rejoin_;
public bool HasRejoin {
get { return hasRejoin; }
}
public bool Rejoin {
get { return rejoin_; }
}
public const int ServiceTypeFieldNumber = 4;
private bool hasServiceType;
private uint serviceType_;
public bool HasServiceType {
get { return hasServiceType; }
}
public uint ServiceType {
get { return serviceType_; }
}
public override bool IsInitialized {
get {
if (HasChannelId) {
if (!ChannelId.IsInitialized) return false;
}
return true;
}
}
public override void WriteTo(pb::ICodedOutputStream output) {
int size = SerializedSize;
string[] field_names = _sendInvitationRequestFieldNames;
if (hasChannelId) {
output.WriteMessage(1, field_names[0], ChannelId);
}
if (hasReserved) {
output.WriteBool(2, field_names[2], Reserved);
}
if (hasRejoin) {
output.WriteBool(3, field_names[1], Rejoin);
}
if (hasServiceType) {
output.WriteUInt32(4, field_names[3], ServiceType);
}
UnknownFields.WriteTo(output);
}
private int memoizedSerializedSize = -1;
public override int SerializedSize {
get {
int size = memoizedSerializedSize;
if (size != -1) return size;
size = 0;
if (hasChannelId) {
size += pb::CodedOutputStream.ComputeMessageSize(1, ChannelId);
}
if (hasReserved) {
size += pb::CodedOutputStream.ComputeBoolSize(2, Reserved);
}
if (hasRejoin) {
size += pb::CodedOutputStream.ComputeBoolSize(3, Rejoin);
}
if (hasServiceType) {
size += pb::CodedOutputStream.ComputeUInt32Size(4, ServiceType);
}
size += UnknownFields.SerializedSize;
memoizedSerializedSize = size;
return size;
}
}
public static SendInvitationRequest ParseFrom(pb::ByteString data) {
return ((Builder) CreateBuilder().MergeFrom(data)).BuildParsed();
}
public static SendInvitationRequest ParseFrom(pb::ByteString data, pb::ExtensionRegistry extensionRegistry) {
return ((Builder) CreateBuilder().MergeFrom(data, extensionRegistry)).BuildParsed();
}
public static SendInvitationRequest ParseFrom(byte[] data) {
return ((Builder) CreateBuilder().MergeFrom(data)).BuildParsed();
}
public static SendInvitationRequest ParseFrom(byte[] data, pb::ExtensionRegistry extensionRegistry) {
return ((Builder) CreateBuilder().MergeFrom(data, extensionRegistry)).BuildParsed();
}
public static SendInvitationRequest ParseFrom(global::System.IO.Stream input) {
return ((Builder) CreateBuilder().MergeFrom(input)).BuildParsed();
}
public static SendInvitationRequest ParseFrom(global::System.IO.Stream input, pb::ExtensionRegistry extensionRegistry) {
return ((Builder) CreateBuilder().MergeFrom(input, extensionRegistry)).BuildParsed();
}
public static SendInvitationRequest ParseDelimitedFrom(global::System.IO.Stream input) {
return CreateBuilder().MergeDelimitedFrom(input).BuildParsed();
}
public static SendInvitationRequest ParseDelimitedFrom(global::System.IO.Stream input, pb::ExtensionRegistry extensionRegistry) {
return CreateBuilder().MergeDelimitedFrom(input, extensionRegistry).BuildParsed();
}
public static SendInvitationRequest ParseFrom(pb::ICodedInputStream input) {
return ((Builder) CreateBuilder().MergeFrom(input)).BuildParsed();
}
public static SendInvitationRequest ParseFrom(pb::ICodedInputStream input, pb::ExtensionRegistry extensionRegistry) {
return ((Builder) CreateBuilder().MergeFrom(input, extensionRegistry)).BuildParsed();
}
public static Builder CreateBuilder() { return new Builder(); }
public override Builder ToBuilder() { return CreateBuilder(this); }
public override Builder CreateBuilderForType() { return new Builder(); }
public static Builder CreateBuilder(SendInvitationRequest prototype) {
|
[
" return (Builder) new Builder().MergeFrom(prototype);"
] | 1,897
|
lcc
|
csharp
| null |
773a363f2c8a3fdcc084a85466fde2a1350769f67776f1f4
|
|
/*
* Copyright (c) 2003-2009 jMonkeyEngine
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are
* met:
*
* * Redistributions of source code must retain the above copyright
* notice, this list of conditions and the following disclaimer.
*
* * Redistributions in binary form must reproduce the above copyright
* notice, this list of conditions and the following disclaimer in the
* documentation and/or other materials provided with the distribution.
*
* * Neither the name of 'jMonkeyEngine' nor the names of its contributors
* may be used to endorse or promote products derived from this software
* without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
* "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED
* TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
* PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR
* CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
* EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
* PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
* PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
* LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
* NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
* SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
package com.jme.scene;
import java.io.IOException;
import java.io.Serializable;
import java.nio.FloatBuffer;
import java.nio.IntBuffer;
import java.util.logging.Logger;
import com.jme.intersection.CollisionResults;
import com.jme.math.Vector3f;
import com.jme.renderer.Renderer;
import com.jme.system.JmeException;
import com.jme.util.export.InputCapsule;
import com.jme.util.export.JMEExporter;
import com.jme.util.export.JMEImporter;
import com.jme.util.export.OutputCapsule;
import com.jme.util.geom.BufferUtils;
/**
* <code>QuadMesh</code> defines a geometry mesh. This mesh defines a three
* dimensional object via a collection of points, colors, normals and textures.
* The points are referenced via a indices array. This array instructs the
* renderer the order in which to draw the points, creating quads based on the mode set.
*
* @author Joshua Slack
* @version $Id: $
*/
public class QuadMesh extends Geometry implements Serializable {
private static final Logger logger = Logger.getLogger(QuadMesh.class
.getName());
private static final long serialVersionUID = 2L;
public enum Mode {
/**
* Every four vertices referenced by the indexbuffer will be considered
* a stand-alone quad.
*/
Quads,
/**
* The first four vertices referenced by the indexbuffer create a
* triangle, from there, every two additional vertices are paired with
* the two preceding vertices to make a new quad.
*/
Strip;
}
protected transient IntBuffer indexBuffer;
protected Mode mode = Mode.Quads;
protected int quadQuantity;
private static Vector3f[] quads;
/**
* Empty Constructor to be used internally only.
*/
public QuadMesh() {
super();
}
/**
* Constructor instantiates a new <code>TriMesh</code> object.
*
* @param name
* the name of the scene element. This is required for
* identification and comparision purposes.
*/
public QuadMesh(String name) {
super(name);
}
/**
* Constructor instantiates a new <code>TriMesh</code> object. Provided
* are the attributes that make up the mesh all attributes may be null,
* except for vertices and indices.
*
* @param name
* the name of the scene element. This is required for
* identification and comparision purposes.
* @param vertices
* the vertices of the geometry.
* @param normal
* the normals of the geometry.
* @param color
* the colors of the geometry.
* @param coords
* the texture coordinates of the mesh.
* @param indices
* the indices of the vertex array.
*/
public QuadMesh(String name, FloatBuffer vertices, FloatBuffer normal,
FloatBuffer color, TexCoords coords, IntBuffer indices) {
super(name);
reconstruct(vertices, normal, color, coords);
if (null == indices) {
logger.severe("Indices may not be null.");
throw new JmeException("Indices may not be null.");
}
setIndexBuffer(indices);
logger.info("QuadMesh created.");
}
/**
* Recreates the geometric information of this TriMesh from scratch. The
* index and vertex array must not be null, but the others may be. Every 3
* indices define an index in the <code>vertices</code> array that
* refrences a vertex of a triangle.
*
* @param vertices
* The vertex information for this TriMesh.
* @param normal
* The normal information for this TriMesh.
* @param color
* The color information for this TriMesh.
* @param coords
* The texture information for this TriMesh.
* @param indices
* The index information for this TriMesh.
*/
public void reconstruct(FloatBuffer vertices, FloatBuffer normal,
FloatBuffer color, TexCoords coords, IntBuffer indices) {
super.reconstruct(vertices, normal, color, coords);
if (null == indices) {
logger.severe("Indices may not be null.");
throw new JmeException("Indices may not be null.");
}
setIndexBuffer(indices);
}
public void setMode(Mode mode) {
this.mode = mode;
}
public Mode getMode() {
return mode;
}
public IntBuffer getIndexBuffer() {
return indexBuffer;
}
public void setIndexBuffer(IntBuffer indices) {
this.indexBuffer = indices;
recalcQuadQuantity();
}
protected void recalcQuadQuantity() {
if (indexBuffer == null) {
quadQuantity = 0;
return;
}
switch (mode) {
case Quads:
quadQuantity = indexBuffer.limit() / 4;
break;
case Strip:
quadQuantity = indexBuffer.limit() / 2 - 1;
break;
}
}
/**
 * Returns the number of quads contained in this mesh.
 * (Previous comment said "triangles"; the count is computed per quad in
 * {@link #recalcQuadQuantity()}.)
 */
public int getQuadCount() {
    return quadQuantity;
}

/**
 * Overrides the cached quad count. Normally this value is maintained
 * automatically whenever the index buffer or mode changes.
 *
 * @param quadQuantity the new quad count
 */
public void setQuadQuantity(int quadQuantity) {
    this.quadQuantity = quadQuantity;
}

/**
 * Clears the buffers of this QuadMesh. The buffers include its indexBuffer
 * only (superclass buffers are cleared by the super call).
 */
public void clearBuffers() {
    super.clearBuffers();
    // Null buffer also resets quadQuantity to 0 via recalcQuadQuantity().
    setIndexBuffer(null);
}

/**
 * Returns the shared quad scratch array.
 * NOTE(review): this field is static, so it is shared across ALL QuadMesh
 * instances -- confirm that is intended.
 */
public static Vector3f[] getQuads() {
    return quads;
}

/**
 * Sets the shared quad scratch array (static, shared across instances).
 *
 * @param quads the quads to store
 */
public static void setQuads(Vector3f[] quads) {
    QuadMesh.quads = quads;
}
/**
 * Stores in the <code>storage</code> array the indices of quad
 * <code>i</code>. If <code>i</code> is an invalid index, or if
 * <code>storage.length &lt; 4</code>, then nothing happens.
 *
 * @param i
 *            The index of the quad to get.
 * @param storage
 *            The array that will hold the i's indexes.
 */
public void getQuad(int i, int[] storage) {
    // Fix: also reject negative indices, matching the Vector3f overload and
    // the documented "nothing happens" contract; previously a negative i
    // passed the guard and caused an out-of-bounds read on the index buffer.
    if (i >= 0 && i < getQuadCount() && storage.length >= 4) {
        IntBuffer indices = getIndexBuffer();
        storage[0] = indices.get(getVertIndex(i, 0));
        storage[1] = indices.get(getVertIndex(i, 1));
        storage[2] = indices.get(getVertIndex(i, 2));
        storage[3] = indices.get(getVertIndex(i, 3));
    }
}
/**
* Stores in the <code>vertices</code> array the vertex values of quad
* <code>i</code>. If <code>i</code> is an invalid quad index,
* nothing happens.
*
* @param i
* @param vertices
*/
public void getQuad(int i, Vector3f[] vertices) {
if (i < getQuadCount() && i >= 0) {
for (int x = 0; x < 4; x++) {
if (vertices[x] == null)
|
[
" vertices[x] = new Vector3f();"
] | 1,059
|
lcc
|
java
| null |
1354213b38f26ed15b8f898742ff5002cfb7db09baf32dd7
|
|
/*
* File : $Source: /alkacon/cvs/alkacon/com.alkacon.opencms.documentcenter/src/com/alkacon/opencms/documentcenter/CmsDocumentFrontend.java,v $
* Date : $Date: 2010/03/19 15:31:13 $
* Version: $Revision: 1.3 $
*
* This file is part of the Alkacon OpenCms Add-On Module Package
*
* Copyright (c) 2010 Alkacon Software GmbH (http://www.alkacon.com)
*
* The Alkacon OpenCms Add-On Module Package is free software:
* you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* The Alkacon OpenCms Add-On Module Package is distributed
* in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with the Alkacon OpenCms Add-On Module Package.
* If not, see http://www.gnu.org/licenses/.
*
* For further information about Alkacon Software GmbH, please see the
* company website: http://www.alkacon.com.
*
* For further information about OpenCms, please see the
* project website: http://www.opencms.org.
*/
package com.alkacon.opencms.v8.documentcenter;
import org.opencms.file.CmsPropertyDefinition;
import org.opencms.i18n.CmsMessages;
import org.opencms.jsp.CmsJspActionElement;
import org.opencms.jsp.CmsJspNavElement;
import org.opencms.util.CmsStringUtil;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import javax.servlet.jsp.PageContext;
/**
* Provides customized methods for the document center frontend output.<p>
*
* @author Andreas Zahner
*
* @version $Revision: 1.3 $
*
* @since 6.2.1
*/
/**
*
*/
public class CmsDocumentFrontend extends CmsJspActionElement {
/** Request attribute that stores if a disclaimer should be shown. */
public static final String ATTR_DISCLAIMER = "oamp_doccenter_disclaimer";
/** Request attribute that stores the absolute path to the current document center folder. */
public static final String ATTR_FULLPATH = "oamp_doccenter_fullpath";
/** Request attribute that stores the relative path to the current document center folder. */
public static final String ATTR_PATHPART = "oamp_doccenter_pathpart";
/** Request attribute that stores the absolute path to the document center start folder. */
public static final String ATTR_STARTPATH = "oamp_doccenter_startpath";
/** Name of the column: date created. */
public static final String COLUMN_NAME_DATECREATED = "datecreated";
/** Name of the column: date modified. */
public static final String COLUMN_NAME_DATEMODIFIED = "datemodified";
/** Name of the column: document folder. */
public static final String COLUMN_NAME_FOLDER = "folder";
/** Name of the column: document id. */
public static final String COLUMN_NAME_ID = "id";
/** Name of the column: document languages. */
public static final String COLUMN_NAME_LANGUAGE = "lang";
/** Name of the column: document size. */
public static final String COLUMN_NAME_SIZE = "size";
/** Name of the column: document title. */
public static final String COLUMN_NAME_TITLE = "title";
/** Name of the column: document type. */
public static final String COLUMN_NAME_TYPE = "type";
/** The sortable column default sort directions, must correspond to the sortable columns {@link #COLUMNS_SORTABLE}. */
public static final String[] COLUMNS_DIRECTIONS = {
CmsDocument.SORT_DIRECTION_ASC,
CmsDocument.SORT_DIRECTION_ASC,
CmsDocument.SORT_DIRECTION_ASC,
CmsDocument.SORT_DIRECTION_DESC,
CmsDocument.SORT_DIRECTION_DESC,
CmsDocument.SORT_DIRECTION_DESC};
/** The sortable column default sort directions as list. */
public static final List<String> COLUMNS_DIRECTIONS_LIST = Arrays.asList(COLUMNS_DIRECTIONS);
/** Stores the column names that are sortable. */
public static final String[] COLUMNS_SORTABLE = {
COLUMN_NAME_TYPE,
COLUMN_NAME_ID,
COLUMN_NAME_TITLE,
COLUMN_NAME_SIZE,
COLUMN_NAME_DATEMODIFIED,
COLUMN_NAME_DATECREATED};
/** The column names that are sortable as list. */
public static final List<String> COLUMNS_SORTABLE_LIST = Arrays.asList(COLUMNS_SORTABLE);
/** Name of the file extensions of the icons of the document list. */
public static final String ICON_POSTFIX = ".gif";
/** Page type: default (shows the document list). */
public static final String PAGE_TYPE_DEFAULT = "default";
/** Request parameter name for the sort column parameter. */
public static final String PARAM_SORT_COLUMN = "sortcol";
/** Request parameter name for the sort direction parameter. */
public static final String PARAM_SORT_DIRECTION = "sortdir";
/** Property name to look if the document id column is shown. */
public static final String PROPERTY_COLUMN_ID = "docs.columnid";
/** Property name to look for document list column names to hide. */
public static final String PROPERTY_COLUMNS_HIDE = "docs.hidecolumns";
/** Property name to look for document list date columns to hide (old way, used for compatibility reasons). */
public static final String PROPERTY_COLUMNS_HIDE_DATE = "categoryDateCreated";
/** Property name to determine if the document center should consider attachments of the documents. */
public static final String PROPERTY_USE_ATTACHMENTS = "docs.useattachments";
/** Property name to set the default type if using different types. */
public static final String PROPERTY_USE_DEFAULTTYPE = "docs.defaulttype";
/** Property name to determine if the document center should consider language versions of the documents. */
public static final String PROPERTY_USE_LANGUAGES = "docs.uselanguages";
/** Property name to determine if the document center should consider different types of the documents. */
public static final String PROPERTY_USE_TYPES = "docs.usetypes";
/** The property values of the sort methods, must be in the same order as {@link #COLUMNS_SORTABLE}. */
public static final String[] SORT_METHODS = {
CmsDocument.SORT_METHOD_TYPE,
CmsDocument.SORT_METHOD_BY_ID,
CmsDocument.SORT_METHOD_ALPHABETICAL,
CmsDocument.SORT_METHOD_SIZE,
CmsDocument.SORT_METHOD_BY_DATEMODIFIED,
CmsDocument.SORT_METHOD_BY_DATECREATED};
/** The property values of the sort methods as list. */
public static final List<String> SORT_METHODS_LIST = Arrays.asList(SORT_METHODS);
/** The extension of the default type if using different types of documents. */
private String m_defaultType;
/** The page type to show. */
private String m_pageType;
/** The parameter of the sort column. */
private String m_paramSortColumn;
/** The parameter of the sort direction. */
private String m_paramSortDirection;
/** The value of the sort method property ("method:direction:includefolders"). */
private String m_sortMethod;
/** Determines if attachments of documents are present. */
private Boolean m_useAttachments;
/** Determines if language versions of documents are present. */
private Boolean m_useLanguages;
/** Determines if different types of documents are present. */
private Boolean m_useTypes;
/**
 * Empty constructor, required for every JavaBean.<p>
 */
public CmsDocumentFrontend() {
    super();
}

/**
 * Constructor with parameters, initializing the JSP action element.<p>
 *
 * @param context the JSP page context object
 * @param req the JSP request
 * @param res the JSP response
 */
public CmsDocumentFrontend(PageContext context, HttpServletRequest req, HttpServletResponse res) {
    super(context, req, res);
    // TODO: fix all current uri references to use proper sitepath
}
/**
 * Creates the HTML code for the default breadcrumb navigation without the "up one folder" icon.<p>
 *
 * Used by: elements/navigation.jsp.<p>
 *
 * @param startFolder the start folder to build the navigation from
 * @param navList the navigation elements (CmsJspNavElement)
 * @param anchorClass the CSS class which will be used for the anchors
 * @param separator the separator which will be used to separate the entries
 * @param sepBeforeFirst if true, separator will be displayed before first element, too
 * @return the HTML code for the breadcrumb navigation
 */
public String buildBreadCrumbNavigation(
    String startFolder,
    List<CmsJspNavElement> navList,
    String anchorClass,
    String separator,
    boolean sepBeforeFirst) {

    StringBuffer result = new StringBuffer(64);
    boolean isFirst = true;
    // Pretending the first element was already written forces a leading separator.
    if (sepBeforeFirst) {
        isFirst = false;
    }
    // Locale-specific property names, e.g. "NavText_en" / "Title_en".
    String locNavText = CmsPropertyDefinition.PROPERTY_NAVTEXT + "_" + getRequestContext().getLocale().toString();
    String locTitle = CmsPropertyDefinition.PROPERTY_TITLE + "_" + getRequestContext().getLocale().toString();
    // Absolute path of the currently shown folder (set by the doc center controller).
    String currFolder = (String)getRequest().getAttribute(ATTR_FULLPATH);
    // create the navigation
    Iterator<CmsJspNavElement> i = navList.iterator();
    while (i.hasNext()) {
        CmsJspNavElement navElement = i.next();
        // Prefer the locale-specific nav text, fall back to the default one.
        String navText = navElement.getProperties().get(locNavText);
        if (CmsStringUtil.isEmptyOrWhitespaceOnly(navText)) {
            navText = navElement.getNavText();
        }
        // Only elements below the start folder belong to the breadcrumb.
        if (navElement.getResourceName().startsWith(startFolder)) {
            // check the navigation text
            if (navText.indexOf("??? NavText") != -1) {
                // "??? NavText" is the OpenCms marker for a missing property;
                // fall back to title, then to the plain file name.
                navText = navElement.getProperties().get(locTitle);
                if (CmsStringUtil.isEmptyOrWhitespaceOnly(navText)) {
                    navText = navElement.getTitle();
                }
                if (CmsStringUtil.isEmptyOrWhitespaceOnly(navText)) {
                    navText = navElement.getFileName();
                }
                // Strip a trailing slash from folder names.
                if (navText.endsWith("/")) {
                    navText = navText.substring(0, (navText.length() - 1));
                }
            }
            // don't show separator in front of first element
            if (!isFirst) {
                result.append(separator);
            } else {
                isFirst = false;
            }
            if (navElement.getResourceName().equals(currFolder) && (navList.size() > 1)) {
                // the current folder will not be linked
                result.append("<span class=\"");
                result.append(anchorClass);
                result.append("\">");
                result.append(navText);
                result.append("</span>");
            } else {
                // create the link to the folder
                result.append("<a href=\"");
                result.append(CmsDocumentFactory.getLink(this, navElement.getResourceName()));
                result.append("\" class=\"");
                result.append(anchorClass);
                result.append("\">");
                result.append(navText);
                result.append("</a>");
            }
        }
    }
    return result.toString();
}
/**
 * Creates the HTML code for the document or resource icon in document list, version list and search result list.<p>
 *
 * Convenience overload that uses the default 16x16 pixel icon size.<p>
 *
 * Used by: jsptemplates/list_documents.txt, elements/docversions.jsp, pages/jsp_pages/page_search_code.jsp.<p>
 *
 * @param docName the resource name of the document
 * @param messages the localized messages
 * @param resourcePath the path to the images
 * @param isFolder true if the document is a folder, otherwise false
 * @return the HTML code for the document icon
 */
public String buildDocIcon(String docName, CmsMessages messages, String resourcePath, boolean isFolder) {
    return buildDocIcon(docName, messages, resourcePath, isFolder, 16, 16);
}
/**
 * Creates the HTML code for the document or resource icon in document list, version list and search result list.<p>
 *
 * Used by: jsptemplates/list_documents.txt, elements/docversions.jsp, pages/jsp_pages/page_search_code.jsp.<p>
 *
 * @param docName the resource name of the document
 * @param messages the localized messages
 * @param resourcePath the path to the images
 * @param isFolder true if the document is a folder, otherwise false
 * @param imgWidth the width of the icon image
 * @param imgHeight the height of the icon image
 * @return the HTML code for the document icon
 */
public String buildDocIcon(
    String docName,
    CmsMessages messages,
    String resourcePath,
    boolean isFolder,
    int imgWidth,
    int imgHeight) {

    String iconSrc, iconTitle, iconAlt;
    // folder
    if (isFolder) {
        iconSrc = "ic_folder";
        iconTitle = messages.key("documentlist.icon.folder.title");
        iconAlt = messages.key("documentlist.icon.folder.alt");
    }
    // file
    else {
        // Pick an icon based on the (normalized) file extension, e.g. "ic_app_pdf".
        String postfix = CmsDocument.getPostfix(docName);
        postfix = CmsDocument.getPostfixAdjusted(postfix);
        iconSrc = "ic_app_" + postfix;
        iconTitle = messages.keyDefault("documentlist.icon.file.title." + postfix, "");
        iconAlt = messages.keyDefault("documentlist.icon.file.alt." + postfix, "");
        // Fall back to the generic icon when no extension-specific image exists in the VFS.
        if ((postfix.equals("")) || (!getCmsObject().existsResource(resourcePath + iconSrc + ICON_POSTFIX))) {
            iconSrc = "ic_app_unknown";
            iconTitle = messages.key("documentlist.icon.file.title.unknown");
            iconAlt = messages.key("documentlist.icon.file.alt.unknown");
        }
    }
    // Assemble the <img> tag.
    StringBuffer result = new StringBuffer(256);
    result.append("<img src=\"");
    result.append(link(resourcePath + iconSrc + ICON_POSTFIX));
    result.append("\" width=\"").append(imgWidth).append("\" height=\"").append(imgHeight);
    result.append("\" border=\"0\" alt=\"");
    result.append(iconAlt);
    result.append("\" title=\"");
    result.append(iconTitle);
    result.append("\"/>");
    return result.toString();
}
/**
 * Returns the column header including the link to sort the list by the column criteria.<p>
 *
 * For sortable columns on the default page this renders an anchor that
 * toggles/sets the sort direction via request parameters; for all other
 * columns only the localized headline text is returned.<p>
 *
 * @param columnName the internal column name
 * @param resourcePath the path to the image resources
 * @param messages the initialized localized messages to use
 * @return the column header including the link to sort the list by the column criteria
 */
public String getColumnHeader(String columnName, String resourcePath, CmsMessages messages) {
    // Lazily initialize the sort state (page type, sort column/direction).
    if (!isBeanSortInitialized()) {
        initSort();
    }
    if (m_pageType.equals("default") && COLUMNS_SORTABLE_LIST.contains(columnName)) {
        // column is sortable and we are on a default page, so columns are sortable
        StringBuffer result = new StringBuffer(256);
        String dir = m_paramSortDirection;
        String newDir = dir;
        boolean isCurrentColumn = false;
        if (columnName.equals(m_paramSortColumn)) {
            // the column is the current sort column
            isCurrentColumn = true;
            // switch new sort direction link for current sort column
            if ((dir != null) && dir.equals(CmsDocument.SORT_DIRECTION_ASC)) {
                newDir = CmsDocument.SORT_DIRECTION_DESC;
            } else {
                newDir = CmsDocument.SORT_DIRECTION_ASC;
            }
        } else {
            // use default sort direction for other columns
            newDir = COLUMNS_DIRECTIONS_LIST.get(COLUMNS_SORTABLE_LIST.indexOf(columnName));
        }
        // create the link for sorting the column
        StringBuffer link = new StringBuffer(128);
        link.append((String)getRequest().getAttribute(ATTR_FULLPATH));
        link.append("?").append(PARAM_SORT_COLUMN).append("=").append(columnName);
        link.append("&").append(PARAM_SORT_DIRECTION).append("=").append(newDir);
        // set the title for the headline
        String sortTitle = messages.key(
            "documentlist.sort." + newDir,
            messages.key("documentlist.headline." + columnName));
        result.append("<a href=\"");
        result.append(CmsDocumentFactory.getLink(this, link.toString()));
        result.append("\" class=\"docshead\" title=\"");
        result.append(sortTitle);
        result.append("\">");
        result.append(messages.key("documentlist.headline." + columnName));
        if (isCurrentColumn) {
            // set the marker icon for the current sort column
            result.append("&nbsp;");
            result.append("<img src=\"");
            result.append(resourcePath).append("ic_sort_").append(dir).append(".png");
            result.append("\" border=\"0\" alt=\"");
            result.append(sortTitle);
            result.append("\" title=\"");
            result.append(sortTitle);
            result.append("\"/>");
        }
        result.append("</a>");
        return result.toString();
    } else {
        // column is not sortable, simply print localized headline
        return messages.key("documentlist.headline." + columnName);
    }
}
/**
 * Returns the defaultType (the extension of the default document type when
 * different document types are used, see {@code PROPERTY_USE_DEFAULTTYPE}).<p>
 *
 * @return the defaultType
 */
public String getDefaultType() {

    return m_defaultType;
}
/**
* Collects the names of the columns to hide in the document list view.<p>
*
* Columns that can be hidden are: date created, date last modified, document id.<p>
*
* @return the names of the clumns to hide
*/
public List<String> getHiddenColumns() {
List<String> result = new ArrayList<String>(4);
String ignoredCols = property(PROPERTY_COLUMNS_HIDE, "search", "");
result = CmsStringUtil.splitAsList(ignoredCols, ';');
// backward compatibility: check for property defining visibility of date columns
String showDateData = property("categoryDateCreated", "search", "");
|
[
" if (CmsStringUtil.isNotEmptyOrWhitespaceOnly(showDateData)) {"
] | 1,920
|
lcc
|
java
| null |
5b8e2643405d58c8009b6c2812341072d705748a64e8f971
|
|
// Copyright 2014 Invex Games http://invexgames.com
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
using UnityEngine;
using System.Collections;
using UnityEngine.UI;
using UnityEngine.EventSystems;
namespace MaterialUI
{
public class InputFieldConfig : MonoBehaviour, ISelectHandler, IDeselectHandler
{
public Color activeColor = Color.black;
bool dynamicHeight;
bool selected;
public float animationDuration = 0.75f;
[SerializeField] private RectTransform parentRect;
[SerializeField] private Text placeholderText;
[SerializeField] private Text inputText;
[SerializeField] private Text displayText;
[SerializeField] private Image activeLine;
RectTransform textRect;
RectTransform displayTextRect;
InputField inputField;
RectTransform activeLineRect;
RectTransform placeholderRect;
Color placeholderOffColor;
Color placeholderColor;
float placeholderScale;
float placeholderPivot;
float activeLineAlpha;
float activeLinePos;
float animStartTime;
float animDeltaTime;
bool selectedBefore;
int state;
// Cache all component references once; they are used every frame in Update().
void Awake() // Get references
{
    inputField = gameObject.GetComponent<InputField>();
    activeLineRect = activeLine.GetComponent<RectTransform>();
    placeholderRect = placeholderText.GetComponent<RectTransform>();
    textRect = inputText.GetComponent<RectTransform>();
    displayTextRect = displayText.GetComponent<RectTransform>();
}
// One-time visual setup: size the underline, mirror the display label's text
// settings onto the InputField's own label, and detect multi-line fields.
void Start ()
{
    // Make the active underline as wide as the placeholder area.
    activeLineRect.sizeDelta = new Vector2 (placeholderRect.rect.width, activeLineRect.sizeDelta.y);
    // Copy every text setting from the visible display label onto the
    // InputField's internal label so both render identically.
    inputText.font = displayText.font;
    inputText.fontStyle = displayText.fontStyle;
    inputText.fontSize = displayText.fontSize;
    inputText.lineSpacing = displayText.lineSpacing;
    inputText.supportRichText = displayText.supportRichText;
    inputText.alignment = displayText.alignment;
    inputText.horizontalOverflow = displayText.horizontalOverflow;
    inputText.resizeTextForBestFit = displayText.resizeTextForBestFit;
    inputText.material = displayText.material;
    inputText.color = displayText.color;
    // Remember the resting placeholder tint so OnDeselect can fade back to it.
    placeholderOffColor = placeholderText.color;
    // Multi-line fields grow with their content (see Update / CalculateHeight).
    if (inputField.lineType == InputField.LineType.MultiLineNewline || inputField.lineType == InputField.LineType.MultiLineSubmit)
    {
        dynamicHeight = true;
    }
}
// Begins the "activate" animation when the field gains focus: snapshot the
// current placeholder state, anchor the underline at the click position and
// let Update() (state 1) animate from there.
public void OnSelect (BaseEventData data)
{
    // Snapshot current values so the animation starts from wherever we are.
    placeholderColor = placeholderText.color;
    placeholderPivot = placeholderRect.pivot.y;
    placeholderScale = placeholderRect.localScale.x;
    activeLine.color = activeColor;
    selected = true;
    // Start the underline at the mouse position, then expand it outward.
    activeLineRect.position = Input.mousePosition;
    activeLineRect.localPosition = new Vector3 (activeLineRect.localPosition.x, 0.5f, 0f);
    activeLineRect.localScale = new Vector3 (0f, 1f, 1f);
    activeLinePos = activeLineRect.localPosition.x;
    animStartTime = Time.realtimeSinceStartup;
    state = 1;
}

// Begins the "deactivate" animation when focus is lost; Update() (state 2)
// fades the placeholder back and hides the underline.
public void OnDeselect (BaseEventData data)
{
    placeholderColor = placeholderText.color;
    placeholderPivot = placeholderRect.pivot.y;
    placeholderScale = placeholderRect.localScale.x;
    selected = false;
    animStartTime = Time.realtimeSinceStartup;
    state = 2;
}

// Public hook to re-measure the field height after the layout settles
// (runs one frame later via the DelayedHeight coroutine).
public void CalculateHeight ()
{
    StartCoroutine (DelayedHeight());
}
// Drives the focus/blur animations each frame. state 1 = activating
// (placeholder shrinks and tints, underline grows), state 2 = deactivating
// (fade back), state 0 = idle.
void Update ()
{
    animDeltaTime = Time.realtimeSinceStartup - animStartTime;

    if (state == 1) // Activating
    {
        if (animDeltaTime <= animationDuration)
        {
            // Tint the placeholder toward the active color.
            Color tempColor = placeholderText.color;
            tempColor.r = Anim.Quint.Out(placeholderColor.r, activeColor.r, animDeltaTime, animationDuration);
            tempColor.g = Anim.Quint.Out(placeholderColor.g, activeColor.g, animDeltaTime, animationDuration);
            tempColor.b = Anim.Quint.Out(placeholderColor.b, activeColor.b, animDeltaTime, animationDuration);
            tempColor.a = Anim.Quint.Out(placeholderColor.a, activeColor.a, animDeltaTime, animationDuration);
            placeholderText.color = tempColor;

            // Shrink the placeholder uniformly to 75% of its resting size.
            Vector3 tempVec3 = placeholderRect.localScale;
            tempVec3.x = Anim.Quint.Out (placeholderScale, 0.75f, animDeltaTime, animationDuration);
            tempVec3.y = tempVec3.x;
            tempVec3.z = tempVec3.x;
            placeholderRect.localScale = tempVec3;

            // Float the placeholder upward by moving its pivot to the bottom.
            Vector2 tempVec2 = placeholderRect.pivot;
            tempVec2.y = Anim.Quint.InOut (placeholderPivot, 0f, animDeltaTime, animationDuration);
            placeholderRect.pivot = tempVec2;

            // Grow the underline from the click point out to full width.
            tempVec3 = activeLineRect.localScale;
            tempVec3.x = Anim.Quint.Out(0f, 1f, animDeltaTime, animationDuration);
            activeLineRect.localScale = tempVec3;

            tempVec2 = activeLineRect.localPosition;
            tempVec2.x = Anim.Quint.Out (activeLinePos, 0f, animDeltaTime, animationDuration);
            activeLineRect.localPosition = tempVec2;
        }
        else
        {
            state = 0;
        }
    }
    else if (state == 2) // Deactivating
    {
        // Fix: compare against animationDuration instead of the hard-coded 1f
        // used previously. The easing calls below all run over
        // animationDuration (as the activation branch does), so the old check
        // either kept animating past the end or cut the animation short
        // whenever animationDuration != 1.
        if (animDeltaTime <= animationDuration)
        {
            // Fade the placeholder back to its resting tint.
            Color tempColor = placeholderText.color;
            tempColor.r = Anim.Quint.Out(placeholderColor.r, placeholderOffColor.r, animDeltaTime, animationDuration);
            tempColor.g = Anim.Quint.Out(placeholderColor.g, placeholderOffColor.g, animDeltaTime, animationDuration);
            tempColor.b = Anim.Quint.Out(placeholderColor.b, placeholderOffColor.b, animDeltaTime, animationDuration);
            tempColor.a = Anim.Quint.Out(placeholderColor.a, placeholderOffColor.a, animDeltaTime, animationDuration);
            placeholderText.color = tempColor;

            // Only restore the placeholder size/position when the field is empty.
            if (inputField.text.Length == 0)
            {
                Vector3 tempVec3 = placeholderRect.localScale;
                tempVec3.x = Anim.Quint.InOut (placeholderScale, 1f, animDeltaTime, animationDuration);
                tempVec3.y = tempVec3.x;
                tempVec3.z = tempVec3.x;
                placeholderRect.localScale = tempVec3;

                Vector2 tempVec2 = placeholderRect.pivot;
                tempVec2.y = Anim.Quint.Out (placeholderPivot, 1f, animDeltaTime, animationDuration);
                placeholderRect.pivot = tempVec2;
            }

            // Fade out the underline.
            tempColor = activeLine.color;
            tempColor.a = Anim.Quint.Out(1f, 0f, animDeltaTime, animationDuration);
            activeLine.color = tempColor;
        }
        else
        {
            state = 0;
        }
    }

    // While focused, mirror the typed text onto the visible display label
    // (and mirror sizes too when the field grows dynamically).
    if (selected)
    {
        if (dynamicHeight)
        {
            textRect.sizeDelta = displayTextRect.sizeDelta;
            displayText.text = inputField.text;
        }
        else
        {
            displayText.text = inputText.text;
        }
    }
}
IEnumerator DelayedHeight ()
{
|
[
"\t\t\tyield return new WaitForEndOfFrame();"
] | 583
|
lcc
|
csharp
| null |
6f54b8bdeec79fa980ac72092ce615f8de384ea5d04ccc11
|
|
using System;
using System.ComponentModel;
using System.Drawing;
using System.Drawing.Drawing2D;
using System.Windows.Forms;
namespace mRemoteNG.UI.TaskDialog
{
public sealed partial class CommandButton : Button
{
//--------------------------------------------------------------------------------
#region PRIVATE MEMBERS
//--------------------------------------------------------------------------------
Image imgArrow1;
Image imgArrow2;
const int LEFT_MARGIN = 10;
const int TOP_MARGIN = 10;
const int ARROW_WIDTH = 19;
enum eButtonState { Normal, MouseOver, Down }
eButtonState m_State = eButtonState.Normal;
#endregion
//--------------------------------------------------------------------------------
#region PUBLIC PROPERTIES
//--------------------------------------------------------------------------------
// Override this to make sure the control is invalidated (repainted) when 'Text' is changed.
// The first line of Text is the large caption; subsequent lines are the small text.
public override string Text
{
    get { return base.Text; }
    set
    {
        base.Text = value;
        // Re-fit the button to its (possibly multi-line) text.
        if (m_autoHeight)
            Height = GetBestHeight();
        Invalidate();
    }
}

// SmallFont is the font used for secondary lines
private Font SmallFont { get; set; }

// AutoHeight determines whether the button automatically resizes itself to fit the Text
bool m_autoHeight = true;
[Browsable(true)]
[Category("Behavior")]
[DefaultValue(true)]
public bool AutoHeight { get { return m_autoHeight; } set { m_autoHeight = value; if (m_autoHeight) Invalidate(); } }
#endregion
//--------------------------------------------------------------------------------
#region CONSTRUCTOR
//--------------------------------------------------------------------------------
// Sets up the designer components and the two fonts: a large one for the
// caption line and a small one for the secondary lines.
public CommandButton()
{
    InitializeComponent();
    Font = new Font("Segoe UI", 11.75F, FontStyle.Regular, GraphicsUnit.Point, 0);
    SmallFont = new Font("Segoe UI", 8F, FontStyle.Regular, GraphicsUnit.Point, 0);
}
#endregion
//--------------------------------------------------------------------------------
#region PUBLIC ROUTINES
//--------------------------------------------------------------------------------
// Returns the height needed to show both the large and the small text,
// plus the top margin applied above and below.
public int GetBestHeight()
{
    return (TOP_MARGIN * 2) + (int)GetSmallTextSizeF().Height + (int)GetLargeTextSizeF().Height;
}
#endregion
//--------------------------------------------------------------------------------
#region PRIVATE ROUTINES
//--------------------------------------------------------------------------------
// Returns the first line of Text, which is rendered as the large caption.
string GetLargeText()
{
    int newline = Text.IndexOf('\n');
    return newline < 0 ? Text : Text.Substring(0, newline);
}
// Returns every line of Text after the first, joined with '\n' and trimmed
// of leading/trailing newlines; "" when Text is a single line.
string GetSmallText()
{
    if (Text.IndexOf('\n') < 0)
        return "";
    string[] lines = Text.Split('\n');
    // string.Join avoids the O(n^2) string concatenation of the old loop;
    // Trim('\n') preserves the old behavior of dropping empty edge lines.
    return string.Join("\n", lines, 1, lines.Length - 1).Trim('\n');
}
// Measures the large (first) text line, word-wrapped to the width available
// to the right of the arrow image.
SizeF GetLargeTextSizeF()
{
    int x = LEFT_MARGIN + ARROW_WIDTH + 5;
    SizeF mzSize = new SizeF(Width - x - LEFT_MARGIN, 5000.0F); // presume RIGHT_MARGIN = LEFT_MARGIN
    // Fix: Graphics.FromHwnd returns a Graphics that must be disposed;
    // the old code leaked a GDI device context on every measurement
    // (this runs on each paint and resize).
    using (Graphics g = Graphics.FromHwnd(Handle))
    {
        return g.MeasureString(GetLargeText(), Font, mzSize);
    }
}
// Measures the small (secondary) text, word-wrapped to the width available
// to the right of the arrow image; (0,0) when there is no small text.
SizeF GetSmallTextSizeF()
{
    string s = GetSmallText();
    if (s == "") return new SizeF(0, 0);
    int x = LEFT_MARGIN + ARROW_WIDTH + 8; // <- indent small text slightly more
    SizeF mzSize = new SizeF(Width - x - LEFT_MARGIN, 5000.0F); // presume RIGHT_MARGIN = LEFT_MARGIN
    // Fix: dispose the Graphics from Graphics.FromHwnd; the old code leaked
    // a GDI device context on every measurement.
    using (Graphics g = Graphics.FromHwnd(Handle))
    {
        return g.MeasureString(s, SmallFont, mzSize);
    }
}
#endregion
//--------------------------------------------------------------------------------
#region OVERRIDEs
//--------------------------------------------------------------------------------
// Loads the two arrow bitmaps (normal and hover) once the control handle exists.
protected override void OnCreateControl()
{
    base.OnCreateControl();
    imgArrow1 = Resources.green_arrow1;
    imgArrow2 = Resources.green_arrow2;
}
//--------------------------------------------------------------------------------
// Custom paint: background depends on enabled/hover/pressed state, then the
// large caption, the small text and the arrow image are drawn.
// NOTE(review): the Pen, LinearGradientBrush and SolidBrush instances created
// below are never disposed -- each paint leaks GDI objects; consider using
// blocks or cached pens/brushes. Left unchanged here.
protected override void OnPaint(PaintEventArgs e)
{
    e.Graphics.SmoothingMode = SmoothingMode.HighQuality;
    e.Graphics.TextRenderingHint = System.Drawing.Text.TextRenderingHint.ClearTypeGridFit;

    LinearGradientBrush brush;
    LinearGradientMode mode = LinearGradientMode.Vertical;

    // Shrink by one pixel so the border rectangle fits inside the client area.
    Rectangle newRect = new Rectangle(ClientRectangle.X, ClientRectangle.Y, ClientRectangle.Width - 1, ClientRectangle.Height - 1);
    Color text_color = SystemColors.WindowText;
    Image img = imgArrow1;

    if (Enabled)
    {
        // Pick fill, border, arrow image and text color per mouse state.
        switch (m_State)
        {
            case eButtonState.Normal:
                e.Graphics.FillRectangle(SystemBrushes.Control, newRect);
                // Only a focused button gets a visible (silver) border.
                e.Graphics.DrawRectangle(Focused ? new Pen(Color.Silver, 1) : new Pen(SystemColors.Control, 1), newRect);
                text_color = Color.DarkBlue;
                break;

            case eButtonState.MouseOver:
                brush = new LinearGradientBrush(newRect, SystemColors.Control, SystemColors.Control, mode);
                e.Graphics.FillRectangle(brush, newRect);
                e.Graphics.DrawRectangle(new Pen(Color.Silver, 1), newRect);
                img = imgArrow2;    // hover arrow variant
                text_color = Color.Blue;
                break;

            case eButtonState.Down:
                brush = new LinearGradientBrush(newRect, SystemColors.Control, SystemColors.Control, mode);
                e.Graphics.FillRectangle(brush, newRect);
                e.Graphics.DrawRectangle(new Pen(Color.DarkGray, 1), newRect);
                text_color = Color.DarkBlue;
                break;
        }
    }
    else
    {
        // Disabled: flat fill with a dark-gray border.
        brush = new LinearGradientBrush(newRect, SystemColors.Control, SystemColors.Control, mode);
        e.Graphics.FillRectangle(brush, newRect);
        e.Graphics.DrawRectangle(new Pen(Color.DarkGray, 1), newRect);
        text_color = Color.DarkBlue;
    }

    string largetext = GetLargeText();
    string smalltext = GetSmallText();

    SizeF szL = GetLargeTextSizeF();
    //e.Graphics.DrawString(largetext, base.Font, new SolidBrush(text_color), new RectangleF(new PointF(LEFT_MARGIN + imgArrow1.Width + 5, TOP_MARGIN), szL));
    TextRenderer.DrawText(e.Graphics, largetext, Font, new Rectangle(LEFT_MARGIN + imgArrow1.Width + 5, TOP_MARGIN, (int)szL.Width, (int)szL.Height), text_color, TextFormatFlags.Default);

    // Small text goes directly below the large caption, indented slightly more.
    if (smalltext != "")
    {
        SizeF szS = GetSmallTextSizeF();
        e.Graphics.DrawString(smalltext, SmallFont, new SolidBrush(text_color), new RectangleF(new PointF(LEFT_MARGIN + imgArrow1.Width + 8, TOP_MARGIN + (int)szL.Height), szS));
    }

    // Arrow image is vertically centered on the large caption line.
    e.Graphics.DrawImage(img, new Point(LEFT_MARGIN, TOP_MARGIN + (int)(szL.Height / 2) - img.Height / 2));
}
//--------------------------------------------------------------------------------
// Revert to the normal look when the pointer leaves the button.
protected override void OnMouseLeave(EventArgs e)
{
    m_State = eButtonState.Normal;
    Invalidate();   // repaint with the new visual state
    base.OnMouseLeave(e);
}
//--------------------------------------------------------------------------------
// Highlight the button while the pointer hovers over it.
protected override void OnMouseEnter(EventArgs e)
{
    m_State = eButtonState.MouseOver;
    Invalidate();
    base.OnMouseEnter(e);
}
//--------------------------------------------------------------------------------
// Releasing the mouse returns to the hover state (pointer is still inside).
protected override void OnMouseUp(MouseEventArgs e)
{
    m_State = eButtonState.MouseOver;
    Invalidate();
    base.OnMouseUp(e);
}
//--------------------------------------------------------------------------------
// Pressing the mouse shows the pressed look.
protected override void OnMouseDown(MouseEventArgs e)
{
    m_State = eButtonState.Down;
    Invalidate();
    base.OnMouseDown(e);
}
//--------------------------------------------------------------------------------
protected override void OnSizeChanged(EventArgs e)
{
if (m_autoHeight)
{
|
[
" int h = GetBestHeight();"
] | 638
|
lcc
|
csharp
| null |
9ee2a42b13526dd952de8a27bb5404cee70aa93dc9e35ee7
|
|
//
// DO NOT REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
//
// @Authors:
// timop
//
// Copyright 2004-2013 by OM International
//
// This file is part of OpenPetra.org.
//
// OpenPetra.org is free software: you can redistribute it and/or modify
// it under the terms of the GNU General Public License as published by
// the Free Software Foundation, either version 3 of the License, or
// (at your option) any later version.
//
// OpenPetra.org is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU General Public License for more details.
//
// You should have received a copy of the GNU General Public License
// along with OpenPetra.org. If not, see <http://www.gnu.org/licenses/>.
//
using System;
using System.IO;
using System.Data;
using System.Collections.Generic;
using Ict.Common;
using Ict.Common.DB;
using Ict.Common.Data;
using Ict.Common.IO;
using Ict.Petra.Server.MSysMan.Cacheable.WebConnectors;
using Ict.Petra.Shared.MSysMan.Data;
using Ict.Petra.Server.MSysMan.Maintenance.SystemDefaults.WebConnectors;
using Ict.Petra.Server.MSysMan.Data.Access;
namespace Ict.Petra.Server.MCommon.Processing
{
/// <summary>
/// run some data checks against the database and tell the users how to fix consistency issues
/// </summary>
public class TProcessDataChecks
{
private const string PROCESSDATACHECK_LAST_RUN = "PROCESSDATACHECK_LAST_RUN";
private const float SENDREPORTFORDAYS_TOUSERS = 14.0f;
private static DateTime Errors_SinceDate;
/// <summary>
/// Gets called in regular intervals from a Timer in Class TTimedProcessing.
/// </summary>
/// <param name="ADBAccessObj">Instantiated DB Access object with opened DB connection.</param>
/// <param name="ARunManually">this is true if the process was called manually from the server admin console</param>
public static void Process(TDataBase ADBAccessObj, bool ARunManually)
{
// only check once a day (or as specified in config file), if not manually called
if (!ARunManually)
{
DateTime LastRun =
TVariant.DecodeFromString(
TSystemDefaults.GetSystemDefault(
PROCESSDATACHECK_LAST_RUN,
new TVariant(DateTime.MinValue).EncodeToString())).ToDate();
if (LastRun.AddDays(TAppSettingsManager.GetInt16("DataChecks.RunEveryXDays", 1)) > DateTime.Now)
{
// do not run the data check more than once a day or a week (depending on configuration setting), too many emails
TLogging.LogAtLevel(1, "TProcessDataChecks.Process: not running, since last run was at " + LastRun.ToString());
return;
}
}
Errors_SinceDate = DateTime.Today.AddDays(-1 * SENDREPORTFORDAYS_TOUSERS);
TLogging.LogAtLevel(1, "TProcessDataChecks.Process: Checking Modules");
CheckModule(ADBAccessObj, "DataCheck.MPartner.");
TSystemDefaults.SetSystemDefault(PROCESSDATACHECK_LAST_RUN, new TVariant(DateTime.Now).EncodeToString());
}
private static void CheckModule(TDataBase ADBAccessObj, string AModule)
{
// get all sql files starting with module
string[] sqlfiles = Directory.GetFiles(Path.GetFullPath(TAppSettingsManager.GetValue("SqlFiles.Path", ".")),
AModule + "*.sql");
DataTable errors = new DataTable(AModule + "Errors");
foreach (string sqlfile in sqlfiles)
{
string sql = TDataBase.ReadSqlFile(Path.GetFileName(sqlfile));
// extend the sql to load the s_date_created_d, s_created_by_c, s_date_modified_d, s_modified_by_c
// only for the first table in the FROM clause
string firstTableAlias = sql.Substring(sql.ToUpper().IndexOf("FROM ") + "FROM ".Length);
firstTableAlias = firstTableAlias.Substring(0, firstTableAlias.ToUpper().IndexOf("WHERE"));
int indexOfAs = firstTableAlias.ToUpper().IndexOf(" AS ");
if (indexOfAs > -1)
{
firstTableAlias = firstTableAlias.Substring(indexOfAs + " AS ".Length).Trim();
if (firstTableAlias.Contains(","))
{
firstTableAlias = firstTableAlias.Substring(0, firstTableAlias.IndexOf(",")).Trim();
}
}
sql = sql.Replace("FROM ", ", " + firstTableAlias + ".s_date_created_d AS DateCreated, " +
firstTableAlias + ".s_created_by_c AS CreatedBy, " +
firstTableAlias + ".s_date_modified_d AS DateModified, " +
firstTableAlias + ".s_modified_by_c AS ModifiedBy FROM ");
errors.Merge(ADBAccessObj.SelectDT(sql, "temp", null));
}
if (errors.Rows.Count > 0)
{
SendEmailToAdmin(errors);
SendEmailsPerUser(errors);
}
}
private static void SendEmailToAdmin(DataTable AErrors)
{
// Create excel output of the errors table
string excelfile = TAppSettingsManager.GetValue("DataChecks.TempPath") + "/errors.xlsx";
try
{
using (StreamWriter sw = new StreamWriter(excelfile))
{
using (MemoryStream m = new MemoryStream())
{
if (!TCsv2Xml.DataTable2ExcelStream(AErrors, m))
{
return;
}
m.WriteTo(sw.BaseStream);
m.Close();
sw.Close();
}
}
}
catch (Exception e)
{
TLogging.Log("Problems writing to file " + excelfile);
TLogging.Log(e.ToString());
return;
}
if (TAppSettingsManager.HasValue("DataChecks.Email.Recipient"))
{
new TSmtpSender().SendEmail("<" + TAppSettingsManager.GetValue("DataChecks.Email.Sender") + ">",
"OpenPetra DataCheck Robot",
TAppSettingsManager.GetValue("DataChecks.Email.Recipient"),
"Data Check",
"there are " + AErrors.Rows.Count.ToString() + " errors. Please see attachment!",
new string[] { excelfile });
}
else
{
TLogging.Log("there is no email sent because DataChecks.Email.Recipient is not defined in the config file");
}
}
private static void SendEmailForUser(string AUserId, DataTable AErrors)
{
// get the email address of the user
SUserRow userrow = SUserAccess.LoadByPrimaryKey(AUserId, null)[0];
string excelfile = TAppSettingsManager.GetValue("DataChecks.TempPath") + "/errors" + AUserId + ".xlsx";
DataView v = new DataView(AErrors,
"(CreatedBy='" + AUserId + "' AND ModifiedBy IS NULL AND DateCreated > #" + Errors_SinceDate.ToString("MM/dd/yyyy") + "#) " +
"OR (ModifiedBy='" + AUserId + "' AND DateModified > #" + Errors_SinceDate.ToString("MM/dd/yyyy") + "#)",
string.Empty, DataViewRowState.CurrentRows);
try
{
using (StreamWriter sw = new StreamWriter(excelfile))
{
using (MemoryStream m = new MemoryStream())
{
if (!TCsv2Xml.DataTable2ExcelStream(v.ToTable(), m))
{
return;
}
m.WriteTo(sw.BaseStream);
m.Close();
sw.Close();
}
}
}
catch (Exception e)
{
TLogging.Log("Problems writing to file " + excelfile);
TLogging.Log(e.ToString());
return;
}
string recipientEmail = string.Empty;
if (!userrow.IsEmailAddressNull())
{
recipientEmail = userrow.EmailAddress;
}
else if (TAppSettingsManager.HasValue("DataChecks.Email.Recipient.UserDomain"))
{
recipientEmail = userrow.FirstName + "." + userrow.LastName + "@" + TAppSettingsManager.GetValue(
"DataChecks.Email.Recipient.UserDomain");
}
else if (TAppSettingsManager.HasValue("DataChecks.Email.Recipient"))
{
recipientEmail = TAppSettingsManager.GetValue("DataChecks.Email.Recipient");
}
if (recipientEmail.Length > 0)
{
new TSmtpSender().SendEmail("<" + TAppSettingsManager.GetValue("DataChecks.Email.Sender") + ">",
"OpenPetra DataCheck Robot",
recipientEmail,
"Data Check for " + AUserId,
"there are " + v.Count.ToString() + " errors. Please see attachment!",
new string[] { excelfile });
}
else
{
TLogging.Log("no email can be sent to " + AUserId);
}
}
private static void SendEmailsPerUser(DataTable AErrors)
{
// get all users that have created or modified the records in the past week(s)
List <String>Users = new List <string>();
foreach (DataRow r in AErrors.Rows)
{
string lastUser = string.Empty;
if (!r.IsNull("DateModified") && (Convert.ToDateTime(r["DateModified"]) > Errors_SinceDate))
{
lastUser = r["ModifiedBy"].ToString();
}
else if (!r.IsNull("DateCreated") && (Convert.ToDateTime(r["DateCreated"]) > Errors_SinceDate))
{
|
[
" lastUser = r[\"CreatedBy\"].ToString();"
] | 864
|
lcc
|
csharp
| null |
fee4dc71675158dd9374b556629af2d83cdc0503955dba84
|
|
#region Copyright & License Information
/*
* Copyright 2007-2014 The OpenRA Developers (see AUTHORS)
* This file is part of OpenRA, which is free software. It is made
* available to you under the terms of the GNU General Public License
* as published by the Free Software Foundation. For more information,
* see COPYING.
*/
#endregion
using System;
using System.Collections.Generic;
using System.Linq;
using OpenRA.Graphics;
using OpenRA.Orders;
using OpenRA.Traits;
namespace OpenRA.Widgets
{
public enum WorldTooltipType { None, Unexplored, Actor, FrozenActor }
public class ViewportControllerWidget : Widget
{
public readonly string TooltipTemplate = "WORLD_TOOLTIP";
public readonly string TooltipContainer;
Lazy<TooltipContainerWidget> tooltipContainer;
public WorldTooltipType TooltipType { get; private set; }
public IToolTip ActorTooltip { get; private set; }
public FrozenActor FrozenActorTooltip { get; private set; }
public int EdgeScrollThreshold = 15;
public int EdgeCornerScrollThreshold = 35;
static readonly Dictionary<ScrollDirection, string> ScrollCursors = new Dictionary<ScrollDirection, string>
{
{ ScrollDirection.Up | ScrollDirection.Left, "scroll-tl" },
{ ScrollDirection.Up | ScrollDirection.Right, "scroll-tr" },
{ ScrollDirection.Down | ScrollDirection.Left, "scroll-bl" },
{ ScrollDirection.Down | ScrollDirection.Right, "scroll-br" },
{ ScrollDirection.Up, "scroll-t" },
{ ScrollDirection.Down, "scroll-b" },
{ ScrollDirection.Left, "scroll-l" },
{ ScrollDirection.Right, "scroll-r" },
};
static readonly Dictionary<ScrollDirection, float2> ScrollOffsets = new Dictionary<ScrollDirection, float2>
{
{ ScrollDirection.Up, new float2(0, -1) },
{ ScrollDirection.Down, new float2(0, 1) },
{ ScrollDirection.Left, new float2(-1, 0) },
{ ScrollDirection.Right, new float2(1, 0) },
};
ScrollDirection keyboardDirections;
ScrollDirection edgeDirections;
World world;
WorldRenderer worldRenderer;
[ObjectCreator.UseCtor]
public ViewportControllerWidget(World world, WorldRenderer worldRenderer)
{
this.world = world;
this.worldRenderer = worldRenderer;
tooltipContainer = Exts.Lazy(() =>
Ui.Root.Get<TooltipContainerWidget>(TooltipContainer));
}
public override void MouseEntered()
{
if (TooltipContainer == null)
return;
tooltipContainer.Value.SetTooltip(TooltipTemplate,
new WidgetArgs() {{ "world", world }, { "viewport", this }});
}
public override void MouseExited()
{
if (TooltipContainer == null)
return;
tooltipContainer.Value.RemoveTooltip();
}
public override void Draw()
{
UpdateMouseover();
base.Draw();
}
public void UpdateMouseover()
{
TooltipType = WorldTooltipType.None;
var cell = worldRenderer.Viewport.ViewToWorld(Viewport.LastMousePos);
if (!world.Map.Contains(cell))
return;
if (world.ShroudObscures(cell))
{
TooltipType = WorldTooltipType.Unexplored;
return;
}
var underCursor = world.ScreenMap.ActorsAt(worldRenderer.Viewport.ViewToWorldPx(Viewport.LastMousePos))
.Where(a => !world.FogObscures(a) && a.HasTrait<IToolTip>())
.WithHighestSelectionPriority();
if (underCursor != null)
{
ActorTooltip = underCursor.TraitsImplementing<IToolTip>().First();
TooltipType = WorldTooltipType.Actor;
return;
}
var frozen = world.ScreenMap.FrozenActorsAt(world.RenderPlayer, worldRenderer.Viewport.ViewToWorldPx(Viewport.LastMousePos))
.Where(a => a.TooltipName != null && a.IsValid)
.WithHighestSelectionPriority();
if (frozen != null)
{
FrozenActorTooltip = frozen;
TooltipType = WorldTooltipType.FrozenActor;
}
}
public override string GetCursor(int2 pos)
{
if (!Game.Settings.Game.ViewportEdgeScroll || Ui.MouseOverWidget != this)
return null;
var blockedDirections = worldRenderer.Viewport.GetBlockedDirections();
foreach (var dir in ScrollCursors)
if (edgeDirections.Includes(dir.Key))
return dir.Value + (blockedDirections.Includes(dir.Key) ? "-blocked" : "");
return null;
}
public override bool HandleMouseInput(MouseInput mi)
{
var scrolltype = Game.Settings.Game.MouseScroll;
if (scrolltype == MouseScrollType.Disabled)
return false;
if (mi.Event == MouseInputEvent.Move &&
(mi.Button == MouseButton.Middle || mi.Button == (MouseButton.Left | MouseButton.Right)))
{
var d = scrolltype == MouseScrollType.Inverted ? -1 : 1;
worldRenderer.Viewport.Scroll((Viewport.LastMousePos - mi.Location) * d, false);
return true;
}
return false;
}
public override bool YieldKeyboardFocus()
{
keyboardDirections = ScrollDirection.None;
return base.YieldKeyboardFocus();
}
public override bool HandleKeyPress(KeyInput e)
{
switch (e.Key)
{
case Keycode.UP: keyboardDirections = keyboardDirections.Set(ScrollDirection.Up, e.Event == KeyInputEvent.Down); return true;
case Keycode.DOWN: keyboardDirections = keyboardDirections.Set(ScrollDirection.Down, e.Event == KeyInputEvent.Down); return true;
case Keycode.LEFT: keyboardDirections = keyboardDirections.Set(ScrollDirection.Left, e.Event == KeyInputEvent.Down); return true;
case Keycode.RIGHT: keyboardDirections = keyboardDirections.Set(ScrollDirection.Right, e.Event == KeyInputEvent.Down); return true;
}
return false;
}
public override void Tick()
{
edgeDirections = ScrollDirection.None;
if (Game.Settings.Game.ViewportEdgeScroll && Game.HasInputFocus)
edgeDirections = CheckForDirections();
if (keyboardDirections != ScrollDirection.None || edgeDirections != ScrollDirection.None)
{
var scroll = float2.Zero;
foreach (var kv in ScrollOffsets)
if (keyboardDirections.Includes(kv.Key) || edgeDirections.Includes(kv.Key))
scroll += kv.Value;
var length = Math.Max(1, scroll.Length);
scroll *= (1f / length) * Game.Settings.Game.ViewportEdgeScrollStep;
worldRenderer.Viewport.Scroll(scroll, false);
}
}
ScrollDirection CheckForDirections()
{
var directions = ScrollDirection.None;
if (Viewport.LastMousePos.X < EdgeScrollThreshold)
directions |= ScrollDirection.Left;
if (Viewport.LastMousePos.Y < EdgeScrollThreshold)
directions |= ScrollDirection.Up;
if (Viewport.LastMousePos.X >= Game.Renderer.Resolution.Width - EdgeScrollThreshold)
|
[
"\t\t\t\tdirections |= ScrollDirection.Right;"
] | 598
|
lcc
|
csharp
| null |
91b83a11b6dc2c86381251a96905d31244b8ac184fe12ef5
|
|
//
// DO NOT REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
//
// @Authors:
// peters
//
// Copyright 2004-2012 by OM International
//
// This file is part of OpenPetra.org.
//
// OpenPetra.org is free software: you can redistribute it and/or modify
// it under the terms of the GNU General Public License as published by
// the Free Software Foundation, either version 3 of the License, or
// (at your option) any later version.
//
// OpenPetra.org is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU General Public License for more details.
//
// You should have received a copy of the GNU General Public License
// along with OpenPetra.org. If not, see <http://www.gnu.org/licenses/>.
//
using System;
using System.Collections.Generic;
using System.Data;
using System.Windows.Forms;
using GNU.Gettext;
using Ict.Common;
using Ict.Common.Exceptions;
using Ict.Common.Verification;
using Ict.Petra.Client.App.Core;
using Ict.Petra.Client.App.Core.RemoteObjects;
using Ict.Petra.Client.MPartner.Gui;
using Ict.Petra.Shared;
using Ict.Petra.Shared.MConference;
using Ict.Petra.Shared.MConference.Data;
using Ict.Petra.Shared.MConference.Validation;
using Ict.Petra.Shared.MPartner;
using Ict.Petra.Shared.MPartner.Partner.Data;
namespace Ict.Petra.Client.MConference.Gui.Setup
{
public partial class TFrmConferenceMasterSettings
{
/// PartnerKey for selected conference to be set from outside
public static Int64 FPartnerKey {
private get; set;
}
private void InitializeManualCode()
{
string ConferenceName;
// load data into dataset
FMainDS.Clear();
FMainDS.Merge(TRemote.MConference.Conference.WebConnectors.LoadConferenceSettings(FPartnerKey, out ConferenceName));
// display conference name
this.Text = this.Text + " [" + ConferenceName + "]";
txtConferenceName.Text = ConferenceName;
// display campaign code prefix
txtCampaignPrefixCode.Text = ((PcConferenceRow)FMainDS.PcConference.Rows[0]).OutreachPrefix;
// display start/end dates
dtpStartDate.Date = ((PPartnerLocationRow)FMainDS.PPartnerLocation.Rows[0]).DateEffective;
dtpEndDate.Date = ((PPartnerLocationRow)FMainDS.PPartnerLocation.Rows[0]).DateGoodUntil;
// enable dtps only if date is null
if ((dtpStartDate.Date == null) || (dtpStartDate.Date == DateTime.MinValue))
{
dtpStartDate.Enabled = true;
}
if ((dtpEndDate.Date == null) || (dtpEndDate.Date == DateTime.MinValue))
{
dtpEndDate.Enabled = true;
}
// display currency (if currency code in PUnit has changed then use that over the currency code in PcConference)
if ((FMainDS.PUnit.Rows.Count == 0)
|| (((PUnitRow)FMainDS.PUnit.Rows[0]).OutreachCostCurrencyCode == ((PcConferenceRow)FMainDS.PcConference.Rows[0]).CurrencyCode))
{
cmbCurrency.SetSelectedString(((PcConferenceRow)FMainDS.PcConference.Rows[0]).CurrencyCode, -1);
}
else
{
cmbCurrency.SetSelectedString(((PUnitRow)FMainDS.PUnit.Rows[0]).OutreachCostCurrencyCode, -1);
}
// set radio buttons and checkbox
Boolean ChargeCampaign = true;
Boolean AddAccommodationCosts = false;
foreach (PcConferenceOptionRow CurrentRow in FMainDS.PcConferenceOption.Rows)
{
if ((CurrentRow.OptionTypeCode == "COST_PER_NIGHT") && (CurrentRow.OptionSet == true))
{
ChargeCampaign = false;
rbtNight.Checked = true;
}
else if ((CurrentRow.OptionTypeCode == "COST_PER_DAY") && (CurrentRow.OptionSet == true))
{
ChargeCampaign = false;
rbtDay.Checked = true;
}
else if ((CurrentRow.OptionTypeCode == "ADD_ACCOMM_COST_FOR_TOTAL") && (CurrentRow.OptionSet == true))
{
AddAccommodationCosts = true;
}
}
if (ChargeCampaign == true)
{
rbtCampaign.Checked = true;
chkAddAccommodationCosts.Enabled = false;
}
else if (AddAccommodationCosts == true)
{
chkAddAccommodationCosts.Checked = true;
txtSpecialRolePreAccommodation.ReadOnly = false;
txtVolunteerPreAccommodation.ReadOnly = false;
txtParticipantPreAccommodation.ReadOnly = false;
txtSpecialRoleAccommodation.ReadOnly = false;
txtVolunteerAccommodation.ReadOnly = false;
txtSpecialRoleCampaignAccommodation.ReadOnly = false;
txtSpecialRolePreAccommodation.TabStop = true;
txtVolunteerPreAccommodation.TabStop = true;
txtParticipantPreAccommodation.TabStop = true;
txtSpecialRoleAccommodation.TabStop = true;
txtVolunteerAccommodation.TabStop = true;
txtSpecialRoleCampaignAccommodation.TabStop = true;
}
// display conference discounts
foreach (PcDiscountRow CurrentRow in FMainDS.PcDiscount.Rows)
{
if (CurrentRow.CostTypeCode == "CONFERENCE")
{
if (CurrentRow.Validity == "PRE")
{
if (CurrentRow.DiscountCriteriaCode == "ROLE")
{
txtSpecialRolePreAttendance.NumberValueInt = (int)CurrentRow.Discount;
}
else if (CurrentRow.DiscountCriteriaCode == "VOL")
{
txtVolunteerPreAttendance.NumberValueInt = (int)CurrentRow.Discount;
}
else if (CurrentRow.DiscountCriteriaCode == "OTHER")
{
txtParticipantPreAttendance.NumberValueInt = (int)CurrentRow.Discount;
}
}
else if (CurrentRow.Validity == "CONF")
{
if (CurrentRow.DiscountCriteriaCode == "ROLE")
{
txtSpecialRoleAttendance.NumberValueInt = (int)CurrentRow.Discount;
}
else if (CurrentRow.DiscountCriteriaCode == "VOL")
{
txtVolunteerAttendance.NumberValueInt = (int)CurrentRow.Discount;
}
}
else if ((CurrentRow.Validity == "POST") && (CurrentRow.DiscountCriteriaCode == "ROLE"))
{
txtSpecialRoleCampaignAttendance.NumberValueInt = (int)CurrentRow.Discount;
}
}
else if (CurrentRow.CostTypeCode == "ACCOMMODATION")
{
if (CurrentRow.Validity == "PRE")
{
if (CurrentRow.DiscountCriteriaCode == "ROLE")
{
txtSpecialRolePreAccommodation.NumberValueInt = (int)CurrentRow.Discount;
}
else if (CurrentRow.DiscountCriteriaCode == "VOL")
{
txtVolunteerPreAccommodation.NumberValueInt = (int)CurrentRow.Discount;
}
else if (CurrentRow.DiscountCriteriaCode == "OTHER")
{
txtParticipantPreAccommodation.NumberValueInt = (int)CurrentRow.Discount;
}
}
else if (CurrentRow.Validity == "CONF")
{
if (CurrentRow.DiscountCriteriaCode == "ROLE")
{
txtSpecialRoleAccommodation.NumberValueInt = (int)CurrentRow.Discount;
}
else if (CurrentRow.DiscountCriteriaCode == "VOL")
{
txtVolunteerAccommodation.NumberValueInt = (int)CurrentRow.Discount;
}
}
else if ((CurrentRow.Validity == "POST") && (CurrentRow.DiscountCriteriaCode == "ROLE"))
{
txtSpecialRoleCampaignAccommodation.NumberValueInt = (int)CurrentRow.Discount;
}
}
}
// display grid containing venue details
grdVenues.Columns.Clear();
grdVenues.AddPartnerKeyColumn(Catalog.GetString("Venue Key"), FMainDS.PcConferenceVenue.ColumnVenueKey);
grdVenues.AddTextColumn(Catalog.GetString("Venue Name"), FMainDS.PcConferenceVenue.ColumnVenueName);
DataView MyDataView = FMainDS.PcConferenceVenue.DefaultView;
MyDataView.Sort = "p_venue_name_c ASC";
MyDataView.AllowNew = false;
grdVenues.DataSource = new DevAge.ComponentModel.BoundDataView(MyDataView);
}
// disables or enables the checkbox when a different radio button is selected
private void AttendanceChargeChanged(object sender, EventArgs e)
{
if (rbtDay.Checked || rbtNight.Checked)
{
chkAddAccommodationCosts.Enabled = true;
}
else
{
chkAddAccommodationCosts.Checked = false;
chkAddAccommodationCosts.Enabled = false;
}
}
// Called when the checkbox is changed. Toggles textboxes' ReadOnly property.
private void UpdateDiscounts(object sender, EventArgs e)
{
Boolean AccommodationDiscountsReadOnly = true;
if (chkAddAccommodationCosts.Checked)
{
AccommodationDiscountsReadOnly = false;
}
txtSpecialRolePreAccommodation.ReadOnly = AccommodationDiscountsReadOnly;
txtVolunteerPreAccommodation.ReadOnly = AccommodationDiscountsReadOnly;
txtParticipantPreAccommodation.ReadOnly = AccommodationDiscountsReadOnly;
txtSpecialRoleAccommodation.ReadOnly = AccommodationDiscountsReadOnly;
txtVolunteerAccommodation.ReadOnly = AccommodationDiscountsReadOnly;
txtSpecialRoleCampaignAccommodation.ReadOnly = AccommodationDiscountsReadOnly;
txtSpecialRolePreAccommodation.TabStop = !AccommodationDiscountsReadOnly;
txtVolunteerPreAccommodation.TabStop = !AccommodationDiscountsReadOnly;
txtParticipantPreAccommodation.TabStop = !AccommodationDiscountsReadOnly;
txtSpecialRoleAccommodation.TabStop = !AccommodationDiscountsReadOnly;
txtVolunteerAccommodation.TabStop = !AccommodationDiscountsReadOnly;
txtSpecialRoleCampaignAccommodation.TabStop = !AccommodationDiscountsReadOnly;
}
// Called with Add button. Adds new venue to conference.
private void AddVenue(object sender, EventArgs e)
{
long ResultVenueKey;
String ResultVenueName;
TPartnerClass? PartnerClass;
TLocationPK ResultLocationPK;
DataRow[] ExistingVenueDataRows;
// the user has to select an existing venue to make that venue a conference venue
try
{
// launches partner find screen and returns true if a venue is selected
if (TPartnerFindScreenManager.OpenModalForm("VENUE", out ResultVenueKey, out ResultVenueName, out PartnerClass, out ResultLocationPK,
this))
{
// search for selected venue in dataset
ExistingVenueDataRows = FMainDS.PcConferenceVenue.Select(ConferenceSetupTDSPcConferenceVenueTable.GetVenueKeyDBName() +
" = " + ResultVenueKey.ToString());
// if venue does not already exist for venue
if (ExistingVenueDataRows.Length == 0)
{
ConferenceSetupTDSPcConferenceVenueRow AddedVenue = FMainDS.PcConferenceVenue.NewRowTyped(true);
AddedVenue.ConferenceKey = FPartnerKey;
AddedVenue.VenueKey = ResultVenueKey;
AddedVenue.VenueName = ResultVenueName;
FMainDS.PcConferenceVenue.Rows.Add(AddedVenue);
FPetraUtilsObject.SetChangedFlag();
}
// if venue does already exist for venue
else
{
MessageBox.Show(Catalog.GetString("This venue is already included for this conference"),
Catalog.GetString("Add Venue to Conference"),
MessageBoxButtons.OK,
MessageBoxIcon.Information);
}
}
}
catch (Exception exp)
{
throw new EOPAppException("Exception occured while calling VenueFindScreen!", exp);
}
}
// Called with Remove button. Removes a venue from conference.
private void RemoveVenue(object sender, EventArgs e)
{
if (grdVenues.SelectedDataRows.Length == 1)
{
long SelectedVenueKey;
SelectedVenueKey = (Int64)((DataRowView)grdVenues.SelectedDataRows[0]).Row[PcConferenceVenueTable.GetVenueKeyDBName()];
DataRow RowToRemove = FMainDS.PcConferenceVenue.Rows.Find(new object[] { FPartnerKey, SelectedVenueKey });
RowToRemove.Delete();
FPetraUtilsObject.SetChangedFlag();
}
}
// get data from screen and ammend/add to dataset
private void GetDataFromControlsManual(PcConferenceRow ARow)
{
PcConferenceRow ConferenceData = (PcConferenceRow)FMainDS.PcConference.Rows[0];
PPartnerLocationRow PartnerLocationData = (PPartnerLocationRow)FMainDS.PPartnerLocation.Rows[0];
PUnitRow UnitData = (PUnitRow)FMainDS.PUnit.Rows[0];
// do not save currency if it is blank but instead change the combo box to display original value
if (cmbCurrency.GetSelectedString() != "")
{
ConferenceData.CurrencyCode = cmbCurrency.GetSelectedString();
UnitData.OutreachCostCurrencyCode = cmbCurrency.GetSelectedString();
}
else
{
cmbCurrency.SetSelectedString(ConferenceData.CurrencyCode);
}
ConferenceData.Start = dtpStartDate.Date;
ConferenceData.End = dtpEndDate.Date;
PartnerLocationData.DateEffective = dtpStartDate.Date;
PartnerLocationData.DateGoodUntil = dtpEndDate.Date;
// get data from radio buttons and check button for PcConferenceOption
string[] OptionTypeCodes =
{
"COST_PER_NIGHT", "COST_PER_DAY", "ADD_ACCOMM_COST_FOR_TOTAL"
};
Boolean[] OptionSet =
{
rbtNight.Checked, rbtDay.Checked, chkAddAccommodationCosts.Checked
};
for (int i = 0; i < 3; i++)
{
DataRow RowExists = FMainDS.PcConferenceOption.Rows.Find(new object[] { FPartnerKey, OptionTypeCodes[i] });
// create new row if needed
if ((RowExists == null) && OptionSet[i])
{
PcConferenceOptionRow RowToAdd = FMainDS.PcConferenceOption.NewRowTyped(true);
RowToAdd.ConferenceKey = FPartnerKey;
RowToAdd.OptionTypeCode = OptionTypeCodes[i];
RowToAdd.OptionSet = true;
FMainDS.PcConferenceOption.Rows.Add(RowToAdd);
}
// update existing record
else if ((RowExists != null) && OptionSet[i])
{
((PcConferenceOptionRow)RowExists).OptionSet = true;
}
// delete existing record if discount is 0
else if ((RowExists != null) && !OptionSet[i])
{
RowExists.Delete();
}
}
// reset the Accommodation text boxs to 0 if no longer needed
if (!chkAddAccommodationCosts.Checked)
{
txtSpecialRolePreAccommodation.NumberValueInt = 0;
txtVolunteerPreAccommodation.NumberValueInt = 0;
txtParticipantPreAccommodation.NumberValueInt = 0;
txtSpecialRoleAccommodation.NumberValueInt = 0;
txtVolunteerAccommodation.NumberValueInt = 0;
txtSpecialRoleCampaignAccommodation.NumberValueInt = 0;
}
// get data from discount text boxes for PcDiscount
string[, ] Discounts =
{
{ "ROLE", "CONFERENCE", "PRE", txtSpecialRolePreAttendance.Text.TrimEnd(new char[] { ' ', '%' }) },
{ "VOL", "CONFERENCE", "PRE", txtVolunteerPreAttendance.Text.TrimEnd(new char[] { ' ', '%' }) },
{ "OTHER", "CONFERENCE", "PRE", txtParticipantPreAttendance.Text.TrimEnd(new char[] { ' ', '%' }) },
{ "ROLE", "CONFERENCE", "CONF", txtSpecialRoleAttendance.Text.TrimEnd(new char[] { ' ', '%' }) },
{ "VOL", "CONFERENCE", "CONF", txtVolunteerAttendance.Text.TrimEnd(new char[] { ' ', '%' }) },
{ "ROLE", "CONFERENCE", "POST", txtSpecialRoleCampaignAttendance.Text.TrimEnd(new char[] { ' ', '%' }) },
{ "ROLE", "ACCOMMODATION", "PRE", txtSpecialRolePreAccommodation.Text.TrimEnd(new char[] { ' ', '%' }) },
{ "VOL", "ACCOMMODATION", "PRE", txtVolunteerPreAccommodation.Text.TrimEnd(new char[] { ' ', '%' }) },
{ "OTHER", "ACCOMMODATION", "PRE", txtParticipantPreAccommodation.Text.TrimEnd(new char[] { ' ', '%' }) },
{ "ROLE", "ACCOMMODATION", "CONF", txtSpecialRoleAccommodation.Text.TrimEnd(new char[] { ' ', '%' }) },
{ "VOL", "ACCOMMODATION", "CONF", txtVolunteerAccommodation.Text.TrimEnd(new char[] { ' ', '%' }) },
{ "ROLE", "ACCOMMODATION", "POST", txtSpecialRoleCampaignAccommodation.Text.TrimEnd(new char[] { ' ', '%' }) }
};
for (int i = 0; i < 12; i++)
{
DataRow RowExists = FMainDS.PcDiscount.Rows.Find(new object[] { FPartnerKey, Discounts[i, 0], Discounts[i, 1], Discounts[i, 2], -1 });
if (Discounts[i, 3] == "")
{
Discounts[i, 3] = "0";
}
// create new row if needed
if ((RowExists == null) && (Convert.ToInt32(Discounts[i, 3]) != 0))
{
PcDiscountRow RowToAdd = FMainDS.PcDiscount.NewRowTyped(true);
RowToAdd.ConferenceKey = FPartnerKey;
RowToAdd.DiscountCriteriaCode = Discounts[i, 0];
RowToAdd.CostTypeCode = Discounts[i, 1];
RowToAdd.Validity = Discounts[i, 2];
RowToAdd.UpToAge = -1;
RowToAdd.Percentage = true;
RowToAdd.Discount = Convert.ToInt32(Discounts[i, 3]);
FMainDS.PcDiscount.Rows.Add(RowToAdd);
}
// update existing record
else if ((RowExists != null) && (Convert.ToInt32(Discounts[i, 3]) != 0))
{
((PcDiscountRow)RowExists).Discount = Convert.ToInt32(Discounts[i, 3]);
}
// delete existing record if discount is 0
else if ((RowExists != null) && (Convert.ToInt32(Discounts[i, 3]) == 0))
{
RowExists.Delete();
}
}
}
// save data
private TSubmitChangesResult StoreManualCode(ref ConferenceSetupTDS ASubmitChanges, out TVerificationResultCollection AVerificationResult)
{
AVerificationResult = null;
return TRemote.MConference.Conference.WebConnectors.SaveConferenceSetupTDS(ref ASubmitChanges);
}
private void ValidateDataManual(PcConferenceRow ARow)
{
PcDiscountTable DiscountTable = FMainDS.PcDiscount;
TVerificationResultCollection VerificationResultCollection = FPetraUtilsObject.VerificationResultCollection;
TValidationControlsData ValidationControlsData;
TScreenVerificationResult VerificationResult = null;
DataColumn ValidationColumn;
List <string>CriteriaCodesUsed = new List <string>();
foreach (PcDiscountRow Row in DiscountTable.Rows)
{
if ((Row.RowState != DataRowState.Deleted) && (Row.DiscountCriteriaCode != "CHILD"))
{
if (Row.Discount > 100)
{
ValidationColumn = Row.Table.Columns[PcDiscountTable.ColumnDiscountId];
// displays a warning message
VerificationResult = new TScreenVerificationResult(new TVerificationResult(this, ErrorCodes.GetErrorInfo(
PetraErrorCodes.ERR_DISCOUNT_PERCENTAGE_GREATER_THAN_100)),
ValidationColumn, ValidationControlsData.ValidationControl);
// Handle addition to/removal from TVerificationResultCollection
VerificationResultCollection.Auto_Add_Or_AddOrRemove(this, VerificationResult, ValidationColumn);
}
if (!CriteriaCodesUsed.Exists(element => element == Row.DiscountCriteriaCode))
{
CriteriaCodesUsed.Add(Row.DiscountCriteriaCode);
}
}
}
|
[
" string[] CriteriaCodesUsedArray = CriteriaCodesUsed.ToArray();"
] | 1,562
|
lcc
|
csharp
| null |
56926abd2bf7483455bb0fefd23ad8e304fab1e8b70b8eee
|
|
/*
* Phosphorus Five, copyright 2014 - 2017, Thomas Hansen, thomas@gaiasoul.com
*
* This file is part of Phosphorus Five.
*
* Phosphorus Five is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License version 3, as published by
* the Free Software Foundation.
*
*
* Phosphorus Five is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with Phosphorus Five. If not, see <http://www.gnu.org/licenses/>.
*
* If you cannot for some reasons use the GPL license, Phosphorus
* Five is also commercially available under Quid Pro Quo terms. Check
* out our website at http://gaiasoul.com for more details.
*/
using System;
using System.IO;
using p5.exp;
using p5.core;
using p5.io.common;
using p5.exp.exceptions;
namespace p5.io.file
{
/// <summary>
/// Loads one or more file(s).
/// </summary>
public static class Load
{
/// <summary>
/// Loads one or more file(s) from local disc.
/// </summary>
/// <param name="context">Application Context</param>
/// <param name="e">Parameters passed into Active Event</param>
[ActiveEvent (Name = "load-file")]
[ActiveEvent (Name = "p5.io.file.load")]
public static void p5_io_file_load (ApplicationContext context, ActiveEventArgs e)
{
ObjectIterator.Iterate (
context,
e.Args,
true,
"read-file",
delegate (string filename, string fullpath) {
if (File.Exists (fullpath)) {
// Text files and binary files are loaded differently.
// Text file might for instance be converted automatically.
if (IsTextFile (filename)) {
// Text file of some sort.
LoadTextFile (context, e.Args, fullpath, filename);
} else {
// Some sort of binary file (probably).
LoadBinaryFile (e.Args, fullpath, filename);
}
} else {
// Oops, file didn't exist.
throw new LambdaException (
string.Format ("Couldn't find file '{0}'", filename),
e.Args,
context);
}
});
}
/// <summary>
/// Loads one or more file(s) from local disc and saves into given stream.
/// </summary>
/// <param name="context">Application Context</param>
/// <param name="e">Parameters passed into Active Event</param>
[ActiveEvent (Name = ".p5.io.file.serialize-to-stream")]
public static void _p5_io_file_serialize_to_stream (ApplicationContext context, ActiveEventArgs e)
{
// Retrieving stream argument.
var tuple = e.Args.Value as Tuple<object, Stream>;
// Retrieving stream and doing some basic sanity check.
var outStream = tuple.Item2;
if (outStream == null)
throw new LambdaException ("No stream supplied to [.p5.io.file.serialize-to-stream]", e.Args, context);
// Iterating through files specified.
ObjectIterator.Iterate (
context,
e.Args,
true,
"read-file",
delegate (string filename, string fullpath) {
if (File.Exists (fullpath)) {
// Serializing file into stream.
using (FileStream stream = File.OpenRead (fullpath)) {
stream.CopyTo (outStream);
}
} else {
// Oops, file didn't exist.
throw new LambdaException (
string.Format ("Couldn't find file '{0}'", filename),
e.Args,
context);
}
});
}
/*
 * Determines whether a file should be loaded as text, by looking at the
 * most common text-based file extensions.
 */
static bool IsTextFile (string fileName)
{
    // Path.GetExtension returns the extension including the leading dot,
    // or an empty string when there is none. The comparison is ordinal and
    // case-sensitive; anything not listed here is treated as binary.
    string extension = Path.GetExtension (fileName);
    return extension == ".txt"
        || extension == ".md"
        || extension == ".css"
        || extension == ".js"
        || extension == ".html"
        || extension == ".htm"
        || extension == ".hl"
        || extension == ".xml"
        || extension == ".csv";
}
/*
 * Loads specified file as text and appends into args, possibly converting into lambda.
 */
static void LoadTextFile (
    ApplicationContext context,
    Node args,
    string fullpath,
    string fileName)
{
    // Checking if we should automatically convert file content to lambda.
    // Conversion can be suppressed by the caller via a [convert] child set to false.
    if (fileName.EndsWithEx (".hl") && args.GetExChildValue ("convert", context, true)) {

        // Automatically converting to lambda before returning, making sure we
        // parse the lambda directly from the stream.
        using (Stream stream = File.OpenRead (fullpath)) {

            // Invoking our "stream to lambda" event.
            // The open stream is passed as the node's value so the handler can read it.
            var fileNode = args.Add (fileName, stream).LastChild;
            try {
                context.RaiseEvent (".stream2lambda", fileNode);
            } finally {

                // Always detach the stream from the node, even when parsing throws,
                // so the soon-to-be-disposed stream never leaks out through args.
                fileNode.Value = null;
            }
        }
    } else {

        // Using a TextReader to read file's content.
        using (TextReader reader = File.OpenText (fullpath)) {

            // Reading file content.
            string fileContent = reader.ReadToEnd ();
            if (fileName.EndsWithEx (".csv") && args.GetExChildValue ("convert", context, true)) {

                // Automatically converting to lambda before returning.
                var csvLambda = new Node ("", fileContent);
                context.RaiseEvent ("p5.csv.csv2lambda", csvLambda);

                // Appending the parsed rows ([result]'s children) beneath a node
                // named after the file.
                args.Add (fileName, null, csvLambda ["result"].Children);
            } else {

                // Adding file content as string.
                args.Add (fileName, fileContent);
            }
        }
    }
}
/*
* Loads a binary file and appends as blob/byte[] into args.
*/
static void LoadBinaryFile (
Node args,
string fullpath,
string filename)
{
using (FileStream stream = File.OpenRead (fullpath)) {
// Reading file content
var buffer = new byte [stream.Length];
|
[
" stream.Read (buffer, 0, buffer.Length);"
] | 722
|
lcc
|
csharp
| null |
50f7d00750145f888361c63d85ebf5e9fb68a039385b8255
|
|
#
# Copyright (C) 2019 Red Hat, Inc.
#
# This copyrighted material is made available to anyone wishing to use,
# modify, copy, or redistribute it subject to the terms and conditions of
# the GNU General Public License v.2, or (at your option) any later version.
# This program is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY expressed or implied, including the implied warranties of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General
# Public License for more details. You should have received a copy of the
# GNU General Public License along with this program; if not, write to the
# Free Software Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA
# 02110-1301, USA. Any Red Hat trademarks that are incorporated in the
# source code or documentation are not subject to the GNU General Public
# License and may only be used or replicated with the express permission of
# Red Hat, Inc.
#
from collections import OrderedDict, namedtuple
from pyanaconda.core.constants import PayloadRequirementType
from pyanaconda.payload.errors import PayloadRequirementsMissingApply
from pyanaconda.anaconda_loggers import get_module_logger
log = get_module_logger(__name__)
PayloadRequirementReason = namedtuple('PayloadRequirementReason', ['reason', 'strong'])
__all__ = ["PayloadRequirements", "PayloadRequirement"]
class PayloadRequirement(object):
"""An object to store a payload requirement with info about its reasons.
For each requirement multiple reasons together with their strength
can be stored in this object using the add_reason method.
A reason should be just a string with description (ie for tracking purposes).
Strength is a boolean flag that can be used to indicate whether missing the
requirement should be considered fatal. Strength of the requirement is
given by strength of all its reasons.
"""
def __init__(self, req_id, reasons=None):
self._id = req_id
self._reasons = reasons or []
@property
def id(self):
"""Identifier of the requirement (eg a package name)"""
return self._id
@property
def reasons(self):
"""List of reasons for the requirement"""
return [reason for reason, strong in self._reasons]
@property
def strong(self):
"""Strength of the requirement (ie should it be considered fatal?)"""
return any(strong for reason, strong in self._reasons)
def add_reason(self, reason, strong=False):
"""Adds a reason to the requirement with optional strength of the reason"""
self._reasons.append(PayloadRequirementReason(reason, strong))
def __str__(self):
return "PayloadRequirement(id=%s, reasons=%s, strong=%s)" % (self.id,
self.reasons,
self.strong)
def __repr__(self):
return 'PayloadRequirement(id=%s, reasons=%s)' % (self.id, self._reasons)
class PayloadRequirements(object):
"""A container for payload requirements imposed by installed functionality.
Stores names of packages and groups required by used installer features,
together with descriptions of reasons why the object is required and if the
requirement is strong. Not satisfying strong requirement would be fatal for
installation.
"""
def __init__(self):
    # True when apply() has run for every requirement added so far; starts
    # True because an empty container has nothing pending.
    self._apply_called_for_all_requirements = True
    # Callback invoked by apply(); installed via set_apply_callback().
    self._apply_cb = None
    # Requirement storage: {PayloadRequirementType: OrderedDict(id -> PayloadRequirement)},
    # OrderedDict so requirements keep their insertion order.
    self._reqs = {}
    for req_type in PayloadRequirementType:
        self._reqs[req_type] = OrderedDict()
def add_packages(self, package_names, reason, strong=True):
    """Add packages required for the reason.

    If a package is already required, the new reason will be
    added and the strength of the requirement will be updated.

    :param package_names: names of packages to be added
    :type package_names: list of str
    :param reason: description of reason for adding the packages
    :type reason: str
    :param strong: is the requirement strong (ie is not satisfying it fatal?)
    :type strong: bool
    """
    # Delegate to the shared helper with the package requirement type.
    self._add(PayloadRequirementType.package, package_names, reason, strong)
def add_groups(self, group_ids, reason, strong=True):
    """Add groups required for the reason.

    If a group is already required, the new reason will be
    added and the strength of the requirement will be updated.

    :param group_ids: ids of groups to be added
    :type group_ids: list of str
    :param reason: description of reason for adding the groups
    :type reason: str
    :param strong: is the requirement strong
    :type strong: bool
    """
    # Delegate to the shared helper with the group requirement type.
    self._add(PayloadRequirementType.group, group_ids, reason, strong)
def add_requirements(self, requirements):
    """Register a batch of Requirement instances.

    :param requirements: list of Requirement instances
    """
    # Route each requirement to the matching add_* method by its type;
    # both handlers use their default strength (strong=True).
    handlers = {
        "package": self.add_packages,
        "group": self.add_groups,
    }
    for requirement in requirements:
        handler = handlers.get(requirement.type)
        if handler is None:
            log.warning("Unknown type: %s in requirement: %s, skipping.", requirement.type, requirement)
        else:
            handler([requirement.name], reason=requirement.reason)
def _add(self, req_type, ids, reason, strong):
    # Shared implementation behind add_packages()/add_groups().
    #
    # Note: when ids is empty this only logs; the loop below is a no-op,
    # so the "applied" flag is intentionally left untouched.
    if not ids:
        log.debug("no %s requirement added for %s", req_type.value, reason)
    reqs = self._reqs[req_type]
    for r_id in ids:
        # Create the requirement on first sight, then record the new reason.
        if r_id not in reqs:
            reqs[r_id] = PayloadRequirement(r_id)
        reqs[r_id].add_reason(reason, strong)
        # Any new or updated requirement has not been applied yet.
        self._apply_called_for_all_requirements = False
        log.debug("added %s requirement '%s' for %s, strong=%s",
                  req_type.value, r_id, reason, strong)
@property
def packages(self):
    """List of package requirements.

    :return: list of package requirements
    :rtype: list of PayloadRequirement
    """
    return list(self._reqs[PayloadRequirementType.package].values())
@property
def groups(self):
    """List of group requirements.

    :return: list of group requirements
    :rtype: list of PayloadRequirement
    """
    return list(self._reqs[PayloadRequirementType.group].values())
def set_apply_callback(self, callback):
    """Set the callback for applying requirements.

    The callback will be called by the apply() method.

    :param callback: callback function to be called by apply() method
    :type callback: a function taking one argument (requirements object)
    """
    self._apply_cb = callback
def apply(self):
    """Apply requirements using the callback function.

    Calls the callback supplied via the set_apply_callback() method. If no
    callback was set, an exception is raised.

    :return: return value of the callback
    :rtype: type of the callback return value
    :raise PayloadRequirementsMissingApply: if there is no callback set
    """
    if self._apply_cb:
        # Mark everything as applied before invoking the callback; _add()
        # resets this flag whenever a new requirement arrives later.
        self._apply_called_for_all_requirements = True
        rv = self._apply_cb(self)
        log.debug("apply with result %s called on requirements %s", rv, self)
        return rv
    else:
        raise PayloadRequirementsMissingApply
@property
def applied(self):
    """Have all requirements been applied?

    :return: was apply called for all current requirements?
    :rtype: bool
    """
    # An empty container is trivially "applied".
    return self.empty or self._apply_called_for_all_requirements
@property
def empty(self):
"""Are requirements empty?
return: True if there are no requirements, else False
rtype: bool
"""
|
[
" return not any(self._reqs.values())"
] | 879
|
lcc
|
python
| null |
59b6354e9591524b4ada354f2fa122917bcd1253262c12f2
|
|
#region License
// Copyright (c) 2013, ClearCanvas Inc.
// All rights reserved.
// http://www.ClearCanvas.ca
//
// This file is part of the ClearCanvas RIS/PACS open source project.
//
// The ClearCanvas RIS/PACS open source project is free software: you can
// redistribute it and/or modify it under the terms of the GNU General Public
// License as published by the Free Software Foundation, either version 3 of the
// License, or (at your option) any later version.
//
// The ClearCanvas RIS/PACS open source project is distributed in the hope that it
// will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General
// Public License for more details.
//
// You should have received a copy of the GNU General Public License along with
// the ClearCanvas RIS/PACS open source project. If not, see
// <http://www.gnu.org/licenses/>.
#endregion
using System;
using System.Collections.Generic;
using Macro.Common;
using Macro.Common.Utilities;
using Macro.Desktop;
using Macro.Desktop.Validation;
using Macro.ImageViewer.StudyManagement;
#pragma warning disable 0419,1574,1587,1591
namespace Macro.ImageViewer.Clipboard.CopyToClipboard
{
public sealed class CopySubsetToClipboardComponentViewExtensionPoint : ExtensionPoint<IApplicationComponentView>
{
}
[AssociateView(typeof(CopySubsetToClipboardComponentViewExtensionPoint))]
public partial class CopySubsetToClipboardComponent : ApplicationComponent
{
private enum RangeSelectionOption
{
InstanceNumber = 0,
Position
}
private enum CopyOption
{
CopyRange = 0,
CopyCustom
}
private enum CopyRangeOption
{
CopyAll = 0,
CopyAtInterval
}
private readonly IDesktopWindow _desktopWindow;
private IImageViewer _activeViewer;
private IDisplaySet _currentDisplaySet;
private int _numberOfImages;
private RangeSelectionOption _rangeSelectionOption;
private int _minInstanceNumber;
private int _maxInstanceNumber;
private CopyOption _copyOption;
private CopyRangeOption _copyRangeOption;
private int _copyRangeStart;
private int _copyRangeEnd;
private int _rangeMinimum;
private int _rangeMaximum;
private bool _updatingCopyRange;
private int _copyRangeInterval;
private static readonly int _rangeMinInterval = 2;
private int _rangeMaxInterval;
private string _customRange;
internal CopySubsetToClipboardComponent(IDesktopWindow desktopWindow)
{
Platform.CheckForNullReference(desktopWindow, "desktopWindow");
_desktopWindow = desktopWindow;
}
#region Internal / Private Methods
internal IDesktopWindow DesktopWindow
{
get { return _desktopWindow; }
}
internal void Close()
{
this.Host.Exit();
}
private void OnWorkspaceChanged(object sender, ItemEventArgs<Workspace> e)
{
    // Resolve the image viewer hosted by the currently active workspace,
    // if any; null (no workspace, or a non-viewer workspace) clears the
    // active viewer via SetActiveViewer.
    IImageViewer viewer = null;
    if (_desktopWindow.ActiveWorkspace != null)
        viewer = ImageViewerComponent.GetAsImageViewer(_desktopWindow.ActiveWorkspace);

    SetActiveViewer(viewer);
}
private void OnImageBoxSelected(object sender, ImageBoxSelectedEventArgs e)
{
CurrentDisplaySet = e.SelectedImageBox.DisplaySet;
}
private void OnDisplaySetSelected(object sender, DisplaySetSelectedEventArgs e)
{
CurrentDisplaySet = e.SelectedDisplaySet;
}
private void SetActiveViewer(IImageViewer viewer)
{
    // Detach the handlers from the previous viewer so its event broker
    // no longer calls back into this component (and no longer holds it).
    if (_activeViewer != null)
    {
        _activeViewer.EventBroker.ImageBoxSelected -= OnImageBoxSelected;
        _activeViewer.EventBroker.DisplaySetSelected -= OnDisplaySetSelected;
    }

    _activeViewer = viewer;

    IDisplaySet displaySet = null;

    if (_activeViewer != null)
    {
        // Subscribe to selection changes on the new viewer.
        _activeViewer.EventBroker.ImageBoxSelected += OnImageBoxSelected;
        _activeViewer.EventBroker.DisplaySetSelected += OnDisplaySetSelected;

        // Seed the current display set from the viewer's selection, if any.
        if (_activeViewer.SelectedImageBox != null)
            displaySet = _activeViewer.SelectedImageBox.DisplaySet;
    }

    // Null when there is no viewer or no selected image box.
    CurrentDisplaySet = displaySet;
}
private void CopyToClipboardInternal()
{
    // Surface validation errors instead of copying when any input is invalid.
    if (this.HasValidationErrors)
    {
        base.ShowValidation(true);
    }
    else
    {
        // Choose the image-selection strategy matching the user's options:
        // a (start, end, interval) range, or a custom range expression.
        IImageSelectionStrategy strategy;

        if (CopyRange)
        {
            // An interval of 1 copies every image in the range (the "all" option).
            int interval = 1;
            if (CopyRangeAtInterval)
                interval = CopyRangeInterval;

            strategy = new RangeImageSelectionStrategy(CopyRangeStart, CopyRangeEnd, interval, UseInstanceNumber);
        }
        else
        {
            strategy = new CustomImageSelectionStrategy(CustomRange, RangeMinimum, RangeMaximum, UseInstanceNumber);
        }

        Clipboard.Add(CurrentDisplaySet, strategy);
        // A successful copy closes this component.
        this.Host.Exit();
    }
}
#endregion
public override void Start()
{
_desktopWindow.Workspaces.ItemActivationChanged += OnWorkspaceChanged;
OnWorkspaceChanged(null, null);
base.Start();
}
public override void Stop()
{
_desktopWindow.Workspaces.ItemActivationChanged -= OnWorkspaceChanged;
SetActiveViewer(null);
base.Stop();
}
#region Validation Methods
[ValidationMethodFor("CustomRange")]
private ValidationResult ValidateCustomRange()
{
List<Range> ranges;
if (CopyCustom && !CustomImageSelectionStrategy.Parse(CustomRange, RangeMinimum, RangeMaximum, out ranges))
return new ValidationResult(false, SR.MessageCustomRangeInvalid);
return new ValidationResult(true, "");
}
[ValidationMethodFor("CopyRangeStart")]
private ValidationResult ValidateCopyRangeStart()
{
if (CopyRange && (CopyRangeStart < RangeMinimum || CopyRangeStart > CopyRangeEnd))
return new ValidationResult(false, SR.MessageStartValueOutOfRange);
return new ValidationResult(true, "");
}
[ValidationMethodFor("CopyRangeEnd")]
private ValidationResult ValidateCopyRangeEnd()
{
if (CopyRange)
{
if (CopyRangeEnd < CopyRangeStart || CopyRangeEnd > RangeMaximum)
return new ValidationResult(false, SR.MessageEndValueOutOfRange);
}
return new ValidationResult(true, "");
}
[ValidationMethodFor("CopyRangeInterval")]
private ValidationResult ValidateCopyRangeInterval()
{
if (CopyRange && CopyRangeAtInterval)
{
if (CopyRangeInterval < RangeMinInterval || CopyRangeInterval > RangeMaxInterval)
return new ValidationResult(false, SR.MessageRangeIntervalInvalid);
}
return new ValidationResult(true, "");
}
#endregion
private IDisplaySet CurrentDisplaySet
{
get { return _currentDisplaySet; }
set
{
if (_currentDisplaySet == value)
return;
_currentDisplaySet = value;
UpdateUseInstanceNumber();
UpdateCopyRange();
UpdateCopyCustom();
NotifyPropertyChanged("SourceDisplaySetDescription");
NotifyPropertyChanged("UsePositionNumberEnabled");
NotifyPropertyChanged("CopyRangeEnabled");
NotifyPropertyChanged("CopyRangeAllEnabled");
NotifyPropertyChanged("CopyRangeStartEnabled");
NotifyPropertyChanged("CopyRangeEndEnabled");
NotifyPropertyChanged("Enabled");
}
}
/// <summary>
/// Recomputes the image count and the min/max instance numbers of the
/// current display set, and disables instance-number mode when no
/// instance numbers are available.
/// </summary>
private void UpdateUseInstanceNumber()
{
    // Only change values when there is a display set.
    if (CurrentDisplaySet != null)
    {
        // Sentinels; they survive untouched when no image exposes an ImageSop.
        _minInstanceNumber = int.MaxValue;
        _maxInstanceNumber = int.MinValue;
        // (The original also zeroed _numberOfImages first — redundant, since
        // it is unconditionally overwritten here.)
        _numberOfImages = CurrentDisplaySet.PresentationImages.Count;

        foreach (IPresentationImage image in CurrentDisplaySet.PresentationImages)
        {
            // "as" + null check replaces the original "is" + cast.
            IImageSopProvider provider = image as IImageSopProvider;
            if (provider == null)
                continue;

            int instanceNumber = provider.ImageSop.InstanceNumber;
            if (instanceNumber < _minInstanceNumber)
                _minInstanceNumber = instanceNumber;
            if (instanceNumber > _maxInstanceNumber)
                _maxInstanceNumber = instanceNumber;
        }

        // Instance-number mode is unavailable when the sentinels were never updated.
        if (!UseInstanceNumberEnabled)
            UseInstanceNumber = false;
    }

    NotifyPropertyChanged("UseInstanceNumberEnabled");
}
/// <summary>
/// Resets the selectable range (and the start/end values) to span the
/// whole current display set, in either instance-number or position mode,
/// then re-derives the interval limits.
/// </summary>
private void UpdateCopyRange()
{
    if (CurrentDisplaySet == null)
        return;

    // Suppress per-property UpdateRangeInterval() calls while start/end are
    // rewritten in bulk; a single call is made at the end instead.
    _updatingCopyRange = true;

    if (UseInstanceNumber)
    {
        RangeMinimum = _minInstanceNumber;
        RangeMaximum = _maxInstanceNumber;
    }
    else
    {
        // Position-based: 1..N. The early return above guarantees a display
        // set, so the original "_currentDisplaySet == null ? 1 : ..." branch
        // was dead code and has been removed.
        RangeMinimum = 1;
        RangeMaximum = CurrentDisplaySet.PresentationImages.Count;
    }

    CopyRangeStart = RangeMinimum;
    CopyRangeEnd = RangeMaximum;

    _updatingCopyRange = false;

    UpdateRangeInterval();
}
private void UpdateRangeInterval()
{
    // Re-entrancy guard: UpdateCopyRange() sets start/end in bulk and calls
    // this method once at the end instead of once per property change.
    if (_updatingCopyRange)
        return;

    if (CurrentDisplaySet != null)
    {
        // The widest usable interval is the size of the selected range,
        // never below the fixed minimum interval.
        RangeMaxInterval = Math.Max(RangeMinInterval, CopyRangeEnd - CopyRangeStart);
        // Clamp the current interval into [RangeMinInterval, RangeMaxInterval].
        CopyRangeInterval = Math.Min(CopyRangeInterval, RangeMaxInterval);
        CopyRangeInterval = Math.Max(CopyRangeInterval, RangeMinInterval);

        // Fall back to copying every image when the range is too small for intervals.
        if (!CopyRangeAtIntervalEnabled)
            CopyRangeAtInterval = false;
    }

    NotifyPropertyChanged("CopyRangeIntervalEnabled");
    NotifyPropertyChanged("CopyRangeAtIntervalEnabled");
}
private void UpdateCopyCustom()
{
if (CurrentDisplaySet != null)
{
if (!CopyCustomEnabled)
CopyCustom = false;
}
NotifyPropertyChanged("CustomRangeEnabled");
NotifyPropertyChanged("CopyCustomEnabled");
}
#region Presentation Model
public string SourceDisplaySetDescription
{
get
{
if (this.CurrentDisplaySet != null)
return this.CurrentDisplaySet.Name;
else
return SR.MessageNotApplicable;
}
}
public bool UsePositionNumber
{
get { return _rangeSelectionOption == RangeSelectionOption.Position; }
set
{
if (!value)
{
_rangeSelectionOption = RangeSelectionOption.InstanceNumber;
NotifyPropertyChanged("UsePositionNumber");
NotifyPropertyChanged("UseInstanceNumber");
UpdateCopyRange();
}
}
}
public bool UsePositionNumberEnabled
{
get { return Enabled; }
}
public bool UseInstanceNumber
{
get { return _rangeSelectionOption == RangeSelectionOption.InstanceNumber; }
set
{
if (!value)
{
_rangeSelectionOption = RangeSelectionOption.Position;
NotifyPropertyChanged("UseInstanceNumber");
NotifyPropertyChanged("UsePositionNumber");
UpdateCopyRange();
}
}
}
public bool UseInstanceNumberEnabled
{
get { return Enabled && _minInstanceNumber != int.MaxValue && _maxInstanceNumber != int.MinValue; }
}
public int RangeMinimum
{
get { return _rangeMinimum; }
private set
{
if (_rangeMinimum == value)
return;
_rangeMinimum = value;
NotifyPropertyChanged("RangeMinimum");
}
}
public int RangeMaximum
{
get { return _rangeMaximum; }
private set
{
if (_rangeMaximum == value)
return;
_rangeMaximum = value;
NotifyPropertyChanged("RangeMaximum");
}
}
public int RangeMinInterval
{
get { return _rangeMinInterval; }
}
public int RangeMaxInterval
{
get { return _rangeMaxInterval; }
private set
{
if (value == _rangeMaxInterval)
return;
_rangeMaxInterval = value;
NotifyPropertyChanged("RangeMaxInterval");
}
}
public bool CopyRange
{
get { return _copyOption == CopyOption.CopyRange; }
set
{
if (!value)
{
_copyOption = CopyOption.CopyCustom;
NotifyPropertyChanged("CopyRange");
NotifyPropertyChanged("CopyCustom");
}
}
}
public bool CopyRangeEnabled
{
get { return Enabled; }
}
public bool CopyRangeAll
{
get { return _copyRangeOption == CopyRangeOption.CopyAll; }
set
{
if (!value)
{
_copyRangeOption = CopyRangeOption.CopyAtInterval;
NotifyPropertyChanged("CopyRangeAll");
NotifyPropertyChanged("CopyRangeAtInterval");
}
}
}
public bool CopyRangeAllEnabled
{
get { return Enabled && CopyRange; }
}
public int CopyRangeStart
{
get { return _copyRangeStart; }
set
{
if (value == _copyRangeStart)
return;
_copyRangeStart = value;
NotifyPropertyChanged("CopyRangeStart");
UpdateRangeInterval();
}
}
public bool CopyRangeStartEnabled
{
get { return Enabled && CopyRange; }
}
public int CopyRangeEnd
{
get { return _copyRangeEnd; }
set
{
if (value == _copyRangeEnd)
return;
_copyRangeEnd = value;
NotifyPropertyChanged("CopyRangeEnd");
UpdateRangeInterval();
}
}
public bool CopyRangeEndEnabled
{
get { return Enabled && CopyRange; }
}
public bool CopyRangeAtInterval
{
get { return _copyRangeOption == CopyRangeOption.CopyAtInterval; }
set
{
if (!value)
{
_copyRangeOption = CopyRangeOption.CopyAll;
NotifyPropertyChanged("CopyRangeAtInterval");
NotifyPropertyChanged("CopyRangeAll");
}
}
}
public bool CopyRangeAtIntervalEnabled
{
get { return CopyRange && CopyRangeEnabled && (CopyRangeEnd - CopyRangeStart) >= RangeMinInterval; }
}
public int CopyRangeInterval
{
get { return _copyRangeInterval; }
set
{
if (value == _copyRangeInterval)
return;
_copyRangeInterval = value;
NotifyPropertyChanged("CopyRangeInterval");
}
}
public bool CopyRangeIntervalEnabled
{
get { return CopyRangeAtInterval && CopyRangeAtIntervalEnabled; }
}
public bool CopyCustom
{
get { return _copyOption == CopyOption.CopyCustom; }
set
{
if (!value)
{
_copyOption = CopyOption.CopyRange;
NotifyPropertyChanged("CopyCustom");
NotifyPropertyChanged("CopyRange");
}
}
}
public bool CopyCustomEnabled
{
|
[
"\t\t\tget { return Enabled && _numberOfImages > 2; }"
] | 1,205
|
lcc
|
csharp
| null |
949612e418b169f935d6b0410ff5626ba21152c71a086f2d
|
|
using System;
using System.Runtime.InteropServices;
using System.Text;
using System.IO;
using System.Xml;
using Mono.Unix;
using Mono.Unix.Native;
using Hyena;
using NDesk.DBus;
using org.gnome.SessionManager;
namespace Tomboy
{
public class GnomeApplication : INativeApplication
{
#if PANEL_APPLET
private Gnome.Program program;
#endif
private static string confDir;
private static string dataDir;
private static string cacheDir;
private static ObjectPath session_client_id;
private const string tomboyDirName = "tomboy";
static GnomeApplication ()
{
    // Resolve Tomboy's per-user directories from the XDG base-directory
    // environment variables, with the conventional fallbacks when unset.
    dataDir = Path.Combine (XdgBaseDirectorySpec.GetUserDirectory ("XDG_DATA_HOME",
                                                                   Path.Combine (".local", "share")),
                            tomboyDirName);
    confDir = Path.Combine (XdgBaseDirectorySpec.GetUserDirectory ("XDG_CONFIG_HOME",
                                                                   ".config"),
                            tomboyDirName);
    cacheDir = Path.Combine (XdgBaseDirectorySpec.GetUserDirectory ("XDG_CACHE_HOME",
                                                                    ".cache"),
                             tomboyDirName);

    // NOTE: Other directories created on demand
    // (non-existence is an indicator that migration is needed)
    if (!Directory.Exists (cacheDir))
        Directory.CreateDirectory (cacheDir);
}
public void Initialize (string locale_dir,
string display_name,
string process_name,
string [] args)
{
try {
SetProcessName (process_name);
} catch {} // Ignore exception if fail (not needed to run)
// Register handler for saving session when logging out of Gnome
BusG.Init ();
string startup_id = Environment.GetEnvironmentVariable ("DESKTOP_AUTOSTART_ID");
if (String.IsNullOrEmpty (startup_id))
startup_id = display_name;
try {
SessionManager session = Bus.Session.GetObject<SessionManager> (Constants.SessionManagerInterfaceName,
new ObjectPath (Constants.SessionManagerPath));
session_client_id = session.RegisterClient (display_name, startup_id);
ClientPrivate client = Bus.Session.GetObject<ClientPrivate> (Constants.SessionManagerInterfaceName,
session_client_id);
client.QueryEndSession += OnQueryEndSession;
client.EndSession += OnEndSession;
} catch (Exception e) {
Logger.Debug ("Failed to register with session manager: {0}", e.Message);
}
Gtk.Application.Init ();
#if PANEL_APPLET
program = new Gnome.Program (display_name,
Defines.VERSION,
Gnome.Modules.UI,
args);
#endif
}
public void RegisterSessionManagerRestart (string executable_path,
string[] args,
string[] environment)
{
// Nothing to do, we dropped the .desktop file in the autostart
// folder which should be enough to handle this in Gnome
}
public void RegisterSignalHandlers ()
{
// Connect to SIGTERM and SIGINT, so we don't lose
// unsaved notes on exit...
Stdlib.signal (Signum.SIGTERM, OnExitSignal);
Stdlib.signal (Signum.SIGINT, OnExitSignal);
}
public event EventHandler ExitingEvent;
public void Exit (int exitcode)
{
OnExitSignal (-1);
System.Environment.Exit (exitcode);
}
public void StartMainLoop ()
{
#if PANEL_APPLET
program.Run ();
#else
Gtk.Application.Run ();
#endif
}
[DllImport("libc")]
private static extern int prctl (int option,
byte [] arg2,
IntPtr arg3,
IntPtr arg4,
IntPtr arg5);
// From Banshee: Banshee.Base/Utilities.cs
private void SetProcessName (string name)
{
    // prctl(PR_SET_NAME) renames the current process (Linux-specific libc
    // call); the name must be NUL-terminated, hence the appended "\0".
    if (prctl (15 /* PR_SET_NAME */,
               Encoding.ASCII.GetBytes (name + "\0"),
               IntPtr.Zero,
               IntPtr.Zero,
               IntPtr.Zero) != 0)
        // Non-zero return means the call failed; surface errno to the caller.
        throw new ApplicationException (
            "Error setting process name: " +
            Mono.Unix.Native.Stdlib.GetLastError ());
}
private void OnExitSignal (int signal)
{
    // Give subscribers a chance to clean up (e.g. save unsaved notes).
    if (ExitingEvent != null)
        ExitingEvent (null, new EventArgs ());

    // Exit() invokes this with -1 to run the handlers without terminating
    // here (it exits itself afterwards); real signals (>= 0) end the process.
    if (signal >= 0)
        System.Environment.Exit (0);
}
private void OnQueryEndSession (uint flags)
{
Logger.Info ("Received end session query");
// The session might not actually end but it would be nice to start
// some cleanup actions like saving notes here
// Let the session manager know its OK to continue
try {
ClientPrivate client = Bus.Session.GetObject<ClientPrivate> (Constants.SessionManagerInterfaceName,
session_client_id);
client.EndSessionResponse(true, String.Empty);
} catch (Exception e) {
Logger.Debug("Failed to respond to session manager: {0}", e.Message);
}
}
private void OnEndSession (uint flags)
{
Logger.Info ("Received end session signal");
if (ExitingEvent != null)
ExitingEvent (null, new EventArgs ());
// Let the session manager know its OK to continue
// Ideally we would wait for all the exit events to finish
try {
ClientPrivate client = Bus.Session.GetObject<ClientPrivate> (Constants.SessionManagerInterfaceName,
session_client_id);
client.EndSessionResponse (true, String.Empty);
} catch (Exception e) {
Logger.Debug ("Failed to respond to session manager: {0}", e.Message);
}
}
public void OpenUrl (string url, Gdk.Screen screen)
{
GtkBeans.Global.ShowUri (screen, url);
}
[DllImport ("glib-2.0.dll")]
static extern IntPtr g_get_language_names ();
public void DisplayHelp (string project, string page, Gdk.Screen screen)
{
string helpUrl = string.Format("http://library.gnome.org/users/{0}/", project);
var langsPtr = g_get_language_names ();
var langs = GLib.Marshaller.NullTermPtrToStringArray (langsPtr, false);
var baseHelpDir = Path.Combine (Path.Combine (Defines.DATADIR, "gnome/help"), project);
if (Directory.Exists (baseHelpDir)) {
foreach (var lang in langs) {
var langHelpDir = Path.Combine (baseHelpDir, lang);
if (Directory.Exists (langHelpDir))
// TODO:Support page
helpUrl = String.Format ("ghelp://{0}", langHelpDir);
}
}
OpenUrl (helpUrl, screen);
}
public string DataDirectory {
|
[
"\t\t\tget { return dataDir; }"
] | 614
|
lcc
|
csharp
| null |
e2ce1e79b1ea6b76bdda716904c17e746e52a6a0261ef941
|
|
/*
* FindBugs - Find bugs in Java programs
* Copyright (C) 2003-2008, University of Maryland
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Lesser General Public
* License as published by the Free Software Foundation; either
* version 2.1 of the License, or (at your option) any later version.
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public
* License along with this library; if not, write to the Free Software
* Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
*/
package edu.umd.cs.findbugs;
import java.io.PrintStream;
import java.io.PrintWriter;
import java.util.Iterator;
import edu.umd.cs.findbugs.charsets.UTF8;
/**
* Base class for BugReporters which provides convenient formatting and
* reporting of warnings and analysis errors.
*
* <p>
* "TextUIBugReporter" is a bit of a misnomer, since this class is useful in
* GUIs, too.
* </p>
*
* @author David Hovemeyer
*/
public abstract class TextUIBugReporter extends AbstractBugReporter {
private boolean reportStackTrace;
private boolean useLongBugCodes = false;
private boolean showRank = false;
private boolean reportHistory = false;
private boolean applySuppressions = false;
static final String OTHER_CATEGORY_ABBREV = "X";
protected PrintWriter outputStream = UTF8.printWriter(System.out, true);
public TextUIBugReporter() {
reportStackTrace = true;
}
/**
* Set the PrintStream to write bug output to.
*
* @param outputStream
* the PrintStream to write bug output to
*/
public void setOutputStream(PrintStream outputStream) {
this.outputStream = UTF8.printWriter(outputStream, true);
}
public void setWriter(PrintWriter writer) {
this.outputStream = writer;
}
/**
* Set whether or not stack traces should be reported in error output.
*
* @param reportStackTrace
* true if stack traces should be reported, false if not
*/
public void setReportStackTrace(boolean reportStackTrace) {
this.reportStackTrace = reportStackTrace;
}
/**
* Print bug in one-line format.
*
* @param bugInstance
* the bug to print
*/
/**
 * Print bug in one-line format:
 * [rank] priority-letter category-abbrev [type] [first last] message location
 * (bracketed fields appear only when the corresponding option is enabled).
 *
 * @param bugInstance
 *            the bug to print
 */
protected void printBug(BugInstance bugInstance) {
    if (showRank) {
        int rank = BugRanker.findRank(bugInstance);
        outputStream.printf("%2d ", rank);
    }
    // One-letter priority prefix.
    switch (bugInstance.getPriority()) {
    case Priorities.EXP_PRIORITY:
        outputStream.print("E ");
        break;
    case Priorities.LOW_PRIORITY:
        outputStream.print("L ");
        break;
    case Priorities.NORMAL_PRIORITY:
        outputStream.print("M ");
        break;
    case Priorities.HIGH_PRIORITY:
        outputStream.print("H ");
        break;
    default:
        // Every BugInstance priority should be one of the four cases above.
        assert false;
    }

    BugPattern pattern = bugInstance.getBugPattern();
    if (pattern != null) {
        // Category abbreviation, falling back to "X" (other) when the
        // category is unknown or has no abbreviation.
        String categoryAbbrev = null;
        BugCategory bcat = DetectorFactoryCollection.instance().getBugCategory(pattern.getCategory());
        if (bcat != null) {
            categoryAbbrev = bcat.getAbbrev();
        }
        if (categoryAbbrev == null) {
            categoryAbbrev = OTHER_CATEGORY_ABBREV;
        }
        outputStream.print(categoryAbbrev);
        outputStream.print(" ");
    }

    if (useLongBugCodes) {
        outputStream.print(bugInstance.getType());
        outputStream.print(" ");
    }
    if (reportHistory) {
        // First and last analyzed versions in which the bug appears.
        long first = bugInstance.getFirstVersion();
        long last = bugInstance.getLastVersion();
        outputStream.print(first);
        outputStream.print(" ");
        outputStream.print(last);
        outputStream.print(" ");
    }
    // Message is flattened to a single line; the primary source line is appended.
    SourceLineAnnotation line = bugInstance.getPrimarySourceLineAnnotation();
    outputStream.println(bugInstance.getMessage().replace('\n', ' ') + "  " + line.toString());
}
private boolean analysisErrors;
private boolean missingClasses;
@Override
public void reportQueuedErrors() {
boolean errors = analysisErrors || missingClasses || getQueuedErrors().size() > 0;
analysisErrors = missingClasses = false;
super.reportQueuedErrors();
if (errors) {
emitLine("");
}
}
@Override
public void reportAnalysisError(AnalysisError error) {
    // Emit the section header only once, before the first error.
    if (!analysisErrors) {
        emitLine("The following errors occurred during analysis:");
        analysisErrors = true;
    }
    emitLine("\t" + error.getMessage());
    if (error.getExceptionMessage() != null) {
        emitLine("\t\t" + error.getExceptionMessage());
        // Stack traces are optional; controlled by setReportStackTrace().
        if (reportStackTrace) {
            String[] stackTrace = error.getStackTrace();
            if (stackTrace != null) {
                for (String aStackTrace : stackTrace) {
                    emitLine("\t\t\tAt " + aStackTrace);
                }
            }
        }
    }
}
@Override
public void reportMissingClass(String message) {
if (!missingClasses) {
emitLine("The following classes needed for analysis were missing:");
missingClasses = true;
}
emitLine("\t" + message);
}
/**
* Emit one line of the error message report. By default, error messages are
* printed to System.err. Subclasses may override.
*
* @param line
* one line of the error report
*/
/**
 * Emit one line of the error message report. By default, error messages are
 * printed to System.err, with tabs expanded to spaces. Subclasses may override.
 *
 * @param line
 *            one line of the error report
 */
protected void emitLine(String line) {
    // Literal replacement: String.replace avoids compiling "\t" as a
    // regular expression on every call, which replaceAll did.
    line = line.replace("\t", "  ");
    System.err.println(line);
}
public boolean getUseLongBugCodes() {
return useLongBugCodes;
}
public void setReportHistory(boolean reportHistory) {
this.reportHistory = reportHistory;
}
public void setUseLongBugCodes(boolean useLongBugCodes) {
this.useLongBugCodes = useLongBugCodes;
}
public void setShowRank(boolean showRank) {
this.showRank = showRank;
}
public void setApplySuppressions(boolean applySuppressions) {
this.applySuppressions = applySuppressions;
}
/*
* (non-Javadoc)
*
* @see edu.umd.cs.findbugs.BugReporter#getRealBugReporter()
*/
public BugReporter getRealBugReporter() {
return this;
}
/**
* For debugging: check a BugInstance to make sure it is valid.
*
* @param bugInstance
* the BugInstance to check
*/
protected void checkBugInstance(BugInstance bugInstance) {
for (Iterator<BugAnnotation> i = bugInstance.annotationIterator(); i.hasNext();) {
BugAnnotation bugAnnotation = i.next();
|
[
" if (bugAnnotation instanceof PackageMemberAnnotation) {"
] | 733
|
lcc
|
java
| null |
cb3e34be4795caaf719f520787549681e8b00fbac71530a7
|
|
using System;
using System.Collections;
using Server.Network;
using System.Collections.Generic;
using Server.ContextMenus;
namespace Server.Items
{
public abstract class Food : Item
{
private Mobile m_Poisoner;
private Poison m_Poison;
private int m_FillFactor;
[CommandProperty( AccessLevel.GameMaster )]
public Mobile Poisoner
{
get { return m_Poisoner; }
set { m_Poisoner = value; }
}
[CommandProperty( AccessLevel.GameMaster )]
public Poison Poison
{
get { return m_Poison; }
set { m_Poison = value; }
}
[CommandProperty( AccessLevel.GameMaster )]
public int FillFactor
{
get { return m_FillFactor; }
set { m_FillFactor = value; }
}
public Food( int itemID ) : this( 1, itemID )
{
}
public Food( int amount, int itemID ) : base( itemID )
{
Stackable = true;
Amount = amount;
m_FillFactor = 1;
}
public Food( Serial serial ) : base( serial )
{
}
public override void GetContextMenuEntries( Mobile from, List<ContextMenuEntry> list )
{
base.GetContextMenuEntries( from, list );
if ( from.Alive )
list.Add( new ContextMenus.EatEntry( from, this ) );
}
public override void OnDoubleClick( Mobile from )
{
if ( !Movable )
return;
if ( from.InRange( this.GetWorldLocation(), 1 ) )
{
Eat( from );
}
}
public virtual bool Eat( Mobile from )
{
// Fill the Mobile with FillFactor
if ( CheckHunger( from ) )
{
// Play a random "eat" sound
from.PlaySound( Utility.Random( 0x3A, 3 ) );
if ( from.Body.IsHuman && !from.Mounted )
from.Animate( 34, 5, 1, true, false, 0 );
if ( m_Poison != null )
from.ApplyPoison( m_Poisoner, m_Poison );
Consume();
return true;
}
return false;
}
public virtual bool CheckHunger( Mobile from )
{
return FillHunger( from, m_FillFactor );
}
public static bool FillHunger( Mobile from, int fillFactor )
{
if ( from.Hunger >= 20 )
{
from.SendLocalizedMessage( 500867 ); // You are simply too full to eat any more!
return false;
}
int iHunger = from.Hunger + fillFactor;
if ( from.Stam < from.StamMax )
from.Stam += Utility.Random( 6, 3 ) + fillFactor / 5;
if ( iHunger >= 20 )
{
from.Hunger = 20;
from.SendLocalizedMessage( 500872 ); // You manage to eat the food, but you are stuffed!
}
else
{
from.Hunger = iHunger;
if ( iHunger < 5 )
from.SendLocalizedMessage( 500868 ); // You eat the food, but are still extremely hungry.
else if ( iHunger < 10 )
from.SendLocalizedMessage( 500869 ); // You eat the food, and begin to feel more satiated.
else if ( iHunger < 15 )
from.SendLocalizedMessage( 500870 ); // After eating the food, you feel much less hungry.
else
from.SendLocalizedMessage( 500871 ); // You feel quite full after consuming the food.
}
Misc.FoodDecayTimer.ApplyHungerStatMod(from);
return true;
}
public override void Serialize( GenericWriter writer )
{
base.Serialize( writer );
writer.Write( (int) 4 ); // version
writer.Write( m_Poisoner );
Poison.Serialize( m_Poison, writer );
writer.Write( m_FillFactor );
}
public override void Deserialize( GenericReader reader )
{
base.Deserialize( reader );
int version = reader.ReadInt();
switch ( version )
{
case 1:
{
switch ( reader.ReadInt() )
{
case 0: m_Poison = null; break;
case 1: m_Poison = Poison.Lesser; break;
case 2: m_Poison = Poison.Regular; break;
case 3: m_Poison = Poison.Greater; break;
case 4: m_Poison = Poison.Deadly; break;
}
break;
}
case 2:
{
m_Poison = Poison.Deserialize( reader );
break;
}
case 3:
{
m_Poison = Poison.Deserialize( reader );
m_FillFactor = reader.ReadInt();
break;
}
case 4:
{
m_Poisoner = reader.ReadMobile();
goto case 3;
}
}
}
}
public class BreadLoaf : Food
{
[Constructable]
public BreadLoaf() : this( 1 )
{
}
[Constructable]
public BreadLoaf( int amount ) : base( amount, 0x103B )
{
this.Weight = 1.0;
this.FillFactor = 3;
}
public BreadLoaf( Serial serial ) : base( serial )
{
}
public override void Serialize( GenericWriter writer )
{
base.Serialize( writer );
writer.Write( (int) 0 ); // version
}
public override void Deserialize( GenericReader reader )
{
base.Deserialize( reader );
int version = reader.ReadInt();
}
}
public class Bacon : Food
{
[Constructable]
public Bacon() : this( 1 )
{
}
[Constructable]
public Bacon( int amount ) : base( amount, 0x979 )
{
this.Weight = 1.0;
this.FillFactor = 1;
}
public Bacon( Serial serial ) : base( serial )
{
}
public override void Serialize( GenericWriter writer )
{
base.Serialize( writer );
writer.Write( (int) 0 ); // version
}
public override void Deserialize( GenericReader reader )
{
base.Deserialize( reader );
int version = reader.ReadInt();
}
}
public class SlabOfBacon : Food
{
[Constructable]
public SlabOfBacon() : this( 1 )
{
}
[Constructable]
public SlabOfBacon( int amount ) : base( amount, 0x976 )
{
this.Weight = 1.0;
this.FillFactor = 3;
}
public SlabOfBacon( Serial serial ) : base( serial )
{
}
public override void Serialize( GenericWriter writer )
{
base.Serialize( writer );
writer.Write( (int) 0 ); // version
}
public override void Deserialize( GenericReader reader )
{
base.Deserialize( reader );
int version = reader.ReadInt();
}
}
public class FishSteak : Food
{
public override double DefaultWeight
{
get { return 0.1; }
}
[Constructable]
public FishSteak() : this( 1 )
{
}
[Constructable]
public FishSteak( int amount ) : base( amount, 0x97B )
{
this.FillFactor = 3;
}
public FishSteak( Serial serial ) : base( serial )
{
}
public override void Serialize( GenericWriter writer )
{
base.Serialize( writer );
writer.Write( (int) 0 ); // version
}
public override void Deserialize( GenericReader reader )
{
base.Deserialize( reader );
int version = reader.ReadInt();
}
}
public class CheeseWheel : Food
{
public override double DefaultWeight
{
get { return 0.1; }
}
[Constructable]
public CheeseWheel() : this( 1 )
{
}
[Constructable]
public CheeseWheel( int amount ) : base( amount, 0x97E )
{
this.FillFactor = 3;
}
public CheeseWheel( Serial serial ) : base( serial )
{
}
public override void Serialize( GenericWriter writer )
{
base.Serialize( writer );
writer.Write( (int) 0 ); // version
}
public override void Deserialize( GenericReader reader )
{
base.Deserialize( reader );
int version = reader.ReadInt();
}
}
public class CheeseWedge : Food
{
public override double DefaultWeight
{
get { return 0.1; }
}
[Constructable]
public CheeseWedge() : this( 1 )
{
}
[Constructable]
public CheeseWedge( int amount ) : base( amount, 0x97D )
{
this.FillFactor = 3;
}
public CheeseWedge( Serial serial ) : base( serial )
{
}
public override void Serialize( GenericWriter writer )
{
base.Serialize( writer );
writer.Write( (int) 0 ); // version
}
public override void Deserialize( GenericReader reader )
{
base.Deserialize( reader );
int version = reader.ReadInt();
}
}
public class CheeseSlice : Food
{
public override double DefaultWeight
{
get { return 0.1; }
}
[Constructable]
public CheeseSlice() : this( 1 )
{
}
[Constructable]
public CheeseSlice( int amount ) : base( amount, 0x97C )
{
this.FillFactor = 1;
}
public CheeseSlice( Serial serial ) : base( serial )
{
}
public override void Serialize( GenericWriter writer )
{
base.Serialize( writer );
writer.Write( (int) 0 ); // version
}
public override void Deserialize( GenericReader reader )
{
base.Deserialize( reader );
int version = reader.ReadInt();
}
}
public class FrenchBread : Food
{
[Constructable]
public FrenchBread() : this( 1 )
{
}
[Constructable]
public FrenchBread( int amount ) : base( amount, 0x98C )
{
this.Weight = 2.0;
this.FillFactor = 3;
}
public FrenchBread( Serial serial ) : base( serial )
{
}
public override void Serialize( GenericWriter writer )
{
base.Serialize( writer );
writer.Write( (int) 0 ); // version
}
public override void Deserialize( GenericReader reader )
{
base.Deserialize( reader );
int version = reader.ReadInt();
}
}
public class FriedEggs : Food
{
[Constructable]
public FriedEggs() : this( 1 )
{
}
[Constructable]
public FriedEggs( int amount ) : base( amount, 0x9B6 )
{
this.Weight = 1.0;
this.FillFactor = 4;
}
public FriedEggs( Serial serial ) : base( serial )
{
}
public override void Serialize( GenericWriter writer )
{
base.Serialize( writer );
writer.Write( (int) 0 ); // version
}
public override void Deserialize( GenericReader reader )
{
base.Deserialize( reader );
int version = reader.ReadInt();
}
}
public class CookedBird : Food
{
[Constructable]
public CookedBird() : this( 1 )
{
}
[Constructable]
public CookedBird( int amount ) : base( amount, 0x9B7 )
{
this.Weight = 1.0;
this.FillFactor = 5;
}
public CookedBird( Serial serial ) : base( serial )
{
}
public override void Serialize( GenericWriter writer )
{
base.Serialize( writer );
writer.Write( (int) 0 ); // version
}
public override void Deserialize( GenericReader reader )
{
base.Deserialize( reader );
int version = reader.ReadInt();
}
}
public class RoastPig : Food
{
[Constructable]
public RoastPig() : this( 1 )
{
}
[Constructable]
public RoastPig( int amount ) : base( amount, 0x9BB )
{
this.Weight = 45.0;
this.FillFactor = 20;
}
public RoastPig( Serial serial ) : base( serial )
{
}
public override void Serialize( GenericWriter writer )
{
base.Serialize( writer );
writer.Write( (int) 0 ); // version
}
public override void Deserialize( GenericReader reader )
{
base.Deserialize( reader );
int version = reader.ReadInt();
}
}
public class Sausage : Food
{
[Constructable]
public Sausage() : this( 1 )
{
}
[Constructable]
public Sausage( int amount ) : base( amount, 0x9C0 )
{
this.Weight = 1.0;
this.FillFactor = 4;
}
public Sausage( Serial serial ) : base( serial )
{
}
public override void Serialize( GenericWriter writer )
{
base.Serialize( writer );
writer.Write( (int) 0 ); // version
}
public override void Deserialize( GenericReader reader )
{
base.Deserialize( reader );
int version = reader.ReadInt();
}
}
public class Ham : Food
{
[Constructable]
public Ham() : this( 1 )
{
}
[Constructable]
public Ham( int amount ) : base( amount, 0x9C9 )
{
this.Weight = 1.0;
this.FillFactor = 5;
}
public Ham( Serial serial ) : base( serial )
{
}
public override void Serialize( GenericWriter writer )
{
base.Serialize( writer );
writer.Write( (int) 0 ); // version
}
public override void Deserialize( GenericReader reader )
{
base.Deserialize( reader );
int version = reader.ReadInt();
}
}
public class Cake : Food
{
[Constructable]
public Cake() : base( 0x9E9 )
{
Stackable = false;
this.Weight = 1.0;
this.FillFactor = 10;
}
public Cake( Serial serial ) : base( serial )
{
}
public override void Serialize( GenericWriter writer )
{
base.Serialize( writer );
writer.Write( (int) 0 ); // version
}
public override void Deserialize( GenericReader reader )
{
base.Deserialize( reader );
int version = reader.ReadInt();
}
}
public class Ribs : Food
{
[Constructable]
public Ribs() : this( 1 )
{
}
[Constructable]
public Ribs( int amount ) : base( amount, 0x9F2 )
{
this.Weight = 1.0;
this.FillFactor = 5;
}
public Ribs( Serial serial ) : base( serial )
{
}
public override void Serialize( GenericWriter writer )
{
base.Serialize( writer );
writer.Write( (int) 0 ); // version
}
public override void Deserialize( GenericReader reader )
{
base.Deserialize( reader );
int version = reader.ReadInt();
}
}
public class Cookies : Food
{
[Constructable]
public Cookies() : base( 0x160b )
{
Stackable = Core.ML;
this.Weight = 1.0;
this.FillFactor = 4;
}
public Cookies( Serial serial ) : base( serial )
{
}
public override void Serialize( GenericWriter writer )
{
base.Serialize( writer );
writer.Write( (int) 0 ); // version
}
public override void Deserialize( GenericReader reader )
{
base.Deserialize( reader );
int version = reader.ReadInt();
}
}
public class Muffins : Food
{
[Constructable]
public Muffins() : base( 0x9eb )
{
Stackable = false;
this.Weight = 1.0;
this.FillFactor = 4;
}
public Muffins( Serial serial ) : base( serial )
{
}
public override void Serialize( GenericWriter writer )
{
base.Serialize( writer );
writer.Write( (int) 0 ); // version
}
public override void Deserialize( GenericReader reader )
{
base.Deserialize( reader );
int version = reader.ReadInt();
}
}
|
[
"\t[TypeAlias( \"Server.Items.Pizza\" )]"
] | 1,914
|
lcc
|
csharp
| null |
c539b59ccd2559e5f2823b3d0c876ed3d71dad59f7a6e121
|
|
using EloBuddy;
using LeagueSharp.Common;
namespace Flowers_ADC_Series.Pluging
{
using ADCCOMMON;
using System;
using System.Linq;
using LeagueSharp;
using LeagueSharp.Common;
using Color = System.Drawing.Color;
internal class KogMaw : Logic
{
public KogMaw()
{
Q = new Spell(SpellSlot.Q, 980f);
W = new Spell(SpellSlot.W, Me.AttackRange);
E = new Spell(SpellSlot.E, 1200f);
R = new Spell(SpellSlot.R, 1800f);
Q.SetSkillshot(0.25f, 50f, 2000f, true, SkillshotType.SkillshotLine);
E.SetSkillshot(0.25f, 120f, 1400f, false, SkillshotType.SkillshotLine);
R.SetSkillshot(1.2f, 120f, float.MaxValue, false, SkillshotType.SkillshotCircle);
var comboMenu = Menu.AddSubMenu(new Menu("Combo", "Combo"));
{
comboMenu.AddItem(new MenuItem("ComboQ", "Use Q", true).SetValue(true));
comboMenu.AddItem(new MenuItem("ComboW", "Use W", true).SetValue(true));
comboMenu.AddItem(new MenuItem("ComboE", "Use E", true).SetValue(true));
comboMenu.AddItem(new MenuItem("ComboR", "Use R", true).SetValue(true));
comboMenu.AddItem(
new MenuItem("ComboRLimit", "Use R|Limit Stack >= x", true).SetValue(new Slider(3, 0, 10)));
}
var harassMenu = Menu.AddSubMenu(new Menu("Harass", "Harass"));
{
harassMenu.AddItem(new MenuItem("HarassQ", "Use Q", true).SetValue(true));
harassMenu.AddItem(new MenuItem("HarassE", "Use E", true).SetValue(true));
harassMenu.AddItem(new MenuItem("HarassR", "Use R", true).SetValue(true));
harassMenu.AddItem(
new MenuItem("HarassRLimit", "Use R|Limit Stack >= x", true).SetValue(new Slider(5, 0, 10)));
harassMenu.AddItem(
new MenuItem("HarassMana", "When Player ManaPercent >= x%", true).SetValue(new Slider(60)));
}
var clearMenu = Menu.AddSubMenu(new Menu("Clear", "Clear"));
{
var laneClearMenu = clearMenu.AddSubMenu(new Menu("LaneClear", "LaneClear"));
{
laneClearMenu.AddItem(new MenuItem("LaneClearQ", "Use Q", true).SetValue(true));
laneClearMenu.AddItem(new MenuItem("LaneClearE", "Use E", true).SetValue(true));
laneClearMenu.AddItem(
new MenuItem("LaneClearECount", "If E CanHit Counts >= x", true).SetValue(new Slider(3, 1, 5)));
laneClearMenu.AddItem(new MenuItem("LaneClearR", "Use R", true).SetValue(true));
laneClearMenu.AddItem(
new MenuItem("LaneClearRLimit", "Use R|Limit Stack >= x", true).SetValue(new Slider(4, 0, 10)));
laneClearMenu.AddItem(
new MenuItem("LaneClearMana", "If Player ManaPercent >= %", true).SetValue(new Slider(60)));
}
var jungleClearMenu = clearMenu.AddSubMenu(new Menu("JungleClear", "JungleClear"));
{
jungleClearMenu.AddItem(new MenuItem("JungleClearQ", "Use Q", true).SetValue(true));
jungleClearMenu.AddItem(new MenuItem("JungleClearW", "Use W", true).SetValue(true));
jungleClearMenu.AddItem(new MenuItem("JungleClearE", "Use E", true).SetValue(true));
jungleClearMenu.AddItem(new MenuItem("JungleClearR", "Use R", true).SetValue(true));
jungleClearMenu.AddItem(
new MenuItem("JungleClearRLimit", "Use R|Limit Stack >= x", true).SetValue(new Slider(5, 0, 10)));
jungleClearMenu.AddItem(
new MenuItem("JungleClearMana", "When Player ManaPercent >= x%", true).SetValue(new Slider(30)));
}
clearMenu.AddItem(new MenuItem("asdqweqwe", " ", true));
ManaManager.AddSpellFarm(clearMenu);
}
var killStealMenu = Menu.AddSubMenu(new Menu("KillSteal", "KillSteal"));
{
killStealMenu.AddItem(new MenuItem("KillStealQ", "Use Q", true).SetValue(true));
killStealMenu.AddItem(new MenuItem("KillStealE", "Use E", true).SetValue(true));
killStealMenu.AddItem(new MenuItem("KillStealR", "Use R", true).SetValue(true));
}
var miscMenu = Menu.AddSubMenu(new Menu("Misc", "Misc"));
{
miscMenu.AddItem(new MenuItem("GapE", "Anti GapCloser E", true).SetValue(true));
miscMenu.AddItem(
new MenuItem("SemiR", "Semi-manual R Key", true).SetValue(new KeyBind('T', KeyBindType.Press)));
}
var utilityMenu = Menu.AddSubMenu(new Menu("Utility", "Utility"));
{
var skinMenu = utilityMenu.AddSubMenu(new Menu("Skin Change", "Skin Change"));
{
SkinManager.AddToMenu(skinMenu);
}
var autoLevelMenu = utilityMenu.AddSubMenu(new Menu("Auto Levels", "Auto Levels"));
{
LevelsManager.AddToMenu(autoLevelMenu);
}
var humainzerMenu = utilityMenu.AddSubMenu(new Menu("Humanier", "Humanizer"));
{
HumanizerManager.AddToMenu(humainzerMenu);
}
var itemsMenu = utilityMenu.AddSubMenu(new Menu("Items", "Items"));
{
ItemsManager.AddToMenu(itemsMenu);
}
}
var drawMenu = Menu.AddSubMenu(new Menu("Drawings", "Drawings"));
{
drawMenu.AddItem(new MenuItem("DrawQ", "Draw Q Range", true).SetValue(false));
drawMenu.AddItem(new MenuItem("DrawW", "Draw W Range", true).SetValue(false));
drawMenu.AddItem(new MenuItem("DrawE", "Draw E Range", true).SetValue(false));
drawMenu.AddItem(new MenuItem("DrawR", "Draw R Range", true).SetValue(false));
ManaManager.AddDrawFarm(drawMenu);
DamageIndicator.AddToMenu(drawMenu);
}
AntiGapcloser.OnEnemyGapcloser += OnEnemyGapcloser;
Obj_AI_Base.OnSpellCast += OnSpellCast;
Game.OnUpdate += OnUpdate;
Drawing.OnDraw += OnDraw;
}
private void OnEnemyGapcloser(ActiveGapcloser Args)
{
if (Menu.GetBool("GapE") && E.IsReady() && Args.Sender.IsValidTarget(E.Range))
{
SpellManager.PredCast(E, Args.Sender, true);
}
}
private void OnSpellCast(Obj_AI_Base sender, GameObjectProcessSpellCastEventArgs Args)
{
if (!sender.IsMe || !Orbwalking.IsAutoAttack(Args.SData.Name))
{
return;
}
if (Orbwalker.ActiveMode == Orbwalking.OrbwalkingMode.Combo)
{
var target = (AIHeroClient)Args.Target;
if (target != null && !target.IsDead && !target.IsZombie)
{
if (Menu.GetBool("ComboW") && W.IsReady() && target.IsValidTarget(W.Range))
{
W.Cast();
}
else if (Menu.GetBool("ComboR") && R.IsReady() && Menu.GetSlider("ComboRLimit") >= GetRCount &&
target.IsValidTarget(R.Range))
{
SpellManager.PredCast(R, target, true);
}
else if (Menu.GetBool("ComboQ") && Q.IsReady() && target.IsValidTarget(Q.Range))
{
SpellManager.PredCast(Q, target);
}
else if (Menu.GetBool("ComboE") && E.IsReady() && target.IsValidTarget(E.Range))
{
SpellManager.PredCast(E, target, true);
}
}
}
if (Orbwalker.ActiveMode == Orbwalking.OrbwalkingMode.LaneClear)
{
if (ManaManager.HasEnoughMana(Menu.GetSlider("JungleClearMana")) && ManaManager.SpellFarm)
{
var mobs = MinionManager.GetMinions(Me.Position, R.Range, MinionTypes.All, MinionTeam.Neutral,
MinionOrderTypes.MaxHealth);
if (mobs.Any())
{
var mob = mobs.FirstOrDefault();
var bigmob = mobs.FirstOrDefault(x => !x.Name.ToLower().Contains("mini"));
if (Menu.GetBool("JungleClearW") && W.IsReady() && bigmob != null && bigmob.IsValidTarget(W.Range))
{
W.Cast();
}
else if (Menu.GetBool("JungleClearR") && R.IsReady() && Menu.GetSlider("JungleClearRLimit") >= GetRCount &&
bigmob != null)
{
R.Cast(bigmob);
}
else if (Menu.GetBool("JungleClearE") && E.IsReady())
{
if (bigmob != null && bigmob.IsValidTarget(E.Range))
{
E.Cast(bigmob);
}
else
{
var eMobs = MinionManager.GetMinions(Me.Position, E.Range, MinionTypes.All, MinionTeam.Neutral,
MinionOrderTypes.MaxHealth);
var eFarm = E.GetLineFarmLocation(eMobs, E.Width);
if (eFarm.MinionsHit >= 2)
{
E.Cast(eFarm.Position);
}
}
}
else if (Menu.GetBool("JungleClearQ") && Q.IsReady() && mob != null && mob.IsValidTarget(Q.Range))
{
Q.Cast(mob);
}
}
}
}
}
private void OnUpdate(EventArgs Args)
{
if (Me.IsDead || Me.IsRecalling())
{
return;
}
if (W.Level > 0)
{
W.Range = Me.AttackRange + new[] { 130, 150, 170, 190, 210 }[W.Level - 1];
}
if (R.Level > 0)
{
R.Range = 1200 + 300*R.Level - 1;
}
SemiRLogic();
KillSteal();
switch (Orbwalker.ActiveMode)
{
case Orbwalking.OrbwalkingMode.Combo:
Combo();
break;
case Orbwalking.OrbwalkingMode.Mixed:
Harass();
break;
case Orbwalking.OrbwalkingMode.LaneClear:
FarmHarass();
LaneClear();
JungleClear();
break;
}
}
private void SemiRLogic()
{
if (Menu.GetKey("SemiR") && R.IsReady())
{
var target = TargetSelector.GetSelectedTarget() ??
TargetSelector.GetTarget(R.Range, TargetSelector.DamageType.Physical);
if (target.Check(R.Range))
{
SpellManager.PredCast(R, target, true);
}
}
}
private void KillSteal()
{
if (Menu.GetBool("KillStealQ") && Q.IsReady())
{
foreach (var target in HeroManager.Enemies.Where(x => x.IsValidTarget(Q.Range) && x.Health < Q.GetDamage(x)))
{
SpellManager.PredCast(Q, target);
return;
}
}
if (Menu.GetBool("KillStealE") && E.IsReady())
{
foreach (var target in HeroManager.Enemies.Where(x => x.IsValidTarget(E.Range) && x.Health < E.GetDamage(x)))
{
SpellManager.PredCast(E, target, true);
return;
}
}
if (Menu.GetBool("KillStealR") && R.IsReady())
{
foreach (var target in HeroManager.Enemies.Where(x => x.IsValidTarget(R.Range) && x.Health < R.GetDamage(x)))
{
SpellManager.PredCast(R, target, true);
return;
}
}
}
private void Combo()
{
var target = TargetSelector.GetSelectedTarget() ??
TargetSelector.GetTarget(R.Range, TargetSelector.DamageType.Physical);
if (target.Check(R.Range))
{
if (Menu.GetBool("ComboR") && R.IsReady() &&
Menu.GetSlider("ComboRLimit") >= GetRCount &&
target.IsValidTarget(R.Range))
{
SpellManager.PredCast(R, target, true);
}
if (Menu.GetBool("ComboQ") && Q.IsReady() && target.IsValidTarget(Q.Range))
{
SpellManager.PredCast(Q, target, true);
}
if (Menu.GetBool("ComboE") && E.IsReady() && target.IsValidTarget(E.Range))
{
SpellManager.PredCast(E, target);
}
if (Menu.GetBool("ComboW") && W.IsReady() && target.IsValidTarget(W.Range) &&
target.DistanceToPlayer() > Orbwalking.GetRealAutoAttackRange(Me) && Me.CanAttack)
{
W.Cast();
}
}
}
private void Harass()
{
if (ManaManager.HasEnoughMana(Menu.GetSlider("HarassMana")))
{
var target = TargetSelector.GetTarget(R.Range, TargetSelector.DamageType.Magical);
if (target.Check(R.Range))
{
if (Menu.GetBool("HarassR") && R.IsReady() && Menu.GetSlider("HarassRLimit") >= GetRCount &&
target.IsValidTarget(R.Range))
{
SpellManager.PredCast(R, target, true);
}
if (Menu.GetBool("HarassQ") && Q.IsReady() && target.IsValidTarget(Q.Range))
{
SpellManager.PredCast(Q, target);
}
if (Menu.GetBool("HarassE") && E.IsReady() && target.IsValidTarget(E.Range))
{
SpellManager.PredCast(E, target, true);
}
}
}
}
private void FarmHarass()
{
if (ManaManager.SpellHarass)
{
Harass();
}
}
private void LaneClear()
{
if (ManaManager.HasEnoughMana(Menu.GetSlider("LaneClearMana")) && ManaManager.SpellFarm)
{
var minions = MinionManager.GetMinions(Me.Position, R.Range);
if (minions.Any())
{
if (Menu.GetBool("LaneClearR") && R.IsReady() && Menu.GetSlider("LaneClearRLimit") >= GetRCount)
{
var rMinion =
minions.FirstOrDefault(x => x.DistanceToPlayer() > Orbwalking.GetRealAutoAttackRange(Me));
if (rMinion != null && HealthPrediction.GetHealthPrediction(rMinion, 250) > 0)
{
R.Cast(rMinion);
}
}
if (Menu.GetBool("LaneClearE") && E.IsReady())
{
var eMinions = MinionManager.GetMinions(Me.Position, E.Range);
var eFarm =
MinionManager.GetBestLineFarmLocation(eMinions.Select(x => x.Position.To2D()).ToList(),
E.Width, E.Range);
|
[
" if (eFarm.MinionsHit >= Menu.GetSlider(\"LaneClearECount\"))"
] | 993
|
lcc
|
csharp
| null |
d8843d3a7c6577b304c94d48767a3f75337b2aeadd3c5f76
|
|
package ch.sfdr.fractals.gui.component;
import java.awt.GridBagConstraints;
import java.awt.Insets;
/**
* GBC, a small helper class to create GridBagConstraints in a more readable way
* with less typing: just use the static methods when adding a component, eg.
* <code>
* container.add(someComponent, GBC.get(0, 0, 1, 1, 'b'));
* </code>
* Imported from another (old) project, adopted a bit
*/
public final class GBC
{
private static char DEFAULT_FILL = 'n';
private static String DEFAULT_ANCHOR = "W";
private static String[] ANCHOR_STRINGS = {
"n", "ne", "e", "se", "s", "sw", "w", "nw", "c"
};
private static int[] ANCHOR_VALUES = {
GridBagConstraints.NORTH, GridBagConstraints.NORTHEAST,
GridBagConstraints.EAST, GridBagConstraints.SOUTHEAST,
GridBagConstraints.SOUTH, GridBagConstraints.SOUTHWEST,
GridBagConstraints.WEST, GridBagConstraints.NORTHWEST,
GridBagConstraints.CENTER
};
private static int getAnchor(String str)
{
str = str.toLowerCase();
for (int i = 0; i < ANCHOR_STRINGS.length; i++) {
if (str.equals(ANCHOR_STRINGS[i]))
return ANCHOR_VALUES[i];
}
return -1;
}
private static int getFill(char c)
{
switch (c) {
case 'n':
case 'N':
return GridBagConstraints.NONE;
case 'v':
case 'V':
return GridBagConstraints.VERTICAL;
case 'h':
case 'H':
return GridBagConstraints.HORIZONTAL;
case 'b':
case 'B':
return GridBagConstraints.BOTH;
}
return -1;
}
/**
* Returns a GridBagConstraint, setting all values directly
* @param x
* @param y
* @param width
* @param height
* @param wx
* @param wy
* @param insetTop
* @param insetLeft
* @param insetBottom
* @param insetRight
* @param fill
* @param anchor
* @return GridBagConstraints
*/
public static GridBagConstraints get(int x, int y, int width, int height,
double wx, double wy, int insetTop, int insetLeft, int insetBottom,
int insetRight, char fill, String anchor)
{
return new GridBagConstraints(x, y, width, height,
wx, wy, getAnchor(anchor), getFill(fill),
new Insets(insetTop, insetLeft, insetBottom, insetRight),
0, 0);
}
/**
* Returns a GridBagConstraint
* @param x
* @param y
* @param width
* @param height
* @param wx
* @param wy
* @param fill
* @param anchor
* @return GridBagConstraints
*/
public static GridBagConstraints get(int x, int y, int width, int height,
double wx, double wy, char fill, String anchor)
{
return get(x, y, width, height, wx, wy, 2, 2, 2, 2, fill, anchor);
}
/**
* Returns a GridBagConstraint
* @param x
* @param y
* @param width
* @param height
* @param wx
* @param wy
* @param fill
* @return GridBagConstraints
*/
public static GridBagConstraints get(int x, int y, int width, int height,
double wx, double wy, char fill)
{
return get(x, y, width, height, wx, wy, fill, DEFAULT_ANCHOR);
}
/**
* Returns a GridBagConstraint
* @param x
* @param y
* @param width
* @param height
* @param wx
* @param wy
* @return GridBagConstraints
*/
public static GridBagConstraints get(int x, int y, int width, int height,
double wx, double wy)
{
return get(x, y, width, height, wx, wy, DEFAULT_FILL, DEFAULT_ANCHOR);
}
/**
* Returns a GridBagConstraint
* @param x
* @param y
* @param width
* @param height
* @param wx
* @param wy
* @param anchor
* @return GridBagConstraints
*/
public static GridBagConstraints get(int x, int y, int width, int height,
double wx, double wy, String anchor)
{
return get(x, y, width, height, wx, wy, DEFAULT_FILL, anchor);
}
/**
* Returns a GridBagConstraint
* @param x
* @param y
* @param width
* @param height
* @param fill
* @param anchor
* @return GridBagConstraints
*/
public static GridBagConstraints get(int x, int y, int width, int height,
char fill, String anchor)
{
return get(x, y, width, height, 0.0, 0.0, fill, anchor);
}
/**
* Returns a GridBagConstraint
* @param x
* @param y
* @param width
* @param height
* @param fill
* @return GridBagConstraints
*/
public static GridBagConstraints get(int x, int y, int width, int height,
char fill)
{
return get(x, y, width, height, 0.0, 0.0, fill, DEFAULT_ANCHOR);
}
/**
* Returns a GridBagConstraint
* @param x
* @param y
* @param width
* @param height
* @return GridBagConstraints
*/
public static GridBagConstraints get(int x, int y, int width, int height)
{
return get(x, y, width, height, 0.0, 0.0, DEFAULT_FILL, DEFAULT_ANCHOR);
}
/**
* Returns a GridBagConstraint
* @param x
* @param y
* @param width
* @param height
* @param anchor
* @return GridBagConstraints
*/
public static GridBagConstraints get(int x, int y, int width, int height,
String anchor)
{
|
[
"\t\treturn get(x, y, width, height, 0.0, 0.0, DEFAULT_FILL, anchor);"
] | 691
|
lcc
|
java
| null |
9812a3e53ee25cf9a764b1de80d781f37d829475a8bf8980
|
|
#!/usr/bin/env python3
# vim: set encoding=utf-8 tabstop=4 softtabstop=4 shiftwidth=4 expandtab
#########################################################################
# Copyright 2012-2013 Marcus Popp marcus@popp.mx
#########################################################################
# This file is part of SmartHome.py. http://mknx.github.io/smarthome/
#
# SmartHome.py is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# SmartHome.py is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with SmartHome.py. If not, see <http://www.gnu.org/licenses/>.
#########################################################################
import logging
import csv
import ftplib
import socket
import re
import datetime
import dateutil.parser
import dateutil.tz
import dateutil.relativedelta
import xml.etree.cElementTree
import threading
logger = logging.getLogger('')
class DWD():
_dwd_host = 'ftp-outgoing2.dwd.de'
_warning_cat = {}
def __init__(self, smarthome, username, password=True):
self._sh = smarthome
self._warnings_csv = smarthome.base_dir + '/plugins/dwd/warnings.csv'
self._dwd_user = username
self._dwd_password = password
self.lock = threading.Lock()
self.tz = dateutil.tz.gettz('Europe/Berlin')
try:
warnings = csv.reader(open(self._warnings_csv, "r", encoding='utf_8'), delimiter=';')
except IOError as e:
logger.error('Could not open warning catalog {}: {}'.format(self._warnings_csv, e))
for row in warnings:
self._warning_cat[int(row[0])] = {'summary': row[1], 'kind': row[2]}
def _connect(self):
# open ftp connection to dwd
if not hasattr(self, '_ftp'):
try:
self._ftp = ftplib.FTP(self._dwd_host, self._dwd_user, self._dwd_password, timeout=1)
except (socket.error, socket.gaierror) as e:
logger.error('Could not connect to {}: {}'.format(self._dwd_host, e))
self.ftp_quit()
except ftplib.error_perm as e:
logger.error('Could not login: {}'.format(e))
self.ftp_quit()
def run(self):
self.alive = True
def stop(self):
self.ftp_quit()
self.alive = False
def ftp_quit(self):
try:
self._ftp.close()
except Exception:
pass
if hasattr(self, '_ftp'):
del(self._ftp)
def parse_item(self, item):
return None
def parse_logic(self, logic):
return None
def _buffer_file(self, data):
self._buffer.extend(data)
def _retr_file(self, filename):
self.lock.acquire()
self._connect()
self._buffer = bytearray()
try:
self._ftp.retrbinary("RETR {}".format(filename), self._buffer_file)
except Exception as e:
logger.info("problem fetching {0}: {1}".format(filename, e))
del(self._buffer)
self._buffer = bytearray()
self.ftp_quit()
self.lock.release()
return self._buffer.decode('iso-8859-1')
def _retr_list(self, dirname):
self.lock.acquire()
self._connect()
try:
filelist = self._ftp.nlst(dirname)
except Exception:
filelist = []
finally:
self.lock.release()
return filelist
def warnings(self, region, location):
directory = 'gds/specials/warnings'
warnings = []
filepath = "{0}/{1}/W*_{2}_*".format(directory, region, location)
files = self._retr_list(filepath)
for filename in files:
fb = self._retr_file(filename)
if fb == '':
continue
dates = re.findall(r"\d\d\.\d\d\.\d\d\d\d \d\d:\d\d", fb)
now = datetime.datetime.now(self.tz)
if len(dates) > 1: # Entwarnungen haben nur ein Datum
start = dateutil.parser.parse(dates[0], dayfirst=True)
start = start.replace(tzinfo=self.tz)
end = dateutil.parser.parse(dates[1], dayfirst=True)
end = end.replace(tzinfo=self.tz)
notice = dateutil.parser.parse(dates[2])
notice = notice.replace(tzinfo=self.tz)
if end > now:
area_splitter = re.compile(r'^\r\r\n', re.M)
area = area_splitter.split(fb)
code = int(re.findall(r"\d\d", area[0])[0])
desc = area[5].replace('\r\r\n', '').strip()
kind = self._warning_cat[code]['kind']
warnings.append({'start': start, 'end': end, 'kind': kind, 'notice': notice, 'desc': desc})
return warnings
def current(self, location):
    """Return the newest observation table row for `location` as a dict.

    Returns {} when no files exist, the newest file is too short,
    or `location` does not appear in any row.
    """
    directory = 'gds/specials/observations/tables/germany'
    files = self._retr_list(directory)
    if files == []:
        return {}
    # File names sort chronologically, so the last one is the newest.
    last = sorted(files)[-1]
    fb = self._retr_file(last)
    fb = fb.splitlines()
    if len(fb) < 8:
        logger.info("problem fetching {0}".format(last))
        return {}
    header = fb[4]
    # Line 8 holds the column names used as dict keys below.
    legend = fb[8].split()
    # Reformat dd.mm.yyyy from the header into ISO yyyy-mm-dd.
    # NOTE(review): `date` is computed but never used or returned — verify intent.
    date = re.findall(r"\d\d\.\d\d\.\d\d\d\d", header)[0].split('.')
    date = "{}-{}-{}".format(date[2], date[1], date[0])
    for line in fb:
        if line.count(location):
            # Columns are separated by runs of spaces.
            space = re.compile(r' +')
            line = space.split(line)
            return dict(zip(legend, line))
    return {}
def forecast(self, region, location):
    """Fetch DWD tabular forecasts for `location` across all time frames.

    Returns a dict mapping forecast datetimes to the list of value
    columns for that location (empty strings when the slot is past).
    """
    path = 'gds/specials/forecasts/tables/germany/Daten_'
    # One file per forecast slot, covering today through day four.
    frames = ['frueh', 'mittag', 'spaet', 'nacht', 'morgen_frueh', 'morgen_spaet', 'uebermorgen_frueh', 'uebermorgen_spaet', 'Tag4_frueh', 'Tag4_spaet']
    forecast = {}
    for frame in frames:
        filepath = "{0}{1}_{2}".format(path, region, frame)
        fb = self._retr_file(filepath)
        if fb == '':
            continue
        # Map the frame name to a clock time ('frueh'=06:00, 'mittag'=12:00,
        # 'nacht'=23:59, everything else 18:00).
        minute = 0
        if frame.count('frueh'):
            hour = 6
        elif frame == 'mittag':
            hour = 12
        elif frame == 'nacht':
            hour = 23
            minute = 59
        else:
            hour = 18
        for line in fb.splitlines():
            if line.count('Termin ist nicht mehr'):  # slot already in the past
                date = self._sh.now().replace(hour=hour, minute=minute, second=0, microsecond=0, tzinfo=self.tz)
                forecast[date] = ['', '', '']
                continue
            elif line.startswith('Vorhersage'):
                # Header line carries the forecast date reused below.
                header = line
            elif line.count(location):
                # Strip "/dd" day-of-month suffixes before extracting the date.
                # NOTE(review): `header` is unbound if a location row precedes the
                # 'Vorhersage' line — assumed not to happen in DWD files; confirm.
                header = re.sub(r"/\d\d?", '', header)
                day, month, year = re.findall(r"\d\d\.\d\d\.\d\d\d\d", header)[0].split('.')
                date = datetime.datetime(int(year), int(month), int(day), hour, tzinfo=self.tz)
                space = re.compile(r' +')
                fc = space.split(line)
                # Drop column 0 (the location name); keep the value columns.
                forecast[date] = fc[1:]
    return forecast
def uvi(self, location):
    """Fetch the UV-index forecast for `location`.

    Returns a dict mapping noon datetimes to integer UV index values,
    one entry per available forecast horizon.
    """
    directory = 'gds/specials/warnings/FG'
    forecast = {}
    for frame in ['12', '36', '60']:  # forecast horizons in hours
        filename = "{0}/u_vindex{1}.xml".format(directory, frame)
        fb = self._retr_file(filename)
        try:
            year, month, day = re.findall(r"\d\d\d\d\-\d\d\-\d\d", fb)[0].split('-')
        except IndexError:
            # No ISO date in the payload (empty/malformed file): skip this frame.
            # Was a bare `except:` that also swallowed KeyboardInterrupt/SystemExit.
            continue
        date = datetime.datetime(int(year), int(month), int(day), 12, 0, 0, 0, tzinfo=self.tz)
        # Value element directly following the matching <tns:Ort> entry.
        uv = re.findall(r"{}<\/tns:Ort>\n *<tns:Wert>([^<]+)".format(location), fb)
        if len(uv) == 1:
            forecast[date] = int(uv[0])
    return forecast
def pollen(self, region):
filename = 'gds/specials/warnings/FG/s_b31fg.xml'
|
[
" filexml = self._retr_file(filename)"
] | 709
|
lcc
|
python
| null |
140b5a1cf9a62eb1f7eac54fcb8dec74b23e9ea6e4d3d93a
|
|
# -*- coding: utf-8 -*-
##
## This file is part of Invenio.
## Copyright (C) 2014 CERN.
##
## Invenio is free software; you can redistribute it and/or
## modify it under the terms of the GNU General Public License as
## published by the Free Software Foundation; either version 2 of the
## License, or (at your option) any later version.
##
## Invenio is distributed in the hope that it will be useful, but
## WITHOUT ANY WARRANTY; without even the implied warranty of
## MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
## General Public License for more details.
##
## You should have received a copy of the GNU General Public License
## along with Invenio; if not, write to the Free Software Foundation, Inc.,
## 59 Temple Place, Suite 330, Boston, MA 02111-1307, USA.
"""Unit tests for the parser engine."""
__revision__ = \
"$Id$"
import tempfile
from flask.ext.registry import PkgResourcesDirDiscoveryRegistry, \
ImportPathRegistry, RegistryProxy
from invenio.base.wrappers import lazy_import
from invenio.testsuite import make_test_suite, run_test_suite, InvenioTestCase
Field_parser = lazy_import('invenio.modules.jsonalchemy.parser:FieldParser')
Model_parser = lazy_import('invenio.modules.jsonalchemy.parser:ModelParser')
guess_legacy_field_names = lazy_import(
'invenio.modules.jsonalchemy.parser:guess_legacy_field_names')
get_producer_rules = lazy_import(
'invenio.modules.jsonalchemy.parser:get_producer_rules')
TEST_PACKAGE = 'invenio.modules.jsonalchemy.testsuite'
test_registry = RegistryProxy('testsuite', ImportPathRegistry,
initial=[TEST_PACKAGE])
def field_definitions():
    """Registry of field-definition files discovered in the test package."""
    # PEP 8 (E731): named functions instead of lambda assignments.
    return PkgResourcesDirDiscoveryRegistry(
        'fields', registry_namespace=test_registry)


def model_definitions():
    """Registry of model-definition files discovered in the test package."""
    return PkgResourcesDirDiscoveryRegistry(
        'models', registry_namespace=test_registry)
def clean_field_model_definitions():
    """Reset the parsers' cached definitions so each test starts clean."""
    for parser, attr in ((Field_parser, '_field_definitions'),
                         (Field_parser, '_legacy_field_matchings'),
                         (Model_parser, '_model_definitions')):
        setattr(parser, attr, {})
class TestParser(InvenioTestCase):
def setUp(self):
    # Register fresh testsuite field/model registries on the Flask app
    # so the parser under test discovers the fixture definitions.
    self.app.extensions['registry'][
        'testsuite.fields'] = field_definitions()
    self.app.extensions['registry'][
        'testsuite.models'] = model_definitions()
def tearDown(self):
    # Remove the registries installed in setUp so tests stay isolated.
    del self.app.extensions['registry']['testsuite.fields']
    del self.app.extensions['registry']['testsuite.models']
def test_wrong_indent(self):
    """JSONAlchemy - wrong indent"""
    # A mis-indented field definition must fail both directly at the
    # pyparsing level and when loaded through the registry/reparse path.
    from invenio.modules.jsonalchemy.parser import _create_field_parser
    import pyparsing
    parser = _create_field_parser()
    # NOTE(review): the indentation inside these literals is significant to
    # the test and may have been mangled in transit — confirm upstream.
    test = """
foo:
creator:
bar, '1', foo()
"""
    self.assertRaises(pyparsing.ParseException, parser.parseString, test)
    from invenio.modules.jsonalchemy.errors import FieldParserException
    tmp_file = tempfile.NamedTemporaryFile()
    config = """
foo:
creator:
bar, '1', foo()
"""
    tmp_file.write(config)
    tmp_file.flush()
    self.app.extensions['registry'][
        'testsuite.fields'].register(tmp_file.name)
    clean_field_model_definitions()
    self.assertRaises(
        FieldParserException, Field_parser.reparse, 'testsuite')
    tmp_file.close()
    clean_field_model_definitions()
def test_wrong_field_definitions(self):
    """JSONAlchemy - wrong field definitions"""
    # A creator rule without a body must raise FieldParserException on reparse.
    from invenio.modules.jsonalchemy.errors import FieldParserException
    tmp_file_4 = tempfile.NamedTemporaryFile()
    config_4 = '''
title:
creator:
marc, '245__', value
'''
    tmp_file_4.write(config_4)
    tmp_file_4.flush()
    clean_field_model_definitions()
    self.app.extensions['registry'][
        'testsuite.fields'].register(tmp_file_4.name)
    self.assertRaises(
        FieldParserException, Field_parser.reparse, 'testsuite')
    tmp_file_4.close()
    clean_field_model_definitions()
def test_wrong_field_inheritance(self):
    """JSONAlchmey - not parent field definition"""
    # @extend on a field that has no parent definition must fail on reparse.
    from invenio.modules.jsonalchemy.errors import FieldParserException
    tmp_file_5 = tempfile.NamedTemporaryFile()
    config_5 = '''
@extend
wrong_field:
""" Desc """
'''
    tmp_file_5.write(config_5)
    tmp_file_5.flush()
    clean_field_model_definitions()
    self.app.extensions['registry'][
        'testsuite.fields'].register(tmp_file_5.name)
    self.assertRaises(
        FieldParserException, Field_parser.reparse, 'testsuite')
    tmp_file_5.close()
    clean_field_model_definitions()
def test_field_rules(self):
    """JsonAlchemy - field parser"""
    self.assertTrue(len(Field_parser.field_definitions('testsuite')) >= 22)
    # Check that all files are parsed
    self.assertTrue(
        'authors' in Field_parser.field_definitions('testsuite'))
    self.assertTrue('title' in Field_parser.field_definitions('testsuite'))
    # Check work around for [n] and [0]
    self.assertTrue(
        Field_parser.field_definitions('testsuite')['doi']['pid'])
    # Check if derived and calculated are well parsed
    self.assertTrue('dummy' in Field_parser.field_definitions('testsuite'))
    self.assertEquals(
        Field_parser.field_definitions('testsuite')['dummy']['pid'], 2)
    # NOTE: .keys() == list comparison is Python-2 specific; kept as-is to
    # match the rest of this (Python 2 era) test module.
    self.assertEquals(Field_parser.field_definitions(
        'testsuite')['dummy']['rules'].keys(), ['json', 'derived'])
    # BUGFIX: was assertTrue(len(...), 2) — the 2 was silently treated as the
    # failure *message*, so the intended equality was never checked.
    self.assertEqual(
        len(Field_parser.field_definitions(
            'testsuite')['dummy']['producer']
        ),
        2
    )
    self.assertTrue(Field_parser.field_definitions('testsuite')['_random'])
    # Check override
    value = {'a': 'a', 'b': 'b', 'k': 'k'}  # noqa
    self.assertEquals(
        eval(Field_parser.field_definitions('testsuite')
             ['title']['rules']['marc'][1]['function']),
        {'form': 'k', 'subtitle': 'b', 'title': 'a'})
    # Check extras
    self.assertTrue(
        'json_ext' in
        Field_parser.field_definitions('testsuite')['modification_date']
    )
    # Reparsing must be idempotent: same number of definitions afterwards.
    tmp = Field_parser.field_definitions('testsuite')
    Field_parser.reparse('testsuite')
    self.assertEquals(
        len(Field_parser.field_definitions('testsuite')), len(tmp))
def test_wrong_field_name_inside_model(self):
    """JSONAlchmey - wrong field name inside model"""
    # A model referencing an unknown field must raise ModelParserException.
    from invenio.modules.jsonalchemy.errors import ModelParserException
    tmp_file_8 = tempfile.NamedTemporaryFile()
    config_8 = '''
fields:
not_existing_field
'''
    tmp_file_8.write(config_8)
    tmp_file_8.flush()
    clean_field_model_definitions()
    self.app.extensions['registry'][
        'testsuite.models'].register(tmp_file_8.name)
    self.assertRaises(
        ModelParserException, Model_parser.reparse, 'testsuite')
    tmp_file_8.close()
    clean_field_model_definitions()
def test_model_definitions(self):
    """JsonAlchemy - model parser"""
    clean_field_model_definitions()
    # Fixture package ships at least two models, including 'test_base'.
    self.assertTrue(len(Model_parser.model_definitions('testsuite')) >= 2)
    self.assertTrue(
        'test_base' in Model_parser.model_definitions('testsuite'))
    # Reparsing must be idempotent: same number of definitions afterwards.
    tmp = Model_parser.model_definitions('testsuite')
    Model_parser.reparse('testsuite')
    self.assertEquals(
        len(Model_parser.model_definitions('testsuite')), len(tmp))
    clean_field_model_definitions()
def test_resolve_several_models(self):
    """JSONAlchemy - test resolve several models"""
    # Resolving a single model, or a list containing it plus its base,
    # must yield the same field set as the model itself.
    test_model = Model_parser.model_definitions('testsuite')['test_model']
    clean_field_model_definitions()
    self.assertEquals(
        Model_parser.resolve_models('test_model', 'testsuite')['fields'],
        test_model['fields'])
    self.assertEquals(
        Model_parser.resolve_models(
            ['test_base', 'test_model'], 'testsuite')['fields'],
        test_model['fields'])
    clean_field_model_definitions()
def test_field_name_model_based(self):
"""JSONAlchemy - field name model based"""
clean_field_model_definitions()
field_model_def = Field_parser.field_definition_model_based(
'title', 'test_model', 'testsuite')
field_def = Field_parser.field_definitions('testsuite')['title_title']
|
[
" value = {'a': 'Awesome title', 'b': 'sub title', 'k': 'form'}"
] | 559
|
lcc
|
python
| null |
377a5cd5de82ca79a2db39ae2f6c6eefe06766c9245639af
|
|
///////////////////////////////////////////////////////////////////////////////////////
// Copyright (C) 2006-2015 Esper Team. All rights reserved. /
// http://esper.codehaus.org /
// ---------------------------------------------------------------------------------- /
// The software in this package is published under the terms of the GPL license /
// a copy of which has been included with this distribution in the license.txt file. /
///////////////////////////////////////////////////////////////////////////////////////
using System;
using System.Collections.Generic;
using com.espertech.esper.common.client.scopetest;
using com.espertech.esper.common.@internal.epl.@join.lookup;
using com.espertech.esper.common.@internal.epl.lookupplansubord;
using com.espertech.esper.common.@internal.support;
using com.espertech.esper.compat;
using com.espertech.esper.compat.collections;
using com.espertech.esper.regressionlib.framework;
using com.espertech.esper.regressionlib.support.bean;
using com.espertech.esper.regressionlib.support.util;
using NUnit.Framework;
using static com.espertech.esper.regressionlib.framework.SupportMessageAssertUtil;
namespace com.espertech.esper.regressionlib.suite.infra.nwtable
{
public class InfraNWTableCreateIndex
{
public static IList<RegressionExecution> Executions()
{
var execs = new List<RegressionExecution>();
execs.Add(new InfraMultiRangeAndKey(true));
execs.Add(new InfraMultiRangeAndKey(false));
execs.Add(new InfraHashBTreeWidening(true));
execs.Add(new InfraHashBTreeWidening(false));
execs.Add(new InfraWidening(true));
execs.Add(new InfraWidening(false));
execs.Add(new InfraCompositeIndex(true));
execs.Add(new InfraCompositeIndex(false));
execs.Add(new InfraLateCreate(true));
execs.Add(new InfraLateCreate(false));
execs.Add(new InfraLateCreateSceneTwo(true));
execs.Add(new InfraLateCreateSceneTwo(false));
execs.Add(new InfraMultipleColumnMultipleIndex(true));
execs.Add(new InfraMultipleColumnMultipleIndex(false));
execs.Add(new InfraDropCreate(true));
execs.Add(new InfraDropCreate(false));
execs.Add(new InfraOnSelectReUse(true));
execs.Add(new InfraOnSelectReUse(false));
execs.Add(new InfraInvalid(true));
execs.Add(new InfraInvalid(false));
execs.Add(new InfraMultikeyIndexFAF(true));
execs.Add(new InfraMultikeyIndexFAF(false));
return execs;
}
private static void RunQueryAssertion(
RegressionEnvironment env,
RegressionPath path,
string epl,
string[] fields,
object[][] expected)
{
var result = env.CompileExecuteFAF(epl, path);
EPAssertionUtil.AssertPropsPerRow(result.Array, fields, expected);
}
private static void SendEventLong(
RegressionEnvironment env,
string theString,
long longPrimitive)
{
var theEvent = new SupportBean();
theEvent.TheString = theString;
theEvent.LongPrimitive = longPrimitive;
env.SendEventBean(theEvent);
}
private static void SendEventShort(
RegressionEnvironment env,
string theString,
short shortPrimitive)
{
var theEvent = new SupportBean();
theEvent.TheString = theString;
theEvent.ShortPrimitive = shortPrimitive;
env.SendEventBean(theEvent);
}
private static void MakeSendSupportBean(
RegressionEnvironment env,
string theString,
int intPrimitive,
long longPrimitive)
{
var b = new SupportBean(theString, intPrimitive);
b.LongPrimitive = longPrimitive;
env.SendEventBean(b);
}
        // Sends one SupportBean_S0 per CSV entry in listOfP00 and checks the "s0"
        // listener output; a null row in `expected` means no event may fire.
        private static void AssertCols(
            RegressionEnvironment env,
            string listOfP00,
            object[][] expected)
        {
            var p00s = listOfP00.SplitCsv();
            Assert.AreEqual(p00s.Length, expected.Length);
            for (var i = 0; i < p00s.Length; i++) {
                env.SendEventBean(new SupportBean_S0(0, p00s[i]));
                if (expected[i] == null) {
                    Assert.IsFalse(env.Listener("s0").IsInvoked);
                }
                else {
                    EPAssertionUtil.AssertProps(
                        env.Listener("s0").AssertOneGetNewAndReset(),
                        new [] { "col0","col1" },
                        expected[i]);
                }
            }
        }
private static int GetIndexCount(
RegressionEnvironment env,
bool namedWindow,
string infraStmtName,
string infraName)
{
return SupportInfraUtil.GetIndexCountNoContext(env, namedWindow, infraStmtName, infraName);
}
        // Asserts which deployments reference the "col0" index entry: an empty
        // csvNames expects no entry at all, otherwise the referring deployment
        // ids must match (in any order).
        private static void AssertIndexesRef(
            RegressionEnvironment env,
            bool namedWindow,
            string name,
            string csvNames)
        {
            var entry = GetIndexEntry(env, namedWindow, name);
            if (string.IsNullOrEmpty(csvNames)) {
                Assert.IsNull(entry);
            }
            else {
                EPAssertionUtil.AssertEqualsAnyOrder(csvNames.SplitCsv(), entry.ReferringDeployments);
            }
        }
private static void AssertIndexCountInstance(
RegressionEnvironment env,
bool namedWindow,
string name,
int count)
{
var repo = GetIndexInstanceRepo(env, namedWindow, name);
Assert.AreEqual(count, repo.Tables.Count);
}
private static EventTableIndexRepository GetIndexInstanceRepo(
RegressionEnvironment env,
bool namedWindow,
string name)
{
if (namedWindow) {
var namedWindowInstance = SupportInfraUtil.GetInstanceNoContextNW(env, "create", name);
return namedWindowInstance.RootViewInstance.IndexRepository;
}
var instance = SupportInfraUtil.GetInstanceNoContextTable(env, "create", name);
return instance.IndexRepository;
}
        // Looks up the metadata entry for a non-unique hash index on the single
        // string column "col0"; returns null when no such index exists.
        private static EventTableIndexMetadataEntry GetIndexEntry(
            RegressionEnvironment env,
            bool namedWindow,
            string name)
        {
            var descOne = new IndexedPropDesc("col0", typeof(string));
            var index = new IndexMultiKey(
                false,
                Arrays.AsList(descOne),
                Collections.GetEmptyList<IndexedPropDesc>(),
                null);
            var meta = GetIndexMetaRepo(env, namedWindow, name);
            return meta.Indexes.Get(index);
        }
private static EventTableIndexMetadata GetIndexMetaRepo(
RegressionEnvironment env,
bool namedWindow,
string name)
{
if (namedWindow) {
var processor = SupportInfraUtil.GetNamedWindow(env, "create", name);
return processor.EventTableIndexMetadata;
}
var table = SupportInfraUtil.GetTable(env, "create", name);
return table.EventTableIndexMetadata;
}
        // Negative tests for create-index: wrong context, duplicate names, unknown
        // properties/infra, bad keywords, and unique-index violations at runtime.
        internal class InfraInvalid : RegressionExecution
        {
            private readonly bool namedWindow;

            public InfraInvalid(bool namedWindow)
            {
                this.namedWindow = namedWindow;
            }

            public void Run(RegressionEnvironment env)
            {
                var path = new RegressionPath();
                var eplCreate = namedWindow
                    ? "create window MyInfraOne#keepall as (f1 string, f2 int)"
                    : "create table MyInfraOne as (f1 string primary key, f2 int primary key)";
                env.CompileDeploy(eplCreate, path);
                env.CompileDeploy("create index MyInfraIndex on MyInfraOne(f1)", path);

                env.CompileDeploy("create context ContextOne initiated by SupportBean terminated after 5 sec", path);
                env.CompileDeploy("create context ContextTwo initiated by SupportBean terminated after 5 sec", path);
                var eplCreateWContext = namedWindow
                    ? "context ContextOne create window MyInfraCtx#keepall as (f1 string, f2 int)"
                    : "context ContextOne create table MyInfraCtx as (f1 string primary key, f2 int primary key)";
                env.CompileDeploy(eplCreateWContext, path);

                // invalid context
                TryInvalidCompile(
                    env,
                    path,
                    "create unique index IndexTwo on MyInfraCtx(f1)",
                    (namedWindow ? "Named window" : "Table") +
                    " by name 'MyInfraCtx' has been declared for context 'ContextOne' and can only be used within the same context");
                TryInvalidCompile(
                    env,
                    path,
                    "context ContextTwo create unique index IndexTwo on MyInfraCtx(f1)",
                    (namedWindow ? "Named window" : "Table") +
                    " by name 'MyInfraCtx' has been declared for context 'ContextOne' and can only be used within the same context");
                // duplicate index name
                TryInvalidCompile(
                    env,
                    path,
                    "create index MyInfraIndex on MyInfraOne(f1)",
                    "An index by name 'MyInfraIndex' already exists [");
                // unknown property
                TryInvalidCompile(
                    env,
                    path,
                    "create index IndexTwo on MyInfraOne(fx)",
                    "Property named 'fx' not found");
                // same property listed twice
                TryInvalidCompile(
                    env,
                    path,
                    "create index IndexTwo on MyInfraOne(f1, f1)",
                    "Property named 'f1' has been declared more then once [create index IndexTwo on MyInfraOne(f1, f1)]");
                // unknown infrastructure
                TryInvalidCompile(
                    env,
                    path,
                    "create index IndexTwo on MyWindowX(f1, f1)",
                    "A named window or table by name 'MyWindowX' does not exist [create index IndexTwo on MyWindowX(f1, f1)]");
                // bad advanced-index type and bad keyword
                TryInvalidCompile(
                    env,
                    path,
                    "create index IndexTwo on MyInfraOne(f1 bubu, f2)",
                    "Unrecognized advanced-type index 'bubu'");
                TryInvalidCompile(
                    env,
                    path,
                    "create gugu index IndexTwo on MyInfraOne(f2)",
                    "Invalid keyword 'gugu' in create-index encountered, expected 'unique' [create gugu index IndexTwo on MyInfraOne(f2)]");
                // unique + btree is not supported
                TryInvalidCompile(
                    env,
                    path,
                    "create unique index IndexTwo on MyInfraOne(f2 btree)",
                    "Combination of unique index with btree (range) is not supported [create unique index IndexTwo on MyInfraOne(f2 btree)]");

                // invalid insert-into unique index
                var eplCreateTwo = namedWindow
                    ? "@Name('create') create window MyInfraTwo#keepall as SupportBean"
                    : "@Name('create') create table MyInfraTwo(TheString string primary key, IntPrimitive int primary key)";
                env.CompileDeploy(eplCreateTwo, path);
                env.CompileDeploy(
                    "@Name('insert') insert into MyInfraTwo select TheString, IntPrimitive from SupportBean",
                    path);
                env.CompileDeploy("create unique index I1 on MyInfraTwo(TheString)", path);
                env.SendEventBean(new SupportBean("E1", 1));
                try {
                    // second event with the same unique key must be rejected at runtime
                    env.SendEventBean(new SupportBean("E1", 2));
                    Assert.Fail();
                }
                catch (Exception ex) {
                    // the reporting statement differs between named windows and tables
                    var text = namedWindow
                        ? "Unexpected exception in statement 'create': Unique index violation, index 'I1' is a unique index and key 'E1' already exists"
                        : "Unexpected exception in statement 'insert': Unique index violation, index 'I1' is a unique index and key 'E1' already exists";
                    Assert.AreEqual(text, ex.Message);
                }

                if (!namedWindow) {
                    // tables without plain-column primary keys cannot be indexed
                    env.CompileDeploy("create table MyTable (p0 string, sumint sum(int))", path);
                    TryInvalidCompile(
                        env,
                        path,
                        "create index MyIndex on MyTable(p0)",
                        "Tables without primary key column(s) do not allow creating an index [");
                }

                env.UndeployAll();
            }
        }
        // Verifies that identical on-select statements reuse an existing index
        // instead of creating a new one, and that undeploying consumers does not
        // drop the shared index. (Tables count one extra index for the primary key.)
        internal class InfraOnSelectReUse : RegressionExecution
        {
            private readonly bool namedWindow;

            public InfraOnSelectReUse(bool namedWindow)
            {
                this.namedWindow = namedWindow;
            }

            public void Run(RegressionEnvironment env)
            {
                var path = new RegressionPath();
                var stmtTextCreateOne = namedWindow
                    ? "@Name('create') create window MyInfraONR#keepall as (f1 string, f2 int)"
                    : "@Name('create') create table MyInfraONR as (f1 string primary key, f2 int primary key)";
                env.CompileDeploy(stmtTextCreateOne, path);
                env.CompileDeploy(
                    "insert into MyInfraONR(f1, f2) select TheString, IntPrimitive from SupportBean",
                    path);
                env.CompileDeploy("@Name('indexOne') create index MyInfraONRIndex1 on MyInfraONR(f2)", path);
                var fields = new [] { "f1","f2" };
                env.SendEventBean(new SupportBean("E1", 1));

                env.CompileDeploy(
                    "@Name('s0') on SupportBean_S0 S0 select nw.f1 as f1, nw.f2 as f2 from MyInfraONR nw where nw.f2 = S0.Id",
                    path)
                    .AddListener("s0");
                Assert.AreEqual(namedWindow ? 1 : 2, GetIndexCount(env, namedWindow, "create", "MyInfraONR"));

                env.SendEventBean(new SupportBean_S0(1));
                EPAssertionUtil.AssertProps(
                    env.Listener("s0").AssertOneGetNewAndReset(),
                    fields,
                    new object[] {"E1", 1});

                // create second identical statement
                env.CompileDeploy(
                    "@Name('stmtTwo') on SupportBean_S0 S0 select nw.f1 as f1, nw.f2 as f2 from MyInfraONR nw where nw.f2 = S0.Id",
                    path);
                Assert.AreEqual(namedWindow ? 1 : 2, GetIndexCount(env, namedWindow, "create", "MyInfraONR"));

                // index count stays stable while consumers come and go
                env.UndeployModuleContaining("s0");
                Assert.AreEqual(namedWindow ? 1 : 2, GetIndexCount(env, namedWindow, "create", "MyInfraONR"));
                env.UndeployModuleContaining("stmtTwo");
                Assert.AreEqual(namedWindow ? 1 : 2, GetIndexCount(env, namedWindow, "create", "MyInfraONR"));
                env.UndeployModuleContaining("indexOne");

                // two-key index order test: both predicate orders share one index
                env.CompileDeploy("@Name('cw') create window MyInfraFour#keepall as SupportBean", path);
                env.CompileDeploy("create index Idx1 on MyInfraFour (TheString, IntPrimitive)", path);
                env.CompileDeploy(
                    "on SupportBean sb select * from MyInfraFour w where w.TheString = sb.TheString and w.IntPrimitive = sb.IntPrimitive",
                    path);
                env.CompileDeploy(
                    "on SupportBean sb select * from MyInfraFour w where w.IntPrimitive = sb.IntPrimitive and w.TheString = sb.TheString",
                    path);
                Assert.AreEqual(1, SupportInfraUtil.GetIndexCountNoContext(env, true, "cw", "MyInfraFour"));

                env.UndeployAll();
            }
        }
        // Verifies that dropping and re-creating indexes leaves fire-and-forget
        // queries correct: results must be identical with or without each index.
        internal class InfraDropCreate : RegressionExecution
        {
            private readonly bool namedWindow;

            public InfraDropCreate(bool namedWindow)
            {
                this.namedWindow = namedWindow;
            }

            public void Run(RegressionEnvironment env)
            {
                var path = new RegressionPath();
                var stmtTextCreateOne = namedWindow
                    ? "@Name('create') create window MyInfraDC#keepall as (f1 string, f2 int, f3 string, f4 string)"
                    : "@Name('create') create table MyInfraDC as (f1 string primary key, f2 int primary key, f3 string primary key, f4 string primary key)";
                env.CompileDeploy(stmtTextCreateOne, path);
                env.CompileDeploy(
                    "insert into MyInfraDC(f1, f2, f3, f4) select TheString, IntPrimitive, '>'||TheString||'<', '?'||TheString||'?' from SupportBean",
                    path);
                env.CompileDeploy("@Name('indexOne') create index MyInfraDCIndex1 on MyInfraDC(f1)", path);
                env.CompileDeploy("@Name('indexTwo') create index MyInfraDCIndex2 on MyInfraDC(f4)", path);
                var fields = new [] { "f1","f2" };

                env.SendEventBean(new SupportBean("E1", -2));

                // drop the first index, both lookups must still work
                env.UndeployModuleContaining("indexOne");

                var result = env.CompileExecuteFAF("select * from MyInfraDC where f1='E1'", path);
                EPAssertionUtil.AssertPropsPerRow(
                    result.Array,
                    fields,
                    new[] {new object[] {"E1", -2}});

                result = env.CompileExecuteFAF("select * from MyInfraDC where f4='?E1?'", path);
                EPAssertionUtil.AssertPropsPerRow(
                    result.Array,
                    fields,
                    new[] {new object[] {"E1", -2}});

                // drop the second index as well
                env.UndeployModuleContaining("indexTwo");

                result = env.CompileExecuteFAF("select * from MyInfraDC where f1='E1'", path);
                EPAssertionUtil.AssertPropsPerRow(
                    result.Array,
                    fields,
                    new[] {new object[] {"E1", -2}});

                result = env.CompileExecuteFAF("select * from MyInfraDC where f4='?E1?'", path);
                EPAssertionUtil.AssertPropsPerRow(
                    result.Array,
                    fields,
                    new[] {new object[] {"E1", -2}});

                // re-create the second index under a new statement name; the stale
                // compiled unit is removed from the path first
                path.Compileds.RemoveAt(path.Compileds.Count - 1);
                env.CompileDeploy("@Name('IndexThree') create index MyInfraDCIndex2 on MyInfraDC(f4)", path);

                result = env.CompileExecuteFAF("select * from MyInfraDC where f1='E1'", path);
                EPAssertionUtil.AssertPropsPerRow(
                    result.Array,
                    fields,
                    new[] {new object[] {"E1", -2}});

                result = env.CompileExecuteFAF("select * from MyInfraDC where f4='?E1?'", path);
                EPAssertionUtil.AssertPropsPerRow(
                    result.Array,
                    fields,
                    new[] {new object[] {"E1", -2}});

                env.UndeployModuleContaining("IndexThree");
                // tables keep their primary-key index, named windows end with none
                Assert.AreEqual(namedWindow ? 0 : 1, GetIndexCount(env, namedWindow, "create", "MyInfraDC"));

                env.UndeployAll();
            }
        }
        // Creates three overlapping indexes (f2+f3+f1, f2+f3, f2) and checks that
        // FAF queries over various predicate combinations all return correct rows.
        internal class InfraMultipleColumnMultipleIndex : RegressionExecution
        {
            private readonly bool namedWindow;

            public InfraMultipleColumnMultipleIndex(bool namedWindow)
            {
                this.namedWindow = namedWindow;
            }

            public void Run(RegressionEnvironment env)
            {
                var path = new RegressionPath();
                var stmtTextCreateOne = namedWindow
                    ? "create window MyInfraMCMI#keepall as (f1 string, f2 int, f3 string, f4 string)"
                    : "create table MyInfraMCMI as (f1 string primary key, f2 int, f3 string, f4 string)";
                env.CompileDeploy(stmtTextCreateOne, path);
                env.CompileDeploy(
                    "insert into MyInfraMCMI(f1, f2, f3, f4) select TheString, IntPrimitive, '>'||TheString||'<', '?'||TheString||'?' from SupportBean",
                    path);
                env.CompileDeploy("create index MyInfraMCMIIndex1 on MyInfraMCMI(f2, f3, f1)", path);
                env.CompileDeploy("create index MyInfraMCMIIndex2 on MyInfraMCMI(f2, f3)", path);
                env.CompileDeploy("create index MyInfraMCMIIndex3 on MyInfraMCMI(f2)", path);
                var fields = new [] { "f1","f2","f3","f4" };

                env.SendEventBean(new SupportBean("E1", -2));
                env.SendEventBean(new SupportBean("E2", -4));
                env.SendEventBean(new SupportBean("E3", -3));

                var result = env.CompileExecuteFAF("select * from MyInfraMCMI where f3='>E1<'", path);
                EPAssertionUtil.AssertPropsPerRow(
                    result.Array,
                    fields,
                    new[] {new object[] {"E1", -2, ">E1<", "?E1?"}});

                result = env.CompileExecuteFAF("select * from MyInfraMCMI where f3='>E1<' and f2=-2", path);
                EPAssertionUtil.AssertPropsPerRow(
                    result.Array,
                    fields,
                    new[] {new object[] {"E1", -2, ">E1<", "?E1?"}});

                result = env.CompileExecuteFAF("select * from MyInfraMCMI where f3='>E1<' and f2=-2 and f1='E1'", path);
                EPAssertionUtil.AssertPropsPerRow(
                    result.Array,
                    fields,
                    new[] {new object[] {"E1", -2, ">E1<", "?E1?"}});

                result = env.CompileExecuteFAF("select * from MyInfraMCMI where f2=-2", path);
                EPAssertionUtil.AssertPropsPerRow(
                    result.Array,
                    fields,
                    new[] {new object[] {"E1", -2, ">E1<", "?E1?"}});

                result = env.CompileExecuteFAF("select * from MyInfraMCMI where f1='E1'", path);
                EPAssertionUtil.AssertPropsPerRow(
                    result.Array,
                    fields,
                    new[] {new object[] {"E1", -2, ">E1<", "?E1?"}});

                result = env.CompileExecuteFAF(
                    "select * from MyInfraMCMI where f3='>E1<' and f2=-2 and f1='E1' and f4='?E1?'",
                    path);
                EPAssertionUtil.AssertPropsPerRow(
                    result.Array,
                    fields,
                    new[] {new object[] {"E1", -2, ">E1<", "?E1?"}});
                env.UndeployAll();
            }
        }
        // Creates an index only after rows already exist, then verifies an
        // on-demand query uses the late-built index correctly across a milestone.
        public class InfraLateCreate : RegressionExecution
        {
            private readonly bool namedWindow;

            public InfraLateCreate(bool namedWindow)
            {
                this.namedWindow = namedWindow;
            }

            public void Run(RegressionEnvironment env)
            {
                string[] fields = {"TheString", "IntPrimitive"};
                var path = new RegressionPath();

                // create infra
                var stmtTextCreate = namedWindow
                    ? "@Name('Create') create window MyInfra.win:keepall() as SupportBean"
                    : "@Name('Create') create table MyInfra(TheString string primary key, IntPrimitive int primary key)";
                env.CompileDeploy(stmtTextCreate, path).AddListener("Create");

                // create insert into
                var stmtTextInsertOne =
                    "@Name('Insert') insert into MyInfra select TheString, IntPrimitive from SupportBean";
                env.CompileDeploy(stmtTextInsertOne, path);

                // populate before any secondary index exists
                env.SendEventBean(new SupportBean("A1", 1));
                env.SendEventBean(new SupportBean("B2", 2));
                env.SendEventBean(new SupportBean("B2", 1));

                // create index
                var stmtTextCreateIndex = "@Name('Index') create index MyInfra_IDX on MyInfra(TheString)";
                env.CompileDeploy(stmtTextCreateIndex, path);

                env.Milestone(0);

                // perform on-demand query
                var result = env.CompileExecuteFAF(
                    "select * from MyInfra where TheString = 'B2' order by IntPrimitive asc",
                    path);
                EPAssertionUtil.AssertPropsPerRow(
                    result.Array,
                    fields,
                    new[] {new object[] {"B2", 1}, new object[] {"B2", 2}});

                // cleanup
                env.UndeployAll();

                env.Milestone(1);
            }
        }
        // Variant of late index creation: rows are inserted around a milestone
        // before a composite index is built; the FAF query must see all of them.
        internal class InfraLateCreateSceneTwo : RegressionExecution
        {
            private readonly bool namedWindow;

            public InfraLateCreateSceneTwo(bool namedWindow)
            {
                this.namedWindow = namedWindow;
            }

            public void Run(RegressionEnvironment env)
            {
                var path = new RegressionPath();
                var stmtTextCreateOne = namedWindow
                    ? "create window MyInfraLC#keepall as (f1 string, f2 int, f3 string, f4 string)"
                    : "create table MyInfraLC as (f1 string primary key, f2 int primary key, f3 string primary key, f4 string primary key)";
                env.CompileDeploy(stmtTextCreateOne, path);
                env.CompileDeploy(
                    "insert into MyInfraLC(f1, f2, f3, f4) select TheString, IntPrimitive, '>'||TheString||'<', '?'||TheString||'?' from SupportBean",
                    path);
                env.SendEventBean(new SupportBean("E1", -4));

                env.Milestone(0);

                env.SendEventBean(new SupportBean("E1", -2));
                env.SendEventBean(new SupportBean("E1", -3));
                // index created only after all three rows exist
                env.CompileDeploy("create index MyInfraLCIndex on MyInfraLC(f2, f3, f1)", path);
                var fields = new [] { "f1","f2","f3","f4" };

                env.Milestone(1);

                var result = env.CompileExecuteFAF("select * from MyInfraLC where f3='>E1<' order by f2 asc", path);
                EPAssertionUtil.AssertPropsPerRow(
                    result.Array,
                    fields,
                    new[] {
                        new object[] {"E1", -4, ">E1<", "?E1?"}, new object[] {"E1", -3, ">E1<", "?E1?"},
                        new object[] {"E1", -2, ">E1<", "?E1?"}
                    });

                env.UndeployAll();
            }
        }
        // Exercises a composite (f2,f3,f1) index with partial and full predicate
        // matches, then re-creates an equivalent index via the SODA compile path.
        internal class InfraCompositeIndex : RegressionExecution
        {
            private readonly bool namedWindow;

            public InfraCompositeIndex(bool namedWindow)
            {
                this.namedWindow = namedWindow;
            }

            public void Run(RegressionEnvironment env)
            {
                var path = new RegressionPath();
                var stmtTextCreate = namedWindow
                    ? "create window MyInfraCI#keepall as (f1 string, f2 int, f3 string, f4 string)"
                    : "create table MyInfraCI as (f1 string primary key, f2 int, f3 string, f4 string)";
                env.CompileDeploy(stmtTextCreate, path);
                // keep the compiled infra unit for the SODA re-test below
                var compiledWindow = path.Compileds[0];
                env.CompileDeploy(
                    "insert into MyInfraCI(f1, f2, f3, f4) select TheString, IntPrimitive, '>'||TheString||'<', '?'||TheString||'?' from SupportBean",
                    path);
                env.CompileDeploy("@Name('indexOne') create index MyInfraCIIndex on MyInfraCI(f2, f3, f1)", path);
                var fields = new [] { "f1","f2","f3","f4" };

                env.SendEventBean(new SupportBean("E1", -2));

                var result = env.CompileExecuteFAF("select * from MyInfraCI where f3='>E1<'", path);
                EPAssertionUtil.AssertPropsPerRow(
                    result.Array,
                    fields,
                    new[] {new object[] {"E1", -2, ">E1<", "?E1?"}});

                result = env.CompileExecuteFAF("select * from MyInfraCI where f3='>E1<' and f2=-2", path);
                EPAssertionUtil.AssertPropsPerRow(
                    result.Array,
                    fields,
                    new[] {new object[] {"E1", -2, ">E1<", "?E1?"}});

                result = env.CompileExecuteFAF("select * from MyInfraCI where f3='>E1<' and f2=-2 and f1='E1'", path);
                EPAssertionUtil.AssertPropsPerRow(
                    result.Array,
                    fields,
                    new[] {new object[] {"E1", -2, ">E1<", "?E1?"}});
                env.UndeployModuleContaining("indexOne");

                // test SODA
                path.Clear();
                path.Add(compiledWindow);
                env.EplToModelCompileDeploy("create index MyInfraCIIndexTwo on MyInfraCI(f2, f3, f1)", path)
                    .UndeployAll();
            }
        }
internal class InfraWidening : RegressionExecution
{
private readonly bool namedWindow;
public InfraWidening(bool namedWindow)
{
this.namedWindow = namedWindow;
}
public void Run(RegressionEnvironment env)
{
var path = new RegressionPath();
// widen to long
var stmtTextCreate = namedWindow
? "create window MyInfraW#keepall as (f1 long, f2 string)"
: "create table MyInfraW as (f1 long primary key, f2 string primary key)";
env.CompileDeploy(stmtTextCreate, path);
env.CompileDeploy(
"insert into MyInfraW(f1, f2) select LongPrimitive, TheString from SupportBean",
path);
env.CompileDeploy("create index MyInfraWIndex1 on MyInfraW(f1)", path);
var fields = new [] { "f1","f2" };
SendEventLong(env, "E1", 10L);
var result = env.CompileExecuteFAF("select * from MyInfraW where f1=10", path);
EPAssertionUtil.AssertPropsPerRow(
result.Array,
fields,
|
[
" new[] {new object[] {10L, \"E1\"}});"
] | 2,245
|
lcc
|
csharp
| null |
3b38e1fde0c4d94fe262da07601e96359f818e86edae6271
|
|
from ctypes import Structure, c_double, c_int, byref, memmove, sizeof, c_uint32, c_uint, c_ulong
from . import clibrebound
import math
import ctypes.util
import rebound
import sys
import random
__all__ = ["Particle"]
def notNone(a):
    """
    Return True if `a` contains at least one element that is not None,
    False otherwise (an empty sequence yields False).
    """
    # Identity comparison avoids invoking custom __eq__ on elements
    # (e.g. elementwise comparisons on array-likes), which the original
    # list.count(None) relied on; behavior is unchanged for plain values.
    return any(el is not None for el in a)
class Particle(Structure):
"""
The main REBOUND particle data structure.
This is an abstraction of the reb_particle structure in C.
The Particle fields are set at the end of simulation.py to avoid circular references.
Attributes
----------
x, y, z : float
Particle positions
vx, vy, vz : float
Particle velocities
ax, ay, az : float
Particle accelerations
m : float
Particle mass
r : float
Particle radius
lastcollision : float
Last time the particle had a physical collision (if checking for collisions)
c : c_void_p (C void pointer)
Pointer to the cell the particle is currently in (if using tree code)
hash : c_uint32
Particle hash (permanent identifier for the particle)
ap : c_void_p (C void pointer)
Pointer to additional parameters one might want to add to particles
_sim : POINTER(rebound.Simulation)
Internal pointer to the parent simulation (used in C version of REBOUND)
a, e, inc, Omega, omega, f : float
(Kepler Elements) Semi-major axis, eccentricity, inclination, longitude of the ascending node, argument of periapsis, and true anomaly respectively. The Keplerian Elements are in Jacobi coordinates (with mu = G*Minc, where Minc is the total mass from index 0 to the particle's index, inclusive).
"""
def __str__(self):
"""
Returns a string with the position and velocity of the particle.
"""
return "<rebound.Particle object, m=%s x=%s y=%s z=%s vx=%s vy=%s vz=%s>"%(self.m,self.x,self.y,self.z,self.vx,self.vy,self.vz)
__repr__ = __str__
    def __init__(self, simulation=None, particle=None, m=None, x=None, y=None, z=None, vx=None, vy=None, vz=None, primary=None, a=None, P=None, e=None, inc=None, Omega=None, omega=None, pomega=None, f=None, M=None, l=None, theta=None, T=None, r=None, date=None, variation=None, variation2=None, h=None, k=None, ix=None, iy=None, hash=0, jacobi_masses=False):
        """
        Initializes a Particle structure. Rather than explicitly creating
        a Particle structure, users may use the ``add()`` member function
        of a Simulation instance, which will both create a Particle and
        then add it to the simulation with one function call.
        This function accepts either cartesian positions and velocities,
        classical orbital elements together with the reference Particle
        (the primary), as well as orbital parameters defined by Pal (2009).
        For convenience, optional keywords that are not passed default
        to zero (mass, cartesian and orbital elements).
        Whenever initializing a particle from orbital elements, one must
        specify either the semimajor axis or the period of the orbit.
        For classical orbital paramerers, one can specify the longitude
        of the ascending node by passing Omega, to specify the pericenter
        one can pass either omega or pomega (not both), and for the
        longitude/anomaly one can pass one of f, M, l or theta.
        See ipython_examples/OrbitalElements.ipynb for examples.
        See also Murray & Dermott Solar System Dynamics for formal
        definitions of angles in orbital mechanics.
        All angles should be specified in radians.
        Parameters
        ----------
        simulation : Simulation
            Simulation instance associated with this particle (Required if passing orbital elements or setting up a variation).
        particle : Particle, optional
            If a particle is passed, a copy of that particle is returned.
            If a variational particle is initialized, then ``particle`` is
            original particle that will be varied.
        m : float
            Mass        (Default: 0)
        x, y, z : float
            Positions in Cartesian coordinates  (Default: 0)
        vx, vy, vz : float
            Velocities in Cartesian coordinates (Default: 0)
        primary : Particle
            Primary body for converting orbital elements to cartesian (Default: center of mass of the particles in the passed simulation, i.e., this will yield Jacobi coordinates as one progressively adds particles)
        a : float
            Semimajor axis (a or P required if passing orbital elements)
        P : float
            Orbital period (a or P required if passing orbital elements)
        e : float
            Eccentricity                (Default: 0)
        inc : float
            Inclination                 (Default: 0)
        Omega : float
            Longitude of ascending node (Default: 0)
        omega : float
            Argument of pericenter      (Default: 0)
        pomega : float
            Longitude of pericenter     (Default: 0)
        f : float
            True anomaly                (Default: 0)
        M : float
            Mean anomaly                (Default: 0)
        l : float
            Mean longitude              (Default: 0)
        theta : float
            True longitude              (Default: 0)
        T : float
            Time of pericenter passage
        h : float
            h variable, see Pal (2009) for a definition  (Default: 0)
        k : float
            k variable, see Pal (2009) for a definition  (Default: 0)
        ix : float
            ix variable, see Pal (2009) for a definition (Default: 0)
        iy : float
            iy variable, see Pal (2009) for a definition (Default: 0)
        r : float
            Particle radius (only used for collisional simulations)
        date : string
            For consistency with adding particles through horizons. Not used here.
        variation : string            (Default: None)
            Set this string to the name of an orbital parameter to initialize the particle as a variational particle.
            Can be one of the following: m, a, e, inc, omega, Omega, f, k, h, lambda, ix, iy.
        variation2 : string           (Default: None)
            Set this string to the name of a second orbital parameter to initialize the particle as a second order variational particle. Only used for second order variational equations.
            Can be one of the following: m, a, e, inc, omega, Omega, f, k, h, lambda, ix, iy.
        hash : c_uint32
            Unsigned integer identifier for particle.  Can pass an integer directly, or a string that will be converted to a hash. User is responsible for assigning unique hashes.
        jacobi_masses: bool
            Whether to use jacobi primary mass in orbit initialization. Particle mass will still be set to physical value (Default: False)
        Examples
        --------
        >>> sim = rebound.Simulation()
        >>> sim.add(m=1.)
        >>> p1 = rebound.Particle(simulation=sim, m=0.001, a=0.5, e=0.01)
        >>> p2 = rebound.Particle(simulation=sim, m=0.0, x=1., vy=1.)
        >>> p3 = rebound.Particle(simulation=sim, m=0.001, a=1.5, h=0.1, k=0.2, l=0.1)
        >>> p4 = rebound.Particle(simulation=sim, m=0.001, a=1.5, omega="uniform")  # omega will be a random number between 0 and 2pi
        """
        # "uniform" requests a flat random angle: vonmisesvariate with
        # kappa=0 is a uniform draw on [0, 2pi).
        if Omega == "uniform":
            Omega = random.vonmisesvariate(0.,0.)
        if omega == "uniform":
            omega = random.vonmisesvariate(0.,0.)
        if pomega == "uniform":
            pomega = random.vonmisesvariate(0.,0.)
        if f == "uniform":
            f = random.vonmisesvariate(0.,0.)
        if M == "uniform":
            M = random.vonmisesvariate(0.,0.)
        if l == "uniform":
            l = random.vonmisesvariate(0.,0.)
        if theta == "uniform":
            theta = random.vonmisesvariate(0.,0.)
        self.hash = hash # set via the property, which checks for type
        # --- Variational-particle branch: build the particle as a derivative
        # of an existing (or freshly constructed) particle, then return early.
        if variation:
            if primary is None:
                primary = simulation.particles[0]
            # Find particle to differenciate
            lc = locals().copy()
            del lc["self"]
            del lc["variation"]
            del lc["variation2"]
            if particle is None:
                # Recursively build the base particle from the remaining kwargs.
                particle = Particle(**lc)
            # First or second order?
            if variation and variation2:
                variation_order = 2
            else:
                variation_order = 1
            # Shortcuts for variable names
            if variation == "l":
                variation = "lambda"
            if variation2 == "l":
                variation2 = "lambda"
            if variation == "i":
                variation = "inc"
            if variation2 == "i":
                variation2 = "inc"
            variationtypes = ["m","a","e","inc","omega","Omega","f","k","h","lambda","ix","iy"]
            if variation_order==1:
                if variation in variationtypes:
                    # C library exposes one derivative function per element.
                    method = getattr(clibrebound, 'reb_derivatives_'+variation)
                    method.restype = Particle
                    p = method(c_double(simulation.G), primary, particle)
                else:
                    raise ValueError("Variational particles can only be initializes using the derivatives with respect to one of the following: %s."%", ".join(variationtypes))
            elif variation_order==2:
                if variation in variationtypes and variation2 in variationtypes:
                    # Swap variations if needed
                    vi1 = variationtypes.index(variation)
                    vi2 = variationtypes.index(variation2)
                    if vi2 < vi1:
                        variation, variation2 = variation2, variation
                    method = getattr(clibrebound, 'reb_derivatives_'+variation+'_'+variation2)
                    method.restype = Particle
                    p = method(c_double(simulation.G), primary, particle)
                else:
                    raise ValueError("Variational particles can only be initializes using the derivatives with respect to one of the following: %s."%", ".join(variationtypes))
            else:
                raise ValueError("Variational equations beyond second order are not implemented.")
            # Copy the derivative's state into self and finish.
            self.m = p.m
            self.x = p.x
            self.y = p.y
            self.z = p.z
            self.vx = p.vx
            self.vy = p.vy
            self.vz = p.vz
            return
        # --- Copy-constructor branch: raw byte copy of an existing particle.
        if particle is not None:
            memmove(byref(self), byref(particle), sizeof(self))
            return
        # Group the mutually-exclusive input styles for validation below.
        cart = [x,y,z,vx,vy,vz]
        orbi = [primary,a,P,e,inc,Omega,omega,pomega,f,M,l,theta,T]
        pal  = [h,k,ix,iy]
        self.ax = 0.
        self.ay = 0.
        self.az = 0.
        if m is None:
            self.m = 0.
        else:
            self.m = m
        if r is None:
            self.r = 0.
        else:
            self.r = r
        self.lastcollision = 0.
        self.c = None
        self.ap = None
        if notNone([e,inc,omega,pomega,Omega,M,f,theta,T]) and notNone(pal):
            raise ValueError("You cannot mix Pal coordinates (h,k,ix,iy) with the following orbital elements: e,inc,Omega,omega,pomega,f,M,theta,T. If a longitude/anomaly is needed in Pal coordinates, use l.")
        if notNone(cart) and notNone(orbi):
            raise ValueError("You cannot pass cartesian coordinates and orbital elements (and/or primary) at the same time.")
        # --- Orbital-element branch (classical or Pal parametrization).
        if notNone(orbi):
            if simulation is None:
                raise ValueError("Need to specify simulation when initializing particle with orbital elements.")
            if primary is None:
                clibrebound.reb_get_com.restype = Particle
                primary = clibrebound.reb_get_com(byref(simulation)) # this corresponds to adding in Jacobi coordinates
            if jacobi_masses is True:
                interior_mass = 0
                for p in simulation.particles:
                    interior_mass += p.m
                # orbit conversion uses mu=G*(p.m+primary.m) so set prim.m=Mjac-m so mu=G*Mjac
                primary.m = simulation.particles[0].m*(self.m + interior_mass)/interior_mass - self.m
            if a is None and P is None:
                raise ValueError("You need to pass either a semimajor axis or orbital period to initialize the particle using orbital elements.")
            if a is not None and P is not None:
                raise ValueError("You can pass either the semimajor axis or orbital period, but not both.")
            if a is None:
                # Kepler's third law: derive a from the period P.
                a = (P**2*simulation.G*(primary.m + self.m)/(4.*math.pi**2))**(1./3.)
            if notNone(pal):
                # Pal orbital parameters
                if h is None:
                    h = 0.
                if k is None:
                    k = 0.
                if l is None:
                    l = 0.
                if ix is None:
                    ix = 0.
                if iy is None:
                    iy = 0.
                if((ix*ix + iy*iy) > 4.0):
                    raise ValueError("Passed (ix, iy) coordinates are not valid, squared sum exceeds 4.")
                clibrebound.reb_tools_pal_to_particle.restype = Particle
                p = clibrebound.reb_tools_pal_to_particle(c_double(simulation.G), primary, c_double(self.m), c_double(a), c_double(l), c_double(k), c_double(h), c_double(ix), c_double(iy))
            else:
                # Normal orbital parameters
                if e is None:
                    e = 0.
                if inc is None:
                    inc = 0.
                if Omega is None: # we require that Omega be passed if you want to specify longitude of node
                    Omega = 0.
                pericenters = [omega, pomega] # Need omega for C function. Can specify it either directly or through pomega indirectly.
                numNones = pericenters.count(None)
                if numNones == 0:
                    raise ValueError("Can't pass both omega and pomega")
                if numNones == 2: # Neither passed. Default to 0.
                    omega = 0.
                if numNones == 1:
                    if pomega is not None: # Only have to find omega is pomega was passed
                        if math.cos(inc) > 0: # inc is in range [-pi/2, pi/2] (prograde), so pomega = Omega + omega
                            omega = pomega - Omega
                        else:
                            omega = Omega - pomega # for retrograde orbits, pomega = Omega - omega
                longitudes = [f,M,l,theta,T] # can specify longitude through any of these four. Need f for C function.
                numNones = longitudes.count(None)
                if numNones < 4:
                    raise ValueError("Can only pass one longitude/anomaly in the set [f, M, l, theta, T]")
                if numNones == 5: # none of them passed. Default to 0.
                    f = 0.
                if numNones == 4: # Only one was passed.
                    if f is None: # Only have to work if f wasn't passed.
                        if theta is not None: # theta is next easiest
                            if math.cos(inc) > 0: # for prograde orbits, theta = Omega + omega + f
                                f = theta - Omega - omega
                            else:
                                f = Omega - omega - theta # for retrograde, theta = Omega - omega - f
                        else: # Either M, l, or T was passed. Will need to find M first (if not passed) to find f
                            if l is not None:
                                if math.cos(inc) > 0: # for prograde orbits, l = Omega + omega + M
                                    M = l - Omega - omega
                                else:
                                    M = Omega - omega - l # for retrograde, l = Omega - omega - M
                            else:
                                if T is not None: # works for both elliptical and hyperbolic orbits
                                    # TODO: has accuracy problems for M=n*(t-T) << 1
                                    n = (simulation.G*(primary.m+self.m)/abs(a**3))**0.5
                                    M = n*(simulation.t - T)
                            # Convert mean anomaly to true anomaly via the C library.
                            clibrebound.reb_tools_M_to_f.restype = c_double
                            f = clibrebound.reb_tools_M_to_f(c_double(e), c_double(M))
                err = c_int()
                clibrebound.reb_tools_orbit_to_particle_err.restype = Particle
                p = clibrebound.reb_tools_orbit_to_particle_err(c_double(simulation.G), primary, c_double(self.m), c_double(a), c_double(e), c_double(inc), c_double(Omega), c_double(omega), c_double(f), byref(err))
                # Translate the C error code into a descriptive exception.
                if err.value == 1:
                    raise ValueError("Can't set e exactly to 1.")
                if err.value == 2:
                    raise ValueError("Eccentricity must be greater than or equal to zero.")
                if err.value == 3:
                    raise ValueError("Bound orbit (a > 0) must have e < 1.")
                if err.value == 4:
                    raise ValueError("Unbound orbit (a < 0) must have e > 1.")
                if err.value == 5:
                    raise ValueError("Unbound orbit can't have f beyond the range allowed by the asymptotes set by the hyperbola.")
                if err.value == 6:
                    raise ValueError("Primary has no mass.")
            self.x = p.x
            self.y = p.y
            self.z = p.z
            self.vx = p.vx
            self.vy = p.vy
            self.vz = p.vz
        # --- Cartesian branch: unspecified components default to zero.
        else:
            if x is None:
                x = 0.
            if y is None:
                y = 0.
            if z is None:
                z = 0.
            if vx is None:
                vx = 0.
            if vy is None:
                vy = 0.
            if vz is None:
                vz = 0.
            self.x = x
            self.y = y
            self.z = z
            self.vx = vx
            self.vy = vy
            self.vz = vz
def copy(self):
"""
Returns a deep copy of the particle. The particle is not added to any simulation by default.
"""
np = Particle()
memmove(byref(np), byref(self), sizeof(self))
return np
    def calculate_orbit(self, primary=None, G=None):
        """
        Returns a rebound.Orbit object with the keplerian orbital elements
        corresponding to the particle around the passed primary
        (rebound.Particle) If no primary is passed, defaults to Jacobi coordinates
        (with mu = G*Minc, where Minc is the total mass from index 0 to the particle's index, inclusive).
        Examples
        --------
        >>> sim = rebound.Simulation()
        >>> sim.add(m=1.)
        >>> sim.add(x=1.,vy=1.)
        >>> orbit = sim.particles[1].calculate_orbit(sim.particles[0])
        >>> print(orbit.e) # gives the eccentricity
        Parameters
        ----------
        primary : rebound.Particle
            Central body (Optional. Default uses Jacobi coordinates)
        G : float
            Gravitational constant (Optional. Default takes G from simulation in which particle is in)
        Returns
        -------
        A rebound.Orbit object
        Raises
        ------
        ValueError
            If the particle has no simulation and primary/G are missing,
            if called on particles[0] without a primary, if the primary is
            massless, or if particle and primary coincide.
        """
        if not self._sim:
            # Particle not in a simulation
            if primary is None:
                raise ValueError("Particle does not belong to any simulation and no primary given. Cannot calculate orbit.")
            if G is None:
                raise ValueError("Particle does not belong to any simulation and G not given. Cannot calculate orbit.")
            else:
                G = c_double(G)
        else:
            # First check whether this is particles[0]
            clibrebound.reb_get_particle_index.restype = c_int
            index = clibrebound.reb_get_particle_index(byref(self)) # first check this isn't particles[0]
            if index == 0 and primary is None:
                raise ValueError("Orbital elements for particle[0] not implemented unless primary is provided")
            if primary is None:    # Use default, i.e., Jacobi coordinates
                clibrebound.reb_get_jacobi_com.restype = Particle   # now return jacobi center of mass
                primary = clibrebound.reb_get_jacobi_com(byref(self))
            G = c_double(self._sim.contents.G)
        # Delegate the element conversion to the C library; err reports
        # degenerate configurations that we surface as ValueErrors.
        err = c_int()
        clibrebound.reb_tools_particle_to_orbit_err.restype = rebound.Orbit
        o = clibrebound.reb_tools_particle_to_orbit_err(G, self, primary, byref(err))
        if err.value == 1:
            raise ValueError("Primary has no mass.")
        if err.value == 2:
            raise ValueError("Particle and primary positions are the same.")
        return o
    def sample_orbit(self, Npts=100, primary=None, trailing=True, timespan=None, useTrueAnomaly=True):
        """
        Returns a nested list of xyz positions along the osculating orbit of the particle.
        If primary is not passed, returns xyz positions along the Jacobi osculating orbit
        (with mu = G*Minc, where Minc is the total mass from index 0 to the particle's index, inclusive).
        Parameters
        ----------
        Npts    : int, optional
            Number of points along the orbit to return  (default: 100)
        primary : rebound.Particle, optional
            Primary to use for the osculating orbit (default: Jacobi center of mass)
        trailing: bool, optional
            Whether to return points stepping backwards in time (True) or forwards (False). (default: True)
        timespan: float, optional
            Return points (for the osculating orbit) from the current position to timespan (forwards or backwards in time depending on trailing keyword).
            Defaults to the orbital period for bound orbits, and to the rough time it takes the orbit to move by the current distance from the primary for a hyperbolic orbit. Implementation currently only supports this option if useTrueAnomaly=False.
        useTrueAnomaly: bool, optional
            Will sample equally spaced points in true anomaly if True, otherwise in mean anomaly.
            Latter might be better for hyperbolic orbits, where true anomaly can stay near the limiting value for a long time, and then switch abruptly at pericenter. (Default: True)
        """
        pts = []
        if primary is None:
            primary = self.jacobi_com
        o = self.calculate_orbit(primary=primary)
        if timespan is None:
            if o.a < 0.: # hyperbolic orbit
                timespan = 2*math.pi*o.d/o.v # rough time to cross display box
            else:
                timespan = o.P
        lim_phase = abs(o.n)*timespan # n is negative for hyperbolic orbits
        if trailing is True:
            lim_phase *= -1 # sample phase backwards from current value
        # Evenly spaced phases from 0 to lim_phase (inclusive of both ends).
        phase = [lim_phase*i/(Npts-1) for i in range(Npts)]
        for i,ph in enumerate(phase):
            # Construct a throwaway particle on the osculating orbit at the
            # shifted anomaly and record its cartesian position.
            if useTrueAnomaly is True:
                newp = Particle(a=o.a, f=o.f+ph, inc=o.inc, omega=o.omega, Omega=o.Omega, e=o.e, m=self.m, primary=primary, simulation=self._sim.contents)
            else:
                newp = Particle(a=o.a, M=o.M+ph, inc=o.inc, omega=o.omega, Omega=o.Omega, e=o.e, m=self.m, primary=primary, simulation=self._sim.contents)
            pts.append(newp.xyz)
        return pts
# Simple operators for particles.
def __add__(self, other):
if not isinstance(other, Particle):
return NotImplemented
c = self.copy()
return c.__iadd__(other)
    def __iadd__(self, other):
        """In-place addition of another particle's state, delegated to the
        C library; returns self (NotImplemented for non-Particle operands)."""
        if not isinstance(other, Particle):
            return NotImplemented
        clibrebound.reb_particle_iadd(byref(self), byref(other))
        return self
def __sub__(self, other):
if not isinstance(other, Particle):
return NotImplemented
c = self.copy()
return c.__isub__(other)
    def __isub__(self, other):
        """In-place subtraction of another particle's state, delegated to the
        C library; returns self (NotImplemented for non-Particle operands)."""
        if not isinstance(other, Particle):
            return NotImplemented
        clibrebound.reb_particle_isub(byref(self), byref(other))
        return self
def __mul__(self, other):
try:
other = float(other)
except:
return NotImplemented
c = self.copy()
return c.__imul__(other)
    def __imul__(self, other):
        """In-place scaling of the particle by a scalar, delegated to the
        C library; returns self (NotImplemented for non-scalar operands)."""
        try:
            other = float(other)
        except:
            return NotImplemented
        clibrebound.reb_particle_imul(byref(self), c_double(other))
        return self
def __rmul__(self, other):
try:
other = float(other)
except:
return NotImplemented
|
[
" c = self.copy()"
] | 2,793
|
lcc
|
python
| null |
87167f35efb4266c3e3919ffabd45083b35092d703ea2b45
|
|
# -*- coding: utf-8 -*-
import copy
import re
import simplejson
import werkzeug
from lxml import etree, html
from openerp import SUPERUSER_ID
from openerp.addons.website.models import website
from openerp.http import request
from openerp.osv import osv, fields
class view(osv.osv):
_inherit = "ir.ui.view"
_columns = {
'page': fields.boolean("Whether this view is a web page template (complete)"),
'website_meta_title': fields.char("Website meta title", size=70, translate=True),
'website_meta_description': fields.text("Website meta description", size=160, translate=True),
'website_meta_keywords': fields.char("Website meta keywords", translate=True),
}
_defaults = {
'page': False,
}
    def _view_obj(self, cr, uid, view_id, context=None):
        """Resolve ``view_id`` to a view browse record.

        Accepts an xml id string (raises ValueError via xmlid_to_object if
        it does not exist), a database id, or an already-browsed record,
        which is returned unchanged.
        """
        if isinstance(view_id, basestring):
            return self.pool['ir.model.data'].xmlid_to_object(
                cr, uid, view_id, raise_if_not_found=True, context=context
            )
        elif isinstance(view_id, (int, long)):
            return self.browse(cr, uid, view_id, context=context)
        # assume it's already a view object (WTF?)
        return view_id
    # Returns all views (called and inherited) related to a view
    # Used by translation mechanism, SEO and optional templates
    def _views_get(self, cr, uid, view_id, options=True, context=None, root=True):
        """ For a given view ``view_id``, should return:
        * the view itself
        * all views inheriting from it, enabled or not
          - but not the optional children of a non-enabled child
        * all views called from it (via t-call)
        """
        try:
            view = self._view_obj(cr, uid, view_id, context=context)
        except ValueError:
            # Shall we log that ?
            return []
        # Walk up to the topmost ancestor first so the whole inheritance
        # tree is collected, not just the branch below view_id.
        while root and view.inherit_id:
            view = view.inherit_id
        result = [view]
        node = etree.fromstring(view.arch)
        # Recurse into every template referenced through t-call.
        for child in node.xpath("//t[@t-call]"):
            try:
                called_view = self._view_obj(cr, uid, child.get('t-call'), context=context)
            except ValueError:
                continue
            if called_view not in result:
                result += self._views_get(cr, uid, called_view, options=options, context=context)
        extensions = view.inherit_children_ids
        if not options:
            # only active children
            extensions = (v for v in view.inherit_children_ids
                          if v.application in ('always', 'enabled'))
        # Keep options in a deterministic order regardless of their applicability
        for extension in sorted(extensions, key=lambda v: v.id):
            for r in self._views_get(
                    cr, uid, extension,
                    # only return optional grandchildren if this child is enabled
                    options=extension.application in ('always', 'enabled'),
                    context=context, root=False):
                if r not in result:
                    result.append(r)
        return result
def extract_embedded_fields(self, cr, uid, arch, context=None):
return arch.xpath('//*[@data-oe-model != "ir.ui.view"]')
    def save_embedded_field(self, cr, uid, el, context=None):
        """Persist an edited embedded field back onto its owning record.

        The element carries data-oe-model / data-oe-id / data-oe-field
        attributes identifying the target; the html content is converted to
        a field value with the converter matching data-oe-type. Nothing is
        written when the converter yields None.
        """
        Model = self.pool[el.get('data-oe-model')]
        field = el.get('data-oe-field')
        column = Model._all_columns[field].column
        converter = self.pool['website.qweb'].get_converter_for(
            el.get('data-oe-type'))
        value = converter.from_html(cr, uid, Model, column, el)
        if value is not None:
            # TODO: batch writes?
            Model.write(cr, uid, [int(el.get('data-oe-id'))], {
                field: value
            }, context=context)
    def to_field_ref(self, cr, uid, el, context=None):
        """Convert an embedded-field element back to a ``t-field`` node,
        dropping the editor's data-oe-* metadata attributes and preserving
        the element's tag and tail text."""
        # filter out meta-information inserted in the document
        attributes = dict((k, v) for k, v in el.items()
                          if not k.startswith('data-oe-'))
        attributes['t-field'] = el.get('data-oe-expression')
        out = html.html_parser.makeelement(el.tag, attrib=attributes)
        out.tail = el.tail
        return out
    def replace_arch_section(self, cr, uid, view_id, section_xpath, replacement, context=None):
        """Return the arch of ``view_id`` with the content of the node
        matched by ``section_xpath`` (the root when falsy) replaced by the
        content of ``replacement``. The matched node itself is kept — only
        its text, tail and children are swapped."""
        # the root of the arch section shouldn't actually be replaced as it's
        # not really editable itself, only the content truly is editable.
        [view] = self.browse(cr, uid, [view_id], context=context)
        arch = etree.fromstring(view.arch.encode('utf-8'))
        # => get the replacement root
        if not section_xpath:
            root = arch
        else:
            # ensure there's only one match
            [root] = arch.xpath(section_xpath)
        root.text = replacement.text
        root.tail = replacement.tail
        # replace all children
        del root[:]
        for child in replacement:
            root.append(copy.deepcopy(child))
        return arch
    def render(self, cr, uid, id_or_xml_id, values=None, engine='ir.qweb', context=None):
        """Render the view; when serving a website request, switch to the
        website qweb engine and enrich the rendering context with website
        helpers (url_for, slug, company, menus, edit/translate flags)."""
        if request and getattr(request, 'website_enabled', False):
            engine='website.qweb'
            if isinstance(id_or_xml_id, list):
                id_or_xml_id = id_or_xml_id[0]
            if not context:
                context = {}
            qcontext = dict(
                context.copy(),
                website=request.website,
                url_for=website.url_for,
                slug=website.slug,
                res_company=request.website.company_id,
                user_id=self.pool.get("res.users").browse(cr, uid, uid),
                translatable=context.get('lang') != request.website.default_lang_code,
                editable=request.website.is_publisher(),
                menu_data=self.pool['ir.ui.menu'].load_menus_root(cr, uid, context=context) if request.website.is_user() else None,
            )
            # add some values
            if values:
                qcontext.update(values)
            # in edit mode ir.ui.view will tag nodes
            context['inherit_branding'] = qcontext.get('editable', False)
            view_obj = request.website.get_template(id_or_xml_id)
            if 'main_object' not in qcontext:
                qcontext['main_object'] = view_obj
            values = qcontext
        return super(view, self).render(cr, uid, id_or_xml_id, values=values, engine=engine, context=context)
    def _pretty_arch(self, arch):
        """Serialize the lxml tree ``arch`` to a pretty-printed unicode
        XML string."""
        # remove_blank_string does not seem to work on HTMLParser, and
        # pretty-printing with lxml more or less requires stripping
        # whitespace: http://lxml.de/FAQ.html#why-doesn-t-the-pretty-print-option-reformat-my-xml-output
        # so serialize to XML, parse as XML (remove whitespace) then serialize
        # as XML (pretty print)
        arch_no_whitespace = etree.fromstring(
            etree.tostring(arch, encoding='utf-8'),
            parser=etree.XMLParser(encoding='utf-8', remove_blank_text=True))
        return etree.tostring(
            arch_no_whitespace, encoding='unicode', pretty_print=True)
def save(self, cr, uid, res_id, value, xpath=None, context=None):
""" Update a view section. The view section may embed fields to write
:param str model:
:param int res_id:
:param str xpath: valid xpath to the tag to replace
"""
res_id = int(res_id)
arch_section = html.fromstring(
value, parser=html.HTMLParser(encoding='utf-8'))
if xpath is None:
# value is an embedded field on its own, not a view section
self.save_embedded_field(cr, uid, arch_section, context=context)
return
for el in self.extract_embedded_fields(cr, uid, arch_section, context=context):
self.save_embedded_field(cr, uid, el, context=context)
# transform embedded field back to t-field
el.getparent().replace(el, self.to_field_ref(cr, uid, el, context=context))
arch = self.replace_arch_section(cr, uid, res_id, xpath, arch_section, context=context)
self.write(cr, uid, res_id, {
'arch': self._pretty_arch(arch)
}, context=context)
|
[
" view = self.browse(cr, SUPERUSER_ID, res_id, context=context)"
] | 739
|
lcc
|
python
| null |
f1012042daf639a1e2d064433991b95b1734be709802fbc1
|
|
/*
* Copyright (c) 1998, 2019, Oracle and/or its affiliates. All rights reserved.
* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
*
* This code is free software; you can redistribute it and/or modify it
* under the terms of the GNU General Public License version 2 only, as
* published by the Free Software Foundation. Oracle designates this
* particular file as subject to the "Classpath" exception as provided
* by Oracle in the LICENSE file that accompanied this code.
*
* This code is distributed in the hope that it will be useful, but WITHOUT
* ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
* FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
* version 2 for more details (a copy is included in the LICENSE file that
* accompanied this code).
*
* You should have received a copy of the GNU General Public License version
* 2 along with this work; if not, write to the Free Software Foundation,
* Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
*
* Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
* or visit www.oracle.com if you need additional information or have any
* questions.
*/
package javax.swing.plaf.basic;
import java.io.*;
import java.awt.*;
import java.net.URL;
import javax.accessibility.AccessibleContext;
import javax.swing.*;
import javax.swing.text.*;
import javax.swing.text.html.*;
import sun.swing.SwingUtilities2;
/**
* Support for providing html views for the swing components.
* This translates a simple html string to a javax.swing.text.View
* implementation that can render the html and provide the necessary
* layout semantics.
*
* @author Timothy Prinzing
* @since 1.3
*/
public class BasicHTML {
    /**
     * Create an html renderer for the given component and
     * string of html.
     *
     * @param c a component
     * @param html an HTML string
     * @return an HTML renderer
     */
    public static View createHTMLView(JComponent c, String html) {
        BasicEditorKit kit = getFactory();
        // Seed the document with the component's font and foreground so the
        // rendered html inherits the widget's default look.
        Document doc = kit.createDefaultDocument(c.getFont(),
                                                 c.getForeground());
        Object base = c.getClientProperty(documentBaseKey);
        if (base instanceof URL) {
            ((HTMLDocument)doc).setBase((URL)base);
        }
        Reader r = new StringReader(html);
        try {
            kit.read(r, doc, 0);
        } catch (Throwable e) {
            // Deliberately best-effort: malformed html is rendered as far
            // as it parsed; errors are swallowed here.
        }
        ViewFactory f = kit.getViewFactory();
        View hview = f.create(doc.getDefaultRootElement());
        View v = new Renderer(c, f, hview);
        return v;
    }
/**
* Returns the baseline for the html renderer.
*
* @param view the View to get the baseline for
* @param w the width to get the baseline for
* @param h the height to get the baseline for
* @throws IllegalArgumentException if width or height is < 0
* @return baseline or a value < 0 indicating there is no reasonable
* baseline
* @see java.awt.FontMetrics
* @see javax.swing.JComponent#getBaseline(int,int)
* @since 1.6
*/
public static int getHTMLBaseline(View view, int w, int h) {
if (w < 0 || h < 0) {
throw new IllegalArgumentException(
"Width and height must be >= 0");
}
if (view instanceof Renderer) {
return getBaseline(view.getView(0), w, h);
}
return -1;
}
    /**
     * Gets the baseline for the specified component. This digs out
     * the View client property, and if non-null the baseline is calculated
     * from it. Otherwise the baseline is the value <code>y + ascent</code>.
     */
    static int getBaseline(JComponent c, int y, int ascent,
                           int w, int h) {
        View view = (View)c.getClientProperty(BasicHTML.propertyKey);
        if (view != null) {
            int baseline = getHTMLBaseline(view, w, h);
            if (baseline < 0) {
                // No reasonable baseline; propagate the sentinel unchanged.
                return baseline;
            }
            return y + baseline;
        }
        // Plain (non-html) text: baseline is the origin plus font ascent.
        return y + ascent;
    }
/**
* Gets the baseline for the specified View.
*/
static int getBaseline(View view, int w, int h) {
if (hasParagraph(view)) {
view.setSize(w, h);
return getBaseline(view, new Rectangle(0, 0, w, h));
}
return -1;
}
    /**
     * Recursively descends the view hierarchy (skipping the invisible html
     * header when present) until a ParagraphView is reached, then computes
     * the baseline from the child's vertical alignment within its bounds.
     * Returns -1 when no baseline can be determined.
     */
    private static int getBaseline(View view, Shape bounds) {
        if (view.getViewCount() == 0) {
            return -1;
        }
        AttributeSet attributes = view.getElement().getAttributes();
        Object name = null;
        if (attributes != null) {
            name = attributes.getAttribute(StyleConstants.NameAttribute);
        }
        int index = 0;
        if (name == HTML.Tag.HTML && view.getViewCount() > 1) {
            // For html on widgets the header is not visible, skip it.
            index++;
        }
        bounds = view.getChildAllocation(index, bounds);
        if (bounds == null) {
            return -1;
        }
        View child = view.getView(index);
        if (view instanceof javax.swing.text.ParagraphView) {
            Rectangle rect;
            if (bounds instanceof Rectangle) {
                rect = (Rectangle)bounds;
            }
            else {
                rect = bounds.getBounds();
            }
            // Baseline = paragraph origin offset by the child's alignment
            // fraction of the paragraph height.
            return rect.y + (int)(rect.height *
                                  child.getAlignment(View.Y_AXIS));
        }
        return getBaseline(child, bounds);
    }
    /**
     * Returns true if the hierarchy rooted at {@code view} contains a
     * {@code ParagraphView}, following the first visible child at each
     * level (skipping the html header when present).
     */
    private static boolean hasParagraph(View view) {
        if (view instanceof javax.swing.text.ParagraphView) {
            return true;
        }
        if (view.getViewCount() == 0) {
            return false;
        }
        AttributeSet attributes = view.getElement().getAttributes();
        Object name = null;
        if (attributes != null) {
            name = attributes.getAttribute(StyleConstants.NameAttribute);
        }
        int index = 0;
        if (name == HTML.Tag.HTML && view.getViewCount() > 1) {
            // For html on widgets the header is not visible, skip it.
            index = 1;
        }
        return hasParagraph(view.getView(index));
    }
/**
* Check the given string to see if it should trigger the
* html rendering logic in a non-text component that supports
* html rendering.
*
* @param s a text
* @return {@code true} if the given string should trigger the
* html rendering logic in a non-text component
*/
public static boolean isHTMLString(String s) {
if (s != null) {
if ((s.length() >= 6) && (s.charAt(0) == '<') && (s.charAt(5) == '>')) {
String tag = s.substring(1,5);
return tag.equalsIgnoreCase(propertyKey);
}
}
return false;
}
/**
* Stash the HTML render for the given text into the client
* properties of the given JComponent. If the given text is
* <em>NOT HTML</em> the property will be cleared of any
* renderer.
* <p>
* This method is useful for ComponentUI implementations
* that are static (i.e. shared) and get their state
* entirely from the JComponent.
*
* @param c a component
* @param text a text
*/
public static void updateRenderer(JComponent c, String text) {
View value = null;
View oldValue = (View)c.getClientProperty(BasicHTML.propertyKey);
Boolean htmlDisabled = (Boolean) c.getClientProperty(htmlDisable);
if (htmlDisabled != Boolean.TRUE && BasicHTML.isHTMLString(text)) {
|
[
" value = BasicHTML.createHTMLView(c, text);"
] | 980
|
lcc
|
java
| null |
79f8206b5d5b0bfc5beeb608d3ce99dca25c4d4a1db02f09
|
|
import os
import pytest
from six import BytesIO
from ..sourcefile import SourceFile, read_script_metadata, js_meta_re, python_meta_re
def create(filename, contents=b""):
assert isinstance(contents, bytes)
return SourceFile("/", filename, "/", contents=contents)
def items(s):
item_type, items = s.manifest_items()
if item_type == "support":
return []
else:
return [(item_type, item.url) for item in items]
@pytest.mark.parametrize("rel_path", [
".gitignore",
".travis.yml",
"MANIFEST.json",
"tools/test.html",
"resources/test.html",
"common/test.html",
"support/test.html",
"css21/archive/test.html",
"work-in-progress/test.html",
"conformance-checkers/test.html",
"conformance-checkers/README.md",
"conformance-checkers/html/Makefile",
"conformance-checkers/html/test.html",
"foo/tools/test.html",
"foo/resources/test.html",
"foo/support/test.html",
"foo/test-support.html",
"css/common/test.html",
"css/CSS2/archive/test.html",
"css/work-in-progress/test.html",
])
def test_name_is_non_test(rel_path):
s = create(rel_path)
assert s.name_is_non_test or s.name_is_conformance_support
assert not s.content_is_testharness
assert items(s) == []
@pytest.mark.parametrize("rel_path", [
"foo/common/test.html",
"foo/conformance-checkers/test.html",
"foo/_certs/test.html",
"foo/css21/archive/test.html",
"foo/work-in-progress/test.html",
"foo/CSS2/archive/test.html",
"css/css21/archive/test.html",
])
def test_not_name_is_non_test(rel_path):
s = create(rel_path)
assert not (s.name_is_non_test or s.name_is_conformance_support)
# We aren't actually asserting what type of test these are, just their
# name doesn't prohibit them from being tests.
@pytest.mark.parametrize("rel_path", [
"html/test-manual.html",
"html/test-manual.xhtml",
"html/test-manual.https.html",
"html/test-manual.https.xhtml"
])
def test_name_is_manual(rel_path):
s = create(rel_path)
assert not s.name_is_non_test
assert s.name_is_manual
assert not s.content_is_testharness
assert items(s) == [("manual", "/" + rel_path)]
@pytest.mark.parametrize("rel_path", [
"html/test-visual.html",
"html/test-visual.xhtml",
])
def test_name_is_visual(rel_path):
s = create(rel_path)
assert not s.name_is_non_test
assert s.name_is_visual
assert not s.content_is_testharness
assert items(s) == [("visual", "/" + rel_path)]
@pytest.mark.parametrize("rel_path", [
"css-namespaces-3/reftest/ref-lime-1.xml",
"css21/reference/pass_if_box_ahem.html",
"css21/csswg-issues/submitted/css2.1/reference/ref-green-box-100x100.xht",
"selectors-3/selectors-empty-001-ref.xml",
"css21/text/text-indent-wrap-001-notref-block-margin.xht",
"css21/text/text-indent-wrap-001-notref-block-margin.xht",
"css21/css-e-notation-ref-1.html",
"css21/floats/floats-placement-vertical-004-ref2.xht",
"css21/box/rtl-linebreak-notref1.xht",
"css21/box/rtl-linebreak-notref2.xht",
"2dcontext/drawing-images-to-the-canvas/drawimage_html_image_5_ref.html",
"2dcontext/line-styles/lineto_ref.html",
"html/rendering/non-replaced-elements/the-fieldset-element-0/ref.html"
])
def test_name_is_reference(rel_path):
s = create(rel_path)
assert not s.name_is_non_test
assert s.name_is_reference
assert not s.content_is_testharness
assert items(s) == []
def test_worker():
s = create("html/test.worker.js")
assert not s.name_is_non_test
assert not s.name_is_manual
assert not s.name_is_visual
assert not s.name_is_multi_global
assert s.name_is_worker
assert not s.name_is_window
assert not s.name_is_reference
assert not s.content_is_testharness
item_type, items = s.manifest_items()
assert item_type == "testharness"
expected_urls = [
"/html/test.worker.html",
]
assert len(items) == len(expected_urls)
for item, url in zip(items, expected_urls):
assert item.url == url
assert item.timeout is None
def test_window():
s = create("html/test.window.js")
assert not s.name_is_non_test
assert not s.name_is_manual
assert not s.name_is_visual
assert not s.name_is_multi_global
assert not s.name_is_worker
assert s.name_is_window
assert not s.name_is_reference
assert not s.content_is_testharness
item_type, items = s.manifest_items()
assert item_type == "testharness"
expected_urls = [
"/html/test.window.html",
]
assert len(items) == len(expected_urls)
for item, url in zip(items, expected_urls):
assert item.url == url
assert item.timeout is None
def test_worker_long_timeout():
contents = b"""// META: timeout=long
importScripts('/resources/testharness.js')
test()"""
metadata = list(read_script_metadata(BytesIO(contents), js_meta_re))
assert metadata == [(b"timeout", b"long")]
s = create("html/test.worker.js", contents=contents)
assert s.name_is_worker
item_type, items = s.manifest_items()
assert item_type == "testharness"
for item in items:
assert item.timeout == "long"
def test_window_long_timeout():
contents = b"""// META: timeout=long
test()"""
metadata = list(read_script_metadata(BytesIO(contents), js_meta_re))
assert metadata == [(b"timeout", b"long")]
s = create("html/test.window.js", contents=contents)
assert s.name_is_window
item_type, items = s.manifest_items()
assert item_type == "testharness"
for item in items:
assert item.timeout == "long"
def test_python_long_timeout():
contents = b"""# META: timeout=long
"""
metadata = list(read_script_metadata(BytesIO(contents),
python_meta_re))
assert metadata == [(b"timeout", b"long")]
s = create("webdriver/test.py", contents=contents)
assert s.name_is_webdriver
item_type, items = s.manifest_items()
assert item_type == "wdspec"
for item in items:
assert item.timeout == "long"
def test_multi_global():
s = create("html/test.any.js")
assert not s.name_is_non_test
assert not s.name_is_manual
assert not s.name_is_visual
assert s.name_is_multi_global
assert not s.name_is_worker
assert not s.name_is_reference
assert not s.content_is_testharness
item_type, items = s.manifest_items()
assert item_type == "testharness"
expected_urls = [
"/html/test.any.html",
"/html/test.any.worker.html",
]
assert len(items) == len(expected_urls)
for item, url in zip(items, expected_urls):
assert item.url == url
assert item.timeout is None
def test_multi_global_long_timeout():
contents = b"""// META: timeout=long
importScripts('/resources/testharness.js')
test()"""
metadata = list(read_script_metadata(BytesIO(contents), js_meta_re))
assert metadata == [(b"timeout", b"long")]
s = create("html/test.any.js", contents=contents)
assert s.name_is_multi_global
item_type, items = s.manifest_items()
assert item_type == "testharness"
for item in items:
assert item.timeout == "long"
@pytest.mark.parametrize("input,expected", [
(b"""//META: foo=bar\n""", [(b"foo", b"bar")]),
(b"""// META: foo=bar\n""", [(b"foo", b"bar")]),
(b"""// META: foo=bar\n""", [(b"foo", b"bar")]),
(b"""\n// META: foo=bar\n""", []),
(b""" // META: foo=bar\n""", []),
(b"""// META: foo=bar\n// META: baz=quux\n""", [(b"foo", b"bar"), (b"baz", b"quux")]),
(b"""// META: foo=bar\n\n// META: baz=quux\n""", [(b"foo", b"bar")]),
(b"""// META: foo=bar\n// Start of the test\n// META: baz=quux\n""", [(b"foo", b"bar")]),
(b"""// META:\n""", []),
(b"""// META: foobar\n""", []),
])
def test_script_metadata(input, expected):
metadata = read_script_metadata(BytesIO(input), js_meta_re)
assert list(metadata) == expected
@pytest.mark.parametrize("ext", ["htm", "html"])
def test_testharness(ext):
content = b"<script src=/resources/testharness.js></script>"
filename = "html/test." + ext
|
[
" s = create(filename, content)"
] | 633
|
lcc
|
python
| null |
d15c99c3eb39bc99358e7e6768ef9b03d171255c2c8c033a
|
|
using System;
using System.Collections.Generic;
using System.Linq;
using System.Reflection;
using NHibernate.Cfg.MappingSchema;
using NHibernate.Persister.Collection;
using NHibernate.UserTypes;
namespace NHibernate.Mapping.ByCode.Impl
{
public class MapMapper : IMapPropertiesMapper
{
private readonly IAccessorPropertyMapper entityPropertyMapper;
private readonly KeyMapper keyMapper;
private readonly HbmMapping mapDoc;
private readonly HbmMap mapping;
private ICacheMapper cacheMapper;
public MapMapper(System.Type ownerType, System.Type keyType, System.Type valueType, HbmMap mapping, HbmMapping mapDoc)
: this(ownerType, keyType, valueType, new AccessorPropertyMapper(ownerType, mapping.Name, x => mapping.access = x), mapping, mapDoc) {}
public MapMapper(System.Type ownerType, System.Type keyType, System.Type valueType, IAccessorPropertyMapper accessorMapper, HbmMap mapping, HbmMapping mapDoc)
{
if (ownerType == null)
{
throw new ArgumentNullException("ownerType");
}
if (keyType == null)
{
throw new ArgumentNullException("keyType");
}
if (valueType == null)
{
throw new ArgumentNullException("valueType");
}
if (mapping == null)
{
throw new ArgumentNullException("mapping");
}
OwnerType = ownerType;
KeyType = keyType;
ValueType = valueType;
this.mapping = mapping;
this.mapDoc = mapDoc;
if (mapping.Key == null)
{
mapping.key = new HbmKey();
}
keyMapper = new KeyMapper(ownerType, mapping.Key);
if (KeyType.IsValueType || KeyType == typeof (string))
{
mapping.Item = new HbmMapKey {type = KeyType.GetNhTypeName()};
}
else
{
mapping.Item = new HbmMapKeyManyToMany {@class = KeyType.GetShortClassName(mapDoc)};
}
entityPropertyMapper = accessorMapper;
}
public System.Type OwnerType { get; private set; }
public System.Type KeyType { get; private set; }
public System.Type ValueType { get; private set; }
#region Implementation of IMapPropertiesMapper
public void Inverse(bool value)
{
mapping.inverse = value;
}
public void Mutable(bool value)
{
mapping.mutable = value;
}
public void Where(string sqlWhereClause)
{
mapping.where = sqlWhereClause;
}
public void BatchSize(int value)
{
if (value > 0)
{
mapping.batchsize = value;
mapping.batchsizeSpecified = true;
}
else
{
mapping.batchsize = 0;
mapping.batchsizeSpecified = false;
}
}
public void Lazy(CollectionLazy collectionLazy)
{
mapping.lazySpecified = true;
switch (collectionLazy)
{
case CollectionLazy.Lazy:
mapping.lazy = HbmCollectionLazy.True;
break;
case CollectionLazy.NoLazy:
mapping.lazy = HbmCollectionLazy.False;
break;
case CollectionLazy.Extra:
mapping.lazy = HbmCollectionLazy.Extra;
break;
}
}
public void Key(Action<IKeyMapper> keyMapping)
{
keyMapping(keyMapper);
}
public void OrderBy(MemberInfo property)
{
// TODO: read the mapping of the element to know the column of the property (second-pass)
mapping.orderby = property.Name;
}
public void OrderBy(string sqlOrderByClause)
{
mapping.orderby = sqlOrderByClause;
}
public void Sort()
{
mapping.sort = "natural";
}
public void Sort<TComparer>() {}
public void Cascade(Cascade cascadeStyle)
{
mapping.cascade = cascadeStyle.ToCascadeString();
}
public void Type<TCollection>() where TCollection : IUserCollectionType
{
mapping.collectiontype = typeof (TCollection).AssemblyQualifiedName;
}
public void Type(System.Type collectionType)
{
if (collectionType == null)
{
throw new ArgumentNullException("collectionType");
}
if (!typeof (IUserCollectionType).IsAssignableFrom(collectionType))
{
throw new ArgumentOutOfRangeException("collectionType",
string.Format(
"The collection type should be an implementation of IUserCollectionType.({0})",
collectionType));
}
mapping.collectiontype = collectionType.AssemblyQualifiedName;
}
public void Type(string collectionType)
{
mapping.collectiontype = collectionType ?? throw new ArgumentNullException(nameof(collectionType));
}
public void Table(string tableName)
{
mapping.table = tableName;
}
public void Catalog(string catalogName)
{
mapping.catalog = catalogName;
}
public void Schema(string schemaName)
{
mapping.schema = schemaName;
}
public void Cache(Action<ICacheMapper> cacheMapping)
{
if (cacheMapper == null)
{
var hbmCache = new HbmCache();
mapping.cache = hbmCache;
cacheMapper = new CacheMapper(hbmCache);
}
cacheMapping(cacheMapper);
}
public void Filter(string filterName, Action<IFilterMapper> filterMapping)
{
if (filterMapping == null)
{
filterMapping = x => { };
}
var hbmFilter = new HbmFilter();
var filterMapper = new FilterMapper(filterName, hbmFilter);
filterMapping(filterMapper);
Dictionary<string, HbmFilter> filters = mapping.filter != null ? mapping.filter.ToDictionary(f => f.name, f => f) : new Dictionary<string, HbmFilter>(1);
filters[filterName] = hbmFilter;
mapping.filter = filters.Values.ToArray();
}
public void Fetch(CollectionFetchMode fetchMode)
{
if (fetchMode == null)
{
return;
}
mapping.fetch = fetchMode.ToHbm();
mapping.fetchSpecified = mapping.fetch != HbmCollectionFetchMode.Select;
}
public void Persister(System.Type persister)
{
if (persister == null)
{
throw new ArgumentNullException("persister");
}
if (!typeof(ICollectionPersister).IsAssignableFrom(persister))
{
throw new ArgumentOutOfRangeException("persister", "Expected type implementing ICollectionPersister.");
}
mapping.persister = persister.AssemblyQualifiedName;
}
#endregion
#region Implementation of IEntityPropertyMapper
public void Access(Accessor accessor)
{
entityPropertyMapper.Access(accessor);
}
public void Access(System.Type accessorType)
{
entityPropertyMapper.Access(accessorType);
}
public void OptimisticLock(bool takeInConsiderationForOptimisticLock)
{
mapping.optimisticlock = takeInConsiderationForOptimisticLock;
}
#endregion
#region IMapPropertiesMapper Members
public void Loader(string namedQueryReference)
{
if (mapping.SqlLoader == null)
{
|
[
"\t\t\t\tmapping.loader = new HbmLoader();"
] | 620
|
lcc
|
csharp
| null |
941de4a30042444c37ae61552e7f11d288717b818793b5d1
|
|
using System;
namespace Server.Factions
{
public class FactionState
{
private Faction m_Faction;
private Mobile m_Commander;
private int m_Tithe;
private int m_Silver;
private PlayerStateCollection m_Members;
private Election m_Election;
private FactionItemCollection m_FactionItems;
private FactionTrapCollection m_FactionTraps;
private const int BroadcastsPerPeriod = 2;
private static readonly TimeSpan BroadcastPeriod = TimeSpan.FromHours( 1.0 );
private DateTime[] m_LastBroadcasts = new DateTime[BroadcastsPerPeriod];
public bool FactionMessageReady
{
get
{
for ( int i = 0; i < m_LastBroadcasts.Length; ++i )
{
if ( DateTime.UtcNow >= ( m_LastBroadcasts[i] + BroadcastPeriod ) )
{
return true;
}
}
return false;
}
}
public void RegisterBroadcast()
{
for ( int i = 0; i < m_LastBroadcasts.Length; ++i )
{
if ( DateTime.UtcNow >= ( m_LastBroadcasts[i] + BroadcastPeriod ) )
{
m_LastBroadcasts[i] = DateTime.UtcNow;
break;
}
}
}
public FactionItemCollection FactionItems { get { return m_FactionItems; } set { m_FactionItems = value; } }
public FactionTrapCollection Traps { get { return m_FactionTraps; } set { m_FactionTraps = value; } }
public Election Election { get { return m_Election; } set { m_Election = value; } }
public Mobile Commander
{
get { return m_Commander; }
set
{
if ( m_Commander != null )
{
m_Commander.InvalidateProperties();
}
m_Commander = value;
if ( m_Commander != null )
{
m_Commander.SendLocalizedMessage( 1042227 ); // You have been elected Commander of your faction
m_Commander.InvalidateProperties();
PlayerState pl = PlayerState.Find( m_Commander );
if ( pl != null && pl.Finance != null )
{
pl.Finance.Finance = null;
}
if ( pl != null && pl.Sheriff != null )
{
pl.Sheriff.Sheriff = null;
}
}
}
}
public int Tithe { get { return m_Tithe; } set { m_Tithe = value; } }
public int Silver { get { return m_Silver; } set { m_Silver = value; } }
public PlayerStateCollection Members { get { return m_Members; } set { m_Members = value; } }
public FactionState( Faction faction )
{
m_Faction = faction;
m_Tithe = 50;
m_Members = new PlayerStateCollection();
m_Election = new Election( faction );
m_FactionItems = new FactionItemCollection();
m_FactionTraps = new FactionTrapCollection();
}
public FactionState( GenericReader reader )
{
int version = reader.ReadEncodedInt();
switch ( version )
{
case 4:
{
int count = reader.ReadEncodedInt();
for ( int i = 0; i < count; ++i )
{
DateTime time = reader.ReadDateTime();
if ( i < m_LastBroadcasts.Length )
{
m_LastBroadcasts[i] = time;
}
}
goto case 3;
}
case 3:
case 2:
case 1:
{
m_Election = new Election( reader );
goto case 0;
}
case 0:
{
m_Faction = Faction.ReadReference( reader );
m_Commander = reader.ReadMobile();
if ( version < 4 )
{
DateTime time = reader.ReadDateTime();
if ( m_LastBroadcasts.Length > 0 )
{
m_LastBroadcasts[0] = time;
}
}
m_Tithe = reader.ReadEncodedInt();
m_Silver = reader.ReadEncodedInt();
int memberCount = reader.ReadEncodedInt();
m_Members = new PlayerStateCollection();
for ( int i = 0; i < memberCount; ++i )
{
PlayerState pl = new PlayerState( reader, m_Faction, m_Members );
if ( pl.Mobile != null )
{
m_Members.Add( pl );
}
}
m_Faction.State = this;
m_Faction.UpdateRanks();
m_FactionItems = new FactionItemCollection();
if ( version >= 2 )
{
int factionItemCount = reader.ReadEncodedInt();
for ( int i = 0; i < factionItemCount; ++i )
{
FactionItem factionItem = new FactionItem( reader, m_Faction );
if ( !factionItem.HasExpired )
{
factionItem.Attach();
}
else
{
Timer.DelayCall( TimeSpan.Zero, new TimerCallback( factionItem.Detach ) ); // sandbox detachment
}
}
}
m_FactionTraps = new FactionTrapCollection();
if ( version >= 3 )
{
int factionTrapCount = reader.ReadEncodedInt();
for ( int i = 0; i < factionTrapCount; ++i )
{
BaseFactionTrap trap = reader.ReadItem() as BaseFactionTrap;
if ( trap != null && !trap.CheckDecay() )
{
m_FactionTraps.Add( trap );
}
}
}
break;
}
}
if ( version < 1 )
{
m_Election = new Election( m_Faction );
}
}
public void Serialize( GenericWriter writer )
{
writer.WriteEncodedInt( (int) 4 ); // version
writer.WriteEncodedInt( (int) m_LastBroadcasts.Length );
for ( int i = 0; i < m_LastBroadcasts.Length; ++i )
{
writer.Write( (DateTime) m_LastBroadcasts[i] );
}
m_Election.Serialize( writer );
Faction.WriteReference( writer, m_Faction );
writer.Write( (Mobile) m_Commander );
writer.WriteEncodedInt( (int) m_Tithe );
writer.WriteEncodedInt( (int) m_Silver );
|
[
"\t\t\twriter.WriteEncodedInt( (int) m_Members.Count );"
] | 670
|
lcc
|
csharp
| null |
8beb1da6b11326207584621c562d0cde00dc7c7e21ffd3f0
|
|
/*
* Copyright (c) 2011, 2014, Oracle and/or its affiliates. All rights reserved.
* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
*
* This code is free software; you can redistribute it and/or modify it
* under the terms of the GNU General Public License version 2 only, as
* published by the Free Software Foundation.
*
* This code is distributed in the hope that it will be useful, but WITHOUT
* ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
* FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
* version 2 for more details (a copy is included in the LICENSE file that
* accompanied this code).
*
* You should have received a copy of the GNU General Public License version
* 2 along with this work; if not, write to the Free Software Foundation,
* Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
*
* Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
* or visit www.oracle.com if you need additional information or have any
* questions.
*/
package com.oracle.graal.phases.common.inlining.walker;
import static com.oracle.graal.compiler.common.GraalOptions.Intrinsify;
import static com.oracle.graal.compiler.common.GraalOptions.MaximumRecursiveInlining;
import static com.oracle.graal.compiler.common.GraalOptions.MegamorphicInliningMinMethodProbability;
import static com.oracle.graal.compiler.common.GraalOptions.OptCanonicalizer;
import java.util.ArrayDeque;
import java.util.ArrayList;
import java.util.BitSet;
import java.util.Collection;
import java.util.Iterator;
import java.util.List;
import java.util.Set;
import jdk.vm.ci.code.BailoutException;
import jdk.vm.ci.common.JVMCIError;
import jdk.vm.ci.meta.Assumptions.AssumptionResult;
import jdk.vm.ci.meta.JavaTypeProfile;
import jdk.vm.ci.meta.ResolvedJavaMethod;
import jdk.vm.ci.meta.ResolvedJavaType;
import com.oracle.graal.compiler.common.type.ObjectStamp;
import com.oracle.graal.debug.Debug;
import com.oracle.graal.debug.DebugMetric;
import com.oracle.graal.graph.Graph;
import com.oracle.graal.graph.Node;
import com.oracle.graal.nodes.CallTargetNode;
import com.oracle.graal.nodes.Invoke;
import com.oracle.graal.nodes.ParameterNode;
import com.oracle.graal.nodes.StructuredGraph;
import com.oracle.graal.nodes.ValueNode;
import com.oracle.graal.nodes.java.AbstractNewObjectNode;
import com.oracle.graal.nodes.java.MethodCallTargetNode;
import com.oracle.graal.nodes.virtual.AllocatedObjectNode;
import com.oracle.graal.nodes.virtual.VirtualObjectNode;
import com.oracle.graal.phases.OptimisticOptimizations;
import com.oracle.graal.phases.common.CanonicalizerPhase;
import com.oracle.graal.phases.common.inlining.InliningUtil;
import com.oracle.graal.phases.common.inlining.info.AssumptionInlineInfo;
import com.oracle.graal.phases.common.inlining.info.ExactInlineInfo;
import com.oracle.graal.phases.common.inlining.info.InlineInfo;
import com.oracle.graal.phases.common.inlining.info.MultiTypeGuardInlineInfo;
import com.oracle.graal.phases.common.inlining.info.TypeGuardInlineInfo;
import com.oracle.graal.phases.common.inlining.info.elem.Inlineable;
import com.oracle.graal.phases.common.inlining.info.elem.InlineableGraph;
import com.oracle.graal.phases.common.inlining.policy.InliningPolicy;
import com.oracle.graal.phases.tiers.HighTierContext;
import com.oracle.graal.phases.util.Providers;
/**
* <p>
* The space of inlining decisions is explored depth-first with the help of a stack realized by
* {@link InliningData}. At any point in time, the topmost element of that stack consists of:
* <ul>
* <li>the callsite under consideration is tracked as a {@link MethodInvocation}.</li>
* <li>
* one or more {@link CallsiteHolder}s, all of them associated to the callsite above. Why more than
* one? Depending on the type-profile for the receiver more than one concrete method may be feasible
* target.</li>
* </ul>
* </p>
*
* <p>
* The bottom element in the stack consists of:
* <ul>
* <li>
* a single {@link MethodInvocation} (the
* {@link com.oracle.graal.phases.common.inlining.walker.MethodInvocation#isRoot root} one, ie the
* unknown caller of the root graph)</li>
* <li>
* a single {@link CallsiteHolder} (the root one, for the method on which inlining was called)</li>
* </ul>
* </p>
*
* @see #moveForward()
*/
public class InliningData {
// Metrics
private static final DebugMetric metricInliningPerformed = Debug.metric("InliningPerformed");
private static final DebugMetric metricInliningRuns = Debug.metric("InliningRuns");
private static final DebugMetric metricInliningConsidered = Debug.metric("InliningConsidered");
/**
* Call hierarchy from outer most call (i.e., compilation unit) to inner most callee.
*/
private final ArrayDeque<CallsiteHolder> graphQueue = new ArrayDeque<>();
private final ArrayDeque<MethodInvocation> invocationQueue = new ArrayDeque<>();
private final HighTierContext context;
private final int maxMethodPerInlining;
private final CanonicalizerPhase canonicalizer;
private final InliningPolicy inliningPolicy;
private final StructuredGraph rootGraph;
private int maxGraphs;
public InliningData(StructuredGraph rootGraph, HighTierContext context, int maxMethodPerInlining, CanonicalizerPhase canonicalizer, InliningPolicy inliningPolicy) {
assert rootGraph != null;
this.context = context;
this.maxMethodPerInlining = maxMethodPerInlining;
this.canonicalizer = canonicalizer;
this.inliningPolicy = inliningPolicy;
this.maxGraphs = 1;
this.rootGraph = rootGraph;
invocationQueue.push(new MethodInvocation(null, 1.0, 1.0, null));
graphQueue.push(new CallsiteHolderExplorable(rootGraph, 1.0, 1.0, null));
}
public static boolean isFreshInstantiation(ValueNode arg) {
return (arg instanceof AbstractNewObjectNode) || (arg instanceof AllocatedObjectNode) || (arg instanceof VirtualObjectNode);
}
private String checkTargetConditionsHelper(ResolvedJavaMethod method, int invokeBci) {
if (method == null) {
return "the method is not resolved";
} else if (method.isNative() && (!Intrinsify.getValue() || !InliningUtil.canIntrinsify(context.getReplacements(), method, invokeBci))) {
return "it is a non-intrinsic native method";
} else if (method.isAbstract()) {
return "it is an abstract method";
} else if (!method.getDeclaringClass().isInitialized()) {
return "the method's class is not initialized";
} else if (!method.canBeInlined()) {
return "it is marked non-inlinable";
} else if (countRecursiveInlining(method) > MaximumRecursiveInlining.getValue()) {
return "it exceeds the maximum recursive inlining depth";
} else if (new OptimisticOptimizations(rootGraph.getProfilingInfo(method)).lessOptimisticThan(context.getOptimisticOptimizations())) {
return "the callee uses less optimistic optimizations than caller";
} else {
return null;
}
}
private boolean checkTargetConditions(Invoke invoke, ResolvedJavaMethod method) {
final String failureMessage = checkTargetConditionsHelper(method, invoke.bci());
if (failureMessage == null) {
return true;
} else {
InliningUtil.logNotInlined(invoke, inliningDepth(), method, failureMessage);
return false;
}
}
/**
* Determines if inlining is possible at the given invoke node.
*
* @param invoke the invoke that should be inlined
* @return an instance of InlineInfo, or null if no inlining is possible at the given invoke
*/
private InlineInfo getInlineInfo(Invoke invoke) {
final String failureMessage = InliningUtil.checkInvokeConditions(invoke);
if (failureMessage != null) {
InliningUtil.logNotInlinedMethod(invoke, failureMessage);
return null;
}
MethodCallTargetNode callTarget = (MethodCallTargetNode) invoke.callTarget();
ResolvedJavaMethod targetMethod = callTarget.targetMethod();
if (callTarget.invokeKind() == CallTargetNode.InvokeKind.Special || targetMethod.canBeStaticallyBound()) {
return getExactInlineInfo(invoke, targetMethod);
}
assert callTarget.invokeKind().isIndirect();
ResolvedJavaType holder = targetMethod.getDeclaringClass();
if (!(callTarget.receiver().stamp() instanceof ObjectStamp)) {
return null;
}
ObjectStamp receiverStamp = (ObjectStamp) callTarget.receiver().stamp();
if (receiverStamp.alwaysNull()) {
// Don't inline if receiver is known to be null
return null;
}
ResolvedJavaType contextType = invoke.getContextType();
if (receiverStamp.type() != null) {
// the invoke target might be more specific than the holder (happens after inlining:
// parameters lose their declared type...)
ResolvedJavaType receiverType = receiverStamp.type();
if (receiverType != null && holder.isAssignableFrom(receiverType)) {
holder = receiverType;
if (receiverStamp.isExactType()) {
assert targetMethod.getDeclaringClass().isAssignableFrom(holder) : holder + " subtype of " + targetMethod.getDeclaringClass() + " for " + targetMethod;
ResolvedJavaMethod resolvedMethod = holder.resolveConcreteMethod(targetMethod, contextType);
if (resolvedMethod != null) {
return getExactInlineInfo(invoke, resolvedMethod);
}
}
}
}
if (holder.isArray()) {
// arrays can be treated as Objects
ResolvedJavaMethod resolvedMethod = holder.resolveConcreteMethod(targetMethod, contextType);
if (resolvedMethod != null) {
return getExactInlineInfo(invoke, resolvedMethod);
}
}
if (callTarget.graph().getAssumptions() != null) {
AssumptionResult<ResolvedJavaType> leafConcreteSubtype = holder.findLeafConcreteSubtype();
if (leafConcreteSubtype != null) {
ResolvedJavaMethod resolvedMethod = leafConcreteSubtype.getResult().resolveConcreteMethod(targetMethod, contextType);
if (resolvedMethod != null) {
return getAssumptionInlineInfo(invoke, resolvedMethod, leafConcreteSubtype);
}
}
AssumptionResult<ResolvedJavaMethod> concrete = holder.findUniqueConcreteMethod(targetMethod);
if (concrete != null) {
return getAssumptionInlineInfo(invoke, concrete.getResult(), concrete);
}
}
// type check based inlining
return getTypeCheckedInlineInfo(invoke, targetMethod);
}
private InlineInfo getTypeCheckedInlineInfo(Invoke invoke, ResolvedJavaMethod targetMethod) {
JavaTypeProfile typeProfile = ((MethodCallTargetNode) invoke.callTarget()).getProfile();
if (typeProfile == null) {
InliningUtil.logNotInlined(invoke, inliningDepth(), targetMethod, "no type profile exists");
return null;
}
JavaTypeProfile.ProfiledType[] ptypes = typeProfile.getTypes();
if (ptypes == null || ptypes.length <= 0) {
InliningUtil.logNotInlined(invoke, inliningDepth(), targetMethod, "no types in profile");
return null;
}
ResolvedJavaType contextType = invoke.getContextType();
double notRecordedTypeProbability = typeProfile.getNotRecordedProbability();
final OptimisticOptimizations optimisticOpts = context.getOptimisticOptimizations();
if (ptypes.length == 1 && notRecordedTypeProbability == 0) {
if (!optimisticOpts.inlineMonomorphicCalls()) {
InliningUtil.logNotInlined(invoke, inliningDepth(), targetMethod, "inlining monomorphic calls is disabled");
return null;
}
ResolvedJavaType type = ptypes[0].getType();
assert type.isArray() || type.isConcrete();
ResolvedJavaMethod concrete = type.resolveConcreteMethod(targetMethod, contextType);
if (!checkTargetConditions(invoke, concrete)) {
return null;
}
return new TypeGuardInlineInfo(invoke, concrete, type);
} else {
invoke.setPolymorphic(true);
if (!optimisticOpts.inlinePolymorphicCalls() && notRecordedTypeProbability == 0) {
InliningUtil.logNotInlinedInvoke(invoke, inliningDepth(), targetMethod, "inlining polymorphic calls is disabled (%d types)", ptypes.length);
return null;
}
if (!optimisticOpts.inlineMegamorphicCalls() && notRecordedTypeProbability > 0) {
// due to filtering impossible types, notRecordedTypeProbability can be > 0 although
// the number of types is lower than what can be recorded in a type profile
InliningUtil.logNotInlinedInvoke(invoke, inliningDepth(), targetMethod, "inlining megamorphic calls is disabled (%d types, %f %% not recorded types)", ptypes.length,
notRecordedTypeProbability * 100);
return null;
}
// Find unique methods and their probabilities.
ArrayList<ResolvedJavaMethod> concreteMethods = new ArrayList<>();
ArrayList<Double> concreteMethodsProbabilities = new ArrayList<>();
for (int i = 0; i < ptypes.length; i++) {
ResolvedJavaMethod concrete = ptypes[i].getType().resolveConcreteMethod(targetMethod, contextType);
if (concrete == null) {
InliningUtil.logNotInlined(invoke, inliningDepth(), targetMethod, "could not resolve method");
return null;
}
int index = concreteMethods.indexOf(concrete);
double curProbability = ptypes[i].getProbability();
if (index < 0) {
index = concreteMethods.size();
concreteMethods.add(concrete);
concreteMethodsProbabilities.add(curProbability);
} else {
concreteMethodsProbabilities.set(index, concreteMethodsProbabilities.get(index) + curProbability);
}
}
// Clear methods that fall below the threshold.
if (notRecordedTypeProbability > 0) {
ArrayList<ResolvedJavaMethod> newConcreteMethods = new ArrayList<>();
ArrayList<Double> newConcreteMethodsProbabilities = new ArrayList<>();
for (int i = 0; i < concreteMethods.size(); ++i) {
if (concreteMethodsProbabilities.get(i) >= MegamorphicInliningMinMethodProbability.getValue()) {
newConcreteMethods.add(concreteMethods.get(i));
newConcreteMethodsProbabilities.add(concreteMethodsProbabilities.get(i));
}
}
if (newConcreteMethods.isEmpty()) {
// No method left that is worth inlining.
InliningUtil.logNotInlinedInvoke(invoke, inliningDepth(), targetMethod, "no methods remaining after filtering less frequent methods (%d methods previously)",
concreteMethods.size());
return null;
}
concreteMethods = newConcreteMethods;
concreteMethodsProbabilities = newConcreteMethodsProbabilities;
}
if (concreteMethods.size() > maxMethodPerInlining) {
InliningUtil.logNotInlinedInvoke(invoke, inliningDepth(), targetMethod, "polymorphic call with more than %d target methods", maxMethodPerInlining);
return null;
}
// Clean out types whose methods are no longer available.
ArrayList<JavaTypeProfile.ProfiledType> usedTypes = new ArrayList<>();
ArrayList<Integer> typesToConcretes = new ArrayList<>();
for (JavaTypeProfile.ProfiledType type : ptypes) {
ResolvedJavaMethod concrete = type.getType().resolveConcreteMethod(targetMethod, contextType);
int index = concreteMethods.indexOf(concrete);
if (index == -1) {
notRecordedTypeProbability += type.getProbability();
} else {
assert type.getType().isArray() || !type.getType().isAbstract() : type + " " + concrete;
usedTypes.add(type);
typesToConcretes.add(index);
}
}
if (usedTypes.isEmpty()) {
// No type left that is worth checking for.
InliningUtil.logNotInlinedInvoke(invoke, inliningDepth(), targetMethod, "no types remaining after filtering less frequent types (%d types previously)", ptypes.length);
return null;
}
for (ResolvedJavaMethod concrete : concreteMethods) {
if (!checkTargetConditions(invoke, concrete)) {
InliningUtil.logNotInlined(invoke, inliningDepth(), targetMethod, "it is a polymorphic method call and at least one invoked method cannot be inlined");
return null;
}
}
return new MultiTypeGuardInlineInfo(invoke, concreteMethods, usedTypes, typesToConcretes, notRecordedTypeProbability);
}
}
private InlineInfo getAssumptionInlineInfo(Invoke invoke, ResolvedJavaMethod concrete, AssumptionResult<?> takenAssumption) {
assert concrete.isConcrete();
if (checkTargetConditions(invoke, concrete)) {
return new AssumptionInlineInfo(invoke, concrete, takenAssumption);
}
return null;
}
private InlineInfo getExactInlineInfo(Invoke invoke, ResolvedJavaMethod targetMethod) {
assert targetMethod.isConcrete();
if (checkTargetConditions(invoke, targetMethod)) {
return new ExactInlineInfo(invoke, targetMethod);
}
return null;
}
@SuppressWarnings("try")
private void doInline(CallsiteHolderExplorable callerCallsiteHolder, MethodInvocation calleeInvocation) {
StructuredGraph callerGraph = callerCallsiteHolder.graph();
InlineInfo calleeInfo = calleeInvocation.callee();
try {
try (Debug.Scope scope = Debug.scope("doInline", callerGraph)) {
Set<Node> canonicalizedNodes = Node.newSet();
calleeInfo.invoke().asNode().usages().snapshotTo(canonicalizedNodes);
Collection<Node> parameterUsages = calleeInfo.inline(new Providers(context));
canonicalizedNodes.addAll(parameterUsages);
metricInliningRuns.increment();
Debug.dump(callerGraph, "after %s", calleeInfo);
if (OptCanonicalizer.getValue()) {
Graph.Mark markBeforeCanonicalization = callerGraph.getMark();
canonicalizer.applyIncremental(callerGraph, context, canonicalizedNodes);
// process invokes that are possibly created during canonicalization
for (Node newNode : callerGraph.getNewNodes(markBeforeCanonicalization)) {
if (newNode instanceof Invoke) {
callerCallsiteHolder.pushInvoke((Invoke) newNode);
}
}
}
callerCallsiteHolder.computeProbabilities();
metricInliningPerformed.increment();
}
} catch (BailoutException bailout) {
throw bailout;
} catch (AssertionError | RuntimeException e) {
throw new JVMCIError(e).addContext(calleeInfo.toString());
} catch (JVMCIError e) {
throw e.addContext(calleeInfo.toString());
} catch (Throwable e) {
throw Debug.handle(e);
}
}
/**
*
* This method attempts:
* <ol>
* <li>
* to inline at the callsite given by <code>calleeInvocation</code>, where that callsite belongs
* to the {@link CallsiteHolderExplorable} at the top of the {@link #graphQueue} maintained in
* this class.</li>
* <li>
* otherwise, to devirtualize the callsite in question.</li>
* </ol>
*
* @return true iff inlining was actually performed
*/
private boolean tryToInline(MethodInvocation calleeInvocation, int inliningDepth) {
CallsiteHolderExplorable callerCallsiteHolder = (CallsiteHolderExplorable) currentGraph();
InlineInfo calleeInfo = calleeInvocation.callee();
assert callerCallsiteHolder.containsInvoke(calleeInfo.invoke());
metricInliningConsidered.increment();
if (inliningPolicy.isWorthInlining(context.getReplacements(), calleeInvocation, inliningDepth, true)) {
doInline(callerCallsiteHolder, calleeInvocation);
return true;
}
if (context.getOptimisticOptimizations().devirtualizeInvokes()) {
calleeInfo.tryToDevirtualizeInvoke(new Providers(context));
}
return false;
}
/**
* This method picks one of the callsites belonging to the current
* {@link CallsiteHolderExplorable}. Provided the callsite qualifies to be analyzed for
* inlining, this method prepares a new stack top in {@link InliningData} for such callsite,
* which comprises:
* <ul>
* <li>preparing a summary of feasible targets, ie preparing an {@link InlineInfo}</li>
* <li>based on it, preparing the stack top proper which consists of:</li>
* <ul>
* <li>one {@link MethodInvocation}</li>
* <li>a {@link CallsiteHolder} for each feasible target</li>
* </ul>
* </ul>
*
* <p>
* The thus prepared "stack top" is needed by {@link #moveForward()} to explore the space of
* inlining decisions (each decision one of: backtracking, delving, inlining).
* </p>
*
* <p>
* The {@link InlineInfo} used to get things rolling is kept around in the
* {@link MethodInvocation}, it will be needed in case of inlining, see
* {@link InlineInfo#inline(Providers)}
* </p>
*/
private void processNextInvoke() {
CallsiteHolderExplorable callsiteHolder = (CallsiteHolderExplorable) currentGraph();
Invoke invoke = callsiteHolder.popInvoke();
InlineInfo info = getInlineInfo(invoke);
if (info != null) {
info.populateInlinableElements(context, currentGraph().graph(), canonicalizer);
double invokeProbability = callsiteHolder.invokeProbability(invoke);
double invokeRelevance = callsiteHolder.invokeRelevance(invoke);
MethodInvocation methodInvocation = new MethodInvocation(info, invokeProbability, invokeRelevance, freshlyInstantiatedArguments(invoke, callsiteHolder.getFixedParams()));
pushInvocationAndGraphs(methodInvocation);
}
}
/**
* Gets the freshly instantiated arguments.
* <p>
* A freshly instantiated argument is either:
* <uL>
* <li>an {@link InliningData#isFreshInstantiation(com.oracle.graal.nodes.ValueNode)}</li>
* <li>a fixed-param, ie a {@link ParameterNode} receiving a freshly instantiated argument</li>
* </uL>
* </p>
*
* @return the positions of freshly instantiated arguments in the argument list of the
* <code>invoke</code>, or null if no such positions exist.
*/
public static BitSet freshlyInstantiatedArguments(Invoke invoke, Set<ParameterNode> fixedParams) {
assert fixedParams != null;
assert paramsAndInvokeAreInSameGraph(invoke, fixedParams);
BitSet result = null;
int argIdx = 0;
for (ValueNode arg : invoke.callTarget().arguments()) {
assert arg != null;
if (isFreshInstantiation(arg) || fixedParams.contains(arg)) {
if (result == null) {
result = new BitSet();
}
result.set(argIdx);
}
argIdx++;
}
return result;
}
private static boolean paramsAndInvokeAreInSameGraph(Invoke invoke, Set<ParameterNode> fixedParams) {
if (fixedParams.isEmpty()) {
return true;
}
for (ParameterNode p : fixedParams) {
if (p.graph() != invoke.asNode().graph()) {
return false;
}
}
return true;
}
public int graphCount() {
return graphQueue.size();
}
public boolean hasUnprocessedGraphs() {
return !graphQueue.isEmpty();
}
private CallsiteHolder currentGraph() {
return graphQueue.peek();
}
private void popGraph() {
graphQueue.pop();
assert graphQueue.size() <= maxGraphs;
}
private void popGraphs(int count) {
assert count >= 0;
for (int i = 0; i < count; i++) {
graphQueue.pop();
}
}
private static final Object[] NO_CONTEXT = {};
/**
* Gets the call hierarchy of this inlining from outer most call to inner most callee.
*/
private Object[] inliningContext() {
if (!Debug.isDumpEnabled()) {
return NO_CONTEXT;
}
Object[] result = new Object[graphQueue.size()];
int i = 0;
for (CallsiteHolder g : graphQueue) {
result[i++] = g.method();
}
return result;
}
private MethodInvocation currentInvocation() {
return invocationQueue.peekFirst();
}
private void pushInvocationAndGraphs(MethodInvocation methodInvocation) {
invocationQueue.addFirst(methodInvocation);
InlineInfo info = methodInvocation.callee();
maxGraphs += info.numberOfMethods();
assert graphQueue.size() <= maxGraphs;
for (int i = 0; i < info.numberOfMethods(); i++) {
CallsiteHolder ch = methodInvocation.buildCallsiteHolderForElement(i);
assert !contains(ch.graph());
graphQueue.push(ch);
assert graphQueue.size() <= maxGraphs;
}
}
private void popInvocation() {
maxGraphs -= invocationQueue.peekFirst().callee().numberOfMethods();
|
[
" assert graphQueue.size() <= maxGraphs;"
] | 2,259
|
lcc
|
java
| null |
67ef14339142182591850f1edd39a2ecbd4e4b7e0f37c376
|
|
#!/usr/bin/env python2
from cfme.utils.conf import docker as docker_conf
from cfme.utils.net import random_port, my_ip_address
import argparse
import fauxfactory
import requests
import os
import os.path
import docker
import re
import subprocess
import sys
import yaml
from six.moves.urllib.parse import urlsplit
def _dgci(d, key):
# dgci = dict get case-insensitive
keymap = {k.lower(): k for k in d.keys()}
return d.get(keymap[key.lower()])
def _name(docker_info):
return _dgci(docker_info, 'name').strip('/')
if os.getenv("DOCKER_MACHINE_NAME", "None") == "None":
dc = docker.Client(base_url='unix://var/run/docker.sock',
version='1.12',
timeout=10)
else:
from docker.utils import kwargs_from_env
dc = docker.Client(version='1.12',
timeout=10,
**kwargs_from_env(assert_hostname=False))
class DockerInstance(object):
def process_bindings(self, bindings):
self.port_bindings = {}
self.ports = []
for bind in bindings:
self.port_bindings[bindings[bind][0]] = bindings[bind][1]
print(" {}: {}".format(bind, bindings[bind][1]))
self.ports.append(bindings[bind][1])
def wait(self):
if not self.dry_run:
dc.wait(self.container_id)
else:
print("Waiting for container")
def stop(self):
if not self.dry_run:
dc.stop(self.container_id)
else:
print("Stopping container")
def remove(self):
if not self.dry_run:
dc.remove_container(self.container_id, v=True)
else:
print("Removing container")
def kill(self):
if not self.dry_run:
dc.kill(self.container_id)
else:
print("Killing container")
class SeleniumDocker(DockerInstance):
def __init__(self, bindings, image, dry_run=False):
self.dry_run = dry_run
sel_name = fauxfactory.gen_alphanumeric(8)
if not self.dry_run:
sel_create_info = dc.create_container(image, tty=True, name=sel_name)
self.container_id = _dgci(sel_create_info, 'id')
sel_container_info = dc.inspect_container(self.container_id)
self.sel_name = _name(sel_container_info)
else:
self.sel_name = "SEL_FF_CHROME_TEST"
self.process_bindings(bindings)
def run(self):
if not self.dry_run:
dc.start(self.container_id, privileged=True, port_bindings=self.port_bindings)
else:
print("Dry run running sel_ff_chrome")
class PytestDocker(DockerInstance):
def __init__(self, name, bindings, env, log_path, links, pytest_con, artifactor_dir,
dry_run=False):
self.dry_run = dry_run
self.links = links
self.log_path = log_path
self.artifactor_dir = artifactor_dir
self.process_bindings(bindings)
if not self.dry_run:
pt_name = name
pt_create_info = dc.create_container(pytest_con, tty=True,
name=pt_name, environment=env,
command='sh /setup.sh',
volumes=[artifactor_dir],
ports=self.ports)
self.container_id = _dgci(pt_create_info, 'id')
pt_container_info = dc.inspect_container(self.container_id)
pt_name = _name(pt_container_info)
def run(self):
if not self.dry_run:
dc.start(self.container_id, privileged=True, links=self.links,
binds={self.log_path: {'bind': self.artifactor_dir, 'ro': False}},
port_bindings=self.port_bindings)
else:
print("Dry run running pytest")
class DockerBot(object):
def __init__(self, **args):
links = []
self.args = args
self.base_branch = 'master'
self.validate_args()
self.display_banner()
self.process_appliance()
self.cache_files()
self.create_pytest_command()
if not self.args['use_wharf']:
self.sel_vnc_port = random_port()
sel = SeleniumDocker(bindings={'VNC_PORT': (5999, self.sel_vnc_port)},
image=self.args['selff'], dry_run=self.args['dry_run'])
sel.run()
sel_container_name = sel.sel_name
links = [(sel_container_name, 'selff')]
self.pytest_name = self.args['test_id']
self.create_pytest_envvars()
self.handle_pr()
self.log_path = self.create_log_path()
self.pytest_bindings = self.create_pytest_bindings()
if self.args['dry_run']:
for i in self.env_details:
print('export {}="{}"'.format(i, self.env_details[i]))
print(self.env_details)
pytest = PytestDocker(name=self.pytest_name, bindings=self.pytest_bindings,
env=self.env_details, log_path=self.log_path,
links=links,
pytest_con=self.args['pytest_con'],
artifactor_dir=self.args['artifactor_dir'],
dry_run=self.args['dry_run'])
pytest.run()
if not self.args['nowait']:
self.handle_watch()
if self.args['dry_run']:
with open(os.path.join(self.log_path, 'setup.txt'), "w") as f:
f.write("finshed")
try:
pytest.wait()
except KeyboardInterrupt:
print(" TEST INTERRUPTED....KILLING ALL THE THINGS")
pass
pytest.kill()
pytest.remove()
if not self.args['use_wharf']:
sel.kill()
sel.remove()
self.handle_output()
def cache_files(self):
if self.args['pr']:
self.modified_files = self.find_files_by_pr(self.args['pr'])
if self.requirements_update:
self.args['update_pip'] = True
def get_base_branch(self, pr):
token = self.args['gh_token']
owner = self.args['gh_owner']
repo = self.args['gh_repo']
if token:
headers = {'Authorization': 'token {}'.format(token)}
r = requests.get(
'https://api.github.com/repos/{}/{}/pulls/{}'.format(owner, repo, pr),
headers=headers)
return r.json()['base']['ref']
def get_dev_branch(self, pr=None):
token = self.args['gh_token']
owner = self.args['gh_dev_owner']
repo = self.args['gh_dev_repo']
if token:
headers = {'Authorization': 'token {}'.format(token)}
r = requests.get(
'https://api.github.com/repos/{}/{}/pulls/{}'.format(owner, repo, pr),
headers=headers)
user, user_branch = r.json()['head']['label'].split(":")
return "https://github.com/{}/{}.git".format(user, repo), user_branch
def get_pr_metadata(self, pr=None):
token = self.args['gh_token']
owner = self.args['gh_owner']
repo = self.args['gh_repo']
if token:
headers = {'Authorization': 'token {}'.format(token)}
r = requests.get(
'https://api.github.com/repos/{}/{}/pulls/{}'.format(owner, repo, pr),
headers=headers)
body = r.json()['body'] or ""
metadata = re.findall("{{(.*?)}}", body)
if not metadata:
return {}
else:
ydata = yaml.safe_load(metadata[0])
return ydata
def find_files_by_pr(self, pr=None):
self.requirements_update = False
files = []
token = self.args['gh_token']
owner = self.args['gh_owner']
repo = self.args['gh_repo']
if token:
headers = {'Authorization': 'token {}'.format(token)}
page = 1
while True:
r = requests.get(
'https://api.github.com/repos/{}/{}/pulls/{}/files?page={}'.format(
owner, repo, pr, page),
headers=headers)
try:
if not r.json():
break
for filen in r.json():
if filen['status'] != "deleted" and filen['status'] != "removed":
if filen['filename'].startswith('cfme/tests') or \
filen['filename'].startswith('utils/tests'):
files.append(filen['filename'])
if filen['filename'].endswith('requirements/frozen.txt'):
self.requirements_update = True
except:
return None
page += 1
return files
def check_arg(self, name, default):
self.args[name] = self.args.get(name)
if not self.args[name]:
self.args[name] = docker_conf.get(name, default)
def validate_args(self):
ec = 0
appliance = self.args.get('appliance', None)
if self.args.get('appliance_name', None) and not appliance:
self.args['appliance'] = docker_conf['appliances'][self.args['appliance_name']]
self.check_arg('nowait', False)
self.check_arg('banner', False)
self.check_arg('watch', False)
self.check_arg('output', True)
self.check_arg('dry_run', False)
self.check_arg('server_ip', None)
if not self.args['server_ip']:
self.args['server_ip'] = my_ip_address()
self.check_arg('sprout', False)
self.check_arg('provision_appliance', False)
if self.args['provision_appliance']:
if not self.args['provision_template'] or not self.args['provision_provider'] or \
not self.args['provision_vm_name']:
print("You don't have all the required options to provision an appliance")
ec += 1
self.check_arg('sprout_stream', None)
if self.args['sprout'] and not self.args['sprout_stream']:
print("You need to supply a stream for sprout")
ec += 1
self.check_arg('appliance_name', None)
self.check_arg('appliance', None)
if not self.args['appliance_name'] != self.args['appliance'] and \
not self.args['provision_appliance'] and not self.args['sprout']:
print("You must supply either an appliance OR an appliance name from config")
ec += 1
self.check_arg('branch', 'origin/master')
self.check_arg('pr', None)
self.check_arg('dev_pr', None)
self.check_arg('cfme_repo', None)
self.check_arg('cfme_repo_dir', '/cfme_tests_te')
self.check_arg('cfme_cred_repo', None)
self.check_arg('cfme_cred_repo_dir', '/cfme-qe-yamls')
self.check_arg('dev_repo', None)
if not self.args['cfme_repo']:
print("You must supply a CFME REPO")
ec += 1
if not self.args['cfme_cred_repo']:
print("You must supply a CFME Credentials REPO")
ec += 1
self.check_arg('selff', 'cfme/sel_ff_chrome')
self.check_arg('gh_token', None)
self.check_arg('gh_owner', None)
self.check_arg('gh_repo', None)
self.check_arg('gh_dev_repo', None)
self.check_arg('gh_dev_owner', None)
if self.args['dev_pr']:
dev_check = [self.args[i] for i in ['gh_dev_repo', 'gh_dev_owner']]
if not all(dev_check):
print("To use dev_pr you must have a gh_dev_repo and gh_dev_owner defined")
ec += 1
self.check_arg('browser', 'firefox')
self.check_arg('pytest', None)
self.check_arg('pytest_con', 'py_test_base')
if not self.args['pytest']:
print("You must specify a py.test command")
ec += 1
self.check_arg('update_pip', False)
self.check_arg('wheel_host_url', None)
self.check_arg('auto_gen_test', False)
self.check_arg('artifactor_dir', '/log_depot')
self.check_arg('log_depot', None)
if not self.args['log_depot']:
print("You must specify a log_depot")
ec += 1
if self.args['pr'] and self.args['auto_gen_test'] and not \
all([self.args['gh_token'], self.args['gh_owner'], self.args['gh_repo']]):
print("You chose to use Auto Test Gen, without supplying GitHub details")
ec += 1
self.check_arg('capture', False)
self.check_arg('test_id', fauxfactory.gen_alphanumeric(8))
self.check_arg('prtester', False)
self.check_arg('trackerbot', None)
self.check_arg('wharf', False)
self.check_arg('sprout_username', None)
self.check_arg('sprout_password', None)
self.check_arg('sprout_description', None)
if ec:
sys.exit(127)
def display_banner(self):
if self.args['banner']:
banner = """
==================================================================
____ __ ____ __
: / __ \____ _____/ /_____ _____/ __ )____ / /_
[* *] / / / / __ \/ ___/ //_/ _ \/ ___/ __ / __ \/ __/
-[___]- / /_/ / /_/ / /__/ ,< / __/ / / /_/ / /_/ / /_
/_____/\____/\___/_/|_|\___/_/ /_____/\____/\__/
==================================================================
"""
print(banner)
def process_appliance(self):
self.appliance = self.args['appliance']
self.app_name = self.args.get('appliance_name', "Unnamed")
print(" APPLIANCE: {} ({})".format(self.appliance, self.app_name))
def create_pytest_command(self):
if self.args['auto_gen_test'] and self.args['pr']:
self.pr_metadata = self.get_pr_metadata(self.args['pr'])
pytest = self.pr_metadata.get('pytest', None)
sprout_appliances = self.pr_metadata.get('sprouts', 1)
if pytest:
|
[
" self.args['pytest'] = \"py.test {}\".format(pytest)"
] | 951
|
lcc
|
python
| null |
7b8b8a69f57173dd972113558b3540a67621319e219d30c4
|
|
/*
* Copyright (c) 2011, 2013, Oracle and/or its affiliates. All rights reserved.
* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
*
* This code is free software; you can redistribute it and/or modify it
* under the terms of the GNU General Public License version 2 only, as
* published by the Free Software Foundation.
*
* This code is distributed in the hope that it will be useful, but WITHOUT
* ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
* FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
* version 2 for more details (a copy is included in the LICENSE file that
* accompanied this code).
*
* You should have received a copy of the GNU General Public License version
* 2 along with this work; if not, write to the Free Software Foundation,
* Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
*
* Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
* or visit www.oracle.com if you need additional information or have any
* questions.
*/
package com.oracle.graal.virtual.phases.ea;
import static com.oracle.graal.api.meta.LocationIdentity.*;
import java.util.*;
import com.oracle.graal.api.meta.*;
import com.oracle.graal.compiler.common.type.*;
import com.oracle.graal.graph.*;
import com.oracle.graal.nodes.*;
import com.oracle.graal.nodes.cfg.*;
import com.oracle.graal.nodes.extended.*;
import com.oracle.graal.nodes.java.*;
import com.oracle.graal.nodes.util.*;
import com.oracle.graal.virtual.phases.ea.ReadEliminationBlockState.CacheEntry;
import com.oracle.graal.virtual.phases.ea.ReadEliminationBlockState.LoadCacheEntry;
import com.oracle.graal.virtual.phases.ea.ReadEliminationBlockState.ReadCacheEntry;
import com.oracle.graal.virtual.phases.ea.ReadEliminationBlockState.UnsafeLoadCacheEntry;
public class ReadEliminationClosure extends EffectsClosure<ReadEliminationBlockState> {
public ReadEliminationClosure(ControlFlowGraph cfg) {
super(null, cfg);
}
@Override
protected ReadEliminationBlockState getInitialState() {
return new ReadEliminationBlockState();
}
@Override
protected boolean processNode(Node node, ReadEliminationBlockState state, GraphEffectList effects, FixedWithNextNode lastFixedNode) {
boolean deleted = false;
if (node instanceof AccessFieldNode) {
AccessFieldNode access = (AccessFieldNode) node;
if (access.isVolatile()) {
processIdentity(state, ANY_LOCATION);
} else {
ValueNode object = GraphUtil.unproxify(access.object());
LoadCacheEntry identifier = new LoadCacheEntry(object, access.field());
ValueNode cachedValue = state.getCacheEntry(identifier);
if (node instanceof LoadFieldNode) {
if (cachedValue != null) {
effects.replaceAtUsages(access, cachedValue);
addScalarAlias(access, cachedValue);
deleted = true;
} else {
state.addCacheEntry(identifier, access);
}
} else {
assert node instanceof StoreFieldNode;
StoreFieldNode store = (StoreFieldNode) node;
ValueNode value = getScalarAlias(store.value());
if (GraphUtil.unproxify(value) == GraphUtil.unproxify(cachedValue)) {
effects.deleteFixedNode(store);
deleted = true;
}
state.killReadCache(store.field());
state.addCacheEntry(identifier, value);
}
}
} else if (node instanceof ReadNode) {
ReadNode read = (ReadNode) node;
if (read.location() instanceof ConstantLocationNode) {
ValueNode object = GraphUtil.unproxify(read.object());
ReadCacheEntry identifier = new ReadCacheEntry(object, read.location());
ValueNode cachedValue = state.getCacheEntry(identifier);
if (cachedValue != null) {
if (read.getGuard() != null && !(read.getGuard() instanceof FixedNode)) {
effects.addFixedNodeBefore(ValueAnchorNode.create((ValueNode) read.getGuard()), read);
}
effects.replaceAtUsages(read, cachedValue);
addScalarAlias(read, cachedValue);
deleted = true;
} else {
state.addCacheEntry(identifier, read);
}
}
} else if (node instanceof WriteNode) {
WriteNode write = (WriteNode) node;
if (write.location() instanceof ConstantLocationNode) {
ValueNode object = GraphUtil.unproxify(write.object());
ReadCacheEntry identifier = new ReadCacheEntry(object, write.location());
ValueNode cachedValue = state.getCacheEntry(identifier);
ValueNode value = getScalarAlias(write.value());
if (GraphUtil.unproxify(value) == GraphUtil.unproxify(cachedValue)) {
effects.deleteFixedNode(write);
deleted = true;
}
processIdentity(state, write.location().getLocationIdentity());
state.addCacheEntry(identifier, value);
} else {
processIdentity(state, write.location().getLocationIdentity());
}
} else if (node instanceof UnsafeAccessNode) {
if (node instanceof UnsafeLoadNode) {
UnsafeLoadNode load = (UnsafeLoadNode) node;
if (load.offset().isConstant() && load.getLocationIdentity() != LocationIdentity.ANY_LOCATION) {
ValueNode object = GraphUtil.unproxify(load.object());
UnsafeLoadCacheEntry identifier = new UnsafeLoadCacheEntry(object, load.offset(), load.getLocationIdentity());
ValueNode cachedValue = state.getCacheEntry(identifier);
if (cachedValue != null) {
effects.replaceAtUsages(load, cachedValue);
addScalarAlias(load, cachedValue);
deleted = true;
} else {
state.addCacheEntry(identifier, load);
}
}
} else {
assert node instanceof UnsafeStoreNode;
UnsafeStoreNode write = (UnsafeStoreNode) node;
if (write.offset().isConstant() && write.getLocationIdentity() != LocationIdentity.ANY_LOCATION) {
ValueNode object = GraphUtil.unproxify(write.object());
UnsafeLoadCacheEntry identifier = new UnsafeLoadCacheEntry(object, write.offset(), write.getLocationIdentity());
ValueNode cachedValue = state.getCacheEntry(identifier);
ValueNode value = getScalarAlias(write.value());
if (GraphUtil.unproxify(value) == GraphUtil.unproxify(cachedValue)) {
effects.deleteFixedNode(write);
deleted = true;
}
processIdentity(state, write.getLocationIdentity());
state.addCacheEntry(identifier, value);
} else {
processIdentity(state, write.getLocationIdentity());
}
}
} else if (node instanceof MemoryCheckpoint.Single) {
LocationIdentity identity = ((MemoryCheckpoint.Single) node).getLocationIdentity();
processIdentity(state, identity);
} else if (node instanceof MemoryCheckpoint.Multi) {
for (LocationIdentity identity : ((MemoryCheckpoint.Multi) node).getLocationIdentities()) {
processIdentity(state, identity);
}
}
return deleted;
}
private static void processIdentity(ReadEliminationBlockState state, LocationIdentity identity) {
if (identity == ANY_LOCATION) {
state.killReadCache();
return;
}
state.killReadCache(identity);
}
@Override
protected void processLoopExit(LoopExitNode exitNode, ReadEliminationBlockState initialState, ReadEliminationBlockState exitState, GraphEffectList effects) {
if (exitNode.graph().hasValueProxies()) {
for (Map.Entry<CacheEntry<?>, ValueNode> entry : exitState.getReadCache().entrySet()) {
if (initialState.getReadCache().get(entry.getKey()) != entry.getValue()) {
ProxyNode proxy = ValueProxyNode.create(exitState.getCacheEntry(entry.getKey()), exitNode);
effects.addFloatingNode(proxy, "readCacheProxy");
entry.setValue(proxy);
}
}
}
}
@Override
protected ReadEliminationBlockState cloneState(ReadEliminationBlockState other) {
return new ReadEliminationBlockState(other);
}
@Override
protected MergeProcessor createMergeProcessor(Block merge) {
return new ReadEliminationMergeProcessor(merge);
}
private class ReadEliminationMergeProcessor extends EffectsClosure<ReadEliminationBlockState>.MergeProcessor {
private final HashMap<Object, ValuePhiNode> materializedPhis = new HashMap<>();
public ReadEliminationMergeProcessor(Block mergeBlock) {
super(mergeBlock);
}
protected <T> PhiNode getCachedPhi(T virtual, Stamp stamp) {
ValuePhiNode result = materializedPhis.get(virtual);
if (result == null) {
result = ValuePhiNode.create(stamp, merge);
materializedPhis.put(virtual, result);
}
return result;
}
@Override
protected void merge(List<ReadEliminationBlockState> states) {
super.merge(states);
mergeReadCache(states);
}
private void mergeReadCache(List<ReadEliminationBlockState> states) {
for (Map.Entry<CacheEntry<?>, ValueNode> entry : states.get(0).readCache.entrySet()) {
CacheEntry<?> key = entry.getKey();
ValueNode value = entry.getValue();
boolean phi = false;
for (int i = 1; i < states.size(); i++) {
ValueNode otherValue = states.get(i).readCache.get(key);
if (otherValue == null) {
value = null;
phi = false;
break;
}
if (!phi && otherValue != value) {
phi = true;
}
}
|
[
" if (phi) {"
] | 810
|
lcc
|
java
| null |
f6536b39e8505ccb4c694576fea0f11053ae3a52c68fe5c6
|
|
/*
* Copyright (C) 2014 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License
*/
package com.android.ims.internal;
import android.os.Handler;
import android.os.Looper;
import android.os.Message;
import android.os.RemoteException;
import android.telecom.CameraCapabilities;
import android.telecom.Connection;
import android.telecom.VideoProfile;
import android.view.Surface;
public abstract class ImsVideoCallProvider {
private static final int MSG_SET_CALLBACK = 1;
private static final int MSG_SET_CAMERA = 2;
private static final int MSG_SET_PREVIEW_SURFACE = 3;
private static final int MSG_SET_DISPLAY_SURFACE = 4;
private static final int MSG_SET_DEVICE_ORIENTATION = 5;
private static final int MSG_SET_ZOOM = 6;
private static final int MSG_SEND_SESSION_MODIFY_REQUEST = 7;
private static final int MSG_SEND_SESSION_MODIFY_RESPONSE = 8;
private static final int MSG_REQUEST_CAMERA_CAPABILITIES = 9;
private static final int MSG_REQUEST_CALL_DATA_USAGE = 10;
private static final int MSG_SET_PAUSE_IMAGE = 11;
private final ImsVideoCallProviderBinder mBinder;
private IImsVideoCallCallback mCallback;
/**
* Default handler used to consolidate binder method calls onto a single thread.
*/
private final Handler mProviderHandler = new Handler(Looper.getMainLooper()) {
@Override
public void handleMessage(Message msg) {
switch (msg.what) {
case MSG_SET_CALLBACK:
mCallback = (IImsVideoCallCallback) msg.obj;
break;
case MSG_SET_CAMERA:
onSetCamera((String) msg.obj);
break;
case MSG_SET_PREVIEW_SURFACE:
onSetPreviewSurface((Surface) msg.obj);
break;
case MSG_SET_DISPLAY_SURFACE:
onSetDisplaySurface((Surface) msg.obj);
break;
case MSG_SET_DEVICE_ORIENTATION:
onSetDeviceOrientation(msg.arg1);
break;
case MSG_SET_ZOOM:
onSetZoom((Float) msg.obj);
break;
case MSG_SEND_SESSION_MODIFY_REQUEST:
onSendSessionModifyRequest((VideoProfile) msg.obj);
break;
case MSG_SEND_SESSION_MODIFY_RESPONSE:
onSendSessionModifyResponse((VideoProfile) msg.obj);
break;
case MSG_REQUEST_CAMERA_CAPABILITIES:
onRequestCameraCapabilities();
break;
case MSG_REQUEST_CALL_DATA_USAGE:
onRequestCallDataUsage();
break;
case MSG_SET_PAUSE_IMAGE:
onSetPauseImage((String) msg.obj);
break;
default:
break;
}
}
};
/**
* IImsVideoCallProvider stub implementation.
*/
private final class ImsVideoCallProviderBinder extends IImsVideoCallProvider.Stub {
public void setCallback(IImsVideoCallCallback callback) {
mProviderHandler.obtainMessage(MSG_SET_CALLBACK, callback).sendToTarget();
}
public void setCamera(String cameraId) {
mProviderHandler.obtainMessage(MSG_SET_CAMERA, cameraId).sendToTarget();
}
public void setPreviewSurface(Surface surface) {
mProviderHandler.obtainMessage(MSG_SET_PREVIEW_SURFACE, surface).sendToTarget();
}
public void setDisplaySurface(Surface surface) {
mProviderHandler.obtainMessage(MSG_SET_DISPLAY_SURFACE, surface).sendToTarget();
}
public void setDeviceOrientation(int rotation) {
mProviderHandler.obtainMessage(MSG_SET_DEVICE_ORIENTATION, rotation).sendToTarget();
}
public void setZoom(float value) {
mProviderHandler.obtainMessage(MSG_SET_ZOOM, value).sendToTarget();
}
public void sendSessionModifyRequest(VideoProfile requestProfile) {
mProviderHandler.obtainMessage(
MSG_SEND_SESSION_MODIFY_REQUEST, requestProfile).sendToTarget();
}
public void sendSessionModifyResponse(VideoProfile responseProfile) {
mProviderHandler.obtainMessage(
MSG_SEND_SESSION_MODIFY_RESPONSE, responseProfile).sendToTarget();
}
public void requestCameraCapabilities() {
mProviderHandler.obtainMessage(MSG_REQUEST_CAMERA_CAPABILITIES).sendToTarget();
}
public void requestCallDataUsage() {
mProviderHandler.obtainMessage(MSG_REQUEST_CALL_DATA_USAGE).sendToTarget();
}
public void setPauseImage(String uri) {
mProviderHandler.obtainMessage(MSG_SET_PAUSE_IMAGE, uri).sendToTarget();
}
}
public ImsVideoCallProvider() {
mBinder = new ImsVideoCallProviderBinder();
}
/**
* Returns binder object which can be used across IPC methods.
*/
public final IImsVideoCallProvider getInterface() {
return mBinder;
}
/** @see Connection.VideoProvider#onSetCamera */
public abstract void onSetCamera(String cameraId);
/** @see Connection.VideoProvider#onSetPreviewSurface */
public abstract void onSetPreviewSurface(Surface surface);
/** @see Connection.VideoProvider#onSetDisplaySurface */
public abstract void onSetDisplaySurface(Surface surface);
/** @see Connection.VideoProvider#onSetDeviceOrientation */
public abstract void onSetDeviceOrientation(int rotation);
/** @see Connection.VideoProvider#onSetZoom */
public abstract void onSetZoom(float value);
/** @see Connection.VideoProvider#onSendSessionModifyRequest */
public abstract void onSendSessionModifyRequest(VideoProfile requestProfile);
/** @see Connection.VideoProvider#onSendSessionModifyResponse */
public abstract void onSendSessionModifyResponse(VideoProfile responseProfile);
/** @see Connection.VideoProvider#onRequestCameraCapabilities */
public abstract void onRequestCameraCapabilities();
/** @see Connection.VideoProvider#onRequestCallDataUsage */
public abstract void onRequestCallDataUsage();
/** @see Connection.VideoProvider#onSetPauseImage */
public abstract void onSetPauseImage(String uri);
/** @see Connection.VideoProvider#receiveSessionModifyRequest */
public void receiveSessionModifyRequest(VideoProfile VideoProfile) {
if (mCallback != null) {
try {
mCallback.receiveSessionModifyRequest(VideoProfile);
} catch (RemoteException ignored) {
}
}
}
    /**
     * Forwards the result of an earlier session modify request to the framework
     * callback, if one is registered. A dead remote process (RemoteException) is
     * deliberately ignored.
     *
     * @see Connection.VideoProvider#receiveSessionModifyResponse
     */
    public void receiveSessionModifyResponse(
            int status, VideoProfile requestedProfile, VideoProfile responseProfile) {
        if (mCallback != null) {
            try {
                mCallback.receiveSessionModifyResponse(status, requestedProfile, responseProfile);
            } catch (RemoteException ignored) {
            }
        }
    }
/** @see Connection.VideoProvider#handleCallSessionEvent */
public void handleCallSessionEvent(int event) {
|
[
" if (mCallback != null) {"
] | 587
|
lcc
|
java
| null |
eda13b1d2aeeeac450c99e7b1bd4ab5d92449eda79b54da5
|
|
using Server;
using System;
using Server.Mobiles;
using Server.Gumps;
using System.Collections.Generic;
using Server.Engines.Quests;
using Server.Network;
using Server.ContextMenus;
using Server.Multis;
namespace Server.Items
{
public class MyrmidexRewardBag : Backpack
{
public MyrmidexRewardBag()
{
Hue = BaseReward.RewardBagHue();
switch (Utility.Random(4))
{
default:
case 0: DropItem(new RecipeScroll(Utility.RandomMinMax(900, 905))); break;
case 1: DropItem(new EodonTribeRewardTitleToken()); break;
case 2: DropItem(new RecipeScroll(455)); break;
case 3: DropItem(new MoonstoneCrystal()); break;
}
}
public MyrmidexRewardBag(Serial serial)
: base(serial)
{
}
public override void Serialize(GenericWriter writer)
{
base.Serialize(writer);
writer.Write(0);
}
public override void Deserialize(GenericReader reader)
{
base.Deserialize(reader);
int version = reader.ReadInt();
}
}
public class EodonianRewardBag : Backpack
{
public EodonianRewardBag()
{
Hue = BaseReward.RewardBagHue();
switch (Utility.Random(4))
{
default:
case 0: DropItem(new MonsterStatuette(MonsterStatuetteType.SakkhranBirdOfPrey)); break;
case 1: DropItem(new EodonTribeRewardTitleToken()); break;
case 2: DropItem(new RecipeScroll(1000)); break;
case 3:
if (0.5 > Utility.RandomDouble())
DropItem(new RawMoonstoneLargeAddonDeed());
else
DropItem(new RawMoonstoneSmallAddonDeed());
break;
}
}
public EodonianRewardBag(Serial serial)
: base(serial)
{
}
public override void Serialize(GenericWriter writer)
{
base.Serialize(writer);
writer.Write(0);
}
public override void Deserialize(GenericReader reader)
{
base.Deserialize(reader);
int version = reader.ReadInt();
}
}
    public class MoonstoneCrystal : Item, ISecurable
    {
        // Maps a destination-name cliloc number to its teleport point on TerMur.
        // The cliloc key doubles as the gump button ID (see InternalGump).
        public static Dictionary<int, Point3D> Locations { get; set; }
        private SecureLevel m_SecureLevel;
        public static void Initialize()
        {
            Locations = new Dictionary<int, Point3D>();
            Locations[1156706] = new Point3D(642, 1721, 40); // Barako Village
            Locations[1156707] = new Point3D(701, 2106, 40); // Jukari Village
            Locations[1156708] = new Point3D(355, 1873, 0); // Kurak Village
            Locations[1156709] = new Point3D(552, 1471, 40); // Sakkhra Village
            Locations[1156710] = new Point3D(412, 1595, 40); // Urali Village
            Locations[1156711] = new Point3D(167, 1800, 80); // Barrab Village
            Locations[1156712] = new Point3D(929, 2016, 50); // Shadowguard
            Locations[1156713] = new Point3D(731, 1603, 40); // The great ape cave
            Locations[1156714] = new Point3D(878, 2105, 40); // The Volcano
            Locations[1156715] = new Point3D(390, 1690, 40); // Dragon Turtle Habitat
            Locations[1156716] = new Point3D(269, 1726, 80); // Britannian Encampment
        }
        // House-security access level (ISecurable contract).
        [CommandProperty(AccessLevel.GameMaster)]
        public SecureLevel Level
        {
            get
            {
                return this.m_SecureLevel;
            }
            set
            {
                this.m_SecureLevel = value;
            }
        }
        public override void GetContextMenuEntries(Mobile from, List<ContextMenuEntry> list)
        {
            base.GetContextMenuEntries(from, list);
            SetSecureLevelEntry.AddTo(from, this, list);
        }
        public override int LabelNumber { get { return 1124143; } } // Moonstone Crystal
        [Constructable]
        public MoonstoneCrystal() : base(40123)
        {
        }
        // Only usable while locked down or secured and the user is within 3 tiles.
        public override void OnDoubleClick(Mobile from)
        {
            if((IsLockedDown || IsSecure) && from.InRange(GetWorldLocation(), 3))
            {
                from.SendGump(new InternalGump(from as PlayerMobile, this));
            }
        }
        // Destination-selection gump; one label/button pair per Locations entry.
        private class InternalGump : Gump
        {
            public Item Moonstone { get; set; }
            public PlayerMobile User { get; set; }
            public InternalGump(PlayerMobile pm, Item moonstone) : base(75, 75)
            {
                Moonstone = moonstone;
                User = pm;
                AddGumpLayout();
            }
            public void AddGumpLayout()
            {
                AddBackground( 0, 0, 400, 400, 9270 );
                AddHtmlLocalized( 0, 15, 400, 16, 1154645, "#1156704", 0xFFFF, false, false ); // Select your destination:
                // The cliloc key is reused as the button ID so OnResponse can
                // look the destination up directly in Locations.
                ColUtility.For<int, Point3D>(MoonstoneCrystal.Locations, (i, key, value) =>
                {
                    AddHtmlLocalized(60, 45 + (i * 25), 250, 16, key, 0xFFFF, false, false);
                    AddButton(20, 50 + (i * 25), 2117, 2118, key, GumpButtonType.Reply, 0);
                });
            }
            public override void OnResponse(NetState state, RelayInfo info)
            {
                // ButtonID 0 is the gump's close action; anything else is a
                // destination cliloc key.
                if (info.ButtonID > 0)
                {
                    int id = info.ButtonID;
                    if (MoonstoneCrystal.Locations.ContainsKey(id))
                    {
                        Point3D p = MoonstoneCrystal.Locations[id];
                        if (CheckTravel(p))
                        {
                            BaseCreature.TeleportPets(User, p, Map.TerMur);
                            User.Combatant = null;
                            User.Warmode = false;
                            User.Hidden = true;
                            User.MoveToWorld(p, Map.TerMur);
                            Effects.PlaySound(p, Map.TerMur, 0x1FE);
                        }
                    }
                }
            }
            // Standard gate-travel preconditions; returns true when travel is allowed.
            private bool CheckTravel(Point3D p)
            {
                if ( !User.InRange( Moonstone.GetWorldLocation(), 1 ) || User.Map != Moonstone.Map )
                {
                    User.SendLocalizedMessage( 1019002 ); // You are too far away to use the gate.
                }
                /* CEO - 02/20/06 - Removed to allow Reds access to other lands
                else if ( User.Murderer )
                {
                    User.SendLocalizedMessage( 1019004 ); // You are not allowed to travel there.
                }
                */
                else if ( Server.Factions.Sigil.ExistsOn( User ) )
                {
                    User.SendLocalizedMessage( 1019004 ); // You are not allowed to travel there.
                }
                else if ( User.Criminal )
                {
                    User.SendLocalizedMessage( 1005561, "", 0x22 ); // Thou'rt a criminal and cannot escape so easily.
                }
                else if ( Server.Spells.SpellHelper.CheckCombat( User ) )
                {
                    User.SendLocalizedMessage( 1005564, "", 0x22 ); // Wouldst thou flee during the heat of battle??
                }
                else if ( User.Spell != null )
                {
                    User.SendLocalizedMessage( 1049616 ); // You are too busy to do that at the moment.
                }
                else if ( User.Map == Map.TerMur && User.InRange( p, 1 ) )
                {
                    User.SendLocalizedMessage( 1019003 ); // You are already there.
                }
                else
                    return true;
                return false;
            }
        }
        public MoonstoneCrystal(Serial serial) : base(serial)
        {
        }
        public override void Serialize(GenericWriter writer)
        {
            base.Serialize(writer);
            writer.Write(0);
            writer.Write((int)this.m_SecureLevel); // At first, need to save world with this line before next starting.
        }
        public override void Deserialize(GenericReader reader)
        {
            base.Deserialize(reader);
            int version = reader.ReadInt();
            this.m_SecureLevel = (SecureLevel)reader.ReadInt(); // If you have not saved world with above line in Serialize(), you should not add this line.
        }
    }
    // TypeAlias lets world saves written under the old class name
    // "KotlPowerCoil" deserialize into this renamed class.
    [TypeAlias("Server.Items.KotlPowerCoil")]
    public class KotlPowerCore : Item
    {
        public override int LabelNumber { get { return 1124179; } } // Kotl Power Core
        [Constructable]
        public KotlPowerCore() : base(40147)
        {
        }
        public KotlPowerCore(Serial serial) : base(serial)
        {
        }
        public override void Serialize(GenericWriter writer)
        {
            base.Serialize(writer);
            writer.Write(0); // version
        }
        public override void Deserialize(GenericReader reader)
        {
            base.Deserialize(reader);
            int version = reader.ReadInt();
        }
    }
public class EodonianWallMap : Item
{
public override int LabelNumber { get { return 1156690; } } // Wall Map of Eodon
[Constructable]
public EodonianWallMap() : base(11635)
{
}
public override void OnDoubleClick(Mobile from)
{
if(from.InRange(GetWorldLocation(), 5))
{
|
[
"\t\t\t\tGump g = new Gump(0, 0);"
] | 854
|
lcc
|
csharp
| null |
93191c3c840c04430c2e0222e3643013a8af4e2b709e6f95
|
|
# -*- coding: utf-8 -*-
#
# This file is part of Invenio.
# Copyright (C) 2014, 2015 CERN.
#
# Invenio is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License as
# published by the Free Software Foundation; either version 2 of the
# License, or (at your option) any later version.
#
# Invenio is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Invenio; if not, write to the Free Software Foundation, Inc.,
# 59 Temple Place, Suite 330, Boston, MA 02111-1307, USA.
"""Record models."""
from flask import current_app
from intbitset import intbitset
from sqlalchemy.ext.declarative import declared_attr
from werkzeug import cached_property
from invenio.ext.sqlalchemy import db, utils
class Record(db.Model):
    """Represent a record object inside the SQL database (``bibrec`` table)."""
    __tablename__ = 'bibrec'
    # Primary key; record identifiers are referenced throughout legacy Invenio.
    id = db.Column(
        db.MediumInteger(8, unsigned=True), primary_key=True,
        nullable=False, autoincrement=True)
    creation_date = db.Column(
        db.DateTime, nullable=False,
        server_default='1900-01-01 00:00:00',
        index=True)
    modification_date = db.Column(
        db.DateTime, nullable=False,
        server_default='1900-01-01 00:00:00',
        index=True)
    master_format = db.Column(
        db.String(16), nullable=False,
        server_default='marc')
    additional_info = db.Column(db.JSON)
    # FIXME: remove this from the model and add them to the record class, all?
    @property
    def deleted(self):
        """Return True if record is marked as deleted.

        A record is considered deleted when its 980__ field contains
        "DELETED" (or "DUMMY" on CERN site installations).
        """
        from invenio.legacy.bibrecord import get_fieldvalues
        # record exists; now check whether it isn't marked as deleted:
        dbcollids = get_fieldvalues(self.id, "980__%")
        return ("DELETED" in dbcollids) or \
               (current_app.config.get('CFG_CERN_SITE')
                and "DUMMY" in dbcollids)
    @staticmethod
    def _next_merged_recid(recid):
        """Return the ID of record merged with record with ID = recid.

        The merged id is read from field 970__d; returns None when the
        field is absent or does not contain an integer.
        """
        from invenio.legacy.bibrecord import get_fieldvalues
        merged_recid = None
        for val in get_fieldvalues(recid, "970__d"):
            try:
                merged_recid = int(val)
                break
            except ValueError:
                # Non-numeric 970__d values are skipped.
                pass
        if not merged_recid:
            return None
        else:
            return merged_recid
    @cached_property
    def merged_recid(self):
        """Return the recid of the record this record has been merged into.

        :return: merged record recID, or None if not merged
        """
        return Record._next_merged_recid(self.id)
    @property
    def merged_recid_final(self):
        """Return the last record from hierarchy merged with this one."""
        # Follow the 970__d chain until it terminates.
        cur_id = self.id
        next_id = Record._next_merged_recid(cur_id)
        while next_id:
            cur_id = next_id
            next_id = Record._next_merged_recid(cur_id)
        return cur_id
    @cached_property
    def is_restricted(self):
        """Return True if record is restricted."""
        from invenio.modules.collections.cache import get_all_restricted_recids
        return self.id in get_all_restricted_recids() or self.is_processed
    @cached_property
    def is_processed(self):
        """Return True if record is processed (not in any collection)."""
        from invenio.modules.collections.cache import is_record_in_any_collection
        return not is_record_in_any_collection(self.id,
                                               recreate_cache_if_needed=False)
    @classmethod
    def filter_time_interval(cls, datetext, column='c'):
        """Return filter based on date text and column type.

        ``datetext`` is either "from->to" (either side may be empty) or a
        date prefix matched with LIKE. ``column`` selects creation ('c')
        or modification date (anything else).
        """
        column = cls.creation_date if column == 'c' else cls.modification_date
        parts = datetext.split('->')
        where = []
        if len(parts) == 2:
            if parts[0] != '':
                where.append(column >= parts[0])
            if parts[1] != '':
                where.append(column <= parts[1])
        else:
            where.append(column.like(datetext + '%'))
        return where
    @classmethod
    def allids(cls):
        """Return all existing record ids as an intbitset."""
        return intbitset(db.session.query(cls.id).all())
class RecordMetadata(db.Model):
    """Represent a json record inside the SQL database.

    One-to-one companion of :class:`Record` keyed by the same id.
    """
    __tablename__ = 'record_json'
    id = db.Column(
        db.MediumInteger(8, unsigned=True),
        db.ForeignKey(Record.id),
        primary_key=True,
        nullable=False,
        autoincrement=True
    )
    # Full record content serialized as JSON.
    json = db.Column(db.JSON, nullable=False)
    record = db.relationship(Record, backref='record_json')
class BibxxxMixin(utils.TableNameMixin):
    """Mixin for Bibxxx tables (tag/value pairs of a MARC field group)."""
    id = db.Column(db.MediumInteger(8, unsigned=True),
                   primary_key=True,
                   autoincrement=True)
    # MARC tag, e.g. "100__a".
    tag = db.Column(db.String(6), nullable=False, index=True,
                    server_default='')
    value = db.Column(db.Text(35), nullable=False,
                      index=True)
class BibrecBibxxxMixin(utils.TableFromCamelNameMixin):
    """Mixin for BibrecBibxxx tables (record <-> Bibxxx link tables)."""
    @declared_attr
    def _bibxxx(cls):
        # Resolve the companion Bibxxx class by stripping the leading
        # "Bibrec" (6 characters) from this class's name.
        return globals()[cls.__name__[6:]]
    @declared_attr
    def id_bibrec(cls):
        return db.Column(db.MediumInteger(8, unsigned=True),
                         db.ForeignKey(Record.id), nullable=False,
                         primary_key=True, index=True, server_default='0')
    @declared_attr
    def id_bibxxx(cls):
        return db.Column(db.MediumInteger(8, unsigned=True),
                         db.ForeignKey(cls._bibxxx.id), nullable=False,
                         primary_key=True, index=True, server_default='0')
    # Repetition index of the field within the record.
    field_number = db.Column(db.SmallInteger(5, unsigned=True),
                             primary_key=True)
    @declared_attr
    def bibrec(cls):
        return db.relationship(Record)
    @declared_attr
    def bibxxx(cls):
        return db.relationship(cls._bibxxx, backref='bibrecs')
# Generate the 100 Bibxxx/BibrecBibxxx model pairs (Bib00x..Bib99x) at import
# time and register them in the module namespace.
models = []
for idx in range(100):
    Bibxxx = "Bib{0:02d}x".format(idx)
    globals()[Bibxxx] = type(Bibxxx, (db.Model, BibxxxMixin), {})
    BibrecBibxxx = "BibrecBib{0:02d}x".format(idx)
    globals()[BibrecBibxxx] = type(BibrecBibxxx,
                                   (db.Model, BibrecBibxxxMixin), {})
    models += [Bibxxx, BibrecBibxxx]
__all__ = tuple([
'Record',
'RecordMetadata',
|
[
"] + models)"
] | 609
|
lcc
|
python
| null |
588825a49d4fdd244fc2fb3cce5086b306490b945a3bc787
|
|
# This file is part of xmpp-backends (https://github.com/mathiasertl/xmpp-backends).
#
# xmpp-backends is free software: you can redistribute it and/or modify it under the terms of the GNU General
# Public License as published by the Free Software Foundation, either version 3 of the License, or (at your
# option) any later version.
#
# xmpp-backends is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the
# implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
# for more details.
#
# You should have received a copy of the GNU General Public License along with xmpp-backends. If not, see
# <http://www.gnu.org/licenses/>.
import ipaddress
import logging
import time
from datetime import datetime
import pytz
from .base import BackendError
from .base import UserExists
from .base import UserNotFound
from .base import UserSession
from .base import XmppBackendBase
from .constants import CONNECTION_XMPP
log = logging.getLogger(__name__)
class DummyBackend(XmppBackendBase):
"""A dummy backend for development using Djangos caching framework.
By default, Djangos caching framework uses in-memory data structures, so every registration will be
removed if you restart the development server. You can configure a different cache (e.g. memcached), see
`Django's cache framework <https://docs.djangoproject.com/en/dev/topics/cache/>`_ for details.
:params domains: A list of domains to serve.
"""
library = 'django.core.cache.cache'
    def __init__(self, domains):
        """Store the list of served domains; backing store comes from the base class."""
        super(DummyBackend, self).__init__()
        self._domains = domains
    def get_api_version(self):
        """Return the fixed backend API version tuple."""
        return (1, 0)
    def user_exists(self, username, domain):
        """Return True if the user has a cache entry in a served domain."""
        if domain not in self._domains:
            return False
        user = '%s@%s' % (username, domain)
        return self.module.get(user) is not None
    def user_sessions(self, username, domain):
        """Return the set of sessions for the user (empty set if unknown)."""
        user = '%s@%s' % (username, domain)
        return self.module.get(user, {}).get('sessions', set())
    def start_user_session(self, username, domain, resource, **kwargs):
        """Method to add a user session for debugging.

        Accepted parameters are the same as to the constructor of
        :py:class:`~xmpp_backends.base.UserSession`; any parameter not given
        is filled with a sensible default below.

        :raises UserNotFound: if the user has no cache entry.
        """
        kwargs.setdefault('uptime', pytz.utc.localize(datetime.utcnow()))
        kwargs.setdefault('priority', 0)
        kwargs.setdefault('status', 'online')
        kwargs.setdefault('status_text', '')
        kwargs.setdefault('connection_type', CONNECTION_XMPP)
        kwargs.setdefault('encrypted', True)
        kwargs.setdefault('compressed', False)
        kwargs.setdefault('ip_address', '127.0.0.1')
        # Normalize a string IP into an ipaddress object.
        if isinstance(kwargs['ip_address'], str):
            kwargs['ip_address'] = ipaddress.ip_address(kwargs['ip_address'])
        user = '%s@%s' % (username, domain)
        session = UserSession(self, username, domain, resource, **kwargs)
        data = self.module.get(user)
        if data is None:
            raise UserNotFound(username, domain, resource)
        data.setdefault('sessions', set())
        if isinstance(data['sessions'], list):
            # Cast old data to set
            data['sessions'] = set(data['sessions'])
        data['sessions'].add(session)
        self.module.set(user, data)
        # Also maintain the global session registry under 'all_sessions'.
        all_sessions = self.module.get('all_sessions', set())
        all_sessions.add(session)
        self.module.set('all_sessions', all_sessions)
    def stop_user_session(self, username, domain, resource, reason=''):
        """Remove the session with the given resource for the user.

        ``reason`` is accepted for interface compatibility but unused here.

        :raises UserNotFound: if the user has no cache entry.
        """
        user = '%s@%s' % (username, domain)
        data = self.module.get(user)
        if data is None:
            raise UserNotFound(username, domain)
        # Drop only the session matching this resource.
        data['sessions'] = set([d for d in data.get('sessions', []) if d.resource != resource])
        self.module.set(user, data)
        # NOTE: this removes *all* of the user's sessions from the global
        # registry (filtered by jid), not just the one resource.
        all_sessions = self.module.get('all_sessions', set())
        all_sessions = set([s for s in all_sessions if s.jid != user])
        self.module.set('all_sessions', all_sessions)
    def create_user(self, username, domain, password, email=None):
        """Create a new user entry in the cache.

        :raises BackendError: if the domain is not served by this backend.
        :raises UserExists: if the user already has a cache entry.
        """
        if domain not in self._domains:
            raise BackendError('Backend does not serve domain %s.' % domain)
        user = '%s@%s' % (username, domain)
        log.debug('Create user: %s (%s)', user, password)
        data = self.module.get(user)
        if data is None:
            data = {
                'pass': password,
                'last_status': (time.time(), 'Registered'),
                'sessions': set(),
            }
            if email is not None:
                data['email'] = email
            self.module.set(user, data)
            # maintain list of users in cache
            users = self.module.get('all_users', set())
            users.add(user)
            self.module.set('all_users', users)
        else:
            raise UserExists()
    def check_password(self, username, domain, password):
        """Return True if the user exists and the password matches."""
        user = '%s@%s' % (username, domain)
        log.debug('Check pass: %s -> %s', user, password)
        data = self.module.get(user)
        if data is None:
            return False
        else:
            return data['pass'] == password
    def check_email(self, username, domain, email):
        """Return True if the user exists and the email matches.

        NOTE(review): raises KeyError for a user created without an email —
        'email' is only set in create_user when one was passed.
        """
        user = '%s@%s' % (username, domain)
        log.debug('Check email: %s --> %s', user, email)
        data = self.module.get(user)
        if data is None:
            return False
        else:
            return data['email'] == email
    def set_password(self, username, domain, password):
        """Set the user's password.

        :raises UserNotFound: if the user has no cache entry.
        """
        user = '%s@%s' % (username, domain)
        log.debug('Set pass: %s -> %s', user, password)
        data = self.module.get(user)
        if data is None:
            raise UserNotFound(username, domain)
        else:
            data['pass'] = password
            self.module.set(user, data)
    def set_email(self, username, domain, email):
        """Set the user's email address.

        :raises UserNotFound: if the user has no cache entry.
        """
        user = '%s@%s' % (username, domain)
        log.debug('Set email: %s --> %s', user, email)
        data = self.module.get(user)
        if data is None:
            raise UserNotFound(username, domain)
        else:
            data['email'] = email
            self.module.set(user, data)
    def get_last_activity(self, username, domain):
        """Return the user's last-activity time as a naive UTC datetime.

        :raises UserNotFound: if the user has no cache entry.
        """
        user = '%s@%s' % (username, domain)
        data = self.module.get(user)
        if data is None:
            raise UserNotFound(username, domain)
        else:
            return datetime.utcfromtimestamp(data['last_status'][0])
    def set_last_activity(self, username, domain, status='', timestamp=None):
        """Record the user's last activity (defaults to now)."""
        user = '%s@%s' % (username, domain)
        if timestamp is None:
            timestamp = time.time()
        else:
            # Convert a datetime into a Unix timestamp via the base class.
            timestamp = self.datetime_to_timestamp(timestamp)
        data = self.module.get(user)
        if data is None:
            pass  # NOTE: real APIs provide no error either :-/
        else:
            data['last_status'] = (timestamp, status)
            self.module.set(user, data)
    def block_user(self, username, domain):
        # overwritten so we pass tests: blocking is simulated by scrambling
        # the password so the user can no longer log in.
        self.set_password(username, domain, self.get_random_password())
    def all_domains(self):
        """Just returns the domains passed to the constructor."""
        return list(self._domains)
    def all_users(self, domain):
        """Return the set of usernames registered in the given domain."""
        return set([u.split('@')[0] for u in self.module.get('all_users', set())
                    if u.endswith('@%s' % domain)])
    def all_user_sessions(self):
        """Return the global set of all active sessions."""
        return self.module.get('all_sessions', set())
def remove_user(self, username, domain):
|
[
" user = '%s@%s' % (username, domain)"
] | 744
|
lcc
|
python
| null |
a94d83a5503d88913110cc5d278d085fdccea0e30d2e9d9b
|
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# License: MIT (see LICENSE file provided)
# vim600: fdm=marker tabstop=4 shiftwidth=4 expandtab ai
#
# This file has been modified by Manuel Saelices <msaelices _at_ yaco.es>
# and belongs to David JEAN LOUIS <izimobil@gmail.com>.
#
# You can find more information about polib at http://code.google.com/p/polib/
#
# Description {{{
"""
**polib** allows you to manipulate, create, modify gettext files (pot, po
and mo files). You can load existing files, iterate through it's entries,
add, modify entries, comments or metadata, etc... or create new po files
from scratch.
**polib** provides a simple and pythonic API, exporting only three
convenience functions (*pofile*, *mofile* and *detect_encoding*), and the
four core classes, *POFile*, *MOFile*, *POEntry* and *MOEntry* for creating
new files/entries.
**Basic example**:
>>> import polib
>>> # load an existing po file
>>> po = polib.pofile('tests/test_utf8.po')
>>> for entry in po:
... # do something with entry...
... pass
>>> # add an entry
>>> entry = polib.POEntry(msgid='Welcome', msgstr='Bienvenue')
>>> entry.occurrences = [('welcome.py', '12'), ('anotherfile.py', '34')]
>>> po.append(entry)
>>> # to save our modified po file:
>>> # po.save()
>>> # or you may want to compile the po file
>>> # po.save_as_mofile('tests/test_utf8.mo')
"""
# }}}
__author__ = 'David JEAN LOUIS <izimobil@gmail.com>'
__version__ = '0.3.1'
# dependencies {{{
try:
import struct
import textwrap
import warnings
except ImportError, exc:
raise ImportError('polib requires python 2.3 or later with the standard' \
' modules "struct", "textwrap" and "warnings" (details: %s)' % exc)
# }}}
__all__ = ['pofile', 'POFile', 'POEntry', 'mofile', 'MOFile', 'MOEntry',
'detect_encoding', 'quote', 'unquote']
# shortcuts for performance improvement {{{
# yes, yes, this is quite ugly but *very* efficient
_dictget = dict.get
_listappend = list.append
_listpop = list.pop
_strjoin = str.join
_strsplit = str.split
_strstrip = str.strip
_strreplace = str.replace
_textwrap = textwrap.wrap
# }}}
default_encoding = 'utf-8'
def pofile(fpath, **kwargs):
    """
    Convenience function that parse the po/pot file *fpath* and return
    a POFile instance.
    **Keyword arguments**:
    - *fpath*: string, full or relative path to the po/pot file to parse
    - *wrapwidth*: integer, the wrap width, only useful when -w option was
      passed to xgettext (optional, default to 78)
    - *autodetect_encoding*: boolean, if set to False the function will
      not try to detect the po file encoding (optional, default to True)
    - *encoding*: string, an encoding, only relevant if autodetect_encoding
      is set to False
    **Example**:
    >>> import polib
    >>> po = polib.pofile('tests/test_utf8.po')
    >>> po #doctest: +ELLIPSIS
    <POFile instance at ...>
    >>> import os, tempfile
    >>> for fname in ['test_iso-8859-15.po', 'test_utf8.po']:
    ...     orig_po = polib.pofile('tests/'+fname)
    ...     tmpf = tempfile.NamedTemporaryFile().name
    ...     orig_po.save(tmpf)
    ...     try:
    ...         new_po = polib.pofile(tmpf)
    ...         for old, new in zip(orig_po, new_po):
    ...             if old.msgid != new.msgid:
    ...                 old.msgid
    ...                 new.msgid
    ...             if old.msgstr != new.msgstr:
    ...                 old.msgid
    ...                 new.msgid
    ...     finally:
    ...         os.unlink(tmpf)
    """
    # pofile {{{
    # Resolve encoding first: sniff the file unless the caller disabled it.
    if _dictget(kwargs, 'autodetect_encoding', True) == True:
        enc = detect_encoding(fpath)
    else:
        enc = _dictget(kwargs, 'encoding', default_encoding)
    parser = _POFileParser(fpath)
    instance = parser.parse()
    instance.wrapwidth = _dictget(kwargs, 'wrapwidth', 78)
    instance.encoding = enc
    return instance
    # }}}
def mofile(fpath, **kwargs):
    """
    Convenience function that parse the mo file *fpath* and return
    a MOFile instance.
    **Keyword arguments**:
    - *fpath*: string, full or relative path to the mo file to parse
    - *wrapwidth*: integer, the wrap width, only useful when -w option was
      passed to xgettext to generate the po file that was used to format
      the mo file (optional, default to 78)
    - *autodetect_encoding*: boolean, if set to False the function will
      not try to detect the po file encoding (optional, default to True)
    - *encoding*: string, an encoding, only relevant if autodetect_encoding
      is set to False
    **Example**:
    >>> import polib
    >>> mo = polib.mofile('tests/test_utf8.mo')
    >>> mo #doctest: +ELLIPSIS
    <MOFile instance at ...>
    >>> import os, tempfile
    >>> for fname in ['test_iso-8859-15.mo', 'test_utf8.mo']:
    ...     orig_mo = polib.mofile('tests/'+fname)
    ...     tmpf = tempfile.NamedTemporaryFile().name
    ...     orig_mo.save(tmpf)
    ...     try:
    ...         new_mo = polib.mofile(tmpf)
    ...         for old, new in zip(orig_mo, new_mo):
    ...             if old.msgid != new.msgid:
    ...                 old.msgstr
    ...                 new.msgstr
    ...     finally:
    ...         os.unlink(tmpf)
    """
    # mofile {{{
    # Resolve encoding first: sniff the file unless the caller disabled it.
    if _dictget(kwargs, 'autodetect_encoding', True) == True:
        enc = detect_encoding(fpath)
    else:
        enc = _dictget(kwargs, 'encoding', default_encoding)
    parser = _MOFileParser(fpath)
    instance = parser.parse()
    instance.wrapwidth = _dictget(kwargs, 'wrapwidth', 78)
    instance.encoding = enc
    return instance
    # }}}
def detect_encoding(fpath):
    """
    Try to detect the encoding used by the file *fpath*. The function will
    return polib default *encoding* if it's unable to detect it.
    **Keyword argument**:
    - *fpath*: string, full or relative path to the mo file to parse.
    **Examples**:
    >>> print detect_encoding('tests/test_noencoding.po')
    utf-8
    >>> print detect_encoding('tests/test_utf8.po')
    UTF-8
    >>> print detect_encoding('tests/test_utf8.mo')
    UTF-8
    >>> print detect_encoding('tests/test_iso-8859-15.po')
    ISO_8859-15
    >>> print detect_encoding('tests/test_iso-8859-15.mo')
    ISO_8859-15
    """
    # detect_encoding {{{
    import re
    rx = re.compile(r'"?Content-Type:.+? charset=([\w_\-:\.]+)')
    f = open(fpath)
    # Fix: the previous version leaked the file handle if iterating or
    # matching raised; try/finally guarantees it is closed on every path.
    try:
        for l in f:
            match = rx.search(l)
            if match:
                # First matching Content-Type header wins.
                return _strstrip(match.group(1))
    finally:
        f.close()
    return default_encoding
    # }}}
def quote(st):
    """
    Quote and return the given string *st*: backslash, tab, carriage
    return, newline and double quote are escaped gettext-style.
    **Examples**:
    >>> quote('\\t and \\n and \\r and " and \\\\')
    '\\\\t and \\\\n and \\\\r and \\\\" and \\\\\\\\'
    """
    # quote {{{
    # Idiom fix: call str.replace directly instead of going through the
    # module-level _strreplace alias. Backslash must be escaped first so
    # later replacements do not double-escape it.
    st = st.replace('\\', r'\\')
    st = st.replace('\t', r'\t')
    st = st.replace('\r', r'\r')
    st = st.replace('\n', r'\n')
    st = st.replace('\"', r'\"')
    return st
    # }}}
def unquote(st):
    """
    Unquote and return the given string *st*: the escape sequences
    produced by :func:`quote` are converted back to their characters.
    **Examples**:
    >>> unquote('\\\\t and \\\\n and \\\\r and \\\\" and \\\\\\\\')
    '\\t and \\n and \\r and " and \\\\'
    """
    # unquote {{{
    # Idiom fix: call str.replace directly instead of going through the
    # module-level _strreplace alias. The escaped backslash is restored
    # last, mirroring the reverse of quote()'s replacement order.
    st = st.replace(r'\"', '"')
    st = st.replace(r'\n', '\n')
    st = st.replace(r'\r', '\r')
    st = st.replace(r'\t', '\t')
    st = st.replace(r'\\', '\\')
    return st
    # }}}
class _BaseFile(list):
    """
    Common parent class for POFile and MOFile classes.
    This class must **not** be instanciated directly.

    The file *is* a list: entries are stored directly in the instance.
    """
    # class _BaseFile {{{
    def __init__(self, fpath=None, wrapwidth=78, encoding=default_encoding):
        """
        Constructor.
        **Keyword arguments**:
        - *fpath*: string, path to po or mo file
        - *wrapwidth*: integer, the wrap width, only useful when -w option
          was passed to xgettext to generate the po file that was used to
          format the mo file, default to 78 (optional).
        - *encoding*: string, the file encoding (default: polib default).
        """
        list.__init__(self)
        # the opened file handle
        self.fpath = fpath
        # the width at which lines should be wrapped
        self.wrapwidth = wrapwidth
        # the file encoding
        self.encoding = encoding
        # header
        self.header = ''
        # both po and mo files have metadata
        self.metadata = {}
        self.metadata_is_fuzzy = 0
    def __str__(self):
        """String representation of the file: metadata entry first, then
        active entries, then obsolete entries."""
        ret = []
        entries = [self.metadata_as_entry()] + \
                  [e for e in self if not e.obsolete]
        for entry in entries:
            _listappend(ret, entry.__str__(self.wrapwidth))
        for entry in self.obsolete_entries():
            _listappend(ret, entry.__str__(self.wrapwidth))
        return _strjoin('\n', ret)
    def __repr__(self):
        """Return the official string representation of the object."""
        return '<%s instance at %x>' % (self.__class__.__name__, id(self))
    def metadata_as_entry(self):
        """Return the metadata as an entry (the empty-msgid header entry)."""
        e = POEntry(msgid='')
        mdata = self.ordered_metadata()
        if mdata:
            strs = []
            for name, value in mdata:
                # Strip whitespace off each line in a multi-line entry
                value = _strjoin('\n', [_strstrip(v)
                                        for v in _strsplit(value, '\n')])
                _listappend(strs, '%s: %s' % (name, value))
            e.msgstr = _strjoin('\n', strs) + '\n'
        return e
    def save(self, fpath=None, repr_method='__str__'):
        """
        Save the po file to file *fpath* if no file handle exists for
        the object. If there's already an open file and no fpath is
        provided, then the existing file is rewritten with the modified
        data.
        **Keyword arguments**:
        - *fpath*: string, full or relative path to the file.
        - *repr_method*: string, the method to use for output
          ('__str__' for text, 'to_binary' for mo output).
        """
        if self.fpath is None and fpath is None:
            raise IOError('You must provide a file path to save() method')
        contents = getattr(self, repr_method)()
        if fpath is None:
            fpath = self.fpath
        mode = 'w'
        # Binary mode is required for mo output.
        if repr_method == 'to_binary':
            mode += 'b'
        fhandle = open(fpath, mode)
        fhandle.write(contents)
        fhandle.close()
    def find(self, st, by='msgid'):
        """
        Find entry which msgid (or property identified by the *by*
        attribute) matches the string *st*. Returns None when no entry
        matches.
        **Keyword arguments**:
        - *st*: string, the string to search for
        - *by*: string, the comparison attribute
        **Examples**:
        >>> po = pofile('tests/test_utf8.po')
        >>> entry = po.find('Thursday')
        >>> entry.msgstr
        'Jueves'
        >>> entry = po.find('Some unexistant msgid')
        >>> entry is None
        True
        >>> entry = po.find('Jueves', 'msgstr')
        >>> entry.msgid
        'Thursday'
        """
        try:
            return [e for e in self if getattr(e, by) == st][0]
        except IndexError:
            return None
    def ordered_metadata(self):
        """
        Convenience method that return the metadata ordered. The return
        value is list of tuples (metadata name, metadata_value).
        Well-known headers come first in canonical order; the rest follow
        in alphabetical order.
        """
        # copy the dict first
        metadata = self.metadata.copy()
        data_order = [
            'Project-Id-Version',
            'Report-Msgid-Bugs-To',
            'POT-Creation-Date',
            'PO-Revision-Date',
            'Last-Translator',
            'Language-Team',
            'MIME-Version',
            'Content-Type',
            'Content-Transfer-Encoding'
        ]
        ordered_data = []
        for data in data_order:
            try:
                value = metadata.pop(data)
                _listappend(ordered_data, (data, value))
            except KeyError:
                pass
        # the rest of the metadata won't be ordered there are no specs for this
        keys = metadata.keys()
        keys.sort()
        for data in keys:
            value = metadata[data]
            _listappend(ordered_data, (data, value))
        return ordered_data
    def to_binary(self):
        """Return the mofile binary representation (GNU mo format)."""
        import struct
        import array
        output = ''
        offsets = []
        ids = strs = ''
        entries = self.translated_entries()
        # the keys are sorted in the .mo file
        def cmp(_self, other):
            if _self.msgid > other.msgid:
                return 1
            elif _self.msgid < other.msgid:
                return -1
            else:
                return 0
        entries.sort(cmp)
        # add metadata entry
        mentry = self.metadata_as_entry()
        mentry.msgstr = _strreplace(mentry.msgstr, '\\n', '').lstrip() + '\n'
        entries = [mentry] + entries
        entries_len = len(entries)
        for e in entries:
            # For each string, we need size and file offset. Each string is
            # NUL terminated; the NUL does not count into the size.
            msgid = e._decode(e.msgid)
            msgstr = e._decode(e.msgstr)
            offsets.append((len(ids), len(msgid), len(strs), len(msgstr)))
            ids += msgid + '\0'
            strs += msgstr + '\0'
        # The header is 7 32-bit unsigned integers.
        keystart = 7*4+16*entries_len
        # and the values start after the keys
        valuestart = keystart + len(ids)
        koffsets = []
        voffsets = []
        # The string table first has the list of keys, then the list of values.
        # Each entry has first the size of the string, then the file offset.
        for o1, l1, o2, l2 in offsets:
            koffsets += [l1, o1+keystart]
            voffsets += [l2, o2+valuestart]
        offsets = koffsets + voffsets
        output = struct.pack("Iiiiiii",
                             0x950412de,        # Magic number
                             0,                 # Version
                             entries_len,       # # of entries
                             7*4,               # start of key index
                             7*4+entries_len*8, # start of value index
                             0, 0)              # size and offset of hash table
        output += array.array("i", offsets).tostring()
        output += ids
        output += strs
        return output
    # }}}
class POFile(_BaseFile):
'''
Po (or Pot) file reader/writer.
POFile objects inherit the list objects methods.
**Example**:
>>> po = POFile()
>>> entry1 = POEntry(
... msgid="Some english text",
... msgstr="Un texte en anglais"
... )
>>> entry1.occurrences = [('testfile', 12),('another_file', 1)]
>>> entry1.comment = "Some useful comment"
>>> entry2 = POEntry(
... msgid="I need my dirty cheese",
... msgstr="Je veux mon sale fromage"
... )
>>> entry2.occurrences = [('testfile', 15),('another_file', 5)]
>>> entry2.comment = "Another useful comment"
>>> entry3 = POEntry(
... msgid='Some entry with quotes " \\"',
... msgstr=u'Un message unicode avec des quotes " \\"'
... )
>>> entry3.comment = "Test string quoting"
>>> po.append(entry1)
>>> po.append(entry2)
>>> po.append(entry3)
>>> po.header = "Some Header"
>>> print po
# Some Header
msgid ""
msgstr ""
<BLANKLINE>
#. Some useful comment
#: testfile:12 another_file:1
msgid "Some english text"
msgstr "Un texte en anglais"
<BLANKLINE>
#. Another useful comment
#: testfile:15 another_file:5
msgid "I need my dirty cheese"
msgstr "Je veux mon sale fromage"
<BLANKLINE>
#. Test string quoting
msgid "Some entry with quotes \\" \\""
msgstr "Un message unicode avec des quotes \\" \\""
<BLANKLINE>
'''
# class POFile {{{
def __str__(self):
"""Return the string representation of the po file"""
ret, headers = '', _strsplit(self.header, '\n')
for header in headers:
if header[:1] in [',', ':']:
ret += '#%s\n' % header
else:
ret += '# %s\n' % header
return ret + _BaseFile.__str__(self)
def save_as_mofile(self, fpath):
"""
Save the binary representation of the file to *fpath*.
**Keyword arguments**:
- *fpath*: string, full or relative path to the file.
"""
_BaseFile.save(self, fpath, 'to_binary')
def percent_translated(self):
"""
Convenience method that return the percentage of translated
messages.
**Example**:
>>> import polib
>>> po = polib.pofile('tests/test_pofile_helpers.po')
>>> po.percent_translated()
50
>>> po = POFile()
>>> po.percent_translated()
100
"""
total = len([e for e in self if not e.obsolete])
if total == 0:
return 100
translated = len(self.translated_entries())
return int((100.00 / float(total)) * translated)
def translated_entries(self):
"""
Convenience method that return a list of translated entries.
**Example**:
>>> import polib
>>> po = polib.pofile('tests/test_pofile_helpers.po')
>>> len(po.translated_entries())
6
"""
return [e for e in self if e.translated() and not e.obsolete]
def untranslated_entries(self):
"""
Convenience method that return a list of untranslated entries.
**Example**:
>>> import polib
>>> po = polib.pofile('tests/test_pofile_helpers.po')
>>> len(po.untranslated_entries())
6
"""
return [e for e in self if not e.translated() and not e.obsolete]
def fuzzy_entries(self):
"""
Convenience method that return the list of 'fuzzy' entries.
**Example**:
>>> import polib
>>> po = polib.pofile('tests/test_pofile_helpers.po')
>>> len(po.fuzzy_entries())
2
"""
return [e for e in self if 'fuzzy' in e.flags]
def obsolete_entries(self):
"""
Convenience method that return the list of obsolete entries.
**Example**:
>>> import polib
>>> po = polib.pofile('tests/test_pofile_helpers.po')
>>> len(po.obsolete_entries())
4
"""
return [e for e in self if e.obsolete]
def merge(self, refpot):
"""
XXX this could not work if encodings are different, needs thinking
and general refactoring of how polib handles encoding...
Convenience method that merge the current pofile with the pot file
provided. It behaves exactly as the gettext msgmerge utility:
- comments of this file will be preserved, but extracted comments
and occurrences will be discarded
- any translations or comments in the file will be discarded,
however dot comments and file positions will be preserved
**Keyword argument**:
- *refpot*: object POFile, the reference catalog.
**Example**:
>>> import polib
>>> refpot = polib.pofile('tests/test_merge.pot')
>>> po = polib.pofile('tests/test_merge_before.po')
>>> po.merge(refpot)
>>> expected_po = polib.pofile('tests/test_merge_after.po')
>>> str(po) == str(expected_po)
True
"""
for entry in refpot:
e = self.find(entry.msgid)
if e is None:
# entry is not in the po file, we must add it
# entry is created with msgid, occurrences and comment
self.append(POEntry(
msgid=entry.msgid,
occurrences=entry.occurrences,
comment=entry.comment
))
else:
# entry found, we update it...
e.occurrences = entry.occurrences
e.comment = entry.comment
# ok, now we must "obsolete" entries that are not in the refpot
# anymore
for entry in self:
if refpot.find(entry.msgid) is None:
entry.obsolete = True
# }}}
class MOFile(_BaseFile):
'''
Mo file reader/writer.
MOFile objects inherit the list objects methods.
**Example**:
>>> mo = MOFile()
>>> entry1 = POEntry(
... msgid="Some english text",
... msgstr="Un texte en anglais"
... )
>>> entry2 = POEntry(
... msgid="I need my dirty cheese",
... msgstr="Je veux mon sale fromage"
... )
>>> entry3 = MOEntry(
... msgid='Some entry with quotes " \\"',
... msgstr=u'Un message unicode avec des quotes " \\"'
... )
>>> mo.append(entry1)
>>> mo.append(entry2)
>>> mo.append(entry3)
>>> print mo
msgid ""
msgstr ""
<BLANKLINE>
msgid "Some english text"
msgstr "Un texte en anglais"
<BLANKLINE>
msgid "I need my dirty cheese"
msgstr "Je veux mon sale fromage"
<BLANKLINE>
msgid "Some entry with quotes \\" \\""
msgstr "Un message unicode avec des quotes \\" \\""
<BLANKLINE>
'''
# class MOFile {{{
def __init__(self, fpath=None, wrapwidth=78):
"""
MOFile constructor.
See _BaseFile.__construct.
"""
_BaseFile.__init__(self, fpath, wrapwidth)
self.magic_number = None
self.version = 0
def save_as_pofile(self, fpath):
"""
Save the string representation of the file to *fpath*.
**Keyword argument**:
- *fpath*: string, full or relative path to the file.
"""
_BaseFile.save(self, fpath)
def save(self, fpath):
"""
Save the binary representation of the file to *fpath*.
**Keyword argument**:
|
[
" - *fpath*: string, full or relative path to the file."
] | 2,528
|
lcc
|
python
| null |
a80d3f058a72bdd5747b39d44c4b55204d02f090d3568cee
|
|
""" Runs few integrity checks
"""
__RCSID__ = "$Id$"
from DIRAC import S_OK, S_ERROR, gLogger
from DIRAC.Core.Base.AgentModule import AgentModule
from DIRAC.Core.Utilities.List import sortList
from DIRAC.ConfigurationSystem.Client.Helpers.Operations import Operations
from DIRAC.DataManagementSystem.Client.DataIntegrityClient import DataIntegrityClient
from DIRAC.Resources.Catalog.FileCatalog import FileCatalog
from DIRAC.Resources.Catalog.FileCatalogClient import FileCatalogClient
from DIRAC.TransformationSystem.Client.TransformationClient import TransformationClient
import re
AGENT_NAME = 'Transformation/ValidateOutputDataAgent'
class ValidateOutputDataAgent( AgentModule ):
def __init__( self, *args, **kwargs ):
""" c'tor
"""
AgentModule.__init__( self, *args, **kwargs )
self.integrityClient = DataIntegrityClient()
self.fc = FileCatalog()
self.transClient = TransformationClient()
self.fileCatalogClient = FileCatalogClient()
agentTSTypes = self.am_getOption( 'TransformationTypes', [] )
if agentTSTypes:
self.transformationTypes = agentTSTypes
else:
self.transformationTypes = Operations().getValue( 'Transformations/DataProcessing', ['MCSimulation', 'Merge'] )
self.directoryLocations = sortList( self.am_getOption( 'DirectoryLocations', ['TransformationDB',
'MetadataCatalog'] ) )
self.activeStorages = sortList( self.am_getOption( 'ActiveSEs', [] ) )
self.transfidmeta = self.am_getOption( 'TransfIDMeta', "TransformationID" )
self.enableFlag = True
#############################################################################
def initialize( self ):
""" Sets defaults
"""
# This sets the Default Proxy to used as that defined under
# /Operations/Shifter/DataManager
# the shifterProxy option in the Configuration can be used to change this default.
self.am_setOption( 'shifterProxy', 'DataManager' )
gLogger.info( "Will treat the following transformation types: %s" % str( self.transformationTypes ) )
gLogger.info( "Will search for directories in the following locations: %s" % str( self.directoryLocations ) )
gLogger.info( "Will check the following storage elements: %s" % str( self.activeStorages ) )
gLogger.info( "Will use %s as metadata tag name for TransformationID" % self.transfidmeta )
return S_OK()
#############################################################################
def execute( self ):
""" The VerifyOutputData execution method
"""
self.enableFlag = self.am_getOption( 'EnableFlag', 'True' )
if not self.enableFlag == 'True':
self.log.info( "VerifyOutputData is disabled by configuration option 'EnableFlag'" )
return S_OK( 'Disabled via CS flag' )
gLogger.info( "-" * 40 )
self.updateWaitingIntegrity()
gLogger.info( "-" * 40 )
res = self.transClient.getTransformations( {'Status':'ValidatingOutput', 'Type':self.transformationTypes} )
if not res['OK']:
gLogger.error( "Failed to get ValidatingOutput transformations", res['Message'] )
return res
transDicts = res['Value']
if not transDicts:
gLogger.info( "No transformations found in ValidatingOutput status" )
return S_OK()
gLogger.info( "Found %s transformations in ValidatingOutput status" % len( transDicts ) )
for transDict in transDicts:
transID = transDict['TransformationID']
res = self.checkTransformationIntegrity( int( transID ) )
if not res['OK']:
gLogger.error( "Failed to perform full integrity check for transformation %d" % transID )
else:
self.finalizeCheck( transID )
gLogger.info( "-" * 40 )
return S_OK()
def updateWaitingIntegrity( self ):
""" Get 'WaitingIntegrity' transformations, update to 'ValidatedOutput'
"""
gLogger.info( "Looking for transformations in the WaitingIntegrity status to update" )
res = self.transClient.getTransformations( {'Status':'WaitingIntegrity'} )
if not res['OK']:
gLogger.error( "Failed to get WaitingIntegrity transformations", res['Message'] )
return res
transDicts = res['Value']
if not transDicts:
gLogger.info( "No transformations found in WaitingIntegrity status" )
return S_OK()
gLogger.info( "Found %s transformations in WaitingIntegrity status" % len( transDicts ) )
for transDict in transDicts:
transID = transDict['TransformationID']
gLogger.info( "-" * 40 )
res = self.integrityClient.getTransformationProblematics( int( transID ) )
if not res['OK']:
gLogger.error( "Failed to determine waiting problematics for transformation", res['Message'] )
elif not res['Value']:
res = self.transClient.setTransformationParameter( transID, 'Status', 'ValidatedOutput' )
if not res['OK']:
gLogger.error( "Failed to update status of transformation %s to ValidatedOutput" % ( transID ) )
else:
gLogger.info( "Updated status of transformation %s to ValidatedOutput" % ( transID ) )
else:
gLogger.info( "%d problematic files for transformation %s were found" % ( len( res['Value'] ), transID ) )
return
#############################################################################
#
# Get the transformation directories for checking
#
def getTransformationDirectories( self, transID ):
""" Get the directories for the supplied transformation from the transformation system
"""
directories = []
if 'TransformationDB' in self.directoryLocations:
res = self.transClient.getTransformationParameters( transID, ['OutputDirectories'] )
if not res['OK']:
gLogger.error( "Failed to obtain transformation directories", res['Message'] )
return res
transDirectories = res['Value'].splitlines()
directories = self._addDirs( transID, transDirectories, directories )
if 'MetadataCatalog' in self.directoryLocations:
res = self.fileCatalogClient.findDirectoriesByMetadata( {self.transfidmeta:transID} )
if not res['OK']:
gLogger.error( "Failed to obtain metadata catalog directories", res['Message'] )
return res
transDirectories = res['Value']
directories = self._addDirs( transID, transDirectories, directories )
if not directories:
gLogger.info( "No output directories found" )
directories = sortList( directories )
return S_OK( directories )
@staticmethod
def _addDirs( transID, newDirs, existingDirs ):
for nDir in newDirs:
transStr = str( transID ).zfill( 8 )
if re.search( transStr, nDir ):
if not nDir in existingDirs:
existingDirs.append( nDir )
return existingDirs
#############################################################################
def checkTransformationIntegrity( self, transID ):
""" This method contains the real work
"""
gLogger.info( "-" * 40 )
gLogger.info( "Checking the integrity of transformation %s" % transID )
gLogger.info( "-" * 40 )
res = self.getTransformationDirectories( transID )
if not res['OK']:
return res
directories = res['Value']
if not directories:
return S_OK()
######################################################
#
# This check performs Catalog->SE for possible output directories
#
res = self.fc.exists( directories )
if not res['OK']:
gLogger.error( res['Message'] )
return res
for directory, error in res['Value']['Failed']:
gLogger.error( 'Failed to determine existance of directory', '%s %s' % ( directory, error ) )
if res['Value']['Failed']:
return S_ERROR( "Failed to determine the existance of directories" )
directoryExists = res['Value']['Successful']
for directory in sortList( directoryExists.keys() ):
if not directoryExists[directory]:
continue
iRes = self.integrityClient.catalogDirectoryToSE( directory )
if not iRes['OK']:
gLogger.error( iRes['Message'] )
return iRes
######################################################
#
# This check performs SE->Catalog for possible output directories
#
for storageElementName in sortList( self.activeStorages ):
res = self.integrityClient.storageDirectoryToCatalog( directories, storageElementName )
if not res['OK']:
gLogger.error( res['Message'] )
return res
gLogger.info( "-" * 40 )
gLogger.info( "Completed integrity check for transformation %s" % transID )
return S_OK()
def finalizeCheck( self, transID ):
""" Move to 'WaitingIntegrity' or 'ValidatedOutput'
"""
res = self.integrityClient.getTransformationProblematics( int( transID ) )
|
[
" if not res['OK']:"
] | 873
|
lcc
|
python
| null |
924bc05657c826f5fa04974021bf21c5946ff1d3a39ea9ef
|
|
/* This code is part of Freenet. It is distributed under the GNU General
* Public License, version 2 (or at your option any later version). See
* http://www.gnu.org/ for further details of the GPL. */
package freenet.client.filter;
import freenet.client.filter.HTMLFilter.ParsedTag;
import freenet.clients.http.ExternalLinkToadlet;
import freenet.clients.http.HTTPRequestImpl;
import freenet.clients.http.StaticToadlet;
import freenet.keys.FreenetURI;
import freenet.l10n.NodeL10n;
import freenet.support.*;
import freenet.support.Logger.LogLevel;
import freenet.support.api.HTTPRequest;
import java.io.UnsupportedEncodingException;
import java.net.MalformedURLException;
import java.net.URI;
import java.net.URISyntaxException;
import java.net.URLEncoder;
import java.nio.charset.Charset;
import java.util.HashSet;
import java.util.regex.Pattern;
public class GenericReadFilterCallback implements FilterCallback, URIProcessor {
public static final HashSet<String> allowedProtocols;
static {
allowedProtocols = new HashSet<String>();
allowedProtocols.add("http");
allowedProtocols.add("https");
allowedProtocols.add("ftp");
allowedProtocols.add("mailto");
allowedProtocols.add("nntp");
allowedProtocols.add("news");
allowedProtocols.add("snews");
allowedProtocols.add("about");
allowedProtocols.add("irc");
// file:// ?
}
private URI baseURI;
private URI strippedBaseURI;
private final FoundURICallback cb;
private final TagReplacerCallback trc;
/** Provider for link filter exceptions. */
private final LinkFilterExceptionProvider linkFilterExceptionProvider;
private static volatile boolean logMINOR;
static {
Logger.registerLogThresholdCallback(new LogThresholdCallback(){
@Override
public void shouldUpdate(){
logMINOR = Logger.shouldLog(LogLevel.MINOR, this);
}
});
}
public GenericReadFilterCallback(URI uri, FoundURICallback cb,TagReplacerCallback trc, LinkFilterExceptionProvider linkFilterExceptionProvider) {
this.baseURI = uri;
this.cb = cb;
this.trc=trc;
this.linkFilterExceptionProvider = linkFilterExceptionProvider;
setStrippedURI(uri.toString());
}
public GenericReadFilterCallback(FreenetURI uri, FoundURICallback cb,TagReplacerCallback trc, LinkFilterExceptionProvider linkFilterExceptionProvider) {
try {
this.baseURI = uri.toRelativeURI();
setStrippedURI(baseURI.toString());
this.cb = cb;
this.trc=trc;
this.linkFilterExceptionProvider = linkFilterExceptionProvider;
} catch (URISyntaxException e) {
throw new Error(e);
}
}
private void setStrippedURI(String u) {
int idx = u.lastIndexOf('/');
if(idx > 0) {
u = u.substring(0, idx+1);
try {
strippedBaseURI = new URI(u);
} catch (URISyntaxException e) {
Logger.error(this, "Can't strip base URI: "+e+" parsing "+u);
strippedBaseURI = baseURI;
}
} else
strippedBaseURI = baseURI;
}
@Override
public String processURI(String u, String overrideType) throws CommentException {
return processURI(u, overrideType, false, false);
}
// RFC3986
// unreserved = ALPHA / DIGIT / "-" / "." / "_" / "~"
protected static final String UNRESERVED = "[a-zA-Z0-9\\-\\._~]";
// pct-encoded = "%" HEXDIG HEXDIG
protected static final String PCT_ENCODED = "(?:%[0-9A-Fa-f][0-9A-Fa-f])";
// sub-delims = "!" / "$" / "&" / "'" / "(" / ")"
// / "*" / "+" / "," / ";" / "="
protected static final String SUB_DELIMS = "[\\!\\$&'\\(\\)\\*\\+,;=]";
// pchar = unreserved / pct-encoded / sub-delims / ":" / "@"
protected static final String PCHAR = "(?>" + UNRESERVED + "|" + PCT_ENCODED + "|" + SUB_DELIMS + "|[:@])";
// fragment = *( pchar / "/" / "?" )
protected static final String FRAGMENT = "(?>" + PCHAR + "|\\/|\\?)*";
private static final Pattern anchorRegex;
static {
anchorRegex = Pattern.compile("^#" + FRAGMENT + "$");
}
@Override
public String processURI(String u, String overrideType, boolean forBaseHref, boolean inline) throws CommentException {
if(anchorRegex.matcher(u).matches()) {
// Hack for anchors, see #710
return u;
}
boolean noRelative = forBaseHref;
// evil hack, see #2451 and r24565,r24566
u = u.replaceAll(" #", " %23");
URI uri;
URI resolved;
try {
if(logMINOR) Logger.minor(this, "Processing "+u);
uri = URIPreEncoder.encodeURI(u).normalize();
if(logMINOR) Logger.minor(this, "Processing "+uri);
if(u.startsWith("/") || u.startsWith("%2f"))
// Don't bother with relative URIs if it's obviously absolute.
// Don't allow encoded /'s, they're just too confusing (here they would get decoded and then coalesced with other slashes).
noRelative = true;
if(!noRelative)
resolved = baseURI.resolve(uri);
else
resolved = uri;
if(logMINOR) Logger.minor(this, "Resolved: "+resolved);
} catch (URISyntaxException e1) {
if(logMINOR) Logger.minor(this, "Failed to parse URI: "+e1);
throw new CommentException(l10n("couldNotParseURIWithError", "error", e1.getMessage()));
}
String path = uri.getPath();
HTTPRequest req = new HTTPRequestImpl(uri, "GET");
if (path != null) {
if (path.equals("/") && req.isParameterSet("newbookmark") && !forBaseHref) {
// allow links to the root to add bookmarks
String bookmark_key = req.getParam("newbookmark");
String bookmark_desc = req.getParam("desc");
String bookmark_activelink = req.getParam("hasAnActivelink", "");
try {
FreenetURI furi = new FreenetURI(bookmark_key);
bookmark_key = furi.toString();
bookmark_desc = URLEncoder.encode(bookmark_desc, "UTF-8");
} catch (UnsupportedEncodingException e) {
// impossible, UTF-8 is always supported
} catch (MalformedURLException e) {
throw new CommentException("Invalid Freenet URI: " + e);
}
String url = "/?newbookmark="+bookmark_key+"&desc="+bookmark_desc;
if (bookmark_activelink.equals("true")) {
url = url + "&hasAnActivelink=true";
}
return url;
} else if(path.startsWith(StaticToadlet.ROOT_URL)) {
// @see bug #2297
return path;
} else if (linkFilterExceptionProvider != null) {
if (linkFilterExceptionProvider.isLinkExcepted(uri)) {
return path + ((uri.getQuery() != null) ? ("?" + uri.getQuery()) : "");
}
}
}
String reason = l10n("deletedURI");
// Try as an absolute URI
URI origURI = uri;
// Convert localhost uri's to relative internal ones.
String host = uri.getHost();
if(host != null && (host.equals("localhost") || host.equals("127.0.0.1")) && uri.getPort() == 8888) {
try {
uri = new URI(null, null, null, -1, uri.getPath(), uri.getQuery(), uri.getFragment());
} catch (URISyntaxException e) {
Logger.error(this, "URI "+uri+" looked like localhost but could not parse", e);
throw new CommentException("URI looked like localhost but could not parse: "+e);
}
host = null;
}
String rpath = uri.getPath();
if(logMINOR) Logger.minor(this, "Path: \""+path+"\" rpath: \""+rpath+"\"");
if(host == null) {
boolean isAbsolute = false;
if(rpath != null) {
if(logMINOR) Logger.minor(this, "Resolved URI (rpath absolute): \""+rpath+"\"");
// Valid FreenetURI?
try {
String p = rpath;
while(p.startsWith("/")) {
p = p.substring(1);
}
FreenetURI furi = new FreenetURI(p, true);
isAbsolute = true;
if(logMINOR) Logger.minor(this, "Parsed: "+furi);
return processURI(furi, uri, overrideType, true, inline);
} catch (MalformedURLException e) {
// Not a FreenetURI
if(logMINOR) Logger.minor(this, "Malformed URL (a): "+e, e);
if(e.getMessage() != null) {
reason = l10n("malformedAbsoluteURL", "error", e.getMessage());
} else {
reason = l10n("couldNotParseAbsoluteFreenetURI");
}
}
}
if((!isAbsolute) && (!forBaseHref)) {
// Relative URI
rpath = resolved.getPath();
if(rpath == null) throw new CommentException("No URI");
if(logMINOR) Logger.minor(this, "Resolved URI (rpath relative): "+rpath);
// Valid FreenetURI?
try {
String p = rpath;
while(p.startsWith("/")) p = p.substring(1);
FreenetURI furi = new FreenetURI(p, true);
if(logMINOR) Logger.minor(this, "Parsed: "+furi);
return processURI(furi, uri, overrideType, forBaseHref, inline);
} catch (MalformedURLException e) {
if(logMINOR) Logger.minor(this, "Malformed URL (b): "+e, e);
if(e.getMessage() != null) {
reason = l10n("malformedRelativeURL", "error", e.getMessage());
} else {
reason = l10n("couldNotParseRelativeFreenetURI");
}
}
}
}
uri = origURI;
if(forBaseHref)
throw new CommentException(l10n("bogusBaseHref"));
if(GenericReadFilterCallback.allowedProtocols.contains(uri.getScheme()))
return ExternalLinkToadlet.escape(uri.toString());
else {
if(uri.getScheme() == null) {
throw new CommentException(reason);
}
throw new CommentException(l10n("protocolNotEscaped", "protocol", uri.getScheme()));
}
}
@Override
public String makeURIAbsolute(String uri) throws URISyntaxException{
return baseURI.resolve(URIPreEncoder.encodeURI(uri).normalize()).toASCIIString();
}
private static String l10n(String key, String pattern, String value) {
return NodeL10n.getBase().getString("GenericReadFilterCallback."+key, pattern, value);
}
private static String l10n(String key) {
return NodeL10n.getBase().getString("GenericReadFilterCallback."+key);
}
private String finishProcess(HTTPRequest req, String overrideType, String path, URI u, boolean noRelative) {
String typeOverride = req.getParam("type", null);
if(overrideType != null)
typeOverride = overrideType;
if(typeOverride != null) {
String[] split = HTMLFilter.splitType(typeOverride);
if(split[1] != null) {
String charset = split[1];
if(charset != null) {
try {
charset = URLDecoder.decode(charset, false);
} catch (URLEncodedFormatException e) {
charset = null;
}
}
if(charset != null && charset.indexOf('&') != -1)
charset = null;
if(charset != null && !Charset.isSupported(charset))
charset = null;
if(charset != null)
typeOverride = split[0]+"; charset="+charset;
else
typeOverride = split[0];
}
}
// REDFLAG any other options we should support?
// Obviously we don't want to support ?force= !!
// At the moment, ?type= and ?force= are the only options supported by FProxy anyway.
try {
// URI encoding issues: FreenetURI.toString() does URLEncode'ing of critical components.
// So if we just pass it in to the component-wise constructor, we end up encoding twice,
// so get %2520 for a space.
// However, we want to support encoded slashes or @'s in the path, so we don't want to
// just decode before feeding it to the constructor. It looks like the best option is
// to construct it ourselves and then re-parse it. This is doing unnecessary work, it
// would be much easier if we had a component-wise constructor for URI that didn't
// re-encode, but at least it works...
StringBuilder sb = new StringBuilder();
if(strippedBaseURI.getScheme() != null && !noRelative) {
sb.append(strippedBaseURI.getScheme());
sb.append("://");
sb.append(strippedBaseURI.getAuthority());
assert(path.startsWith("/"));
}
sb.append(path);
if(typeOverride != null) {
sb.append("?type=");
sb.append(freenet.support.URLEncoder.encode(typeOverride, "", false, "="));
}
if(u.getFragment() != null) {
sb.append('#');
sb.append(u.getRawFragment());
}
URI uri = new URI(sb.toString());
if(!noRelative)
uri = strippedBaseURI.relativize(uri);
if(logMINOR)
Logger.minor(this, "Returning "+uri.toASCIIString()+" from "+path+" from baseURI="+baseURI+" stripped base uri="+strippedBaseURI.toString());
return uri.toASCIIString();
} catch (URISyntaxException e) {
Logger.error(this, "Could not parse own URI: path="+path+", typeOverride="+typeOverride+", frag="+u.getFragment()+" : "+e, e);
String p = path;
if(typeOverride != null)
p += "?type="+typeOverride;
if(u.getFragment() != null){
try{
// FIXME encode it properly
p += URLEncoder.encode(u.getFragment(),"UTF-8");
}catch (UnsupportedEncodingException e1){
throw new Error("Impossible: JVM doesn't support UTF-8: " + e, e);
}
}
return p;
}
}
private String processURI(FreenetURI furi, URI uri, String overrideType, boolean noRelative, boolean inline) {
// Valid Freenet URI, allow it
// Now what about the queries?
HTTPRequest req = new HTTPRequestImpl(uri, "GET");
if(cb != null) cb.foundURI(furi);
if(cb != null) cb.foundURI(furi, inline);
return finishProcess(req, overrideType, '/' + furi.toString(false, false), uri, noRelative);
}
@Override
public String onBaseHref(String baseHref) {
String ret;
try {
ret = processURI(baseHref, null, true, false);
} catch (CommentException e1) {
Logger.error(this, "Failed to parse base href: "+baseHref+" -> "+e1.getMessage());
ret = null;
}
if(ret == null) {
Logger.error(this, "onBaseHref() failed: cannot sanitize "+baseHref);
return null;
} else {
try {
baseURI = new URI(ret);
setStrippedURI(ret);
} catch (URISyntaxException e) {
throw new Error(e); // Impossible
}
return baseURI.toASCIIString();
}
}
@Override
public void onText(String s, String type) {
if(cb != null)
cb.onText(s, type, baseURI);
}
static final String PLUGINS_PREFIX = "/plugins/";
/**
* Process a form.
* Current strategy:
* - Both POST and GET forms are allowed to /
* Anything that is hazardous should be protected through formPassword.
* @throws CommentException If the form element could not be parsed and the user should be told.
*/
@Override
public String processForm(String method, String action) throws CommentException {
if(action == null) return null;
if(method == null) method = "GET";
method = method.toUpperCase();
if(!(method.equals("POST") || method.equals("GET")))
return null; // no irregular form sending methods
// FIXME what about /downloads/ /friends/ etc?
// Allow access to Library for searching, form passwords are used for actions such as adding bookmarks
if(action.equals("/library/"))
return action;
try {
URI uri = URIPreEncoder.encodeURI(action);
if(uri.getScheme() != null || uri.getHost() != null || uri.getPort() != -1 || uri.getUserInfo() != null)
throw new CommentException(l10n("invalidFormURI"));
|
[
"\t\t\tString path = uri.getPath();"
] | 1,604
|
lcc
|
java
| null |
e19ebd2a302575d8fb0c11be6536df7e0face2889546ed8d
|
|
# This file is part of Buildbot. Buildbot is free software: you can
# redistribute it and/or modify it under the terms of the GNU General Public
# License as published by the Free Software Foundation, version 2.
#
# This program is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
# FOR A PARTICULAR PURPOSE. See the GNU General Public License for more
# details.
#
# You should have received a copy of the GNU General Public License along with
# this program; if not, write to the Free Software Foundation, Inc., 51
# Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
#
# Copyright Buildbot Team Members
from __future__ import absolute_import
from __future__ import print_function
from future.builtins import range
import warnings
from mock import Mock
from mock import call
from twisted.internet import defer
from twisted.trial import unittest
from buildbot.process.results import FAILURE
from buildbot.process.results import RETRY
from buildbot.process.results import SUCCESS
from buildbot.reporters import utils
from buildbot.reporters.gerrit import GERRIT_LABEL_REVIEWED
from buildbot.reporters.gerrit import GERRIT_LABEL_VERIFIED
from buildbot.reporters.gerrit import GerritStatusPush
from buildbot.reporters.gerrit import defaultReviewCB
from buildbot.reporters.gerrit import defaultSummaryCB
from buildbot.reporters.gerrit import makeReviewResult
from buildbot.test.fake import fakemaster
from buildbot.test.util.reporter import ReporterTestMixin
warnings.filterwarnings('error', message='.*Gerrit status')
def sampleReviewCB(builderName, build, result, status, arg):
verified = 1 if result == SUCCESS else -1
return makeReviewResult(str({'name': builderName, 'result': result}),
(GERRIT_LABEL_VERIFIED, verified))
@defer.inlineCallbacks
def sampleReviewCBDeferred(builderName, build, result, status, arg):
verified = 1 if result == SUCCESS else -1
result = yield makeReviewResult(str({'name': builderName, 'result': result}),
(GERRIT_LABEL_VERIFIED, verified))
defer.returnValue(result)
def sampleStartCB(builderName, build, arg):
return makeReviewResult(str({'name': builderName}),
(GERRIT_LABEL_REVIEWED, 0))
@defer.inlineCallbacks
def sampleStartCBDeferred(builderName, build, arg):
result = yield makeReviewResult(str({'name': builderName}),
(GERRIT_LABEL_REVIEWED, 0))
defer.returnValue(result)
def sampleSummaryCB(buildInfoList, results, status, arg):
success = False
failure = False
for buildInfo in buildInfoList:
if buildInfo['result'] == SUCCESS: # pylint: disable=simplifiable-if-statement
success = True
else:
failure = True
if failure:
verified = -1
elif success:
verified = 1
else:
verified = 0
return makeReviewResult(str(buildInfoList),
(GERRIT_LABEL_VERIFIED, verified))
@defer.inlineCallbacks
def sampleSummaryCBDeferred(buildInfoList, results, master, arg):
success = False
failure = False
for buildInfo in buildInfoList:
if buildInfo['result'] == SUCCESS: # pylint: disable=simplifiable-if-statement
success = True
else:
failure = True
if failure:
verified = -1
elif success:
verified = 1
else:
verified = 0
result = yield makeReviewResult(str(buildInfoList),
(GERRIT_LABEL_VERIFIED, verified))
defer.returnValue(result)
def legacyTestReviewCB(builderName, build, result, status, arg):
msg = str({'name': builderName, 'result': result})
return (msg, 1 if result == SUCCESS else -1, 0)
def legacyTestSummaryCB(buildInfoList, results, status, arg):
success = False
failure = False
for buildInfo in buildInfoList:
if buildInfo['result'] == SUCCESS: # pylint: disable=simplifiable-if-statement
success = True
else:
failure = True
if failure:
verified = -1
elif success:
verified = 1
else:
verified = 0
return (str(buildInfoList), verified, 0)
class TestGerritStatusPush(unittest.TestCase, ReporterTestMixin):
def setUp(self):
self.master = fakemaster.make_master(testcase=self,
wantData=True, wantDb=True, wantMq=True)
@defer.inlineCallbacks
def setupGerritStatusPushSimple(self, *args, **kwargs):
serv = kwargs.pop("server", "serv")
username = kwargs.pop("username", "user")
gsp = GerritStatusPush(serv, username, *args, **kwargs)
yield gsp.setServiceParent(self.master)
yield gsp.startService()
defer.returnValue(gsp)
    @defer.inlineCallbacks
    def setupGerritStatusPush(self, *args, **kwargs):
        # Like setupGerritStatusPushSimple, but with sendCodeReview replaced
        # by a Mock so tests can assert on the review that would be sent.
        gsp = yield self.setupGerritStatusPushSimple(*args, **kwargs)
        gsp.sendCodeReview = Mock()
        defer.returnValue(gsp)
    @defer.inlineCallbacks
    def setupBuildResults(self, buildResults, finalResult):
        # Insert fake build/buildset rows and read them back through the data
        # API, the way the reporter itself would see them. Returns
        # (buildset, builds) for buildset 98.
        self.insertTestData(buildResults, finalResult)
        res = yield utils.getDetailsForBuildset(self.master, 98, wantProperties=True)
        builds = res['builds']
        buildset = res['buildset']

        @defer.inlineCallbacks
        def getChangesForBuild(buildid):
            # Only build 20 is expected here; pretend it came from change 13.
            assert buildid == 20
            ch = yield self.master.db.changes.getChange(13)
            defer.returnValue([ch])

        # Monkeypatch the fake db so the reporter can map builds to changes.
        self.master.db.changes.getChangesForBuild = getChangesForBuild
        defer.returnValue((buildset, builds))
def makeBuildInfo(self, buildResults, resultText, builds):
info = []
for i in range(len(buildResults)):
info.append({'name': u"Builder%d" % i, 'result': buildResults[i],
'resultText': resultText[i], 'text': u'buildText',
'url': "http://localhost:8080/#builders/%d/builds/%d" % (79 + i, i),
'build': builds[i]})
return info
    @defer.inlineCallbacks
    def run_fake_summary_build(self, gsp, buildResults, finalResult,
                               resultText, expWarning=False):
        # Drive a complete buildset-complete notification through the
        # reporter and return the stringified build-info list the summary
        # callback is expected to have seen.
        buildset, builds = yield self.setupBuildResults(buildResults, finalResult)
        yield gsp.buildsetComplete('buildset.98.complete'.split("."),
                                   buildset)
        info = self.makeBuildInfo(buildResults, resultText, builds)
        if expWarning:
            # Legacy callbacks emit a deprecation warning; verify its text.
            self.assertEqual([w['message'] for w in self.flushWarnings()],
                             ['The Gerrit status callback uses the old '
                              'way to communicate results. The outcome '
                              'might be not what is expected.'])
        defer.returnValue(str(info))
# check_summary_build and check_summary_build_legacy differ in two things:
# * the callback used
# * the expected result
    @defer.inlineCallbacks
    def check_summary_build_deferred(self, buildResults, finalResult, resultText,
                                     verifiedScore):
        # Run a summary build using the deferred-returning callback and check
        # the review that would have been pushed to Gerrit.
        gsp = yield self.setupGerritStatusPush(summaryCB=sampleSummaryCBDeferred)
        msg = yield self.run_fake_summary_build(gsp, buildResults, finalResult,
                                                resultText)
        result = makeReviewResult(msg,
                                  (GERRIT_LABEL_VERIFIED, verifiedScore))
        gsp.sendCodeReview.assert_called_once_with(self.TEST_PROJECT,
                                                   self.TEST_REVISION,
                                                   result)
    @defer.inlineCallbacks
    def check_summary_build(self, buildResults, finalResult, resultText,
                            verifiedScore):
        # Run a summary build with the plain (synchronous) callback and check
        # the review that would have been pushed to Gerrit.
        gsp = yield self.setupGerritStatusPush(summaryCB=sampleSummaryCB)
        msg = yield self.run_fake_summary_build(gsp, buildResults, finalResult,
                                                resultText)
        result = makeReviewResult(msg,
                                  (GERRIT_LABEL_VERIFIED, verifiedScore))
        gsp.sendCodeReview.assert_called_once_with(self.TEST_PROJECT,
                                                   self.TEST_REVISION,
                                                   result)
    @defer.inlineCallbacks
    def check_summary_build_legacy(self, buildResults, finalResult, resultText,
                                   verifiedScore):
        # Same as check_summary_build, but with the legacy tuple-returning
        # callback: a deprecation warning is expected, and the review carries
        # an extra Code-Review 0 label.
        gsp = yield self.setupGerritStatusPush(summaryCB=legacyTestSummaryCB)
        msg = yield self.run_fake_summary_build(gsp, buildResults, finalResult,
                                                resultText, expWarning=True)
        result = makeReviewResult(msg,
                                  (GERRIT_LABEL_VERIFIED, verifiedScore),
                                  (GERRIT_LABEL_REVIEWED, 0))
        gsp.sendCodeReview.assert_called_once_with(self.TEST_PROJECT,
                                                   self.TEST_REVISION,
                                                   result)
    @defer.inlineCallbacks
    def test_gerrit_ssh_cmd(self):
        # _gerritCmd builds the ssh command line; '-i <identity_file>' must be
        # inserted only when an identity file is configured.
        kwargs = {
            'server': 'example.com',
            'username': 'buildbot',
        }
        without_identity = yield self.setupGerritStatusPush(**kwargs)
        expected1 = [
            'ssh', 'buildbot@example.com', '-p', '29418', 'gerrit', 'foo']
        self.assertEqual(expected1, without_identity._gerritCmd('foo'))
        yield without_identity.disownServiceParent()
        with_identity = yield self.setupGerritStatusPush(
            identity_file='/path/to/id_rsa', **kwargs)
        expected2 = [
            'ssh', '-i', '/path/to/id_rsa', 'buildbot@example.com', '-p', '29418',
            'gerrit', 'foo',
        ]
        self.assertEqual(expected2, with_identity._gerritCmd('foo'))
def test_buildsetComplete_success_sends_summary_review_deferred(self):
d = self.check_summary_build_deferred(buildResults=[SUCCESS, SUCCESS],
finalResult=SUCCESS,
resultText=[
"succeeded", "succeeded"],
verifiedScore=1)
return d
def test_buildsetComplete_success_sends_summary_review(self):
d = self.check_summary_build(buildResults=[SUCCESS, SUCCESS],
finalResult=SUCCESS,
resultText=["succeeded", "succeeded"],
verifiedScore=1)
return d
def test_buildsetComplete_failure_sends_summary_review(self):
d = self.check_summary_build(buildResults=[FAILURE, FAILURE],
finalResult=FAILURE,
resultText=["failed", "failed"],
verifiedScore=-1)
return d
def test_buildsetComplete_mixed_sends_summary_review(self):
d = self.check_summary_build(buildResults=[SUCCESS, FAILURE],
finalResult=FAILURE,
resultText=["succeeded", "failed"],
verifiedScore=-1)
return d
def test_buildsetComplete_success_sends_summary_review_legacy(self):
d = self.check_summary_build_legacy(buildResults=[SUCCESS, SUCCESS],
finalResult=SUCCESS,
resultText=[
"succeeded", "succeeded"],
verifiedScore=1)
return d
def test_buildsetComplete_failure_sends_summary_review_legacy(self):
d = self.check_summary_build_legacy(buildResults=[FAILURE, FAILURE],
finalResult=FAILURE,
resultText=["failed", "failed"],
verifiedScore=-1)
return d
def test_buildsetComplete_mixed_sends_summary_review_legacy(self):
d = self.check_summary_build_legacy(buildResults=[SUCCESS, FAILURE],
finalResult=FAILURE,
resultText=["succeeded", "failed"],
verifiedScore=-1)
return d
    @defer.inlineCallbacks
    def test_buildsetComplete_filtered_builder(self):
        # With a builder filter that matches no builder, no review is sent.
        gsp = yield self.setupGerritStatusPush(summaryCB=sampleSummaryCB)
        gsp.builders = ["foo"]
        yield self.run_fake_summary_build(gsp, [FAILURE, FAILURE], FAILURE,
                                          ["failed", "failed"])
        self.assertFalse(
            gsp.sendCodeReview.called, "sendCodeReview should not be called")
    @defer.inlineCallbacks
    def test_buildsetComplete_filtered_matching_builder(self):
        # A builder filter matching one of the builds still sends a review.
        gsp = yield self.setupGerritStatusPush(summaryCB=sampleSummaryCB)
        gsp.builders = ["Builder1"]
        yield self.run_fake_summary_build(gsp, [FAILURE, FAILURE], FAILURE,
                                          ["failed", "failed"])
        self.assertTrue(
            gsp.sendCodeReview.called, "sendCodeReview should be called")
    @defer.inlineCallbacks
    def run_fake_single_build(self, gsp, buildResult, expWarning=False):
        # Drive buildStarted/buildComplete for a single build and return the
        # stringified info dict the review callback is expected to have seen.
        buildset, builds = yield self.setupBuildResults([buildResult], buildResult)
        yield gsp.buildStarted(None, builds[0])
        yield gsp.buildComplete(None, builds[0])
        if expWarning:
            # Legacy callbacks emit a deprecation warning; verify its text.
            self.assertEqual([w['message'] for w in self.flushWarnings()],
                             ['The Gerrit status callback uses the old '
                              'way to communicate results. The outcome '
                              'might be not what is expected.'])
        defer.returnValue(str({'name': u'Builder0', 'result': buildResult}))
# same goes for check_single_build and check_single_build_legacy
@defer.inlineCallbacks
def check_single_build(self, buildResult, verifiedScore):
|
[
" gsp = yield self.setupGerritStatusPush(reviewCB=sampleReviewCB,"
] | 958
|
lcc
|
python
| null |
6dad6c68d75d75e242d0bbeb5900a85521954edf3aeb5975
|
|
#!/usr/bin/python
# -*-*- encoding: utf-8 -*-*-
#
# Copyright (C) 2006 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
__author__ = 'j.s@google.com (Jeff Scudder)'
import sys
import unittest
try:
from xml.etree import ElementTree
except ImportError:
from elementtree import ElementTree
import atom
from gdata import test_data
import gdata.test_config as conf
class AuthorTest(unittest.TestCase):
    """Round-trip and extension-handling tests for atom.Author.

    Uses the modern unittest assertion API: the bare ``assert_`` alias is
    deprecated and was removed in Python 3.12.
    """

    def setUp(self):
        self.author = atom.Author()

    def testEmptyAuthorShouldHaveEmptyExtensionsList(self):
        # A fresh Author starts with an empty (but real) extension list.
        self.assertIsInstance(self.author.extension_elements, list)
        self.assertEqual(len(self.author.extension_elements), 0)

    def testNormalAuthorShouldHaveNoExtensionElements(self):
        self.author.name = atom.Name(text='Jeff Scudder')
        self.assertEqual(self.author.name.text, 'Jeff Scudder')
        self.assertEqual(len(self.author.extension_elements), 0)
        new_author = atom.AuthorFromString(self.author.ToString())
        # Fixed: the round-tripped copy (not the original) must also have no
        # extension elements.
        self.assertEqual(len(new_author.extension_elements), 0)
        self.author.extension_elements.append(atom.ExtensionElement(
            'foo', text='bar'))
        self.assertEqual(len(self.author.extension_elements), 1)
        self.assertEqual(self.author.name.text, 'Jeff Scudder')
        new_author = atom.AuthorFromString(self.author.ToString())
        # Fixed: check that the parsed copy kept the extension element.
        self.assertEqual(len(new_author.extension_elements), 1)
        self.assertEqual(new_author.name.text, 'Jeff Scudder')

    def testEmptyAuthorToAndFromStringShouldMatch(self):
        string_from_author = self.author.ToString()
        new_author = atom.AuthorFromString(string_from_author)
        string_from_new_author = new_author.ToString()
        self.assertEqual(string_from_author, string_from_new_author)

    def testAuthorWithNameToAndFromStringShouldMatch(self):
        self.author.name = atom.Name()
        self.author.name.text = 'Jeff Scudder'
        string_from_author = self.author.ToString()
        new_author = atom.AuthorFromString(string_from_author)
        string_from_new_author = new_author.ToString()
        self.assertEqual(string_from_author, string_from_new_author)
        self.assertEqual(self.author.name.text, new_author.name.text)

    def testExtensionElements(self):
        self.author.extension_attributes['foo1'] = 'bar'
        self.author.extension_attributes['foo2'] = 'rab'
        self.assertEqual(self.author.extension_attributes['foo1'], 'bar')
        self.assertEqual(self.author.extension_attributes['foo2'], 'rab')
        new_author = atom.AuthorFromString(self.author.ToString())
        self.assertEqual(new_author.extension_attributes['foo1'], 'bar')
        self.assertEqual(new_author.extension_attributes['foo2'], 'rab')

    def testConvertFullAuthorToAndFromString(self):
        author = atom.AuthorFromString(test_data.TEST_AUTHOR)
        self.assertEqual(author.name.text, 'John Doe')
        self.assertEqual(author.email.text, 'johndoes@someemailadress.com')
        self.assertEqual(author.uri.text, 'http://www.google.com')
class EmailTest(unittest.TestCase):
    """Round-trip test for atom.Email (assert_ replaced: removed in 3.12)."""

    def setUp(self):
        self.email = atom.Email()

    def testEmailToAndFromString(self):
        self.email.text = 'This is a test'
        new_email = atom.EmailFromString(self.email.ToString())
        self.assertEqual(self.email.text, new_email.text)
        self.assertEqual(self.email.extension_elements,
                         new_email.extension_elements)
class NameTest(unittest.TestCase):
    """Round-trip and attribute tests for atom.Name (modern assertions)."""

    def setUp(self):
        self.name = atom.Name()

    def testEmptyNameToAndFromStringShouldMatch(self):
        string_from_name = self.name.ToString()
        new_name = atom.NameFromString(string_from_name)
        string_from_new_name = new_name.ToString()
        self.assertEqual(string_from_name, string_from_new_name)

    def testText(self):
        self.assertIsNone(self.name.text)
        self.name.text = 'Jeff Scudder'
        self.assertEqual(self.name.text, 'Jeff Scudder')
        new_name = atom.NameFromString(self.name.ToString())
        self.assertEqual(new_name.text, self.name.text)

    def testExtensionElements(self):
        self.name.extension_attributes['foo'] = 'bar'
        self.assertEqual(self.name.extension_attributes['foo'], 'bar')
        new_name = atom.NameFromString(self.name.ToString())
        self.assertEqual(new_name.extension_attributes['foo'], 'bar')
class ExtensionElementTest(unittest.TestCase):
    """Parsing and round-trip tests for atom.ExtensionElement."""

    def setUp(self):
        self.ee = atom.ExtensionElement('foo')

    def testEmptyEEShouldProduceEmptyString(self):
        # TODO: this test was never implemented; it currently asserts nothing.
        pass

    def testEEParsesTreeCorrectly(self):
        deep_tree = atom.ExtensionElementFromString(test_data.EXTENSION_TREE)
        self.assertEqual(deep_tree.tag, 'feed')
        self.assertEqual(deep_tree.namespace, 'http://www.w3.org/2005/Atom')
        self.assertEqual(deep_tree.children[0].tag, 'author')
        self.assertEqual(deep_tree.children[0].namespace,
                         'http://www.google.com')
        self.assertEqual(deep_tree.children[0].children[0].tag, 'name')
        self.assertEqual(deep_tree.children[0].children[0].namespace,
                         'http://www.google.com')
        self.assertEqual(
            deep_tree.children[0].children[0].text.strip(), 'John Doe')
        self.assertEqual(
            deep_tree.children[0].children[0].children[0].text.strip(), 'Bar')
        foo = deep_tree.children[0].children[0].children[0]
        self.assertEqual(foo.tag, 'foo')
        self.assertEqual(foo.namespace, 'http://www.google.com')
        self.assertEqual(foo.attributes['up'], 'down')
        self.assertEqual(foo.attributes['yes'], 'no')
        self.assertEqual(foo.children, [])

    def testEEToAndFromStringShouldMatch(self):
        string_from_ee = self.ee.ToString()
        new_ee = atom.ExtensionElementFromString(string_from_ee)
        string_from_new_ee = new_ee.ToString()
        self.assertEqual(string_from_ee, string_from_new_ee)
        deep_tree = atom.ExtensionElementFromString(test_data.EXTENSION_TREE)
        string_from_deep_tree = deep_tree.ToString()
        new_deep_tree = atom.ExtensionElementFromString(string_from_deep_tree)
        string_from_new_deep_tree = new_deep_tree.ToString()
        self.assertEqual(string_from_deep_tree, string_from_new_deep_tree)
class LinkTest(unittest.TestCase):
    """Round-trip and constructor tests for atom.Link (modern assertions)."""

    def setUp(self):
        self.link = atom.Link()

    def testLinkToAndFromString(self):
        self.link.href = 'test href'
        self.link.hreflang = 'english'
        self.link.type = 'text/html'
        self.link.extension_attributes['foo'] = 'bar'
        self.assertEqual(self.link.href, 'test href')
        self.assertEqual(self.link.hreflang, 'english')
        self.assertEqual(self.link.type, 'text/html')
        self.assertEqual(self.link.extension_attributes['foo'], 'bar')
        new_link = atom.LinkFromString(self.link.ToString())
        self.assertEqual(self.link.href, new_link.href)
        self.assertEqual(self.link.type, new_link.type)
        self.assertEqual(self.link.hreflang, new_link.hreflang)
        self.assertEqual(self.link.extension_attributes['foo'],
                         new_link.extension_attributes['foo'])

    def testLinkType(self):
        test_link = atom.Link(link_type='text/html')
        self.assertEqual(test_link.type, 'text/html')
class GeneratorTest(unittest.TestCase):
    """Round-trip test for atom.Generator (modern assertions)."""

    def setUp(self):
        self.generator = atom.Generator()

    def testGeneratorToAndFromString(self):
        self.generator.uri = 'www.google.com'
        self.generator.version = '1.0'
        self.generator.extension_attributes['foo'] = 'bar'
        self.assertEqual(self.generator.uri, 'www.google.com')
        self.assertEqual(self.generator.version, '1.0')
        self.assertEqual(self.generator.extension_attributes['foo'], 'bar')
        new_generator = atom.GeneratorFromString(self.generator.ToString())
        self.assertEqual(self.generator.uri, new_generator.uri)
        self.assertEqual(self.generator.version, new_generator.version)
        self.assertEqual(self.generator.extension_attributes['foo'],
                         new_generator.extension_attributes['foo'])
class TitleTest(unittest.TestCase):
    """Round-trip test for atom.Title, including an XML-escaped character."""

    def setUp(self):
        self.title = atom.Title()

    def testTitleToAndFromString(self):
        self.title.type = 'text'
        self.title.text = 'Less: <'
        self.assertEqual(self.title.type, 'text')
        self.assertEqual(self.title.text, 'Less: <')
        new_title = atom.TitleFromString(self.title.ToString())
        self.assertEqual(self.title.type, new_title.type)
        self.assertEqual(self.title.text, new_title.text)
class SubtitleTest(unittest.TestCase):
    """Round-trip test for atom.Subtitle, including an XML-escaped '&'."""

    def setUp(self):
        self.subtitle = atom.Subtitle()

    def testTitleToAndFromString(self):
        self.subtitle.type = 'text'
        self.subtitle.text = 'sub & title'
        self.assertEqual(self.subtitle.type, 'text')
        self.assertEqual(self.subtitle.text, 'sub & title')
        new_subtitle = atom.SubtitleFromString(self.subtitle.ToString())
        self.assertEqual(self.subtitle.type, new_subtitle.type)
        self.assertEqual(self.subtitle.text, new_subtitle.text)
class SummaryTest(unittest.TestCase):
    """Round-trip test for atom.Summary (modern assertions)."""

    def setUp(self):
        self.summary = atom.Summary()

    def testTitleToAndFromString(self):
        self.summary.type = 'text'
        self.summary.text = 'Less: <'
        self.assertEqual(self.summary.type, 'text')
        self.assertEqual(self.summary.text, 'Less: <')
        new_summary = atom.SummaryFromString(self.summary.ToString())
        self.assertEqual(self.summary.type, new_summary.type)
        self.assertEqual(self.summary.text, new_summary.text)
class CategoryTest(unittest.TestCase):
def setUp(self):
|
[
" self.category = atom.Category()"
] | 629
|
lcc
|
python
| null |
0bad08e092a4b3c97228cda53b001b7ddfd62dc3a8be077b
|
|
package org.checkerframework.checker.igj;
import org.checkerframework.checker.igj.qual.AssignsFields;
import org.checkerframework.checker.igj.qual.I;
import org.checkerframework.checker.igj.qual.Immutable;
import org.checkerframework.checker.igj.qual.Mutable;
import org.checkerframework.checker.igj.qual.ReadOnly;
import org.checkerframework.common.basetype.BaseAnnotatedTypeFactory;
import org.checkerframework.common.basetype.BaseTypeChecker;
import org.checkerframework.framework.type.AnnotatedTypeFactory;
import org.checkerframework.framework.type.AnnotatedTypeMirror;
import org.checkerframework.framework.type.AnnotatedTypeMirror.AnnotatedArrayType;
import org.checkerframework.framework.type.AnnotatedTypeMirror.AnnotatedDeclaredType;
import org.checkerframework.framework.type.AnnotatedTypeMirror.AnnotatedExecutableType;
import org.checkerframework.framework.type.AnnotatedTypeMirror.AnnotatedTypeVariable;
import org.checkerframework.framework.type.AnnotatedTypeMirror.AnnotatedWildcardType;
import org.checkerframework.framework.type.DefaultTypeHierarchy;
import org.checkerframework.framework.type.QualifierHierarchy;
import org.checkerframework.framework.type.TypeHierarchy;
import org.checkerframework.framework.type.treeannotator.ListTreeAnnotator;
import org.checkerframework.framework.type.treeannotator.TreeAnnotator;
import org.checkerframework.framework.type.typeannotator.ListTypeAnnotator;
import org.checkerframework.framework.type.typeannotator.TypeAnnotator;
import org.checkerframework.framework.type.visitor.AnnotatedTypeScanner;
import org.checkerframework.framework.type.visitor.SimpleAnnotatedTypeVisitor;
import org.checkerframework.framework.type.visitor.VisitHistory;
import org.checkerframework.framework.util.AnnotatedTypes;
import org.checkerframework.framework.util.GraphQualifierHierarchy;
import org.checkerframework.framework.util.MultiGraphQualifierHierarchy.MultiGraphFactory;
import org.checkerframework.javacutil.AnnotationUtils;
import org.checkerframework.javacutil.ElementUtils;
import org.checkerframework.javacutil.ErrorReporter;
import org.checkerframework.javacutil.Pair;
import org.checkerframework.javacutil.TreeUtils;
import org.checkerframework.javacutil.TypesUtils;
import java.util.Collection;
import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Set;
import javax.lang.model.element.AnnotationMirror;
import javax.lang.model.element.Element;
import javax.lang.model.element.ElementKind;
import javax.lang.model.element.TypeElement;
import javax.lang.model.type.TypeKind;
import javax.lang.model.type.TypeVariable;
import com.sun.source.tree.ClassTree;
import com.sun.source.tree.ExpressionTree;
import com.sun.source.tree.MethodInvocationTree;
import com.sun.source.tree.NewClassTree;
import com.sun.source.tree.Tree;
import com.sun.source.tree.TypeCastTree;
/**
* Adds implicit and default IGJ annotations, only if the user does not
* annotate the type explicitly. The default annotations are designed
* to minimize the number of {@code Immutable} or {@code ReadOnly}
* appearing in the source code.
* <p>
*
* Implicit Annotations for literals:<br>
* Immutable - any primitive literal (e.g. integer, long, boolean, etc.)<br>
* IGJBottom - a null literal
* <p>
*
* However, due to the default setting being similar to the implicit
* annotations, there is no significant distinction between the two in
* implementation.
* <p>
*
* Default Annotations:
* <p>
*
* This factory will add the {@link Immutable} annotation to a type if the
* input is
* <ol>
* <li value="1">(*)a primitive type,
* <li value="2">a known immutable type, if the class type is annotated as
* {@code Immutable}
* </ol>
*
* It will add the {@link ReadOnly} annotation to a type if the input is
* <ol>
* <li value="3">a method receiver for an immutable class
* <li value="4">a result of unification of different immutabilities (e.g.
* within Conditional Expressions)
* <li value="5">supertype of a wildcard/type parameter in a class/method declaration
* </ol>
*
* It will add {@link IGJBottom}, a special bottom annotation to a type if
* the input can be assigned to anything, like the following cases:
* <ol>
* <li value="6">(*)the input is a {@code null} literal
* <li value="7">(*)the input is an unannotated new array tree
* <li value="8">the input is an unannotated new class tree invoking a constructor
* of {@code ReadOnly} or {@code AssignsFields} receiver type
* <li value="9">the input is the class or interface declaration
* </ol>
*
* It will add the {@link Mutable} annotation to a type if
* <ol>
* <li value="10">any remaining unqualified types (i.e. Mutable is the default)
* </ol>
*
* Implementation detail: (*) cases are handled with a meta-annotation
* rather than in this class.
* <p>
*
* Furthermore, it resolves {@link I} annotation to the proper annotation,
* according to its specification (described in {@link I} javadoc).
*/
//
// To ease dealing with libraries, this inserts the bottom qualifier
// rather than immutable in many cases, like all literals.
// Should change that
public class IGJAnnotatedTypeFactory extends BaseAnnotatedTypeFactory {
//
// IGJ tries to adhere to the various rules specified by the
// type system and the conventions of the framework, except for two
// things:
// 1. overloading the meaning of BOTTOM_QUAL
// Review the javadoc of #createQualiferHierarchy
//
// 2. Having two qualifiers for a given type in one particular case
// which is that the self type (i.e. type of 'this' identifier) within
// a method with an AssignsFields receiver within I classes, then the self type is
// '@AssignsFields @I EnclosingClass' and they are treated as
// Incomparable. This is useful in the following cases:
//
// a. for method invocability tests, a method with an AssignsFields receiver from within
// a readonly context can be called only via AssignsFields reference
// of 'this'. I cannot be a receiver type, so it doesn't interfere.
//
// b. for assignment, 'this' can be assigned to '@I EnclosingClass'
// reference within such methods (assignment encompasses the escape
// of this when passed to method parameters). Fields and variables
// cannot be AssignsFields, so it's safe.
//
// The design of QualifierHierarchy.isSubtype(Collection, Collection)
// reflect this choice.
//
    /** Cached mirrors of the supported IGJ qualifiers; used for subtyping rules. */
    protected final AnnotationMirror READONLY, MUTABLE, IMMUTABLE, I, ASSIGNS_FIELDS, BOTTOM_QUAL;
    /** The element name of the {@link I} annotation's value argument. */
    protected static final String IMMUTABILITY_KEY = "value";
    /**
     * Constructor for IGJAnnotatedTypeFactory object.
     *
     * @param checker the checker to which this factory belongs
     */
    public IGJAnnotatedTypeFactory(BaseTypeChecker checker) {
        super(checker);
        // Cache one AnnotationMirror per supported qualifier; these are
        // compared against throughout the factory.
        READONLY = AnnotationUtils.fromClass(elements, ReadOnly.class);
        MUTABLE = AnnotationUtils.fromClass(elements, Mutable.class);
        IMMUTABLE = AnnotationUtils.fromClass(elements, Immutable.class);
        I = AnnotationUtils.fromClass(elements, I.class);
        ASSIGNS_FIELDS = AnnotationUtils.fromClass(elements, AssignsFields.class);
        BOTTOM_QUAL = AnnotationUtils.fromClass(elements, IGJBottom.class);
        // Treat the equivalent JML and JCIP annotations as aliases.
        addAliasedAnnotation(org.jmlspecs.annotation.Immutable.class, IMMUTABLE);
        addAliasedAnnotation(org.jmlspecs.annotation.Readonly.class, READONLY);
        addAliasedAnnotation(net.jcip.annotations.Immutable.class, IMMUTABLE);
        // TODO: Add an alias for the Pure JML annotation. It's not a type qualifier, I think adding
        // it above does not work. Also see NullnessAnnotatedTypeFactory.
        // this.addAliasedDeclAnnotation(org.jmlspecs.annotation.Pure.class, Pure.class, annotationToUse);
        this.postInit();
    }
    @Override
    protected TreeAnnotator createTreeAnnotator() {
        // Run the framework's default tree annotator first, then the
        // IGJ-specific pre-annotator.
        return new ListTreeAnnotator(
                super.createTreeAnnotator(),
                new IGJTreePreAnnotator(this)
        );
    }
    @Override
    protected TypeAnnotator createTypeAnnotator() {
        // The IGJ post-annotator runs before the framework defaults, so its
        // qualifiers are applied first.
        return new ListTypeAnnotator(
                new IGJTypePostAnnotator(this),
                super.createTypeAnnotator()
        );
    }
    // TODO: do store annotations into the Element -> remove this override
    // Currently, many test cases fail without this.
    @Override
    public void postProcessClassTree(ClassTree tree) {
        // Intentionally empty: skips the default element-annotation storage.
    }
// **********************************************************************
// add implicit annotations
// **********************************************************************
    /**
     * Helper class for annotating unannotated types.
     *
     * Applies the default IGJ qualifiers described in the class comment to
     * declared, executable and wildcard types that carry no explicit
     * immutability annotation.
     */
    private class IGJTypePostAnnotator extends TypeAnnotator {
        public IGJTypePostAnnotator(IGJAnnotatedTypeFactory atypeFactory) {
            super(atypeFactory);
        }

        /**
         * For Declared types:
         * Classes are mutable
         * Interface declaration are placeholders
         * Enum and annotations are immutable
         */
        @Override
        public Void visitDeclared(AnnotatedDeclaredType type, Void p) {
            // Only fill in a qualifier when the user wrote none.
            if (!hasImmutabilityAnnotation(type)) {
                // Actual element
                TypeElement element = (TypeElement)type.getUnderlyingType().asElement();
                AnnotatedDeclaredType elementType = fromElement(element);
                // ElementKind elemKind = elem != null ? elem.getKind() : ElementKind.OTHER;
                if (TypesUtils.isBoxedPrimitive(type.getUnderlyingType())
                        || element.getQualifiedName().contentEquals("java.lang.String")
                        || ElementUtils.isObject(element)) {
                    // variation of case 1
                    // TODO: These cases are more of hacks and they should
                    // really be immutable or readonly
                    type.addAnnotation(BOTTOM_QUAL);
                } else if (elementType.hasEffectiveAnnotation(IMMUTABLE)) {
                    // case 2: known immutable types
                    type.addAnnotation(IMMUTABLE);
                }
            }
            return null; //super.visitDeclared(type, p);
            /*
            if (!hasImmutabilityAnnotation(type)) {
                // Actual element
                TypeElement element = (TypeElement)type.getUnderlyingType().asElement();
                AnnotatedDeclaredType elementType = fromElement(element);
                // ElementKind elemKind = elem != null ? elem.getKind() : ElementKind.OTHER;
                if (TypesUtils.isBoxedPrimitive(type.getUnderlyingType())
                        || element.getQualifiedName().contentEquals("java.lang.String")
                        || ElementUtils.isObject(element)) {
                    // variation of case 1
                    // TODO: These cases are more of hacks and they should
                    // really be immutable or readonly
                    type.replaceAnnotation(BOTTOM_QUAL);
                } else if (elementType.hasEffectiveAnnotation(IMMUTABLE)) {
                    // case 2: known immutable types
                    type.replaceAnnotation(IMMUTABLE);
                //} else if (elemKind == ElementKind.LOCAL_VARIABLE) {
                //    type.replaceAnnotation(READONLY);
                } else if (elementType.hasEffectiveAnnotation(MUTABLE)) { // not immutable
                    // case 7: mutable by default
                    type.replaceAnnotation(MUTABLE);
                //} else if (elemKind.isClass() || elemKind.isInterface()) {
                //    // case 9: class or interface declaration
                //    type.replaceAnnotation(BOTTOM_QUAL);
                //} else if (elemKind.isField()) {
                /*
                    && type.getElement() != null // We don't know the field context here
                    && getAnnotatedType(ElementUtils.enclosingClass(type.getElement())).hasEffectiveAnnotation(IMMUTABLE)) {
                    type.replaceAnnotation(IMMUTABLE);
                TODO: This case is not exercised by any of the test cases. Is it needed?
                } else if (element.getKind().isClass() || element.getKind().isInterface()) {
                    // case 10
                    type.replaceAnnotation(MUTABLE);
                } else {
                    assert false : "shouldn't be here!";
                }
            }
            return super.visitDeclared(type, p);
            */
        }

        @Override
        public Void visitExecutable(AnnotatedExecutableType type, Void p) {
            AnnotatedDeclaredType receiver;
            // For constructors the "receiver" being defaulted is the
            // constructed object, i.e. the return type.
            if (type.getElement().getKind() == ElementKind.CONSTRUCTOR) {
                receiver = (AnnotatedDeclaredType) type.getReturnType();
            } else {
                receiver = type.getReceiverType();
            }
            // An explicitly annotated receiver is left untouched.
            if (receiver != null &&
                    hasImmutabilityAnnotation(receiver)) {
                return super.visitExecutable(type, p);
            }
            TypeElement ownerElement = ElementUtils.enclosingClass(type.getElement());
            AnnotatedDeclaredType ownerType = getAnnotatedType(ownerElement);
            if (type.getElement().getKind() == ElementKind.CONSTRUCTOR) {
                // TODO: hack
                // Constructors of mutable/bottom classes yield Mutable
                // objects; otherwise the constructed object is AssignsFields.
                if (ownerType.hasEffectiveAnnotation(MUTABLE) || ownerType.hasEffectiveAnnotation(BOTTOM_QUAL))
                    receiver.replaceAnnotation(MUTABLE);
                else
                    receiver.replaceAnnotation(ASSIGNS_FIELDS);
            } else if (receiver == null) {
                // Nothing to do for static methods.
            } else if (ElementUtils.isObject(ownerElement) || ownerType.hasEffectiveAnnotation(IMMUTABLE)) {
                // case 3
                receiver.replaceAnnotation(BOTTOM_QUAL);
            } else {
                // case 10: rest
                receiver.replaceAnnotation(MUTABLE);
            }
            return super.visitExecutable(type, p);
        }

        /*
        @Override
        public Void visitTypeVariable(AnnotatedTypeVariable type, Void p) {
            // In a declaration the upperbound is ReadOnly, while
            // the upper bound in a use is Mutable
            if (type.getUpperBoundField() != null
                    && !hasImmutabilityAnnotation(type.getUpperBoundField())) {
                // ElementKind elemKind = elem != null ? elem.getKind() : ElementKind.OTHER;
                /*if (elemKind.isClass() || elemKind.isInterface()
                        || elemKind == ElementKind.CONSTRUCTOR
                        || elemKind == ElementKind.METHOD)
                    // case 5: upper bound within a class/method declaration
                    type.getUpperBoundField().replaceAnnotation(READONLY);
                else* / if (TypesUtils.isObject(type.getUnderlyingType()))
                    // case 10: remaining cases
                    type.getUpperBoundField().replaceAnnotation(MUTABLE);
            }
            return super.visitTypeVariable(type, p);
        }
        */

        @Override
        public Void visitWildcard(AnnotatedWildcardType type, Void p) {
            // In a declaration the upper bound is ReadOnly, while
            // the upper bound in a use is Mutable
            if (type.getExtendsBound() != null
                    && !hasImmutabilityAnnotation(type.getExtendsBound())) {
                // ElementKind elemKind = elem != null ? elem.getKind() : ElementKind.OTHER;
                /*if (elemKind.isClass() || elemKind.isInterface()
                        || elemKind == ElementKind.CONSTRUCTOR
                        || elemKind == ElementKind.METHOD)
                    // case 5: upper bound within a class/method declaration
                    type.getExtendsBound().replaceAnnotation(READONLY);
                else*/ if (TypesUtils.isObject(type.getUnderlyingType()))
                    // case 10: remaining cases
                    type.getExtendsBound().replaceAnnotation(MUTABLE);
            }
            return super.visitWildcard(type, p);
        }
    }
    /**
     * Helper class to annotate trees.
     *
     * It only adds a BOTTOM_QUAL for new classes and new arrays,
     * when an annotation is not specified
     */
    private class IGJTreePreAnnotator extends TreeAnnotator {
        public IGJTreePreAnnotator(IGJAnnotatedTypeFactory atypeFactory) {
            super(atypeFactory);
        }

        @Override
        public Void visitNewClass(NewClassTree node, AnnotatedTypeMirror p) {
            /*
            if (node.getClassBody() != null) {
                System.out.println("Visit anonymous: " + node + " + input: " + p);
                AnnotatedTypeMirror tt = IGJAnnotatedTypeFactory.this.getAnnotatedType(node.getIdentifier());
                p.replaceAnnotations(tt.getAnnotations());
                System.out.println("   final type: " + p);
                // Is this the right way to handle anonymous classes?
            } else */
            // Only act when the 'new' expression carries no explicit
            // immutability qualifier.
            if (!hasImmutabilityAnnotation(p)) {
                AnnotatedTypeMirror ct = fromElement(
                        ((AnnotatedDeclaredType)p).getUnderlyingType().asElement());
                if (!hasImmutabilityAnnotation(ct) || ct.hasAnnotationRelaxed(I)) {
                    // Unannotated (or @I) class: derive the result's
                    // immutability from the invoked constructor's receiver.
                    AnnotatedExecutableType con = getAnnotatedType(TreeUtils.elementFromUse(node));
                    if (con.getReceiverType() != null &&
                            con.getReceiverType().hasEffectiveAnnotation(IMMUTABLE))
                        p.replaceAnnotation(IMMUTABLE);
                    else
                        p.replaceAnnotation(MUTABLE);
                } else {
                    // case 2: known immutability type
                    p.addAnnotations(ct.getAnnotations());
                }
            }
            return null;
        }

        @Override
        public Void visitTypeCast(TypeCastTree node, AnnotatedTypeMirror p) {
            // An unannotated cast inherits the qualifiers of the casted
            // expression.
            if (!hasImmutabilityAnnotation(p)) {
                AnnotatedTypeMirror castedType = getAnnotatedType(node.getExpression());
                p.addAnnotations(castedType.getAnnotations());
            }
            return null;
        }
    }
    @Override
    protected AnnotatedDeclaredType getImplicitReceiverType(ExpressionTree tree) {
        // NOTE(review): receivers other than the most-enclosing 'this' are
        // forced to ReadOnly — presumably because outer-class receivers may
        // not be mutated; confirm against the IGJ spec.
        AnnotatedDeclaredType receiver = super.getImplicitReceiverType(tree);
        if (receiver != null && !isMostEnclosingThisDeref(tree)) {
            receiver.replaceAnnotation(READONLY);
        }
        return receiver;
    }
    /**
     * Returns the type of field {@code this}, for the scope of this tree.
     * In IGJ, the self type is the method receiver in this scope.
     */
    @Override
    public AnnotatedDeclaredType getSelfType(Tree tree) {
        AnnotatedDeclaredType act = getCurrentClassType(tree);
        AnnotatedDeclaredType methodReceiver;
        if (isWithinConstructor(tree)) {
            // Within a constructor, 'this' has the constructor's return type.
            methodReceiver = (AnnotatedDeclaredType) getAnnotatedType(visitorState.getMethodTree()).getReturnType();
        } else {
            methodReceiver = getCurrentMethodReceiver(tree);
        }
        if (methodReceiver == null)
            return act;
        // Are we in a mutable or Immutable scope
        if (isWithinConstructor(tree) && !methodReceiver.hasEffectiveAnnotation(MUTABLE)) {
            methodReceiver.replaceAnnotation(ASSIGNS_FIELDS);
        }
        if (methodReceiver.hasEffectiveAnnotation(MUTABLE) ||
                methodReceiver.hasEffectiveAnnotation(IMMUTABLE)) {
            return methodReceiver;
        } else if (act.hasAnnotationRelaxed(I) || act.hasEffectiveAnnotation(IMMUTABLE)) {
            // @I / immutable classes keep the class type, transferring an
            // AssignsFields receiver onto it (see the class comment on the
            // dual self type).
            if (methodReceiver.hasEffectiveAnnotation(ASSIGNS_FIELDS))
                act.replaceAnnotation(ASSIGNS_FIELDS);
            return act;
        } else
            return methodReceiver;
    }
// **********************************************************************
// resolving @I Immutability
// **********************************************************************
/**
* Replace all instances of {@code @I} in the super types with the
* immutability of the current type
*
* @param type the type whose supertypes are requested
* @param supertypes the supertypes of type
*/
@Override
protected void postDirectSuperTypes(AnnotatedTypeMirror type,
List<? extends AnnotatedTypeMirror> supertypes) {
super.postDirectSuperTypes(type, supertypes);
Map<String, AnnotationMirror> templateMapping =
new ImmutabilityTemplateCollector().visit(type);
new ImmutabilityResolver().visit(supertypes, templateMapping);
for (AnnotatedTypeMirror supertype: supertypes) {
typeAnnotator.visit(supertype, null);
}
}
/**
* Resolve the instances of {@code @I} in the {@code elementType} based
* on {@code owner}, according to is specification.
*/
@Override
public void postAsMemberOf(AnnotatedTypeMirror elementType,
AnnotatedTypeMirror owner, Element element) {
resolveImmutabilityTypeVar(elementType, owner);
}
@Override
protected void annotateInheritedFromClass(/*@Mutable*/ AnnotatedTypeMirror type,
Set<AnnotationMirror> fromClass) {
// Ignore annotations inherited from a class.
// TODO: this mechanism is implemented in special IGJ logic and
// should be cleaned up.
}
/**
* Resolves {@code @I} in the type of the method type base on the method
* invocation tree parameters. Any unresolved {@code @I}s is resolved to a
* place holder type.
*
* It resolves {@code @I} annotation in the following way:
* <ul>
* <li>based on the tree receiver, done automatically through implicit
* invocation of
* {@link AnnotatedTypes#asMemberOf(Types, AnnotatedTypeFactory, AnnotatedTypeMirror, Element)}</li>
* <li>based on the invocation passed parameters</li>
* <li>if any yet unresolved immutability variables get resolved to a
* wildcard type</li>
* </ul>
*/
@Override
public Pair<AnnotatedExecutableType, List<AnnotatedTypeMirror>> methodFromUse(MethodInvocationTree tree) {
Pair<AnnotatedExecutableType, List<AnnotatedTypeMirror>> mfuPair = super.methodFromUse(tree);
AnnotatedExecutableType type = mfuPair.first;
// javac produces enum super calls with zero arguments even though the
// method element requires two.
// See also BaseTypeVisitor.visitMethodInvocation and
// CFGBuilder.CFGTranslationPhaseOne.visitMethodInvocation
if (TreeUtils.isEnumSuper(tree)) return mfuPair;
List<AnnotatedTypeMirror> requiredArgs = AnnotatedTypes.expandVarArgs(this, type, tree.getArguments());
List<AnnotatedTypeMirror> arguments = AnnotatedTypes.getAnnotatedTypes(this, requiredArgs, tree.getArguments());
ImmutabilityTemplateCollector collector = new ImmutabilityTemplateCollector();
Map<String, AnnotationMirror> matchingMapping = collector.visit(arguments, requiredArgs);
if (!matchingMapping.isEmpty())
new ImmutabilityResolver().visit(type, matchingMapping);
// For finding resolved types, rather than to actually resolve immutability
Map<String, AnnotationMirror> fromReceiver = collector.visit(getReceiverType(tree));
final Map<String, AnnotationMirror> mapping =
collector.reduce(matchingMapping, fromReceiver);
new AnnotatedTypeScanner<Void, Void>() {
@Override
public Void visitDeclared(AnnotatedDeclaredType type, Void p) {
if (type.hasAnnotationRelaxed(I)) {
AnnotationMirror anno =
type.getAnnotation(I.class);
if (!mapping.containsValue(anno)) {
type.replaceAnnotation(BOTTOM_QUAL);
}
}
return super.visitDeclared(type, p);
}
}.visit(type);
return mfuPair;
}
/**
* Infers the immutability of {@code @I}s based on the provided types, and
* replace all instances of {@code @I} with their corresponding qualifiers.
* The {@code @I} annotations that are not resolved are left intact.
*
* @param type the type with {@code @I} annotation
* @param provided the types with qualifiers that may be bound to
* {@code @I}
* @return true iff a qualifier has been resolved.
*/
private boolean resolveImmutabilityTypeVar(AnnotatedTypeMirror type,
AnnotatedTypeMirror ...provided) {
ImmutabilityTemplateCollector collector = new ImmutabilityTemplateCollector();
// maps the @I values to I resolved annotations
Map<String, AnnotationMirror> templateMapping = Collections.emptyMap();
for (AnnotatedTypeMirror pt : provided)
templateMapping = collector.reduce(templateMapping, collector.visit(pt));
// There is nothing to resolve
if (templateMapping.isEmpty())
return false;
new ImmutabilityResolver().visit(type, templateMapping);
return true;
}
/**
* A helper class that resolves the immutability on a types based on a
* provided mapping.
*
* It returns a set of the annotations that were inserted. This is important
* to recognize which immutability type variables were resolved and which
* are to be made into place holder.
*/
private class ImmutabilityResolver extends
AnnotatedTypeScanner<Void, Map<String, AnnotationMirror>> {
public void visit(Iterable<? extends AnnotatedTypeMirror> types,
Map<String, AnnotationMirror> templateMapping) {
if (templateMapping != null && !templateMapping.isEmpty()) {
for (AnnotatedTypeMirror type : types)
visit(type, templateMapping);
}
}
@Override
public Void visitDeclared(AnnotatedDeclaredType type,
Map<String, AnnotationMirror> p) {
if (type.hasAnnotationRelaxed(I)) {
String immutableString =
AnnotationUtils.getElementValue(getImmutabilityAnnotation(type),
IMMUTABILITY_KEY, String.class, true);
if (p.containsKey(immutableString)) {
type.replaceAnnotation(p.get(immutableString));
}
}
return super.visitDeclared(type, p);
}
}
/**
* A Helper class that tries to resolve the immutability type variable,
* as the type variable is assigned to the most restricted immutability
*/
private class ImmutabilityTemplateCollector
extends SimpleAnnotatedTypeVisitor<Map<String, AnnotationMirror>, AnnotatedTypeMirror> {
public Map<String, AnnotationMirror> reduce(Map<String, AnnotationMirror> r1,
|
[
" Map<String, AnnotationMirror> r2) {"
] | 2,339
|
lcc
|
java
| null |
0d5ebe09775314436fb1f120cb4c0d408be485ca49b27ab0
|
|
# ##### BEGIN GPL LICENSE BLOCK #####
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 2
# of the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software Foundation,
# Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
#
# ##### END GPL LICENSE BLOCK #####
# <pep8 compliant>
import bpy
from bpy.types import Menu, Panel, UIList
from rna_prop_ui import PropertyPanel
from bpy.app.translations import pgettext_iface as iface_
def active_node_mat(mat):
# TODO, 2.4x has a pipeline section, for 2.5 we need to communicate
# which settings from node-materials are used
if mat is not None:
mat_node = mat.active_node_material
if mat_node:
return mat_node
else:
return mat
return None
def check_material(mat):
if mat is not None:
if mat.use_nodes:
if mat.active_node_material is not None:
return True
return False
return True
return False
def simple_material(mat):
if (mat is not None) and (not mat.use_nodes):
return True
return False
class MATERIAL_MT_sss_presets(Menu):
bl_label = "SSS Presets"
preset_subdir = "sss"
preset_operator = "script.execute_preset"
draw = Menu.draw_preset
class MATERIAL_MT_specials(Menu):
bl_label = "Material Specials"
def draw(self, context):
layout = self.layout
layout.operator("object.material_slot_copy", icon='COPY_ID')
layout.operator("material.copy", icon='COPYDOWN')
layout.operator("material.paste", icon='PASTEDOWN')
class MATERIAL_UL_matslots(UIList):
def draw_item(self, context, layout, data, item, icon, active_data, active_propname, index):
# assert(isinstance(item, bpy.types.MaterialSlot)
# ob = data
slot = item
ma = slot.material
if self.layout_type in {'DEFAULT', 'COMPACT'}:
if ma:
layout.prop(ma, "name", text="", emboss=False, icon_value=icon)
else:
layout.label(text="", icon_value=icon)
if ma and not context.scene.render.use_shading_nodes:
manode = ma.active_node_material
if manode:
layout.label(text=iface_("Node %s") % manode.name, translate=False, icon_value=layout.icon(manode))
elif ma.use_nodes:
layout.label(text="Node <none>")
elif self.layout_type == 'GRID':
layout.alignment = 'CENTER'
layout.label(text="", icon_value=icon)
class MaterialButtonsPanel:
bl_space_type = 'PROPERTIES'
bl_region_type = 'WINDOW'
bl_context = "material"
# COMPAT_ENGINES must be defined in each subclass, external engines can add themselves here
@classmethod
def poll(cls, context):
return context.material and (context.scene.render.engine in cls.COMPAT_ENGINES)
class MATERIAL_PT_context_material(MaterialButtonsPanel, Panel):
bl_label = ""
bl_options = {'HIDE_HEADER'}
COMPAT_ENGINES = {'BLENDER_RENDER', 'BLENDER_GAME'}
@classmethod
def poll(cls, context):
# An exception, don't call the parent poll func because
# this manages materials for all engine types
engine = context.scene.render.engine
return (context.material or context.object) and (engine in cls.COMPAT_ENGINES)
def draw(self, context):
layout = self.layout
mat = context.material
ob = context.object
slot = context.material_slot
space = context.space_data
is_sortable = (len(ob.material_slots) > 1)
if ob:
rows = 1
if is_sortable:
rows = 4
row = layout.row()
row.template_list("MATERIAL_UL_matslots", "", ob, "material_slots", ob, "active_material_index", rows=rows)
col = row.column(align=True)
col.operator("object.material_slot_add", icon='ZOOMIN', text="")
col.operator("object.material_slot_remove", icon='ZOOMOUT', text="")
col.menu("MATERIAL_MT_specials", icon='DOWNARROW_HLT', text="")
if is_sortable:
col.separator()
col.operator("object.material_slot_move", icon='TRIA_UP', text="").direction = 'UP'
col.operator("object.material_slot_move", icon='TRIA_DOWN', text="").direction = 'DOWN'
if ob.mode == 'EDIT':
row = layout.row(align=True)
row.operator("object.material_slot_assign", text="Assign")
row.operator("object.material_slot_select", text="Select")
row.operator("object.material_slot_deselect", text="Deselect")
split = layout.split(percentage=0.65)
if ob:
split.template_ID(ob, "active_material", new="material.new")
row = split.row()
if mat:
row.prop(mat, "use_nodes", icon='NODETREE', text="")
if slot:
row.prop(slot, "link", text="")
else:
row.label()
elif mat:
split.template_ID(space, "pin_id")
split.separator()
if mat:
layout.prop(mat, "type", expand=True)
if mat.use_nodes:
row = layout.row()
row.label(text="", icon='NODETREE')
if mat.active_node_material:
row.prop(mat.active_node_material, "name", text="")
else:
row.label(text="No material node selected")
class MATERIAL_PT_preview(MaterialButtonsPanel, Panel):
bl_label = "Preview"
COMPAT_ENGINES = {'BLENDER_RENDER', 'BLENDER_GAME'}
def draw(self, context):
self.layout.template_preview(context.material)
class MATERIAL_PT_pipeline(MaterialButtonsPanel, Panel):
bl_label = "Render Pipeline Options"
bl_options = {'DEFAULT_CLOSED'}
COMPAT_ENGINES = {'BLENDER_RENDER', 'BLENDER_GAME'}
@classmethod
def poll(cls, context):
mat = context.material
engine = context.scene.render.engine
return mat and (not simple_material(mat)) and (mat.type in {'SURFACE', 'WIRE', 'VOLUME'}) and (engine in cls.COMPAT_ENGINES)
def draw(self, context):
layout = self. layout
mat = context.material
mat_type = mat.type in {'SURFACE', 'WIRE'}
row = layout.row()
row.active = mat_type
row.prop(mat, "use_transparency")
sub = row.column()
sub.prop(mat, "offset_z")
sub.active = mat_type and mat.use_transparency and mat.transparency_method == 'Z_TRANSPARENCY'
row = layout.row()
row.active = mat.use_transparency or not mat_type
row.prop(mat, "transparency_method", expand=True)
layout.separator()
split = layout.split()
col = split.column()
col.prop(mat, "use_raytrace")
col.prop(mat, "use_full_oversampling")
sub = col.column()
sub.active = mat_type
sub.prop(mat, "use_sky")
sub.prop(mat, "invert_z")
col.prop(mat, "pass_index")
col = split.column()
col.active = mat_type
col.prop(mat, "use_cast_shadows", text="Cast")
col.prop(mat, "use_cast_shadows_only", text="Cast Only")
col.prop(mat, "use_cast_buffer_shadows")
sub = col.column()
sub.active = mat.use_cast_buffer_shadows
sub.prop(mat, "shadow_cast_alpha", text="Casting Alpha")
col.prop(mat, "use_cast_approximate")
class MATERIAL_PT_diffuse(MaterialButtonsPanel, Panel):
bl_label = "Diffuse"
COMPAT_ENGINES = {'BLENDER_RENDER', 'BLENDER_GAME'}
@classmethod
def poll(cls, context):
mat = context.material
engine = context.scene.render.engine
return check_material(mat) and (mat.type in {'SURFACE', 'WIRE'}) and (engine in cls.COMPAT_ENGINES)
def draw(self, context):
layout = self.layout
mat = active_node_mat(context.material)
split = layout.split()
col = split.column()
col.prop(mat, "diffuse_color", text="")
sub = col.column()
sub.active = (not mat.use_shadeless)
sub.prop(mat, "diffuse_intensity", text="Intensity")
col = split.column()
col.active = (not mat.use_shadeless)
col.prop(mat, "diffuse_shader", text="")
col.prop(mat, "use_diffuse_ramp", text="Ramp")
col = layout.column()
col.active = (not mat.use_shadeless)
if mat.diffuse_shader == 'OREN_NAYAR':
col.prop(mat, "roughness")
elif mat.diffuse_shader == 'MINNAERT':
col.prop(mat, "darkness")
elif mat.diffuse_shader == 'TOON':
row = col.row()
row.prop(mat, "diffuse_toon_size", text="Size")
row.prop(mat, "diffuse_toon_smooth", text="Smooth")
elif mat.diffuse_shader == 'FRESNEL':
row = col.row()
row.prop(mat, "diffuse_fresnel", text="Fresnel")
row.prop(mat, "diffuse_fresnel_factor", text="Factor")
if mat.use_diffuse_ramp:
col = layout.column()
col.active = (not mat.use_shadeless)
col.separator()
col.template_color_ramp(mat, "diffuse_ramp", expand=True)
col.separator()
row = col.row()
row.prop(mat, "diffuse_ramp_input", text="Input")
row.prop(mat, "diffuse_ramp_blend", text="Blend")
col.prop(mat, "diffuse_ramp_factor", text="Factor")
class MATERIAL_PT_specular(MaterialButtonsPanel, Panel):
bl_label = "Specular"
COMPAT_ENGINES = {'BLENDER_RENDER', 'BLENDER_GAME'}
@classmethod
def poll(cls, context):
mat = context.material
engine = context.scene.render.engine
return check_material(mat) and (mat.type in {'SURFACE', 'WIRE'}) and (engine in cls.COMPAT_ENGINES)
def draw(self, context):
layout = self.layout
mat = active_node_mat(context.material)
layout.active = (not mat.use_shadeless)
split = layout.split()
col = split.column()
col.prop(mat, "specular_color", text="")
col.prop(mat, "specular_intensity", text="Intensity")
col = split.column()
col.prop(mat, "specular_shader", text="")
col.prop(mat, "use_specular_ramp", text="Ramp")
col = layout.column()
if mat.specular_shader in {'COOKTORR', 'PHONG'}:
col.prop(mat, "specular_hardness", text="Hardness")
elif mat.specular_shader == 'BLINN':
row = col.row()
row.prop(mat, "specular_hardness", text="Hardness")
row.prop(mat, "specular_ior", text="IOR")
elif mat.specular_shader == 'WARDISO':
col.prop(mat, "specular_slope", text="Slope")
elif mat.specular_shader == 'TOON':
row = col.row()
row.prop(mat, "specular_toon_size", text="Size")
row.prop(mat, "specular_toon_smooth", text="Smooth")
if mat.use_specular_ramp:
layout.separator()
layout.template_color_ramp(mat, "specular_ramp", expand=True)
layout.separator()
row = layout.row()
row.prop(mat, "specular_ramp_input", text="Input")
row.prop(mat, "specular_ramp_blend", text="Blend")
layout.prop(mat, "specular_ramp_factor", text="Factor")
class MATERIAL_PT_shading(MaterialButtonsPanel, Panel):
bl_label = "Shading"
COMPAT_ENGINES = {'BLENDER_RENDER', 'BLENDER_GAME'}
@classmethod
def poll(cls, context):
mat = context.material
engine = context.scene.render.engine
return check_material(mat) and (mat.type in {'SURFACE', 'WIRE'}) and (engine in cls.COMPAT_ENGINES)
def draw(self, context):
layout = self.layout
mat = active_node_mat(context.material)
if mat.type in {'SURFACE', 'WIRE'}:
split = layout.split()
col = split.column()
sub = col.column()
sub.active = not mat.use_shadeless
sub.prop(mat, "emit")
sub.prop(mat, "ambient")
sub = col.column()
sub.prop(mat, "translucency")
col = split.column()
col.prop(mat, "use_shadeless")
sub = col.column()
sub.active = not mat.use_shadeless
sub.prop(mat, "use_tangent_shading")
sub.prop(mat, "use_cubic")
class MATERIAL_PT_transp(MaterialButtonsPanel, Panel):
bl_label = "Transparency"
COMPAT_ENGINES = {'BLENDER_RENDER'}
@classmethod
def poll(cls, context):
mat = context.material
engine = context.scene.render.engine
return check_material(mat) and (mat.type in {'SURFACE', 'WIRE'}) and (engine in cls.COMPAT_ENGINES)
def draw_header(self, context):
mat = context.material
if simple_material(mat):
self.layout.prop(mat, "use_transparency", text="")
def draw(self, context):
layout = self.layout
base_mat = context.material
mat = active_node_mat(context.material)
rayt = mat.raytrace_transparency
if simple_material(base_mat):
row = layout.row()
row.active = mat.use_transparency
row.prop(mat, "transparency_method", expand=True)
split = layout.split()
split.active = base_mat.use_transparency
col = split.column()
col.prop(mat, "alpha")
row = col.row()
row.active = (base_mat.transparency_method != 'MASK') and (not mat.use_shadeless)
row.prop(mat, "specular_alpha", text="Specular")
col = split.column()
col.active = (not mat.use_shadeless)
col.prop(rayt, "fresnel")
sub = col.column()
sub.active = (rayt.fresnel > 0.0)
sub.prop(rayt, "fresnel_factor", text="Blend")
if base_mat.transparency_method == 'RAYTRACE':
layout.separator()
split = layout.split()
split.active = base_mat.use_transparency
col = split.column()
col.prop(rayt, "ior")
col.prop(rayt, "filter")
col.prop(rayt, "falloff")
col.prop(rayt, "depth_max")
col.prop(rayt, "depth")
col = split.column()
col.label(text="Gloss:")
col.prop(rayt, "gloss_factor", text="Amount")
sub = col.column()
sub.active = rayt.gloss_factor < 1.0
sub.prop(rayt, "gloss_threshold", text="Threshold")
sub.prop(rayt, "gloss_samples", text="Samples")
class MATERIAL_PT_mirror(MaterialButtonsPanel, Panel):
bl_label = "Mirror"
bl_options = {'DEFAULT_CLOSED'}
COMPAT_ENGINES = {'BLENDER_RENDER'}
@classmethod
def poll(cls, context):
mat = context.material
engine = context.scene.render.engine
return check_material(mat) and (mat.type in {'SURFACE', 'WIRE'}) and (engine in cls.COMPAT_ENGINES)
def draw_header(self, context):
raym = active_node_mat(context.material).raytrace_mirror
self.layout.prop(raym, "use", text="")
def draw(self, context):
layout = self.layout
mat = active_node_mat(context.material)
raym = mat.raytrace_mirror
layout.active = raym.use
split = layout.split()
col = split.column()
col.prop(raym, "reflect_factor")
col.prop(mat, "mirror_color", text="")
col = split.column()
col.prop(raym, "fresnel")
sub = col.column()
sub.active = (raym.fresnel > 0.0)
sub.prop(raym, "fresnel_factor", text="Blend")
split = layout.split()
col = split.column()
col.separator()
col.prop(raym, "depth")
col.prop(raym, "distance", text="Max Dist")
col.separator()
sub = col.split(percentage=0.4)
sub.active = (raym.distance > 0.0)
sub.label(text="Fade To:")
sub.prop(raym, "fade_to", text="")
col = split.column()
col.label(text="Gloss:")
col.prop(raym, "gloss_factor", text="Amount")
sub = col.column()
sub.active = (raym.gloss_factor < 1.0)
sub.prop(raym, "gloss_threshold", text="Threshold")
sub.prop(raym, "gloss_samples", text="Samples")
sub.prop(raym, "gloss_anisotropic", text="Anisotropic")
class MATERIAL_PT_sss(MaterialButtonsPanel, Panel):
bl_label = "Subsurface Scattering"
bl_options = {'DEFAULT_CLOSED'}
COMPAT_ENGINES = {'BLENDER_RENDER'}
@classmethod
def poll(cls, context):
mat = context.material
engine = context.scene.render.engine
return check_material(mat) and (mat.type in {'SURFACE', 'WIRE'}) and (engine in cls.COMPAT_ENGINES)
def draw_header(self, context):
mat = active_node_mat(context.material)
sss = mat.subsurface_scattering
self.layout.active = (not mat.use_shadeless)
self.layout.prop(sss, "use", text="")
def draw(self, context):
layout = self.layout
mat = active_node_mat(context.material)
sss = mat.subsurface_scattering
layout.active = (sss.use) and (not mat.use_shadeless)
row = layout.row().split()
sub = row.row(align=True).split(align=True, percentage=0.75)
sub.menu("MATERIAL_MT_sss_presets", text=bpy.types.MATERIAL_MT_sss_presets.bl_label)
sub.operator("material.sss_preset_add", text="", icon='ZOOMIN')
sub.operator("material.sss_preset_add", text="", icon='ZOOMOUT').remove_active = True
split = layout.split()
col = split.column()
col.prop(sss, "ior")
col.prop(sss, "scale")
col.prop(sss, "color", text="")
col.prop(sss, "radius", text="RGB Radius", expand=True)
col = split.column()
sub = col.column(align=True)
sub.label(text="Blend:")
sub.prop(sss, "color_factor", text="Color")
sub.prop(sss, "texture_factor", text="Texture")
sub.label(text="Scattering Weight:")
sub.prop(sss, "front")
sub.prop(sss, "back")
col.separator()
col.prop(sss, "error_threshold", text="Error")
class MATERIAL_PT_halo(MaterialButtonsPanel, Panel):
bl_label = "Halo"
COMPAT_ENGINES = {'BLENDER_RENDER'}
@classmethod
def poll(cls, context):
mat = context.material
engine = context.scene.render.engine
return mat and (mat.type == 'HALO') and (engine in cls.COMPAT_ENGINES)
def draw(self, context):
layout = self.layout
mat = context.material # don't use node material
halo = mat.halo
def number_but(layout, toggle, number, name, color):
row = layout.row(align=True)
row.prop(halo, toggle, text="")
sub = row.column(align=True)
sub.active = getattr(halo, toggle)
sub.prop(halo, number, text=name, translate=False)
if not color == "":
sub.prop(mat, color, text="")
split = layout.split()
col = split.column()
col.prop(mat, "alpha")
col.prop(mat, "diffuse_color", text="")
col.prop(halo, "seed")
col = split.column()
col.prop(halo, "size")
col.prop(halo, "hardness")
col.prop(halo, "add")
layout.label(text="Options:")
split = layout.split()
col = split.column()
col.prop(halo, "use_texture")
col.prop(halo, "use_vertex_normal")
col.prop(halo, "use_extreme_alpha")
col.prop(halo, "use_shaded")
col.prop(halo, "use_soft")
col = split.column()
number_but(col, "use_ring", "ring_count", iface_("Rings"), "mirror_color")
number_but(col, "use_lines", "line_count", iface_("Lines"), "specular_color")
number_but(col, "use_star", "star_tip_count", iface_("Star Tips"), "")
class MATERIAL_PT_flare(MaterialButtonsPanel, Panel):
bl_label = "Flare"
COMPAT_ENGINES = {'BLENDER_RENDER'}
@classmethod
def poll(cls, context):
mat = context.material
engine = context.scene.render.engine
return mat and (mat.type == 'HALO') and (engine in cls.COMPAT_ENGINES)
def draw_header(self, context):
halo = context.material.halo
self.layout.prop(halo, "use_flare_mode", text="")
def draw(self, context):
layout = self.layout
mat = context.material # don't use node material
halo = mat.halo
layout.active = halo.use_flare_mode
split = layout.split()
col = split.column()
col.prop(halo, "flare_size", text="Size")
col.prop(halo, "flare_boost", text="Boost")
col.prop(halo, "flare_seed", text="Seed")
col = split.column()
col.prop(halo, "flare_subflare_count", text="Subflares")
col.prop(halo, "flare_subflare_size", text="Subsize")
class MATERIAL_PT_game_settings(MaterialButtonsPanel, Panel):
bl_label = "Game Settings"
COMPAT_ENGINES = {'BLENDER_GAME'}
@classmethod
def poll(cls, context):
return context.material and (context.scene.render.engine in cls.COMPAT_ENGINES)
def draw(self, context):
layout = self.layout
game = context.material.game_settings # don't use node material
row = layout.row()
row.prop(game, "use_backface_culling")
row.prop(game, "invisible")
row.prop(game, "text")
row = layout.row()
row.label(text="Alpha Blend:")
row.label(text="Face Orientation:")
row = layout.row()
row.prop(game, "alpha_blend", text="")
row.prop(game, "face_orientation", text="")
class MATERIAL_PT_physics(MaterialButtonsPanel, Panel):
bl_label = "Physics"
COMPAT_ENGINES = {'BLENDER_GAME'}
def draw_header(self, context):
game = context.material.game_settings
self.layout.prop(game, "physics", text="")
@classmethod
def poll(cls, context):
return context.material and (context.scene.render.engine in cls.COMPAT_ENGINES)
def draw(self, context):
layout = self.layout
layout.active = context.material.game_settings.physics
phys = context.material.physics # don't use node material
split = layout.split()
row = split.row()
row.prop(phys, "friction")
row.prop(phys, "elasticity", slider=True)
row = layout.row()
row.label(text="Force Field:")
row = layout.row()
row.prop(phys, "fh_force")
row.prop(phys, "fh_damping", slider=True)
row = layout.row()
row.prop(phys, "fh_distance")
row.prop(phys, "use_fh_normal")
class MATERIAL_PT_strand(MaterialButtonsPanel, Panel):
bl_label = "Strand"
bl_options = {'DEFAULT_CLOSED'}
COMPAT_ENGINES = {'BLENDER_RENDER'}
@classmethod
def poll(cls, context):
mat = context.material
engine = context.scene.render.engine
return mat and (mat.type in {'SURFACE', 'WIRE', 'HALO'}) and (engine in cls.COMPAT_ENGINES)
def draw(self, context):
layout = self.layout
mat = context.material # don't use node material
tan = mat.strand
split = layout.split()
col = split.column()
sub = col.column(align=True)
sub.label(text="Size:")
sub.prop(tan, "root_size", text="Root")
sub.prop(tan, "tip_size", text="Tip")
sub.prop(tan, "size_min", text="Minimum")
sub.prop(tan, "use_blender_units")
sub = col.column()
sub.active = (not mat.use_shadeless)
sub.prop(tan, "use_tangent_shading")
col.prop(tan, "shape")
col = split.column()
col.label(text="Shading:")
col.prop(tan, "width_fade")
ob = context.object
if ob and ob.type == 'MESH':
col.prop_search(tan, "uv_layer", ob.data, "uv_textures", text="")
else:
col.prop(tan, "uv_layer", text="")
col.separator()
sub = col.column()
sub.active = (not mat.use_shadeless)
sub.label("Surface diffuse:")
sub = col.column()
sub.prop(tan, "blend_distance", text="Distance")
class MATERIAL_PT_options(MaterialButtonsPanel, Panel):
bl_label = "Options"
COMPAT_ENGINES = {'BLENDER_RENDER', 'BLENDER_GAME'}
@classmethod
def poll(cls, context):
mat = context.material
engine = context.scene.render.engine
return check_material(mat) and (mat.type in {'SURFACE', 'WIRE'}) and (engine in cls.COMPAT_ENGINES)
def draw(self, context):
layout = self.layout
base_mat = context.material
mat = active_node_mat(base_mat)
split = layout.split()
col = split.column()
if simple_material(base_mat):
col.prop(mat, "use_raytrace")
col.prop(mat, "use_full_oversampling")
col.prop(mat, "use_sky")
col.prop(mat, "use_mist")
if simple_material(base_mat):
col.prop(mat, "invert_z")
sub = col.row()
sub.prop(mat, "offset_z")
sub.active = mat.use_transparency and mat.transparency_method == 'Z_TRANSPARENCY'
sub = col.column(align=True)
sub.label(text="Light Group:")
sub.prop(mat, "light_group", text="")
row = sub.row(align=True)
row.active = bool(mat.light_group)
row.prop(mat, "use_light_group_exclusive", text="Exclusive")
row.prop(mat, "use_light_group_local", text="Local")
|
[
" col = split.column()"
] | 2,052
|
lcc
|
python
| null |
6303ed98c58fa08f8639667add933df524657e812b3495b5
|
|
using System;
using NesHd.Core.Memory;
using NesHd.Core.Memory.Mappers;
namespace NesHd.Core.Misc
{
[Serializable]
public class StateHolder
{
#region CPU
public int CycleCounter;
private int _cyclesPerScanline;
private bool _flagB;
private bool _flagC;
private bool _flagD;
private bool _flagI = true;
private bool _flagN;
private bool _flagV;
private bool _flagZ;
private byte _opCode;
private ushort _prevPc;
private byte _regA;
private ushort _regPc;
private byte _regS;
private byte _regX;
private byte _regY;
#endregion
#region MEMORY
private int _joyData1;
private int _joyData2;
private byte _joyStrobe;
private byte[] _ram;
private byte[] _sram;
#endregion
#region CART
private byte[][] _chr;
private Mirroring _mirroring;
private bool _isVram;
private uint _mirroringBase;
private bool _saveRAMPresent;
#endregion
#region _map
public uint[] CurrentChrRomPage;
public uint[] CurrentPRGRomPage;
#endregion
#region PPU
private bool _backgroundClipping;
private bool _backgroundVisibility;
private ushort _colorEmphasis;
private int _currentScanLine;
/*2000*/
private bool _executeNMIonVBlank;
private byte _hScroll;
private bool _monochromeMode;
private bool _ppuToggle;
private int _patternTableAddress8X8Sprites;
private int _patternTableAddressBackground;
private byte _reloadBits2000;
private byte[] _sprram;
private int _scanlineOfVblank;
private int _scanlinesPerFrame;
private bool _sprite0Hit;
/*2001*/
private bool _spriteClipping;
private int _spriteCrossed;
/*2003*/
private byte _spriteRamAddress;
private bool _spriteSize; //true=8x16, false=8x8
private bool _spriteVisibility;
private int _tileY;
private bool _vblank;
private int _vBits;
private byte[] _vram;
private ushort _vramAddress;
private int _vramAddressIncrement = 1;
private byte _vramReadBuffer;
/*2005,2006*/
private ushort _vramTemp;
/*Draw stuff*/
private int _vScroll;
private int _fps;
private bool _noLimiter;
#endregion
#region APU
public byte DMCDAC;
public ushort DMCDMAAddress;
public ushort DMCDMALength;
public ushort DMCDMALengthCounter;
public ushort DMCDMAStartAddress;
public byte DMCDMCBIT;
public byte DMCDMCBYTE;
public bool DMCDMCIRQEnabled;
private bool DMCIRQPending;
public bool DMC_Enabled;
public double DMC_FreqTimer;
public double DMC_Frequency;
public bool DMC_Loop;
public double DMC_RenderedLength;
public double DMC_SampleCount;
private bool FrameIRQEnabled;
private bool FrameIRQPending;
public short NOIZEOUT;
public byte NOIZE_DecayCount;
public bool NOIZE_DecayDiable;
public bool NOIZE_DecayLoopEnable;
public bool NOIZE_DecayReset;
public byte NOIZE_DecayTimer;
//NOIZE
public bool NOIZE_Enabled;
public byte NOIZE_Envelope;
public double NOIZE_FreqTimer;
public double NOIZE_Frequency;
public byte NOIZE_LengthCount;
public int NOIZE_NoiseMode;
public double NOIZE_RenderedLength;
public double NOIZE_SampleCount;
public ushort NOIZE_ShiftReg = 1;
public byte NOIZE_Volume;
public double Rectangle1DutyPercentage;
public bool Rectangle1WaveStatus;
public byte Rectangle1_DecayCount;
public bool Rectangle1_DecayDiable;
public bool Rectangle1_DecayLoopEnable;
public bool Rectangle1_DecayReset;
public byte Rectangle1_DecayTimer;
public int Rectangle1_DutyCycle;
public bool Rectangle1_Enabled;
public byte Rectangle1_Envelope;
public int Rectangle1_FreqTimer;
public double Rectangle1_Frequency;
public byte Rectangle1_LengthCount;
public double Rectangle1_RenderedLength;
public double Rectangle1_SampleCount;
public byte Rectangle1_SweepCount;
public bool Rectangle1_SweepDirection;
public bool Rectangle1_SweepEnable;
public bool Rectangle1_SweepForceSilence;
public byte Rectangle1_SweepRate;
public bool Rectangle1_SweepReset;
public byte Rectangle1_SweepShift;
public byte Rectangle1_Volume;
public double Rectangle2DutyPercentage;
public bool Rectangle2WaveStatus;
public byte Rectangle2_DecayCount;
public bool Rectangle2_DecayDiable;
public bool Rectangle2_DecayLoopEnable;
public bool Rectangle2_DecayReset;
public byte Rectangle2_DecayTimer;
public int Rectangle2_DutyCycle;
public bool Rectangle2_Enabled;
public byte Rectangle2_Envelope;
public int Rectangle2_FreqTimer;
public double Rectangle2_Frequency;
public byte Rectangle2_LengthCount;
public double Rectangle2_RenderedLength;
public double Rectangle2_SampleCount;
public byte Rectangle2_SweepCount;
public bool Rectangle2_SweepDirection;
public bool Rectangle2_SweepEnable;
public bool Rectangle2_SweepForceSilence;
public byte Rectangle2_SweepRate;
public bool Rectangle2_SweepReset;
public byte Rectangle2_SweepShift;
public byte Rectangle2_Volume;
public bool TriangleHALT;
public short TriangleOUT;
public bool Triangle_Enabled;
public int Triangle_FreqTimer;
public double Triangle_Frequency;
public byte Triangle_LengthCount;
public bool Triangle_LengthEnabled;
public bool Triangle_LinearControl;
public int Triangle_LinearCounter;
public int Triangle_LinearCounterLoad;
public double Triangle_RenderedLength;
public double Triangle_SampleCount;
public int Triangle_Sequence;
public double VRC6Pulse1DutyPercentage;
public short VRC6Pulse1OUT;
public bool VRC6Pulse1WaveStatus;
public int VRC6Pulse1_DutyCycle;
public bool VRC6Pulse1_Enabled;
public int VRC6Pulse1_FreqTimer;
public double VRC6Pulse1_Frequency;
public double VRC6Pulse1_RenderedLength;
public double VRC6Pulse1_SampleCount;
public byte VRC6Pulse1_Volume;
public double VRC6Pulse2DutyPercentage;
public short VRC6Pulse2OUT;
public bool VRC6Pulse2WaveStatus;
public int VRC6Pulse2_DutyCycle;
public bool VRC6Pulse2_Enabled;
public int VRC6Pulse2_FreqTimer;
public double VRC6Pulse2_Frequency;
public double VRC6Pulse2_RenderedLength;
public double VRC6Pulse2_SampleCount;
public byte VRC6Pulse2_Volume;
public byte VRC6SawtoothAccum;
//VRC6 Sawtooth
public byte VRC6SawtoothAccumRate;
public byte VRC6SawtoothAccumStep;
public short VRC6SawtoothOUT;
public bool VRC6Sawtooth_Enabled;
public int VRC6Sawtooth_FreqTimer;
public double VRC6Sawtooth_Frequency;
public double VRC6Sawtooth_RenderedLength;
public double VRC6Sawtooth_SampleCount;
private int _FrameCounter;
private bool _PAL;
#endregion
#region MAPPERS
//MAPPER 1
//MAPPER 18
private int Mapper18_IRQWidth;
private short Mapper18_Timer;
private short Mapper18_latch;
private bool Mapper18_timer_irq_enabled;
private byte[] Mapper18_x = new byte[22];
private bool Mapper19_IRQEnabled;
//MAPPER 19
private bool Mapper19_VROMRAMfor0000;
private bool Mapper19_VROMRAMfor1000;
private short Mapper19_irq_counter;
//MAPPER 21
private bool Mapper21_PRGMode = true;
private byte[] Mapper21_REG = new byte[8];
private int Mapper21_irq_clock;
private int Mapper21_irq_counter;
private int Mapper21_irq_enable;
private int Mapper21_irq_latch;
//MAPPER 23
//MAPPER 225
private byte Mapper225_reg0 = 0xF;
private byte Mapper225_reg1 = 0xF;
private byte Mapper225_reg2 = 0xF;
private byte Mapper225_reg3 = 0xF;
private bool Mapper23_PRGMode = true;
private byte[] Mapper23_REG = new byte[8];
private int Mapper23_irq_clock;
private int Mapper23_irq_counter;
private int Mapper23_irq_enable;
private int Mapper23_irq_latch;
private int Mapper24_irq_clock;
private int Mapper24_irq_counter;
private bool Mapper24_irq_enable;
private int Mapper24_irq_latch;
private byte Mapper41_CHR_High;
private byte Mapper41_CHR_Low;
private byte mapper10_latch1;
private int mapper10_latch1data1;
private int mapper10_latch1data2;
private byte mapper10_latch2;
private int mapper10_latch2data1;
private int mapper10_latch2data2;
private bool mapper17_IRQEnabled;
private int mapper17_irq_counter;
private byte mapper18_control;
private byte mapper1_mirroringFlag;
private byte mapper1_onePageMirroring;
private byte mapper1_prgSwitchingArea;
private byte mapper1_prgSwitchingSize;
private int mapper1_register8000BitPosition;
private int mapper1_register8000Value;
private int mapper1_registerA000BitPosition;
private int mapper1_registerA000Value;
private int mapper1_registerC000BitPosition;
private int mapper1_registerC000Value;
private int mapper1_registerE000BitPosition;
private int mapper1_registerE000Value;
private byte mapper1_vromSwitchingSize;
//MAPPER 32
private int mapper32SwitchingMode;
//MAPPER 33
private byte mapper33_IRQCounter;
private bool mapper33_IRQEabled;
private bool mapper33_type1 = true;
private int mapper4_chrAddressSelect;
private int mapper4_commandNumber;
private int mapper4_prgAddressSelect;
private uint mapper4_timer_irq_count;
private bool mapper4_timer_irq_enabled;
private uint mapper4_timer_irq_reload;
private byte mapper5_chrBankSize;
private byte mapper5_prgBankSize;
private int mapper5_scanlineSplit;
private bool mapper5_splitIrqEnabled;
private byte mapper64_chrAddressSelect;
//MAPPER 41
//MAPPER 64
private byte mapper64_commandNumber;
private byte mapper64_prgAddressSelect;
private short mapper65_timer_irq_Latch_65;
private short mapper65_timer_irq_counter_65;
private bool mapper65_timer_irq_enabled;
//MAPPER 69
private ushort mapper69_reg;
private short mapper69_timer_irq_counter_69;
private bool mapper69_timer_irq_enabled;
private bool mapper6_IRQEnabled;
private int mapper6_irq_counter;
//MAPPER 8
private bool mapper8_IRQEnabled;
private int mapper8_irq_counter;
//MAPPER 91
private int mapper91_IRQCount;
private bool mapper91_IRQEnabled;
private byte mapper9_latch1;
private int mapper9_latch1data1;
private int mapper9_latch1data2;
private byte mapper9_latch2;
private int mapper9_latch2data1;
private int mapper9_latch2data2;
private short timer_irq_Latch_16;
private short timer_irq_counter_16;
private bool timer_irq_enabled;
#endregion
public void LoadNesData(NesEngine _engine)
{
#region CPU
_regA = _engine.Cpu.REG_A;
_regX = _engine.Cpu.REG_X;
_regY = _engine.Cpu.REG_Y;
_regS = _engine.Cpu.REG_S;
_regPc = _engine.Cpu.REG_PC;
_flagN = _engine.Cpu.Flag_N;
_flagV = _engine.Cpu.Flag_V;
_flagB = _engine.Cpu.Flag_B;
_flagD = _engine.Cpu.Flag_D;
_flagI = _engine.Cpu.Flag_I;
_flagZ = _engine.Cpu.Flag_Z;
_flagC = _engine.Cpu.Flag_C;
CycleCounter = _engine.Cpu.CycleCounter;
_cyclesPerScanline = _engine.Cpu.CyclesPerScanline;
_opCode = _engine.Cpu.OpCode;
_prevPc = _engine.Cpu.PrevPc;
#endregion
#region MEMORY
_ram = _engine.Memory.Ram;
_sram = _engine.Memory.SRam;
_joyData1 = _engine.Memory.JoyData1;
_joyData2 = _engine.Memory.JoyData2;
_joyStrobe = _engine.Memory.JoyStrobe;
#endregion
#region CART
if (_engine.Memory.Map.Cartridge.ChrPages == 0)
_chr = _engine.Memory.Map.Cartridge.Chr;
_mirroring = _engine.Memory.Map.Cartridge.Mirroring;
_saveRAMPresent = _engine.Memory.Map.Cartridge.IsSaveRam;
_isVram = _engine.Memory.Map.Cartridge.IsVram;
_mirroringBase = _engine.Memory.Map.Cartridge.MirroringBase;
#endregion
#region _map
CurrentPRGRomPage = _engine.Memory.Map.CurrentPrgRomPage;
CurrentChrRomPage = _engine.Memory.Map.CurrentChrRomPage;
#endregion
#region PPU
_sprram = _engine.Ppu.SprRam;
_vram = _engine.Ppu.VRam;
_currentScanLine = _engine.Ppu.CurrentScanLine;
_vramAddress = _engine.Ppu.VRamAddress;
_sprite0Hit = _engine.Ppu.Sprite0Hit;
_spriteCrossed = _engine.Ppu.SpriteCrossed;
_scanlinesPerFrame = _engine.Ppu.ScanlinesPerFrame;
_scanlineOfVblank = _engine.Ppu.ScanlineOfVblank;
_fps = _engine.Ppu.Fps;
_vblank = _engine.Ppu.VBlank;
_vramReadBuffer = _engine.Ppu.VRamReadBuffer;
_noLimiter = _engine.Ppu.NoLimiter;
/*2000*/
_executeNMIonVBlank = _engine.Ppu.ExecuteNMIonVBlank;
_spriteSize = _engine.Ppu.SpriteSize;
_patternTableAddressBackground = _engine.Ppu.PatternTableAddressBackground;
_patternTableAddress8X8Sprites = _engine.Ppu.PatternTableAddress8X8Sprites;
_vramAddressIncrement = _engine.Ppu.VRamAddressIncrement;
_reloadBits2000 = _engine.Ppu.ReloadBits2000;
/*2001*/
_colorEmphasis = _engine.Ppu.ColorEmphasis;
_spriteVisibility = _engine.Ppu.SpriteVisibility;
_backgroundVisibility = _engine.Ppu.BackgroundVisibility;
_spriteClipping = _engine.Ppu.SpriteClipping;
_backgroundClipping = _engine.Ppu.BackgroundClipping;
_monochromeMode = _engine.Ppu.MonochromeMode;
/*2003*/
_spriteRamAddress = _engine.Ppu.SpriteRamAddress;
/*2005,2006*/
_ppuToggle = _engine.Ppu.PpuToggle;
_vramTemp = _engine.Ppu.VRamTemp;
/*Draw stuff*/
_hScroll = _engine.Ppu.HScroll;
_vScroll = _engine.Ppu.VScroll;
_vBits = _engine.Ppu.VBits;
_tileY = _engine.Ppu.TileY;
#endregion
#region APU
_FrameCounter = _engine.Apu._FrameCounter;
_PAL = _engine.Apu._PAL;
DMCIRQPending = _engine.Apu.DMCIRQPending;
FrameIRQEnabled = _engine.Apu.FrameIRQEnabled;
FrameIRQPending = _engine.Apu.FrameIRQPending;
_engine.Apu.DMC.SaveState(this);
_engine.Apu.NOIZE.SaveState(this);
_engine.Apu.RECT1.SaveState(this);
_engine.Apu.RECT2.SaveState(this);
_engine.Apu.TRIANGLE.SaveState(this);
_engine.Apu.VRC6PULSE1.SaveState(this);
_engine.Apu.VRC6PULSE2.SaveState(this);
_engine.Apu.VRC6SAWTOOTH.SaveState(this);
#endregion
#region Mappers
//MAPPER 1
if (_engine.Memory.Map.Cartridge.MapperNo == 1)
{
var map1 = (Mapper01) _engine.Memory.Map.CurrentMapper;
mapper1_register8000BitPosition = map1.Mapper1Register8000BitPosition;
mapper1_registerA000BitPosition = map1.Mapper1RegisterA000BitPosition;
mapper1_registerC000BitPosition = map1.Mapper1RegisterC000BitPosition;
mapper1_registerE000BitPosition = map1.Mapper1RegisterE000BitPosition;
mapper1_register8000Value = map1.Mapper1Register8000Value;
mapper1_registerA000Value = map1.Mapper1RegisterA000Value;
mapper1_registerC000Value = map1.Mapper1RegisterC000Value;
mapper1_registerE000Value = map1.Mapper1RegisterE000Value;
mapper1_mirroringFlag = map1.Mapper1MirroringFlag;
mapper1_onePageMirroring = map1.Mapper1OnePageMirroring;
mapper1_prgSwitchingArea = map1.Mapper1PRGSwitchingArea;
mapper1_prgSwitchingSize = map1.Mapper1PRGSwitchingSize;
mapper1_vromSwitchingSize = map1.Mapper1VromSwitchingSize;
}
//MAPPER 4
if (_engine.Memory.Map.Cartridge.MapperNo == 4)
{
var map4 = (Mapper04) _engine.Memory.Map.CurrentMapper;
mapper4_commandNumber = map4.Mapper4CommandNumber;
mapper4_prgAddressSelect = map4.Mapper4PRGAddressSelect;
mapper4_chrAddressSelect = map4.Mapper4ChrAddressSelect;
mapper4_timer_irq_enabled = map4.TimerIrqEnabled;
mapper4_timer_irq_count = map4.TimerIrqCount;
mapper4_timer_irq_reload = map4.TimerIrqReload;
}
//MAPPER 5
if (_engine.Memory.Map.Cartridge.MapperNo == 5)
{
var map5 = (Mapper05) _engine.Memory.Map.CurrentMapper;
mapper5_prgBankSize = map5.Mapper5PRGBankSize;
mapper5_chrBankSize = map5.Mapper5ChrBankSize;
mapper5_scanlineSplit = map5.Mapper5ScanlineSplit;
mapper5_splitIrqEnabled = map5.Mapper5SplitIrqEnabled;
}
//MAPPER 6
if (_engine.Memory.Map.Cartridge.MapperNo == 6)
{
var map6 = (Mapper06) _engine.Memory.Map.CurrentMapper;
mapper6_IRQEnabled = map6.IRQEnabled;
mapper6_irq_counter = map6.irq_counter;
}
//MAPPER 8
if (_engine.Memory.Map.Cartridge.MapperNo == 8)
{
var map8 = (Mapper08) _engine.Memory.Map.CurrentMapper;
mapper8_IRQEnabled = map8.IRQEnabled;
mapper8_irq_counter = map8.irq_counter;
}
//MAPPER 9
if (_engine.Memory.Map.Cartridge.MapperNo == 9)
{
var map9 = (Mapper09) _engine.Memory.Map.CurrentMapper;
mapper9_latch1 = map9.latch1;
mapper9_latch2 = map9.latch2;
mapper9_latch1data1 = map9.latch1data1;
mapper9_latch1data2 = map9.latch1data2;
mapper9_latch2data1 = map9.latch2data1;
mapper9_latch2data2 = map9.latch2data2;
}
//MAPPER 10
if (_engine.Memory.Map.Cartridge.MapperNo == 10)
{
var map10 = (Mapper10) _engine.Memory.Map.CurrentMapper;
mapper10_latch1 = map10.Latch1;
mapper10_latch2 = map10.Latch2;
mapper10_latch1data1 = map10.Latch1Data1;
mapper10_latch1data2 = map10.Latch1Data2;
mapper10_latch2data1 = map10.Latch2Data1;
mapper10_latch2data2 = map10.Latch2Data2;
}
//MAPPER 16
|
[
" if (_engine.Memory.Map.Cartridge.MapperNo == 16)"
] | 1,369
|
lcc
|
csharp
| null |
23b0090fa6618a3171db2086d8cc9478f03e22182380161d
|
|
/*************************************************************************
*
* The Contents of this file are made available subject to the terms of
* the BSD license.
*
* Copyright 2000, 2010 Oracle and/or its affiliates.
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions
* are met:
* 1. Redistributions of source code must retain the above copyright
* notice, this list of conditions and the following disclaimer.
* 2. Redistributions in binary form must reproduce the above copyright
* notice, this list of conditions and the following disclaimer in the
* documentation and/or other materials provided with the distribution.
* 3. Neither the name of Sun Microsystems, Inc. nor the names of its
* contributors may be used to endorse or promote products derived
* from this software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
* "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
* LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS
* FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE
* COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
* INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
* BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS
* OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
* ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR
* TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE
* USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*
*************************************************************************/
import com.sun.star.uno.XComponentContext;
import java.awt.Component;
import java.awt.Container;
import java.awt.Dimension;
import java.awt.event.ActionListener;
import java.awt.event.ComponentAdapter;
import java.awt.event.ComponentEvent;
import java.awt.event.KeyEvent;
import java.awt.event.WindowAdapter;
import java.awt.event.WindowEvent;
import javax.swing.ButtonGroup;
import javax.swing.JDialog;
import javax.swing.JMenu;
import javax.swing.JMenuBar;
import javax.swing.JMenuItem;
import javax.swing.JPopupMenu;
import javax.swing.JRadioButtonMenuItem;
import javax.swing.JTabbedPane;
import javax.swing.KeyStroke;
public class SwingDialogProvider implements XDialogProvider{
private JPopupMenu m_jPopupMenu = new JPopupMenu();
private XComponentContext m_xComponentContext;
private Inspector._Inspector m_oInspector;
private JDialog m_jInspectorDialog = new JDialog();
private JTabbedPane m_jTabbedPane1 = new JTabbedPane();
private Container cp;
private JMenu jMnuOptions = new JMenu("Options");
private JRadioButtonMenuItem jJavaMenuItem = null;
private JRadioButtonMenuItem jCPlusPlusMenuItem = null;
private JRadioButtonMenuItem jBasicMenuItem = null;
/** Creates a new instance of SwingPopupMentuProvider */
public SwingDialogProvider(Inspector._Inspector _oInspector, String _sTitle) {
m_oInspector = _oInspector;
m_xComponentContext = _oInspector.getXComponentContext();
insertMenus();
initializePopupMenu();
cp = m_jInspectorDialog.getContentPane();
cp.setLayout(new java.awt.BorderLayout(0, 10));
m_jTabbedPane1.setTabLayoutPolicy(JTabbedPane.SCROLL_TAB_LAYOUT);
m_jInspectorDialog.addWindowListener(new InspectorWindowAdapter());
m_jInspectorDialog.addComponentListener(new InspectorComponentAdapter());
m_jInspectorDialog.setTitle(_sTitle);
m_jInspectorDialog.setLocation(100, 50);
m_jInspectorDialog.getContentPane().add(m_jTabbedPane1);
}
public JDialog getDialog(){
return m_jInspectorDialog;
}
private void addMenuBar(JMenuBar _jMenuBar){
getDialog().setJMenuBar(_jMenuBar);
}
private void removeTabPaneByIndex(int _nIndex){
if (_nIndex > -1){
String sSelInspectorPanelTitle = m_jTabbedPane1.getTitleAt(_nIndex);
m_jTabbedPane1.remove(_nIndex);
m_oInspector.getInspectorPages().remove(sSelInspectorPanelTitle);
}
}
public void selectInspectorPageByIndex(int nTabIndex){
m_jTabbedPane1.setSelectedIndex(nTabIndex);
}
public int getInspectorPageCount(){
return m_jTabbedPane1.getTabCount();
}
public JTabbedPane getTabbedPane(){
return m_jTabbedPane1;
}
public InspectorPane getSelectedInspectorPage(){
int nIndex = m_jTabbedPane1.getSelectedIndex();
return getInspectorPage(nIndex);
}
public InspectorPane getInspectorPage(int _nIndex){
InspectorPane oInspectorPane = null;
if (_nIndex > -1){
String sInspectorPanelTitle = m_jTabbedPane1.getTitleAt(_nIndex);
oInspectorPane = m_oInspector.getInspectorPages().get(sInspectorPanelTitle);
}
return oInspectorPane;
}
private void removeTabPanes(){
int nCount = m_jTabbedPane1.getTabCount();
if (nCount > 0){
for (int i = nCount-1; i >= 0; i--){
removeTabPaneByIndex(i);
}
}
}
private void removeSelectedTabPane(){
int nIndex = getTabbedPane().getSelectedIndex();
removeTabPaneByIndex(nIndex);
}
private class InspectorComponentAdapter extends ComponentAdapter{
@Override
public void componentHidden(ComponentEvent e){
m_jInspectorDialog.pack();
m_jInspectorDialog.invalidate();
}
@Override
public void componentShown(ComponentEvent e){
m_jInspectorDialog.pack();
m_jInspectorDialog.invalidate();
}
}
private class InspectorWindowAdapter extends WindowAdapter{
@Override
public void windowClosed(WindowEvent e){
removeTabPanes();
m_oInspector.disposeHiddenDocuments();
}
@Override
public void windowClosing(WindowEvent e){
removeTabPanes();
m_oInspector.disposeHiddenDocuments();
}
}
private void initializePopupMenu(){
m_jPopupMenu.add(getInspectMenuItem("Inspect"));
m_jPopupMenu.add(getSourceCodeMenuItem(SADDTOSOURCECODE));
m_jPopupMenu.add(getInvokeMenuItem(SINVOKE));
m_jPopupMenu.addSeparator();
m_jPopupMenu.add(getHelpMenuItem("Help"));
}
private void addOpenDocumentMenu(JMenu _jMnuRoot){
ActionListener oActionListener = new ActionListener(){
public void actionPerformed(java.awt.event.ActionEvent evt) {
String sTDocUrl = evt.getActionCommand();
m_oInspector.inspectOpenDocument(sTDocUrl);
}
};
|
[
" String[] sTDocUrls = m_oInspector.getTDocUrls();"
] | 594
|
lcc
|
java
| null |
4d661a278930364d65e248d5f8bbbf7992b0f866443d527f
|
|
//--- Aura Script -----------------------------------------------------------
// Aranwen
//--- Description -----------------------------------------------------------
// Teacher
//---------------------------------------------------------------------------
public class AranwenScript : NpcScript
{
public override void Load()
{
SetName("_aranwen");
SetRace(10001);
SetBody(height: 1.15f, weight: 0.9f, upper: 1.1f, lower: 0.8f);
SetFace(skinColor: 15, eyeType: 3, eyeColor: 192);
SetLocation(14, 43378, 40048, 125);
EquipItem(Pocket.Face, 3900, 0x00344300, 0x0000163E, 0x008B0021);
EquipItem(Pocket.Hair, 3026, 0x00BDC2E5, 0x00BDC2E5, 0x00BDC2E5);
EquipItem(Pocket.Armor, 13008, 0x00C6D8EA, 0x00C6D8EA, 0x00635985);
EquipItem(Pocket.Glove, 16503, 0x00C6D8EA, 0x00B20859, 0x00A7131C);
EquipItem(Pocket.Shoe, 17504, 0x00C6D8EA, 0x00C6D8EA, 0x003F6577);
EquipItem(Pocket.RightHand1, 40012, 0x00C0C0C0, 0x008C84A4, 0x00403C47);
AddPhrase("...");
AddPhrase("A sword does not betray its own will.");
AddPhrase("A sword is not a stick. I don't feel any tension from you!");
AddPhrase("Aren't you well?");
AddPhrase("Focus when you're practicing.");
AddPhrase("Hahaha.");
AddPhrase("If you're done resting, let's keep practicing!");
AddPhrase("It's those people who really need to learn swordsmanship.");
AddPhrase("Put more into the wrists!");
AddPhrase("That student may need to rest a while.");
}
protected override async Task Talk()
{
SetBgm("NPC_Aranwen.mp3");
await Intro(
"A lady decked out in shining armor is confidently training students in swordsmanship in front of the school.",
"Unlike a typical swordswoman, her moves seem delicate and elegant.",
"Her long, braided silver hair falls down her back, leaving her eyes sternly fixed on me."
);
Msg("What brings you here?", Button("Start a Conversation", "@talk"), Button("Shop", "@shop"), Button("Modify Item", "@upgrade"));
switch (await Select())
{
case "@talk":
Msg("Hmm. <username/>, right?<br/>Of course.");
// Msg("Hmm. <username/>, right?");
// Msg("Yes? Please don't block my view.");
// if the player is wearing the Savior of Erinn title, she will say this after the first message
// Msg("Guardian of Erinn...<br/>If it were anyone else,<br/>I would tell them to stop being so arrogant...");
// Msg("But with you, <username/>, you are definitely qualified.<br/>Good job.");
await StartConversation();
break;
case "@shop":
Msg("Are you looking for a party quest scroll?");
OpenShop("AranwenShop");
break;
case "@upgrade":
Msg("Please select the weapon you'd like to modify.<br/>Each weapon can be modified according to its kind.<upgrade />");
Msg("Unimplemented");
Msg("A bow is weaker than a crossbow?<br/>That's because you don't know a bow very well.<br/>Crossbows are advanced weapons for sure,<br/>but a weapon that reflects your strength and senses is closer to nature than machinery.");
break;
}
}
protected override async Task Keywords(string keyword)
{
switch (keyword) {
case "personal_info":
Msg("Let me introduce myself.<br/>My name is Aranwen. I teach combat skills at the Dunbarton School.");
break;
case "rumor":
Msg("If you need a weapon for the training,<br/>why don't you go see Nerys in the south side?<br/>She runs the Weapons Shop.");
break;
case "about_skill":
Msg("...I am sorry, but someone that has yet to master the skill<br/>should not be bluntly asking questions about skills like this.");
Msg("...if you are interested in high-leveled bowman skills, then<br/>you should at least master the Fire Arrow skill first.");
break;
case "shop_misc": // General Shop
Msg("Hmm. Looking for the General Shop?<br/>You'll find it down there across the Square.");
Msg("Walter should be standing by the door.<br/>You can buy instruments, music scores, gifts, and tailoring goods such as sewing patterns.");
break;
case "shop_grocery":
Msg("If you are looking to buy cooking ingredients,<br/>the Restaurant will be your best bet.");
break;
case "shop_healing":
Msg("A Healer's House? Are you looking for Manus?<br/>Manus runs a Healer's House near<br/>the Weapons Shop in the southern part of town.");
Msg("Even if you're not ill<br/>and you're simply looking for things like potions,<br/>that's the place to go.");
break;
case "shop_inn":
Msg("There is no inn in this town.");
break;
case "shop_bank":
Msg("If you're looking for a bank, you can go to<br/>the Erskin Bank in the west end of the Square.<br/>Talk to Austeyn there for anything involving money or items.");
break;
case "shop_smith":
Msg("There is no blacksmith's shop in this town, but<br/>if you are looking for anything like weapons or armor,<br/>why don't you head south and visit the Weapons Shop?");
break;
case "skill_range":
Msg("I suppose I could take my time and verbally explain it to you,<br/>but you should be able to quickly get the hang of it<br/>once you equip and use a bow a few times.");
break;
case "skill_tailoring":
Msg("It would be most logical to get Simon's help<br/>at the Clothing Shop.");
break;
case "skill_magnum_shot":
Msg("Magnum Shot?<br/>Haven't you learned such a basic skill alrerady?<br/>You must seriously lack training.");
Msg("It may be rather time-consuming, but<br/>please go back to Tir Chonaill.<br/>Ranald will teach you the skill.");
break;
case "skill_counter_attack":
Msg("If you don't know the Counterattack skill yet, that is definitely a problem.<br/>Very well. First, you'll need to fight a powerful monster and get hit by its Counterattack.");
Msg("Monsters like bears use Counterattack<br/>so watch how they use it and take a hit,<br/>and you should be able to quickly get the hang of it without any particular training.");
Msg("In fact, if you are not willing to take the hit,<br/>there is no other way to learn that skill.<br/>Simply reading books will not help.");
break;
case "skill_smash":
Msg("Smash...<br/>For the Smash skill, why don't you go to the Bookstore and<br/>look for a book on it?");
Msg("You should learn it by yourself before bothering<br/>people with questions.<br/>You should be ashamed of yourself.");
break;
case "square":
Msg("The Square is just over here.<br/>Perhaps it totally escaped you<br/>because it's so large.");
break;
case "farmland":
Msg("Strangely, large rats have been seen<br/>in large numbers in the farmlands recently.<br/>This obviously isn't normal.");
Msg("If you are willing,<br/>would you go and take some out?<br/>You'll be appreciated by many.");
break;
case "brook": // Adelia Stream
Msg("Adelia Stream...<br/>I believe you're speaking of the<br/>stream in Tir Chonaill...");
Msg("Shouldn't you be asking<br/>these questions<br/>in Tir Chonaill?");
break;
case "shop_headman": // Chief's House
Msg("A chief?<br/>This town is ruled by a Lord,<br/>so there is no such person as a chief here.");
break;
case "temple": // Church
Msg("You must have something to discuss with Priestess Kristell.<br/>You'll find her at the Church up north.");
Msg("You can also take the stairs that head<br/>northwest to the Square.<br/>There are other ways to get there, too,<br/>so it shouldn't be too difficult to find it.");
break;
case "school":
Msg("Mmm? This is the only school around here.");
break;
case "skill_windmill":
Player.Keywords.Remove("skill_windmill");
Msg("Are you curious about the Windmill skill?<br/>It is a useful skill to have when you're surrounded by enemies.<br/>Very well. I will teach you the Windmill skill.");
break;
case "shop_restaurant":
Msg("If you're looking for a restaurant, you are looking for Glenis' place.<br/>She not only sells food, but also a lot of cooking ingredients, so<br/>you should pay a visit if you need something.");
Msg("The Restaurant is in the north alley of the Square.");
break;
case "shop_armory": // Weapon Shop
Msg("Nerys is the owner of the Weapons Shop.<br/>Keep following the road that leads down south<br/>and you'll see her mending weapons outside.");
Msg("She may seem a little aloof,<br/>but don't let that get to you too much<br/>and you'll get used to it.");
break;
case "shop_cloth":
Msg("There is no decent clothing shop in this town...<br/>But, if you must, go visit Simon's place.<br/>You should be able to find something that fits right away.");
break;
case "shop_bookstore":
Msg("You mean Aeira's Bookstore.<br/>It's just around here.<br/>Follow the road in front of the school up north.");
Msg("Many types of books go through that place,<br/>so even if you don't find what you want right away,<br/>keep visiting and you'll soon get it.");
break;
case "shop_goverment_office": // Town Office
Msg("Are you looking for Eavan?<br/>The Lord and the Captain of the Royal Guards<br/>are very hard to reach. ");
Msg("If you're really looking for Eavan,<br/>go over to that large building to the north of the Square.");
break;
default:
RndMsg(
"I don't know anything about it. I'm sorry I can't be much help.",
"I don't know anything about it. Why don't you ask others?",
"Being a teacher doesn't mean that I know everything.",
"Hey! Asking me about such things is a waste of time.",
"It doesn't seem bad but... I don't think I can help you with it.",
"I don't know too much about anything other than combat skills.",
"If you keep bringing up topics like this, I can't say much to you.",
"Will you tell me about it when you find out more?"
);
break;
}
}
public override void EndConversation()
{
Close("Thank you, Aranwen. I'll see you later!");
}
}
public class AranwenShop : NpcShopScript
{
public override void Setup()
{
//----------------
// Party Quest
//----------------
// Page 1
Add("Party Quest", 70025); // Party Quest Scroll [10 Red Bears]
|
[
"\t\tAdd(\"Party Quest\", 70025); // Party Quest Scroll [30 Red Bears]"
] | 1,340
|
lcc
|
csharp
| null |
972aa86922fe3fc5000647badc4783f41e4e10f292b5d31b
|
|
using System;
using Android.App;
using Android.Widget;
using Android.Graphics;
using Java.Interop;
using Android.Animation;
using Android.Util;
using Android.Graphics.Drawables;
using Android.Runtime;
using Android.Views;
namespace StartClockApp
{
internal class UIView : RelativeLayout
{
public const string LEFT_MARGIN = "LeftMargin";
public const string TOP_MARGIN = "TopMargin";
public const string NEW_WIDTH = "NewWidth";
const int ALPHAANIMATIONTIME = 100;
const int FRAMEANIMATIONTIME = 200;
Frame frame;
Color backgroundColor;
Color borderColor;
bool hidden;
UIView leftBorder, rightBorder, topBorder, bottomBorder;
protected Activity context;
public virtual Frame Frame {
get {
if (frame == null) {
return new Frame ();
}
return frame;
} set {
frame = value;
RelativeLayout.LayoutParams parameters = new RelativeLayout.LayoutParams(value.W, value.H);
parameters.LeftMargin = value.X;
parameters.TopMargin = value.Y;
LayoutParameters = parameters;
LayoutSubviews ();
}
}
public virtual Color BackgroundColor {
get {
return backgroundColor;
} set {
backgroundColor = value;
SetBackgroundColor (backgroundColor);
}
}
public bool HasParent {
get {
return Parent != null;
}
}
public Rect HitRect {
get {
Rect cellRect = new Rect ();
GetHitRect (cellRect);
return cellRect;
}
}
public Color BorderColor {
get {
return borderColor;
}
set {
borderColor = value;
if (leftBorder != null) {
leftBorder.BackgroundColor = borderColor;
}
if (rightBorder != null) {
rightBorder.BackgroundColor = borderColor;
}
if (topBorder != null) {
topBorder.BackgroundColor = borderColor;
}
if (bottomBorder != null) {
bottomBorder.BackgroundColor = borderColor;
}
}
}
public bool Hidden {
get {
return hidden;
} set {
hidden = value;
if (hidden) {
Hide ();
} else {
Show ();
}
}
}
public ObjectAnimator AlphaOutAnimator {
get {
return ObjectAnimator.OfFloat (this, "Alpha", Alpha, 0);
}
}
public ObjectAnimator AlphaInAnimator {
get {
return ObjectAnimator.OfFloat (this, "Alpha", Alpha, 1);
}
}
public UIView (Activity context) : base (context)
{
this.context = context;
}
public UIView (IntPtr a, JniHandleOwnership b) : base (a, b)
{
}
public virtual void LayoutSubviews ()
{
}
public void SetSlightlyRoundWithBackgroundColor (Color color)
{
GradientDrawable background = new GradientDrawable ();
background.SetCornerRadius (Sizes.GetRealSize (6));
background.SetColor (color);
Background = background;
RemoveBorders ();
}
public void SetMultiColorBackground (int[] colours)
{
GradientDrawable drawable = new GradientDrawable (GradientDrawable.Orientation.LeftRight, colours);
Background = drawable;
}
public void AddBorders (Activity context, bool left, bool right, bool top, bool bottom)
{
if (left) {
leftBorder = new UIView (context) { BackgroundColor = borderColor };
AddView (leftBorder);
}
if (right) {
rightBorder = new UIView (context) { BackgroundColor = borderColor };
AddView (rightBorder);
}
if (top) {
topBorder = new UIView (context) { BackgroundColor = borderColor };
AddView (topBorder);
}
if (bottom) {
bottomBorder = new UIView (context) { BackgroundColor = borderColor };
AddView (bottomBorder);
}
}
public void SetBorderFrames (int w)
{
if (leftBorder != null) {
leftBorder.Frame = new Frame (0, 0, w, Frame.H);
}
if (rightBorder != null) {
rightBorder.Frame = new Frame (Frame.W - w, 0, w, Frame.H);
}
if (topBorder != null) {
topBorder.Frame = new Frame (0, 0, Frame.W, w);
}
if (bottomBorder != null) {
bottomBorder.Frame = new Frame (0, Frame.H - w, Frame.W, w);
}
}
public void RemoveBorders ()
{
RemoveView (leftBorder);
RemoveView (rightBorder);
RemoveView (topBorder);
RemoveView (bottomBorder);
}
public void AnimateHide (Action completed)
{
ObjectAnimator animator = AlphaOutAnimator;
animator.SetDuration (ALPHAANIMATIONTIME);
animator.Start ();
animator.AnimationEnd += delegate {
completed ();
Visibility = ViewStates.Gone;
};
}
public void AnimateShow (Action completed)
{
ObjectAnimator animator = AlphaInAnimator;
animator.SetDuration (ALPHAANIMATIONTIME);
animator.Start ();
animator.AnimationEnd += delegate {
completed ();
Visibility = ViewStates.Visible;
};
}
public void AnimateY (int y)
{
ObjectAnimator xAnim = ObjectAnimator.OfFloat (this, TOP_MARGIN, Frame.Y, y);
xAnim.SetDuration (FRAMEANIMATIONTIME);
xAnim.Start ();
}
public void AnimateX (Frame newFrame)
{
ObjectAnimator xAnim = ObjectAnimator.OfInt (this, LEFT_MARGIN, Frame.X, newFrame.X);
xAnim.SetDuration (FRAMEANIMATIONTIME);
xAnim.Start ();
}
public void AnimateWidth (Frame newFrame, Action completed)
{
ObjectAnimator wAnim = ObjectAnimator.OfInt (this, NEW_WIDTH, Frame.W, newFrame.W);
wAnim.SetDuration (FRAMEANIMATIONTIME);
wAnim.Start ();
wAnim.AnimationEnd += delegate {
completed ();
};
}
public void AnimateXAndWidth (Frame newFrame, Action completed)
{
ObjectAnimator xAnim = ObjectAnimator.OfFloat (this, LEFT_MARGIN, (float)Frame.X, (float)newFrame.X);
ObjectAnimator wAnim = ObjectAnimator.OfInt (this, NEW_WIDTH, Frame.W, newFrame.W);
AnimatorSet set = new AnimatorSet ();
set.SetDuration (FRAMEANIMATIONTIME);
set.PlayTogether (new ObjectAnimator[] { xAnim, wAnim });
set.Start ();
set.AnimationEnd += delegate {
completed ();
};
}
public virtual void Hide ()
{
Alpha = 0;
Visibility = ViewStates.Gone;
}
// Shows the view immediately (no animation): fully opaque and Visible.
public virtual void Show ()
{
	Alpha = 1;
	Visibility = ViewStates.Visible;
}
// Repositions the view vertically, keeping X and size unchanged.
public void UpdateY (int y)
{
	Frame current = Frame;
	Frame = new Frame (current.X, y, current.W, current.H);
}
// Repositions the view horizontally, keeping Y and size unchanged.
public void UpdateX (int x)
{
	Frame current = Frame;
	Frame = new Frame (x, current.Y, current.W, current.H);
}
// Resizes the view's height, keeping position and width unchanged.
public void UpdateHeight (int height)
{
	Frame current = Frame;
	Frame = new Frame (current.X, current.Y, current.W, height);
}
// Adds every item as a child view. Non-View items become null via the
// `as` cast and are passed through to AddView, as before.
public void AddViews (params object[] items)
{
	for (int i = 0; i < items.Length; i++) {
		AddView (items [i] as View);
	}
}
// Removes every item from this container. Non-View items become null via
// the `as` cast and are passed through to RemoveView, as before.
public void RemoveViews (params object[] items)
{
	for (int i = 0; i < items.Length; i++) {
		RemoveView (items [i] as View);
	}
}
// Offsets the current frame by the given deltas (position and size).
public void UpdateFrameBy (int x, int y, int w, int h)
{
	Frame current = Frame;
	Frame = new Frame (current.X + x, current.Y + y, current.W + w, current.H + h);
}
// Adds a child view, logging and swallowing any platform exception so that
// layout construction never crashes.
public override void AddView (View child)
{
	try {
		base.AddView (child);
	} catch {
		// BUGFIX: the original handler called child.GetType() unguarded; when
		// AddViews passes a null (non-View item cast with `as`), base.AddView
		// throws and the handler itself then threw NullReferenceException.
		string childType = child == null ? "null" : child.GetType ().ToString ();
		Console.WriteLine ("!!!! Caught exception: Failed to add View: " + childType);
	}
}
// Shrink-wraps the view's height to its content, with no lower bound.
// Returns the resulting height.
public int SizeHeightToFit ()
{
	return SizeHeightToFitWithMin (0);
}
public int SizeHeightToFitWithMin (int min)
{
|
[
"\t\t\tMeasure (0, 0);"
] | 853
|
lcc
|
csharp
| null |
d9359fe4b11ff3d0cc4c2102fccceb75920f6435962b6b84
|
|
"""
Data models for the announcements app.
"""
from django.db import models
from django.conf import settings
from django.core.urlresolvers import reverse
from django.utils import timezone
from django.utils.translation import ugettext_lazy as _
from apps.tools.utils import unique_slug
from apps.tools.models import ModelDiffMixin
from apps.txtrender.fields import RenderTextField
from apps.txtrender.utils import render_document
from apps.txtrender.signals import render_engine_changed
from .managers import (AnnouncementManager,
AnnouncementTwitterCrossPublicationManager)
from .constants import (ANNOUNCEMENTS_TYPE_CHOICES,
ANNOUNCEMENTS_TYPE_DEFAULT)
class Announcement(ModelDiffMixin, models.Model):
    """
    Announcement data model. Use to quickly broadcast information about the site.
    An announcement is made of:
    - a title,
    - a slug (unique and indexed),
    - an author,
    - a creation, last content modification and publication date,
    - a type,
    - a "site wide" flag, used to determine if the announcement should be displayed on the front page.
    - some text (source and HTML version).
    Announcements made by a specific user are available using the reverse relation ``authored_announcements``.
    """

    title = models.CharField(_('Title'),
                             max_length=255)

    # FIXME AutoSlugField
    slug = models.SlugField(_('Slug'),
                            max_length=255,
                            unique=True)

    author = models.ForeignKey(settings.AUTH_USER_MODEL,
                               db_index=True,  # Database optimization
                               related_name='authored_announcements',
                               verbose_name=_('Author'))

    creation_date = models.DateTimeField(_('Creation date'),
                                         auto_now_add=True,
                                         db_index=True)  # Database optimization

    last_content_modification_date = models.DateTimeField(_('Last content modification date'),
                                                          default=None,
                                                          editable=False,
                                                          blank=True,
                                                          null=True,
                                                          db_index=True)  # Database optimization

    pub_date = models.DateTimeField(_('Publication date'),
                                    default=None,
                                    blank=True,
                                    null=True,
                                    db_index=True)  # Database optimization

    type = models.CharField(_('Type'),
                            max_length=10,
                            choices=ANNOUNCEMENTS_TYPE_CHOICES,
                            default=ANNOUNCEMENTS_TYPE_DEFAULT)

    site_wide = models.BooleanField(_('Broadcast all over the site'),
                                    default=False)

    content = RenderTextField(_('Content'))

    content_html = models.TextField(_('Content (raw HTML)'))
    content_text = models.TextField(_('Content (raw text)'))

    tags = models.ManyToManyField('AnnouncementTag',
                                  related_name='announcements',
                                  verbose_name=_('Announcement\'s tags'),
                                  blank=True)

    last_modification_date = models.DateTimeField(_('Last modification date'),
                                                  auto_now=True)

    objects = AnnouncementManager()

    class Meta:
        verbose_name = _('Announcement')
        verbose_name_plural = _('Announcements')
        permissions = (
            ('can_see_preview', 'Can see any announcements in preview'),
        )
        get_latest_by = 'pub_date'
        ordering = ('-pub_date',)

    def __str__(self):
        return self.title

    def get_absolute_url(self):
        """
        Return the permalink to this announcement.
        """
        return reverse('announcements:announcement_detail', kwargs={'slug': self.slug})

    def save(self, *args, **kwargs):
        """
        Save the announcement, fix non-unique slug, fix/update last content modification date and render the text.
        :param args: For super()
        :param kwargs: For super()
        """
        # Avoid duplicate slug
        # FIXME AutoSlugField
        self.slug = unique_slug(Announcement, self, self.slug, 'slug', self.title)

        # Fix the modification date if necessary
        self.fix_last_content_modification_date()

        # Render the content
        self.render_text()

        # Save the model
        super(Announcement, self).save(*args, **kwargs)

    def save_no_rendering(self, *args, **kwargs):
        """
        Save the announcement without doing any text rendering or fields cleanup.
        This method just call the parent ``save`` method.
        :param args: For super()
        :param kwargs: For super()
        """
        super(Announcement, self).save(*args, **kwargs)

    def fix_last_content_modification_date(self):
        """
        Fix the ``last_content_modification_date`` field according to ``pub_date`` and other fields.
        """
        if self.pub_date:
            changed_fields = self.changed_fields
            # BUGFIX: the original condition read
            #   ``self.pk and 'title' in changed_fields or 'content' in changed_fields``
            # which, because ``and`` binds tighter than ``or``, bumped the
            # modification date even for *unsaved* instances whenever the
            # content changed. The "already saved" check must cover both
            # changed fields.
            if self.pk and ('title' in changed_fields or 'content' in changed_fields):
                self.last_content_modification_date = timezone.now()

            # A modification date at or before publication carries no
            # information — normalize it away.
            if self.last_content_modification_date \
                    and self.last_content_modification_date <= self.pub_date:
                self.last_content_modification_date = None
        else:
            # Unpublished announcements cannot have been "modified after
            # publication".
            self.last_content_modification_date = None

    def is_published(self):
        """
        Return ``True`` if this announcement is published and so, readable by anyone.
        """
        now = timezone.now()
        return self.pub_date is not None and self.pub_date <= now
    is_published.boolean = True
    is_published.short_description = _('Published')

    def can_see_preview(self, user):
        """
        Return True if the given user can see this announcement in preview mode.
        :param user: The user to be checked for permission
        """
        return user == self.author or user.has_perm('announcements.can_see_preview')

    def has_been_modified_after_publication(self):
        """
        Return True if the announcement has been modified after publication.
        """
        return self.last_content_modification_date is not None \
            and self.last_content_modification_date != self.pub_date

    def render_text(self, save=False):
        """
        Render the content.
        :param save: Save the model field ``content_html`` if ``True``.
        """
        # Render HTML and plain-text versions; the third return value of
        # render_document is unused here.
        content_html, content_text, _ = render_document(self.content,
                                                        allow_titles=True,
                                                        allow_code_blocks=True,
                                                        allow_text_formating=True,
                                                        allow_text_extra=True,
                                                        allow_text_alignments=True,
                                                        allow_text_directions=True,
                                                        allow_text_modifiers=True,
                                                        allow_text_colors=True,
                                                        allow_spoilers=True,
                                                        allow_figures=True,
                                                        allow_lists=True,
                                                        allow_todo_lists=True,
                                                        allow_definition_lists=True,
                                                        allow_tables=True,
                                                        allow_quotes=True,
                                                        allow_footnotes=True,
                                                        allow_acronyms=True,
                                                        allow_links=True,
                                                        allow_medias=True,
                                                        allow_cdm_extra=True,
                                                        force_nofollow=False,
                                                        render_text_version=True,
                                                        merge_footnotes_html=True,
                                                        merge_footnotes_text=True)
        self.content_html = content_html
        self.content_text = content_text

        # Save if required
        if save:
            self.save_no_rendering(update_fields=('content_html', 'content_text'))
def _redo_announcements_text_rendering(sender, **kwargs):
    """
    Redo text rendering of all announcements.
    Connected to the ``render_engine_changed`` signal so that switching the
    rendering engine re-renders and persists every announcement.
    :param sender: Not used.
    :param kwargs: Not used.
    """
    # NOTE(review): iterates every row; acceptable for small tables, but a
    # large announcement set would benefit from .iterator() — confirm scale.
    for announcement in Announcement.objects.all():
        announcement.render_text(save=True)
render_engine_changed.connect(_redo_announcements_text_rendering)
class AnnouncementTag(models.Model):
    """
    Announcement tag data model.
    An announcement's tag is made of:
    - a slug (unique and indexed in database),
    - a name (human readable).
    """

    # FIXME AutoSlugField
    slug = models.SlugField(_('Slug'),
                            max_length=255,
                            unique=True)

    name = models.CharField(_('Name'),
                            max_length=255)

    class Meta:
        verbose_name = _('Announcement tag')
        verbose_name_plural = _('Announcement tags')

    def __str__(self):
        return self.name

    def get_absolute_url(self):
        """
        Return the permalink to this announcement's tag.
        """
        return reverse('announcements:tag_detail', kwargs={'slug': self.slug})

    def get_latest_announcements_rss_feed_url(self):
        """
        Return the permalink to "latest announcements" RSS feed for this tag.
        """
        return reverse('announcements:latest_tag_announcements_rss', kwargs={'slug': self.slug})

    def get_latest_announcements_atom_feed_url(self):
        """
        Return the permalink to "latest announcements" Atom feed for this tag.
        """
        return reverse('announcements:latest_tag_announcements_atom', kwargs={'slug': self.slug})

    def save(self, *args, **kwargs):
        """
        Save the model, de-duplicating the slug first.
        :param args: For super()
        :param kwargs: For super()
        """
        # Avoid duplicate slug
        # FIXME AutoSlugField
        self.slug = unique_slug(AnnouncementTag, self, self.slug, 'slug', self.name)

        # Save the tag
        super(AnnouncementTag, self).save(*args, **kwargs)
class AnnouncementTwitterCrossPublication(models.Model):
"""
Cross-publication marker for the Twitter platform.
This simple model store three information:
- the cross-published announcement,
- the tweet ID of the cross-publication (for history in case of problem),
- the date of cross-publication.
"""
announcement = models.ForeignKey('Announcement',
db_index=True, # Database optimization
related_name='twitter_pubs',
verbose_name=_('Announcement'))
tweet_id = models.CharField(_('Tweet ID'),
db_index=True, # Database optimization
max_length=255)
pub_date = models.DateTimeField(_('Creation date'),
auto_now_add=True,
db_index=True) # Database optimization
objects = AnnouncementTwitterCrossPublicationManager()
class Meta:
verbose_name = _('Twitter cross-publication')
verbose_name_plural = _('Twitter cross-publications')
get_latest_by = 'pub_date'
ordering = ('-pub_date', )
def __str__(self):
|
[
" return '%s -> %s' % (self.announcement, self.tweet_id)"
] | 846
|
lcc
|
python
| null |
795a770e3d8fd600446fa22b4ced986b2139cc631f903b24
|
|
# -*- coding: utf-8 -*-
#
# Kotori documentation build configuration file, created by
# sphinx-quickstart on Fri Nov 6 21:36:37 2015.
#
# This file is execfile()d with the current directory set to its
# containing dir.
#
# Note that not all possible configuration values are present in this
# autogenerated file.
#
# All configuration values have a default; values that are commented out
# serve to show the default.
import sys
import os
import shlex
# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
#sys.path.insert(0, os.path.abspath('.'))
#sys.path.insert(0, os.path.join(os.path.abspath('.'), '_extensions'))
# -- General configuration ------------------------------------------------
# If your documentation needs a minimal Sphinx version, state it here.
#needs_sphinx = '1.0'
# Add any Sphinx extension module names here, as strings. They can be
# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
# ones.
extensions = [
'sphinx.ext.intersphinx',
'sphinx.ext.todo',
'sphinx.ext.ifconfig',
'sphinx.ext.graphviz',
#'nfotools',
]
# Add any paths that contain templates here, relative to this directory.
templates_path = ['_templates']
# The suffix(es) of source filenames.
# You can specify multiple suffix as a list of string:
# source_suffix = ['.rst', '.md']
source_suffix = '.rst'
# The encoding of source files.
#source_encoding = 'utf-8-sig'
# The master toctree document.
master_doc = 'index'
# General information about the project.
project = u'Kotori'
copyright = u'2013-2020, The Kotori Developers'
author = u'The Kotori Developers'
# The version info for the project you're documenting, acts as replacement for
# |version| and |release|, also used in various other places throughout the
# built documents.
#
# The short X.Y version.
version = '0.24.5'
# The full version, including alpha/beta/rc tags.
release = '0.24.5'
# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
#
# This is also used if you do content translation via gettext catalogs.
# Usually you set "language" from the command line for these cases.
language = 'en'
# There are two options for replacing |today|: either, you set today to some
# non-false value, then it is used:
#today = ''
# Else, today_fmt is used as the format for a strftime call.
#today_fmt = '%B %d, %Y'
# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
exclude_patterns = []
# The reST default role (used for this markup: `text`) to use for all
# documents.
#default_role = None
# If true, '()' will be appended to :func: etc. cross-reference text.
#add_function_parentheses = True
# If true, the current module name will be prepended to all description
# unit titles (such as .. function::).
#add_module_names = True
# If true, sectionauthor and moduleauthor directives will be shown in the
# output. They are ignored by default.
#show_authors = False
# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'sphinx'
# A list of ignored prefixes for module index sorting.
#modindex_common_prefix = []
# If true, keep warnings as "system message" paragraphs in the built documents.
#keep_warnings = False
# If true, `todo` and `todoList` produce output, else they produce nothing.
todo_include_todos = True
# -- Options for HTML output ----------------------------------------------
# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
#html_theme = 'sphinx_rtd_theme'
# Theme options are theme-specific and customize the look and feel of a theme
# further. For a list of options available for each theme, see the
# documentation.
#html_theme_options = {}
# Add any paths that contain custom themes here, relative to this directory.
#html_theme_path = []
# The name for this set of Sphinx documents. If None, it defaults to
# "<project> v<release> documentation".
#html_title = None
# A shorter title for the navigation bar. Default is the same as html_title.
#html_short_title = None
# The name of an image file (relative to this directory) to place at the top
# of the sidebar.
#html_logo = None
# The name of an image file (within the static path) to use as favicon of the
# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32
# pixels large.
#html_favicon = None
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
html_static_path = ['_static']
# Add any extra paths that contain custom files (such as robots.txt or
# .htaccess) here, relative to this directory. These files are copied
# directly to the root of the documentation.
#html_extra_path = []
# If not '', a 'Last updated on:' timestamp is inserted at every page bottom,
# using the given strftime format.
#html_last_updated_fmt = '%b %d, %Y'
html_last_updated_fmt = ""
# If true, SmartyPants will be used to convert quotes and dashes to
# typographically correct entities.
#html_use_smartypants = True
# Custom sidebar templates, maps document names to template names.
#html_sidebars = {}
# Additional templates that should be rendered to pages, maps page names to
# template names.
#html_additional_pages = {}
# If false, no module index is generated.
html_domain_indices = True
# If false, no index is generated.
html_use_index = True
# If true, the index is split into individual pages for each letter.
#html_split_index = False
# If true, links to the reST sources are added to the pages.
#html_show_sourcelink = True
# If true, "Created using Sphinx" is shown in the HTML footer. Default is True.
#html_show_sphinx = True
# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True.
#html_show_copyright = True
# If true, an OpenSearch description file will be output, and all pages will
# contain a <link> tag referring to it. The value of this option must be the
# base URL from which the finished HTML is served.
#html_use_opensearch = ''
# This is the file name suffix for HTML files (e.g. ".xhtml").
#html_file_suffix = None
# Language to be used for generating the HTML full-text search index.
# Sphinx supports the following languages:
# 'da', 'de', 'en', 'es', 'fi', 'fr', 'hu', 'it', 'ja'
# 'nl', 'no', 'pt', 'ro', 'ru', 'sv', 'tr'
#html_search_language = 'en'
# A dictionary with options for the search language support, empty by default.
# Now only 'ja' uses this config value
#html_search_options = {'type': 'default'}
# The name of a javascript file (relative to the configuration directory) that
# implements a search results scorer. If empty, the default will be used.
#html_search_scorer = 'scorer.js'
# Output file base name for HTML help builder.
htmlhelp_basename = 'Kotoridoc'
# -- Options for LaTeX output ---------------------------------------------
latex_elements = {
# The paper size ('letterpaper' or 'a4paper').
#'papersize': 'letterpaper',
# The font size ('10pt', '11pt' or '12pt').
#'pointsize': '10pt',
# Additional stuff for the LaTeX preamble.
#'preamble': '',
# Latex figure (float) alignment
#'figure_align': 'htbp',
}
# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title,
# author, documentclass [howto, manual, or own class]).
latex_documents = [
(master_doc, 'Kotori.tex', u'Kotori Documentation',
u'Kotori Developers', 'manual'),
]
# The name of an image file (relative to this directory) to place at the top of
# the title page.
#latex_logo = None
# For "manual" documents, if this is true, then toplevel headings are parts,
# not chapters.
#latex_use_parts = False
# If true, show page references after internal links.
#latex_show_pagerefs = False
# If true, show URL addresses after external links.
#latex_show_urls = False
# Documents to append as an appendix to all manuals.
#latex_appendices = []
# If false, no module index is generated.
#latex_domain_indices = True
# -- Options for manual page output ---------------------------------------
# One entry per manual page. List of tuples
# (source start file, name, description, authors, manual section).
man_pages = [
(master_doc, 'kotori', u'Kotori Documentation',
[author], 1)
]
# If true, show URL addresses after external links.
#man_show_urls = False
# -- Options for Texinfo output -------------------------------------------
# Grouping the document tree into Texinfo files. List of tuples
# (source start file, target name, title, author,
# dir menu entry, description, category)
texinfo_documents = [
(master_doc, 'Kotori', u'Kotori Documentation',
author, 'Kotori', 'Data Acquisition and Telemetry',
'DAQ'),
]
# Documents to append as an appendix to all manuals.
#texinfo_appendices = []
# If false, no module index is generated.
#texinfo_domain_indices = True
# How to display URL addresses: 'footnote', 'no', or 'inline'.
#texinfo_show_urls = 'footnote'
# If true, do not generate a @detailmenu in the "Top" node's menu.
#texinfo_no_detailmenu = False
# -- Custom options -------------------------------------------
import sphinx_material
html_show_sourcelink = True
html_sidebars = {
"**": ["logo-text.html", "globaltoc.html", "localtoc.html", "searchbox.html"]
}
# Required theme setup
extensions.append('sphinx_material')
html_theme = 'sphinx_material'
html_theme_path = sphinx_material.html_theme_path()
html_context = sphinx_material.get_html_context()
# Material theme options (see theme.conf for more information)
html_theme_options = {
# Set the name of the project to appear in the navigation.
'nav_title': 'Kotori',
# Set you GA account ID to enable tracking
#'google_analytics_account': 'UA-XXXXX',
# Specify a base_url used to generate sitemap.xml. If not
# specified, then no sitemap will be built.
'base_url': 'https://getkotori.org/docs/',
# Set the color and the accent color
'color_primary': 'blue',
'color_accent': 'light-blue',
# Set the repo location to get a badge with stats
'repo_url': 'https://github.com/daq-tools/kotori/',
'repo_name': 'Kotori',
# Visible levels of the global TOC; -1 means unlimited
'globaltoc_depth': 3,
# If False, expand all TOC entries
#'globaltoc_collapse': False,
# If True, show hidden TOC entries
#'globaltoc_includehidden': False,
"master_doc": False,
"nav_links": [
],
"heroes": {
"index": "A data historian based on InfluxDB, Grafana, MQTT and more.",
"about/index": "A data historian based on InfluxDB, Grafana, MQTT and more.",
"about/scenarios": "Conceived for consumers, integrators and developers.",
"about/technologies": "Standing on the shoulders of giants.",
"examples/index": "Telemetry data acquisition and sensor networks for humans.",
"setup/index": "Easy to install and operate.",
},
}
html_logo = '_static/img/kotori-logo.png'
def setup(app):
    """
    Sphinx extension hook: register extra CSS/JS assets for the HTML build.
    :param app: The Sphinx application object.
    """
    # https://github.com/snide/sphinx_rtd_theme/issues/117#issuecomment-41571653
    # foundation
    # Bootstrap conflicts with Sphinx
    #app.add_stylesheet("assets/css/bootstrap.min.css")
    # NOTE(review): add_stylesheet/add_javascript were deprecated in Sphinx 1.8
    # and removed in 4.0 (use add_css_file/add_js_file) — confirm the pinned
    # Sphinx version before upgrading.
    app.add_stylesheet("assets/css/font-awesome.min.css")
    app.add_stylesheet("assets/css/font-entypo.css")
    app.add_stylesheet("assets/css/hexagons.min.css")

    # jQuery 2.1.0 conflicts with jQuery 1.11.1 from Sphinx
    #app.add_javascript("assets/js/jquery-2.1.0.min.js")
    app.add_javascript("assets/js/hexagons.min.js")

    # application
    #app.add_javascript("custom.js")
    app.add_stylesheet("css/kotori-sphinx.css")
# Link with BERadio and Hiveeyes projects
intersphinx_mapping = {
'beradio': ('https://hiveeyes.org/docs/beradio/', None),
'hiveeyes': ('https://hiveeyes.org/docs/system/', None),
'hiveeyes-arduino': ('https://hiveeyes.org/docs/arduino/', None),
}
# Disable caching remote inventories completely
# http://www.sphinx-doc.org/en/stable/ext/intersphinx.html#confval-intersphinx_cache_limit
intersphinx_cache_limit = 0
# Enable proper highlighting for inline PHP by tuning Pygments' PHP lexer.
# See also http://mbless.de/blog/2015/03/02/php-syntax-highlighting-in-sphinx.html
# Load PhpLexer
from sphinx.highlighting import lexers
from pygments.lexers.web import PhpLexer
# Enable highlighting for PHP code not between <?php ... ?> by default
|
[
"lexers['php'] = PhpLexer(startinline=True)"
] | 1,773
|
lcc
|
python
| null |
9ff8464a2c0b3eaf3e56da2bde63d1a0b3109df20a251c6e
|
|
import settings
import string
import datetime
import time
from time import mktime
import sqlite3
import pytz
from datetime import timedelta
# Creates or opens a file called mydb with a SQLite3 DB
db = sqlite3.connect('flogger.sql3')
# Get a cursor object
cursor = db.cursor()
cursor.execute('''CREATE TABLE IF NOT EXISTS
aircraft(id INTEGER PRIMARY KEY,registration TEXT,type TEXT,model TEXT,owner TEXT,airfield TEXT,flarm_id TEXT)''')
cursor.execute('''CREATE TABLE IF NOT EXISTS
flight_times(id INTEGER PRIMARY KEY,registration TEXT,type TEXT,model TEXT,
flarm_id TEXT,date, TEXT,start_time TEXT,duration TEXT,max_altitude TEXT)''')
cursor.execute('''DROP TABLE flight_log_final''')
cursor.execute('''CREATE TABLE IF NOT EXISTS
flight_log_final(id INTEGER PRIMARY KEY, sdate TEXT, stime TEXT, edate TEXT, etime TEXT, duration TEXT,
src_callsign TEXT, max_altitude TEXT, speed TEXT, registration TEXT)''')
cursor.execute('''DROP TABLE flight_log''')
cursor.execute('''CREATE TABLE IF NOT EXISTS
flight_log(id INTEGER PRIMARY KEY, sdate TEXT, stime TEXT, edate TEXT, etime TEXT, duration TEXT,
src_callsign TEXT, max_altitude TEXT, speed TEXT, registration TEXT)''')
cursor.execute('''DROP TABLE flight_group''')
cursor.execute('''CREATE TABLE IF NOT EXISTS
flight_group(id INTEGER PRIMARY KEY, groupID TEXT, sdate TEXT, stime TEXT, edate TEXT, etime TEXT, duration TEXT,
src_callsign TEXT, max_altitude TEXT, registration TEXT)''')
cursor.execute('''DROP TABLE flights''')
cursor.execute('''CREATE TABLE IF NOT EXISTS
flights(id INTEGER PRIMARY KEY, sdate TEXT, stime TEXT, edate TEXT, etime TEXT, duration TEXT,
src_callsign TEXT, max_altitude TEXT, registration TEXT)''')
#cursor.execute('''DELETE FROM flight_log''')
MINTIME = time.strptime("0:5:0", "%H:%M:%S") # 5 minutes minimum flight time
print "MINTIME is: ", MINTIME
# Need to find the highest date record in flight_log and for each record in flight_log_final
# if this has a date greater than this then process it to check whether it should be added
#
cursor.execute('''SELECT max(sdate) FROM flight_log''')
row = cursor.fetchone()
print "row is: ", row
#
# The following takes into account the situation when there are no records in flight_log
# and there is therefore no highest date record. Note it does require that this code is
# run on the same day as the flights are recorded in flight_log_final
#
if row <> (None,):
max_date = datetime.datetime.strptime(row[0], "%y/%m/%d")
print "Last record date in flight_log is: ", max_date
else:
print "No records in flight_log so set date to today"
today = datetime.date.today().strftime("%y/%m/%d")
max_date = datetime.datetime.strptime(today, "%y/%m/%d")
print "max_date set to today: ", max_date
cursor.execute('''SELECT sdate, stime, edate, etime, duration, src_callsign, max_altitude, speed, registration FROM flight_log_final''')
data = cursor.fetchall()
for row in data:
print "Row is: sdate %s, stime %s, edate %s, etime %s, duration %s, src_callsign %s, altitude %s, speed %s, registration %s" % (row[0], row[1], row[2], row[3], row[4], row[5], row[6], row[7], row[8])
# print "Row is: sdate %s" % row[0]
# print "stime %s " % row[1]
# print "edate %s " % row[2]
# print "etime %s " % row[3]
# print "duration %s " % row[4]
# print "src_callsign %s " % row[5]
# print "altitude %s " % row[6]
# print "speed %s" % row[7]
# print "registration %s" % row[8]
time_str = row[4].replace("h", "")
time_str = time_str.replace("m", "")
time_str = time_str.replace("s", "")
print "Duration now: ", time_str
duration = time.strptime(time_str, "%H: %M: %S")
strt_date = datetime.datetime.strptime(row[0], "%y/%m/%d")
if strt_date >= max_date:
print "**** Record start date: ", strt_date, " after last flight_log record, copy: ", max_date
if duration > MINTIME:
print "#### Copy record. Duration is: ", time_str
cursor.execute('''INSERT INTO flight_log(sdate, stime, edate, etime, duration, src_callsign, max_altitude, speed, registration)
VALUES(:sdate,:stime,:edate,:etime,:duration,:src_callsign,:max_altitude,:speed, :registration)''',
{'sdate':row[0], 'stime':row[1], 'edate': row[2], 'etime':row[3],
'duration': row[4], 'src_callsign':row[5], 'max_altitude':row[6], 'speed':row[7], 'registration':row[8]})
print "Row copied"
else:
print "====Ignore row, flight time too short: ", row[4]
else:
print "???? Record start date: ", strt_date, " before last flight_log record, ignore: ", max_date
print "Done"
db.commit()
# Phase 2 processing
# For some records for each flight the end time and next start time are too close together
# to be independent flights.
# This phase examines all the records and puts them into groups such that each group has
# an end and start time, such that they are distinct flights ie their end and start times are greater than
# TIME_DELTA, and not just therefore data
# jiggles (eg moving the plane to a new position on the flight line),
# ie the end and start time of subsequent flights is such that it couldn't have been a real flight
print "Phase 2"
TIME_DELTA = "0:2:0" # Time in hrs:min:sec of shortest flight
#
# Note the following code processes each unique or distinct call_sign ie each group
# of flights for a call_sign
# SELECT DISTINCT call_sign FROM flight_log
# rows = cursor.fetchall()
# for call_sign in rows
group = 0 # Number of groups set for case there are none
cursor.execute('''SELECT DISTINCT src_callsign FROM flight_log ORDER BY sdate, stime ''')
all_callsigns = cursor.fetchall()
print "All call_signs: ", all_callsigns
# Phase 2 main loop: for every distinct call sign, walk its flights in
# chronological order and assign a groupID to each, starting a new group
# whenever the gap between one flight's end and the next flight's start
# exceeds TIME_DELTA.
for acallsign in all_callsigns:
    # call_sign = "FLRDDE671"
    call_sign = ''.join(acallsign)	# callsign is a tuple ie (u'cccccc',) converts ccccc to string
    print "Processing for call_sign: ", call_sign
    # First query is only used to count this call sign's rows.
    cursor.execute('''SELECT sdate, stime, edate, etime, duration, src_callsign, max_altitude
                    FROM flight_log WHERE src_callsign=?
                    ORDER BY sdate, stime ''', (call_sign,))
    #for row in rows:
    row_count = len(cursor.fetchall())
    print "nos rows is: ", row_count
    # Second query is consumed pairwise via cursor.next() below.
    cursor.execute('''SELECT sdate, stime, edate, etime, duration, src_callsign, max_altitude, registration
                    FROM flight_log WHERE src_callsign=?
                    ORDER BY sdate, stime ''', (call_sign,))
    i = 1
    group = 1
    while i <= row_count:
        try:
            # Read two consecutive flights to compare end/start times.
            row_0 =cursor.next()
            row_1 = cursor.next()
            print "Row pair: ", i
            print "row_0 is: ", row_0
            print "row_1 is: ", row_1
            # NOTE(review): the result of this strptime call is discarded —
            # presumably a leftover validation of TIME_DELTA's format.
            time.strptime(TIME_DELTA, "%H:%M:%S")
            # Gap between flight N's end time and flight N+1's start time.
            time_delta = datetime.datetime.strptime(row_1[1], "%H:%M:%S") - datetime.datetime.strptime(row_0[3], "%H:%M:%S")
            delta_secs = time_delta.total_seconds()
            time_lmt = datetime.datetime.strptime(TIME_DELTA, "%H:%M:%S") - datetime.datetime.strptime("0:0:0", "%H:%M:%S")
            lmt_secs = time_lmt.total_seconds()
            print "Delta secs is: ", delta_secs, " Time limit is: ", lmt_secs
            # NOTE(review): both INSERTs below read 'registration' from row[7],
            # i.e. the loop variable left over from the Phase-1 copy loop —
            # almost certainly meant row_0[7] (this flight's registration);
            # verify and fix.
            if (delta_secs) < lmt_secs:
                # Gap too small: same physical flight, keep the same groupID.
                print "++++Same flight"
                cursor.execute('''INSERT INTO flight_group(groupID, sdate, stime, edate, etime, duration, src_callsign, max_altitude, registration)
                    VALUES(:groupID,:sdate,:stime,:edate,:etime,:duration,:src_callsign,:max_altitude, :registration)''',
                    {'groupID':group, 'sdate':row_0[0], 'stime':row_0[1], 'edate': row_0[2], 'etime':row_0[3],
                    'duration': row_0[4], 'src_callsign':row_0[5], 'max_altitude':row_0[6], 'registration': row[7]})
            else:
                # Different flight so start next group ID
                print "----Different flight"
                cursor.execute('''INSERT INTO flight_group(groupID, sdate, stime, edate, etime, duration, src_callsign, max_altitude, registration)
                    VALUES(:groupID,:sdate,:stime,:edate,:etime,:duration,:src_callsign,:max_altitude, :registration)''',
                    {'groupID':group, 'sdate':row_0[0], 'stime':row_0[1], 'edate': row_0[2], 'etime':row_0[3],
                    'duration': row_0[4], 'src_callsign':row_0[5], 'max_altitude':row_0[6], 'registration': row[7]})
                group = group + 1
            i = i + 1
            # The INSERTs above reset the cursor, so re-run the SELECT and
            # replay forward to resume one row past row_0 (O(n^2) but simple).
            cursor.execute('''SELECT sdate, stime, edate, etime, duration, src_callsign, max_altitude, registration
                    FROM flight_log WHERE src_callsign=?
                    ORDER BY sdate, stime ''', (call_sign,))
            j = 1
            print "i is: ", i, " j is: ",j
            while j < i:
                print "Move to row: ", j
                row_0 = cursor.next()
                j = j + 1
        except StopIteration:
            # Ran off the end of the result set: last (possibly unpaired) row.
            print "Last row"
            break
db.commit()
# Phase 3. This sums the flight durations for each of the flight groups
# hence resulting in the actual flight start, end times and duration
print "+++++++Phase 3"
#
# This function since I can't find a library function that does what I want; dates & times
# are very confusing in Python!
#
def time_add(t1, t2):
ts = 0
tm = 0
th = 0
t = t1[5] + t2[5]
if t >= 60:
ts = t - 60
tm = int(t / 60)
else:
ts = t
t = t1[4] + t2[4] + tm
if t >= 60:
tm = t - 60
th = int(t/60)
else:
tm = t
th = t1[3] + t2[3] + th
print "Time tuple is: ", (th, tm, ts)
tstring = "%s:%s:%s" % (th, tm, ts)
print "tstring is: ", tstring
time_return = time.strptime(tstring, "%H:%M:%S")
return time_return
if group <> 0:
max_groupID = group - 1
print "Max groupID is: ", max_groupID
else:
print "No groups to process"
exit()
i = 1
while i <= max_groupID:
cursor.execute('''SELECT max(max_altitude) FROM flight_group WHERE groupID=? ''', (i,))
r = cursor.fetchone()
max_altitude = r[0]
print "Max altitude from group: ", i, " is: ", r[0]
cursor.execute('''SELECT sdate, stime, edate, etime, duration, src_callsign, max_altitude, registration
FROM flight_group WHERE groupID=?
|
[
" ORDER BY sdate, stime ''', (i,))"
] | 1,259
|
lcc
|
python
| null |
3a72959ac7abd9d2f988ade3c891be2582b4b66a6cc74678
|
|
#!/usr/bin/env python3
import datetime, json, os, requests, rethinkdb, shutil, signal, socket, subprocess, time
HOSTS = '/node/etc/hosts'
# POD_NAMESPACE must be explicitly set in deployment yaml using downward api --
# see https://github.com/kubernetes/kubernetes/blob/release-1.0/docs/user-guide/downward-api.md
POD_NAMESPACE = os.environ.get('POD_NAMESPACE', 'default')
def log(*args, **kwds):
    # Print a log line prefixed with a timestamp.
    # NOTE(review): time_to_timestamp() is not defined in this file chunk —
    # presumably defined further down or imported; verify it exists.
    print(time_to_timestamp(), *args, **kwds)
alarm_time=0
def mysig(a, b):
    """Signal handler: turn SIGALRM into a KeyboardInterrupt exception."""
    raise KeyboardInterrupt()
def alarm(seconds):
    """Arm SIGALRM to fire in `seconds`; mysig converts it to KeyboardInterrupt."""
    global alarm_time
    secs = int(seconds)
    signal.signal(signal.SIGALRM, mysig)
    alarm_time = secs  # remember the requested timeout
    signal.alarm(secs)
def cancel_alarm():
    """Disarm the watchdog by ignoring any pending SIGALRM."""
    signal.signal(signal.SIGALRM, signal.SIG_IGN)
def run(v, shell=False, path='.', get_output=False, env=None, verbose=True, timeout=20):
    """Run a command with a SIGALRM-based timeout.

    v          -- shell string (forces shell=True) or argv list
    shell      -- run through /bin/bash when True
    path       -- directory to chdir into for the duration of the command
    get_output -- capture and return decoded stdout instead of None
    env        -- environment for the child process
    verbose    -- log the command and its total run time
    timeout    -- seconds until the pending alarm interrupts the call

    Raises RuntimeError when the command exits nonzero (get_output=False).
    """
    try:
        alarm(timeout)
        t = time.time()
        if isinstance(v, str):
            cmd = v
            shell = True
        else:
            # Quote multi-word arguments -- this string is for logging only.
            cmd = ' '.join([(x if len(x.split())<=1 else '"%s"'%x) for x in v])
        if path != '.':
            cur = os.path.abspath(os.curdir)
            if verbose:
                log('chdir %s'%path)
            os.chdir(path)
        try:
            if verbose:
                log(cmd)
            if shell:
                kwds = {'shell':True, 'executable':'/bin/bash', 'env':env}
            else:
                kwds = {'env':env}
            if get_output:
                output = subprocess.Popen(v, stdout=subprocess.PIPE, **kwds).stdout.read().decode()
            else:
                if subprocess.call(v, **kwds):
                    raise RuntimeError("error running '{cmd}'".format(cmd=cmd))
                output = None
            seconds = time.time() - t
            if verbose:
                log("TOTAL TIME: {seconds} seconds -- to run '{cmd}'".format(seconds=seconds, cmd=cmd))
            return output
        finally:
            # Always restore the original working directory.
            if path != '.':
                os.chdir(cur)
    finally:
        cancel_alarm()
def get_service(service):
    """
    Get in json format the kubernetes information about the given service.

    Returns the decoded endpoints object, or None when the apiserver
    address is not available in the environment.
    """
    # BUG FIX: the old guard indexed os.environ directly, so a *missing*
    # KUBERNETES_SERVICE_HOST raised KeyError instead of logging and
    # returning None as intended.  .get() handles both unset and empty.
    if not os.environ.get('KUBERNETES_SERVICE_HOST'):
        log('KUBERNETES_SERVICE_HOST environment variable not set')
        return None
    URL = "https://{KUBERNETES_SERVICE_HOST}:{KUBERNETES_SERVICE_PORT}/api/v1/namespaces/{POD_NAMESPACE}/endpoints/{service}"
    URL = URL.format(KUBERNETES_SERVICE_HOST=os.environ['KUBERNETES_SERVICE_HOST'],
                     KUBERNETES_SERVICE_PORT=os.environ['KUBERNETES_SERVICE_PORT'],
                     POD_NAMESPACE=POD_NAMESPACE,
                     service=service)
    # Authenticate with the pod's service-account token.
    token = open('/var/run/secrets/kubernetes.io/serviceaccount/token').read()
    headers = {'Authorization': 'Bearer {token}'.format(token=token)}
    log("Getting k8s information about '{service}' from '{URL}'".format(service=service, URL=URL))
    x = requests.get(URL, headers=headers, verify='/var/run/secrets/kubernetes.io/serviceaccount/ca.crt').json()
    log("Got {x}".format(x=x))
    return x
def update_etc_hosts():
    """Splice the current storage-project endpoint IPs into the node's /etc/hosts.

    The managed lines live between "# start smc-storage dns" / "# end ..."
    marker comments so repeated runs replace, rather than grow, the block.
    """
    log('update_etc_hosts')  # BUG FIX: message previously said 'udpate_etc_hosts'
    try:
        v = get_service('storage-projects')
    except Exception as err:
        # Expected to happen when node is starting up, etc. - we'll retry later soon!
        log("Failed getting storage service info", err)
        return
    # BUG FIX: get_service returns None when the apiserver address is not
    # configured; the old code then crashed on v.get with AttributeError.
    if v is None or v.get('status', None) == 'Failure':
        return
    try:
        if 'addresses' not in v['subsets'][0]:
            return  # nothing to do; no known addresses
        namespace = v['metadata']['namespace']
        hosts = ["{ip} {namespace}-{name}".format(ip=x['ip'], namespace=namespace,
                 name=x['targetRef']['name'].split('-')[0]) for x in v['subsets'][0]['addresses']]
        start = "# start smc-storage dns - namespace="+namespace+"\n\n"
        end = "# end smc-storage dns - namespace="+namespace+"\n\n"
        block = '\n'.join([start] + hosts + [end])
        current = open(HOSTS).read()
        if block in current:
            return  # already up to date
        i = current.find(start)
        j = current.find(end)
        if i == -1 or j == -1:
            # No managed section yet: append one.
            new = current + '\n' + block
        else:
            # Replace the existing managed section in place.
            new = current[:i] + block + current[j+len(end):]
        open(HOSTS,'w').write(new)
    except Exception as err:
        log("Problem in update_etc_hosts", err)
MINION_IP = 'unknown'
def enable_ssh_access_to_minion():
    """Create a fresh root ssh keypair and authorize it on the minion.

    Side effects: wipes /root/.ssh, appends the new public key to the
    minion's authorized_keys, disables host-key checking, and records the
    minion's IP in MINION_IP and /node/minion_ip.
    """
    global MINION_IP
    # create our own local ssh key
    if os.path.exists('/root/.ssh'):
        shutil.rmtree('/root/.ssh')
    run(['ssh-keygen', '-b', '2048', '-N', '', '-f', '/root/.ssh/id_rsa'])
    # make root user of minion allow login using this (and only this) key.
    run('cat /root/.ssh/id_rsa.pub >> /node/root/.ssh/authorized_keys')
    open("/root/.ssh/config",'w').write("StrictHostKeyChecking no\nUserKnownHostsFile=/dev/null\n")
    # record hostname of minion
    # NOTE(review): assumes the minion's hosts entry contains the word
    # 'group' -- confirm against the cluster's node naming scheme.
    for x in open("/node/etc/hosts").readlines():
        if 'group' in x:
            MINION_IP = x.split()[0]
    open("/node/minion_ip",'w').write(MINION_IP)
def minion_ip():
    """Return the minion's IP, using the cache file or provisioning ssh access."""
    global MINION_IP
    if MINION_IP != 'unknown':
        return MINION_IP
    if os.path.exists("/node/minion_ip"):
        MINION_IP = open("/node/minion_ip").read()
        return MINION_IP
    enable_ssh_access_to_minion()
    if MINION_IP == 'unknown':
        raise RuntimeError("first run enable_ssh_access_to_minion")
    return MINION_IP
def run_on_minion(v, *args, **kwds):
    """Run a command on the minion over ssh; v is a shell string or argv list."""
    if isinstance(v, str):
        remote = "ssh " + minion_ip() + " '%s'"%v
    else:
        remote = ['ssh', minion_ip() ] + v
    return run(remote, *args, **kwds)
def smc_storage(*args, **kwds):
    """Invoke the smc-storage flexvolume helper on the minion."""
    helper = "/usr/libexec/kubernetes/kubelet-plugins/volume/exec/smc~smc-storage/smc-storage"
    return run_on_minion([helper] + list(args), **kwds)
def install_flexvolume_plugin():
    """Copy the smc-storage flexvolume plugin (contents and mode) onto the node."""
    # Always copy, which also upgrades an existing install if necessary.
    src = "/install/smc-storage"
    dst = "/node/plugin/smc-storage"
    shutil.copyfile(src, dst)
    shutil.copymode(src, dst)
def is_plugin_loaded():
    """Return True when the kubelet log shows the smc-storage plugin loaded."""
    try:
        out = run_on_minion("zgrep Loaded /var/log/kubelet*|grep smc-storage|wc -l", get_output=True)
        return int(out.strip()) > 0
    except Exception as err:
        log(err)
        return False
def install_zfs():
    """Ensure ZFS works on the minion; install the bundled build if it doesn't."""
    try:
        run_on_minion('zpool status')
        log("OK: zfs is installed")
    except:
        log("zfs not installed, so installing it")
        # Ship the prebuilt kernel-specific package over and run its installer.
        run(['scp', '-r', '/install/gke-zfs', minion_ip()+":"])
        run_on_minion("cd /root/gke-zfs/3.16.0-4-amd64/ && ./install.sh")
def install_bindfs():
    """Ensure bindfs is available on the minion; apt-install it if missing."""
    try:
        run_on_minion('which bindfs')
        log("OK: bindfs is installed")
    except:
        log("bindfs not installed, so installing it")
        run_on_minion(["apt-get", "update"])
        run_on_minion(["apt-get", "install", "-y", "bindfs"])
def install_sshfs():
    """Ensure sshfs is available on the minion; apt-install it if missing."""
    try:
        run_on_minion('which sshfs')
        # BUG FIX: both log messages said "bindfs" (copy-paste from
        # install_bindfs) even though this function checks/installs sshfs.
        log("OK: sshfs is installed")
    except:
        log("sshfs not installed, so installing it")
        run_on_minion(["apt-get", "update"])
        run_on_minion(["apt-get", "install", "-y", "sshfs"])
def install_ssh_keys():
    """Install the shared smc-storage ssh keypair under the node's root account
    so the minion can sshfs-mount the storage servers."""
    keydir = '/node/root/.ssh/smc-storage/{POD_NAMESPACE}'.format(POD_NAMESPACE = POD_NAMESPACE)
    if not os.path.exists(keydir):
        os.makedirs(keydir)
    for name in ['id-rsa', 'id-rsa.pub']:
        src = os.path.join('/ssh', name)
        dst = os.path.join(keydir, name.replace('-', '_'))
        shutil.copyfile(src, dst)
        os.chmod(dst, 0o600)  # private key must not be group/world readable
def restart_kubelet():
    """Kill the kubelet process on the minion.

    NOTE(review): named "restart" but only kills the process -- presumably a
    supervisor brings it back; confirm on the node image.
    """
    run_on_minion("kill `pidof /usr/local/bin/kubelet`")
TIMESTAMP_FORMAT = "%Y-%m-%d-%H%M%S"  # e.g., 2016-06-27-141131

def time_to_timestamp(tm=None):
    """Format a Unix time (default: now) as a local-time TIMESTAMP_FORMAT string."""
    when = time.time() if tm is None else tm
    return datetime.datetime.fromtimestamp(when).strftime(TIMESTAMP_FORMAT)
def timestamp_to_rethinkdb(timestamp):
    """Convert a TIMESTAMP_FORMAT string ('YYYY-MM-DD-HHMMSS') to a RethinkDB time."""
    date_part, _, time_part = timestamp.rpartition('-')
    return rethinkdb.iso8601(date_part.replace('-','') + 'T' + time_part.replace(':','') + 'Z')
# TODO: this entire approach is pointless and broken because when multiple processes
# append to the same file, the result is broken corruption.
def update_zpool_active_log():
    """
    Update log file showing which ZFS filesystems are mounted, which is used by the backup system.
    """
    prefix = "/mnt/smc-storage/{namespace}/".format(namespace=POD_NAMESPACE)
    try:
        mounted = run_on_minion("zpool status -PL|grep {prefix}".format(prefix=prefix),
                                get_output=True).splitlines()
    except:
        # Nothing to do -- get error if no pools are mounted
        return
    for line in mounted:
        fields = line.split()
        if not fields:
            continue
        path = fields[0].strip()       # '/mnt/smc-storage/test/storage0/foo/bar/abc.zfs/00.img'
        path = path[len(prefix):]      # 'storage0/foo/bar/abc.zfs/00.img'
        path = os.path.split(path)[0]  # 'storage0/foo/bar/abc.zfs'
        i = path.find('/')
        server = path[:i]
        image = path[i+1:]
        # (local renamed from `log`, which shadowed the module-level logger)
        entry = "{timestamp} {image}".format(timestamp=time_to_timestamp(), image=image)
        run_on_minion("echo '{log}' >> {prefix}/{server}/log/active.log".format(
            log=entry, prefix=prefix, server=server))
def update_all_snapshots():
    """Roll every zpool's snapshots forward and record the new ones in the DB."""
    info = json.loads(smc_storage("zpool-update-snapshots", get_output=True))
    db_set_last_snapshot(info['new_snapshots'])
RETHINKDB_SECRET = '/secrets/rethinkdb/rethinkdb'
import rethinkdb
def rethinkdb_connection():
    """Connect to the rethinkdb-driver service, using the secret auth key if nonempty."""
    auth_key = open(RETHINKDB_SECRET).read().strip() or None
    return rethinkdb.connect(host='rethinkdb-driver', timeout=4, auth_key=auth_key)
def db_set_last_snapshot(new_snapshots):
"""
new_snapshots should be a dictionary with keys the project_id's and values timestamps.
This function will connect to the database if possible, and set the last_snapshot field of
each project (in the projects table) to the given timestamp.
"""
print("db_set_last_snapshot", new_snapshots)
if len(new_snapshots) == 0:
return
# Open connection to the database
conn = rethinkdb_connection()
# Do the queries
for project_id, timestamp in new_snapshots.items():
|
[
" last_snapshot = timestamp_to_rethinkdb(timestamp)"
] | 896
|
lcc
|
python
| null |
d6e15d0ebb2aa304bba431f5e996732e5585011c1cec782a
|
|
//--- Aura Script -----------------------------------------------------------
// Deian
//--- Description -----------------------------------------------------------
// Shepard - manages the sheep at Tir Chonaill Grassland
//---------------------------------------------------------------------------
public class DeianScript : NpcScript
{
public override void Load()
{
	// Identity and appearance.
	SetRace(10002);
	SetName("_deian");
	SetBody(height: 0.85f);
	SetFace(skinColor: 23, eyeType: 19, eyeColor: 0, mouthType: 0);
	SetStand("human/male/anim/male_natural_stand_npc_deian");
	// Spawn point: region 1, Tir Chonaill Grassland.
	SetLocation(1, 27953, 42287, 158);

	// Equipment: pocket, item id, then up to three color values.
	EquipItem(Pocket.Face, 4900, 0x00FFDC53, 0x00FFB682, 0x00A8DDD3);
	EquipItem(Pocket.Hair, 4156, 0x00E7CB60, 0x00E7CB60, 0x00E7CB60);
	EquipItem(Pocket.Armor, 15656, 0x00E2EDC7, 0x004F5E44, 0x00000000);
	EquipItem(Pocket.Glove, 16099, 0x00343F2D, 0x00000000, 0x00000000);
	EquipItem(Pocket.Shoe, 17287, 0x004C392A, 0x00000000, 0x00000000);
	EquipItem(Pocket.Head, 18407, 0x00343F2D, 0x00000000, 0x00000000);
	EquipItem(Pocket.RightHand1, 40001, 0x00755748, 0x005E9A49, 0x005E9A49);

	// Greetings keyed by memory/favor level (0 = first meeting).
	AddGreeting(0, "Nice to meet you, I am Deian.<br/>You don't look that old, maybe a couple of years older than I am?<br/>Let's just say we're the same age. You don't mind do ya?");
	AddGreeting(1, "Nice to meet you again.");
	//AddGreeting(2, "Welcome, <username />"); // Not sure

	// Ambient lines spoken at random while idle.
	AddPhrase("Another day... another boring day in the countryside.");
	AddPhrase("Baa! Baa!");
	AddPhrase("Geez, these sheep are a pain in the neck.");
	AddPhrase("Hey, this way!");
	AddPhrase("I don't understand. I have one extra...");
	AddPhrase("I'm so bored. There's just nothing exciting around here.");
	AddPhrase("It's amazing how fast they grow feeding on grass.");
	AddPhrase("I wonder if I could buy a house with my savings yet...");
	AddPhrase("What the... Now there's one missing!");
}
protected override async Task Talk()
{
	SetBgm("NPC_Deian.mp3");

	// Flavor text shown when the player first focuses the NPC.
	await Intro(
		"An adolescent boy carrying a shepherd's staff watches over a flock of sheep.",
		"Now and then, he hollers at some sheep that've wandered too far, and his voice cracks every time.",
		"His skin is tanned and his muscles are strong from his daily work.",
		"Though he's young, he peers at you with so much confidence it almost seems like arrogance."
	);

	// Top-level menu: conversation, shop, or item upgrade.
	Msg("What can I do for you?", Button("Start a Conversation", "@talk"), Button("Shop", "@shop"), Button("Modify Item", "@upgrade"));
	switch (await Select())
	{
		case "@talk":
			Greet();
			Msg(Hide.Name, GetMoodString(), FavorExpression());
			// Extra reaction when the player wears title 11002 ("Guardian of Erinn").
			if (Player.Titles.SelectedTitle == 11002)
			{
				Msg("Eh? <username/>...<br/>You've become the Guardian of Erinn?<br/>So fast!<br/>I'm still trying to become a Warrior!");
				Msg("Good for you.<br/>Just make sure you leave me some work to do for when I become a Warrior.<br/>Wow, must've been tough.");
			}
			await Conversation();
			break;

		case "@shop":
			Msg("I got nothing much, except for some quest scrolls. Are you interested?");
			OpenShop("DeianShop");
			return; // shop takes over; skip End() below

		case "@upgrade":
			Msg("Upgrades! Who else would know more about that than the great Deian? Hehe...<br/>Now, what do you want to upgrade?<br/>Don't forget to check how many times you can upgrade that tiem and what type of upgrade it is before you give it to me... <upgrade />");
			// Keep applying upgrades until the client replies with something
			// other than an "@upgrade:" selection.
			while (true)
			{
				var reply = await Select();
				if (!reply.StartsWith("@upgrade:"))
					break;
				var result = Upgrade(reply);
				if (result.Success)
					Msg("Yes! Success!<br/>Honestly, I am a little surprised myself.<br/>Would you like some more upgrades? I'm starting to enjoy this.");
				else
					Msg("(Error)");
			}
			Msg("Come and see me again.<br/>I just discovered I have a new talent. Thanks to you!");
			break;
	}

	End();
}
protected override async Task Keywords(string keyword)
{
switch (keyword)
{
case "personal_info":
Msg("Yeah, yeah. I'm a mere shepherd...for now.<br/>But I will soon be a mighty warrior!<br/>");
ModifyRelation(Random(2), 0, Random(2));
break;
case "rumor":
GiveKeyword("pool");
Msg("Some people should have been born as fish.<br/>They can't pass water without diving right in.<br/>I wish they'd stop.");
Msg("Not long ago, someone jumped into the reservoir<br/>and made a huge mess.<br/>Guess who got stuck cleaning it up?<br/>Sooo not my job.");
ModifyRelation(Random(2), 0, Random(2));
/* Message from Field Boss Spawns
Msg("<face name='normal'/>A monster will show up in Eastern Prairie of the Meadow at 3Days later Dawn!<br/>Gigantic White Wolf will show up!<br/>Hey, I said I'm not lying!");
Msg("<title name='NONE'/>(That was a great conversation!)"); */
break;
case "about_skill":
if (HasSkill(SkillId.PlayingInstrument))
{
Msg("Alright, so you know about the Instrument Playing skill.<br/>It's always good to know how to appreciate art, haha!");
}
else
{
GiveKeyword("skill_instrument");
Msg("Know anything about the Instrument Playing skill?<br/>Only introspective guys like me<br/>can handle instruments.<br/>I wonder how well you would do...");
Msg("Priestess Endelyon knows all about this skill.<br/>You should talk to her.<br/>");
}
break;
case "about_arbeit":
Msg("Unimplemented");
//Msg("It's not time to start work yet.<br/>Can you come back and ask for a job later?");
//Msg("Do you want a part-time job? I'm always in need of help.<br/>Have you ever sheared a sheep before?<br/>If you keep doing a good job, I'll raise your pay.<br/>Want to give it a try?");
break;
case "shop_misc":
Msg("You know the guy at the General Shop? His name is Malcolm.<br/>Everyone knows he's a hermit.<br/>He does nothing but work, all day long.<br/>What a dull life!");
break;
case "shop_grocery":
Msg("Every time I go there, I smell fresh baked bread. Yum.<br/>Boy, I miss that fatty, Caitin.");
Msg("You know what? Caitin has a pretty face,<br/>but her legs are so chunky! Like tree trunks! Hahahaha!<br/>There's a reason she wears long skirts.<br/>Hehe...");
break;
case "shop_healing":
Msg("Oh, you are talking about Dilys' place.<br/>Sometimes, even when I bring a sick lamb, she still treats it with extra care.<br/>I guess lambs and humans aren't that much different when they're sick...");
break;
case "shop_inn":
GiveKeyword("skill_campfire");
Msg("Staying up all night, sleeping under trees during the day...<br/>When you have my lifestyle, you don't need to sleep at an Inn!<br/>All I need is the Campfire skill to survive!");
break;
case "shop_bank":
Msg("Darn, I wish I had enough items to deposit at the Bank.<br/>Did you talk to Bebhinn?<br/>Bebhinn loves to talk about other people.<br/>You'd better be careful when you talk to her.");
break;
case "shop_smith":
Msg("The Blacksmith's Shop is too hot. I just hate the heat.<br/>I'd rather be under the shade of a nice tree...");
break;
case "skill_range":
GiveKeyword("school");
Msg("Don't you think it's best to go to the School<br/>and ask Ranald about it?<br/>I don't mind telling you about it myself,<br/>but Ranald doesn't like it when I teach people...");
break;
case "skill_instrument":
GiveKeyword("temple");
Msg("You really are something.<br/>I just told you,<br/>talk to Priestess Endelyon at the Church<br/>about that.");
Msg("I know your type...<br/>You like to use everything single<br/>keyword you get... Bug off!");
break;
case "skill_tailoring":
Msg("Hey, if I had a skill like that, why on Erinn would I be here tending sheep?<br/>It seems interesting,<br/>but my parents would go crazy if they caught me with a needle and thread.");
Msg("I hear chubby Caitin knows a lot.<br/>Problem is, she gets upset when she sees me...<br/>If you learn that skill, can you teach me?");
break;
case "skill_magnum_shot":
Msg("I've been losing one or two sheep everyday since I told you about that.<br/>You're not trying to steal my sheep, are you?");
Msg("I'm joking... Don't get so defensive.");
break;
case "skill_counter_attack":
Msg("I heard somewhere, you can learn that<br/>by getting beat up...<br/> It's not worth it for me.<br/>A method like that just seems stupid...");
break;
case "skill_smash":
Msg("Well, I learned that before.");
Msg("But I forgot.");
break;
case "skill_gathering":
Msg("Here's the rundown.<br/>Think about what you want to gather first, then, find out where you can get it.<br/>You'll need the right tool.<br/>More importantly, you need time, hard work, and money.");
Msg("But you won't get paid much.<br/>You want to make an easy living by picking up stuff from the ground, right?<br/>But trust me, it's not that easy. I've tried.");
break;
case "square":
Msg("The Square? Are you serious?<br/>You haven't been there yet?<br/>You are such a bad liar!<br/>I saw you walking out from the Square<br/>just a moment ago!");
break;
case "pool":
Msg("It's right behind chubby ol' Caitin's place.<br/>You know where her Grocery Store is, right?");
Msg("By the way, what are you going to do there?<br/>You're not going to jump in, are you?<br/>I'm just teasing. Calm down.");
break;
case "farmland":
Msg("Are you really interested in that?<br/>Don't ask unless you are really interested!<br/>What? How am I suppose to know if you are interested or not?<br/>If you are interested in the farmland, what are you doing here?");
break;
case "windmill":
Msg("You must be talking about the Windmill down there.<br/>Well, you won't find anything interesting there.<br/>You'll see a little kid.<br/>Even if she acts rude, just let her be...");
break;
case "brook":
Msg("It's the stream right over there!<br/>Didn't you cross the bridge on your way here?<br/>Ha... Your memory is a bit...poor.");
Msg("Sometimes, if you stay here long enough,<br/>you see people peeing in it. Gross.");
break;
case "shop_headman":
Msg("If you're going to the Chief's House,<br/>go to the Square first.<br/>You'll find a hill with a drawing on it.");
Msg("Yeah, where the big tree is.<br/>There's a house over that hill.<br/>That's where our Chief lives.");
break;
case "temple":
Msg("The Church... Hm, the Church....<br/>That... Er... Hmm...");
Msg("Well, I don't know! Go into town and ask someone there!<br/>Or just look at your Minimap, geez!");
break;
case "school":
Msg("Where's the School?<br/>Wow, you are way lost.");
Msg("Okay, cross the stream first, alright?<br/>Then run along, with the stream on your left<br/>and you will see the farmland.<br/>Once you see it, you know you're almost there.");
Msg("It's really close to the farmland, so you'll see it right away.");
Msg("Hey, wait a minute. Why am I telling you all this?<br/>I'm a busy guy!");
break;
case "skill_campfire":
if (!HasSkill(SkillId.Campfire))
{
if (!HasKeyword("deian_01"))
{
GiveItem(1012); // Campfire Manual
GiveItem(63002, 5); // Firewood
GiveKeyword("deian_01");
Msg("(Missing dialog: Campfire explanation)");
}
else
{
Msg("(Missing dialog: Another Campfire explanation)");
}
}
else
{
RemoveKeyword("skill_campfire");
RemoveKeyword("deian_01");
Msg("Hey, you! What are you doing!<br/>Are you trying to use the Campfire skill here?<br/>Are you crazy!? You want to burn all my wool?<br/>Go away! Go away!<br/>You want to play with fire? Go do it far away from here!");
}
break;
case "shop_restaurant":
GiveKeyword("shop_grocery");
Msg("Restaurant? You must be talking about the Grocery Store.<br/>Speaking of food,<br/>my stomach is growling...");
Msg("It's been a while since I've had a decent meal.<br/>I always eat out here.<br/>A hard loaf of bread and plain water.<br/>Let's see, was there a restaurant in our town?");
break;
case "shop_armory":
GiveKeyword("shop_smith");
Msg("A Weapons Shop? What for?<br/>What are you going to do with a weapon?<br/>Think you'll put good use to it if you buy it now?<br/>I don't think so!");
break;
case "shop_cloth":
Msg("You...are interested in fashion?<br/>Haha.<br/>Puhaha.<br/>Haha...");
Msg("Haha...so...sorry, haha, it's just funny...<br/>Talking about fashion in a place like this?<br/>Did it ever cross your mind that this might be the wrong place for that?");
break;
case "shop_bookstore":
Msg("Oh, you like reading?<br/>I'm not sure that will really help you with your life.<br/>I'll bet most people in town would say the same thing.<br/>Why else aren't there any bookstores in town?");
Msg("I don't really understand what people get out of reading.<br/>Books are full of rubbish or fairy tales, you know.<br/>Why do you like reading books?");
break;
case "shop_goverment_office":
Msg("Haha! You're joking, right?<br/>Why would this small town ever need a town office?<br/>Don't worry...if you've lost something, it's usually kept at the Chief's House.");
break;
case "graveyard":
Msg("The graveyard? That place is creepy.");
Msg("You know it's on your Minimap...<br/>Asking all these foolish questions...<br/>What's your problem?");
break;
case "lute":
Msg("Oh... I want a red lute.<br/>Why don't you buy me one when you get rich, yea?");
break;
case "complicity":
|
[
"\t\t\t\tMsg(\"Welcome to the real world...\");"
] | 1,740
|
lcc
|
csharp
| null |
2c8143e18e8a59b4e401fca8446e57b31d54c7fc033009c7
|
|
#!/usr/bin/env python
#
# This file is part of aDBa.
#
# aDBa is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# aDBa is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with aDBa. If not, see <http://www.gnu.org/licenses/>.
from .aniDBmaper import AniDBMaper
class ResponseResolver:
    """Splits a raw AniDB UDP reply into tag, code, message and data lines."""

    def __init__(self, data):
        self.restag, self.rescode, self.resstr, self.datalines = self.parse(data)

    def parse(self, data):
        """Return (restag, rescode, resstr, datalines) parsed from a raw reply.

        The first line is "[tag ]code message"; every following line (except
        the trailing empty one) is a '|'-separated data record.
        """
        first, _, rest = data.partition('\n')
        code, message = first.split(' ', 1)
        if code[0] == 'T':
            # Client-supplied tag precedes the actual reply code.
            tag = code
            code, message = message.split(' ', 1)
        else:
            tag = None
        datalines = [line.split('|') for line in rest.split('\n')[:-1]]
        return tag, code, message, datalines

    def resolve(self, cmd):
        """Instantiate the Response subclass registered for this reply code."""
        return responses[self.rescode](cmd, self.restag, self.rescode, self.resstr, self.datalines)
class Response:
    """Base class for parsed AniDB UDP replies.

    Subclasses set three tuples that describe the reply layout:
      codehead -- named fields at the front of the reply text line
      codetail -- named fields of each '|'-split data line
      coderep  -- names of a repeating group after codetail (if any)
    parse() then maps the raw strings onto those names.
    """
    def __init__(self, cmd, restag, rescode, resstr, rawlines):
        self.req = cmd          # originating command object (may be None)
        self.restag = restag    # client-chosen tag echoed by the server, or None
        self.rescode = rescode  # reply code as a string
        self.resstr = resstr    # reply text (header attrs still embedded)
        self.rawlines = rawlines  # data lines, each already split on '|'
        self.maper = AniDBMaper()

    def __repr__(self):
        # NOTE(review): relies on self.attrs / self.datalines, which only
        # exist after parse() has run -- repr() of an unparsed response raises.
        tmp = "%s(%s,%s,%s) %s\n" % (
            self.__class__.__name__, repr(self.restag), repr(self.rescode), repr(self.resstr),
            repr(self.attrs))
        # First pass: widest key, so values line up in columns.
        m = 0
        for line in self.datalines:
            for k, v in line.items():
                if len(k) > m:
                    m = len(k)
        for line in self.datalines:
            tmp += " Line:\n"
            for k, v in line.items():
                tmp += " %s:%s %s\n" % (k, (m - len(k)) * ' ', v)
        return tmp

    def parse(self):
        """Split resstr into named header attributes and map each raw data
        line onto the names in codetail (plus repeating coderep groups)."""
        # Header attributes are space-separated at the front of resstr; the
        # remainder after them becomes the true reply text.
        tmp = self.resstr.split(' ', len(self.codehead))
        self.attrs = dict(list(zip(self.codehead, tmp[:-1])))
        self.resstr = tmp[-1]
        self.datalines = []
        for rawline in self.rawlines:
            normal = dict(list(zip(self.codetail, rawline)))
            rawline = rawline[len(self.codetail):]
            rep = []
            if len(self.coderep):
                # Consume the rest of the line in coderep-sized groups.
                while rawline:
                    tmp = dict(list(zip(self.coderep, rawline)))
                    rawline = rawline[len(self.coderep):]
                    rep.append(tmp)
            # normal['rep']=rep
            # NOTE(review): the repeating groups collected in `rep` are
            # discarded -- the line above is commented out upstream.
            self.datalines.append(normal)

    def handle(self):
        # Let the originating command react to this reply, if there is one.
        if self.req:
            self.req.handle(self)
class LoginAcceptedResponse(Response):
    """LOGIN_ACCEPTED reply.

    attributes:
        sesskey - session key
        address - your address (ip:port) as seen by the server; only present
                  when the LOGIN command was sent with nat=1
    data: none
    """
    def __init__(self, cmd, restag, rescode, resstr, datalines):
        Response.__init__(self, cmd, restag, rescode, resstr, datalines)
        self.codestr = 'LOGIN_ACCEPTED'
        self.codetail = ()
        self.coderep = ()
        # BUG FIX: the old expression `int(nat == None and nat or '0')`
        # always evaluated to 0, so the 'address' header field was never
        # parsed even when the client logged in with nat=1.
        nat = cmd.parameters['nat']
        nat = int(nat) if nat else 0
        if nat:
            self.codehead = ('sesskey', 'address')
        else:
            self.codehead = ('sesskey',)
class LoginAcceptedNewVerResponse(Response):
    """LOGIN_ACCEPTED_NEW_VER reply (login OK, newer client version exists).

    attributes:
        sesskey - session key
        address - your address (ip:port) as seen by the server; only present
                  when the LOGIN command was sent with nat=1
    data: none
    """
    def __init__(self, cmd, restag, rescode, resstr, datalines):
        Response.__init__(self, cmd, restag, rescode, resstr, datalines)
        self.codestr = 'LOGIN_ACCEPTED_NEW_VER'
        self.codetail = ()
        self.coderep = ()
        # BUG FIX: the old expression `int(nat == None and nat or '0')`
        # always evaluated to 0, so the 'address' header field was never
        # parsed even when the client logged in with nat=1.
        nat = cmd.parameters['nat']
        nat = int(nat) if nat else 0
        if nat:
            self.codehead = ('sesskey', 'address')
        else:
            self.codehead = ('sesskey',)
class LoggedOutResponse(Response):
    """LOGGED_OUT reply; no header attributes, no data fields."""

    def __init__(self, cmd, restag, rescode, resstr, datalines):
        super().__init__(cmd, restag, rescode, resstr, datalines)
        self.codestr = 'LOGGED_OUT'
        self.codehead = ()
        self.codetail = ()
        self.coderep = ()
class ResourceResponse(Response):
    """RESOURCE reply; no header attributes, no data fields."""

    def __init__(self, cmd, restag, rescode, resstr, datalines):
        super().__init__(cmd, restag, rescode, resstr, datalines)
        self.codestr = 'RESOURCE'
        self.codehead = ()
        self.codetail = ()
        self.coderep = ()
class StatsResponse(Response):
    """STATS reply; no header attributes, no data fields."""

    def __init__(self, cmd, restag, rescode, resstr, datalines):
        super().__init__(cmd, restag, rescode, resstr, datalines)
        self.codestr = 'STATS'
        self.codehead = ()
        self.codetail = ()
        self.coderep = ()
class TopResponse(Response):
    """TOP reply; no header attributes, no data fields."""

    def __init__(self, cmd, restag, rescode, resstr, datalines):
        super().__init__(cmd, restag, rescode, resstr, datalines)
        self.codestr = 'TOP'
        self.codehead = ()
        self.codetail = ()
        self.coderep = ()
class UptimeResponse(Response):
    """UPTIME reply.

    data: uptime -- UDP server uptime in milliseconds.
    """

    def __init__(self, cmd, restag, rescode, resstr, datalines):
        super().__init__(cmd, restag, rescode, resstr, datalines)
        self.codestr = 'UPTIME'
        self.codehead = ()
        self.codetail = ('uptime',)
        self.coderep = ()
class EncryptionEnabledResponse(Response):
    """ENCRYPTION_ENABLED reply.

    attributes: salt -- the session salt.
    """

    def __init__(self, cmd, restag, rescode, resstr, datalines):
        super().__init__(cmd, restag, rescode, resstr, datalines)
        self.codestr = 'ENCRYPTION_ENABLED'
        self.codehead = ('salt',)
        self.codetail = ()
        self.coderep = ()
class MylistEntryAddedResponse(Response):
    """MYLIST_ENTRY_ADDED reply.

    data: entrycnt -- number of entries added.
    """

    def __init__(self, cmd, restag, rescode, resstr, datalines):
        super().__init__(cmd, restag, rescode, resstr, datalines)
        self.codestr = 'MYLIST_ENTRY_ADDED'
        self.codehead = ()
        self.codetail = ('entrycnt',)
        self.coderep = ()
class MylistEntryDeletedResponse(Response):
    """MYLIST_ENTRY_DELETED reply.

    data: entrycnt -- number of entries deleted.
    """

    def __init__(self, cmd, restag, rescode, resstr, datalines):
        super().__init__(cmd, restag, rescode, resstr, datalines)
        self.codestr = 'MYLIST_ENTRY_DELETED'
        self.codehead = ()
        self.codetail = ('entrycnt',)
        self.coderep = ()
class AddedFileResponse(Response):
    """ADDED_FILE reply; no header attributes, no data fields."""

    def __init__(self, cmd, restag, rescode, resstr, datalines):
        super().__init__(cmd, restag, rescode, resstr, datalines)
        self.codestr = 'ADDED_FILE'
        self.codehead = ()
        self.codetail = ()
        self.coderep = ()
class AddedStreamResponse(Response):
    """ADDED_STREAM reply; no header attributes, no data fields."""

    def __init__(self, cmd, restag, rescode, resstr, datalines):
        super().__init__(cmd, restag, rescode, resstr, datalines)
        self.codestr = 'ADDED_STREAM'
        self.codehead = ()
        self.codetail = ()
        self.coderep = ()
class EncodingChangedResponse(Response):
    """ENCODING_CHANGED reply; no header attributes, no data fields."""

    def __init__(self, cmd, restag, rescode, resstr, datalines):
        super().__init__(cmd, restag, rescode, resstr, datalines)
        self.codestr = 'ENCODING_CHANGED'
        self.codehead = ()
        self.codetail = ()
        self.coderep = ()
class FileResponse(Response):
    """FILE reply.

    Each data line starts with 'fid', followed by the file fields selected by
    the command's fmask (ids, state, size, hashes ed2k/md5/sha1/crc32, codec,
    bitrate, resolution, length, names, ...) and then the anime fields
    selected by its amask (episode/anime names and numbers, group names,
    categories, producers, ...).  The exact column lists come from
    AniDBMaper.getFileCodesF / getFileCodesA.
    """

    def __init__(self, cmd, restag, rescode, resstr, datalines):
        super().__init__(cmd, restag, rescode, resstr, datalines)
        self.codestr = 'FILE'
        self.codehead = ()
        self.coderep = ()
        # Column layout depends on which mask bits this FILE command requested.
        fcodes = self.maper.getFileCodesF(cmd.parameters['fmask'])
        acodes = self.maper.getFileCodesA(cmd.parameters['amask'])
        self.codetail = tuple(['fid'] + fcodes + acodes)
class MylistResponse(Response):
    """MYLIST reply.

    data: lid, fid, eid, aid, gid, date (when added), state (file location),
    viewdate (when marked watched), storage (e.g. CD title), source
    (bittorrent, dc++, ed2k, ...), other (free-form notes).
    """

    def __init__(self, cmd, restag, rescode, resstr, datalines):
        super().__init__(cmd, restag, rescode, resstr, datalines)
        self.codestr = 'MYLIST'
        self.codehead = ()
        self.codetail = (
            'lid', 'fid', 'eid', 'aid', 'gid', 'date', 'state', 'viewdate', 'storage', 'source',
            'other')
        self.coderep = ()
class MylistStatsResponse(Response):
    """MYLIST_STATS reply.

    data: totals (animes, eps, files, filesizes), additions (animesadded,
    epsadded, filesadded, groupsadded), percentages (leechperc, lameperc,
    viewedofdb, mylistofdb, viewedofmylist), and viewedeps / votes / reviews.
    """

    def __init__(self, cmd, restag, rescode, resstr, datalines):
        super().__init__(cmd, restag, rescode, resstr, datalines)
        self.codestr = 'MYLIST_STATS'
        self.codehead = ()
        self.codetail = (
            'animes', 'eps', 'files', 'filesizes', 'animesadded', 'epsadded', 'filesadded',
            'groupsadded', 'leechperc', 'lameperc', 'viewedofdb', 'mylistofdb', 'viewedofmylist',
            'viewedeps', 'votes', 'reviews')
        self.coderep = ()
class AnimeResponse(Response):
    """ANIME reply; data columns are chosen by the command's amask bits."""

    def __init__(self, cmd, restag, rescode, resstr, datalines):
        super().__init__(cmd, restag, rescode, resstr, datalines)
        self.codestr = 'ANIME'
        self.codehead = ()
        self.coderep = ()
        # TODO (upstream): random-anime replies are not handled yet.
        self.codetail = tuple(self.maper.getAnimeCodesA(cmd.parameters['amask']))
class AnimeBestMatchResponse(Response):
    """ANIME_BEST_MATCH reply; no header attributes, no data fields."""

    def __init__(self, cmd, restag, rescode, resstr, datalines):
        super().__init__(cmd, restag, rescode, resstr, datalines)
        self.codestr = 'ANIME_BEST_MATCH'
        self.codehead = ()
        self.codetail = ()
        self.coderep = ()
class RandomanimeResponse(Response):
    """RANDOMANIME reply; no header attributes, no data fields."""

    def __init__(self, cmd, restag, rescode, resstr, datalines):
        super().__init__(cmd, restag, rescode, resstr, datalines)
        self.codestr = 'RANDOMANIME'
        self.codehead = ()
        self.codetail = ()
        self.coderep = ()
class EpisodeResponse(Response):
    """EPISODE reply.

    data: eid, aid, length, rating, votes, epno (episode number),
    name (english), romaji, kanji.
    """

    def __init__(self, cmd, restag, rescode, resstr, datalines):
        super().__init__(cmd, restag, rescode, resstr, datalines)
        self.codestr = 'EPISODE'
        self.codehead = ()
        self.codetail = (
            'eid', 'aid', 'length', 'rating', 'votes', 'epno', 'name', 'romaji', 'kanji')
        self.coderep = ()
class ProducerResponse(Response):
    def __init__(self, cmd, restag, rescode, resstr, datalines):
        """PRODUCER reply.

        Data columns:
            pid       - producer id
            name      - name of producer
            shortname - short name
            othername - other name
            type      - type
            pic       - picture name
            url       - home page url
        """
        Response.__init__(self, cmd, restag, rescode, resstr, datalines)
        self.codestr = 'PRODUCER'
        self.codehead = ()
        self.coderep = ()
        self.codetail = ('pid', 'name', 'shortname', 'othername',
                         'type', 'pic', 'url')
class GroupResponse(Response):
    def __init__(self, cmd, restag, rescode, resstr, datalines):
        """GROUP reply.

        Data columns:
            gid        - group id
            rating     - rating
            votes      - votes
            animes     - anime count
            files      - file count
            name       - name
            shortname  - short
            ircchannel - irc channel
            ircserver  - irc server
            url        - url
        """
        Response.__init__(self, cmd, restag, rescode, resstr, datalines)
        self.codestr = 'GROUP'
        self.codehead = ()
        self.coderep = ()
        self.codetail = ('gid', 'rating', 'votes', 'animes', 'files', 'name',
                         'shortname', 'ircchannel', 'ircserver', 'url')
class GroupstatusResponse(Response):
    def __init__(self, cmd, restag, rescode, resstr, datalines):
        """GROUPSTATUS reply.

        Data columns:
            gid                 - group id
            name                - group name
            state               - completion state
            last_episode_number - last episode number
            rating              - rating
            votes               - votes
            episode_range       - episode range

        (The previous docstring was copied from GroupResponse and described
        the wrong fields.)
        """
        Response.__init__(self, cmd, restag, rescode, resstr, datalines)
        self.codestr = 'GROUPSTATUS'
        self.codehead = ()
        # BUG FIX: ' last_episode_number' had a stray leading space, producing
        # a data key that callers could never look up by its documented name.
        self.codetail = (
            'gid', 'name', 'state', 'last_episode_number', 'rating', 'votes', 'episode_range')
        self.coderep = ()
class BuddyListResponse(Response):
    def __init__(self, cmd, restag, rescode, resstr, datalines):
        """BUDDY_LIST reply.

        Header columns:
            start - mylist entry number of first buddy on this packet
            end   - mylist entry number of last buddy on this packet
            total - total number of buddies on mylist
        Data columns:
            uid, username, state
        """
        Response.__init__(self, cmd, restag, rescode, resstr, datalines)
        self.codestr = 'BUDDY_LIST'
        self.coderep = ()
        self.codehead = ('start', 'end', 'total')
        self.codetail = ('uid', 'username', 'state')
class BuddyStateResponse(Response):
    def __init__(self, cmd, restag, rescode, resstr, datalines):
        """BUDDY_STATE reply.

        Header columns:
            start - mylist entry number of first buddy on this packet
            end   - mylist entry number of last buddy on this packet
            total - total number of buddies on mylist
        Data columns:
            uid, state
        """
        Response.__init__(self, cmd, restag, rescode, resstr, datalines)
        self.codestr = 'BUDDY_STATE'
        self.coderep = ()
        self.codehead = ('start', 'end', 'total')
        self.codetail = ('uid', 'state')
class BuddyAddedResponse(Response):
    def __init__(self, cmd, restag, rescode, resstr, datalines):
        """BUDDY_ADDED acknowledgement; carries no data columns."""
        Response.__init__(self, cmd, restag, rescode, resstr, datalines)
        self.codestr = 'BUDDY_ADDED'
        self.codehead = self.codetail = self.coderep = ()
class BuddyDeletedResponse(Response):
    def __init__(self, cmd, restag, rescode, resstr, datalines):
        """BUDDY_DELETED acknowledgement; carries no data columns."""
        Response.__init__(self, cmd, restag, rescode, resstr, datalines)
        self.codestr = 'BUDDY_DELETED'
        self.codehead = self.codetail = self.coderep = ()
class BuddyAcceptedResponse(Response):
    def __init__(self, cmd, restag, rescode, resstr, datalines):
        """BUDDY_ACCEPTED acknowledgement; carries no data columns."""
        Response.__init__(self, cmd, restag, rescode, resstr, datalines)
        self.codestr = 'BUDDY_ACCEPTED'
        self.codehead = self.codetail = self.coderep = ()
class BuddyDeniedResponse(Response):
    def __init__(self, cmd, restag, rescode, resstr, datalines):
        """BUDDY_DENIED acknowledgement; carries no data columns."""
        Response.__init__(self, cmd, restag, rescode, resstr, datalines)
        self.codestr = 'BUDDY_DENIED'
        self.codehead = self.codetail = self.coderep = ()
class VotedResponse(Response):
    def __init__(self, cmd, restag, rescode, resstr, datalines):
        """VOTED reply; single data column: name (aname/ename/gname)."""
        Response.__init__(self, cmd, restag, rescode, resstr, datalines)
        self.codestr = 'VOTED'
        self.codehead = ()
        self.coderep = ()
        self.codetail = ('name',)
class VoteFoundResponse(Response):
    def __init__(self, cmd, restag, rescode, resstr, datalines):
        """VOTE_FOUND reply.

        Data columns:
            name  - aname/ename/gname
            value - vote value
        """
        Response.__init__(self, cmd, restag, rescode, resstr, datalines)
        self.codestr = 'VOTE_FOUND'
        self.codehead = ()
        self.coderep = ()
        self.codetail = ('name', 'value')
class VoteUpdatedResponse(Response):
    def __init__(self, cmd, restag, rescode, resstr, datalines):
        """VOTE_UPDATED reply.

        Data columns:
            name  - aname/ename/gname
            value - vote value
        """
        Response.__init__(self, cmd, restag, rescode, resstr, datalines)
        self.codestr = 'VOTE_UPDATED'
        self.codehead = ()
        self.coderep = ()
        self.codetail = ('name', 'value')
class VoteRevokedResponse(Response):
def __init__(self, cmd, restag, rescode, resstr, datalines):
"""
attributes:
data:
|
[
"\t\tname\t- aname/ename/gname"
] | 2,041
|
lcc
|
python
| null |
997526c4ebbb7938e65ea14362c83fde9d979401f5f5f445
|
|
# (c) 2016 Matt Clay <matt@mystile.com>
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
import os
import time
import re
from ansible.module_utils._text import to_bytes, to_text
from ansible.plugins.callback import CallbackBase
try:
from junit_xml import TestSuite, TestCase
HAS_JUNIT_XML = True
except ImportError:
HAS_JUNIT_XML = False
try:
from collections import OrderedDict
HAS_ORDERED_DICT = True
except ImportError:
try:
from ordereddict import OrderedDict
HAS_ORDERED_DICT = True
except ImportError:
HAS_ORDERED_DICT = False
class CallbackModule(CallbackBase):
    """
    This callback writes playbook output to a JUnit formatted XML file.

    Tasks show up in the report as follows:
        'ok': pass
        'failed' with 'EXPECTED FAILURE' in the task name: pass
        'failed' due to an exception: error
        'failed' for other reasons: failure
        'skipped': skipped

    This plugin makes use of the following environment variables:
        JUNIT_OUTPUT_DIR (optional): Directory to write XML files to.
                                     Default: ~/.ansible.log
        JUNIT_TASK_CLASS (optional): Configure the output to be one class per yaml file
                                     Default: False
        JUNIT_FAIL_ON_CHANGE (optional): Consider any tasks reporting "changed" as a junit test failure
                                     Default: False

    Requires:
        junit_xml
    """

    CALLBACK_VERSION = 2.0
    CALLBACK_TYPE = 'aggregate'
    CALLBACK_NAME = 'junit'
    CALLBACK_NEEDS_WHITELIST = True

    def __init__(self):
        super(CallbackModule, self).__init__()

        self._output_dir = os.getenv('JUNIT_OUTPUT_DIR', os.path.expanduser('~/.ansible.log'))
        self._task_class = os.getenv('JUNIT_TASK_CLASS', 'False').lower()
        self._fail_on_change = os.getenv('JUNIT_FAIL_ON_CHANGE', 'False').lower()
        self._playbook_path = None
        self._playbook_name = None
        self._play_name = None
        self._task_data = None

        self.disabled = False

        if not HAS_JUNIT_XML:
            self.disabled = True
            self._display.warning('The `junit_xml` python module is not installed. '
                                  'Disabling the `junit` callback plugin.')

        if HAS_ORDERED_DICT:
            self._task_data = OrderedDict()
        else:
            self.disabled = True
            self._display.warning('The `ordereddict` python module is not installed. '
                                  'Disabling the `junit` callback plugin.')

        if not os.path.exists(self._output_dir):
            # BUG FIX: use makedirs so a nested JUNIT_OUTPUT_DIR whose parent
            # does not exist yet can still be created; os.mkdir would raise.
            os.makedirs(self._output_dir)

    def _start_task(self, task):
        """ record the start of a task for one or more hosts """

        uuid = task._uuid

        if uuid in self._task_data:
            return

        play = self._play_name
        name = task.get_name().strip()
        path = task.get_path()

        # append the task args to the name, unless the task hides them
        if not task.no_log:
            args = ', '.join(('%s=%s' % a for a in task.args.items()))
            if args:
                name += ' ' + args

        self._task_data[uuid] = TaskData(uuid, name, path, play)

    def _finish_task(self, status, result):
        """ record the results of a task for a single host """

        task_uuid = result._task._uuid

        if hasattr(result, '_host'):
            host_uuid = result._host._uuid
            host_name = result._host.name
        else:
            # included files have no host; record them under a synthetic one
            host_uuid = 'include'
            host_name = 'include'

        task_data = self._task_data[task_uuid]

        if self._fail_on_change == 'true' and status == 'ok' and result._result.get('changed', False):
            status = 'failed'

        if status == 'failed' and 'EXPECTED FAILURE' in task_data.name:
            status = 'ok'

        task_data.add_host(HostData(host_uuid, host_name, status, result))

    def _build_test_case(self, task_data, host_data):
        """ build a TestCase from the given TaskData and HostData """

        name = '[%s] %s: %s' % (host_data.name, task_data.play, task_data.name)
        duration = host_data.finish - task_data.start

        if self._task_class == 'true':
            # group test cases by source file: strip the ':<line>' suffix.
            # BUG FIX: raw string -- '\.' in a plain literal is an invalid
            # escape sequence (DeprecationWarning, an error in newer Pythons).
            junit_classname = re.sub(r'\.yml:[0-9]+$', '', task_data.path)
        else:
            junit_classname = task_data.path

        if host_data.status == 'included':
            return TestCase(name, junit_classname, duration, host_data.result)

        res = host_data.result._result
        rc = res.get('rc', 0)
        dump = self._dump_results(res, indent=0)
        dump = self._cleanse_string(dump)

        if host_data.status == 'ok':
            return TestCase(name, junit_classname, duration, dump)

        test_case = TestCase(name, junit_classname, duration)

        if host_data.status == 'failed':
            if 'exception' in res:
                # exceptions are reported as junit "errors"
                message = res['exception'].strip().split('\n')[-1]
                output = res['exception']
                test_case.add_error_info(message, output)
            elif 'msg' in res:
                message = res['msg']
                test_case.add_failure_info(message, dump)
            else:
                test_case.add_failure_info('rc=%s' % rc, dump)
        elif host_data.status == 'skipped':
            if 'skip_reason' in res:
                message = res['skip_reason']
            else:
                message = 'skipped'
            test_case.add_skipped_info(message)

        return test_case

    def _cleanse_string(self, value):
        """ convert surrogate escapes to the unicode replacement character to avoid XML encoding errors """
        return to_text(to_bytes(value, errors='surrogateescape'), errors='replace')

    def _generate_report(self):
        """ generate a TestSuite report from the collected TaskData and HostData """

        test_cases = []

        for task_uuid, task_data in self._task_data.items():
            for host_uuid, host_data in task_data.host_data.items():
                test_cases.append(self._build_test_case(task_data, host_data))

        test_suite = TestSuite(self._playbook_name, test_cases)
        report = TestSuite.to_xml_string([test_suite])

        output_file = os.path.join(self._output_dir, '%s-%s.xml' % (self._playbook_name, time.time()))

        with open(output_file, 'wb') as xml:
            xml.write(to_bytes(report, errors='surrogate_or_strict'))

    def v2_playbook_on_start(self, playbook):
        self._playbook_path = playbook._file_name
        self._playbook_name = os.path.splitext(os.path.basename(self._playbook_path))[0]

    def v2_playbook_on_play_start(self, play):
        self._play_name = play.get_name()

    def v2_runner_on_no_hosts(self, task):
        self._start_task(task)

    def v2_playbook_on_task_start(self, task, is_conditional):
        self._start_task(task)

    def v2_playbook_on_cleanup_task_start(self, task):
        self._start_task(task)

    def v2_playbook_on_handler_task_start(self, task):
        self._start_task(task)

    def v2_runner_on_failed(self, result, ignore_errors=False):
        if ignore_errors:
            self._finish_task('ok', result)
        else:
            self._finish_task('failed', result)

    def v2_runner_on_ok(self, result):
        self._finish_task('ok', result)

    def v2_runner_on_skipped(self, result):
        self._finish_task('skipped', result)

    def v2_playbook_on_include(self, included_file):
        self._finish_task('included', included_file)

    def v2_playbook_on_stats(self, stats):
        self._generate_report()
class TaskData:
"""
Data about an individual task.
"""
def __init__(self, uuid, name, path, play):
self.uuid = uuid
self.name = name
self.path = path
self.play = play
self.start = None
self.host_data = OrderedDict()
|
[
" self.start = time.time()"
] | 793
|
lcc
|
python
| null |
1be8fdfd322b9ece97d70593dac71eecdd59539ab7ef69d7
|
|
package com.germainz.crappalinks;
import android.app.Activity;
import android.content.Context;
import android.content.Intent;
import android.content.SharedPreferences;
import android.net.ConnectivityManager;
import android.net.NetworkInfo;
import android.net.Uri;
import android.os.AsyncTask;
import android.os.Bundle;
import android.util.Log;
import android.widget.Toast;
import org.json.JSONObject;
import org.jsoup.Jsoup;
import org.jsoup.nodes.Document;
import org.jsoup.nodes.Element;
import org.jsoup.select.Elements;
import java.io.BufferedReader;
import java.io.InputStreamReader;
import java.net.ConnectException;
import java.net.CookieHandler;
import java.net.CookieManager;
import java.net.UnknownHostException;
import java.net.HttpURLConnection;
import java.net.URL;
public class Resolver extends Activity {
private String toastType;
private boolean confirmOpen;
private String resolveAllWhen;
private boolean useUnshortenIt;
private static final String TOAST_NONE = "0";
private static final String TOAST_DETAILED = "2";
private static final String UNSHORTEN_IT_API_KEY = "UcWGkhtMFdM4019XeI8lgfNOk875RL7K";
public void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
SharedPreferences sharedPreferences = getSharedPreferences("com.germainz.crappalinks_preferences",
Context.MODE_WORLD_READABLE);
toastType = sharedPreferences.getString("pref_toast_type", TOAST_NONE);
confirmOpen = sharedPreferences.getBoolean("pref_confirm_open", false);
resolveAllWhen = sharedPreferences.getString("pref_resolve_all_when", "ALWAYS");
// Still called pref_use_long_url for backwards compatibility, as we used to use longurl.org instead.
useUnshortenIt = sharedPreferences.getBoolean("pref_use_long_url", false);
new ResolveUrl().execute(getIntent().getDataString());
/* Ideally, this would be a service, but we're redirecting intents via Xposed.
* We finish the activity immediately so that the user can still interact with the
* foreground app while we unshorten the URL in the background.
*/
finish();
}
private class ResolveUrl extends AsyncTask<String, Void, String> {
private Context context = null;
// unknown error while connecting
private boolean connectionError = false;
// connection missing/not working
private boolean noConnectionError = false;
        private ResolveUrl() {
            // Keep a context reference for toasts/intents; the outer activity
            // finishes right after starting this task.
            context = Resolver.this;
        }
@Override
protected void onPreExecute() {
if (!toastType.equals(TOAST_NONE))
Toast.makeText(context, getString(R.string.toast_message_started),
Toast.LENGTH_SHORT).show();
}
        /**
         * Follows at most one hop of redirection for the given URL.
         *
         * Returns the absolute target URL when the server answers with a 3xx
         * Location header, or when a 2xx HTML body contains a
         * {@code <meta http-equiv="Refresh">} tag outside of noscript blocks.
         * Returns null when there is no further redirect or on error; errors
         * are reported through the connectionError/noConnectionError flags.
         */
        private String getRedirect(String url) {
            HttpURLConnection c = null;
            try {
                c = (HttpURLConnection) new URL(url).openConnection();
                c.setConnectTimeout(10000);
                c.setReadTimeout(15000);
                c.connect();
                final int responseCode = c.getResponseCode();
                // If the response code is 3xx, it's a redirection. Return the real location.
                if (responseCode >= 300 && responseCode < 400) {
                    String location = c.getHeaderField("Location");
                    // Location may be relative; resolve it against the current URL.
                    return RedirectHelper.getAbsoluteUrl(location, url);
                }
                // It might also be a redirection using meta tags.
                else if (responseCode >= 200 && responseCode < 300 ) {
                    Document d = Jsoup.parse(c.getInputStream(), "UTF-8", url);
                    Elements refresh = d.select("*:not(noscript) > meta[http-equiv=Refresh]");
                    if (!refresh.isEmpty()) {
                        Element refreshElement = refresh.first();
                        if (refreshElement.hasAttr("url"))
                            return RedirectHelper.getAbsoluteUrl(refreshElement.attr("url"), url);
                        else if (refreshElement.hasAttr("content") && refreshElement.attr("content").contains("url="))
                            // content looks like "0; url='http://...'" -- strip surrounding quotes.
                            return RedirectHelper.getAbsoluteUrl(refreshElement.attr("content").split("url=")[1].replaceAll("^'|'$", ""), url);
                    }
                }
            } catch (ConnectException | UnknownHostException e) {
                // Host unreachable: report "no connection" rather than a generic error.
                noConnectionError = true;
                e.printStackTrace();
            } catch (Exception e) {
                connectionError = true;
                e.printStackTrace();
            } finally {
                if (c != null)
                    c.disconnect();
            }
            return null;
        }
        /**
         * Resolves the URL in a single round trip via the unshorten.it API
         * (method name kept from the older longurl.org implementation).
         *
         * Returns the fully resolved URL on success; on any failure the
         * original URL is returned and one of the error flags is set.
         */
        private String getRedirectUsingLongURL(String url) {
            HttpURLConnection c = null;
            try {
                // http://unshorten.it/api/documentation
                Uri.Builder builder = new Uri.Builder();
                builder.scheme("http").authority("api.unshorten.it").appendQueryParameter("shortURL", url)
                        .appendQueryParameter("responseFormat", "json").appendQueryParameter("apiKey", UNSHORTEN_IT_API_KEY);
                String requestUrl = builder.build().toString();
                c = (HttpURLConnection) new URL(requestUrl).openConnection();
                c.setRequestProperty("User-Agent", "CrappaLinks");
                c.setConnectTimeout(10000);
                c.setReadTimeout(15000);
                c.connect();
                final int responseCode = c.getResponseCode();
                if (responseCode == 200) {
                    // Response format: {"fullurl": "URL"}
                    JSONObject jsonObject = new JSONObject(new BufferedReader(
                            new InputStreamReader(c.getInputStream())).readLine());
                    if (jsonObject.has("error")) {
                        // API-level error: log request/response for debugging, fall back to input.
                        connectionError = true;
                        Log.e("CrappaLinks", requestUrl);
                        Log.e("CrappaLinks", jsonObject.toString());
                        return url;
                    } else {
                        return jsonObject.getString("fullurl");
                    }
                }
            } catch (ConnectException | UnknownHostException e) {
                noConnectionError = true;
            } catch (Exception e) {
                connectionError = true;
                e.printStackTrace();
            } finally {
                if (c != null)
                    c.disconnect();
            }
            return url;
        }
        /**
         * Resolves urls[0] to its final destination.
         *
         * With unshorten.it enabled this is a single API call; otherwise
         * redirects are followed one hop at a time via getRedirect() until no
         * further redirect is found. Depending on the "resolve all" setting,
         * either every URL is followed or only hosts that RedirectHelper
         * recognizes as shorteners.
         */
        protected String doInBackground(String... urls) {
            String redirectUrl = urls[0];
            // if there's no connection, fail and return the original URL.
            ConnectivityManager connectivityManager = (ConnectivityManager) context.getSystemService(
                    Context.CONNECTIVITY_SERVICE);
            if (connectivityManager.getActiveNetworkInfo() == null) {
                noConnectionError = true;
                return redirectUrl;
            }
            if (useUnshortenIt) {
                return getRedirectUsingLongURL(redirectUrl);
            } else {
                // We inspect each redirect ourselves instead of letting the connection follow it.
                HttpURLConnection.setFollowRedirects(false);
                // Use the cookie manager so that cookies are stored. Useful for some hosts that keep
                // redirecting us indefinitely unless the set cookie is detected.
                CookieManager cookieManager = new CookieManager();
                CookieHandler.setDefault(cookieManager);
                // Should we resolve all URLs?
                boolean resolveAll = true;
                NetworkInfo wifiInfo = connectivityManager.getNetworkInfo(ConnectivityManager.TYPE_WIFI);
                if (resolveAllWhen.equals("NEVER") || (resolveAllWhen.equals("WIFI_ONLY") && !wifiInfo.isConnected()))
                    resolveAll = false;
                // Keep trying to resolve the URL until we get a URL that isn't a redirect.
                String finalUrl = redirectUrl;
                while (redirectUrl != null && ((resolveAll) || (RedirectHelper.isRedirect(Uri.parse(redirectUrl).getHost())))) {
                    redirectUrl = getRedirect(redirectUrl);
                    if (redirectUrl != null) {
                        // This should avoid infinite loops, just in case.
                        if (redirectUrl.equals(finalUrl))
                            return finalUrl;
                        finalUrl = redirectUrl;
                    }
                }
                return finalUrl;
            }
        }
protected void onPostExecute(final String uri) {
if (noConnectionError)
Toast.makeText(context, getString(R.string.toast_message_network) + uri, Toast.LENGTH_LONG).show();
else if (connectionError)
Toast.makeText(context, getString(R.string.toast_message_error) + uri, Toast.LENGTH_LONG).show();
if (confirmOpen) {
Intent confirmDialogIntent = new Intent(context, ConfirmDialog.class);
confirmDialogIntent.putExtra("uri", uri);
startActivity(confirmDialogIntent);
} else {
if (!noConnectionError && !connectionError && toastType.equals(TOAST_DETAILED))
Toast.makeText(context, getString(R.string.toast_message_done) + uri, Toast.LENGTH_LONG).show();
|
[
" Intent intent = new Intent(Intent.ACTION_VIEW, Uri.parse(uri));"
] | 690
|
lcc
|
java
| null |
6a8feeca0aea2095a4c5fc0c58f18f3456556c6af9b7374a
|
|
# -*- coding: utf-8 -*-
from __future__ import absolute_import
from __future__ import print_function
import behave
import re
import os
import tempfile
import glob
from lib.file import decompress_file_by_extension_to_dir
from common.lib.behave_ext import check_context_table
from common.lib.diff import print_lines_diff
from common.lib.file import get_compression_suffix
from lib.sqlite_repodata import load_sqlite
from lib.xml_repodata import xml_parse_repodata
from lib.repodata import regex_find_file_from_list
from lib.repodata import verify_repomd_item_with_file
from lib.repodata import build_nevra
from lib.file import get_checksum_regex
from lib.file import decompression_iter
from lib.file import checksum_of_file
from string import Template
# namespaces
ns = {"pri_ns": "http://linux.duke.edu/metadata/common",
"fil_ns": "http://linux.duke.edu/metadata/filelists",
"oth_ns": "http://linux.duke.edu/metadata/other",
"md_ns": "http://linux.duke.edu/metadata/repo"}
def keys_do_not_differ(prim, flist, oth):
    """Assert primary, filelists and other describe the same package set.

    Packages are compared by their keys; on mismatch the differing lines
    are printed and an AssertionError is raised.
    """
    for other, label in ((flist, "Filelists"), (oth, "Other")):
        if prim.keys() != other.keys():
            print_lines_diff(prim.keys(), other.keys())
            raise AssertionError("Primary and %s have different package sets." % label)
def repodata_do_not_differ(prim1, prim2, flist1, flist2, oth1, oth2):
    """Assert that two sets of parsed repodata carry identical content.

    Primary data is additionally compared by checksum keys and by package
    names; all three metadata types are then diffed pairwise.
    """
    # Compare packages by checksums
    if prim1.keys() != prim2.keys():
        print_lines_diff(prim1.keys(), prim2.keys())
        raise AssertionError("Primary repodata have different package sets.")
    # Compare packages by name
    if prim1.packages() != prim2.packages():
        print_lines_diff(prim1.packages(), prim2.packages())
        raise AssertionError("Primary repodata have different sets of package names.")
    for first, second, label in ((prim1, prim2, "Primary"),
                                 (flist1, flist2, "Filelists"),
                                 (oth1, oth2, "Other")):
        delta = first.diff(second)
        if delta:
            raise AssertionError("%s repodata are different.\n"
                                 "Difference: %s" % (label, delta))
@behave.step("repodata \"{path}\" are consistent")
def repodata_are_consistent(context, path):
    """Behave step: verify that the repodata in *path* is self-consistent.

    Checks that every file referenced from repomd.xml exists and passes
    verify_repomd_item_with_file(), that primary/filelists/other XML agree
    on the package set, and that the optional sqlite and zchunk variants
    carry the same data as the XML metadata.
    """
    repopath = os.path.join(context.tempdir_manager.tempdir, path.lstrip('/'))
    tmpdir = tempfile.mkdtemp()
    # Set only when the corresponding repomd entries exist; used below as
    # markers that the whole sqlite / zck triple should be validated.
    prim_path_sqlite = None
    prim_zck_path = None
    # REPOMD
    md_path = os.path.join(repopath, "repomd.xml")
    if not os.path.exists(md_path):
        raise AssertionError("Error: repomd.xml is missing (%s)" % md_path)
    repomd = xml_parse_repodata(md_path, "{%s}data" % ns["md_ns"], "repomd")
    for key in repomd.keys():
        item = repomd.items[key]
        if not item.location_href:
            continue
        # Remove /repodata/ from path
        basename = os.path.basename(item.location_href)
        p = os.path.join(repopath, basename.lstrip('/'))
        if not os.path.isfile(p):
            raise AssertionError("Error: repomd.xml contains: \"%s\""
                                 "but it is not present in %s" % (p, repopath))
        decompressed_p = decompress_file_by_extension_to_dir(p, tmpdir)
        if item.name == "primary_db":
            prim_path_sqlite = decompressed_p
        elif item.name == "filelists_db":
            filelists_path_sqlite = decompressed_p
        elif item.name == "other_db":
            other_path_sqlite = decompressed_p
        elif item.name == "primary":
            prim_path = decompressed_p
        elif item.name == "filelists":
            filelists_path = decompressed_p
        elif item.name == "other":
            other_path = decompressed_p
        elif item.name == "primary_zck":
            prim_zck_path = decompressed_p
        elif item.name == "filelists_zck":
            filelists_zck_path = decompressed_p
        elif item.name == "other_zck":
            other_zck_path = decompressed_p
        else:
            # Skip unsupported updateinfo, comps, etc..
            # TODO(amatej): we could technically check for updateinfo,
            # comps, modules and even verify some stuff
            continue
        # Each referenced file must match the metadata recorded in repomd.
        verify_repomd_item_with_file(item, p, decompressed_p)
    # XML
    primary = xml_parse_repodata(prim_path, "{%s}package" % ns["pri_ns"], "primary")
    filelists = xml_parse_repodata(filelists_path, "{%s}package" % ns["fil_ns"], "filelists")
    other = xml_parse_repodata(other_path, "{%s}package" % ns["oth_ns"], "other")
    keys_do_not_differ(primary, filelists, other)
    # SQLITE
    if prim_path_sqlite:  # All three sqlite files have to be present at the same time
        primary_sql = load_sqlite(prim_path_sqlite, "primary")
        filelists_sql = load_sqlite(filelists_path_sqlite, "filelists")
        other_sql = load_sqlite(other_path_sqlite, "other")
        keys_do_not_differ(primary_sql, filelists_sql, other_sql)
        repodata_do_not_differ(primary, primary_sql, filelists, filelists_sql, other, other_sql)
    # ZCK
    if prim_zck_path:  # All three zck files have to be present at the same time
        primary_zck = xml_parse_repodata(prim_zck_path, "{%s}package" % ns["pri_ns"], "primary")
        filelists_zck = xml_parse_repodata(filelists_zck_path, "{%s}package" % ns["fil_ns"], "filelists")
        other_zck = xml_parse_repodata(other_zck_path, "{%s}package" % ns["oth_ns"], "other")
        keys_do_not_differ(primary_zck, filelists_zck, other_zck)
        repodata_do_not_differ(primary, primary_zck, filelists, filelists_zck, other, other_zck)
    return
@behave.step("repodata in \"{path}\" is")
def repodata_in_path_is(context, path):
    """Behave step: verify *path* contains exactly the metadata files listed
    in the step table (Type, File, Checksum Type, Compression Type).

    For each row the file is located (filenames may embed a checksum or a
    ${checksum} placeholder), its checksum-in-filename is validated, and the
    file is decompressed to confirm the payload matches its extension.
    """
    check_context_table(context, ["Type", "File", "Checksum Type", "Compression Type"])
    # repomd.xml is mandatory in this form
    repomd_filepath = os.path.join(context.tempdir_manager.tempdir, path.lstrip("/"), "repomd.xml")
    if not os.path.exists(repomd_filepath):
        raise AssertionError("Error: repomd.xml is missing (%s)" % repomd_filepath)
    files = os.listdir(os.path.dirname(repomd_filepath))
    files.remove("repomd.xml")
    for repodata_type, repodata_file, checksum_type, compression_type in context.table:
        checksum_regex = get_checksum_regex(checksum_type)
        filename_parts = repodata_file.split("-")
        if (len(filename_parts) == 1):
            pass  # Simple-md-filenames
        elif (filename_parts[0] == "${checksum}"):
            # Placeholder: substitute a regex matching the requested checksum type.
            filename_parts[0] = Template(filename_parts[0]).substitute(checksum=checksum_regex)
        else:
            if checksum_regex:
                if not (re.compile(checksum_regex + "$")).match(filename_parts[0]):
                    raise ValueError("Checksum type: " + checksum_type + " does not"
                                     " match to File: " + repodata_file)
        filepath = os.path.join(context.tempdir_manager.tempdir, path.lstrip("/"), '-'.join(filename_parts))
        # Final path to file, even when specified as regex
        # At the same time verifies that file exists
        filepath = regex_find_file_from_list(filepath, files)
        files.remove(os.path.basename(filepath))
        # Verify checksum
        checksum = checksum_of_file(filepath, checksum_type)
        if (checksum_regex):
            filename_parts_final = os.path.basename(filepath).split("-")
            if (len(filename_parts_final) == 1):
                pass  # Simple-md-filenames
            elif not checksum == filename_parts_final[0]:
                raise ValueError("Checksum of File: " + repodata_file + " doesn't match checksum"
                                 " in the name of the File: " + os.path.basename(filepath))
        # Verify compression
        compression_suffix = get_compression_suffix(compression_type)
        if compression_suffix:
            if not filepath.endswith(compression_suffix):
                raise ValueError("Compression type: " + compression_type + " does"
                                 " not match suffix of File: " + repodata_file)
        try:
            tmp = next(decompression_iter(filepath, compression_type, blocksize=100))
            if compression_suffix and filepath.endswith(compression_suffix):
                filepath = filepath[:-(len(compression_suffix))]
            if tmp:
                if filepath.endswith(".sqlite"):
                    assert("SQLite" in str(tmp))
                elif filepath.endswith(".xml"):
                    assert("xml" in str(tmp))
                elif filepath.endswith(".yaml"):
                    # Assume all yaml files are modulemd documents
                    assert("modulemd" in str(tmp))
                elif filepath.endswith(".txt"):
                    pass
                else:
                    # BUG FIX: a bare `raise` here had no active exception, so it
                    # surfaced as RuntimeError and escaped the except clause below.
                    # Raise AssertionError so unexpected file types fail the step
                    # through the same error path as a failed content check.
                    raise AssertionError("unexpected file type: " + filepath)
        except (AssertionError, IOError):
            raise AssertionError("Cannot decompress File: " + repodata_file + " using"
                                 " compression type: " + compression_type)
    if len(files) > 0:
        raise AssertionError("repodata directory contains additional metadata files:\n{0}".format('\n'.join(files)))
@behave.step("primary in \"{path}\" has only packages")
def primary_in_path_contains_only_packages(context, path):
    """Behave step: verify primary.xml in *path* lists exactly the NEVRAs
    given in the step table (Name, Epoch, Version, Release, Architecture) --
    no more, no less.
    """
    check_context_table(context, ["Name", "Epoch", "Version", "Release", "Architecture"])
    # Glob because the filename is prefixed with its checksum; assumes exactly
    # one *-primary.xml.* file is present -- TODO confirm for repos keeping history.
    filepath = os.path.join(context.tempdir_manager.tempdir, path.lstrip('/'), "*-primary.xml.*")
    primary_filepath = glob.glob(filepath)[0]
    primary = xml_parse_repodata(primary_filepath, "{%s}package" % ns["pri_ns"], "primary")
    for name, epoch, version, release, architecture in context.table:
        nevra = build_nevra(name, epoch, version, release, architecture)
        found = False
        for key in primary.keys():
            pkg = primary.items[key]
            if (nevra == pkg.nevra()):
                # Deleting while iterating is safe here: we break out immediately.
                del primary.items[key]
                found = True
                break
        if not found:
            print("primary.xml yet unmatched packages:")
            for key in primary.keys():
                pkg = primary.items[key]
                print("\t" + build_nevra(pkg.name, pkg.epoch, pkg.version, pkg.release, pkg.arch))
            raise AssertionError("Package " + nevra + " not found")
    # Anything left over was present in primary.xml but not in the table.
    if (len(primary.keys()) > 0):
        print("primary.xml contains additional packages:")
        for key in primary.keys():
            pkg = primary.items[key]
            print("\t" + build_nevra(pkg.name, pkg.epoch, pkg.version, pkg.release, pkg.arch))
        raise AssertionError("Additional packages in primary.xml")
@behave.step("primary in \"{path}\" doesn't have any packages")
def primary_in_path_doesnt_contain_any_packages(context, path):
filepath = os.path.join(context.tempdir_manager.tempdir, path.lstrip('/'), "*-primary.xml.*")
primary_filepath = glob.glob(filepath)[0]
primary = xml_parse_repodata(primary_filepath, "{%s}package" % ns["pri_ns"], "primary")
|
[
" if (len(primary.keys()) > 0):"
] | 918
|
lcc
|
python
| null |
fd3a1a817ea75abb6d8f7040fcfb3ee43dbfe07e4e6d9408
|
|
# This module should be kept compatible with Python 2.1.
__revision__ = "$Id: install_lib.py 4802 2007-01-23 21:26:03Z vapier $"
import sys, os, string
from types import IntType
from distutils.core import Command
from distutils.errors import DistutilsOptionError
# Extension for Python source files.
PYTHON_SOURCE_EXTENSION = os.extsep + "py"
class install_lib (Command):
description = "install all Python modules (extensions and pure Python)"
# The byte-compilation options are a tad confusing. Here are the
# possible scenarios:
# 1) no compilation at all (--no-compile --no-optimize)
# 2) compile .pyc only (--compile --no-optimize; default)
# 3) compile .pyc and "level 1" .pyo (--compile --optimize)
# 4) compile "level 1" .pyo only (--no-compile --optimize)
# 5) compile .pyc and "level 2" .pyo (--compile --optimize-more)
# 6) compile "level 2" .pyo only (--no-compile --optimize-more)
#
# The UI for this is two option, 'compile' and 'optimize'.
# 'compile' is strictly boolean, and only decides whether to
# generate .pyc files. 'optimize' is three-way (0, 1, or 2), and
# decides both whether to generate .pyo files and what level of
# optimization to use.
user_options = [
('install-dir=', 'd', "directory to install to"),
('build-dir=','b', "build directory (where to install from)"),
('force', 'f', "force installation (overwrite existing files)"),
('compile', 'c', "compile .py to .pyc [default]"),
('no-compile', None, "don't compile .py files"),
('optimize=', 'O',
"also compile with optimization: -O1 for \"python -O\", "
"-O2 for \"python -OO\", and -O0 to disable [default: -O0]"),
('skip-build', None, "skip the build steps"),
]
boolean_options = ['force', 'compile', 'skip-build']
negative_opt = {'no-compile' : 'compile'}
    def initialize_options (self):
        # let the 'install' command dictate our installation directory
        self.install_dir = None     # where to install to
        self.build_dir = None       # where to install from
        self.force = 0              # overwrite existing files?
        self.compile = None         # generate .pyc? (tri-state until finalized)
        self.optimize = None        # .pyo optimization level (0, 1 or 2)
        self.skip_build = None      # skip the build_py/build_ext steps?
    def finalize_options (self):
        # Get all the information we need to install pure Python modules
        # from the umbrella 'install' command -- build (source) directory,
        # install (target) directory, and whether to compile .py files.
        self.set_undefined_options('install',
                                   ('build_lib', 'build_dir'),
                                   ('install_lib', 'install_dir'),
                                   ('force', 'force'),
                                   ('compile', 'compile'),
                                   ('optimize', 'optimize'),
                                   ('skip_build', 'skip_build'),
                                  )

        # Default scenario (2) from the class comment: compile .pyc, no .pyo.
        if self.compile is None:
            self.compile = 1
        if self.optimize is None:
            self.optimize = 0

        # 'optimize' may arrive as a string from the command line; coerce it
        # and validate that it is one of the three supported levels.
        if type(self.optimize) is not IntType:
            try:
                self.optimize = int(self.optimize)
                assert 0 <= self.optimize <= 2
            except (ValueError, AssertionError):
                raise DistutilsOptionError, "optimize must be 0, 1, or 2"
    def run (self):
        """Build (unless --skip-build), copy the build tree into the install
        directory, then byte-compile pure modules as configured."""
        # Make sure we have built everything we need first
        self.build()

        # Install everything: simply dump the entire contents of the build
        # directory to the installation directory (that's the beauty of
        # having a build directory!)
        outfiles = self.install()

        # (Optionally) compile .py to .pyc
        # 'outfiles' is None when the build directory did not exist.
        if outfiles is not None and self.distribution.has_pure_modules():
            self.byte_compile(outfiles)
# run ()
# -- Top-level worker functions ------------------------------------
# (called from 'run()')
def build (self):
if not self.skip_build:
if self.distribution.has_pure_modules():
self.run_command('build_py')
if self.distribution.has_ext_modules():
self.run_command('build_ext')
def install (self):
if os.path.isdir(self.build_dir):
outfiles = self.copy_tree(self.build_dir, self.install_dir)
else:
self.warn("'%s' does not exist -- no Python modules to install" %
self.build_dir)
return
return outfiles
def byte_compile (self, files):
from distutils.util import byte_compile
# Get the "--root" directory supplied to the "install" command,
# and use it as a prefix to strip off the purported filename
# encoded in bytecode files. This is far from complete, but it
# should at least generate usable bytecode in RPM distributions.
install_root = self.get_finalized_command('install').root
if self.compile:
byte_compile(files, optimize=0,
force=self.force, prefix=install_root,
dry_run=self.dry_run)
if self.optimize > 0:
byte_compile(files, optimize=self.optimize,
force=self.force, prefix=install_root,
verbose=self.verbose, dry_run=self.dry_run)
# -- Utility methods -----------------------------------------------
def _mutate_outputs (self, has_any, build_cmd, cmd_option, output_dir):
if not has_any:
return []
build_cmd = self.get_finalized_command(build_cmd)
build_files = build_cmd.get_outputs()
build_dir = getattr(build_cmd, cmd_option)
prefix_len = len(build_dir) + len(os.sep)
outputs = []
for file in build_files:
outputs.append(os.path.join(output_dir, file[prefix_len:]))
return outputs
# _mutate_outputs ()
def _bytecode_filenames (self, py_filenames):
bytecode_files = []
for py_file in py_filenames:
# Since build_py handles package data installation, the
# list of outputs can contain more than just .py files.
# Make sure we only report bytecode for the .py files.
ext = os.path.splitext(os.path.normcase(py_file))[1]
if ext != PYTHON_SOURCE_EXTENSION:
continue
if self.compile:
bytecode_files.append(py_file + "c")
if self.optimize > 0:
bytecode_files.append(py_file + "o")
return bytecode_files
# -- External interface --------------------------------------------
# (called by outsiders)
def get_outputs (self):
"""Return the list of files that would be installed if this command
were actually run. Not affected by the "dry-run" flag or whether
modules have actually been built yet.
"""
pure_outputs = \
self._mutate_outputs(self.distribution.has_pure_modules(),
'build_py', 'build_lib',
self.install_dir)
if self.compile:
bytecode_outputs = self._bytecode_filenames(pure_outputs)
else:
bytecode_outputs = []
ext_outputs = \
self._mutate_outputs(self.distribution.has_ext_modules(),
'build_ext', 'build_lib',
self.install_dir)
return pure_outputs + bytecode_outputs + ext_outputs
# get_outputs ()
def get_inputs (self):
"""Get the list of files that are input to this command, ie. the
files that get installed as they are named in the build tree.
The files in this list correspond one-to-one to the output
filenames returned by 'get_outputs()'.
"""
inputs = []
if self.distribution.has_pure_modules():
build_py = self.get_finalized_command('build_py')
inputs.extend(build_py.get_outputs())
if self.distribution.has_ext_modules():
|
[
" build_ext = self.get_finalized_command('build_ext')"
] | 791
|
lcc
|
python
| null |
b6029cffa6f017e686e2303165fc1ebc437fad4e837e5e05
|
|
using System;
using System.Collections;
using System.Collections.Generic;
using System.Data;
using System.Data.Common;
using System.Text;
using NHibernate.AdoNet;
using NHibernate.Cache;
using NHibernate.Cache.Entry;
using NHibernate.Dialect.Lock;
using NHibernate.Engine;
using NHibernate.Exceptions;
using NHibernate.Id;
using NHibernate.Id.Insert;
using NHibernate.Impl;
using NHibernate.Intercept;
using NHibernate.Loader.Entity;
using NHibernate.Mapping;
using NHibernate.Metadata;
using NHibernate.Properties;
using NHibernate.SqlCommand;
using NHibernate.Tuple;
using NHibernate.Tuple.Entity;
using NHibernate.Type;
using NHibernate.Util;
using Array=System.Array;
using Property=NHibernate.Mapping.Property;
using NHibernate.SqlTypes;
using System.Linq;
namespace NHibernate.Persister.Entity
{
/// <summary>
	/// Superclass for built-in mapping strategies. Implements functionality common to both mapping
/// strategies
/// </summary>
/// <remarks>
/// May be considered an immutable view of the mapping object
/// </remarks>
public abstract class AbstractEntityPersister : IOuterJoinLoadable, IQueryable, IClassMetadata, IUniqueKeyLoadable, ISqlLoadable, ILazyPropertyInitializer, IPostInsertIdentityPersister, ILockable
{
#region InclusionChecker
		/// <summary>
		/// Strategy for deciding, by property number, whether a property
		/// should be included (see the concrete checkers below).
		/// </summary>
		protected internal interface IInclusionChecker
		{
			bool IncludeProperty(int propertyNumber);
		}
private class NoneInclusionChecker : IInclusionChecker
{
private readonly ValueInclusion[] inclusions;
public NoneInclusionChecker(ValueInclusion[] inclusions)
{
this.inclusions = inclusions;
}
// TODO : currently we really do not handle ValueInclusion.PARTIAL...
// ValueInclusion.PARTIAL would indicate parts of a component need to
// be included in the select; currently we then just render the entire
// component into the select clause in that case.
public bool IncludeProperty(int propertyNumber)
{
return inclusions[propertyNumber] != ValueInclusion.None;
}
}
private class FullInclusionChecker : IInclusionChecker
{
private readonly bool[] includeProperty;
public FullInclusionChecker(bool[] includeProperty)
{
this.includeProperty = includeProperty;
}
public bool IncludeProperty(int propertyNumber)
{
return includeProperty[propertyNumber];
}
}
#endregion
private class GeneratedIdentifierBinder : IBinder
{
private readonly object[] fields;
private readonly bool[] notNull;
private readonly ISessionImplementor session;
private readonly object entity;
private readonly AbstractEntityPersister entityPersister;
public GeneratedIdentifierBinder(object[] fields, bool[] notNull, ISessionImplementor session, object entity, AbstractEntityPersister entityPersister)
{
this.fields = fields;
this.notNull = notNull;
this.session = session;
this.entity = entity;
this.entityPersister = entityPersister;
}
public object Entity
{
get { return entity; }
}
public virtual void BindValues(DbCommand ps)
{
entityPersister.Dehydrate(null, fields, notNull, entityPersister.propertyColumnInsertable, 0, ps, session);
}
}
private static readonly IInternalLogger log = LoggerProvider.LoggerFor(typeof(AbstractEntityPersister));
public const string EntityClass = "class";
protected const string Discriminator_Alias = "clazz_";
private readonly ISessionFactoryImplementor factory;
private readonly ICacheConcurrencyStrategy cache;
private readonly bool isLazyPropertiesCacheable;
private readonly ICacheEntryStructure cacheEntryStructure;
private readonly EntityMetamodel entityMetamodel;
private readonly Dictionary<System.Type, string> entityNameBySubclass = new Dictionary<System.Type, string>();
private readonly string[] rootTableKeyColumnNames;
private readonly string[] identifierAliases;
private readonly int identifierColumnSpan;
private readonly string versionColumnName;
private readonly bool hasFormulaProperties;
private readonly int batchSize;
private readonly bool hasSubselectLoadableCollections;
protected internal string rowIdName;
private readonly ISet<string> lazyProperties;
private readonly string sqlWhereString;
private readonly string sqlWhereStringTemplate;
#region Information about properties of this class
//including inherited properties
//(only really needed for updatable/insertable properties)
private readonly int[] propertyColumnSpans;
// the names of the columns for the property
// the array is indexed as propertyColumnNames[propertyIndex][columnIndex] = "columnName"
private readonly string[] propertySubclassNames;
private readonly string[][] propertyColumnAliases;
private readonly string[][] propertyColumnNames;
// the alias names for the columns of the property. This is used in the AS portion for
// selecting a column. It is indexed the same as propertyColumnNames
// private readonly string[ ] propertyFormulaTemplates;
private readonly string[][] propertyColumnFormulaTemplates;
private readonly bool[][] propertyColumnUpdateable;
private readonly bool[][] propertyColumnInsertable;
private readonly bool[] propertyUniqueness;
private readonly bool[] propertySelectable;
#endregion
#region Information about lazy properties of this class
private readonly string[] lazyPropertyNames;
private readonly int[] lazyPropertyNumbers;
private readonly IType[] lazyPropertyTypes;
private readonly string[][] lazyPropertyColumnAliases;
#endregion
#region Information about all properties in class hierarchy
private readonly string[] subclassPropertyNameClosure;
private readonly string[] subclassPropertySubclassNameClosure;
private readonly IType[] subclassPropertyTypeClosure;
private readonly string[][] subclassPropertyFormulaTemplateClosure;
private readonly string[][] subclassPropertyColumnNameClosure;
private readonly FetchMode[] subclassPropertyFetchModeClosure;
private readonly bool[] subclassPropertyNullabilityClosure;
protected bool[] propertyDefinedOnSubclass;
private readonly int[][] subclassPropertyColumnNumberClosure;
private readonly int[][] subclassPropertyFormulaNumberClosure;
private readonly CascadeStyle[] subclassPropertyCascadeStyleClosure;
#endregion
#region Information about all columns/formulas in class hierarchy
private readonly string[] subclassColumnClosure;
private readonly bool[] subclassColumnLazyClosure;
private readonly string[] subclassColumnAliasClosure;
private readonly bool[] subclassColumnSelectableClosure;
private readonly string[] subclassFormulaClosure;
private readonly string[] subclassFormulaTemplateClosure;
private readonly string[] subclassFormulaAliasClosure;
private readonly bool[] subclassFormulaLazyClosure;
#endregion
#region Dynamic filters attached to the class-level
private readonly FilterHelper filterHelper;
#endregion
private readonly Dictionary<string, EntityLoader> uniqueKeyLoaders = new Dictionary<string, EntityLoader>();
private readonly Dictionary<LockMode, ILockingStrategy> lockers = new Dictionary<LockMode, ILockingStrategy>();
private readonly Dictionary<string, IUniqueEntityLoader> loaders = new Dictionary<string, IUniqueEntityLoader>();
#region SQL strings
private SqlString sqlVersionSelectString;
private SqlString sqlSnapshotSelectString;
private SqlString sqlLazySelectString;
private SqlCommandInfo sqlIdentityInsertString;
private SqlCommandInfo sqlUpdateByRowIdString;
private SqlCommandInfo sqlLazyUpdateByRowIdString;
private SqlCommandInfo[] sqlDeleteStrings;
private SqlCommandInfo[] sqlInsertStrings;
private SqlCommandInfo[] sqlUpdateStrings;
private SqlCommandInfo[] sqlLazyUpdateStrings;
private SqlString sqlInsertGeneratedValuesSelectString;
private SqlString sqlUpdateGeneratedValuesSelectString;
private string identitySelectString;
#endregion
#region Custom SQL
protected internal bool[] insertCallable;
protected internal bool[] updateCallable;
protected internal bool[] deleteCallable;
protected internal SqlString[] customSQLInsert;
protected internal SqlString[] customSQLUpdate;
protected internal SqlString[] customSQLDelete;
protected internal ExecuteUpdateResultCheckStyle[] insertResultCheckStyles;
protected internal ExecuteUpdateResultCheckStyle[] updateResultCheckStyles;
protected internal ExecuteUpdateResultCheckStyle[] deleteResultCheckStyles;
#endregion
private IInsertGeneratedIdentifierDelegate identityDelegate;
private bool[] tableHasColumns;
private readonly string loaderName;
private IUniqueEntityLoader queryLoader;
private readonly string temporaryIdTableName;
private readonly string temporaryIdTableDDL;
private readonly Dictionary<string, string[]> subclassPropertyAliases = new Dictionary<string, string[]>();
private readonly Dictionary<string, string[]> subclassPropertyColumnNames = new Dictionary<string, string[]>();
protected readonly BasicEntityPropertyMapping propertyMapping;
		/// <summary>
		/// Builds the persister's immutable view of the mapping: identifier and
		/// version columns, per-property column metadata, the lazy-property
		/// bookkeeping, and the full subclass property/column/formula closures.
		/// </summary>
		/// <param name="persistentClass">The mapping this persister realizes.</param>
		/// <param name="cache">Second-level cache concurrency strategy (may be null when the entity is not cached — TODO confirm at call sites).</param>
		/// <param name="factory">The owning session factory.</param>
		protected AbstractEntityPersister(PersistentClass persistentClass, ICacheConcurrencyStrategy cache,
		                                  ISessionFactoryImplementor factory)
		{
			this.factory = factory;
			this.cache = cache;
			isLazyPropertiesCacheable = persistentClass.IsLazyPropertiesCacheable;
			// Structured cache entries keep a decomposed form in the 2nd-level cache.
			cacheEntryStructure = factory.Settings.IsStructuredCacheEntriesEnabled
									? (ICacheEntryStructure)new StructuredCacheEntry(this)
									: (ICacheEntryStructure)new UnstructuredCacheEntry();
			entityMetamodel = new EntityMetamodel(persistentClass, factory);
			if (persistentClass.HasPocoRepresentation)
			{
				//TODO: this is currently specific to pojos, but need to be available for all entity-modes
				foreach (Subclass subclass in persistentClass.SubclassIterator)
				{
					entityNameBySubclass[subclass.MappedClass] = subclass.EntityName;
				}
			}
			// Class-level batch size wins over the factory-wide default.
			batchSize = persistentClass.BatchSize ?? factory.Settings.DefaultBatchFetchSize;
			hasSubselectLoadableCollections = persistentClass.HasSubselectLoadableCollections;
			propertyMapping = new BasicEntityPropertyMapping(this);
			#region IDENTIFIER
			// Quoted PK column names and their select aliases on the root table.
			identifierColumnSpan = persistentClass.Identifier.ColumnSpan;
			rootTableKeyColumnNames = new string[identifierColumnSpan];
			identifierAliases = new string[identifierColumnSpan];
			rowIdName = persistentClass.RootTable.RowId;
			loaderName = persistentClass.LoaderName;
			// TODO NH: Not safe cast to Column
			int i = 0;
			foreach (Column col in persistentClass.Identifier.ColumnIterator)
			{
				rootTableKeyColumnNames[i] = col.GetQuotedName(factory.Dialect);
				identifierAliases[i] = col.GetAlias(factory.Dialect, persistentClass.RootTable);
				i++;
			}
			#endregion
			#region VERSION
			if (persistentClass.IsVersioned)
			{
				// A version property maps to exactly one column.
				foreach (Column col in persistentClass.Version.ColumnIterator)
				{
					versionColumnName = col.GetQuotedName(factory.Dialect);
					break; //only happens once
				}
			}
			else
			{
				versionColumnName = null;
			}
			#endregion
			#region WHERE STRING
			// Class-level "where" filter, parenthesized and pre-templated for alias injection.
			sqlWhereString = !string.IsNullOrEmpty(persistentClass.Where) ? "( " + persistentClass.Where + ") " : null;
			sqlWhereStringTemplate = sqlWhereString == null
										? null
										: Template.RenderWhereStringTemplate(sqlWhereString, factory.Dialect,
										                                     factory.SQLFunctionRegistry);
			#endregion
			#region PROPERTIES
			// NH: see consistence with the implementation on EntityMetamodel where we are disabling lazy-properties for no lazy entities
			bool lazyAvailable = IsInstrumented && entityMetamodel.IsLazy;
			int hydrateSpan = entityMetamodel.PropertySpan;
			propertyColumnSpans = new int[hydrateSpan];
			propertySubclassNames = new string[hydrateSpan];
			propertyColumnAliases = new string[hydrateSpan][];
			propertyColumnNames = new string[hydrateSpan][];
			propertyColumnFormulaTemplates = new string[hydrateSpan][];
			propertyUniqueness = new bool[hydrateSpan];
			propertySelectable = new bool[hydrateSpan];
			propertyColumnUpdateable = new bool[hydrateSpan][];
			propertyColumnInsertable = new bool[hydrateSpan][];
			var thisClassProperties = new HashSet<Property>();
			lazyProperties = new HashSet<string>();
			List<string> lazyNames = new List<string>();
			List<int> lazyNumbers = new List<int>();
			List<IType> lazyTypes = new List<IType>();
			List<string[]> lazyColAliases = new List<string[]>();
			i = 0;
			bool foundFormula = false;
			// First pass: properties declared by this class (and its superclasses).
			foreach (Property prop in persistentClass.PropertyClosureIterator)
			{
				thisClassProperties.Add(prop);
				int span = prop.ColumnSpan;
				propertyColumnSpans[i] = span;
				propertySubclassNames[i] = prop.PersistentClass.EntityName;
				string[] colNames = new string[span];
				string[] colAliases = new string[span];
				string[] templates = new string[span];
				int k = 0;
				// A property's "columns" may be real columns or formulas; each
				// slot fills either colNames[k] or templates[k], never both.
				foreach (ISelectable thing in prop.ColumnIterator)
				{
					colAliases[k] = thing.GetAlias(factory.Dialect, prop.Value.Table);
					if (thing.IsFormula)
					{
						foundFormula = true;
						templates[k] = thing.GetTemplate(factory.Dialect, factory.SQLFunctionRegistry);
					}
					else
					{
						colNames[k] = thing.GetTemplate(factory.Dialect, factory.SQLFunctionRegistry);
					}
					k++;
				}
				propertyColumnNames[i] = colNames;
				propertyColumnFormulaTemplates[i] = templates;
				propertyColumnAliases[i] = colAliases;
				// Lazy bookkeeping only applies when instrumentation makes
				// lazy properties possible at all (see lazyAvailable above).
				if (lazyAvailable && prop.IsLazy)
				{
					lazyProperties.Add(prop.Name);
					lazyNames.Add(prop.Name);
					lazyNumbers.Add(i);
					lazyTypes.Add(prop.Value.Type);
					lazyColAliases.Add(colAliases);
				}
				propertyColumnUpdateable[i] = prop.Value.ColumnUpdateability;
				propertyColumnInsertable[i] = prop.Value.ColumnInsertability;
				propertySelectable[i] = prop.IsSelectable;
				propertyUniqueness[i] = prop.Value.IsAlternateUniqueKey;
				i++;
			}
			hasFormulaProperties = foundFormula;
			lazyPropertyColumnAliases = lazyColAliases.ToArray();
			lazyPropertyNames = lazyNames.ToArray();
			lazyPropertyNumbers = lazyNumbers.ToArray();
			lazyPropertyTypes = lazyTypes.ToArray();
			#endregion
			#region SUBCLASS PROPERTY CLOSURE
			// Second pass: the full property closure including subclasses;
			// accumulated into parallel lists, then frozen into the
			// subclass*Closure arrays below.
			List<string> columns = new List<string>();
			List<bool> columnsLazy = new List<bool>();
			List<string> aliases = new List<string>();
			List<string> formulas = new List<string>();
			List<string> formulaAliases = new List<string>();
			List<string> formulaTemplates = new List<string>();
			List<bool> formulasLazy = new List<bool>();
			List<IType> types = new List<IType>();
			List<string> names = new List<string>();
			List<string> classes = new List<string>();
			List<string[]> templates2 = new List<string[]>();
			List<string[]> propColumns = new List<string[]>();
			List<FetchMode> joinedFetchesList = new List<FetchMode>();
			List<CascadeStyle> cascades = new List<CascadeStyle>();
			List<bool> definedBySubclass = new List<bool>();
			List<int[]> propColumnNumbers = new List<int[]>();
			List<int[]> propFormulaNumbers = new List<int[]>();
			List<bool> columnSelectables = new List<bool>();
			List<bool> propNullables = new List<bool>();
			foreach (Property prop in persistentClass.SubclassPropertyClosureIterator)
			{
				names.Add(prop.Name);
				classes.Add(prop.PersistentClass.EntityName);
				bool isDefinedBySubclass = !thisClassProperties.Contains(prop);
				definedBySubclass.Add(isDefinedBySubclass);
				propNullables.Add(prop.IsOptional || isDefinedBySubclass); //TODO: is this completely correct?
				types.Add(prop.Type);
				string[] cols = new string[prop.ColumnSpan];
				string[] forms = new string[prop.ColumnSpan];
				// colnos/formnos map each slot to its index in the flat
				// column/formula lists; -1 marks "not this kind".
				int[] colnos = new int[prop.ColumnSpan];
				int[] formnos = new int[prop.ColumnSpan];
				int l = 0;
				bool lazy = prop.IsLazy && lazyAvailable;
				foreach (ISelectable thing in prop.ColumnIterator)
				{
					if (thing.IsFormula)
					{
						string template = thing.GetTemplate(factory.Dialect, factory.SQLFunctionRegistry);
						formnos[l] = formulaTemplates.Count;
						colnos[l] = -1;
						formulaTemplates.Add(template);
						forms[l] = template;
						formulas.Add(thing.GetText(factory.Dialect));
						formulaAliases.Add(thing.GetAlias(factory.Dialect));
						formulasLazy.Add(lazy);
					}
					else
					{
						string colName = thing.GetTemplate(factory.Dialect, factory.SQLFunctionRegistry);
						colnos[l] = columns.Count; //before add :-)
						formnos[l] = -1;
						columns.Add(colName);
						cols[l] = colName;
						aliases.Add(thing.GetAlias(factory.Dialect, prop.Value.Table));
						columnsLazy.Add(lazy);
						columnSelectables.Add(prop.IsSelectable);
					}
					l++;
				}
				propColumns.Add(cols);
				templates2.Add(forms);
				propColumnNumbers.Add(colnos);
				propFormulaNumbers.Add(formnos);
				joinedFetchesList.Add(prop.Value.FetchMode);
				cascades.Add(prop.CascadeStyle);
			}
			subclassColumnClosure = columns.ToArray();
			subclassColumnAliasClosure = aliases.ToArray();
			subclassColumnLazyClosure = columnsLazy.ToArray();
			subclassColumnSelectableClosure = columnSelectables.ToArray();
			subclassFormulaClosure = formulas.ToArray();
			subclassFormulaTemplateClosure = formulaTemplates.ToArray();
			subclassFormulaAliasClosure = formulaAliases.ToArray();
			subclassFormulaLazyClosure = formulasLazy.ToArray();
			subclassPropertyNameClosure = names.ToArray();
			subclassPropertySubclassNameClosure = classes.ToArray();
			subclassPropertyTypeClosure = types.ToArray();
			subclassPropertyNullabilityClosure = propNullables.ToArray();
			subclassPropertyFormulaTemplateClosure = templates2.ToArray();
			subclassPropertyColumnNameClosure = propColumns.ToArray();
			subclassPropertyColumnNumberClosure = propColumnNumbers.ToArray();
			subclassPropertyFormulaNumberClosure = propFormulaNumbers.ToArray();
			subclassPropertyCascadeStyleClosure = cascades.ToArray();
			subclassPropertyFetchModeClosure = joinedFetchesList.ToArray();
			propertyDefinedOnSubclass = definedBySubclass.ToArray();
			#endregion
			// Handle any filters applied to the class level
			filterHelper = new FilterHelper(persistentClass.FilterMap, factory.Dialect, factory.SQLFunctionRegistry);
			temporaryIdTableName = persistentClass.TemporaryIdTableName;
			temporaryIdTableDDL = persistentClass.TemporaryIdTableDDL;
		}
		// Table-number closures: for each subclass column/formula/property,
		// the index of the table it lives in.  Supplied by the concrete
		// (single-table / joined / union) persister.
		protected abstract int[] SubclassColumnTableNumberClosure { get; }
		protected abstract int[] SubclassFormulaTableNumberClosure { get; }
		protected internal abstract int[] PropertyTableNumbersInSelect { get;}
		protected internal abstract int[] PropertyTableNumbers { get;}
		// The discriminator is selected under the fixed alias "clazz_"
		// (Discriminator_Alias); subclasses may override.
		public virtual string DiscriminatorColumnName
		{
			get { return Discriminator_Alias; }
		}
		// Non-null only for formula-based discriminators (overridden where used).
		protected virtual string DiscriminatorFormulaTemplate
		{
			get { return null; }
		}
		// Quoted primary-key column names of the root table (built in the ctor).
		public string[] RootTableKeyColumnNames
		{
			get { return rootTableKeyColumnNames; }
		}
protected internal SqlCommandInfo[] SQLUpdateByRowIdStrings
{
get
{
if (sqlUpdateByRowIdString == null)
throw new AssertionFailure("no update by row id");
SqlCommandInfo[] result = new SqlCommandInfo[TableSpan + 1];
result[0] = sqlUpdateByRowIdString;
Array.Copy(sqlUpdateStrings, 0, result, 1, TableSpan);
return result;
}
}
protected internal SqlCommandInfo[] SQLLazyUpdateByRowIdStrings
{
get
{
if (sqlLazyUpdateByRowIdString == null)
throw new AssertionFailure("no update by row id");
SqlCommandInfo[] result = new SqlCommandInfo[TableSpan];
result[0] = sqlLazyUpdateByRowIdString;
for (int i = 1; i < TableSpan; i++)
result[i] = sqlLazyUpdateStrings[i];
return result;
}
}
protected SqlString SQLSnapshotSelectString
{
get { return sqlSnapshotSelectString; }
}
protected SqlString SQLLazySelectString
{
get { return sqlLazySelectString; }
}
/// <summary>
/// The queries that delete rows by id (and version)
/// </summary>
protected SqlCommandInfo[] SqlDeleteStrings
{
get { return sqlDeleteStrings; }
}
/// <summary>
/// The queries that insert rows with a given id
/// </summary>
protected SqlCommandInfo[] SqlInsertStrings
{
get { return sqlInsertStrings; }
}
/// <summary>
/// The queries that update rows by id (and version)
/// </summary>
protected SqlCommandInfo[] SqlUpdateStrings
{
get { return sqlUpdateStrings; }
}
protected internal SqlCommandInfo[] SQLLazyUpdateStrings
{
get { return sqlLazyUpdateStrings; }
}
/// <summary>
/// The query that inserts a row, letting the database generate an id
/// </summary>
/// <returns> The IDENTITY-based insertion query. </returns>
protected internal SqlCommandInfo SQLIdentityInsertString
{
get { return sqlIdentityInsertString; }
}
protected SqlString VersionSelectString
{
get { return sqlVersionSelectString; }
}
public bool IsBatchable => OptimisticLockMode == Versioning.OptimisticLock.None ||
(!IsVersioned && OptimisticLockMode == Versioning.OptimisticLock.Version) ||
Factory.Settings.IsBatchVersionedDataEnabled;
public virtual string[] QuerySpaces
{
get { return PropertySpaces; }
}
protected internal ISet<string> LazyProperties
{
get { return lazyProperties; }
}
public bool IsBatchLoadable
{
get { return batchSize > 1; }
}
public virtual string[] IdentifierColumnNames
{
get { return rootTableKeyColumnNames; }
}
protected int IdentifierColumnSpan
{
get { return identifierColumnSpan; }
}
public virtual string VersionColumnName
{
get { return versionColumnName; }
}
protected internal string VersionedTableName
{
get { return GetTableName(0); }
}
protected internal bool[] SubclassColumnLaziness
{
get { return subclassColumnLazyClosure; }
}
protected internal bool[] SubclassFormulaLaziness
{
get { return subclassFormulaLazyClosure; }
}
/// <summary>
/// We can't immediately add to the cache if we have formulas
/// which must be evaluated, or if we have the possibility of
/// two concurrent updates to the same item being merged on
/// the database. This can happen if (a) the item is not
/// versioned and either (b) we have dynamic update enabled
/// or (c) we have multiple tables holding the state of the
/// item.
/// </summary>
public bool IsCacheInvalidationRequired
{
get { return HasFormulaProperties || (!IsVersioned && (entityMetamodel.IsDynamicUpdate || TableSpan > 1)); }
}
public bool IsLazyPropertiesCacheable
{
get { return isLazyPropertiesCacheable; }
}
public virtual string RootTableName
{
get { return GetSubclassTableName(0); }
}
public virtual string[] RootTableIdentifierColumnNames
{
get { return RootTableKeyColumnNames; }
}
protected internal string[] PropertySubclassNames
{
get { return propertySubclassNames; }
}
protected string[][] SubclassPropertyFormulaTemplateClosure
{
get { return subclassPropertyFormulaTemplateClosure; }
}
protected IType[] SubclassPropertyTypeClosure
{
get { return subclassPropertyTypeClosure; }
}
protected string[][] SubclassPropertyColumnNameClosure
{
get { return subclassPropertyColumnNameClosure; }
}
protected string[] SubclassPropertyNameClosure
{
get { return subclassPropertyNameClosure; }
}
protected string[] SubclassPropertySubclassNameClosure
{
get { return subclassPropertySubclassNameClosure; }
}
protected string[] SubclassColumnClosure
{
get { return subclassColumnClosure; }
}
protected string[] SubclassColumnAliasClosure
{
get { return subclassColumnAliasClosure; }
}
protected string[] SubclassFormulaClosure
{
get { return subclassFormulaClosure; }
}
protected string[] SubclassFormulaTemplateClosure
{
get { return subclassFormulaTemplateClosure; }
}
protected string[] SubclassFormulaAliasClosure
{
get { return subclassFormulaAliasClosure; }
}
public string IdentitySelectString
{
get
{
if (identitySelectString == null)
identitySelectString =
Factory.Dialect.GetIdentitySelectString(GetTableName(0), GetKeyColumns(0)[0],
IdentifierType.SqlTypes(Factory)[0].DbType);
return identitySelectString;
}
}
private string RootAlias
{
get { return StringHelper.GenerateAlias(EntityName); }
}
public ISessionFactoryImplementor Factory
{
get { return factory; }
}
public EntityMetamodel EntityMetamodel
{
get { return entityMetamodel; }
}
public ICacheConcurrencyStrategy Cache
{
get { return cache; }
}
public ICacheEntryStructure CacheEntryStructure
{
get { return cacheEntryStructure; }
}
public IComparer VersionComparator
{
get { return IsVersioned ? VersionType.Comparator : null; }
}
public string EntityName
{
get { return entityMetamodel.Name; }
}
public EntityType EntityType
{
get { return entityMetamodel.EntityType; }
}
public virtual bool IsPolymorphic
{
get { return entityMetamodel.IsPolymorphic; }
}
public virtual bool IsInherited
{
get { return entityMetamodel.IsInherited; }
}
public virtual IVersionType VersionType
{
get { return LocateVersionType(); }
}
public virtual int VersionProperty
{
get { return entityMetamodel.VersionPropertyIndex; }
}
public virtual bool IsVersioned
{
get { return entityMetamodel.IsVersioned; }
}
public virtual bool IsIdentifierAssignedByInsert
{
get { return entityMetamodel.IdentifierProperty.IsIdentifierAssignedByInsert; }
}
public virtual bool IsMutable
{
get { return entityMetamodel.IsMutable; }
}
public virtual bool IsAbstract
{
get { return entityMetamodel.IsAbstract; }
}
public virtual IIdentifierGenerator IdentifierGenerator
{
get { return entityMetamodel.IdentifierProperty.IdentifierGenerator; }
}
public virtual string RootEntityName
{
get { return entityMetamodel.RootName; }
}
public virtual IClassMetadata ClassMetadata
{
get { return this; }
}
public virtual string MappedSuperclass
{
get { return entityMetamodel.Superclass; }
}
public virtual bool IsExplicitPolymorphism
{
get { return entityMetamodel.IsExplicitPolymorphism; }
}
public string[] KeyColumnNames
{
get { return IdentifierColumnNames; }
}
public string[] JoinColumnNames
{
get { return KeyColumnNames; }
}
public string Name
{
get { return EntityName; }
}
public bool IsCollection
{
get { return false; }
}
public IType Type
{
get { return entityMetamodel.EntityType; }
}
public bool IsSelectBeforeUpdateRequired
{
get { return entityMetamodel.IsSelectBeforeUpdate; }
}
public bool IsVersionPropertyGenerated
{
get { return IsVersioned && PropertyUpdateGenerationInclusions[VersionProperty] != ValueInclusion.None; }
}
public bool VersionPropertyInsertable
{
get { return IsVersioned && PropertyInsertability[VersionProperty]; }
}
public virtual string[] PropertyNames
{
get { return entityMetamodel.PropertyNames; }
}
public virtual IType[] PropertyTypes
{
get { return entityMetamodel.PropertyTypes; }
}
public bool[] PropertyLaziness
{
get { return entityMetamodel.PropertyLaziness; }
}
public virtual bool[] PropertyCheckability
{
get { return entityMetamodel.PropertyCheckability; }
}
public bool[] NonLazyPropertyUpdateability
{
get { return entityMetamodel.NonlazyPropertyUpdateability; }
}
public virtual bool[] PropertyInsertability
{
get { return entityMetamodel.PropertyInsertability; }
}
public ValueInclusion[] PropertyInsertGenerationInclusions
{
get { return entityMetamodel.PropertyInsertGenerationInclusions; }
}
public ValueInclusion[] PropertyUpdateGenerationInclusions
{
get { return entityMetamodel.PropertyUpdateGenerationInclusions; }
}
public virtual bool[] PropertyNullability
{
get { return entityMetamodel.PropertyNullability; }
}
public virtual bool[] PropertyVersionability
{
get { return entityMetamodel.PropertyVersionability; }
}
public virtual CascadeStyle[] PropertyCascadeStyles
{
get { return entityMetamodel.CascadeStyles; }
}
public virtual bool IsMultiTable
{
get { return false; }
}
public string TemporaryIdTableName
{
get { return temporaryIdTableName; }
}
public string TemporaryIdTableDDL
{
get { return temporaryIdTableDDL; }
}
protected int PropertySpan
{
get { return entityMetamodel.PropertySpan; }
}
public virtual string IdentifierPropertyName
{
get { return entityMetamodel.IdentifierProperty.Name; }
}
public virtual IType GetIdentifierType(int j)
{
return IdentifierType;
}
public virtual IType IdentifierType
{
get { return entityMetamodel.IdentifierProperty.Type; }
}
public int[] NaturalIdentifierProperties
{
get { return entityMetamodel.NaturalIdentifierProperties; }
}
public abstract string[][] ConstraintOrderedTableKeyColumnClosure { get;}
public abstract IType DiscriminatorType { get;}
public abstract string[] ConstraintOrderedTableNameClosure { get;}
public abstract string DiscriminatorSQLValue { get;}
public abstract object DiscriminatorValue { get;}
public abstract string[] SubclassClosure { get; }
public abstract string[] PropertySpaces { get;}
protected virtual void AddDiscriminatorToInsert(SqlInsertBuilder insert) { }
protected virtual void AddDiscriminatorToSelect(SelectFragment select, string name, string suffix) { }
public abstract string GetSubclassTableName(int j);
//gets the identifier for a join table if other than pk
protected virtual object GetJoinTableId(int j, object[] fields)
{
return null;
}
protected virtual object GetJoinTableId(int table, object obj)
{
return null;
}
//for joining to other keys than pk
protected virtual string[] GetJoinIdKeyColumns(int j)
{
return IdentifierColumnNames;
}
protected abstract string[] GetSubclassTableKeyColumns(int j);
protected abstract bool IsClassOrSuperclassTable(int j);
protected abstract int SubclassTableSpan { get; }
protected abstract int TableSpan { get; }
protected abstract bool IsTableCascadeDeleteEnabled(int j);
protected abstract string GetTableName(int table);
protected abstract string[] GetKeyColumns(int table);
protected abstract bool IsPropertyOfTable(int property, int table);
protected virtual int? GetRefIdColumnOfTable(int table)
{
return null;
}
protected virtual Tuple.Property GetIdentiferProperty(int table)
{
var refId = GetRefIdColumnOfTable(table);
if (refId == null)
return entityMetamodel.IdentifierProperty;
return entityMetamodel.Properties[refId.Value];
}
protected virtual bool IsIdOfTable(int property, int table)
{
return false;
}
protected abstract int GetSubclassPropertyTableNumber(int i);
public abstract string FilterFragment(string alias);
protected internal virtual string DiscriminatorAlias
{
get { return Discriminator_Alias; }
}
protected virtual bool IsInverseTable(int j)
{
return false;
}
protected virtual bool IsNullableTable(int j)
{
return false;
}
protected virtual bool IsNullableSubclassTable(int j)
{
return false;
}
protected virtual bool IsInverseSubclassTable(int j)
{
return false;
}
public virtual bool IsSubclassEntityName(string entityName)
{
return entityMetamodel.SubclassEntityNames.Contains(entityName);
}
protected bool[] TableHasColumns
{
get { return tableHasColumns; }
}
protected bool IsInsertCallable(int j)
{
return insertCallable[j];
}
protected bool IsUpdateCallable(int j)
{
return updateCallable[j];
}
protected bool IsDeleteCallable(int j)
{
return deleteCallable[j];
}
protected virtual bool IsSubclassPropertyDeferred(string propertyName, string entityName)
{
return false;
}
protected virtual bool IsSubclassTableSequentialSelect(int table)
{
return false;
}
public virtual bool HasSequentialSelect
{
get { return false; }
}
/// <summary>
/// Decide which tables need to be updated
/// </summary>
/// <param name="dirtyProperties">The indices of all the entity properties considered dirty.</param>
/// <param name="hasDirtyCollection">Whether any collections owned by the entity which were considered dirty. </param>
/// <returns> Array of booleans indicating which table require updating. </returns>
/// <remarks>
/// The return here is an array of boolean values with each index corresponding
/// to a given table in the scope of this persister.
/// </remarks>
protected virtual bool[] GetTableUpdateNeeded(int[] dirtyProperties, bool hasDirtyCollection)
{
if (dirtyProperties == null)
{
return TableHasColumns; //for object that came in via update()
}
else
{
bool[] updateability = PropertyUpdateability;
int[] propertyTableNumbers = PropertyTableNumbers;
bool[] tableUpdateNeeded = new bool[TableSpan];
for (int i = 0; i < dirtyProperties.Length; i++)
{
int property = dirtyProperties[i];
int table = propertyTableNumbers[property];
tableUpdateNeeded[table] = tableUpdateNeeded[table] ||
(GetPropertyColumnSpan(property) > 0 && updateability[property]);
}
if (IsVersioned)
{
// NH-2386 when there isn't dirty-properties and the version is generated even in UPDATE
// we can't execute an UPDATE because there isn't something to UPDATE
if(!entityMetamodel.VersionProperty.IsUpdateGenerated)
{
tableUpdateNeeded[0] = tableUpdateNeeded[0] ||
Versioning.IsVersionIncrementRequired(dirtyProperties, hasDirtyCollection,
PropertyVersionability);
}
}
return tableUpdateNeeded;
}
}
public virtual bool HasRowId
{
get { return rowIdName != null; }
}
protected internal virtual SqlString GenerateLazySelectString()
{
if (!entityMetamodel.HasLazyProperties)
return null;
HashSet<int> tableNumbers = new HashSet<int>();
List<int> columnNumbers = new List<int>();
List<int> formulaNumbers = new List<int>();
for (int i = 0; i < lazyPropertyNames.Length; i++)
{
// all this only really needs to consider properties
// of this class, not its subclasses, but since we
// are reusing code used for sequential selects, we
// use the subclass closure
int propertyNumber = GetSubclassPropertyIndex(lazyPropertyNames[i]);
int tableNumber = GetSubclassPropertyTableNumber(propertyNumber);
tableNumbers.Add(tableNumber);
int[] colNumbers = subclassPropertyColumnNumberClosure[propertyNumber];
for (int j = 0; j < colNumbers.Length; j++)
{
if (colNumbers[j] != -1)
{
columnNumbers.Add(colNumbers[j]);
}
}
int[] formNumbers = subclassPropertyFormulaNumberClosure[propertyNumber];
for (int j = 0; j < formNumbers.Length; j++)
{
if (formNumbers[j] != -1)
{
formulaNumbers.Add(formNumbers[j]);
}
}
}
if (columnNumbers.Count == 0 && formulaNumbers.Count == 0)
{
// only one-to-one is lazy fetched
return null;
}
return RenderSelect(tableNumbers.ToArray(), columnNumbers.ToArray(), formulaNumbers.ToArray());
}
public virtual object InitializeLazyProperty(string fieldName, object entity, ISessionImplementor session)
{
object id = session.GetContextEntityIdentifier(entity);
EntityEntry entry = session.PersistenceContext.GetEntry(entity);
if (entry == null)
throw new HibernateException("entity is not associated with the session: " + id);
if (log.IsDebugEnabled)
{
log.Debug(
string.Format("initializing lazy properties of: {0}, field access: {1}",
MessageHelper.InfoString(this, id, Factory), fieldName));
}
if (HasCache && session.CacheMode.HasFlag(CacheMode.Get))
{
CacheKey cacheKey = session.GenerateCacheKey(id, IdentifierType, EntityName);
object ce = Cache.Get(cacheKey, session.Timestamp);
if (ce != null)
{
CacheEntry cacheEntry = (CacheEntry)CacheEntryStructure.Destructure(ce, factory);
if (!cacheEntry.AreLazyPropertiesUnfetched)
{
//note early exit here:
return InitializeLazyPropertiesFromCache(fieldName, entity, session, entry, cacheEntry);
}
}
}
return InitializeLazyPropertiesFromDatastore(fieldName, entity, session, id, entry);
}
private object InitializeLazyPropertiesFromDatastore(string fieldName, object entity, ISessionImplementor session, object id, EntityEntry entry)
{
if (!HasLazyProperties)
throw new AssertionFailure("no lazy properties");
log.Debug("initializing lazy properties from datastore");
using (new SessionIdLoggingContext(session.SessionId))
try
{
object result = null;
DbCommand ps = null;
DbDataReader rs = null;
try
{
SqlString lazySelect = SQLLazySelectString;
if (lazySelect != null)
{
// null sql means that the only lazy properties
// are shared PK one-to-one associations which are
// handled differently in the Type#nullSafeGet code...
ps = session.Batcher.PrepareCommand(CommandType.Text, lazySelect, IdentifierType.SqlTypes(Factory));
IdentifierType.NullSafeSet(ps, id, 0, session);
rs = session.Batcher.ExecuteReader(ps);
rs.Read();
}
object[] snapshot = entry.LoadedState;
for (int j = 0; j < lazyPropertyNames.Length; j++)
{
object propValue = lazyPropertyTypes[j].NullSafeGet(rs, lazyPropertyColumnAliases[j], session, entity);
if (InitializeLazyProperty(fieldName, entity, session, snapshot, j, propValue))
{
result = propValue;
}
}
}
finally
{
session.Batcher.CloseCommand(ps, rs);
}
log.Debug("done initializing lazy properties");
return result;
}
catch (DbException sqle)
{
var exceptionContext = new AdoExceptionContextInfo
{
SqlException = sqle,
Message =
"could not initialize lazy properties: " + MessageHelper.InfoString(this, id, Factory),
Sql = SQLLazySelectString.ToString(),
EntityName = EntityName,
EntityId = id
};
throw ADOExceptionHelper.Convert(Factory.SQLExceptionConverter, exceptionContext);
}
}
private object InitializeLazyPropertiesFromCache(string fieldName, object entity, ISessionImplementor session, EntityEntry entry, CacheEntry cacheEntry)
{
log.Debug("initializing lazy properties from second-level cache");
object result = null;
object[] disassembledValues = cacheEntry.DisassembledState;
object[] snapshot = entry.LoadedState;
|
[
"\t\t\tfor (int j = 0; j < lazyPropertyNames.Length; j++)"
] | 3,626
|
lcc
|
csharp
| null |
afaa47c3e50b88c722f2e7e2f9bb3b1204f8b80e185f355f
|
|
# -*- coding: utf-8 -*-
#
# @file sge_jobs.py
#
# @remark Copyright 2014 Philippe Elie
# @remark Read the file COPYING
#
# @author Philippe Elie
import sys
import os
sys.path.append(os.path.expanduser('~/phe/common'))
import utils
import json
import hashlib
import time
import MySQLdb
import subprocess
import qstat
import re
import db
import collections
jsub = '/usr/bin/jsub'
class DbJob(db.UserDb):
def __init__(self):
super(DbJob, self).__init__('sge_jobs')
self.Accounting = collections.namedtuple('Accounting',
[
'qname', 'hostname', 'group',
'owner', 'jobname', 'jobnumber',
'account', 'priority', 'qsub_time',
'start_time', 'end_time', 'failed',
'exit_status', 'ru_wallclock', 'ru_utime',
'ru_stime', 'ru_maxrss', 'ru_ixrss',
'ru_ismrss', 'ru_idrss', 'ru_isrsst',
'ru_minflt', 'ru_majflt', 'ru_nswap',
'ru_inblock', 'ru_oublock', 'ru_msdsnd',
'ru_msgrcv', 'ru_nsignals', 'ru_nvcsw',
'ru_nivcsw', 'project', 'departement',
'granted', 'slots', 'task',
'cpu', 'mem', 'io',
'category', 'iow', 'pe_taskid',
'used_maxvmem', 'arid', 'ar_submission_time'
])
self.all_state = set(['pending', 'running', 'success', 'accounting',
'sge_fail', 'fail'])
def get_job_table(self, state_filter, limit = 50, offset = 0):
limit += 1
data = []
state_filter = state_filter.split('|')
if state_filter:
for s in state_filter[:]:
if s != 'all' and s not in self.all_state:
state_filter.remove(s)
state_filter = tuple(state_filter)
if not state_filter:
state_filter = tuple([ 'fail', 'pending', 'running' ])
if 'all' in state_filter:
state_filter = tuple([ x for x in self.all_state ])
with db.connection(self):
fmt_strs = ', '.join(['%s'] * len(state_filter))
q = 'SELECT * FROM job WHERE job_state IN (' + fmt_strs + ') ORDER BY job_id DESC LIMIT %s OFFSET %s'
#print >> sys.stderr, q % (state_filter + (limit, ) + (offset,))
self.cursor.execute(q, state_filter + (limit, ) + (offset,))
data = self.cursor.fetchall()
has_next = True if len(data) == limit else False
return data[:limit-1], has_next
def get_accounting_table(self, limit = 50, offset = 0, job_ids = None):
limit += 1
data = []
if not job_ids:
job_ids = []
if type(job_ids) != type([]):
jobs_ids = [ job_ids ]
with db.connection(self):
q = 'SELECT * from accounting '
if job_ids:
fmt_strs = ', '.join(['%s'] * len(job_ids))
q += 'WHERE job_id in (' + fmt_strs + ') '
q += 'ORDER BY job_id DESC, sge_jobnumber DESC, sge_hostname LIMIT %s OFFSET %s'
self.cursor.execute(q, tuple(job_ids) + (limit, ) + (offset,))
data = self.cursor.fetchall()
has_next = True if len(data) == limit else False
return data[:limit-1], has_next
def pending_request(self, limit = 16, offset = 0):
data = []
with db.connection(self):
self.cursor.execute("SELECT * FROM job WHERE job_state='pending' LIMIT %s OFFSET %s",
[ limit, offset ])
data = self.cursor.fetchall()
return data
def _add_request(self, jobname, run_cmd, args, max_vmem, cpu_bound, force):
job_id = 0
args = json.dumps(args)
h = hashlib.sha1()
h.update(run_cmd + args)
sha1 = h.hexdigest()
q = 'SELECT * FROM job WHERE job_sha1 = %s'
self.cursor.execute(q, [sha1])
num = self.cursor.fetchone()
if num:
job_id = num['job_id']
if num and not num['job_state'] in [ 'pending', 'running', 'accounting' ]:
q = 'SELECT COUNT(*) FROM accounting WHERE job_id=%s'
self.cursor.execute(q, [ job_id ])
count = self.cursor.fetchone()['COUNT(*)']
if count < 3 or force:
q = 'UPDATE job SET job_state="pending" WHERE job_id=%s'
self.cursor.execute(q, [ job_id ] )
else:
print >> sys.stderr, "Job %d reached its max try count, rejected" % job_id, args
elif not num:
job_data = {
'job_sha1' : sha1,
'job_jobname' : jobname,
'job_cpu_bound' : cpu_bound,
'job_submit_time' : int(time.time()),
'job_run_cmd' : run_cmd,
'job_log_dir' : os.path.expanduser('~/log/sge/'),
'job_args' : args,
'job_state' : 'pending',
'job_max_vmem' : max_vmem,
}
add_job_field = '(' + ', '.join(job_data.keys()) + ') '
# Quoting is done by execute so it's secure.
add_job_value_list = [ '%%(%s)s' % k for k in job_data.keys() ]
add_job_value = 'VALUE (' + ', '.join(add_job_value_list) + ')'
add_job = ('INSERT INTO job ' + add_job_field + add_job_value)
self.cursor.execute(add_job, job_data)
self.cursor.execute('SELECT LAST_INSERT_ID()')
job_id = self.cursor.fetchone()['LAST_INSERT_ID()']
return job_id
def add_request(self, jobname, run_cmd, args, max_vmem,
cpu_bound = True, force = False):
job_id = 0
with db.connection(self):
job_id = self._add_request(jobname, run_cmd, args,
max_vmem, cpu_bound, force)
return job_id
def exec_request(self, r):
sge_job_nr = 0
# This is a bit convoluted but we need it to avoid a race condition:
# we set the job as running before starting it so on if this script
# run twice in parallel we don't try to start the same job twice. Then
# when the job really started or fail to start we update its state
# again. As we don't know yet the sge job number, we setup it as zero.
# Note this could be done in pending_request() but I prefer to protect
# it locally.
really_pending = False
with db.connection(self):
q = 'UPDATE job SET job_state=%s, sge_jobnumber=%s WHERE job_id=%s AND job_state="pending"'
if self.cursor.execute(q, [ 'running', 0, r['job_id'] ]):
really_pending = True
if not really_pending:
print >> sys.stderr, "run request for job_id %s cancelled, as it's no longer pending" % r['job_id']
return
cmdline_arg = job_cmdline_arg(r, 'job_run_cmd')
sge_cmdline = sge_cmdline_arg(r)
ls = subprocess.Popen(sge_cmdline + cmdline_arg,
stdin=None, stdout=subprocess.PIPE,
close_fds = True)
text = ls.stdout.read()
ls.wait()
try:
sge_job_nr = int(re.search('Your job (\d+) ', text).group(1))
new_state = 'running'
except:
utils.print_traceback("sge failure to exec job: %d" % r['job_id'], text)
new_state = 'sge_fail'
# Now we can really update the job state, see comment above.
with db.connection(self):
q = 'UPDATE job SET job_state=%s, sge_jobnumber=%s WHERE job_id=%s'
self.cursor.execute(q, [ new_state, sge_job_nr, r['job_id'] ])
def run_batch(self, nr_running, limit = 16):
max_to_run = max(min(limit - nr_running, limit), 0)
if max_to_run:
for r in self.pending_request(max_to_run):
print "starting:", r
self.exec_request(r)
def _exec_check(self, request):
q = 'UPDATE job SET job_state="accounting" WHERE job_id=%s'
self.cursor.execute(q, [ request['job_id'] ])
q = 'INSERT into accounting (job_id, sge_jobnumber) VALUE (%s, %s)'
self.cursor.execute(q, [ request['job_id'], request['sge_jobnumber'] ])
self.conn.commit()
def check_running(self):
sge_running = qstat.running_jobs('')
if sge_running:
with db.connection(self):
q = 'SELECT job_id, sge_jobnumber, job_args FROM job WHERE job_state="running"'
self.cursor.execute(q)
for r in self.cursor.fetchall():
if not r['sge_jobnumber'] in sge_running:
self._exec_check(r)
return len(sge_running)
return None
# Limiting is necessary because a job can be finished but not yet in the
# accouting file (cache effect) so we can easily scan the whole file. To
# avoid that we limit the backward search to two days by default.
# float is allowed so last_time_day = 1.0/24 is an hour.
def search_accounting(self, jobs, last_time_day = 2):
last_time_day = max(1.0/24, last_time_day)
now = int(time.time())
count = 0
nr_job = len(jobs)
for line in utils.readline_backward('/data/project/.system/accounting'):
accounting = self.Accounting(*line.split(':'))
jobnumber = int(accounting.jobnumber)
count += 1
if jobnumber in jobs:
jobs[jobnumber].append(accounting)
nr_job -= 1
if nr_job == 0:
print "breaking after %d line" % count
break
# end_time == 0 occur when sge failed to start a task, don't
# use it to get the elapsed time between end_time and now.
if int(accounting.end_time) and now - int(accounting.end_time) >= last_time_day * 86400:
print "breaking after %d line, TIMEOUT" % count
break
def update_accounting(self):
jobs = {}
with db.connection(self):
q = 'SELECT job_id, sge_jobnumber, sge_hostname FROM accounting WHERE sge_hostname=""'
self.cursor.execute(q)
for data in self.cursor.fetchall():
jobs[data['sge_jobnumber']] = [ data ]
if not len(jobs):
return
self.search_accounting(jobs)
with db.connection(self):
fields = [ 'hostname', 'qsub_time', 'start_time', 'end_time',
'failed', 'exit_status', 'ru_utime', 'ru_stime',
'ru_wallclock', 'used_maxvmem' ]
set_str = []
for f in fields:
set_str.append('sge_%s=%%(%s)s' % (f, f))
set_str = ', '.join(set_str)
for sge_jobnumber in jobs:
sge_jobnumber = int(sge_jobnumber)
# Accounting not found, it'll found in the next run.
if len(jobs[sge_jobnumber]) <= 1:
continue
q = "UPDATE accounting SET " + set_str
# We can't let execute() do the quoting for jobnumber, but
# sge_jobnumber is forced to int so this code is sql injection
# safe.
q += ' WHERE sge_jobnumber=%d' % sge_jobnumber
# Kludge, execute() don't accept a namedtuple nor an
# OrderedDict so convert it explicitly to a dict.
d = jobs[sge_jobnumber][1]._asdict()
d = dict(zip(d.keys(), d.values()))
self.cursor.execute(q, d)
job = jobs[sge_jobnumber][0]
new_state = 'success'
if int(d['failed']) or int(d['exit_status']):
new_state = 'fail'
q = 'UPDATE job SET job_state=%s WHERE job_id=%s'
self.cursor.execute(q, [ new_state, job['job_id'] ])
def quote_arg(arg):
return "'" + arg.replace("'", r"'\''") + "'"
def job_cmdline_arg(request, cmd):
cmd_arg = [ request[cmd] ]
cmd_arg += [ quote_arg(x) for x in json.loads(request['job_args']) ]
return cmd_arg
def sge_cmdline_arg(request):
job_name = request['job_jobname']
log_name = request['job_log_dir'] + job_name + '_' + str(request['job_id'])
sge_cmd_arg = [
jsub,
'-b', 'y',
|
[
" '-l', 'h_vmem=%dM' % request['job_max_vmem'],"
] | 1,274
|
lcc
|
python
| null |
9ac83f0cb7aff9cd52581d7e6d5bce356b4f5531fcc7b4f2
|
|
namespace App.Mvc.Controllers
{
using Contracts;
using Mvc;
using Contracts.Services;
using Models;
using Filters;
using System.Collections.Generic;
using System.Linq;
using System.Web.Mvc;
using System.Web;
[Filters.ExceptionHandler]
public class DelegateController : Controller
{
private readonly ILogProvider log ;
private const string LogName = "Delegate";
private readonly IDelegateService service ;
public DelegateController(ILogProvider log, IDelegateService service )
{
this.service = service;
this.log = log;
}
protected override void OnActionExecuting(ActionExecutingContext filterContext)
{
base.OnActionExecuting(filterContext);
log.LogActionExecuting(LogName,filterContext);
ViewBag.Title = "App";
ViewBag.SectionTitle = "Delegate";
}
// GET: Delegate
[RolesRequired("Admin","ListDelegate")]
public ActionResult Index()
{
var errors = new List<IModelError>();
var models = service.GetAll(x => x != null, errors);
ViewBag.Errors = errors;
ViewBag.ToolButtons = "VED"; // View Edit Delete
ViewBag.Title = "List Delegate" ;
return View(models);
}
// Display a form for viewing Delegate
[RolesRequired("Admin","ViewDelegate")]
public ActionResult View(int id = -1)
{
var errors = new List<IModelError>();
ViewBag.Readonly = true;
ViewBag.ButtonFlag = "";
ViewBag.Title = "View Delegate" ;
var model = GetViewModel(id,errors);
return View("Form",model);
}
// Display a form for editing Delegate
[RolesRequired("Admin","SaveDelegate")]
public ActionResult Edit(int id = -1)
{
var errors = new List<IModelError>();
ViewBag.Readonly = false;
ViewBag.ButtonFlag = "RS"; // Relationship Submit
ViewBag.Title = "Edit Delegate" ;
var model = GetViewModel(id,errors);
return View("Form",model);
}
[RolesRequired("Admin","SaveDelegate")]
[HttpPost]
public ActionResult Edit(DelegateViewModel model)
{
var errors = new List<IModelError>();
service.TrySave(model, errors);
if (errors.Any())
{
this.AddModelErrors(errors);
ViewBag.Readonly = false;
ViewBag.ButtonFlag = "RS"; // Relationship Submit
ViewBag.Title = "Edit Delegate" ;
return View("Form", model);
}
else
{
return RedirectToAction("index", new { updated = model.Id });
}
}
// Display a form for creating Delegate
[RolesRequired("Admin","SaveDelegate")]
public ActionResult Create(int id = -1)
{
var errors = new List<IModelError>();
ViewBag.Readonly = false;
ViewBag.ButtonFlag = "S"; // Submit
ViewBag.Title = "New Delegate" ;
var model = GetViewModel(id,errors);
return View("Form",model);
}
[RolesRequired("Admin","SaveDelegate")]
[HttpPost]
public ActionResult Create(DelegateViewModel model)
{
var errors = new List<IModelError>();
service.TrySave(model, errors);
if (errors.Any())
{
this.AddModelErrors(errors);
ViewBag.Readonly = false;
ViewBag.ButtonFlag = "S"; // Submit
ViewBag.Title = "New Delegate" ;
return View("Form", model);
}
else
{
return RedirectToAction("index", new { creaated = model.Id });
}
}
// Display a form for deleting Delegate
[RolesRequired("Admin","DeleteDelegate")]
public ActionResult Delete(int id = -1)
{
var errors = new List<IModelError>();
ViewBag.Readonly = true;
ViewBag.ShowRelationships = false;
ViewBag.Title = "Delete Delegate" ;
var model = GetViewModel(id,errors);
return View("Form",model);
}
[RolesRequired("Admin", "DeleteDelegate")]
[HttpPost]
public ActionResult Delete(DelegateViewModel model, int _post)
{
var errors = new List<IModelError>();
var result = service.TryDelete(model.Id, errors);
ViewBag.Title = "Delete Delegate";
if (errors.Any())
{
model = GetViewModel(model.Id, errors);
this.AddModelErrors(errors);
ViewBag.Readonly = false;
ViewBag.ButtonFlag = "S"; // Submit
ViewBag.Title = "Delete Delegate";
return View("Form", model);
}
else
{
return RedirectToAction("index", new { deleted = model.Id });
}
}
// list all Delegate entities
[RolesRequired("Admin","ListDelegate")]
public ActionResult List()
{
var errors = new List<IModelError>();
var models = service.GetAll(x =>x != null, errors);
ViewBag.Errors = errors;
ViewBag.ToolButtons = "VP"; // View Pick
ViewBag.PickState = false;
return View("DelegateList", models);
}
// Supports the many to many relationship (DelegateEvent) between Delegate (parent) Event (child)
//[Authorize(Roles = "Admin,ListDelegateEvent")]
[RolesRequired("Admin","ListDelegateEvent")]
public ActionResult GetDelegateEvent(int id, bool selected = false)
{
var models = service.GetAllForDelegateEvent(id);
ViewBag.ToolButtons = "VP"; // View Pick
ViewBag.PickState = selected;
return View("DelegateList", models);
}
// Add a relationship (DelegateEvent) between Delegate (parent) Event (child)
//[Authorize(Roles = "Admin,SaveDelegateEvent")]
[RolesRequired("Admin","SaveDelegateEvent")]
public ActionResult AddDelegateEvent(int id)
{
ViewBag.Readonly = false;
ViewBag.ShowRelationships = false;
ViewBag.ModelId = new int?(id);
return View("Form", new DelegateViewModel());
}
// Add a relationship (DelegateEvent) between Delegate (parent) Event (child)
[HttpPost]
//[Authorize(Roles = "Admin,SaveDelegateEvent")]
[RolesRequired("Admin","SaveDelegateEvent")]
public ActionResult SaveDelegateEvent(DelegateViewModel model, int modelId)
{
var errors = new List<IModelError>();
model.Id = 0 ; // force a new object regardless
var result = service.TrySave(model, errors);
if (result)
{
service.AddEventToDelegateForDelegateEvent(model.Id, modelId);
}
return Json(new
{
Model = model,
Success = result,
Errors = errors
});
}
// remove a relationship (DelegateEvent) between Delegate (parent) Event (child)
[HttpPost]
[RolesRequired("Admin","SaveDelegateEvent")]
public ActionResult UnLinkDelegateEvent(int modelId , int[] items)
{
var result = true;
try
{
items.DefaultIfNull().AsParallel().ToList().ForEach(i => {
service.RemoveEventFromDelegateForDelegateEvent(modelId, i);
});
}
catch
{
items.DefaultIfNull().AsParallel().ToList().ForEach(i => {
service.AddEventToDelegateForDelegateEvent(modelId, i);
});
result = false;
}
return Json(new
{
Success = result
});
}
// add a relationship (DelegateEvent) between existing Delegate (parent) Event (child)
[HttpPost]
[RolesRequired("Admin","SaveDelegateEvent")]
public ActionResult LinkDelegateEvent(int modelId , int[] items)
{
var result = true;
try
{
items.DefaultIfNull().AsParallel().ToList().ForEach(i => {
service.AddEventToDelegateForDelegateEvent(modelId, i);
});
}
catch
{
items.DefaultIfNull().AsParallel().ToList().ForEach(i => {
service.RemoveEventFromDelegateForDelegateEvent(modelId, i);
});
result = false;
}
return Json(new
{
Success = result
});
}
// Supports the many to many relationship (DelegateExamResult) between Delegate (parent) ExamResult (child)
//[Authorize(Roles = "Admin,ListDelegateExamResult")]
[RolesRequired("Admin","ListDelegateExamResult")]
public ActionResult GetDelegateExamResult(int id, bool selected = false)
{
var models = service.GetAllForDelegateExamResult(id);
ViewBag.ToolButtons = "VP"; // View Pick
ViewBag.PickState = selected;
return View("DelegateList", models);
}
// Add a relationship (DelegateExamResult) between Delegate (parent) ExamResult (child)
//[Authorize(Roles = "Admin,SaveDelegateExamResult")]
[RolesRequired("Admin","SaveDelegateExamResult")]
public ActionResult AddDelegateExamResult(int id)
{
ViewBag.Readonly = false;
ViewBag.ShowRelationships = false;
ViewBag.ModelId = new int?(id);
return View("Form", new DelegateViewModel());
}
// Add a relationship (DelegateExamResult) between Delegate (parent) ExamResult (child)
[HttpPost]
//[Authorize(Roles = "Admin,SaveDelegateExamResult")]
[RolesRequired("Admin","SaveDelegateExamResult")]
public ActionResult SaveDelegateExamResult(DelegateViewModel model, int modelId)
{
var errors = new List<IModelError>();
model.Id = 0 ; // force a new object regardless
var result = service.TrySave(model, errors);
if (result)
{
service.AddExamResultToDelegateForDelegateExamResult(model.Id, modelId);
}
return Json(new
{
Model = model,
Success = result,
Errors = errors
});
}
// remove a relationship (DelegateExamResult) between Delegate (parent) ExamResult (child)
[HttpPost]
[RolesRequired("Admin","SaveDelegateExamResult")]
public ActionResult UnLinkDelegateExamResult(int modelId , int[] items)
{
var result = true;
try
{
items.DefaultIfNull().AsParallel().ToList().ForEach(i => {
service.RemoveExamResultFromDelegateForDelegateExamResult(modelId, i);
});
}
catch
{
items.DefaultIfNull().AsParallel().ToList().ForEach(i => {
service.AddExamResultToDelegateForDelegateExamResult(modelId, i);
});
result = false;
}
return Json(new
{
Success = result
});
}
// add a relationship (DelegateExamResult) between existing Delegate (parent) ExamResult (child)
[HttpPost]
[RolesRequired("Admin","SaveDelegateExamResult")]
public ActionResult LinkDelegateExamResult(int modelId , int[] items)
{
var result = true;
try
{
items.DefaultIfNull().AsParallel().ToList().ForEach(i => {
service.AddExamResultToDelegateForDelegateExamResult(modelId, i);
});
}
catch
{
items.DefaultIfNull().AsParallel().ToList().ForEach(i => {
service.RemoveExamResultFromDelegateForDelegateExamResult(modelId, i);
});
result = false;
}
return Json(new
{
Success = result
});
}
// Supports the many to many relationship (EventDelegate) between Delegate (child) Event (parent)
[RolesRequired("Admin","ListEventDelegate")]
public ActionResult GetEventDelegate(int id)
{
var models = service.GetAllForEventDelegate(id);
ViewBag.ToolButtons = "VP"; // View Pick
ViewBag.PickState = true;
return View("DelegateList", models);
}
// Add a relationship (EventDelegate) between Event (parent) Delegate (child)
[RolesRequired("Admin","SaveEventDelegate")]
public ActionResult AddEventDelegate()
{
ViewBag.Readonly = false;
ViewBag.ShowRelationships = false;
|
[
" return View(\"Form\", new DelegateViewModel());"
] | 1,017
|
lcc
|
csharp
| null |
abeb767e6c37cee48470e6764ac480ad487fef3e3fc93bba
|
|
#region License
/*
Copyright 2014 - 2015 Nikita Bernthaler
Report.cs is part of SFXUtility.
SFXUtility is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
SFXUtility is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with SFXUtility. If not, see <http://www.gnu.org/licenses/>.
*/
#endregion License
#region
using System;
using System.Collections.Generic;
using System.Drawing;
using System.Linq;
using System.Text;
using LeagueSharp;
using LeagueSharp.Common;
using SFXUtility.Interfaces;
#endregion
namespace SFXUtility.Classes
{
public class GenerateReport
{
private static readonly List<string> AssemblyBlacklist = new List<string>
{
"mscorlib",
"System",
"Microsoft",
"SMDiagnostics"
};
private static readonly StringBuilder Builder = new StringBuilder();
public static string Generate()
{
Builder.Clear();
GenerateHeader();
GenerateGame();
GenerateOverview();
GenerateHeroes();
GenerateAssemblies();
GenerateFeatures();
GenerateMenu();
Builder.AppendLine("--------------- THE END ---------------");
return Builder.ToString();
}
private static void GenerateOverview()
{
Builder.AppendLine("Overview");
Builder.AppendLine("--------------------------------------");
Builder.Append("Assemblies: ");
var assemblies =
AppDomain.CurrentDomain.GetAssemblies()
.Where(a => !AssemblyBlacklist.Any(b => a.FullName.StartsWith(b)))
.ToList();
var lastAssembly = assemblies.Last();
foreach (var assembly in assemblies)
{
try
{
var info = assembly.FullName.Split(',');
if (info.Length > 0)
{
Builder.Append(info[0]);
Builder.Append(assembly.Equals(lastAssembly) ? Environment.NewLine : ", ");
}
}
catch (Exception ex)
{
Console.WriteLine(ex);
}
}
Builder.Append("Features: ");
var features = Global.Features.Where(f => f.Enabled && f.Initialized && f.Handled && !f.Unloaded).ToList();
var lastFeature = features.Last();
foreach (var feature in Global.Features.Where(f => f.Enabled && f.Initialized && f.Handled && !f.Unloaded))
{
try
{
Builder.Append(GetFeatureName(feature));
Builder.Append(feature.Equals(lastFeature) ? Environment.NewLine : ", ");
}
catch (Exception ex)
{
Console.WriteLine(ex);
}
}
Builder.Append("Heroes: ");
BuildHeroesString(ObjectManager.Get<Obj_AI_Hero>().ToList());
Builder.Append(Environment.NewLine);
Builder.AppendLine();
Builder.AppendLine();
Builder.AppendLine();
}
private static void BuildHeroesString(List<Obj_AI_Hero> heroes)
{
var lastHero = heroes.Last();
foreach (var hero in heroes)
{
try
{
Builder.Append(hero.ChampionName);
Builder.Append(hero.NetworkId.Equals(lastHero.NetworkId) ? string.Empty : ", ");
}
catch (Exception ex)
{
Console.WriteLine(ex);
}
}
}
private static void GenerateHeroes()
{
Builder.AppendLine("Heroes");
Builder.AppendLine("--------------------------------------");
Builder.AppendLine(string.Format("[Self] : {0}", ObjectManager.Player.ChampionName));
Builder.Append("[Ally] : ");
BuildHeroesString(ObjectManager.Get<Obj_AI_Hero>().Where(h => h.IsAlly).ToList());
Builder.Append(Environment.NewLine);
Builder.Append("[Enemy] : ");
BuildHeroesString(ObjectManager.Get<Obj_AI_Hero>().Where(h => h.IsEnemy).ToList());
Builder.Append(Environment.NewLine);
Builder.AppendLine();
Builder.AppendLine();
Builder.AppendLine();
}
private static void GenerateHeader()
{
Builder.AppendLine("Generated Report");
Builder.AppendLine("--------------------------------------");
Builder.AppendLine(string.Format("[Name] : {0}", Global.Name));
Builder.AppendLine(string.Format("[Version] : {0}", Global.SFX.Version));
Builder.AppendLine(string.Format("[Date] : {0}", DateTime.Now.ToString("dd/MM/yyyy")));
Builder.AppendLine();
Builder.AppendLine();
Builder.AppendLine();
}
private static string GetFeatureName(IChild feature)
{
var split = feature.ToString().Split('.');
if (split.Length > 0)
{
return split.Last();
}
return feature.ToString();
}
private static void GenerateFeatures()
{
Builder.AppendLine("Activated Features");
Builder.AppendLine("--------------------------------------");
foreach (var feature in Global.Features.OrderBy(f => !f.Enabled))
{
try
{
Builder.AppendLine();
Builder.AppendLine(GetFeatureName(feature));
Builder.AppendLine("--------------------------------------");
Builder.AppendLine(string.Format("[Name] : {0}", GetFeatureName(feature)));
Builder.AppendLine(string.Format("[Full Name] : {0}", feature));
Builder.AppendLine(string.Format("[Enabled] : {0}", feature.Enabled));
Builder.AppendLine(string.Format("[Handled] : {0}", feature.Handled));
Builder.AppendLine(string.Format("[Initialized] : {0}", feature.Initialized));
Builder.AppendLine(string.Format("[Unloaded] : {0}", feature.Unloaded));
Builder.AppendLine("--------------------------------------");
}
catch (Exception ex)
{
Console.WriteLine(ex);
}
}
Builder.AppendLine();
Builder.AppendLine();
Builder.AppendLine();
}
private static void GenerateGame()
{
Builder.AppendLine("Game Information");
Builder.AppendLine("--------------------------------------");
Builder.AppendLine(string.Format("[Version] : {0}", Game.Version));
Builder.AppendLine(string.Format("[Region] : {0}", Game.Region));
Builder.AppendLine(string.Format("[MapId] : {0}", Game.MapId));
Builder.AppendLine(string.Format("[Type] : {0}", Game.Type));
Builder.AppendLine();
Builder.AppendLine();
Builder.AppendLine();
}
private static void GenerateAssemblies()
{
Builder.AppendLine("Loaded Assemblies");
Builder.AppendLine("--------------------------------------");
var assemblies = AppDomain.CurrentDomain.GetAssemblies();
foreach (var assembly in assemblies.Where(a => !AssemblyBlacklist.Any(b => a.FullName.StartsWith(b))))
{
try
{
Builder.AppendLine();
Builder.AppendLine("--------------------------------------");
var info = assembly.FullName.Split(',');
if (info.Length > 0)
{
Builder.AppendLine(info[0]);
}
if (info.Length > 1)
{
Builder.AppendLine(info[1].Replace(" Version=", string.Empty));
}
Builder.AppendLine("--------------------------------------");
}
catch (Exception ex)
{
Console.WriteLine(ex);
}
}
Builder.AppendLine();
Builder.AppendLine();
Builder.AppendLine();
}
private static void GenerateMenu()
{
Builder.AppendLine("Menu");
Builder.AppendLine("--------------------------------------");
HandleMenu(Global.SFX.Menu);
Builder.AppendLine();
Builder.AppendLine();
Builder.AppendLine();
}
private static void HandleMenu(Menu menu, int indent = 0)
{
var prefix = string.Empty;
if (indent > 0)
{
prefix = new string('-', indent * 3);
}
Builder.AppendLine(string.Format("{0}{1}", prefix, menu.DisplayName));
foreach (var item in menu.Items)
{
Builder.AppendLine(string.Format("{0}{1}: {2}", prefix, item.DisplayName, GetItemValueText(item)));
}
foreach (var child in menu.Children)
{
HandleMenu(child, indent + 1);
}
}
private static string GetItemValueText(MenuItem item)
{
object obj;
try
{
if (item != null)
{
obj = item.GetValue<object>();
if (obj is bool)
{
return string.Format("{0}", (bool) obj);
}
if (obj is Color)
{
var color = (Color) obj;
return string.Format("({0},{1},{2},{3})", color.R, color.G, color.B, color.A);
}
if (obj is Circle)
{
|
[
" var circle = (Circle) obj;"
] | 668
|
lcc
|
csharp
| null |
9db6b4d2545835b0eb906eb67dce37abd8a18390ff3dd44b
|
|
# -*- coding: utf-8 -*-
import re
from module.common.json_layer import json_loads
from module.network.RequestFactory import getURL
from module.plugins.Hoster import Hoster
from module.plugins.Plugin import chunks
from module.plugins.internal.CaptchaService import ReCaptcha
from module.plugins.internal.SimpleHoster import secondsToMidnight
from module.utils import parseFileSize
def checkFile(plugin, urls):
html = getURL(plugin.URLS[1], post={"urls": "\n".join(urls)}, decode=True)
file_info = []
for li in re.finditer(plugin.LINKCHECK_TR, html, re.S):
try:
cols = re.findall(plugin.LINKCHECK_TD, li.group(1))
if cols:
file_info.append((
cols[1] if cols[1] != '--' else cols[0],
parseFileSize(cols[2]) if cols[2] != '--' else 0,
2 if cols[3].startswith('Available') else 1,
cols[0]))
except Exception, e:
continue
return file_info
class FileserveCom(Hoster):
__name__ = "FileserveCom"
__type__ = "hoster"
__version__ = "0.53"
__pattern__ = r'http://(?:www\.)?fileserve\.com/file/(?P<ID>[^/]+)'
__description__ = """Fileserve.com hoster plugin"""
__license__ = "GPLv3"
__authors__ = [("jeix", "jeix@hasnomail.de"),
("mkaay", "mkaay@mkaay.de"),
("Paul King", None),
("zoidberg", "zoidberg@mujmail.cz")]
URLS = ["http://www.fileserve.com/file/", "http://www.fileserve.com/link-checker.php",
"http://www.fileserve.com/checkReCaptcha.php"]
LINKCHECK_TR = r'<tr>\s*(<td>http://www\.fileserve\.com/file/.*?)</tr>'
LINKCHECK_TD = r'<td>(?:<[^>]*>| )*([^<]*)'
CAPTCHA_KEY_PATTERN = r'var reCAPTCHA_publickey=\'(.+?)\''
LONG_WAIT_PATTERN = r'<li class="title">You need to wait (\d+) (\w+) to start another download\.</li>'
LINK_EXPIRED_PATTERN = r'Your download link has expired'
DAILY_LIMIT_PATTERN = r'Your daily download limit has been reached'
NOT_LOGGED_IN_PATTERN = r'<form (name="loginDialogBoxForm"|id="login_form")|<li><a href="/login\.php">Login</a></li>'
def setup(self):
self.resumeDownload = self.multiDL = self.premium
self.file_id = re.match(self.__pattern__, self.pyfile.url).group('ID')
self.url = "%s%s" % (self.URLS[0], self.file_id)
self.logDebug("File ID: %s URL: %s" % (self.file_id, self.url))
def process(self, pyfile):
pyfile.name, pyfile.size, status, self.url = checkFile(self, [self.url])[0]
if status != 2:
self.offline()
self.logDebug("File Name: %s Size: %d" % (pyfile.name, pyfile.size))
if self.premium:
self.handlePremium()
else:
self.handleFree()
def handleFree(self):
self.html = self.load(self.url)
action = self.load(self.url, post={"checkDownload": "check"}, decode=True)
action = json_loads(action)
self.logDebug(action)
if "fail" in action:
if action['fail'] == "timeLimit":
self.html = self.load(self.url, post={"checkDownload": "showError", "errorType": "timeLimit"},
decode=True)
self.doLongWait(re.search(self.LONG_WAIT_PATTERN, self.html))
elif action['fail'] == "parallelDownload":
self.logWarning(_("Parallel download error, now waiting 60s"))
self.retry(wait_time=60, reason=_("parallelDownload"))
else:
self.fail(_("Download check returned: %s") % action['fail'])
elif "success" in action:
if action['success'] == "showCaptcha":
self.doCaptcha()
self.doTimmer()
elif action['success'] == "showTimmer":
self.doTimmer()
else:
self.error(_("Unknown server response"))
# show download link
res = self.load(self.url, post={"downloadLink": "show"}, decode=True)
self.logDebug("Show downloadLink response: %s" % res)
if "fail" in res:
self.error(_("Couldn't retrieve download url"))
# this may either download our file or forward us to an error page
self.download(self.url, post={"download": "normal"})
self.logDebug(self.req.http.lastEffectiveURL)
check = self.checkDownload({"expired": self.LINK_EXPIRED_PATTERN,
"wait" : re.compile(self.LONG_WAIT_PATTERN),
"limit" : self.DAILY_LIMIT_PATTERN})
if check == "expired":
self.logDebug("Download link was expired")
self.retry()
elif check == "wait":
self.doLongWait(self.lastCheck)
elif check == "limit":
self.logWarning(_("Download limited reached for today"))
self.setWait(secondsToMidnight(gmt=2), True)
self.wait()
self.retry()
self.thread.m.reconnecting.wait(3) # Ease issue with later downloads appearing to be in parallel
def doTimmer(self):
res = self.load(self.url, post={"downloadLink": "wait"}, decode=True)
self.logDebug("Wait response: %s" % res[:80])
if "fail" in res:
self.fail(_("Failed getting wait time"))
if self.__name__ == "FilejungleCom":
m = re.search(r'"waitTime":(\d+)', res)
if m is None:
self.fail(_("Cannot get wait time"))
wait_time = int(m.group(1))
else:
wait_time = int(res) + 3
self.setWait(wait_time)
self.wait()
def doCaptcha(self):
captcha_key = re.search(self.CAPTCHA_KEY_PATTERN, self.html).group(1)
recaptcha = ReCaptcha(self)
for _i in xrange(5):
challenge, response = recaptcha.challenge(captcha_key)
res = json_loads(self.load(self.URLS[2],
post={'recaptcha_challenge_field' : challenge,
'recaptcha_response_field' : response,
'recaptcha_shortencode_field': self.file_id}))
if not res['success']:
self.invalidCaptcha()
else:
self.correctCaptcha()
break
else:
self.fail(_("Invalid captcha"))
def doLongWait(self, m):
wait_time = (int(m.group(1)) * {'seconds': 1, 'minutes': 60, 'hours': 3600}[m.group(2)]) if m else 12 * 60
self.setWait(wait_time, True)
self.wait()
self.retry()
def handlePremium(self):
premium_url = None
if self.__name__ == "FileserveCom":
#try api download
res = self.load("http://app.fileserve.com/api/download/premium/",
post={"username": self.user,
"password": self.account.getAccountData(self.user)['password'],
"shorten": self.file_id},
decode=True)
if res:
res = json_loads(res)
if res['error_code'] == "302":
premium_url = res['next']
elif res['error_code'] in ["305", "500"]:
self.tempOffline()
elif res['error_code'] in ["403", "605"]:
self.resetAccount()
|
[
" elif res['error_code'] in [\"606\", \"607\", \"608\"]:"
] | 545
|
lcc
|
python
| null |
58414c770f11b4a32f9d1c67426fd6d8e2028e408a27f131
|
|
#region Copyright & License Information
/*
* Copyright 2007-2019 The OpenRA Developers (see AUTHORS)
* This file is part of OpenRA, which is free software. It is made
* available to you under the terms of the GNU General Public License
* as published by the Free Software Foundation, either version 3 of
* the License, or (at your option) any later version. For more
* information, see COPYING.
*/
#endregion
using System.Collections.Generic;
using OpenRA.Activities;
using OpenRA.Mods.Common.Pathfinder;
using OpenRA.Mods.Common.Traits;
using OpenRA.Primitives;
using OpenRA.Traits;
namespace OpenRA.Mods.Common.Activities
{
public class FindAndDeliverResources : Activity
{
readonly Harvester harv;
readonly HarvesterInfo harvInfo;
readonly Mobile mobile;
readonly LocomotorInfo locomotorInfo;
readonly ResourceClaimLayer claimLayer;
readonly IPathFinder pathFinder;
readonly DomainIndex domainIndex;
readonly Actor deliverActor;
CPos? orderLocation;
bool hasDeliveredLoad;
bool hasHarvestedCell;
bool hasWaited;
public FindAndDeliverResources(Actor self, Actor deliverActor = null)
{
harv = self.Trait<Harvester>();
harvInfo = self.Info.TraitInfo<HarvesterInfo>();
mobile = self.Trait<Mobile>();
locomotorInfo = mobile.Info.LocomotorInfo;
claimLayer = self.World.WorldActor.Trait<ResourceClaimLayer>();
pathFinder = self.World.WorldActor.Trait<IPathFinder>();
domainIndex = self.World.WorldActor.Trait<DomainIndex>();
this.deliverActor = deliverActor;
}
public FindAndDeliverResources(Actor self, CPos orderLocation)
: this(self, null)
{
this.orderLocation = orderLocation;
}
protected override void OnFirstRun(Actor self)
{
// If an explicit "harvest" order is given, direct the harvester to the ordered location instead of
// the previous harvested cell for the initial search.
if (orderLocation != null)
{
harv.LastHarvestedCell = orderLocation;
// If two "harvest" orders are issued consecutively, we deliver the load first if needed.
// We have to make sure the actual "harvest" order is not skipped if a third order is queued,
// so we keep deliveredLoad false.
if (harv.IsFull)
QueueChild(self, new DeliverResources(self), true);
}
// If an explicit "deliver" order is given, the harvester goes immediately to the refinery.
if (deliverActor != null)
{
QueueChild(self, new DeliverResources(self, deliverActor), true);
hasDeliveredLoad = true;
}
}
public override Activity Tick(Actor self)
{
if (ChildActivity != null)
{
ChildActivity = ActivityUtils.RunActivity(self, ChildActivity);
if (ChildActivity != null)
return this;
}
if (IsCanceling)
return NextActivity;
if (NextActivity != null)
{
// Interrupt automated harvesting after clearing the first cell.
if (!harvInfo.QueueFullLoad && (hasHarvestedCell || harv.LastSearchFailed))
return NextActivity;
// Interrupt automated harvesting after first complete harvest cycle.
if (hasDeliveredLoad || harv.IsFull)
return NextActivity;
}
// Are we full or have nothing more to gather? Deliver resources.
if (harv.IsFull || (!harv.IsEmpty && harv.LastSearchFailed))
{
QueueChild(self, new DeliverResources(self), true);
hasDeliveredLoad = true;
return this;
}
// After a failed search, wait and sit still for a bit before searching again.
if (harv.LastSearchFailed && !hasWaited)
{
QueueChild(self, new Wait(harv.Info.WaitDuration), true);
hasWaited = true;
return this;
}
var closestHarvestableCell = ClosestHarvestablePos(self);
// If no resources are found near the current field, search near the refinery instead.
// If that doesn't help, give up for now.
if (!closestHarvestableCell.HasValue)
{
if (harv.LastHarvestedCell != null)
{
harv.LastHarvestedCell = null; // Forces search from backup position.
closestHarvestableCell = ClosestHarvestablePos(self);
harv.LastSearchFailed = !closestHarvestableCell.HasValue;
}
else
harv.LastSearchFailed = true;
}
if (harv.LastSearchFailed)
{
// If no harvestable position could be found and we are at the refinery, get out of the way
// of the refinery entrance.
var lastproc = harv.LastLinkedProc ?? harv.LinkedProc;
if (lastproc != null && !lastproc.Disposed)
{
var deliveryLoc = lastproc.Location + lastproc.Trait<IAcceptResources>().DeliveryOffset;
if (self.Location == deliveryLoc && harv.IsEmpty)
{
// Get out of the way:
var unblockCell = deliveryLoc + harv.Info.UnblockCell;
var moveTo = mobile.NearestMoveableCell(unblockCell, 1, 5);
self.SetTargetLine(Target.FromCell(self.World, moveTo), Color.Green, false);
QueueChild(self, mobile.MoveTo(moveTo, 1), true);
}
}
return this;
}
// Attempt to claim the target cell
if (!claimLayer.TryClaimCell(self, closestHarvestableCell.Value))
{
QueueChild(self, new Wait(25), true);
return this;
}
harv.LastSearchFailed = false;
foreach (var n in self.TraitsImplementing<INotifyHarvesterAction>())
n.MovingToResources(self, closestHarvestableCell.Value, new FindAndDeliverResources(self));
self.SetTargetLine(Target.FromCell(self.World, closestHarvestableCell.Value), Color.Red, false);
QueueChild(self, mobile.MoveTo(closestHarvestableCell.Value, 1), true);
QueueChild(self, new HarvestResource(self));
hasHarvestedCell = true;
return this;
}
/// <summary>
/// Finds the closest harvestable pos between the current position of the harvester
/// and the last order location
/// </summary>
CPos? ClosestHarvestablePos(Actor self)
{
// Harvesters should respect an explicit harvest order instead of harvesting the current cell.
if (orderLocation == null)
{
if (harv.CanHarvestCell(self, self.Location) && claimLayer.CanClaimCell(self, self.Location))
return self.Location;
}
else
{
if (harv.CanHarvestCell(self, orderLocation.Value) && claimLayer.CanClaimCell(self, orderLocation.Value))
return orderLocation;
orderLocation = null;
}
// Determine where to search from and how far to search:
var searchFromLoc = harv.LastHarvestedCell ?? GetSearchFromLocation(self);
var searchRadius = harv.LastHarvestedCell.HasValue ? harvInfo.SearchFromOrderRadius : harvInfo.SearchFromProcRadius;
var searchRadiusSquared = searchRadius * searchRadius;
// Find any harvestable resources:
List<CPos> path;
using (var search = PathSearch.Search(self.World, locomotorInfo, self, true, loc =>
domainIndex.IsPassable(self.Location, loc, locomotorInfo) && harv.CanHarvestCell(self, loc) && claimLayer.CanClaimCell(self, loc))
.WithCustomCost(loc =>
{
if ((loc - searchFromLoc).LengthSquared > searchRadiusSquared)
return int.MaxValue;
return 0;
})
.FromPoint(searchFromLoc)
.FromPoint(self.Location))
path = pathFinder.FindPath(search);
|
[
"\t\t\tif (path.Count > 0)"
] | 747
|
lcc
|
csharp
| null |
f964e1772f2adb4100531633f6287a54c98f80ddd622f9d8
|
|
"""
Builds out filesystem trees/data based on the object tree.
This is the code behind 'cobbler sync'.
Copyright 2006-2009, Red Hat, Inc and Others
Michael DeHaan <michael.dehaan AT gmail>
This program is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program; if not, write to the Free Software
Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA
02110-1301 USA
"""
import os
import os.path
import glob
import shutil
import time
import yaml # Howell-Clark version
import sys
import glob
import traceback
import errno
import utils
from cexceptions import *
import templar
import pxegen
import item_distro
import item_profile
import item_repo
import item_system
from Cheetah.Template import Template
import clogger
from utils import _
import cobbler.module_loader as module_loader
class BootSync:
"""
Handles conversion of internal state to the tftpboot tree layout
"""
def __init__(self,config,verbose=True,dhcp=None,dns=None,logger=None,tftpd=None):
"""
Constructor
"""
self.logger = logger
if logger is None:
self.logger = clogger.Logger()
self.verbose = verbose
self.config = config
self.api = config.api
self.distros = config.distros()
self.profiles = config.profiles()
self.systems = config.systems()
self.settings = config.settings()
self.repos = config.repos()
self.templar = templar.Templar(config, self.logger)
self.pxegen = pxegen.PXEGen(config, self.logger)
self.dns = dns
self.dhcp = dhcp
self.tftpd = tftpd
self.bootloc = utils.tftpboot_location()
self.pxegen.verbose = verbose
self.dns.verbose = verbose
self.dhcp.verbose = verbose
self.pxelinux_dir = os.path.join(self.bootloc, "pxelinux.cfg")
self.grub_dir = os.path.join(self.bootloc, "grub")
self.images_dir = os.path.join(self.bootloc, "images")
self.yaboot_bin_dir = os.path.join(self.bootloc, "ppc")
self.yaboot_cfg_dir = os.path.join(self.bootloc, "etc")
self.s390_dir = os.path.join(self.bootloc, "s390x")
self.rendered_dir = os.path.join(self.settings.webdir, "rendered")
def run(self):
"""
Syncs the current configuration file with the config tree.
Using the Check().run_ functions previously is recommended
"""
if not os.path.exists(self.bootloc):
utils.die(self.logger,"cannot find directory: %s" % self.bootloc)
self.logger.info("running pre-sync triggers")
# run pre-triggers...
utils.run_triggers(self.api, None, "/var/lib/cobbler/triggers/sync/pre/*")
self.distros = self.config.distros()
self.profiles = self.config.profiles()
self.systems = self.config.systems()
self.settings = self.config.settings()
self.repos = self.config.repos()
# execute the core of the sync operation
self.logger.info("cleaning trees")
self.clean_trees()
# Have the tftpd module handle copying bootloaders,
# distros, images, and all_system_files
self.tftpd.sync(self.verbose)
# Copy distros to the webdir
# Adding in the exception handling to not blow up if files have
# been moved (or the path references an NFS directory that's no longer
# mounted)
for d in self.distros:
try:
self.logger.info("copying files for distro: %s" % d.name)
self.pxegen.copy_single_distro_files(d,
self.settings.webdir,True)
self.pxegen.write_templates(d,write_file=True)
except CX, e:
self.logger.error(e.value)
# make the default pxe menu anyway...
self.pxegen.make_pxe_menu()
if self.settings.manage_dhcp:
self.write_dhcp()
if self.settings.manage_dns:
self.logger.info("rendering DNS files")
self.dns.regen_hosts()
self.dns.write_dns_files()
if self.settings.manage_tftpd:
# xinetd.d/tftpd, basically
self.logger.info("rendering TFTPD files")
self.tftpd.write_tftpd_files()
# copy in boot_files
self.tftpd.write_boot_files()
self.logger.info("cleaning link caches")
self.clean_link_cache()
if self.settings.manage_rsync:
self.logger.info("rendering Rsync files")
self.rsync_gen()
# run post-triggers
self.logger.info("running post-sync triggers")
utils.run_triggers(self.api, None, "/var/lib/cobbler/triggers/sync/post/*", logger=self.logger)
utils.run_triggers(self.api, None, "/var/lib/cobbler/triggers/change/*", logger=self.logger)
return True
def make_tftpboot(self):
"""
Make directories for tftpboot images
"""
if not os.path.exists(self.pxelinux_dir):
utils.mkdir(self.pxelinux_dir,logger=self.logger)
if not os.path.exists(self.grub_dir):
utils.mkdir(self.grub_dir,logger=self.logger)
grub_images_link = os.path.join(self.grub_dir, "images")
if not os.path.exists(grub_images_link):
os.symlink("../images", grub_images_link)
if not os.path.exists(self.images_dir):
utils.mkdir(self.images_dir,logger=self.logger)
if not os.path.exists(self.s390_dir):
utils.mkdir(self.s390_dir,logger=self.logger)
if not os.path.exists(self.rendered_dir):
utils.mkdir(self.rendered_dir,logger=self.logger)
if not os.path.exists(self.yaboot_bin_dir):
utils.mkdir(self.yaboot_bin_dir,logger=self.logger)
if not os.path.exists(self.yaboot_cfg_dir):
utils.mkdir(self.yaboot_cfg_dir,logger=self.logger)
def clean_trees(self):
"""
Delete any previously built pxelinux.cfg tree and virt tree info and then create
directories.
Note: for SELinux reasons, some information goes in /tftpboot, some in /var/www/cobbler
and some must be duplicated in both. This is because PXE needs tftp, and auto-kickstart
and Virt operations need http. Only the kernel and initrd images are duplicated, which is
unfortunate, though SELinux won't let me give them two contexts, so symlinks are not
a solution. *Otherwise* duplication is minimal.
"""
# clean out parts of webdir and all of /tftpboot/images and /tftpboot/pxelinux.cfg
for x in os.listdir(self.settings.webdir):
path = os.path.join(self.settings.webdir,x)
if os.path.isfile(path):
if not x.endswith(".py"):
utils.rmfile(path,logger=self.logger)
if os.path.isdir(path):
if not x in ["aux", "web", "webui", "localmirror","repo_mirror","ks_mirror","images","links","pub","repo_profile","repo_system","svc","rendered",".link_cache"] :
# delete directories that shouldn't exist
utils.rmtree(path,logger=self.logger)
if x in ["kickstarts","kickstarts_sys","images","systems","distros","profiles","repo_profile","repo_system","rendered"]:
# clean out directory contents
utils.rmtree_contents(path,logger=self.logger)
#
self.make_tftpboot()
utils.rmtree_contents(self.pxelinux_dir,logger=self.logger)
utils.rmtree_contents(self.grub_dir,logger=self.logger)
utils.rmtree_contents(self.images_dir,logger=self.logger)
utils.rmtree_contents(self.s390_dir,logger=self.logger)
utils.rmtree_contents(self.yaboot_bin_dir,logger=self.logger)
utils.rmtree_contents(self.yaboot_cfg_dir,logger=self.logger)
utils.rmtree_contents(self.rendered_dir,logger=self.logger)
def write_dhcp(self):
self.logger.info("rendering DHCP files")
self.dhcp.write_dhcp_file()
self.dhcp.regen_ethers()
def sync_dhcp(self):
restart_dhcp = str(self.settings.restart_dhcp).lower()
which_dhcp_module = module_loader.get_module_from_file("dhcp","module",just_name=True).strip()
if self.settings.manage_dhcp:
self.write_dhcp()
if which_dhcp_module == "manage_isc":
service_name = utils.dhcp_service_name(self.api)
if restart_dhcp != "0":
rc = utils.subprocess_call(self.logger, "dhcpd -t -q", shell=True)
if rc != 0:
self.logger.error("dhcpd -t failed")
return False
service_restart = "service %s restart" % service_name
rc = utils.subprocess_call(self.logger, service_restart, shell=True)
if rc != 0:
|
[
" self.logger.error(\"%s failed\" % service_name)"
] | 750
|
lcc
|
python
| null |
3cc6d50befc6ded615d39d3b18b791d8f06b80ca3b2642b3
|
|
package org.yamcs.events;
import java.util.Timer;
import java.util.TimerTask;
import java.util.concurrent.atomic.AtomicInteger;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.yamcs.yarch.protobuf.Db.Event;
import org.yamcs.protobuf.Yamcs.Event.EventSeverity;
/**
* Default implementation of an EventProducer that provides shortcut methods for sending message of different severity
* types.
*/
public abstract class AbstractEventProducer implements EventProducer {
private static final Logger log = LoggerFactory.getLogger(EventProducer.class);
protected boolean logAllMessages = true;
String source;
AtomicInteger seqNo = new AtomicInteger();
private boolean repeatedEventReduction; // Whether to check for message repetitions
private Event originalEvent; // Original evt of a series of repeated events
private Event lastRepeat; // Last evt of a series of repeated events
private int repeatCounter = 0;
private long repeatedEventTimeout = 60000; // how long in milliseconds to buffer repeated events
// Flushes the Event Buffer about every minute
private Timer flusher;
@Override
public void setSource(String source) {
this.source = source;
}
@Override
public void setSeqNo(int sn) {
this.seqNo.set(sn);
}
@Override
public synchronized void sendError(String type, String msg) {
sendMessage(EventSeverity.ERROR, type, msg);
}
@Override
public synchronized void sendWarning(String type, String msg) {
sendMessage(EventSeverity.WARNING, type, msg);
}
@Override
public synchronized void sendInfo(String type, String msg) {
sendMessage(EventSeverity.INFO, type, msg);
}
@Override
public synchronized void sendWatch(String type, String msg) {
sendMessage(EventSeverity.WATCH, type, msg);
}
@Override
public synchronized void sendDistress(String type, String msg) {
sendMessage(EventSeverity.DISTRESS, type, msg);
}
@Override
public synchronized void sendCritical(String type, String msg) {
sendMessage(EventSeverity.CRITICAL, type, msg);
}
@Override
public synchronized void sendSevere(String type, String msg) {
sendMessage(EventSeverity.SEVERE, type, msg);
}
@Override
public void sendInfo(String msg) {
sendInfo(getInvokingClass(), msg);
}
@Override
public void sendWatch(String msg) {
sendWatch(getInvokingClass(), msg);
}
@Override
public void sendWarning(String msg) {
sendWarning(getInvokingClass(), msg);
}
@Override
public void sendCritical(String msg) {
sendCritical(getInvokingClass(), msg);
}
@Override
public void sendDistress(String msg) {
sendDistress(getInvokingClass(), msg);
}
@Override
public void sendSevere(String msg) {
sendSevere(getInvokingClass(), msg);
}
private String getInvokingClass() {
Throwable throwable = new Throwable();
String classname = throwable.getStackTrace()[2].getClassName();
int idx = classname.lastIndexOf('.');
return classname.substring(idx + 1);
}
private void sendMessage(EventSeverity severity, String type, String msg) {
if (logAllMessages) {
log.debug("event: {}; {}; {}", severity, type, msg);
}
Event.Builder eventb = newEvent().setSeverity(severity).setMessage(msg);
if (type != null) {
eventb.setType(type);
}
Event e = eventb.build();
if (!repeatedEventReduction) {
sendEvent(e);
} else {
if (originalEvent == null) {
sendEvent(e);
originalEvent = e;
} else if (isRepeat(e)) {
if (flusher == null) { // Prevent buffering repeated events forever
flusher = new Timer(true);
flusher.scheduleAtFixedRate(new TimerTask() {
@Override
public void run() {
flushEventBuffer(false);
}
}, repeatedEventTimeout, repeatedEventTimeout);
}
lastRepeat = e;
repeatCounter++;
} else { // No more repeats
if (flusher != null) {
flusher.cancel();
flusher = null;
}
flushEventBuffer(true);
sendEvent(e);
originalEvent = e;
lastRepeat = null;
}
}
}
/**
* By default event repetitions are checked for possible reduction. Disable if 'realtime' events are required.
*/
@Override
public synchronized void setRepeatedEventReduction(boolean repeatedEventReduction,
long repeatedEventTimeoutMillisec) {
this.repeatedEventReduction = repeatedEventReduction;
this.repeatedEventTimeout = repeatedEventTimeoutMillisec;
if (!repeatedEventReduction) {
if (flusher != null) {
flusher.cancel();
flusher = null;
}
flushEventBuffer(true);
}
}
protected synchronized void flushEventBuffer(boolean startNewSequence) {
if (repeatCounter > 1) {
sendEvent(Event.newBuilder(lastRepeat)
.setMessage("Repeated " + repeatCounter + " times: " + lastRepeat.getMessage())
.build());
} else if (repeatCounter == 1) {
sendEvent(lastRepeat);
lastRepeat = null;
}
if (startNewSequence) {
originalEvent = null;
}
repeatCounter = 0;
}
/**
* Checks whether the specified Event is a repeat of the previous Event.
*/
private boolean isRepeat(Event e) {
if (originalEvent == e) {
return true;
}
return originalEvent.getMessage().equals(e.getMessage())
&& originalEvent.getSeverity().equals(e.getSeverity())
&& originalEvent.getSource().equals(e.getSource())
&& originalEvent.hasType() == e.hasType()
&& (!originalEvent.hasType() || originalEvent.getType().equals(e.getType()));
}
@Override
public Event.Builder newEvent() {
|
[
" long t = getMissionTime();"
] | 575
|
lcc
|
java
| null |
7630f3d4fd6c44654c13b705304c20adea2cf0798b4dc72c
|
|
#region Using
using System;
using System.IO;
#endregion
// This is a port of Dmitry Shkarin's PPMd Variant I Revision 1.
// Ported by Michael Bone (mjbone03@yahoo.com.au).
namespace SharpCompress.Compressors.PPMd.I1
{
/// <summary>
/// The model.
/// </summary>
internal partial class Model
{
public const uint SIGNATURE = 0x84acaf8fU;
public const char VARIANT = 'I';
public const int MAXIMUM_ORDER = 16; // maximum allowed model order
private const byte UPPER_FREQUENCY = 5;
private const byte INTERVAL_BIT_COUNT = 7;
private const byte PERIOD_BIT_COUNT = 7;
private const byte TOTAL_BIT_COUNT = INTERVAL_BIT_COUNT + PERIOD_BIT_COUNT;
private const uint INTERVAL = 1 << INTERVAL_BIT_COUNT;
private const uint BINARY_SCALE = 1 << TOTAL_BIT_COUNT;
private const uint MAXIMUM_FREQUENCY = 124;
private const uint ORDER_BOUND = 9;
private readonly See2Context[,] _see2Contexts;
private readonly See2Context _emptySee2Context;
private PpmContext _maximumContext;
private readonly ushort[,] _binarySummary = new ushort[25, 64]; // binary SEE-contexts
private readonly byte[] _numberStatisticsToBinarySummaryIndex = new byte[256];
private readonly byte[] _probabilities = new byte[260];
private readonly byte[] _characterMask = new byte[256];
private byte _escapeCount;
private int _modelOrder;
private int _orderFall;
private int _initialEscape;
private int _initialRunLength;
private int _runLength;
private byte _previousSuccess;
private byte _numberMasked;
private ModelRestorationMethod _method;
private PpmState _foundState; // found next state transition
private Allocator _allocator;
private Coder _coder;
private PpmContext _minimumContext;
private byte _numberStatistics;
private readonly PpmState[] _decodeStates = new PpmState[256];
private static readonly ushort[] INITIAL_BINARY_ESCAPES =
{
0x3CDD, 0x1F3F, 0x59BF, 0x48F3, 0x64A1, 0x5ABC, 0x6632,
0x6051
};
private static readonly byte[] EXPONENTIAL_ESCAPES = {25, 14, 9, 7, 5, 5, 4, 4, 4, 3, 3, 3, 2, 2, 2, 2};
#region Public Methods
public Model()
{
// Construct the conversion table for number statistics. Initially it will contain the following values.
//
// 0 2 4 4 4 4 4 4 4 4 4 6 6 6 6 6 6 6 6 6 6 6 6 6 6 6 6 6 6 6 6 6
// 6 6 6 6 6 6 6 6 6 6 6 6 6 6 6 6 6 6 6 6 6 6 6 6 6 6 6 6 6 6 6 6
// 6 6 6 6 6 6 6 6 6 6 6 6 6 6 6 6 6 6 6 6 6 6 6 6 6 6 6 6 6 6 6 6
// 6 6 6 6 6 6 6 6 6 6 6 6 6 6 6 6 6 6 6 6 6 6 6 6 6 6 6 6 6 6 6 6
// 6 6 6 6 6 6 6 6 6 6 6 6 6 6 6 6 6 6 6 6 6 6 6 6 6 6 6 6 6 6 6 6
// 6 6 6 6 6 6 6 6 6 6 6 6 6 6 6 6 6 6 6 6 6 6 6 6 6 6 6 6 6 6 6 6
// 6 6 6 6 6 6 6 6 6 6 6 6 6 6 6 6 6 6 6 6 6 6 6 6 6 6 6 6 6 6 6 6
// 6 6 6 6 6 6 6 6 6 6 6 6 6 6 6 6 6 6 6 6 6 6 6 6 6 6 6 6 6 6 6 6
_numberStatisticsToBinarySummaryIndex[0] = 2 * 0;
_numberStatisticsToBinarySummaryIndex[1] = 2 * 1;
for (int index = 2; index < 11; index++)
{
_numberStatisticsToBinarySummaryIndex[index] = 2 * 2;
}
for (int index = 11; index < 256; index++)
{
_numberStatisticsToBinarySummaryIndex[index] = 2 * 3;
}
// Construct the probability table. Initially it will contain the following values (depending on the value of
// the upper frequency).
//
// 00 01 02 03 04 05 06 06 07 07 07 08 08 08 08 09 09 09 09 09 10 10 10 10 10 10 11 11 11 11 11 11
// 11 12 12 12 12 12 12 12 12 13 13 13 13 13 13 13 13 13 14 14 14 14 14 14 14 14 14 14 15 15 15 15
// 15 15 15 15 15 15 15 16 16 16 16 16 16 16 16 16 16 16 16 17 17 17 17 17 17 17 17 17 17 17 17 17
// 18 18 18 18 18 18 18 18 18 18 18 18 18 18 19 19 19 19 19 19 19 19 19 19 19 19 19 19 19 20 20 20
// 20 20 20 20 20 20 20 20 20 20 20 20 20 21 21 21 21 21 21 21 21 21 21 21 21 21 21 21 21 21 22 22
// 22 22 22 22 22 22 22 22 22 22 22 22 22 22 22 22 23 23 23 23 23 23 23 23 23 23 23 23 23 23 23 23
// 23 23 23 24 24 24 24 24 24 24 24 24 24 24 24 24 24 24 24 24 24 24 24 25 25 25 25 25 25 25 25 25
// 25 25 25 25 25 25 25 25 25 25 25 25 26 26 26 26 26 26 26 26 26 26 26 26 26 26 26 26 26 26 26 26
// 26 26 27 27
uint count = 1;
uint step = 1;
uint probability = UPPER_FREQUENCY;
for (int index = 0; index < UPPER_FREQUENCY; index++)
{
_probabilities[index] = (byte)index;
}
for (int index = UPPER_FREQUENCY; index < 260; index++)
{
_probabilities[index] = (byte)probability;
count--;
if (count == 0)
{
step++;
count = step;
probability++;
}
}
// Create the context array.
_see2Contexts = new See2Context[24, 32];
for (int index1 = 0; index1 < 24; index1++)
{
for (int index2 = 0; index2 < 32; index2++)
{
_see2Contexts[index1, index2] = new See2Context();
}
}
// Set the signature (identifying the algorithm).
_emptySee2Context = new See2Context();
_emptySee2Context._summary = (ushort)(SIGNATURE & 0x0000ffff);
_emptySee2Context._shift = (byte)((SIGNATURE >> 16) & 0x000000ff);
_emptySee2Context._count = (byte)(SIGNATURE >> 24);
}
/// <summary>
/// Encode (ie. compress) a given source stream, writing the encoded result to the target stream.
/// </summary>
/// <summary>
/// Encode (ie. compress) a given source stream, writing the encoded result to the target stream.
/// </summary>
/// <param name="target">Stream receiving the compressed output; must not be null.</param>
/// <param name="source">Stream supplying the bytes to compress; must not be null.</param>
/// <param name="properties">PPMd settings (model order, restoration method, allocator).</param>
public void Encode(Stream target, Stream source, PpmdProperties properties)
{
    if (target == null)
        throw new ArgumentNullException(nameof(target));
    if (source == null)
        throw new ArgumentNullException(nameof(source));

    // Initialise the coder and model, then push the whole stream through as one final block.
    EncodeStart(properties);
    EncodeBlock(target, source, true);
}
// Prepares the encoder for a (possibly incremental) compression run: binds the
// allocator from the supplied properties, creates a fresh range coder, and
// (re)starts the PPM model. Returns the coder so callers that drive
// EncodeBlock themselves can access it.
internal Coder EncodeStart(PpmdProperties properties)
{
    _allocator = properties._allocator;
    _coder = new Coder();
    _coder.RangeEncoderInitialize();
    StartModel(properties.ModelOrder, properties.RestorationMethod);
    return _coder;
}
// Encodes bytes read from source into target. When "final" is false the method
// returns as soon as the source is exhausted so more data can be appended in a
// later call; when true, end-of-stream (c == -1) is pushed through the model,
// which escapes down to the root context and flushes the coder.
internal void EncodeBlock(Stream target, Stream source, bool final)
{
    while (true)
    {
        _minimumContext = _maximumContext;
        _numberStatistics = _minimumContext.NumberStatistics;
        int c = source.ReadByte();
        if (c < 0 && !final)
        {
            // Source drained but more blocks may follow: keep model state intact.
            return;
        }
        if (_numberStatistics != 0)
        {
            // Context holds several symbols: full symbol coder.
            EncodeSymbol1(c, _minimumContext);
            _coder.RangeEncodeSymbol();
        }
        else
        {
            // Binary (single-symbol) context: cheaper special-cased coder.
            EncodeBinarySymbol(c, _minimumContext);
            _coder.RangeShiftEncodeSymbol(TOTAL_BIT_COUNT);
        }
        // Escape loop: symbol not found in the current context, walk the suffix
        // chain to shorter contexts until it is coded (or we fall off the root).
        while (_foundState == PpmState.ZERO)
        {
            _coder.RangeEncoderNormalize(target);
            do
            {
                _orderFall++;
                _minimumContext = _minimumContext.Suffix;
                if (_minimumContext == PpmContext.ZERO)
                {
                    // No context left (happens for the EOF pseudo-symbol).
                    goto StopEncoding;
                }
            }
            while (_minimumContext.NumberStatistics == _numberMasked);
            EncodeSymbol2(c, _minimumContext);
            _coder.RangeEncodeSymbol();
        }
        if (_orderFall == 0 && (Pointer)_foundState.Successor >= _allocator._baseUnit)
        {
            // Fast path: the successor context already exists in the heap.
            _maximumContext = _foundState.Successor;
        }
        else
        {
            UpdateModel(_minimumContext);
            if (_escapeCount == 0)
            {
                ClearMask();
            }
        }
        _coder.RangeEncoderNormalize(target);
    }
    StopEncoding:
    _coder.RangeEncoderFlush(target);
}
/// <summary>
/// Decode (ie. decompress) a given source stream, writing the decoded result to the target stream.
/// </summary>
public void Decode(Stream target, Stream source, PpmdProperties properties)
{
    if (target == null)
        throw new ArgumentNullException(nameof(target));
    if (source == null)
        throw new ArgumentNullException(nameof(source));

    // Initialise the decoder, then pump fixed-size chunks until the model
    // reports end of stream (DecodeBlock returns 0).
    DecodeStart(source, properties);
    byte[] buffer = new byte[65536];
    for (; ; )
    {
        int decoded = DecodeBlock(source, buffer, 0, buffer.Length);
        if (decoded == 0)
            break;
        target.Write(buffer, 0, decoded);
    }
}
// Prepares the decoder: binds the allocator, initialises the range decoder over
// the source stream, restarts the model, and seeds the current (minimum)
// context from the freshly built maximum context. Returns the coder for
// callers that drive DecodeBlock incrementally.
internal Coder DecodeStart(Stream source, PpmdProperties properties)
{
    _allocator = properties._allocator;
    _coder = new Coder();
    _coder.RangeDecoderInitialize(source);
    StartModel(properties.ModelOrder, properties.RestorationMethod);
    _minimumContext = _maximumContext;
    _numberStatistics = _minimumContext.NumberStatistics;
    return _coder;
}
// Decodes up to "count" bytes into buffer starting at "offset" and returns the
// number of bytes produced. Returns 0 once the stream has ended (the model
// escaped past the root context on a previous call, leaving _minimumContext at
// ZERO).
internal int DecodeBlock(Stream source, byte[] buffer, int offset, int count)
{
    if (_minimumContext == PpmContext.ZERO)
    {
        // Decoding already finished on an earlier call.
        return 0;
    }
    int total = 0;
    while (total < count)
    {
        if (_numberStatistics != 0)
        {
            // Multi-symbol context: full symbol decoder.
            DecodeSymbol1(_minimumContext);
        }
        else
        {
            // Binary (single-symbol) context.
            DecodeBinarySymbol(_minimumContext);
        }
        _coder.RangeRemoveSubrange();
        // Escape loop: walk the suffix chain until a symbol is decoded,
        // mirroring the encoder's escape handling.
        while (_foundState == PpmState.ZERO)
        {
            _coder.RangeDecoderNormalize(source);
            do
            {
                _orderFall++;
                _minimumContext = _minimumContext.Suffix;
                if (_minimumContext == PpmContext.ZERO)
                {
                    // Escaped past the root: end of the encoded stream.
                    goto StopDecoding;
                }
            }
            while (_minimumContext.NumberStatistics == _numberMasked);
            DecodeSymbol2(_minimumContext);
            _coder.RangeRemoveSubrange();
        }
        buffer[offset] = _foundState.Symbol;
        offset++;
        total++;
        if (_orderFall == 0 && (Pointer)_foundState.Successor >= _allocator._baseUnit)
        {
            // Fast path: successor context already exists.
            _maximumContext = _foundState.Successor;
        }
        else
        {
            UpdateModel(_minimumContext);
            if (_escapeCount == 0)
            {
                ClearMask();
            }
        }
        _minimumContext = _maximumContext;
        _numberStatistics = _minimumContext.NumberStatistics;
        _coder.RangeDecoderNormalize(source);
    }
    StopDecoding:
    return total;
}
#endregion
#region Private Methods
/// <summary>
/// Initialise the model (unless the model order is set to 1 in which case the model should be cleared so that
/// the statistics are carried over, allowing "solid" mode compression).
/// </summary>
/// <summary>
/// Initialise the model (unless the model order is set to 1 in which case the model should be cleared so that
/// the statistics are carried over, allowing "solid" mode compression).
/// </summary>
private void StartModel(int modelOrder, ModelRestorationMethod modelRestorationMethod)
{
    Array.Clear(_characterMask, 0, _characterMask.Length);
    _escapeCount = 1;
    // Compress in "solid" mode if the model order value is set to 1 (this will examine the current PPM context
    // structures to determine the value of orderFall).
    if (modelOrder < 2)
    {
        _orderFall = _modelOrder;
        for (PpmContext context = _maximumContext; context.Suffix != PpmContext.ZERO; context = context.Suffix)
        {
            _orderFall--;
        }
        return;
    }
    _modelOrder = modelOrder;
    _orderFall = modelOrder;
    _method = modelRestorationMethod;
    _allocator.Initialize();
    // Initial run length; capped at 12 regardless of higher model orders.
    _initialRunLength = -((modelOrder < 12) ? modelOrder : 12) - 1;
    _runLength = _initialRunLength;
    // Allocate the context structure (the order-0 root context holding all 256 symbols).
    _maximumContext = _allocator.AllocateContext();
    _maximumContext.Suffix = PpmContext.ZERO;
    _maximumContext.NumberStatistics = 255;
    _maximumContext.SummaryFrequency = (ushort)(_maximumContext.NumberStatistics + 2);
    _maximumContext.Statistics = _allocator.AllocateUnits(256 / 2);
    // allocates enough space for 256 PPM states (each is 6 bytes)
    _previousSuccess = 0;
    // Every byte starts with frequency 1 and no successor context.
    for (int index = 0; index < 256; index++)
    {
        PpmState state = _maximumContext.Statistics[index];
        state.Symbol = (byte)index;
        state.Frequency = 1;
        state.Successor = PpmContext.ZERO;
    }
    // Fill the binary-context escape estimator table, replicating each row of 8
    // initial escape values across the remaining 56 columns.
    uint probability = 0;
    for (int index1 = 0; probability < 25; probability++)
    {
        while (_probabilities[index1] == probability)
        {
            index1++;
        }
        for (int index2 = 0; index2 < 8; index2++)
        {
            _binarySummary[probability, index2] =
                (ushort)(BINARY_SCALE - INITIAL_BINARY_ESCAPES[index2] / (index1 + 1));
        }
        for (int index2 = 8; index2 < 64; index2 += 8)
        {
            for (int index3 = 0; index3 < 8; index3++)
            {
                _binarySummary[probability, index2 + index3] = _binarySummary[probability, index3];
            }
        }
    }
    // Initialise the SEE (secondary escape estimation) contexts.
    probability = 0;
    for (uint index1 = 0; probability < 24; probability++)
    {
        while (_probabilities[index1 + 3] == probability + 3)
        {
            index1++;
        }
        for (int index2 = 0; index2 < 32; index2++)
        {
            _see2Contexts[probability, index2].Initialize(2 * index1 + 5);
        }
    }
}
// Updates the PPM model after a symbol has been coded in a shorter-than-maximum
// context: bumps frequencies along the suffix chain, creates successor contexts
// and appends the new symbol to every context between the maximum and minimum
// context. On allocation failure the model is restored (RestartModel).
// NOTE(review): this follows Shkarin's PPMd var.H update procedure — the exact
// statement order is significant; do not reorder.
private void UpdateModel(PpmContext minimumContext)
{
    PpmState state = PpmState.ZERO;
    PpmContext successor;
    PpmContext currentContext = _maximumContext;
    uint numberStatistics;
    uint ns1;
    uint cf;
    uint sf;
    uint s0;
    // Snapshot the found state: _foundState may be invalidated by allocations below.
    uint foundStateFrequency = _foundState.Frequency;
    byte foundStateSymbol = _foundState.Symbol;
    byte symbol;
    byte flag;
    PpmContext foundStateSuccessor = _foundState.Successor;
    PpmContext context = minimumContext.Suffix;
    // Bump the symbol's frequency in the immediate suffix context (with a
    // move-to-front style swap to keep frequent symbols near the head).
    if ((foundStateFrequency < MAXIMUM_FREQUENCY / 4) && (context != PpmContext.ZERO))
    {
        if (context.NumberStatistics != 0)
        {
            state = context.Statistics;
            if (state.Symbol != foundStateSymbol)
            {
                do
                {
                    symbol = state[1].Symbol;
                    state++;
                }
                while (symbol != foundStateSymbol);
                if (state[0].Frequency >= state[-1].Frequency)
                {
                    Swap(state[0], state[-1]);
                    state--;
                }
            }
            cf = (uint)((state.Frequency < MAXIMUM_FREQUENCY - 9) ? 2 : 0);
            state.Frequency += (byte)cf;
            context.SummaryFrequency += (byte)cf;
        }
        else
        {
            // Binary context: single inline state.
            state = context.FirstState;
            state.Frequency += (byte)((state.Frequency < 32) ? 1 : 0);
        }
    }
    if (_orderFall == 0 && foundStateSuccessor != PpmContext.ZERO)
    {
        // Already at maximum order: just (re)create the successor chain.
        _foundState.Successor = CreateSuccessors(true, state, minimumContext);
        if (_foundState.Successor == PpmContext.ZERO)
        {
            goto RestartModel;
        }
        _maximumContext = _foundState.Successor;
        return;
    }
    // Record the coded symbol in the text area (used for deferred context creation).
    _allocator._text[0] = foundStateSymbol;
    _allocator._text++;
    successor = _allocator._text;
    if (_allocator._text >= _allocator._baseUnit)
    {
        // Text area collided with the unit heap: out of memory.
        goto RestartModel;
    }
    if (foundStateSuccessor != PpmContext.ZERO)
    {
        if (foundStateSuccessor < _allocator._baseUnit)
        {
            // Successor is still a text pointer: materialise the real contexts.
            foundStateSuccessor = CreateSuccessors(false, state, minimumContext);
        }
    }
    else
    {
        foundStateSuccessor = ReduceOrder(state, minimumContext);
    }
    if (foundStateSuccessor == PpmContext.ZERO)
    {
        goto RestartModel;
    }
    if (--_orderFall == 0)
    {
        successor = foundStateSuccessor;
        _allocator._text -= (_maximumContext != minimumContext) ? 1 : 0;
    }
    else if (_method > ModelRestorationMethod.Freeze)
    {
        successor = foundStateSuccessor;
        _allocator._text = _allocator._heap;
        _orderFall = 0;
    }
    numberStatistics = minimumContext.NumberStatistics;
    s0 = minimumContext.SummaryFrequency - numberStatistics - foundStateFrequency;
    flag = (byte)((foundStateSymbol >= 0x40) ? 0x08 : 0x00);
    // Insert the new symbol into every context from the maximum down to (but
    // excluding) the minimum context, growing their statistics arrays as needed.
    for (; currentContext != minimumContext; currentContext = currentContext.Suffix)
    {
        ns1 = currentContext.NumberStatistics;
        if (ns1 != 0)
        {
            if ((ns1 & 1) != 0)
            {
                // Odd count: the statistics array is full, expand it.
                state = _allocator.ExpandUnits(currentContext.Statistics, (ns1 + 1) >> 1);
                if (state == PpmState.ZERO)
                {
                    goto RestartModel;
                }
                currentContext.Statistics = state;
            }
            currentContext.SummaryFrequency += (ushort)((3 * ns1 + 1 < numberStatistics) ? 1 : 0);
        }
        else
        {
            // Context was binary: promote its inline state to a heap array.
            state = _allocator.AllocateUnits(1);
            if (state == PpmState.ZERO)
            {
                goto RestartModel;
            }
            Copy(state, currentContext.FirstState);
            currentContext.Statistics = state;
            if (state.Frequency < MAXIMUM_FREQUENCY / 4 - 1)
            {
                state.Frequency += state.Frequency;
            }
            else
            {
                state.Frequency = (byte)(MAXIMUM_FREQUENCY - 4);
            }
            currentContext.SummaryFrequency =
                (ushort)(state.Frequency + _initialEscape + ((numberStatistics > 2) ? 1 : 0));
        }
        // Estimate the new symbol's frequency in this context from its frequency
        // in the minimum context.
        cf = (uint)(2 * foundStateFrequency * (currentContext.SummaryFrequency + 6));
        sf = s0 + currentContext.SummaryFrequency;
        if (cf < 6 * sf)
        {
            cf = (uint)(1 + ((cf > sf) ? 1 : 0) + ((cf >= 4 * sf) ? 1 : 0));
            currentContext.SummaryFrequency += 4;
        }
        else
        {
            cf = (uint)(4 + ((cf > 9 * sf) ? 1 : 0) + ((cf > 12 * sf) ? 1 : 0) + ((cf > 15 * sf) ? 1 : 0));
            currentContext.SummaryFrequency += (ushort)cf;
        }
        // Append the new state at the end of the (grown) statistics array.
        state = currentContext.Statistics + (++currentContext.NumberStatistics);
        state.Successor = successor;
        state.Symbol = foundStateSymbol;
        state.Frequency = (byte)cf;
        currentContext.Flags |= flag;
    }
    _maximumContext = foundStateSuccessor;
    return;
    RestartModel:
    RestoreModel(currentContext, minimumContext, foundStateSuccessor);
}
private PpmContext CreateSuccessors(bool skip, PpmState state, PpmContext context)
{
PpmContext upBranch = _foundState.Successor;
PpmState[] states = new PpmState[MAXIMUM_ORDER];
uint stateIndex = 0;
byte symbol = _foundState.Symbol;
if (!skip)
{
states[stateIndex++] = _foundState;
if (context.Suffix == PpmContext.ZERO)
{
goto NoLoop;
}
}
bool gotoLoopEntry = false;
|
[
" if (state != PpmState.ZERO)"
] | 2,203
|
lcc
|
csharp
| null |
6cc90781be873e3f526e24b432fa2997332f4aef86d6a696
|
|
#region License
// Copyright (c) 2013, ClearCanvas Inc.
// All rights reserved.
// http://www.clearcanvas.ca
//
// This file is part of the ClearCanvas RIS/PACS open source project.
//
// The ClearCanvas RIS/PACS open source project is free software: you can
// redistribute it and/or modify it under the terms of the GNU General Public
// License as published by the Free Software Foundation, either version 3 of the
// License, or (at your option) any later version.
//
// The ClearCanvas RIS/PACS open source project is distributed in the hope that it
// will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General
// Public License for more details.
//
// You should have received a copy of the GNU General Public License along with
// the ClearCanvas RIS/PACS open source project. If not, see
// <http://www.gnu.org/licenses/>.
#endregion
using System;
using System.Collections.Generic;
using ClearCanvas.Dicom.IO;
namespace ClearCanvas.Dicom
{
/// <summary>
/// Class encapsulating a DICOM Value Representation.
/// </summary>
public class DicomVr
{
// Factory delegate each VR singleton uses to create its matching DicomAttribute
// subclass (bb == null creates an empty attribute, otherwise it is built from the buffer).
private delegate DicomAttribute CreateAttribute(DicomTag tag, ByteBuffer bb);
#region Private Members
private readonly String _name;                    // Two-letter VR code, e.g. "AE".
private readonly bool _isText = false;            // True for text-based VRs.
private readonly bool _specificCharSet = false;   // Presumably: value is affected by Specific Character Set — confirm against usage.
private readonly bool _isMultiValue = false;      // True when multiple (delimited) values are allowed.
private readonly uint _maxLength = 0;             // Maximum value length (units unclear from here — likely bytes; confirm).
private readonly bool _is16BitLength = false;     // Presumably: explicit-VR encoding uses a 16-bit length field — confirm.
private readonly char _padChar = ' ';             // Character used to pad values to even length.
private readonly int _unitSize = 1;               // Size in bytes of one element of the value.
private readonly CreateAttribute _createDelegate; // Factory for the VR's DicomAttribute type.
// Lookup table of all known VRs keyed by their two-letter name; populated in the static constructor.
private static readonly IDictionary<String,DicomVr> _vrs = new Dictionary<String,DicomVr>();
#endregion
#region Public Static Members
/// <summary>
/// Static constructor.
/// </summary>
static DicomVr()
{
    // Register every known VR singleton in the lookup table, keyed by its
    // two-letter name. Static field initializers run before this body, so all
    // the VR singletons below are already constructed.
    DicomVr[] knownVrs =
    {
        AEvr, ASvr, ATvr, CSvr, DAvr, DSvr, DTvr, FLvr, FDvr,
        ISvr, LOvr, LTvr, OBvr, ODvr, OFvr, OWvr, PNvr, SHvr,
        SLvr, SQvr, SSvr, STvr, TMvr, UIvr, ULvr, USvr, UTvr
    };
    foreach (DicomVr vr in knownVrs)
        _vrs.Add(vr.Name, vr);
}
// NOTE(review): the DicomVr constructor arguments appear to follow the private
// field declaration order: (name, isText, specificCharSet, isMultiValue,
// maxLength, is16BitLength, padChar, unitSize, createDelegate) — confirm
// against the constructor definition (not visible in this chunk).
/// <summary>
/// The Application Entity VR.
/// </summary>
public static readonly DicomVr AEvr = new DicomVr("AE", true, false, true, 16, true, ' ', 1,
delegate(DicomTag tag, ByteBuffer bb)
{
if (bb == null) return new DicomAttributeAE(tag);
return new DicomAttributeAE(tag, bb);
} );
/// <summary>
/// The Age String VR.
/// </summary>
public static readonly DicomVr ASvr = new DicomVr("AS", true, false, true, 4, true, ' ', 1,
delegate(DicomTag tag, ByteBuffer bb)
{
if (bb == null) return new DicomAttributeAS(tag);
return new DicomAttributeAS(tag, bb);
});
/// <summary>
/// The Attribute Tag VR.
/// </summary>
public static readonly DicomVr ATvr = new DicomVr("AT", false, false, true, 4, true, '\0', 4,
delegate(DicomTag tag, ByteBuffer bb)
{
if (bb == null) return new DicomAttributeAT(tag);
return new DicomAttributeAT(tag, bb);
});
/// <summary>
/// The Code String VR.
/// </summary>
public static readonly DicomVr CSvr = new DicomVr("CS", true, false, true, 16, true, ' ', 1,
delegate(DicomTag tag, ByteBuffer bb)
{
if (bb == null) return new DicomAttributeCS(tag);
return new DicomAttributeCS(tag, bb);
});
/// <summary>
/// The Date VR.
/// </summary>
public static readonly DicomVr DAvr = new DicomVr("DA", true, false, true, 8, true, ' ', 1,
delegate(DicomTag tag, ByteBuffer bb)
{
if (bb == null) return new DicomAttributeDA(tag);
return new DicomAttributeDA(tag, bb);
});
/// <summary>
/// The Decimal String VR.
/// </summary>
public static readonly DicomVr DSvr = new DicomVr("DS", true, false, true, 16, true, ' ', 1,
delegate(DicomTag tag, ByteBuffer bb)
{
if (bb == null) return new DicomAttributeDS(tag);
return new DicomAttributeDS(tag, bb);
});
/// <summary>
/// The Date Time VR.
/// </summary>
public static readonly DicomVr DTvr = new DicomVr("DT", true, false, true, 26, true, ' ', 1,
delegate(DicomTag tag, ByteBuffer bb)
{
if (bb == null) return new DicomAttributeDT(tag);
return new DicomAttributeDT(tag, bb);
});
/// <summary>
/// The Floating Point Single VR.
/// </summary>
public static readonly DicomVr FLvr = new DicomVr("FL", false, false, true, 4, true, '\0', 4,
delegate(DicomTag tag, ByteBuffer bb)
{
if (bb == null) return new DicomAttributeFL(tag);
return new DicomAttributeFL(tag, bb);
});
/// <summary>
/// The Floating Point Double VR.
/// </summary>
public static readonly DicomVr FDvr = new DicomVr("FD", false, false, true, 8, true, '\0', 8,
delegate(DicomTag tag, ByteBuffer bb)
{
if (bb == null) return new DicomAttributeFD(tag);
return new DicomAttributeFD(tag, bb);
});
/// <summary>
/// The Integer String VR.
/// </summary>
public static readonly DicomVr ISvr = new DicomVr("IS", true, false, true, 12, true, ' ', 1,
delegate(DicomTag tag, ByteBuffer bb)
{
if (bb == null) return new DicomAttributeIS(tag);
return new DicomAttributeIS(tag, bb);
});
/// <summary>
/// The Long String VR.
/// </summary>
public static readonly DicomVr LOvr = new DicomVr("LO", true, true, true, 64, true, ' ', 1,
delegate(DicomTag tag, ByteBuffer bb)
{
if (bb == null) return new DicomAttributeLO(tag);
return new DicomAttributeLO(tag, bb);
});
/// <summary>
/// The Long Text VR.
/// </summary>
public static readonly DicomVr LTvr = new DicomVr("LT", true, true, false, 10240, true, ' ', 1,
delegate(DicomTag tag, ByteBuffer bb)
{
if (bb == null) return new DicomAttributeLT(tag);
return new DicomAttributeLT(tag, bb);
});
/// <summary>
/// The Other Byte String VR.
/// </summary>
public static readonly DicomVr OBvr = new DicomVr("OB", false, false, false, 1, false, '\0', 1,
delegate(DicomTag tag, ByteBuffer bb)
{
if (bb == null) return new DicomAttributeOB(tag);
return new DicomAttributeOB(tag, bb);
});
/// <summary>
/// The Other Double String VR.
/// </summary>
public static readonly DicomVr ODvr = new DicomVr("OD", false, false, false, 8, false, '\0', 8,
delegate(DicomTag tag, ByteBuffer bb)
{
if (bb == null) return new DicomAttributeOD(tag);
return new DicomAttributeOD(tag, bb);
});
/// <summary>
/// The Other Float String VR.
/// </summary>
public static readonly DicomVr OFvr = new DicomVr("OF", false, false, false, 4, false, '\0', 4,
delegate(DicomTag tag, ByteBuffer bb)
{
if (bb == null) return new DicomAttributeOF(tag);
return new DicomAttributeOF(tag, bb);
});
/// <summary>
/// The Other Word String VR.
/// </summary>
public static readonly DicomVr OWvr = new DicomVr("OW", false, false, false, 2, false, '\0', 2,
delegate(DicomTag tag, ByteBuffer bb)
{
if (bb == null) return new DicomAttributeOW(tag);
return new DicomAttributeOW(tag, bb);
});
/// <summary>
/// The Person Name VR.
/// </summary>
public static readonly DicomVr PNvr = new DicomVr("PN", true, true, true, 64 * 5, true, ' ', 1,
delegate(DicomTag tag, ByteBuffer bb)
{
if (bb == null) return new DicomAttributePN(tag);
return new DicomAttributePN(tag, bb);
});
/// <summary>
/// The Short String VR.
/// </summary>
public static readonly DicomVr SHvr = new DicomVr("SH", true, true, true, 16, true, ' ', 1,
delegate(DicomTag tag, ByteBuffer bb)
{
if (bb == null) return new DicomAttributeSH(tag);
return new DicomAttributeSH(tag, bb);
});
/// <summary>
/// The Signed Long VR.
/// </summary>
public static readonly DicomVr SLvr = new DicomVr("SL", false, false, true, 4, true, '\0', 4,
delegate(DicomTag tag, ByteBuffer bb)
{
if (bb == null) return new DicomAttributeSL(tag);
|
[
" return new DicomAttributeSL(tag, bb);"
] | 1,113
|
lcc
|
csharp
| null |
b6d055c89ca6a6c261a27807c672673ece45777ad9806d2e
|
|
/*
* This file is part of ChronoJump
*
* ChronoJump is free software; you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation; either version 2 of the License, or
* (at your option) any later version.
*
* ChronoJump is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program; if not, write to the Free Software
* Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
*
* Copyright (C) 2004-2021 Xavier de Blas <xaviblas@gmail.com>
*/
using System;
using Gdk;
using Gtk;
using Glade;
using System.Text; //StringBuilder
using System.Collections; //ArrayList
using System.Collections.Generic; //List<>
using System.IO;
using System.Threading;
using Mono.Unix;
//--------------------------------------------------------
//---------------- EDIT EVENT WIDGET ---------------------
//--------------------------------------------------------
public class EditEventWindow
{
// Widgets bound from the Glade UI definition (see the commented-out
// edit_event.glade loader in the constructor below).
[Widget] protected Gtk.Window edit_event;
protected bool weightPercentPreferred;
[Widget] protected Gtk.Button button_accept;
[Widget] protected Gtk.Label label_header;
[Widget] protected Gtk.Label label_type_title;
[Widget] protected Gtk.Label label_type_value;
[Widget] protected Gtk.Label label_run_start_title;
[Widget] protected Gtk.Label label_run_start_value;
[Widget] protected Gtk.Label label_event_id_value;
[Widget] protected Gtk.Label label_tv_title;
[Widget] protected Gtk.Entry entry_tv_value;
[Widget] protected Gtk.Label label_tv_units;
[Widget] protected Gtk.Label label_tc_title;
[Widget] protected Gtk.Entry entry_tc_value;
[Widget] protected Gtk.Label label_tc_units;
[Widget] protected Gtk.Label label_fall_title;
[Widget] protected Gtk.Entry entry_fall_value;
[Widget] protected Gtk.Label label_fall_units;
[Widget] protected Gtk.Label label_distance_title;
[Widget] protected Gtk.Entry entry_distance_value;
[Widget] protected Gtk.Label label_distance_units;
[Widget] protected Gtk.Label label_time_title;
[Widget] protected Gtk.Entry entry_time_value;
[Widget] protected Gtk.Label label_time_units;
[Widget] protected Gtk.Label label_speed_title;
[Widget] protected Gtk.Label label_speed_value;
[Widget] protected Gtk.Label label_speed_units;
[Widget] protected Gtk.Label label_weight_title;
[Widget] protected Gtk.Entry entry_weight_value;
[Widget] protected Gtk.Label label_weight_units;
[Widget] protected Gtk.Label label_limited_title;
[Widget] protected Gtk.Label label_limited_value;
//[Widget] protected Gtk.Label label_angle_title; //kneeAngle
//[Widget] protected Gtk.Entry entry_angle_value; //kneeAngle
//[Widget] protected Gtk.Label label_angle_units; //kneeAngle
[Widget] protected Gtk.Label label_simulated;
[Widget] protected Gtk.Box hbox_combo_eventType;
[Widget] protected Gtk.ComboBox combo_eventType;
[Widget] protected Gtk.Box hbox_combo_person;
[Widget] protected Gtk.ComboBox combo_persons;
[Widget] protected Gtk.Label label_mistakes;
[Widget] protected Gtk.SpinButton spin_mistakes;
[Widget] protected Gtk.Label label_video_yes_no;
[Widget] protected Gtk.Button button_video_watch;
[Widget] protected Gtk.Image image_video_watch;
[Widget] protected Gtk.Button button_video_url;
protected string videoFileName = "";
[Widget] protected Gtk.Entry entry_description;
//[Widget] protected Gtk.TextView textview_description;
// Singleton instance created by Show() and cleared when the window is closed.
static EditEventWindow EditEventWindowBox;
protected Gtk.Window parent;
protected int pDN;
protected bool metersSecondsPreferred;
protected string type;
// Last known-good numeric text for each entry; if the user types something that
// is not a number, the previous value here can be recovered.
protected string entryTv;
protected string entryTc = "0";
protected string entryFall = "0";
protected string entryDistance = "0";
protected string entryTime = "0";
protected string entrySpeed = "0";
protected string entryWeight = "0"; //used to record the % for old person if we change it
//protected string entryAngle = "0"; //kneeAngle
// Flags toggled by initializeValues() overrides: which rows the subclass shows.
protected Constants.TestTypes typeOfTest;
protected bool showType;
protected bool showRunStart;
protected bool showTv;
protected bool showTc;
protected bool showFall;
protected bool showDistance;
protected bool distanceCanBeDecimal;
protected bool showTime;
protected bool showSpeed;
protected bool showWeight;
protected bool showLimited;
//protected bool showAngle; //kneeAngle
protected bool showMistakes;
protected string eventBigTypeString = "a test";
protected bool headerShowDecimal = true;
protected int oldPersonID; //used to record the % for old person if we change it
//for inheritance
// Parameterless constructor reserved for subclasses.
protected EditEventWindow () {
}
EditEventWindow (Gtk.Window parent) {
    // Direct Glade loading is currently disabled here; kept for reference.
    //Glade.XML gladeXML;
    //gladeXML = Glade.XML.FromAssembly (Util.GetGladePath() + "edit_event.glade", "edit_event", null);
    //gladeXML.Autoconnect(this);
    this.parent = parent;
}
// Creates (or reuses) the singleton edit window, fills it from the given event
// and shows it. pDN is the decimal count used when trimming displayed numbers.
static public EditEventWindow Show (Gtk.Window parent, Event myEvent, int pDN)
//run win have also metersSecondsPreferred
{
    if (EditEventWindowBox == null) {
        EditEventWindowBox = new EditEventWindow (parent);
    }
    EditEventWindowBox.pDN = pDN;
    EditEventWindowBox.initializeValues();
    EditEventWindowBox.fillDialog (myEvent);
    EditEventWindowBox.edit_event.Show ();
    return EditEventWindowBox;
}
// Sets the defaults for a jump edit window; subclasses override this to toggle
// which rows (TV, TC, fall, distance, ...) apply to their test type.
protected virtual void initializeValues () {
    typeOfTest = Constants.TestTypes.JUMP;
    showType = true;
    showRunStart = false;
    showTv = true;
    showTc = true;
    showFall = true;
    showDistance = true;
    distanceCanBeDecimal = true;
    showTime = true;
    showSpeed = true;
    showWeight = true;
    showLimited = true;
    //showAngle = true; //kneeAngle
    showMistakes = false;
    label_simulated.Hide();
}
// Populates the dialog from the event being edited: shows/hides each row
// according to the show* flags, fills the event-type and person combos, and
// enables the video buttons when a recording exists on disk.
protected void fillDialog (Event myEvent)
{
    fillWindowTitleAndLabelHeader();
    image_video_watch.Pixbuf = new Pixbuf (null, Util.GetImagePath(false) + "video_play.png");
    string id = myEvent.UniqueID.ToString();
    if(myEvent.Simulated == Constants.Simulated)
        label_simulated.Show();
    label_event_id_value.Text = id;
    label_event_id_value.UseMarkup = true;
    // Each optional row: fill it if shown, otherwise hide its three widgets.
    if(showTv)
        fillTv(myEvent);
    else {
        label_tv_title.Hide();
        entry_tv_value.Hide();
        label_tv_units.Hide();
    }
    if(showTc)
        fillTc(myEvent);
    else {
        label_tc_title.Hide();
        entry_tc_value.Hide();
        label_tc_units.Hide();
    }
    if(showFall)
        fillFall(myEvent);
    else {
        label_fall_title.Hide();
        entry_fall_value.Hide();
        label_fall_units.Hide();
    }
    if(showDistance)
        fillDistance(myEvent);
    else {
        label_distance_title.Hide();
        entry_distance_value.Hide();
        label_distance_units.Hide();
    }
    if(showTime)
        fillTime(myEvent);
    else {
        label_time_title.Hide();
        entry_time_value.Hide();
        label_time_units.Hide();
    }
    if(showSpeed)
        fillSpeed(myEvent);
    else {
        label_speed_title.Hide();
        label_speed_value.Hide();
        label_speed_units.Hide();
    }
    if(showWeight)
        fillWeight(myEvent);
    else {
        label_weight_title.Hide();
        entry_weight_value.Hide();
        label_weight_units.Hide();
    }
    if(showLimited)
        fillLimited(myEvent);
    else {
        label_limited_title.Hide();
        label_limited_value.Hide();
    }
    /*
    if(showAngle)
        fillAngle(myEvent);
    else {
        label_angle_title.Hide();
        entry_angle_value.Hide();
        label_angle_units.Hide();
    }
    */
    if(! showMistakes) {
        label_mistakes.Hide();
        spin_mistakes.Hide();
    }
    //also remove new line for old descriptions that used a textview
    string temp = Util.RemoveTildeAndColonAndDot(myEvent.Description);
    entry_description.Text = Util.RemoveNewLine(temp, true);
    createComboEventType(myEvent);
    if(! showType) {
        label_type_title.Hide();
        combo_eventType.Hide();
    }
    if(showRunStart)
        fillRunStart(myEvent);
    else {
        label_run_start_title.Hide();
        label_run_start_value.Hide();
    }
    // Person combo: every person of the current session, with the event's
    // person preselected.
    ArrayList persons = SqlitePersonSession.SelectCurrentSessionPersons(
        myEvent.SessionID,
        false); //means: do not returnPersonAndPSlist
    string [] personsStrings = new String[persons.Count];
    int i=0;
    foreach (Person person in persons)
        personsStrings[i++] = person.IDAndName(":");
    combo_persons = ComboBox.NewText();
    UtilGtk.ComboUpdate(combo_persons, personsStrings, "");
    combo_persons.Active = UtilGtk.ComboMakeActive(personsStrings, myEvent.PersonID + ":" + myEvent.PersonName);
    oldPersonID = myEvent.PersonID;
    hbox_combo_person.PackStart(combo_persons, true, true, 0);
    hbox_combo_person.ShowAll();
    //show video if available
    videoFileName = Util.GetVideoFileName(myEvent.SessionID, typeOfTest, myEvent.UniqueID);
    if(File.Exists(videoFileName)) {
        label_video_yes_no.Text = Catalog.GetString("Yes");
        button_video_watch.Sensitive = true;
        button_video_url.Sensitive = true;
    } else {
        label_video_yes_no.Text = Catalog.GetString("No");
        button_video_watch.Sensitive = false;
        button_video_url.Sensitive = false;
    }
}
// Plays the recorded video of this test with the ffmpeg-based player, if the
// file still exists on disk (the button should only be sensitive in that case,
// but the existence is re-checked here).
private void on_button_video_watch_clicked (object o, EventArgs args)
{
    if(File.Exists(videoFileName))
    {
        LogB.Information("Exists and clicked " + videoFileName);
        /*
         * using mplayer
         *
         * Webcam webcam = new WebcamMplayer ();
         * Webcam.Result result = webcam.PlayFile(videoFileName);
         */
        //using ffmpeg
        Webcam webcam = new WebcamFfmpeg (Webcam.Action.PLAYFILE, UtilAll.GetOSEnum(), "", "", "", "");
        //Webcam.Result result = webcam.PlayFile (videoFileName);
        webcam.PlayFile (videoFileName);
    }
}
// Shows an informational dialog with the path of the recorded video file.
private void on_button_video_url_clicked (object o, EventArgs args) {
    string message = Catalog.GetString("Video available here:") + "\n\n" + videoFileName;
    new DialogMessage(Constants.MessageTypes.INFO, message);
}
// Sets the window title and header label from eventBigTypeString; when
// headerShowDecimal is set, also tells the user which decimal separator the
// current locale expects.
protected void fillWindowTitleAndLabelHeader() {
    edit_event.Title = string.Format(Catalog.GetString("Edit {0}"), eventBigTypeString);
    // Fix: previously a fresh NumberFormatInfo was allocated and immediately
    // discarded by the CurrentInfo assignment; read the locale info directly.
    System.Globalization.NumberFormatInfo localeInfo = System.Globalization.NumberFormatInfo.CurrentInfo;
    label_header.Text = string.Format(Catalog.GetString("Use this window to edit a {0}."), eventBigTypeString);
    if(headerShowDecimal)
        label_header.Text += string.Format(Catalog.GetString("\n(decimal separator: '{0}')"), localeInfo.NumberDecimalSeparator);
}
// Builds the event-type combo from the subclass-supplied findTypes(), selects
// the event's current type, packs it into the dialog and hooks the
// subclass-specific change signals.
protected void createComboEventType(Event myEvent)
{
    combo_eventType = ComboBox.NewText ();
    string [] myTypes = findTypes(myEvent);
    UtilGtk.ComboUpdate(combo_eventType, myTypes, "");
    combo_eventType.Active = UtilGtk.ComboMakeActive(myTypes, myEvent.Type);
    hbox_combo_eventType.PackStart(combo_eventType, true, true, 0);
    hbox_combo_eventType.ShowAll();
    createSignal();
}
// Base class knows no event types; subclasses override this to supply the
// choices shown in the event-type combo.
protected virtual string [] findTypes(Event myEvent) {
    return new string[0];
}
// Hook point for subclasses to attach handlers to the event-type combo.
protected virtual void createSignal() {
    /*
     * for jumps to show or hide the kg
     * for runs to put distance depending on it it's fixed or not
     */
}
// Copies the jump's flight time (TV) into its entry, keeping all decimals so
// derived formulas are not fed a trimmed value.
protected virtual void fillTv(Event myEvent) {
    Jump myJump = (Jump) myEvent;
    entryTv = myJump.Tv.ToString();
    //show all the decimals for not triming there in edit window using
    //(and having different values in formulae like GetHeightInCm ...)
    //entry_tv_value.Text = Util.TrimDecimals(entryTv, pDN);
    entry_tv_value.Text = entryTv;
}
// No-ops here; overridden by subclasses whose test type has these values.
protected virtual void fillTc (Event myEvent) {
}
protected virtual void fillFall(Event myEvent) {
}
protected virtual void fillRunStart(Event myEvent) {
}
// The fill* methods below are no-ops in this base class; the commented code
// documents the implementation expected from subclasses that show the row.
protected virtual void fillDistance(Event myEvent) {
    /*
    Run myRun = (Run) myEvent;
    entryDistance = myRun.Distance.ToString();
    entry_distance_value.Text = Util.TrimDecimals(entryDistance, pDN);
    */
}
protected virtual void fillTime(Event myEvent) {
    /*
    Run myRun = (Run) myEvent;
    entryTime = myRun.Time.ToString();
    entry_time_value.Text = Util.TrimDecimals(entryTime, pDN);
    */
}
protected virtual void fillSpeed(Event myEvent) {
    /*
    Run myRun = (Run) myEvent;
    label_speed_value.Text = Util.TrimDecimals(myRun.Speed.ToString(), pDN);
    */
}
protected virtual void fillWeight(Event myEvent) {
    /*
    Jump myJump = (Jump) myEvent;
    if(myJump.TypeHasWeight) {
        entryWeight = myJump.Weight.ToString();
        entry_weight_value.Text = entryWeight;
        entry_weight_value.Sensitive = true;
    } else {
        entry_weight_value.Sensitive = false;
    }
    */
}
protected virtual void fillLimited(Event myEvent) {
    /*
    JumpRj myJumpRj = (JumpRj) myEvent;
    label_limited_value.Text = Util.GetLimitedRounded(myJumpRj.Limited, pDN);
    */
}
//protected virtual void fillAngle(Event myEvent) {
//}
// No-op hooks; presumably overridden by the single-leg jump edit window — confirm.
protected virtual void on_radio_single_leg_1_toggled(object o, EventArgs args) {
}
protected virtual void on_radio_single_leg_2_toggled(object o, EventArgs args) {
}
protected virtual void on_radio_single_leg_3_toggled(object o, EventArgs args) {
}
protected virtual void on_radio_single_leg_4_toggled(object o, EventArgs args) {
}
protected virtual void on_spin_single_leg_changed(object o, EventArgs args) {
}
// Validates the TV entry: keeps the last good numeric text in entryTv and only
// allows accepting the dialog while the entry parses as a (decimal) number.
private void on_entry_tv_value_changed (object o, EventArgs args) {
    string text = entry_tv_value.Text.ToString();
    bool isNumber = Util.IsNumber(text, true);
    if (isNumber)
        entryTv = text;
    button_accept.Sensitive = isNumber;
}
// Validates the contact-time entry: remembers the last good numeric value and
// only allows accepting the dialog while the text parses as a decimal number.
private void on_entry_tc_value_changed (object o, EventArgs args) {
    if(Util.IsNumber(entry_tc_value.Text.ToString(), true)){
        entryTc = entry_tc_value.Text.ToString();
        button_accept.Sensitive = true;
    } else {
        button_accept.Sensitive = false;
        //entry_tc_value.Text = "";
        //entry_tc_value.Text = entryTc;
    }
}
// Same validation for the fall entry.
private void on_entry_fall_value_changed (object o, EventArgs args) {
    if(Util.IsNumber(entry_fall_value.Text.ToString(), true)){
        entryFall = entry_fall_value.Text.ToString();
        button_accept.Sensitive = true;
    } else {
        button_accept.Sensitive = false;
        //entry_fall_value.Text = "";
        //entry_fall_value.Text = entryFall;
    }
}
// Validates the time entry and, while valid, refreshes the derived speed label
// from the current distance/time pair.
private void on_entry_time_changed (object o, EventArgs args) {
    if(Util.IsNumber(entry_time_value.Text.ToString(), true)){
        entryTime = entry_time_value.Text.ToString();
        label_speed_value.Text = Util.TrimDecimals(
            Util.GetSpeed (entryDistance, entryTime, metersSecondsPreferred) , pDN);
        button_accept.Sensitive = true;
    } else {
        button_accept.Sensitive = false;
        //entry_time_value.Text = "";
        //entry_time_value.Text = entryTime;
    }
}
// Same for the distance entry; decimals are only accepted when the test type
// allows them (distanceCanBeDecimal).
private void on_entry_distance_changed (object o, EventArgs args) {
    if(Util.IsNumber(entry_distance_value.Text.ToString(), distanceCanBeDecimal)){
        entryDistance = entry_distance_value.Text.ToString();
        label_speed_value.Text = Util.TrimDecimals(
            Util.GetSpeed (entryDistance, entryTime, metersSecondsPreferred) , pDN);
        button_accept.Sensitive = true;
    } else {
        button_accept.Sensitive = false;
        //entry_distance_value.Text = "";
        //entry_distance_value.Text = entryDistance;
    }
}
private void on_entry_weight_value_changed (object o, EventArgs args) {
if(Util.IsNumber(entry_weight_value.Text.ToString(), true)){
entryWeight = entry_weight_value.Text.ToString();
button_accept.Sensitive = true;
} else {
button_accept.Sensitive = false;
//entry_weight_value.Text = "";
//entry_weight_value.Text = entryWeight;
}
}
/*
private void on_entry_angle_changed (object o, EventArgs args) {
string angleString = entry_angle_value.Text.ToString();
if(Util.IsNumber(angleString, true)) {
entryAngle = angleString;
button_accept.Sensitive = true;
} else if(angleString == "-") {
entryAngle = "-1,0";
button_accept.Sensitive = true;
} else
button_accept.Sensitive = false;
}
*/
protected virtual void on_spin_mistakes_changed (object o, EventArgs args) {
}
private void on_entry_description_changed (object o, EventArgs args) {
entry_description.Text = Util.RemoveTildeAndColonAndDot(entry_description.Text.ToString());
}
protected virtual void on_radio_mtgug_1_toggled(object o, EventArgs args) { }
protected virtual void on_radio_mtgug_2_toggled(object o, EventArgs args) { }
protected virtual void on_radio_mtgug_3_toggled(object o, EventArgs args) { }
protected virtual void on_radio_mtgug_4_toggled(object o, EventArgs args) { }
protected virtual void on_radio_mtgug_5_toggled(object o, EventArgs args) { }
protected virtual void on_radio_mtgug_6_toggled(object o, EventArgs args) { }
protected virtual void on_button_cancel_clicked (object o, EventArgs args)
{
EditEventWindowBox.edit_event.Hide();
EditEventWindowBox = null;
}
protected virtual void on_delete_event (object o, DeleteEventArgs args)
{
EditEventWindowBox.edit_event.Hide();
EditEventWindowBox = null;
}
protected virtual void hideWindow() {
EditEventWindowBox.edit_event.Hide();
EditEventWindowBox = null;
}
void on_button_accept_clicked (object o, EventArgs args)
{
int eventID = Convert.ToInt32 ( label_event_id_value.Text );
string myPerson = UtilGtk.ComboGetActive(combo_persons);
string [] myPersonFull = myPerson.Split(new char[] {':'});
string myDesc = entry_description.Text;
updateEvent(eventID, Convert.ToInt32(myPersonFull[0]), myDesc);
hideWindow();
}
protected virtual void updateEvent(int eventID, int personID, string description) {
}
public Button Button_accept
{
set { button_accept = value; }
get { return button_accept; }
}
~EditEventWindow() {}
}
//--------------------------------------------------------
//---------------- event_more widget ---------------------
//--------------------------------------------------------
public class EventMoreWindow
{
[Widget] protected Gtk.Notebook notebook;
[Widget] protected Gtk.TreeView treeview_more;
[Widget] protected Gtk.Button button_accept;
[Widget] protected Gtk.Button button_delete_type;
[Widget] protected Gtk.Button button_cancel;
[Widget] protected Gtk.Button button_close;
[Widget] protected Gtk.Button button_close1;
[Widget] protected Gtk.Label label_delete_confirm;
[Widget] protected Gtk.Label label_delete_confirm_name;
[Widget] protected Gtk.Label label_delete_cannot;
[Widget] protected Gtk.Image image_delete;
[Widget] protected Gtk.Image image_delete1;
protected Gtk.Window parent;
protected enum notebookPages { TESTS, DELETECONFIRM, DELETECANNOT };
protected TreeStore store;
protected string selectedEventType;
protected string selectedEventName;
protected string selectedDescription;
public Gtk.Button button_selected;
public Gtk.Button button_deleted_test; //just to send a signal
protected bool testOrDelete; //are we going to do a test or to delete a test type (test is true)
protected string [] typesTranslated;
public EventMoreWindow () {
}
public EventMoreWindow (Gtk.Window parent, bool testOrDelete)
{
//name, startIn, weight, description
store = new TreeStore(typeof (string), typeof (string), typeof (string), typeof (string));
initializeThings();
}
protected void initializeThings()
{
button_selected = new Gtk.Button();
button_deleted_test = new Gtk.Button();
createTreeView(treeview_more);
treeview_more.Model = store;
fillTreeView(treeview_more,store);
//when executing test: show accept and cancel
button_accept.Visible = testOrDelete;
button_cancel.Visible = testOrDelete;
//when deleting test type: show delete type and close
button_delete_type.Visible = ! testOrDelete;
button_close.Visible = ! testOrDelete;
Pixbuf pixbuf = new Pixbuf (null, Util.GetImagePath(false) + "stock_delete.png");
image_delete.Pixbuf = pixbuf;
image_delete1.Pixbuf = pixbuf;
button_accept.Sensitive = false;
button_delete_type.Sensitive = false;
treeview_more.Selection.Changed += onSelectionEntry;
}
//if eventType is predefined, it will have a translation on src/evenType or derivated class
//this is useful if user changed language
protected string getDescriptionLocalised(EventType myType, string descriptionFromDb) {
if(myType.IsPredefined)
return myType.Description;
else
return descriptionFromDb;
}
protected virtual void createTreeView (Gtk.TreeView tv) {
}
protected virtual void fillTreeView (Gtk.TreeView tv, TreeStore store)
{
}
/*
* when a row is selected...
* -put selected value in selected* variables
* -update graph image test on main window
*/
protected virtual void onSelectionEntry (object o, EventArgs args)
{
}
protected virtual void on_row_double_clicked (object o, Gtk.RowActivatedArgs args)
{
}
void on_button_delete_type_clicked (object o, EventArgs args)
{
List<Session> session_l = SqliteSession.SelectAll(false, Sqlite.Orders_by.DEFAULT);
string [] tests = findTestTypesInSessions();
//this will be much better doing a select distinct(session) instead of using SelectJumps or Runs
ArrayList sessionValuesArray = new ArrayList();
foreach(string t in tests)
{
string [] tFull = t.Split(new char[] {':'});
if(! Util.IsNumber(tFull[3], false))
continue;
|
[
"\t\t\tint sessionID = Convert.ToInt32(tFull[3]);"
] | 2,057
|
lcc
|
csharp
| null |
5d7f05b8f84258ef580014f750f774d8eb4af59aa074f04a
|
|
from mutagen._util import DictMixin, cdata, insert_bytes, delete_bytes
from mutagen._util import decode_terminated, split_escape, dict_match, enum
from mutagen._util import BitReader, BitReaderError, set_win32_unicode_argv
from mutagen._compat import text_type, itervalues, iterkeys, iteritems, PY2, \
cBytesIO, xrange
from tests import TestCase
import random
import sys
import os
import mmap
try:
import fcntl
except ImportError:
fcntl = None
class FDict(DictMixin):
def __init__(self):
self.__d = {}
self.keys = self.__d.keys
def __getitem__(self, *args):
return self.__d.__getitem__(*args)
def __setitem__(self, *args):
return self.__d.__setitem__(*args)
def __delitem__(self, *args):
return self.__d.__delitem__(*args)
class TDictMixin(TestCase):
def setUp(self):
self.fdict = FDict()
self.rdict = {}
self.fdict["foo"] = self.rdict["foo"] = "bar"
def test_getsetitem(self):
self.failUnlessEqual(self.fdict["foo"], "bar")
self.failUnlessRaises(KeyError, self.fdict.__getitem__, "bar")
def test_has_key_contains(self):
self.failUnless("foo" in self.fdict)
self.failIf("bar" in self.fdict)
if PY2:
self.failUnless(self.fdict.has_key("foo"))
self.failIf(self.fdict.has_key("bar"))
def test_iter(self):
self.failUnlessEqual(list(iter(self.fdict)), ["foo"])
def test_clear(self):
self.fdict.clear()
self.rdict.clear()
self.failIf(self.fdict)
def test_keys(self):
self.failUnlessEqual(list(self.fdict.keys()), list(self.rdict.keys()))
self.failUnlessEqual(
list(iterkeys(self.fdict)), list(iterkeys(self.rdict)))
def test_values(self):
self.failUnlessEqual(
list(self.fdict.values()), list(self.rdict.values()))
self.failUnlessEqual(
list(itervalues(self.fdict)), list(itervalues(self.rdict)))
def test_items(self):
self.failUnlessEqual(
list(self.fdict.items()), list(self.rdict.items()))
self.failUnlessEqual(
list(iteritems(self.fdict)), list(iteritems(self.rdict)))
def test_pop(self):
self.failUnlessEqual(self.fdict.pop("foo"), self.rdict.pop("foo"))
self.failUnlessRaises(KeyError, self.fdict.pop, "woo")
def test_pop_bad(self):
self.failUnlessRaises(TypeError, self.fdict.pop, "foo", 1, 2)
def test_popitem(self):
self.failUnlessEqual(self.fdict.popitem(), self.rdict.popitem())
self.failUnlessRaises(KeyError, self.fdict.popitem)
def test_update_other(self):
other = {"a": 1, "b": 2}
self.fdict.update(other)
self.rdict.update(other)
def test_update_other_is_list(self):
other = [("a", 1), ("b", 2)]
self.fdict.update(other)
self.rdict.update(dict(other))
def test_update_kwargs(self):
self.fdict.update(a=1, b=2)
# Ironically, the *real* dict doesn't support this on Python 2.3
other = {"a": 1, "b": 2}
self.rdict.update(other)
def test_setdefault(self):
self.fdict.setdefault("foo", "baz")
self.rdict.setdefault("foo", "baz")
self.fdict.setdefault("bar", "baz")
self.rdict.setdefault("bar", "baz")
def test_get(self):
self.failUnlessEqual(self.rdict.get("a"), self.fdict.get("a"))
self.failUnlessEqual(
self.rdict.get("a", "b"), self.fdict.get("a", "b"))
self.failUnlessEqual(self.rdict.get("foo"), self.fdict.get("foo"))
def test_repr(self):
self.failUnlessEqual(repr(self.rdict), repr(self.fdict))
def test_len(self):
self.failUnlessEqual(len(self.rdict), len(self.fdict))
def tearDown(self):
self.failUnlessEqual(self.fdict, self.rdict)
self.failUnlessEqual(self.rdict, self.fdict)
class Tcdata(TestCase):
ZERO = staticmethod(lambda s: b"\x00" * s)
LEONE = staticmethod(lambda s: b"\x01" + b"\x00" * (s - 1))
BEONE = staticmethod(lambda s: b"\x00" * (s - 1) + b"\x01")
NEGONE = staticmethod(lambda s: b"\xff" * s)
def test_char(self):
self.failUnlessEqual(cdata.char(self.ZERO(1)), 0)
self.failUnlessEqual(cdata.char(self.LEONE(1)), 1)
self.failUnlessEqual(cdata.char(self.BEONE(1)), 1)
self.failUnlessEqual(cdata.char(self.NEGONE(1)), -1)
self.assertTrue(cdata.char is cdata.int8)
self.assertTrue(cdata.to_char is cdata.to_int8)
self.assertTrue(cdata.char_from is cdata.int8_from)
def test_char_from_to(self):
self.assertEqual(cdata.to_char(-2), b"\xfe")
self.assertEqual(cdata.char_from(b"\xfe"), (-2, 1))
self.assertEqual(cdata.char_from(b"\x00\xfe", 1), (-2, 2))
self.assertRaises(cdata.error, cdata.char_from, b"\x00\xfe", 3)
def test_uchar(self):
self.failUnlessEqual(cdata.uchar(self.ZERO(1)), 0)
self.failUnlessEqual(cdata.uchar(self.LEONE(1)), 1)
self.failUnlessEqual(cdata.uchar(self.BEONE(1)), 1)
self.failUnlessEqual(cdata.uchar(self.NEGONE(1)), 255)
self.assertTrue(cdata.uchar is cdata.uint8)
self.assertTrue(cdata.to_uchar is cdata.to_uint8)
self.assertTrue(cdata.uchar_from is cdata.uint8_from)
def test_short(self):
self.failUnlessEqual(cdata.short_le(self.ZERO(2)), 0)
self.failUnlessEqual(cdata.short_le(self.LEONE(2)), 1)
self.failUnlessEqual(cdata.short_le(self.BEONE(2)), 256)
self.failUnlessEqual(cdata.short_le(self.NEGONE(2)), -1)
self.assertTrue(cdata.short_le is cdata.int16_le)
self.failUnlessEqual(cdata.short_be(self.ZERO(2)), 0)
self.failUnlessEqual(cdata.short_be(self.LEONE(2)), 256)
self.failUnlessEqual(cdata.short_be(self.BEONE(2)), 1)
self.failUnlessEqual(cdata.short_be(self.NEGONE(2)), -1)
self.assertTrue(cdata.short_be is cdata.int16_be)
def test_ushort(self):
self.failUnlessEqual(cdata.ushort_le(self.ZERO(2)), 0)
self.failUnlessEqual(cdata.ushort_le(self.LEONE(2)), 1)
self.failUnlessEqual(cdata.ushort_le(self.BEONE(2)), 2 ** 16 >> 8)
self.failUnlessEqual(cdata.ushort_le(self.NEGONE(2)), 65535)
self.assertTrue(cdata.ushort_le is cdata.uint16_le)
self.failUnlessEqual(cdata.ushort_be(self.ZERO(2)), 0)
self.failUnlessEqual(cdata.ushort_be(self.LEONE(2)), 2 ** 16 >> 8)
self.failUnlessEqual(cdata.ushort_be(self.BEONE(2)), 1)
self.failUnlessEqual(cdata.ushort_be(self.NEGONE(2)), 65535)
self.assertTrue(cdata.ushort_be is cdata.uint16_be)
def test_int(self):
self.failUnlessEqual(cdata.int_le(self.ZERO(4)), 0)
self.failUnlessEqual(cdata.int_le(self.LEONE(4)), 1)
self.failUnlessEqual(cdata.int_le(self.BEONE(4)), 2 ** 32 >> 8)
self.failUnlessEqual(cdata.int_le(self.NEGONE(4)), -1)
self.assertTrue(cdata.int_le is cdata.int32_le)
self.failUnlessEqual(cdata.int_be(self.ZERO(4)), 0)
self.failUnlessEqual(cdata.int_be(self.LEONE(4)), 2 ** 32 >> 8)
self.failUnlessEqual(cdata.int_be(self.BEONE(4)), 1)
self.failUnlessEqual(cdata.int_be(self.NEGONE(4)), -1)
self.assertTrue(cdata.int_be is cdata.int32_be)
def test_uint(self):
self.failUnlessEqual(cdata.uint_le(self.ZERO(4)), 0)
self.failUnlessEqual(cdata.uint_le(self.LEONE(4)), 1)
self.failUnlessEqual(cdata.uint_le(self.BEONE(4)), 2 ** 32 >> 8)
self.failUnlessEqual(cdata.uint_le(self.NEGONE(4)), 2 ** 32 - 1)
self.assertTrue(cdata.uint_le is cdata.uint32_le)
self.failUnlessEqual(cdata.uint_be(self.ZERO(4)), 0)
self.failUnlessEqual(cdata.uint_be(self.LEONE(4)), 2 ** 32 >> 8)
self.failUnlessEqual(cdata.uint_be(self.BEONE(4)), 1)
self.failUnlessEqual(cdata.uint_be(self.NEGONE(4)), 2 ** 32 - 1)
self.assertTrue(cdata.uint_be is cdata.uint32_be)
def test_longlong(self):
self.failUnlessEqual(cdata.longlong_le(self.ZERO(8)), 0)
self.failUnlessEqual(cdata.longlong_le(self.LEONE(8)), 1)
self.failUnlessEqual(cdata.longlong_le(self.BEONE(8)), 2 ** 64 >> 8)
self.failUnlessEqual(cdata.longlong_le(self.NEGONE(8)), -1)
self.assertTrue(cdata.longlong_le is cdata.int64_le)
self.failUnlessEqual(cdata.longlong_be(self.ZERO(8)), 0)
self.failUnlessEqual(cdata.longlong_be(self.LEONE(8)), 2 ** 64 >> 8)
self.failUnlessEqual(cdata.longlong_be(self.BEONE(8)), 1)
self.failUnlessEqual(cdata.longlong_be(self.NEGONE(8)), -1)
self.assertTrue(cdata.longlong_be is cdata.int64_be)
def test_ulonglong(self):
self.failUnlessEqual(cdata.ulonglong_le(self.ZERO(8)), 0)
self.failUnlessEqual(cdata.ulonglong_le(self.LEONE(8)), 1)
self.failUnlessEqual(cdata.longlong_le(self.BEONE(8)), 2 ** 64 >> 8)
self.failUnlessEqual(cdata.ulonglong_le(self.NEGONE(8)), 2 ** 64 - 1)
self.assertTrue(cdata.ulonglong_le is cdata.uint64_le)
self.failUnlessEqual(cdata.ulonglong_be(self.ZERO(8)), 0)
self.failUnlessEqual(cdata.ulonglong_be(self.LEONE(8)), 2 ** 64 >> 8)
self.failUnlessEqual(cdata.longlong_be(self.BEONE(8)), 1)
self.failUnlessEqual(cdata.ulonglong_be(self.NEGONE(8)), 2 ** 64 - 1)
self.assertTrue(cdata.ulonglong_be is cdata.uint64_be)
def test_invalid_lengths(self):
self.failUnlessRaises(cdata.error, cdata.char, b"")
self.failUnlessRaises(cdata.error, cdata.uchar, b"")
self.failUnlessRaises(cdata.error, cdata.int_le, b"")
self.failUnlessRaises(cdata.error, cdata.longlong_le, b"")
self.failUnlessRaises(cdata.error, cdata.uint_le, b"")
self.failUnlessRaises(cdata.error, cdata.ulonglong_le, b"")
self.failUnlessRaises(cdata.error, cdata.int_be, b"")
self.failUnlessRaises(cdata.error, cdata.longlong_be, b"")
self.failUnlessRaises(cdata.error, cdata.uint_be, b"")
self.failUnlessRaises(cdata.error, cdata.ulonglong_be, b"")
def test_test(self):
self.failUnless(cdata.test_bit((1), 0))
self.failIf(cdata.test_bit(1, 1))
self.failUnless(cdata.test_bit(2, 1))
self.failIf(cdata.test_bit(2, 0))
v = (1 << 12) + (1 << 5) + 1
self.failUnless(cdata.test_bit(v, 0))
self.failUnless(cdata.test_bit(v, 5))
self.failUnless(cdata.test_bit(v, 12))
self.failIf(cdata.test_bit(v, 3))
self.failIf(cdata.test_bit(v, 8))
self.failIf(cdata.test_bit(v, 13))
class FileHandling(TestCase):
def file(self, contents):
import tempfile
temp = tempfile.TemporaryFile()
temp.write(contents)
temp.flush()
temp.seek(0)
return temp
def read(self, fobj):
fobj.seek(0, 0)
return fobj.read()
def test_insert_into_empty(self):
o = self.file(b'')
insert_bytes(o, 8, 0)
self.assertEquals(b'\x00' * 8, self.read(o))
def test_insert_before_one(self):
o = self.file(b'a')
insert_bytes(o, 8, 0)
self.assertEquals(b'a' + b'\x00' * 7 + b'a', self.read(o))
def test_insert_after_one(self):
o = self.file(b'a')
insert_bytes(o, 8, 1)
self.assertEquals(b'a' + b'\x00' * 8, self.read(o))
def test_smaller_than_file_middle(self):
o = self.file(b'abcdefghij')
insert_bytes(o, 4, 4)
self.assertEquals(b'abcdefghefghij', self.read(o))
def test_smaller_than_file_to_end(self):
o = self.file(b'abcdefghij')
insert_bytes(o, 4, 6)
self.assertEquals(b'abcdefghijghij', self.read(o))
def test_smaller_than_file_across_end(self):
o = self.file(b'abcdefghij')
insert_bytes(o, 4, 8)
self.assertEquals(b'abcdefghij\x00\x00ij', self.read(o))
def test_smaller_than_file_at_end(self):
|
[
" o = self.file(b'abcdefghij')"
] | 694
|
lcc
|
python
| null |
8ed6880ab3b5c6188940a9999bc369708c565891b8d871f2
|
|
<?cs include:"doctype.cs" ?>
<?cs include:"macros.cs" ?>
<html devsite>
<?cs if:sdk.redirect ?>
<head>
<title>Redirecting...</title>
<meta http-equiv="refresh" content="0;url=<?cs var:toroot ?>sdk/<?cs
if:sdk.redirect.path ?><?cs var:sdk.redirect.path ?><?cs
else ?>index.html<?cs /if ?>">
<link href="<?cs var:toroot ?>assets/android-developer-docs.css" rel="stylesheet" type="text/css" />
</head>
<?cs else ?>
<?cs include:"head_tag.cs" ?>
<?cs /if ?>
<body class="gc-documentation
<?cs if:(guide||develop||training||reference||tools||sdk) ?>develop<?cs
elif:design ?>design<?cs
elif:distribute ?>distribute<?cs
/if ?>" itemscope itemtype="http://schema.org/CreativeWork">
<a name="top"></a>
<?cs include:"header.cs" ?>
<div <?cs if:fullpage
?><?cs else
?>class="col-13" id="doc-col"<?cs /if ?> >
<?cs if:sdk.redirect ?>
<div class="g-unit">
<div id="jd-content">
<p>Redirecting to
<a href="<?cs var:toroot ?>sdk/<?cs
if:sdk.redirect.path ?><?cs var:sdk.redirect.path ?><?cs
else ?>index.html<?cs /if ?>"><?cs
if:sdk.redirect.path ?><?cs var:sdk.redirect.path ?><?cs
else ?>Download the SDK<?cs /if ?>
</a> ...</p>
<?cs else ?>
<?cs # else, if NOT redirect ...
#
#
# The following is for SDK/NDK pages
#
#
?>
<?cs if:header.hide ?><?cs else ?>
<h1 itemprop="name"><?cs var:page.title ?></h1>
<?cs /if ?>
<div id="jd-content" itemprop="description">
<?cs if:sdk.not_latest_version ?>
<div class="special">
<p><strong>This is NOT the current Android SDK release.</strong></p>
<p><a href="/sdk/index.html">Download the current Android SDK</a></p>
</div>
<?cs /if ?>
<?cs if:ndk ?>
<?cs #
#
#
#
#
#
#
# the following is for the NDK
#
# (nested in if/else redirect)
#
#
#
#
?>
<table class="download" id="download-table">
<tr>
<th>Platform</th>
<th>Package</th>
<th>Size</th>
<th>MD5 Checksum</th>
</tr>
<tr>
<td>Windows</td>
<td>
<a onClick="return onDownload(this)"
href="http://dl.google.com/android/ndk/<?cs var:ndk.win_download ?>"><?cs var:ndk.win_download ?></a>
</td>
<td><?cs var:ndk.win_bytes ?> bytes</td>
<td><?cs var:ndk.win_checksum ?></td>
</tr>
<tr>
<td>Mac OS X (intel)</td>
<td>
<a onClick="return onDownload(this)"
href="http://dl.google.com/android/ndk/<?cs var:ndk.mac_download ?>"><?cs var:ndk.mac_download ?></a>
</td>
<td><?cs var:ndk.mac_bytes ?> bytes</td>
<td><?cs var:ndk.mac_checksum ?></td>
</tr>
<tr>
<td>Linux 32/64-bit (x86)</td>
<td>
<a onClick="return onDownload(this)"
href="http://dl.google.com/android/ndk/<?cs var:ndk.linux_download ?>"><?cs var:ndk.linux_download ?></a>
</td>
<td><?cs var:ndk.linux_bytes ?> bytes</td>
<td><?cs var:ndk.linux_checksum ?></td>
</tr>
</table>
<?cs ######## HERE IS THE JD DOC CONTENT ######### ?>
<?cs call:tag_list(root.descr) ?>
<script>
function onDownload(link) {
$("#downloadForRealz").html("Download " + $(link).text());
$("#downloadForRealz").attr('href',$(link).attr('href'));
$("#tos").fadeIn('slow');
location.hash = "download";
return false;
}
function onAgreeChecked() {
if ($("input#agree").is(":checked")) {
$("a#downloadForRealz").removeClass('disabled');
} else {
$("a#downloadForRealz").addClass('disabled');
}
}
function onDownloadNdkForRealz(link) {
if ($("input#agree").is(':checked')) {
$("#tos").fadeOut('slow');
$('html, body').animate({
scrollTop: $("#Installing").offset().top
}, 800, function() {
$("#Installing").click();
});
return true;
} else {
$("label#agreeLabel").parent().stop().animate({color: "#258AAF"}, 200,
function() {$("label#agreeLabel").parent().stop().animate({color: "#222"}, 200)}
);
return false;
}
}
$(window).hashchange( function(){
if (location.hash == "") {
location.reload();
}
});
</script>
<?cs else ?>
<?cs # end if NDK ...
#
#
#
#
#
#
# the following is for the SDK
#
# (nested in if/else redirect and if/else NDK)
#
#
#
#
?>
<?cs if:android.whichdoc == "online" ?>
<?cs ######## HERE IS THE JD DOC CONTENT FOR ONLINE ######### ?>
<?cs call:tag_list(root.descr) ?>
<h4><a href='' class="expandable"
onclick="toggleExpandable(this,'.pax');hideExpandable('.myide,.reqs');return false;"
>DOWNLOAD FOR OTHER PLATFORMS</a></h4>
<div class="pax col-13 online" style="display:none;margin:0;">
<p class="table-caption"><strong>ADT Bundle</strong></p>
<table class="download">
<tr>
<th>Platform</th>
<th>Package</th>
<th>Size</th>
<th>MD5 Checksum</th>
</tr>
<tr>
<td>Windows 32-bit</td>
<td>
<a onClick="return onDownload(this)" id="win-bundle32"
href="http://dl.google.com/android/adt/<?cs var:sdk.win32_bundle_download ?>"><?cs var:sdk.win32_bundle_download ?></a>
</td>
<td><?cs var:sdk.win32_bundle_bytes ?> bytes</td>
<td><?cs var:sdk.win32_bundle_checksum ?></td>
</tr>
<tr>
<td>Windows 64-bit</td>
<td>
<a onClick="return onDownload(this)" id="win-bundle64"
href="http://dl.google.com/android/adt/<?cs var:sdk.win64_bundle_download ?>"><?cs var:sdk.win64_bundle_download ?></a>
</td>
|
[
" <td><?cs var:sdk.win64_bundle_bytes ?> bytes</td>"
] | 478
|
lcc
|
csharp
| null |
7a221b8009c1d73293e9a45c3c9dc18eb109dc4e8c6cef92
|
|
#!/usr/bin/env python
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
import argparse
import glob
import json
from math import sqrt
import re
import sys
import matplotlib.pyplot as plt
import matplotlib.ticker as plticker
import numpy as np
from scipy.stats import norm, t
VC = 'startup > moz-app-visually-complete'
def add_application_to_results(results, app_result_set,
app_pattern=None, test_pattern=None,
first_repetition=None, last_repetition=None):
app_name = app_result_set['stats']['application'].strip()
if app_pattern and not re.search(app_pattern, app_name):
return
if not app_result_set.get('passes'):
return
app_results = results.get(app_name, {})
tests_added = 0
for test_result_set in app_result_set['passes']:
if add_test_to_results(app_results, test_result_set, test_pattern,
first_repetition, last_repetition):
tests_added += 1
if tests_added > 0:
results[app_name] = app_results
def add_test_to_results(app_results, test_result_set,
test_pattern=None,
first_repetition=None, last_repetition=None):
test_name = test_result_set['title'].strip()
if test_pattern and not re.search(test_pattern, test_name):
return False
if not test_result_set.get('mozPerfDurations'):
return False
test_results = app_results.get(test_name, {'durations': []})
# TODO: use slices
durations_added = 0
for index, duration in enumerate(test_result_set['mozPerfDurations'],
start=1):
if first_repetition and index < first_repetition:
continue
if last_repetition and index > last_repetition:
break
test_results['durations'].append(duration)
durations_added += 1
if durations_added:
app_results[test_name] = test_results
return True
else:
return False
def add_result_set(result_set, results,
app_pattern=None, test_pattern=None,
first_repetition=None, last_repetition=None):
for app_result_set in result_set:
add_application_to_results(results, app_result_set,
app_pattern, test_pattern,
first_repetition, last_repetition)
def get_stats(values, intervals=True):
stats = {}
values_array = np.array(values, dtype=np.float64)
stats['min'] = np.asscalar(np.amin(values_array))
stats['max'] = np.asscalar(np.amax(values_array))
stats['mean'] = np.asscalar(np.mean(values_array))
stats['median'] = np.asscalar(np.median(values_array))
if values_array.size > 1:
stats['std_dev'] = np.asscalar(np.std(values_array, ddof=1))
else:
stats['std_dev'] = 0
if intervals:
stats['intervals'] = []
loc = stats['mean']
scale = stats['std_dev'] / sqrt(values_array.size)
for alpha in (.95, .99, .90, .85, .80, .50):
if values_array.size > 30:
interval = norm.interval(alpha, loc=loc, scale=scale)
else:
interval = t.interval(alpha, values_array.size - 1, loc, scale)
stats['intervals'].append(
{'confidence': alpha, 'interval': interval})
return stats
def add_stats_to_results(results):
for app in results:
for test in results[app]:
stats = get_stats(results[app][test]['durations'])
results[app][test]['stats'] = stats
def add_stats_to_pivot(pivot):
for app in pivot:
for test in pivot[app]:
for stat in pivot[app][test]:
stats = get_stats(pivot[app][test][stat]['values'],
intervals=True)
pivot[app][test][stat]['stats'] = stats
def add_stats_pivot_to_crunched_results(crunched_results):
# pivot -> app -> test -> stat[]
pivot = {}
for run_num, run_results in enumerate(crunched_results['runs']):
# print 'Run %d:' % (run_num)
for app in run_results:
if app not in pivot:
pivot[app] = {}
for test in run_results[app]:
if test not in pivot[app]:
pivot[app][test] = {}
for stat in run_results[app][test]['stats']:
if stat == 'intervals':
continue
if stat not in pivot[app][test]:
pivot[app][test][stat] = {'values': []}
pivot[app][test][stat]['values'].append(
run_results[app][test]['stats'][stat])
# print ' Added %s.%s.%s' % (app, test, stat)
add_stats_to_pivot(pivot)
crunched_results['pivot'] = pivot
def crunch_result_sets(result_sets, app_pattern=None, test_pattern=None,
first_repetition=None, last_repetition=None):
crunched_results = {'args': {'app_pattern': app_pattern,
'test_pattern': test_pattern,
'first_repetition': first_repetition,
'last_repetition': last_repetition},
'combined': {},
'runs': []}
if app_pattern:
app_pattern = re.compile(app_pattern, re.IGNORECASE)
if test_pattern:
test_pattern = re.compile(test_pattern, re.IGNORECASE)
for result_set in result_sets:
results = {}
add_result_set(result_set, results, app_pattern, test_pattern,
first_repetition, last_repetition)
add_stats_to_results(results)
crunched_results['runs'].append(results)
# TODO: make it so it aggregates the last call instead
add_result_set(result_set, crunched_results['combined'], app_pattern,
test_pattern, first_repetition, last_repetition)
add_stats_to_results(crunched_results['combined'])
add_stats_pivot_to_crunched_results(crunched_results)
return crunched_results
def load_result_sets(filenames):
if isinstance(filenames, basestring):
filenames = glob.glob(filenames)
result_sets = []
for filename in filenames:
with open(filename) as f:
results = f.read()
try:
result_sets.append(json.loads(results))
except Exception as e:
sys.stderr.write('Discarding %s: %s\n' % (filename, str(e)))
return result_sets
def load_and_crunch_result_sets(filenames, app_pattern=None, test_pattern=None,
first_repetition=None, last_repetition=None):
rs = load_result_sets(filenames)
return crunch_result_sets(rs, app_pattern, test_pattern, first_repetition, last_repetition)
def plot_app_vc(cr, app, test=VC, stat='mean'):
loc = plticker.MultipleLocator(base=1.0)
fig, ax = plt.subplots()
ax.xaxis.set_major_locator(loc)
plt.xlabel('Runs')
plt.ylabel('Time in ms')
plt.title('%s, %s, individual %ss vs. %d-count 95%% CI' %
(app, test, stat, len(cr['combined'][app][VC]['durations'])))
csi_95 = cr['combined'][app][VC]['stats']['intervals'][0]['interval']
print csi_95
|
[
" ymin = csi_95[0]"
] | 565
|
lcc
|
python
| null |
728682005fc144f5d03a92ebb7f121b7ceb08648eff87ba8
|
|
# -*- test-case-name: twisted.python.test.test_util
# Copyright (c) Twisted Matrix Laboratories.
# See LICENSE for details.
"""
Tests for L{twisted.python.util}.
"""
from __future__ import division, absolute_import
import errno
import os.path
import shutil
import sys
import warnings
try:
import pwd, grp
except ImportError:
pwd = grp = None
from twisted.trial import unittest
from twisted.trial.util import suppress as SUPPRESS
from twisted.python import util
from twisted.python.filepath import FilePath
from twisted.internet import reactor
from twisted.internet.interfaces import IReactorProcess
from twisted.internet.protocol import ProcessProtocol
from twisted.internet.defer import Deferred
from twisted.internet.error import ProcessDone
from twisted.test.test_process import MockOS
pyExe = FilePath(sys.executable)._asBytesPath()
class UtilTests(unittest.TestCase):
def testUniq(self):
l = ["a", 1, "ab", "a", 3, 4, 1, 2, 2, 4, 6]
self.assertEqual(util.uniquify(l), ["a", 1, "ab", 3, 4, 2, 6])
def testRaises(self):
self.assertTrue(util.raises(ZeroDivisionError, divmod, 1, 0))
self.assertFalse(util.raises(ZeroDivisionError, divmod, 0, 1))
try:
util.raises(TypeError, divmod, 1, 0)
except ZeroDivisionError:
pass
else:
raise unittest.FailTest("util.raises didn't raise when it should have")
def test_uidFromNumericString(self):
"""
When L{uidFromString} is called with a base-ten string representation
of an integer, it returns the integer.
"""
self.assertEqual(util.uidFromString("100"), 100)
def test_uidFromUsernameString(self):
"""
When L{uidFromString} is called with a base-ten string representation
of an integer, it returns the integer.
"""
pwent = pwd.getpwuid(os.getuid())
self.assertEqual(util.uidFromString(pwent.pw_name), pwent.pw_uid)
if pwd is None:
test_uidFromUsernameString.skip = (
"Username/UID conversion requires the pwd module.")
def test_gidFromNumericString(self):
"""
When L{gidFromString} is called with a base-ten string representation
of an integer, it returns the integer.
"""
self.assertEqual(util.gidFromString("100"), 100)
def test_gidFromGroupnameString(self):
"""
When L{gidFromString} is called with a base-ten string representation
of an integer, it returns the integer.
"""
grent = grp.getgrgid(os.getgid())
self.assertEqual(util.gidFromString(grent.gr_name), grent.gr_gid)
if grp is None:
test_gidFromGroupnameString.skip = (
"Group Name/GID conversion requires the grp module.")
class NameToLabelTests(unittest.TestCase):
"""
Tests for L{nameToLabel}.
"""
def test_nameToLabel(self):
"""
Test the various kinds of inputs L{nameToLabel} supports.
"""
nameData = [
('f', 'F'),
('fo', 'Fo'),
('foo', 'Foo'),
('fooBar', 'Foo Bar'),
('fooBarBaz', 'Foo Bar Baz'),
]
for inp, out in nameData:
got = util.nameToLabel(inp)
self.assertEqual(
got, out,
"nameToLabel(%r) == %r != %r" % (inp, got, out))
class UntilConcludesTests(unittest.TestCase):
"""
Tests for L{untilConcludes}, an C{EINTR} helper.
"""
def test_uninterruptably(self):
"""
L{untilConcludes} calls the function passed to it until the function
does not raise either L{OSError} or L{IOError} with C{errno} of
C{EINTR}. It otherwise completes with the same result as the function
passed to it.
"""
def f(a, b):
self.calls += 1
exc = self.exceptions.pop()
if exc is not None:
raise exc(errno.EINTR, "Interrupted system call!")
return a + b
self.exceptions = [None]
self.calls = 0
self.assertEqual(util.untilConcludes(f, 1, 2), 3)
self.assertEqual(self.calls, 1)
self.exceptions = [None, OSError, IOError]
self.calls = 0
self.assertEqual(util.untilConcludes(f, 2, 3), 5)
self.assertEqual(self.calls, 3)
class SwitchUIDTests(unittest.TestCase):
"""
Tests for L{util.switchUID}.
"""
if getattr(os, "getuid", None) is None:
skip = "getuid/setuid not available"
def setUp(self):
self.mockos = MockOS()
self.patch(util, "os", self.mockos)
self.patch(util, "initgroups", self.initgroups)
self.initgroupsCalls = []
def initgroups(self, uid, gid):
"""
Save L{util.initgroups} calls in C{self.initgroupsCalls}.
"""
self.initgroupsCalls.append((uid, gid))
def test_uid(self):
"""
L{util.switchUID} calls L{util.initgroups} and then C{os.setuid} with
the given uid.
"""
util.switchUID(12000, None)
self.assertEqual(self.initgroupsCalls, [(12000, None)])
self.assertEqual(self.mockos.actions, [("setuid", 12000)])
def test_euid(self):
"""
L{util.switchUID} calls L{util.initgroups} and then C{os.seteuid} with
the given uid if the C{euid} parameter is set to C{True}.
"""
util.switchUID(12000, None, True)
self.assertEqual(self.initgroupsCalls, [(12000, None)])
self.assertEqual(self.mockos.seteuidCalls, [12000])
def test_currentUID(self):
"""
If the current uid is the same as the uid passed to L{util.switchUID},
then initgroups does not get called, but a warning is issued.
"""
uid = self.mockos.getuid()
util.switchUID(uid, None)
self.assertEqual(self.initgroupsCalls, [])
self.assertEqual(self.mockos.actions, [])
currentWarnings = self.flushWarnings([util.switchUID])
self.assertEqual(len(currentWarnings), 1)
self.assertIn('tried to drop privileges and setuid %i' % uid,
currentWarnings[0]['message'])
self.assertIn(
'but uid is already %i' % uid, currentWarnings[0]['message'])
def test_currentEUID(self):
"""
If the current euid is the same as the euid passed to L{util.switchUID},
then initgroups does not get called, but a warning is issued.
"""
euid = self.mockos.geteuid()
util.switchUID(euid, None, True)
self.assertEqual(self.initgroupsCalls, [])
self.assertEqual(self.mockos.seteuidCalls, [])
currentWarnings = self.flushWarnings([util.switchUID])
self.assertEqual(len(currentWarnings), 1)
self.assertIn('tried to drop privileges and seteuid %i' % euid,
currentWarnings[0]['message'])
self.assertIn(
'but euid is already %i' % euid, currentWarnings[0]['message'])
class MergeFunctionMetadataTests(unittest.TestCase):
"""
Tests for L{mergeFunctionMetadata}.
"""
def test_mergedFunctionBehavesLikeMergeTarget(self):
"""
After merging C{foo}'s data into C{bar}, the returned function behaves
as if it is C{bar}.
"""
foo_object = object()
bar_object = object()
def foo():
return foo_object
def bar(x, y, ab, c=10, *d, **e):
(a, b) = ab
return bar_object
baz = util.mergeFunctionMetadata(foo, bar)
self.assertIdentical(baz(1, 2, (3, 4), quux=10), bar_object)
def test_moduleIsMerged(self):
"""
Merging C{foo} into C{bar} returns a function with C{foo}'s
C{__module__}.
"""
def foo():
pass
def bar():
pass
bar.__module__ = 'somewhere.else'
baz = util.mergeFunctionMetadata(foo, bar)
self.assertEqual(baz.__module__, foo.__module__)
def test_docstringIsMerged(self):
"""
Merging C{foo} into C{bar} returns a function with C{foo}'s docstring.
"""
def foo():
"""
This is foo.
"""
def bar():
"""
This is bar.
"""
baz = util.mergeFunctionMetadata(foo, bar)
self.assertEqual(baz.__doc__, foo.__doc__)
def test_nameIsMerged(self):
"""
Merging C{foo} into C{bar} returns a function with C{foo}'s name.
"""
def foo():
pass
def bar():
pass
baz = util.mergeFunctionMetadata(foo, bar)
self.assertEqual(baz.__name__, foo.__name__)
def test_instanceDictionaryIsMerged(self):
"""
Merging C{foo} into C{bar} returns a function with C{bar}'s
dictionary, updated by C{foo}'s.
"""
def foo():
pass
foo.a = 1
foo.b = 2
def bar():
pass
bar.b = 3
bar.c = 4
baz = util.mergeFunctionMetadata(foo, bar)
self.assertEqual(foo.a, baz.a)
self.assertEqual(foo.b, baz.b)
self.assertEqual(bar.c, baz.c)
class OrderedDictTests(unittest.TestCase):
"""
Tests for L{util.OrderedDict}.
"""
def test_deprecated(self):
"""
L{util.OrderedDict} is deprecated.
"""
from twisted.python.util import OrderedDict
OrderedDict # Shh pyflakes
currentWarnings = self.flushWarnings(offendingFunctions=[
self.test_deprecated])
self.assertEqual(
currentWarnings[0]['message'],
"twisted.python.util.OrderedDict was deprecated in Twisted "
"15.5.0: Use collections.OrderedDict instead.")
self.assertEqual(currentWarnings[0]['category'], DeprecationWarning)
self.assertEqual(len(currentWarnings), 1)
class InsensitiveDictTests(unittest.TestCase):
"""
Tests for L{util.InsensitiveDict}.
"""
def test_preserve(self):
"""
L{util.InsensitiveDict} preserves the case of keys if constructed with
C{preserve=True}.
"""
dct = util.InsensitiveDict({'Foo':'bar', 1:2, 'fnz':{1:2}}, preserve=1)
self.assertEqual(dct['fnz'], {1:2})
self.assertEqual(dct['foo'], 'bar')
self.assertEqual(dct.copy(), dct)
self.assertEqual(dct['foo'], dct.get('Foo'))
self.assertIn(1, dct)
self.assertIn('foo', dct)
result = eval(repr(dct), {
'dct': dct,
'InsensitiveDict': util.InsensitiveDict,
})
self.assertEqual(result, dct)
keys=['Foo', 'fnz', 1]
for x in keys:
self.assertIn(x, dct.keys())
self.assertIn((x, dct[x]), dct.items())
self.assertEqual(len(keys), len(dct))
del dct[1]
del dct['foo']
self.assertEqual(dct.keys(), ['fnz'])
def test_noPreserve(self):
"""
L{util.InsensitiveDict} does not preserves the case of keys if
constructed with C{preserve=False}.
"""
dct = util.InsensitiveDict({'Foo':'bar', 1:2, 'fnz':{1:2}}, preserve=0)
keys=['foo', 'fnz', 1]
for x in keys:
self.assertIn(x, dct.keys())
self.assertIn((x, dct[x]), dct.items())
self.assertEqual(len(keys), len(dct))
del dct[1]
del dct['foo']
self.assertEqual(dct.keys(), ['fnz'])
def test_unicode(self):
"""
Unicode keys are case insensitive.
"""
d = util.InsensitiveDict(preserve=False)
d[u"Foo"] = 1
self.assertEqual(d[u"FOO"], 1)
self.assertEqual(d.keys(), [u"foo"])
def test_bytes(self):
"""
Bytes keys are case insensitive.
"""
d = util.InsensitiveDict(preserve=False)
d[b"Foo"] = 1
self.assertEqual(d[b"FOO"], 1)
self.assertEqual(d.keys(), [b"foo"])
class PasswordTestingProcessProtocol(ProcessProtocol):
"""
Write the string C{"secret\n"} to a subprocess and then collect all of
its output and fire a Deferred with it when the process ends.
"""
def connectionMade(self):
self.output = []
self.transport.write(b'secret\n')
def childDataReceived(self, fd, output):
self.output.append((fd, output))
def processEnded(self, reason):
self.finished.callback((reason, self.output))
class GetPasswordTests(unittest.TestCase):
if not IReactorProcess.providedBy(reactor):
skip = "Process support required to test getPassword"
def test_stdin(self):
"""
Making sure getPassword accepts a password from standard input by
running a child process which uses getPassword to read in a string
which it then writes it out again. Write a string to the child
process and then read one and make sure it is the right string.
"""
p = PasswordTestingProcessProtocol()
p.finished = Deferred()
reactor.spawnProcess(
p, pyExe,
[pyExe,
b'-c',
(b'import sys\n'
b'from twisted.python.util import getPassword\n'
b'sys.stdout.write(getPassword())\n'
b'sys.stdout.flush()\n')],
env={b'PYTHONPATH': os.pathsep.join(sys.path).encode("utf8")})
def processFinished(result):
(reason, output) = result
reason.trap(ProcessDone)
self.assertIn((1, b'secret'), output)
return p.finished.addCallback(processFinished)
class SearchUpwardsTests(unittest.TestCase):
def testSearchupwards(self):
os.makedirs('searchupwards/a/b/c')
open('searchupwards/foo.txt', 'w').close()
open('searchupwards/a/foo.txt', 'w').close()
open('searchupwards/a/b/c/foo.txt', 'w').close()
os.mkdir('searchupwards/bar')
os.mkdir('searchupwards/bam')
os.mkdir('searchupwards/a/bar')
os.mkdir('searchupwards/a/b/bam')
actual=util.searchupwards('searchupwards/a/b/c',
files=['foo.txt'],
dirs=['bar', 'bam'])
expected=os.path.abspath('searchupwards') + os.sep
self.assertEqual(actual, expected)
shutil.rmtree('searchupwards')
actual=util.searchupwards('searchupwards/a/b/c',
files=['foo.txt'],
dirs=['bar', 'bam'])
expected=None
self.assertEqual(actual, expected)
class IntervalDifferentialTests(unittest.TestCase):
def testDefault(self):
d = iter(util.IntervalDifferential([], 10))
for i in range(100):
self.assertEqual(next(d), (10, None))
def testSingle(self):
d = iter(util.IntervalDifferential([5], 10))
for i in range(100):
self.assertEqual(next(d), (5, 0))
def testPair(self):
d = iter(util.IntervalDifferential([5, 7], 10))
for i in range(100):
self.assertEqual(next(d), (5, 0))
self.assertEqual(next(d), (2, 1))
self.assertEqual(next(d), (3, 0))
self.assertEqual(next(d), (4, 1))
self.assertEqual(next(d), (1, 0))
self.assertEqual(next(d), (5, 0))
self.assertEqual(next(d), (1, 1))
self.assertEqual(next(d), (4, 0))
self.assertEqual(next(d), (3, 1))
self.assertEqual(next(d), (2, 0))
self.assertEqual(next(d), (5, 0))
self.assertEqual(next(d), (0, 1))
def testTriple(self):
d = iter(util.IntervalDifferential([2, 4, 5], 10))
for i in range(100):
self.assertEqual(next(d), (2, 0))
self.assertEqual(next(d), (2, 0))
self.assertEqual(next(d), (0, 1))
self.assertEqual(next(d), (1, 2))
self.assertEqual(next(d), (1, 0))
self.assertEqual(next(d), (2, 0))
self.assertEqual(next(d), (0, 1))
self.assertEqual(next(d), (2, 0))
self.assertEqual(next(d), (0, 2))
self.assertEqual(next(d), (2, 0))
self.assertEqual(next(d), (0, 1))
self.assertEqual(next(d), (2, 0))
self.assertEqual(next(d), (1, 2))
self.assertEqual(next(d), (1, 0))
self.assertEqual(next(d), (0, 1))
self.assertEqual(next(d), (2, 0))
self.assertEqual(next(d), (2, 0))
self.assertEqual(next(d), (0, 1))
self.assertEqual(next(d), (0, 2))
def testInsert(self):
d = iter(util.IntervalDifferential([], 10))
self.assertEqual(next(d), (10, None))
d.addInterval(3)
self.assertEqual(next(d), (3, 0))
self.assertEqual(next(d), (3, 0))
d.addInterval(6)
self.assertEqual(next(d), (3, 0))
self.assertEqual(next(d), (3, 0))
self.assertEqual(next(d), (0, 1))
self.assertEqual(next(d), (3, 0))
self.assertEqual(next(d), (3, 0))
self.assertEqual(next(d), (0, 1))
def testRemove(self):
d = iter(util.IntervalDifferential([3, 5], 10))
self.assertEqual(next(d), (3, 0))
self.assertEqual(next(d), (2, 1))
self.assertEqual(next(d), (1, 0))
d.removeInterval(3)
self.assertEqual(next(d), (4, 0))
self.assertEqual(next(d), (5, 0))
d.removeInterval(5)
self.assertEqual(next(d), (10, None))
self.assertRaises(ValueError, d.removeInterval, 10)
class Record(util.FancyEqMixin):
"""
Trivial user of L{FancyEqMixin} used by tests.
"""
compareAttributes = ('a', 'b')
def __init__(self, a, b):
self.a = a
self.b = b
class DifferentRecord(util.FancyEqMixin):
"""
Trivial user of L{FancyEqMixin} which is not related to L{Record}.
"""
compareAttributes = ('a', 'b')
def __init__(self, a, b):
self.a = a
self.b = b
class DerivedRecord(Record):
"""
A class with an inheritance relationship to L{Record}.
"""
class EqualToEverything(object):
"""
A class the instances of which consider themselves equal to everything.
"""
def __eq__(self, other):
return True
def __ne__(self, other):
return False
class EqualToNothing(object):
"""
A class the instances of which consider themselves equal to nothing.
"""
def __eq__(self, other):
return False
def __ne__(self, other):
return True
class EqualityTests(unittest.TestCase):
"""
Tests for L{FancyEqMixin}.
"""
def test_identity(self):
"""
Instances of a class which mixes in L{FancyEqMixin} but which
defines no comparison attributes compare by identity.
"""
class Empty(util.FancyEqMixin):
pass
self.assertFalse(Empty() == Empty())
self.assertTrue(Empty() != Empty())
empty = Empty()
self.assertTrue(empty == empty)
self.assertFalse(empty != empty)
def test_equality(self):
"""
Instances of a class which mixes in L{FancyEqMixin} should compare
equal if all of their attributes compare equal. They should not
compare equal if any of their attributes do not compare equal.
"""
self.assertTrue(Record(1, 2) == Record(1, 2))
self.assertFalse(Record(1, 2) == Record(1, 3))
self.assertFalse(Record(1, 2) == Record(2, 2))
self.assertFalse(Record(1, 2) == Record(3, 4))
def test_unequality(self):
"""
Inequality between instances of a particular L{record} should be
defined as the negation of equality.
"""
self.assertFalse(Record(1, 2) != Record(1, 2))
self.assertTrue(Record(1, 2) != Record(1, 3))
self.assertTrue(Record(1, 2) != Record(2, 2))
self.assertTrue(Record(1, 2) != Record(3, 4))
def test_differentClassesEquality(self):
"""
Instances of different classes which mix in L{FancyEqMixin} should not
compare equal.
"""
self.assertFalse(Record(1, 2) == DifferentRecord(1, 2))
def test_differentClassesInequality(self):
"""
Instances of different classes which mix in L{FancyEqMixin} should
compare unequal.
"""
self.assertTrue(Record(1, 2) != DifferentRecord(1, 2))
def test_inheritedClassesEquality(self):
"""
An instance of a class which derives from a class which mixes in
L{FancyEqMixin} should compare equal to an instance of the base class
if and only if all of their attributes compare equal.
"""
self.assertTrue(Record(1, 2) == DerivedRecord(1, 2))
self.assertFalse(Record(1, 2) == DerivedRecord(1, 3))
self.assertFalse(Record(1, 2) == DerivedRecord(2, 2))
self.assertFalse(Record(1, 2) == DerivedRecord(3, 4))
def test_inheritedClassesInequality(self):
"""
An instance of a class which derives from a class which mixes in
L{FancyEqMixin} should compare unequal to an instance of the base
class if any of their attributes compare unequal.
"""
self.assertFalse(Record(1, 2) != DerivedRecord(1, 2))
self.assertTrue(Record(1, 2) != DerivedRecord(1, 3))
self.assertTrue(Record(1, 2) != DerivedRecord(2, 2))
self.assertTrue(Record(1, 2) != DerivedRecord(3, 4))
def test_rightHandArgumentImplementsEquality(self):
"""
The right-hand argument to the equality operator is given a chance
to determine the result of the operation if it is of a type
unrelated to the L{FancyEqMixin}-based instance on the left-hand
side.
"""
self.assertTrue(Record(1, 2) == EqualToEverything())
self.assertFalse(Record(1, 2) == EqualToNothing())
def test_rightHandArgumentImplementsUnequality(self):
"""
The right-hand argument to the non-equality operator is given a
chance to determine the result of the operation if it is of a type
unrelated to the L{FancyEqMixin}-based instance on the left-hand
side.
"""
self.assertFalse(Record(1, 2) != EqualToEverything())
self.assertTrue(Record(1, 2) != EqualToNothing())
class RunAsEffectiveUserTests(unittest.TestCase):
"""
Test for the L{util.runAsEffectiveUser} function.
"""
if getattr(os, "geteuid", None) is None:
skip = "geteuid/seteuid not available"
def setUp(self):
self.mockos = MockOS()
self.patch(os, "geteuid", self.mockos.geteuid)
self.patch(os, "getegid", self.mockos.getegid)
self.patch(os, "seteuid", self.mockos.seteuid)
self.patch(os, "setegid", self.mockos.setegid)
def _securedFunction(self, startUID, startGID, wantUID, wantGID):
"""
Check if wanted UID/GID matched start or saved ones.
"""
self.assertTrue(wantUID == startUID or
wantUID == self.mockos.seteuidCalls[-1])
self.assertTrue(wantGID == startGID or
wantGID == self.mockos.setegidCalls[-1])
def test_forwardResult(self):
"""
L{util.runAsEffectiveUser} forwards the result obtained by calling the
given function
"""
result = util.runAsEffectiveUser(0, 0, lambda: 1)
self.assertEqual(result, 1)
def test_takeParameters(self):
"""
L{util.runAsEffectiveUser} pass the given parameters to the given
function.
"""
result = util.runAsEffectiveUser(0, 0, lambda x: 2*x, 3)
self.assertEqual(result, 6)
def test_takesKeyworkArguments(self):
"""
L{util.runAsEffectiveUser} pass the keyword parameters to the given
function.
"""
result = util.runAsEffectiveUser(0, 0, lambda x, y=1, z=1: x*y*z, 2, z=3)
self.assertEqual(result, 6)
def _testUIDGIDSwitch(self, startUID, startGID, wantUID, wantGID,
expectedUIDSwitches, expectedGIDSwitches):
"""
Helper method checking the calls to C{os.seteuid} and C{os.setegid}
made by L{util.runAsEffectiveUser}, when switching from startUID to
wantUID and from startGID to wantGID.
"""
self.mockos.euid = startUID
self.mockos.egid = startGID
util.runAsEffectiveUser(
wantUID, wantGID,
self._securedFunction, startUID, startGID, wantUID, wantGID)
self.assertEqual(self.mockos.seteuidCalls, expectedUIDSwitches)
self.assertEqual(self.mockos.setegidCalls, expectedGIDSwitches)
self.mockos.seteuidCalls = []
self.mockos.setegidCalls = []
def test_root(self):
"""
Check UID/GID switches when current effective UID is root.
"""
self._testUIDGIDSwitch(0, 0, 0, 0, [], [])
self._testUIDGIDSwitch(0, 0, 1, 0, [1, 0], [])
self._testUIDGIDSwitch(0, 0, 0, 1, [], [1, 0])
self._testUIDGIDSwitch(0, 0, 1, 1, [1, 0], [1, 0])
def test_UID(self):
"""
Check UID/GID switches when current effective UID is non-root.
"""
self._testUIDGIDSwitch(1, 0, 0, 0, [0, 1], [])
self._testUIDGIDSwitch(1, 0, 1, 0, [], [])
self._testUIDGIDSwitch(1, 0, 1, 1, [0, 1, 0, 1], [1, 0])
|
[
" self._testUIDGIDSwitch(1, 0, 2, 1, [0, 2, 0, 1], [1, 0])"
] | 2,189
|
lcc
|
python
| null |
7e20f7645ae46306c1c075059d48f64e2003534e7b1502c4
|
|
package com.entrepidea.swing.components.checkbox;
import java.awt.BorderLayout;
import java.awt.Component;
import java.awt.Graphics;
import java.awt.event.ActionListener;
import java.awt.event.ItemListener;
import java.awt.event.MouseAdapter;
import java.awt.event.MouseEvent;
import java.io.Serializable;
import javax.swing.ButtonGroup;
import javax.swing.ButtonModel;
import javax.swing.Icon;
import javax.swing.JCheckBox;
import javax.swing.JFrame;
import javax.swing.event.ChangeListener;
import javax.swing.plaf.UIResource;
import javax.swing.plaf.metal.MetalLookAndFeel;
public class TristateCheckbox extends JCheckBox {
private static class State {
String desc = "";
//"NOT_SELECTED","CHECKED", "CROSSED"
private State(){}
private State(String s){
desc = s;
}
@Override
public String toString(){
return desc;
}
}
public static final State NOT_SELECTED = new State("NOT_SELECTED");
public static final State CHECKED = new State("CHECKED");
public static final State CROSSED = new State("CROSSED");
private TristateCheckModel model = null;
public TristateCheckbox(){
this(null);
}
public TristateCheckbox(String text){
super(text);
//set properties and model
super.setIcon(new TristateIcon());
setModel((model = new TristateCheckModel(getModel())));
setState(NOT_SELECTED);
//add listeners
super.addMouseListener(new MouseAdapter(){
@Override
public void mousePressed(MouseEvent e){
TristateCheckbox.this.mousePressed();
}
@Override
public void mouseReleased(MouseEvent e){
TristateCheckbox.this.mouseReleased();
}
});
}
private void mousePressed(){
System.out.println("mouse pressed");
grabFocus();
model.setArmed(true);
model.setPressed(true);
}
private void mouseReleased(){
System.out.println("mouse released");
model.nextState();
model.setArmed(false);
model.setPressed(false);
}
public void doClick(){
mousePressed();
mouseReleased();
}
public void setState(State s){
model.setState(s);
}
public State getState(){
return model.getState();
}
public void setSelected(boolean selected) {
if (selected) {
setState(CHECKED);
} else {
setState(NOT_SELECTED);
}
}
private class TristateCheckModel implements ButtonModel{
ButtonModel model = null;
State currentState = NOT_SELECTED;
public TristateCheckModel(ButtonModel model){
this.model = model;
}
public void setState(State s){
currentState = s;
};
public State getState(){
return currentState;
}
public void nextState(){
State s = getState();
System.out.println("current state: "+s);
if(s==NOT_SELECTED){
setState(CHECKED);
}
else if(s == CHECKED){
setState(CROSSED);
}
else if(s== CROSSED){
setState(NOT_SELECTED);
}
System.out.println(getState());
model.setSelected(!model.isSelected()); //trigger the fireEvent
}
@Override
public Object[] getSelectedObjects() {
return model.getSelectedObjects();
}
@Override
public boolean isArmed() {
return model.isArmed();
}
@Override
public boolean isSelected() {
return (currentState == CHECKED || currentState == CROSSED);
}
@Override
public boolean isEnabled() {
return model.isEnabled();
}
@Override
public boolean isPressed() {
return model.isPressed();
}
@Override
public boolean isRollover() {
return model.isRollover();
}
@Override
public void setArmed(boolean b) {
model.setArmed(b);
}
@Override
public void setSelected(boolean b) {
model.setSelected(b);
}
@Override
public void setEnabled(boolean b) {
try {
setFocusable(b);
} catch (Exception ex) {
ex.printStackTrace();
}//catch
model.setEnabled(b);
}
@Override
public void setPressed(boolean b) {
model.setPressed(b);
}
@Override
public void setRollover(boolean b) {
model.setRollover(b);
}
@Override
public void setMnemonic(int key) {
model.setMnemonic(key);
}
@Override
public int getMnemonic() {
return model.getMnemonic();
}
@Override
public void setActionCommand(String s) {
model.setActionCommand(s);
}
@Override
public String getActionCommand() {
return model.getActionCommand();
}
@Override
public void setGroup(ButtonGroup group) {
model.setGroup(group);
}
@Override
public void addActionListener(ActionListener l) {
model.addActionListener(l);
}
@Override
public void removeActionListener(ActionListener l) {
model.removeActionListener(l);
}
@Override
public void addItemListener(ItemListener l) {
model.addItemListener(l);
}
@Override
public void removeItemListener(ItemListener l) {
model.removeItemListener(l);
}
@Override
public void addChangeListener(ChangeListener l) {
model.addChangeListener(l);
}
@Override
public void removeChangeListener(ChangeListener l) {
model.removeChangeListener(l);
}
}
private class TristateIcon implements Icon, UIResource, Serializable{
private static final long serialVersionUID = 1L;
protected int getControlSize() {
return 13;
}
public void paintIcon(Component c, Graphics g, int x, int y) {
JCheckBox cb = (JCheckBox)c;
TristateCheckModel model = (TristateCheckModel)cb.getModel();
boolean bDrawCross = model.getState() == CROSSED;
boolean bDrawCheck = model.getState() == CHECKED;
int controlSize = getControlSize();
if(model.isEnabled()){
if(model.isPressed() && model.isArmed()){
g.setColor(MetalLookAndFeel.getControlShadow());
g.fillRect(x, y, controlSize - 1, controlSize - 1);
|
[
"\t\t\t\t\tdrawPressed3DBorder(g, x, y, controlSize, controlSize);"
] | 518
|
lcc
|
java
| null |
971fbab8c3ab0ce2be833fead765ea9d6620fe3529e6e4f2
|
|
#!/usr/bin/env python
# ----------------------------------------------------------------------
# Numenta Platform for Intelligent Computing (NuPIC)
# Copyright (C) 2014, Numenta, Inc. Unless you have purchased from
# Numenta, Inc. a separate commercial license for this software code, the
# following terms and conditions apply:
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License version 3 as
# published by the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
# See the GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see http://www.gnu.org/licenses.
#
# http://numenta.org/licenses/
# ----------------------------------------------------------------------
import logging
import time
import unittest2 as unittest
import cPickle
import numpy
from nupic.regions.PyRegion import RealNumpyDType
from nupic.algorithms.KNNClassifier import KNNClassifier
import pca_knn_data
LOGGER = logging.getLogger(__name__)
class KNNClassifierTest(unittest.TestCase):
"""Tests for k Nearest Neighbor classifier"""
def runTestKNNClassifier(self, short = 0):
""" Test the KNN classifier in this module. short can be:
0 (short), 1 (medium), or 2 (long)
"""
failures = ""
if short != 2:
numpy.random.seed(42)
else:
seed_value = int(time.time())
# seed_value = 1276437656
#seed_value = 1277136651
numpy.random.seed(seed_value)
LOGGER.info('Seed used: %d', seed_value)
f = open('seedval', 'a')
f.write(str(seed_value))
f.write('\n')
f.close()
failures += simulateKMoreThanOne()
LOGGER.info("\nTesting KNN Classifier on dense patterns")
numPatterns, numClasses = getNumTestPatterns(short)
patterns = numpy.random.rand(numPatterns, 100)
patternDict = dict()
# Assume there are no repeated patterns -- if there are, then
# numpy.random would be completely broken.
for i in xrange(numPatterns):
randCategory = numpy.random.randint(0, numClasses-1)
patternDict[i] = dict()
patternDict[i]['pattern'] = patterns[i]
patternDict[i]['category'] = randCategory
LOGGER.info("\nTesting KNN Classifier with L2 norm")
knn = KNNClassifier(k=1)
failures += simulateClassifier(knn, patternDict, \
"KNN Classifier with L2 norm test")
LOGGER.info("\nTesting KNN Classifier with L1 norm")
knnL1 = KNNClassifier(k=1, distanceNorm=1.0)
failures += simulateClassifier(knnL1, patternDict, \
"KNN Classifier with L1 norm test")
numPatterns, numClasses = getNumTestPatterns(short)
patterns = (numpy.random.rand(numPatterns, 25) > 0.7).astype(RealNumpyDType)
patternDict = dict()
for i in patterns:
iString = str(i.tolist())
if not patternDict.has_key(iString):
randCategory = numpy.random.randint(0, numClasses-1)
patternDict[iString] = dict()
patternDict[iString]['pattern'] = i
patternDict[iString]['category'] = randCategory
LOGGER.info("\nTesting KNN on sparse patterns")
knnDense = KNNClassifier(k=1)
failures += simulateClassifier(knnDense, patternDict, \
"KNN Classifier on sparse pattern test")
self.assertEqual(len(failures), 0,
"Tests failed: \n" + failures)
if short == 2:
f = open('seedval', 'a')
f.write('Pass\n')
f.close()
def runTestPCAKNN(self, short = 0):
LOGGER.info('\nTesting PCA/k-NN classifier')
LOGGER.info('Mode=%s', short)
numDims = 10
numClasses = 10
k = 10
numPatternsPerClass = 100
numPatterns = int(.9 * numClasses * numPatternsPerClass)
numTests = numClasses * numPatternsPerClass - numPatterns
numSVDSamples = int(.1 * numPatterns)
keep = 1
train_data, train_class, test_data, test_class = \
pca_knn_data.generate(numDims, numClasses, k, numPatternsPerClass,
numPatterns, numTests, numSVDSamples, keep)
pca_knn = KNNClassifier(k=k,numSVDSamples=numSVDSamples,
numSVDDims=keep)
knn = KNNClassifier(k=k)
LOGGER.info('Training PCA k-NN')
for i in range(numPatterns):
knn.learn(train_data[i], train_class[i])
pca_knn.learn(train_data[i], train_class[i])
LOGGER.info('Testing PCA k-NN')
numWinnerFailures = 0
numInferenceFailures = 0
numDistFailures = 0
numAbsErrors = 0
for i in range(numTests):
winner, inference, dist, categoryDist = knn.infer(test_data[i])
pca_winner, pca_inference, pca_dist, pca_categoryDist \
= pca_knn.infer(test_data[i])
if winner != test_class[i]:
numAbsErrors += 1
if pca_winner != winner:
numWinnerFailures += 1
if (numpy.abs(pca_inference - inference) > 1e-4).any():
numInferenceFailures += 1
if (numpy.abs(pca_dist - dist) > 1e-4).any():
numDistFailures += 1
s0 = 100*float(numTests - numAbsErrors) / float(numTests)
s1 = 100*float(numTests - numWinnerFailures) / float(numTests)
s2 = 100*float(numTests - numInferenceFailures) / float(numTests)
s3 = 100*float(numTests - numDistFailures) / float(numTests)
LOGGER.info('PCA/k-NN success rate=%s%s', s0, '%')
LOGGER.info('Winner success=%s%s', s1, '%')
LOGGER.info('Inference success=%s%s', s2, '%')
LOGGER.info('Distance success=%s%s', s3, '%')
self.assertEqual(s1, 100.0,
"PCA/k-NN test failed")
def testKNNClassifierShort(self):
self.runTestKNNClassifier(0)
def testPCAKNNShort(self):
self.runTestPCAKNN(0)
def testKNNClassifierMedium(self):
self.runTestKNNClassifier(1)
def testPCAKNNMedium(self):
self.runTestPCAKNN(1)
def simulateKMoreThanOne():
"""A small test with k=3"""
failures = ""
LOGGER.info("Testing the sparse KNN Classifier with k=3")
knn = KNNClassifier(k=3)
v = numpy.zeros((6, 2))
v[0] = [1.0, 0.0]
v[1] = [1.0, 0.2]
v[2] = [1.0, 0.2]
v[3] = [1.0, 2.0]
v[4] = [1.0, 4.0]
v[5] = [1.0, 4.5]
knn.learn(v[0], 0)
knn.learn(v[1], 0)
knn.learn(v[2], 0)
knn.learn(v[3], 1)
knn.learn(v[4], 1)
knn.learn(v[5], 1)
winner, _inferenceResult, _dist, _categoryDist = knn.infer(v[0])
if winner != 0:
failures += "Inference failed with k=3\n"
|
[
" winner, _inferenceResult, _dist, _categoryDist = knn.infer(v[2])"
] | 685
|
lcc
|
python
| null |
397181750c46cc7bb5cfa79f1f6812770ddf55fc6dac89cd
|
|
/*
* Copyright (c) 2007, 2011, Oracle and/or its affiliates. All rights reserved.
* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
*
* This code is free software; you can redistribute it and/or modify it
* under the terms of the GNU General Public License version 2 only, as
* published by the Free Software Foundation.
*
* This code is distributed in the hope that it will be useful, but WITHOUT
* ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
* FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
* version 2 for more details (a copy is included in the LICENSE file that
* accompanied this code).
*
* You should have received a copy of the GNU General Public License version
* 2 along with this work; if not, write to the Free Software Foundation,
* Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
*
* Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
* or visit www.oracle.com if you need additional information or have any
* questions.
*/
package com.sun.max.vm.layout.ohm;
import java.lang.reflect.*;
import com.sun.max.annotate.*;
import com.sun.max.unsafe.*;
import com.sun.max.vm.actor.holder.*;
import com.sun.max.vm.layout.*;
import com.sun.max.vm.layout.Layout.HeaderField;
import com.sun.max.vm.maxsim.MaxSimInterfaceHelpers;
import com.sun.max.vm.maxsim.MaxSimPlatform;
import com.sun.max.vm.object.*;
import com.sun.max.vm.reference.*;
import com.sun.max.vm.type.*;
import com.sun.max.vm.value.*;
import com.sun.max.vm.maxsim.MaxSimTaggingScheme;
/**
*/
public class OhmArrayLayout extends OhmGeneralLayout implements ArrayLayout {
/**
* The cell offset of the word in the header containing the array length.
*/
public final int lengthOffset;
public final int headerSize;
public final Kind elementKind;
@INLINE
public final int headerSize() {
return headerSize;
}
public HeaderField[] headerFields() {
return new HeaderField[] {HeaderField.HUB, HeaderField.MISC, HeaderField.LENGTH};
}
OhmArrayLayout(Kind elementKind) {
lengthOffset = miscOffset + MaxSimInterfaceHelpers.getLayoutScaleFactor() * Word.size();
headerSize = lengthOffset + MaxSimInterfaceHelpers.getLayoutScaleFactor() * Word.size();
this.elementKind = elementKind;
}
public boolean isArrayLayout() {
return true;
}
@INLINE
public final Size getArraySize(Kind kind, int length) {
int scaleFactor = MaxSimTaggingScheme.compareUntaggedObjects(kind, Kind.REFERENCE) ?
MaxSimInterfaceHelpers.getLayoutScaleRefFactor() : MaxSimInterfaceHelpers.getLayoutScaleFactor();
return Size.fromInt(scaleFactor * kind.width.numberOfBytes).times(length).plus(headerSize).alignUp(Word.size() * MaxSimInterfaceHelpers.getLayoutScaleFactor());
}
@Override
public Offset getOffsetFromOrigin(HeaderField headerField) {
if (headerField == HeaderField.LENGTH) {
return Offset.fromInt(lengthOffset);
}
return super.getOffsetFromOrigin(headerField);
}
public int arrayLengthOffset() {
return lengthOffset;
}
@INLINE
public final int readLength(Accessor accessor) {
return accessor.readInt(lengthOffset);
}
@INLINE
public final void writeLength(Accessor accessor, int length) {
accessor.writeInt(lengthOffset, length);
}
@INLINE
public final Kind elementKind() {
return elementKind;
}
public Layout.Category category() {
return Layout.Category.ARRAY;
}
@Override
public final boolean isReferenceArrayLayout() {
final Kind rawKind = elementKind;
return rawKind.isReference;
}
@INLINE
public final int elementSize() {
return elementKind().width.numberOfBytes;
}
@INLINE
protected final int originDisplacement() {
return headerSize();
}
@INLINE
public final Offset getElementOffsetFromOrigin(int index) {
return getElementOffsetInCell(index);
}
@INLINE
public final Offset getElementOffsetInCell(int index) {
// Converting to 'Offset' before multiplication to avoid overflow:
return Offset.fromInt(index).times(elementSize()).plus(headerSize());
}
@INLINE
public final Size getArraySize(int length) {
int scaleFactor = MaxSimTaggingScheme.compareUntaggedObjects(elementKind, Kind.REFERENCE) ?
MaxSimInterfaceHelpers.getLayoutScaleRefFactor() : MaxSimInterfaceHelpers.getLayoutScaleFactor();
return getElementOffsetInCell(scaleFactor * length).aligned(MaxSimInterfaceHelpers.getLayoutScaleFactor()).asSize();
}
@INLINE
public final Size getArraySizeUnscaled(int length) {
return getElementOffsetInCell(length).aligned().asSize();
}
@INLINE
public final Size specificSize(Accessor accessor) {
return getArraySize(readLength(accessor));
}
@HOSTED_ONLY
@Override
public void visitHeader(ObjectCellVisitor visitor, Object array) {
super.visitHeader(visitor, array);
visitor.visitHeaderField(lengthOffset, "length", JavaTypeDescriptor.INT, IntValue.from(ArrayAccess.readArrayLength(array)));
}
@HOSTED_ONLY
private void visitElements(ObjectCellVisitor visitor, Object array) {
final int length = Array.getLength(array);
final Hub hub = ObjectAccess.readHub(array);
final Kind elementKind = hub.classActor.componentClassActor().kind;
if (elementKind.isReference) {
for (int i = 0; i < length; i++) {
final Object object = Array.get(array, i);
visitor.visitElement(getElementOffsetInCell(i).toInt(), i, ReferenceValue.from(object));
}
} else {
for (int i = 0; i < length; i++) {
final Object boxedJavaValue = Array.get(array, i);
final Value value = elementKind.asValue(boxedJavaValue);
visitor.visitElement(getElementOffsetInCell(i).toInt(), i, value);
}
}
}
@HOSTED_ONLY
public void visitObjectCell(Object array, ObjectCellVisitor visitor) {
visitHeader(visitor, array);
visitElements(visitor, array);
}
@HOSTED_ONLY
public Value readValue(Kind kind, ObjectMirror mirror, int offset) {
if (offset == lengthOffset) {
return IntValue.from(mirror.readArrayLength());
}
final Value value = readHeaderValue(mirror, offset);
if (value != null) {
return value;
}
assert kind.isPrimitiveOfSameSizeAs(elementKind);
final int index = (offset - headerSize()) / kind.width.numberOfBytes;
return mirror.readElement(kind, index);
}
@HOSTED_ONLY
public void writeValue(Kind kind, ObjectMirror mirror, int offset, Value value) {
assert kind.isPrimitiveOfSameSizeAs(value.kind());
if (offset == lengthOffset) {
mirror.writeArrayLength(value);
return;
}
if (writeHeaderValue(mirror, offset, value)) {
return;
}
assert kind.isPrimitiveOfSameSizeAs(elementKind);
|
[
" final int index = (offset - headerSize()) / elementSize();"
] | 681
|
lcc
|
java
| null |
0eae7d0813189dfb61921a6690103cb83cdea076b845d152
|
|
// pNAnt - A parallel .NET build tool
// Copyright (C) 2016 Nathan Daniels
// Original NAnt Copyright (C) 2001-2004 Gerry Shaw
//
// This program is free software; you can redistribute it and/or modify
// it under the terms of the GNU General Public License as published by
// the Free Software Foundation; either version 2 of the License, or
// (at your option) any later version.
//
// This program is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU General Public License for more details.
//
// You should have received a copy of the GNU General Public License
// along with this program; if not, write to the Free Software
// Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
//
// Matthew Mastracci (matt@aclaro.com)
// Scott Ford (sford@RJKTECH.com)
// Gert Driesen (drieseng@users.sourceforge.net)
using System;
using System.Collections;
using System.Collections.Specialized;
using System.Globalization;
using System.IO;
using System.Xml;
using NAnt.Core;
using NAnt.Core.Util;
namespace NAnt.VSNet {
public abstract class AssemblyReferenceBase : FileReferenceBase {
protected AssemblyReferenceBase(XmlElement xmlDefinition, ReferencesResolver referencesResolver, ProjectBase parent, GacCache gacCache) : base(xmlDefinition, referencesResolver, parent, gacCache) {
}
protected abstract bool IsPrivate {
get;
}
protected abstract bool IsPrivateSpecified {
get;
}
/// <summary>
/// Gets a value indicating whether the output file(s) of this reference
/// should be copied locally.
/// </summary>
/// <value>
/// <see langword="true" /> if the output file(s) of this reference
/// should be copied locally; otherwise, <see langword="false" />.
/// </value>
public override bool CopyLocal {
get {
if (IsPrivateSpecified) {
return IsPrivate;
} else {
// only copy local if assembly reference could be resolved,
// if not a system assembly and is not in the GAC
string assemblyFile = ResolveAssemblyReference();
return assemblyFile != null && !IsSystem &&
!GacCache.IsAssemblyInGac(assemblyFile);
}
}
}
/// <summary>
/// Gets a value indicating whether this reference represents a system
/// assembly.
/// </summary>
/// <value>
/// <see langword="true" /> if this reference represents a system
/// assembly; otherwise, <see langword="false" />.
/// </value>
protected override bool IsSystem {
get {
// if the assembly cannot be resolved, we consider it not to
// be a system assembly
string assemblyFile = ResolveAssemblyReference();
if (assemblyFile == null) {
return false;
}
// check if assembly is stored in the framework assembly
// directory
return string.Compare(Path.GetDirectoryName(assemblyFile),
SolutionTask.Project.TargetFramework.FrameworkAssemblyDirectory.FullName,
true, CultureInfo.InvariantCulture) == 0;
}
}
/// <summary>
/// Gets the path of the reference, without taking the "copy local"
/// setting into consideration.
/// </summary>
/// <param name="solutionConfiguration">The solution configuration that is built.</param>
/// <returns>
/// The output path of the reference.
/// </returns>
public override string GetPrimaryOutputFile(Configuration solutionConfiguration) {
return ResolveAssemblyReference();
}
/// <summary>
/// Gets the complete set of output files for the referenced project.
/// </summary>
/// <param name="solutionConfiguration">The solution configuration that is built.</param>
/// <param name="outputFiles">The set of output files to be updated.</param>
/// <remarks>
/// The key of the case-insensitive <see cref="Hashtable" /> is the
/// full path of the output file and the value is the path relative to
/// the output directory.
/// </remarks>
public override void GetOutputFiles(Configuration solutionConfiguration, Hashtable outputFiles) {
string assemblyFile = ResolveAssemblyReference();
if (assemblyFile != null) {
base.GetAssemblyOutputFiles(assemblyFile, outputFiles);
}
}
/// <summary>
/// Gets the complete set of assemblies that need to be referenced when
/// a project references this component.
/// </summary>
/// <param name="solutionConfiguration">The solution configuration that is built.</param>
/// <returns>
/// The complete set of assemblies that need to be referenced when a
/// project references this component.
/// </returns>
public override StringCollection GetAssemblyReferences(Configuration solutionConfiguration) {
// if we're dealing with an assembly reference, then we only
// need to reference that assembly itself as VS.NET forces users
// to add all dependent assemblies to the project itself
StringCollection assemblyReferences = new StringCollection();
// attempt to resolve assembly reference
string assemblyFile = ResolveAssemblyReference();
if (assemblyFile == null) {
Log(Level.Warning, "Assembly \"{0}\", referenced"
+ " by project \"{1}\", could not be resolved.", Name,
Parent.Name);
return assemblyReferences;
}
// ensure assembly actually exists
if (!File.Exists(assemblyFile)) {
Log(Level.Warning, "Assembly \"{0}\", referenced"
+ " by project \"{1}\", does not exist.", assemblyFile,
Parent.Name);
return assemblyReferences;
}
// add referenced assembly to list of reference assemblies
assemblyReferences.Add(assemblyFile);
return assemblyReferences;
}
/// <summary>
/// Gets the timestamp of the reference.
/// </summary>
/// <param name="solutionConfiguration">The solution configuration that is built.</param>
/// <returns>
/// The timestamp of the reference.
/// </returns>
public override DateTime GetTimestamp(Configuration solutionConfiguration) {
string assemblyFile = ResolveAssemblyReference();
if (assemblyFile == null) {
return DateTime.MaxValue;
}
return GetFileTimestamp(assemblyFile);
}
public ProjectReferenceBase CreateProjectReference(ProjectBase project) {
return project.CreateProjectReference(project, IsPrivateSpecified,
IsPrivate);
}
/// <summary>
/// Resolves an assembly reference.
/// </summary>
/// <returns>
/// The full path to the resolved assembly, or <see langword="null" />
/// if the assembly reference could not be resolved.
/// </returns>
protected abstract string ResolveAssemblyReference();
/// <summary>
/// Searches for the given file in all paths in <paramref name="folderList" />.
/// </summary>
/// <param name="folderList">The folders to search.</param>
/// <param name="fileName">The file to search for.</param>
/// <returns>
/// The path of the assembly if <paramref name="fileName" /> was found
/// in <paramref name="folderList" />; otherwise, <see langword="null" />.
/// </returns>
protected string ResolveFromFolderList(StringCollection folderList, string fileName) {
Log(Level.Debug, "Attempting to resolve \"{0}\" in AssemblyFolders...",
fileName);
foreach (string path in folderList) {
Log(Level.Debug, "Checking \"{0}\"...", path);
try {
string assemblyFile = FileUtils.CombinePaths(path, fileName);
if (File.Exists(assemblyFile)) {
Log(Level.Debug, "Assembly found in \"{0}\".", path);
return assemblyFile;
} else {
Log(Level.Debug, "Assembly not found in \"{0}\".", path);
}
} catch (Exception ex) {
Log(Level.Verbose, "Error resolving reference to \"{0}\""
+ " in directory \"{1}\".", fileName, path);
Log(Level.Debug, ex.ToString());
}
}
return null;
}
/// <summary>
/// Resolves an assembly reference in the framework assembly directory
/// of the target framework.
/// </summary>
/// <param name="fileName">The file to search for.</param>
/// <returns>
/// The full path of the assembly file if the assembly could be located
/// in the framework assembly directory; otherwise, <see langword="null" />.
/// </returns>
protected string ResolveFromFramework(string fileName) {
//string systemAssembly = FileUtils.CombinePaths(SolutionTask.Project.TargetFramework.
// FrameworkAssemblyDirectory.FullName, fileName);
string systemAssembly = SolutionTask.Project.TargetFramework.ResolveAssembly(fileName);
if (File.Exists(systemAssembly)) {
return systemAssembly;
}
return null;
}
/// <summary>
/// Resolves an assembly reference using a path relative to the project
/// directory.
/// </summary>
/// <returns>
/// The full path of the assembly, or <see langword="null" /> if
/// <paramref name="relativePath" /> is <see langword="null" /> or an
/// empty <see cref="string" />.
/// </returns>
protected string ResolveFromRelativePath(string relativePath) {
|
[
" if (!String.IsNullOrEmpty(relativePath)) {"
] | 1,081
|
lcc
|
csharp
| null |
27f6b503096cf445095535f5a908b7e00038cf4f23a5d499
|
|
"""
Utilities
"""
# Consistency
from __future__ import print_function
import copy
import getpass
import re
import readline
import sys
py_version = sys.version_info.major
if py_version == 2:
import urllib
else:
import urllib.parse as urllib
try:
import termcolor
if sys.platform == 'win32':
# Only enable termcolor on Windows if colorama is available
try:
import colorama
colorama.init()
except ImportError:
colorama = termcolor = None
except ImportError:
termcolor = None
if not sys.stdout.isatty() or '--no-color' in sys.argv:
# Prevent coloring of output with --no-color or if stdout is not a tty
termcolor = None
class UnsupportedPythonVersion(Exception):
def __init__(self, *args, **kwargs):
super(UnsupportedPythonVersion, self).__init__(*args)
log('Unsupported Python version (%s)' %
(kwargs['version'] if 'version' in kwargs else py_version),
type='fatal')
class DynamicList(list):
def __setitem__(self, i, v):
# Fill with None
self[len(self):i+1] = [None for x in range(i+1-len(self))]
super(DynamicList, self).__setitem__(i, v)
_log_color_split = re.compile('\s*[,/]?\s*')
_log_opts = re.compile('<[^>]*>')
_log_types = {
'error': 'red, bold',
'fatal': 'white, on_red, bold',
'warn': 'yellow, bold',
'ok': 'green',
'success': 'green, bold',
'info': 'blue',
'progress': 'cyan',
'bold': 'bold',
'underline': 'underline',
}
def _log_parse(*args, **kwargs):
s = ' '.join([str(x) for x in args]) + '<>'
if 'type' in kwargs and kwargs['type'] in _log_types:
s = '<' + _log_types[kwargs['type']] + '>' + s
if 'color' not in kwargs:
kwargs['color'] = True
if termcolor is not None and kwargs['color']:
parts = s.replace('\01', '').replace('<', '\01<').split('\01')
s = ''
for p in parts:
if '>' in p:
opts, text = p.split('>', 1)
if opts[1:2] == '+':
opts = opts[2:]
else:
opts = opts[1:]
s += termcolor.RESET
opts = _log_color_split.split(opts)
args, attrs = [None, None], []
for opt in opts:
opt = opt.lower()
if opt in termcolor.COLORS:
args[0] = opt
elif opt in termcolor.HIGHLIGHTS:
args[1] = opt
elif opt in termcolor.ATTRIBUTES:
attrs.append(opt)
s += termcolor.colored(text, *args, **{'attrs': attrs}).replace(termcolor.RESET, '')
else:
s += p
else:
# Remove <...> tags if termcolor isn't available
s = _log_opts.sub('', s)
return s
def log(*args, **kwargs):
print(_log_parse(*args, **kwargs))
def logf(*args, **kwargs):
sys.stdout.write(_log_parse(*args, **kwargs))
sys.stdout.flush()
_debug = ('--debug' in sys.argv)
def debug(*args, **kwargs):
if _debug:
return log(*args, **kwargs)
_input = input if py_version == 3 else raw_input
def input(prompt='', visible=True, input=''):
"""
Enhanced implementation of input (independent of Python version)
Similar to Python 2's "raw_input" and Python 3's "input"
prompt (string): The prompt to display (on the same line as the text)
visible (bool): Enables/disables echoing of input. Note that "False"
enforces a tty (i.e. it will read from the command line, not a file).
input (string): Formatting to apply to the input string (only when visible)
e.g. "red, bold" (angle brackets are not required)
"""
prompt = _log_parse(prompt)
if input and termcolor is not None:
input = input.replace('<', '').replace('>', '')
input = _log_parse('<%s>' % input).replace(termcolor.RESET, '')
try:
if not visible:
text = getpass.getpass(prompt)
else:
text = _input(prompt + input)
except:
logf('<>') # Reset terminal
raise # Allow exception to propagate
logf('<>')
return text
def get_file(prompt='File: ', exists=True, path=''):
"""
Prompt for a file
prompt: Text to display (defaults to "File: ")
exists: True if file should exist (defaults to True)
path: An initial path to use, returned if acceptable (optional)
"""
path = str(path)
while 1:
if not path:
path = input(prompt)
if exists:
try:
f = open(path)
except IOError:
pass
else:
break
else:
break
path = ''
return path
def die(*args, **kwargs):
log(*args, **kwargs)
sys.exit()
def dict_auto_filter(obj):
while True:
try:
if len(obj.keys()) > 1:
break
# list() is necessary for python 3, where keys() doesn't return
# a list that supports indexes
if isinstance(obj[list(obj.keys())[0]], dict):
obj = obj[list(obj.keys())[0]]
else:
break
except AttributeError:
# Single remaining object is not a dict
break
return obj
def dict_extend(d1, d2):
"""
Merges dictionaries 'd1' and 'd2'
For keys that exist in both, the value from d2 is used
"""
return dict(d1, **d2)
def dict_recursive_fetch_list(d, key):
"""
Returns a list of _all_ values in dict 'd' with key 'key'
Also fetches items in lists
"""
l = []
if isinstance(d, list):
for i in d:
l.extend(dict_recursive_fetch_list(i, key))
return l
for i in d:
if i == key:
l.append(d[i])
elif isinstance(d[i], (dict, list)):
l.extend(dict_recursive_fetch_list(d[i], key))
return l
def recursive_merge(d1, d2):
"""
Merges two dictionaries and their sub-dictionaries and/or lists
"""
d1, d2 = copy.copy(d1), copy.copy(d2)
result = {} if isinstance(d1, dict) or isinstance(d2, dict) else []
keys = (list(d1.keys()) if isinstance(d1, dict) else range(len(d1))) + \
(list(d2.keys()) if isinstance(d2, dict) else range(len(d2)))
# Remove duplicates
keys = list(set(keys))
if isinstance(result, dict):
# Current object is a dict
for k in keys:
if k in d1 and k in d2:
v1, v2 = d1[k], d2[k]
if v1 != v2:
if isinstance(v1, (dict, list)) and isinstance(v2, (dict, list)):
# Values can be merged
result[k] = recursive_merge(v1, v2)
else:
# Values cannot be merged, so return the value from d1
result[k] = v1
else:
# Values are equal, so merging is unnecessary
result[k] = v1
else:
# Key is either in d1 or d2
result[k] = d1[k] if k in d1 else d2[k]
else:
# Current object is a list
result = d1 + d2
return result
def str_format(string, *args, **kwargs):
"""
A slightly modified version of the native str.format(), using {% and %}
instead of { and }
>>> str_format('{a}', a=2)
{a}
>>> str_format('{%a%}', a=2)
2
>>> str_format('{% a %}', a=2)
2
"""
# Accept whitespace directly inside {% ... %} tags
string = re.compile(r'\{%\s+').sub('{%', string)
string = re.compile(r'\s+%\}').sub('%}', string)
string = string.replace('{','{{').replace('}','}}') \
.replace('{{%', '{').replace('%}}','}')
|
[
" return string.format(*args, **kwargs)"
] | 884
|
lcc
|
python
| null |
6b8b92b2b253189141dc998a845e117ddb34a9ced10cda39
|
|
package org.netlib.lapack;
import org.netlib.blas.Dcopy;
import org.netlib.err.Xerbla;
import org.netlib.util.doubleW;
import org.netlib.util.intW;
public final class Dlasda
{
public static void dlasda(int paramInt1, int paramInt2, int paramInt3, int paramInt4, double[] paramArrayOfDouble1, int paramInt5, double[] paramArrayOfDouble2, int paramInt6, double[] paramArrayOfDouble3, int paramInt7, int paramInt8, double[] paramArrayOfDouble4, int paramInt9, int[] paramArrayOfInt1, int paramInt10, double[] paramArrayOfDouble5, int paramInt11, double[] paramArrayOfDouble6, int paramInt12, double[] paramArrayOfDouble7, int paramInt13, double[] paramArrayOfDouble8, int paramInt14, int[] paramArrayOfInt2, int paramInt15, int[] paramArrayOfInt3, int paramInt16, int paramInt17, int[] paramArrayOfInt4, int paramInt18, double[] paramArrayOfDouble9, int paramInt19, double[] paramArrayOfDouble10, int paramInt20, double[] paramArrayOfDouble11, int paramInt21, double[] paramArrayOfDouble12, int paramInt22, int[] paramArrayOfInt5, int paramInt23, intW paramintW)
{
int i = 0;
int j = 0;
int k = 0;
int m = 0;
int n = 0;
int i1 = 0;
int i2 = 0;
int i3 = 0;
int i4 = 0;
int i5 = 0;
int i6 = 0;
int i7 = 0;
int i8 = 0;
int i9 = 0;
int i10 = 0;
int i11 = 0;
intW localintW1 = new intW(0);
int i12 = 0;
int i13 = 0;
int i14 = 0;
int i15 = 0;
int i16 = 0;
int i17 = 0;
intW localintW2 = new intW(0);
int i18 = 0;
int i19 = 0;
int i20 = 0;
int i21 = 0;
int i22 = 0;
int i23 = 0;
int i24 = 0;
int i25 = 0;
int i26 = 0;
int i27 = 0;
int i28 = 0;
int i29 = 0;
doubleW localdoubleW1 = new doubleW(0.0D);
doubleW localdoubleW2 = new doubleW(0.0D);
paramintW.val = 0;
if ((paramInt1 >= 0 ? 0 : 1) == 0) {}
if (((paramInt1 <= 1 ? 0 : 1) == 0 ? 0 : 1) != 0)
{
paramintW.val = -1;
}
else if ((paramInt2 >= 3 ? 0 : 1) != 0)
{
paramintW.val = -2;
}
else if ((paramInt3 >= 0 ? 0 : 1) != 0)
{
paramintW.val = -3;
}
else
{
if ((paramInt4 >= 0 ? 0 : 1) == 0) {}
if (((paramInt4 <= 1 ? 0 : 1) == 0 ? 0 : 1) != 0) {
paramintW.val = -4;
} else if ((paramInt8 >= paramInt3 + paramInt4 ? 0 : 1) != 0) {
paramintW.val = -8;
} else if ((paramInt17 >= paramInt3 ? 0 : 1) != 0) {
paramintW.val = -17;
}
}
if ((paramintW.val == 0 ? 0 : 1) != 0)
{
Xerbla.xerbla("DLASDA", -paramintW.val);
return;
}
i10 = paramInt3 + paramInt4;
if ((paramInt3 > paramInt2 ? 0 : 1) != 0)
{
if ((paramInt1 != 0 ? 0 : 1) != 0) {
Dlasdq.dlasdq("U", paramInt4, paramInt3, 0, 0, 0, paramArrayOfDouble1, paramInt5, paramArrayOfDouble2, paramInt6, paramArrayOfDouble4, paramInt9, paramInt8, paramArrayOfDouble3, paramInt7, paramInt8, paramArrayOfDouble3, paramInt7, paramInt8, paramArrayOfDouble12, paramInt22, paramintW);
} else {
Dlasdq.dlasdq("U", paramInt4, paramInt3, i10, paramInt3, 0, paramArrayOfDouble1, paramInt5, paramArrayOfDouble2, paramInt6, paramArrayOfDouble4, paramInt9, paramInt8, paramArrayOfDouble3, paramInt7, paramInt8, paramArrayOfDouble3, paramInt7, paramInt8, paramArrayOfDouble12, paramInt22, paramintW);
}
return;
}
i2 = 1;
i13 = i2 + paramInt3;
i14 = i13 + paramInt3;
m = i14 + paramInt3;
i4 = m + paramInt3;
i11 = 0;
i21 = 0;
i24 = paramInt2 + 1;
i26 = 1;
i28 = i26 + i10;
i22 = i28 + i10;
i23 = i22 + i24 * i24;
Dlasdt.dlasdt(paramInt3, localintW2, localintW1, paramArrayOfInt5, i2 - 1 + paramInt23, paramArrayOfInt5, i13 - 1 + paramInt23, paramArrayOfInt5, i14 - 1 + paramInt23, paramInt2);
i12 = (localintW1.val + 1) / 2;
i = i12;
int i31;
for (int i30 = localintW1.val - i12 + 1; i30 > 0; i30--)
{
j = i - 1;
k = paramArrayOfInt5[(i2 + j - 1 + paramInt23)];
i15 = paramArrayOfInt5[(i13 + j - 1 + paramInt23)];
i17 = i15 + 1;
i18 = paramArrayOfInt5[(i14 + j - 1 + paramInt23)];
i16 = k - i15;
i19 = k + 1;
n = m + i16 - 2;
i27 = i26 + i16 - 1;
i29 = i28 + i16 - 1;
i25 = 1;
if ((paramInt1 != 0 ? 0 : 1) != 0)
{
Dlaset.dlaset("A", i17, i17, 0.0D, 1.0D, paramArrayOfDouble12, i22 - 1 + paramInt22, i24);
Dlasdq.dlasdq("U", i25, i15, i17, i21, i11, paramArrayOfDouble1, i16 - 1 + paramInt5, paramArrayOfDouble2, i16 - 1 + paramInt6, paramArrayOfDouble12, i22 - 1 + paramInt22, i24, paramArrayOfDouble12, i23 - 1 + paramInt22, i15, paramArrayOfDouble12, i23 - 1 + paramInt22, i15, paramArrayOfDouble12, i23 - 1 + paramInt22, paramintW);
i3 = i22 + i15 * i24;
Dcopy.dcopy(i17, paramArrayOfDouble12, i22 - 1 + paramInt22, 1, paramArrayOfDouble12, i27 - 1 + paramInt22, 1);
Dcopy.dcopy(i17, paramArrayOfDouble12, i3 - 1 + paramInt22, 1, paramArrayOfDouble12, i29 - 1 + paramInt22, 1);
}
else
{
Dlaset.dlaset("A", i15, i15, 0.0D, 1.0D, paramArrayOfDouble3, i16 - 1 + (1 - 1) * paramInt8 + paramInt7, paramInt8);
Dlaset.dlaset("A", i17, i17, 0.0D, 1.0D, paramArrayOfDouble4, i16 - 1 + (1 - 1) * paramInt8 + paramInt9, paramInt8);
Dlasdq.dlasdq("U", i25, i15, i17, i15, i11, paramArrayOfDouble1, i16 - 1 + paramInt5, paramArrayOfDouble2, i16 - 1 + paramInt6, paramArrayOfDouble4, i16 - 1 + (1 - 1) * paramInt8 + paramInt9, paramInt8, paramArrayOfDouble3, i16 - 1 + (1 - 1) * paramInt8 + paramInt7, paramInt8, paramArrayOfDouble3, i16 - 1 + (1 - 1) * paramInt8 + paramInt7, paramInt8, paramArrayOfDouble12, i22 - 1 + paramInt22, paramintW);
Dcopy.dcopy(i17, paramArrayOfDouble4, i16 - 1 + (1 - 1) * paramInt8 + paramInt9, 1, paramArrayOfDouble12, i27 - 1 + paramInt22, 1);
Dcopy.dcopy(i17, paramArrayOfDouble4, i16 - 1 + (i17 - 1) * paramInt8 + paramInt9, 1, paramArrayOfDouble12, i29 - 1 + paramInt22, 1);
}
if ((paramintW.val == 0 ? 0 : 1) != 0) {
return;
}
i5 = 1;
for (i31 = i15 - 1 + 1; i31 > 0; i31--)
{
paramArrayOfInt5[(n + i5 - 1 + paramInt23)] = i5;
i5 += 1;
}
if ((i != localintW1.val ? 0 : 1) != 0) {}
if (((paramInt4 != 0 ? 0 : 1) != 0 ? 1 : 0) != 0) {
i25 = 0;
} else {
i25 = 1;
}
n += i17;
i27 += i17;
i29 += i17;
i20 = i18 + i25;
if ((paramInt1 != 0 ? 0 : 1) != 0)
{
Dlaset.dlaset("A", i20, i20, 0.0D, 1.0D, paramArrayOfDouble12, i22 - 1 + paramInt22, i24);
Dlasdq.dlasdq("U", i25, i18, i20, i21, i11, paramArrayOfDouble1, i19 - 1 + paramInt5, paramArrayOfDouble2, i19 - 1 + paramInt6, paramArrayOfDouble12, i22 - 1 + paramInt22, i24, paramArrayOfDouble12, i23 - 1 + paramInt22, i18, paramArrayOfDouble12, i23 - 1 + paramInt22, i18, paramArrayOfDouble12, i23 - 1 + paramInt22, paramintW);
i3 = i22 + (i20 - 1) * i24;
Dcopy.dcopy(i20, paramArrayOfDouble12, i22 - 1 + paramInt22, 1, paramArrayOfDouble12, i27 - 1 + paramInt22, 1);
Dcopy.dcopy(i20, paramArrayOfDouble12, i3 - 1 + paramInt22, 1, paramArrayOfDouble12, i29 - 1 + paramInt22, 1);
}
else
{
Dlaset.dlaset("A", i18, i18, 0.0D, 1.0D, paramArrayOfDouble3, i19 - 1 + (1 - 1) * paramInt8 + paramInt7, paramInt8);
Dlaset.dlaset("A", i20, i20, 0.0D, 1.0D, paramArrayOfDouble4, i19 - 1 + (1 - 1) * paramInt8 + paramInt9, paramInt8);
Dlasdq.dlasdq("U", i25, i18, i20, i18, i11, paramArrayOfDouble1, i19 - 1 + paramInt5, paramArrayOfDouble2, i19 - 1 + paramInt6, paramArrayOfDouble4, i19 - 1 + (1 - 1) * paramInt8 + paramInt9, paramInt8, paramArrayOfDouble3, i19 - 1 + (1 - 1) * paramInt8 + paramInt7, paramInt8, paramArrayOfDouble3, i19 - 1 + (1 - 1) * paramInt8 + paramInt7, paramInt8, paramArrayOfDouble12, i22 - 1 + paramInt22, paramintW);
Dcopy.dcopy(i20, paramArrayOfDouble4, i19 - 1 + (1 - 1) * paramInt8 + paramInt9, 1, paramArrayOfDouble12, i27 - 1 + paramInt22, 1);
Dcopy.dcopy(i20, paramArrayOfDouble4, i19 - 1 + (i20 - 1) * paramInt8 + paramInt9, 1, paramArrayOfDouble12, i29 - 1 + paramInt22, 1);
}
if ((paramintW.val == 0 ? 0 : 1) != 0) {
return;
}
i5 = 1;
for (i31 = i18 - 1 + 1; i31 > 0; i31--)
{
paramArrayOfInt5[(n + i5 - 1 + paramInt23)] = i5;
i5 += 1;
}
i += 1;
}
i5 = (int)Math.pow(2, localintW2.val);
i8 = localintW2.val;
for (int i30 = (1 - localintW2.val + -1) / -1; i30 > 0; i30--)
{
i9 = i8 * 2 - 1;
if ((i8 != 1 ? 0 : 1) != 0)
{
i6 = 1;
i7 = 1;
}
else
{
i6 = (int)Math.pow(2, i8 - 1);
i7 = 2 * i6 - 1;
}
i = i6;
for (i31 = i7 - i6 + 1; i31 > 0; i31--)
{
i1 = i - 1;
k = paramArrayOfInt5[(i2 + i1 - 1 + paramInt23)];
i15 = paramArrayOfInt5[(i13 + i1 - 1 + paramInt23)];
i18 = paramArrayOfInt5[(i14 + i1 - 1 + paramInt23)];
i16 = k - i15;
i19 = k + 1;
|
[
" if ((i != i7 ? 0 : 1) != 0) {"
] | 1,437
|
lcc
|
java
| null |
2249fb5b605f86689474ffb501216d8475418e32ce63adab
|
|
using UnityEngine;
using System.Collections;
using System.Collections.Generic;
namespace Mixamo {
public interface TransitionHandler {
string[] KeyControls();
bool CanTransitionTo( string guard , string source , string destination );
}
/// <summary>
/// A class for holding all the control parameters (weights that control blend trees) in the graph.
/// This also contains a paramter "state_speed" to control the playback speed of a state.
/// </summary>
public class ControlParameters {
Dictionary< string , float> dict = new Dictionary<string, float>();
public ControlParameters(string[] opts,float[] vals) {
if( opts != null && vals != null ) {
for( int i=0 ;i < opts.Length;i++ ) {
dict.Add( opts[i] , vals[i] );
}
}
}
public float this[string s]
{
get { return dict[s]; }
set {
dict[s] = value;
}
}
/// <summary>
/// Returns a list of all control paramters
/// </summary>
public string[] Names {
get {
string[] arr = new string[dict.Keys.Count];
dict.Keys.CopyTo( arr , 0 );
return arr;
}
}
}
/// <summary>
/// Base class that describes a Transition. All other transitions must override this class.
/// </summary>
public abstract class Transition {
public bool WaitTillEnd = false;
protected State destination;
private State _start_destination = null;
protected string[] guards;
protected State start_destination {
get {
return _start_destination;
}
set {
_start_destination = value;
}
}
/// <summary>
/// The state where the transition want's to go to.
/// </summary>
public State Destination {
get {
return this.destination;
}
}
protected State source;
protected bool finished = true;
/// <summary>
/// Initiate the transition. Called when a transition is started from ChangeState().
/// </summary>
/// <param name="dest">
/// A <see cref="State"/>
/// </param>
public virtual void Start(State dest) {
finished = false;
this.destination = dest;
if( !this.destination.IsLooping ) {
this.destination.ResetTime( 0f );
}
}
/// <summary>
/// Called when the state is changed and the transition is finished.
/// </summary>
public virtual void Finish(){
}
/// <summary>
/// Returns whether or the transition can be taken to the destination state.
/// </summary>
/// <param name="dest">
/// A <see cref="State"/>
/// </param>
/// <returns>
/// A <see cref="System.Boolean"/>
/// </returns>
public virtual bool CanBeMade( State dest) {
if( guards != null ) {
foreach( string g in guards ) {
if( source.layer.graph.TransitionHandler != null && !source.layer.graph.TransitionHandler.CanTransitionTo( g , source.name , dest.name ) ) {
return false;
}
}
}
if( start_destination == null ) {
return true;
} else {
return( dest == start_destination );
}
}
/// <summary>
/// Compute the weight of the Source state and the Destination state based on the remaining weight.
/// </summary>
/// <param name="remaining_weight">
/// A <see cref="System.Single"/>
/// </param>
public abstract void UpdateGraph( float remaining_weight );
/// <summary>
/// Returns whether or not the transition has completed. Tells the state machine to set the Current State = Destination.
/// </summary>
/// <returns>
/// A <see cref="System.Boolean"/>
/// </returns>
public bool IsDone() {
return finished;
}
public override string ToString ()
{
return string.Format ("[Transition: Source={0} Destination={1} Type={2}]", source.name , (start_destination == null ? "*" : start_destination.name) , this.GetType().ToString());
}
}
/// <summary>
/// Transitions between two states by crossfading between them over a duration. This means immeditately playing the next state with weight 0 and fading it in to weight 1, while
/// fading the current state to weight 0.
/// </summary>
public class CrossfadeTransition : Transition {
private float t_weight = 0f;
public CrossfadeTransition( State source , State destination , float duration , string[] guards ) {
this.source = source;
this.start_destination = destination;
this.duration = duration;
this.guards = guards;
}
public override void Start(State dest) {
base.Start(dest);
t_weight = 0f;
destination.ResetTime(0f);
}
private float duration = 0f;
public override void UpdateGraph (float remaining_weight)
{
if( WaitTillEnd && (source.MaxTime < (source.MaxTimeLength - duration) ) ) {
source.UpdateGraph( remaining_weight );
destination.UpdateGraph( 0f );
destination.ResetTime(0f);
} else {
t_weight = Mixamo.Util.CrossFadeUp( t_weight , this.duration );
destination.UpdateGraph( t_weight * remaining_weight );
source.UpdateGraph( (1-t_weight) * remaining_weight );
if( t_weight >= 1f ) {
finished = true;
}
}
}
public override void Finish ()
{
}
}
/// <summary>
/// Transitions between two states by playing an inbetween clip.
/// This transitions crossfades the current state with the inbetween clip in duration_in seconds and then crossfades the inbetween clip with the destination state in duration_out seconds.
/// If duration_in == 0f then we wait till the source state actually finishes playing before transitioning.
/// </summary>
public class ClipTransition : Transition {
public Clip clip;
public float duration_in = 0f;
public float duration_out = 0f;
private float t_weight_start = 0f;
private float t_weight_end = 0f;
public ClipTransition( Clip c , State source , State dest , float dur_in , float dur_out , string[] guards ) {
clip = c;
clip.anim_state.wrapMode = WrapMode.ClampForever;
clip.anim_state.enabled = true;
this.source = source;
this.start_destination = dest;
duration_in = dur_in;
if( duration_in == 0f ) {
WaitTillEnd = true;
}
duration_out = dur_out;
this.guards = guards;
}
public override void Start ( State dest)
{
base.Start(dest);
t_weight_start = 0f;
t_weight_end = 0f;
clip.ResetTime(0f);
dest.ResetTime(0f);
if( duration_in == 0f ) {
source.SetCurrentWrapMode( MixamoWrapMode.ClampForever );
}
}
public override void UpdateGraph (float remaining_weight)
{
if( WaitTillEnd && (source.NormalizedTime < 1f ) ) {
source.UpdateGraph( remaining_weight );
clip.UpdateGraph(0);
destination.UpdateGraph( 0f );
destination.ResetTime(0f);
clip.ResetTime(0f);
} else if( clip.anim_state.time < this.duration_in ) {
// fade in
t_weight_start = Mixamo.Util.CrossFadeUp( t_weight_start , this.duration_in );
source.UpdateGraph( (1-t_weight_start) * remaining_weight );
clip.UpdateGraph( (t_weight_start) * remaining_weight );
destination.UpdateGraph( 0f );
destination.ResetTime(0f);
} else if( clip.anim_state.time > (clip.anim_state.length - this.duration_out ) && clip.anim_state.time < clip.anim_state.length ) {
// fade out
t_weight_end = Mixamo.Util.CrossFadeUp( t_weight_end , this.duration_out );
source.UpdateGraph( 0f );
clip.UpdateGraph( ( 1-t_weight_end) * remaining_weight );
destination.UpdateGraph( t_weight_end * remaining_weight );
} else if( clip.anim_state.time < clip.anim_state.length ) {
// play normally
clip.UpdateGraph( remaining_weight );
source.UpdateGraph( 0f );
destination.UpdateGraph( 0f );
destination.ResetTime(0f);
} else {
// end
destination.UpdateGraph( remaining_weight );
source.UpdateGraph( 0f );
clip.UpdateGraph( 0f );
source.ResetWrapMode();
finished = true;
}
}
}
/// <summary>
/// A layer of the animation graph. Each layer can be in one state at a time. Weighting is computed by calculating a normalized weight (weights summing to 1) for each layer and then using
/// Unity's weight calculation to average each layer depending on it's layer priority and any MixingTransforms on the individual bone.
/// </summary>
public class Layer {
public string name;
public State[] states;
public AnimationGraph graph;
public int priority;
public Transition CreateDefaultTransition( State source ) {
// default transition is to crossfade to every state
Transition t = new CrossfadeTransition( source , null , 0.1f , new string[0] {} );
return t;
}
State _current_state;
Transition _current_transition = null;
public Layer() {
}
public void Init() {
_current_state = states[0];
_desired_state = _current_state;
}
public State GetCurrentState() {
return _current_state;
}
public State GetCurrentDestinationState() {
if( _current_transition != null ) {
return _current_transition.Destination;
} else {
return null;
}
}
private State _desired_state;
public bool ChangeState( string name ) {
State next = this.GetStateByName( name );
if( next != null ) {
// save this state, in case you need to transition to it immediately after a non looping state
_desired_state = next;
}
if( _current_transition != null ) {
// you can't change state if you're in a transition
return false;
} else if( next == null ) {
Debug.LogError( "Could not find the state: " + name.ToString() );
return false;
} else if( next != _current_state ) {
// find a transition to the next state and make it if possible
Transition t = _current_state.GetTransitionTo( next );
if( t != null ) {
_current_transition = t;
_current_transition.Start( next );
return true;
} else {
return false;
}
} else {
return true;
}
}
public override string ToString ()
{
string str = ( "Layer: " + name + "\n" );
str += "States: \n";
foreach( State s in states ) {
|
[
"\t\t\t\tstr += s.ToString() + \"\\n\";"
] | 1,300
|
lcc
|
csharp
| null |
a1c2bff96d6d3cb3e23f9ea2a0029d635c3ebc32eb87a05c
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.