context
stringlengths
2.52k
185k
gt
stringclasses
1 value
using System.Collections.Generic;

namespace Plugin.Iconize.Fonts
{
    /// <summary>
    /// Defines the <see cref="WeatherIconsCollection" /> icon collection:
    /// the complete Weather Icons font, registered as CSS-style class names
    /// (e.g. "wi-day-sunny") mapped to private-use-area glyph codepoints.
    /// </summary>
    public static class WeatherIconsCollection
    {
        /// <summary>
        /// Gets the icons.
        /// </summary>
        /// <value>
        /// The icons; populated once by the static constructor below.
        /// </value>
        public static IList<IIcon> Icons { get; } = new List<IIcon>();

        /// <summary>
        /// Initializes the <see cref="WeatherIconsCollection" /> class by
        /// registering every icon name/codepoint pair of the font.
        /// </summary>
        /// <remarks>
        /// NOTE(review): <c>Icons.Add(string, char)</c> is presumably an
        /// extension method defined elsewhere in this project that wraps the
        /// pair in an <see cref="IIcon" /> instance — confirm against the
        /// sibling collection classes. Several names intentionally map to the
        /// same codepoint: the numeric/vendor groups (yahoo, wmo4680, owm,
        /// forecast-io, wu, moon-N) are aliases onto the base glyphs.
        /// </remarks>
        static WeatherIconsCollection()
        {
            // Day icons.
            Icons.Add("wi-day-sunny", '\uf00d');
            Icons.Add("wi-day-cloudy", '\uf002');
            Icons.Add("wi-day-cloudy-gusts", '\uf000');
            Icons.Add("wi-day-cloudy-windy", '\uf001');
            Icons.Add("wi-day-fog", '\uf003');
            Icons.Add("wi-day-hail", '\uf004');
            Icons.Add("wi-day-haze", '\uf0b6');
            Icons.Add("wi-day-lightning", '\uf005');
            Icons.Add("wi-day-rain", '\uf008');
            Icons.Add("wi-day-rain-mix", '\uf006');
            Icons.Add("wi-day-rain-wind", '\uf007');
            Icons.Add("wi-day-showers", '\uf009');
            Icons.Add("wi-day-sleet", '\uf0b2');
            Icons.Add("wi-day-sleet-storm", '\uf068');
            Icons.Add("wi-day-snow", '\uf00a');
            Icons.Add("wi-day-snow-thunderstorm", '\uf06b');
            Icons.Add("wi-day-snow-wind", '\uf065');
            Icons.Add("wi-day-sprinkle", '\uf00b');
            Icons.Add("wi-day-storm-showers", '\uf00e');
            Icons.Add("wi-day-sunny-overcast", '\uf00c');
            Icons.Add("wi-day-thunderstorm", '\uf010');
            Icons.Add("wi-day-windy", '\uf085');
            Icons.Add("wi-solar-eclipse", '\uf06e');
            Icons.Add("wi-hot", '\uf072');
            Icons.Add("wi-day-cloudy-high", '\uf07d');
            Icons.Add("wi-day-light-wind", '\uf0c4');
            // Night and night-alt (moon-behind-cloud) icons.
            Icons.Add("wi-night-clear", '\uf02e');
            Icons.Add("wi-night-alt-cloudy", '\uf086');
            Icons.Add("wi-night-alt-cloudy-gusts", '\uf022');
            Icons.Add("wi-night-alt-cloudy-windy", '\uf023');
            Icons.Add("wi-night-alt-hail", '\uf024');
            Icons.Add("wi-night-alt-lightning", '\uf025');
            Icons.Add("wi-night-alt-rain", '\uf028');
            Icons.Add("wi-night-alt-rain-mix", '\uf026');
            Icons.Add("wi-night-alt-rain-wind", '\uf027');
            Icons.Add("wi-night-alt-showers", '\uf029');
            Icons.Add("wi-night-alt-sleet", '\uf0b4');
            Icons.Add("wi-night-alt-sleet-storm", '\uf06a');
            Icons.Add("wi-night-alt-snow", '\uf02a');
            Icons.Add("wi-night-alt-snow-thunderstorm", '\uf06d');
            Icons.Add("wi-night-alt-snow-wind", '\uf067');
            Icons.Add("wi-night-alt-sprinkle", '\uf02b');
            Icons.Add("wi-night-alt-storm-showers", '\uf02c');
            Icons.Add("wi-night-alt-thunderstorm", '\uf02d');
            Icons.Add("wi-night-cloudy", '\uf031');
            Icons.Add("wi-night-cloudy-gusts", '\uf02f');
            Icons.Add("wi-night-cloudy-windy", '\uf030');
            Icons.Add("wi-night-fog", '\uf04a');
            Icons.Add("wi-night-hail", '\uf032');
            Icons.Add("wi-night-lightning", '\uf033');
            Icons.Add("wi-night-partly-cloudy", '\uf083');
            Icons.Add("wi-night-rain", '\uf036');
            Icons.Add("wi-night-rain-mix", '\uf034');
            Icons.Add("wi-night-rain-wind", '\uf035');
            Icons.Add("wi-night-showers", '\uf037');
            Icons.Add("wi-night-sleet", '\uf0b3');
            Icons.Add("wi-night-sleet-storm", '\uf069');
            Icons.Add("wi-night-snow", '\uf038');
            Icons.Add("wi-night-snow-thunderstorm", '\uf06c');
            Icons.Add("wi-night-snow-wind", '\uf066');
            Icons.Add("wi-night-sprinkle", '\uf039');
            Icons.Add("wi-night-storm-showers", '\uf03a');
            Icons.Add("wi-night-thunderstorm", '\uf03b');
            Icons.Add("wi-lunar-eclipse", '\uf070');
            Icons.Add("wi-stars", '\uf077');
            Icons.Add("wi-storm-showers", '\uf01d');
            Icons.Add("wi-thunderstorm", '\uf01e');
            Icons.Add("wi-night-alt-cloudy-high", '\uf07e');
            Icons.Add("wi-night-cloudy-high", '\uf080');
            Icons.Add("wi-night-alt-partly-cloudy", '\uf081');
            // Neutral (time-of-day-independent) weather icons.
            Icons.Add("wi-cloud", '\uf041');
            Icons.Add("wi-cloudy", '\uf013');
            Icons.Add("wi-cloudy-gusts", '\uf011');
            Icons.Add("wi-cloudy-windy", '\uf012');
            Icons.Add("wi-fog", '\uf014');
            Icons.Add("wi-hail", '\uf015');
            Icons.Add("wi-rain", '\uf019');
            Icons.Add("wi-rain-mix", '\uf017');
            Icons.Add("wi-rain-wind", '\uf018');
            Icons.Add("wi-showers", '\uf01a');
            Icons.Add("wi-sleet", '\uf0b5');
            Icons.Add("wi-snow", '\uf01b');
            Icons.Add("wi-sprinkle", '\uf01c');
            Icons.Add("wi-snow-wind", '\uf064');
            Icons.Add("wi-smog", '\uf074');
            Icons.Add("wi-smoke", '\uf062');
            Icons.Add("wi-lightning", '\uf016');
            Icons.Add("wi-raindrops", '\uf04e');
            Icons.Add("wi-raindrop", '\uf078');
            Icons.Add("wi-dust", '\uf063');
            Icons.Add("wi-snowflake-cold", '\uf076');
            Icons.Add("wi-windy", '\uf021');
            Icons.Add("wi-strong-wind", '\uf050');
            Icons.Add("wi-sandstorm", '\uf082');
            // Natural hazards and marine warnings.
            Icons.Add("wi-earthquake", '\uf0c6');
            Icons.Add("wi-fire", '\uf0c7');
            Icons.Add("wi-flood", '\uf07c');
            Icons.Add("wi-meteor", '\uf071');
            Icons.Add("wi-tsunami", '\uf0c5');
            Icons.Add("wi-volcano", '\uf0c8');
            Icons.Add("wi-hurricane", '\uf073');
            Icons.Add("wi-tornado", '\uf056');
            Icons.Add("wi-small-craft-advisory", '\uf0cc');
            Icons.Add("wi-gale-warning", '\uf0cd');
            Icons.Add("wi-storm-warning", '\uf0ce');
            Icons.Add("wi-hurricane-warning", '\uf0cf');
            Icons.Add("wi-wind-direction", '\uf0b1');
            // Temperature, measurement and miscellaneous symbols.
            Icons.Add("wi-alien", '\uf075');
            Icons.Add("wi-celsius", '\uf03c');
            Icons.Add("wi-fahrenheit", '\uf045');
            Icons.Add("wi-degrees", '\uf042');
            Icons.Add("wi-thermometer", '\uf055');
            Icons.Add("wi-thermometer-exterior", '\uf053');
            Icons.Add("wi-thermometer-internal", '\uf054');
            Icons.Add("wi-cloud-down", '\uf03d');
            Icons.Add("wi-cloud-up", '\uf040');
            Icons.Add("wi-cloud-refresh", '\uf03e');
            Icons.Add("wi-horizon", '\uf047');
            Icons.Add("wi-horizon-alt", '\uf046');
            Icons.Add("wi-sunrise", '\uf051');
            Icons.Add("wi-sunset", '\uf052');
            Icons.Add("wi-moonrise", '\uf0c9');
            Icons.Add("wi-moonset", '\uf0ca');
            Icons.Add("wi-refresh", '\uf04c');
            Icons.Add("wi-refresh-alt", '\uf04b');
            Icons.Add("wi-umbrella", '\uf084');
            Icons.Add("wi-barometer", '\uf079');
            Icons.Add("wi-humidity", '\uf07a');
            Icons.Add("wi-na", '\uf07b');
            Icons.Add("wi-train", '\uf0cb');
            // Moon phases (named), new through waning crescent.
            Icons.Add("wi-moon-new", '\uf095');
            Icons.Add("wi-moon-waxing-crescent-1", '\uf096');
            Icons.Add("wi-moon-waxing-crescent-2", '\uf097');
            Icons.Add("wi-moon-waxing-crescent-3", '\uf098');
            Icons.Add("wi-moon-waxing-crescent-4", '\uf099');
            Icons.Add("wi-moon-waxing-crescent-5", '\uf09a');
            Icons.Add("wi-moon-waxing-crescent-6", '\uf09b');
            Icons.Add("wi-moon-first-quarter", '\uf09c');
            Icons.Add("wi-moon-waxing-gibbous-1", '\uf09d');
            Icons.Add("wi-moon-waxing-gibbous-2", '\uf09e');
            Icons.Add("wi-moon-waxing-gibbous-3", '\uf09f');
            Icons.Add("wi-moon-waxing-gibbous-4", '\uf0a0');
            Icons.Add("wi-moon-waxing-gibbous-5", '\uf0a1');
            Icons.Add("wi-moon-waxing-gibbous-6", '\uf0a2');
            Icons.Add("wi-moon-full", '\uf0a3');
            Icons.Add("wi-moon-waning-gibbous-1", '\uf0a4');
            Icons.Add("wi-moon-waning-gibbous-2", '\uf0a5');
            Icons.Add("wi-moon-waning-gibbous-3", '\uf0a6');
            Icons.Add("wi-moon-waning-gibbous-4", '\uf0a7');
            Icons.Add("wi-moon-waning-gibbous-5", '\uf0a8');
            Icons.Add("wi-moon-waning-gibbous-6", '\uf0a9');
            Icons.Add("wi-moon-third-quarter", '\uf0aa');
            Icons.Add("wi-moon-waning-crescent-1", '\uf0ab');
            Icons.Add("wi-moon-waning-crescent-2", '\uf0ac');
            Icons.Add("wi-moon-waning-crescent-3", '\uf0ad');
            Icons.Add("wi-moon-waning-crescent-4", '\uf0ae');
            Icons.Add("wi-moon-waning-crescent-5", '\uf0af');
            Icons.Add("wi-moon-waning-crescent-6", '\uf0b0');
            // Moon phases, "alt" (outlined) variants.
            Icons.Add("wi-moon-alt-new", '\uf0eb');
            Icons.Add("wi-moon-alt-waxing-crescent-1", '\uf0d0');
            Icons.Add("wi-moon-alt-waxing-crescent-2", '\uf0d1');
            Icons.Add("wi-moon-alt-waxing-crescent-3", '\uf0d2');
            Icons.Add("wi-moon-alt-waxing-crescent-4", '\uf0d3');
            Icons.Add("wi-moon-alt-waxing-crescent-5", '\uf0d4');
            Icons.Add("wi-moon-alt-waxing-crescent-6", '\uf0d5');
            Icons.Add("wi-moon-alt-first-quarter", '\uf0d6');
            Icons.Add("wi-moon-alt-waxing-gibbous-1", '\uf0d7');
            Icons.Add("wi-moon-alt-waxing-gibbous-2", '\uf0d8');
            Icons.Add("wi-moon-alt-waxing-gibbous-3", '\uf0d9');
            Icons.Add("wi-moon-alt-waxing-gibbous-4", '\uf0da');
            Icons.Add("wi-moon-alt-waxing-gibbous-5", '\uf0db');
            Icons.Add("wi-moon-alt-waxing-gibbous-6", '\uf0dc');
            Icons.Add("wi-moon-alt-full", '\uf0dd');
            Icons.Add("wi-moon-alt-waning-gibbous-1", '\uf0de');
            Icons.Add("wi-moon-alt-waning-gibbous-2", '\uf0df');
            Icons.Add("wi-moon-alt-waning-gibbous-3", '\uf0e0');
            Icons.Add("wi-moon-alt-waning-gibbous-4", '\uf0e1');
            Icons.Add("wi-moon-alt-waning-gibbous-5", '\uf0e2');
            Icons.Add("wi-moon-alt-waning-gibbous-6", '\uf0e3');
            Icons.Add("wi-moon-alt-third-quarter", '\uf0e4');
            Icons.Add("wi-moon-alt-waning-crescent-1", '\uf0e5');
            Icons.Add("wi-moon-alt-waning-crescent-2", '\uf0e6');
            Icons.Add("wi-moon-alt-waning-crescent-3", '\uf0e7');
            Icons.Add("wi-moon-alt-waning-crescent-4", '\uf0e8');
            Icons.Add("wi-moon-alt-waning-crescent-5", '\uf0e9');
            Icons.Add("wi-moon-alt-waning-crescent-6", '\uf0ea');
            // Numeric moon-phase aliases (wi-moon-0..27 == the 28 named phases above).
            Icons.Add("wi-moon-0", '\uf095');
            Icons.Add("wi-moon-1", '\uf096');
            Icons.Add("wi-moon-2", '\uf097');
            Icons.Add("wi-moon-3", '\uf098');
            Icons.Add("wi-moon-4", '\uf099');
            Icons.Add("wi-moon-5", '\uf09a');
            Icons.Add("wi-moon-6", '\uf09b');
            Icons.Add("wi-moon-7", '\uf09c');
            Icons.Add("wi-moon-8", '\uf09d');
            Icons.Add("wi-moon-9", '\uf09e');
            Icons.Add("wi-moon-10", '\uf09f');
            Icons.Add("wi-moon-11", '\uf0a0');
            Icons.Add("wi-moon-12", '\uf0a1');
            Icons.Add("wi-moon-13", '\uf0a2');
            Icons.Add("wi-moon-14", '\uf0a3');
            Icons.Add("wi-moon-15", '\uf0a4');
            Icons.Add("wi-moon-16", '\uf0a5');
            Icons.Add("wi-moon-17", '\uf0a6');
            Icons.Add("wi-moon-18", '\uf0a7');
            Icons.Add("wi-moon-19", '\uf0a8');
            Icons.Add("wi-moon-20", '\uf0a9');
            Icons.Add("wi-moon-21", '\uf0aa');
            Icons.Add("wi-moon-22", '\uf0ab');
            Icons.Add("wi-moon-23", '\uf0ac');
            Icons.Add("wi-moon-24", '\uf0ad');
            Icons.Add("wi-moon-25", '\uf0ae');
            Icons.Add("wi-moon-26", '\uf0af');
            Icons.Add("wi-moon-27", '\uf0b0');
            // Clock faces (wi-time-12 has the lowest codepoint, '\uf089').
            Icons.Add("wi-time-1", '\uf08a');
            Icons.Add("wi-time-2", '\uf08b');
            Icons.Add("wi-time-3", '\uf08c');
            Icons.Add("wi-time-4", '\uf08d');
            Icons.Add("wi-time-5", '\uf08e');
            Icons.Add("wi-time-6", '\uf08f');
            Icons.Add("wi-time-7", '\uf090');
            Icons.Add("wi-time-8", '\uf091');
            Icons.Add("wi-time-9", '\uf092');
            Icons.Add("wi-time-10", '\uf093');
            Icons.Add("wi-time-11", '\uf094');
            Icons.Add("wi-time-12", '\uf089');
            // Wind-direction arrows.
            Icons.Add("wi-direction-up", '\uf058');
            Icons.Add("wi-direction-up-right", '\uf057');
            Icons.Add("wi-direction-right", '\uf04d');
            Icons.Add("wi-direction-down-right", '\uf088');
            Icons.Add("wi-direction-down", '\uf044');
            Icons.Add("wi-direction-down-left", '\uf043');
            Icons.Add("wi-direction-left", '\uf048');
            Icons.Add("wi-direction-up-left", '\uf087');
            // Beaufort wind-force scale 0-12.
            Icons.Add("wi-wind-beaufort-0", '\uf0b7');
            Icons.Add("wi-wind-beaufort-1", '\uf0b8');
            Icons.Add("wi-wind-beaufort-2", '\uf0b9');
            Icons.Add("wi-wind-beaufort-3", '\uf0ba');
            Icons.Add("wi-wind-beaufort-4", '\uf0bb');
            Icons.Add("wi-wind-beaufort-5", '\uf0bc');
            Icons.Add("wi-wind-beaufort-6", '\uf0bd');
            Icons.Add("wi-wind-beaufort-7", '\uf0be');
            Icons.Add("wi-wind-beaufort-8", '\uf0bf');
            Icons.Add("wi-wind-beaufort-9", '\uf0c0');
            Icons.Add("wi-wind-beaufort-10", '\uf0c1');
            Icons.Add("wi-wind-beaufort-11", '\uf0c2');
            Icons.Add("wi-wind-beaufort-12", '\uf0c3');
            // Yahoo Weather API condition codes (aliases onto base glyphs).
            Icons.Add("wi-yahoo-0", '\uf056');
            Icons.Add("wi-yahoo-1", '\uf00e');
            Icons.Add("wi-yahoo-2", '\uf073');
            Icons.Add("wi-yahoo-3", '\uf01e');
            Icons.Add("wi-yahoo-4", '\uf01e');
            Icons.Add("wi-yahoo-5", '\uf017');
            Icons.Add("wi-yahoo-6", '\uf017');
            Icons.Add("wi-yahoo-7", '\uf017');
            Icons.Add("wi-yahoo-8", '\uf015');
            Icons.Add("wi-yahoo-9", '\uf01a');
            Icons.Add("wi-yahoo-10", '\uf015');
            Icons.Add("wi-yahoo-11", '\uf01a');
            Icons.Add("wi-yahoo-12", '\uf01a');
            Icons.Add("wi-yahoo-13", '\uf01b');
            Icons.Add("wi-yahoo-14", '\uf00a');
            Icons.Add("wi-yahoo-15", '\uf064');
            Icons.Add("wi-yahoo-16", '\uf01b');
            Icons.Add("wi-yahoo-17", '\uf015');
            Icons.Add("wi-yahoo-18", '\uf017');
            Icons.Add("wi-yahoo-19", '\uf063');
            Icons.Add("wi-yahoo-20", '\uf014');
            Icons.Add("wi-yahoo-21", '\uf021');
            Icons.Add("wi-yahoo-22", '\uf062');
            Icons.Add("wi-yahoo-23", '\uf050');
            Icons.Add("wi-yahoo-24", '\uf050');
            Icons.Add("wi-yahoo-25", '\uf076');
            Icons.Add("wi-yahoo-26", '\uf013');
            Icons.Add("wi-yahoo-27", '\uf031');
            Icons.Add("wi-yahoo-28", '\uf002');
            Icons.Add("wi-yahoo-29", '\uf031');
            Icons.Add("wi-yahoo-30", '\uf002');
            Icons.Add("wi-yahoo-31", '\uf02e');
            Icons.Add("wi-yahoo-32", '\uf00d');
            Icons.Add("wi-yahoo-33", '\uf083');
            Icons.Add("wi-yahoo-34", '\uf00c');
            Icons.Add("wi-yahoo-35", '\uf017');
            Icons.Add("wi-yahoo-36", '\uf072');
            Icons.Add("wi-yahoo-37", '\uf00e');
            Icons.Add("wi-yahoo-38", '\uf00e');
            Icons.Add("wi-yahoo-39", '\uf00e');
            Icons.Add("wi-yahoo-40", '\uf01a');
            Icons.Add("wi-yahoo-41", '\uf064');
            Icons.Add("wi-yahoo-42", '\uf01b');
            Icons.Add("wi-yahoo-43", '\uf064');
            Icons.Add("wi-yahoo-44", '\uf00c');
            Icons.Add("wi-yahoo-45", '\uf00e');
            Icons.Add("wi-yahoo-46", '\uf01b');
            Icons.Add("wi-yahoo-47", '\uf00e');
            Icons.Add("wi-yahoo-3200", '\uf077');
            // Forecast.io (Dark Sky) condition names.
            Icons.Add("wi-forecast-io-clear-day", '\uf00d');
            Icons.Add("wi-forecast-io-clear-night", '\uf02e');
            Icons.Add("wi-forecast-io-rain", '\uf019');
            Icons.Add("wi-forecast-io-snow", '\uf01b');
            Icons.Add("wi-forecast-io-sleet", '\uf0b5');
            Icons.Add("wi-forecast-io-wind", '\uf050');
            Icons.Add("wi-forecast-io-fog", '\uf014');
            Icons.Add("wi-forecast-io-cloudy", '\uf013');
            Icons.Add("wi-forecast-io-partly-cloudy-day", '\uf002');
            Icons.Add("wi-forecast-io-partly-cloudy-night", '\uf031');
            Icons.Add("wi-forecast-io-hail", '\uf015');
            Icons.Add("wi-forecast-io-thunderstorm", '\uf01e');
            Icons.Add("wi-forecast-io-tornado", '\uf056');
            // WMO 4680 present-weather codes (both 1- and 2-digit spellings
            // are registered for the low codes, e.g. "0" and "00").
            Icons.Add("wi-wmo4680-0", '\uf055');
            Icons.Add("wi-wmo4680-00", '\uf055');
            Icons.Add("wi-wmo4680-1", '\uf013');
            Icons.Add("wi-wmo4680-01", '\uf013');
            Icons.Add("wi-wmo4680-2", '\uf055');
            Icons.Add("wi-wmo4680-02", '\uf055');
            Icons.Add("wi-wmo4680-3", '\uf013');
            Icons.Add("wi-wmo4680-03", '\uf013');
            Icons.Add("wi-wmo4680-4", '\uf014');
            Icons.Add("wi-wmo4680-04", '\uf014');
            Icons.Add("wi-wmo4680-5", '\uf014');
            Icons.Add("wi-wmo4680-05", '\uf014');
            Icons.Add("wi-wmo4680-10", '\uf014');
            Icons.Add("wi-wmo4680-11", '\uf014');
            Icons.Add("wi-wmo4680-12", '\uf016');
            Icons.Add("wi-wmo4680-18", '\uf050');
            Icons.Add("wi-wmo4680-20", '\uf014');
            Icons.Add("wi-wmo4680-21", '\uf017');
            Icons.Add("wi-wmo4680-22", '\uf017');
            Icons.Add("wi-wmo4680-23", '\uf019');
            Icons.Add("wi-wmo4680-24", '\uf01b');
            Icons.Add("wi-wmo4680-25", '\uf015');
            Icons.Add("wi-wmo4680-26", '\uf01e');
            Icons.Add("wi-wmo4680-27", '\uf063');
            Icons.Add("wi-wmo4680-28", '\uf063');
            Icons.Add("wi-wmo4680-29", '\uf063');
            Icons.Add("wi-wmo4680-30", '\uf014');
            Icons.Add("wi-wmo4680-31", '\uf014');
            Icons.Add("wi-wmo4680-32", '\uf014');
            Icons.Add("wi-wmo4680-33", '\uf014');
            Icons.Add("wi-wmo4680-34", '\uf014');
            Icons.Add("wi-wmo4680-35", '\uf014');
            Icons.Add("wi-wmo4680-40", '\uf017');
            Icons.Add("wi-wmo4680-41", '\uf01c');
            Icons.Add("wi-wmo4680-42", '\uf019');
            Icons.Add("wi-wmo4680-43", '\uf01c');
            Icons.Add("wi-wmo4680-44", '\uf019');
            Icons.Add("wi-wmo4680-45", '\uf015');
            Icons.Add("wi-wmo4680-46", '\uf015');
            Icons.Add("wi-wmo4680-47", '\uf01b');
            Icons.Add("wi-wmo4680-48", '\uf01b');
            Icons.Add("wi-wmo4680-50", '\uf01c');
            Icons.Add("wi-wmo4680-51", '\uf01c');
            Icons.Add("wi-wmo4680-52", '\uf019');
            Icons.Add("wi-wmo4680-53", '\uf019');
            Icons.Add("wi-wmo4680-54", '\uf076');
            Icons.Add("wi-wmo4680-55", '\uf076');
            Icons.Add("wi-wmo4680-56", '\uf076');
            Icons.Add("wi-wmo4680-57", '\uf01c');
            Icons.Add("wi-wmo4680-58", '\uf019');
            Icons.Add("wi-wmo4680-60", '\uf01c');
            Icons.Add("wi-wmo4680-61", '\uf01c');
            Icons.Add("wi-wmo4680-62", '\uf019');
            Icons.Add("wi-wmo4680-63", '\uf019');
            Icons.Add("wi-wmo4680-64", '\uf015');
            Icons.Add("wi-wmo4680-65", '\uf015');
            Icons.Add("wi-wmo4680-66", '\uf015');
            Icons.Add("wi-wmo4680-67", '\uf017');
            Icons.Add("wi-wmo4680-68", '\uf017');
            Icons.Add("wi-wmo4680-70", '\uf01b');
            Icons.Add("wi-wmo4680-71", '\uf01b');
            Icons.Add("wi-wmo4680-72", '\uf01b');
            Icons.Add("wi-wmo4680-73", '\uf01b');
            Icons.Add("wi-wmo4680-74", '\uf076');
            Icons.Add("wi-wmo4680-75", '\uf076');
            Icons.Add("wi-wmo4680-76", '\uf076');
            Icons.Add("wi-wmo4680-77", '\uf01b');
            Icons.Add("wi-wmo4680-78", '\uf076');
            Icons.Add("wi-wmo4680-80", '\uf019');
            Icons.Add("wi-wmo4680-81", '\uf01c');
            Icons.Add("wi-wmo4680-82", '\uf019');
            Icons.Add("wi-wmo4680-83", '\uf019');
            Icons.Add("wi-wmo4680-84", '\uf01d');
            Icons.Add("wi-wmo4680-85", '\uf017');
            Icons.Add("wi-wmo4680-86", '\uf017');
            Icons.Add("wi-wmo4680-87", '\uf017');
            Icons.Add("wi-wmo4680-89", '\uf015');
            Icons.Add("wi-wmo4680-90", '\uf016');
            Icons.Add("wi-wmo4680-91", '\uf01d');
            Icons.Add("wi-wmo4680-92", '\uf01e');
            Icons.Add("wi-wmo4680-93", '\uf01e');
            Icons.Add("wi-wmo4680-94", '\uf016');
            Icons.Add("wi-wmo4680-95", '\uf01e');
            Icons.Add("wi-wmo4680-96", '\uf01e');
            Icons.Add("wi-wmo4680-99", '\uf056');
            // OpenWeatherMap condition codes, neutral variants.
            Icons.Add("wi-owm-200", '\uf01e');
            Icons.Add("wi-owm-201", '\uf01e');
            Icons.Add("wi-owm-202", '\uf01e');
            Icons.Add("wi-owm-210", '\uf016');
            Icons.Add("wi-owm-211", '\uf016');
            Icons.Add("wi-owm-212", '\uf016');
            Icons.Add("wi-owm-221", '\uf016');
            Icons.Add("wi-owm-230", '\uf01e');
            Icons.Add("wi-owm-231", '\uf01e');
            Icons.Add("wi-owm-232", '\uf01e');
            Icons.Add("wi-owm-300", '\uf01c');
            Icons.Add("wi-owm-301", '\uf01c');
            Icons.Add("wi-owm-302", '\uf019');
            Icons.Add("wi-owm-310", '\uf017');
            Icons.Add("wi-owm-311", '\uf019');
            Icons.Add("wi-owm-312", '\uf019');
            Icons.Add("wi-owm-313", '\uf01a');
            Icons.Add("wi-owm-314", '\uf019');
            Icons.Add("wi-owm-321", '\uf01c');
            Icons.Add("wi-owm-500", '\uf01c');
            Icons.Add("wi-owm-501", '\uf019');
            Icons.Add("wi-owm-502", '\uf019');
            Icons.Add("wi-owm-503", '\uf019');
            Icons.Add("wi-owm-504", '\uf019');
            Icons.Add("wi-owm-511", '\uf017');
            Icons.Add("wi-owm-520", '\uf01a');
            Icons.Add("wi-owm-521", '\uf01a');
            Icons.Add("wi-owm-522", '\uf01a');
            Icons.Add("wi-owm-531", '\uf01d');
            Icons.Add("wi-owm-600", '\uf01b');
            Icons.Add("wi-owm-601", '\uf01b');
            Icons.Add("wi-owm-602", '\uf0b5');
            Icons.Add("wi-owm-611", '\uf017');
            Icons.Add("wi-owm-612", '\uf017');
            Icons.Add("wi-owm-615", '\uf017');
            Icons.Add("wi-owm-616", '\uf017');
            Icons.Add("wi-owm-620", '\uf017');
            Icons.Add("wi-owm-621", '\uf01b');
            Icons.Add("wi-owm-622", '\uf01b');
            Icons.Add("wi-owm-701", '\uf01a');
            Icons.Add("wi-owm-711", '\uf062');
            Icons.Add("wi-owm-721", '\uf0b6');
            Icons.Add("wi-owm-731", '\uf063');
            Icons.Add("wi-owm-741", '\uf014');
            Icons.Add("wi-owm-761", '\uf063');
            Icons.Add("wi-owm-762", '\uf063');
            Icons.Add("wi-owm-771", '\uf011');
            Icons.Add("wi-owm-781", '\uf056');
            Icons.Add("wi-owm-800", '\uf00d');
            Icons.Add("wi-owm-801", '\uf011');
            Icons.Add("wi-owm-802", '\uf011');
            Icons.Add("wi-owm-803", '\uf012');
            Icons.Add("wi-owm-804", '\uf013');
            Icons.Add("wi-owm-900", '\uf056');
            Icons.Add("wi-owm-901", '\uf01d');
            Icons.Add("wi-owm-902", '\uf073');
            Icons.Add("wi-owm-903", '\uf076');
            Icons.Add("wi-owm-904", '\uf072');
            Icons.Add("wi-owm-905", '\uf021');
            Icons.Add("wi-owm-906", '\uf015');
            Icons.Add("wi-owm-957", '\uf050');
            // OpenWeatherMap condition codes, day variants.
            Icons.Add("wi-owm-day-200", '\uf010');
            Icons.Add("wi-owm-day-201", '\uf010');
            Icons.Add("wi-owm-day-202", '\uf010');
            Icons.Add("wi-owm-day-210", '\uf005');
            Icons.Add("wi-owm-day-211", '\uf005');
            Icons.Add("wi-owm-day-212", '\uf005');
            Icons.Add("wi-owm-day-221", '\uf005');
            Icons.Add("wi-owm-day-230", '\uf010');
            Icons.Add("wi-owm-day-231", '\uf010');
            Icons.Add("wi-owm-day-232", '\uf010');
            Icons.Add("wi-owm-day-300", '\uf00b');
            Icons.Add("wi-owm-day-301", '\uf00b');
            Icons.Add("wi-owm-day-302", '\uf008');
            Icons.Add("wi-owm-day-310", '\uf008');
            Icons.Add("wi-owm-day-311", '\uf008');
            Icons.Add("wi-owm-day-312", '\uf008');
            Icons.Add("wi-owm-day-313", '\uf008');
            Icons.Add("wi-owm-day-314", '\uf008');
            Icons.Add("wi-owm-day-321", '\uf00b');
            Icons.Add("wi-owm-day-500", '\uf00b');
            Icons.Add("wi-owm-day-501", '\uf008');
            Icons.Add("wi-owm-day-502", '\uf008');
            Icons.Add("wi-owm-day-503", '\uf008');
            Icons.Add("wi-owm-day-504", '\uf008');
            Icons.Add("wi-owm-day-511", '\uf006');
            Icons.Add("wi-owm-day-520", '\uf009');
            Icons.Add("wi-owm-day-521", '\uf009');
            Icons.Add("wi-owm-day-522", '\uf009');
            Icons.Add("wi-owm-day-531", '\uf00e');
            Icons.Add("wi-owm-day-600", '\uf00a');
            Icons.Add("wi-owm-day-601", '\uf0b2');
            Icons.Add("wi-owm-day-602", '\uf00a');
            Icons.Add("wi-owm-day-611", '\uf006');
            Icons.Add("wi-owm-day-612", '\uf006');
            Icons.Add("wi-owm-day-615", '\uf006');
            Icons.Add("wi-owm-day-616", '\uf006');
            Icons.Add("wi-owm-day-620", '\uf006');
            Icons.Add("wi-owm-day-621", '\uf00a');
            Icons.Add("wi-owm-day-622", '\uf00a');
            Icons.Add("wi-owm-day-701", '\uf009');
            Icons.Add("wi-owm-day-711", '\uf062');
            Icons.Add("wi-owm-day-721", '\uf0b6');
            Icons.Add("wi-owm-day-731", '\uf063');
            Icons.Add("wi-owm-day-741", '\uf003');
            Icons.Add("wi-owm-day-761", '\uf063');
            Icons.Add("wi-owm-day-762", '\uf063');
            Icons.Add("wi-owm-day-781", '\uf056');
            Icons.Add("wi-owm-day-800", '\uf00d');
            Icons.Add("wi-owm-day-801", '\uf000');
            Icons.Add("wi-owm-day-802", '\uf000');
            Icons.Add("wi-owm-day-803", '\uf000');
            Icons.Add("wi-owm-day-804", '\uf00c');
            Icons.Add("wi-owm-day-900", '\uf056');
            Icons.Add("wi-owm-day-902", '\uf073');
            Icons.Add("wi-owm-day-903", '\uf076');
            Icons.Add("wi-owm-day-904", '\uf072');
            Icons.Add("wi-owm-day-906", '\uf004');
            Icons.Add("wi-owm-day-957", '\uf050');
            // OpenWeatherMap condition codes, night variants.
            Icons.Add("wi-owm-night-200", '\uf02d');
            Icons.Add("wi-owm-night-201", '\uf02d');
            Icons.Add("wi-owm-night-202", '\uf02d');
            Icons.Add("wi-owm-night-210", '\uf025');
            Icons.Add("wi-owm-night-211", '\uf025');
            Icons.Add("wi-owm-night-212", '\uf025');
            Icons.Add("wi-owm-night-221", '\uf025');
            Icons.Add("wi-owm-night-230", '\uf02d');
            Icons.Add("wi-owm-night-231", '\uf02d');
            Icons.Add("wi-owm-night-232", '\uf02d');
            Icons.Add("wi-owm-night-300", '\uf02b');
            Icons.Add("wi-owm-night-301", '\uf02b');
            Icons.Add("wi-owm-night-302", '\uf028');
            Icons.Add("wi-owm-night-310", '\uf028');
            Icons.Add("wi-owm-night-311", '\uf028');
            Icons.Add("wi-owm-night-312", '\uf028');
            Icons.Add("wi-owm-night-313", '\uf028');
            Icons.Add("wi-owm-night-314", '\uf028');
            Icons.Add("wi-owm-night-321", '\uf02b');
            Icons.Add("wi-owm-night-500", '\uf02b');
            Icons.Add("wi-owm-night-501", '\uf028');
            Icons.Add("wi-owm-night-502", '\uf028');
            Icons.Add("wi-owm-night-503", '\uf028');
            Icons.Add("wi-owm-night-504", '\uf028');
            Icons.Add("wi-owm-night-511", '\uf026');
            Icons.Add("wi-owm-night-520", '\uf029');
            Icons.Add("wi-owm-night-521", '\uf029');
            Icons.Add("wi-owm-night-522", '\uf029');
            Icons.Add("wi-owm-night-531", '\uf02c');
            Icons.Add("wi-owm-night-600", '\uf02a');
            Icons.Add("wi-owm-night-601", '\uf0b4');
            Icons.Add("wi-owm-night-602", '\uf02a');
            Icons.Add("wi-owm-night-611", '\uf026');
            Icons.Add("wi-owm-night-612", '\uf026');
            Icons.Add("wi-owm-night-615", '\uf026');
            Icons.Add("wi-owm-night-616", '\uf026');
            Icons.Add("wi-owm-night-620", '\uf026');
            Icons.Add("wi-owm-night-621", '\uf02a');
            Icons.Add("wi-owm-night-622", '\uf02a');
            Icons.Add("wi-owm-night-701", '\uf029');
            Icons.Add("wi-owm-night-711", '\uf062');
            Icons.Add("wi-owm-night-721", '\uf0b6');
            Icons.Add("wi-owm-night-731", '\uf063');
            Icons.Add("wi-owm-night-741", '\uf04a');
            Icons.Add("wi-owm-night-761", '\uf063');
            Icons.Add("wi-owm-night-762", '\uf063');
            Icons.Add("wi-owm-night-781", '\uf056');
            Icons.Add("wi-owm-night-800", '\uf02e');
            Icons.Add("wi-owm-night-801", '\uf022');
            Icons.Add("wi-owm-night-802", '\uf022');
            Icons.Add("wi-owm-night-803", '\uf022');
            Icons.Add("wi-owm-night-804", '\uf086');
            Icons.Add("wi-owm-night-900", '\uf056');
            Icons.Add("wi-owm-night-902", '\uf073');
            Icons.Add("wi-owm-night-903", '\uf076');
            Icons.Add("wi-owm-night-904", '\uf072');
            Icons.Add("wi-owm-night-906", '\uf024');
            Icons.Add("wi-owm-night-957", '\uf050');
            // Weather Underground condition names ("sleat" spelling is the
            // font's own class name, not a typo introduced here).
            Icons.Add("wi-wu-chanceflurries", '\uf064');
            Icons.Add("wi-wu-chancerain", '\uf019');
            Icons.Add("wi-wu-chancesleat", '\uf0b5');
            Icons.Add("wi-wu-chancesnow", '\uf01b');
            Icons.Add("wi-wu-chancetstorms", '\uf01e');
            Icons.Add("wi-wu-clear", '\uf00d');
            Icons.Add("wi-wu-cloudy", '\uf002');
            Icons.Add("wi-wu-flurries", '\uf064');
            Icons.Add("wi-wu-hazy", '\uf0b6');
            Icons.Add("wi-wu-mostlycloudy", '\uf002');
            Icons.Add("wi-wu-mostlysunny", '\uf00d');
            Icons.Add("wi-wu-partlycloudy", '\uf002');
            Icons.Add("wi-wu-partlysunny", '\uf00d');
            Icons.Add("wi-wu-rain", '\uf01a');
            Icons.Add("wi-wu-sleat", '\uf0b5');
            Icons.Add("wi-wu-snow", '\uf01b');
            Icons.Add("wi-wu-sunny", '\uf00d');
            Icons.Add("wi-wu-tstorms", '\uf01e');
            Icons.Add("wi-wu-unknown", '\uf00d');
        }
    }
}
// Python Tools for Visual Studio // Copyright(c) Microsoft Corporation // All rights reserved. // // Licensed under the Apache License, Version 2.0 (the License); you may not use // this file except in compliance with the License. You may obtain a copy of the // License at http://www.apache.org/licenses/LICENSE-2.0 // // THIS CODE IS PROVIDED ON AN *AS IS* BASIS, WITHOUT WARRANTIES OR CONDITIONS // OF ANY KIND, EITHER EXPRESS OR IMPLIED, INCLUDING WITHOUT LIMITATION ANY // IMPLIED WARRANTIES OR CONDITIONS OF TITLE, FITNESS FOR A PARTICULAR PURPOSE, // MERCHANTABILITY OR NON-INFRINGEMENT. // // See the Apache Version 2.0 License for specific language governing // permissions and limitations under the License. using System; using System.Collections.Generic; using System.ComponentModel.Composition; using System.Diagnostics; using System.IO; using System.Linq; using System.Threading; using Microsoft.PythonTools.Infrastructure; using Newtonsoft.Json; namespace Microsoft.PythonTools.Interpreter { /// <summary> /// Detects interpreters in user-created conda environments. /// </summary> /// <remarks> /// Uses %HOMEPATH%/.conda/environments.txt and `conda info --envs`. 
/// </remarks>
[InterpreterFactoryId(FactoryProviderName)]
[Export(typeof(IPythonInterpreterFactoryProvider))]
[Export(typeof(CondaEnvironmentFactoryProvider))]
[PartCreationPolicy(CreationPolicy.Shared)]
class CondaEnvironmentFactoryProvider : IPythonInterpreterFactoryProvider, IDisposable {
    // Cached environment information keyed by interpreter configuration id.
    // This dictionary also serves as the lock object for all mutable state.
    private readonly Dictionary<string, PythonInterpreterInformation> _factories = new Dictionary<string, PythonInterpreterInformation>();

    internal const string FactoryProviderName = "CondaEnv";
    internal const string EnvironmentCompanyName = "CondaEnv";

    private bool _isDisposed;
    // > 0 while discovery notifications are suppressed (see SuppressDiscoverFactories).
    private int _ignoreNotifications;
    private bool _initialized;
    private readonly CPythonInterpreterFactoryProvider _globalProvider;
    private readonly ICondaLocatorProvider _condaLocatorProvider;
    private readonly bool _watchFileSystem;
    private FileSystemWatcher _envsTxtWatcher;
    private FileSystemWatcher _condaFolderWatcher;
    private Timer _envsWatcherTimer;
    private string _userProfileFolder;
    private string _environmentsTxtFolder;
    private string _environmentsTxtPath;

    /// <summary>
    /// Raised when a discovery pass begins.
    /// </summary>
    internal event EventHandler DiscoveryStarted;

    [ImportingConstructor]
    public CondaEnvironmentFactoryProvider(
        [Import] CPythonInterpreterFactoryProvider globalProvider,
        [Import] ICondaLocatorProvider condaLocatorProvider,
        // File system watching is disabled when running under the mock VS test host.
        [Import("Microsoft.VisualStudioTools.MockVsTests.IsMockVs", AllowDefault = true)] object isMockVs = null
    ) : this(globalProvider, condaLocatorProvider, isMockVs == null) {
    }

    /// <summary>
    /// Creates the provider.
    /// </summary>
    /// <param name="globalProvider">Provider used to discover globally registered CPython installs.</param>
    /// <param name="condaLocatorProvider">Locator used to find the main conda executable.</param>
    /// <param name="watchFileSystem">Whether to watch environments.txt for changes.</param>
    /// <param name="userProfileFolder">Override of the user profile folder, primarily for tests.</param>
    public CondaEnvironmentFactoryProvider(
        CPythonInterpreterFactoryProvider globalProvider,
        ICondaLocatorProvider condaLocatorProvider,
        bool watchFileSystem,
        string userProfileFolder = null) {
        _watchFileSystem = watchFileSystem;
        _globalProvider = globalProvider;
        _condaLocatorProvider = condaLocatorProvider;
        _userProfileFolder = userProfileFolder;
    }

    public void Dispose() {
        Dispose(true);
        GC.SuppressFinalize(this);
    }

    ~CondaEnvironmentFactoryProvider() {
        Dispose(false);
    }

    protected virtual void Dispose(bool disposing) {
        if (!_isDisposed) {
            _isDisposed = true;
            // FIX: only dispose managed objects when called from Dispose().
            // The original also disposed the watchers/timer from the finalizer
            // path, where those objects may already have been finalized;
            // the standard dispose pattern guards managed cleanup on "disposing".
            if (disposing) {
                lock (_factories) {
                    if (_envsTxtWatcher != null) {
                        _envsTxtWatcher.Dispose();
                    }
                    if (_condaFolderWatcher != null) {
                        _condaFolderWatcher.Dispose();
                    }
                    if (_envsWatcherTimer != null) {
                        _envsWatcherTimer.Dispose();
                    }
                }
            }
        }
    }

    /// <summary>
    /// Performs one-time setup (paths, file watchers) and the initial
    /// discovery pass. Safe to call repeatedly; only the first call does work.
    /// </summary>
    private void EnsureInitialized() {
        if (_initialized) {
            return;
        }

        bool doDiscover = false;
        lock (_factories) {
            if (!_initialized) {
                _initialized = true;
                doDiscover = true;
                try {
                    if (_userProfileFolder == null) {
                        _userProfileFolder = Environment.GetFolderPath(Environment.SpecialFolder.UserProfile);
                    }
                    _environmentsTxtFolder = Path.Combine(
                        _userProfileFolder,
                        ".conda"
                    );
                    _environmentsTxtPath = Path.Combine(
                        _environmentsTxtFolder,
                        "environments.txt"
                    );
                } catch (ArgumentException) {
                    // Invalid profile path; leave watcher paths unset.
                }

                if (_watchFileSystem && !string.IsNullOrEmpty(_environmentsTxtPath)) {
                    _envsWatcherTimer = new Timer(EnvironmentsWatcherTimer_Elapsed);
                    if (!WatchForEnvironmentsTxtChanges()) {
                        // .conda folder doesn't exist yet; watch for its creation instead.
                        WatchForCondaFolderCreation();
                    }
                }
            }
        }

        // Discover outside the lock to avoid holding it during conda invocation.
        if (doDiscover) {
            DiscoverInterpreterFactories();
        }
    }

    private bool WatchForEnvironmentsTxtChanges() {
        // Watch the file %HOMEPATH%/.conda/Environments.txt which
        // is updated by conda after a new environment is created/deleted.
        if (Directory.Exists(_environmentsTxtFolder)) {
            try {
                _envsTxtWatcher = new FileSystemWatcher(_environmentsTxtFolder, "environments.txt");
                _envsTxtWatcher.Changed += EnvironmentsTxtWatcher_Changed;
                _envsTxtWatcher.Created += EnvironmentsTxtWatcher_Changed;
                _envsTxtWatcher.EnableRaisingEvents = true;
                return true;
            } catch (ArgumentException) {
            } catch (IOException) {
            }
        }
        return false;
    }

    private void WatchForCondaFolderCreation() {
        // When .conda does not exist, we watch for its creation
        // then watch for environments.txt changes once it's created.
        // The simpler alternative of using a recursive watcher on user
        // folder could lead to poor performance if there are lots of
        // files under the user folder.
        var watchedPath = Path.GetDirectoryName(_environmentsTxtFolder);
        if (Directory.Exists(watchedPath)) {
            try {
                _condaFolderWatcher = new FileSystemWatcher(watchedPath, ".conda");
                _condaFolderWatcher.Created += CondaFolderWatcher_Created;
                _condaFolderWatcher.EnableRaisingEvents = true;
            } catch (ArgumentException) {
            } catch (IOException) {
            }
        }
    }

    // Debounce timer fired: stop the timer and run a discovery pass.
    private void EnvironmentsWatcherTimer_Elapsed(object state) {
        try {
            lock (_factories) {
                _envsWatcherTimer.Change(Timeout.Infinite, Timeout.Infinite);
            }
            DiscoverInterpreterFactories();
        } catch (ObjectDisposedException) {
        }
    }

    // environments.txt changed: (re)start the 1 second debounce timer.
    private void EnvironmentsTxtWatcher_Changed(object sender, FileSystemEventArgs e) {
        lock (_factories) {
            try {
                _envsWatcherTimer.Change(1000, Timeout.Infinite);
            } catch (ObjectDisposedException) {
            }
        }
    }

    // .conda folder appeared: debounce a discovery and start watching environments.txt.
    private void CondaFolderWatcher_Created(object sender, FileSystemEventArgs e) {
        lock (_factories) {
            try {
                _envsWatcherTimer.Change(1000, Timeout.Infinite);
            } catch (ObjectDisposedException) {
            }
            if (_envsTxtWatcher == null) {
                WatchForEnvironmentsTxtChanges();
            }
        }
    }

    private void DiscoverInterpreterFactories() {
        // Skip while notifications are suppressed (SuppressDiscoverFactories).
        if (Volatile.Read(ref _ignoreNotifications) > 0) {
            return;
        }
        ForceDiscoverInterpreterFactories();
    }

    private void ForceDiscoverInterpreterFactories() {
        DiscoveryStarted?.Invoke(this, EventArgs.Empty);

        // Discover the available interpreters...
        bool anyChanged = false;

        var found = new List<PythonInterpreterInformation>();
        try {
            FindCondaEnvironments(found);
        } catch (ObjectDisposedException) {
            // We are aborting, so silently return with no results.
            return;
        }

        var uniqueIds = new HashSet<string>(found.Select(i => i.Configuration.Id));

        // Then update our cached state with the lock held.
        lock (_factories) {
            foreach (var info in found) {
                PythonInterpreterInformation existingInfo;
                if (!_factories.TryGetValue(info.Configuration.Id, out existingInfo) ||
                    info.Configuration != existingInfo.Configuration) {
                    _factories[info.Configuration.Id] = info;
                    anyChanged = true;
                }
            }

            // Remove any factories we had before and no longer see...
            foreach (var unregistered in _factories.Keys.Except(uniqueIds).ToArray()) {
                _factories.Remove(unregistered);
                anyChanged = true;
            }
        }

        if (anyChanged) {
            OnInterpreterFactoriesChanged();
        }
    }

    /// <summary>
    /// Runs "conda info --json" and deserializes the result.
    /// Returns null when conda fails or produces unparseable output.
    /// </summary>
    internal static CondaInfoResult ExecuteCondaInfo(string condaPath) {
        using (var output = ProcessOutput.RunHiddenAndCapture(condaPath, "info", "--json")) {
            output.Wait();
            if (output.ExitCode == 0) {
                var json = string.Join(Environment.NewLine, output.StandardOutputLines);
                try {
                    return JsonConvert.DeserializeObject<CondaInfoResult>(json);
                } catch (JsonException ex) {
                    Debug.WriteLine("Failed to parse: {0}".FormatInvariant(ex.Message));
                    Debug.WriteLine(json);
                    return null;
                }
            }
            return null;
        }
    }

    /// <summary>
    /// Subset of the JSON emitted by "conda info --json".
    /// </summary>
    internal class CondaInfoResult {
        [JsonProperty("envs")]
        public string[] EnvironmentFolders = null;

        [JsonProperty("envs_dirs")]
        public string[] EnvironmentRootFolders = null;

        [JsonProperty("root_prefix")]
        public string RootPrefixFolder = null;
    }

    private void FindCondaEnvironments(List<PythonInterpreterInformation> envs) {
        var mainCondaExePath = _condaLocatorProvider?.FindLocator()?.CondaExecutablePath;
        if (!string.IsNullOrEmpty(mainCondaExePath)) {
            envs.AddRange(FindCondaEnvironments(mainCondaExePath));
        }
    }

    private static IReadOnlyList<PythonInterpreterInformation> FindCondaEnvironments(string condaPath) {
        var condaInfoResult = ExecuteCondaInfo(condaPath);
        if (condaInfoResult != null) {
            // We skip the root to avoid duplicate entries, root is
            // discovered by CPythonInterpreterFactoryProvider already.
            // Older versions of `conda info` used to not return the root.
            return condaInfoResult.EnvironmentFolders
                .AsParallel()
                .Where(folder =>
                    Directory.Exists(folder) &&
                    !PathUtils.IsSameDirectory(folder, condaInfoResult.RootPrefixFolder)
                )
                .Select(folder => CreateEnvironmentInfo(folder))
                .Where(env => env != null)
                .ToList();
        }
        return Enumerable.Empty<PythonInterpreterInformation>().ToList();
    }

    /// <summary>
    /// Builds interpreter information for a conda environment prefix folder,
    /// or returns null when the folder has no python executable.
    /// </summary>
    private static PythonInterpreterInformation CreateEnvironmentInfo(string prefixPath) {
        var name = Path.GetFileName(prefixPath);
        var description = name;
        var vendor = Strings.CondaEnvironmentDescription;
        var vendorUrl = string.Empty;
        var supportUrl = string.Empty;
        var interpreterPath = Path.Combine(prefixPath, CondaEnvironmentFactoryConstants.ConsoleExecutable);
        var windowsInterpreterPath = Path.Combine(prefixPath, CondaEnvironmentFactoryConstants.WindowsExecutable);

        if (!File.Exists(interpreterPath)) {
            return null;
        }

        var arch = CPythonInterpreterFactoryProvider.ArchitectureFromExe(interpreterPath);
        var version = CPythonInterpreterFactoryProvider.VersionFromSysVersionInfo(interpreterPath);

        var config = new VisualStudioInterpreterConfiguration(
            CondaEnvironmentFactoryConstants.GetInterpreterId(CondaEnvironmentFactoryProvider.EnvironmentCompanyName, name),
            description,
            prefixPath,
            interpreterPath,
            windowsInterpreterPath,
            CondaEnvironmentFactoryConstants.PathEnvironmentVariableName,
            arch,
            version
        );
        config.SwitchToFullDescription();

        var unique = new PythonInterpreterInformation(
            config,
            vendor,
            vendorUrl,
            supportUrl
        );
        return unique;
    }

    #region IPythonInterpreterProvider Members

    public IEnumerable<InterpreterConfiguration> GetInterpreterConfigurations() {
        EnsureInitialized();
        lock (_factories) {
            return _factories.Values.Select(x => x.Configuration).ToArray();
        }
    }

    public IPythonInterpreterFactory GetInterpreterFactory(string id) {
        EnsureInitialized();
        PythonInterpreterInformation info;
        lock (_factories) {
            _factories.TryGetValue(id, out info);
        }
        return info?.GetOrCreateFactory(CreateFactory);
    }

    private IPythonInterpreterFactory CreateFactory(PythonInterpreterInformation info) {
        return InterpreterFactoryCreator.CreateInterpreterFactory(
            info.Configuration,
            new InterpreterFactoryCreationOptions {
                WatchFileSystem = true,
            }
        );
    }

    private EventHandler _interpFactoriesChanged;

    public event EventHandler InterpreterFactoriesChanged {
        add {
            // First subscription triggers lazy initialization so changes can be observed.
            EnsureInitialized();
            _interpFactoriesChanged += value;
        }
        remove {
            _interpFactoriesChanged -= value;
        }
    }

    private void OnInterpreterFactoriesChanged() {
        _interpFactoriesChanged?.Invoke(this, EventArgs.Empty);
    }

    public object GetProperty(string id, string propName) {
        PythonInterpreterInformation info;
        switch (propName) {
            case PythonRegistrySearch.CompanyPropertyKey:
                lock (_factories) {
                    if (_factories.TryGetValue(id, out info)) {
                        return info.Vendor;
                    }
                }
                break;
            case PythonRegistrySearch.SupportUrlPropertyKey:
                lock (_factories) {
                    if (_factories.TryGetValue(id, out info)) {
                        return info.SupportUrl;
                    }
                }
                break;
            case "PersistInteractive":
                return true;
        }
        return null;
    }

    internal static bool IsCondaEnv(IPythonInterpreterFactory factory) {
        return factory.Configuration.Id.StartsWithOrdinal(CondaEnvironmentFactoryProvider.FactoryProviderName + "|");
    }

    internal static bool IsCondaEnv(string id) {
        return id.StartsWithOrdinal(CondaEnvironmentFactoryProvider.FactoryProviderName + "|");
    }

    internal static bool IsCondaEnv(string id, string expectedName) {
        // BUGFIX: the guard was inverted ("if (IsCondaEnv(id)) return false;"),
        // which rejected every conda environment id and then attempted a name
        // comparison on non-conda ids. This now mirrors the factory overload below.
        if (!IsCondaEnv(id)) {
            return false;
        }

        string name = NameFromId(id);
        return string.CompareOrdinal(name, expectedName) == 0;
    }

    internal static bool IsCondaEnv(IPythonInterpreterFactory factory, string expectedName) {
        if (!IsCondaEnv(factory)) {
            return false;
        }

        string name = NameFromId(factory.Configuration.Id);
        return string.CompareOrdinal(name, expectedName) == 0;
    }

    /// <summary>
    /// Extracts the environment name from an interpreter id,
    /// or returns null when the id cannot be parsed.
    /// </summary>
    internal static string NameFromId(string id) {
        if (CondaEnvironmentFactoryConstants.TryParseInterpreterId(id, out _, out string name)) {
            return name;
        }
        return null;
    }

    #endregion

    // Suppresses discovery notifications while alive; triggers a discovery
    // pass (forced or normal) when disposed.
    private sealed class DiscoverOnDispose : IDisposable {
        private readonly CondaEnvironmentFactoryProvider _provider;
        private readonly bool _forceDiscovery;

        public DiscoverOnDispose(CondaEnvironmentFactoryProvider provider, bool forceDiscovery) {
            _provider = provider;
            _forceDiscovery = forceDiscovery;
            Interlocked.Increment(ref _provider._ignoreNotifications);
        }

        public void Dispose() {
            Interlocked.Decrement(ref _provider._ignoreNotifications);
            if (_forceDiscovery) {
                _provider.ForceDiscoverInterpreterFactories();
            } else {
                _provider.DiscoverInterpreterFactories();
            }
        }
    }

    /// <summary>
    /// Returns a disposable token that suppresses factory discovery until disposed.
    /// </summary>
    internal IDisposable SuppressDiscoverFactories(bool forceDiscoveryOnDispose) {
        return new DiscoverOnDispose(this, forceDiscoveryOnDispose);
    }
}
}
/*
  Copyright (c) Microsoft Corporation. All rights reserved.
  Licensed under the MIT License. See License.txt in the project root for license information.
*/

using System.Collections.Generic;
using System.Collections.Specialized;
using System.Web;
using System.Web.Mvc;
using Adxstudio.Xrm.Cms;

namespace Adxstudio.Xrm.Web.Mvc.Html
{
	/// <summary>
	/// View helpers for rendering Site Marker (adx_sitemarker) data in Adxstudio Portals applications.
	/// </summary>
	public static class SiteMarkerExtensions
	{
		/// <summary>
		/// Looks up the target of a Site Marker (adx_sitemarker) by name.
		/// </summary>
		/// <param name="html">Extension method target, provides support for HTML rendering and access to view context/data.</param>
		/// <param name="siteMarkerName">The name of the site marker to look up.</param>
		/// <param name="requireTargetReadAccess">
		/// When true, the target is also checked for security read access; if the current user cannot
		/// read the target entity, null is returned. False by default.
		/// </param>
		/// <returns>
		/// The <see cref="ISiteMarkerTarget">target</see> of the named site marker, or null when not
		/// found (or not readable, when <paramref name="requireTargetReadAccess"/> is true).
		/// </returns>
		public static ISiteMarkerTarget SiteMarker(this HtmlHelper html, string siteMarkerName, bool requireTargetReadAccess = false)
		{
			var portalSiteMarkers = PortalExtensions.GetPortalViewContext(html).SiteMarkers;

			return requireTargetReadAccess
				? portalSiteMarkers.SelectWithReadAccess(siteMarkerName)
				: portalSiteMarkers.Select(siteMarkerName);
		}

		/// <summary>
		/// Renders an HTML A tag linking to the target of a Site Marker (adx_sitemarker), by name.
		/// </summary>
		/// <param name="html">Extension method target, provides support for HTML rendering and access to view context/data.</param>
		/// <param name="siteMarkerName">The name of the site marker to look up.</param>
		/// <param name="linkText">The text of the link.</param>
		/// <param name="requireTargetReadAccess">
		/// When true, the target is also checked for security read access; if the current user cannot
		/// read the target entity, an empty string is rendered. False by default.
		/// </param>
		/// <returns>
		/// An HTML A tag linking to the site marker target, or an empty string when no target is found
		/// (or not readable, when <paramref name="requireTargetReadAccess"/> is true).
		/// </returns>
		public static IHtmlString SiteMarkerLink(this HtmlHelper html, string siteMarkerName, string linkText = null, bool requireTargetReadAccess = false)
		{
			return SiteMarkerLink(html, siteMarkerName, new { }, linkText, requireTargetReadAccess);
		}

		/// <summary>
		/// Renders an HTML A tag linking to the target of a Site Marker (adx_sitemarker), by name.
		/// </summary>
		/// <param name="html">Extension method target, provides support for HTML rendering and access to view context/data.</param>
		/// <param name="siteMarkerName">The name of the site marker to look up.</param>
		/// <param name="queryStringParameters">Query string parameter values appended to the link URL.</param>
		/// <param name="linkText">The text of the link.</param>
		/// <param name="requireTargetReadAccess">
		/// When true, the target is also checked for security read access; if the current user cannot
		/// read the target entity, an empty string is rendered. False by default.
		/// </param>
		/// <returns>
		/// An HTML A tag linking to the site marker target, or an empty string when no target is found
		/// (or not readable, when <paramref name="requireTargetReadAccess"/> is true).
		/// </returns>
		public static IHtmlString SiteMarkerLink(this HtmlHelper html, string siteMarkerName, object queryStringParameters, string linkText = null, bool requireTargetReadAccess = false)
		{
			return SiteMarkerLink(html, siteMarkerName, queryStringParameters, new { }, linkText, requireTargetReadAccess);
		}

		/// <summary>
		/// Renders an HTML A tag linking to the target of a Site Marker (adx_sitemarker), by name.
		/// </summary>
		/// <param name="html">Extension method target, provides support for HTML rendering and access to view context/data.</param>
		/// <param name="siteMarkerName">The name of the site marker to look up.</param>
		/// <param name="queryStringParameters">Query string parameter values appended to the link URL.</param>
		/// <param name="linkText">The text of the link.</param>
		/// <param name="requireTargetReadAccess">
		/// When true, the target is also checked for security read access; if the current user cannot
		/// read the target entity, an empty string is rendered. False by default.
		/// </param>
		/// <returns>
		/// An HTML A tag linking to the site marker target, or an empty string when no target is found
		/// (or not readable, when <paramref name="requireTargetReadAccess"/> is true).
		/// </returns>
		public static IHtmlString SiteMarkerLink(this HtmlHelper html, string siteMarkerName, NameValueCollection queryStringParameters, string linkText = null, bool requireTargetReadAccess = false)
		{
			return SiteMarkerLink(html, siteMarkerName, queryStringParameters, new { }, linkText, requireTargetReadAccess);
		}

		/// <summary>
		/// Renders an HTML A tag linking to the target of a Site Marker (adx_sitemarker), by name.
		/// </summary>
		/// <param name="html">Extension method target, provides support for HTML rendering and access to view context/data.</param>
		/// <param name="siteMarkerName">The name of the site marker to look up.</param>
		/// <param name="queryStringParameters">Query string parameter values appended to the link URL.</param>
		/// <param name="htmlAttributes">HTML attributes added to the link tag.</param>
		/// <param name="linkText">The text of the link.</param>
		/// <param name="requireTargetReadAccess">
		/// When true, the target is also checked for security read access; if the current user cannot
		/// read the target entity, an empty string is rendered. False by default.
		/// </param>
		/// <returns>
		/// An HTML A tag linking to the site marker target, or an empty string when no target is found
		/// (or not readable, when <paramref name="requireTargetReadAccess"/> is true).
		/// </returns>
		public static IHtmlString SiteMarkerLink(this HtmlHelper html, string siteMarkerName, object queryStringParameters, IDictionary<string, object> htmlAttributes, string linkText = null, bool requireTargetReadAccess = false)
		{
			return SiteMarkerLink(html, siteMarkerName, PortalExtensions.AnonymousObjectToQueryStringParameters(queryStringParameters), htmlAttributes, linkText, requireTargetReadAccess);
		}

		/// <summary>
		/// Renders an HTML A tag linking to the target of a Site Marker (adx_sitemarker), by name.
		/// </summary>
		/// <param name="html">Extension method target, provides support for HTML rendering and access to view context/data.</param>
		/// <param name="siteMarkerName">The name of the site marker to look up.</param>
		/// <param name="queryStringParameters">Query string parameter values appended to the link URL.</param>
		/// <param name="htmlAttributes">HTML attributes added to the link tag.</param>
		/// <param name="linkText">The text of the link.</param>
		/// <param name="requireTargetReadAccess">
		/// When true, the target is also checked for security read access; if the current user cannot
		/// read the target entity, an empty string is rendered. False by default.
		/// </param>
		/// <returns>
		/// An HTML A tag linking to the site marker target, or an empty string when no target is found
		/// (or not readable, when <paramref name="requireTargetReadAccess"/> is true).
		/// </returns>
		public static IHtmlString SiteMarkerLink(this HtmlHelper html, string siteMarkerName, object queryStringParameters, object htmlAttributes, string linkText = null, bool requireTargetReadAccess = false)
		{
			return SiteMarkerLink(html, siteMarkerName, PortalExtensions.AnonymousObjectToQueryStringParameters(queryStringParameters), htmlAttributes, linkText, requireTargetReadAccess);
		}

		/// <summary>
		/// Renders an HTML A tag linking to the target of a Site Marker (adx_sitemarker), by name.
		/// </summary>
		/// <param name="html">Extension method target, provides support for HTML rendering and access to view context/data.</param>
		/// <param name="siteMarkerName">The name of the site marker to look up.</param>
		/// <param name="queryStringParameters">Query string parameter values appended to the link URL.</param>
		/// <param name="htmlAttributes">HTML attributes added to the link tag.</param>
		/// <param name="linkText">The text of the link.</param>
		/// <param name="requireTargetReadAccess">
		/// When true, the target is also checked for security read access; if the current user cannot
		/// read the target entity, an empty string is rendered. False by default.
		/// </param>
		/// <returns>
		/// An HTML A tag linking to the site marker target, or an empty string when no target is found
		/// (or not readable, when <paramref name="requireTargetReadAccess"/> is true).
		/// </returns>
		public static IHtmlString SiteMarkerLink(this HtmlHelper html, string siteMarkerName, NameValueCollection queryStringParameters, object htmlAttributes, string linkText = null, bool requireTargetReadAccess = false)
		{
			return SiteMarkerLink(html, siteMarkerName, queryStringParameters, HtmlHelper.AnonymousObjectToHtmlAttributes(htmlAttributes), linkText, requireTargetReadAccess);
		}

		/// <summary>
		/// Renders an HTML A tag linking to the target of a Site Marker (adx_sitemarker), by name.
		/// This overload performs the actual rendering; the others normalize their arguments and delegate here.
		/// </summary>
		/// <param name="html">Extension method target, provides support for HTML rendering and access to view context/data.</param>
		/// <param name="siteMarkerName">The name of the site marker to look up.</param>
		/// <param name="queryStringParameters">Query string parameter values appended to the link URL.</param>
		/// <param name="htmlAttributes">HTML attributes added to the link tag.</param>
		/// <param name="linkText">The text of the link; when null, the target's description is used.</param>
		/// <param name="requireTargetReadAccess">
		/// When true, the target is also checked for security read access; if the current user cannot
		/// read the target entity, an empty string is rendered. False by default.
		/// </param>
		/// <returns>
		/// An HTML A tag linking to the site marker target, or an empty string when no target is found
		/// (or not readable, when <paramref name="requireTargetReadAccess"/> is true).
		/// </returns>
		public static IHtmlString SiteMarkerLink(this HtmlHelper html, string siteMarkerName, NameValueCollection queryStringParameters, IDictionary<string, object> htmlAttributes, string linkText = null, bool requireTargetReadAccess = false)
		{
			var markerTarget = SiteMarker(html, siteMarkerName, requireTargetReadAccess);

			if (markerTarget == null)
			{
				return new HtmlString(string.Empty);
			}

			var anchor = new TagBuilder("a");
			var targetUrl = SiteMarkerUrl(html, markerTarget, queryStringParameters);

			if (targetUrl != null)
			{
				anchor.Attributes["href"] = targetUrl;
			}

			if (htmlAttributes != null)
			{
				anchor.MergeAttributes(htmlAttributes, true);
			}

			anchor.SetInnerText(linkText ?? markerTarget.Description);

			return new HtmlString(anchor.ToString());
		}

		/// <summary>
		/// Returns a URL for the target of a Site Marker (adx_sitemarker), by name.
		/// </summary>
		/// <param name="html">Extension method target, provides support for HTML rendering and access to view context/data.</param>
		/// <param name="siteMarkerName">The name of the site marker to look up.</param>
		/// <param name="requireTargetReadAccess">
		/// When true, the target is also checked for security read access; if the current user cannot
		/// read the target entity, null is returned. False by default.
		/// </param>
		/// <returns>
		/// A URL for the site marker target, or null when no target is found (or not readable, when
		/// <paramref name="requireTargetReadAccess"/> is true).
		/// </returns>
		public static string SiteMarkerUrl(this HtmlHelper html, string siteMarkerName, bool requireTargetReadAccess = false)
		{
			return SiteMarkerUrl(html, siteMarkerName, new { }, requireTargetReadAccess);
		}

		/// <summary>
		/// Returns a URL for the target of a Site Marker (adx_sitemarker), by name.
		/// </summary>
		/// <param name="html">Extension method target, provides support for HTML rendering and access to view context/data.</param>
		/// <param name="siteMarkerName">The name of the site marker to look up.</param>
		/// <param name="queryStringParameters">Query string parameter values appended to the URL.</param>
		/// <param name="requireTargetReadAccess">
		/// When true, the target is also checked for security read access; if the current user cannot
		/// read the target entity, null is returned. False by default.
		/// </param>
		/// <returns>
		/// A URL for the site marker target, or null when no target is found (or not readable, when
		/// <paramref name="requireTargetReadAccess"/> is true).
		/// </returns>
		public static string SiteMarkerUrl(this HtmlHelper html, string siteMarkerName, object queryStringParameters, bool requireTargetReadAccess = false)
		{
			return SiteMarkerUrl(html, siteMarkerName, PortalExtensions.AnonymousObjectToQueryStringParameters(queryStringParameters), requireTargetReadAccess);
		}

		/// <summary>
		/// Returns a URL for the target of a Site Marker (adx_sitemarker), by name.
		/// </summary>
		/// <param name="html">Extension method target, provides support for HTML rendering and access to view context/data.</param>
		/// <param name="siteMarkerName">The name of the site marker to look up.</param>
		/// <param name="queryStringParameters">Query string parameter values appended to the URL.</param>
		/// <param name="requireTargetReadAccess">
		/// When true, the target is also checked for security read access; if the current user cannot
		/// read the target entity, null is returned. False by default.
		/// </param>
		/// <returns>
		/// A URL for the site marker target, or null when no target is found (or not readable, when
		/// <paramref name="requireTargetReadAccess"/> is true).
		/// </returns>
		public static string SiteMarkerUrl(this HtmlHelper html, string siteMarkerName, NameValueCollection queryStringParameters, bool requireTargetReadAccess = false)
		{
			var markerTarget = SiteMarker(html, siteMarkerName, requireTargetReadAccess);

			if (markerTarget == null)
			{
				return null;
			}

			return SiteMarkerUrl(html, markerTarget, queryStringParameters);
		}

		/// <summary>
		/// Returns a URL for a given Site Marker (adx_sitemarker) target.
		/// </summary>
		/// <param name="html">Extension method target, provides support for HTML rendering and access to view context/data.</param>
		/// <param name="target">The <see cref="ISiteMarkerTarget"/> whose URL will be returned.</param>
		/// <returns>A URL for <paramref name="target"/>, or null when <paramref name="target"/> is null.</returns>
		public static string SiteMarkerUrl(this HtmlHelper html, ISiteMarkerTarget target)
		{
			return SiteMarkerUrl(html, target, new { });
		}

		/// <summary>
		/// Returns a URL for a given Site Marker (adx_sitemarker) target.
		/// </summary>
		/// <param name="html">Extension method target, provides support for HTML rendering and access to view context/data.</param>
		/// <param name="target">The <see cref="ISiteMarkerTarget"/> whose URL will be returned.</param>
		/// <param name="queryStringParameters">Query string parameter values appended to the URL.</param>
		/// <returns>A URL for <paramref name="target"/>, or null when <paramref name="target"/> is null.</returns>
		public static string SiteMarkerUrl(this HtmlHelper html, ISiteMarkerTarget target, object queryStringParameters)
		{
			return SiteMarkerUrl(html, target, PortalExtensions.AnonymousObjectToQueryStringParameters(queryStringParameters));
		}

		/// <summary>
		/// Returns a URL for a given Site Marker (adx_sitemarker) target.
		/// </summary>
		/// <param name="html">Extension method target, provides support for HTML rendering and access to view context/data.</param>
		/// <param name="target">The <see cref="ISiteMarkerTarget"/> whose URL will be returned.</param>
		/// <param name="queryStringParameters">Query string parameter values appended to the URL.</param>
		/// <returns>A URL for <paramref name="target"/>, or null when <paramref name="target"/> is null.</returns>
		public static string SiteMarkerUrl(this HtmlHelper html, ISiteMarkerTarget target, NameValueCollection queryStringParameters)
		{
			return html.EntityUrl(target, queryStringParameters);
		}
	}
}
// Licensed to the .NET Foundation under one or more agreements. // The .NET Foundation licenses this file to you under the MIT license. // See the LICENSE file in the project root for more information. using Test.Cryptography; using Xunit; namespace System.Security.Cryptography.X509Certificates.Tests { public static class X500DistinguishedNameTests { [Fact] public static void PrintInvalidEncoding() { // One byte has been removed from the payload here. Since DER is length-prepended // this will run out of data too soon, and report as invalid. byte[] encoded = "3017311530130603550403130C436F6D6D6F6E204E616D65".HexToByteArray(); X500DistinguishedName dn = new X500DistinguishedName(encoded); Assert.Equal("", dn.Decode(X500DistinguishedNameFlags.None)); } [Fact] [ActiveIssue(3892, PlatformID.AnyUnix)] public static void PrintMultiComponentRdn() { byte[] encoded = ( "30223120300C060355040313054A616D65733010060355040A13094D6963726F" + "736F6674").HexToByteArray(); const string expected = "CN=James + O=Microsoft"; X500DistinguishedName dn = new X500DistinguishedName(encoded); Assert.Equal(expected, dn.Decode(X500DistinguishedNameFlags.None)); // It should not change ordering when reversed, since the two are one unit. 
Assert.Equal(expected, dn.Decode(X500DistinguishedNameFlags.Reversed)); } [Fact] public static void PrintUnknownOidRdn() { byte[] encoded = ( "30183116301406052901020203130B496E76616C6964204F6964").HexToByteArray(); X500DistinguishedName dn = new X500DistinguishedName(encoded); Assert.Equal("OID.1.1.1.2.2.3=Invalid Oid", dn.Decode(X500DistinguishedNameFlags.None)); } [Theory] [MemberData("WhitespaceBeforeCases")] public static void QuoteWhitespaceBefore(string expected, string hexEncoded) { byte[] encoded = hexEncoded.HexToByteArray(); X500DistinguishedName dn = new X500DistinguishedName(encoded); Assert.Equal(expected, dn.Decode(X500DistinguishedNameFlags.None)); } [Theory] [MemberData("WhitespaceBeforeCases")] public static void NoQuoteWhitespaceBefore(string expectedQuoted, string hexEncoded) { string expected = expectedQuoted.Replace("\"", ""); byte[] encoded = hexEncoded.HexToByteArray(); X500DistinguishedName dn = new X500DistinguishedName(encoded); Assert.Equal(expected, dn.Decode(X500DistinguishedNameFlags.DoNotUseQuotes)); } [Theory] [MemberData("WhitespaceAfterCases")] public static void QuoteWhitespaceAfter(string expected, string hexEncoded) { byte[] encoded = hexEncoded.HexToByteArray(); X500DistinguishedName dn = new X500DistinguishedName(encoded); Assert.Equal(expected, dn.Decode(X500DistinguishedNameFlags.None)); } [Theory] [MemberData("WhitespaceAfterCases")] public static void NoQuoteWhitespaceAfter(string expectedQuoted, string hexEncoded) { string expected = expectedQuoted.Replace("\"", ""); byte[] encoded = hexEncoded.HexToByteArray(); X500DistinguishedName dn = new X500DistinguishedName(encoded); Assert.Equal(expected, dn.Decode(X500DistinguishedNameFlags.DoNotUseQuotes)); } [Theory] [MemberData("QuotedContentsCases")] public static void QuoteByContents(string expected, string hexEncoded) { byte[] encoded = hexEncoded.HexToByteArray(); X500DistinguishedName dn = new X500DistinguishedName(encoded); Assert.Equal(expected, 
dn.Decode(X500DistinguishedNameFlags.None)); } [Theory] [MemberData("QuotedContentsCases")] public static void NoQuoteByContents(string expectedQuoted, string hexEncoded) { string expected = expectedQuoted.Replace("\"", ""); byte[] encoded = hexEncoded.HexToByteArray(); X500DistinguishedName dn = new X500DistinguishedName(encoded); Assert.Equal(expected, dn.Decode(X500DistinguishedNameFlags.DoNotUseQuotes)); } [Theory] [MemberData("InternallyQuotedRDNs")] public static void QuotedWithQuotes(string quoted, string notQuoted, string hexEncoded) { byte[] encoded = hexEncoded.HexToByteArray(); X500DistinguishedName dn = new X500DistinguishedName(encoded); Assert.Equal(quoted, dn.Decode(X500DistinguishedNameFlags.None)); } [Theory] [MemberData("InternallyQuotedRDNs")] public static void NotQuotedWithQuotes(string quoted, string notQuoted, string hexEncoded) { byte[] encoded = hexEncoded.HexToByteArray(); X500DistinguishedName dn = new X500DistinguishedName(encoded); Assert.Equal(notQuoted, dn.Decode(X500DistinguishedNameFlags.DoNotUseQuotes)); } [Fact] public static void PrintComplexReversed() { byte[] encoded = MicrosoftDotComSubject.HexToByteArray(); X500DistinguishedName dn = new X500DistinguishedName(encoded); const string expected = "CN=www.microsoft.com, OU=MSCOM, O=Microsoft Corporation, STREET=1 Microsoft Way, " + "L=Redmond, S=Washington, PostalCode=98052, C=US, SERIALNUMBER=600413485, "; // Windows 8.1 would continue the string with some unknown OIDs, but OpenSSL 1.0.1 can decode // at least businessCategory (2.5.4.15), and other Windows versions may do so in the future. 
// "OID.2.5.4.15=Private Organization, OID.1.3.6.1.4.1.311.60.2.1.2=Washington, " + // "OID.1.3.6.1.4.1.311.60.2.1.3=US"; Assert.StartsWith(expected, dn.Decode(X500DistinguishedNameFlags.Reversed), StringComparison.Ordinal); } [Fact] public static void PrintComplexForwards() { byte[] encoded = MicrosoftDotComSubject.HexToByteArray(); X500DistinguishedName dn = new X500DistinguishedName(encoded); const string expected = ", SERIALNUMBER=600413485, C=US, PostalCode=98052, S=Washington, L=Redmond, " + "STREET=1 Microsoft Way, O=Microsoft Corporation, OU=MSCOM, CN=www.microsoft.com"; Assert.EndsWith(expected, dn.Decode(X500DistinguishedNameFlags.None), StringComparison.Ordinal); } public static readonly object[][] WhitespaceBeforeCases = { // Regular space. new object[] { "CN=\" Common Name\"", "3017311530130603550403130C20436F6D6D6F6E204E616D65" }, // Tab new object[] { "CN=\"\tCommon Name\"", "30233121301F06035504031E1800090043006F006D006D006F006E0020004E00" + "61006D0065" }, // Newline new object[] { "CN=\"\nCommon Name\"", "30233121301F06035504031E18000A0043006F006D006D006F006E0020004E00" + "61006D0065" }, // xUnit doesn't like \v in Assert.Equals, reports it as an invalid character. //new object[] //{ // "CN=\"\vCommon Name\"", // "30233121301F06035504031E18000B0043006F006D006D006F006E0020004E00" + // "61006D0065" //}, // xUnit doesn't like FormFeed in Assert.Equals, reports it as an invalid character. //new object[] //{ // "CN=\"\u000cCommon Name\"", // "30233121301F06035504031E18000C0043006F006D006D006F006E0020004E00" + // "61006D0065" //}, // Carriage return new object[] { "CN=\"\rCommon Name\"", "30233121301F06035504031E18000D0043006F006D006D006F006E0020004E00" + "61006D0065" }, // em quad. This is char.IsWhitespace, but is not quoted. new object[] { "CN=\u2002Common Name", "30233121301F06035504031E1820020043006F006D006D006F006E0020004E00" + "61006D0065" }, }; public static readonly object[][] WhitespaceAfterCases = { // Regular space. 
new object[] { "CN=\"Common Name \"", "3017311530130603550403130C436F6D6D6F6E204E616D6520" }, // Newline new object[] { "CN=\"Common Name\t\"", "30233121301F06035504031E180043006F006D006D006F006E0020004E006100" + "6D00650009" }, // Newline new object[] { "CN=\"Common Name\n\"", "30233121301F06035504031E180043006F006D006D006F006E0020004E006100" + "6D0065000A" }, // xUnit doesn't like \v in Assert.Equals, reports it as an invalid character. //new object[] //{ // "CN=\"Common Name\v\"", // "30233121301F06035504031E180043006F006D006D006F006E0020004E006100" + // "6D0065000B" //}, // xUnit doesn't like FormFeed in Assert.Equals, reports it as an invalid character. //new object[] //{ // "CN=\"Common Name\u000c\"", // "30233121301F06035504031E180043006F006D006D006F006E0020004E006100" + // "6D0065000C" //}, // Carriage return new object[] { "CN=\"Common Name\r\"", "30233121301F06035504031E180043006F006D006D006F006E0020004E006100" + "6D0065000D" }, // em quad. This is char.IsWhitespace, but is not quoted. new object[] { "CN=Common Name\u2002", "30233121301F06035504031E180043006F006D006D006F006E0020004E006100" + "6D00652002" }, }; public static readonly object[][] QuotedContentsCases = { // Empty value new object[] { "CN=\"\"", "300B3109300706035504031300" }, // Comma (RDN separator) new object[] { "CN=\"Common,Name\"", "3016311430120603550403130B436F6D6D6F6E2C4E616D65" }, // Plus (RDN component separator) new object[] { "CN=\"Common+Name\"", "3016311430120603550403130B436F6D6D6F6E2B4E616D65" }, // Equal (Key/Value separator) new object[] { "CN=\"Common=Name\"", "3016311430120603550403130B436F6D6D6F6E3D4E616D65" }, // Note: Double Quote has been removed from this set, it's a dedicated test suite. // Newline new object[] { "CN=\"Common\nName\"", "3021311F301D06035504031E160043006F006D006D006F006E000A004E006100" + "6D0065" }, // Carriage return is NOT quoted. 
new object[] { "CN=Common\rName", "3021311F301D06035504031E160043006F006D006D006F006E000D004E006100" + "6D0065" }, // Less-than new object[] { "CN=\"Common<Name\"", "3021311F301D06035504031E160043006F006D006D006F006E003C004E006100" + "6D0065" }, // Greater-than new object[] { "CN=\"Common>Name\"", "3021311F301D06035504031E160043006F006D006D006F006E003E004E006100" + "6D0065" }, // Octothorpe (Number Sign, Pound, Hash, whatever) new object[] { "CN=\"Common#Name\"", "3021311F301D06035504031E160043006F006D006D006F006E0023004E006100" + "6D0065" }, // Semi-colon new object[] { "CN=\"Common;Name\"", "3021311F301D06035504031E160043006F006D006D006F006E003B004E006100" + "6D0065" }, }; public static readonly object[][] InternallyQuotedRDNs = { // Interior Double Quote new object[] { "CN=\"Common\"\"Name\"", // Quoted "CN=Common\"Name", // Not-Quoted "3021311F301D06035504031E160043006F006D006D006F006E0022004E006100" + "6D0065" }, // Starts with a double quote new object[] { "CN=\"\"\"Common Name\"", // Quoted "CN=\"Common Name", // Not-Quoted "30233121301F06035504031E1800220043006F006D006D006F006E0020004E00" + "61006D0065" }, // Ends with a double quote new object[] { "CN=\"Common Name\"\"\"", // Quoted "CN=Common Name\"", // Not-Quoted "30233121301F06035504031E180043006F006D006D006F006E0020004E006100" + "6D00650022" }, }; private const string MicrosoftDotComSubject = "3082010F31133011060B2B0601040182373C02010313025553311B3019060B2B" + "0601040182373C0201020C0A57617368696E67746F6E311D301B060355040F13" + "1450726976617465204F7267616E697A6174696F6E3112301006035504051309" + "363030343133343835310B3009060355040613025553310E300C06035504110C" + "0539383035323113301106035504080C0A57617368696E67746F6E3110300E06" + "035504070C075265646D6F6E643118301606035504090C0F31204D6963726F73" + "6F667420576179311E301C060355040A0C154D6963726F736F667420436F7270" + "6F726174696F6E310E300C060355040B0C054D53434F4D311A30180603550403" + "0C117777772E6D6963726F736F66742E636F6D"; } }
// ReSharper disable InconsistentNaming
namespace Gu.Analyzers.Test.GU0011DontIgnoreReturnValueTests
{
    using Gu.Roslyn.Asserts;
    using NUnit.Framework;

    internal partial class ValidCode
    {
        /// <summary>
        /// Cases where a discarded return value is deliberately IGNORED by GU0011,
        /// i.e. the analyzer must report no diagnostic.
        /// NOTE(review): <c>Analyzer</c> is declared in another part of this partial
        /// class — not visible in this file.
        /// </summary>
        internal class Ignore
        {
            /// <summary>StringBuilder fluent members may be called for their side effect.</summary>
            [TestCase("stringBuilder.AppendLine(\"test\");")]
            [TestCase("stringBuilder.Append(\"test\");")]
            [TestCase("stringBuilder.Clear();")]
            public void StringBuilder(string code)
            {
                var testCode = @" namespace RoslynSandbox { using System.Text; public class Foo { public void Bar() { var stringBuilder = new StringBuilder(); stringBuilder.AppendLine(""test""); } } }".AssertReplace("stringBuilder.AppendLine(\"test\");", code);
                AnalyzerAssert.Valid(Analyzer, testCode);
            }

            /// <summary>A chained Append(...).Append(...) statement is valid even though the final value is dropped.</summary>
            [Test]
            public void StringBuilderAppendChained()
            {
                var testCode = @" namespace RoslynSandbox { using System.Text; public class Foo { public void Bar() { var sb = new StringBuilder(); sb.Append(""1"").Append(""2""); } } }";
                AnalyzerAssert.Valid(Analyzer, testCode);
            }

            /// <summary>
            /// A method that returns the argument it was given (guard-clause style Ensure.NotNull)
            /// may have its return value ignored.
            /// </summary>
            [Test]
            public void WhenReturningSameInstance()
            {
                var ensureCode = @" namespace RoslynSandbox { using System; using System.Diagnostics; using System.Runtime.CompilerServices; public static class Ensure { public static T NotNull<T>(T value, string parameter, [CallerMemberName] string caller = null) where T : class { Debug.Assert(!string.IsNullOrEmpty(parameter), ""parameter cannot be null""); if (value == null) { var message = $""Expected parameter {parameter} in member {caller} to not be null""; throw new ArgumentNullException(parameter, message); } return value; } public static T NotNull<T>(T? value, string parameter, [CallerMemberName] string caller = null) where T : struct { Debug.Assert(!string.IsNullOrEmpty(parameter), ""parameter cannot be null""); if (value == null) { var message = $""Expected parameter {parameter} in member {caller} to not be null""; throw new ArgumentNullException(parameter, message); } return value.Value; } } }";
                var testCode = @" namespace RoslynSandbox { public class Foo { public Foo(string text) { Ensure.NotNull(text, nameof(text)); } } }";
                AnalyzerAssert.Valid(Analyzer, ensureCode, testCode);
            }

            /// <summary>A method returning <c>this</c> (fluent style) may be called as a statement.</summary>
            [Test]
            public void WhenReturningThis()
            {
                var testCode = @" namespace RoslynSandbox { public class Foo { public Foo Bar() { return this; } public void Meh() { Bar(); } } }";
                AnalyzerAssert.Valid(Analyzer, testCode);
            }

            /// <summary>An extension method returning its receiver (identity) may be called as a statement.</summary>
            [Test]
            public void WhenExtensionMethodReturningThis()
            {
                var barCode = @" namespace RoslynSandbox { internal static class Bar { internal static T Id<T>(this T value) { return value; } } }";
                var testCode = @" namespace RoslynSandbox { public class Foo { private Foo() { var meh =1; meh.Id(); } } }";
                AnalyzerAssert.Valid(Analyzer, barCode, testCode);
            }

            // Marked [Explicit]: undecided whether HashSet<T>.Add/Remove (bool return)
            // should be exempt from GU0011.
            [Explicit("Don't know if we want this.")]
            [TestCase("this.ints.Add(1);")]
            [TestCase("ints.Add(1);")]
            [TestCase("this.ints.Remove(1);")]
            public void HashSet(string operation)
            {
                var testCode = @" namespace RoslynSandbox { using System.Collections.Generic; public sealed class Foo { private readonly HashSet<int> ints = new HashSet<int>(); public Foo() { this.ints.Add(1); } } }".AssertReplace("this.ints.Add(1);", operation);
                AnalyzerAssert.Valid(Analyzer, testCode);
            }

            /// <summary>Non-generic IList.Add returns the index; ignoring it is fine.</summary>
            [TestCase("this.ints.Add(1);")]
            [TestCase("ints.Add(1);")]
            [TestCase("this.ints.Remove(1);")]
            public void IList(string operation)
            {
                var testCode = @" namespace RoslynSandbox { using System.Collections; using System.Collections.Generic; public sealed class Foo { private readonly IList ints = new List<int>(); public Foo() { this.ints.Add(1); } } }".AssertReplace("this.ints.Add(1);", operation);
                AnalyzerAssert.Valid(Analyzer, testCode);
            }

            /// <summary>List&lt;int&gt; mutators whose return values (bool/count) are commonly discarded.</summary>
            [TestCase("ints.Add(1);")]
            [TestCase("ints.Remove(1);")]
            [TestCase("ints.RemoveAll(x => x > 2);")]
            public void ListOfInt(string operation)
            {
                var testCode = @" namespace RoslynSandbox { using System.Collections.Generic; public class Foo { public Foo(List<int> ints) { ints.RemoveAll(x => x > 2); } } }".AssertReplace("ints.RemoveAll(x => x > 2);", operation);
                AnalyzerAssert.Valid(Analyzer, testCode);
            }

            /// <summary>ConcurrentDictionary.TryAdd returns bool; ignoring it is fine.</summary>
            [TestCase("map.TryAdd(1, 1);")]
            public void ConcurrentDictionary(string operation)
            {
                var testCode = @" namespace RoslynSandbox { using System.Collections.Concurrent; public class Foo { public Foo(ConcurrentDictionary<int, int> map) { map.TryAdd(1, 1); } } }".AssertReplace("map.TryAdd(1, 1);", operation);
                AnalyzerAssert.Valid(Analyzer, testCode);
            }

            /// <summary>Moq Setup(...).Returns(...) chains are configuration, not values to consume.</summary>
            [TestCase("mock.Setup(x => x.GetFormat(It.IsAny<Type>())).Returns(null)")]
            public void MoqSetupReturns(string code)
            {
                var testCode = @" namespace RoslynSandbox { using System; using Moq; using NUnit.Framework; public class Foo { [Test] public void Test() { var mock = new Mock<IFormatProvider>(); mock.Setup(x => x.GetFormat(It.IsAny<Type>())).Returns(null); } } }".AssertReplace("mock.Setup(x => x.GetFormat(It.IsAny<Type>())).Returns(null)", code);
                AnalyzerAssert.Valid(Analyzer, testCode);
            }

            /// <summary>Moq Setup of a void member returns an ISetup that is usually discarded.</summary>
            [TestCase("mock.Setup(x => x.Bar())")]
            public void MoqSetupVoid(string setup)
            {
                var testCode = @" namespace RoslynSandbox { using Moq; public class Foo { public Foo() { var mock = new Mock<IFoo>(); mock.Setup(x => x.Bar()); } } public interface IFoo { void Bar(); } }".AssertReplace("mock.Setup(x => x.Bar())", setup);
                AnalyzerAssert.Valid(Analyzer, testCode);
            }

            /// <summary>Ninject fluent binding syntax inside NinjectModule.Load is configuration.</summary>
            [TestCase("this.Bind<Foo>().To<Foo>()")]
            [TestCase("this.Bind<Foo>().To<Foo>().InSingletonScope()")]
            [TestCase("this.Bind<Foo>().ToMethod(x => new Foo())")]
            public void NinjectFluent(string bind)
            {
                var testCode = @" namespace RoslynSandbox { using Ninject.Modules; public sealed class Foo : NinjectModule { public override void Load() { this.Bind<Foo>().To<Foo>(); } } }".AssertReplace("this.Bind<Foo>().To<Foo>()", bind);
                AnalyzerAssert.Valid(Analyzer, testCode);
            }

            /// <summary>
            /// A user-defined extension method that returns its DocumentEditor receiver
            /// (fluent style) may be called as a statement.
            /// </summary>
            [Test]
            public void DocumentEditorExtensionMethod()
            {
                var extCode = @" namespace RoslynSandbox { using Microsoft.CodeAnalysis.CSharp.Syntax; using Microsoft.CodeAnalysis.Editing; public static class DocumentEditorExt { internal static DocumentEditor AddUsing(this DocumentEditor editor, UsingDirectiveSyntax usingDirective) { editor.ReplaceNode( editor.OriginalRoot, (root, _) => editor.OriginalRoot); return editor; } } }";
                var testCode = @" namespace RoslynSandbox { using Microsoft.CodeAnalysis.CSharp.Syntax; using Microsoft.CodeAnalysis.Editing; internal sealed class Foo { public void Bar(DocumentEditor editor, UsingDirectiveSyntax directive) { editor.AddUsing(directive); } } }";
                AnalyzerAssert.Valid(Analyzer, extCode, testCode);
            }
        }
    }
}
using System;
using System.Collections.Generic;
using System.Text;
using System.Runtime.InteropServices;
using gView.SDEWrapper.x64;
using gView.Framework.Data;
using gView.Framework.Geometry;

namespace gView.Interoperability.Sde.x64
{
    /// <summary>
    /// Feature cursor over an ArcSDE stream query (64-bit SDE 9.2 wrapper).
    /// Opens its own <see cref="ArcSdeConnection"/>, executes the query described
    /// by <see cref="SdeQueryInfo"/> and fetches one row per access of
    /// <see cref="NextFeature"/>. On any SDE error the cursor disposes itself and
    /// <see cref="NextFeature"/> returns null from then on.
    /// </summary>
    internal class SdeFeatureCursor : FeatureCursor
    {
        private SdeDataset _dataset;
        private SdeQueryInfo _queryInfo;
        private ArcSdeConnection _connection;
        private string _errMsg = "";
        private List<IField> _queryFields = null;

        /// <summary>
        /// Builds and executes the SDE stream query.
        /// </summary>
        /// <param name="dataset">Owning dataset; supplies the connection string.</param>
        /// <param name="tc">Table (or feature) class to query.</param>
        /// <param name="filter">
        /// Query filter; may be null (the base-constructor call anticipates this).
        /// When non-null it is mutated: the ID field is added and field prefix/postfix are set.
        /// </param>
        public SdeFeatureCursor(SdeDataset dataset, ITableClass tc, IQueryFilter filter)
            : base((tc is IFeatureClass) ? ((IFeatureClass)tc).SpatialReference : null,
                   (filter != null) ? filter.FeatureSpatialReference : null)
        {
            try
            {
                // BUGFIX: fieldPrefix/fieldPostfix used to be assigned unconditionally,
                // throwing a NullReferenceException for a null filter (which the catch
                // below silently converted into a dead cursor). All filter mutations
                // are now guarded by a single null check.
                if (filter != null)
                {
                    if (filter.SubFields != "*")
                    {
                        filter.AddField(tc.IDFieldName);
                    }

                    filter.fieldPrefix = tc.Name + ".";
                    filter.fieldPostfix = "";
                }

                Int64 err_no = 0;

                _dataset = dataset;
                if (_dataset == null)
                {
                    return;
                }

                _connection = new ArcSdeConnection(dataset.ConnectionString);
                if (!_connection.Open())
                {
                    return;
                }

                _queryInfo = new SdeQueryInfo(_connection, tc, filter);
                if (_queryInfo.ErrorMessage != "")
                {
                    Dispose();
                    return;
                }

                _connection.ResetStream();

                // SE_stream_set_state should also be called (see mapsde.c from UMN MapServer).
                if (Wrapper92_64.SE_stream_set_state(
                        _connection.SeStream,
                        CONST.SE_DEFAULT_STATE_ID,
                        CONST.SE_DEFAULT_STATE_ID,
                        CONST.SE_STATE_DIFF_NOCHECK) != 0)
                {
                    Dispose();
                    return;
                }

                if ((err_no = Wrapper92_64.SE_stream_query_with_info(_connection.SeStream, _queryInfo.SeQueryInfo)) != 0)
                {
                    Dispose();
                    return;
                }

                if (_queryInfo.IsSpatial)
                {
                    SE_FILTER se_filter = _queryInfo.Filter_Shape;
                    if ((err_no = Wrapper92_64.SE_stream_set_spatial_constraints(_connection.SeStream, CONST.SE_SPATIAL_FIRST, false, 1, ref se_filter)) != 0)
                    {
                        _errMsg = Wrapper92_64.GetErrorMsg(_connection.SeConnection, err_no);
                        Dispose();
                        return;
                    }
                }

                if (Wrapper92_64.SE_stream_execute(_connection.SeStream) != 0)
                {
                    Dispose();
                    return;
                }

                // Field list is all we need from the query info after execution.
                _queryFields = _queryInfo.QueryFields;
                _queryInfo.Dispose();
                _queryInfo = null;
            }
            catch (Exception ex)
            {
                _errMsg = ex.Message + "\n" + ex.StackTrace;
                Dispose();
            }
        }

        #region IFeatureCursor Member

        // Diagnostic counter: number of rows fetched so far.
        int i = 0;

        /// <summary>
        /// Fetches the next row from the SDE stream, converts it into a feature and
        /// transforms it into the output spatial reference (via the base class
        /// <c>Transform</c>). Returns null when the stream is exhausted or an error
        /// occurred — the cursor disposes itself in both cases.
        /// </summary>
        public override IFeature NextFeature
        {
            get
            {
                if (_connection == null)
                {
                    return null;
                }

                Int64 err_no = Wrapper92_64.SE_stream_fetch(_connection.SeStream);
                if (err_no == CONST.SE_FINISHED)
                {
                    Dispose();
                    return null;
                }
                if (err_no != 0)
                {
                    _errMsg = Wrapper92_64.GetErrorMsg(_connection.SeConnection, err_no);
                    Dispose();
                    return null;
                }

                i++;
                IFeature feature = FetchNextFeature();
                Transform(feature);
                return feature;
            }
        }

        #endregion

        #region ICursor Member

        /// <summary>
        /// Releases the query info and closes the connection. Safe to call repeatedly
        /// (fields are nulled after release).
        /// </summary>
        public override void Dispose()
        {
            base.Dispose();

            if (_queryInfo != null)
            {
                _queryInfo.Dispose();
                _queryInfo = null;
            }

            if (_connection != null)
            {
                try
                {
                    _connection.Close();
                }
                catch
                {
                    // Best effort: ignore failures while closing a possibly broken connection.
                }
                _connection = null;
            }
        }

        #endregion

        /// <summary>
        /// Materializes the current stream row into a <see cref="Feature"/> by reading
        /// every query field at its 1-based stream column index. Returns null on error.
        /// </summary>
        private IFeature FetchNextFeature()
        {
            try
            {
                Feature feat = new Feature();
                System.Int16 index = 1; // SDE stream columns are addressed 1-based.
                foreach (IField field in _queryFields)
                {
                    switch (field.type)
                    {
                        case FieldType.ID:
                            feat.OID = FetchInteger(index);
                            feat.Fields.Add(new FieldValue(field.name, feat.OID));
                            break;
                        case FieldType.Shape:
                            feat.Shape = FetchShape(index);
                            break;
                        case FieldType.boolean:
                            // TODO: boolean fetch not implemented — placeholder value.
                            feat.Fields.Add(new FieldValue(field.name, "???"));
                            break;
                        case FieldType.character:
                            // TODO: character fetch not implemented — placeholder value.
                            feat.Fields.Add(new FieldValue(field.name, "???"));
                            break;
                        case FieldType.Date:
                            feat.Fields.Add(new FieldValue(field.name, FetchDate(index)));
                            break;
                        case FieldType.Double:
                            feat.Fields.Add(new FieldValue(field.name, FetchDouble(index)));
                            break;
                        case FieldType.Float:
                            feat.Fields.Add(new FieldValue(field.name, FetchFloat(index)));
                            break;
                        case FieldType.biginteger:
                        case FieldType.integer:
                            feat.Fields.Add(new FieldValue(field.name, FetchInteger(index)));
                            break;
                        case FieldType.smallinteger:
                            feat.Fields.Add(new FieldValue(field.name, FetchSmallInteger(index)));
                            break;
                        case FieldType.String:
                            feat.Fields.Add(new FieldValue(field.name, FetchString(index, field.size)));
                            break;
                        case FieldType.NString:
                            feat.Fields.Add(new FieldValue(field.name, FetchNString(index, field.size)));
                            break;
                    }
                    index++;
                }
                return feat;
            }
            catch
            {
                return null;
            }
        }

        /// <summary>Reads a 32-bit integer column; returns 0 on any SDE error.</summary>
        private int FetchInteger(System.Int16 index)
        {
            System.Int32 val = 0;
            System.Int64 err_no = Wrapper92_64.SE_stream_get_integer(_connection.SeStream, index, ref val);
            if (err_no == 0)
            {
                return val;
            }
            return 0;
        }

        /// <summary>Reads a 16-bit integer column; returns 0 on any SDE error.</summary>
        private short FetchSmallInteger(System.Int16 index)
        {
            System.Int16 val = 0;
            System.Int64 err_no = Wrapper92_64.SE_stream_get_smallint(_connection.SeStream, index, ref val);
            if (err_no == 0)
            {
                return val;
            }
            return 0;
        }

        /// <summary>Reads a double column; returns 0 on any SDE error.</summary>
        private double FetchDouble(System.Int16 index)
        {
            System.Double val = 0;
            System.Int64 err_no = Wrapper92_64.SE_stream_get_double(_connection.SeStream, index, ref val);
            if (err_no == 0)
            {
                return val;
            }
            return 0;
        }

        /// <summary>Reads a float column; returns 0 on any SDE error.</summary>
        private float FetchFloat(System.Int16 index)
        {
            float val = 0;
            System.Int64 err_no = Wrapper92_64.SE_stream_get_float(_connection.SeStream, index, ref val);
            if (err_no == 0)
            {
                return val;
            }
            return 0;
        }

        /// <summary>
        /// Reads a narrow string column of the declared field size.
        /// Returns "" for a NULL column value and an "&lt;ERROR&gt;:" / "&lt;EXCEPTION&gt;:"
        /// marker string on failure (matching the historical behavior of this cursor).
        /// </summary>
        private string FetchString(System.Int16 index, int size)
        {
            try
            {
                byte[] buffer = new byte[size + 1]; // +1 for the native NUL terminator.
                System.Int64 err_no = Wrapper92_64.SE_stream_get_string(_connection.SeStream, index, buffer);
                if (err_no == -1004) // NULL column value — NOTE(review): confirm this is SE_NULL_VALUE.
                {
                    return String.Empty;
                }
                if (err_no != 0)
                {
                    return "<ERROR>:" + Wrapper92_64.GetErrorMsg(_connection.SeConnection, err_no);
                }
                // NOTE(review): Encoding.UTF7 is obsolete and insecure; kept here only for
                // backward compatibility — verify SDE's actual narrow-string encoding
                // before switching to UTF-8.
                return System.Text.Encoding.UTF7.GetString(buffer).Replace("\0", "");
            }
            catch (Exception ex)
            {
                return "<EXCEPTION>:" + ex.Message;
            }
        }

        /// <summary>
        /// Reads a wide (UTF-16) string column of the declared field size.
        /// Returns "" for a NULL column value and an "&lt;ERROR&gt;:" / "&lt;EXCEPTION&gt;:"
        /// marker string on failure.
        /// </summary>
        private string FetchNString(System.Int16 index, int size)
        {
            try
            {
                byte[] buffer = new byte[(size + 1) * 2]; // 2 bytes per UTF-16 unit, +1 for NUL.
                System.Int64 err_no = Wrapper92_64.SE_stream_get_nstring(_connection.SeStream, index, buffer);
                if (err_no == -1004) // NULL column value — NOTE(review): confirm this is SE_NULL_VALUE.
                {
                    return String.Empty;
                }
                if (err_no != 0)
                {
                    return "<ERROR>:" + Wrapper92_64.GetErrorMsg(_connection.SeConnection, err_no);
                }
                return System.Text.Encoding.Unicode.GetString(buffer).Replace("\0", "");
            }
            catch (Exception ex)
            {
                return "<EXCEPTION>:" + ex.Message;
            }
        }

        /// <summary>
        /// Reads a date column. Returns null for a NULL value, DateTime(1,1,1) on error.
        /// Converts from C struct <c>tm</c> semantics: tm_year counts from 1900 and
        /// tm_mon is 0-based. Any time-of-day information in the struct is discarded.
        /// </summary>
        private DateTime? FetchDate(System.Int16 index)
        {
            tm TM = new tm();
            System.Int64 err_no = Wrapper92_64.SE_stream_get_date(_connection.SeStream, index, ref TM);
            if (err_no == -1004) // NULL column value — NOTE(review): confirm this is SE_NULL_VALUE.
            {
                return null;
            }
            if (err_no != 0)
            {
                return new DateTime(1, 1, 1);
            }
            return new DateTime(TM.tm_year + 1900, TM.tm_mon + 1, TM.tm_mday);
        }

        /// <summary>
        /// Reads the shape column and converts it to a framework geometry
        /// (Point / MultiPoint / Polyline / Polygon). Returns null for NIL shapes,
        /// unsupported types or any SDE error. All unmanaged buffers are released
        /// in the finally block.
        /// </summary>
        private IGeometry FetchShape(System.Int16 index)
        {
            unsafe
            {
                System.Int64 err_no = 0;
                SE_SHAPE_64 shape_val = new SE_SHAPE_64();
                System.Int32* part_offsets = null;
                System.Int32* subp_offsets = null;
                SE_POINT* points = null;
                try
                {
                    err_no = Wrapper92_64.SE_shape_create(new SE_COORDREF_64(), ref shape_val);
                    if (err_no != 0)
                    {
                        return null;
                    }

                    err_no = Wrapper92_64.SE_stream_get_shape(_connection.SeStream, index, shape_val);
                    if (err_no != 0)
                    {
                        return null;
                    }

                    Int64 shapeType = 0, numPoints = 0, numParts = 0, numSubparts = 0;

                    err_no = Wrapper92_64.SE_shape_get_type(shape_val, ref shapeType);
                    if (err_no != 0 || shapeType == CONST.SG_NIL_SHAPE)
                    {
                        return null;
                    }

                    err_no = Wrapper92_64.SE_shape_get_num_points(shape_val, 0, 0, ref numPoints);
                    if (err_no != 0)
                    {
                        return null;
                    }

                    err_no = Wrapper92_64.SE_shape_get_num_parts(shape_val, ref numParts, ref numSubparts);
                    if (err_no != 0)
                    {
                        return null;
                    }

                    // One extra slot per offset array so offsets[count] can serve as an
                    // end sentinel for the upper bound in the subpart loops below.
                    part_offsets = (System.Int32*)Marshal.AllocHGlobal(((int)numParts + 1) * sizeof(System.Int32));
                    subp_offsets = (System.Int32*)Marshal.AllocHGlobal(((int)numSubparts + 1) * sizeof(System.Int32));
                    points = (SE_POINT*)Marshal.AllocHGlobal((int)numPoints * sizeof(SE_POINT));
                    part_offsets[numParts] = (int)numSubparts;
                    subp_offsets[numSubparts] = (int)numPoints;

                    err_no = Wrapper92_64.SE_shape_get_all_points(
                        shape_val,
                        SE_ROTATION_TYPE.SE_DEFAULT_ROTATION,
                        (IntPtr)part_offsets,
                        (IntPtr)subp_offsets,
                        (IntPtr)points,
                        (IntPtr)null,
                        (IntPtr)null);
                    if (err_no != 0)
                    {
                        return null;
                    }

                    IGeometry ret = null;
                    switch (shapeType)
                    {
                        case CONST.SG_POINT_SHAPE:
                            if (numPoints == 1)
                            {
                                ret = new Point(points[0].x, points[0].y);
                            }
                            else if (numPoints > 1)
                            {
                                MultiPoint mPoint_ = new MultiPoint();
                                for (int i = 0; i < numPoints; i++)
                                {
                                    mPoint_.AddPoint(new Point(points[i].x, points[i].y));
                                }
                                ret = mPoint_;
                            }
                            break;

                        case CONST.SG_MULTI_POINT_SHAPE:
                            MultiPoint mPoint = new MultiPoint();
                            for (int i = 0; i < numPoints; i++)
                            {
                                mPoint.AddPoint(new Point(points[i].x, points[i].y));
                            }
                            ret = mPoint;
                            break;

                        case CONST.SG_LINE_SHAPE:
                        case CONST.SG_SIMPLE_LINE_SHAPE:
                        case CONST.SG_MULTI_LINE_SHAPE:
                        case CONST.SG_MULTI_SIMPLE_LINE_SHAPE:
                            Polyline polyline = new Polyline();
                            for (int s = 0; s < numSubparts; s++)
                            {
                                Path path = new Path();
                                int to = subp_offsets[s + 1]; // sentinel makes this valid for the last subpart
                                for (int i = subp_offsets[s]; i < to; i++)
                                {
                                    path.AddPoint(new Point(points[i].x, points[i].y));
                                }
                                polyline.AddPath(path);
                            }
                            ret = polyline;
                            break;

                        case CONST.SG_AREA_SHAPE:
                        case CONST.SG_MULTI_AREA_SHAPE:
                            Polygon polygon = new Polygon();
                            for (int s = 0; s < numSubparts; s++)
                            {
                                Ring ring = new Ring();
                                int to = subp_offsets[s + 1]; // sentinel makes this valid for the last subpart
                                for (int i = subp_offsets[s]; i < to; i++)
                                {
                                    ring.AddPoint(new Point(points[i].x, points[i].y));
                                }
                                polygon.AddRing(ring);
                            }
                            ret = polygon;
                            break;
                    }
                    return ret;
                }
                catch
                {
                    return null;
                }
                finally
                {
                    if (part_offsets != null) Marshal.FreeHGlobal((System.IntPtr)part_offsets);
                    if (subp_offsets != null) Marshal.FreeHGlobal((System.IntPtr)subp_offsets);
                    if (points != null) Marshal.FreeHGlobal((System.IntPtr)points);
                    if (shape_val.handle != 0) Wrapper92_64.SE_shape_free(shape_val);
                }
            }
        }
    }
}
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ // ReSharper disable SpecifyACultureInStringConversionExplicitly // ReSharper disable UnusedAutoPropertyAccessor.Global namespace Apache.Ignite.Core.Tests.Compute { using System; using System.Collections; using System.Collections.Concurrent; using System.Collections.Generic; using System.Linq; using System.Threading; using Apache.Ignite.Core.Binary; using Apache.Ignite.Core.Cluster; using Apache.Ignite.Core.Common; using Apache.Ignite.Core.Compute; using Apache.Ignite.Core.Impl; using Apache.Ignite.Core.Impl.Binary; using Apache.Ignite.Core.Resource; using NUnit.Framework; /// <summary> /// Tests for compute. /// </summary> public class ComputeApiTest { /** Echo task name. */ public const string EchoTask = "org.apache.ignite.platform.PlatformComputeEchoTask"; /** Binary argument task name. */ public const string BinaryArgTask = "org.apache.ignite.platform.PlatformComputeBinarizableArgTask"; /** Broadcast task name. */ public const string BroadcastTask = "org.apache.ignite.platform.PlatformComputeBroadcastTask"; /** Broadcast task name. */ private const string DecimalTask = "org.apache.ignite.platform.PlatformComputeDecimalTask"; /** Java binary class name. 
*/ private const string JavaBinaryCls = "PlatformComputeJavaBinarizable"; /** Echo type: null. */ private const int EchoTypeNull = 0; /** Echo type: byte. */ private const int EchoTypeByte = 1; /** Echo type: bool. */ private const int EchoTypeBool = 2; /** Echo type: short. */ private const int EchoTypeShort = 3; /** Echo type: char. */ private const int EchoTypeChar = 4; /** Echo type: int. */ private const int EchoTypeInt = 5; /** Echo type: long. */ private const int EchoTypeLong = 6; /** Echo type: float. */ private const int EchoTypeFloat = 7; /** Echo type: double. */ private const int EchoTypeDouble = 8; /** Echo type: array. */ private const int EchoTypeArray = 9; /** Echo type: collection. */ private const int EchoTypeCollection = 10; /** Echo type: map. */ private const int EchoTypeMap = 11; /** Echo type: binarizable. */ public const int EchoTypeBinarizable = 12; /** Echo type: binary (Java only). */ private const int EchoTypeBinarizableJava = 13; /** Type: object array. */ private const int EchoTypeObjArray = 14; /** Type: binary object array. */ private const int EchoTypeBinarizableArray = 15; /** Type: enum. */ private const int EchoTypeEnum = 16; /** Type: enum array. */ private const int EchoTypeEnumArray = 17; /** Type: enum field. */ private const int EchoTypeEnumField = 18; /** Type: affinity key. */ public const int EchoTypeAffinityKey = 19; /** Type: enum from cache. */ private const int EchoTypeEnumFromCache = 20; /** Type: enum array from cache. */ private const int EchoTypeEnumArrayFromCache = 21; /** Echo type: IgniteUuid. */ private const int EchoTypeIgniteUuid = 22; /** Echo type: binary enum (created with builder). */ private const int EchoTypeBinaryEnum = 23; /** */ private const string DefaultCacheName = "default"; /** First node. */ private IIgnite _grid1; /** Second node. */ private IIgnite _grid2; /** Third node. */ private IIgnite _grid3; /// <summary> /// Initialization routine. 
/// </summary> [TestFixtureSetUp] public void InitClient() { TestUtils.KillProcesses(); var configs = GetConfigs(); _grid1 = Ignition.Start(Configuration(configs.Item1)); _grid2 = Ignition.Start(Configuration(configs.Item2)); _grid3 = Ignition.Start(Configuration(configs.Item3)); } /// <summary> /// Gets the configs. /// </summary> protected virtual Tuple<string, string, string> GetConfigs() { return Tuple.Create( "config\\compute\\compute-grid1.xml", "config\\compute\\compute-grid2.xml", "config\\compute\\compute-grid3.xml"); } /// <summary> /// Gets the expected compact footers setting. /// </summary> protected virtual bool CompactFooter { get { return true; } } [TestFixtureTearDown] public void StopClient() { Ignition.StopAll(true); } [TearDown] public void AfterTest() { TestUtils.AssertHandleRegistryIsEmpty(1000, _grid1, _grid2, _grid3); } /// <summary> /// Test that it is possible to get projection from grid. /// </summary> [Test] public void TestProjection() { IClusterGroup prj = _grid1.GetCluster(); Assert.NotNull(prj); Assert.AreEqual(prj, prj.Ignite); // Check that default Compute projection excludes client nodes. CollectionAssert.AreEquivalent(prj.ForServers().GetNodes(), prj.GetCompute().ClusterGroup.GetNodes()); } /// <summary> /// Test non-existent cache. /// </summary> [Test] public void TestNonExistentCache() { Assert.Catch(typeof(ArgumentException), () => { _grid1.GetCache<int, int>("bad_name"); }); } /// <summary> /// Test node content. 
/// </summary> [Test] public void TestNodeContent() { ICollection<IClusterNode> nodes = _grid1.GetCluster().GetNodes(); foreach (IClusterNode node in nodes) { Assert.NotNull(node.Addresses); Assert.IsTrue(node.Addresses.Count > 0); Assert.Throws<NotSupportedException>(() => node.Addresses.Add("addr")); Assert.NotNull(node.GetAttributes()); Assert.IsTrue(node.GetAttributes().Count > 0); Assert.Throws<NotSupportedException>(() => node.GetAttributes().Add("key", "val")); Assert.NotNull(node.HostNames); Assert.Throws<NotSupportedException>(() => node.HostNames.Add("h")); Assert.IsTrue(node.Id != Guid.Empty); Assert.IsTrue(node.Order > 0); Assert.NotNull(node.GetMetrics()); } } /// <summary> /// Test cluster metrics. /// </summary> [Test] public void TestClusterMetrics() { var cluster = _grid1.GetCluster(); IClusterMetrics metrics = cluster.GetMetrics(); Assert.IsNotNull(metrics); Assert.AreEqual(cluster.GetNodes().Count, metrics.TotalNodes); Thread.Sleep(2000); IClusterMetrics newMetrics = cluster.GetMetrics(); Assert.IsFalse(metrics == newMetrics); Assert.IsTrue(metrics.LastUpdateTime < newMetrics.LastUpdateTime); } /// <summary> /// Test cluster metrics. /// </summary> [Test] public void TestNodeMetrics() { var node = _grid1.GetCluster().GetNode(); IClusterMetrics metrics = node.GetMetrics(); Assert.IsNotNull(metrics); Assert.IsTrue(metrics == node.GetMetrics()); Thread.Sleep(2000); IClusterMetrics newMetrics = node.GetMetrics(); Assert.IsFalse(metrics == newMetrics); Assert.IsTrue(metrics.LastUpdateTime < newMetrics.LastUpdateTime); } /// <summary> /// Test cluster metrics. /// </summary> [Test] public void TestResetMetrics() { var cluster = _grid1.GetCluster(); Thread.Sleep(2000); var metrics1 = cluster.GetMetrics(); cluster.ResetMetrics(); var metrics2 = cluster.GetMetrics(); Assert.IsNotNull(metrics1); Assert.IsNotNull(metrics2); } /// <summary> /// Test node ping. 
/// </summary> [Test] public void TestPingNode() { var cluster = _grid1.GetCluster(); Assert.IsTrue(cluster.GetNodes().Select(node => node.Id).All(cluster.PingNode)); Assert.IsFalse(cluster.PingNode(Guid.NewGuid())); } /// <summary> /// Tests the topology version. /// </summary> [Test] public void TestTopologyVersion() { var cluster = _grid1.GetCluster(); var topVer = cluster.TopologyVersion; Ignition.Stop(_grid3.Name, true); Assert.AreEqual(topVer + 1, _grid1.GetCluster().TopologyVersion); _grid3 = Ignition.Start(Configuration(GetConfigs().Item3)); Assert.AreEqual(topVer + 2, _grid1.GetCluster().TopologyVersion); } /// <summary> /// Tests the topology by version. /// </summary> [Test] public void TestTopology() { var cluster = _grid1.GetCluster(); Assert.AreEqual(1, cluster.GetTopology(1).Count); Assert.AreEqual(null, cluster.GetTopology(int.MaxValue)); // Check that Nodes and Topology return the same for current version var topVer = cluster.TopologyVersion; var top = cluster.GetTopology(topVer); var nodes = cluster.GetNodes(); Assert.AreEqual(top.Count, nodes.Count); Assert.IsTrue(top.All(nodes.Contains)); // Stop/start node to advance version and check that history is still correct Assert.IsTrue(Ignition.Stop(_grid2.Name, true)); try { top = cluster.GetTopology(topVer); Assert.AreEqual(top.Count, nodes.Count); Assert.IsTrue(top.All(nodes.Contains)); } finally { _grid2 = Ignition.Start(Configuration(GetConfigs().Item2)); } } /// <summary> /// Test nodes in full topology. /// </summary> [Test] public void TestNodes() { Assert.IsNotNull(_grid1.GetCluster().GetNode()); ICollection<IClusterNode> nodes = _grid1.GetCluster().GetNodes(); Assert.IsTrue(nodes.Count == 3); // Check subsequent call on the same topology. nodes = _grid1.GetCluster().GetNodes(); Assert.IsTrue(nodes.Count == 3); Assert.IsTrue(Ignition.Stop(_grid2.Name, true)); // Check subsequent calls on updating topologies. 
nodes = _grid1.GetCluster().GetNodes(); Assert.IsTrue(nodes.Count == 2); nodes = _grid1.GetCluster().GetNodes(); Assert.IsTrue(nodes.Count == 2); _grid2 = Ignition.Start(Configuration(GetConfigs().Item2)); nodes = _grid1.GetCluster().GetNodes(); Assert.IsTrue(nodes.Count == 3); } /// <summary> /// Test "ForNodes" and "ForNodeIds". /// </summary> [Test] public void TestForNodes() { ICollection<IClusterNode> nodes = _grid1.GetCluster().GetNodes(); IClusterNode first = nodes.ElementAt(0); IClusterNode second = nodes.ElementAt(1); IClusterGroup singleNodePrj = _grid1.GetCluster().ForNodeIds(first.Id); Assert.AreEqual(1, singleNodePrj.GetNodes().Count); Assert.AreEqual(first.Id, singleNodePrj.GetNodes().First().Id); singleNodePrj = _grid1.GetCluster().ForNodeIds(new List<Guid> { first.Id }); Assert.AreEqual(1, singleNodePrj.GetNodes().Count); Assert.AreEqual(first.Id, singleNodePrj.GetNodes().First().Id); singleNodePrj = _grid1.GetCluster().ForNodes(first); Assert.AreEqual(1, singleNodePrj.GetNodes().Count); Assert.AreEqual(first.Id, singleNodePrj.GetNodes().First().Id); singleNodePrj = _grid1.GetCluster().ForNodes(new List<IClusterNode> { first }); Assert.AreEqual(1, singleNodePrj.GetNodes().Count); Assert.AreEqual(first.Id, singleNodePrj.GetNodes().First().Id); IClusterGroup multiNodePrj = _grid1.GetCluster().ForNodeIds(first.Id, second.Id); Assert.AreEqual(2, multiNodePrj.GetNodes().Count); Assert.IsTrue(multiNodePrj.GetNodes().Contains(first)); Assert.IsTrue(multiNodePrj.GetNodes().Contains(second)); multiNodePrj = _grid1.GetCluster().ForNodeIds(new[] {first, second}.Select(x => x.Id)); Assert.AreEqual(2, multiNodePrj.GetNodes().Count); Assert.IsTrue(multiNodePrj.GetNodes().Contains(first)); Assert.IsTrue(multiNodePrj.GetNodes().Contains(second)); multiNodePrj = _grid1.GetCluster().ForNodes(first, second); Assert.AreEqual(2, multiNodePrj.GetNodes().Count); Assert.IsTrue(multiNodePrj.GetNodes().Contains(first)); 
Assert.IsTrue(multiNodePrj.GetNodes().Contains(second)); multiNodePrj = _grid1.GetCluster().ForNodes(new List<IClusterNode> { first, second }); Assert.AreEqual(2, multiNodePrj.GetNodes().Count); Assert.IsTrue(multiNodePrj.GetNodes().Contains(first)); Assert.IsTrue(multiNodePrj.GetNodes().Contains(second)); } /// <summary> /// Test "ForNodes" and "ForNodeIds". Make sure lazy enumerables are enumerated only once. /// </summary> [Test] public void TestForNodesLaziness() { var nodes = _grid1.GetCluster().GetNodes().Take(2).ToArray(); var callCount = 0; Func<IClusterNode, IClusterNode> nodeSelector = node => { callCount++; return node; }; Func<IClusterNode, Guid> idSelector = node => { callCount++; return node.Id; }; var projection = _grid1.GetCluster().ForNodes(nodes.Select(nodeSelector)); Assert.AreEqual(2, projection.GetNodes().Count); Assert.AreEqual(2, callCount); projection = _grid1.GetCluster().ForNodeIds(nodes.Select(idSelector)); Assert.AreEqual(2, projection.GetNodes().Count); Assert.AreEqual(4, callCount); } /// <summary> /// Test for local node projection. /// </summary> [Test] public void TestForLocal() { IClusterGroup prj = _grid1.GetCluster().ForLocal(); Assert.AreEqual(1, prj.GetNodes().Count); Assert.AreEqual(_grid1.GetCluster().GetLocalNode(), prj.GetNodes().First()); } /// <summary> /// Test for remote nodes projection. /// </summary> [Test] public void TestForRemotes() { ICollection<IClusterNode> nodes = _grid1.GetCluster().GetNodes(); IClusterGroup prj = _grid1.GetCluster().ForRemotes(); Assert.AreEqual(2, prj.GetNodes().Count); Assert.IsTrue(nodes.Contains(prj.GetNodes().ElementAt(0))); Assert.IsTrue(nodes.Contains(prj.GetNodes().ElementAt(1))); } /// <summary> /// Test for daemon nodes projection. 
/// </summary>
[Test]
public void TestForDaemons()
{
    // No daemons in the topology initially.
    Assert.AreEqual(0, _grid1.GetCluster().ForDaemons().GetNodes().Count);

    var daemonCfg = new IgniteConfiguration(TestUtils.GetTestConfiguration())
    {
        SpringConfigUrl = GetConfigs().Item1,
        IgniteInstanceName = "daemonGrid",
        IsDaemon = true
    };

    using (var ignite = Ignition.Start(daemonCfg))
    {
        var daemonPrj = _grid1.GetCluster().ForDaemons();

        Assert.AreEqual(1, daemonPrj.GetNodes().Count);
        Assert.AreEqual(ignite.GetCluster().GetLocalNode().Id, daemonPrj.GetNode().Id);

        Assert.IsTrue(daemonPrj.GetNode().IsDaemon);
        Assert.IsTrue(ignite.GetCluster().GetLocalNode().IsDaemon);
    }
}

/// <summary>
/// Test for host nodes projection.
/// </summary>
[Test]
public void TestForHost()
{
    var allNodes = _grid1.GetCluster().GetNodes();

    var hostPrj = _grid1.GetCluster().ForHost(allNodes.First());

    // All three nodes run on the same host in this test.
    Assert.AreEqual(3, hostPrj.GetNodes().Count);

    foreach (var idx in new[] {0, 1, 2})
        Assert.IsTrue(allNodes.Contains(hostPrj.GetNodes().ElementAt(idx)));
}

/// <summary>
/// Test for oldest, youngest and random projections.
/// </summary>
[Test]
public void TestForOldestYoungestRandom()
{
    var allNodes = _grid1.GetCluster().GetNodes();

    // Each of these projections must select exactly one node from the topology.
    foreach (var singleNodePrj in new[]
    {
        _grid1.GetCluster().ForYoungest(),
        _grid1.GetCluster().ForOldest(),
        _grid1.GetCluster().ForRandom()
    })
    {
        Assert.AreEqual(1, singleNodePrj.GetNodes().Count);
        Assert.IsTrue(allNodes.Contains(singleNodePrj.GetNode()));
    }
}

/// <summary>
/// Tests ForServers projection.
/// </summary>
[Test]
public void TestForServers()
{
    var cluster = _grid1.GetCluster();

    // All server nodes (grid3 is a client, so two servers remain).
    var serverNodes = cluster.ForServers().GetNodes();
    Assert.AreEqual(2, serverNodes.Count);
    Assert.IsTrue(serverNodes.All(x => !x.IsClient));

    // Mixed server + client group: only the server survives the filter.
    var mixedIds = new[] { _grid2, _grid3 }.Select(x => x.GetCluster().GetLocalNode().Id);
    var mixedGroup = cluster.ForNodeIds(mixedIds);
    Assert.AreEqual(1, mixedGroup.ForServers().GetNodes().Count);

    // Client-only group: no servers at all.
    var clientIds = new[] { _grid3 }.Select(x => x.GetCluster().GetLocalNode().Id);
    var clientGroup = cluster.ForNodeIds(clientIds);
    Assert.AreEqual(0, clientGroup.ForServers().GetNodes().Count);
}

/// <summary>
/// Test for attribute projection.
/// </summary>
[Test]
public void TestForAttribute()
{
    var allNodes = _grid1.GetCluster().GetNodes();

    var attrPrj = _grid1.GetCluster().ForAttribute("my_attr", "value1");

    Assert.AreEqual(1, attrPrj.GetNodes().Count);
    Assert.IsTrue(allNodes.Contains(attrPrj.GetNode()));
    Assert.AreEqual("value1", attrPrj.GetNodes().First().GetAttribute<string>("my_attr"));
}

/// <summary>
/// Test for cache/data/client projections.
/// </summary>
[Test]
public void TestForCacheNodes()
{
    var allNodes = _grid1.GetCluster().GetNodes();

    // Cache nodes.
    var cacheGroup = _grid1.GetCluster().ForCacheNodes("cache1");
    Assert.AreEqual(2, cacheGroup.GetNodes().Count);
    Assert.IsTrue(allNodes.Contains(cacheGroup.GetNodes().ElementAt(0)));
    Assert.IsTrue(allNodes.Contains(cacheGroup.GetNodes().ElementAt(1)));

    // Data nodes: must be a subset of the cache nodes.
    var dataGroup = _grid1.GetCluster().ForDataNodes("cache1");
    Assert.AreEqual(2, dataGroup.GetNodes().Count);
    Assert.IsTrue(cacheGroup.GetNodes().Contains(dataGroup.GetNodes().ElementAt(0)));
    Assert.IsTrue(cacheGroup.GetNodes().Contains(dataGroup.GetNodes().ElementAt(1)));

    // Client nodes: none for this cache.
    var clientGroup = _grid1.GetCluster().ForClientNodes("cache1");
    Assert.AreEqual(0, clientGroup.GetNodes().Count);
}

/// <summary>
/// Test for cache predicate.
/// </summary>
[Test]
public void TestForPredicate()
{
    // Filter out the node carrying my_attr == "value1".
    var withoutVal1 = _grid1.GetCluster().ForPredicate(new NotAttributePredicate("value1").Apply);
    Assert.AreEqual(2, withoutVal1.GetNodes().Count);

    // Additionally filter out my_attr == "value2"; predicates must compose.
    var withoutVal1And2 = withoutVal1.ForPredicate(new NotAttributePredicate("value2").Apply);
    Assert.AreEqual(1, withoutVal1And2.GetNodes().Count);

    string attr;
    withoutVal1And2.GetNodes().First().TryGetAttribute("my_attr", out attr);

    Assert.IsTrue(attr == null || (!attr.Equals("value1") && !attr.Equals("value2")));
}

/// <summary>
/// Attribute predicate: accepts nodes whose "my_attr" attribute is absent
/// or different from the configured value.
/// </summary>
private class NotAttributePredicate
{
    /** Attribute value that this predicate rejects. */
    private readonly string _attrVal;

    /// <summary>
    /// Constructor.
    /// </summary>
    /// <param name="attrVal">Attribute value to reject.</param>
    public NotAttributePredicate(string attrVal)
    {
        _attrVal = attrVal;
    }

    /** <inheritDoc /> */
    public bool Apply(IClusterNode node)
    {
        string attr;
        node.TryGetAttribute("my_attr", out attr);

        return attr == null || !attr.Equals(_attrVal);
    }
}

/// <summary>
/// Test echo with decimals.
/// </summary>
[Test]
public void TestEchoDecimal()
{
    AssertEchoDecimal(decimal.Zero);

    // (lo, mid, hi) magnitude triplets for the decimal(int, int, int, bool, byte) constructor,
    // exercised with both signs and with +/-1 offsets to hit word boundaries.
    var magnitudes = new[]
    {
        new[] {0, 0, 1},
        new[] {0, 0, int.MinValue},
        new[] {0, 0, int.MaxValue},
        new[] {0, 1, 0},
        new[] {0, int.MinValue, 0},
        new[] {0, int.MaxValue, 0},
        new[] {1, 0, 0},
        new[] {int.MinValue, 0, 0},
        new[] {int.MaxValue, 0, 0},
        new[] {1, 1, 1}
    };

    foreach (var m in magnitudes)
    {
        foreach (var negative in new[] {false, true})
        {
            var val = new decimal(m[0], m[1], m[2], negative, 0);

            AssertEchoDecimal(val);
            AssertEchoDecimal(val - 1);
            AssertEchoDecimal(val + 1);
        }
    }

    // Powers of 2^16 (2^16, 2^32, 2^48, 2^64, 2^80) that straddle integer width
    // boundaries on the Java side, with both signs and +/-1 offsets.
    foreach (var s in new[]
    {
        "65536", "4294967296", "281474976710656", "18446744073709551616", "1208925819614629174706176"
    })
    {
        foreach (var str in new[] {s, "-" + s})
        {
            var val = decimal.Parse(str);

            AssertEchoDecimal(val);
            AssertEchoDecimal(val - 1);
            AssertEchoDecimal(val + 1);
        }
    }

    // Extremes (MaxValue + 1 / MinValue - 1 would overflow and are covered below).
    AssertEchoDecimal(decimal.MaxValue);
    AssertEchoDecimal(decimal.MinValue);
    AssertEchoDecimal(decimal.MaxValue - 1);
    AssertEchoDecimal(decimal.MinValue + 1);

    // NOTE(review): "11,12" is parsed with the current culture: on comma-decimal cultures this
    // is 11.12, on point-decimal cultures the comma is a group separator and this is 1112.
    // The round-trip check holds either way, but the intended value should be pinned with an
    // explicit CultureInfo — TODO confirm.
    AssertEchoDecimal(decimal.Parse("11,12"));
    AssertEchoDecimal(decimal.Parse("-11,12"));

    // Test echo with overflow (more than 96 bits of magnitude).
    var ex = Assert.Throws<BinaryObjectException>(() => _grid1.GetCompute()
        .ExecuteJavaTask<object>(DecimalTask, new object[] {null, decimal.MaxValue.ToString() + 1}));

    Assert.AreEqual("Decimal magnitude overflow (must be less than 96 bits): 104", ex.Message);

    // Negative scale. 1E+1 parses to "1 scale -1" on Java side.
    ex = Assert.Throws<BinaryObjectException>(() => _grid1.GetCompute()
        .ExecuteJavaTask<object>(DecimalTask, new object[] {null, "1E+1"}));

    Assert.AreEqual("Decimal value scale overflow (must be between 0 and 28): -1", ex.Message);
}

/// <summary>
/// Sends a decimal value (and its string form) to the Java echo task and verifies
/// that it round-trips unchanged.
/// </summary>
/// <param name="val">Value to round-trip.</param>
private void AssertEchoDecimal(decimal val)
{
    Assert.AreEqual(val,
        _grid1.GetCompute().ExecuteJavaTask<object>(DecimalTask, new object[] { val, val.ToString() }));
}

/// <summary>
/// Test echo task returning null.
/// </summary>
[Test]
public void TestEchoTaskNull()
{
    Assert.IsNull(_grid1.GetCompute().ExecuteJavaTask<object>(EchoTask, EchoTypeNull));
}

/// <summary>
/// Test echo task returning various primitives.
/// </summary>
[Test]
public void TestEchoTaskPrimitives()
{
    Assert.AreEqual(1, _grid1.GetCompute().ExecuteJavaTask<byte>(EchoTask, EchoTypeByte));
    Assert.AreEqual(true, _grid1.GetCompute().ExecuteJavaTask<bool>(EchoTask, EchoTypeBool));
    Assert.AreEqual(1, _grid1.GetCompute().ExecuteJavaTask<short>(EchoTask, EchoTypeShort));
    Assert.AreEqual((char) 1, _grid1.GetCompute().ExecuteJavaTask<char>(EchoTask, EchoTypeChar));
    Assert.AreEqual(1, _grid1.GetCompute().ExecuteJavaTask<int>(EchoTask, EchoTypeInt));
    Assert.AreEqual(1, _grid1.GetCompute().ExecuteJavaTask<long>(EchoTask, EchoTypeLong));
    Assert.AreEqual((float) 1, _grid1.GetCompute().ExecuteJavaTask<float>(EchoTask, EchoTypeFloat));
    Assert.AreEqual((double) 1, _grid1.GetCompute().ExecuteJavaTask<double>(EchoTask, EchoTypeDouble));
}

/// <summary>
/// Test echo task returning compound types (array, collection, map).
/// </summary>
[Test]
public void TestEchoTaskCompound()
{
    var resArr = _grid1.GetCompute().ExecuteJavaTask<int[]>(EchoTask, EchoTypeArray);
    Assert.AreEqual(1, resArr.Length);
    Assert.AreEqual(1, resArr[0]);

    var resCol = _grid1.GetCompute().ExecuteJavaTask<IList>(EchoTask, EchoTypeCollection);
    Assert.AreEqual(1, resCol.Count);
    Assert.AreEqual(1, resCol[0]);

    var resMap = _grid1.GetCompute().ExecuteJavaTask<IDictionary>(EchoTask, EchoTypeMap);
    Assert.AreEqual(1, resMap.Count);
    Assert.AreEqual(1, resMap[1]);
}

/// <summary>
/// Test echo task returning binary object.
/// </summary>
[Test]
public void TestEchoTaskBinarizable()
{
    var testValues = new[] {int.MinValue, int.MaxValue, 0, 1, -1, byte.MaxValue, byte.MinValue};
    var cache = _grid1.GetCache<int, int>(DefaultCacheName);
    var compute = _grid1.GetCompute();

    foreach (var testVal in testValues)
    {
        cache[EchoTypeBinarizable] = testVal;

        // Deserialized form.
        var echoed = compute.ExecuteJavaTask<PlatformComputeBinarizable>(EchoTask, EchoTypeBinarizable);
        Assert.AreEqual(testVal, echoed.Field);

        // Binary mode.
        var binEchoed = compute.WithKeepBinary().ExecuteJavaTask<BinaryObject>(EchoTask, EchoTypeBinarizable);
        Assert.AreEqual(testVal, binEchoed.GetField<long>("Field"));

        // Binary representation produced locally must match the one from Java byte-for-byte.
        var localBin = _grid1.GetBinary().ToBinary<BinaryObject>(echoed);
        Assert.AreEqual(localBin.Header.HashCode, binEchoed.Header.HashCode);

        Func<BinaryObject, byte[]> rawBytes = bo => bo.Data.Skip(bo.Offset).Take(bo.Header.Length).ToArray();

        Assert.AreEqual(rawBytes(localBin), rawBytes(binEchoed));
    }
}

/// <summary>
/// Test echo task returning binary object with no corresponding class definition.
/// </summary>
[Test]
public void TestEchoTaskBinarizableNoClass()
{
    var compute = _grid1.GetCompute();

    compute.WithKeepBinary();

    var res = compute.ExecuteJavaTask<IBinaryObject>(EchoTask, EchoTypeBinarizableJava);

    Assert.AreEqual(1, res.GetField<int>("field"));

    // This call must fail because "keepBinary" flag is reset.
    var ex = Assert.Throws<BinaryObjectException>(() =>
    {
        compute.ExecuteJavaTask<IBinaryObject>(EchoTask, EchoTypeBinarizableJava);
    });

    Assert.AreEqual("Unknown pair [platformId=1, typeId=2009791293]", ex.Message);
}

/// <summary>
/// Tests the echo task returning object array.
/// </summary>
[Test]
public void TestEchoTaskObjectArray()
{
    var res = _grid1.GetCompute().ExecuteJavaTask<string[]>(EchoTask, EchoTypeObjArray);

    Assert.AreEqual(new[] {"foo", "bar", "baz"}, res);
}

/// <summary>
/// Tests the echo task returning binary array.
/// </summary>
[Test]
public void TestEchoTaskBinarizableArray()
{
    var res = _grid1.GetCompute().ExecuteJavaTask<object[]>(EchoTask, EchoTypeBinarizableArray);

    Assert.AreEqual(3, res.Length);

    // Elements carry 1-based sequential field values.
    for (var idx = 0; idx < res.Length; idx++)
    {
        Assert.AreEqual(idx + 1, ((PlatformComputeBinarizable) res[idx]).Field);
    }
}

/// <summary>
/// Tests the echo task returning enum.
/// </summary>
[Test]
public void TestEchoTaskEnum()
{
    var res = _grid1.GetCompute().ExecuteJavaTask<PlatformComputeEnum>(EchoTask, EchoTypeEnum);

    Assert.AreEqual(PlatformComputeEnum.Bar, res);
}

/// <summary>
/// Tests the echo task returning a binary (keep-binary) enum.
/// </summary>
[Test]
public void TestEchoTaskBinaryEnum()
{
    var res = _grid1.GetCompute().WithKeepBinary()
        .ExecuteJavaTask<IBinaryObject>(EchoTask, EchoTypeBinaryEnum);

    Assert.AreEqual("JavaFoo", res.EnumName);
    Assert.AreEqual(1, res.EnumValue);

    var binType = res.GetBinaryType();

    Assert.IsTrue(binType.IsEnum);
    Assert.AreEqual("JavaDynEnum", binType.TypeName);

    var enumValues = binType.GetEnumValues().OrderBy(x => x.EnumValue).ToArray();

    Assert.AreEqual(new[] {1, 2}, enumValues.Select(x => x.EnumValue));
    Assert.AreEqual(new[] {"JavaFoo", "JavaBar"}, enumValues.Select(x => x.EnumName));
}

/// <summary>
/// Tests the echo task returning an enum that was previously put to cache.
/// </summary>
[Test]
public void TestEchoTaskEnumFromCache()
{
    var cache = _grid1.GetCache<int, PlatformComputeEnum>(DefaultCacheName);

    // Round-trip every declared enum member through the cache and the Java echo task.
    foreach (PlatformComputeEnum enumVal in Enum.GetValues(typeof(PlatformComputeEnum)))
    {
        cache[EchoTypeEnumFromCache] = enumVal;

        var res = _grid1.GetCompute().ExecuteJavaTask<PlatformComputeEnum>(EchoTask, EchoTypeEnumFromCache);

        Assert.AreEqual(enumVal, res);
    }
}

/// <summary>
/// Tests the echo task returning an enum array.
/// </summary>
[Test]
public void TestEchoTaskEnumArray()
{
    var res = _grid1.GetCompute().ExecuteJavaTask<PlatformComputeEnum[]>(EchoTask, EchoTypeEnumArray);

    Assert.AreEqual(new[]
    {
        PlatformComputeEnum.Bar,
        PlatformComputeEnum.Baz,
        PlatformComputeEnum.Foo
    }, res);
}

/// <summary>
/// Tests the echo task returning enum.
/// </summary>
[Test]
public void TestEchoTaskEnumArrayFromCache()
{
    var cache = _grid1.GetCache<int, PlatformComputeEnum[]>(DefaultCacheName);

    var testArrays = new[]
    {
        new[] {PlatformComputeEnum.Bar, PlatformComputeEnum.Baz, PlatformComputeEnum.Foo},
        new[] {PlatformComputeEnum.Foo, PlatformComputeEnum.Baz},
        new[] {PlatformComputeEnum.Bar}
    };

    foreach (var arr in testArrays)
    {
        cache[EchoTypeEnumArrayFromCache] = arr;

        var res = _grid1.GetCompute().ExecuteJavaTask<PlatformComputeEnum[]>(
            EchoTask, EchoTypeEnumArrayFromCache);

        Assert.AreEqual(arr, res);
    }
}

/// <summary>
/// Tests the echo task reading enum from a binary object field.
/// Ensures that Java can understand enums written by .NET.
/// </summary>
[Test]
public void TestEchoTaskEnumField()
{
    var enumVal = PlatformComputeEnum.Baz;

    _grid1.GetCache<int, InteropComputeEnumFieldTest>(DefaultCacheName)
        .Put(EchoTypeEnumField, new InteropComputeEnumFieldTest {InteropEnum = enumVal});

    var res = _grid1.GetCompute().ExecuteJavaTask<PlatformComputeEnum>(EchoTask, EchoTypeEnumField);

    // Verify enum metadata registered by the .NET side.
    var enumMeta = _grid1.GetBinary().GetBinaryType(typeof(PlatformComputeEnum));

    Assert.IsTrue(enumMeta.IsEnum);
    Assert.AreEqual(enumMeta.TypeName, typeof(PlatformComputeEnum).Name);
    Assert.AreEqual(0, enumMeta.Fields.Count);

    Assert.AreEqual(enumVal, res);
}

/// <summary>
/// Tests that IgniteGuid in .NET maps to IgniteUuid in Java.
/// </summary>
[Test]
public void TestEchoTaskIgniteUuid()
{
    var guid = Guid.NewGuid();

    _grid1.GetCache<int, object>(DefaultCacheName)[EchoTypeIgniteUuid] = new IgniteGuid(guid, 25);

    var res = _grid1.GetCompute().ExecuteJavaTask<IgniteGuid>(EchoTask, EchoTypeIgniteUuid);

    Assert.AreEqual(guid, res.GlobalId);
    Assert.AreEqual(25, res.LocalId);
}

/// <summary>
/// Test for binary argument in Java.
/// </summary>
[Test]
public void TestBinarizableArgTask()
{
    var compute = _grid1.GetCompute();

    compute.WithKeepBinary();

    var arg = new PlatformComputeNetBinarizable {Field = 100};

    var res = compute.ExecuteJavaTask<int>(BinaryArgTask, arg);

    Assert.AreEqual(arg.Field, res);
}

/// <summary>
/// Test running broadcast task.
/// </summary>
[Test]
public void TestBroadcastTask([Values(false, true)] bool isAsync)
{
    // Executes the Java broadcast task and returns the ids of the nodes it ran on.
    Func<ICompute, List<Guid>> execTask;

    if (isAsync)
        execTask = c => c.ExecuteJavaTaskAsync<ICollection>(BroadcastTask, null).Result.OfType<Guid>().ToList();
    else
        execTask = c => c.ExecuteJavaTask<ICollection>(BroadcastTask, null).OfType<Guid>().ToList();

    var nodeIds = execTask(_grid1.GetCompute());

    Assert.AreEqual(2, nodeIds.Count);
    Assert.AreEqual(1, _grid1.GetCluster().ForNodeIds(nodeIds.ElementAt(0)).GetNodes().Count);
    Assert.AreEqual(1, _grid1.GetCluster().ForNodeIds(nodeIds.ElementAt(1)).GetNodes().Count);

    // Re-run on a projection limited to the same nodes; the result set must not change.
    var prj = _grid1.GetCluster().ForPredicate(node => nodeIds.Take(2).Contains(node.Id));

    Assert.AreEqual(2, prj.GetNodes().Count);

    var filteredIds = execTask(prj.GetCompute());

    Assert.AreEqual(2, filteredIds.Count);
    Assert.IsTrue(filteredIds.Contains(nodeIds.ElementAt(0)));
    Assert.IsTrue(filteredIds.Contains(nodeIds.ElementAt(1)));
}

/// <summary>
/// Tests the action broadcast.
/// </summary>
[Test]
public void TestBroadcastAction()
{
    var id = Guid.NewGuid();
    _grid1.GetCompute().Broadcast(new ComputeAction(id));
    Assert.AreEqual(2, ComputeAction.InvokeCount(id));

    id = Guid.NewGuid();
    _grid1.GetCompute().BroadcastAsync(new ComputeAction(id)).Wait();
    Assert.AreEqual(2, ComputeAction.InvokeCount(id));
}

/// <summary>
/// Tests single action run.
/// </summary>
[Test]
public void TestRunAction()
{
    var syncId = Guid.NewGuid();
    _grid1.GetCompute().Run(new ComputeAction(syncId));
    Assert.AreEqual(1, ComputeAction.InvokeCount(syncId));

    var asyncId = Guid.NewGuid();
    _grid1.GetCompute().RunAsync(new ComputeAction(asyncId)).Wait();
    Assert.AreEqual(1, ComputeAction.InvokeCount(asyncId));
}

/// <summary>
/// Tests cancellation of a single action run.
/// </summary>
[Test]
public void TestRunActionAsyncCancel()
{
    using (var cts = new CancellationTokenSource())
    {
        // Cancel while executing.
        var task = _grid1.GetCompute().RunAsync(new ComputeAction(), cts.Token);
        cts.Cancel();
        Assert.IsTrue(task.IsCanceled);

        // Use already-cancelled token.
        task = _grid1.GetCompute().RunAsync(new ComputeAction(), cts.Token);
        Assert.IsTrue(task.IsCanceled);
    }
}

/// <summary>
/// Tests multiple actions run.
/// </summary>
[Test]
public void TestRunActions()
{
    var syncId = Guid.NewGuid();
    _grid1.GetCompute().Run(Enumerable.Range(0, 10).Select(x => new ComputeAction(syncId)));
    Assert.AreEqual(10, ComputeAction.InvokeCount(syncId));

    var asyncId = Guid.NewGuid();
    _grid1.GetCompute().RunAsync(Enumerable.Range(0, 10).Select(x => new ComputeAction(asyncId))).Wait();
    Assert.AreEqual(10, ComputeAction.InvokeCount(asyncId));
}

/// <summary>
/// Tests affinity run.
/// </summary>
[Test]
public void TestAffinityRun()
{
    const string cacheName = DefaultCacheName;

    // Test keys for non-client nodes.
    var serverNodes = new[] {_grid1, _grid2}.Select(x => x.GetCluster().GetLocalNode());

    var aff = _grid1.GetAffinity(cacheName);

    foreach (var node in serverNodes)
    {
        var primaryKey = TestUtils.GetPrimaryKey(_grid1, cacheName, node);
        var affinityKey = aff.GetAffinityKey<int, int>(primaryKey);

        // The action must run on the node that owns the key.
        _grid1.GetCompute().AffinityRun(cacheName, affinityKey, new ComputeAction());
        Assert.AreEqual(node.Id, ComputeAction.LastNodeId);

        _grid1.GetCompute().AffinityRunAsync(cacheName, affinityKey, new ComputeAction()).Wait();
        Assert.AreEqual(node.Id, ComputeAction.LastNodeId);
    }
}

/// <summary>
/// Tests affinity call.
/// </summary>
[Test]
public void TestAffinityCall()
{
    const string cacheName = DefaultCacheName;

    // Test keys for non-client nodes.
    var serverNodes = new[] { _grid1, _grid2 }.Select(x => x.GetCluster().GetLocalNode());

    var aff = _grid1.GetAffinity(cacheName);

    foreach (var node in serverNodes)
    {
        var primaryKey = TestUtils.GetPrimaryKey(_grid1, cacheName, node);
        var affinityKey = aff.GetAffinityKey<int, int>(primaryKey);

        var result = _grid1.GetCompute().AffinityCall(cacheName, affinityKey, new ComputeFunc());

        Assert.AreEqual(result, ComputeFunc.InvokeCount);
        Assert.AreEqual(node.Id, ComputeFunc.LastNodeId);

        // Async.
        ComputeFunc.InvokeCount = 0;

        result = _grid1.GetCompute().AffinityCallAsync(cacheName, affinityKey, new ComputeFunc()).Result;

        Assert.AreEqual(result, ComputeFunc.InvokeCount);
        Assert.AreEqual(node.Id, ComputeFunc.LastNodeId);
    }
}

/// <summary>
/// Test "withNoFailover" feature.
/// </summary>
[Test]
public void TestWithNoFailover()
{
    var nodeIds = _grid1.GetCompute().WithNoFailover().ExecuteJavaTask<ICollection>(BroadcastTask, null)
        .OfType<Guid>().ToList();

    Assert.AreEqual(2, nodeIds.Count);
    Assert.AreEqual(1, _grid1.GetCluster().ForNodeIds(nodeIds.ElementAt(0)).GetNodes().Count);
    Assert.AreEqual(1, _grid1.GetCluster().ForNodeIds(nodeIds.ElementAt(1)).GetNodes().Count);
}

/// <summary>
/// Test "withTimeout" feature.
/// </summary>
[Test]
public void TestWithTimeout()
{
    var nodeIds = _grid1.GetCompute().WithTimeout(1000).ExecuteJavaTask<ICollection>(BroadcastTask, null)
        .OfType<Guid>().ToList();

    Assert.AreEqual(2, nodeIds.Count);
    Assert.AreEqual(1, _grid1.GetCluster().ForNodeIds(nodeIds.ElementAt(0)).GetNodes().Count);
    Assert.AreEqual(1, _grid1.GetCluster().ForNodeIds(nodeIds.ElementAt(1)).GetNodes().Count);
}

/// <summary>
/// Test simple dotNet task execution.
/// </summary> [Test] public void TestNetTaskSimple() { Assert.AreEqual(2, _grid1.GetCompute() .Execute<NetSimpleJobArgument, NetSimpleJobResult, NetSimpleTaskResult>( typeof(NetSimpleTask), new NetSimpleJobArgument(1)).Res); Assert.AreEqual(2, _grid1.GetCompute() .ExecuteAsync<NetSimpleJobArgument, NetSimpleJobResult, NetSimpleTaskResult>( typeof(NetSimpleTask), new NetSimpleJobArgument(1)).Result.Res); Assert.AreEqual(4, _grid1.GetCompute().Execute(new NetSimpleTask(), new NetSimpleJobArgument(2)).Res); Assert.AreEqual(6, _grid1.GetCompute().ExecuteAsync(new NetSimpleTask(), new NetSimpleJobArgument(3)) .Result.Res); } /// <summary> /// Tests the exceptions. /// </summary> [Test] public void TestExceptions() { Assert.Throws<AggregateException>(() => _grid1.GetCompute().Broadcast(new InvalidComputeAction())); Assert.Throws<AggregateException>( () => _grid1.GetCompute().Execute<NetSimpleJobArgument, NetSimpleJobResult, NetSimpleTaskResult>( typeof (NetSimpleTask), new NetSimpleJobArgument(-1))); // Local. var ex = Assert.Throws<AggregateException>(() => _grid1.GetCluster().ForLocal().GetCompute().Broadcast(new ExceptionalComputeAction())); Assert.IsNotNull(ex.InnerException); Assert.AreEqual("Compute job has failed on local node, examine InnerException for details.", ex.InnerException.Message); Assert.IsNotNull(ex.InnerException.InnerException); Assert.AreEqual(ExceptionalComputeAction.ErrorText, ex.InnerException.InnerException.Message); // Remote. ex = Assert.Throws<AggregateException>(() => _grid1.GetCluster().ForRemotes().GetCompute().Broadcast(new ExceptionalComputeAction())); Assert.IsNotNull(ex.InnerException); Assert.AreEqual("Compute job has failed on remote node, examine InnerException for details.", ex.InnerException.Message); Assert.IsNotNull(ex.InnerException.InnerException); Assert.AreEqual(ExceptionalComputeAction.ErrorText, ex.InnerException.InnerException.Message); } /// <summary> /// Tests the footer setting. 
/// </summary>
[Test]
public void TestFooterSetting()
{
    // Every node must agree on the compact-footer binary setting.
    Assert.AreEqual(CompactFooter, ((Ignite)_grid1).Marshaller.CompactFooter);

    foreach (var g in new[] {_grid1, _grid2, _grid3})
        Assert.AreEqual(CompactFooter, g.GetConfiguration().BinaryConfiguration.CompactFooter);
}

/// <summary>
/// Create configuration.
/// </summary>
/// <param name="path">XML config path.</param>
private static IgniteConfiguration Configuration(string path)
{
    return new IgniteConfiguration(TestUtils.GetTestConfiguration())
    {
        BinaryConfiguration = new BinaryConfiguration
        {
            TypeConfigurations = new List<BinaryTypeConfiguration>
            {
                new BinaryTypeConfiguration(typeof(PlatformComputeBinarizable)),
                new BinaryTypeConfiguration(typeof(PlatformComputeNetBinarizable)),
                new BinaryTypeConfiguration(JavaBinaryCls),
                new BinaryTypeConfiguration(typeof(PlatformComputeEnum)),
                new BinaryTypeConfiguration(typeof(InteropComputeEnumFieldTest))
            },
            NameMapper = BinaryBasicNameMapper.SimpleNameInstance
        },
        SpringConfigUrl = path
    };
}
}

// Binarizable type registered in Configuration() above; used for interop marshalling tests.
class PlatformComputeBinarizable
{
    public int Field { get; set; }
}

// .NET-only subtype of the binarizable type above.
class PlatformComputeNetBinarizable : PlatformComputeBinarizable
{
}

// Task that maps one NetSimpleJob per subgrid node and sums the job results in Reduce.
// A non-positive argument maps InvalidNetSimpleJob instances, which fail to serialize.
[Serializable]
class NetSimpleTask : IComputeTask<NetSimpleJobArgument, NetSimpleJobResult, NetSimpleTaskResult>
{
    /** <inheritDoc /> */
    public IDictionary<IComputeJob<NetSimpleJobResult>, IClusterNode> Map(IList<IClusterNode> subgrid,
        NetSimpleJobArgument arg)
    {
        var jobs = new Dictionary<IComputeJob<NetSimpleJobResult>, IClusterNode>();

        for (int i = 0; i < subgrid.Count; i++)
        {
            var job = arg.Arg > 0 ? new NetSimpleJob {Arg = arg} : new InvalidNetSimpleJob();

            jobs[job] = subgrid[i];
        }

        return jobs;
    }

    /** <inheritDoc /> */
    public ComputeJobResultPolicy OnResult(IComputeJobResult<NetSimpleJobResult> res,
        IList<IComputeJobResult<NetSimpleJobResult>> rcvd)
    {
        return ComputeJobResultPolicy.Wait;
    }

    /** <inheritDoc /> */
    public NetSimpleTaskResult Reduce(IList<IComputeJobResult<NetSimpleJobResult>> results)
    {
        return new NetSimpleTaskResult(results.Sum(res => res.Data.Res));
    }
}

// Job that simply echoes its argument back as the result.
[Serializable]
class NetSimpleJob : IComputeJob<NetSimpleJobResult>
{
    public NetSimpleJobArgument Arg;

    /** <inheritDoc /> */
    public NetSimpleJobResult Execute()
    {
        return new NetSimpleJobResult(Arg.Arg);
    }

    /** <inheritDoc /> */
    public void Cancel()
    {
        // No-op.
    }
}

// Job whose binary (de)serialization always throws, to exercise task failure paths.
class InvalidNetSimpleJob : NetSimpleJob, IBinarizable
{
    public void WriteBinary(IBinaryWriter writer)
    {
        throw new BinaryObjectException("Expected");
    }

    public void ReadBinary(IBinaryReader reader)
    {
        throw new BinaryObjectException("Expected");
    }
}

[Serializable]
class NetSimpleJobArgument
{
    public int Arg;

    public NetSimpleJobArgument(int arg)
    {
        Arg = arg;
    }
}

[Serializable]
class NetSimpleTaskResult
{
    public int Res;

    public NetSimpleTaskResult(int res)
    {
        Res = res;
    }
}

[Serializable]
class NetSimpleJobResult
{
    public int Res;

    public NetSimpleJobResult(int res)
    {
        Res = res;
    }
}

// Action used by the Run/AffinityRun tests: records each invocation id and the executing node.
[Serializable]
class ComputeAction : IComputeAction
{
    [InstanceResource]
#pragma warning disable 649
    // Injected by Ignite on the executing node.
    private IIgnite _grid;

    // All invocation ids seen so far (static: shared across jobs within one process).
    public static ConcurrentBag<Guid> Invokes = new ConcurrentBag<Guid>();

    public static Guid LastNodeId;

    public Guid Id { get; set; }

    public ComputeAction()
    {
        // No-op.
    }

    public ComputeAction(Guid id)
    {
        Id = id;
    }

    public void Invoke()
    {
        Thread.Sleep(10);
        Invokes.Add(Id);
        LastNodeId = _grid.GetCluster().GetLocalNode().Id;
    }

    // Number of recorded invocations for the given id.
    public static int InvokeCount(Guid id)
    {
        return Invokes.Count(x => x == id);
    }
}

// Action whose binary (de)serialization always throws.
class InvalidComputeAction : ComputeAction, IBinarizable
{
    public void WriteBinary(IBinaryWriter writer)
    {
        throw new BinaryObjectException("Expected");
    }

    public void ReadBinary(IBinaryReader reader)
    {
        throw new BinaryObjectException("Expected");
    }
}

// Action that always throws a user exception from Invoke().
class ExceptionalComputeAction : IComputeAction
{
    public const string ErrorText = "Expected user exception";

    public void Invoke()
    {
        throw new OverflowException(ErrorText);
    }
}

interface IUserInterface<out T>
{
    T Invoke();
}

interface INestedComputeFunc : IComputeFunc<int>
{
}

// Func with three same-signature Invoke methods: only the explicit IComputeFunc<int>
// implementation must be picked by the compute engine; the other two throw.
[Serializable]
class ComputeFunc : INestedComputeFunc, IUserInterface<int>
{
    [InstanceResource]
    private IIgnite _grid;

    public static int InvokeCount;

    public static Guid LastNodeId;

    int IComputeFunc<int>.Invoke()
    {
        Thread.Sleep(10);
        InvokeCount++;
        LastNodeId = _grid.GetCluster().GetLocalNode().Id;
        return InvokeCount;
    }

    int IUserInterface<int>.Invoke()
    {
        // Same signature as IComputeFunc<int>, but from different interface
        throw new Exception("Invalid method");
    }

    public int Invoke()
    {
        // Same signature as IComputeFunc<int>, but due to explicit interface implementation this is a wrong method
        throw new Exception("Invalid method");
    }
}

// ushort-backed enum used in interop marshalling tests.
public enum PlatformComputeEnum : ushort
{
    Foo,
    Bar,
    Baz
}

public class InteropComputeEnumFieldTest
{
    public PlatformComputeEnum InteropEnum { get; set; }
}
}
using System;
using System.Collections.Generic;
using System.Linq;

namespace Toggl.Core.UI.Collections.Diffing
{
    /// <summary>
    /// Computes the minimal set of batched updates (section/item inserts, deletes, moves and updates)
    /// needed to transform one list of sections into another, identified by <typeparamref name="TKey"/>.
    /// The result is a sequence of up to three <see cref="Changeset"/> stages:
    /// deletions/updates, section inserts/moves, then item inserts/moves.
    /// Duplicate section or item identities raise the corresponding Duplicate*Exception.
    /// </summary>
    // NOTE(review): the staging closely mirrors RxDataSources' Diff algorithm — confirm provenance
    // before changing stage ordering.
    public sealed class Diffing<TSection, THeader, TElement, TKey>
        where TKey : IEquatable<TKey>
        where TSection : IAnimatableSectionModel<THeader, TElement, TKey>, new()
        where TElement : IDiffable<TKey>, IEquatable<TElement>
        where THeader : IDiffable<TKey>
    {
        /// <summary>
        /// One batch of updates to apply atomically. Any argument left null becomes an empty list.
        /// </summary>
        public class Changeset
        {
            public List<TSection> OriginalSections { get; }
            public List<TSection> FinalSections { get; }

            public List<int> InsertedSections { get; }
            public List<int> DeletedSections { get; }
            public List<(int, int)> MovedSections { get; }
            public List<int> UpdatedSections { get; }

            public List<ItemPath> InsertedItems { get; }
            public List<ItemPath> DeletedItems { get; }
            public List<(ItemPath, ItemPath)> MovedItems { get; }
            public List<ItemPath> UpdatedItems { get; }

            public Changeset(
                List<TSection> originalSections = null,
                List<TSection> finalSections = null,
                List<int> insertedSections = null,
                List<int> deletedSections = null,
                List<(int, int)> movedSections = null,
                List<int> updatedSections = null,
                List<ItemPath> insertedItems = null,
                List<ItemPath> deletedItems = null,
                List<(ItemPath, ItemPath)> movedItems = null,
                List<ItemPath> updatedItems = null)
            {
                OriginalSections = originalSections ?? new List<TSection>();
                FinalSections = finalSections ?? new List<TSection>();
                InsertedSections = insertedSections ?? new List<int>();
                DeletedSections = deletedSections ?? new List<int>();
                MovedSections = movedSections ?? new List<(int, int)>();
                UpdatedSections = updatedSections ?? new List<int>();
                InsertedItems = insertedItems ?? new List<ItemPath>();
                DeletedItems = deletedItems ?? new List<ItemPath>();
                MovedItems = movedItems ?? new List<(ItemPath, ItemPath)>();
                UpdatedItems = updatedItems ?? new List<ItemPath>();
            }
        }

        private readonly List<TSection> initialSections;
        private readonly List<TSection> finalSections;

        // Working state populated by ComputeDifferences(); valid only during/after that call.
        private List<List<TElement>> initialItemCache;
        private List<List<TElement>> finalItemCache;
        private List<SectionAssociatedData> initialSectionData;
        private List<SectionAssociatedData> finalSectionData;
        private List<List<ItemAssociatedData>> initialItemData;
        private List<List<ItemAssociatedData>> finalItemData;

        public Diffing(IEnumerable<TSection> initialSections, IEnumerable<TSection> finalSections)
        {
            this.initialSections = initialSections.ToList();
            this.finalSections = finalSections.ToList();
        }

        /// <summary>
        /// Runs the three diff stages and returns their changesets in application order.
        /// Empty stages are omitted, so the result has between zero and three entries.
        /// </summary>
        public List<Changeset> ComputeDifferences()
        {
            (initialSectionData, finalSectionData) = calculateSectionMovements(initialSections.ToList(), finalSections.ToList());

            initialItemCache = initialSections.Select(collection => collection.Items.ToList()).ToList();
            finalItemCache = finalSections.Select(collection => collection.Items.ToList()).ToList();

            var result = Enumerable.Empty<Changeset>().ToList();

            (initialItemData, finalItemData) = calculateItemMovements(
                initialItemCache,
                finalItemCache,
                initialSectionData,
                finalSectionData
            );

            result.AddRange(generateDeleteSectionsDeletedItemsAndUpdatedItems());
            result.AddRange(generateInsertAndMoveSections());
            result.AddRange(generateInsertAndMovedItems());

            return result;
        }

        /// <summary>
        /// Pairs up sections between the two lists by identity and classifies each as
        /// Deleted, Inserted, Moved or MovedAutomatically (i.e. it lands in place once
        /// deletions are applied). Throws <see cref="DuplicateSectionException{TKey}"/> on
        /// duplicate identities in the final list.
        /// </summary>
        private static (List<SectionAssociatedData>, List<SectionAssociatedData>) calculateSectionMovements(
            List<TSection> initialSections, List<TSection> finalSections)
        {
            var initialSectionIndexes = indexSections(initialSections);

            var initialSectionData = Enumerable.Range(0, initialSections.Count)
                .Select(_ => SectionAssociatedData.Initial())
                .ToList();

            var finalSectionData = Enumerable.Range(0, finalSections.Count)
                .Select(_ => SectionAssociatedData.Initial())
                .ToList();

            // Link each final section back to its initial position, if any.
            for (var i = 0; i < finalSections.Count; i++)
            {
                var section = finalSections[i];
                finalSectionData[i].ItemCount = finalSections[i].Items.Count;

                if (!initialSectionIndexes.ContainsKey(section.Identity))
                {
                    continue;
                }

                var initialSectionIndex = initialSectionIndexes[section.Identity];

                if (initialSectionData[initialSectionIndex].MoveIndex.HasValue)
                {
                    throw new DuplicateSectionException<TKey>(section.Identity);
                }

                initialSectionData[initialSectionIndex].MoveIndex = i;
                finalSectionData[i].MoveIndex = initialSectionIndex;
            }

            var sectionIndexAfterDelete = 0;

            // deleted sections
            for (var i = 0; i < initialSectionData.Count; i++)
            {
                initialSectionData[i].ItemCount = initialSections[i].Items.Count;

                if (initialSectionData[i].MoveIndex == null)
                {
                    initialSectionData[i].EditEvent = EditEvent.Deleted;
                    continue;
                }

                initialSectionData[i].IndexAfterDelete = sectionIndexAfterDelete;
                sectionIndexAfterDelete += 1;
            }

            // moved sections
            int? untouchedOldIndex = 0;

            // Returns the next initial index (at or after initialSearchIndex) still marked Untouched,
            // or null when none remains.
            int? findNextUntouchedOldIndex(int? initialSearchIndex)
            {
                if (!initialSearchIndex.HasValue)
                {
                    return null;
                }

                var i = initialSearchIndex.Value;
                while (i < initialSections.Count)
                {
                    if (initialSectionData[i].EditEvent == EditEvent.Untouched)
                    {
                        return i;
                    }

                    i++;
                }

                return null;
            }

            // inserted and moved sections
            // this should fix all sections and move them into correct places
            // 2nd stage
            for (var i = 0; i < finalSections.Count; i++)
            {
                untouchedOldIndex = findNextUntouchedOldIndex(untouchedOldIndex);

                // oh, it did exist
                var oldSectionIndex = finalSectionData[i].MoveIndex;
                if (oldSectionIndex.HasValue)
                {
                    // A section already at the next untouched slot moves "automatically".
                    var moveType = oldSectionIndex != untouchedOldIndex
                        ? EditEvent.Moved
                        : EditEvent.MovedAutomatically;

                    finalSectionData[i].EditEvent = moveType;
                    initialSectionData[oldSectionIndex.Value].EditEvent = moveType;
                }
                else
                {
                    finalSectionData[i].EditEvent = EditEvent.Inserted;
                }
            }

            // inserted sections
            foreach (var section in finalSectionData)
            {
                if (!section.MoveIndex.HasValue)
                {
                    section.EditEvent = EditEvent.Inserted;
                }
            }

            return (initialSectionData, finalSectionData);
        }

        /// <summary>
        /// Classifies every item as Deleted, Inserted, Moved or MovedAutomatically, relative to
        /// the already-classified sections. Items moving into inserted sections, or out of deleted
        /// sections, degrade to delete+insert to keep table-view batch updates valid.
        /// </summary>
        private static (List<List<ItemAssociatedData>>, List<List<ItemAssociatedData>>) calculateItemMovements(
            IReadOnlyList<List<TElement>> initialItemCache,
            IReadOnlyList<List<TElement>> finalItemCache,
            IReadOnlyList<SectionAssociatedData> initialSectionData,
            IReadOnlyList<SectionAssociatedData> finalSectionData)
        {
            var (initialItemData, finalItemData) = calculateAssociatedData(
                initialItemCache.Select(items => items.ToList()).ToList(),
                finalItemCache.Select(items => items.ToList()).ToList()
            );

            // Next initial item index within the given section still marked Untouched, or null.
            int? findNextUntouchedOldIndex(int initialSectionIndex, int? initialSearchIndex)
            {
                if (!initialSearchIndex.HasValue)
                {
                    return null;
                }

                var i2 = initialSearchIndex.Value;
                while (i2 < initialSectionData[initialSectionIndex].ItemCount)
                {
                    if (initialItemData[initialSectionIndex][i2].EditEvent == EditEvent.Untouched)
                    {
                        return i2;
                    }

                    i2++;
                }

                return null;
            }

            // first mark deleted items
            for (int i = 0; i < initialItemCache.Count; i++)
            {
                if (!initialSectionData[i].MoveIndex.HasValue)
                {
                    continue;
                }

                var indexAfterDelete = 0;
                for (int j = 0; j < initialItemCache[i].Count; j++)
                {
                    if (initialItemData[i][j].MoveIndex == null)
                    {
                        initialItemData[i][j].EditEvent = EditEvent.Deleted;
                        continue;
                    }

                    var finalIndexPath = initialItemData[i][j].MoveIndex;
                    // from this point below, section has to be move type because it's initial and not deleted
                    // because there is no move to inserted section
                    if (finalSectionData[finalIndexPath.sectionIndex].EditEvent == EditEvent.Inserted)
                    {
                        initialItemData[i][j].EditEvent = EditEvent.Deleted;
                        continue;
                    }

                    initialItemData[i][j].IndexAfterDelete = indexAfterDelete;
                    indexAfterDelete += 1;
                }
            }

            // mark moved or moved automatically
            for (int i = 0; i < finalItemCache.Count; i++)
            {
                if (!finalSectionData[i].MoveIndex.HasValue)
                {
                    continue;
                }

                var originalSectionIndex = finalSectionData[i].MoveIndex.Value;

                int? untouchedIndex = 0;
                for (int j = 0; j < finalItemCache[i].Count; j++)
                {
                    untouchedIndex = findNextUntouchedOldIndex(originalSectionIndex, untouchedIndex);

                    if (finalItemData[i][j].MoveIndex == null)
                    {
                        finalItemData[i][j].EditEvent = EditEvent.Inserted;
                        continue;
                    }

                    var originalIndex = finalItemData[i][j].MoveIndex;

                    // In case trying to move from deleted section, abort, otherwise it will crash table view
                    if (initialSectionData[originalIndex.sectionIndex].EditEvent == EditEvent.Deleted)
                    {
                        finalItemData[i][j].EditEvent = EditEvent.Inserted;
                        continue;
                    }

                    // original section can't be inserted
                    if (initialSectionData[originalIndex.sectionIndex].EditEvent == EditEvent.Inserted)
                    {
                        throw new Exception("New section in initial sections, that is wrong");
                    }

                    var initialSectionEvent = initialSectionData[originalIndex.sectionIndex].EditEvent;
                    if (initialSectionEvent != EditEvent.Moved && initialSectionEvent != EditEvent.MovedAutomatically)
                    {
                        throw new Exception("Section not moved");
                    }

                    var eventType = (originalIndex.sectionIndex == originalSectionIndex
                                     && originalIndex.itemIndex == (untouchedIndex ?? -1))
                        ? EditEvent.MovedAutomatically
                        : EditEvent.Moved;

                    initialItemData[originalIndex.sectionIndex][originalIndex.itemIndex].EditEvent = eventType;
                    finalItemData[i][j].EditEvent = eventType;
                }
            }

            return (initialItemData, finalItemData);
        }

        /// <summary>
        /// Stage 1: builds the changeset containing deleted sections, deleted items and
        /// in-place item updates, together with the intermediate section state after deletions.
        /// Returns an empty list when nothing was deleted or updated.
        /// </summary>
        private List<Changeset> generateDeleteSectionsDeletedItemsAndUpdatedItems()
        {
            var deletedSections = new List<int>();
            var updatedSections = new List<int>();
            var deletedItems = new List<ItemPath>();
            var updatedItems = new List<ItemPath>();

            var afterDeleteState = new List<TSection>();

            // mark deleted items
            // 1st stage again (I know, I know ...)
            for (var i = 0; i < initialItemCache.Count; i++)
            {
                var initialItems = initialItemCache[i];
                var editEvent = initialSectionData[i].EditEvent;

                // Deleted section will take care of deleting child items.
                // In case of moving an item from deleted section, tableview will
                // crash anyway, so this is not limiting anything.
                if (editEvent == EditEvent.Deleted)
                {
                    deletedSections.Add(i);
                    continue;
                }

                var afterDeleteItems = new List<TElement>();
                for (int j = 0; j < initialItems.Count; j++)
                {
                    editEvent = initialItemData[i][j].EditEvent;
                    switch (editEvent)
                    {
                        case EditEvent.Deleted:
                            deletedItems.Add(new ItemPath(i, j));
                            break;
                        case EditEvent.Moved:
                        case EditEvent.MovedAutomatically:
                            var finalItemIndex = initialItemData[i][j].MoveIndex;
                            var finalItem = finalItemCache[finalItemIndex.sectionIndex][finalItemIndex.itemIndex];

                            // Same identity but different value => an in-place update.
                            if (!finalItem.Equals(initialSections[i].Items[j]))
                            {
                                updatedItems.Add(new ItemPath(sectionIndex: i, itemIndex: j));
                            }

                            afterDeleteItems.Add(finalItem);
                            break;
                        default:
                            throw new Exception("Unhandled case");
                    }
                }

                var sectionData = initialSectionData[i];

                TSection section;
                switch (sectionData.EditEvent)
                {
                    case EditEvent.Moved:
                    case EditEvent.MovedAutomatically:
                        section = finalSections[sectionData.MoveIndex ?? i];
                        break;
                    default:
                        section = finalSections[i];
                        break;
                }

                var newSection = new TSection();
                newSection.Initialize(section.Header, afterDeleteItems);

                afterDeleteState.Add(newSection);
            }

            if (deletedItems.Count == 0 && deletedSections.Count == 0 && updatedItems.Count == 0)
            {
                return new List<Changeset>();
            }

            updatedSections = deletedItems
                .Concat(updatedItems)
                .Select(item => item.sectionIndex)
                .Distinct()
                .ToList();

            var changeSet = new Changeset(
                finalSections: afterDeleteState,
                deletedSections: deletedSections,
                updatedSections: updatedSections,
                deletedItems: deletedItems,
                updatedItems: updatedItems
            );

            return new List<Changeset>(new[] { changeSet });
        }

        /// <summary>
        /// Stage 2: builds the changeset with section inserts and explicit section moves;
        /// its section contents are the surviving (non-deleted) items in their final values.
        /// Returns an empty list when no section was inserted or moved.
        /// </summary>
        private IEnumerable<Changeset> generateInsertAndMoveSections()
        {
            var movedSections = new List<(int, int)>();
            var insertedSections = new List<int>();

            for (int i = 0; i < initialSections.Count; i++)
            {
                switch (initialSectionData[i].EditEvent)
                {
                    case EditEvent.Deleted:
                        break;
                    case EditEvent.Moved:
                        movedSections.Add((initialSectionData[i].IndexAfterDelete.Value,
                            initialSectionData[i].MoveIndex.Value));
                        break;
                    case EditEvent.MovedAutomatically:
                        break;
                    default:
                        throw new Exception("Unhandled case in initial sections");
                }
            }

            for (int i = 0; i < finalSections.Count; i++)
            {
                if (finalSectionData[i].EditEvent == EditEvent.Inserted)
                {
                    insertedSections.Add(i);
                }
            }

            if (insertedSections.Count == 0 && movedSections.Count == 0)
            {
                return new List<Changeset>();
            }

            // sections should be in place, but items should be original without deleted ones
            var sectionsAfterChange = Enumerable.Range(0, finalSections.Count).Select(i =>
            {
                var section = finalSections[i];
                var editEvent = finalSectionData[i].EditEvent;

                if (editEvent == EditEvent.Inserted)
                {
                    // it's already set up
                    return section;
                }

                if (editEvent == EditEvent.Moved || editEvent == EditEvent.MovedAutomatically)
                {
                    var originalSectionIndex = finalSectionData[i].MoveIndex.Value;
                    var originalSection = initialSections[originalSectionIndex];

                    var items = new List<TElement>();
                    //items.reserveCapacity(originalSection.items.count)
                    var itemAssociatedData = initialItemData[originalSectionIndex];

                    for (int j = 0; j < originalSection.Items.Count; j++)
                    {
                        var initialData = itemAssociatedData[j];

                        if (initialData.EditEvent == EditEvent.Deleted)
                        {
                            continue;
                        }

                        if (initialData.MoveIndex == null)
                        {
                            throw new Exception("Item was moved, but no final location.");
                        }

                        var finalIndex = initialData.MoveIndex;
                        items.Add(finalItemCache[finalIndex.sectionIndex][finalIndex.itemIndex]);
                    }

                    var newSection = new TSection();
                    newSection.Initialize(section.Header, items);
                    var modifiedSection = newSection;

                    return modifiedSection;
                }

                throw new Exception("This is weird, this shouldn't happen");
            });

            var changeSet = new Changeset(
                finalSections: sectionsAfterChange.ToList(),
                insertedSections: insertedSections,
                movedSections: movedSections);

            return new List<Changeset>(new[] { changeSet });
        }

        /// <summary>
        /// Stage 3: builds the changeset with item inserts and explicit item moves, limited to
        /// sections that survived (Moved/MovedAutomatically) — new and deleted sections reload
        /// their items wholesale. Returns an empty list when nothing was inserted or moved.
        /// </summary>
        private IEnumerable<Changeset> generateInsertAndMovedItems()
        {
            var updatedSections = new List<int>();
            var insertedItems = new List<ItemPath>();
            var movedItems = new List<(ItemPath, ItemPath)>();

            // mark new and moved items
            // 3rd stage
            for (int i = 0; i < finalSections.Count; i++)
            {
                var finalSection = finalSections[i];
                var sectionEvent = finalSectionData[i].EditEvent;

                // new and deleted sections cause reload automatically
                if (sectionEvent != EditEvent.Moved && sectionEvent != EditEvent.MovedAutomatically)
                {
                    continue;
                }

                for (int j = 0; j < finalSection.Items.Count; j++)
                {
                    var currentItemEvent = finalItemData[i][j].EditEvent;

                    if (currentItemEvent == EditEvent.Untouched)
                    {
                        throw new Exception("Current event is not untouched");
                    }

                    var editEvent = finalItemData[i][j].EditEvent;

                    switch (editEvent)
                    {
                        case EditEvent.Inserted:
                            insertedItems.Add(new ItemPath(i, j));
                            break;
                        case EditEvent.Moved:
                            var originalIndex = finalItemData[i][j].MoveIndex;
                            var finalSectionIndex = initialSectionData[originalIndex.sectionIndex].MoveIndex.Value;
                            var moveFromItemWithIndex =
                                initialItemData[originalIndex.sectionIndex][originalIndex.itemIndex].IndexAfterDelete.Value;

                            var moveCommand = (
                                new ItemPath(finalSectionIndex, moveFromItemWithIndex),
                                new ItemPath(i, j)
                            );
                            movedItems.Add(moveCommand);
                            break;
                    }
                }
            }

            if (insertedItems.Count == 0 && movedItems.Count == 0)
            {
                return new List<Changeset>();
            }

            updatedSections = insertedItems
                .Select(item => item.sectionIndex)
                .Concat(movedItems.SelectMany(movedItem =>
                    new[] { movedItem.Item1.sectionIndex, movedItem.Item2.sectionIndex }))
                .Distinct()
                .ToList();

            var changeset = new Changeset(
                finalSections: finalSections,
                updatedSections: updatedSections,
                insertedItems: insertedItems,
                movedItems: movedItems
            );

            return new List<Changeset>(new[] { changeset });
        }

        /// <summary>
        /// Maps every section identity to its index; throws
        /// <see cref="DuplicateSectionException{TKey}"/> on a repeated identity.
        /// </summary>
        private static Dictionary<TKey, int> indexSections(List<TSection> sections)
        {
            Dictionary<TKey, int> indexedSections = new Dictionary<TKey, int>();

            for (int i = 0; i < sections.Count; i++)
            {
                var section = sections[i];

                if (indexedSections.ContainsKey(section.Identity))
                {
                    throw new DuplicateSectionException<TKey>(section.Identity);
                }

                indexedSections[section.Identity] = i;
            }

            return indexedSections;
        }

        /// <summary>
        /// Cross-links items between the two snapshots by identity, filling in the MoveIndex of
        /// every matched pair. Throws <see cref="DuplicateItemException{TKey}"/> when an identity
        /// appears twice in either snapshot.
        /// </summary>
        private static (List<List<ItemAssociatedData>>, List<List<ItemAssociatedData>>) calculateAssociatedData(
            IReadOnlyList<List<TElement>> initialItemCache,
            IReadOnlyList<List<TElement>> finalItemCache)
        {
            var initialIdentities = new List<TKey>();
            var initialItemPaths = new List<ItemPath>();

            // Flatten initial items into parallel identity/path lists.
            for (int i = 0; i < initialItemCache.Count; i++)
            {
                var items = initialItemCache[i];

                for (int j = 0; j < items.Count; j++)
                {
                    var item = items[j];
                    initialIdentities.Add(item.Identity);
                    initialItemPaths.Add(new ItemPath(i, j));
                }
            }

            var initialItemData = initialItemCache
                .Select(items => Enumerable.Range(0, items.Count).Select(_ => ItemAssociatedData.Initial()).ToList())
                .ToList();

            var finalItemData = finalItemCache
                .Select(items => Enumerable.Range(0, items.Count).Select(_ => ItemAssociatedData.Initial()).ToList())
                .ToList();

            var dictionary = new Dictionary<TKey, int>();

            for (int i = 0; i < initialIdentities.Count; i++)
            {
                var identity = initialIdentities[i];

                if (dictionary.ContainsKey(identity))
                {
                    var existingValueItemPathIndex = dictionary[identity];
                    var itemPath = initialItemPaths[existingValueItemPathIndex];
                    var item = initialItemCache[itemPath.sectionIndex][itemPath.itemIndex];
                    throw new DuplicateItemException<TKey>(item.Identity);
                }

                dictionary[identity] = i;
            }

            for (int i = 0; i < finalItemCache.Count; i++)
            {
                var items = finalItemCache[i];

                for (int j = 0; j < items.Count; j++)
                {
                    var item = items[j];
                    var identity = item.Identity;

                    if (!dictionary.ContainsKey(identity))
                    {
                        continue;
                    }

                    var initialItemPathIndex = dictionary[identity];
                    var itemPath = initialItemPaths[initialItemPathIndex];

                    // A second final item claiming the same initial item means a duplicate identity.
                    if (initialItemData[itemPath.sectionIndex][itemPath.itemIndex].MoveIndex != null)
                    {
                        throw new DuplicateItemException<TKey>(item.Identity);
                    }

                    initialItemData[itemPath.sectionIndex][itemPath.itemIndex].MoveIndex = new ItemPath(i, j);
                    finalItemData[i][j].MoveIndex = itemPath;
                }
            }

            return (initialItemData, finalItemData);
        }
    }
}
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE file in the project root for more information.

using System;
using System.Collections.Generic;

using Debug = System.Diagnostics.Debug;

namespace Internal.TypeSystem.Interop
{
    /// <summary>
    /// Synthetic value type that mirrors a managed struct field-by-field, substituting each
    /// field's type with its native (marshalled) representation. Used by interop stub generation.
    /// Layout kind, packing and string format are forwarded from the managed struct.
    /// </summary>
    public partial class NativeStructType : MetadataType
    {
        // The managed struct that this type will imitate
        public MetadataType ManagedStructType
        {
            get;
        }

        public override ModuleDesc Module
        {
            get;
        }

        public override string Name
        {
            get
            {
                return "__NativeType__" + ManagedStructType.Name;
            }
        }

        public override string Namespace
        {
            get
            {
                return "Internal.CompilerGenerated";
            }
        }

        public override Instantiation Instantiation
        {
            get
            {
                return ManagedStructType.Instantiation;
            }
        }

        public override PInvokeStringFormat PInvokeStringFormat
        {
            get
            {
                return ManagedStructType.PInvokeStringFormat;
            }
        }

        public override bool IsExplicitLayout
        {
            get
            {
                return ManagedStructType.IsExplicitLayout;
            }
        }

        public override bool IsSequentialLayout
        {
            get
            {
                return ManagedStructType.IsSequentialLayout;
            }
        }

        public override bool IsBeforeFieldInit
        {
            get
            {
                return ManagedStructType.IsBeforeFieldInit;
            }
        }

        public override MetadataType MetadataBaseType
        {
            get
            {
                return (MetadataType)Context.GetWellKnownType(WellKnownType.ValueType);
            }
        }

        public override bool IsSealed
        {
            get
            {
                return true;
            }
        }

        public override bool IsAbstract
        {
            get
            {
                return false;
            }
        }

        public override DefType ContainingType
        {
            get
            {
                return null;
            }
        }

        public override DefType[] ExplicitlyImplementedInterfaces
        {
            get
            {
                return Array.Empty<DefType>();
            }
        }

        public override TypeSystemContext Context
        {
            get
            {
                return ManagedStructType.Context;
            }
        }

        private NativeStructField[] _fields;
        private InteropStateManager _interopStateManager;
        // Set when at least one field's native type could not be computed (see CalculateFields).
        private bool _hasInvalidLayout;

        public bool HasInvalidLayout
        {
            get
            {
                return _hasInvalidLayout;
            }
        }

        public FieldDesc[] Fields
        {
            get
            {
                return _fields;
            }
        }

        public NativeStructType(ModuleDesc owningModule, MetadataType managedStructType, InteropStateManager interopStateManager)
        {
            Debug.Assert(managedStructType.IsTypeDefinition);
            Debug.Assert(managedStructType.IsValueType);
            Debug.Assert(!managedStructType.IsGenericDefinition);

            Module = owningModule;
            ManagedStructType = managedStructType;
            _interopStateManager = interopStateManager;
            _hasInvalidLayout = false;
            CalculateFields();
        }

        // Builds one NativeStructField per non-static managed field, mapping each field type
        // to its native representation. On unsupported marshalling, the managed type is kept
        // and _hasInvalidLayout is set so stubs can report the error later.
        // NOTE(review): marshalAsDescriptors is indexed by non-static field order — assumes
        // GetFieldMarshalAsDescriptors() returns entries in the same order/count; confirm.
        private void CalculateFields()
        {
            bool isSequential = ManagedStructType.IsSequentialLayout;
            MarshalAsDescriptor[] marshalAsDescriptors = ManagedStructType.GetFieldMarshalAsDescriptors();
            bool isAnsi = ((MetadataType)ManagedStructType).PInvokeStringFormat == PInvokeStringFormat.AnsiClass;

            int numFields = 0;
            foreach (FieldDesc field in ManagedStructType.GetFields())
            {
                if (field.IsStatic)
                {
                    continue;
                }
                numFields++;
            }

            _fields = new NativeStructField[numFields];

            int index = 0;
            foreach (FieldDesc field in ManagedStructType.GetFields())
            {
                if (field.IsStatic)
                {
                    continue;
                }

                var managedType = field.FieldType;

                TypeDesc nativeType;
                try
                {
                    nativeType = MarshalHelpers.GetNativeStructFieldType(managedType, marshalAsDescriptors[index], _interopStateManager, isAnsi);
                }
                catch (NotSupportedException)
                {
                    // if marshalling is not supported for this type the generated stubs will emit appropriate
                    // error message. We just set native type to be same as managed type
                    nativeType = managedType;
                    _hasInvalidLayout = true;
                }

                _fields[index++] = new NativeStructField(nativeType, this, field);
            }
        }

        // Forwards the managed struct's layout, remapping explicit offsets (matched by field
        // name, in order) onto the synthetic native fields.
        // NOTE(review): layout.Offsets[layoutIndex] is read without a bounds check inside the
        // loop; this assumes every explicit-layout field has a matching offset entry (guarded
        // only by the Debug.Asserts) — confirm that metadata guarantees this.
        public override ClassLayoutMetadata GetClassLayout()
        {
            ClassLayoutMetadata layout = ManagedStructType.GetClassLayout();

            ClassLayoutMetadata result;
            result.PackingSize = layout.PackingSize;
            result.Size = layout.Size;

            if (IsExplicitLayout)
            {
                result.Offsets = new FieldAndOffset[layout.Offsets.Length];

                Debug.Assert(layout.Offsets.Length <= _fields.Length);

                int layoutIndex = 0;
                for (int index = 0; index < _fields.Length; index++)
                {
                    if (_fields[index].Name == layout.Offsets[layoutIndex].Field.Name)
                    {
                        result.Offsets[layoutIndex] = new FieldAndOffset(_fields[index], layout.Offsets[layoutIndex].Offset);
                        layoutIndex++;
                    }
                }

                Debug.Assert(layoutIndex == layout.Offsets.Length);
            }
            else
            {
                result.Offsets = null;
            }

            return result;
        }

        public override bool HasCustomAttribute(string attributeNamespace, string attributeName)
        {
            return false;
        }

        public override IEnumerable<MetadataType> GetNestedTypes()
        {
            return Array.Empty<MetadataType>();
        }

        public override MetadataType GetNestedType(string name)
        {
            return null;
        }

        protected override MethodImplRecord[] ComputeVirtualMethodImplsForType()
        {
            return Array.Empty<MethodImplRecord>();
        }

        public override MethodImplRecord[] FindMethodsImplWithMatchingDeclName(string name)
        {
            return Array.Empty<MethodImplRecord>();
        }

        private int _hashCode;

        // Hash is derived from namespace + name; computed lazily, 0 means "not yet computed".
        private void InitializeHashCode()
        {
            var hashCodeBuilder = new Internal.NativeFormat.TypeHashingAlgorithms.HashCodeBuilder(Namespace);

            if (Namespace.Length > 0)
            {
                hashCodeBuilder.Append(".");
            }

            hashCodeBuilder.Append(Name);
            _hashCode = hashCodeBuilder.ToHashCode();
        }

        public override int GetHashCode()
        {
            if (_hashCode == 0)
            {
                InitializeHashCode();
            }
            return _hashCode;
        }

        protected override TypeFlags ComputeTypeFlags(TypeFlags mask)
        {
            TypeFlags flags = 0;

            if ((mask & TypeFlags.HasGenericVarianceComputed) != 0)
            {
                flags |= TypeFlags.HasGenericVarianceComputed;
            }

            if ((mask & TypeFlags.CategoryMask) != 0)
            {
                flags |= TypeFlags.ValueType;
            }

            flags |= TypeFlags.HasFinalizerComputed;
            flags |= TypeFlags.IsByRefLikeComputed;

            return flags;
        }

        public override IEnumerable<FieldDesc> GetFields()
        {
            return _fields;
        }

        /// <summary>
        /// Synthetic field on <see cref="NativeStructType"/>.
        /// </summary>
        private partial class NativeStructField : FieldDesc
        {
            private TypeDesc _fieldType;
            private MetadataType _owningType;
            // The managed field this native field shadows; supplies the name.
            private FieldDesc _managedField;

            public override TypeSystemContext Context
            {
                get
                {
                    return _owningType.Context;
                }
            }

            public override TypeDesc FieldType
            {
                get
                {
                    return _fieldType;
                }
            }

            public override bool HasRva
            {
                get
                {
                    return false;
                }
            }

            public override bool IsInitOnly
            {
                get
                {
                    return false;
                }
            }

            public override bool IsLiteral
            {
                get
                {
                    return false;
                }
            }

            public override bool IsStatic
            {
                get
                {
                    return false;
                }
            }

            public override bool IsThreadStatic
            {
                get
                {
                    return false;
                }
            }

            public override DefType OwningType
            {
                get
                {
                    return _owningType;
                }
            }

            public override bool HasCustomAttribute(string attributeNamespace, string attributeName)
            {
                return false;
            }

            public override string Name
            {
                get
                {
                    return _managedField.Name;
                }
            }

            public NativeStructField(TypeDesc nativeType, MetadataType owningType, FieldDesc managedField)
            {
                _fieldType = nativeType;
                _owningType = owningType;
                _managedField = managedField;
            }
        }
    }
}
// Copyright (c) Microsoft. All rights reserved.
// Licensed under the MIT license. See LICENSE file in the project root for full license information.

// Enables instruction counting and displaying stats at process exit.
// #define STATS

using System;
using System.Collections.Generic;
using System.Diagnostics;
using System.Reflection;
using System.Runtime.CompilerServices;
using System.Dynamic.Utils;

namespace System.Linq.Expressions.Interpreter
{
    // Immutable snapshot of an instruction stream together with the metadata the
    // interpreter needs to run it: the maximum stack/continuation depths observed
    // while the stream was built, the pooled constant objects the instructions
    // reference, and the resolved runtime labels. Produced by InstructionList.ToArray().
    [System.Diagnostics.CodeAnalysis.SuppressMessage("Microsoft.Performance", "CA1815:OverrideEqualsAndOperatorEqualsOnValueTypes")]
    [DebuggerTypeProxy(typeof(InstructionArray.DebugView))]
    internal struct InstructionArray
    {
        internal readonly int MaxStackDepth;
        internal readonly int MaxContinuationDepth;
        internal readonly Instruction[] Instructions;
        internal readonly object[] Objects;
        internal readonly RuntimeLabel[] Labels;

        // list of (instruction index, cookie) sorted by instruction index:
        internal readonly List<KeyValuePair<int, object>> DebugCookies;

        internal InstructionArray(int maxStackDepth, int maxContinuationDepth, Instruction[] instructions,
            object[] objects, RuntimeLabel[] labels, List<KeyValuePair<int, object>> debugCookies)
        {
            MaxStackDepth = maxStackDepth;
            MaxContinuationDepth = maxContinuationDepth;
            Instructions = instructions;
            DebugCookies = debugCookies;
            Objects = objects;
            Labels = labels;
        }

        // Number of instructions in the stream.
        internal int Length
        {
            get { return Instructions.Length; }
        }

        #region Debug View

        // Debugger type proxy: renders the instruction stream as one formatted
        // row per instruction (see InstructionList.DebugView.GetInstructionViews).
        internal sealed class DebugView
        {
            private readonly InstructionArray _array;

            public DebugView(InstructionArray array)
            {
                _array = array;
            }

            [DebuggerBrowsable(DebuggerBrowsableState.RootHidden)]
            public InstructionList.DebugView.InstructionView[]/*!*/ A0
            {
                get
                {
                    return InstructionList.DebugView.GetInstructionViews(
                        _array.Instructions,
                        _array.Objects,
                        // Labels are already resolved here, so the label indexer maps
                        // a label slot straight to its runtime instruction index.
                        (index) => _array.Labels[index].Index,
                        _array.DebugCookies
                    );
                }
            }
        }

        #endregion
    }

    [DebuggerTypeProxy(typeof(InstructionList.DebugView))]
    // Mutable builder for an interpreter instruction stream. Each Emit* call appends
    // an Instruction and updates the running stack/continuation depth bookkeeping;
    // ToArray() freezes the result into an immutable InstructionArray. Small, stateless
    // instructions (loads of cached constants, local accesses below LocalInstrCacheSize,
    // field loads) are cached in static arrays/dictionaries so repeated emits reuse
    // the same Instruction instance.
    [System.Diagnostics.CodeAnalysis.SuppressMessage("Microsoft.Maintainability", "CA1506:AvoidExcessiveClassCoupling")]
    internal sealed class InstructionList
    {
        private readonly List<Instruction> _instructions = new List<Instruction>();
        private List<object> _objects;  // lazily-created pool of constant objects referenced by LoadCachedObjectInstruction

        private int _currentStackDepth;
        private int _maxStackDepth;
        private int _currentContinuationsDepth;
        private int _maxContinuationDepth;
        private int _runtimeLabelCount;
        private List<BranchLabel> _labels;

        // list of (instruction index, cookie) sorted by instruction index:
        private List<KeyValuePair<int, object>> _debugCookies = null;

        #region Debug View

        // Debugger type proxy shared with InstructionArray: formats each instruction
        // with its index, the stack/continuation depth at that point, and any cookie.
        internal sealed class DebugView
        {
            private readonly InstructionList _list;

            public DebugView(InstructionList list)
            {
                _list = list;
            }

            [DebuggerBrowsable(DebuggerBrowsableState.RootHidden)]
            public InstructionView[]/*!*/ A0
            {
                get
                {
                    return GetInstructionViews(
                        _list._instructions,
                        _list._objects,
                        // Labels are not yet resolved while building, so map through
                        // the BranchLabel's target index.
                        (index) => _list._labels[index].TargetIndex,
                        _list._debugCookies
                    );
                }
            }

            // Walks the instruction stream once, accumulating stack/continuation depth
            // and pairing each instruction with the cookie (if any) attached at its index.
            internal static InstructionView[] GetInstructionViews(IList<Instruction> instructions, IList<object> objects,
                Func<int, int> labelIndexer, IList<KeyValuePair<int, object>> debugCookies)
            {
                var result = new List<InstructionView>();
                int index = 0;
                int stackDepth = 0;
                int continuationsDepth = 0;

                // Cookies are sorted by instruction index, so a single forward-moving
                // enumerator suffices.
                var cookieEnumerator = (debugCookies != null ? debugCookies : new KeyValuePair<int, object>[0]).GetEnumerator();
                var hasCookie = cookieEnumerator.MoveNext();

                for (int i = 0; i < instructions.Count; i++)
                {
                    object cookie = null;
                    while (hasCookie && cookieEnumerator.Current.Key == i)
                    {
                        cookie = cookieEnumerator.Current.Value;
                        hasCookie = cookieEnumerator.MoveNext();
                    }

                    int stackDiff = instructions[i].StackBalance;
                    int contDiff = instructions[i].ContinuationsBalance;
                    string name = instructions[i].ToDebugString(i, cookie, labelIndexer, objects);
                    // Depths shown are those BEFORE the instruction executes; the
                    // balances are applied afterwards.
                    result.Add(new InstructionView(instructions[i], name, i, stackDepth, continuationsDepth));

                    index++;
                    stackDepth += stackDiff;
                    continuationsDepth += contDiff;
                }
                return result.ToArray();
            }

            // One row of the debugger display for a single instruction.
            [DebuggerDisplay("{GetValue(),nq}", Name = "{GetName(),nq}", Type = "{GetDisplayType(), nq}")]
            internal struct InstructionView
            {
                private readonly int _index;
                private readonly int _stackDepth;
                private readonly int _continuationsDepth;
                private readonly string _name;
                private readonly Instruction _instruction;

                // e.g. "12 C(1) S(3)" — index plus non-zero continuation/stack depths.
                internal string GetName()
                {
                    return _index +
                        (_continuationsDepth == 0 ? "" : " C(" + _continuationsDepth + ")") +
                        (_stackDepth == 0 ? "" : " S(" + _stackDepth + ")");
                }

                internal string GetValue()
                {
                    return _name;
                }

                internal string GetDisplayType()
                {
                    return _instruction.ContinuationsBalance + "/" + _instruction.StackBalance;
                }

                public InstructionView(Instruction instruction, string name, int index, int stackDepth, int continuationsDepth)
                {
                    _instruction = instruction;
                    _name = name;
                    _index = index;
                    _stackDepth = stackDepth;
                    _continuationsDepth = continuationsDepth;
                }
            }
        }

        #endregion

        #region Core Emit Ops

        // Appends an instruction and folds its stack effect into the depth tracking.
        public void Emit(Instruction instruction)
        {
            _instructions.Add(instruction);
            UpdateStackDepth(instruction);
        }

        // Applies the instruction's consumed/produced stack and continuation counts,
        // recording the high-water marks needed to size interpreter frames.
        private void UpdateStackDepth(Instruction instruction)
        {
            Debug.Assert(instruction.ConsumedStack >= 0 && instruction.ProducedStack >= 0 &&
                instruction.ConsumedContinuations >= 0 && instruction.ProducedContinuations >= 0, "bad instruction " + instruction.ToString());

            _currentStackDepth -= instruction.ConsumedStack;
            Debug.Assert(_currentStackDepth >= 0, "negative stack depth " + instruction.ToString());
            _currentStackDepth += instruction.ProducedStack;
            if (_currentStackDepth > _maxStackDepth)
            {
                _maxStackDepth = _currentStackDepth;
            }

            _currentContinuationsDepth -= instruction.ConsumedContinuations;
            Debug.Assert(_currentContinuationsDepth >= 0, "negative continuations " + instruction.ToString());
            _currentContinuationsDepth += instruction.ProducedContinuations;
            if (_currentContinuationsDepth > _maxContinuationDepth)
            {
                _maxContinuationDepth = _currentContinuationsDepth;
            }
        }

        /// <summary>
        /// Attaches a cookie to the last emitted instruction.
        /// </summary>
        [Conditional("DEBUG")]
        public void SetDebugCookie(object cookie)
        {
#if DEBUG
            if (_debugCookies == null)
            {
                _debugCookies = new List<KeyValuePair<int, object>>();
            }

            Debug.Assert(Count > 0);
            _debugCookies.Add(new KeyValuePair<int, object>(Count - 1, cookie));
#endif
        }

        public int Count
        {
            get { return _instructions.Count; }
        }

        public int CurrentStackDepth
        {
            get { return _currentStackDepth; }
        }

        public int CurrentContinuationsDepth
        {
            get { return _currentContinuationsDepth; }
        }

        public int MaxStackDepth
        {
            get { return _maxStackDepth; }
        }

        internal Instruction GetInstruction(int index)
        {
            return _instructions[index];
        }

#if STATS
        private static Dictionary<string, int> _executedInstructions = new Dictionary<string, int>();
        private static Dictionary<string, Dictionary<object, bool>> _instances = new Dictionary<string, Dictionary<object, bool>>();

        static InstructionList()
        {
            // Dump per-instruction-type execution and instance histograms at process exit.
            AppDomain.CurrentDomain.ProcessExit += new EventHandler((_, __) =>
            {
                PerfTrack.DumpHistogram(_executedInstructions);
                Console.WriteLine("-- Total executed: {0}", _executedInstructions.Values.Aggregate(0, (sum, value) => sum + value));
                Console.WriteLine("-----");
                var referenced = new Dictionary<string, int>();
                int total = 0;
                foreach (var entry in _instances)
                {
                    referenced[entry.Key] = entry.Value.Count;
                    total += entry.Value.Count;
                }

                PerfTrack.DumpHistogram(referenced);
                Console.WriteLine("-- Total referenced: {0}", total);
                Console.WriteLine("-----");
            });
        }
#endif

        // Freezes the builder into an immutable InstructionArray. Objects is null when
        // no cached constants were emitted.
        public InstructionArray ToArray()
        {
#if STATS
            lock (_executedInstructions)
            {
                _instructions.ForEach((instr) =>
                {
                    int value = 0;
                    var name = instr.GetType().Name;
                    _executedInstructions.TryGetValue(name, out value);
                    _executedInstructions[name] = value + 1;

                    Dictionary<object, bool> dict;
                    if (!_instances.TryGetValue(name, out dict))
                    {
                        _instances[name] = dict = new Dictionary<object, bool>();
                    }
                    dict[instr] = true;
                });
            }
#endif
            return new InstructionArray(
                _maxStackDepth,
                _maxContinuationDepth,
                _instructions.ToArray(),
                (_objects != null) ? _objects.ToArray() : null,
                BuildRuntimeLabels(),
                _debugCookies
            );
        }

        #endregion

        #region Stack Operations

        // Constant-load caching bounds: ints in [-100, 100] and the first 256 pooled
        // objects get shared instruction instances.
        private const int PushIntMinCachedValue = -100;
        private const int PushIntMaxCachedValue = 100;
        private const int CachedObjectCount = 256;

        private static Instruction s_null;
        private static Instruction s_true;
        private static Instruction s_false;
        private static Instruction[] s_ints;
        private static Instruction[] s_loadObjectCached;

        public void EmitLoad(object value)
        {
            EmitLoad(value, null);
        }

        public void EmitLoad(bool value)
        {
            if ((bool)value)
            {
                Emit(s_true ?? (s_true = new LoadObjectInstruction(value)));
            }
            else
            {
                Emit(s_false ?? (s_false = new LoadObjectInstruction(value)));
            }
        }

        // Emits a constant load: null/bool/small-int use shared static instructions;
        // other values go through the per-list object pool (cached-index load for the
        // first CachedObjectCount entries, plain LoadObjectInstruction afterwards).
        public void EmitLoad(object value, Type type)
        {
            if (value == null)
            {
                Emit(s_null ?? (s_null = new LoadObjectInstruction(null)));
                return;
            }

            if (type == null || type.GetTypeInfo().IsValueType)
            {
                if (value is bool)
                {
                    EmitLoad((bool)value);
                    return;
                }

                if (value is int)
                {
                    int i = (int)value;
                    if (i >= PushIntMinCachedValue && i <= PushIntMaxCachedValue)
                    {
                        if (s_ints == null)
                        {
                            s_ints = new Instruction[PushIntMaxCachedValue - PushIntMinCachedValue + 1];
                        }
                        i -= PushIntMinCachedValue;  // shift into cache-array index space
                        Emit(s_ints[i] ?? (s_ints[i] = new LoadObjectInstruction(value)));
                        return;
                    }
                }
            }

            if (_objects == null)
            {
                _objects = new List<object>();
                if (s_loadObjectCached == null)
                {
                    s_loadObjectCached = new Instruction[CachedObjectCount];
                }
            }

            if (_objects.Count < s_loadObjectCached.Length)
            {
                uint index = (uint)_objects.Count;
                _objects.Add(value);
                Emit(s_loadObjectCached[index] ?? (s_loadObjectCached[index] = new LoadCachedObjectInstruction(index)));
            }
            else
            {
                Emit(new LoadObjectInstruction(value));
            }
        }

        public void EmitDup()
        {
            Emit(DupInstruction.Instance);
        }

        public void EmitPop()
        {
            Emit(PopInstruction.Instance);
        }

        #endregion

        #region Locals

        // Rewrites the instruction at instructionIndex to its boxed form if it targets
        // the given local; no-op when the instruction is not boxable or doesn't match.
        internal void SwitchToBoxed(int index, int instructionIndex)
        {
            var instruction = _instructions[instructionIndex] as IBoxableInstruction;

            if (instruction != null)
            {
                var newInstruction = instruction.BoxIfIndexMatches(index);
                if (newInstruction != null)
                {
                    _instructions[instructionIndex] = newInstruction;
                }
            }
        }

        // Local-access instructions for indices below this are cached statically.
        private const int LocalInstrCacheSize = 64;

        private static Instruction[] s_loadLocal;
        private static Instruction[] s_loadLocalBoxed;
        private static Instruction[] s_loadLocalFromClosure;
        private static Instruction[] s_loadLocalFromClosureBoxed;
        private static Instruction[] s_assignLocal;
        private static Instruction[] s_storeLocal;
        private static Instruction[] s_assignLocalBoxed;
        private static Instruction[] s_storeLocalBoxed;
        private static Instruction[] s_assignLocalToClosure;

        public void EmitLoadLocal(int index)
        {
            if (s_loadLocal == null)
            {
                s_loadLocal = new Instruction[LocalInstrCacheSize];
            }

            if (index < s_loadLocal.Length)
            {
                Emit(s_loadLocal[index] ?? (s_loadLocal[index] = new LoadLocalInstruction(index)));
            }
            else
            {
                Emit(new LoadLocalInstruction(index));
            }
        }

        public void EmitLoadLocalBoxed(int index)
        {
            Emit(LoadLocalBoxed(index));
        }

        internal static Instruction LoadLocalBoxed(int index)
        {
            if (s_loadLocalBoxed == null)
            {
                s_loadLocalBoxed = new Instruction[LocalInstrCacheSize];
            }

            if (index < s_loadLocalBoxed.Length)
            {
                return s_loadLocalBoxed[index] ?? (s_loadLocalBoxed[index] = new LoadLocalBoxedInstruction(index));
            }
            else
            {
                return new LoadLocalBoxedInstruction(index);
            }
        }

        public void EmitLoadLocalFromClosure(int index)
        {
            if (s_loadLocalFromClosure == null)
            {
                s_loadLocalFromClosure = new Instruction[LocalInstrCacheSize];
            }

            if (index < s_loadLocalFromClosure.Length)
            {
                Emit(s_loadLocalFromClosure[index] ?? (s_loadLocalFromClosure[index] = new LoadLocalFromClosureInstruction(index)));
            }
            else
            {
                Emit(new LoadLocalFromClosureInstruction(index));
            }
        }

        public void EmitLoadLocalFromClosureBoxed(int index)
        {
            if (s_loadLocalFromClosureBoxed == null)
            {
                s_loadLocalFromClosureBoxed = new Instruction[LocalInstrCacheSize];
            }

            if (index < s_loadLocalFromClosureBoxed.Length)
            {
                Emit(s_loadLocalFromClosureBoxed[index] ?? (s_loadLocalFromClosureBoxed[index] = new LoadLocalFromClosureBoxedInstruction(index)));
            }
            else
            {
                Emit(new LoadLocalFromClosureBoxedInstruction(index));
            }
        }

        public void EmitAssignLocal(int index)
        {
            if (s_assignLocal == null)
            {
                s_assignLocal = new Instruction[LocalInstrCacheSize];
            }

            if (index < s_assignLocal.Length)
            {
                Emit(s_assignLocal[index] ?? (s_assignLocal[index] = new AssignLocalInstruction(index)));
            }
            else
            {
                Emit(new AssignLocalInstruction(index));
            }
        }

        public void EmitStoreLocal(int index)
        {
            if (s_storeLocal == null)
            {
                s_storeLocal = new Instruction[LocalInstrCacheSize];
            }

            if (index < s_storeLocal.Length)
            {
                Emit(s_storeLocal[index] ?? (s_storeLocal[index] = new StoreLocalInstruction(index)));
            }
            else
            {
                Emit(new StoreLocalInstruction(index));
            }
        }

        public void EmitAssignLocalBoxed(int index)
        {
            Emit(AssignLocalBoxed(index));
        }

        internal static Instruction AssignLocalBoxed(int index)
        {
            if (s_assignLocalBoxed == null)
            {
                s_assignLocalBoxed = new Instruction[LocalInstrCacheSize];
            }

            if (index < s_assignLocalBoxed.Length)
            {
                return s_assignLocalBoxed[index] ?? (s_assignLocalBoxed[index] = new AssignLocalBoxedInstruction(index));
            }
            else
            {
                return new AssignLocalBoxedInstruction(index);
            }
        }

        public void EmitStoreLocalBoxed(int index)
        {
            Emit(StoreLocalBoxed(index));
        }

        internal static Instruction StoreLocalBoxed(int index)
        {
            if (s_storeLocalBoxed == null)
            {
                s_storeLocalBoxed = new Instruction[LocalInstrCacheSize];
            }

            if (index < s_storeLocalBoxed.Length)
            {
                return s_storeLocalBoxed[index] ?? (s_storeLocalBoxed[index] = new StoreLocalBoxedInstruction(index));
            }
            else
            {
                return new StoreLocalBoxedInstruction(index);
            }
        }

        public void EmitAssignLocalToClosure(int index)
        {
            if (s_assignLocalToClosure == null)
            {
                s_assignLocalToClosure = new Instruction[LocalInstrCacheSize];
            }

            if (index < s_assignLocalToClosure.Length)
            {
                Emit(s_assignLocalToClosure[index] ?? (s_assignLocalToClosure[index] = new AssignLocalToClosureInstruction(index)));
            }
            else
            {
                Emit(new AssignLocalToClosureInstruction(index));
            }
        }

        // Store = assign (leaves value on stack) followed by pop.
        public void EmitStoreLocalToClosure(int index)
        {
            EmitAssignLocalToClosure(index);
            EmitPop();
        }

        // Emits the default-value initializer appropriate for the local's type:
        // a shared primitive default, a mutable value-type default, or a null reference.
        public void EmitInitializeLocal(int index, Type type)
        {
            object value = ScriptingRuntimeHelpers.GetPrimitiveDefaultValue(type);
            if (value != null)
            {
                Emit(new InitializeLocalInstruction.ImmutableValue(index, value));
            }
            else if (type.GetTypeInfo().IsValueType)
            {
                Emit(new InitializeLocalInstruction.MutableValue(index, type));
            }
            else
            {
                Emit(InitReference(index));
            }
        }

        internal void EmitInitializeParameter(int index, Type parameterType)
        {
            Emit(Parameter(index, parameterType));
        }

        internal static Instruction Parameter(int index, Type parameterType)
        {
            return new InitializeLocalInstruction.Parameter(index, parameterType);
        }

        internal static Instruction ParameterBox(int index)
        {
            return new InitializeLocalInstruction.ParameterBox(index);
        }

        internal static Instruction InitReference(int index)
        {
            return new InitializeLocalInstruction.Reference(index);
        }

        internal static Instruction InitImmutableRefBox(int index)
        {
            return new InitializeLocalInstruction.ImmutableRefBox(index);
        }

        public void EmitNewRuntimeVariables(int count)
        {
            Emit(new RuntimeVariablesInstruction(count));
        }

        #endregion

        #region Array Operations

        public void EmitGetArrayItem()
        {
            Emit(GetArrayItemInstruction.Instruction);
        }

        public void EmitSetArrayItem()
        {
            Emit(new SetArrayItemInstruction());
        }

        public void EmitNewArray(Type elementType)
        {
            Emit(new NewArrayInstruction(elementType));
        }

        public void EmitNewArrayBounds(Type elementType, int rank)
        {
            Emit(new NewArrayBoundsInstruction(elementType, rank));
        }

        public void EmitNewArrayInit(Type elementType, int elementCount)
        {
            Emit(new NewArrayInitInstruction(elementType, elementCount));
        }

        #endregion

        #region Arithmetic Operations

        // Arithmetic emitters select the overflow-checked instruction variant when
        // @checked is true.
        public void EmitAdd(Type type, bool @checked)
        {
            if (@checked)
            {
                Emit(AddOvfInstruction.Create(type));
            }
            else
            {
                Emit(AddInstruction.Create(type));
            }
        }

        [System.Diagnostics.CodeAnalysis.SuppressMessage("Microsoft.Usage", "CA1801:ReviewUnusedParameters")]
        public void EmitSub(Type type, bool @checked)
        {
            if (@checked)
            {
                Emit(SubOvfInstruction.Create(type));
            }
            else
            {
                Emit(SubInstruction.Create(type));
            }
        }

        [System.Diagnostics.CodeAnalysis.SuppressMessage("Microsoft.Usage", "CA1801:ReviewUnusedParameters")]
        public void EmitMul(Type type, bool @checked)
        {
            if (@checked)
            {
                Emit(MulOvfInstruction.Create(type));
            }
            else
            {
                Emit(MulInstruction.Create(type));
            }
        }

        public void EmitDiv(Type type)
        {
            Emit(DivInstruction.Create(type));
        }

        public void EmitModulo(Type type)
        {
            Emit(ModuloInstruction.Create(type));
        }

        #endregion

        #region Comparisons

        public void EmitExclusiveOr(Type type)
        {
            Emit(ExclusiveOrInstruction.Create(type));
        }

        public void EmitAnd(Type type)
        {
            Emit(AndInstruction.Create(type));
        }

        public void EmitOr(Type type)
        {
            Emit(OrInstruction.Create(type));
        }

        public void EmitLeftShift(Type type)
        {
            Emit(LeftShiftInstruction.Create(type));
        }

        public void EmitRightShift(Type type)
        {
            Emit(RightShiftInstruction.Create(type));
        }

        public void EmitEqual(Type type, bool liftedToNull = false)
        {
            Emit(EqualInstruction.Create(type, liftedToNull));
        }

        public void EmitNotEqual(Type type, bool liftedToNull = false)
        {
            Emit(NotEqualInstruction.Create(type, liftedToNull));
        }

        public void EmitLessThan(Type type, bool liftedToNull)
        {
            Emit(LessThanInstruction.Create(type, liftedToNull));
        }

        public void EmitLessThanOrEqual(Type type, bool liftedToNull)
        {
            Emit(LessThanOrEqualInstruction.Create(type, liftedToNull));
        }

        public void EmitGreaterThan(Type type, bool liftedToNull)
        {
            Emit(GreaterThanInstruction.Create(type, liftedToNull));
        }

        public void EmitGreaterThanOrEqual(Type type, bool liftedToNull)
        {
            Emit(GreaterThanOrEqualInstruction.Create(type, liftedToNull));
        }

        #endregion

        #region Conversions

        public void EmitNumericConvertChecked(TypeCode from, TypeCode to, bool isLiftedToNull)
        {
            Emit(new NumericConvertInstruction.Checked(from, to, isLiftedToNull));
        }

        public void EmitNumericConvertUnchecked(TypeCode from, TypeCode to, bool isLiftedToNull)
        {
            Emit(new NumericConvertInstruction.Unchecked(from, to, isLiftedToNull));
        }

        public void EmitCast(Type toType)
        {
            Emit(CastInstruction.Create(toType));
        }

        #endregion

        #region Boolean Operators

        public void EmitNot(Type type)
        {
            Emit(NotInstruction.Create(type));
        }

        #endregion

        #region Types

        public void EmitDefaultValue(Type type)
        {
            Emit(new DefaultValueInstruction(type));
        }

        public void EmitNew(ConstructorInfo constructorInfo)
        {
            Emit(new NewInstruction(constructorInfo));
        }

        public void EmitByRefNew(ConstructorInfo constructorInfo, ByRefUpdater[] updaters)
        {
            Emit(new ByRefNewInstruction(constructorInfo, updaters));
        }

        internal void EmitCreateDelegate(LightDelegateCreator creator)
        {
            Emit(new CreateDelegateInstruction(creator));
        }

        public void EmitTypeEquals()
        {
            Emit(TypeEqualsInstruction.Instance);
        }

        public void EmitNullableTypeEquals()
        {
            Emit(NullableTypeEqualsInstruction.Instance);
        }

        public void EmitArrayLength()
        {
            Emit(ArrayLengthInstruction.Instance);
        }

        public void EmitNegate(Type type)
        {
            Emit(NegateInstruction.Create(type));
        }

        public void EmitNegateChecked(Type type)
        {
            Emit(NegateCheckedInstruction.Create(type));
        }

        public void EmitIncrement(Type type)
        {
            Emit(IncrementInstruction.Create(type));
        }

        public void EmitDecrement(Type type)
        {
            Emit(DecrementInstruction.Create(type));
        }

        public void EmitTypeIs(Type type)
        {
            Emit(new TypeIsInstruction(type));
        }

        public void EmitTypeAs(Type type)
        {
            Emit(new TypeAsInstruction(type));
        }

        #endregion

        #region Fields and Methods

        // Process-wide cache of field-load instructions, guarded by its own monitor.
        private static readonly Dictionary<FieldInfo, Instruction> s_loadFields = new Dictionary<FieldInfo, Instruction>();

        public void EmitLoadField(FieldInfo field)
        {
            Emit(GetLoadField(field));
        }

        // Returns the cached load instruction for the field, creating the static or
        // instance variant on first use.
        private Instruction GetLoadField(FieldInfo field)
        {
            lock (s_loadFields)
            {
                Instruction instruction;
                if (!s_loadFields.TryGetValue(field, out instruction))
                {
                    if (field.IsStatic)
                    {
                        instruction = new LoadStaticFieldInstruction(field);
                    }
                    else
                    {
                        instruction = new LoadFieldInstruction(field);
                    }
                    s_loadFields.Add(field, instruction);
                }
                return instruction;
            }
        }

        public void EmitStoreField(FieldInfo field)
        {
            if (field.IsStatic)
            {
                Emit(new StoreStaticFieldInstruction(field));
            }
            else
            {
                Emit(new StoreFieldInstruction(field));
            }
        }

        public void EmitCall(MethodInfo method)
        {
            EmitCall(method, method.GetParameters());
        }

        public void EmitCall(MethodInfo method, ParameterInfo[] parameters)
        {
            Emit(CallInstruction.Create(method, parameters));
        }

        public void EmitByRefCall(MethodInfo method, ParameterInfo[] parameters, ByRefUpdater[] byrefArgs)
        {
            // Instance methods consume one extra stack slot for the receiver.
            Emit(new ByRefMethodInfoCallInstruction(method, method.IsStatic ? parameters.Length : parameters.Length + 1, byrefArgs));
        }

        public void EmitNullableCall(MethodInfo method, ParameterInfo[] parameters)
        {
            Emit(NullableMethodCallInstruction.Create(method.Name, parameters.Length));
        }

        public void EmitNullCheck(int stackOffset)
        {
            Emit(NullCheckInstruction.Create(stackOffset));
        }

        #endregion

        #region Control Flow

        // Shared label table for streams with no user labels: a single
        // "return and rethrow" entry.
        private static readonly RuntimeLabel[] s_emptyRuntimeLabels = new RuntimeLabel[] { new RuntimeLabel(Interpreter.RethrowOnReturn, 0, 0) };

        // Resolves all branch labels into a dense runtime-label table; the last slot
        // is always the "return and rethrow" label.
        private RuntimeLabel[] BuildRuntimeLabels()
        {
            if (_runtimeLabelCount == 0)
            {
                return s_emptyRuntimeLabels;
            }

            var result = new RuntimeLabel[_runtimeLabelCount + 1];
            foreach (BranchLabel label in _labels)
            {
                if (label.HasRuntimeLabel)
                {
                    result[label.LabelIndex] = label.ToRuntimeLabel();
                }
            }

            // "return and rethrow" label:
            result[result.Length - 1] = new RuntimeLabel(Interpreter.RethrowOnReturn, 0, 0);
            return result;
        }

        public BranchLabel MakeLabel()
        {
            if (_labels == null)
            {
                _labels = new List<BranchLabel>();
            }

            var label = new BranchLabel();
            _labels.Add(label);
            return label;
        }

        // Replaces the branch instruction at branchIndex with its offset-fixed form.
        internal void FixupBranch(int branchIndex, int offset)
        {
            _instructions[branchIndex] = ((OffsetInstruction)_instructions[branchIndex]).Fixup(offset);
        }

        // Assigns the label a slot in the runtime-label table on first use.
        private int EnsureLabelIndex(BranchLabel label)
        {
            if (label.HasRuntimeLabel)
            {
                return label.LabelIndex;
            }

            label.LabelIndex = _runtimeLabelCount;
            _runtimeLabelCount++;
            return label.LabelIndex;
        }

        // Creates a label, marks it at the current position, and returns its
        // runtime-label slot.
        public int MarkRuntimeLabel()
        {
            BranchLabel handlerLabel = MakeLabel();
            MarkLabel(handlerLabel);
            return EnsureLabelIndex(handlerLabel);
        }

        public void MarkLabel(BranchLabel label)
        {
            label.Mark(this);
        }

        public void EmitGoto(BranchLabel label, bool hasResult, bool hasValue, bool labelTargetGetsValue)
        {
            Emit(GotoInstruction.Create(EnsureLabelIndex(label), hasResult, hasValue, labelTargetGetsValue));
        }

        // Emits the branch and registers it with the label so its offset can be
        // fixed up once the label is marked.
        private void EmitBranch(OffsetInstruction instruction, BranchLabel label)
        {
            Emit(instruction);
            label.AddBranch(this, Count - 1);
        }

        public void EmitBranch(BranchLabel label)
        {
            EmitBranch(new BranchInstruction(), label);
        }

        public void EmitBranch(BranchLabel label, bool hasResult, bool hasValue)
        {
            EmitBranch(new BranchInstruction(hasResult, hasValue), label);
        }

        public void EmitCoalescingBranch(BranchLabel leftNotNull)
        {
            EmitBranch(new CoalescingBranchInstruction(), leftNotNull);
        }

        public void EmitBranchTrue(BranchLabel elseLabel)
        {
            EmitBranch(new BranchTrueInstruction(), elseLabel);
        }

        public void EmitBranchFalse(BranchLabel elseLabel)
        {
            EmitBranch(new BranchFalseInstruction(), elseLabel);
        }

        public void EmitThrow()
        {
            Emit(ThrowInstruction.Throw);
        }

        public void EmitThrowVoid()
        {
            Emit(ThrowInstruction.VoidThrow);
        }

        public void EmitRethrow()
        {
            Emit(ThrowInstruction.Rethrow);
        }

        public void EmitRethrowVoid()
        {
            Emit(ThrowInstruction.VoidRethrow);
        }

        public void EmitEnterTryFinally(BranchLabel finallyStartLabel)
        {
            Emit(EnterTryCatchFinallyInstruction.CreateTryFinally(EnsureLabelIndex(finallyStartLabel)));
        }

        public void EmitEnterTryCatch()
        {
            Emit(EnterTryCatchFinallyInstruction.CreateTryCatch());
        }

        public void EmitEnterFinally(BranchLabel finallyStartLabel)
        {
            Emit(EnterFinallyInstruction.Create(EnsureLabelIndex(finallyStartLabel)));
        }

        public void EmitLeaveFinally()
        {
            Emit(LeaveFinallyInstruction.Instance);
        }

        public void EmitLeaveFault(bool hasValue)
        {
            Emit(hasValue ? LeaveFaultInstruction.NonVoid : LeaveFaultInstruction.Void);
        }

        public void EmitEnterExceptionHandlerNonVoid()
        {
            Emit(EnterExceptionHandlerInstruction.NonVoid);
        }

        public void EmitEnterExceptionHandlerVoid()
        {
            Emit(EnterExceptionHandlerInstruction.Void);
        }

        public void EmitLeaveExceptionHandler(bool hasValue, BranchLabel tryExpressionEndLabel)
        {
            Emit(LeaveExceptionHandlerInstruction.Create(EnsureLabelIndex(tryExpressionEndLabel), hasValue));
        }

        public void EmitSwitch(Dictionary<int, int> cases)
        {
            Emit(new SwitchInstruction(cases));
        }

        #endregion
    }
}
// Licensed to the .NET Foundation under one or more agreements. // The .NET Foundation licenses this file to you under the MIT license. // See the LICENSE file in the project root for more information. using Xunit; namespace System.Linq.Expressions.Tests { public static class LambdaSubtractNullableTests { #region Test methods [Theory, ClassData(typeof(CompilationTypes))] public static void LambdaSubtractNullableDecimalTest(bool useInterpreter) { decimal?[] values = new decimal?[] { null, decimal.Zero, decimal.One, decimal.MinusOne, decimal.MinValue, decimal.MaxValue }; for (int i = 0; i < values.Length; i++) { for (int j = 0; j < values.Length; j++) { VerifySubtractNullableDecimal(values[i], values[j], useInterpreter); } } } [Theory, ClassData(typeof(CompilationTypes))] public static void LambdaSubtractNullableDoubleTest(bool useInterpreter) { double?[] values = new double?[] { null, 0, 1, -1, double.MinValue, double.MaxValue, double.Epsilon, double.NegativeInfinity, double.PositiveInfinity, double.NaN }; for (int i = 0; i < values.Length; i++) { for (int j = 0; j < values.Length; j++) { VerifySubtractNullableDouble(values[i], values[j], useInterpreter); } } } [Theory, ClassData(typeof(CompilationTypes))] public static void LambdaSubtractNullableFloatTest(bool useInterpreter) { float?[] values = new float?[] { null, 0, 1, -1, float.MinValue, float.MaxValue, float.Epsilon, float.NegativeInfinity, float.PositiveInfinity, float.NaN }; for (int i = 0; i < values.Length; i++) { for (int j = 0; j < values.Length; j++) { VerifySubtractNullableFloat(values[i], values[j], useInterpreter); } } } [Theory, ClassData(typeof(CompilationTypes))] public static void LambdaSubtractNullableIntTest(bool useInterpreter) { int?[] values = new int?[] { null, 0, 1, -1, int.MinValue, int.MaxValue }; for (int i = 0; i < values.Length; i++) { for (int j = 0; j < values.Length; j++) { VerifySubtractNullableInt(values[i], values[j], useInterpreter); } } } [Theory, 
ClassData(typeof(CompilationTypes))] public static void LambdaSubtractNullableLongTest(bool useInterpreter) { long?[] values = new long?[] { null, 0, 1, -1, long.MinValue, long.MaxValue }; for (int i = 0; i < values.Length; i++) { for (int j = 0; j < values.Length; j++) { VerifySubtractNullableLong(values[i], values[j], useInterpreter); } } } [Theory, ClassData(typeof(CompilationTypes))] public static void LambdaSubtractNullableShortTest(bool useInterpreter) { short?[] values = new short?[] { null, 0, 1, -1, short.MinValue, short.MaxValue }; for (int i = 0; i < values.Length; i++) { for (int j = 0; j < values.Length; j++) { VerifySubtractNullableShort(values[i], values[j], useInterpreter); } } } [Theory, ClassData(typeof(CompilationTypes))] public static void LambdaSubtractNullableUIntTest(bool useInterpreter) { uint?[] values = new uint?[] { null, 0, 1, uint.MaxValue }; for (int i = 0; i < values.Length; i++) { for (int j = 0; j < values.Length; j++) { VerifySubtractNullableUInt(values[i], values[j], useInterpreter); } } } [Theory, ClassData(typeof(CompilationTypes))] public static void LambdaSubtractNullableULongTest(bool useInterpreter) { ulong?[] values = new ulong?[] { null, 0, 1, ulong.MaxValue }; for (int i = 0; i < values.Length; i++) { for (int j = 0; j < values.Length; j++) { VerifySubtractNullableULong(values[i], values[j], useInterpreter); } } } [Theory, ClassData(typeof(CompilationTypes))] public static void LambdaSubtractNullableUShortTest(bool useInterpreter) { ushort?[] values = new ushort?[] { null, 0, 1, ushort.MaxValue }; for (int i = 0; i < values.Length; i++) { for (int j = 0; j < values.Length; j++) { VerifySubtractNullableUShort(values[i], values[j], useInterpreter); } } } #endregion #region Test verifiers #region Verify decimal? private static void VerifySubtractNullableDecimal(decimal? a, decimal? b, bool useInterpreter) { decimal? 
expected = null; bool overflowed = false; try { expected = a - b; } catch (OverflowException) { overflowed = true; } ParameterExpression p0 = Expression.Parameter(typeof(decimal?), "p0"); ParameterExpression p1 = Expression.Parameter(typeof(decimal?), "p1"); // verify with parameters supplied Expression<Func<decimal?>> e1 = Expression.Lambda<Func<decimal?>>( Expression.Invoke( Expression.Lambda<Func<decimal?, decimal?, decimal?>>( Expression.Subtract(p0, p1), new ParameterExpression[] { p0, p1 }), new Expression[] { Expression.Constant(a, typeof(decimal?)), Expression.Constant(b, typeof(decimal?)) }), Enumerable.Empty<ParameterExpression>()); Func<decimal?> f1 = e1.Compile(useInterpreter); if (overflowed) { Assert.Throws<OverflowException>(() => f1()); } else { Assert.Equal(expected, f1()); } // verify with values passed to make parameters Expression<Func<decimal?, decimal?, Func<decimal?>>> e2 = Expression.Lambda<Func<decimal?, decimal?, Func<decimal?>>>( Expression.Lambda<Func<decimal?>>( Expression.Subtract(p0, p1), Enumerable.Empty<ParameterExpression>()), new ParameterExpression[] { p0, p1 }); Func<decimal?, decimal?, Func<decimal?>> f2 = e2.Compile(useInterpreter); if (overflowed) { Assert.Throws<OverflowException>(() => f2(a, b)()); } else { Assert.Equal(expected, f2(a, b)()); } // verify with values directly passed Expression<Func<Func<decimal?, decimal?, decimal?>>> e3 = Expression.Lambda<Func<Func<decimal?, decimal?, decimal?>>>( Expression.Invoke( Expression.Lambda<Func<Func<decimal?, decimal?, decimal?>>>( Expression.Lambda<Func<decimal?, decimal?, decimal?>>( Expression.Subtract(p0, p1), new ParameterExpression[] { p0, p1 }), Enumerable.Empty<ParameterExpression>()), Enumerable.Empty<Expression>()), Enumerable.Empty<ParameterExpression>()); Func<decimal?, decimal?, decimal?> f3 = e3.Compile(useInterpreter)(); if (overflowed) { Assert.Throws<OverflowException>(() => f3(a, b)); } else { Assert.Equal(expected, f3(a, b)); } // verify as a function 
generator
            Expression<Func<Func<decimal?, decimal?, decimal?>>> e4 =
                Expression.Lambda<Func<Func<decimal?, decimal?, decimal?>>>(
                    Expression.Lambda<Func<decimal?, decimal?, decimal?>>(
                        Expression.Subtract(p0, p1),
                        new ParameterExpression[] { p0, p1 }),
                    Enumerable.Empty<ParameterExpression>());
            Func<Func<decimal?, decimal?, decimal?>> f4 = e4.Compile(useInterpreter);

            if (overflowed)
            {
                Assert.Throws<OverflowException>(() => f4()(a, b));
            }
            else
            {
                Assert.Equal(expected, f4()(a, b));
            }

            // verify with currying
            Expression<Func<decimal?, Func<decimal?, decimal?>>> e5 =
                Expression.Lambda<Func<decimal?, Func<decimal?, decimal?>>>(
                    Expression.Lambda<Func<decimal?, decimal?>>(
                        Expression.Subtract(p0, p1),
                        new ParameterExpression[] { p1 }),
                    new ParameterExpression[] { p0 });
            Func<decimal?, Func<decimal?, decimal?>> f5 = e5.Compile(useInterpreter);

            if (overflowed)
            {
                Assert.Throws<OverflowException>(() => f5(a)(b));
            }
            else
            {
                Assert.Equal(expected, f5(a)(b));
            }

            // verify with one parameter
            Expression<Func<Func<decimal?, decimal?>>> e6 =
                Expression.Lambda<Func<Func<decimal?, decimal?>>>(
                    Expression.Invoke(
                        Expression.Lambda<Func<decimal?, Func<decimal?, decimal?>>>(
                            Expression.Lambda<Func<decimal?, decimal?>>(
                                Expression.Subtract(p0, p1),
                                new ParameterExpression[] { p1 }),
                            new ParameterExpression[] { p0 }),
                        new Expression[] { Expression.Constant(a, typeof(decimal?)) }),
                    Enumerable.Empty<ParameterExpression>());
            Func<decimal?, decimal?> f6 = e6.Compile(useInterpreter)();

            if (overflowed)
            {
                Assert.Throws<OverflowException>(() => f6(b));
            }
            else
            {
                Assert.Equal(expected, f6(b));
            }
        }

        #endregion

        #region Verify double?

        // Verifies Expression.Subtract over nullable double operands across six
        // lambda shapes: invoked inline, closure factory, nested factory,
        // function generator, curried, and partially applied.
        private static void VerifySubtractNullableDouble(double? a, double? b, bool useInterpreter)
        {
            double? expected = a - b;

            ParameterExpression p0 = Expression.Parameter(typeof(double?), "p0");
            ParameterExpression p1 = Expression.Parameter(typeof(double?), "p1");

            // verify with parameters supplied
            Expression<Func<double?>> e1 =
                Expression.Lambda<Func<double?>>(
                    Expression.Invoke(
                        Expression.Lambda<Func<double?, double?, double?>>(
                            Expression.Subtract(p0, p1),
                            new ParameterExpression[] { p0, p1 }),
                        new Expression[] { Expression.Constant(a, typeof(double?)), Expression.Constant(b, typeof(double?)) }),
                    Enumerable.Empty<ParameterExpression>());
            Func<double?> f1 = e1.Compile(useInterpreter);
            Assert.Equal(expected, f1());

            // verify with values passed to make parameters
            Expression<Func<double?, double?, Func<double?>>> e2 =
                Expression.Lambda<Func<double?, double?, Func<double?>>>(
                    Expression.Lambda<Func<double?>>(
                        Expression.Subtract(p0, p1),
                        Enumerable.Empty<ParameterExpression>()),
                    new ParameterExpression[] { p0, p1 });
            Func<double?, double?, Func<double?>> f2 = e2.Compile(useInterpreter);
            Assert.Equal(expected, f2(a, b)());

            // verify with values directly passed
            Expression<Func<Func<double?, double?, double?>>> e3 =
                Expression.Lambda<Func<Func<double?, double?, double?>>>(
                    Expression.Invoke(
                        Expression.Lambda<Func<Func<double?, double?, double?>>>(
                            Expression.Lambda<Func<double?, double?, double?>>(
                                Expression.Subtract(p0, p1),
                                new ParameterExpression[] { p0, p1 }),
                            Enumerable.Empty<ParameterExpression>()),
                        Enumerable.Empty<Expression>()),
                    Enumerable.Empty<ParameterExpression>());
            Func<double?, double?, double?> f3 = e3.Compile(useInterpreter)();
            Assert.Equal(expected, f3(a, b));

            // verify as a function generator
            Expression<Func<Func<double?, double?, double?>>> e4 =
                Expression.Lambda<Func<Func<double?, double?, double?>>>(
                    Expression.Lambda<Func<double?, double?, double?>>(
                        Expression.Subtract(p0, p1),
                        new ParameterExpression[] { p0, p1 }),
                    Enumerable.Empty<ParameterExpression>());
            Func<Func<double?, double?, double?>> f4 = e4.Compile(useInterpreter);
            Assert.Equal(expected, f4()(a, b));

            // verify with currying
            Expression<Func<double?, Func<double?, double?>>> e5 =
                Expression.Lambda<Func<double?, Func<double?, double?>>>(
                    Expression.Lambda<Func<double?, double?>>(
                        Expression.Subtract(p0, p1),
                        new ParameterExpression[] { p1 }),
                    new ParameterExpression[] { p0 });
            Func<double?, Func<double?, double?>> f5 = e5.Compile(useInterpreter);
            Assert.Equal(expected, f5(a)(b));

            // verify with one parameter
            Expression<Func<Func<double?, double?>>> e6 =
                Expression.Lambda<Func<Func<double?, double?>>>(
                    Expression.Invoke(
                        Expression.Lambda<Func<double?, Func<double?, double?>>>(
                            Expression.Lambda<Func<double?, double?>>(
                                Expression.Subtract(p0, p1),
                                new ParameterExpression[] { p1 }),
                            new ParameterExpression[] { p0 }),
                        new Expression[] { Expression.Constant(a, typeof(double?)) }),
                    Enumerable.Empty<ParameterExpression>());
            Func<double?, double?> f6 = e6.Compile(useInterpreter)();
            Assert.Equal(expected, f6(b));
        }

        #endregion

        #region Verify float?

        // Same six-shape verification for nullable float subtraction.
        private static void VerifySubtractNullableFloat(float? a, float? b, bool useInterpreter)
        {
            float? expected = a - b;

            ParameterExpression p0 = Expression.Parameter(typeof(float?), "p0");
            ParameterExpression p1 = Expression.Parameter(typeof(float?), "p1");

            // verify with parameters supplied
            Expression<Func<float?>> e1 =
                Expression.Lambda<Func<float?>>(
                    Expression.Invoke(
                        Expression.Lambda<Func<float?, float?, float?>>(
                            Expression.Subtract(p0, p1),
                            new ParameterExpression[] { p0, p1 }),
                        new Expression[] { Expression.Constant(a, typeof(float?)), Expression.Constant(b, typeof(float?)) }),
                    Enumerable.Empty<ParameterExpression>());
            Func<float?> f1 = e1.Compile(useInterpreter);
            Assert.Equal(expected, f1());

            // verify with values passed to make parameters
            Expression<Func<float?, float?, Func<float?>>> e2 =
                Expression.Lambda<Func<float?, float?, Func<float?>>>(
                    Expression.Lambda<Func<float?>>(
                        Expression.Subtract(p0, p1),
                        Enumerable.Empty<ParameterExpression>()),
                    new ParameterExpression[] { p0, p1 });
            Func<float?, float?, Func<float?>> f2 = e2.Compile(useInterpreter);
            Assert.Equal(expected, f2(a, b)());

            // verify with values directly passed
            Expression<Func<Func<float?, float?, float?>>> e3 =
                Expression.Lambda<Func<Func<float?, float?, float?>>>(
                    Expression.Invoke(
                        Expression.Lambda<Func<Func<float?, float?, float?>>>(
                            Expression.Lambda<Func<float?, float?, float?>>(
                                Expression.Subtract(p0, p1),
                                new ParameterExpression[] { p0, p1 }),
                            Enumerable.Empty<ParameterExpression>()),
                        Enumerable.Empty<Expression>()),
                    Enumerable.Empty<ParameterExpression>());
            Func<float?, float?, float?> f3 = e3.Compile(useInterpreter)();
            Assert.Equal(expected, f3(a, b));

            // verify as a function generator
            Expression<Func<Func<float?, float?, float?>>> e4 =
                Expression.Lambda<Func<Func<float?, float?, float?>>>(
                    Expression.Lambda<Func<float?, float?, float?>>(
                        Expression.Subtract(p0, p1),
                        new ParameterExpression[] { p0, p1 }),
                    Enumerable.Empty<ParameterExpression>());
            Func<Func<float?, float?, float?>> f4 = e4.Compile(useInterpreter);
            Assert.Equal(expected, f4()(a, b));

            // verify with currying
            Expression<Func<float?, Func<float?, float?>>> e5 =
                Expression.Lambda<Func<float?, Func<float?, float?>>>(
                    Expression.Lambda<Func<float?, float?>>(
                        Expression.Subtract(p0, p1),
                        new ParameterExpression[] { p1 }),
                    new ParameterExpression[] { p0 });
            Func<float?, Func<float?, float?>> f5 = e5.Compile(useInterpreter);
            Assert.Equal(expected, f5(a)(b));

            // verify with one parameter
            Expression<Func<Func<float?, float?>>> e6 =
                Expression.Lambda<Func<Func<float?, float?>>>(
                    Expression.Invoke(
                        Expression.Lambda<Func<float?, Func<float?, float?>>>(
                            Expression.Lambda<Func<float?, float?>>(
                                Expression.Subtract(p0, p1),
                                new ParameterExpression[] { p1 }),
                            new ParameterExpression[] { p0 }),
                        new Expression[] { Expression.Constant(a, typeof(float?)) }),
                    Enumerable.Empty<ParameterExpression>());
            Func<float?, float?> f6 = e6.Compile(useInterpreter)();
            Assert.Equal(expected, f6(b));
        }

        #endregion

        #region Verify int?

        // Same six-shape verification for nullable int subtraction; the expected
        // value is computed unchecked because Expression.Subtract does not trap
        // integer overflow.
        private static void VerifySubtractNullableInt(int? a, int? b, bool useInterpreter)
        {
            int? expected = unchecked(a - b);

            ParameterExpression p0 = Expression.Parameter(typeof(int?), "p0");
            ParameterExpression p1 = Expression.Parameter(typeof(int?), "p1");

            // verify with parameters supplied
            Expression<Func<int?>> e1 =
                Expression.Lambda<Func<int?>>(
                    Expression.Invoke(
                        Expression.Lambda<Func<int?, int?, int?>>(
                            Expression.Subtract(p0, p1),
                            new ParameterExpression[] { p0, p1 }),
                        new Expression[] { Expression.Constant(a, typeof(int?)), Expression.Constant(b, typeof(int?)) }),
                    Enumerable.Empty<ParameterExpression>());
            Func<int?> f1 = e1.Compile(useInterpreter);
            Assert.Equal(expected, f1());

            // verify with values passed to make parameters
            Expression<Func<int?, int?, Func<int?>>> e2 =
                Expression.Lambda<Func<int?, int?, Func<int?>>>(
                    Expression.Lambda<Func<int?>>(
                        Expression.Subtract(p0, p1),
                        Enumerable.Empty<ParameterExpression>()),
                    new ParameterExpression[] { p0, p1 });
            Func<int?, int?, Func<int?>> f2 = e2.Compile(useInterpreter);
            Assert.Equal(expected, f2(a, b)());

            // verify with values directly passed
            Expression<Func<Func<int?, int?, int?>>> e3 =
                Expression.Lambda<Func<Func<int?, int?, int?>>>(
                    Expression.Invoke(
                        Expression.Lambda<Func<Func<int?, int?, int?>>>(
                            Expression.Lambda<Func<int?, int?, int?>>(
                                Expression.Subtract(p0, p1),
                                new ParameterExpression[] { p0, p1 }),
                            Enumerable.Empty<ParameterExpression>()),
                        Enumerable.Empty<Expression>()),
                    Enumerable.Empty<ParameterExpression>());
            Func<int?, int?, int?> f3 = e3.Compile(useInterpreter)();
            Assert.Equal(expected, f3(a, b));

            // verify as a function generator
            Expression<Func<Func<int?, int?, int?>>> e4 =
                Expression.Lambda<Func<Func<int?, int?, int?>>>(
                    Expression.Lambda<Func<int?, int?, int?>>(
                        Expression.Subtract(p0, p1),
                        new ParameterExpression[] { p0, p1 }),
                    Enumerable.Empty<ParameterExpression>());
            Func<Func<int?, int?, int?>> f4 = e4.Compile(useInterpreter);
            Assert.Equal(expected, f4()(a, b));

            // verify with currying
            Expression<Func<int?, Func<int?, int?>>> e5 =
                Expression.Lambda<Func<int?, Func<int?, int?>>>(
                    Expression.Lambda<Func<int?, int?>>(
                        Expression.Subtract(p0, p1),
                        new ParameterExpression[] { p1 }),
                    new ParameterExpression[] { p0 });
            Func<int?, Func<int?, int?>> f5 = e5.Compile(useInterpreter);
            Assert.Equal(expected, f5(a)(b));

            // verify with one parameter
            Expression<Func<Func<int?, int?>>> e6 =
                Expression.Lambda<Func<Func<int?, int?>>>(
                    Expression.Invoke(
                        Expression.Lambda<Func<int?, Func<int?, int?>>>(
                            Expression.Lambda<Func<int?, int?>>(
                                Expression.Subtract(p0, p1),
                                new ParameterExpression[] { p1 }),
                            new ParameterExpression[] { p0 }),
                        new Expression[] { Expression.Constant(a, typeof(int?)) }),
                    Enumerable.Empty<ParameterExpression>());
            Func<int?, int?> f6 = e6.Compile(useInterpreter)();
            Assert.Equal(expected, f6(b));
        }

        #endregion

        #region Verify long?

        // Same six-shape verification for nullable long subtraction (unchecked).
        private static void VerifySubtractNullableLong(long? a, long? b, bool useInterpreter)
        {
            long? expected = unchecked(a - b);

            ParameterExpression p0 = Expression.Parameter(typeof(long?), "p0");
            ParameterExpression p1 = Expression.Parameter(typeof(long?), "p1");

            // verify with parameters supplied
            Expression<Func<long?>> e1 =
                Expression.Lambda<Func<long?>>(
                    Expression.Invoke(
                        Expression.Lambda<Func<long?, long?, long?>>(
                            Expression.Subtract(p0, p1),
                            new ParameterExpression[] { p0, p1 }),
                        new Expression[] { Expression.Constant(a, typeof(long?)), Expression.Constant(b, typeof(long?)) }),
                    Enumerable.Empty<ParameterExpression>());
            Func<long?> f1 = e1.Compile(useInterpreter);
            Assert.Equal(expected, f1());

            // verify with values passed to make parameters
            Expression<Func<long?, long?, Func<long?>>> e2 =
                Expression.Lambda<Func<long?, long?, Func<long?>>>(
                    Expression.Lambda<Func<long?>>(
                        Expression.Subtract(p0, p1),
                        Enumerable.Empty<ParameterExpression>()),
                    new ParameterExpression[] { p0, p1 });
            Func<long?, long?, Func<long?>> f2 = e2.Compile(useInterpreter);
            Assert.Equal(expected, f2(a, b)());

            // verify with values directly passed
            Expression<Func<Func<long?, long?, long?>>> e3 =
                Expression.Lambda<Func<Func<long?, long?, long?>>>(
                    Expression.Invoke(
                        Expression.Lambda<Func<Func<long?, long?, long?>>>(
                            Expression.Lambda<Func<long?, long?, long?>>(
                                Expression.Subtract(p0, p1),
                                new ParameterExpression[] { p0, p1 }),
                            Enumerable.Empty<ParameterExpression>()),
                        Enumerable.Empty<Expression>()),
                    Enumerable.Empty<ParameterExpression>());
            Func<long?, long?, long?> f3 = e3.Compile(useInterpreter)();
            Assert.Equal(expected, f3(a, b));

            // verify as a function generator
            Expression<Func<Func<long?, long?, long?>>> e4 =
                Expression.Lambda<Func<Func<long?, long?, long?>>>(
                    Expression.Lambda<Func<long?, long?, long?>>(
                        Expression.Subtract(p0, p1),
                        new ParameterExpression[] { p0, p1 }),
                    Enumerable.Empty<ParameterExpression>());
            Func<Func<long?, long?, long?>> f4 = e4.Compile(useInterpreter);
            Assert.Equal(expected, f4()(a, b));

            // verify with currying
            Expression<Func<long?, Func<long?, long?>>> e5 =
                Expression.Lambda<Func<long?, Func<long?, long?>>>(
                    Expression.Lambda<Func<long?, long?>>(
                        Expression.Subtract(p0, p1),
                        new ParameterExpression[] { p1 }),
                    new ParameterExpression[] { p0 });
            Func<long?, Func<long?, long?>> f5 = e5.Compile(useInterpreter);
            Assert.Equal(expected, f5(a)(b));

            // verify with one parameter
            Expression<Func<Func<long?, long?>>> e6 =
                Expression.Lambda<Func<Func<long?, long?>>>(
                    Expression.Invoke(
                        Expression.Lambda<Func<long?, Func<long?, long?>>>(
                            Expression.Lambda<Func<long?, long?>>(
                                Expression.Subtract(p0, p1),
                                new ParameterExpression[] { p1 }),
                            new ParameterExpression[] { p0 }),
                        new Expression[] { Expression.Constant(a, typeof(long?)) }),
                    Enumerable.Empty<ParameterExpression>());
            Func<long?, long?> f6 = e6.Compile(useInterpreter)();
            Assert.Equal(expected, f6(b));
        }

        #endregion

        #region Verify short?

        // Same six-shape verification for nullable short subtraction; short
        // arithmetic widens to int, so the expected value is cast back unchecked.
        private static void VerifySubtractNullableShort(short? a, short? b, bool useInterpreter)
        {
            short? expected = unchecked((short?)(a - b));

            ParameterExpression p0 = Expression.Parameter(typeof(short?), "p0");
            ParameterExpression p1 = Expression.Parameter(typeof(short?), "p1");

            // verify with parameters supplied
            Expression<Func<short?>> e1 =
                Expression.Lambda<Func<short?>>(
                    Expression.Invoke(
                        Expression.Lambda<Func<short?, short?, short?>>(
                            Expression.Subtract(p0, p1),
                            new ParameterExpression[] { p0, p1 }),
                        new Expression[] { Expression.Constant(a, typeof(short?)), Expression.Constant(b, typeof(short?)) }),
                    Enumerable.Empty<ParameterExpression>());
            Func<short?> f1 = e1.Compile(useInterpreter);
            Assert.Equal(expected, f1());

            // verify with values passed to make parameters
            Expression<Func<short?, short?, Func<short?>>> e2 =
                Expression.Lambda<Func<short?, short?, Func<short?>>>(
                    Expression.Lambda<Func<short?>>(
                        Expression.Subtract(p0, p1),
                        Enumerable.Empty<ParameterExpression>()),
                    new ParameterExpression[] { p0, p1 });
            Func<short?, short?, Func<short?>> f2 = e2.Compile(useInterpreter);
            Assert.Equal(expected, f2(a, b)());

            // verify with values directly passed
            Expression<Func<Func<short?, short?, short?>>> e3 =
                Expression.Lambda<Func<Func<short?, short?, short?>>>(
                    Expression.Invoke(
                        Expression.Lambda<Func<Func<short?, short?, short?>>>(
                            Expression.Lambda<Func<short?, short?, short?>>(
                                Expression.Subtract(p0, p1),
                                new ParameterExpression[] { p0, p1 }),
                            Enumerable.Empty<ParameterExpression>()),
                        Enumerable.Empty<Expression>()),
                    Enumerable.Empty<ParameterExpression>());
            Func<short?, short?, short?> f3 = e3.Compile(useInterpreter)();
            Assert.Equal(expected, f3(a, b));

            // verify as a function generator
            Expression<Func<Func<short?, short?, short?>>> e4 =
                Expression.Lambda<Func<Func<short?, short?, short?>>>(
                    Expression.Lambda<Func<short?, short?, short?>>(
                        Expression.Subtract(p0, p1),
                        new ParameterExpression[] { p0, p1 }),
                    Enumerable.Empty<ParameterExpression>());
            Func<Func<short?, short?, short?>> f4 = e4.Compile(useInterpreter);
            Assert.Equal(expected, f4()(a, b));

            // verify with currying
            Expression<Func<short?, Func<short?, short?>>> e5 =
                Expression.Lambda<Func<short?, Func<short?, short?>>>(
                    Expression.Lambda<Func<short?, short?>>(
                        Expression.Subtract(p0, p1),
                        new ParameterExpression[] { p1 }),
                    new ParameterExpression[] { p0 });
            Func<short?, Func<short?, short?>> f5 = e5.Compile(useInterpreter);
            Assert.Equal(expected, f5(a)(b));

            // verify with one parameter
            Expression<Func<Func<short?, short?>>> e6 =
                Expression.Lambda<Func<Func<short?, short?>>>(
                    Expression.Invoke(
                        Expression.Lambda<Func<short?, Func<short?, short?>>>(
                            Expression.Lambda<Func<short?, short?>>(
                                Expression.Subtract(p0, p1),
                                new ParameterExpression[] { p1 }),
                            new ParameterExpression[] { p0 }),
                        new Expression[] { Expression.Constant(a, typeof(short?)) }),
                    Enumerable.Empty<ParameterExpression>());
            Func<short?, short?> f6 = e6.Compile(useInterpreter)();
            Assert.Equal(expected, f6(b));
        }

        #endregion

        #region Verify uint?

        // Same six-shape verification for nullable uint subtraction (unchecked).
        private static void VerifySubtractNullableUInt(uint? a, uint? b, bool useInterpreter)
        {
            uint? expected = unchecked(a - b);

            ParameterExpression p0 = Expression.Parameter(typeof(uint?), "p0");
            ParameterExpression p1 = Expression.Parameter(typeof(uint?), "p1");

            // verify with parameters supplied
            Expression<Func<uint?>> e1 =
                Expression.Lambda<Func<uint?>>(
                    Expression.Invoke(
                        Expression.Lambda<Func<uint?, uint?, uint?>>(
                            Expression.Subtract(p0, p1),
                            new ParameterExpression[] { p0, p1 }),
                        new Expression[] { Expression.Constant(a, typeof(uint?)), Expression.Constant(b, typeof(uint?)) }),
                    Enumerable.Empty<ParameterExpression>());
            Func<uint?> f1 = e1.Compile(useInterpreter);
            Assert.Equal(expected, f1());

            // verify with values passed to make parameters
            Expression<Func<uint?, uint?, Func<uint?>>> e2 =
                Expression.Lambda<Func<uint?, uint?, Func<uint?>>>(
                    Expression.Lambda<Func<uint?>>(
                        Expression.Subtract(p0, p1),
                        Enumerable.Empty<ParameterExpression>()),
                    new ParameterExpression[] { p0, p1 });
            Func<uint?, uint?, Func<uint?>> f2 = e2.Compile(useInterpreter);
            Assert.Equal(expected, f2(a, b)());

            // verify with values directly passed
            Expression<Func<Func<uint?, uint?, uint?>>> e3 =
                Expression.Lambda<Func<Func<uint?, uint?, uint?>>>(
                    Expression.Invoke(
                        Expression.Lambda<Func<Func<uint?, uint?, uint?>>>(
                            Expression.Lambda<Func<uint?, uint?, uint?>>(
                                Expression.Subtract(p0, p1),
                                new ParameterExpression[] { p0, p1 }),
                            Enumerable.Empty<ParameterExpression>()),
                        Enumerable.Empty<Expression>()),
                    Enumerable.Empty<ParameterExpression>());
            Func<uint?, uint?, uint?> f3 = e3.Compile(useInterpreter)();
            Assert.Equal(expected, f3(a, b));

            // verify as a function generator
            Expression<Func<Func<uint?, uint?, uint?>>> e4 =
                Expression.Lambda<Func<Func<uint?, uint?, uint?>>>(
                    Expression.Lambda<Func<uint?, uint?, uint?>>(
                        Expression.Subtract(p0, p1),
                        new ParameterExpression[] { p0, p1 }),
                    Enumerable.Empty<ParameterExpression>());
            Func<Func<uint?, uint?, uint?>> f4 = e4.Compile(useInterpreter);
            Assert.Equal(expected, f4()(a, b));

            // verify with currying
            Expression<Func<uint?, Func<uint?, uint?>>> e5 =
                Expression.Lambda<Func<uint?, Func<uint?, uint?>>>(
                    Expression.Lambda<Func<uint?, uint?>>(
                        Expression.Subtract(p0, p1),
                        new ParameterExpression[] { p1 }),
                    new ParameterExpression[] { p0 });
            Func<uint?, Func<uint?, uint?>> f5 = e5.Compile(useInterpreter);
            Assert.Equal(expected, f5(a)(b));

            // verify with one parameter
            Expression<Func<Func<uint?, uint?>>> e6 =
                Expression.Lambda<Func<Func<uint?, uint?>>>(
                    Expression.Invoke(
                        Expression.Lambda<Func<uint?, Func<uint?, uint?>>>(
                            Expression.Lambda<Func<uint?, uint?>>(
                                Expression.Subtract(p0, p1),
                                new ParameterExpression[] { p1 }),
                            new ParameterExpression[] { p0 }),
                        new Expression[] { Expression.Constant(a, typeof(uint?)) }),
                    Enumerable.Empty<ParameterExpression>());
            Func<uint?, uint?> f6 = e6.Compile(useInterpreter)();
            Assert.Equal(expected, f6(b));
        }

        #endregion

        #region Verify ulong?

        // Same six-shape verification for nullable ulong subtraction (unchecked).
        private static void VerifySubtractNullableULong(ulong? a, ulong? b, bool useInterpreter)
        {
            ulong? expected = unchecked(a - b);

            ParameterExpression p0 = Expression.Parameter(typeof(ulong?), "p0");
            ParameterExpression p1 = Expression.Parameter(typeof(ulong?), "p1");

            // verify with parameters supplied
            Expression<Func<ulong?>> e1 =
                Expression.Lambda<Func<ulong?>>(
                    Expression.Invoke(
                        Expression.Lambda<Func<ulong?, ulong?, ulong?>>(
                            Expression.Subtract(p0, p1),
                            new ParameterExpression[] { p0, p1 }),
                        new Expression[] { Expression.Constant(a, typeof(ulong?)), Expression.Constant(b, typeof(ulong?)) }),
                    Enumerable.Empty<ParameterExpression>());
            Func<ulong?> f1 = e1.Compile(useInterpreter);
            Assert.Equal(expected, f1());

            // verify with values passed to make parameters
            Expression<Func<ulong?, ulong?, Func<ulong?>>> e2 =
                Expression.Lambda<Func<ulong?, ulong?, Func<ulong?>>>(
                    Expression.Lambda<Func<ulong?>>(
                        Expression.Subtract(p0, p1),
                        Enumerable.Empty<ParameterExpression>()),
                    new ParameterExpression[] { p0, p1 });
            Func<ulong?, ulong?, Func<ulong?>> f2 = e2.Compile(useInterpreter);
            Assert.Equal(expected, f2(a, b)());

            // verify with values directly passed
            Expression<Func<Func<ulong?, ulong?, ulong?>>> e3 =
                Expression.Lambda<Func<Func<ulong?, ulong?, ulong?>>>(
                    Expression.Invoke(
                        Expression.Lambda<Func<Func<ulong?, ulong?, ulong?>>>(
                            Expression.Lambda<Func<ulong?, ulong?, ulong?>>(
                                Expression.Subtract(p0, p1),
                                new ParameterExpression[] { p0, p1 }),
                            Enumerable.Empty<ParameterExpression>()),
                        Enumerable.Empty<Expression>()),
                    Enumerable.Empty<ParameterExpression>());
            Func<ulong?, ulong?, ulong?> f3 = e3.Compile(useInterpreter)();
            Assert.Equal(expected, f3(a, b));

            // verify as a function generator
            Expression<Func<Func<ulong?, ulong?, ulong?>>> e4 =
                Expression.Lambda<Func<Func<ulong?, ulong?, ulong?>>>(
                    Expression.Lambda<Func<ulong?, ulong?, ulong?>>(
                        Expression.Subtract(p0, p1),
                        new ParameterExpression[] { p0, p1 }),
                    Enumerable.Empty<ParameterExpression>());
            Func<Func<ulong?, ulong?, ulong?>> f4 = e4.Compile(useInterpreter);
            Assert.Equal(expected, f4()(a, b));

            // verify with currying
            Expression<Func<ulong?, Func<ulong?, ulong?>>> e5 =
                Expression.Lambda<Func<ulong?, Func<ulong?, ulong?>>>(
                    Expression.Lambda<Func<ulong?, ulong?>>(
                        Expression.Subtract(p0, p1),
                        new ParameterExpression[] { p1 }),
                    new ParameterExpression[] { p0 });
            Func<ulong?, Func<ulong?, ulong?>> f5 = e5.Compile(useInterpreter);
            Assert.Equal(expected, f5(a)(b));

            // verify with one parameter
            Expression<Func<Func<ulong?, ulong?>>> e6 =
                Expression.Lambda<Func<Func<ulong?, ulong?>>>(
                    Expression.Invoke(
                        Expression.Lambda<Func<ulong?, Func<ulong?, ulong?>>>(
                            Expression.Lambda<Func<ulong?, ulong?>>(
                                Expression.Subtract(p0, p1),
                                new ParameterExpression[] { p1 }),
                            new ParameterExpression[] { p0 }),
                        new Expression[] { Expression.Constant(a, typeof(ulong?)) }),
                    Enumerable.Empty<ParameterExpression>());
            Func<ulong?, ulong?> f6 = e6.Compile(useInterpreter)();
            Assert.Equal(expected, f6(b));
        }

        #endregion

        #region Verify ushort?

        // Same six-shape verification for nullable ushort subtraction; ushort
        // arithmetic widens to int, so the expected value is cast back unchecked.
        private static void VerifySubtractNullableUShort(ushort? a, ushort? b, bool useInterpreter)
        {
            ushort? expected = unchecked((ushort?)(a - b));

            ParameterExpression p0 = Expression.Parameter(typeof(ushort?), "p0");
            ParameterExpression p1 = Expression.Parameter(typeof(ushort?), "p1");

            // verify with parameters supplied
            Expression<Func<ushort?>> e1 =
                Expression.Lambda<Func<ushort?>>(
                    Expression.Invoke(
                        Expression.Lambda<Func<ushort?, ushort?, ushort?>>(
                            Expression.Subtract(p0, p1),
                            new ParameterExpression[] { p0, p1 }),
                        new Expression[] { Expression.Constant(a, typeof(ushort?)), Expression.Constant(b, typeof(ushort?)) }),
                    Enumerable.Empty<ParameterExpression>());
            Func<ushort?> f1 = e1.Compile(useInterpreter);
            Assert.Equal(expected, f1());

            // verify with values passed to make parameters
            Expression<Func<ushort?, ushort?, Func<ushort?>>> e2 =
                Expression.Lambda<Func<ushort?, ushort?, Func<ushort?>>>(
                    Expression.Lambda<Func<ushort?>>(
                        Expression.Subtract(p0, p1),
                        Enumerable.Empty<ParameterExpression>()),
                    new ParameterExpression[] { p0, p1 });
            Func<ushort?, ushort?, Func<ushort?>> f2 = e2.Compile(useInterpreter);
            Assert.Equal(expected, f2(a, b)());

            // verify with values directly passed
            Expression<Func<Func<ushort?, ushort?, ushort?>>> e3 =
                Expression.Lambda<Func<Func<ushort?, ushort?, ushort?>>>(
                    Expression.Invoke(
                        Expression.Lambda<Func<Func<ushort?, ushort?, ushort?>>>(
                            Expression.Lambda<Func<ushort?, ushort?, ushort?>>(
                                Expression.Subtract(p0, p1),
                                new ParameterExpression[] { p0, p1 }),
                            Enumerable.Empty<ParameterExpression>()),
                        Enumerable.Empty<Expression>()),
                    Enumerable.Empty<ParameterExpression>());
            Func<ushort?, ushort?, ushort?> f3 = e3.Compile(useInterpreter)();
            Assert.Equal(expected, f3(a, b));

            // verify as a function generator
            Expression<Func<Func<ushort?, ushort?, ushort?>>> e4 =
                Expression.Lambda<Func<Func<ushort?, ushort?, ushort?>>>(
                    Expression.Lambda<Func<ushort?, ushort?, ushort?>>(
                        Expression.Subtract(p0, p1),
                        new ParameterExpression[] { p0, p1 }),
                    Enumerable.Empty<ParameterExpression>());
            Func<Func<ushort?, ushort?, ushort?>> f4 = e4.Compile(useInterpreter);
            Assert.Equal(expected, f4()(a, b));

            // verify with currying
            Expression<Func<ushort?, Func<ushort?, ushort?>>> e5 =
                Expression.Lambda<Func<ushort?, Func<ushort?, ushort?>>>(
                    Expression.Lambda<Func<ushort?, ushort?>>(
                        Expression.Subtract(p0, p1),
                        new ParameterExpression[] { p1 }),
                    new ParameterExpression[] { p0 });
            Func<ushort?, Func<ushort?, ushort?>> f5 = e5.Compile(useInterpreter);
            Assert.Equal(expected, f5(a)(b));

            // verify with one parameter
            Expression<Func<Func<ushort?, ushort?>>> e6 =
                Expression.Lambda<Func<Func<ushort?, ushort?>>>(
                    Expression.Invoke(
                        Expression.Lambda<Func<ushort?, Func<ushort?, ushort?>>>(
                            Expression.Lambda<Func<ushort?, ushort?>>(
                                Expression.Subtract(p0, p1),
                                new ParameterExpression[] { p1 }),
                            new ParameterExpression[] { p0 }),
                        new Expression[] { Expression.Constant(a, typeof(ushort?)) }),
                    Enumerable.Empty<ParameterExpression>());
            Func<ushort?, ushort?> f6 = e6.Compile(useInterpreter)();
            Assert.Equal(expected, f6(b));
        }

        #endregion

        #endregion
    }
}
#region License
/* Copyright (c) 2003-2015 Llewellyn Pritchard
 * All rights reserved.
 * This source code is subject to terms and conditions of the BSD License.
 * See license.txt. */
#endregion

#region Includes
using System;
using System.Collections;
using System.Collections.Generic;
using System.Windows.Forms;
using System.Reflection;

using ToolStripMenuItem = IronScheme.Editor.Controls.ToolStripMenuItem;
#endregion

namespace IronScheme.Editor.ComponentModel
{
  /// <summary>
  /// Defines a toplevel menu name for service
  /// </summary>
  [AttributeUsage(AttributeTargets.Class, AllowMultiple = false)]
  public class MenuAttribute : Attribute
  {
    string text;

    /// <summary>
    /// Creates instance of MenuAttribute
    /// </summary>
    /// <param name="text">the name</param>
    public MenuAttribute(string text)
    {
      this.text = text;
    }

    /// <summary>
    /// Gets the name of the toplevel menu
    /// </summary>
    public string Text
    {
      get { return text; }
    }
  }

  /// <summary>
  /// Defines the current application state, will probably become internal
  /// </summary>
  [Flags]
  public enum ApplicationState
  {
    /// <summary>
    /// Application is uninitialized
    /// </summary>
    UnInitialized = -1,

    /// <summary>
    /// Normal
    /// </summary>
    Normal = 0,

    /// <summary>
    /// File is active
    /// </summary>
    File = 1,

    /// <summary>
    /// Project is active
    /// </summary>
    Project = 2,

    /// <summary>
    /// Debugger active
    /// </summary>
    Debug = 4,

    /// <summary>
    /// Breakpoint hit
    /// </summary>
    Break = 8,

    /// <summary>
    /// Debugger active and breakpoint hit
    /// </summary>
    DebugBreak = Debug | Break,

    /// <summary>
    /// AutoComplete active
    /// </summary>
    AutoComplete = 16,

    /// <summary>
    /// Buffer (text editor) active
    /// </summary>
    Buffer = 32,

    /// <summary>
    /// Buffer and project active
    /// </summary>
    ProjectBuffer = Project | Buffer,

    /// <summary>
    /// Grid active
    /// </summary>
    Grid = 64,

    /// <summary>
    /// IEdit control active
    /// </summary>
    Edit = 128,

    /// <summary>
    /// INavigate control active
    /// </summary>
    Navigate = 256,

    /// <summary>
    /// IScroll control active
    /// </summary>
    Scroll = 512,

    Build = 1024,

    Document = 2048,
  }

  /// <summary>
  /// Suppresses menu creation
  /// </summary>
  [AttributeUsage(AttributeTargets.Method | AttributeTargets.Property, AllowMultiple = false)]
  public class SuppressMenuAttribute : Attribute
  {
  }

  /// <summary>
  /// Defines menuitems on services
  /// </summary>
  [AttributeUsage(AttributeTargets.Method | AttributeTargets.Property, AllowMultiple = false)]
  public class MenuItemAttribute : Attribute, IComparable
  {
    string text, image;
    ApplicationState state = 0;
    int index = -1;
    // Tracks the highest index handed out so far, across all instances, so that
    // attributes declared without an explicit Index are appended at the end.
    static int maxindex = -1;
    internal MemberInfo invoke;
    internal bool istogglemenu = false;
    internal ServiceBase ctr;
    internal ToolStripMenuItem mi;
    bool allowtoolbar = false;

    /// <summary>
    /// Creates an instance of MenuItemAttribute
    /// </summary>
    /// <param name="text">the name</param>
    public MenuItemAttribute(string text)
    {
      this.text = text;
      index = maxindex + 1;
    }

    /// <summary>
    /// Gets the name of the item
    /// </summary>
    public string Text
    {
      get { return text; }
    }

    /// <summary>
    /// Gets or sets the image
    /// </summary>
    public string Image
    {
      get { return image; }
      set { image = value; }
    }

    /// <summary>
    /// Gets or sets the state
    /// </summary>
    /// <remarks>Flagged usage</remarks>
    public ApplicationState State
    {
      get { return state; }
      set { state = value; }
    }

    /// <summary>
    /// Gets or sets the index
    /// </summary>
    /// <remarks>any 'gaps' in the number sequence will add seperator items</remarks>
    public int Index
    {
      get { return index; }
      set
      {
        index = value;
        maxindex = Math.Max(maxindex, value);
      }
    }

    Type conv;

    /// <summary>
    /// Gets or sets the converter.
    /// </summary>
    /// <value>The converter.</value>
    public Type Converter
    {
      get { return conv; }
      set { conv = value; }
    }

    /// <summary>
    /// Gets or sets whether to create a toolbar button
    /// </summary>
    public bool AllowToolBar
    {
      get { return allowtoolbar; }
      set { allowtoolbar = value; }
    }

    // Orders menu items by Index; any non-MenuItemAttribute sorts after.
    int IComparable.CompareTo(object obj)
    {
      MenuItemAttribute b = obj as MenuItemAttribute;
      if (b == null)
      {
        return -1;
      }
      return Index - b.Index;
    }
  }

  /// <summary>
  /// Provides services for menu handling
  /// </summary>
  [Name("Menu service")]
  public interface IMenuService : IService
  {
    /// <summary>
    /// Gets the 'toplevelitem'
    /// </summary>
    ToolStripMenuItem this[string name] { get; }

    /// <summary>
    /// Gets the main menu of the hosting form
    /// </summary>
    MenuStrip MainMenu { get; }
  }

  sealed class MenuService : ServiceBase, IMenuService
  {
    readonly MenuStrip main;
    // Top-level items keyed by their mnemonic-escaped caption.
    readonly Dictionary<string, ToolStripMenuItem> menus = new Dictionary<string, ToolStripMenuItem>();
    // Per-toplevel map from dropdown item -> MenuItemAttribute metadata.
    readonly Dictionary<ToolStripMenuItem, Hashtable> attrmapmap = new Dictionary<ToolStripMenuItem, Hashtable>();
    bool menuaccel = true;

    public MenuStrip MainMenu
    {
      get { return main; }
    }

    // Toggles display of '&' accelerator markers by rewriting captions from the
    // originals stashed in each item's Tag.
    // NOTE(review): AddTopLevel has "mi.Tag = mi.Text" commented out, so Tag may
    // be null here and mi.Tag.ToString() would throw — confirm Tag is assigned
    // elsewhere before relying on MenuAccel.
    public bool MenuAccel
    {
      get { return menuaccel; }
      set
      {
        if (menuaccel != value)
        {
          if (value)
          {
            foreach (ToolStripMenuItem mi in menus.Values)
            {
              mi.Text = mi.Tag.ToString();
            }
          }
          else
          {
            foreach (ToolStripMenuItem mi in menus.Values)
            {
              mi.Text = MnemonicEscape(mi.Tag.ToString());
            }
          }
        }
      }
    }

    public MenuService()
    {
      main = new MenuStrip();
      main.Dock = DockStyle.Top;
      ServiceHost.Window.MainForm.Controls.Add(main);

      // Standard top-level menus, in display order.
      AddTopLevel("&File");
      AddTopLevel("&Edit");
      AddTopLevel("&View");
      AddTopLevel("&Project");
      AddTopLevel("&Build");
      AddTopLevel("&Debug");
      AddTopLevel("&Script");
      AddTopLevel("&Window");
      AddTopLevel("&Help");

      ServiceHost.StateChanged += new EventHandler(ServiceHost_StateChanged);
    }

    // Strips single '&' accelerator markers while preserving literal "&&" as '&'.
    static string MnemonicEscape(string s)
    {
      return s.Replace("&&", "||").Replace("&", string.Empty).Replace("||", "&");
    }

    public Hashtable GetAttributeMap(ToolStripMenuItem toplevel)
    {
      return attrmapmap[toplevel] as Hashtable;
    }

    public ToolStripMenuItem AddTopLevel(string name)
    {
      return AddTopLevel(new ToolStripMenuItem(name));
    }

    public ToolStripMenuItem AddTopLevel(ToolStripMenuItem mi)
    {
      if (ServiceHost.ToolBar != null)
      {
        (ServiceHost.ToolBar as ToolBarService).Add(mi, null);
      }
      mi.DropDownOpening += new EventHandler(toplevel_Popup);
      main.Items.Add(mi);
      string mit = MnemonicEscape(mi.Text);
      menus[mit] = mi;
      // mi.Tag = mi.Text;
      attrmapmap[mi] = new Hashtable();
      return mi;
    }

    public void RemoveTopLevel(ToolStripMenuItem mi)
    {
      main.Items.Remove(mi);
      menus.Remove(MnemonicEscape(mi.Text));
      attrmapmap.Remove(mi);
      mi.DropDownOpening -= new EventHandler(toplevel_Popup);
    }

    public ToolStripMenuItem this[string name]
    {
      get
      {
        string escname = MnemonicEscape(name);
        if (menus.ContainsKey(escname))
        {
          return menus[escname];
        }
        else
        {
          return null;
        }
      }
    }

    // Lazily refreshes enabled/checked state just before a top-level menu opens.
    void toplevel_Popup(object sender, EventArgs e)
    {
      ValidateMenuState(sender as ToolStripMenuItem);
    }

    void ValidateMenuState(ToolStripMenuItem toplevel)
    {
      Hashtable attrmap = GetAttributeMap(toplevel);
      foreach (ToolStripItem pmi in toplevel.DropDownItems)
      {
        MenuItemAttribute mia = attrmap[pmi] as MenuItemAttribute;
        if (mia == null) // in case its a seperator or submenu
        {
          if (!(pmi is ToolStripSeparator))
          {
            // Submenu: validate its children instead.
            foreach (ToolStripMenuItem spmi in (pmi as ToolStripMenuItem).DropDownItems)
            {
              MenuItemAttribute smia = attrmap[spmi] as MenuItemAttribute;
              ServiceBase ctr = smia.ctr;
              spmi.Enabled = ((ctr.MenuState & smia.State) == smia.State);
              if (smia.istogglemenu)
              {
                try
                {
                  spmi.Checked = (bool)((PropertyInfo)smia.invoke).GetValue(smia.ctr, new object[0]);
                }
                catch
                {
                  //something not ready, sorts itself out
                }
              }
            }
          }
        }
        else
        {
          ServiceBase ctr = mia.ctr;
          pmi.Enabled = ((ctr.MenuState & mia.State) == mia.State);
          if (mia.istogglemenu)
          {
            try
            {
              (pmi as ToolStripMenuItem).Checked = (bool)((PropertyInfo)mia.invoke).GetValue(mia.ctr, new object[0]);
            }
            catch
            {
              //something not ready, sorts itself out
            }
          }
        }
      }
    }

    void ServiceHost_StateChanged(object sender, EventArgs e)
    {
      foreach (ToolStripMenuItem mi in menus.Values)
      {
        ValidateMenuState(mi);
      }
    }
  }
}
/* Windows Forms Collapsible Splitter Control for .Net (c)Copyright 2002-2003 NJF (furty74@yahoo.com). All rights reserved. Assembly Build Dependencies: CollapsibleSplitter.bmp Version 1.1 Changes: OnPaint is now overridden instead of being a handled event, and the entire splitter is now painted rather than just the collpaser control The splitter rectangle is now correctly defined The Collapsed property was renamed to IsCollapsed, and the code changed so that no value needs to be set New visual styles added: Win9x, XP, DoubleDots and Lines Version 1.11 Changes: The OnMouseMove event handler was updated to address a flickering issue discovered by John O'Byrne Version 1.2 Changes: Added support for horizontal splitters Version 1.21 Changes: Added support for inclusion as a VS.Net ToolBox control Added a ToolBox bitmap Removed extraneous overrides Added summaries Version 1.22 Changes: Removed the ParentFolder from public properties - this is now set automatically in the OnHandleCreated event *Added expand/collapse animation code Version 1.3 Changes: Added an optional 3D border General code and comment cleaning Flagged assembly with the CLSCompliant attribute Added a simple designer class to filter unwanted properties */ namespace BrawlManagerLib { using System; using System.ComponentModel; using System.Drawing; using System.Windows.Forms; #region Enums /// <summary> /// Enumeration to sepcify the visual style to be applied to the CollapsibleSplitter control /// </summary> public enum VisualStyles { Mozilla = 0, XP, Win9x, DoubleDots, Lines } /// <summary> /// Enumeration to specify the current animation state of the control. 
    /// </summary>
    public enum SplitterState
    {
        Collapsed = 0,
        Expanding,
        Expanded,
        Collapsing
    }

    #endregion

    /// <summary>
    /// A custom collapsible splitter that can resize, hide and show associated form controls
    /// </summary>
    [ToolboxBitmap(typeof(CollapsibleSplitter))]
    //[DesignerAttribute(typeof(CollapsibleSplitterDesigner))]
    public class CollapsibleSplitter : System.Windows.Forms.Splitter
    {
        #region Private Properties

        // declare and define some base properties
        private bool hot;                                      // true while the mouse is over the collapse button
        private System.Drawing.Color hotColor = CalculateColor(SystemColors.Highlight, SystemColors.Window, 70);
        private System.Windows.Forms.Control controlToHide;    // the control this splitter collapses/expands
        private System.Drawing.Rectangle rr;                   // hit rectangle of the collapse button; assigned in OnPaint
        private System.Windows.Forms.Form parentForm;          // resolved via FindForm() in OnHandleCreated
        private bool expandParentForm;
        private VisualStyles visualStyle;

        // Border added in version 1.3
        private System.Windows.Forms.Border3DStyle borderStyle = System.Windows.Forms.Border3DStyle.Flat;

        // animation controls introduced in version 1.22
        private System.Windows.Forms.Timer animationTimer;
        private int controlWidth;       // size of controlToHide cached while an animation runs
        private int controlHeight;
        private int parentFormWidth;    // target parent-form size when ExpandParentForm is set
        private int parentFormHeight;
        private SplitterState currentState;
        private int animationDelay = 20;
        private int animationStep = 20;
        private bool useAnimations;

        #endregion

        #region Public Properties

        /// <summary>
        /// The initial state of the Splitter. Set to True if the control to hide is not visible by default
        /// </summary>
        [Bindable(true), Category("Collapsing Options"), DefaultValue("False"),
        Description("The initial state of the Splitter. Set to True if the control to hide is not visible by default")]
        public bool IsCollapsed
        {
            get
            {
                // with no target control the splitter reports itself collapsed
                if(this.controlToHide != null)
                    return !this.controlToHide.Visible;
                else
                    return true;
            }
        }

        /// <summary>
        /// The System.Windows.Forms.Control that the splitter will collapse
        /// </summary>
        [Bindable(true), Category("Collapsing Options"), DefaultValue(""),
        Description("The System.Windows.Forms.Control that the splitter will collapse")]
        public System.Windows.Forms.Control ControlToHide
        {
            get{ return this.controlToHide; }
            set{ this.controlToHide = value; }
        }

        /// <summary>
        /// Determines if the collapse and expanding actions will be animated
        /// </summary>
        [Bindable(true), Category("Collapsing Options"), DefaultValue("True"),
        Description("Determines if the collapse and expanding actions will be animated")]
        public bool UseAnimations
        {
            get { return this.useAnimations; }
            set { this.useAnimations = value; }
        }

        /// <summary>
        /// The delay in milliseconds between animation steps
        /// </summary>
        [Bindable(true), Category("Collapsing Options"), DefaultValue("20"),
        Description("The delay in millisenconds between animation steps")]
        public int AnimationDelay
        {
            get{ return this.animationDelay; }
            set{ this.animationDelay = value; }
        }

        /// <summary>
        /// The amount of pixels moved in each animation step
        /// </summary>
        [Bindable(true), Category("Collapsing Options"), DefaultValue("20"),
        Description("The amount of pixels moved in each animation step")]
        public int AnimationStep
        {
            get{ return this.animationStep; }
            set{ this.animationStep = value; }
        }

        /// <summary>
        /// When true the entire parent form will be expanded and collapsed, otherwise just the control to expand will be changed
        /// </summary>
        [Bindable(true), Category("Collapsing Options"), DefaultValue("False"),
        Description("When true the entire parent form will be expanded and collapsed, otherwise just the contol to expand will be changed")]
        public bool ExpandParentForm
        {
            get{ return this.expandParentForm; }
            set{ this.expandParentForm = value; }
        }

        /// <summary>
        /// The visual style that will be painted on the control
        /// </summary>
        [Bindable(true), Category("Collapsing Options"), DefaultValue("VisualStyles.XP"),
        Description("The visual style that will be painted on the control")]
        public VisualStyles VisualStyle
        {
            get{ return this.visualStyle; }
            set
            {
                this.visualStyle = value;
                this.Invalidate();  // repaint immediately with the new style
            }
        }

        /// <summary>
        /// An optional border style to paint on the control. Set to Flat for no border
        /// </summary>
        [Bindable(true), Category("Collapsing Options"), DefaultValue("System.Windows.Forms.Border3DStyle.Flat"),
        Description("An optional border style to paint on the control. Set to Flat for no border")]
        public System.Windows.Forms.Border3DStyle BorderStyle3D
        {
            get{ return this.borderStyle; }
            set
            {
                this.borderStyle = value;
                this.Invalidate();
            }
        }

        #endregion

        #region Public Methods

        // Programmatic equivalent of clicking the collapse button.
        public void ToggleState()
        {
            this.ToggleSplitter();
        }

        #endregion

        #region Constructor

        public CollapsibleSplitter()
        {
            // Register mouse events
            this.Click += new System.EventHandler(OnClick);
            this.Resize += new System.EventHandler(OnResize);
            this.MouseLeave += new System.EventHandler(OnMouseLeave);
            this.MouseMove += new MouseEventHandler(OnMouseMove);

            // Setup the animation timer control
            this.animationTimer = new System.Windows.Forms.Timer();
            this.animationTimer.Interval = animationDelay;
            this.animationTimer.Tick += new System.EventHandler(this.animationTimerTick);
        }

        #endregion

        #region Overrides

        protected override void OnHandleCreated(EventArgs e)
        {
            base.OnHandleCreated(e);
            this.parentForm = this.FindForm();

            // set the current state
            if(this.controlToHide != null)
            {
                if(this.controlToHide.Visible)
                {
                    this.currentState = SplitterState.Expanded;
                }
                else
                {
                    this.currentState = SplitterState.Collapsed;
                }
            }
        }

        protected override void OnEnabledChanged(System.EventArgs e)
        {
            base.OnEnabledChanged(e);
            this.Invalidate();
        }

        #endregion

        #region Event Handlers

        protected override void OnMouseDown(MouseEventArgs e)
        {
            // if the hider control isn't hot,
// (continued) let the base resize action occur
            if(this.controlToHide != null)
            {
                if(!this.hot && this.controlToHide.Visible)
                {
                    base.OnMouseDown(e);
                }
            }
        }

        private void OnResize(object sender, System.EventArgs e)
        {
            this.Invalidate();
        }

        // this method was updated in version 1.11 to fix a flickering problem
        // discovered by John O'Byrne
        private void OnMouseMove(object sender, System.Windows.Forms.MouseEventArgs e)
        {
            // check to see if the mouse cursor position is within the bounds of our control
            if(e.X >= rr.X && e.X <= rr.X + rr.Width && e.Y >= rr.Y && e.Y <= rr.Y + rr.Height)
            {
                if(!this.hot)
                {
                    this.hot = true;
                    this.Cursor = Cursors.Hand;
                    this.Invalidate();  // FIX: removed stray empty statement (";;")
                }
            }
            else
            {
                if(this.hot)
                {
                    this.hot = false;
                    this.Invalidate();
                }

                this.Cursor = Cursors.Default;

                if(controlToHide != null)
                {
                    if(!controlToHide.Visible)
                        this.Cursor = Cursors.Default;
                    else // Changed in v1.2 to support Horizontal Splitters
                    {
                        if(this.Dock == DockStyle.Left || this.Dock == DockStyle.Right)
                        {
                            this.Cursor = Cursors.VSplit;
                        }
                        else
                        {
                            this.Cursor = Cursors.HSplit;
                        }
                    }
                }
            }
        }

        private void OnMouseLeave(object sender, System.EventArgs e)
        {
            // ensure that the hot state is removed
            this.hot = false;
            this.Invalidate();  // FIX: removed stray empty statement (";;")
        }

        private void OnClick(object sender, System.EventArgs e)
        {
            // only toggle when the collapse button is hot and no animation is running
            if(controlToHide != null && hot &&
                currentState != SplitterState.Collapsing &&
                currentState != SplitterState.Expanding)
            {
                ToggleSplitter();
            }
        }

        // Collapses or expands controlToHide, optionally animating the change and
        // optionally growing/shrinking the parent form by the same amount.
        private void ToggleSplitter()
        {
            // if an animation is currently in progress for this control, drop out
            if(currentState == SplitterState.Collapsing || currentState == SplitterState.Expanding)
                return;

            // FIX: guard against a missing target control — the public ToggleState()
            // reaches here without OnClick's null check and would otherwise throw.
            if(controlToHide == null)
                return;

            controlWidth = controlToHide.Width;
            controlHeight = controlToHide.Height;

            if(controlToHide.Visible)
            {
                if(useAnimations)
                {
                    currentState = SplitterState.Collapsing;

                    if(parentForm != null)
                    {
                        if(this.Dock == DockStyle.Left || this.Dock == DockStyle.Right)
                        {
                            parentFormWidth = parentForm.Width - controlWidth;
                        }
                        else
                        {
                            parentFormHeight = parentForm.Height - controlHeight;
                        }
                    }

                    this.animationTimer.Enabled = true;
                }
                else
                {
                    // no animations, so just toggle the visible state
                    currentState = SplitterState.Collapsed;
                    controlToHide.Visible = false;
                    if(expandParentForm && parentForm != null)
                    {
                        if(this.Dock == DockStyle.Left || this.Dock == DockStyle.Right)
                        {
                            parentForm.Width -= controlToHide.Width;
                        }
                        else
                        {
                            parentForm.Height -= controlToHide.Height;
                        }
                    }
                }
            }
            else
            {
                // control to hide is collapsed
                if(useAnimations)
                {
                    currentState = SplitterState.Expanding;

                    if(this.Dock == DockStyle.Left || this.Dock == DockStyle.Right)
                    {
                        if(parentForm != null)
                        {
                            parentFormWidth = parentForm.Width + controlWidth;
                        }
                        controlToHide.Width = 0;
                    }
                    else
                    {
                        if(parentForm != null)
                        {
                            parentFormHeight = parentForm.Height + controlHeight;
                        }
                        controlToHide.Height = 0;
                    }
                    controlToHide.Visible = true;
                    this.animationTimer.Enabled = true;
                }
                else
                {
                    // no animations, so just toggle the visible state
                    currentState = SplitterState.Expanded;
                    controlToHide.Visible = true;
                    if(expandParentForm && parentForm != null)
                    {
                        if(this.Dock == DockStyle.Left || this.Dock == DockStyle.Right)
                        {
                            parentForm.Width += controlToHide.Width;
                        }
                        else
                        {
                            parentForm.Height += controlToHide.Height;
                        }
                    }
                }
            }
        }

        #endregion

        #region Implementation

        #region Animation Timer Tick

        // Drives the collapse/expand animation one step per timer tick.
        // BUG FIX: every guard below originally read parentForm.WindowState BEFORE
        // testing parentForm != null, so the null check was dead code and each
        // animated step threw NullReferenceException when the splitter was not
        // hosted on a Form. The null test now runs first (short-circuit &&).
        private void animationTimerTick(object sender, System.EventArgs e)
        {
            switch(currentState)
            {
                case SplitterState.Collapsing:

                    if(this.Dock == DockStyle.Left || this.Dock == DockStyle.Right)
                    {
                        // vertical splitter
                        if(controlToHide.Width > animationStep)
                        {
                            if(expandParentForm && parentForm != null && parentForm.WindowState != FormWindowState.Maximized)
                            {
                                parentForm.Width -= animationStep;
                            }
                            controlToHide.Width -= animationStep;
                        }
                        else
                        {
                            // final step: snap to target and stop the timer
                            if(expandParentForm && parentForm != null && parentForm.WindowState != FormWindowState.Maximized)
                            {
                                parentForm.Width = parentFormWidth;
                            }
                            controlToHide.Visible = false;
                            animationTimer.Enabled = false;
                            controlToHide.Width = controlWidth;
                            currentState = SplitterState.Collapsed;
                            this.Invalidate();
                        }
                    }
                    else
                    {
                        // horizontal splitter
                        if(controlToHide.Height > animationStep)
                        {
                            if(expandParentForm && parentForm != null && parentForm.WindowState != FormWindowState.Maximized)
                            {
                                parentForm.Height -= animationStep;
                            }
                            controlToHide.Height -= animationStep;
                        }
                        else
                        {
                            if(expandParentForm && parentForm != null && parentForm.WindowState != FormWindowState.Maximized)
                            {
                                parentForm.Height = parentFormHeight;
                            }
                            controlToHide.Visible = false;
                            animationTimer.Enabled = false;
                            controlToHide.Height = controlHeight;
                            currentState = SplitterState.Collapsed;
                            this.Invalidate();
                        }
                    }
                    break;

                case SplitterState.Expanding:

                    if(this.Dock == DockStyle.Left || this.Dock == DockStyle.Right)
                    {
                        // vertical splitter
                        if(controlToHide.Width < (controlWidth - animationStep))
                        {
                            if(expandParentForm && parentForm != null && parentForm.WindowState != FormWindowState.Maximized)
                            {
                                parentForm.Width += animationStep;
                            }
                            controlToHide.Width += animationStep;
                        }
                        else
                        {
                            if(expandParentForm && parentForm != null && parentForm.WindowState != FormWindowState.Maximized)
                            {
                                parentForm.Width = parentFormWidth;
                            }
                            controlToHide.Width = controlWidth;
                            controlToHide.Visible = true;
                            animationTimer.Enabled = false;
                            currentState = SplitterState.Expanded;
                            this.Invalidate();
                        }
                    }
                    else
                    {
                        // horizontal splitter
                        if(controlToHide.Height < (controlHeight - animationStep))
                        {
                            if(expandParentForm && parentForm != null && parentForm.WindowState != FormWindowState.Maximized)
                            {
                                parentForm.Height += animationStep;
                            }
                            controlToHide.Height += animationStep;
                        }
                        else
                        {
                            if(expandParentForm && parentForm != null && parentForm.WindowState != FormWindowState.Maximized)
                            {
                                parentForm.Height = parentFormHeight;
                            }
                            controlToHide.Height = controlHeight;
                            controlToHide.Visible = true;
                            animationTimer.Enabled = false;
                            currentState = SplitterState.Expanded;
                            this.Invalidate();
                        }
                    }
                    break;
            }
        }

        #endregion

        #region Paint the control

        // OnPaint is now an override rather than an event in version 1.1
        protected override void OnPaint(PaintEventArgs e)
        {
            // create a Graphics object
            Graphics g = e.Graphics;

            // find the rectangle for the splitter and
// (continued) paint it
            Rectangle r = this.ClientRectangle; // fixed in version 1.1
            g.FillRectangle(new SolidBrush(this.BackColor), r);
            // NOTE(review): the Pen/SolidBrush objects created throughout this method
            // are IDisposable but never disposed, and g is e.Graphics, which is owned
            // by the framework — the g.Dispose() at the end of this method looks
            // wrong. Left untouched here; confirm before changing paint behavior.

            #region Vertical Splitter
            // Check the docking style and create the control rectangle accordingly

            if(this.Dock == DockStyle.Left || this.Dock == DockStyle.Right)
            {
                // create a new rectangle in the vertical center of the splitter for our collapse control button
                rr = new Rectangle(r.X, (int)r.Y + ((r.Height - 115)/2), 8, 115);
                // force the width to 8px so that everything always draws correctly
                this.Width = 8;

                // draw the background color for our control image
                if(hot)
                {
                    g.FillRectangle(new SolidBrush(hotColor), new Rectangle(rr.X + 1, rr.Y, 6, 115));
                }
                else
                {
                    g.FillRectangle(new SolidBrush(this.BackColor), new Rectangle(rr.X + 1, rr.Y, 6, 115));
                }

                // draw the top & bottom lines for our control image
                g.DrawLine(new Pen(SystemColors.ControlDark, 1), rr.X + 1, rr.Y, rr.X + rr.Width - 2, rr.Y);
                g.DrawLine(new Pen(SystemColors.ControlDark, 1), rr.X + 1, rr.Y + rr.Height, rr.X + rr.Width - 2, rr.Y + rr.Height);

                if(this.Enabled)
                {
                    // draw the arrows for our control image
                    // the ArrowPointArray is a point array that defines an arrow shaped polygon
                    g.FillPolygon(new SolidBrush(SystemColors.ControlDarkDark), ArrowPointArray(rr.X + 2, rr.Y + 3));
                    g.FillPolygon(new SolidBrush(SystemColors.ControlDarkDark), ArrowPointArray(rr.X + 2, rr.Y + rr.Height - 9));
                }

                // draw the dots for our control image using a loop
                int x = rr.X + 3;
                int y = rr.Y + 14;

                // Visual Styles added in version 1.1
                switch(visualStyle)
                {
                    case VisualStyles.Mozilla:

                        for(int i=0; i < 30; i++)
                        {
                            // light dot
                            g.DrawLine(new Pen(SystemColors.ControlLightLight), x, y + (i*3), x+1, y + 1 + (i*3));
                            // dark dot
                            g.DrawLine(new Pen(SystemColors.ControlDarkDark), x+1, y + 1 + (i*3), x+2, y + 2 + (i*3));
                            // overdraw the background color as we actually drew 2px diagonal lines, not just dots
                            if(hot)
                            {
                                g.DrawLine(new Pen(hotColor), x+2, y + 1 + (i*3), x+2, y + 2 + (i*3));
                            }
                            else
                            {
                                g.DrawLine(new Pen(this.BackColor), x+2, y + 1 + (i*3), x+2, y + 2 + (i*3));
                            }
                        }
                        break;

                    case VisualStyles.DoubleDots:

                        for(int i=0; i < 30; i++)
                        {
                            // light dot
                            g.DrawRectangle(new Pen(SystemColors.ControlLightLight), x, y + 1 + (i*3), 1, 1 );
                            // dark dot
                            g.DrawRectangle(new Pen(SystemColors.ControlDark), x - 1, y +(i*3), 1, 1 );
                            i++;  // note: advances two rows per iteration (dot pairs)
                            // light dot
                            g.DrawRectangle(new Pen(SystemColors.ControlLightLight), x + 2, y + 1 + (i*3), 1, 1 );
                            // dark dot
                            g.DrawRectangle(new Pen(SystemColors.ControlDark), x + 1, y + (i*3), 1, 1 );
                        }
                        break;

                    case VisualStyles.Win9x:

                        g.DrawLine(new Pen(SystemColors.ControlLightLight), x, y, x + 2, y);
                        g.DrawLine(new Pen(SystemColors.ControlLightLight), x, y, x, y + 90);
                        g.DrawLine(new Pen(SystemColors.ControlDark), x + 2, y, x + 2, y + 90);
                        g.DrawLine(new Pen(SystemColors.ControlDark), x, y + 90, x + 2, y + 90);
                        break;

                    case VisualStyles.XP:

                        for(int i=0; i < 18; i++)
                        {
                            // light dot
                            g.DrawRectangle(new Pen(SystemColors.ControlLight), x, y + (i*5), 2, 2 );
                            // light light dot
                            g.DrawRectangle(new Pen(SystemColors.ControlLightLight), x + 1, y + 1 + (i*5), 1, 1 );
                            // dark dark dot
                            g.DrawRectangle(new Pen(SystemColors.ControlDarkDark), x, y +(i*5), 1, 1 );
                            // dark fill
                            g.DrawLine(new Pen(SystemColors.ControlDark), x, y + (i*5), x, y + (i*5) + 1);
                            g.DrawLine(new Pen(SystemColors.ControlDark), x, y + (i*5), x + 1, y + (i*5));
                        }
                        break;

                    case VisualStyles.Lines:

                        for(int i=0; i < 44; i++)
                        {
                            g.DrawLine(new Pen(SystemColors.ControlDark), x, y + (i*2), x + 2, y + (i*2));
                        }
                        break;
                }

                // Added in version 1.3
                if(this.borderStyle != System.Windows.Forms.Border3DStyle.Flat)
                {
                    // Paint the control border
                    ControlPaint.DrawBorder3D(e.Graphics, this.ClientRectangle, this.borderStyle, Border3DSide.Left);
                    ControlPaint.DrawBorder3D(e.Graphics, this.ClientRectangle, this.borderStyle, Border3DSide.Right);
                }
            }

            #endregion

            // Horizontal Splitter support added in v1.2

            #region Horizontal Splitter

            else if (this.Dock == DockStyle.Top || this.Dock == DockStyle.Bottom)
            {
                // create a new rectangle in the horizontal center of the splitter for our collapse control button
                rr = new Rectangle((int)r.X + ((r.Width - 115)/2), r.Y, 115, 8);
                // force the height to 8px
                this.Height = 8;

                // draw the background color for our control image
                if(hot)
                {
                    g.FillRectangle(new SolidBrush(hotColor), new Rectangle(rr.X, rr.Y + 1, 115, 6));
                }
                else
                {
                    g.FillRectangle(new SolidBrush(this.BackColor), new Rectangle(rr.X, rr.Y + 1, 115, 6));
                }

                // draw the left & right lines for our control image
                g.DrawLine(new Pen(SystemColors.ControlDark, 1), rr.X, rr.Y + 1, rr.X, rr.Y + rr.Height - 2);
                g.DrawLine(new Pen(SystemColors.ControlDark, 1), rr.X + rr.Width, rr.Y + 1, rr.X + rr.Width, rr.Y + rr.Height - 2);

                if(this.Enabled)
                {
                    // draw the arrows for our control image
                    // the ArrowPointArray is a point array that defines an arrow shaped polygon
                    g.FillPolygon(new SolidBrush(SystemColors.ControlDarkDark), ArrowPointArray(rr.X + 3, rr.Y + 2));
                    g.FillPolygon(new SolidBrush(SystemColors.ControlDarkDark), ArrowPointArray(rr.X + rr.Width - 9, rr.Y + 2));
                }

                // draw the dots for our control image using a loop
                int x = rr.X + 14;
                int y = rr.Y + 3;

                // Visual Styles added in version 1.1
                switch(visualStyle)
                {
                    case VisualStyles.Mozilla:

                        for(int i=0; i < 30; i++)
                        {
                            // light dot
                            g.DrawLine(new Pen(SystemColors.ControlLightLight), x + (i*3), y, x + 1 + (i*3), y + 1);
                            // dark dot
                            g.DrawLine(new Pen(SystemColors.ControlDarkDark), x + 1 + (i*3), y + 1, x + 2 + (i*3), y + 2);
                            // overdraw the background color as we actually drew 2px diagonal lines, not just dots
                            if(hot)
                            {
                                g.DrawLine(new Pen(hotColor), x + 1 + (i*3), y + 2, x + 2 + (i*3), y + 2);
                            }
                            else
                            {
                                g.DrawLine(new Pen(this.BackColor), x + 1 + (i*3), y + 2, x + 2 + (i*3), y + 2);
                            }
                        }
                        break;

                    case VisualStyles.DoubleDots:

                        for(int i=0; i < 30; i++)
                        {
                            // light dot
                            g.DrawRectangle(new Pen(SystemColors.ControlLightLight), x + 1 + (i*3), y, 1, 1 );
                            // dark dot
                            g.DrawRectangle(new Pen(SystemColors.ControlDark), x + (i*3), y - 1, 1, 1 );
                            i++;  // note: advances two columns per iteration (dot pairs)
                            // light dot
                            g.DrawRectangle(new Pen(SystemColors.ControlLightLight), x + 1 + (i*3), y + 2, 1, 1 );
                            // dark dot
                            g.DrawRectangle(new Pen(SystemColors.ControlDark), x + (i*3), y + 1, 1, 1 );
                        }
                        break;

                    case VisualStyles.Win9x:

                        g.DrawLine(new Pen(SystemColors.ControlLightLight), x, y, x, y + 2);
                        g.DrawLine(new Pen(SystemColors.ControlLightLight), x, y, x + 88, y);
                        g.DrawLine(new Pen(SystemColors.ControlDark), x, y + 2, x + 88, y + 2);
                        g.DrawLine(new Pen(SystemColors.ControlDark), x + 88, y, x + 88, y + 2);
                        break;

                    case VisualStyles.XP:

                        for(int i=0; i < 18; i++)
                        {
                            // light dot
                            g.DrawRectangle(new Pen(SystemColors.ControlLight), x + (i*5), y, 2, 2 );
                            // light light dot
                            g.DrawRectangle(new Pen(SystemColors.ControlLightLight), x + 1 + (i*5), y + 1, 1, 1 );
                            // dark dark dot
                            g.DrawRectangle(new Pen(SystemColors.ControlDarkDark), x +(i*5), y, 1, 1 );
                            // dark fill
                            g.DrawLine(new Pen(SystemColors.ControlDark), x + (i*5), y, x + (i*5) + 1, y);
                            g.DrawLine(new Pen(SystemColors.ControlDark), x + (i*5), y, x + (i*5), y + 1);
                        }
                        break;

                    case VisualStyles.Lines:

                        for(int i=0; i < 44; i++)
                        {
                            g.DrawLine(new Pen(SystemColors.ControlDark), x + (i*2), y, x + (i*2), y + 2);
                        }
                        break;
                }

                // Added in version 1.3
                if(this.borderStyle != System.Windows.Forms.Border3DStyle.Flat)
                {
                    // Paint the control border
                    ControlPaint.DrawBorder3D(e.Graphics, this.ClientRectangle, this.borderStyle, Border3DSide.Top);
                    ControlPaint.DrawBorder3D(e.Graphics, this.ClientRectangle, this.borderStyle, Border3DSide.Bottom);
                }
            }

            #endregion

            else
            {
                throw new Exception("The Collapsible Splitter control cannot have the Filled or None Dockstyle property");
            }

            // dispose the Graphics object
            g.Dispose();
        }

        #endregion

        #region Arrow Polygon Array

        // This creates a point array to draw a arrow-like polygon
        private Point[] ArrowPointArray(int x, int y)
        {
            Point[] point = new Point[3];

            if(controlToHide != null)
            {
                // decide which direction the arrow will point
                if (
                    (this.Dock == DockStyle.Right && controlToHide.Visible)
                    || (this.Dock == DockStyle.Left && !controlToHide.Visible)
                    )
                {
                    // right arrow
                    point[0] = new Point(x,y);
                    point[1] = new Point(x + 3, y + 3);
                    point[2] = new Point(x, y + 6);
                }
                else if (
                    (this.Dock == DockStyle.Right && !controlToHide.Visible)
                    || (this.Dock == DockStyle.Left && controlToHide.Visible)
                    )
                {
                    // left arrow
                    point[0] = new Point(x + 3 ,y);
                    point[1] = new Point(x, y + 3);
                    point[2] = new Point(x + 3, y + 6);
                }

                // Up/Down arrows added in v1.2

                else if (
                    (this.Dock == DockStyle.Top && controlToHide.Visible)
                    || (this.Dock == DockStyle.Bottom && !controlToHide.Visible)
                    )
                {
                    // up arrow
                    point[0] = new Point(x + 3, y);
                    point[1] = new Point(x + 6, y + 4);
                    point[2] = new Point(x, y + 4);
                }
                else if (
                    (this.Dock == DockStyle.Top && !controlToHide.Visible)
                    || (this.Dock == DockStyle.Bottom && controlToHide.Visible)
                    )
                {
                    // down arrow
                    point[0] = new Point(x,y);
                    point[1] = new Point(x + 6, y);
                    point[2] = new Point(x + 3, y + 3);
                }
            }

            return point;
        }

        #endregion

        #region Color Calculator

        // this method was borrowed from the RichUI Control library by Sajith M
        // Alpha-blends 'front' over 'back' (alpha in 0..255) into a solid color.
        private static Color CalculateColor(Color front, Color back, int alpha)
        {
            // solid color obtained as a result of alpha-blending
            Color frontColor = Color.FromArgb(255, front);
            Color backColor = Color.FromArgb(255, back);

            float frontRed = frontColor.R;
            float frontGreen = frontColor.G;
            float frontBlue = frontColor.B;
            float backRed = backColor.R;
            float backGreen = backColor.G;
            float backBlue = backColor.B;

            float fRed = frontRed*alpha/255 + backRed*((float)(255-alpha)/255);
            byte newRed = (byte)fRed;
            float fGreen = frontGreen*alpha/255 + backGreen*((float)(255-alpha)/255);
            byte newGreen = (byte)fGreen;
            float fBlue = frontBlue*alpha/255 + backBlue*((float)(255-alpha)/255);
            byte newBlue = (byte)fBlue;

            return Color.FromArgb(255, newRed, newGreen, newBlue);
        }

        #endregion

        #endregion
    }

    /*
    /// <summary>
    /// A simple designer class for the CollapsibleSplitter control to remove
    /// unwanted properties at design time.
    /// </summary>
    public class CollapsibleSplitterDesigner : System.Windows.Forms.Design.ControlDesigner
    {
        public CollapsibleSplitterDesigner()
        {
        }

        protected override void PreFilterProperties(System.Collections.IDictionary properties)
        {
            properties.Remove("IsCollapsed");
            properties.Remove("BorderStyle");
            properties.Remove("Size");
        }
    }*/
}
// Copyright (c) Microsoft. All rights reserved.
// Licensed under the MIT license. See LICENSE file in the project root for full license information.

using Microsoft.Win32.SafeHandles;
using System.Collections;
using System.ComponentModel;
using System.Diagnostics;
using System.Globalization;
using System.Runtime.InteropServices;
using System.Security;
using System.Security.Authentication.ExtendedProtection;
using System.Security.Cryptography;
using System.Security.Cryptography.X509Certificates;
using System.Security.Principal;
using System.Security.Authentication;

namespace System.Net.Security
{
    //
    // SecureChannel - a wrapper on SSPI based functionality.
    // Provides an additional abstraction layer over SSPI for SslStream.
    //
    internal class SecureChannel
    {
        // When reading a frame from the wire first read this many bytes for the header.
        internal const int ReadHeaderSize = 5;

        private SafeFreeCredentials _credentialsHandle;
        private SafeDeleteContext _securityContext;

        private readonly string _destination;   // target host; assigned the same hostname value as _hostName
        private readonly string _hostName;

        private readonly bool _serverMode;
        private readonly bool _remoteCertRequired;
        private readonly SslProtocols _sslProtocols;
        private readonly EncryptionPolicy _encryptionPolicy;
        private SslConnectionInfo _connectionInfo;

        private X509Certificate _serverCertificate;
        private X509Certificate _selectedClientCertificate;
        private bool _isRemoteCertificateAvailable;

        private readonly X509CertificateCollection _clientCertificates;
        private LocalCertSelectionCallback _certSelectionDelegate;

        // These are the MAX encrypt buffer output sizes, not the actual sizes.
        private int _headerSize = 5;    //ATTN must be set to at least 5 by default
        private int _trailerSize = 16;
        private int _maxDataSize = 16354;

        private bool _checkCertRevocation;
        private bool _checkCertName;

        private bool _refreshCredentialNeeded;  // forces credential re-acquisition on next handshake

        // Captures all handshake configuration; no I/O happens here beyond
        // logging and the SslStreamPal package sanity check.
        internal SecureChannel(string hostname, bool serverMode, SslProtocols sslProtocols, X509Certificate serverCertificate, X509CertificateCollection clientCertificates, bool remoteCertRequired, bool checkCertName, bool checkCertRevocationStatus, EncryptionPolicy encryptionPolicy, LocalCertSelectionCallback certSelectionDelegate)
        {
            GlobalLog.Enter("SecureChannel#" + Logging.HashString(this) + "::.ctor", "hostname:" + hostname + " #clientCertificates=" + ((clientCertificates == null) ? "0" : clientCertificates.Count.ToString(NumberFormatInfo.InvariantInfo)));
            if (Logging.On)
            {
                Logging.PrintInfo(Logging.Web, this, ".ctor", "hostname=" + hostname + ", #clientCertificates=" + ((clientCertificates == null) ? "0" : clientCertificates.Count.ToString(NumberFormatInfo.InvariantInfo)) + ", encryptionPolicy=" + encryptionPolicy);
            }

            SslStreamPal.VerifyPackageInfo();

            _destination = hostname;

            GlobalLog.Assert(hostname != null, "SecureChannel#{0}::.ctor()|hostname == null", Logging.HashString(this));
            _hostName = hostname;
            _serverMode = serverMode;

            _sslProtocols = sslProtocols;

            _serverCertificate = serverCertificate;
            _clientCertificates = clientCertificates;
            _remoteCertRequired = remoteCertRequired;
            _securityContext = null;
            _checkCertRevocation = checkCertRevocationStatus;
            _checkCertName = checkCertName;
            _certSelectionDelegate = certSelectionDelegate;
            _refreshCredentialNeeded = true;
            _encryptionPolicy = encryptionPolicy;
            GlobalLog.Leave("SecureChannel#" + Logging.HashString(this) + "::.ctor");
        }

        //
        // SecureChannel properties
        //
        // LocalServerCertificate - local certificate for server mode channel
        // LocalClientCertificate - selected certificated used in the client channel mode otherwise null
        // IsRemoteCertificateAvailable - true if the remote side has provided a certificate
        // HeaderSize - Header & trailer sizes used in the TLS stream
        // TrailerSize -
        //
        internal X509Certificate LocalServerCertificate
        {
            get
            {
                return _serverCertificate;
            }
        }

        internal X509Certificate LocalClientCertificate
        {
            get
            {
                return _selectedClientCertificate;
            }
        }

        internal bool IsRemoteCertificateAvailable
        {
            get
            {
                return _isRemoteCertificateAvailable;
            }
        }

        // Queries the current security context for a channel binding of the
        // requested kind; null when no context has been established yet.
        internal ChannelBinding GetChannelBinding(ChannelBindingKind kind)
        {
            GlobalLog.Enter("SecureChannel#" + Logging.HashString(this) + "::GetChannelBindingToken", kind.ToString());

            ChannelBinding result = null;
            if (_securityContext != null)
            {
                result = SslStreamPal.QueryContextChannelBinding(_securityContext, kind);
            }

            GlobalLog.Leave("SecureChannel#" + Logging.HashString(this) + "::GetChannelBindingToken", Logging.HashString(result));
            return result;
        }

        internal bool CheckCertRevocationStatus
        {
            get
            {
                return _checkCertRevocation;
            }
        }

        internal X509CertificateCollection ClientCertificates
        {
            get
            {
                return _clientCertificates;
            }
        }

        internal int HeaderSize
        {
            get
            {
                return _headerSize;
            }
        }

        internal int MaxDataSize
        {
            get
            {
                return _maxDataSize;
            }
        }

        internal SslConnectionInfo ConnectionInfo
        {
            get
            {
                return _connectionInfo;
            }
        }

        internal bool IsValidContext
        {
            get
            {
                return !(_securityContext == null || _securityContext.IsInvalid);
            }
        }

        internal bool IsServer
        {
            get
            {
                return _serverMode;
            }
        }

        internal bool RemoteCertRequired
        {
            get
            {
                return _remoteCertRequired;
            }
        }

        internal void SetRefreshCredentialNeeded()
        {
            _refreshCredentialNeeded = true;
        }

        // Disposes the security context and credential handle, if acquired.
        internal void Close()
        {
            if (_securityContext != null)
            {
                _securityContext.Dispose();
            }

            if (_credentialsHandle != null)
            {
                _credentialsHandle.Dispose();
            }
        }

        //
        // SECURITY: we open a private key container on behalf of the caller
        // and we require the caller to have permission associated with that operation.
//
        // Resolves the given certificate to an X509Certificate2 for which we hold
        // the private key, searching the MY user/machine stores by thumbprint when
        // the certificate itself does not carry one. Returns null when no private
        // key can be located.
        private X509Certificate2 EnsurePrivateKey(X509Certificate certificate)
        {
            if (certificate == null)
            {
                return null;
            }

            if (Logging.On)
            {
                Logging.PrintInfo(Logging.Web, this, SR.Format(SR.net_log_locating_private_key_for_certificate, certificate.ToString(true)));
            }

            try
            {
                string certHash = null;

                // Protecting from X509Certificate2 derived classes.
                X509Certificate2 certEx = MakeEx(certificate);

                // BUG FIX: the original read certEx.Thumbprint BEFORE testing certEx
                // for null, so the null check below it was dead code — MakeEx returns
                // null for handle-less certificates or on conversion failure, which
                // crashed with NullReferenceException. Without an X509Certificate2 we
                // have no thumbprint to search the stores with, so give up cleanly.
                if (certEx == null)
                {
                    return null;
                }

                certHash = certEx.Thumbprint;

                if (certEx.HasPrivateKey)
                {
                    if (Logging.On)
                    {
                        Logging.PrintInfo(Logging.Web, this, SR.net_log_cert_is_of_type_2);
                    }

                    return certEx;
                }

                // dispose only if MakeEx created a new wrapper object
                if ((object)certificate != (object)certEx)
                {
                    certEx.Dispose();
                }

                X509Certificate2Collection collectionEx;

                // ELSE Try the MY user and machine stores for private key check.
                // For server side mode MY machine store takes priority.
                X509Store store = CertificateValidationPal.EnsureStoreOpened(_serverMode);
                if (store != null)
                {
                    collectionEx = store.Certificates.Find(X509FindType.FindByThumbprint, certHash, false);
                    if (collectionEx.Count > 0 && collectionEx[0].HasPrivateKey)
                    {
                        if (Logging.On)
                        {
                            Logging.PrintInfo(Logging.Web, this, SR.Format(SR.net_log_found_cert_in_store, (_serverMode ? "LocalMachine" : "CurrentUser")));
                        }

                        return collectionEx[0];
                    }
                }

                store = CertificateValidationPal.EnsureStoreOpened(!_serverMode);
                if (store != null)
                {
                    collectionEx = store.Certificates.Find(X509FindType.FindByThumbprint, certHash, false);
                    if (collectionEx.Count > 0 && collectionEx[0].HasPrivateKey)
                    {
                        if (Logging.On)
                        {
                            Logging.PrintInfo(Logging.Web, this, SR.Format(SR.net_log_found_cert_in_store, (_serverMode ? "CurrentUser" : "LocalMachine")));
                        }

                        return collectionEx[0];
                    }
                }
            }
            catch (CryptographicException)
            {
            }

            if (Logging.On)
            {
                Logging.PrintInfo(Logging.Web, this, SR.net_log_did_not_find_cert_in_store);
            }

            return null;
        }

        // Normalizes any X509Certificate (including derived classes) to a plain
        // X509Certificate2; returns null when the certificate has no handle or
        // conversion fails.
        private static X509Certificate2 MakeEx(X509Certificate certificate)
        {
            Debug.Assert(certificate != null, "certificate != null");

            if (certificate.GetType() == typeof(X509Certificate2))
            {
                return (X509Certificate2)certificate;
            }

            X509Certificate2 certificateEx = null;
            try
            {
                if (certificate.Handle != IntPtr.Zero)
                {
                    certificateEx = new X509Certificate2(certificate.Handle);
                }
            }
            catch (SecurityException) { }
            catch (CryptographicException) { }

            return certificateEx;
        }

        //
        // Get certificate_authorities list, according to RFC 5246, Section 7.4.4.
        // Used only by client SSL code, never returns null.
        //
        private string[] GetRequestCertificateAuthorities()
        {
            string[] issuers = Array.Empty<string>();
            if (IsValidContext)
            {
                issuers = CertificateValidationPal.GetRequestCertificateAuthorities(_securityContext);
            }

            return issuers;
        }

        /*++
            AcquireCredentials - Attempts to find Client Credential
            Information, that can be sent to the server.  In our case,
            this is only Client Certificates, that we have Credential Info.

            How it works:
                case 0: Cert Selection delegate is present
                        Always use its result as the client cert answer.
                        Try to use cached credential handle whenever feasible.
                        Do not use cached anonymous creds if the delegate has returned null
                        and the collection is not empty (allow responding with the cert later).

                case 1: Certs collection is empty
                        Always use the same statically acquired anonymous SSL Credential

                case 2: Before our Connection with the Server
                        If we have a cached credential handle keyed by first X509Certificate
                        **content** in the passed collection, then we use that cached
                        credential and hoping to restart a session.

                        Otherwise create a new anonymous (allow responding with the cert later).

                case 3: After our Connection with the Server (i.e.
during handshake or re-handshake) The server has requested that we send it a Certificate then we Enumerate a list of server sent Issuers trying to match against our list of Certificates, the first match is sent to the server. Once we got a cert we again try to match cached credential handle if possible. This will not restart a session but helps minimizing the number of handles we create. In the case of an error getting a Certificate or checking its private Key we fall back to the behavior of having no certs, case 1. Returns: True if cached creds were used, false otherwise. --*/ private bool AcquireClientCredentials(ref byte[] thumbPrint) { GlobalLog.Enter("SecureChannel#" + Logging.HashString(this) + "::AcquireClientCredentials"); // Acquire possible Client Certificate information and set it on the handle. X509Certificate clientCertificate = null; // This is a candidate that can come from the user callback or be guessed when targeting a session restart. ArrayList filteredCerts = new ArrayList(); // This is an intermediate client certs collection that try to use if no selectedCert is available yet. string[] issuers = null; // This is a list of issuers sent by the server, only valid is we do know what the server cert is. bool sessionRestartAttempt = false; // If true and no cached creds we will use anonymous creds. 
if (_certSelectionDelegate != null) { issuers = GetRequestCertificateAuthorities(); GlobalLog.Print("SecureChannel#" + Logging.HashString(this) + "::AcquireClientCredentials() calling CertificateSelectionCallback"); X509Certificate2 remoteCert = null; try { X509Certificate2Collection dummyCollection; remoteCert = CertificateValidationPal.GetRemoteCertificate(_securityContext, out dummyCollection); clientCertificate = _certSelectionDelegate(_hostName, ClientCertificates, remoteCert, issuers); } finally { if (remoteCert != null) { remoteCert.Dispose(); } } if (clientCertificate != null) { if (_credentialsHandle == null) { sessionRestartAttempt = true; } filteredCerts.Add(clientCertificate); if (Logging.On) { Logging.PrintInfo(Logging.Web, this, SR.net_log_got_certificate_from_delegate); } } else { if (ClientCertificates.Count == 0) { if (Logging.On) { Logging.PrintInfo(Logging.Web, this, SR.net_log_no_delegate_and_have_no_client_cert); } sessionRestartAttempt = true; } else { if (Logging.On) { Logging.PrintInfo(Logging.Web, this, SR.net_log_no_delegate_but_have_client_cert); } } } } else if (_credentialsHandle == null && _clientCertificates != null && _clientCertificates.Count > 0) { // This is where we attempt to restart a session by picking the FIRST cert from the collection. // Otherwise it is either server sending a client cert request or the session is renegotiated. clientCertificate = ClientCertificates[0]; sessionRestartAttempt = true; if (clientCertificate != null) { filteredCerts.Add(clientCertificate); } if (Logging.On) { Logging.PrintInfo(Logging.Web, this, SR.Format(SR.net_log_attempting_restart_using_cert, (clientCertificate == null ? "null" : clientCertificate.ToString(true)))); } } else if (_clientCertificates != null && _clientCertificates.Count > 0) { // // This should be a server request for the client cert sent over currently anonymous sessions. 
// issuers = GetRequestCertificateAuthorities(); if (Logging.On) { if (issuers == null || issuers.Length == 0) { Logging.PrintInfo(Logging.Web, this, SR.net_log_no_issuers_try_all_certs); } else { Logging.PrintInfo(Logging.Web, this, SR.Format(SR.net_log_server_issuers_look_for_matching_certs, issuers.Length)); } } for (int i = 0; i < _clientCertificates.Count; ++i) { // // Make sure we add only if the cert matches one of the issuers. // If no issuers were sent and then try all client certs starting with the first one. // if (issuers != null && issuers.Length != 0) { X509Certificate2 certificateEx = null; X509Chain chain = null; try { certificateEx = MakeEx(_clientCertificates[i]); if (certificateEx == null) { continue; } GlobalLog.Print("SecureChannel#" + Logging.HashString(this) + "::AcquireClientCredentials() root cert:" + certificateEx.Issuer); chain = new X509Chain(); chain.ChainPolicy.RevocationMode = X509RevocationMode.NoCheck; chain.ChainPolicy.VerificationFlags = X509VerificationFlags.IgnoreInvalidName; chain.Build(certificateEx); bool found = false; // // We ignore any errors happened with chain. 
// if (chain.ChainElements.Count > 0) { for (int ii = 0; ii < chain.ChainElements.Count; ++ii) { string issuer = chain.ChainElements[ii].Certificate.Issuer; found = Array.IndexOf(issuers, issuer) != -1; if (found) { GlobalLog.Print("SecureChannel#" + Logging.HashString(this) + "::AcquireClientCredentials() matched:" + issuer); break; } GlobalLog.Print("SecureChannel#" + Logging.HashString(this) + "::AcquireClientCredentials() no match:" + issuer); } } if (!found) { continue; } } finally { if (chain != null) { chain.Dispose(); } if (certificateEx != null && (object)certificateEx != (object)_clientCertificates[i]) { certificateEx.Dispose(); } } } if (Logging.On) { Logging.PrintInfo(Logging.Web, this, SR.Format(SR.net_log_selected_cert, _clientCertificates[i].ToString(true))); } filteredCerts.Add(_clientCertificates[i]); } } bool cachedCred = false; // This is a return result from this method. X509Certificate2 selectedCert = null; // This is a final selected cert (ensured that it does have private key with it). clientCertificate = null; if (Logging.On) { Logging.PrintInfo(Logging.Web, this, SR.Format(SR.net_log_n_certs_after_filtering, filteredCerts.Count)); if (filteredCerts.Count != 0) { Logging.PrintInfo(Logging.Web, this, SR.net_log_finding_matching_certs); } } // // ATTN: When the client cert was returned by the user callback OR it was guessed AND it has no private key, // THEN anonymous (no client cert) credential will be used. // // SECURITY: Accessing X509 cert Credential is disabled for semitrust. // We no longer need to demand for unmanaged code permissions. // EnsurePrivateKey should do the right demand for us. 
for (int i = 0; i < filteredCerts.Count; ++i) { clientCertificate = filteredCerts[i] as X509Certificate; if ((selectedCert = EnsurePrivateKey(clientCertificate)) != null) { break; } clientCertificate = null; selectedCert = null; } GlobalLog.Assert(((object)clientCertificate == (object)selectedCert) || clientCertificate.Equals(selectedCert), "AcquireClientCredentials()|'selectedCert' does not match 'clientCertificate'."); GlobalLog.Print("SecureChannel#" + Logging.HashString(this) + "::AcquireClientCredentials() Selected Cert = " + (selectedCert == null ? "null" : selectedCert.Subject)); try { // Try to locate cached creds first. // // SECURITY: selectedCert ref if not null is a safe object that does not depend on possible **user** inherited X509Certificate type. // byte[] guessedThumbPrint = selectedCert == null ? null : selectedCert.GetCertHash(); SafeFreeCredentials cachedCredentialHandle = SslSessionsCache.TryCachedCredential(guessedThumbPrint, _sslProtocols, _serverMode, _encryptionPolicy); // We can probably do some optimization here. If the selectedCert is returned by the delegate // we can always go ahead and use the certificate to create our credential // (instead of going anonymous as we do here). if (sessionRestartAttempt && cachedCredentialHandle == null && selectedCert != null) { GlobalLog.Print("SecureChannel#" + Logging.HashString(this) + "::AcquireClientCredentials() Reset to anonymous session."); // IIS does not renegotiate a restarted session if client cert is needed. // So we don't want to reuse **anonymous** cached credential for a new SSL connection if the client has passed some certificate. // The following block happens if client did specify a certificate but no cached creds were found in the cache. // Since we don't restart a session the server side can still challenge for a client cert. 
if ((object)clientCertificate != (object)selectedCert) { selectedCert.Dispose(); } guessedThumbPrint = null; selectedCert = null; clientCertificate = null; } if (cachedCredentialHandle != null) { if (Logging.On) { Logging.PrintInfo(Logging.Web, SR.net_log_using_cached_credential); } _credentialsHandle = cachedCredentialHandle; _selectedClientCertificate = clientCertificate; cachedCred = true; } else { _credentialsHandle = SslStreamPal.AcquireCredentialsHandle(selectedCert, _sslProtocols, _encryptionPolicy, _serverMode); thumbPrint = guessedThumbPrint; // Delay until here in case something above threw. _selectedClientCertificate = clientCertificate; } } finally { // An extra cert could have been created, dispose it now. if (selectedCert != null && (object)clientCertificate != (object)selectedCert) { selectedCert.Dispose(); } } GlobalLog.Leave("SecureChannel#" + Logging.HashString(this) + "::AcquireClientCredentials, cachedCreds = " + cachedCred.ToString(), Logging.ObjectToString(_credentialsHandle)); return cachedCred; } // // Acquire Server Side Certificate information and set it on the class. // private bool AcquireServerCredentials(ref byte[] thumbPrint) { GlobalLog.Enter("SecureChannel#" + Logging.HashString(this) + "::AcquireServerCredentials"); X509Certificate localCertificate = null; bool cachedCred = false; if (_certSelectionDelegate != null) { X509CertificateCollection tempCollection = new X509CertificateCollection(); tempCollection.Add(_serverCertificate); localCertificate = _certSelectionDelegate(string.Empty, tempCollection, null, Array.Empty<string>()); GlobalLog.Print("SecureChannel#" + Logging.HashString(this) + "::AcquireServerCredentials() Use delegate selected Cert"); } else { localCertificate = _serverCertificate; } if (localCertificate == null) { throw new NotSupportedException(SR.net_ssl_io_no_server_cert); } // SECURITY: Accessing X509 cert Credential is disabled for semitrust. // We no longer need to demand for unmanaged code permissions. 
// EnsurePrivateKey should do the right demand for us. X509Certificate2 selectedCert = EnsurePrivateKey(localCertificate); if (selectedCert == null) { throw new NotSupportedException(SR.net_ssl_io_no_server_cert); } GlobalLog.Assert(localCertificate.Equals(selectedCert), "AcquireServerCredentials()|'selectedCert' does not match 'localCertificate'."); // // Note selectedCert is a safe ref possibly cloned from the user passed Cert object // byte[] guessedThumbPrint = selectedCert.GetCertHash(); try { SafeFreeCredentials cachedCredentialHandle = SslSessionsCache.TryCachedCredential(guessedThumbPrint, _sslProtocols, _serverMode, _encryptionPolicy); if (cachedCredentialHandle != null) { _credentialsHandle = cachedCredentialHandle; _serverCertificate = localCertificate; cachedCred = true; } else { _credentialsHandle = SslStreamPal.AcquireCredentialsHandle(selectedCert, _sslProtocols, _encryptionPolicy, _serverMode); thumbPrint = guessedThumbPrint; _serverCertificate = localCertificate; } } finally { // An extra cert could have been created, dispose it now. 
if ((object)localCertificate != (object)selectedCert) { selectedCert.Dispose(); } } GlobalLog.Leave("SecureChannel#" + Logging.HashString(this) + "::AcquireServerCredentials, cachedCreds = " + cachedCred.ToString(), Logging.ObjectToString(_credentialsHandle)); return cachedCred; } // internal ProtocolToken NextMessage(byte[] incoming, int offset, int count) { GlobalLog.Enter("SecureChannel#" + Logging.HashString(this) + "::NextMessage"); byte[] nextmsg = null; SecurityStatusPal errorCode = GenerateToken(incoming, offset, count, ref nextmsg); if (!_serverMode && errorCode == SecurityStatusPal.CredentialsNeeded) { GlobalLog.Print("SecureChannel#" + Logging.HashString(this) + "::NextMessage() returned SecurityStatusPal.CredentialsNeeded"); SetRefreshCredentialNeeded(); errorCode = GenerateToken(incoming, offset, count, ref nextmsg); } ProtocolToken token = new ProtocolToken(nextmsg, errorCode); GlobalLog.Leave("SecureChannel#" + Logging.HashString(this) + "::NextMessage", token.ToString()); return token; } /*++ GenerateToken - Called after each successive state in the Client - Server handshake. This function generates a set of bytes that will be sent next to the server. The server responds, each response, is pass then into this function, again, and the cycle repeats until successful connection, or failure. Input: input - bytes from the wire output - ref to byte [], what we will send to the server in response Return: errorCode - an SSPI error code --*/ private SecurityStatusPal GenerateToken(byte[] input, int offset, int count, ref byte[] output) { #if TRACE_VERBOSE GlobalLog.Enter("SecureChannel#" + Logging.HashString(this) + "::GenerateToken, _refreshCredentialNeeded = " + _refreshCredentialNeeded); #endif if (offset < 0 || offset > (input == null ? 
0 : input.Length)) { GlobalLog.Assert(false, "SecureChannel#" + Logging.HashString(this) + "::GenerateToken", "Argument 'offset' out of range."); throw new ArgumentOutOfRangeException("offset"); } if (count < 0 || count > (input == null ? 0 : input.Length - offset)) { GlobalLog.Assert(false, "SecureChannel#" + Logging.HashString(this) + "::GenerateToken", "Argument 'count' out of range."); throw new ArgumentOutOfRangeException("count"); } SecurityBuffer incomingSecurity = null; SecurityBuffer[] incomingSecurityBuffers = null; if (input != null) { incomingSecurity = new SecurityBuffer(input, offset, count, SecurityBufferType.Token); incomingSecurityBuffers = new SecurityBuffer[] { incomingSecurity, new SecurityBuffer(null, 0, 0, SecurityBufferType.Empty) }; } SecurityBuffer outgoingSecurity = new SecurityBuffer(null, SecurityBufferType.Token); SecurityStatusPal errorCode = 0; bool cachedCreds = false; byte[] thumbPrint = null; // // Looping through ASC or ISC with potentially cached credential that could have been // already disposed from a different thread before ISC or ASC dir increment a cred ref count. // try { do { thumbPrint = null; if (_refreshCredentialNeeded) { cachedCreds = _serverMode ? 
AcquireServerCredentials(ref thumbPrint) : AcquireClientCredentials(ref thumbPrint); } if (_serverMode) { errorCode = SslStreamPal.AcceptSecurityContext( ref _credentialsHandle, ref _securityContext, incomingSecurity, outgoingSecurity, _remoteCertRequired ); } else { if (incomingSecurity == null) { errorCode = SslStreamPal.InitializeSecurityContext( ref _credentialsHandle, ref _securityContext, _destination, incomingSecurity, outgoingSecurity ); } else { errorCode = SslStreamPal.InitializeSecurityContext( _credentialsHandle, ref _securityContext, _destination, incomingSecurityBuffers, outgoingSecurity ); } } } while (cachedCreds && _credentialsHandle == null); } finally { if (_refreshCredentialNeeded) { _refreshCredentialNeeded = false; // // Assuming the ISC or ASC has referenced the credential, // we want to call dispose so to decrement the effective ref count. // if (_credentialsHandle != null) { _credentialsHandle.Dispose(); } // // This call may bump up the credential reference count further. // Note that thumbPrint is retrieved from a safe cert object that was possible cloned from the user passed cert. 
// if (!cachedCreds && _securityContext != null && !_securityContext.IsInvalid && _credentialsHandle != null && !_credentialsHandle.IsInvalid) { SslSessionsCache.CacheCredential(_credentialsHandle, thumbPrint, _sslProtocols, _serverMode, _encryptionPolicy); } } } output = outgoingSecurity.token; #if TRACE_VERBOSE GlobalLog.Leave("SecureChannel#" + Logging.HashString(this) + "::GenerateToken()", Interop.MapSecurityStatus((uint)errorCode)); #endif return (SecurityStatusPal)errorCode; } /*++ ProcessHandshakeSuccess - Called on successful completion of Handshake - used to set header/trailer sizes for encryption use Fills in the information about established protocol --*/ internal void ProcessHandshakeSuccess() { GlobalLog.Enter("SecureChannel#" + Logging.HashString(this) + "::ProcessHandshakeSuccess"); StreamSizes streamSizes; SslStreamPal.QueryContextStreamSizes(_securityContext, out streamSizes); if (streamSizes != null) { try { _headerSize = streamSizes.header; _trailerSize = streamSizes.trailer; _maxDataSize = checked(streamSizes.maximumMessage - (_headerSize + _trailerSize)); } catch (Exception e) { if (!ExceptionCheck.IsFatal(e)) { GlobalLog.Assert(false, "SecureChannel#" + Logging.HashString(this) + "::ProcessHandshakeSuccess", "StreamSizes out of range."); } throw; } } SslStreamPal.QueryContextConnectionInfo(_securityContext, out _connectionInfo); GlobalLog.Leave("SecureChannel#" + Logging.HashString(this) + "::ProcessHandshakeSuccess"); } /*++ Encrypt - Encrypts our bytes before we send them over the wire PERF: make more efficient, this does an extra copy when the offset is non-zero. 
Input: buffer - bytes for sending offset - size - output - Encrypted bytes --*/ internal SecurityStatusPal Encrypt(byte[] buffer, int offset, int size, ref byte[] output, out int resultSize) { GlobalLog.Enter("SecureChannel#" + Logging.HashString(this) + "::Encrypt"); GlobalLog.Print("SecureChannel#" + Logging.HashString(this) + "::Encrypt() - offset: " + offset.ToString() + " size: " + size.ToString() + " buffersize: " + buffer.Length.ToString()); GlobalLog.Print("SecureChannel#" + Logging.HashString(this) + "::Encrypt() buffer:"); GlobalLog.Dump(buffer, Math.Min(buffer.Length, 128)); byte[] writeBuffer; try { if (offset < 0 || offset > (buffer == null ? 0 : buffer.Length)) { throw new ArgumentOutOfRangeException("offset"); } if (size < 0 || size > (buffer == null ? 0 : buffer.Length - offset)) { throw new ArgumentOutOfRangeException("size"); } resultSize = 0; int bufferSizeNeeded = checked(size + _headerSize + _trailerSize); if (output != null && bufferSizeNeeded <= output.Length) { writeBuffer = output; } else { writeBuffer = new byte[bufferSizeNeeded]; } Buffer.BlockCopy(buffer, offset, writeBuffer, _headerSize, size); } catch (Exception e) { if (!ExceptionCheck.IsFatal(e)) { GlobalLog.Assert(false, "SecureChannel#" + Logging.HashString(this) + "::Encrypt", "Arguments out of range."); } throw; } SecurityStatusPal secStatus = SslStreamPal.EncryptMessage(_securityContext, writeBuffer, size, _headerSize, _trailerSize, out resultSize); if (secStatus != SecurityStatusPal.OK) { GlobalLog.Leave("SecureChannel#" + Logging.HashString(this) + "::Encrypt ERROR", secStatus.ToString("x")); } else { output = writeBuffer; GlobalLog.Leave("SecureChannel#" + Logging.HashString(this) + "::Encrypt OK", "data size:" + resultSize.ToString()); } return secStatus; } internal SecurityStatusPal Decrypt(byte[] payload, ref int offset, ref int count) { GlobalLog.Print("SecureChannel#" + Logging.HashString(this) + "::Decrypt() - offset: " + offset.ToString() + " size: " + count.ToString() 
+ " buffersize: " + payload.Length.ToString()); if (offset < 0 || offset > (payload == null ? 0 : payload.Length)) { GlobalLog.Assert(false, "SecureChannel#" + Logging.HashString(this) + "::Encrypt", "Argument 'offset' out of range."); throw new ArgumentOutOfRangeException("offset"); } if (count < 0 || count > (payload == null ? 0 : payload.Length - offset)) { GlobalLog.Assert(false, "SecureChannel#" + Logging.HashString(this) + "::Encrypt", "Argument 'count' out of range."); throw new ArgumentOutOfRangeException("count"); } SecurityStatusPal secStatus = SslStreamPal.DecryptMessage(_securityContext, payload, ref offset, ref count); return secStatus; } /*++ VerifyRemoteCertificate - Validates the content of a Remote Certificate checkCRL if true, checks the certificate revocation list for validity. checkCertName, if true checks the CN field of the certificate --*/ //This method validates a remote certificate. //SECURITY: The scenario is allowed in semitrust StorePermission is asserted for Chain.Build // A user callback has unique signature so it is safe to call it under permission assert. // internal bool VerifyRemoteCertificate(RemoteCertValidationCallback remoteCertValidationCallback) { GlobalLog.Enter("SecureChannel#" + Logging.HashString(this) + "::VerifyRemoteCertificate"); SslPolicyErrors sslPolicyErrors = SslPolicyErrors.None; // We don't catch exceptions in this method, so it's safe for "accepted" be initialized with true. 
bool success = false; X509Chain chain = null; X509Certificate2 remoteCertificateEx = null; try { X509Certificate2Collection remoteCertificateStore; remoteCertificateEx = CertificateValidationPal.GetRemoteCertificate(_securityContext, out remoteCertificateStore); _isRemoteCertificateAvailable = remoteCertificateEx != null; if (remoteCertificateEx == null) { GlobalLog.Leave("SecureChannel#" + Logging.HashString(this) + "::VerifyRemoteCertificate (no remote cert)", (!_remoteCertRequired).ToString()); sslPolicyErrors |= SslPolicyErrors.RemoteCertificateNotAvailable; } else { chain = new X509Chain(); chain.ChainPolicy.RevocationMode = _checkCertRevocation ? X509RevocationMode.Online : X509RevocationMode.NoCheck; chain.ChainPolicy.RevocationFlag = X509RevocationFlag.ExcludeRoot; if (remoteCertificateStore != null) { chain.ChainPolicy.ExtraStore.AddRange(remoteCertificateStore); } // Don't call chain.Build here in the common code, because the Windows version // is potentially going to check for GetLastWin32Error, and that call needs to be // guaranteed to be right after the call to chain.Build. 
sslPolicyErrors |= CertificateValidationPal.VerifyCertificateProperties( chain, remoteCertificateEx, _checkCertName, _serverMode, _hostName); } if (remoteCertValidationCallback != null) { success = remoteCertValidationCallback(_hostName, remoteCertificateEx, chain, sslPolicyErrors); } else { if (sslPolicyErrors == SslPolicyErrors.RemoteCertificateNotAvailable && !_remoteCertRequired) { success = true; } else { success = (sslPolicyErrors == SslPolicyErrors.None); } } if (Logging.On) { if (sslPolicyErrors != SslPolicyErrors.None) { Logging.PrintInfo(Logging.Web, this, SR.net_log_remote_cert_has_errors); if ((sslPolicyErrors & SslPolicyErrors.RemoteCertificateNotAvailable) != 0) { Logging.PrintInfo(Logging.Web, this, "\t" + SR.net_log_remote_cert_not_available); } if ((sslPolicyErrors & SslPolicyErrors.RemoteCertificateNameMismatch) != 0) { Logging.PrintInfo(Logging.Web, this, "\t" + SR.net_log_remote_cert_name_mismatch); } if ((sslPolicyErrors & SslPolicyErrors.RemoteCertificateChainErrors) != 0) { foreach (X509ChainStatus chainStatus in chain.ChainStatus) { Logging.PrintInfo(Logging.Web, this, "\t" + chainStatus.StatusInformation); } } } if (success) { if (remoteCertValidationCallback != null) { Logging.PrintInfo(Logging.Web, this, SR.net_log_remote_cert_user_declared_valid); } else { Logging.PrintInfo(Logging.Web, this, SR.net_log_remote_cert_has_no_errors); } } else { if (remoteCertValidationCallback != null) { Logging.PrintInfo(Logging.Web, this, SR.net_log_remote_cert_user_declared_invalid); } } } GlobalLog.Print("Cert Validation, remote cert = " + (remoteCertificateEx == null ? "<null>" : remoteCertificateEx.ToString(true))); } finally { // At least on Win2k server the chain is found to have dependencies on the original cert context. // So it should be closed first. 
if (chain != null) { chain.Dispose(); } if (remoteCertificateEx != null) { remoteCertificateEx.Dispose(); } } GlobalLog.Leave("SecureChannel#" + Logging.HashString(this) + "::VerifyRemoteCertificate", success.ToString()); return success; } } // // ProtocolToken - used to process and handle the return codes // from the SSPI wrapper // internal class ProtocolToken { internal SecurityStatusPal Status; internal byte[] Payload; internal int Size; internal bool Failed { get { return ((Status != SecurityStatusPal.OK) && (Status != SecurityStatusPal.ContinueNeeded)); } } internal bool Done { get { return (Status == SecurityStatusPal.OK); } } internal bool Renegotiate { get { return (Status == SecurityStatusPal.Renegotiate); } } internal bool CloseConnection { get { return (Status == SecurityStatusPal.ContextExpired); } } internal ProtocolToken(byte[] data, SecurityStatusPal errorCode) { Status = errorCode; Payload = data; Size = data != null ? data.Length : 0; } internal Exception GetException() { // If it's not done, then there's got to be an error, even if it's // a Handshake message up, and we only have a Warning message. return this.Done ? null : SslStreamPal.GetException(Status); } #if TRACE_VERBOSE public override string ToString() { return "Status=" + Status.ToString() + ", data size=" + Size; } #endif } }
// Licensed to the .NET Foundation under one or more agreements. // The .NET Foundation licenses this file to you under the MIT license. // See the LICENSE file in the project root for more information. /****************************************************************************** * This file is auto-generated from a template file by the GenerateTests.csx * * script in tests\src\JIT\HardwareIntrinsics\X86\Shared. In order to make * * changes, please update the corresponding template and run according to the * * directions listed in the file. * ******************************************************************************/ using System; using System.Runtime.CompilerServices; using System.Runtime.InteropServices; using System.Runtime.Intrinsics; using System.Runtime.Intrinsics.X86; namespace JIT.HardwareIntrinsics.X86 { public static partial class Program { private static void CompareScalarUnorderedEqualBoolean() { var test = new BooleanBinaryOpTest__CompareScalarUnorderedEqualBoolean(); if (test.IsSupported) { // Validates basic functionality works, using Unsafe.Read test.RunBasicScenario_UnsafeRead(); if (Sse2.IsSupported) { // Validates basic functionality works, using Load test.RunBasicScenario_Load(); // Validates basic functionality works, using LoadAligned test.RunBasicScenario_LoadAligned(); } // Validates calling via reflection works, using Unsafe.Read test.RunReflectionScenario_UnsafeRead(); if (Sse2.IsSupported) { // Validates calling via reflection works, using Load test.RunReflectionScenario_Load(); // Validates calling via reflection works, using LoadAligned test.RunReflectionScenario_LoadAligned(); } // Validates passing a static member works test.RunClsVarScenario(); if (Sse2.IsSupported) { // Validates passing a static member works, using pinning and Load test.RunClsVarScenario_Load(); } // Validates passing a local works, using Unsafe.Read test.RunLclVarScenario_UnsafeRead(); if (Sse2.IsSupported) { // Validates passing a local works, using Load 
test.RunLclVarScenario_Load(); // Validates passing a local works, using LoadAligned test.RunLclVarScenario_LoadAligned(); } // Validates passing the field of a local class works test.RunClassLclFldScenario(); if (Sse2.IsSupported) { // Validates passing the field of a local class works, using pinning and Load test.RunClassLclFldScenario_Load(); } // Validates passing an instance member of a class works test.RunClassFldScenario(); if (Sse2.IsSupported) { // Validates passing an instance member of a class works, using pinning and Load test.RunClassFldScenario_Load(); } // Validates passing the field of a local struct works test.RunStructLclFldScenario(); if (Sse2.IsSupported) { // Validates passing the field of a local struct works, using pinning and Load test.RunStructLclFldScenario_Load(); } // Validates passing an instance member of a struct works test.RunStructFldScenario(); if (Sse2.IsSupported) { // Validates passing an instance member of a struct works, using pinning and Load test.RunStructFldScenario_Load(); } } else { // Validates we throw on unsupported hardware test.RunUnsupportedScenario(); } if (!test.Succeeded) { throw new Exception("One or more scenarios did not complete as expected."); } } } public sealed unsafe class BooleanBinaryOpTest__CompareScalarUnorderedEqualBoolean { private struct DataTable { private byte[] inArray1; private byte[] inArray2; private GCHandle inHandle1; private GCHandle inHandle2; private ulong alignment; public DataTable(Double[] inArray1, Double[] inArray2, int alignment) { int sizeOfinArray1 = inArray1.Length * Unsafe.SizeOf<Double>(); int sizeOfinArray2 = inArray2.Length * Unsafe.SizeOf<Double>(); if ((alignment != 32 && alignment != 16) || (alignment * 2) < sizeOfinArray1 || (alignment * 2) < sizeOfinArray2) { throw new ArgumentException("Invalid value of alignment"); } this.inArray1 = new byte[alignment * 2]; this.inArray2 = new byte[alignment * 2]; this.inHandle1 = GCHandle.Alloc(this.inArray1, GCHandleType.Pinned); 
this.inHandle2 = GCHandle.Alloc(this.inArray2, GCHandleType.Pinned); this.alignment = (ulong)alignment; Unsafe.CopyBlockUnaligned(ref Unsafe.AsRef<byte>(inArray1Ptr), ref Unsafe.As<Double, byte>(ref inArray1[0]), (uint)sizeOfinArray1); Unsafe.CopyBlockUnaligned(ref Unsafe.AsRef<byte>(inArray2Ptr), ref Unsafe.As<Double, byte>(ref inArray2[0]), (uint)sizeOfinArray2); } public void* inArray1Ptr => Align((byte*)(inHandle1.AddrOfPinnedObject().ToPointer()), alignment); public void* inArray2Ptr => Align((byte*)(inHandle2.AddrOfPinnedObject().ToPointer()), alignment); public void Dispose() { inHandle1.Free(); inHandle2.Free(); } private static unsafe void* Align(byte* buffer, ulong expectedAlignment) { return (void*)(((ulong)buffer + expectedAlignment - 1) & ~(expectedAlignment - 1)); } } private struct TestStruct { public Vector128<Double> _fld1; public Vector128<Double> _fld2; public static TestStruct Create() { var testStruct = new TestStruct(); for (var i = 0; i < Op1ElementCount; i++) { _data1[i] = TestLibrary.Generator.GetDouble(); } Unsafe.CopyBlockUnaligned(ref Unsafe.As<Vector128<Double>, byte>(ref testStruct._fld1), ref Unsafe.As<Double, byte>(ref _data1[0]), (uint)Unsafe.SizeOf<Vector128<Double>>()); for (var i = 0; i < Op2ElementCount; i++) { _data2[i] = TestLibrary.Generator.GetDouble(); } Unsafe.CopyBlockUnaligned(ref Unsafe.As<Vector128<Double>, byte>(ref testStruct._fld2), ref Unsafe.As<Double, byte>(ref _data2[0]), (uint)Unsafe.SizeOf<Vector128<Double>>()); return testStruct; } public void RunStructFldScenario(BooleanBinaryOpTest__CompareScalarUnorderedEqualBoolean testClass) { var result = Sse2.CompareScalarUnorderedEqual(_fld1, _fld2); testClass.ValidateResult(_fld1, _fld2, result); } public void RunStructFldScenario_Load(BooleanBinaryOpTest__CompareScalarUnorderedEqualBoolean testClass) { fixed (Vector128<Double>* pFld1 = &_fld1) fixed (Vector128<Double>* pFld2 = &_fld2) { var result = Sse2.CompareScalarUnorderedEqual( 
Sse2.LoadVector128((Double*)(pFld1)), Sse2.LoadVector128((Double*)(pFld2)) ); testClass.ValidateResult(_fld1, _fld2, result); } } } private static readonly int LargestVectorSize = 16; private static readonly int Op1ElementCount = Unsafe.SizeOf<Vector128<Double>>() / sizeof(Double); private static readonly int Op2ElementCount = Unsafe.SizeOf<Vector128<Double>>() / sizeof(Double); private static Double[] _data1 = new Double[Op1ElementCount]; private static Double[] _data2 = new Double[Op2ElementCount]; private static Vector128<Double> _clsVar1; private static Vector128<Double> _clsVar2; private Vector128<Double> _fld1; private Vector128<Double> _fld2; private DataTable _dataTable; static BooleanBinaryOpTest__CompareScalarUnorderedEqualBoolean() { for (var i = 0; i < Op1ElementCount; i++) { _data1[i] = TestLibrary.Generator.GetDouble(); } Unsafe.CopyBlockUnaligned(ref Unsafe.As<Vector128<Double>, byte>(ref _clsVar1), ref Unsafe.As<Double, byte>(ref _data1[0]), (uint)Unsafe.SizeOf<Vector128<Double>>()); for (var i = 0; i < Op2ElementCount; i++) { _data2[i] = TestLibrary.Generator.GetDouble(); } Unsafe.CopyBlockUnaligned(ref Unsafe.As<Vector128<Double>, byte>(ref _clsVar2), ref Unsafe.As<Double, byte>(ref _data2[0]), (uint)Unsafe.SizeOf<Vector128<Double>>()); } public BooleanBinaryOpTest__CompareScalarUnorderedEqualBoolean() { Succeeded = true; for (var i = 0; i < Op1ElementCount; i++) { _data1[i] = TestLibrary.Generator.GetDouble(); } Unsafe.CopyBlockUnaligned(ref Unsafe.As<Vector128<Double>, byte>(ref _fld1), ref Unsafe.As<Double, byte>(ref _data1[0]), (uint)Unsafe.SizeOf<Vector128<Double>>()); for (var i = 0; i < Op2ElementCount; i++) { _data2[i] = TestLibrary.Generator.GetDouble(); } Unsafe.CopyBlockUnaligned(ref Unsafe.As<Vector128<Double>, byte>(ref _fld2), ref Unsafe.As<Double, byte>(ref _data2[0]), (uint)Unsafe.SizeOf<Vector128<Double>>()); for (var i = 0; i < Op1ElementCount; i++) { _data1[i] = TestLibrary.Generator.GetDouble(); } for (var i = 0; i < 
Op2ElementCount; i++) { _data2[i] = TestLibrary.Generator.GetDouble(); }
            // Shared unmanaged table holding both operands (used by the pointer-based scenarios).
            _dataTable = new DataTable(_data1, _data2, LargestVectorSize);
        }

        // True when the SSE2 ISA is usable on the current hardware/JIT configuration.
        public bool IsSupported => Sse2.IsSupported;

        // Set to false by any scenario whose validation fails; read by the test driver.
        public bool Succeeded { get; set; }

        // Validates the intrinsic when operands are read via Unsafe.Read (no explicit load intrinsic).
        public void RunBasicScenario_UnsafeRead()
        {
            TestLibrary.TestFramework.BeginScenario(nameof(RunBasicScenario_UnsafeRead));

            var result = Sse2.CompareScalarUnorderedEqual(
                Unsafe.Read<Vector128<Double>>(_dataTable.inArray1Ptr),
                Unsafe.Read<Vector128<Double>>(_dataTable.inArray2Ptr)
            );

            ValidateResult(_dataTable.inArray1Ptr, _dataTable.inArray2Ptr, result);
        }

        // Validates the intrinsic when operands come from an unaligned LoadVector128.
        public void RunBasicScenario_Load()
        {
            TestLibrary.TestFramework.BeginScenario(nameof(RunBasicScenario_Load));

            var result = Sse2.CompareScalarUnorderedEqual(
                Sse2.LoadVector128((Double*)(_dataTable.inArray1Ptr)),
                Sse2.LoadVector128((Double*)(_dataTable.inArray2Ptr))
            );

            ValidateResult(_dataTable.inArray1Ptr, _dataTable.inArray2Ptr, result);
        }

        // Validates the intrinsic when operands come from an aligned LoadAlignedVector128.
        public void RunBasicScenario_LoadAligned()
        {
            TestLibrary.TestFramework.BeginScenario(nameof(RunBasicScenario_LoadAligned));

            var result = Sse2.CompareScalarUnorderedEqual(
                Sse2.LoadAlignedVector128((Double*)(_dataTable.inArray1Ptr)),
                Sse2.LoadAlignedVector128((Double*)(_dataTable.inArray2Ptr))
            );

            ValidateResult(_dataTable.inArray1Ptr, _dataTable.inArray2Ptr, result);
        }

        // Same as the basic UnsafeRead scenario, but invokes the intrinsic via reflection
        // (exercises the non-intrinsic/software-fallback call path).
        public void RunReflectionScenario_UnsafeRead()
        {
            TestLibrary.TestFramework.BeginScenario(nameof(RunReflectionScenario_UnsafeRead));

            var result = typeof(Sse2).GetMethod(nameof(Sse2.CompareScalarUnorderedEqual), new Type[] { typeof(Vector128<Double>), typeof(Vector128<Double>) })
                                     .Invoke(null, new object[] {
                                         Unsafe.Read<Vector128<Double>>(_dataTable.inArray1Ptr),
                                         Unsafe.Read<Vector128<Double>>(_dataTable.inArray2Ptr)
                                     });

            ValidateResult(_dataTable.inArray1Ptr, _dataTable.inArray2Ptr, (bool)(result));
        }

        // Reflection invocation with unaligned-load operands.
        public void RunReflectionScenario_Load()
        {
            TestLibrary.TestFramework.BeginScenario(nameof(RunReflectionScenario_Load));

            var result = typeof(Sse2).GetMethod(nameof(Sse2.CompareScalarUnorderedEqual), new Type[] { typeof(Vector128<Double>), typeof(Vector128<Double>) })
                                     .Invoke(null, new object[] {
                                         Sse2.LoadVector128((Double*)(_dataTable.inArray1Ptr)),
                                         Sse2.LoadVector128((Double*)(_dataTable.inArray2Ptr))
                                     });

            ValidateResult(_dataTable.inArray1Ptr, _dataTable.inArray2Ptr, (bool)(result));
        }

        // Reflection invocation with aligned-load operands.
        public void RunReflectionScenario_LoadAligned()
        {
            TestLibrary.TestFramework.BeginScenario(nameof(RunReflectionScenario_LoadAligned));

            var result = typeof(Sse2).GetMethod(nameof(Sse2.CompareScalarUnorderedEqual), new Type[] { typeof(Vector128<Double>), typeof(Vector128<Double>) })
                                     .Invoke(null, new object[] {
                                         Sse2.LoadAlignedVector128((Double*)(_dataTable.inArray1Ptr)),
                                         Sse2.LoadAlignedVector128((Double*)(_dataTable.inArray2Ptr))
                                     });

            ValidateResult(_dataTable.inArray1Ptr, _dataTable.inArray2Ptr, (bool)(result));
        }

        // Operands sourced from static (class-level) fields.
        public void RunClsVarScenario()
        {
            TestLibrary.TestFramework.BeginScenario(nameof(RunClsVarScenario));

            var result = Sse2.CompareScalarUnorderedEqual(
                _clsVar1,
                _clsVar2
            );

            ValidateResult(_clsVar1, _clsVar2, result);
        }

        // Static fields pinned and re-loaded through LoadVector128.
        public void RunClsVarScenario_Load()
        {
            TestLibrary.TestFramework.BeginScenario(nameof(RunClsVarScenario_Load));

            fixed (Vector128<Double>* pClsVar1 = &_clsVar1)
            fixed (Vector128<Double>* pClsVar2 = &_clsVar2)
            {
                var result = Sse2.CompareScalarUnorderedEqual(
                    Sse2.LoadVector128((Double*)(pClsVar1)),
                    Sse2.LoadVector128((Double*)(pClsVar2))
                );

                ValidateResult(_clsVar1, _clsVar2, result);
            }
        }

        // Operands held in locals (forces register/stack codegen paths).
        public void RunLclVarScenario_UnsafeRead()
        {
            TestLibrary.TestFramework.BeginScenario(nameof(RunLclVarScenario_UnsafeRead));

            var op1 = Unsafe.Read<Vector128<Double>>(_dataTable.inArray1Ptr);
            var op2 = Unsafe.Read<Vector128<Double>>(_dataTable.inArray2Ptr);
            var result = Sse2.CompareScalarUnorderedEqual(op1, op2);

            ValidateResult(op1, op2, result);
        }

        // Locals populated via unaligned loads.
        public void RunLclVarScenario_Load()
        {
            TestLibrary.TestFramework.BeginScenario(nameof(RunLclVarScenario_Load));

            var op1 = Sse2.LoadVector128((Double*)(_dataTable.inArray1Ptr));
            var op2 = Sse2.LoadVector128((Double*)(_dataTable.inArray2Ptr));
            var result = Sse2.CompareScalarUnorderedEqual(op1, op2);

            ValidateResult(op1, op2, result);
        }

        // Locals populated via aligned loads.
        public void RunLclVarScenario_LoadAligned()
        {
            TestLibrary.TestFramework.BeginScenario(nameof(RunLclVarScenario_LoadAligned));

            var op1 = Sse2.LoadAlignedVector128((Double*)(_dataTable.inArray1Ptr));
            var op2 = Sse2.LoadAlignedVector128((Double*)(_dataTable.inArray2Ptr));
            var result = Sse2.CompareScalarUnorderedEqual(op1, op2);

            ValidateResult(op1, op2, result);
        }

        // Operands sourced from instance fields of a locally created class instance.
        public void RunClassLclFldScenario()
        {
            TestLibrary.TestFramework.BeginScenario(nameof(RunClassLclFldScenario));

            var test = new BooleanBinaryOpTest__CompareScalarUnorderedEqualBoolean();
            var result = Sse2.CompareScalarUnorderedEqual(test._fld1, test._fld2);

            ValidateResult(test._fld1, test._fld2, result);
        }

        // Local-instance fields pinned and re-loaded through LoadVector128.
        public void RunClassLclFldScenario_Load()
        {
            TestLibrary.TestFramework.BeginScenario(nameof(RunClassLclFldScenario_Load));

            var test = new BooleanBinaryOpTest__CompareScalarUnorderedEqualBoolean();

            fixed (Vector128<Double>* pFld1 = &test._fld1)
            fixed (Vector128<Double>* pFld2 = &test._fld2)
            {
                var result = Sse2.CompareScalarUnorderedEqual(
                    Sse2.LoadVector128((Double*)(pFld1)),
                    Sse2.LoadVector128((Double*)(pFld2))
                );

                ValidateResult(test._fld1, test._fld2, result);
            }
        }

        // Operands sourced from this instance's fields.
        public void RunClassFldScenario()
        {
            TestLibrary.TestFramework.BeginScenario(nameof(RunClassFldScenario));

            var result = Sse2.CompareScalarUnorderedEqual(_fld1, _fld2);

            ValidateResult(_fld1, _fld2, result);
        }

        // Instance fields pinned and re-loaded through LoadVector128.
        public void RunClassFldScenario_Load()
        {
            TestLibrary.TestFramework.BeginScenario(nameof(RunClassFldScenario_Load));

            fixed (Vector128<Double>* pFld1 = &_fld1)
            fixed (Vector128<Double>* pFld2 = &_fld2)
            {
                var result = Sse2.CompareScalarUnorderedEqual(
                    Sse2.LoadVector128((Double*)(pFld1)),
                    Sse2.LoadVector128((Double*)(pFld2))
                );

                ValidateResult(_fld1, _fld2, result);
            }
        }

        // Operands sourced from fields of a local struct instance.
        public void RunStructLclFldScenario()
        {
            TestLibrary.TestFramework.BeginScenario(nameof(RunStructLclFldScenario));

            var test = TestStruct.Create();
            var result = Sse2.CompareScalarUnorderedEqual(test._fld1, test._fld2);

            ValidateResult(test._fld1, test._fld2, result);
        }

        // Struct fields loaded via LoadVector128 through their addresses.
        public void RunStructLclFldScenario_Load()
        {
            TestLibrary.TestFramework.BeginScenario(nameof(RunStructLclFldScenario_Load));

            var test = TestStruct.Create();
            var result = Sse2.CompareScalarUnorderedEqual(
                Sse2.LoadVector128((Double*)(&test._fld1)),
                Sse2.LoadVector128((Double*)(&test._fld2))
            );

            ValidateResult(test._fld1, test._fld2, result);
        }

        // Delegates to the struct's own scenario method (struct-as-this codegen path).
        public void RunStructFldScenario()
        {
            TestLibrary.TestFramework.BeginScenario(nameof(RunStructFldScenario));

            var test = TestStruct.Create();
            test.RunStructFldScenario(this);
        }

        // Struct scenario using explicit loads.
        public void RunStructFldScenario_Load()
        {
            TestLibrary.TestFramework.BeginScenario(nameof(RunStructFldScenario_Load));

            var test = TestStruct.Create();
            test.RunStructFldScenario_Load(this);
        }

        // On unsupported hardware the intrinsic must throw PlatformNotSupportedException.
        public void RunUnsupportedScenario()
        {
            TestLibrary.TestFramework.BeginScenario(nameof(RunUnsupportedScenario));

            bool succeeded = false;

            try
            {
                RunBasicScenario_UnsafeRead();
            }
            catch (PlatformNotSupportedException)
            {
                succeeded = true;
            }

            if (!succeeded)
            {
                Succeeded = false;
            }
        }

        // Validation overload for vector operands: spill the vectors into arrays and defer.
        private void ValidateResult(Vector128<Double> op1, Vector128<Double> op2, bool result, [CallerMemberName] string method = "")
        {
            Double[] inArray1 = new Double[Op1ElementCount];
            Double[] inArray2 = new Double[Op2ElementCount];

            Unsafe.WriteUnaligned(ref Unsafe.As<Double, byte>(ref inArray1[0]), op1);
            Unsafe.WriteUnaligned(ref Unsafe.As<Double, byte>(ref inArray2[0]), op2);

            ValidateResult(inArray1, inArray2, result, method);
        }

        // Validation overload for pointer operands: copy the raw vector bytes out and defer.
        private void ValidateResult(void* op1, void* op2, bool result, [CallerMemberName] string method = "")
        {
            Double[] inArray1 = new Double[Op1ElementCount];
            Double[] inArray2 = new Double[Op2ElementCount];

            Unsafe.CopyBlockUnaligned(ref Unsafe.As<Double, byte>(ref inArray1[0]), ref Unsafe.AsRef<byte>(op1), (uint)Unsafe.SizeOf<Vector128<Double>>());
            Unsafe.CopyBlockUnaligned(ref Unsafe.As<Double, byte>(ref inArray2[0]), ref Unsafe.AsRef<byte>(op2), (uint)Unsafe.SizeOf<Vector128<Double>>());

            ValidateResult(inArray1, inArray2, result, method);
        }

        // Core oracle: CompareScalarUnorderedEqual compares only the lowest element, so the
        // expected result is (left[0] == right[0]); logs operands and marks failure otherwise.
        // NOTE(review): the oracle intentionally ignores NaN ("unordered" returns true for
        // NaN == NaN under UCOMISD); the test generator presumably never produces NaN inputs.
        private void ValidateResult(Double[] left, Double[] right, bool result, [CallerMemberName] string method = "")
        {
            bool succeeded = true;

            if ((left[0] == right[0]) != result)
            {
                succeeded = false;
            }

            if (!succeeded)
            {
                TestLibrary.TestFramework.LogInformation($"{nameof(Sse2)}.{nameof(Sse2.CompareScalarUnorderedEqual)}<Boolean>(Vector128<Double>, Vector128<Double>): {method} failed:");
                TestLibrary.TestFramework.LogInformation($" left: ({string.Join(", ", left)})");
                TestLibrary.TestFramework.LogInformation($" right: ({string.Join(", ", right)})");
                TestLibrary.TestFramework.LogInformation($" result: ({result})");
                TestLibrary.TestFramework.LogInformation(string.Empty);

                Succeeded = false;
            }
        }
    }
}
// Copyright 2021 Google LLC
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
//     https://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

// Generated code. DO NOT EDIT!
// NOTE(review): this file is produced by the GAPIC generator; behavioral changes
// belong in the generator config or in partial-class extensions, not here.

using gax = Google.Api.Gax;
using gaxgrpc = Google.Api.Gax.Grpc;
using gaxgrpccore = Google.Api.Gax.Grpc.GrpcCore;
using proto = Google.Protobuf;
using grpccore = Grpc.Core;
using grpcinter = Grpc.Core.Interceptors;
using sys = System;
using scg = System.Collections.Generic;
using sco = System.Collections.ObjectModel;
using st = System.Threading;
using stt = System.Threading.Tasks;

namespace Google.Cloud.Profiler.V2
{
    /// <summary>Settings for <see cref="ProfilerServiceClient"/> instances.</summary>
    public sealed partial class ProfilerServiceSettings : gaxgrpc::ServiceSettingsBase
    {
        /// <summary>Get a new instance of the default <see cref="ProfilerServiceSettings"/>.</summary>
        /// <returns>A new instance of the default <see cref="ProfilerServiceSettings"/>.</returns>
        public static ProfilerServiceSettings GetDefault() => new ProfilerServiceSettings();

        /// <summary>Constructs a new <see cref="ProfilerServiceSettings"/> object with default settings.</summary>
        public ProfilerServiceSettings()
        {
        }

        // Copy constructor used by Clone(); OnCopy lets partial-class extensions copy extra state.
        private ProfilerServiceSettings(ProfilerServiceSettings existing) : base(existing)
        {
            gax::GaxPreconditions.CheckNotNull(existing, nameof(existing));
            CreateProfileSettings = existing.CreateProfileSettings;
            CreateOfflineProfileSettings = existing.CreateOfflineProfileSettings;
            UpdateProfileSettings = existing.UpdateProfileSettings;
            OnCopy(existing);
        }

        partial void OnCopy(ProfilerServiceSettings existing);

        /// <summary>
        /// <see cref="gaxgrpc::CallSettings"/> for synchronous and asynchronous calls to
        /// <c>ProfilerServiceClient.CreateProfile</c> and <c>ProfilerServiceClient.CreateProfileAsync</c>.
        /// </summary>
        /// <remarks>
        /// <list type="bullet">
        /// <item><description>This call will not be retried.</description></item>
        /// <item><description>Timeout: 3600 seconds.</description></item>
        /// </list>
        /// </remarks>
        public gaxgrpc::CallSettings CreateProfileSettings { get; set; } = gaxgrpc::CallSettings.FromExpiration(gax::Expiration.FromTimeout(sys::TimeSpan.FromMilliseconds(3600000)));

        /// <summary>
        /// <see cref="gaxgrpc::CallSettings"/> for synchronous and asynchronous calls to
        /// <c>ProfilerServiceClient.CreateOfflineProfile</c> and <c>ProfilerServiceClient.CreateOfflineProfileAsync</c>
        /// .
        /// </summary>
        /// <remarks>
        /// <list type="bullet">
        /// <item><description>This call will not be retried.</description></item>
        /// <item><description>Timeout: 30 seconds.</description></item>
        /// </list>
        /// </remarks>
        public gaxgrpc::CallSettings CreateOfflineProfileSettings { get; set; } = gaxgrpc::CallSettings.FromExpiration(gax::Expiration.FromTimeout(sys::TimeSpan.FromMilliseconds(30000)));

        /// <summary>
        /// <see cref="gaxgrpc::CallSettings"/> for synchronous and asynchronous calls to
        /// <c>ProfilerServiceClient.UpdateProfile</c> and <c>ProfilerServiceClient.UpdateProfileAsync</c>.
        /// </summary>
        /// <remarks>
        /// <list type="bullet">
        /// <item><description>This call will not be retried.</description></item>
        /// <item><description>Timeout: 30 seconds.</description></item>
        /// </list>
        /// </remarks>
        public gaxgrpc::CallSettings UpdateProfileSettings { get; set; } = gaxgrpc::CallSettings.FromExpiration(gax::Expiration.FromTimeout(sys::TimeSpan.FromMilliseconds(30000)));

        /// <summary>Creates a deep clone of this object, with all the same property values.</summary>
        /// <returns>A deep clone of this <see cref="ProfilerServiceSettings"/> object.</returns>
        public ProfilerServiceSettings Clone() => new ProfilerServiceSettings(this);
    }

    /// <summary>
    /// Builder class for <see cref="ProfilerServiceClient"/> to provide simple configuration of credentials, endpoint
    /// etc.
    /// </summary>
    public sealed partial class ProfilerServiceClientBuilder : gaxgrpc::ClientBuilderBase<ProfilerServiceClient>
    {
        /// <summary>The settings to use for RPCs, or <c>null</c> for the default settings.</summary>
        public ProfilerServiceSettings Settings { get; set; }

        /// <summary>Creates a new builder with default settings.</summary>
        public ProfilerServiceClientBuilder()
        {
            UseJwtAccessWithScopes = ProfilerServiceClient.UseJwtAccessWithScopes;
        }

        // Interception points for partial-class extensions to substitute a custom client.
        partial void InterceptBuild(ref ProfilerServiceClient client);

        partial void InterceptBuildAsync(st::CancellationToken cancellationToken, ref stt::Task<ProfilerServiceClient> task);

        /// <summary>Builds the resulting client.</summary>
        public override ProfilerServiceClient Build()
        {
            ProfilerServiceClient client = null;
            InterceptBuild(ref client);
            return client ?? BuildImpl();
        }

        /// <summary>Builds the resulting client asynchronously.</summary>
        public override stt::Task<ProfilerServiceClient> BuildAsync(st::CancellationToken cancellationToken = default)
        {
            stt::Task<ProfilerServiceClient> task = null;
            InterceptBuildAsync(cancellationToken, ref task);
            return task ?? BuildAsyncImpl(cancellationToken);
        }

        private ProfilerServiceClient BuildImpl()
        {
            Validate();
            grpccore::CallInvoker callInvoker = CreateCallInvoker();
            return ProfilerServiceClient.Create(callInvoker, Settings);
        }

        private async stt::Task<ProfilerServiceClient> BuildAsyncImpl(st::CancellationToken cancellationToken)
        {
            Validate();
            grpccore::CallInvoker callInvoker = await CreateCallInvokerAsync(cancellationToken).ConfigureAwait(false);
            return ProfilerServiceClient.Create(callInvoker, Settings);
        }

        /// <summary>Returns the endpoint for this builder type, used if no endpoint is otherwise specified.</summary>
        protected override string GetDefaultEndpoint() => ProfilerServiceClient.DefaultEndpoint;

        /// <summary>
        /// Returns the default scopes for this builder type, used if no scopes are otherwise specified.
        /// </summary>
        protected override scg::IReadOnlyList<string> GetDefaultScopes() => ProfilerServiceClient.DefaultScopes;

        /// <summary>Returns the channel pool to use when no other options are specified.</summary>
        protected override gaxgrpc::ChannelPool GetChannelPool() => ProfilerServiceClient.ChannelPool;

        /// <summary>Returns the default <see cref="gaxgrpc::GrpcAdapter"/> to use if not otherwise specified.</summary>
        protected override gaxgrpc::GrpcAdapter DefaultGrpcAdapter => gaxgrpccore::GrpcCoreAdapter.Instance;
    }

    /// <summary>ProfilerService client wrapper, for convenient use.</summary>
    /// <remarks>
    /// Manage the collection of continuous profiling data provided by profiling
    /// agents running in the cloud or by an offline provider of profiling data.
    ///
    /// General guidelines:
    /// * Profiles for a single deployment must be created in ascending time order.
    /// * Profiles can be created in either online or offline mode, see below.
    /// </remarks>
    public abstract partial class ProfilerServiceClient
    {
        /// <summary>
        /// The default endpoint for the ProfilerService service, which is a host of "cloudprofiler.googleapis.com" and
        /// a port of 443.
        /// </summary>
        public static string DefaultEndpoint { get; } = "cloudprofiler.googleapis.com:443";

        /// <summary>The default ProfilerService scopes.</summary>
        /// <remarks>
        /// The default ProfilerService scopes are:
        /// <list type="bullet">
        /// <item><description>https://www.googleapis.com/auth/cloud-platform</description></item>
        /// <item><description>https://www.googleapis.com/auth/monitoring</description></item>
        /// <item><description>https://www.googleapis.com/auth/monitoring.write</description></item>
        /// </list>
        /// </remarks>
        public static scg::IReadOnlyList<string> DefaultScopes { get; } = new sco::ReadOnlyCollection<string>(new string[]
        {
            "https://www.googleapis.com/auth/cloud-platform",
            "https://www.googleapis.com/auth/monitoring",
            "https://www.googleapis.com/auth/monitoring.write",
        });

        // Shared channel pool backing Create()/CreateAsync() when no custom channel is supplied.
        internal static gaxgrpc::ChannelPool ChannelPool { get; } = new gaxgrpc::ChannelPool(DefaultScopes, UseJwtAccessWithScopes);

        internal static bool UseJwtAccessWithScopes
        {
            get
            {
                bool useJwtAccessWithScopes = true;
                MaybeUseJwtAccessWithScopes(ref useJwtAccessWithScopes);
                return useJwtAccessWithScopes;
            }
        }

        static partial void MaybeUseJwtAccessWithScopes(ref bool useJwtAccessWithScopes);

        /// <summary>
        /// Asynchronously creates a <see cref="ProfilerServiceClient"/> using the default credentials, endpoint and
        /// settings. To specify custom credentials or other settings, use <see cref="ProfilerServiceClientBuilder"/>.
        /// </summary>
        /// <param name="cancellationToken">
        /// The <see cref="st::CancellationToken"/> to use while creating the client.
        /// </param>
        /// <returns>The task representing the created <see cref="ProfilerServiceClient"/>.</returns>
        public static stt::Task<ProfilerServiceClient> CreateAsync(st::CancellationToken cancellationToken = default) =>
            new ProfilerServiceClientBuilder().BuildAsync(cancellationToken);

        /// <summary>
        /// Synchronously creates a <see cref="ProfilerServiceClient"/> using the default credentials, endpoint and
        /// settings. To specify custom credentials or other settings, use <see cref="ProfilerServiceClientBuilder"/>.
        /// </summary>
        /// <returns>The created <see cref="ProfilerServiceClient"/>.</returns>
        public static ProfilerServiceClient Create() => new ProfilerServiceClientBuilder().Build();

        /// <summary>
        /// Creates a <see cref="ProfilerServiceClient"/> which uses the specified call invoker for remote operations.
        /// </summary>
        /// <param name="callInvoker">
        /// The <see cref="grpccore::CallInvoker"/> for remote operations. Must not be null.
        /// </param>
        /// <param name="settings">Optional <see cref="ProfilerServiceSettings"/>.</param>
        /// <returns>The created <see cref="ProfilerServiceClient"/>.</returns>
        internal static ProfilerServiceClient Create(grpccore::CallInvoker callInvoker, ProfilerServiceSettings settings = null)
        {
            gax::GaxPreconditions.CheckNotNull(callInvoker, nameof(callInvoker));
            grpcinter::Interceptor interceptor = settings?.Interceptor;
            if (interceptor != null)
            {
                callInvoker = grpcinter::CallInvokerExtensions.Intercept(callInvoker, interceptor);
            }
            ProfilerService.ProfilerServiceClient grpcClient = new ProfilerService.ProfilerServiceClient(callInvoker);
            return new ProfilerServiceClientImpl(grpcClient, settings);
        }

        /// <summary>
        /// Shuts down any channels automatically created by <see cref="Create()"/> and
        /// <see cref="CreateAsync(st::CancellationToken)"/>. Channels which weren't automatically created are not
        /// affected.
        /// </summary>
        /// <remarks>
        /// After calling this method, further calls to <see cref="Create()"/> and
        /// <see cref="CreateAsync(st::CancellationToken)"/> will create new channels, which could in turn be shut down
        /// by another call to this method.
        /// </remarks>
        /// <returns>A task representing the asynchronous shutdown operation.</returns>
        public static stt::Task ShutdownDefaultChannelsAsync() => ChannelPool.ShutdownChannelsAsync();

        /// <summary>The underlying gRPC ProfilerService client</summary>
        public virtual ProfilerService.ProfilerServiceClient GrpcClient => throw new sys::NotImplementedException();

        /// <summary>
        /// CreateProfile creates a new profile resource in the online mode.
        ///
        /// The server ensures that the new profiles are created at a constant rate per
        /// deployment, so the creation request may hang for some time until the next
        /// profile session is available.
        ///
        /// The request may fail with ABORTED error if the creation is not available
        /// within ~1m, the response will indicate the duration of the backoff the
        /// client should take before attempting creating a profile again. The backoff
        /// duration is returned in google.rpc.RetryInfo extension on the response
        /// status. To a gRPC client, the extension will be return as a
        /// binary-serialized proto in the trailing metadata item named
        /// "google.rpc.retryinfo-bin".
        /// </summary>
        /// <param name="request">The request object containing all of the parameters for the API call.</param>
        /// <param name="callSettings">If not null, applies overrides to this RPC call.</param>
        /// <returns>The RPC response.</returns>
        public virtual Profile CreateProfile(CreateProfileRequest request, gaxgrpc::CallSettings callSettings = null) =>
            throw new sys::NotImplementedException();

        /// <summary>
        /// CreateProfile creates a new profile resource in the online mode.
        ///
        /// The server ensures that the new profiles are created at a constant rate per
        /// deployment, so the creation request may hang for some time until the next
        /// profile session is available.
        ///
        /// The request may fail with ABORTED error if the creation is not available
        /// within ~1m, the response will indicate the duration of the backoff the
        /// client should take before attempting creating a profile again. The backoff
        /// duration is returned in google.rpc.RetryInfo extension on the response
        /// status. To a gRPC client, the extension will be return as a
        /// binary-serialized proto in the trailing metadata item named
        /// "google.rpc.retryinfo-bin".
        /// </summary>
        /// <param name="request">The request object containing all of the parameters for the API call.</param>
        /// <param name="callSettings">If not null, applies overrides to this RPC call.</param>
        /// <returns>A Task containing the RPC response.</returns>
        public virtual stt::Task<Profile> CreateProfileAsync(CreateProfileRequest request, gaxgrpc::CallSettings callSettings = null) =>
            throw new sys::NotImplementedException();

        /// <summary>
        /// CreateProfile creates a new profile resource in the online mode.
        ///
        /// The server ensures that the new profiles are created at a constant rate per
        /// deployment, so the creation request may hang for some time until the next
        /// profile session is available.
        ///
        /// The request may fail with ABORTED error if the creation is not available
        /// within ~1m, the response will indicate the duration of the backoff the
        /// client should take before attempting creating a profile again. The backoff
        /// duration is returned in google.rpc.RetryInfo extension on the response
        /// status. To a gRPC client, the extension will be return as a
        /// binary-serialized proto in the trailing metadata item named
        /// "google.rpc.retryinfo-bin".
        /// </summary>
        /// <param name="request">The request object containing all of the parameters for the API call.</param>
        /// <param name="cancellationToken">A <see cref="st::CancellationToken"/> to use for this RPC.</param>
        /// <returns>A Task containing the RPC response.</returns>
        public virtual stt::Task<Profile> CreateProfileAsync(CreateProfileRequest request, st::CancellationToken cancellationToken) =>
            CreateProfileAsync(request, gaxgrpc::CallSettings.FromCancellationToken(cancellationToken));

        /// <summary>
        /// CreateOfflineProfile creates a new profile resource in the offline mode.
        /// The client provides the profile to create along with the profile bytes, the
        /// server records it.
        /// </summary>
        /// <param name="request">The request object containing all of the parameters for the API call.</param>
        /// <param name="callSettings">If not null, applies overrides to this RPC call.</param>
        /// <returns>The RPC response.</returns>
        public virtual Profile CreateOfflineProfile(CreateOfflineProfileRequest request, gaxgrpc::CallSettings callSettings = null) =>
            throw new sys::NotImplementedException();

        /// <summary>
        /// CreateOfflineProfile creates a new profile resource in the offline mode.
        /// The client provides the profile to create along with the profile bytes, the
        /// server records it.
        /// </summary>
        /// <param name="request">The request object containing all of the parameters for the API call.</param>
        /// <param name="callSettings">If not null, applies overrides to this RPC call.</param>
        /// <returns>A Task containing the RPC response.</returns>
        public virtual stt::Task<Profile> CreateOfflineProfileAsync(CreateOfflineProfileRequest request, gaxgrpc::CallSettings callSettings = null) =>
            throw new sys::NotImplementedException();

        /// <summary>
        /// CreateOfflineProfile creates a new profile resource in the offline mode.
        /// The client provides the profile to create along with the profile bytes, the
        /// server records it.
        /// </summary>
        /// <param name="request">The request object containing all of the parameters for the API call.</param>
        /// <param name="cancellationToken">A <see cref="st::CancellationToken"/> to use for this RPC.</param>
        /// <returns>A Task containing the RPC response.</returns>
        public virtual stt::Task<Profile> CreateOfflineProfileAsync(CreateOfflineProfileRequest request, st::CancellationToken cancellationToken) =>
            CreateOfflineProfileAsync(request, gaxgrpc::CallSettings.FromCancellationToken(cancellationToken));

        /// <summary>
        /// UpdateProfile updates the profile bytes and labels on the profile resource
        /// created in the online mode. Updating the bytes for profiles created in the
        /// offline mode is currently not supported: the profile content must be
        /// provided at the time of the profile creation.
        /// </summary>
        /// <param name="request">The request object containing all of the parameters for the API call.</param>
        /// <param name="callSettings">If not null, applies overrides to this RPC call.</param>
        /// <returns>The RPC response.</returns>
        public virtual Profile UpdateProfile(UpdateProfileRequest request, gaxgrpc::CallSettings callSettings = null) =>
            throw new sys::NotImplementedException();

        /// <summary>
        /// UpdateProfile updates the profile bytes and labels on the profile resource
        /// created in the online mode. Updating the bytes for profiles created in the
        /// offline mode is currently not supported: the profile content must be
        /// provided at the time of the profile creation.
        /// </summary>
        /// <param name="request">The request object containing all of the parameters for the API call.</param>
        /// <param name="callSettings">If not null, applies overrides to this RPC call.</param>
        /// <returns>A Task containing the RPC response.</returns>
        public virtual stt::Task<Profile> UpdateProfileAsync(UpdateProfileRequest request, gaxgrpc::CallSettings callSettings = null) =>
            throw new sys::NotImplementedException();

        /// <summary>
        /// UpdateProfile updates the profile bytes and labels on the profile resource
        /// created in the online mode. Updating the bytes for profiles created in the
        /// offline mode is currently not supported: the profile content must be
        /// provided at the time of the profile creation.
        /// </summary>
        /// <param name="request">The request object containing all of the parameters for the API call.</param>
        /// <param name="cancellationToken">A <see cref="st::CancellationToken"/> to use for this RPC.</param>
        /// <returns>A Task containing the RPC response.</returns>
        public virtual stt::Task<Profile> UpdateProfileAsync(UpdateProfileRequest request, st::CancellationToken cancellationToken) =>
            UpdateProfileAsync(request, gaxgrpc::CallSettings.FromCancellationToken(cancellationToken));
    }

    /// <summary>ProfilerService client wrapper implementation, for convenient use.</summary>
    /// <remarks>
    /// Manage the collection of continuous profiling data provided by profiling
    /// agents running in the cloud or by an offline provider of profiling data.
    ///
    /// General guidelines:
    /// * Profiles for a single deployment must be created in ascending time order.
    /// * Profiles can be created in either online or offline mode, see below.
    /// </remarks>
    public sealed partial class ProfilerServiceClientImpl : ProfilerServiceClient
    {
        private readonly gaxgrpc::ApiCall<CreateProfileRequest, Profile> _callCreateProfile;

        private readonly gaxgrpc::ApiCall<CreateOfflineProfileRequest, Profile> _callCreateOfflineProfile;

        private readonly gaxgrpc::ApiCall<UpdateProfileRequest, Profile> _callUpdateProfile;

        /// <summary>
        /// Constructs a client wrapper for the ProfilerService service, with the specified gRPC client and settings.
        /// </summary>
        /// <param name="grpcClient">The underlying gRPC client.</param>
        /// <param name="settings">The base <see cref="ProfilerServiceSettings"/> used within this client.</param>
        public ProfilerServiceClientImpl(ProfilerService.ProfilerServiceClient grpcClient, ProfilerServiceSettings settings)
        {
            GrpcClient = grpcClient;
            ProfilerServiceSettings effectiveSettings = settings ?? ProfilerServiceSettings.GetDefault();
            gaxgrpc::ClientHelper clientHelper = new gaxgrpc::ClientHelper(effectiveSettings);
            // Each ApiCall pairs the async/sync gRPC methods with the per-RPC CallSettings and
            // attaches the routing header param; Modify_* hooks allow partial-class customization.
            _callCreateProfile = clientHelper.BuildApiCall<CreateProfileRequest, Profile>(grpcClient.CreateProfileAsync, grpcClient.CreateProfile, effectiveSettings.CreateProfileSettings).WithGoogleRequestParam("parent", request => request.Parent);
            Modify_ApiCall(ref _callCreateProfile);
            Modify_CreateProfileApiCall(ref _callCreateProfile);
            _callCreateOfflineProfile = clientHelper.BuildApiCall<CreateOfflineProfileRequest, Profile>(grpcClient.CreateOfflineProfileAsync, grpcClient.CreateOfflineProfile, effectiveSettings.CreateOfflineProfileSettings).WithGoogleRequestParam("parent", request => request.Parent);
            Modify_ApiCall(ref _callCreateOfflineProfile);
            Modify_CreateOfflineProfileApiCall(ref _callCreateOfflineProfile);
            _callUpdateProfile = clientHelper.BuildApiCall<UpdateProfileRequest, Profile>(grpcClient.UpdateProfileAsync, grpcClient.UpdateProfile, effectiveSettings.UpdateProfileSettings).WithGoogleRequestParam("profile.name", request => request.Profile?.Name);
            Modify_ApiCall(ref _callUpdateProfile);
            Modify_UpdateProfileApiCall(ref _callUpdateProfile);
            OnConstruction(grpcClient, effectiveSettings, clientHelper);
        }

        partial void Modify_ApiCall<TRequest, TResponse>(ref gaxgrpc::ApiCall<TRequest, TResponse> call) where TRequest : class, proto::IMessage<TRequest> where TResponse : class, proto::IMessage<TResponse>;

        partial void Modify_CreateProfileApiCall(ref gaxgrpc::ApiCall<CreateProfileRequest, Profile> call);

        partial void Modify_CreateOfflineProfileApiCall(ref gaxgrpc::ApiCall<CreateOfflineProfileRequest, Profile> call);

        partial void Modify_UpdateProfileApiCall(ref gaxgrpc::ApiCall<UpdateProfileRequest, Profile> call);

        partial void OnConstruction(ProfilerService.ProfilerServiceClient grpcClient, ProfilerServiceSettings effectiveSettings, gaxgrpc::ClientHelper clientHelper);

        /// <summary>The underlying gRPC ProfilerService client</summary>
        public override ProfilerService.ProfilerServiceClient GrpcClient { get; }

        partial void Modify_CreateProfileRequest(ref CreateProfileRequest request, ref gaxgrpc::CallSettings settings);

        partial void Modify_CreateOfflineProfileRequest(ref CreateOfflineProfileRequest request, ref gaxgrpc::CallSettings settings);

        partial void Modify_UpdateProfileRequest(ref UpdateProfileRequest request, ref gaxgrpc::CallSettings settings);

        /// <summary>
        /// CreateProfile creates a new profile resource in the online mode.
        ///
        /// The server ensures that the new profiles are created at a constant rate per
        /// deployment, so the creation request may hang for some time until the next
        /// profile session is available.
        ///
        /// The request may fail with ABORTED error if the creation is not available
        /// within ~1m, the response will indicate the duration of the backoff the
        /// client should take before attempting creating a profile again. The backoff
        /// duration is returned in google.rpc.RetryInfo extension on the response
        /// status. To a gRPC client, the extension will be return as a
        /// binary-serialized proto in the trailing metadata item named
        /// "google.rpc.retryinfo-bin".
        /// </summary>
        /// <param name="request">The request object containing all of the parameters for the API call.</param>
        /// <param name="callSettings">If not null, applies overrides to this RPC call.</param>
        /// <returns>The RPC response.</returns>
        public override Profile CreateProfile(CreateProfileRequest request, gaxgrpc::CallSettings callSettings = null)
        {
            Modify_CreateProfileRequest(ref request, ref callSettings);
            return _callCreateProfile.Sync(request, callSettings);
        }

        /// <summary>
        /// CreateProfile creates a new profile resource in the online mode.
        ///
        /// The server ensures that the new profiles are created at a constant rate per
        /// deployment, so the creation request may hang for some time until the next
        /// profile session is available.
        ///
        /// The request may fail with ABORTED error if the creation is not available
        /// within ~1m, the response will indicate the duration of the backoff the
        /// client should take before attempting creating a profile again. The backoff
        /// duration is returned in google.rpc.RetryInfo extension on the response
        /// status. To a gRPC client, the extension will be return as a
        /// binary-serialized proto in the trailing metadata item named
        /// "google.rpc.retryinfo-bin".
        /// </summary>
        /// <param name="request">The request object containing all of the parameters for the API call.</param>
        /// <param name="callSettings">If not null, applies overrides to this RPC call.</param>
        /// <returns>A Task containing the RPC response.</returns>
        public override stt::Task<Profile> CreateProfileAsync(CreateProfileRequest request, gaxgrpc::CallSettings callSettings = null)
        {
            Modify_CreateProfileRequest(ref request, ref callSettings);
            return _callCreateProfile.Async(request, callSettings);
        }

        /// <summary>
        /// CreateOfflineProfile creates a new profile resource in the offline mode.
        /// The client provides the profile to create along with the profile bytes, the
        /// server records it.
        /// </summary>
        /// <param name="request">The request object containing all of the parameters for the API call.</param>
        /// <param name="callSettings">If not null, applies overrides to this RPC call.</param>
        /// <returns>The RPC response.</returns>
        public override Profile CreateOfflineProfile(CreateOfflineProfileRequest request, gaxgrpc::CallSettings callSettings = null)
        {
            Modify_CreateOfflineProfileRequest(ref request, ref callSettings);
            return _callCreateOfflineProfile.Sync(request, callSettings);
        }

        /// <summary>
        /// CreateOfflineProfile creates a new profile resource in the offline mode.
        /// The client provides the profile to create along with the profile bytes, the
        /// server records it.
        /// </summary>
        /// <param name="request">The request object containing all of the parameters for the API call.</param>
        /// <param name="callSettings">If not null, applies overrides to this RPC call.</param>
        /// <returns>A Task containing the RPC response.</returns>
        public override stt::Task<Profile> CreateOfflineProfileAsync(CreateOfflineProfileRequest request, gaxgrpc::CallSettings callSettings = null)
        {
            Modify_CreateOfflineProfileRequest(ref request, ref callSettings);
            return _callCreateOfflineProfile.Async(request, callSettings);
        }

        /// <summary>
        /// UpdateProfile updates the profile bytes and labels on the profile resource
        /// created in the online mode. Updating the bytes for profiles created in the
        /// offline mode is currently not supported: the profile content must be
        /// provided at the time of the profile creation.
        /// </summary>
        /// <param name="request">The request object containing all of the parameters for the API call.</param>
        /// <param name="callSettings">If not null, applies overrides to this RPC call.</param>
        /// <returns>The RPC response.</returns>
        public override Profile UpdateProfile(UpdateProfileRequest request, gaxgrpc::CallSettings callSettings = null)
        {
            Modify_UpdateProfileRequest(ref request, ref callSettings);
            return _callUpdateProfile.Sync(request, callSettings);
        }

        /// <summary>
        /// UpdateProfile updates the profile bytes and labels on the profile resource
        /// created in the online mode. Updating the bytes for profiles created in the
        /// offline mode is currently not supported: the profile content must be
        /// provided at the time of the profile creation.
        /// </summary>
        /// <param name="request">The request object containing all of the parameters for the API call.</param>
        /// <param name="callSettings">If not null, applies overrides to this RPC call.</param>
        /// <returns>A Task containing the RPC response.</returns>
        public override stt::Task<Profile> UpdateProfileAsync(UpdateProfileRequest request, gaxgrpc::CallSettings callSettings = null)
        {
            Modify_UpdateProfileRequest(ref request, ref callSettings);
            return _callUpdateProfile.Async(request, callSettings);
        }
    }
}
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using Microsoft.Xna.Framework.Graphics;
using FlatRedBall.Math;
using FlatRedBall.Graphics.Texture;
using FlatRedBall.Content;
#if FRB_XNA
using Vector3 = Microsoft.Xna.Framework.Vector3;
#endif
#if XNA4
using Color = Microsoft.Xna.Framework.Color;
#endif
using Microsoft.Xna.Framework;

namespace FlatRedBall.AI.LineOfSight
{
    /// <summary>
    /// Represents a 2D grid of cells which identify what can be seen given a list of IViewers.
    /// This supports line of sight.
    /// </summary>
    public class VisibilityGrid
    {
        #region Fields

        // 1 = wall/blocker at [x][y], 0 = clear. Jagged arrays indexed [x][y].
        byte[][] mBlockedTiles;
        // 0 = hidden; bit 0 set by line/area reveals; bit 1 set by BleedDirectlyVisibleToWalls.
        byte[][] mRevealedTiles;
        int mNumberOfXTiles;
        int mNumberOfYTiles;
        // Debug visualization of the grid (one pixel per tile) and the Sprite that displays it.
        ImageData mImageData;
        Sprite mSprite;
        // World-space position of tile [0][0]; all index math is relative to these seeds.
        float mXSeed;
        float mYSeed;
        float mGridSpacing;
        bool mVisible;
        // Each viewer keeps a small local grid (see ViewerInformation) that is composited into this one.
        Dictionary<IViewer, ViewerInformation> mViewers = new Dictionary<IViewer, ViewerInformation>();
        List<Rectangle> mViewerUpdateAreas = new List<Rectangle>();
        float mVisibleDisplayZ;

        #region Fog Of War
        // Fog pixel data at (tiles * mFogFactor) resolution, plus a per-tile gradient scratch buffer.
        ImageData mFogImageData;
        ImageData mFogGradiantData;
        Texture2D mFogTexture;
        // Fog pixels per tile; 0 disables fog of war entirely.
        int mFogFactor;
        #endregion

        #endregion

        #region Properties

        public string Name { get; set; }

        // Content manager used to register the fog texture and the debug-display texture.
        public string ContentManagerName { get; set; }

        // Debug-display colors for the four tile states (blocked/clear x hidden/revealed).
        public Color HiddenBlockedColor { get; set; }
        public Color RevealedBlockedColor { get; set; }
        public Color HiddenClearedColor { get; set; }
        public Color RevealedClearedColor { get; set; }

        /// <summary>
        /// Whether the debug display Sprite is shown. Toggling rebuilds or removes the Sprite.
        /// </summary>
        public bool Visible
        {
            get { return mVisible; }
            set
            {
                if (mVisible != value)
                {
                    mVisible = value;
                    UpdateDisplay();
                }
            }
        }

        public int NumberOfXTiles
        {
            get { return mNumberOfXTiles; }
        }

        public int NumberOfYTiles
        {
            get { return mNumberOfYTiles; }
        }

        /// <summary>
        /// Z position of the debug display Sprite.
        /// </summary>
        public float Z
        {
            get { return mVisibleDisplayZ; }
            set
            {
                mVisibleDisplayZ = value;
                if (mSprite != null)
                {
                    mSprite.Z = mVisibleDisplayZ;
                }
            }
        }

        public ImageData VisibilityImage
        {
            get { return mImageData; }
        }

        #region Fog of War

        public Color FogColor { get; set; }

        /// <summary>
        /// Fog pixels per tile. Setting a new value reallocates the fog buffers and
        /// (when ContentManagerName is set) creates a new fog Texture2D.
        /// </summary>
        public int FogResolution
        {
            get { return mFogFactor; }
            set
            {
                int newFogFactor = value;
                if (newFogFactor != mFogFactor)
                {
                    int fogWidth = NumberOfXTiles * newFogFactor;
                    int fogHeight = NumberOfYTiles * newFogFactor;
                    mFogImageData = new ImageData(fogWidth, fogHeight);
                    mFogGradiantData = new ImageData(newFogFactor, newFogFactor);
                    mFogImageData.Fill(FogColor);
                    // NOTE(review): if ContentManagerName is null/empty, mFogTexture stays null even
                    // though mFogFactor becomes non-zero — later code guards with "mFogTexture != null",
                    // but Destroy() does not. Confirm intended.
                    if (string.IsNullOrEmpty(ContentManagerName) == false)
                    {
                        ContentManager contentManager = FlatRedBallServices.GetContentManagerByName(ContentManagerName);
                        // "FogOfWareTexture_" spelling is part of the asset key; left as-is.
                        string assetName = "FogOfWareTexture_" + newFogFactor.ToString();
                        mFogTexture = new Texture2D(FlatRedBallServices.GraphicsDevice, fogWidth, fogHeight);
                        mFogTexture.Name = assetName;
                        contentManager.AddDisposable(assetName, mFogTexture);
                    }
                }
                mFogFactor = newFogFactor;
            }
        }

        // Minimum fog alpha/brightness (0-255) applied to areas that were revealed then hidden again.
        public byte FogShade { get; set; }

        public Texture2D FogTexture
        {
            get { return mFogTexture; }
        }

        #endregion

        #endregion

        #region Methods

        #region Constructor

        /// <summary>
        /// Instantiates a new VisibilityGrid.
        /// </summary>
        /// <param name="xSeed">The absolute x coordinate seed value.</param>
        /// <param name="ySeed">The absolute y coordinate seed value.</param>
        /// <param name="gridSpacing">The amount of distance in world coordinates between rows and columns.</param>
        /// <param name="numberOfXTiles">Number of tiles wide (on the X axis)</param>
        /// <param name="numberOfYTiles">Number of tiles heigh (on the Y axis)</param>
        public VisibilityGrid(float xSeed, float ySeed, float gridSpacing, int numberOfXTiles, int numberOfYTiles)
        {
            HiddenClearedColor = Color.DarkBlue;
            HiddenBlockedColor = Color.DarkRed;
            RevealedClearedColor = Color.LightBlue;
            RevealedBlockedColor = Color.Pink;

            mBlockedTiles = new byte[numberOfXTiles][];
            mRevealedTiles = new byte[numberOfXTiles][];

            mNumberOfXTiles = numberOfXTiles;
            mNumberOfYTiles = numberOfYTiles;

            mXSeed = xSeed;
            mYSeed = ySeed;
            mGridSpacing = gridSpacing;

            // Do an initial loop to create the arrays
            for (int x = 0; x < numberOfXTiles; x++)
            {
                mBlockedTiles[x] = new byte[numberOfYTiles];
                mRevealedTiles[x] = new byte[numberOfYTiles];
            }
        }

        #endregion

        #region Public Methods

        /// <summary>
        /// Checks if any viewers have changed since last Update, and if so it updates the grid.
        /// </summary>
        /// <returns>Whether anything has changed.</returns>
        public bool Activity()
        {
            bool hasAnythingChanged = false;

            int xIndex;
            int yIndex;

            foreach (KeyValuePair<IViewer, ViewerInformation> kvp in this.mViewers)
            {
                IViewer viewer = kvp.Key;
                ViewerInformation information = kvp.Value;

                WorldToIndex(viewer.X, viewer.Y, out xIndex, out yIndex);

                int radiusAsInt = MathFunctions.RoundToInt(viewer.WorldViewRadius / mGridSpacing);

                // A viewer only dirties the grid when it has moved to a different tile.
                if (xIndex != information.LastX || yIndex != information.LastY)
                {
                    hasAnythingChanged = true;

                    information.LastX = xIndex;
                    information.LastY = yIndex;

                    // Only recompute the local grid when the viewer's full radius fits inside this grid.
                    if (xIndex >= radiusAsInt && yIndex >= radiusAsInt &&
                        xIndex + radiusAsInt < mNumberOfXTiles && yIndex + radiusAsInt < mNumberOfYTiles)
                    {
                        UpdateViewersLocalVisibilityGrid(viewer, information);
                    }
                }
            }

            if (hasAnythingChanged)
            {
                UpdateRevealedFromViewers();
            }

            return hasAnythingChanged;
        }

        /// <summary>
        /// Destroys the VisibilityGrid - specifically disposing its internal fog texture.
        /// </summary>
        public void Destroy()
        {
            // NOTE(review): mFogTexture can be null when FogResolution was set without a
            // ContentManagerName (see FogResolution setter) — this would then throw. Confirm.
            if (mFogFactor > 0)
            {
                mFogTexture.Dispose();
                mFogTexture = null;
            }
        }

        /// <summary>
        /// Adds an IViewer to this grid.
        /// </summary>
        /// <param name="viewerToAdd">The viewer to add.</param>
        public void AddViewer(IViewer viewerToAdd)
        {
            // Local grid is a square of (2*radius + 1) tiles centered on the viewer.
            int localGridDimension = MathFunctions.RoundToInt(viewerToAdd.WorldViewRadius / mGridSpacing) * 2 + 1;

            // Seed of the local grid is (0,0) here; it gets re-seeded to the viewer's
            // position in UpdateViewersLocalVisibilityGrid.
            VisibilityGrid localVisibilityGrid = new VisibilityGrid(0, 0, mGridSpacing, localGridDimension, localGridDimension);

            ViewerInformation viewerInformation = new ViewerInformation();
            viewerInformation.LocalVisibilityGrid = localVisibilityGrid;

            // Propagate fog settings so the local grid allocates matching fog buffers.
            if (mFogFactor > 0)
            {
                localVisibilityGrid.FogColor = FogColor;
                localVisibilityGrid.FogShade = FogShade;
                localVisibilityGrid.FogResolution = FogResolution;
            }

            int xIndex;
            int yIndex;

            mViewers.Add(viewerToAdd, viewerInformation);

            WorldToIndex(viewerToAdd.X, viewerToAdd.Y, out xIndex, out yIndex);

            int radiusAsInt = MathFunctions.RoundToInt(viewerToAdd.WorldViewRadius / mGridSpacing);

            // Only compute the viewer's local visibility when its radius fully fits in the grid.
            if (xIndex >= radiusAsInt && yIndex >= radiusAsInt &&
                xIndex + radiusAsInt < mNumberOfXTiles && yIndex + radiusAsInt < mNumberOfYTiles)
            {
                UpdateViewersLocalVisibilityGrid(viewerToAdd, viewerInformation);
            }

            UpdateRevealedFromViewers();
        }

        /// <summary>
        /// Makes walls visible if they are adjacent to visible non-walls.
        /// </summary>
        public void BleedDirectlyVisibleToWalls()
        {
            int y;

            // We're not going to go to the very edges to avoid if statements (for speed reasons)
            for (int x = 1; x < mNumberOfXTiles - 1; x++)
            {
                for (y = 1; y < mNumberOfYTiles - 1; y++)
                {
                    // Non-zero (value 2) only when this tile is blocked AND at least one of its
                    // 4 neighbors has bit 0 (directly revealed) set; bit 1 marks "bled" visibility.
                    byte valueToSet = (byte)((mBlockedTiles[x][y] *
                        ((mRevealedTiles[x][y + 1] & 1) |
                         (mRevealedTiles[x + 1][y] & 1) |
                         (mRevealedTiles[x][y - 1] & 1) |
                         (mRevealedTiles[x - 1][y] & 1))) << 1);

                    mRevealedTiles[x][y] = (byte)(mRevealedTiles[x][y] | valueToSet);
                }
            }
        }

        /// <summary>
        /// Adds a block (or wall) at a given world location.
/// </summary> /// <param name="worldX">The world coordinate X.</param> /// <param name="worldY">The world coordinate Y.</param> public void BlockWorld(float worldX, float worldY) { int xIndex; int yIndex; WorldToIndex(worldX, worldY, out xIndex, out yIndex); mBlockedTiles[xIndex][yIndex] = 1; } /// <summary> /// Unblocks a tile that was previously marked as a world blocker /// </summary> /// <param name="X">The X coordinate of the tile</param> /// <param name="Y">The Y coordinate of the tile</param> public void UnBlockWorld( float X, float Y ) { int xIndex, yIndex; WorldToIndex( X, Y, out xIndex, out yIndex ); mBlockedTiles[xIndex][yIndex] = 0; } /// <summary> /// Clears all blocked tiles. /// </summary> public void ClearBlockedTiles() { int y; for (int x = 0; x < mNumberOfXTiles; x++) { for (y = 0; y < mNumberOfYTiles; y++) { mBlockedTiles[x][y] = 0; } } } /// <summary> /// Returns whether a given world position is in view of a given viewer. /// </summary> /// <param name="viewer">The viewer to check visibility for.</param> /// <param name="targetPosition">The world coordinates.</param> /// <returns>Whether in view.</returns> public bool IsPositionInDirectView(IViewer viewer, ref Vector3 targetPosition) { if (mViewers.ContainsKey(viewer)) { VisibilityGrid grid = mViewers[viewer].LocalVisibilityGrid; return grid.IsRevealedWorld(targetPosition.X, targetPosition.Y); } #if DEBUG else { throw new InvalidOperationException("Viewer does not exist in Visbility grid"); } #else return false; #endif } /// <summary> /// Returns whether a given world coordinate is relealed. 
        /// </summary>
        /// <param name="worldX">The world x coordinate.</param>
        /// <param name="worldY">The world y coordinate.</param>
        /// <returns>Whether the world coordinate is revealed or not.</returns>
        public bool IsRevealedWorld(float worldX, float worldY)
        {
            int xIndex;
            int yIndex;

            WorldToIndex(worldX, worldY, out xIndex, out yIndex);

            // NOTE(review): "== 1" reports false for tiles set to 2 or 3 by
            // BleedDirectlyVisibleToWalls (bit 1). Confirm whether bled walls should count here.
            return mRevealedTiles[xIndex][yIndex] == 1;
        }

        /// <summary>
        /// Returns whether a given X and Y index is revealed.
        /// </summary>
        /// <param name="xIndex">The x index</param>
        /// <param name="yIndex">The y index</param>
        /// <returns>Whether the location specified by the x/y index is revealed.</returns>
        public bool IsRevealed(int xIndex, int yIndex)
        {
            // NOTE(review): same "== 1" vs bit-flag concern as IsRevealedWorld.
            return mRevealedTiles[xIndex][yIndex] == 1;
        }

        /// <summary>
        /// Hides the entire grid (makes it not revealed).
        /// </summary>
        public void MakeAllHidden()
        {
            Color shadedColor =
#if XNA4
                Color.FromNonPremultiplied(FogColor.R, FogColor.G, FogColor.B, FogShade);
#else
                new Color(FogColor, FogShade);
#endif
            int y;
            for (int x = 0; x < mNumberOfXTiles; x++)
            {
                for (y = 0; y < mNumberOfYTiles; y++)
                {
                    if (mFogFactor > 0)
                    {
                        // Darken previously-revealed fog pixels back down to at most FogShade.
                        for (int fogX = 0; fogX < mFogFactor; fogX++)
                        {
                            for (int fogY = 0; fogY < mFogFactor; fogY++)
                            {
                                shadedColor = mFogImageData.GetPixelColor(x * mFogFactor + fogX, y * mFogFactor + fogY);
                                if (shadedColor.R > FogShade)
                                    shadedColor.R = shadedColor.G = shadedColor.B = FogShade;
                                mFogImageData.SetPixel(x * mFogFactor + fogX, y * mFogFactor + fogY, shadedColor);
                            }
                        }
                    }
                    mRevealedTiles[x][y] = 0;
                }
            }
        }

        /// <summary>
        /// Reveals the entire grid.
        /// </summary>
        public void MakeAllRevealed()
        {
            int y;
            for (int x = 0; x < mNumberOfXTiles; x++)
            {
                for (y = 0; y < mNumberOfYTiles; y++)
                {
                    mRevealedTiles[x][y] = 1;
                }
            }
        }

        /// <summary>
        /// Removes a viewer.
        /// </summary>
        /// <param name="viewerToRemove">The argument IViewer to remove.</param>
        public void RemoveViewer(IViewer viewerToRemove)
        {
            mViewers.Remove(viewerToRemove);

            // This has definitely changed, so we gotta refresh everything.
            // Fortunately there's no re-calculation here, just paste everything
            // down. The newly-removed guy will not be placed.
            UpdateRevealedFromViewers();
        }

        /// <summary>
        /// Reveals a circle around the given world coordinate using a given radius
        /// </summary>
        /// <param name="worldX">The world coordinate X</param>
        /// <param name="worldY">The world coordinate Y</param>
        /// <param name="worldRadius">The radius in world units</param>
        public void RevealCircleWorld(float worldX, float worldY, float worldRadius)
        {
            int xIndex;
            int yIndex;

            WorldToIndex(worldX, worldY, out xIndex, out yIndex);

            int tileRadius = MathFunctions.RoundToInt(worldRadius / mGridSpacing);

            RevealCircle(xIndex, yIndex, tileRadius);
        }

        /// <summary>
        /// Reveals a filled circle of tiles around (xIndex, yIndex) by casting RevealLine
        /// rays to each perimeter point of a midpoint-algorithm circle, so walls occlude
        /// tiles behind them.
        /// </summary>
        /// <param name="xIndex">Center tile x index.</param>
        /// <param name="yIndex">Center tile y index.</param>
        /// <param name="tileRadius">Radius in tiles.</param>
        public void RevealCircle(int xIndex, int yIndex, int tileRadius)
        {
            int f = 1 - tileRadius;
            int ddF_x = 1;
            int ddF_y = -2 * tileRadius;
            int x = 0;
            int y = tileRadius;

            // The four axis-aligned extremes.
            RevealLine(xIndex, yIndex, xIndex, yIndex + tileRadius);
            RevealLine(xIndex, yIndex, xIndex, yIndex - tileRadius);
            RevealLine(xIndex, yIndex, xIndex + tileRadius, yIndex);
            RevealLine(xIndex, yIndex, xIndex - tileRadius, yIndex);

            bool didYChange = false;

            while (x < y)
            {
                didYChange = false;

                // ddF_x == 2 * x + 1;
                // ddF_y == -2 * y;
                // f == x*x + y*y - radius*radius + 2*x - y + 1;
                if (f >= 0)
                {
                    y--;
                    ddF_y += 2;
                    f += ddF_y;
                    didYChange = true;
                }
                x++;
                ddF_x += 2;
                f += ddF_x;

                // Cast to all 8 octant-symmetric perimeter points.
                RevealLine(xIndex, yIndex, xIndex + x, yIndex + y);
                RevealLine(xIndex, yIndex, xIndex - x, yIndex + y);
                RevealLine(xIndex, yIndex, xIndex + x, yIndex - y);
                RevealLine(xIndex, yIndex, xIndex - x, yIndex - y);
                RevealLine(xIndex, yIndex, xIndex + y, yIndex + x);
                RevealLine(xIndex, yIndex, xIndex - y, yIndex + x);
                RevealLine(xIndex, yIndex, xIndex + y, yIndex - x);
                RevealLine(xIndex, yIndex, xIndex - y, yIndex - x);

                // When y stepped, also cast at (x-1) to avoid gaps between perimeter points.
                if (didYChange)
                {
                    x--;
                    RevealLine(xIndex, yIndex, xIndex + x, yIndex + y);
                    RevealLine(xIndex, yIndex, xIndex - x, yIndex + y);
                    RevealLine(xIndex, yIndex, xIndex + x, yIndex - y);
                    RevealLine(xIndex, yIndex, xIndex - x, yIndex - y);
                    RevealLine(xIndex, yIndex, xIndex + y, yIndex + x);
                    RevealLine(xIndex, yIndex, xIndex - y, yIndex + x);
                    RevealLine(xIndex, yIndex, xIndex + y, yIndex - x);
                    RevealLine(xIndex, yIndex, xIndex - y, yIndex - x);
                    x++;
                }
            }
        }

        /// <summary>
        /// Reveals a line between two world-coordinate points (converted to tile indices).
        /// </summary>
        public void RevealLineWorld(float worldX1, float worldY1, float worldX2, float worldY2)
        {
            int xIndex1;
            int yIndex1;
            WorldToIndex(worldX1, worldY1, out xIndex1, out yIndex1);

            int xIndex2;
            int yIndex2;
            WorldToIndex(worldX2, worldY2, out xIndex2, out yIndex2);

            RevealLine(xIndex1, yIndex1, xIndex2, yIndex2);
        }

        /// <summary>
        /// Reveals tiles along a Bresenham line from (x0,y0) toward (x1,y1), stopping at
        /// (and including) the first blocked tile hit.
        /// </summary>
        public void RevealLine(int x0, int y0, int x1, int y1)
        {
            int dy = y1 - y0;
            int dx = x1 - x0;
            int stepx, stepy;

            if (dy < 0) { dy = -dy; stepy = -1; }
            else { stepy = 1; }
            if (dx < 0) { dx = -dx; stepx = -1; }
            else { stepx = 1; }
            dy <<= 1; // dy is now 2*dy
            dx <<= 1; // dx is now 2*dx

            mRevealedTiles[x0][y0] = 1;
            // A blocked start tile stops the ray immediately (but is itself revealed).
            if (mBlockedTiles[x0][y0] > 0)
            {
                return;
            }

            if (dx > dy)
            {
                // X-major traversal.
                int fraction = dy - (dx >> 1); // same as 2*dy - dx
                while (x0 != x1)
                {
                    if (fraction >= 0)
                    {
                        y0 += stepy;
                        fraction -= dx; // same as fraction -= 2*dx
                    }
                    x0 += stepx;
                    fraction += dy; // same as fraction -= 2*dy

                    // NOTE(review): bounds checking was deliberately commented out here —
                    // callers must stay within the grid (RevealCircle is only invoked when
                    // the viewer's radius fits; see callers).
                    //if (x0 > -1 && y0 > -1 &&
                    //    x0 < mNumberOfXTiles && y0 < mNumberOfYTiles)
                    {
                        mRevealedTiles[x0][y0] = 1;
                        if (mBlockedTiles[x0][y0] > 0)
                        {
                            return;
                        }
                    }
                }
            }
            else
            {
                // Y-major traversal.
                int fraction = dx - (dy >> 1);
                while (y0 != y1)
                {
                    if (fraction >= 0)
                    {
                        x0 += stepx;
                        fraction -= dy;
                    }
                    y0 += stepy;
                    fraction += dx;

                    //if (x0 > -1 && y0 > -1 &&
                    //    x0 < mNumberOfXTiles && y0 < mNumberOfYTiles)
                    {
                        mRevealedTiles[x0][y0] = 1;
                        if (mBlockedTiles[x0][y0] > 0)
                        {
                            return;
                        }
                    }
                }
            }
        }

        /// <summary>
        /// Reveals a rectangular area given in world coordinates (no occlusion).
        /// </summary>
        public void RevealAreaWorld(float worldX1, float worldY1, float worldX2, float worldY2)
        {
            int xIndex1;
            int yIndex1;
            WorldToIndex(worldX1, worldY1, out xIndex1, out yIndex1);

            int xIndex2;
            int yIndex2;
            WorldToIndex(worldX2, worldY2, out xIndex2, out yIndex2);

            RevealArea(xIndex1, yIndex1, xIndex2, yIndex2);
        }

        /// <summary>
        /// Reveals the half-open tile rectangle [x0,x1) x [y0,y1) and brightens the
        /// corresponding fog pixels, then re-uploads the fog texture if present.
        /// </summary>
        public void RevealArea(int x0, int y0, int x1, int y1)
        {
            int x, fogX, fogY;

            Color revealedColor =
#if XNA4
                Color.FromNonPremultiplied(127, 127, 127, 255);
#else
                new Color(127, 127, 127, 255);
#endif

            for (int y = y0; y < y1; y++)
            {
                for (x = x0; x < x1; x++)
                {
                    mRevealedTiles[x][y] = 1;

                    if (mFogFactor != 0)
                    {
                        for (fogY = 0; fogY < mFogFactor; fogY++)
                        {
                            for (fogX = 0; fogX < mFogFactor; fogX++)
                            {
                                mFogImageData.SetPixel((x) * mFogFactor + fogX, (y) * mFogFactor + fogY, revealedColor);
                            }
                        }
                    }
                }
            }

            if (mFogFactor > 0 && mFogTexture != null)
            {
                mFogImageData.ToTexture2D(mFogTexture);
            }
        }

        // Counter used to give each debug-display texture a unique content-manager key.
        static int NumberCreated = 0;

        /// <summary>
        /// Creates or removes the debug display Sprite according to Visible, and refreshes
        /// its texture when visible.
        /// </summary>
        public void UpdateDisplay()
        {
            #region Create the Sprites if needed

            if (mVisible && (mSprite == null || mImageData == null))
            {
                mSprite = SpriteManager.AddSprite((Texture2D)null);
                // mSprite.TextureAddressMode = TextureAddressMode.Clamp; // required on REACH if we're not a power of 2
                mSprite.X = mXSeed + .5f * (mNumberOfXTiles - 1) * mGridSpacing;
                mSprite.Y = mYSeed + .5f * (mNumberOfYTiles - 1) * mGridSpacing;
                mSprite.Z = this.mVisibleDisplayZ;
                // Image row 0 is the bottom tile row, so flip vertically for display.
                mSprite.FlipVertical = true;
                mSprite.ScaleX = (mNumberOfXTiles) * mGridSpacing / 2.0f;
                mSprite.ScaleY = (mNumberOfYTiles) * mGridSpacing / 2.0f;

                mImageData = new ImageData(mNumberOfXTiles, mNumberOfYTiles);
            }

            #endregion

            if (Visible)
            {
                ForceUpdateImageData();
            }
            else
            {
                if (mSprite != null)
                {
                    SpriteManager.RemoveSprite(mSprite);
                    mSprite = null;
                }
            }
        }

        /// <summary>
        /// Rebuilds the debug display texture (one pixel per tile, colored by
        /// blocked/revealed state) and swaps it onto the display Sprite.
        /// </summary>
        public void ForceUpdateImageData()
        {
            #region Update the texture

            if (mImageData != null)
            {
                for (int x = 0; x < mNumberOfXTiles; x++)
                {
                    for (int y = 0; y < mNumberOfYTiles; y++)
                    {
                        Color colorToSet;

                        if (mBlockedTiles[x][y] == 0)
                        {
                            if (mRevealedTiles[x][y] == 0)
                            {
                                colorToSet = HiddenClearedColor;
                            }
                            else
                            {
                                colorToSet = RevealedClearedColor;
                            }
                        }
                        else
                        {
                            if (mRevealedTiles[x][y] == 0)
                            {
                                colorToSet = HiddenBlockedColor;
                            }
                            else
                            {
                                colorToSet = RevealedBlockedColor;
                            }
                        }

                        mImageData.SetPixel(x, y, colorToSet);
                    }
                }

                ContentManager contentManager =
                    FlatRedBallServices.GetContentManagerByName(this.ContentManagerName);

                if (mSprite != null)
                {
                    // Dispose the old texture before replacing it to avoid leaking GPU memory.
                    if (mSprite.Texture != null)
                    {
                        contentManager.UnloadAsset(mSprite.Texture);
                        mSprite.Texture.Dispose();
                    }
                    bool generateMipmaps = false;
                    mSprite.Texture = mImageData.ToTexture2D(generateMipmaps, FlatRedBallServices.GraphicsDevice);
                    contentManager.AddDisposable("VisibilityGridTexture #" + NumberCreated, mSprite.Texture);
                    NumberCreated++;
                }
            }

            #endregion
        }

        /// <summary>
        /// Recomputes every viewer's local visibility grid (regardless of whether the
        /// viewer moved) and composites the results into this grid.
        /// </summary>
        public void ForceUpdateVisibilityGrid()
        {
            int xIndex;
            int yIndex;

            foreach (KeyValuePair<IViewer, ViewerInformation> kvp in this.mViewers)
            {
                IViewer viewer = kvp.Key;
                ViewerInformation information = kvp.Value;

                WorldToIndex(viewer.X, viewer.Y, out xIndex, out yIndex);

                int radiusAsInt = MathFunctions.RoundToInt(viewer.WorldViewRadius / mGridSpacing);

                if (xIndex >= radiusAsInt && yIndex >= radiusAsInt &&
                    xIndex + radiusAsInt < mNumberOfXTiles && yIndex + radiusAsInt < mNumberOfYTiles)
                {
                    UpdateViewersLocalVisibilityGrid(viewer, information);
                }
            }

            UpdateRevealedFromViewers();
        }

        public void UpdateFog()
        {
            UpdateFog(false);
        }

        /// <summary>
        /// Currently a no-op: the implementation below has been disabled with an early
        /// return and left commented out (see the question to Eric).
        /// </summary>
        public void UpdateFog(bool fullUpdate)
        {
            // For Eric:
            // This return
            // statement is
            // the first thing
            // in this method. Are
            // we no longer using it?
            return;
            /*
            TimeManager.TimeSection("Start UpdateFog");
            if (FogResolution > 0)
            {
                Color ShadedColor =
#if XNA4
                    Color.FromNonPremultiplied(FogShade, FogShade, FogShade, 255);
#else
                    new Color(FogShade, FogShade, FogShade, 255);
#endif
                byte shadedValue;
                float invertedFogFactor = 1.0f / mFogFactor;
                if(fullUpdate == false)
                {
                    int x; int y; int xIndex; int yIndex; int radiusAsInt;
                    float fogXIndex; float fogYIndex; int fogX; int fogY;
                    int fogImageX; int fogImageY; Color color;
                    float shift = mFogFactor / 2.0f;
                    int borderToInclude = 1;
                    IViewer viewer; ViewerInformation info; VisibilityGrid gridToPlace;
                    int xOffset; int yOffset;
                    foreach (KeyValuePair<IViewer, ViewerInformation> kvp in mViewers)
                    {
                        viewer = kvp.Key;
                        WorldToIndex(viewer.X, viewer.Y, out xIndex, out yIndex);
                        radiusAsInt = MathFunctions.RoundToInt(viewer.WorldViewRadius / mGridSpacing)+2;
                        if (xIndex >= radiusAsInt && yIndex >= radiusAsInt &&
                            xIndex + radiusAsInt < mNumberOfXTiles && yIndex + radiusAsInt < mNumberOfYTiles)
                        {
                            info = kvp.Value;
                            gridToPlace = info.LocalVisibilityGrid;
                            xOffset = MathFunctions.RoundToInt((gridToPlace.mXSeed - mXSeed) / mGridSpacing);
                            yOffset = MathFunctions.RoundToInt((gridToPlace.mYSeed - mYSeed) / mGridSpacing);
                            for (x = -borderToInclude; x < gridToPlace.mNumberOfXTiles + borderToInclude; x++)
                            {
                                for (y = -borderToInclude; y < gridToPlace.mNumberOfYTiles + borderToInclude; y++)
                                {
                                    fogXIndex = (x + xOffset);
                                    fogImageX = (int)(fogXIndex*FogResolution);
                                    fogYIndex = (y + yOffset);
                                    fogImageY = (int)(fogYIndex*FogResolution);
                                    for (fogX = 0; fogX < mFogFactor; fogX++)
                                    {
                                        for (fogY = 0; fogY < mFogFactor; fogY++)
                                        {
                                            color = mFogImageData.GetPixelColor(fogImageX+fogX, fogImageY+fogY);
                                            shadedValue = color.A;
                                            if ( color.R > FogShade )
                                            {
                                                color = ShadedColor;
                                                shadedValue = FogShade;
                                            }
                                            if (x >= 0 && x < gridToPlace.NumberOfXTiles && y >= 0 && y < gridToPlace.NumberOfYTiles)
                                                shadedValue = CalculateFogColorByDistance(fogXIndex + (fogX * invertedFogFactor),
                                                    fogYIndex + (fogY * invertedFogFactor), shadedValue);
#if XNA4
                                            color = Color.FromNonPremultiplied(FogColor.R, FogColor.G, FogColor.B, shadedValue);
                                            //color = Color.FromNonPremultiplied(shadedValue, shadedValue, shadedValue, 255);
#else
                                            color = FogColor;
                                            color.A = shadedValue;
#endif
                                            mFogGradiantData.SetPixel(fogX, fogY, color);
                                        }
                                    }
                                    mFogGradiantData.CopyTo(mFogImageData, fogImageX, fogImageY);
                                }
                            }
                        }
                    }
                }
                else
                {
                    int xIndex, yIndex; int fogX, fogY; Color color;
                    for (int yTile = 0; yTile < NumberOfYTiles; yTile++)
                    {
                        for (int xTile = 0; xTile < NumberOfXTiles; xTile++)
                        {
                            xIndex = (int)(xTile * FogResolution);
                            yIndex = (int)(yTile * FogResolution);
                            for (fogX = 0; fogX < mFogFactor; fogX++)
                            {
                                for (fogY = 0; fogY < mFogFactor; fogY++)
                                {
                                    color = mFogImageData.GetPixelColor(xIndex + fogX, yIndex + fogY);
                                    shadedValue = color.R;
                                    if ( color.R > FogShade )
                                    {
                                        color = ShadedColor;
                                        shadedValue = FogShade;
                                    }
                                    shadedValue = CalculateFogColorByDistance(xTile + (fogX * invertedFogFactor),
                                        yTile + (fogY * invertedFogFactor), shadedValue);
#if XNA4
                                    color = Color.FromNonPremultiplied(FogColor.R, FogColor.G, FogColor.B, shadedValue);
#else
                                    color = FogColor;
                                    color.A = shadedValue;
#endif
                                    mFogGradiantData.SetPixel(fogX, fogY, color);
                                }
                            }
                            mFogGradiantData.CopyTo(mFogImageData, xIndex, yIndex);
                        }
                    }
                }
                mFogImageData.ToTexture2D(mFogTexture);
            }
            */
        }

        /// <summary>
        /// Converts a tile index to the world position of that tile's center.
        /// Inverse of WorldToIndex (without clamping).
        /// </summary>
        public void IndexToWorld(int xIndex, int yIndex, out float worldX, out float worldY)
        {
            worldX = mXSeed + mGridSpacing * xIndex;
            worldY = mYSeed + mGridSpacing * yIndex;
        }

        /// <summary>
        /// Converts a world position to the nearest tile index, clamped to the grid bounds.
        /// </summary>
        public void WorldToIndex(float worldX, float worldY, out int xIndex, out int yIndex)
        {
            xIndex = MathFunctions.RoundToInt((worldX - mXSeed) / mGridSpacing);
            yIndex = MathFunctions.RoundToInt((worldY - mYSeed) / mGridSpacing);

            xIndex = System.Math.Max(0, xIndex);
            xIndex = System.Math.Min(xIndex, mNumberOfXTiles - 1);

            yIndex = System.Math.Max(0, yIndex);
            yIndex = System.Math.Min(yIndex, mNumberOfYTiles - 1);
        }

        //Alternate to get partial "indices"
        public void WorldToIndex(float worldX, float worldY, out float xIndex, out float yIndex)
        {
            xIndex = (worldX - mXSeed) / mGridSpacing;
            yIndex = (worldY - mYSeed) / mGridSpacing;

            xIndex = System.Math.Max(0, xIndex);
            xIndex = System.Math.Min(xIndex, mNumberOfXTiles - 1);

            yIndex = System.Math.Max(0, yIndex);
            yIndex = System.Math.Min(yIndex, mNumberOfYTiles - 1);
        }

        #endregion

        #region Private Methods

        // Re-hides the whole grid, then ORs every viewer's local revealed tiles (and
        // brightest fog pixels) back into this grid, and re-uploads the fog texture.
        private void UpdateRevealedFromViewers()
        {
            MakeAllHidden();

            int x;
            int y;
            int xIndex;
            int yIndex;
            int radiusAsInt;

            foreach (KeyValuePair<IViewer, ViewerInformation> kvp in mViewers)
            {
                IViewer viewer = kvp.Key;

                WorldToIndex(viewer.X, viewer.Y, out xIndex, out yIndex);

                radiusAsInt = MathFunctions.RoundToInt(viewer.WorldViewRadius / mGridSpacing);

                // Skip viewers whose radius doesn't fully fit inside this grid.
                if (xIndex >= radiusAsInt && yIndex >= radiusAsInt &&
                    xIndex + radiusAsInt < mNumberOfXTiles && yIndex + radiusAsInt < mNumberOfYTiles)
                {
                    ViewerInformation info = kvp.Value;
                    VisibilityGrid gridToPlace = info.LocalVisibilityGrid;

                    // Tile offset of the local grid's origin within this grid.
                    int xOffset = MathFunctions.RoundToInt((gridToPlace.mXSeed - mXSeed) / mGridSpacing);
                    int yOffset = MathFunctions.RoundToInt((gridToPlace.mYSeed - mYSeed) / mGridSpacing);

                    int fogX, fogY;
                    Color sourceColor;
                    Color destinationColor;

                    for (x = 0; x < gridToPlace.mNumberOfXTiles; x++)
                    {
                        for (y = 0; y < gridToPlace.mNumberOfYTiles; y++)
                        {
                            mRevealedTiles[x + xOffset][y + yOffset] |= gridToPlace.mRevealedTiles[x][y];

                            if (mFogFactor > 0)
                            {
                                // Take the brighter (max R) of the existing and the viewer's fog pixel.
                                for (fogX = 0; fogX < mFogFactor; fogX++)
                                {
                                    for (fogY = 0; fogY < mFogFactor; fogY++)
                                    {
                                        sourceColor = gridToPlace.mFogImageData.GetPixelColor(x * mFogFactor + fogX, y * mFogFactor + fogY);
                                        destinationColor = mFogImageData.GetPixelColor((x + xOffset) * mFogFactor + fogX, (y + yOffset) * mFogFactor + fogY);
                                        if (sourceColor.R > destinationColor.R)
                                            destinationColor = sourceColor;
                                        mFogImageData.SetPixel((x + xOffset) * mFogFactor + fogX, (y + yOffset) * mFogFactor + fogY, destinationColor);
                                    }
                                }
                            }
                        }
                    }
                }
            }

            if (mFogFactor > 0 && mFogTexture != null)
            {
                mFogImageData.ToTexture2D(mFogTexture);
            }
        }

        // Recomputes a single viewer's local grid: re-seeds it at the viewer's tile,
        // copies in the relevant blocked tiles, reveals a line-of-sight circle, bleeds
        // visibility onto adjacent walls, and (if fog is on) shades the local fog pixels
        // by distance from the viewer.
        private void UpdateViewersLocalVisibilityGrid(IViewer viewer, ViewerInformation viewerInformation)
        {
            VisibilityGrid localGrid = viewerInformation.LocalVisibilityGrid;
            localGrid.MakeAllHidden();
            localGrid.ClearBlockedTiles();

            int viewRadius = MathFunctions.RoundToInt(viewer.WorldViewRadius / mGridSpacing);

            int xIndex;
            int yIndex;

            WorldToIndex(viewer.X, viewer.Y, out xIndex, out yIndex);

            float tileCenteredXPosition;
            float tileCenteredYPosition;

            IndexToWorld(xIndex, yIndex, out tileCenteredXPosition, out tileCenteredYPosition);

            viewerInformation.LastX = xIndex;
            viewerInformation.LastY = yIndex;

            // Re-seed the local grid so it is centered on the viewer's tile.
            localGrid.mXSeed = tileCenteredXPosition - viewer.WorldViewRadius;
            localGrid.mYSeed = tileCenteredYPosition - viewer.WorldViewRadius;

            int xOffset = MathFunctions.RoundToInt((localGrid.mXSeed - mXSeed) / mGridSpacing);
            int yOffset = MathFunctions.RoundToInt((localGrid.mYSeed - mYSeed) / mGridSpacing);

            // copy over the blocked areas to the viewer
            int y;
            int fogX, fogY;
            byte shadedValue;
            Color shadedColor;
            float invertedFogFactor = 1.0f;
            if (mFogFactor > 1)
                invertedFogFactor = 1.0f / mFogFactor;

            for (int x = 0; x < localGrid.mNumberOfXTiles; x++)
            {
                for (y = 0; y < localGrid.mNumberOfYTiles; y++)
                {
                    localGrid.mBlockedTiles[x][y] = mBlockedTiles[x + xOffset][y + yOffset];
                }
            }

            // NOTE(review): MakeAllHidden is called a second time here (also at the top of
            // this method) — looks redundant; confirm before removing.
            localGrid.MakeAllHidden();

            // The viewer sits at the center of its local grid.
            localGrid.RevealCircle(viewRadius, viewRadius, viewRadius);

            localGrid.BleedDirectlyVisibleToWalls();

            if (mFogFactor > 0)
            {
                for (int x = 0; x < localGrid.mNumberOfXTiles; x++)
                {
                    for (y = 0; y < localGrid.mNumberOfYTiles; y++)
                    {
                        for (fogX = 0; fogX < mFogFactor; fogX++)
                        {
                            for (fogY = 0; fogY < mFogFactor; fogY++)
                            {
                                shadedColor = localGrid.mFogImageData.GetPixelColor(x * mFogFactor + fogX, y * mFogFactor + fogY);
                                shadedValue = 0;
                                if (localGrid.mRevealedTiles[x][y] != 0)
                                    // Revealed: brightness falls off with distance from the viewer.
                                    shadedValue = localGrid.CalculateFogColorByDistance(x + (fogX * invertedFogFactor),
                                        y + (fogY * invertedFogFactor), shadedColor.R, viewer);
                                else if (shadedColor.R > FogShade)
                                    // Previously seen but now hidden: dim down to FogShade.
                                    shadedValue = FogShade;
#if XNA4
                                //shadedColor = Color.FromNonPremultiplied(FogColor.R, FogColor.G, FogColor.B, shadedValue);
                                shadedColor = Color.FromNonPremultiplied(shadedValue, shadedValue, shadedValue, 255);
#else
                                shadedColor = FogColor;
                                shadedColor.A = shadedValue;
#endif
                                localGrid.mFogImageData.SetPixel(x * mFogFactor + fogX, y * mFogFactor + fogY, shadedColor);
                            }
                        }
                    }
                }
            }
        }

        // NOTE(review): stub overload, always returns 0. It is only referenced from the
        // commented-out UpdateFog implementation above — confirm whether it can be removed.
        private byte CalculateFogColorByDistance(float x, float y, byte alpha) { return 0; }

        // Returns a fog brightness (capped at 127) for a fractional tile position, based on
        // the squared distance from the viewer relative to its view radius. Positions outside
        // the radius keep the incoming alpha unchanged.
        private byte CalculateFogColorByDistance(float x, float y, byte alpha, IViewer viewer)
        {
            int shadeValue = 127;// alpha;
            int radiusAsInt;
            int radiusAsIntSquared;
            //We need to extend the radius slightly to allow smooth gradients on grid squares just out side the viewable area.
            int radiusExtended;
            int radiusExtendedSquared;
            float xIndex;
            float yIndex;
            float distance;
            float calculatedValue;

            {
                radiusAsInt = MathFunctions.RoundToInt(viewer.WorldViewRadius / mGridSpacing);
                radiusExtended = radiusAsInt + 2;
                radiusAsIntSquared = radiusAsInt * radiusAsInt;
                // NOTE(review): radiusExtendedSquared is computed but never used below.
                radiusExtendedSquared = radiusExtended * radiusExtended;

                WorldToIndex(viewer.X, viewer.Y, out xIndex, out yIndex);

                distance = ((x - xIndex) * (x - xIndex)) + ((y - yIndex) * (y - yIndex));

                if (distance <= radiusAsIntSquared)
                {
                    calculatedValue = distance / radiusAsIntSquared;
                    calculatedValue = MathHelper.Max(0, MathHelper.Min((calculatedValue /** calculatedValue*/), 1.0f));
                    shadeValue = (byte)((1.0f - calculatedValue) * 127);
                    // Never darker than the incoming alpha, never brighter than 127.
                    alpha = (byte)MathHelper.Min(MathHelper.Max(alpha, shadeValue), 127);
                }
            }

            return alpha;
        }

        #endregion

        #endregion
    }
}
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE file in the project root for more information.

#if AMD64 || ARM64 || (BIT32 && !ARM)
#define HAS_CUSTOM_BLOCKS
#endif

namespace System
{
    //Only contains static methods. Does not require serialization

    using System;
    using System.Runtime.CompilerServices;
    using System.Runtime.ConstrainedExecution;
    using System.Runtime.InteropServices;
    using System.Runtime.Versioning;
    using System.Diagnostics;
    using System.Security;
    using System.Runtime;
#if BIT64
    using nuint = System.UInt64;
#else // BIT64
    using nuint = System.UInt32;
#endif // BIT64

    public static class Buffer
    {
        // Copies from one primitive array to another primitive array without
        // respecting types. This calls memmove internally. The count and
        // offset parameters here are in bytes. If you want to use traditional
        // array element indices and counts, use Array.Copy.
        // (FCall: implemented in the runtime, including argument validation.)
        [MethodImplAttribute(MethodImplOptions.InternalCall)]
        public static extern void BlockCopy(Array src, int srcOffset, Array dst, int dstOffset, int count);

        // A very simple and efficient memmove that assumes all of the
        // parameter validation has already been done. The count and offset
        // parameters here are in bytes. If you want to use traditional
        // array element indices and counts, use Array.Copy.
        [MethodImplAttribute(MethodImplOptions.InternalCall)]
        internal static extern void InternalBlockCopy(Array src, int srcOffsetBytes, Array dst, int dstOffsetBytes, int byteCount);

        // This is ported from the optimized CRT assembly in memchr.asm. The JIT generates
        // pretty good code here and this ends up being within a couple % of the CRT asm.
        // It is however cross platform as the CRT hasn't ported their fast version to 64-bit
        // platforms.
        //
        // Searches for the first occurrence of 'value' in the 'count' bytes starting at
        // src + index; returns the offset from 'src' (not from 'index'), or -1 if absent.
        internal unsafe static int IndexOfByte(byte* src, byte value, int index, int count)
        {
            Debug.Assert(src != null, "src should not be null");

            byte* pByte = src + index;

            // Align up the pointer to sizeof(int), scanning byte-by-byte until aligned.
            while (((int)pByte & 3) != 0)
            {
                if (count == 0)
                    return -1;
                else if (*pByte == value)
                    return (int)(pByte - src);

                count--;
                pByte++;
            }

            // Fill comparer with value byte for comparisons
            //
            // comparer = 0/0/value/value
            uint comparer = (((uint)value << 8) + (uint)value);
            // comparer = value/value/value/value
            comparer = (comparer << 16) + comparer;

            // Run through buffer until we hit a 4-byte section which contains
            // the byte we're looking for or until we exhaust the buffer.
            while (count > 3)
            {
                // Test the buffer for presence of value. comparer contains the byte
                // replicated 4 times. This is the classic "does this word contain a
                // zero byte" carry trick applied to (word XOR comparer).
                uint t1 = *(uint*)pByte;
                t1 = t1 ^ comparer;
                uint t2 = 0x7efefeff + t1;
                t1 = t1 ^ 0xffffffff;
                t1 = t1 ^ t2;
                t1 = t1 & 0x81010100;

                // if t1 is zero then these 4-bytes don't contain a match
                if (t1 != 0)
                {
                    // We've found a match for value, figure out which position it's in.
                    int foundIndex = (int)(pByte - src);
                    if (pByte[0] == value)
                        return foundIndex;
                    else if (pByte[1] == value)
                        return foundIndex + 1;
                    else if (pByte[2] == value)
                        return foundIndex + 2;
                    else if (pByte[3] == value)
                        return foundIndex + 3;
                    // The trick can report a false positive on the high byte; falling
                    // through simply continues the scan.
                }

                count -= 4;
                pByte += 4;
            }

            // Catch any bytes that might be left at the tail of the buffer
            while (count > 0)
            {
                if (*pByte == value)
                    return (int)(pByte - src);

                count--;
                pByte++;
            }

            // If we don't have a match return -1;
            return -1;
        }

        // Returns a bool to indicate if the array is of primitive data types
        // or not. (FCall into the runtime.)
        [MethodImplAttribute(MethodImplOptions.InternalCall)]
        private static extern bool IsPrimitiveTypeArray(Array array);

        // Gets a particular byte out of the array. The array must be an
        // array of primitives.
        //
        // This essentially does the following:
        // return ((byte*)array) + index.
        //
        [MethodImplAttribute(MethodImplOptions.InternalCall)]
        private static extern byte _GetByte(Array array, int index);

        /// <summary>
        /// Reads the byte at the given byte offset of a primitive-typed array.
        /// </summary>
        public static byte GetByte(Array array, int index)
        {
            // Is the array present?
            if (array == null)
                throw new ArgumentNullException(nameof(array));

            // Is it of primitive types?
            if (!IsPrimitiveTypeArray(array))
                throw new ArgumentException(SR.Arg_MustBePrimArray, nameof(array));

            // Is the index in valid range of the array?
            if (index < 0 || index >= _ByteLength(array))
                throw new ArgumentOutOfRangeException(nameof(index));

            return _GetByte(array, index);
        }

        // Sets a particular byte in the array. The array must be an
        // array of primitives.
        //
        // This essentially does the following:
        // *(((byte*)array) + index) = value.
        //
        [MethodImplAttribute(MethodImplOptions.InternalCall)]
        private static extern void _SetByte(Array array, int index, byte value);

        /// <summary>
        /// Writes <paramref name="value"/> at the given byte offset of a primitive-typed array.
        /// </summary>
        public static void SetByte(Array array, int index, byte value)
        {
            // Is the array present?
            if (array == null)
                throw new ArgumentNullException(nameof(array));

            // Is it of primitive types?
            if (!IsPrimitiveTypeArray(array))
                throw new ArgumentException(SR.Arg_MustBePrimArray, nameof(array));

            // Is the index in valid range of the array?
            if (index < 0 || index >= _ByteLength(array))
                throw new ArgumentOutOfRangeException(nameof(index));

            // Make the FCall to do the work
            _SetByte(array, index, value);
        }

        // Gets a particular byte out of the array. The array must be an
        // array of primitives.
        //
        // This essentially does the following:
        // return array.length * sizeof(array.UnderlyingElementType).
        //
        [MethodImplAttribute(MethodImplOptions.InternalCall)]
        private static extern int _ByteLength(Array array);

        /// <summary>
        /// Returns the length of a primitive-typed array in bytes
        /// (element count times element size).
        /// </summary>
        public static int ByteLength(Array array)
        {
            // Is the array present?
            if (array == null)
                throw new ArgumentNullException(nameof(array));

            // Is it of primitive types?
            if (!IsPrimitiveTypeArray(array))
                throw new ArgumentException(SR.Arg_MustBePrimArray, nameof(array));

            return _ByteLength(array);
        }

        // Zeroes `len` bytes starting at src, one byte at a time (back to front).
        internal unsafe static void ZeroMemory(byte* src, long len)
        {
            while (len-- > 0)
                *(src + len) = 0;
        }

        // Copies `len` bytes from unmanaged memory (src + srcIndex) into the managed
        // array `dest` starting at destIndex.
        internal unsafe static void Memcpy(byte[] dest, int destIndex, byte* src, int srcIndex, int len)
        {
            Debug.Assert((srcIndex >= 0) && (destIndex >= 0) && (len >= 0), "Index and length must be non-negative!");
            Debug.Assert(dest.Length - destIndex >= len, "not enough bytes in dest");
            // If dest has 0 elements, the fixed statement will throw an
            // IndexOutOfRangeException. Special-case 0-byte copies.
            if (len == 0)
                return;
            fixed (byte* pDest = dest)
            {
                Memcpy(pDest + destIndex, src + srcIndex, len);
            }
        }

        // Copies `len` bytes from the managed array `src` starting at srcIndex into
        // unmanaged memory (pDest + destIndex).
        internal unsafe static void Memcpy(byte* pDest, int destIndex, byte[] src, int srcIndex, int len)
        {
            Debug.Assert((srcIndex >= 0) && (destIndex >= 0) && (len >= 0), "Index and length must be non-negative!");
            Debug.Assert(src.Length - srcIndex >= len, "not enough bytes in src");
            // If dest has 0 elements, the fixed statement will throw an
            // IndexOutOfRangeException. Special-case 0-byte copies.
            if (len == 0)
                return;
            fixed (byte* pSrc = src)
            {
                Memcpy(pDest + destIndex, pSrc + srcIndex, len);
            }
        }

        // This is tricky to get right AND fast, so lets make it useful for the whole Fx.
        // E.g. System.Runtime.WindowsRuntime!WindowsRuntimeBufferExtensions.MemCopy uses it.

        // This method has a slightly different behavior on arm and other platforms.
        // On arm this method behaves like memcpy and does not handle overlapping buffers.
        // While on other platforms it behaves like memmove and handles overlapping buffers.
        // This behavioral difference is unfortunate but intentional because
        // 1. This method is given access to other internal dlls and this close to release we do not want to change it.
        // 2. It is difficult to get this right for arm and again due to release dates we would like to visit it later.
[FriendAccessAllowed]
#if ARM
        [MethodImplAttribute(MethodImplOptions.InternalCall)]
        internal unsafe static extern void Memcpy(byte* dest, byte* src, int len);
#else // ARM
        [MethodImplAttribute(MethodImplOptions.AggressiveInlining)]
        internal unsafe static void Memcpy(byte* dest, byte* src, int len)
        {
            Debug.Assert(len >= 0, "Negative length in memcopy!");
            Memmove(dest, src, (uint)len);
        }
#endif // ARM

        // This method has different signature for x64 and other platforms and is done for performance reasons.
        //
        // Managed memmove: copies `len` bytes from src to dest. Overlapping buffers and
        // copies larger than CopyThreshold are delegated to the native implementation
        // (the PInvoke label at the bottom); everything else is copied inline via the
        // MCPY00..MCPY06 labels below.
        internal unsafe static void Memmove(byte* dest, byte* src, nuint len)
        {
            // Platform-tuned cutover point between the managed copy loop and native memmove.
#if AMD64 || (BIT32 && !ARM)
            const nuint CopyThreshold = 2048;
#elif ARM64
#if PLATFORM_WINDOWS
            // TODO-ARM64-WINDOWS-OPT determine optimal value for Windows
            // https://github.com/dotnet/coreclr/issues/13843
            const nuint CopyThreshold = 2048;
#else // PLATFORM_WINDOWS
            // Managed code is currently faster than glibc unoptimized memmove
            // TODO-ARM64-UNIX-OPT revisit when glibc optimized memmove is in Linux distros
            // https://github.com/dotnet/coreclr/issues/13844
            const nuint CopyThreshold = UInt64.MaxValue;
#endif // PLATFORM_WINDOWS
#else
            const nuint CopyThreshold = 512;
#endif // AMD64 || (BIT32 && !ARM)

            // P/Invoke into the native version when the buffers are overlapping.
            // (nuint subtraction wraps, so a small unsigned difference in either
            // direction indicates overlap within `len` bytes.)
            if (((nuint)dest - (nuint)src < len) || ((nuint)src - (nuint)dest < len))
                goto PInvoke;

            byte* srcEnd = src + len;
            byte* destEnd = dest + len;

            if (len <= 16)
                goto MCPY02;
            if (len > 64)
                goto MCPY05;

            MCPY00:
            // Copy bytes which are multiples of 16 and leave the remainder for MCPY01 to handle.
            Debug.Assert(len > 16 && len <= 64);
#if HAS_CUSTOM_BLOCKS
            *(Block16*)dest = *(Block16*)src;                   // [0,16]
#elif BIT64
            *(long*)dest = *(long*)src;
            *(long*)(dest + 8) = *(long*)(src + 8);             // [0,16]
#else
            *(int*)dest = *(int*)src;
            *(int*)(dest + 4) = *(int*)(src + 4);
            *(int*)(dest + 8) = *(int*)(src + 8);
            *(int*)(dest + 12) = *(int*)(src + 12);             // [0,16]
#endif
            if (len <= 32)
                goto MCPY01;
#if HAS_CUSTOM_BLOCKS
            *(Block16*)(dest + 16) = *(Block16*)(src + 16);     // [0,32]
#elif BIT64
            *(long*)(dest + 16) = *(long*)(src + 16);
            *(long*)(dest + 24) = *(long*)(src + 24);           // [0,32]
#else
            *(int*)(dest + 16) = *(int*)(src + 16);
            *(int*)(dest + 20) = *(int*)(src + 20);
            *(int*)(dest + 24) = *(int*)(src + 24);
            *(int*)(dest + 28) = *(int*)(src + 28);             // [0,32]
#endif
            if (len <= 48)
                goto MCPY01;
#if HAS_CUSTOM_BLOCKS
            *(Block16*)(dest + 32) = *(Block16*)(src + 32);     // [0,48]
#elif BIT64
            *(long*)(dest + 32) = *(long*)(src + 32);
            *(long*)(dest + 40) = *(long*)(src + 40);           // [0,48]
#else
            *(int*)(dest + 32) = *(int*)(src + 32);
            *(int*)(dest + 36) = *(int*)(src + 36);
            *(int*)(dest + 40) = *(int*)(src + 40);
            *(int*)(dest + 44) = *(int*)(src + 44);             // [0,48]
#endif

            MCPY01:
            // Unconditionally copy the last 16 bytes using destEnd and srcEnd and return.
            // (This may re-copy a few bytes already written above; harmless because the
            // buffers do not overlap.)
            Debug.Assert(len > 16 && len <= 64);
#if HAS_CUSTOM_BLOCKS
            *(Block16*)(destEnd - 16) = *(Block16*)(srcEnd - 16);
#elif BIT64
            *(long*)(destEnd - 16) = *(long*)(srcEnd - 16);
            *(long*)(destEnd - 8) = *(long*)(srcEnd - 8);
#else
            *(int*)(destEnd - 16) = *(int*)(srcEnd - 16);
            *(int*)(destEnd - 12) = *(int*)(srcEnd - 12);
            *(int*)(destEnd - 8) = *(int*)(srcEnd - 8);
            *(int*)(destEnd - 4) = *(int*)(srcEnd - 4);
#endif
            return;

            MCPY02:
            // Copy the first 8 bytes and then unconditionally copy the last 8 bytes and return.
            // ((len & 24) == 0 means len < 8; fall through to the 4-byte case.)
            if ((len & 24) == 0)
                goto MCPY03;
            Debug.Assert(len >= 8 && len <= 16);
#if BIT64
            *(long*)dest = *(long*)src;
            *(long*)(destEnd - 8) = *(long*)(srcEnd - 8);
#else
            *(int*)dest = *(int*)src;
            *(int*)(dest + 4) = *(int*)(src + 4);
            *(int*)(destEnd - 8) = *(int*)(srcEnd - 8);
            *(int*)(destEnd - 4) = *(int*)(srcEnd - 4);
#endif
            return;

            MCPY03:
            // Copy the first 4 bytes and then unconditionally copy the last 4 bytes and return.
            if ((len & 4) == 0)
                goto MCPY04;
            Debug.Assert(len >= 4 && len < 8);
            *(int*)dest = *(int*)src;
            *(int*)(destEnd - 4) = *(int*)(srcEnd - 4);
            return;

            MCPY04:
            // Copy the first byte. For pending bytes, do an unconditionally copy of the last 2 bytes and return.
            Debug.Assert(len < 4);
            if (len == 0)
                return;
            *dest = *src;
            if ((len & 2) == 0)
                return;
            *(short*)(destEnd - 2) = *(short*)(srcEnd - 2);
            return;

            MCPY05:
            // PInvoke to the native version when the copy length exceeds the threshold.
            if (len > CopyThreshold)
            {
                goto PInvoke;
            }

            // Copy 64-bytes at a time until the remainder is less than 64.
            // If remainder is greater than 16 bytes, then jump to MCPY00. Otherwise, unconditionally copy the last 16 bytes and return.
            Debug.Assert(len > 64 && len <= CopyThreshold);
            nuint n = len >> 6;

            MCPY06:
#if HAS_CUSTOM_BLOCKS
            *(Block64*)dest = *(Block64*)src;
#elif BIT64
            *(long*)dest = *(long*)src;
            *(long*)(dest + 8) = *(long*)(src + 8);
            *(long*)(dest + 16) = *(long*)(src + 16);
            *(long*)(dest + 24) = *(long*)(src + 24);
            *(long*)(dest + 32) = *(long*)(src + 32);
            *(long*)(dest + 40) = *(long*)(src + 40);
            *(long*)(dest + 48) = *(long*)(src + 48);
            *(long*)(dest + 56) = *(long*)(src + 56);
#else
            *(int*)dest = *(int*)src;
            *(int*)(dest + 4) = *(int*)(src + 4);
            *(int*)(dest + 8) = *(int*)(src + 8);
            *(int*)(dest + 12) = *(int*)(src + 12);
            *(int*)(dest + 16) = *(int*)(src + 16);
            *(int*)(dest + 20) = *(int*)(src + 20);
            *(int*)(dest + 24) = *(int*)(src + 24);
            *(int*)(dest + 28) = *(int*)(src + 28);
            *(int*)(dest + 32) = *(int*)(src + 32);
            *(int*)(dest + 36) = *(int*)(src + 36);
            *(int*)(dest + 40) = *(int*)(src + 40);
            *(int*)(dest + 44) = *(int*)(src + 44);
            *(int*)(dest + 48) = *(int*)(src + 48);
            *(int*)(dest + 52) = *(int*)(src + 52);
            *(int*)(dest + 56) = *(int*)(src + 56);
            *(int*)(dest + 60) = *(int*)(src + 60);
#endif
            dest += 64;
            src += 64;
            n--;
            if (n != 0)
                goto MCPY06;

            len %= 64;
            if (len > 16)
                goto MCPY00;
            // 0..16 bytes remain: one (possibly overlapping-with-previous-write) tail copy.
#if HAS_CUSTOM_BLOCKS
            *(Block16*)(destEnd - 16) = *(Block16*)(srcEnd - 16);
#elif BIT64
            *(long*)(destEnd - 16) = *(long*)(srcEnd - 16);
            *(long*)(destEnd - 8) = *(long*)(srcEnd - 8);
#else
            *(int*)(destEnd - 16) = *(int*)(srcEnd - 16);
            *(int*)(destEnd - 12) = *(int*)(srcEnd - 12);
            *(int*)(destEnd - 8) = *(int*)(srcEnd - 8);
            *(int*)(destEnd - 4) = *(int*)(srcEnd - 4);
#endif
            return;

            PInvoke:
            _Memmove(dest, src, len);
        }

        // Non-inlinable wrapper around the QCall that avoids polluting the fast path
        // with P/Invoke prolog/epilog.
[MethodImplAttribute(MethodImplOptions.NoInlining)]
        private unsafe static void _Memmove(byte* dest, byte* src, nuint len)
        {
            __Memmove(dest, src, len);
        }

        // Native memmove implementation, reached via QCall.
        [DllImport(JitHelpers.QCall, CharSet = CharSet.Unicode)]
        [SuppressUnmanagedCodeSecurity]
        extern private unsafe static void __Memmove(byte* dest, byte* src, nuint len);

        // The attributes on this method are chosen for best JIT performance.
        // Please do not edit unless intentional.
        /// <summary>
        /// Copies <paramref name="sourceBytesToCopy"/> bytes from source to destination,
        /// throwing when the destination buffer is too small to hold them.
        /// </summary>
        [MethodImplAttribute(MethodImplOptions.AggressiveInlining)]
        [CLSCompliant(false)]
        public static unsafe void MemoryCopy(void* source, void* destination, long destinationSizeInBytes, long sourceBytesToCopy)
        {
            if (sourceBytesToCopy > destinationSizeInBytes)
            {
                ThrowHelper.ThrowArgumentOutOfRangeException(ExceptionArgument.sourceBytesToCopy);
            }
            // checked: a negative or too-large length must not silently truncate to nuint.
            Memmove((byte*)destination, (byte*)source, checked((nuint)sourceBytesToCopy));
        }

        // The attributes on this method are chosen for best JIT performance.
        // Please do not edit unless intentional.
        /// <summary>
        /// Unsigned-length overload of <see cref="MemoryCopy(void*, void*, long, long)"/>.
        /// </summary>
        [MethodImplAttribute(MethodImplOptions.AggressiveInlining)]
        [CLSCompliant(false)]
        public static unsafe void MemoryCopy(void* source, void* destination, ulong destinationSizeInBytes, ulong sourceBytesToCopy)
        {
            if (sourceBytesToCopy > destinationSizeInBytes)
            {
                ThrowHelper.ThrowArgumentOutOfRangeException(ExceptionArgument.sourceBytesToCopy);
            }
#if BIT64
            Memmove((byte*)destination, (byte*)source, sourceBytesToCopy);
#else // BIT64
            Memmove((byte*)destination, (byte*)source, checked((uint)sourceBytesToCopy));
#endif // BIT64
        }

#if HAS_CUSTOM_BLOCKS
        // Opaque fixed-size structs: assigning them makes the JIT emit wide block copies.
        [StructLayout(LayoutKind.Sequential, Size = 16)]
        private struct Block16 { }

        [StructLayout(LayoutKind.Sequential, Size = 64)]
        private struct Block64 { }
#endif // HAS_CUSTOM_BLOCKS
    }
}
/* ScriptLoader.cs
 * SSIS Script Loader by Karan Misra (kid0m4n)
 */

using System;
using System.Data;
using System.Linq;
using System.Collections.Generic;
using Microsoft.SqlServer.Dts.Runtime;
using System.Data.SqlClient;
using System.Text;
using System.Threading;
using System.Data.Common;
using System.Data.OleDb;
using System.IO;
using System.Collections;

namespace ST_6df146e709f5496ab83a8452be3f0685.csproj
{
    [System.AddIn.AddIn("ScriptMain", Version = "1.0", Publisher = "", Description = "")]
    public partial class ScriptMain : Microsoft.SqlServer.Dts.Tasks.ScriptTask.VSTARTScriptObjectModelBase
    {
        #region VSTA generated code
        enum ScriptResults
        {
            Success = Microsoft.SqlServer.Dts.Runtime.DTSExecResult.Success,
            Failure = Microsoft.SqlServer.Dts.Runtime.DTSExecResult.Failure
        };
        #endregion

        // Configuration read from SSIS package variables in InitVariables().
        private string _packageName;        // name of the calling (master) package
        private string _taskName;
        private string _version;            // "major.minor.build" of the package
        private string _batchId;
        private string _loadingTableConn;
        private string _loggingTableConn;
        private int _totalThreads;
        private int _commitInterval;        // default 100000 (see InitVariables)
        private int _bufferLength;          // default 10000 (see InitVariables)
        private bool _loadLogging;
        private int _loadLoggingInterval;
        private int _loggingLevel;          // < 1 suppresses Misc messages (see WriteInfo)
        private bool _doProfiling;
        private string _badFilesPath;       // directory for reject ("bad") files
        private int _totalBadLines;         // cap on rows written per bad file
        private char _badFileSeparator;
        private string _connDelimiter;      // separator inside v_Connections
        private string _connections;        // raw "name<delim>string<delim><delim>..." payload
        private Dictionary<string, string> _connectionStrings = new Dictionary<string, string>();
        private int _recoveryBatchSize;
        private int _errorThreshold;

        // Fetch a typed SSIS variable; throws when the variable does not exist.
        private T GetVariable<T>(string varName)
        {
            if (Dts.Variables.Contains(varName))
                return (T)Dts.Variables[varName].Value;
            else
                throw new ApplicationException("Variable {0} not found.".FormatWith(varName));
        }

        // Fetch a typed SSIS variable; falls back to defaultValue when it does not exist.
        private T GetVariable<T>(string varName, T defaultValue)
        {
            if (Dts.Variables.Contains(varName))
                return (T)Dts.Variables[varName].Value;
            else
                return defaultValue;
        }

        /// <summary>
        /// Initialize the variables as they are mapped from SSIS.
/// </summary> private void InitVariables() { // This is the package name of the calling package. _packageName = (string)Dts.Variables["v_MasterPackageName"].Value; _taskName = (string)Dts.Variables["TaskName"].Value; _version = "{0}.{1}.{2}".FormatWith( Dts.Variables["VersionMajor"].Value, Dts.Variables["VersionMinor"].Value, Dts.Variables["VersionBuild"].Value); _batchId = (string)Dts.Variables["v_BatchID"].Value; _loadingTableConn = (string)Dts.Variables["v_LoadingTableConn"].Value; _loggingTableConn = (string)Dts.Variables["v_LoggingTableConn"].Value; _totalThreads = (int)Dts.Variables["v_TotalThreads"].Value; _commitInterval = GetVariable<int>("v_CommitInterval", 100000); _bufferLength = GetVariable<int>("v_BufferLength", 10000); _loadLogging = (bool)Dts.Variables["v_LoadLogging"].Value; _loadLoggingInterval = (int)Dts.Variables["v_LoadLoggingInterval"].Value; _loggingLevel = (int)Dts.Variables["v_LoggingLevel"].Value; _doProfiling = (bool)Dts.Variables["v_DoProfiling"].Value; _recoveryBatchSize = GetVariable<int>("v_RecoveryBatchSize", 2); _badFilesPath = GetVariable<string>("v_BadFilesPath", Path.GetTempPath()); _totalBadLines = GetVariable<int>("v_TotalBadLines", 10); _badFileSeparator = (char)GetVariable<ushort>("v_BadFileSeparator", ','); _connDelimiter = GetVariable<string>("v_ConnDelimiter"); _connections = GetVariable<string>("v_Connections"); _errorThreshold = GetVariable<int>("v_ErrorThreshold", 20); foreach (var conn in _connections.Split(new string[] { _connDelimiter + _connDelimiter }, StringSplitOptions.None)) { var twoParts = conn.Split(_connDelimiter[0]); if (twoParts.Length != 2) throw new ApplicationException("Connection string is not formatted properly. Name and string should be separated by #"); else _connectionStrings.Add(twoParts[0], twoParts[1]); } } /// <summary> /// Classify the different types of messages generated by the script. 
        /// </summary>
        private enum InfoType
        {
            Load,   // progress of a data load
            Misc,   // verbose diagnostics (suppressed when _loggingLevel < 1)
            Error   // failures; also increments _totalErrors
        }

        /// <summary>
        /// Write info generated by the script.
        /// </summary>
        /// <param name="logType">The level of information generated.</param>
        /// <param name="message">The actual info message.</param>
        /// <param name="args">Parameters for formatting.</param>
        private void WriteInfo(InfoType logType, string message, params object[] args)
        {
            // Decide whether to proceed based on the current logging level.
            // Ex: Misc messages are not output if the logging level is < 1.
            switch (logType)
            {
                case InfoType.Load:
                case InfoType.Error:
                    break;
                case InfoType.Misc:
                    if (_loggingLevel < 1)
                        return;
                    else
                        break;
            }

            bool temp = false;
            string processedMessage = message.FormatWith(args);

            // We fire an error or information based on the type of info.
            // NOTE(review): the Error case below also calls FireInformation (not FireError);
            // presumably so errors are counted without failing the task — confirm intended.
            switch (logType)
            {
                case InfoType.Load:
                case InfoType.Misc:
                    Dts.Events.FireInformation(0, "", processedMessage, "", 0, ref temp);
                    break;
                case InfoType.Error:
                    Dts.Events.FireInformation(0, "", processedMessage, "", 0, ref temp);
                    // NOTE(review): VolatileWrite of (_totalErrors + 1) is a non-atomic
                    // read-modify-write; concurrent callers can lose increments.
                    // Consider Interlocked.Increment.
                    Thread.VolatileWrite(ref _totalErrors, _totalErrors + 1);
                    break;
            }
        }

        /// <summary>
        /// Queue to hold tasks.
        /// </summary>
        Queue<LoadTask> _loadTasks = null;

        // Permitted source->target type conversions (populated elsewhere; used by CanConvertTo).
        List<Conversion> _conversions;

        /// <summary>
        /// Get row count of a table using a OleDbConnection.
        /// </summary>
        /// <param name="tableName">Name of table from which to fetch info.</param>
        /// <param name="query">Optional query; when non-null its result set is counted instead of the table.</param>
        /// <param name="conn">The OleDbConnection to use to connect to the database.</param>
        /// <returns>Row count of the table.</returns>
        private int GetRowCount(string tableName, string query, OleDbConnection conn)
        {
            // Get row count.
            if (query != null)
            {
                using (var cmd = GetCommand<OleDbCommand, OleDbConnection>("SELECT COUNT(*) FROM ({0}) A".FormatWith(query), conn))
                {
                    // No time out.
                    cmd.CommandTimeout = 0;
                    return Int32.Parse(cmd.ExecuteScalar().ToString());
                }
            }
            else
            {
                using (var cmd = GetCommand<OleDbCommand, OleDbConnection>("SELECT COUNT(*) FROM {0}".FormatWith(tableName), conn))
                {
                    // No time out.
cmd.CommandTimeout = 0;
                    return Int32.Parse(cmd.ExecuteScalar().ToString());
                }
            }
        }

        /// <summary>
        /// Get row count of a table using a SqlConnection.
        /// </summary>
        /// <param name="tableName">Name of table from which to fetch info.</param>
        /// <param name="query">Optional query; when non-null its result set is counted instead of the table.</param>
        /// <param name="conn">The SqlConnection to use to connect to the database.</param>
        /// <returns>Row count of the table.</returns>
        private int GetRowCount(string tableName, string query, SqlConnection conn)
        {
            // Get row count.
            if (query != null)
            {
                using (var cmd = GetCommand<SqlCommand, SqlConnection>("SELECT COUNT(*) FROM ({0}) A".FormatWith(query), conn))
                {
                    // No time out.
                    cmd.CommandTimeout = 0;
                    return Int32.Parse(cmd.ExecuteScalar().ToString());
                }
            }
            else
            {
                using (var cmd = GetCommand<SqlCommand, SqlConnection>("SELECT COUNT(*) FROM {0}".FormatWith(tableName), conn))
                {
                    // No time out.
                    cmd.CommandTimeout = 0;
                    return Int32.Parse(cmd.ExecuteScalar().ToString());
                }
            }
        }

        /// <summary>
        /// Get the connection string from the different repositories.
        /// </summary>
        /// <param name="conn">Identifier to retrieve using.</param>
        /// <returns>Connection string.</returns>
        private string ResolveConnString(string conn)
        {
            if (_connectionStrings.ContainsKey(conn))
                return _connectionStrings[conn];
            else
                throw new ApplicationException("Connection string not resolved for {0}.".FormatWith(conn));
        }

        /// <summary>
        /// Description of a result-set column (name, provider type name, size,
        /// nullability) as collected from a schema table; see CollectSchema.
        /// </summary>
        public class Column
        {
            public string Name { get; set; }
            public string DataType { get; set; }
            public int Length { get; set; }
            public bool IsNullAllowed { get; set; }

            public override string ToString()
            {
                return "{0} ({1},{2},{3})".FormatWith(Name, DataType, Length, IsNullAllowed ?
"NULL" : "NOT NULL");
            }
        }

        // True when `from` can be copied into `to`: names match (case-insensitive) and
        // either type+length match exactly, or exactly one configured conversion exists
        // from the source type to the target type.
        private bool CanConvertTo(Column from, Column to)
        {
            return from != null && to != null &&
                from.Name.Equals(to.Name, StringComparison.CurrentCultureIgnoreCase) &&
                ((from.DataType == to.DataType && from.Length == to.Length) ||
                 _conversions.Count(pc => pc.SourceType == from.DataType && pc.TargetType == to.DataType) == 1);
        }

        /// <summary>
        /// Get schema (column) information for a particular table.
        /// </summary>
        /// <param name="cmd">Prepared IDbCommand containing a SELECT * FROM TABLE</param>
        /// <returns>List of column names returned from the executed query.</returns>
        private List<Column> CollectSchema(DbCommand cmd)
        {
            // SchemaOnly: no rows are fetched, only result-set metadata.
            using (var rdr = cmd.ExecuteReader(CommandBehavior.SchemaOnly))
            {
                DataTable tbl = rdr.GetSchemaTable();
                var colList = new List<Column>();
                for (var i = 0; i < rdr.VisibleFieldCount; i++)
                    colList.Add(
                        new Column
                        {
                            Name = rdr.GetName(i),
                            DataType = rdr.GetDataTypeName(i),
                            Length = (int)tbl.Rows[i]["ColumnSize"],
                            IsNullAllowed = (bool)tbl.Rows[i]["AllowDBNull"]
                        });
                return colList;
            }
        }

        // Open a new connection of type T using the named entry in _connectionStrings.
        private T GetConnection<T>(string connName) where T : DbConnection, new()
        {
            var conn = new T();
            conn.ConnectionString = ResolveConnString(connName);
            conn.Open();
            return conn;
        }

        // Close and dispose a connection, then null out the caller's reference.
        private void CloseConnection<T>(ref T conn) where T : DbConnection
        {
            if (conn != null)
            {
                conn.Close();
                conn.Dispose();
                conn = null;
            }
        }

        // Build a command of type TCmd over `conn` with no timeout.
        private TCmd GetCommand<TCmd, TConn>(string cmdStr, TConn conn)
            where TCmd : DbCommand, new()
            where TConn : DbConnection
        {
            var cmd = new TCmd();
            cmd.Connection = conn as TConn;
            cmd.CommandText = cmdStr;
            cmd.CommandTimeout = 0;
            return cmd;
        }

        // Create a SqlBulkCopy (KeepNulls + TableLock, no external transaction) targeting
        // `destination`, applying the optional column mappings.
        private SqlBulkCopy PrepareBulkCopy(SqlConnection conn, int batchSize, string destination, List<SqlBulkCopyColumnMapping> columnMappings)
        {
            var bulkCopy = new SqlBulkCopy(conn, SqlBulkCopyOptions.KeepNulls | SqlBulkCopyOptions.TableLock, null)
            {
                BatchSize = batchSize,
                DestinationTableName = destination,
                BulkCopyTimeout = 0
            };
            if (columnMappings != null)
                foreach (var mapping in columnMappings)
bulkCopy.ColumnMappings.Add(mapping);

            // If we are going to do load logging, setup the bulk copy process as required.
            if (_loadLogging)
            {
                bulkCopy.NotifyAfter = _loadLoggingInterval;
                bulkCopy.SqlRowsCopied += new SqlRowsCopiedEventHandler((o, args) =>
                {
                    WriteInfo(InfoType.Load, "{0} rows loaded to {1}.", _loadLoggingInterval, (o as SqlBulkCopy).DestinationTableName);
                });
            }

            return bulkCopy;
        }

        /// <summary>
        /// Buffered bulk load: copies batches of <paramref name="commitInterval"/> rows and,
        /// when a batch fails, retries it row-by-row so bad rows can be diverted to a
        /// reject ("bad") file instead of aborting the whole load.
        /// </summary>
        private void BufferedBulkLoadTable(OleDbCommand rdrCommand, string sourceTable, List<Column> sourceColumns, string[] commonColumnNames,
            SqlConnection targetConn, string targetTable, List<SqlBulkCopyColumnMapping> columnMappings, int commitInterval,
            out DateTime startTime, out DateTime endTime, out int totalRows)
        {
            using (var bulkCopy = PrepareBulkCopy(targetConn, commitInterval, targetTable, columnMappings))
            {
                // Record start time.
                startTime = DateTime.Now;
                totalRows = 0;
                try
                {
                    // Ensure that we dispose the reader.
                    // (DecoradedReader buffers up to commitInterval rows so a failed batch can be replayed.)
                    using (var rdr = new DecoradedReader(new ConvertingReader(rdrCommand.ExecuteReader()), commitInterval))
                    {
                        // Only proceed if we have input rows coming along.
                        // Wont be harmful to proceed, but wasted execution time.
                        if (rdr.HasRows)
                        {
                            // Mark the starting of the main copy routine.
WriteInfo(InfoType.Misc, "Buffered load for {0} starting.", targetTable);

                            StreamWriter badFile = null;
                            string insertStatement = null;
                            var totalBadRows = 0;
                            // One extra slot for the trailing "Error Description" column of the bad file.
                            object[] badData = new object[rdr.FieldCount + 1];

                            do
                            {
                                try
                                {
                                    bulkCopy.WriteToServer(rdr);
                                    // A batch that did not fill the buffer means the source is exhausted.
                                    if (!rdr.HitBufferLimit)
                                        break;
                                }
                                catch (Exception e)
                                {
                                    // An invalid column length can never succeed row-by-row either — surface it.
                                    if (e.Message.Contains("Received an invalid column length from the bcp client"))
                                    {
                                        throw;
                                    }

                                    // Lazily create the bad file (with header) and the parameterized
                                    // fallback INSERT the first time a batch fails.
                                    if (badFile == null)
                                    {
                                        badFile = GetBadFileWriter(targetTable, startTime);
                                        var header = new List<string>(sourceColumns.Select(x => x.Name));
                                        header.Add("Error Description");
                                        badFile.WriteLine(header.Aggregate((x, y) => "{0}{1}{2}".FormatWith(x, _badFileSeparator, y)));
                                        var i = 0;
                                        insertStatement = "insert into {0} ({1}) values ({2})".FormatWith(targetTable,
                                            commonColumnNames.Aggregate((x, y) => x + "," + y),
                                            commonColumnNames.Select(x => "@p" + i++).Aggregate((x, y) => x + "," + y));
                                    }

                                    // Replay the failed batch one row at a time; rejected rows go
                                    // to the bad file (capped at _totalBadLines entries).
                                    using (var cmd = GetCommand<SqlCommand, SqlConnection>(insertStatement, targetConn))
                                    {
                                        for (var i = 0; i < rdr.RowsInBatch; i++)
                                        {
                                            var data = rdr.GetBufferData(i);
                                            try
                                            {
                                                cmd.Parameters.Clear();
                                                for (var j = 0; j < data.Length; j++)
                                                    cmd.Parameters.AddWithValue("@p" + j, data[j]);
                                                cmd.ExecuteNonQuery();
                                            }
                                            catch (Exception ie)
                                            {
                                                if (totalBadRows++ < _totalBadLines)
                                                {
                                                    badFile.Write(data.Aggregate((x, y) => "{0}{1}{2}".FormatWith(x.ToNullString(), _badFileSeparator, y.ToNullString())));
                                                    badFile.WriteLine(",{0}", ie.Message.Trim());
                                                }
                                            }
                                        }
                                    }
                                }

                                rdr.ResetBatch();
                            } while (true);

                            // Get inserted row count.
                            totalRows = GetRowCount(targetTable, null, targetConn);

                            if (badFile == null)
                            {
                                // We finished successfully.
                                // BUGFIX: log message previously read "Buffered oad for {0} completed.".
                                WriteInfo(InfoType.Misc, "Buffered load for {0} completed.", targetTable);
                            }
                            else
                            {
                                badFile.Close();
                                badFile = null;
                                throw new ApplicationException("Error in data detected. Bad file {0} created. Total errors = {1}".FormatWith(GetBadFileName(targetTable, startTime), totalBadRows));
                            }
                        }
                        else
                        {
                            // No rows in the source. Nothing to do.
startTime = DateTime.Now;
                            WriteInfo(InfoType.Misc, "Source table {0} didn't have any rows.", sourceTable);
                        }
                    }
                }
                finally
                {
                    // Record end time.
                    endTime = DateTime.Now;
                }
            }
        }

        /// <summary>
        /// Bulk load a table based on various parameters.
        /// </summary>
        /// <param name="rdrCommand">The reader command to feed the target table with data.</param>
        /// <param name="sourceTable">Source table name (used for logging and other information.)</param>
        /// <param name="targetConn">Target connection (SqlConnection.)</param>
        /// <param name="targetTable">Target table name (used for logging and other information.)</param>
        /// <param name="commitInterval">Commit interval to use while loading.</param>
        /// <param name="startTime">out parameter to hold the start time of the bulk load.</param>
        /// <param name="endTime">out parameter to hold the end time of the bulk load.</param>
        /// <param name="totalRows">out parameter to hold the total number of rows loaded.</param>
        /// <returns>true when the bulk copy completed (or the source was empty);
        /// false when it failed for a reason other than an invalid column length.</returns>
        bool BulkLoadTable(OleDbCommand rdrCommand, string sourceTable, List<Column> sourceColumns, string[] commonColumnNames,
            SqlConnection targetConn, string targetTable, List<SqlBulkCopyColumnMapping> columnMappings, int commitInterval,
            out DateTime startTime, out DateTime endTime, out int totalRows)
        {
            // Create the bulk copy object.
            // We are using table locking, keep nulls and internal transaction to ensure speedy and
            // consistent copy.
            using (var bulkCopy = PrepareBulkCopy(targetConn, commitInterval, targetTable, columnMappings))
            {
                // Record start time.
                startTime = DateTime.Now;
                totalRows = 0;
                try
                {
                    // Ensure that we dispose the reader.
                    using (var rdr = new ConvertingReader(rdrCommand.ExecuteReader()))
                    {
                        // Only proceed if we have input rows coming along.
                        // Wont be harmful to proceed, but wasted execution time.
                        if (rdr.HasRows)
                        {
                            // Mark the starting of the main copy routine.
WriteInfo(InfoType.Misc, "Load for {0} starting.", targetTable);

                            try
                            {
                                bulkCopy.WriteToServer(rdr);
                            }
                            catch (Exception e)
                            {
                                // An invalid column length can never succeed — surface it.
                                if (e.Message.Contains("Received an invalid column length from the bcp client"))
                                {
                                    throw;
                                }
                                // Any other failure: report false so the caller can retry
                                // with the buffered (row-by-row fallback) variant.
                                return false;
                            }

                            // Get inserted row count.
                            totalRows = GetRowCount(targetTable, null, targetConn);
                        }
                        else
                        {
                            // No rows in the source. Nothing to do.
                            startTime = DateTime.Now;
                            WriteInfo(InfoType.Misc, "Source table {0} didn't have any rows.", sourceTable);
                        }

                        return true;
                    }
                }
                finally
                {
                    // Record end time.
                    endTime = DateTime.Now;
                }
            }
        }

        // Convenience overload: discards timing and row-count outputs.
        void BulkLoadTable(OleDbCommand rdrCommand, string sourceTable, List<Column> sourceColumns, string[] commonColumnNames,
            SqlConnection targetConn, string targetTable, List<SqlBulkCopyColumnMapping> columnMappings, int commitInterval)
        {
            DateTime startTime, endTime;
            int totalRows;
            BulkLoadTable(rdrCommand, sourceTable, sourceColumns, commonColumnNames, targetConn, targetTable, columnMappings, commitInterval, out startTime, out endTime, out totalRows);
        }

        // Convenience overload: discards timing outputs but returns the loaded row count.
        void BulkLoadTable(OleDbCommand rdrCommand, string sourceTable, List<Column> sourceColumns, string[] commonColumnNames,
            SqlConnection targetConn, string targetTable, List<SqlBulkCopyColumnMapping> columnMappings, int commitInterval, out int totalRows)
        {
            DateTime startTime, endTime;
            BulkLoadTable(rdrCommand, sourceTable, sourceColumns, commonColumnNames, targetConn, targetTable, columnMappings, commitInterval, out startTime, out endTime, out totalRows);
        }

        /// <summary>
        /// Mark as thread done executing.
/// </summary>
        void MarkThreadDoneExecuting(LoadTask loadTask)
        {
            // _executingTasks doubles as the lock object guarding the counter and the set.
            lock (_executingTasks)
            {
                _executingTasks.Remove(loadTask);
                _totalTaskExecuting--;
            }
        }

        // Copy up to maxRows rows starting at `start` from the collection into `rows`
        // (cleared first) and return them as an array.
        // NOTE(review): the passed-in list is mutated as a scratch buffer — presumably
        // to reuse one allocation across calls; confirm callers rely on this.
        private DataRow[] GetRows(DataRowCollection rowsCollection, int start, int maxRows, List<DataRow> rows)
        {
            rows.Clear();
            for (int i = 0; i < maxRows && (start + i) < rowsCollection.Count; i++)
            {
                rows.Add(rowsCollection[start + i]);
            }
            return rows.ToArray();
        }

        // Column names present (case-insensitively) in both source and target schemas.
        private string[] GetCommonColumnNames(List<Column> sourceColumns, List<Column> targetColumns)
        {
            return sourceColumns.Select(c => c.Name).Intersect(targetColumns.Select(c => c.Name), StringComparer.CurrentCultureIgnoreCase).ToArray();
        }

        // Bad-file name: "<table>_<yyyyMMdd>.txt" (schema prefix stripped when present).
        private string GetBadFileName(string targetTable, DateTime loadStartTime)
        {
            return "{0}_{1}.txt".FormatWith(targetTable.Contains('.') ? targetTable.Split('.')[1] : targetTable, loadStartTime.ToString("yyyyMMdd"));
        }

        // Open the bad file under _badFilesPath; append:false, so an existing file
        // from the same table/day is overwritten.
        private StreamWriter GetBadFileWriter(string targetTable, DateTime loadStartTime)
        {
            return new StreamWriter(
                Path.Combine(
                    _badFilesPath,
                    GetBadFileName(targetTable, loadStartTime)),
                false);
        }

        // TRUNCATE TABLE on the target connection; logs on completion.
        private void TruncateTable(string tableName, SqlConnection conn)
        {
            using (var cmd = GetCommand<SqlCommand, SqlConnection>("TRUNCATE TABLE {0}".FormatWith(tableName), conn))
            {
                cmd.ExecuteNonQuery();
                WriteInfo(InfoType.Misc, "Target table {0} has been truncated.", tableName);
            }
        }

        /// <summary>
        /// Thread function to execute a truncate load task.
        /// </summary>
        /// <param name="_loadTask">The task we are going to operate on for the load.</param>
        private void ExecuteTruncateLoadTask(object _loadTask)
        {
            LoadTask loadTask = (LoadTask)_loadTask;
            OleDbConnection sourceConn = null;
            int? truncatedRowCount = null;
            int? importedRowCount = null;
            int totalRows = 0;

            try
            {
                sourceConn = GetConnection<OleDbConnection>("Source");

                // Get row counts. We only proceed if profiling is turned on
                // at the global level and it is not overridden at the load level.
                if (_doProfiling && !(loadTask.OverrideProfiling ??
false)) { importedRowCount = GetRowCount(loadTask.SourceTable, loadTask.SourceQuery, sourceConn); } var columnMappings = new List<SqlBulkCopyColumnMapping>(); var sourceColumnsOverride = "*"; List<Column> sourceColumns = null; List<Column> targetColumns = null; string[] commonColumnNames; using (var targetOleDbConn = GetConnection<OleDbConnection>("TargetOleDb")) { // Check if we have been provided source query. If source query is provided we are going to use it. // Otherwise, we are going to find out own mapping using the source colunmns // or the target table column list. if (loadTask.SourceQuery != null) { using (var cmd = GetCommand<OleDbCommand, OleDbConnection>(loadTask.SourceQuery, sourceConn)) { sourceColumns = CollectSchema(cmd); } using (var cmd = GetCommand<OleDbCommand, OleDbConnection>("SELECT * FROM {0}".FormatWith(loadTask.TargetTable), targetOleDbConn)) { targetColumns = CollectSchema(cmd); } commonColumnNames = GetCommonColumnNames(sourceColumns, targetColumns); if (commonColumnNames.Length > 0) { foreach (Column sourceColumn in sourceColumns) { foreach (Column targetColumn in targetColumns) { if (sourceColumn.Name.Equals(targetColumn.Name, StringComparison.CurrentCultureIgnoreCase)) { if (CanConvertTo(sourceColumn, targetColumn)) columnMappings.Add(new SqlBulkCopyColumnMapping(sourceColumn.Name, targetColumn.Name)); else throw new ApplicationException( "Source column {0} doesn't match target column {1}.".FormatWith(sourceColumn, targetColumn)); } } } } else { throw new ApplicationException("No common columns found between source and target."); } } else if (loadTask.SourceColumns != null) { // If source columns is provided, then we use it. var sourceColumnNames = loadTask.SourceColumns.Split(','); var targetColumnNames = loadTask.TargetColumns.Split(','); // Confirm that the mapping beween source columns and the target columns is 1 to 1. // If that is the case the the lenghts will match. 
if (sourceColumnNames.Length != targetColumnNames.Length) { WriteInfo(InfoType.Error, "Invalid source-to-target mapping specified for load {0}. Skipping load.", loadTask.LoadID); return; } using (var cmd = GetCommand<OleDbCommand, OleDbConnection>("SELECT {0} FROM {1}".FormatWith(loadTask.SourceColumns, loadTask.SourceTable), sourceConn)) { sourceColumns = CollectSchema(cmd); } using (var cmd = GetCommand<OleDbCommand, OleDbConnection>("SELECT {0} FROM {1}".FormatWith(loadTask.TargetColumns, loadTask.TargetTable), targetOleDbConn)) { targetColumns = CollectSchema(cmd); } // Populate the column mappings to be used for the SqlBulkCopy operation. // It is always a 1-to-1 in order mapping between source and target columns. for (int i = 0; i < sourceColumnNames.Length; i++) { if (CanConvertTo(sourceColumns[i], targetColumns[i])) columnMappings.Add(new SqlBulkCopyColumnMapping(sourceColumnNames[i], targetColumnNames[i])); else throw new ApplicationException( "Source column {0} doesn't match target column {1}.".FormatWith(sourceColumns[i], targetColumns[i])); } commonColumnNames = targetColumns.Select(x => x.Name).ToArray(); } else { // We need to now dynamically find the source and target columns and proceed from there. 
using (var cmd = GetCommand<OleDbCommand, OleDbConnection>("SELECT * FROM {0}".FormatWith(loadTask.SourceTable), sourceConn)) { sourceColumns = CollectSchema(cmd); } using (var cmd = GetCommand<OleDbCommand, OleDbConnection>("SELECT * FROM {0}".FormatWith(loadTask.TargetTable), targetOleDbConn)) { targetColumns = CollectSchema(cmd); } commonColumnNames = GetCommonColumnNames(sourceColumns, targetColumns); if (commonColumnNames.Count() > 0) { foreach (var col in commonColumnNames) { var sourceColumn = sourceColumns.Where(nt => nt.Name.Equals(col, StringComparison.CurrentCultureIgnoreCase)).Single(); var targetColumn = targetColumns.Where(nt => nt.Name.Equals(col, StringComparison.CurrentCultureIgnoreCase)).Single(); if (CanConvertTo(sourceColumn, targetColumn)) columnMappings.Add(new SqlBulkCopyColumnMapping(sourceColumn.Name, targetColumn.Name)); else throw new ApplicationException( "Source column {0} doesn't match target column {1}.".FormatWith(sourceColumn, targetColumn)); } sourceColumnsOverride = commonColumnNames.Aggregate((x, y) => x + "," + y); } else throw new ApplicationException("No common columns found between source and target."); } } if (loadTask.SourceActualColumns != null) { if (sourceColumns.Select(x => x.Name).Intersect(loadTask.SourceActualColumns.Split(',')).Count() != sourceColumns.Count()) throw new ApplicationException("Source columns have changed from the last load."); } if (loadTask.TargetActualColumns != null) { if (targetColumns.Select(x => x.Name).Intersect(loadTask.TargetActualColumns.Split(',')).Count() != targetColumns.Count()) throw new ApplicationException("Target columns have changed from the last load."); } DateTime startTime, endTime; TimeSpan timeTaken; bool status; // This is our source command. We will open a reader on it to fetch data. using (var readCmd = GetCommand<OleDbCommand, OleDbConnection>(loadTask.SourceQuery ?? "SELECT {0} FROM {1}".FormatWith(loadTask.SourceColumns ?? 
sourceColumnsOverride, loadTask.SourceTable), sourceConn)) { using (var targetConn = GetConnection<SqlConnection>("Target")) { truncatedRowCount = GetRowCount(loadTask.TargetTable, null, targetConn); // Truncate the target table. TruncateTable(loadTask.TargetTable, targetConn); // Call the bulk load table function to do our bulk load. status = BulkLoadTable( readCmd, loadTask.SourceTable, sourceColumns, commonColumnNames, targetConn, loadTask.TargetTable, columnMappings, loadTask.CommitInterval ?? this._commitInterval, out startTime, out endTime, out totalRows); } } if (!status) { using (var readCmd = GetCommand<OleDbCommand, OleDbConnection>(loadTask.SourceQuery ?? "SELECT {0} FROM {1}".FormatWith(loadTask.SourceColumns ?? sourceColumnsOverride, loadTask.SourceTable), sourceConn)) { using (var targetConn = GetConnection<SqlConnection>("Target")) { TruncateTable(loadTask.TargetTable, targetConn); BufferedBulkLoadTable( readCmd, loadTask.SourceTable, sourceColumns, commonColumnNames, targetConn, loadTask.TargetTable, columnMappings, _bufferLength, out startTime, out endTime, out totalRows); } } } // Calculate our elapsed time. timeTaken = endTime - startTime; // Log information about the current completed load. WriteInfo( InfoType.Load, "{0} rows loaded {1}: source {2}->target {3} in {4}.", totalRows, loadTask.LoadID, loadTask.SourceTable, loadTask.TargetTable, timeTaken); UpdateLoadColumns(loadTask, sourceColumns, targetColumns); // Log the successful load. LogSuccessEntry( _packageName, _taskName, _version, loadTask.TargetTable, importedRowCount ?? totalRows, totalRows, truncatedRowCount, null, // No updates startTime, endTime, timeTaken); } catch (Exception e) { // We have encountered a failure. WriteInfo(InfoType.Error, "Error in loading task {0}: {1}", loadTask.LoadID, e.Message); // Log the error. 
// Record the failure in the Failures log table; include the stack trace for diagnosis.
LogErrorEntry(
    _packageName,
    _taskName,
    _version,
    loadTask.TargetTable,
    String.Empty,
    e.Message + e.StackTrace,
    DateTime.Now);
Interlocked.Increment(ref _totalTableFailures);
            }
            finally
            {
                CloseConnection(ref sourceConn);
                // We are done executing.
                MarkThreadDoneExecuting(loadTask);
                // Increase global row count.
                // NOTE(review): VolatileWrite of (_globalTotalRows + totalRows) is a non-atomic
                // read-modify-write; two loader threads finishing together can lose an update.
                // Interlocked.Add(ref _globalTotalRows, totalRows) would be atomic -- confirm before changing.
                Thread.VolatileWrite(ref _globalTotalRows, _globalTotalRows + totalRows);
                // NOTE(review): forcing a full collection after every task is usually
                // counterproductive; verify why this was added before removing it.
                GC.Collect();
            }
        }

        /// <summary>
        /// Thread entry point for a delete-style (delta) load task: copy new rows into a temp
        /// table, delete the matching rows from the target, then copy temp into the target.
        /// NOTE(review): the entire implementation is commented out, so this is currently a
        /// no-op -- yet Main() still routes tasks with IsDeleteLoad/DeleteDataTargetQuery here,
        /// meaning those loads silently do nothing. Confirm whether this is intentional.
        /// </summary>
        /// <param name="_loadTask">The LoadTask to process (boxed; Thread entry-point signature).</param>
        void ExecuteDeleteLoadTask(object _loadTask)
        {
            //LoadTask loadTask = (LoadTask)_loadTask;
            //OleDbConnection sourceConn;
            //SqlConnection targetConn;
            //OleDbConnection targetOleDbConn;
            //int? importedRowCount = null;
            //var totalRows = 0;
            //sourceConn = GetConnection<OleDbConnection>("Source");
            //targetConn = GetConnection<SqlConnection>("Target");
            //targetOleDbConn = GetConnection<OleDbConnection>("TargetOleDb");
            //try
            //{
            //    // Get row counts.
            //    if (_doProfiling)
            //        importedRowCount = GetRowCount(loadTask.SourceTable, loadTask.SourceQuery, sourceConn);
            //    // Mark start time.
            //    DateTime startTime = DateTime.Now;
            //    // Get the max updated date from target table.
            //    string maxUpdatedDate;
            //    using (var getMaxUpdatedDateCmd = new SqlCommand(loadTask.GetMaxDateQuery, targetConn))
            //    {
            //        maxUpdatedDate = getMaxUpdatedDateCmd.ExecuteScalar().ToString();
            //    }
            //    // Copy the new rows based on obtained update date from source table to temp table.
            //    var readSourceDataCmd = new OleDbCommand(loadTask.ReadSourceDataQuery.FormatWith(maxUpdatedDate), sourceConn);
            //    BulkLoadTable(
            //        readSourceDataCmd,
            //        loadTask.SourceTable,
            //        targetConn,
            //        loadTask.TmpTargetTable,
            //        null,
            //        loadTask.CommitInterval ?? _commitInterval);
            //    // Delete matching rows from target table based on temp table.
            //    int totalRowsDeleted;
            //    using (var deleteMatchingRowsCmd = new SqlCommand(loadTask.DeleteDataTargetQuery, targetConn))
            //    {
            //        totalRowsDeleted = (int)deleteMatchingRowsCmd.ExecuteScalar();
            //    }
            //    // Copy data from temp table to target table.
            //    var readDataFromTmpTableCmd = GetCommand<OleDbCommand, OleDbConnection>("SELECT * FROM " + loadTask.TargetTable, targetOleDbConn);
            //    BulkLoadTable(
            //        readDataFromTmpTableCmd,
            //        loadTask.TmpTargetTable,
            //        targetConn,
            //        loadTask.TargetTable,
            //        null,
            //        loadTask.CommitInterval ?? _commitInterval,
            //        out totalRows);
            //    // Mark end time.
            //    DateTime endTime = DateTime.Now;
            //    TimeSpan timeTaken = endTime - startTime;
            //    WriteInfo(
            //        InfoType.Load,
            //        "{0} rows loaded source {1}->target {2} in {3}.",
            //        totalRows,
            //        loadTask.SourceTable,
            //        loadTask.TargetTable,
            //        timeTaken);
            //    LogSuccessEntry(
            //        _packageName,
            //        _taskName,
            //        _version,
            //        loadTask.TargetTable,
            //        importedRowCount,
            //        totalRows,
            //        null,
            //        totalRowsDeleted, // No updates
            //        startTime,
            //        endTime,
            //        timeTaken);
            //}
            //catch (Exception e)
            //{
            //    // We have encountered a failure.
            //    WriteInfo(InfoType.Error, "Error in loading task {0}: {1}", loadTask.LoadID, e.Message);
            //    // Log the error.
            //    LogErrorEntry(
            //        _packageName,
            //        _taskName,
            //        _version,
            //        loadTask.TargetTable,
            //        String.Empty,
            //        e.Message,
            //        DateTime.Now);
            //}
            //finally
            //{
            //    // We are done executing.
            //    MarkThreadDoneExecuting(loadTask);
            //    // Increase global row count.
            //    Thread.VolatileWrite(ref _globalTotalRows, _globalTotalRows + totalRows);
            //}
        }

        /// <summary>
        /// The logging data context.
        /// </summary>
        LoggingDataContext _loggingDataContext;

        // NOTE(review): _totalErrors appears unwritten/unread in this portion of the file -- confirm it is still needed.
        int _totalErrors;

        // Count of failed table loads; incremented via Interlocked in the catch path above and
        // compared against the error threshold in Main().
        int _totalTableFailures;

        /// <summary>
        /// Initialize the logging system.
        /// </summary>
        private void InitializeLogging()
        {
            _loggingDataContext = new LoggingDataContext(ResolveConnString(_loggingTableConn));
        }

        /// <summary>
        /// Inserts a row into the Successes log table describing one completed table load.
        /// Thread-safe: serialized on the shared logging data context, since loads run on
        /// multiple worker threads.
        /// </summary>
        public void LogSuccessEntry(string packageName, string taskName, string version, string tableName, int? importedRowCount, int? insertedRowCount, int? delTruncRowCount, int? updatedRowCount, DateTime? startTime, DateTime? endTime, TimeSpan? elapsedTime)
        {
            lock (_loggingDataContext)
            {
                _loggingDataContext.Successes.InsertOnSubmit(
                    new Success()
                    {
                        Package_Name = packageName,
                        Task_Name = String.Format("{0}:{1}", taskName, tableName),
                        Version = version,
                        Imported_RowCount = importedRowCount,
                        Inserted_RowCount = insertedRowCount,
                        Del_Trunc_RowCount = delTruncRowCount,
                        Updated_RowCount = updatedRowCount,
                        Starttime = startTime,
                        Endtime = endTime,
                        ElapsedTime = elapsedTime.ToString()
                    });
            }
        }

        /// <summary>
        /// Inserts a row into the Failures log table describing one failed table load.
        /// Thread-safe: serialized on the shared logging data context.
        /// Note: rows are only flushed to the database when ShutdownLogging() calls SubmitChanges().
        /// </summary>
        public void LogErrorEntry(string packageName, string taskName, string version, string tableName, string errorNum, string errorDesc, DateTime? errorTime)
        {
            lock (_loggingDataContext)
            {
                _loggingDataContext.Failures.InsertOnSubmit(
                    new Failure()
                    {
                        Package_Name = packageName,
                        Task_Name = "{0}:{1}".FormatWith(taskName, tableName),
                        Version = version,
                        Error_Num = errorNum,
                        Error_Desc = errorDesc,
                        Error_Time = errorTime
                    });
            }
        }

        /// <summary>
        /// Shut down the logging system.
        /// </summary>
        private void ShutdownLogging()
        {
            _loggingDataContext.SubmitChanges();
            _loggingDataContext.Dispose();
            _loggingDataContext = null;
        }

        LoadingDataContext _loadingDataContext;

        /// <summary>
        /// Initialize the loading system.
        /// </summary>
        private void InitializeLoading()
        {
            // Make sure we dispose of the loading data context.
            _loadingDataContext = new LoadingDataContext(ResolveConnString(_loadingTableConn));
            // LINQ to SQL query to get the load tasks.
            // The load task must be enabled (Enabled == True) and must be from the proper batch.
            // They are ordered based on priority and load id.
            _loadTasks = new Queue<LoadTask>(from x in _loadingDataContext.LoadTasks where x.Enabled == true && x.BatchID == _batchId orderby (x.Priority ?? 0) descending select x);
            _conversions = new List<Conversion>(_loadingDataContext.Conversions);
            // Log information about number of tasks retrieved.
            WriteInfo(InfoType.Load, "We have been given {0} task(s). We will execute these tasks using {1} thread(s).", _loadTasks.Count, _totalThreads);
            WriteInfo(InfoType.Misc, "Closing connection to meta table.");
        }

        // Flushes any pending LoadTask changes (e.g. the actual-column updates made by
        // UpdateLoadColumns) and disposes the loading data context.
        private void ShutdownLoading()
        {
            _loadingDataContext.SubmitChanges();
            _loadingDataContext.Dispose();
            _loadingDataContext = null;
        }

        // Records the column lists actually seen during this load back onto the LoadTask row,
        // as comma-separated names, so the next run can detect schema drift.
        private void UpdateLoadColumns(LoadTask loadTask, List<Column> sourceColumns, List<Column> targetColumns)
        {
            loadTask.SourceActualColumns = sourceColumns.Select(x => x.Name).Aggregate((y, z) => y + "," + z);
            loadTask.TargetActualColumns = targetColumns.Select(x => x.Name).Aggregate((y, z) => y + "," + z);
        }

        /// <summary>
        /// Total rows loaded in this session.
        /// </summary>
        int _globalTotalRows = 0;

        /// <summary>
        /// Total number of tasks executing.
        /// </summary>
        /// <remarks>
        /// NOTE(review): incremented under lock in Main() when a task is scheduled; the matching
        /// decrement is not visible in this portion of the file (presumably inside
        /// MarkThreadDoneExecuting) -- confirm.
        /// </remarks>
        int _totalTaskExecuting = 0;

        // Tasks currently running on worker threads; guarded by lock(_executingTasks).
        List<LoadTask> _executingTasks = new List<LoadTask>();
        // Every task that has been handed to a worker thread this session.
        List<LoadTask> _scheduledTasks = new List<LoadTask>();

        /// <summary>
        /// Which task to execute next.
        /// </summary>
        /// <returns>Next load task to execute.</returns>
        private LoadTask GetNextTaskToExecute()
        {
            var loadTask = _loadTasks.Dequeue();
            _scheduledTasks.Add(loadTask);
            return loadTask;
        }

        /// <summary>
        /// Tells us if more tasks remain in the queue.
        /// </summary>
        /// <returns>True if more tasks are there to execute.</returns>
        private bool MoreTasksRemain()
        {
            return _loadTasks.Count > 0;
        }

        /// <summary>
        /// Our main routine: initializes logging and the task queue, schedules one worker
        /// thread per load task (capped at _totalThreads concurrent tasks), waits for all
        /// workers to finish, then reports totals and sets the SSIS task result based on the
        /// failure threshold.
        /// </summary>
        public void Main()
        {
            // Initialize the variables from SSIS.
            InitVariables();
            // Initialize the table logging system.
            InitializeLogging();
            // Get loading meta data.
            InitializeLoading();
            // Record the start time.
            var startTime = DateTime.Now;
            var totalTables = _loadTasks.Count;
            // Schedule jobs.
            while (MoreTasksRemain())
            {
                // Check if we are running at max number of tasks.
                if (_totalTaskExecuting >= _totalThreads)
                {
                    // Nothing to schedule, lets idle for 500 msec.
                    Thread.Sleep(500);
                }
                else
                {
                    // We have a job to schedule. Increase the total threads executing.
                    Thread thread;
                    // Get the next task to execute.
                    var taskToExecute = GetNextTaskToExecute();
                    lock (_executingTasks)
                    {
                        _executingTasks.Add(taskToExecute);
                        _totalTaskExecuting++;
                    }
                    // Decide on how to process the task. If it is a delete load, then we will use
                    // the ExecuteDeleteLoadTask routine to process the load. Otherwise we use the
                    // ExecuteTruncateLoadTask routine to proceed.
                    // NOTE(review): ExecuteDeleteLoadTask's body is entirely commented out above,
                    // so delete loads currently complete without doing any work -- confirm.
                    if ((taskToExecute.IsDeleteLoad ?? false) || taskToExecute.DeleteDataTargetQuery != null)
                        thread = new Thread(ExecuteDeleteLoadTask);
                    else
                        thread = new Thread(ExecuteTruncateLoadTask);
                    WriteInfo(
                        InfoType.Load,
                        "Scheduling load for target {0}: {1}...",
                        taskToExecute.LoadID,
                        taskToExecute.TargetTable);
                    // Start execution of thread.
                    thread.Start(taskToExecute);
                }
            }
            var waitLoopCount = 0;
            // All jobs scheduled.
            while (true)
            {
                lock (_executingTasks)
                {
                    if (_executingTasks.Count > 0)
                    {
                        waitLoopCount++;
                        // Emit a progress line roughly every 60 polls (~30 seconds at 500 msec/poll).
                        if (waitLoopCount % 60 == 0)
                        {
                            waitLoopCount = 0;
                            WriteInfo(
                                InfoType.Load,
                                "{0} job(s) still waiting to finish. {1} rows loaded so far.".FormatWith(_executingTasks.Count, _globalTotalRows));
                            WriteInfo(
                                InfoType.Load,
                                "Jobs executing are: {0}".FormatWith(_executingTasks.Take(5).Select(t => t.SourceTable).Aggregate((x, y) => x + ", " + y)));
                        }
                    }
                    else
                        break;
                }
                // Wait 500 msec to check again.
                Thread.Sleep(500);
            }
            // Record the end time.
            DateTime endTime = DateTime.Now;
            // Total time taken.
            TimeSpan timeTaken = endTime - startTime;
            // Mark as success or failure.
            // The session fails only when the number of failed tables exceeds the configured
            // percentage threshold of the total table count.
            if (_totalTableFailures > ((_errorThreshold / 100.0) * totalTables))
                Dts.TaskResult = (int)ScriptResults.Failure;
            else
            {
                // Log information for the completed session.
                WriteInfo(InfoType.Load, "{0} table loads completed in {1} using {2} parallel threads.", totalTables, timeTaken.ToString(), _totalThreads);
                WriteInfo(InfoType.Load, "{0} rows were loaded in {1}. Thats {2} rows/sec.", _globalTotalRows, timeTaken, Math.Round(_globalTotalRows / timeTaken.TotalSeconds));
                Dts.TaskResult = (int)ScriptResults.Success;
            }
            // Shutdown logging system.
            ShutdownLogging();
            ShutdownLoading();
        }
    }
}
// Copyright 2021 Google LLC
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
//     https://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

// Generated code. DO NOT EDIT!

using gax = Google.Api.Gax;
using sys = System;

namespace Google.Ads.GoogleAds.V9.Resources
{
    /// <summary>Resource name for the <c>HotelReconciliation</c> resource.</summary>
    public sealed partial class HotelReconciliationName : gax::IResourceName, sys::IEquatable<HotelReconciliationName>
    {
        /// <summary>The possible contents of <see cref="HotelReconciliationName"/>.</summary>
        public enum ResourceNameType
        {
            /// <summary>An unparsed resource name.</summary>
            Unparsed = 0,

            /// <summary>
            /// A resource name with pattern <c>customers/{customer_id}/hotelReconciliations/{commission_id}</c>.
            /// </summary>
            CustomerCommission = 1,
        }

        // Single template shared by all instances; parses and expands the one known pattern.
        private static gax::PathTemplate s_customerCommission = new gax::PathTemplate("customers/{customer_id}/hotelReconciliations/{commission_id}");

        /// <summary>Creates a <see cref="HotelReconciliationName"/> containing an unparsed resource name.</summary>
        /// <param name="unparsedResourceName">The unparsed resource name. Must not be <c>null</c>.</param>
        /// <returns>
        /// A new instance of <see cref="HotelReconciliationName"/> containing the provided
        /// <paramref name="unparsedResourceName"/>.
        /// </returns>
        public static HotelReconciliationName FromUnparsed(gax::UnparsedResourceName unparsedResourceName) =>
            new HotelReconciliationName(ResourceNameType.Unparsed, gax::GaxPreconditions.CheckNotNull(unparsedResourceName, nameof(unparsedResourceName)));

        /// <summary>
        /// Creates a <see cref="HotelReconciliationName"/> with the pattern
        /// <c>customers/{customer_id}/hotelReconciliations/{commission_id}</c>.
        /// </summary>
        /// <param name="customerId">The <c>Customer</c> ID. Must not be <c>null</c> or empty.</param>
        /// <param name="commissionId">The <c>Commission</c> ID. Must not be <c>null</c> or empty.</param>
        /// <returns>
        /// A new instance of <see cref="HotelReconciliationName"/> constructed from the provided ids.
        /// </returns>
        public static HotelReconciliationName FromCustomerCommission(string customerId, string commissionId) =>
            new HotelReconciliationName(ResourceNameType.CustomerCommission, customerId: gax::GaxPreconditions.CheckNotNullOrEmpty(customerId, nameof(customerId)), commissionId: gax::GaxPreconditions.CheckNotNullOrEmpty(commissionId, nameof(commissionId)));

        /// <summary>
        /// Formats the IDs into the string representation of this <see cref="HotelReconciliationName"/> with pattern
        /// <c>customers/{customer_id}/hotelReconciliations/{commission_id}</c>.
        /// </summary>
        /// <param name="customerId">The <c>Customer</c> ID. Must not be <c>null</c> or empty.</param>
        /// <param name="commissionId">The <c>Commission</c> ID. Must not be <c>null</c> or empty.</param>
        /// <returns>
        /// The string representation of this <see cref="HotelReconciliationName"/> with pattern
        /// <c>customers/{customer_id}/hotelReconciliations/{commission_id}</c>.
        /// </returns>
        public static string Format(string customerId, string commissionId) =>
            FormatCustomerCommission(customerId, commissionId);

        /// <summary>
        /// Formats the IDs into the string representation of this <see cref="HotelReconciliationName"/> with pattern
        /// <c>customers/{customer_id}/hotelReconciliations/{commission_id}</c>.
        /// </summary>
        /// <param name="customerId">The <c>Customer</c> ID. Must not be <c>null</c> or empty.</param>
        /// <param name="commissionId">The <c>Commission</c> ID. Must not be <c>null</c> or empty.</param>
        /// <returns>
        /// The string representation of this <see cref="HotelReconciliationName"/> with pattern
        /// <c>customers/{customer_id}/hotelReconciliations/{commission_id}</c>.
        /// </returns>
        public static string FormatCustomerCommission(string customerId, string commissionId) =>
            s_customerCommission.Expand(gax::GaxPreconditions.CheckNotNullOrEmpty(customerId, nameof(customerId)), gax::GaxPreconditions.CheckNotNullOrEmpty(commissionId, nameof(commissionId)));

        /// <summary>
        /// Parses the given resource name string into a new <see cref="HotelReconciliationName"/> instance.
        /// </summary>
        /// <remarks>
        /// To parse successfully, the resource name must be formatted as one of the following:
        /// <list type="bullet">
        /// <item><description><c>customers/{customer_id}/hotelReconciliations/{commission_id}</c></description></item>
        /// </list>
        /// </remarks>
        /// <param name="hotelReconciliationName">The resource name in string form. Must not be <c>null</c>.</param>
        /// <returns>The parsed <see cref="HotelReconciliationName"/> if successful.</returns>
        public static HotelReconciliationName Parse(string hotelReconciliationName) => Parse(hotelReconciliationName, false);

        /// <summary>
        /// Parses the given resource name string into a new <see cref="HotelReconciliationName"/> instance; optionally
        /// allowing an unparseable resource name.
        /// </summary>
        /// <remarks>
        /// To parse successfully, the resource name must be formatted as one of the following:
        /// <list type="bullet">
        /// <item><description><c>customers/{customer_id}/hotelReconciliations/{commission_id}</c></description></item>
        /// </list>
        /// Or may be in any format if <paramref name="allowUnparsed"/> is <c>true</c>.
        /// </remarks>
        /// <param name="hotelReconciliationName">The resource name in string form. Must not be <c>null</c>.</param>
        /// <param name="allowUnparsed">
        /// If <c>true</c> will successfully store an unparseable resource name into the <see cref="UnparsedResource"/>
        /// property; otherwise will throw an <see cref="sys::ArgumentException"/> if an unparseable resource name is
        /// specified.
        /// </param>
        /// <returns>The parsed <see cref="HotelReconciliationName"/> if successful.</returns>
        public static HotelReconciliationName Parse(string hotelReconciliationName, bool allowUnparsed) =>
            TryParse(hotelReconciliationName, allowUnparsed, out HotelReconciliationName result) ? result : throw new sys::ArgumentException("The given resource-name matches no pattern.");

        /// <summary>
        /// Tries to parse the given resource name string into a new <see cref="HotelReconciliationName"/> instance.
        /// </summary>
        /// <remarks>
        /// To parse successfully, the resource name must be formatted as one of the following:
        /// <list type="bullet">
        /// <item><description><c>customers/{customer_id}/hotelReconciliations/{commission_id}</c></description></item>
        /// </list>
        /// </remarks>
        /// <param name="hotelReconciliationName">The resource name in string form. Must not be <c>null</c>.</param>
        /// <param name="result">
        /// When this method returns, the parsed <see cref="HotelReconciliationName"/>, or <c>null</c> if parsing
        /// failed.
        /// </param>
        /// <returns><c>true</c> if the name was parsed successfully; <c>false</c> otherwise.</returns>
        public static bool TryParse(string hotelReconciliationName, out HotelReconciliationName result) =>
            TryParse(hotelReconciliationName, false, out result);

        /// <summary>
        /// Tries to parse the given resource name string into a new <see cref="HotelReconciliationName"/> instance;
        /// optionally allowing an unparseable resource name.
        /// </summary>
        /// <remarks>
        /// To parse successfully, the resource name must be formatted as one of the following:
        /// <list type="bullet">
        /// <item><description><c>customers/{customer_id}/hotelReconciliations/{commission_id}</c></description></item>
        /// </list>
        /// Or may be in any format if <paramref name="allowUnparsed"/> is <c>true</c>.
        /// </remarks>
        /// <param name="hotelReconciliationName">The resource name in string form. Must not be <c>null</c>.</param>
        /// <param name="allowUnparsed">
        /// If <c>true</c> will successfully store an unparseable resource name into the <see cref="UnparsedResource"/>
        /// property; otherwise will throw an <see cref="sys::ArgumentException"/> if an unparseable resource name is
        /// specified.
        /// </param>
        /// <param name="result">
        /// When this method returns, the parsed <see cref="HotelReconciliationName"/>, or <c>null</c> if parsing
        /// failed.
        /// </param>
        /// <returns><c>true</c> if the name was parsed successfully; <c>false</c> otherwise.</returns>
        public static bool TryParse(string hotelReconciliationName, bool allowUnparsed, out HotelReconciliationName result)
        {
            gax::GaxPreconditions.CheckNotNull(hotelReconciliationName, nameof(hotelReconciliationName));
            gax::TemplatedResourceName resourceName;
            // Try the one known pattern first; resourceName[0]/[1] are the template's
            // customer_id and commission_id segments in declaration order.
            if (s_customerCommission.TryParseName(hotelReconciliationName, out resourceName))
            {
                result = FromCustomerCommission(resourceName[0], resourceName[1]);
                return true;
            }
            // Fall back to storing the raw string only when the caller explicitly allows it.
            if (allowUnparsed)
            {
                if (gax::UnparsedResourceName.TryParse(hotelReconciliationName, out gax::UnparsedResourceName unparsedResourceName))
                {
                    result = FromUnparsed(unparsedResourceName);
                    return true;
                }
            }
            result = null;
            return false;
        }

        private HotelReconciliationName(ResourceNameType type, gax::UnparsedResourceName unparsedResourceName = null, string commissionId = null, string customerId = null)
        {
            Type = type;
            UnparsedResource = unparsedResourceName;
            CommissionId = commissionId;
            CustomerId = customerId;
        }

        /// <summary>
        /// Constructs a new instance of a <see cref="HotelReconciliationName"/> class from the component parts of
        /// pattern <c>customers/{customer_id}/hotelReconciliations/{commission_id}</c>
        /// </summary>
        /// <param name="customerId">The <c>Customer</c> ID. Must not be <c>null</c> or empty.</param>
        /// <param name="commissionId">The <c>Commission</c> ID. Must not be <c>null</c> or empty.</param>
        public HotelReconciliationName(string customerId, string commissionId) : this(ResourceNameType.CustomerCommission, customerId: gax::GaxPreconditions.CheckNotNullOrEmpty(customerId, nameof(customerId)), commissionId: gax::GaxPreconditions.CheckNotNullOrEmpty(commissionId, nameof(commissionId)))
        {
        }

        /// <summary>The <see cref="ResourceNameType"/> of the contained resource name.</summary>
        public ResourceNameType Type { get; }

        /// <summary>
        /// The contained <see cref="gax::UnparsedResourceName"/>. Only non-<c>null</c> if this instance contains an
        /// unparsed resource name.
        /// </summary>
        public gax::UnparsedResourceName UnparsedResource { get; }

        /// <summary>
        /// The <c>Commission</c> ID. Will not be <c>null</c>, unless this instance contains an unparsed resource name.
        /// </summary>
        public string CommissionId { get; }

        /// <summary>
        /// The <c>Customer</c> ID. Will not be <c>null</c>, unless this instance contains an unparsed resource name.
        /// </summary>
        public string CustomerId { get; }

        /// <summary>Whether this instance contains a resource name with a known pattern.</summary>
        public bool IsKnownPattern => Type != ResourceNameType.Unparsed;

        /// <summary>The string representation of the resource name.</summary>
        /// <returns>The string representation of the resource name.</returns>
        public override string ToString()
        {
            switch (Type)
            {
                case ResourceNameType.Unparsed: return UnparsedResource.ToString();
                case ResourceNameType.CustomerCommission: return s_customerCommission.Expand(CustomerId, CommissionId);
                default: throw new sys::InvalidOperationException("Unrecognized resource-type.");
            }
        }

        /// <summary>Returns a hash code for this resource name.</summary>
        // Equality and hashing are both delegated to the canonical string form.
        public override int GetHashCode() => ToString().GetHashCode();

        /// <inheritdoc/>
        public override bool Equals(object obj) => Equals(obj as HotelReconciliationName);

        /// <inheritdoc/>
        public bool Equals(HotelReconciliationName other) => ToString() == other?.ToString();

        /// <inheritdoc/>
        public static bool operator ==(HotelReconciliationName a, HotelReconciliationName b) => ReferenceEquals(a, b) || (a?.Equals(b) ?? false);

        /// <inheritdoc/>
        public static bool operator !=(HotelReconciliationName a, HotelReconciliationName b) => !(a == b);
    }

    public partial class HotelReconciliation
    {
        /// <summary>
        /// <see cref="HotelReconciliationName"/>-typed view over the <see cref="ResourceName"/> resource name property.
        /// </summary>
        internal HotelReconciliationName ResourceNameAsHotelReconciliationName
        {
            get => string.IsNullOrEmpty(ResourceName) ? null : HotelReconciliationName.Parse(ResourceName, allowUnparsed: true);
            set => ResourceName = value?.ToString() ?? "";
        }
    }
}
using System;
using System.Data;
using System.Data.SqlClient;
using Csla;
using Csla.Data;

namespace ParentLoadSoftDelete.Business.ERLevel
{
    /// <summary>
    /// E12Level111111 (editable child object).<br/>
    /// This is a generated base class of <see cref="E12Level111111"/> business object.
    /// </summary>
    /// <remarks>
    /// This class is an item of <see cref="E11Level111111Coll"/> collection.
    /// </remarks>
    [Serializable]
    public partial class E12Level111111 : BusinessBase<E12Level111111>
    {
        #region Static Fields

        // Counter used to hand out unique negative temporary IDs to new (unsaved) objects;
        // the real ID is assigned by the AddE12Level111111 stored procedure on insert.
        private static int _lastID;

        #endregion

        #region State Fields

        // Foreign key to the parent row, read from the "QarentID1" column in Fetch.
        // NOTE(review): "qarentID1" is presumably a generator typo for "parentID1" -- it is
        // internal and may be referenced by the parent/collection classes, so confirm all
        // usages (and the QarentID1 column name) before renaming.
        [NotUndoable]
        [NonSerialized]
        internal int qarentID1 = 0;

        #endregion

        #region Business Properties

        /// <summary>
        /// Maintains metadata about <see cref="Level_1_1_1_1_1_1_ID"/> property.
        /// </summary>
        public static readonly PropertyInfo<int> Level_1_1_1_1_1_1_IDProperty = RegisterProperty<int>(p => p.Level_1_1_1_1_1_1_ID, "Level_1_1_1_1_1_1 ID");
        /// <summary>
        /// Gets the Level_1_1_1_1_1_1 ID.
        /// </summary>
        /// <value>The Level_1_1_1_1_1_1 ID.</value>
        public int Level_1_1_1_1_1_1_ID
        {
            get { return GetProperty(Level_1_1_1_1_1_1_IDProperty); }
        }

        /// <summary>
        /// Maintains metadata about <see cref="Level_1_1_1_1_1_1_Name"/> property.
        /// </summary>
        public static readonly PropertyInfo<string> Level_1_1_1_1_1_1_NameProperty = RegisterProperty<string>(p => p.Level_1_1_1_1_1_1_Name, "Level_1_1_1_1_1_1 Name");
        /// <summary>
        /// Gets or sets the Level_1_1_1_1_1_1 Name.
        /// </summary>
        /// <value>The Level_1_1_1_1_1_1 Name.</value>
        public string Level_1_1_1_1_1_1_Name
        {
            get { return GetProperty(Level_1_1_1_1_1_1_NameProperty); }
            set { SetProperty(Level_1_1_1_1_1_1_NameProperty, value); }
        }

        #endregion

        #region Factory Methods

        /// <summary>
        /// Factory method. Creates a new <see cref="E12Level111111"/> object.
        /// </summary>
        /// <returns>A reference to the created <see cref="E12Level111111"/> object.</returns>
        internal static E12Level111111 NewE12Level111111()
        {
            return DataPortal.CreateChild<E12Level111111>();
        }

        /// <summary>
        /// Factory method. Loads a <see cref="E12Level111111"/> object from the given SafeDataReader.
        /// </summary>
        /// <param name="dr">The SafeDataReader to use.</param>
        /// <returns>A reference to the fetched <see cref="E12Level111111"/> object.</returns>
        internal static E12Level111111 GetE12Level111111(SafeDataReader dr)
        {
            E12Level111111 obj = new E12Level111111();
            // show the framework that this is a child object
            obj.MarkAsChild();
            obj.Fetch(dr);
            obj.MarkOld();
            return obj;
        }

        #endregion

        #region Constructor

        /// <summary>
        /// Initializes a new instance of the <see cref="E12Level111111"/> class.
        /// </summary>
        /// <remarks> Do not use to create a Csla object. Use factory methods instead.</remarks>
        private E12Level111111()
        {
            // Prevent direct creation
            // show the framework that this is a child object
            MarkAsChild();
        }

        #endregion

        #region Data Access

        /// <summary>
        /// Loads default values for the <see cref="E12Level111111"/> object properties.
        /// </summary>
        [Csla.RunLocal]
        protected override void Child_Create()
        {
            // Temporary negative ID until the insert stored procedure returns the real one.
            LoadProperty(Level_1_1_1_1_1_1_IDProperty, System.Threading.Interlocked.Decrement(ref _lastID));
            var args = new DataPortalHookArgs();
            OnCreate(args);
            base.Child_Create();
        }

        /// <summary>
        /// Loads a <see cref="E12Level111111"/> object from the given SafeDataReader.
        /// </summary>
        /// <param name="dr">The SafeDataReader to use.</param>
        private void Fetch(SafeDataReader dr)
        {
            // Value properties
            LoadProperty(Level_1_1_1_1_1_1_IDProperty, dr.GetInt32("Level_1_1_1_1_1_1_ID"));
            LoadProperty(Level_1_1_1_1_1_1_NameProperty, dr.GetString("Level_1_1_1_1_1_1_Name"));
            qarentID1 = dr.GetInt32("QarentID1");
            var args = new DataPortalHookArgs(dr);
            OnFetchRead(args);
        }

        /// <summary>
        /// Inserts a new <see cref="E12Level111111"/> object in the database.
        /// </summary>
        /// <param name="parent">The parent object.</param>
        [Transactional(TransactionalTypes.TransactionScope)]
        private void Child_Insert(E10Level11111 parent)
        {
            using (var ctx = ConnectionManager<SqlConnection>.GetManager("DeepLoad"))
            {
                using (var cmd = new SqlCommand("AddE12Level111111", ctx.Connection))
                {
                    cmd.CommandType = CommandType.StoredProcedure;
                    cmd.Parameters.AddWithValue("@Level_1_1_1_1_1_ID", parent.Level_1_1_1_1_1_ID).DbType = DbType.Int32;
                    // The ID parameter is an output: the stored procedure assigns the real key,
                    // which is loaded back into the property after execution.
                    cmd.Parameters.AddWithValue("@Level_1_1_1_1_1_1_ID", ReadProperty(Level_1_1_1_1_1_1_IDProperty)).Direction = ParameterDirection.Output;
                    cmd.Parameters.AddWithValue("@Level_1_1_1_1_1_1_Name", ReadProperty(Level_1_1_1_1_1_1_NameProperty)).DbType = DbType.String;
                    var args = new DataPortalHookArgs(cmd);
                    OnInsertPre(args);
                    cmd.ExecuteNonQuery();
                    OnInsertPost(args);
                    LoadProperty(Level_1_1_1_1_1_1_IDProperty, (int) cmd.Parameters["@Level_1_1_1_1_1_1_ID"].Value);
                }
            }
        }

        /// <summary>
        /// Updates in the database all changes made to the <see cref="E12Level111111"/> object.
        /// </summary>
        [Transactional(TransactionalTypes.TransactionScope)]
        private void Child_Update()
        {
            using (var ctx = ConnectionManager<SqlConnection>.GetManager("DeepLoad"))
            {
                using (var cmd = new SqlCommand("UpdateE12Level111111", ctx.Connection))
                {
                    cmd.CommandType = CommandType.StoredProcedure;
                    cmd.Parameters.AddWithValue("@Level_1_1_1_1_1_1_ID", ReadProperty(Level_1_1_1_1_1_1_IDProperty)).DbType = DbType.Int32;
                    cmd.Parameters.AddWithValue("@Level_1_1_1_1_1_1_Name", ReadProperty(Level_1_1_1_1_1_1_NameProperty)).DbType = DbType.String;
                    var args = new DataPortalHookArgs(cmd);
                    OnUpdatePre(args);
                    cmd.ExecuteNonQuery();
                    OnUpdatePost(args);
                }
            }
        }

        /// <summary>
        /// Self deletes the <see cref="E12Level111111"/> object from database.
        /// </summary>
        [Transactional(TransactionalTypes.TransactionScope)]
        private void Child_DeleteSelf()
        {
            using (var ctx = ConnectionManager<SqlConnection>.GetManager("DeepLoad"))
            {
                using (var cmd = new SqlCommand("DeleteE12Level111111", ctx.Connection))
                {
                    cmd.CommandType = CommandType.StoredProcedure;
                    cmd.Parameters.AddWithValue("@Level_1_1_1_1_1_1_ID", ReadProperty(Level_1_1_1_1_1_1_IDProperty)).DbType = DbType.Int32;
                    var args = new DataPortalHookArgs(cmd);
                    OnDeletePre(args);
                    cmd.ExecuteNonQuery();
                    OnDeletePost(args);
                }
            }
        }

        #endregion

        #region Pseudo Events

        /// <summary>
        /// Occurs after setting all defaults for object creation.
        /// </summary>
        partial void OnCreate(DataPortalHookArgs args);

        /// <summary>
        /// Occurs in DataPortal_Delete, after setting query parameters and before the delete operation.
        /// </summary>
        partial void OnDeletePre(DataPortalHookArgs args);

        /// <summary>
        /// Occurs in DataPortal_Delete, after the delete operation, before Commit().
        /// </summary>
        partial void OnDeletePost(DataPortalHookArgs args);

        /// <summary>
        /// Occurs after setting query parameters and before the fetch operation.
        /// </summary>
        partial void OnFetchPre(DataPortalHookArgs args);

        /// <summary>
        /// Occurs after the fetch operation (object or collection is fully loaded and set up).
        /// </summary>
        partial void OnFetchPost(DataPortalHookArgs args);

        /// <summary>
        /// Occurs after the low level fetch operation, before the data reader is destroyed.
        /// </summary>
        partial void OnFetchRead(DataPortalHookArgs args);

        /// <summary>
        /// Occurs after setting query parameters and before the update operation.
        /// </summary>
        partial void OnUpdatePre(DataPortalHookArgs args);

        /// <summary>
        /// Occurs in DataPortal_Insert, after the update operation, before setting back row identifiers (RowVersion) and Commit().
        /// </summary>
        partial void OnUpdatePost(DataPortalHookArgs args);

        /// <summary>
        /// Occurs in DataPortal_Insert, after setting query parameters and before the insert operation.
        /// </summary>
        partial void OnInsertPre(DataPortalHookArgs args);

        /// <summary>
        /// Occurs in DataPortal_Insert, after the insert operation, before setting back row identifiers (ID and RowVersion) and Commit().
        /// </summary>
        partial void OnInsertPost(DataPortalHookArgs args);

        #endregion

    }
}
namespace Shadow.SchoolGame.Utils
{
    using System;
    using Loaders;
    using System.IO;
    using UnityEngine;
    using UnityEngine.UI;
    using Newtonsoft.Json;
    using Game.Controllers;

    /// <summary>
    /// Binds the settings screen widgets to the shared <see cref="SettingsData"/>
    /// instance and persists it to disk as JSON.
    /// </summary>
    public class Settings : MonoBehaviour
    {
        // Sound options.
        public Toggle musicPlay;
        public InputField bckgMusic;
        public Toggle rightPlay;
        public InputField rightMusic;
        public Toggle wrongPlay;
        public InputField wrongMusic;
        // Colour options (RGB components entered as text).
        public InputField bgColorR;
        public InputField bgColorG;
        public InputField bgColorB;
        public InputField timerColorR;
        public InputField timerColorG;
        public InputField timerColorB;
        // Timing options (seconds entered as text).
        public InputField waitAnswer;
        public InputField waitAfterAnswer;
        public InputField waitScore;
        public InputField waitOver;
        // Team options.
        public InputField blueName;
        public InputField blueColorR;
        public InputField blueColorG;
        public InputField blueColorB;
        public InputField redName;
        public InputField redColorR;
        public InputField redColorG;
        public InputField redColorB;

        /// <summary>
        /// Populates every widget from the currently loaded settings.
        /// </summary>
        void Awake()
        {
            // Toggles.
            musicPlay.isOn = settings.background;
            rightPlay.isOn = settings.rightEffect;
            wrongPlay.isOn = settings.wrongEffect;
            // Sound file names.
            bckgMusic.text = settings.backgroundMusic;
            rightMusic.text = settings.rightSound;
            wrongMusic.text = settings.wrongSound;
            // Delays.
            waitAnswer.text = Convert.ToString(settings.waitForAnswer);
            waitAfterAnswer.text = Convert.ToString(settings.waitAfterAnswer);
            waitScore.text = Convert.ToString(settings.waitForScore);
            waitOver.text = Convert.ToString(settings.waitForGameOver);
            // Background / timer colours.
            bgColorR.text = Convert.ToString(settings.backgroundColorR);
            bgColorG.text = Convert.ToString(settings.backgroundColorG);
            bgColorB.text = Convert.ToString(settings.backgroundColorB);
            timerColorR.text = Convert.ToString(settings.timerColorR);
            timerColorG.text = Convert.ToString(settings.timerColorG);
            timerColorB.text = Convert.ToString(settings.timerColorB);
            // Blue team.
            blueName.text = settings.blue.name;
            blueColorR.text = Convert.ToString(settings.blue.nameColorR);
            blueColorG.text = Convert.ToString(settings.blue.nameColorG);
            blueColorB.text = Convert.ToString(settings.blue.nameColorB);
            // Red team.
            redName.text = settings.red.name;
            redColorR.text = Convert.ToString(settings.red.nameColorR);
            redColorG.text = Convert.ToString(settings.red.nameColorG);
            redColorB.text = Convert.ToString(settings.red.nameColorB);
        }

        // Toggle handlers.
        public void SetBackground(bool b) { settings.background = b; }

        public void SetRightEffect(bool b) { settings.rightEffect = b; }

        public void SetWrongEffect(bool b) { settings.wrongEffect = b; }

        /// <summary>
        /// Accepts the file name only when <see cref="SoundLoader"/> can find it;
        /// the input text turns green on success and red on failure.
        /// </summary>
        public void SetBackgroundMusic(string s)
        {
            if (!SoundLoader.Exists(s))
            {
                bckgMusic.textComponent.color = Color.red;
                return;
            }
            bckgMusic.textComponent.color = Color.green;
            settings.backgroundMusic = s;
        }

        /// <summary>Same validation scheme as <see cref="SetBackgroundMusic"/>.</summary>
        public void SetRightMusic(string s)
        {
            if (!SoundLoader.Exists(s))
            {
                rightMusic.textComponent.color = Color.red;
                return;
            }
            rightMusic.textComponent.color = Color.green;
            settings.rightSound = s;
        }

        /// <summary>Same validation scheme as <see cref="SetBackgroundMusic"/>.</summary>
        public void SetWrongMusic(string s)
        {
            if (!SoundLoader.Exists(s))
            {
                wrongMusic.textComponent.color = Color.red;
                return;
            }
            wrongMusic.textComponent.color = Color.green;
            settings.wrongSound = s;
        }

        // Delay handlers. NOTE(review): Convert.ToInt32 throws FormatException on
        // non-numeric input — presumably the InputField is configured as numeric; verify.
        public void SetWaitForAnswer(string i) { settings.waitForAnswer = Convert.ToInt32(i); }

        public void SetWaitAfterAnswer(string i) { settings.waitAfterAnswer = Convert.ToInt32(i); }

        public void SetWaitForScore(string i) { settings.waitForScore = Convert.ToInt32(i); }

        public void SetWaitForGameOver(string i) { settings.waitForGameOver = Convert.ToInt32(i); }

        // Background colour handlers.
        public void SetBckgCR(string f) { settings.backgroundColorR = Convert.ToSingle(f); }

        public void SetBckgCG(string f) { settings.backgroundColorG = Convert.ToSingle(f); }

        public void SetBckgCB(string f) { settings.backgroundColorB = Convert.ToSingle(f); }

        // Timer colour handlers.
        public void SetTimerCR(string f) { settings.timerColorR = Convert.ToSingle(f); }

        public void SetTimerCG(string f) { settings.timerColorG = Convert.ToSingle(f); }

        public void SetTimerCB(string f) { settings.timerColorB = Convert.ToSingle(f); }

        // Blue team handlers.
        public void SetBlue(string s) { settings.blue.name = s; }

        public void SetBlueCR(string f) { settings.blue.nameColorR = Convert.ToSingle(f); }

        public void SetBlueCG(string f) { settings.blue.nameColorG = Convert.ToSingle(f); }

        public void SetBlueCB(string f) { settings.blue.nameColorB = Convert.ToSingle(f); }

        // Red team handlers.
        public void SetRed(string s) { settings.red.name = s; }

        public void SetRedCR(string f) { settings.red.nameColorR = Convert.ToSingle(f); }

        public void SetRedCG(string f) { settings.red.nameColorG = Convert.ToSingle(f); }

        public void SetRedCB(string f) { settings.red.nameColorB = Convert.ToSingle(f); }

        /// <summary>
        /// Persists the current settings and asks the game core to reload audio.
        /// </summary>
        public void Save()
        {
            SaveData();
            Core.inst.LoadMusic();
        }

        // Shared settings instance, populated by Load().
        public static SettingsData settings;

        /// <summary>Full path of the JSON settings file.</summary>
        public static string SettingsLocation
        {
            get { return Loader.DataFolder + Path.AltDirectorySeparatorChar + "Settings.json"; }
        }

        // Serializer configuration used for both reading and writing.
        static JsonSerializerSettings SerializerSettings
        {
            get
            {
                return new JsonSerializerSettings
                {
                    Formatting = Formatting.Indented,
                    NullValueHandling = NullValueHandling.Ignore,
                    MissingMemberHandling = MissingMemberHandling.Ignore,
                    DefaultValueHandling = DefaultValueHandling.Include
                };
            }
        }

        /// <summary>
        /// Serializes the settings to JSON and writes them to <see cref="SettingsLocation"/>.
        /// </summary>
        public static void SaveData()
        {
            string json = JsonConvert.SerializeObject(settings, SerializerSettings);
            Debug.Log(json);
            File.WriteAllText(SettingsLocation, json);
        }

        /// <summary>
        /// Loads settings from disk, or creates and saves defaults when no file exists.
        /// </summary>
        public static void Load()
        {
            if (File.Exists(SettingsLocation))
            {
                settings = JsonConvert.DeserializeObject<SettingsData>(File.ReadAllText(SettingsLocation), SerializerSettings);
                return;
            }
            settings = new SettingsData();
            SaveData();
        }
    }
}
// // Copyright (c) 2010-2012 Frank A. Krueger // // Permission is hereby granted, free of charge, to any person obtaining a copy // of this software and associated documentation files (the "Software"), to deal // in the Software without restriction, including without limitation the rights // to use, copy, modify, merge, publish, distribute, sublicense, and/or sell // copies of the Software, and to permit persons to whom the Software is // furnished to do so, subject to the following conditions: // // The above copyright notice and this permission notice shall be included in // all copies or substantial portions of the Software. // // THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR // IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, // FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE // AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER // LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, // OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN // THE SOFTWARE. 
//
using System;
using System.Collections.Generic;
using Android.Graphics;

namespace CrossGraphics.Android
{
    // IGraphics implementation that renders onto an Android.Graphics.Canvas.
    // Keeps three cached Paint objects (fill / stroke / text) so per-draw-call
    // allocation is avoided, and supports a stack of offscreen render targets.
    public class AndroidGraphics : IGraphics
    {
        ColPaints _paints;

        // Bundle of the three paints that share the current color.
        class ColPaints
        {
            public Paint Fill;
            public Paint Stroke;
            public Paint Text;
        }

        // Wraps the given canvas; all paints start anti-aliased and black.
        public AndroidGraphics (Canvas canvas)
        {
            _c = canvas;
            _offscreen = null;
            _paints = new ColPaints();
            _paints.Fill = new Paint ();
            _paints.Fill.AntiAlias = true;
            _paints.Fill.SetStyle (Paint.Style.Fill);
            _paints.Stroke = new Paint ();
            _paints.Stroke.AntiAlias = true;
            _paints.Stroke.SetStyle (Paint.Style.Stroke);
            _paints.Text = new Paint ();
            _paints.Text.AntiAlias = true;
            _paints.Text.SetStyle (Paint.Style.Fill); // text is drawn filled
            SetColor (Colors.Black);
        }

        // Saved canvas/offscreen pair for nested BeginOffscreen/EndOffscreen calls.
        private struct drawcontext { public Canvas c; public AndroidImage img; };

        Canvas _c;                         // current render target
        AndroidImage _offscreen;           // non-null while drawing offscreen
        private Stack<drawcontext> _prev;  // lazily created offscreen context stack
        Font _lastFont;                    // cache so typeface/size are only set on change
        private Paint.FontMetrics TextFontMetrics;

        public void BeginEntity (object entity)
        {
        }

        // Applies the font to the text paint only when it actually changed.
        public void SetFont (Font f)
        {
            if (f != _lastFont) {
                _lastFont = f;
                //_paints.Text.SetTypeface (f.ToTypeface());
                _paints.Text.SetTypeface (Typeface.Default); // TODO temporary hack
                _paints.Text.TextSize = (float) f.Size;
                TextFontMetrics = _paints.Text.GetFontMetrics ();
            }
        }

        Color _lastColor;

        // Pushes the color to all three paints, skipping redundant updates.
        public void SetColor (Color c)
        {
            if (!Color.AreEqual(c, _lastColor)) {
                _lastColor = c;
                var ac = new global::Android.Graphics.Color (c.Red, c.Green, c.Blue, c.Alpha);
                _paints.Stroke.Color = ac;
                _paints.Fill.Color = ac;
                _paints.Text.Color = ac;
            }
        }

        public void FillRoundedRect (float x, float y, float width, float height, float radius)
        {
            _c.DrawRoundRect (new RectF (x, y, x + width, y + height), radius, radius, _paints.Fill);
        }

        public void DrawRoundedRect (float x, float y, float width, float height, float radius, float w)
        {
            _paints.Stroke.StrokeWidth = w;
            _c.DrawRoundRect (new RectF (x, y, x + width, y + height), radius, radius, _paints.Stroke);
        }

        public void FillRect (float x, float y, float width, float height)
        {
            _c.DrawRect (new RectF (x, y, x + width, y + height), _paints.Fill);
        }

        public void DrawRect (float x, float y, float width, float height, float w)
        {
            _paints.Stroke.StrokeWidth = w;
            _c.DrawRect (new RectF (x, y, x + width, y + height), _paints.Stroke);
        }

        public void FillOval (float x, float y, float width, float height)
        {
            _c.DrawOval (new RectF (x, y, x + width, y + height), _paints.Fill);
        }

        public void DrawOval (float x, float y, float width, float height, float w)
        {
            _paints.Stroke.StrokeWidth = w;
            _c.DrawOval (new RectF (x, y, x + width, y + height), _paints.Stroke);
        }

        const float RadiansToDegrees = (float)(180 / Math.PI);

        // Angles arrive in radians; Canvas.DrawArc wants degrees, and the sign is
        // flipped to convert between the two coordinate conventions.
        public void FillArc (float cx, float cy, float radius, float startAngle, float endAngle)
        {
            var sa = -startAngle * RadiansToDegrees;
            var ea = -endAngle * RadiansToDegrees;
            _c.DrawArc (new RectF (cx - radius, cy - radius, cx + radius, cy + radius), sa, ea - sa, false, _paints.Fill);
        }

        public void DrawArc (float cx, float cy, float radius, float startAngle, float endAngle, float w)
        {
            var sa = -startAngle * RadiansToDegrees;
            var ea = -endAngle * RadiansToDegrees;
            _paints.Stroke.StrokeWidth = w;
            _c.DrawArc (new RectF (cx - radius, cy - radius, cx + radius, cy + radius), sa, ea - sa, false, _paints.Stroke);
        }

        bool _inLines = false;
        Path _linesPath = null;
        int _linesCount = 0;
        float _lineWidth = 1;

        // Starts batching DrawLine calls into a single Path drawn at EndLines.
        // NOTE: the 'rounded' argument is currently ignored; EndLines always uses a
        // round stroke join.
        public void BeginLines (bool rounded)
        {
            if (!_inLines) {
                _inLines = true;
                _linesPath = new Path ();
                _linesCount = 0;
            }
        }

        // In batch mode the segments are appended to the path and the LAST width
        // wins for the whole batch; otherwise the line is drawn immediately.
        public void DrawLine (float sx, float sy, float ex, float ey, float w)
        {
            if (_inLines) {
                if (_linesCount == 0) {
                    _linesPath.MoveTo (sx, sy);
                }
                _linesPath.LineTo (ex, ey);
                _lineWidth = w;
                _linesCount++;
            }
            else {
                _paints.Stroke.StrokeWidth = w;
                _c.DrawLine (sx, sy, ex, ey, _paints.Stroke);
            }
        }

        public void EndLines ()
        {
            if (_inLines) {
                _inLines = false;
                _paints.Stroke.StrokeWidth = _lineWidth;
                _paints.Stroke.StrokeJoin = Paint.Join.Round;
                _c.DrawPath (_linesPath, _paints.Stroke);
                _linesPath.Dispose ();
                _linesPath = null;
            }
        }

        public void DrawImage (IImage img, float x, float y)
        {
            var dimg = img as AndroidImage;
            if (dimg != null) {
                //SetColor (Xamarin.Forms.Color.White); // TODO what if there is no fill set?
                _c.DrawBitmap (
                    dimg.Bitmap,
                    x, y,
                    null); //_paints.Fill);
            }
        }

        // Scaled draw: blits the whole source bitmap into the destination rect.
        public void DrawImage (IImage img, float x, float y, float width, float height)
        {
            var dimg = img as AndroidImage;
            if (dimg != null) {
                SetColor (Colors.White);
                _c.DrawBitmap (
                    dimg.Bitmap,
                    new Rect (0, 0, dimg.Bitmap.Width, dimg.Bitmap.Height),
                    new RectF (x, y, x + width, y + height),
                    _paints.Fill);
            }
        }

        // Draws a string aligned inside the given box. Only the alignments that
        // need it trigger a text-measure; lineBreak is not yet honored (TODO below).
        public void DrawString(string s, float box_x, float box_y, float box_width, float box_height,
            LineBreakMode lineBreak, TextAlignment horizontal_align, TextAlignment vertical_align )
        {
            // TODO
            if (string.IsNullOrWhiteSpace (s))
                return;

            //SetTextAlign()
            float text_width;
            float text_height;

            if ( (horizontal_align != TextAlignment.Start) || (vertical_align != TextAlignment.Start)) {
                // not all of the alignments need the bounding rect. don't
                // calculate it if not needed.
                text_width = _paints.Text.MeasureText (s);
                text_height = - (TextFontMetrics.Ascent); // ascent is negative; height above baseline
            }
            else {
                text_width = 0;
                text_height = 0;
            }

            //Console.WriteLine ("width: {0} height: {1}", text_width, text_height);

            float x;
            switch (horizontal_align) {
            case TextAlignment.End:
                x = (box_x + box_width) - text_width;
                break;
            case TextAlignment.Center:
                x = box_x + (box_width - text_width) / 2;
                break;
            case TextAlignment.Start:
            default:
                x = box_x;
                break;
            }

            float y;
            switch (vertical_align) {
            case TextAlignment.End:
                y = box_y + text_height;
                break;
            case TextAlignment.Center:
                y = (box_y + box_height) - (box_height - text_height) / 2;
                break;
            case TextAlignment.Start:
            default:
                y = (box_y + box_height);
                break;
            }

            // DrawText's y is the text baseline.
            _c.DrawText (s, x, y, _paints.Text);
        }

        // Returns null when the file cannot be decoded.
        public IImage ImageFromFile (string path)
        {
            var bmp = BitmapFactory.DecodeFile (path);
            if (bmp == null) return null;

            var dimg = new AndroidImage () {
                Bitmap = bmp
            };
            return dimg;
        }

        // Redirects drawing into an offscreen bitmap. The previous target is pushed
        // on a stack; the supplied image is reused when it is large enough,
        // otherwise it is destroyed and a fresh bitmap is allocated.
        public void BeginOffscreen(float width, float height, IImage img)
        {
            if (_prev == null) {
                _prev = new Stack<drawcontext> ();
            }
            _prev.Push (new drawcontext { c = _c, img = _offscreen });
            _offscreen = null;

            if (img != null) {
                var aimg = img as AndroidImage;
                if ( (aimg.Bitmap.Width >= width) && (aimg.Bitmap.Height >= height)) {
                    _offscreen = img as AndroidImage;
                }
                else {
                    img.Destroy ();
                }
            }
            if (null == _offscreen) {
                _offscreen = new AndroidImage ();
                _offscreen.Bitmap = Bitmap.CreateBitmap ((int)width, (int)height, Bitmap.Config.Rgb565); // TODO what bitmap config?
            }
            _c = new Canvas ();
            _c.SetBitmap (_offscreen.Bitmap);
#if false
            if (img != null) {
                SetColor (Xamarin.Forms.Color.Yellow);
                FillRect (0, 0, _offscreen.Bitmap.Width, _offscreen.Bitmap.Height);
            }
#endif
        }

        // Pops the previous render target and returns the finished offscreen image.
        public IImage EndOffscreen()
        {
            var dimg = _offscreen;
            drawcontext ctx = _prev.Pop ();
            _offscreen = ctx.img;
            _c = ctx.c;
            return dimg;
        }

        public void SaveState()
        {
            _c.Save ();
        }

        public void SetClippingRect (float x, float y, float width, float height)
        {
            _c.ClipRect (x, y, x + width, y + height);
        }

        public void Translate(float dx, float dy)
        {
            _c.Translate (dx, dy);
        }

        public void Scale(float sx, float sy)
        {
            _c.Scale (sx, sy);
        }

        public void RestoreState()
        {
            _c.Restore ();
        }
    }

    // IImage backed by an Android bitmap. Destroy() must be called to release
    // the native bitmap memory promptly.
    public class AndroidImage : IImage
    {
        public Bitmap Bitmap;

        public void Destroy()
        {
            Bitmap.Recycle ();
            Bitmap.Dispose ();
            Bitmap = null;
        }
    }
}
#region File Description
//-----------------------------------------------------------------------------
// CubemapProcessor.cs
//
// Microsoft XNA Community Game Platform
// Copyright (C) Microsoft Corporation. All rights reserved.
//-----------------------------------------------------------------------------
#endregion

#region Using Statements
using System;
using Microsoft.Xna.Framework;
using Microsoft.Xna.Framework.Graphics;
using Microsoft.Xna.Framework.Content.Pipeline;
using Microsoft.Xna.Framework.Content.Pipeline.Graphics;
#endregion

namespace CustomModelEffectPipeline
{
    /// <summary>
    /// Custom content pipeline processor converts regular
    /// 2D images into reflection cubemaps.
    /// </summary>
    [ContentProcessor]
    public class CubemapProcessor : ContentProcessor<TextureContent,
                                                     TextureCubeContent>
    {
        // Edge length, in pixels, of each generated cubemap face.
        const int cubemapSize = 256;


        /// <summary>
        /// Converts an arbitrary 2D image into a reflection cubemap.
        /// </summary>
        public override TextureCubeContent Process(TextureContent input,
                                                   ContentProcessorContext context)
        {
            // Convert the input data to Color format, for ease of processing.
            input.ConvertBitmapType(typeof(PixelBitmapContent<Color>));

            // Mirror the source image from left to right.
            PixelBitmapContent<Color> mirrored;

            mirrored = MirrorBitmap((PixelBitmapContent<Color>)input.Faces[0][0]);

            // Create the six cubemap faces.
            // The four sides come straight from horizontal quarters of the mirrored
            // image; top and bottom are synthesized by folding flaps inward.
            TextureCubeContent cubemap = new TextureCubeContent();

            cubemap.Faces[(int)CubeMapFace.NegativeZ] = CreateSideFace(mirrored, 0);
            cubemap.Faces[(int)CubeMapFace.NegativeX] = CreateSideFace(mirrored, 1);
            cubemap.Faces[(int)CubeMapFace.PositiveZ] = CreateSideFace(mirrored, 2);
            cubemap.Faces[(int)CubeMapFace.PositiveX] = CreateSideFace(mirrored, 3);

            cubemap.Faces[(int)CubeMapFace.PositiveY] = CreateTopFace(mirrored);
            cubemap.Faces[(int)CubeMapFace.NegativeY] = CreateBottomFace(mirrored);

            // Calculate mipmap data.
            cubemap.GenerateMipmaps(true);

            // Compress the cubemap into DXT1 format.
            cubemap.ConvertBitmapType(typeof(Dxt1BitmapContent));

            return cubemap;
        }


        /// <summary>
        /// Our source data is just a regular 2D image, but to make a good
        /// cubemap we need this to wrap on all sides without any visible seams.
        /// An easy way of making an image wrap from left to right is simply to
        /// put a mirrored copy of the image next to the original. The point
        /// where the image mirrors is still pretty obvious, but for a reflection
        /// map this will be good enough.
        /// </summary>
        static PixelBitmapContent<Color> MirrorBitmap(
                                            PixelBitmapContent<Color> source)
        {
            // Output is twice as wide: original on the left, mirror on the right.
            int width = source.Width * 2;

            PixelBitmapContent<Color> mirrored;

            mirrored = new PixelBitmapContent<Color>(width, source.Height);

            for (int y = 0; y < source.Height; y++)
            {
                for (int x = 0; x < source.Width; x++)
                {
                    Color color = source.GetPixel(x, y);

                    mirrored.SetPixel(x, y, color);
                    mirrored.SetPixel(width - x - 1, y, color);
                }
            }

            return mirrored;
        }


        /// <summary>
        /// The four side faces of the cubemap are easy to create: we just copy
        /// out the appropriate region from the middle of the source bitmap.
        /// </summary>
        static BitmapContent CreateSideFace(PixelBitmapContent<Color> source,
                                            int cubeSide)
        {
            PixelBitmapContent<Color> result;

            result = new PixelBitmapContent<Color>(cubemapSize, cubemapSize);

            // cubeSide (0-3) selects a horizontal quarter; vertically we take the
            // middle third of the source.
            Rectangle sourceRegion = new Rectangle(source.Width * cubeSide / 4,
                                                   source.Height / 3,
                                                   source.Width / 4,
                                                   source.Height / 3);

            Rectangle destinationRegion = new Rectangle(0, 0,
                                                        cubemapSize, cubemapSize);

            BitmapContent.Copy(source, sourceRegion,
                               result, destinationRegion);

            return result;
        }


        /// <summary>
        /// We have to do a lot of stretching and warping to create the top
        /// and bottom faces of the cubemap. To keep the result nicely free
        /// of jaggies, we do this computation on a larger version of the
        /// bitmap, then scale down the final result to antialias it.
        /// </summary>
        const int multisampleScale = 4;


        /// <summary>
        /// Folds four flaps inward from the top of the source bitmap,
        /// to create the top face of the cubemap.
        /// </summary>
        static BitmapContent CreateTopFace(PixelBitmapContent<Color> source)
        {
            PixelBitmapContent<Color> result;

            result = new PixelBitmapContent<Color>(cubemapSize * multisampleScale,
                                                   cubemapSize * multisampleScale);

            int right = cubemapSize * multisampleScale - 1;

            // One trapezoid per cube side, each rotated 90 degrees from the last.
            ScaleTrapezoid(source, 0, -1, result, right, 0, -1, 0, 0, 1);
            ScaleTrapezoid(source, 1, -1, result, 0, 0, 0, 1, 1, 0);
            ScaleTrapezoid(source, 2, -1, result, 0, right, 1, 0, 0, -1);
            ScaleTrapezoid(source, 3, -1, result, right, right, 0, -1, -1, 0);

            return BlurCubemapFace(result);
        }


        /// <summary>
        /// Folds four flaps inward from the bottom of the source bitmap,
        /// to create the bottom face of the cubemap.
        /// </summary>
        static BitmapContent CreateBottomFace(PixelBitmapContent<Color> source)
        {
            PixelBitmapContent<Color> result;

            result = new PixelBitmapContent<Color>(cubemapSize * multisampleScale,
                                                   cubemapSize * multisampleScale);

            int right = cubemapSize * multisampleScale - 1;

            ScaleTrapezoid(source, 0, 1, result, right, right, -1, 0, 0, -1);
            ScaleTrapezoid(source, 1, 1, result, 0, right, 0, -1, 1, 0);
            ScaleTrapezoid(source, 2, 1, result, 0, 0, 1, 0, 0, 1);
            ScaleTrapezoid(source, 3, 1, result, right, 0, 0, 1, -1, 0);

            return BlurCubemapFace(result);
        }


        /// <summary>
        /// Worker function for folding and stretching a flap from the source
        /// image to make up one quarter of the top or bottom cubemap faces.
        /// cubeSide (0-3) picks the horizontal source quarter; cubeY (-1 top / +1
        /// bottom) picks the vertical third; the direction vectors walk the
        /// destination pixels along and between rows.
        /// </summary>
        static void ScaleTrapezoid(PixelBitmapContent<Color> source,
                                   int cubeSide, int cubeY,
                                   PixelBitmapContent<Color> destination,
                                   int destinationX, int destinationY,
                                   int xDirection1, int yDirection1,
                                   int xDirection2, int yDirection2)
        {
            int size = destination.Width;

            // Compute the source x location.
            int baseSourceX = cubeSide * source.Width / 4;

            // Copy the image data one row at a time.
            // Each successive row is two pixels shorter, producing the trapezoid.
            for (int row = 0; row < size / 2; row++)
            {
                // Compute the source y location.
                int sourceY;

                if (cubeY < 0)
                    sourceY = source.Height / 3;
                else
                    sourceY = source.Height * 2 / 3;

                sourceY += cubeY * row * source.Height / 3 / (size / 2);

                // Stretch this row from the source to destination.
                int x = destinationX;
                int y = destinationY;

                int rowLength = size - row * 2;

                for (int i = 0; i < rowLength; i++)
                {
                    int sourceX = baseSourceX + i * source.Width / 4 / rowLength;

                    Color color = source.GetPixel(sourceX, sourceY);

                    destination.SetPixel(x, y, color);

                    x += xDirection1;
                    y += yDirection1;
                }

                // Advance to the start of the next row.
                destinationX += xDirection1 + xDirection2;
                destinationY += yDirection1 + yDirection2;
            }
        }


        /// <summary>
        /// The top and bottom cubemap faces will have a nasty discontinuity
        /// in the middle where the four source image flaps meet. We can cover
        /// this up by applying a blur filter to the problematic area.
        /// </summary>
        static BitmapContent BlurCubemapFace(PixelBitmapContent<Color> source)
        {
            // Create two temporary bitmaps.
            // Vector4 gives enough precision to accumulate the blur sums.
            PixelBitmapContent<Vector4> temp1, temp2;

            temp1 = new PixelBitmapContent<Vector4>(cubemapSize, cubemapSize);
            temp2 = new PixelBitmapContent<Vector4>(cubemapSize, cubemapSize);

            // Antialias by shrinking the larger generated image to the final size.
            BitmapContent.Copy(source, temp1);

            // Apply the blur in two passes, first horizontally, then vertically.
            ApplyBlurPass(temp1, temp2, 1, 0);
            ApplyBlurPass(temp2, temp1, 0, 1);

            // Convert the result back to Color format.
            PixelBitmapContent<Color> result;

            result = new PixelBitmapContent<Color>(cubemapSize, cubemapSize);

            BitmapContent.Copy(temp1, result);

            return result;
        }


        /// <summary>
        /// Applies a single pass of a separable box filter, blurring either
        /// along the x or y axis. This could give much higher quality results
        /// if we used a gaussian filter kernel rather than this simplistic box,
        /// but this is good enough to get the job done.
        /// </summary>
        static void ApplyBlurPass(PixelBitmapContent<Vector4> source,
                                  PixelBitmapContent<Vector4> destination,
                                  int dx, int dy)
        {
            int cubemapCenter = cubemapSize / 2;

            for (int y = 0; y < cubemapSize; y++)
            {
                for (int x = 0; x < cubemapSize; x++)
                {
                    // How far is this texel from the center of the image?
                    int xDist = cubemapCenter - x;
                    int yDist = cubemapCenter - y;

                    int distance = (int)Math.Sqrt(xDist * xDist + yDist * yDist);

                    // Blur more in the center, less near the edges.
                    // blurAmount also shrinks toward the edges, which keeps the
                    // sampling below from reading outside the bitmap.
                    int blurAmount = Math.Max(cubemapCenter - distance, 0) / 8;

                    // Accumulate source texel values.
                    Vector4 blurredValue = Vector4.Zero;

                    for (int i = -blurAmount; i <= blurAmount; i++)
                    {
                        blurredValue += source.GetPixel(x + dx * i, y + dy * i);
                    }

                    // Average them to calculate a blurred result.
                    blurredValue /= blurAmount * 2 + 1;

                    destination.SetPixel(x, y, blurredValue);
                }
            }
        }
    }
}
// ZipHelperStream.cs // // Copyright 2006, 2007 John Reilly // // This program is free software; you can redistribute it and/or // modify it under the terms of the GNU General Public License // as published by the Free Software Foundation; either version 2 // of the License, or (at your option) any later version. // // This program is distributed in the hope that it will be useful, // but WITHOUT ANY WARRANTY; without even the implied warranty of // MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the // GNU General Public License for more details. // // You should have received a copy of the GNU General Public License // along with this program; if not, write to the Free Software // Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA. // // Linking this library statically or dynamically with other modules is // making a combined work based on this library. Thus, the terms and // conditions of the GNU General Public License cover the whole // combination. // // As a special exception, the copyright holders of this library give you // permission to link this library with independent modules to produce an // executable, regardless of the license terms of these independent // modules, and to copy and distribute the resulting executable under // terms of your choice, provided that you also meet, for each linked // independent module, the terms and conditions of the license of that // module. An independent module is a module which is not derived from // or based on this library. If you modify this library, you may extend // this exception to your version of the library, but you are not // obligated to do so. If you do not wish to do so, delete this // exception statement from your version. using System; using System.IO; using System.Text; using ICSharpCode.SharpZipLib.Silverlight.Zip; namespace ICSharpCode.SharpZipLib.Zip { /// <summary> /// Holds data pertinent to a data descriptor. 
/// </summary>
public class DescriptorData
{
    /// <summary>
    /// Get / set the compressed size of data.
    /// </summary>
    public long CompressedSize
    {
        get { return compressedSize; }
        set { compressedSize = value; }
    }

    /// <summary>
    /// Get / set the uncompressed size of data
    /// </summary>
    public long Size
    {
        get { return size; }
        set { size = value; }
    }

    /// <summary>
    /// Get / set the crc value. The setter masks to 32 bits, matching the
    /// on-disk CRC-32 field.
    /// </summary>
    public long Crc
    {
        get { return crc; }
        set { crc = (value & 0xffffffff); }
    }

    #region Instance Fields
    long size;
    long compressedSize;
    long crc;
    #endregion
}

// Records where the CRC and size fields of a local header were written, so
// they can be patched after the entry data has been compressed.
class EntryPatchData
{
    public long SizePatchOffset
    {
        get { return sizePatchOffset_; }
        set { sizePatchOffset_ = value; }
    }

    public long CrcPatchOffset
    {
        get { return crcPatchOffset_; }
        set { crcPatchOffset_ = value; }
    }

    #region Instance Fields
    long sizePatchOffset_;
    long crcPatchOffset_;
    #endregion
}

/// <summary>
/// This class assists with writing/reading from Zip files.
/// </summary>
internal class ZipHelperStream : Stream
{
    #region Constructors
    /// <summary>
    /// Initialise an instance of this class.
    /// </summary>
    /// <param name="name">The name of the file to open.</param>
    public ZipHelperStream(string name)
    {
        // Opening by name implies ownership: the file is closed with this instance.
        stream_ = new FileStream(name, FileMode.Open, FileAccess.ReadWrite);
        isOwner_ = true;
    }

    /// <summary>
    /// Initialise a new instance of <see cref="ZipHelperStream"/>.
    /// </summary>
    /// <param name="stream">The stream to use.</param>
    public ZipHelperStream(Stream stream)
    {
        stream_ = stream;
    }
    #endregion

    /// <summary>
    /// Get / set a value indicating whether the underlying stream is owned or not.
    /// </summary>
    /// <remarks>If the stream is owned it is closed when this instance is closed.</remarks>
    public bool IsStreamOwner
    {
        get { return isOwner_; }
        set { isOwner_ = value; }
    }

    #region Base Stream Methods
    // All base Stream members simply delegate to the wrapped stream.
    public override bool CanRead
    {
        get { return stream_.CanRead; }
    }

    public override bool CanSeek
    {
        get { return stream_.CanSeek; }
    }

    public override bool CanTimeout
    {
        get { return stream_.CanTimeout; }
    }

    public override long Length
    {
        get { return stream_.Length; }
    }

    public override long Position
    {
        get { return stream_.Position; }
        set { stream_.Position = value; }
    }

    public override bool CanWrite
    {
        get { return stream_.CanWrite; }
    }

    public override void Flush()
    {
        stream_.Flush();
    }

    public override long Seek(long offset, SeekOrigin origin)
    {
        return stream_.Seek(offset, origin);
    }

    public override void SetLength(long value)
    {
        stream_.SetLength(value);
    }

    public override int Read(byte[] buffer, int offset, int count)
    {
        return stream_.Read(buffer, offset, count);
    }

    public override void Write(byte[] buffer, int offset, int count)
    {
        stream_.Write(buffer, offset, count);
    }

    /// <summary>
    /// Close the stream.
    /// </summary>
    /// <remarks>
    /// The underlying stream is closed only if <see cref="IsStreamOwner"/> is true.
    /// </remarks>
    override public void Close()
    {
        Stream toClose = stream_;
        stream_ = null;
        if (isOwner_ && (toClose != null))
        {
            isOwner_ = false;
            toClose.Close();
        }
    }
    #endregion

    // Write the local file header
    // TODO: ZipHelperStream.WriteLocalHeader is not yet used and needs checking for ZipFile and ZipOutputStream usage
    void WriteLocalHeader(ZipEntry entry, EntryPatchData patchData)
    {
        CompressionMethod method = entry.CompressionMethod;
        bool headerInfoAvailable = true; // How to get this?
        // NOTE(review): patchEntryHeader is never set true, so the patch-offset
        // branches below are currently unreachable — confirm before relying on them.
        bool patchEntryHeader = false;

        WriteLEInt(ZipConstants.LocalHeaderSignature);

        WriteLEShort(entry.Version);
        WriteLEShort(entry.Flags);

        WriteLEShort((byte)method);
        WriteLEInt((int)entry.DosTime);

        if (headerInfoAvailable == true) {
            WriteLEInt((int)entry.Crc);
            if ( entry.LocalHeaderRequiresZip64 ) {
                // -1 (0xffffffff) flags that the real sizes live in the Zip64 extra field.
                WriteLEInt(-1);
                WriteLEInt(-1);
            }
            else {
                WriteLEInt(entry.IsCrypted ? (int)entry.CompressedSize + ZipConstants.CryptoHeaderSize : (int)entry.CompressedSize);
                WriteLEInt((int)entry.Size);
            }
        } else {
            if (patchData != null) {
                patchData.CrcPatchOffset = stream_.Position;
            }
            WriteLEInt(0);  // Crc

            if ( patchData != null ) {
                patchData.SizePatchOffset = stream_.Position;
            }

            // For local header both sizes appear in Zip64 Extended Information
            if ( entry.LocalHeaderRequiresZip64 && patchEntryHeader ) {
                WriteLEInt(-1);
                WriteLEInt(-1);
            }
            else {
                WriteLEInt(0);  // Compressed size
                WriteLEInt(0);  // Uncompressed size
            }
        }

        byte[] name = ZipConstants.ConvertToArray(entry.Flags, entry.Name);

        if (name.Length > 0xFFFF) {
            throw new ZipException("Entry name too long.");
        }

        ZipExtraData ed = new ZipExtraData(entry.ExtraData);

        if (entry.LocalHeaderRequiresZip64 && (headerInfoAvailable || patchEntryHeader)) {
            // Header ID 1 is the Zip64 extended information extra field.
            ed.StartNewEntry();
            if (headerInfoAvailable) {
                ed.AddLeLong(entry.Size);
                ed.AddLeLong(entry.CompressedSize);
            }
            else {
                ed.AddLeLong(-1);
                ed.AddLeLong(-1);
            }
            ed.AddNewEntry(1);

            if ( !ed.Find(1) ) {
                throw new ZipException("Internal error cant find extra data");
            }

            if ( patchData != null ) {
                patchData.SizePatchOffset = ed.CurrentReadIndex;
            }
        }
        else {
            ed.Delete(1);
        }

        byte[] extra = ed.GetEntryData();

        WriteLEShort(name.Length);
        WriteLEShort(extra.Length);

        if ( name.Length > 0 ) {
            stream_.Write(name, 0, name.Length);
        }

        if ( entry.LocalHeaderRequiresZip64 && patchEntryHeader ) {
            patchData.SizePatchOffset += stream_.Position;
        }

        if ( extra.Length > 0 ) {
            stream_.Write(extra, 0, extra.Length);
        }
    }

    /// <summary>
    /// Locates a block with the desired <paramref name="signature"/>.
    /// </summary>
    /// <param name="signature">The signature to find.</param>
    /// <param name="endLocation">Location, marking the end of block.</param>
    /// <param name="minimumBlockSize">Minimum size of the block.</param>
    /// <param name="maximumVariableData">The maximum variable data.</param>
    /// <returns>Returns the offset of the first byte after the signature; -1 if not found</returns>
    public long LocateBlockWithSignature(int signature, long endLocation, int minimumBlockSize, int maximumVariableData)
    {
        // Scan backwards from the latest possible start of the block.
        long pos = endLocation - minimumBlockSize;
        if ( pos < 0 ) {
            return -1;
        }

        long giveUpMarker = Math.Max(pos - maximumVariableData, 0);

        // TODO: This loop could be optimised for speed.
        do {
            if ( pos < giveUpMarker ) {
                return -1;
            }
            Seek(pos--, SeekOrigin.Begin);
        } while ( ReadLEInt() != signature );

        return Position;
    }

    /// <summary>
    /// Write Zip64 end of central directory records (File header and locator).
    /// </summary>
    /// <param name="noOfEntries">The number of entries in the central directory.</param>
    /// <param name="sizeEntries">The size of entries in the central directory.</param>
    /// <param name="centralDirOffset">The offset of the central directory.</param>
    public void WriteZip64EndOfCentralDirectory(long noOfEntries, long sizeEntries, long centralDirOffset)
    {
        long centralSignatureOffset = stream_.Position;
        WriteLEInt(ZipConstants.Zip64CentralFileHeaderSignature);
        WriteLELong(44);    // Size of this record (total size of remaining fields in header or full size - 12)
        WriteLEShort(ZipConstants.VersionMadeBy);   // Version made by
        WriteLEShort(ZipConstants.VersionZip64);   // Version to extract
        WriteLEInt(0);      // Number of this disk
        WriteLEInt(0);      // number of the disk with the start of the central directory
        WriteLELong(noOfEntries);       // No of entries on this disk
        WriteLELong(noOfEntries);       // Total No of entries in central directory
        WriteLELong(sizeEntries);       // Size of the central directory
        WriteLELong(centralDirOffset);  // offset of start of central directory
        // zip64 extensible data sector not catered for here (variable size)

        // Write the Zip64 end of central directory locator
        WriteLEInt(ZipConstants.Zip64CentralDirLocatorSignature);

        // no of the disk with the start of the zip64 end of central directory
        WriteLEInt(0);

        // relative offset of the zip64 end of central directory record
        WriteLELong(centralSignatureOffset);

        // total number of disks
        WriteLEInt(1);
    }

    /// <summary>
    /// Write the required records to end the central directory.
    /// </summary>
    /// <param name="noOfEntries">The number of entries in the directory.</param>
    /// <param name="sizeEntries">The size of the entries in the directory.</param>
    /// <param name="startOfCentralDirectory">The start of the central directory.</param>
    /// <param name="comment">The archive comment.  (This can be null).</param>
    public void WriteEndOfCentralDirectory(long noOfEntries, long sizeEntries,
        long startOfCentralDirectory, byte[] comment)
    {
        // Any field overflowing its 16/32-bit slot forces a preceding Zip64 record.
        if ( (noOfEntries >= 0xffff) ||
            (startOfCentralDirectory >= 0xffffffff) ||
            (sizeEntries >= 0xffffffff) ) {
            WriteZip64EndOfCentralDirectory(noOfEntries, sizeEntries, startOfCentralDirectory);
        }

        WriteLEInt(ZipConstants.EndOfCentralDirectorySignature);

        // TODO: ZipFile Multi disk handling not done
        WriteLEShort(0);                    // number of this disk
        WriteLEShort(0);                    // no of disk with start of central dir

        // Number of entries
        if ( noOfEntries >= 0xffff ) {
            WriteLEUshort(0xffff);  // Zip64 marker
            WriteLEUshort(0xffff);
        }
        else {
            WriteLEShort(( short )noOfEntries);          // entries in central dir for this disk
            WriteLEShort(( short )noOfEntries);          // total entries in central directory
        }

        // Size of the central directory
        if ( sizeEntries >= 0xffffffff ) {
            WriteLEUint(0xffffffff);    // Zip64 marker
        }
        else {
            WriteLEInt(( int )sizeEntries);
        }

        // offset of start of central directory
        if ( startOfCentralDirectory >= 0xffffffff ) {
            WriteLEUint(0xffffffff);    // Zip64 marker
        }
        else {
            WriteLEInt(( int )startOfCentralDirectory);
        }

        int commentLength = (comment != null) ? comment.Length : 0;

        if ( commentLength > 0xffff ) {
            throw new ZipException(string.Format("Comment length({0}) is too long can only be 64K", commentLength));
        }

        WriteLEShort(commentLength);

        if ( commentLength > 0 ) {
            Write(comment, 0, comment.Length);
        }
    }

    #region LE value reading/writing
    /// <summary>
    /// Read an unsigned short in little endian byte order.
    /// </summary>
    /// <returns>Returns the value read.</returns>
    /// <exception cref="IOException">
    /// An i/o error occurs.
    /// </exception>
    /// <exception cref="EndOfStreamException">
    /// The file ends prematurely
    /// </exception>
    public int ReadLEShort()
    {
        int byteValue1 = stream_.ReadByte();

        if (byteValue1 < 0) {
            throw new EndOfStreamException();
        }

        int byteValue2 = stream_.ReadByte();
        if (byteValue2 < 0) {
            throw new EndOfStreamException();
        }

        return byteValue1 | (byteValue2 << 8);
    }

    /// <summary>
    /// Read an int in little endian byte order.
    /// </summary>
    /// <returns>Returns the value read.</returns>
    /// <exception cref="IOException">
    /// An i/o error occurs.
    /// </exception>
    /// <exception cref="System.IO.EndOfStreamException">
    /// The file ends prematurely
    /// </exception>
    public int ReadLEInt()
    {
        return ReadLEShort() | (ReadLEShort() << 16);
    }

    /// <summary>
    /// Read a long in little endian byte order.
    /// </summary>
    /// <returns>The value read.</returns>
    public long ReadLELong()
    {
        // The uint cast prevents sign-extension of the low 32 bits.
        return (uint)ReadLEInt() | ((long)ReadLEInt() << 32);
    }

    /// <summary>
    /// Write an unsigned short in little endian byte order.
    /// </summary>
    /// <param name="value">The value to write.</param>
    public void WriteLEShort(int value)
    {
        stream_.WriteByte(( byte )(value & 0xff));
        stream_.WriteByte(( byte )((value >> 8) & 0xff));
    }

    /// <summary>
    /// Write a ushort in little endian byte order.
    /// </summary>
    /// <param name="value">The value to write.</param>
    public void WriteLEUshort(ushort value)
    {
        stream_.WriteByte(( byte )(value & 0xff));
        stream_.WriteByte(( byte )(value >> 8));
    }

    /// <summary>
    /// Write an int in little endian byte order.
    /// </summary>
    /// <param name="value">The value to write.</param>
    public void WriteLEInt(int value)
    {
        WriteLEShort(value);
        WriteLEShort(value >> 16);
    }

    /// <summary>
    /// Write a uint in little endian byte order.
    /// </summary>
    /// <param name="value">The value to write.</param>
    public void WriteLEUint(uint value)
    {
        WriteLEUshort(( ushort )(value & 0xffff));
        WriteLEUshort(( ushort )(value >> 16));
    }

    /// <summary>
    /// Write a long in little endian byte order.
    /// </summary>
    /// <param name="value">The value to write.</param>
    public void WriteLELong(long value)
    {
        WriteLEInt(( int )value);
        WriteLEInt(( int )(value >> 32));
    }

    /// <summary>
    /// Write a ulong in little endian byte order.
    /// </summary>
    /// <param name="value">The value to write.</param>
    public void WriteLEUlong(ulong value)
    {
        WriteLEUint(( uint )(value & 0xffffffff));
        WriteLEUint(( uint )(value >> 32));
    }

    #endregion

    /// <summary>
    /// Write a data descriptor.
    /// </summary>
    /// <param name="entry">The entry to write a descriptor for.</param>
    /// <returns>Returns the number of descriptor bytes written.</returns>
    public int WriteDataDescriptor(ZipEntry entry)
    {
        if (entry == null) {
            throw new ArgumentNullException("entry");
        }

        int result=0;

        // Add data descriptor if flagged as required
        if ((entry.Flags & (int)GeneralBitFlags.Descriptor) != 0) {
            // The signature is not PKZIP originally but is now described as optional
            // in the PKZIP Appnote documenting the format.
            WriteLEInt(ZipConstants.DataDescriptorSignature);
            WriteLEInt(unchecked((int)(entry.Crc)));

            result+=8;

            if (entry.LocalHeaderRequiresZip64) {
                // Zip64 descriptors carry 64-bit sizes.
                WriteLELong(entry.CompressedSize);
                WriteLELong(entry.Size);
                result+=16;
            }
            else {
                WriteLEInt((int)entry.CompressedSize);
                WriteLEInt((int)entry.Size);
                result+=8;
            }
        }

        return result;
    }

    /// <summary>
    /// Read data descriptor at the end of compressed data.
    /// </summary>
    /// <param name="zip64">if set to <c>true</c> [zip64].</param>
    /// <param name="data">The data to fill in.</param>
    public void ReadDataDescriptor(bool zip64, DescriptorData data)
    {
        int intValue = ReadLEInt();

        // In theory this may not be a descriptor according to PKZIP appnote.
        // In practise its always there.
        if (intValue != ZipConstants.DataDescriptorSignature) {
            throw new ZipException("Data descriptor signature not found");
        }

        data.Crc = ReadLEInt();

        if (zip64) {
            data.CompressedSize = ReadLELong();
            data.Size = ReadLELong();
        }
        else {
            data.CompressedSize = ReadLEInt();
            data.Size = ReadLEInt();
        }
    }

    #region Instance Fields
    bool isOwner_;
    Stream stream_;
    #endregion
}
}
using System.Collections.Generic;

using NUnit.Framework;

namespace AutoPoco.KCL
{
    /// <summary>
    /// A deterministic test double for <see cref="IRandom"/> /
    /// <see cref="IRandomExtensions"/>: every "random" value is simply the next
    /// item dequeued from one of the public queues, so tests can script exact
    /// sequences of values. The fixture also contains the NUnit tests that
    /// verify the double's own queue/clamping behaviour.
    /// </summary>
    [TestFixture]
    internal class TestRandom : IRandom, IRandomExtensions
    {
        // Scripted values handed out by Next(), Next(max) and Next(min, max).
        public Queue<int> IntQueue { get; private set; }

        // Scripted arrays handed out by NextBytes().
        public Queue<byte[]> ByteArrayQueue { get; private set; }

        // Scripted values handed out by NextDouble().
        public Queue<double> DoubleQueue { get; private set; }

        public TestRandom()
        {
            IntQueue = new Queue<int>();
            ByteArrayQueue = new Queue<byte[]>();
            DoubleQueue = new Queue<double>();
        }

        #region IRandom Members

        /// <summary>Returns the next scripted int unmodified.</summary>
        public int Next()
        {
            return IntQueue.Dequeue();
        }

        /// <summary>
        /// Returns the next scripted int, clamped to the exclusive upper bound:
        /// values >= <paramref name="maxValue"/> become maxValue - 1.
        /// </summary>
        public int Next(int maxValue)
        {
            int result = IntQueue.Dequeue();
            if(result >= maxValue)
                return maxValue - 1;
            else
                return result;
        }

        /// <summary>
        /// Returns the next scripted int, clamped into [minValue, maxValue):
        /// values >= <paramref name="maxValue"/> become maxValue - 1 and values
        /// below <paramref name="minValue"/> become minValue.
        /// </summary>
        public int Next(int minValue, int maxValue)
        {
            int result = IntQueue.Dequeue();
            if(result >= maxValue)
                return maxValue - 1;
            else if(result < minValue)
                return minValue;
            else
                return result;
        }

        /// <summary>
        /// Fills <paramref name="buffer"/> from the next scripted byte array.
        /// Extra bytes in a longer scripted array are ignored (unchanged
        /// behaviour). A scripted array that is too short now fails fast with a
        /// descriptive exception instead of throwing IndexOutOfRangeException
        /// part-way through the copy, after the caller's buffer was partially
        /// mutated.
        /// </summary>
        public void NextBytes(byte[] buffer)
        {
            byte[] value = ByteArrayQueue.Dequeue();

            if(value.Length < buffer.Length)
                throw new System.InvalidOperationException(
                    string.Format(
                        "The dequeued byte array has {0} bytes but {1} bytes were requested.",
                        value.Length,
                        buffer.Length));

            for(int i = 0; i < buffer.Length; i++)
                buffer[i] = value[i];
        }

        /// <summary>Returns the next scripted double unmodified.</summary>
        public double NextDouble()
        {
            return DoubleQueue.Dequeue();
        }

        #endregion

        #region IRandomExtensions Members

        // These members forward to the real RandomExtensions implementations so
        // the extension logic is exercised against the scripted queues above.

        public System.Boolean GetBoolean()
        {
            return RandomExtensions.GetBoolean(this);
        }

        public System.Byte GetByte(System.Byte min = System.Byte.MinValue, System.Byte max = System.Byte.MaxValue)
        {
            return RandomExtensions.GetByte(this, min, max);
        }

        public System.Int16 GetInt16(System.Int16 min = System.Int16.MinValue, System.Int16 max = System.Int16.MaxValue)
        {
            return RandomExtensions.GetInt16(this, min, max);
        }

        public System.Int32 GetInt32(System.Int32 min = System.Int32.MinValue, System.Int32 max = System.Int32.MaxValue)
        {
            return RandomExtensions.GetInt32(this, min, max);
        }

        public System.Int64 GetInt64(System.Int64 min = System.Int64.MinValue, System.Int64 max = System.Int64.MaxValue)
        {
            return RandomExtensions.GetInt64(this, min, max);
        }

        public System.Double GetDouble(System.Double min = System.Double.MinValue, System.Double max = System.Double.MaxValue)
        {
            return RandomExtensions.GetDouble(this, min, max);
        }

        public System.Single GetSingle(System.Single min = System.Single.MinValue, System.Single max = System.Single.MaxValue)
        {
            return RandomExtensions.GetSingle(this, min, max);
        }

        public System.Decimal GetDecimal(System.Decimal min = System.Decimal.MinValue, System.Decimal max = System.Decimal.MaxValue)
        {
            return RandomExtensions.GetDecimal(this, min, max);
        }

        public System.DateTime GetDateTime(System.DateTime min, System.DateTime max)
        {
            return RandomExtensions.GetDateTime(this, min, max);
        }

        public System.Guid GetGuid()
        {
            return RandomExtensions.GetGuid(this);
        }

        public System.Byte[] GetBinary(int minLength, int maxLength)
        {
            return RandomExtensions.GetBinary(this, minLength, maxLength);
        }

        #endregion

        [Test]
        public void NextReturnsTopOfIntQueue()
        {
            TestRandom rand = new TestRandom();
            rand.IntQueue.Enqueue(101);

            Assert.AreEqual(101, rand.Next(), "The result of Next should have been the top of the IntQueue");
        }

        [Test]
        public void NextPopsTheTopOfTheIntQueue()
        {
            TestRandom rand = new TestRandom();
            rand.IntQueue.Enqueue(0);
            if(rand.IntQueue.Count != 1)
                Assert.Fail("Unable to determine the size of IntQueue.");

            rand.Next();

            Assert.AreEqual(0, rand.IntQueue.Count, "The number of items left in IntQueue should be zero.");
        }

        [Test]
        public void NextDoubleReturnsTopOfDoubleQueue()
        {
            TestRandom rand = new TestRandom();
            rand.DoubleQueue.Enqueue(1.1);

            Assert.AreEqual(1.1, rand.NextDouble(), "The result of NextDouble should have been the top of the DoubleQueue.");
        }

        [Test]
        public void NextDoublePopsTheTopOfTheDoubleQueue()
        {
            TestRandom rand = new TestRandom();
            rand.DoubleQueue.Enqueue(1.2);
            if(rand.DoubleQueue.Count != 1)
                Assert.Fail("Unable to determine the size of DoubleQueue.");

            rand.NextDouble();

            Assert.AreEqual(0, rand.DoubleQueue.Count, "The number of items left in DoubleQueue should be zero.");
        }

        [Test]
        public void NextBytesReturnsTopOfBytesArrayQueue()
        {
            byte[] target = new byte[] { 0x1, 0x2, 0x3, 0x4 };
            TestRandom rand = new TestRandom();
            rand.ByteArrayQueue.Enqueue(target);

            byte[] result = new byte[4];
            rand.NextBytes(result);

            CollectionAssert.AreEqual(target, result, "The two arrays should be the same.");
        }

        [Test]
        public void NextBytesPopsTheTopOfTheBytesArrayQueue()
        {
            byte[] target = new byte[] { 0x1, 0x2, 0x3, 0x4 };
            TestRandom rand = new TestRandom();
            rand.ByteArrayQueue.Enqueue(target);
            if(rand.ByteArrayQueue.Count != 1)
                Assert.Fail("Unable to determine the size of ByteArrayQueue.");

            byte[] result = new byte[4];
            rand.NextBytes(result);

            Assert.AreEqual(0, rand.ByteArrayQueue.Count, "The number of items left in ByteArrayQueue should be zero.");
        }

        [Test]
        public void NextMaxReturnsNextItemInIntQueue()
        {
            TestRandom rand = new TestRandom();
            rand.IntQueue.Enqueue(100);

            int result = rand.Next(101);

            Assert.AreEqual(100, result);
        }

        [Test]
        public void NextMaxPopsIntQueue()
        {
            TestRandom rand = new TestRandom();
            rand.IntQueue.Enqueue(100);
            if(rand.IntQueue.Count != 1)
                Assert.Fail("Unable to determine the size of IntQueue.");

            rand.Next(101);

            Assert.AreEqual(0, rand.IntQueue.Count, "The number of items in IntQueue should be zero.");
        }

        [Test]
        public void NextMaxTruncatesValueInIntQueue()
        {
            TestRandom rand = new TestRandom();
            rand.IntQueue.Enqueue(101);

            int maxValue = 100;
            int result = rand.Next(maxValue);

            Assert.AreEqual(maxValue - 1, result, "The result should have been (maxvalue - 1) because the max value represents an exclusive upper bound.");
        }

        [Test]
        public void NextMaxTruncatesValueInIntQueueAgain()
        {
            TestRandom rand = new TestRandom();
            int maxValue = 90;
            rand.IntQueue.Enqueue(maxValue);

            int result = rand.Next(maxValue);

            Assert.AreEqual(maxValue - 1, result, "The result should have been (maxvalue - 1) because the max value represents an exclusive upper bound.");
        }

        [Test]
        public void NextMinMaxReturnsNextItemInIntQueue()
        {
            TestRandom rand = new TestRandom();
            rand.IntQueue.Enqueue(100);

            int result = rand.Next(0, 101);

            Assert.AreEqual(100, result);
        }

        [Test]
        public void NextMinMaxPopsIntQueue()
        {
            TestRandom rand = new TestRandom();
            rand.IntQueue.Enqueue(100);
            if(rand.IntQueue.Count != 1)
                Assert.Fail("Unable to determine the size of IntQueue.");

            rand.Next(0, 101);

            Assert.AreEqual(0, rand.IntQueue.Count, "The number of items in IntQueue should be zero.");
        }

        [Test]
        public void NextMinMaxTruncatesValueInIntQueueMax()
        {
            TestRandom rand = new TestRandom();
            rand.IntQueue.Enqueue(101);

            int maxValue = 100;
            int result = rand.Next(0, maxValue);

            Assert.AreEqual(maxValue - 1, result, "The result should have been (maxvalue - 1) because the max value represents an exclusive upper bound.");
        }

        [Test]
        public void NextMinMaxTruncatesValueInIntQueueAgainMax()
        {
            TestRandom rand = new TestRandom();
            int maxValue = 90;
            rand.IntQueue.Enqueue(maxValue);

            int result = rand.Next(0, maxValue);

            Assert.AreEqual(maxValue - 1, result, "The result should have been (maxvalue - 1) because the max value represents an exclusive upper bound.");
        }

        [Test]
        public void NextMinMaxTruncatesValueInIntQueueMin()
        {
            TestRandom rand = new TestRandom();
            rand.IntQueue.Enqueue(-1);

            int minValue = 0;
            int result = rand.Next(minValue, 100);

            Assert.AreEqual(minValue, result, "The result should have been equal to minValue because the min value represents an inclusive lower bound.");
        }

        [Test]
        public void NextMinMaxTruncatesValueInIntQueueAgainMin()
        {
            TestRandom rand = new TestRandom();
            int minValue = 5;
            rand.IntQueue.Enqueue(minValue);

            int result = rand.Next(minValue, 100);

            Assert.AreEqual(minValue, result, "The result should have been minValue because the min value represents an inclusive lower bound.");
        }
    }
}
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE file in the project root for more information.

using System.Collections;

using Xunit;

namespace System.Data.SqlClient.Tests
{
    /// <summary>
    /// Tests that connection/command failures against unreachable servers surface
    /// the expected exception types and messages from SqlClient.
    /// </summary>
    public class ExceptionTest
    {
        // test connection string
        private string connectionString = "server=tcp:server,1432;database=test;uid=admin;pwd=SQLDB;connect timeout=60;";

        // data value and server consts
        private const string badServer = "NotAServer";
        private const string sqlsvrBadConn = "A network-related or instance-specific error occurred while establishing a connection to SQL Server. The server was not found or was not accessible. Verify that the instance name is correct and that SQL Server is configured to allow remote connections.";
        private const string execReaderFailedMessage = "ExecuteReader requires an open and available Connection. The connection's current state is closed.";
        private const string orderIdQuery = "select orderid from orders where orderid < 10250";

        [Fact]
        public void ExceptionTests()
        {
            SqlConnectionStringBuilder builder = new SqlConnectionStringBuilder(connectionString);

            // tests improper server name thrown from constructor of tdsparser
            SqlConnectionStringBuilder badBuilder = new SqlConnectionStringBuilder(builder.ConnectionString) { DataSource = badServer, ConnectTimeout = 1 };

            VerifyConnectionFailure<SqlException>(() => GenerateConnectionException(badBuilder.ConnectionString), sqlsvrBadConn, VerifyException);
        }

        [Fact]
        public void VariousExceptionTests()
        {
            // Test exceptions - makes sure they are only thrown from upper layers
            SqlConnectionStringBuilder builder = new SqlConnectionStringBuilder(connectionString);

            SqlConnectionStringBuilder badBuilder = new SqlConnectionStringBuilder(builder.ConnectionString) { DataSource = badServer, ConnectTimeout = 1 };
            using (var sqlConnection = new SqlConnection(badBuilder.ConnectionString))
            {
                using (SqlCommand command = sqlConnection.CreateCommand())
                {
                    command.CommandText = orderIdQuery;
                    // ExecuteReader on a never-opened connection must throw
                    // InvalidOperationException, not SqlException.
                    VerifyConnectionFailure<InvalidOperationException>(() => command.ExecuteReader(), execReaderFailedMessage);
                }
            }
        }

        [Fact]
        public void IndependentConnectionExceptionTestOpenConnection()
        {
            // Test exceptions for existing connection to ensure proper exception and call stack
            SqlConnectionStringBuilder builder = new SqlConnectionStringBuilder(connectionString);

            SqlConnectionStringBuilder badBuilder = new SqlConnectionStringBuilder(builder.ConnectionString) { DataSource = badServer, ConnectTimeout = 1 };
            using (var sqlConnection = new SqlConnection(badBuilder.ConnectionString))
            {
                VerifyConnectionFailure<SqlException>(() => sqlConnection.Open(), sqlsvrBadConn, VerifyException);
            }
        }

        [Fact]
        public void IndependentConnectionExceptionTestExecuteReader()
        {
            // Test exceptions for existing connection to ensure proper exception and call stack
            SqlConnectionStringBuilder builder = new SqlConnectionStringBuilder(connectionString);

            SqlConnectionStringBuilder badBuilder = new SqlConnectionStringBuilder(builder.ConnectionString) { DataSource = badServer, ConnectTimeout = 1 };
            using (var sqlConnection = new SqlConnection(badBuilder.ConnectionString))
            {
                using (SqlCommand command = new SqlCommand(orderIdQuery, sqlConnection))
                {
                    VerifyConnectionFailure<InvalidOperationException>(() => command.ExecuteReader(), execReaderFailedMessage);
                }
            }
        }

        [ActiveIssue(19057)]
        [Theory]
        [InlineData(@"np:\\.\pipe\sqlbad\query")]
        [InlineData(@"np:\\.\pipe\MSSQL$NonExistentInstance\sql\query")]
        [InlineData(@"\\.\pipe\sqlbad\query")]
        [InlineData(@"\\.\pipe\MSSQL$NonExistentInstance\sql\query")]
        [InlineData(@"np:\\localhost\pipe\sqlbad\query")]
        [InlineData(@"np:\\localhost\pipe\MSSQL$NonExistentInstance\sqlbad\query")]
        [InlineData(@"\\localhost\pipe\sqlbad\query")]
        [InlineData(@"\\localhost\pipe\MSSQL$NonExistentInstance\sqlbad\query")]
        [PlatformSpecific(TestPlatforms.Windows)] // Named pipes with the given input strings are not supported on Unix
        public void NamedPipeTest(string dataSource)
        {
            SqlConnectionStringBuilder builder = new SqlConnectionStringBuilder();
            builder.DataSource = dataSource;
            builder.ConnectTimeout = 1;

            using(SqlConnection connection = new SqlConnection(builder.ConnectionString))
            {
                VerifyConnectionFailure<SqlException>(() => connection.Open(), "(provider: Named Pipes Provider, error: 11 - Timeout error)");
            }
        }

        [ActiveIssue(19057)]
        [Fact]
        public void NamedPipeInvalidConnStringTest()
        {
            SqlConnectionStringBuilder builder = new SqlConnectionStringBuilder();
            builder.ConnectTimeout = 1;

            string invalidConnStringError = "(provider: Named Pipes Provider, error: 25 - Connection string is not valid)";
            string fakeServerName = Guid.NewGuid().ToString("N");

            // Each malformed np: data source below must fail with the
            // "connection string is not valid" provider error.

            // Using forward slashes
            builder.DataSource = "np://" + fakeServerName + "/pipe/sql/query";
            OpenBadConnection(builder.ConnectionString, invalidConnStringError);

            // Without pipe token
            builder.DataSource = @"np:\\" + fakeServerName + @"\sql\query";
            OpenBadConnection(builder.ConnectionString, invalidConnStringError);

            // Without a pipe name
            builder.DataSource = @"np:\\" + fakeServerName + @"\pipe";
            OpenBadConnection(builder.ConnectionString, invalidConnStringError);

            // Nothing after server
            builder.DataSource = @"np:\\" + fakeServerName;
            OpenBadConnection(builder.ConnectionString, invalidConnStringError);

            // No leading slashes
            builder.DataSource = @"np:" + fakeServerName + @"\pipe\sql\query";
            OpenBadConnection(builder.ConnectionString, invalidConnStringError);

            // No server name
            builder.DataSource = @"np:\\\pipe\sql\query";
            OpenBadConnection(builder.ConnectionString, invalidConnStringError);

            // Nothing but slashes
            builder.DataSource = @"np:\\\\\";
            OpenBadConnection(builder.ConnectionString, invalidConnStringError);
        }

        // Opens a connection and runs a simple query so that the connection
        // failure is raised from inside command execution.
        private void GenerateConnectionException(string connectionString)
        {
            using (SqlConnection sqlConnection = new SqlConnection(connectionString))
            {
                sqlConnection.Open();
                using (SqlCommand command = sqlConnection.CreateCommand())
                {
                    command.CommandText = orderIdQuery;
                    command.ExecuteReader();
                }
            }
        }

        // Asserts that connectAction throws TException, checks the message
        // (Core only), then runs the supplied verifier on the exception.
        private TException VerifyConnectionFailure<TException>(Action connectAction, string expectedExceptionMessage, Func<TException, bool> exVerifier) where TException : Exception
        {
            TException ex = Assert.Throws<TException>(connectAction);

            // Some exception messages are different between Framework and Core
            if(!PlatformDetection.IsFullFramework)
            {
                Assert.Contains(expectedExceptionMessage, ex.Message);
            }
            Assert.True(exVerifier(ex), "FAILED Exception verifier failed on the exception.");

            return ex;
        }

        // Opens a connection that is expected to fail with the given error text.
        private void OpenBadConnection(string connectionString, string errorMsg)
        {
            using (SqlConnection conn = new SqlConnection(connectionString))
            {
                VerifyConnectionFailure<SqlException>(() => conn.Open(), errorMsg);
            }
        }

        // Overload that only checks the exception type and message.
        private TException VerifyConnectionFailure<TException>(Action connectAction, string expectedExceptionMessage) where TException : Exception
        {
            return VerifyConnectionFailure<TException>(connectAction, expectedExceptionMessage, (ex) => true);
        }

        // Convenience verifier: exactly one error, no server-property checks.
        private bool VerifyException(SqlException exception)
        {
            VerifyException(exception, 1);
            return true;
        }

        // Verifies the error collection size, severity range, and (optionally)
        // the server-populated Number/State/Class properties of the exception.
        private bool VerifyException(SqlException exception, int count, int? errorNumber = null, int? errorState = null, int? severity = null)
        {
            Assert.NotEmpty(exception.Errors);
            Assert.Equal(count, exception.Errors.Count);

            // Ensure that all errors have an error-level severity
            for (int i = 0; i < count; i++)
            {
                Assert.InRange(exception.Errors[i].Class, 10, byte.MaxValue);
            }

            // Check the properties of the exception populated by the server are correct
            if (errorNumber.HasValue)
            {
                Assert.Equal(errorNumber.Value, exception.Number);
            }

            if (errorState.HasValue)
            {
                Assert.Equal(errorState.Value, exception.State);
            }

            if (severity.HasValue)
            {
                Assert.Equal(severity.Value, exception.Class);
            }

            if ((errorNumber.HasValue) && (errorState.HasValue) && (severity.HasValue))
            {
                string expected = $"Error Number:{errorNumber.Value},State:{errorState.Value},Class:{severity.Value}";
                Assert.Contains(expected, exception.ToString());
            }

            return true;
        }
    }
}
/********************************************************************++
Copyright (c) Microsoft Corporation.  All rights reserved.
--********************************************************************/

using System.Collections.ObjectModel;

using Dbg = System.Management.Automation;

namespace System.Management.Automation
{
    /// <summary>
    /// Exposes the Children noun of the Cmdlet Providers to the Cmdlet base class. The methods of this class
    /// use the providers to perform operations.
    /// </summary>
    public sealed class ChildItemCmdletProviderIntrinsics
    {
        #region Constructors

        /// <summary>
        /// Hide the default constructor since we always require an instance of SessionState
        /// </summary>
        private ChildItemCmdletProviderIntrinsics()
        {
            Dbg.Diagnostics.Assert(
                false,
                "This constructor should never be called. Only the constructor that takes an instance of SessionState should be called.");
        } // CmdletProviderIntrinsics private

        /// <summary>
        /// Constructs a facade over the "real" session state API
        /// </summary>
        ///
        /// <param name="cmdlet">
        /// An instance of the cmdlet that this class is acting as a facade for.
        /// </param>
        ///
        /// <exception cref="ArgumentNullException">
        /// If <paramref name="cmdlet"/> is null.
        /// </exception>
        internal ChildItemCmdletProviderIntrinsics(Cmdlet cmdlet)
        {
            if (cmdlet == null)
            {
                throw PSTraceSource.NewArgumentNullException("cmdlet");
            }

            // NOTE(review): _cmdlet is declared outside this view — presumably a
            // private field holding the facade's owning cmdlet; confirm.
            _cmdlet = cmdlet;
            _sessionState = cmdlet.Context.EngineSessionState;
        } // ChildItemCmdletProviderIntrinsics internal

        /// <summary>
        /// Constructs a facade over the "real" session state API
        /// </summary>
        ///
        /// <param name="sessionState">
        /// An instance of the "real" session state.
        /// </param>
        ///
        /// <exception cref="ArgumentNullException">
        /// If <paramref name="sessionState"/> is null.
        /// </exception>
        ///
        internal ChildItemCmdletProviderIntrinsics(SessionStateInternal sessionState)
        {
            if (sessionState == null)
            {
                throw PSTraceSource.NewArgumentNullException("sessionState");
            }

            _sessionState = sessionState;
        } // ChildItemCmdletProviderIntrinsics internal

        #endregion Constructors

        #region Public methods

        #region GetChildItems

        /// <summary>
        /// Gets the child items of the container at the given path.
        /// </summary>
        ///
        /// <param name="path">
        /// The path to the item to retrieve. It may be a drive or provider-qualified path and may include
        /// glob characters.
        /// </param>
        ///
        /// <param name="recurse">
        /// If true, gets all the children in all the sub-containers of the specified
        /// container. If false, only gets the immediate children of the specified
        /// container.
        /// </param>
        ///
        /// <returns>
        /// The children of the container at the specified path. The type of the objects returned are
        /// determined by the provider that supports the given path.
        /// </returns>
        ///
        /// <exception cref="ArgumentNullException">
        /// If <paramref name="path"/> is null.
        /// </exception>
        ///
        /// <exception cref="ProviderNotFoundException">
        /// If the <paramref name="path"/> refers to a provider that could not be found.
        /// </exception>
        ///
        /// <exception cref="DriveNotFoundException">
        /// If the <paramref name="path"/> refers to a drive that could not be found.
        /// </exception>
        ///
        /// <exception cref="ItemNotFoundException">
        /// If <paramref name="path"/> does not contain glob characters and
        /// could not be found.
        /// </exception>
        ///
        /// <exception cref="NotSupportedException">
        /// If the provider that the <paramref name="path"/> refers to does
        /// not support this operation.
        /// </exception>
        ///
        /// <exception cref="ProviderInvocationException">
        /// If the provider threw an exception.
        /// </exception>
        public Collection<PSObject> Get(string path, bool recurse)
        {
            Dbg.Diagnostics.Assert(
                _sessionState != null,
                "The only constructor for this class should always set the sessionState field");

            // Parameter validation is done in the session state object

            // Full recursion depth, no force, globbing enabled.
            return _sessionState.GetChildItems(new string[] { path }, recurse, uint.MaxValue, false, false);
        } // GetChildItems

        /// <summary>
        /// Gets the child items of the container at the given path(s).
        /// </summary>
        ///
        /// <param name="path">
        /// The path(s) to the item(s) to retrieve. They may be drive or provider-qualified paths and may include
        /// glob characters.
        /// </param>
        ///
        /// <param name="recurse">
        /// If true, gets all the children in all the sub-containers of the specified
        /// container. If false, only gets the immediate children of the specified
        /// container.
        /// </param>
        ///
        /// <param name="depth">
        /// Limits the depth of recursion; uint.MaxValue performs full recursion.
        /// </param>
        ///
        /// <param name="force">
        /// Passed on to providers to force operations.
        /// </param>
        ///
        /// <param name="literalPath">
        /// If true, globbing is not done on paths.
        /// </param>
        ///
        /// <returns>
        /// The children of the container at the specified path. The type of the objects returned are
        /// determined by the provider that supports the given path.
        /// </returns>
        ///
        /// <exception cref="ArgumentNullException">
        /// If <paramref name="path"/> is null.
        /// </exception>
        ///
        /// <exception cref="ProviderNotFoundException">
        /// If the <paramref name="path"/> refers to a provider that could not be found.
        /// </exception>
        ///
        /// <exception cref="DriveNotFoundException">
        /// If the <paramref name="path"/> refers to a drive that could not be found.
        /// </exception>
        ///
        /// <exception cref="ItemNotFoundException">
        /// If <paramref name="path"/> does not contain glob characters and
        /// could not be found.
        /// </exception>
        ///
        /// <exception cref="NotSupportedException">
        /// If the provider that the <paramref name="path"/> refers to does
        /// not support this operation.
        /// </exception>
        ///
        /// <exception cref="ProviderInvocationException">
        /// If the provider threw an exception.
        /// </exception>
        ///
        public Collection<PSObject> Get(string[] path, bool recurse, uint depth, bool force, bool literalPath)
        {
            Dbg.Diagnostics.Assert(
                _sessionState != null,
                "The only constructor for this class should always set the sessionState field");

            // Parameter validation is done in the session state object

            return _sessionState.GetChildItems(path, recurse, depth, force, literalPath);
        }

        /// <summary>
        /// Gets the child items of the container at the given path(s).
        /// </summary>
        ///
        /// <param name="path">
        /// The path(s) to the item(s) to retrieve. They may be drive or provider-qualified paths and may include
        /// glob characters.
        /// </param>
        ///
        /// <param name="recurse">
        /// If true, gets all the children in all the sub-containers of the specified
        /// container. If false, only gets the immediate children of the specified
        /// container.
        /// </param>
        ///
        /// <param name="force">
        /// Passed on to providers to force operations.
        /// </param>
        ///
        /// <param name="literalPath">
        /// If true, globbing is not done on paths.
        /// </param>
        ///
        /// <returns>
        /// The children of the container at the specified path. The type of the objects returned are
        /// determined by the provider that supports the given path.
        /// </returns>
        ///
        /// <exception cref="ArgumentNullException">
        /// If <paramref name="path"/> is null.
        /// </exception>
        ///
        /// <exception cref="ProviderNotFoundException">
        /// If the <paramref name="path"/> refers to a provider that could not be found.
        /// </exception>
        ///
        /// <exception cref="DriveNotFoundException">
        /// If the <paramref name="path"/> refers to a drive that could not be found.
        /// </exception>
        ///
        /// <exception cref="ItemNotFoundException">
        /// If <paramref name="path"/> does not contain glob characters and
        /// could not be found.
        /// </exception>
        ///
        /// <exception cref="NotSupportedException">
        /// If the provider that the <paramref name="path"/> refers to does
        /// not support this operation.
        /// </exception>
        ///
        /// <exception cref="ProviderInvocationException">
        /// If the provider threw an exception.
        /// </exception>
        ///
        public Collection<PSObject> Get(string[] path, bool recurse, bool force, bool literalPath)
        {
            Dbg.Diagnostics.Assert(
                _sessionState != null,
                "The only constructor for this class should always set the sessionState field");

            // Parameter validation is done in the session state object

            // Delegates to the depth-aware overload with unlimited depth.
            return this.Get(path, recurse, uint.MaxValue, force, literalPath);
        }

        /// <summary>
        /// Gets the child items of the container at the given path.
        /// </summary>
        ///
        /// <param name="path">
        /// The path to the item to retrieve. It may be a drive or provider-qualified path and may include
        /// glob characters.
        /// </param>
        ///
        /// <param name="recurse">
        /// If true, gets all the children in all the sub-containers of the specified
        /// container. If false, only gets the immediate children of the specified
        /// container.
        /// </param>
        ///
        /// <param name="depth">
        /// Limits the depth of recursion; uint.MaxValue performs full recursion.
        /// </param>
        ///
        /// <param name="context">
        /// The context under which the command is running.
        /// </param>
        ///
        /// <returns>
        /// Nothing. The children of the container at the specified path are written to the context.
        /// </returns>
        ///
        /// <exception cref="ArgumentNullException">
        /// If <paramref name="path"/> or <paramref name="context"/> is null.
        /// </exception>
        ///
        /// <exception cref="ProviderNotFoundException">
        /// If the <paramref name="path"/> refers to a provider that could not be found.
        /// </exception>
        ///
        /// <exception cref="DriveNotFoundException">
        /// If the <paramref name="path"/> refers to a drive that could not be found.
        /// </exception>
        ///
        /// <exception cref="ItemNotFoundException">
        /// If <paramref name="path"/> does not contain glob characters and
        /// could not be found.
        /// </exception>
        ///
        /// <exception cref="NotSupportedException">
        /// If the provider that the <paramref name="path"/> refers to does
        /// not support this operation.
        /// </exception>
        ///
        /// <exception cref="ProviderInvocationException">
        /// If the provider threw an exception.
        /// </exception>
        internal void Get(
            string path,
            bool recurse,
            uint depth,
            CmdletProviderContext context)
        {
            Dbg.Diagnostics.Assert(
                _sessionState != null,
                "The only constructor for this class should always set the sessionState field");

            // Parameter validation is done in the session state object

            _sessionState.GetChildItems(path, recurse, depth, context);
        } // GetChildItems

        /// <summary>
        /// Gets the dynamic parameters for the get-childitem cmdlet.
        /// </summary>
        ///
        /// <param name="path">
        /// The path to the item if it was specified on the command line.
        /// </param>
        ///
        /// <param name="recurse">
        /// If true, gets all the children in all the sub-containers of the specified
        /// container. If false, only gets the immediate children of the specified
        /// container.
        /// </param>
        ///
        /// <param name="context">
        /// The context under which the core command is running.
        /// </param>
        ///
        /// <returns>
        /// An object that has properties and fields decorated with
        /// parsing attributes similar to a cmdlet class.
        /// </returns>
        ///
        /// <exception cref="ProviderNotFoundException">
        /// If the <paramref name="path"/> refers to a provider that could not be found.
        /// </exception>
        ///
        /// <exception cref="DriveNotFoundException">
        /// If the <paramref name="path"/> refers to a drive that could not be found.
        /// </exception>
        ///
        /// <exception cref="ItemNotFoundException">
        /// If <paramref name="path"/> does not contain glob characters and
        /// could not be found.
        /// </exception>
        ///
        /// <exception cref="NotSupportedException">
        /// If the provider that the <paramref name="path"/> refers to does
        /// not support this operation.
        /// </exception>
        ///
        /// <exception cref="ProviderInvocationException">
        /// If the provider threw an exception.
        /// </exception>
        internal object GetChildItemsDynamicParameters(
            string path,
            bool recurse,
            CmdletProviderContext context)
        {
            Dbg.Diagnostics.Assert(
                _sessionState != null,
                "The only constructor for this class should always set the sessionState field");

            // Parameter validation is done in the session state object

            return _sessionState.GetChildItemsDynamicParameters(path, recurse, context);
        } // GetChildItemsDynamicParameters

        #endregion GetChildItems

        #region GetChildNames

        /// <summary>
        /// Gets the child names of the container at the given path.
        /// </summary>
        ///
        /// <param name="path">
        /// The path to the item to retrieve. It may be a drive or provider-qualified path and may include
        /// glob characters.
        /// </param>
        ///
        /// <param name="returnContainers">
        /// Determines if all containers should be returned or only those containers that match the
        /// filter(s).
        /// </param>
        ///
        /// <param name="recurse">
        /// If true, gets all the relative paths of all the children
        /// in all the sub-containers of the specified
        /// container. If false, only gets the immediate child names of the specified
        /// container.
        /// </param>
        ///
        /// <returns>
        /// The children of the container at the specified path. The type of the objects returned are
        /// determined by the provider that supports the given path.
        /// </returns>
        ///
        /// <exception cref="ArgumentNullException">
        /// If <paramref name="path"/> is null.
        /// </exception>
        ///
        /// <exception cref="ProviderNotFoundException">
        /// If the <paramref name="path"/> refers to a provider that could not be found.
        /// </exception>
        ///
        /// <exception cref="DriveNotFoundException">
        /// If the <paramref name="path"/> refers to a drive that could not be found.
        /// </exception>
        ///
        /// <exception cref="ItemNotFoundException">
        /// If <paramref name="path"/> does not contain glob characters and
        /// could not be found.
        /// </exception>
        ///
        /// <exception cref="NotSupportedException">
        /// If the provider that the <paramref name="path"/> refers to does
        /// not support this operation.
        /// </exception>
        ///
        /// <exception cref="ProviderInvocationException">
        /// If the provider threw an exception.
        /// </exception>
        public Collection<string> GetNames(
            string path,
            ReturnContainers returnContainers,
            bool recurse)
        {
            Dbg.Diagnostics.Assert(
                _sessionState != null,
                "The only constructor for this class should always set the sessionState field");

            // Parameter validation is done in the session state object

            // Full recursion depth, no force, globbing enabled.
            return _sessionState.GetChildNames(new string[] { path }, returnContainers, recurse, uint.MaxValue, false, false);
        } // GetChildNames

        /// <summary>
        /// Gets the child names of the container at the given path.
        /// </summary>
        ///
        /// <param name="path">
        /// The path(s) to the item(s) to retrieve. They may be drive or provider-qualified paths and may include
        /// glob characters.
        /// </param>
        ///
        /// <param name="returnContainers">
        /// Determines if all containers should be returned or only those containers that match the
        /// filter(s).
        /// </param>
        ///
        /// <param name="recurse">
        /// If true, gets all the relative paths of all the children
        /// in all the sub-containers of the specified
        /// container. If false, only gets the immediate child names of the specified
        /// container.
        /// </param>
        ///
        /// <param name="force">
        /// Passed on to providers to force operations.
        /// </param>
        ///
        /// <param name="literalPath">
        /// If true, globbing is not done on paths.
        /// </param>
        ///
        /// <returns>
        /// The children of the container at the specified path. The type of the objects returned are
        /// determined by the provider that supports the given path.
        /// </returns>
        ///
        /// <exception cref="ArgumentNullException">
        /// If <paramref name="path"/> is null.
        /// </exception>
        ///
        /// <exception cref="ProviderNotFoundException">
        /// If the <paramref name="path"/> refers to a provider that could not be found.
        /// </exception>
        ///
        /// <exception cref="DriveNotFoundException">
        /// If the <paramref name="path"/> refers to a drive that could not be found.
        /// </exception>
        ///
        /// <exception cref="ItemNotFoundException">
        /// If <paramref name="path"/> does not contain glob characters and
        /// could not be found.
        /// </exception>
        ///
        /// <exception cref="NotSupportedException">
        /// If the provider that the <paramref name="path"/> refers to does
        /// not support this operation.
        /// </exception>
        ///
        /// <exception cref="ProviderInvocationException">
        /// If the provider threw an exception.
        /// </exception>
        ///
        public Collection<string> GetNames(
            string[] path,
            ReturnContainers returnContainers,
            bool recurse,
            bool force,
            bool literalPath)
        {
            Dbg.Diagnostics.Assert(
                _sessionState != null,
                "The only constructor for this class should always set the sessionState field");

            return _sessionState.GetChildNames(path, returnContainers, recurse, uint.MaxValue, force, literalPath);
        }

        /// <summary>
        /// Gets the child names of the container at the given path.
        /// </summary>
        ///
        /// <param name="path">
        /// The path(s) to the item(s) to retrieve. They may be drive or provider-qualified paths and may include
        /// glob characters.
        /// </param>
        ///
        /// <param name="returnContainers">
        /// Determines if all containers should be returned or only those containers that match the
        /// filter(s).
        /// </param>
        ///
        /// <param name="recurse">
        /// If true, gets all the relative paths of all the children
        /// in all the sub-containers of the specified
        /// container. If false, only gets the immediate child names of the specified
        /// container.
        /// </param>
        ///
        /// <param name="depth">
        /// Limits the depth of recursion; uint.MaxValue performs full recursion.
        /// </param>
        ///
        /// <param name="force">
        /// Passed on to providers to force operations.
        /// </param>
        ///
        /// <param name="literalPath">
        /// If true, globbing is not done on paths.
        /// </param>
        ///
        /// <returns>
        /// The children of the container at the specified path. The type of the objects returned are
        /// determined by the provider that supports the given path.
        /// </returns>
        ///
        /// <exception cref="ArgumentNullException">
        /// If <paramref name="path"/> is null.
        /// </exception>
        ///
        /// <exception cref="ProviderNotFoundException">
        /// If the <paramref name="path"/> refers to a provider that could not be found.
        /// </exception>
        ///
        /// <exception cref="DriveNotFoundException">
        /// If the <paramref name="path"/> refers to a drive that could not be found.
        /// </exception>
        ///
        /// <exception cref="ItemNotFoundException">
        /// If <paramref name="path"/> does not contain glob characters and
        /// could not be found.
        /// </exception>
        ///
        /// <exception cref="NotSupportedException">
        /// If the provider that the <paramref name="path"/> refers to does
        /// not support this operation.
        /// </exception>
        ///
        /// <exception cref="ProviderInvocationException">
        /// If the provider threw an exception.
/// </exception> /// public Collection<string> GetNames( string[] path, ReturnContainers returnContainers, bool recurse, uint depth, bool force, bool literalPath) { Dbg.Diagnostics.Assert( _sessionState != null, "The only constructor for this class should always set the sessionState field"); return _sessionState.GetChildNames(path, returnContainers, recurse, depth, force, literalPath); } /// <summary> /// Gets the child names of the container at the given path. /// </summary> /// /// <param name="path"> /// The path to the item to retrieve. It may be a drive or provider-qualified path and may include /// glob characters. /// </param> /// /// <param name="returnContainers"> /// Determines if all containers should be returned or only those containers that match the /// filter(s). /// </param> /// /// <param name="recurse"> /// If true, gets all the relative paths of all the children /// in all the sub-containers of the specified /// container. If false, only gets the immediate child names of the specified /// container. /// </param> /// /// <param name="depth"> /// Limits the depth of recursion; uint.MaxValue performs full recursion. /// </param> /// /// <param name="context"> /// The context under which the command is running. /// </param> /// /// <returns> /// Nothing. The names of the children of the specified container are written to the context. /// </returns> /// /// <exception cref="ArgumentNullException"> /// If <paramref name="path"/> or <paramref name="propertyToClear"/> is null. /// </exception> /// /// <exception cref="ProviderNotFoundException"> /// If the <paramref name="path"/> refers to a provider that could not be found. /// </exception> /// /// <exception cref="DriveNotFoundException"> /// If the <paramref name="path"/> refers to a drive that could not be found. /// </exception> /// /// <exception cref="ItemNotFoundException"> /// If <paramref name="path"/> does not contain glob characters and /// could not be found. 
/// </exception> /// /// <exception cref="NotSupportedException"> /// If the provider that the <paramref name="path"/> refers to does /// not support this operation. /// </exception> /// /// <exception cref="ProviderInvocationException"> /// If the provider threw an exception. /// </exception> internal void GetNames( string path, ReturnContainers returnContainers, bool recurse, uint depth, CmdletProviderContext context) { Dbg.Diagnostics.Assert( _sessionState != null, "The only constructor for this class should always set the sessionState field"); // Parameter validation is done in the session state object _sessionState.GetChildNames(path, returnContainers, recurse, depth, context); } // GetChildNames /// <summary> /// Gets the dynamic parameters for the get-childitem -name cmdlet. /// </summary> /// /// <param name="path"> /// The path to the item if it was specified on the command line. /// </param> /// /// <param name="context"> /// The context which the core command is running. /// </param> /// /// <returns> /// An object that has properties and fields decorated with /// parsing attributes similar to a cmdlet class. /// </returns> /// /// <exception cref="ProviderNotFoundException"> /// If the <paramref name="path"/> refers to a provider that could not be found. /// </exception> /// /// <exception cref="DriveNotFoundException"> /// If the <paramref name="path"/> refers to a drive that could not be found. /// </exception> /// /// <exception cref="ItemNotFoundException"> /// If <paramref name="path"/> does not contain glob characters and /// could not be found. /// </exception> /// /// <exception cref="NotSupportedException"> /// If the provider that the <paramref name="path"/> refers to does /// not support this operation. /// </exception> /// /// <exception cref="ProviderInvocationException"> /// If the provider threw an exception. 
/// </exception>
internal object GetChildNamesDynamicParameters(
    string path,
    CmdletProviderContext context)
{
    Dbg.Diagnostics.Assert(
        _sessionState != null,
        "The only constructor for this class should always set the sessionState field");

    // Parameter validation is done in the session state object
    return _sessionState.GetChildNamesDynamicParameters(path, context);
} // GetChildNamesDynamicParameters

#endregion GetChildNames

#region HasChildItems

/// <summary>
/// Determines if an item at the given path has children.
/// </summary>
///
/// <param name="path">
/// The path to the item to determine if it has children. It may be a drive or provider-qualified path and may include
/// glob characters.
/// </param>
///
/// <returns>
/// True if the item at the specified path has children. False otherwise.
/// </returns>
///
/// <exception cref="ArgumentNullException">
/// If <paramref name="path"/> is null.
/// </exception>
///
/// <exception cref="ProviderNotFoundException">
/// If the <paramref name="path"/> refers to a provider that could not be found.
/// </exception>
///
/// <exception cref="DriveNotFoundException">
/// If the <paramref name="path"/> refers to a drive that could not be found.
/// </exception>
///
/// <exception cref="ItemNotFoundException">
/// If <paramref name="path"/> does not contain glob characters and
/// could not be found.
/// </exception>
///
/// <exception cref="NotSupportedException">
/// If the provider that the <paramref name="path"/> refers to does
/// not support this operation.
/// </exception>
///
/// <exception cref="ProviderInvocationException">
/// If the provider threw an exception.
/// </exception>
public bool HasChild(string path)
{
    Dbg.Diagnostics.Assert(
        _sessionState != null,
        "The only constructor for this class should always set the sessionState field");

    // Parameter validation is done in the session state object.
    // Delegates with force and literalPath both disabled.
    return _sessionState.HasChildItems(path, false, false);
} // HasChildItems

/// <summary>
/// Determines if an item at the given path has children.
/// </summary>
///
/// <param name="path">
/// The path to the item to determine if it has children. It may be a drive or provider-qualified path and may include
/// glob characters.
/// </param>
///
/// <param name="force">
/// Passed on to providers to force operations.
/// </param>
///
/// <param name="literalPath">
/// If true, globbing is not done on paths.
/// </param>
///
/// <returns>
/// True if the item at the specified path has children. False otherwise.
/// </returns>
///
/// <exception cref="ArgumentNullException">
/// If <paramref name="path"/> is null.
/// </exception>
///
/// <exception cref="ProviderNotFoundException">
/// If the <paramref name="path"/> refers to a provider that could not be found.
/// </exception>
///
/// <exception cref="DriveNotFoundException">
/// If the <paramref name="path"/> refers to a drive that could not be found.
/// </exception>
///
/// <exception cref="ItemNotFoundException">
/// If <paramref name="path"/> does not contain glob characters and
/// could not be found.
/// </exception>
///
/// <exception cref="NotSupportedException">
/// If the provider that the <paramref name="path"/> refers to does
/// not support this operation.
/// </exception>
///
/// <exception cref="ProviderInvocationException">
/// If the provider threw an exception.
/// </exception>
public bool HasChild(string path, bool force, bool literalPath)
{
    Dbg.Diagnostics.Assert(
        _sessionState != null,
        "The only constructor for this class should always set the sessionState field");

    // Parameter validation is done in the session state object
    return _sessionState.HasChildItems(path, force, literalPath);
}

/// <summary>
/// Determines if an item at the given path has children.
/// </summary>
///
/// <param name="path">
/// The path to the item to determine if it has children. It may be a drive or provider-qualified path and may include
/// glob characters.
/// </param>
///
/// <param name="context">
/// The context under which the command is running.
/// </param>
///
/// <returns>
/// True if the item at the specified path has children. False otherwise.
/// </returns>
///
/// <exception cref="ArgumentNullException">
/// If <paramref name="path"/> is null.
/// </exception>
///
/// <exception cref="ProviderNotFoundException">
/// If the <paramref name="path"/> refers to a provider that could not be found.
/// </exception>
///
/// <exception cref="DriveNotFoundException">
/// If the <paramref name="path"/> refers to a drive that could not be found.
/// </exception>
///
/// <exception cref="ItemNotFoundException">
/// If <paramref name="path"/> does not contain glob characters and
/// could not be found.
/// </exception>
///
/// <exception cref="NotSupportedException">
/// If the provider that the <paramref name="path"/> refers to does
/// not support this operation.
/// </exception>
///
/// <exception cref="ProviderInvocationException">
/// If the provider threw an exception.
/// </exception>
internal bool HasChild(
    string path,
    CmdletProviderContext context)
{
    Dbg.Diagnostics.Assert(
        _sessionState != null,
        "The only constructor for this class should always set the sessionState field");

    // Parameter validation is done in the session state object
    return _sessionState.HasChildItems(path, context);
} // HasChildItems

#endregion HasChildItems

#endregion Public methods

#region private data

// NOTE(review): _cmdlet is not referenced by any of the methods visible here;
// presumably it is used elsewhere in this class - confirm before removing.
private Cmdlet _cmdlet;
private SessionStateInternal _sessionState;

#endregion private data

} // ChildItemCmdletProviderIntrinsics

/// <summary>
/// This enum determines which types of containers are returned from some of
/// the provider methods.
/// </summary>
///
public enum ReturnContainers
{
    /// <summary>
    /// Only containers that match the filter(s) are returned.
    /// </summary>
    ReturnMatchingContainers,

    /// <summary>
    /// All containers are returned even if they don't match the filter(s).
    /// </summary>
    ReturnAllContainers
}
}
// ----------------------------------------------------------------------------------
//
// Copyright Microsoft Corporation
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
// http://www.apache.org/licenses/LICENSE-2.0
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
// ----------------------------------------------------------------------------------

using Microsoft.Azure.Commands.Common.Authentication.Models;
using Microsoft.Azure.Commands.Sql.Auditing.Model;
using Microsoft.Azure.Commands.Sql.Common;
using Microsoft.Azure.Commands.Sql.Database.Model;
using Microsoft.Azure.Commands.Sql.Database.Services;
using Microsoft.Azure.Management.Sql.LegacySdk.Models;
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using Microsoft.WindowsAzure.Commands.Utilities.Common;
using Microsoft.Azure.Commands.Common.Authentication.Abstractions;

namespace Microsoft.Azure.Commands.Sql.Auditing.Services
{
    /// <summary>
    /// The SqlAuditAdapter class is responsible for transforming the data that was received from the endpoints to the cmdlets model of auditing policy and vice versa
    /// </summary>
    public class SqlAuditAdapter
    {
        /// <summary>
        /// Gets or sets the Azure subscription
        /// </summary>
        private IAzureSubscription Subscription { get; set; }

        /// <summary>
        /// The auditing endpoints communicator used by this adapter
        /// </summary>
        private AuditingEndpointsCommunicator Communicator { get; set; }

        /// <summary>
        /// The Azure endpoints communicator used by this adapter
        /// </summary>
        private AzureEndpointsCommunicator AzureCommunicator { get; set; }

        /// <summary>
        /// Caching the fetched storage account name to prevent costly network interaction in cases it is not needed
        /// </summary>
        private string FetchedStorageAccountName { get; set; }

        /// <summary>
        /// Caching the fetched storage account resource group to prevent costly network interaction in cases it is not needed
        /// </summary>
        private string FetchedStorageAccountResourceGroup { get; set; }

        /// <summary>
        /// Caching the fetched storage account subscription to prevent costly network interaction in cases it is not needed
        /// </summary>
        private string FetchedStorageAccountSubscription { get; set; }

        /// <summary>
        /// Caching the fetched storage account table endpoint to prevent costly network interaction in cases it is not needed
        /// </summary>
        private string FetchedStorageAccountTableEndpoint { get; set; }

        /// <summary>
        /// In cases when storage is not needed and not provided, there's no need to perform storage related network interaction that may fail
        /// </summary>
        public bool IgnoreStorage { get; set; }

        /// <summary>
        /// Gets or sets the Azure profile
        /// </summary>
        public IAzureContext Context { get; set; }

        /// <summary>
        /// Creates an adapter bound to the given Azure context.
        /// Storage interaction is enabled by default (IgnoreStorage = false).
        /// </summary>
        public SqlAuditAdapter(IAzureContext context)
        {
            Context = context;
            Subscription = context.Subscription;
            Communicator = new AuditingEndpointsCommunicator(Context);
            AzureCommunicator = new AzureEndpointsCommunicator(Context);
            IgnoreStorage = false;
        }

        /// <summary>
        /// Returns the storage account name of the given database server
        /// </summary>
        /// <param name="resourceGroupName">The name of the resource group to which the server belongs</param>
        /// <param name="serverName">The server's name</param>
        /// <returns>The name of the storage account, null if it doesn't exist</returns>
        public string GetServerStorageAccount(string resourceGroupName, string serverName)
        {
            ServerAuditingPolicy policy;
            Communicator.GetServerAuditingPolicy(resourceGroupName, serverName, out policy);
            return policy.Properties.StorageAccountName;
        }

        /// <summary>
        /// Provides a database audit policy model for the given database
        /// </summary>
        public void GetDatabaseAuditingPolicy(string resourceGroup, string serverName, string databaseName, out DatabaseAuditingPolicyModel model)
        {
            DatabaseAuditingPolicy policy;
            Communicator.GetDatabaseAuditingPolicy(resourceGroup, serverName, databaseName, out policy);
            var dbPolicyModel = ModelizeDatabaseAuditPolicy(policy);
            dbPolicyModel.AuditType = AuditType.Table;
            dbPolicyModel.ResourceGroupName = resourceGroup;
            dbPolicyModel.ServerName = serverName;
            dbPolicyModel.DatabaseName = databaseName;

            // Cache the storage properties returned by the service so later "set"
            // operations can reuse them without another network round trip.
            FetchedStorageAccountName = policy.Properties.StorageAccountName;
            FetchedStorageAccountResourceGroup = policy.Properties.StorageAccountResourceGroupName;
            FetchedStorageAccountSubscription = policy.Properties.StorageAccountSubscriptionId;
            FetchedStorageAccountTableEndpoint = policy.Properties.StorageTableEndpoint;

            model = dbPolicyModel;
        }

        /// <summary>
        /// Provides a database audit policy model for the given database
        /// </summary>
        public void GetDatabaseAuditingPolicy(string resourceGroup, string serverName, string databaseName, out DatabaseBlobAuditingPolicyModel model)
        {
            BlobAuditingPolicy policy;
            Communicator.GetDatabaseAuditingPolicy(resourceGroup, serverName, databaseName, out policy);
            var dbPolicyModel = ModelizeDatabaseAuditPolicy(policy);
            dbPolicyModel.AuditType = AuditType.Blob;
            dbPolicyModel.ResourceGroupName = resourceGroup;
            dbPolicyModel.ServerName = serverName;
            dbPolicyModel.DatabaseName = databaseName;
            model = dbPolicyModel;
        }

        /// <summary>
        /// Provides a database server audit policy model for the given server
        /// </summary>
        public void GetServerAuditingPolicy(string resourceGroup, string serverName, out ServerAuditingPolicyModel model)
        {
            ServerAuditingPolicy policy;
            Communicator.GetServerAuditingPolicy(resourceGroup, serverName, out policy);
            var serverPolicyModel = ModelizeServerAuditPolicy(policy);
            serverPolicyModel.AuditType = AuditType.Table;
            serverPolicyModel.ResourceGroupName = resourceGroup;
            serverPolicyModel.ServerName = serverName;

            // Cache the storage properties returned by the service so later "set"
            // operations can reuse them without another network round trip.
            FetchedStorageAccountName = policy.Properties.StorageAccountName;
            FetchedStorageAccountResourceGroup = policy.Properties.StorageAccountResourceGroupName;
            FetchedStorageAccountSubscription = policy.Properties.StorageAccountSubscriptionId;
            FetchedStorageAccountTableEndpoint = policy.Properties.StorageTableEndpoint;

            model = serverPolicyModel;
        }

        /// <summary>
        /// Provides a database server audit policy model for the given server
        /// </summary>
        public void GetServerAuditingPolicy(string resourceGroup, string serverName, out ServerBlobAuditingPolicyModel model)
        {
            BlobAuditingPolicy policy;
            Communicator.GetServerAuditingPolicy(resourceGroup, serverName, out policy);
            var serverPolicyModel = ModelizeServerAuditPolicy(policy);
            serverPolicyModel.AuditType = AuditType.Blob;
            serverPolicyModel.ResourceGroupName = resourceGroup;
            serverPolicyModel.ServerName = serverName;
            model = serverPolicyModel;
        }

        /// <summary>
        /// Transforms the given database policy object to its cmdlet model representation
        /// </summary>
        private DatabaseAuditingPolicyModel ModelizeDatabaseAuditPolicy(DatabaseAuditingPolicy policy)
        {
            var dbPolicyModel = new DatabaseAuditingPolicyModel();
            var properties = policy.Properties;
            dbPolicyModel.AuditState = ModelizeAuditState(properties.AuditingState);
            dbPolicyModel.UseServerDefault = properties.UseServerDefault == SecurityConstants.AuditingEndpoint.Enabled ?
UseServerDefaultOptions.Enabled : UseServerDefaultOptions.Disabled;
            ModelizeStorageInfo(dbPolicyModel, properties.StorageAccountName, properties.StorageAccountKey, properties.StorageAccountSecondaryKey);
            ModelizeEventTypesInfo(dbPolicyModel, properties.EventTypesToAudit);
            ModelizeRetentionInfo(dbPolicyModel, properties.RetentionDays, properties.AuditLogsTableName, properties.FullAuditLogsTableName);
            return dbPolicyModel;
        }

        /// <summary>
        /// Transforms the given database blob auditing policy object to its cmdlet model representation
        /// </summary>
        private DatabaseBlobAuditingPolicyModel ModelizeDatabaseAuditPolicy(BlobAuditingPolicy policy)
        {
            var dbPolicyModel = new DatabaseBlobAuditingPolicyModel();
            var properties = policy.Properties;
            dbPolicyModel.AuditState = ModelizeAuditState(properties.State);
            ModelizeStorageInfo(dbPolicyModel, properties.StorageEndpoint, properties.IsStorageSecondaryKeyInUse);
            ModelizeAuditActionGroups(dbPolicyModel, properties.AuditActionsAndGroups);
            ModelizeAuditActions(dbPolicyModel, properties.AuditActionsAndGroups);
            ModelizeRetentionInfo(dbPolicyModel, properties.RetentionDays);
            return dbPolicyModel;
        }

        /// <summary>
        /// Populates the model's audit action groups from the combined actions-and-groups
        /// list returned by the service. Entries that parse (case-insensitively) as an
        /// AuditActionGroups value are groups; the rest are handled by ModelizeAuditActions.
        /// </summary>
        private void ModelizeAuditActionGroups(BaseBlobAuditingPolicyModel policyModel, IEnumerable<string> auditActionsAndGroups)
        {
            var groups = new List<AuditActionGroups>();
            auditActionsAndGroups.ForEach(item =>
            {
                AuditActionGroups group;
                if (Enum.TryParse(item, true, out group))
                {
                    groups.Add(group);
                }
            });
            policyModel.AuditActionGroup = groups.ToArray();
        }

        /// <summary>
        /// Populates the model's audit actions with every entry of the combined
        /// actions-and-groups list that is NOT an audit action group.
        /// </summary>
        private void ModelizeAuditActions(DatabaseBlobAuditingPolicyModel policyModel, IEnumerable<string> auditActionsAndGroups)
        {
            var actions = new List<string>();
            auditActionsAndGroups.ForEach(item =>
            {
                AuditActionGroups group;
                if (!Enum.TryParse(item, true, out group))
                {
                    actions.Add(item);
                }
            });
            policyModel.AuditAction = actions.ToArray();
        }

        /// <summary>
        /// Copies the retention period (in days) onto the blob auditing model.
        /// NOTE(review): Convert.ToUInt32 throws OverflowException for a negative
        /// value - presumably the service never returns one; confirm.
        /// </summary>
        private void ModelizeRetentionInfo(BaseBlobAuditingPolicyModel model, int retentionDays)
        {
            model.RetentionInDays = Convert.ToUInt32(retentionDays);
        }

        /// <summary>
        /// Populates the storage account name and key type on the model from the blob
        /// storage endpoint, which is expected to look like http(s)://{account}.blob.{suffix}.
        /// Endpoints that do not match that shape are ignored (storage info left unset).
        /// </summary>
        private static void ModelizeStorageInfo(BaseBlobAuditingPolicyModel model, string storageEndpoint, bool isSecondary)
        {
            if (string.IsNullOrEmpty(storageEndpoint))
            {
                return;
            }
            var accountNameStartIndex = storageEndpoint.StartsWith("https://", StringComparison.InvariantCultureIgnoreCase) ? 8 : 7; // https:// or http://
            var accountNameEndIndex = storageEndpoint.IndexOf(".blob", StringComparison.InvariantCultureIgnoreCase);
            if (accountNameEndIndex < accountNameStartIndex)
            {
                // The endpoint does not match the expected http(s)://{account}.blob.{suffix}
                // shape (e.g. ".blob" is absent, IndexOf returned -1). Previously this fell
                // through to Substring and threw ArgumentOutOfRangeException; treat it like
                // a missing endpoint instead, consistent with the null/empty guard above.
                return;
            }
            model.StorageAccountName = storageEndpoint.Substring(accountNameStartIndex, accountNameEndIndex - accountNameStartIndex);
            model.StorageKeyType = (isSecondary) ? StorageKeyKind.Secondary : StorageKeyKind.Primary;
        }

        /// <summary>
        /// Transforms the given server policy object to its cmdlet model representation
        /// </summary>
        private ServerAuditingPolicyModel ModelizeServerAuditPolicy(ServerAuditingPolicy policy)
        {
            var serverPolicyModel = new ServerAuditingPolicyModel();
            var properties = policy.Properties;
            serverPolicyModel.AuditState = ModelizeAuditState(properties.AuditingState);
            ModelizeStorageInfo(serverPolicyModel, properties.StorageAccountName, properties.StorageAccountKey, properties.StorageAccountSecondaryKey);
            ModelizeEventTypesInfo(serverPolicyModel, properties.EventTypesToAudit);
            ModelizeRetentionInfo(serverPolicyModel, properties.RetentionDays, properties.AuditLogsTableName, properties.FullAuditLogsTableName);
            return serverPolicyModel;
        }

        /// <summary>
        /// Transforms the given server blob auditing policy object to its cmdlet model representation
        /// </summary>
        private ServerBlobAuditingPolicyModel ModelizeServerAuditPolicy(BlobAuditingPolicy policy)
        {
            var serverPolicyModel = new ServerBlobAuditingPolicyModel();
            var properties = policy.Properties;
            serverPolicyModel.AuditState = ModelizeAuditState(properties.State);
            ModelizeStorageInfo(serverPolicyModel, properties.StorageEndpoint, properties.IsStorageSecondaryKeyInUse);
            ModelizeAuditActionGroups(serverPolicyModel, properties.AuditActionsAndGroups);
            ModelizeRetentionInfo(serverPolicyModel, properties.RetentionDays);
            return serverPolicyModel;
        }

        /// <summary>
        /// Transforms the given policy state in a string form to its cmdlet model
/// representation
        /// </summary>
        private static AuditStateType ModelizeAuditState(string auditState)
        {
            // If the service value does not parse, 'value' keeps the enum's default
            // (zero) member - NOTE(review): confirm that is the desired fallback.
            AuditStateType value;
            Enum.TryParse(auditState, true, out value);
            return value;
        }

        /// <summary>
        /// Updates the content of the model object with all the storage related information.
        /// A non-empty secondary key selects StorageKeyKind.Secondary; otherwise Primary.
        /// </summary>
        private static void ModelizeStorageInfo(BaseTableAuditingPolicyModel model, string accountName, string primary, string secondary)
        {
            model.StorageAccountName = accountName;
            if (!string.IsNullOrEmpty(secondary))
            {
                model.StorageKeyType = StorageKeyKind.Secondary;
            }
            else
            {
                model.StorageKeyType = StorageKeyKind.Primary;
            }
        }

        /// <summary>
        /// Updates the given model with all the event types information found in the
        /// service's comma-separated event types string.
        /// </summary>
        private static void ModelizeEventTypesInfo(BaseTableAuditingPolicyModel model, string eventTypesToAudit)
        {
            HashSet<AuditEventType> events = new HashSet<AuditEventType>();
            if (eventTypesToAudit != null)
            {
                // Table of recognized event-type tokens and the model value each maps to.
                // string.Contains performs an ordinal search, unlike the culture-sensitive
                // string.IndexOf(string) previously used here (CA1310).
                var eventTypeMapping = new Dictionary<string, AuditEventType>
                {
                    { SecurityConstants.PlainSQL_Success, AuditEventType.PlainSQL_Success },
                    { SecurityConstants.PlainSQL_Failure, AuditEventType.PlainSQL_Failure },
                    { SecurityConstants.ParameterizedSQL_Success, AuditEventType.ParameterizedSQL_Success },
                    { SecurityConstants.ParameterizedSQL_Failure, AuditEventType.ParameterizedSQL_Failure },
                    { SecurityConstants.StoredProcedure_Success, AuditEventType.StoredProcedure_Success },
                    { SecurityConstants.StoredProcedure_Failure, AuditEventType.StoredProcedure_Failure },
                    { SecurityConstants.Login_Success, AuditEventType.Login_Success },
                    { SecurityConstants.Login_Failure, AuditEventType.Login_Failure },
                    { SecurityConstants.TransactionManagement_Success, AuditEventType.TransactionManagement_Success },
                    { SecurityConstants.TransactionManagement_Failure, AuditEventType.TransactionManagement_Failure }
                };

                foreach (var mapping in eventTypeMapping)
                {
                    if (eventTypesToAudit.Contains(mapping.Key))
                    {
                        events.Add(mapping.Value);
                    }
                }
            }
            model.EventType = events.ToArray();
        }

        /// <summary>
        /// Updates the content of the model object with all the retention information
        /// </summary>
        private static void ModelizeRetentionInfo(BaseTableAuditingPolicyModel model, string retentionDays, string auditLogsTableName, string fullAuditLogsTableName)
        {
            model.TableIdentifier = auditLogsTableName;
            model.FullAuditLogsTableName = fullAuditLogsTableName;
            uint retentionDaysForModel;
            if (!(uint.TryParse(retentionDays, out retentionDaysForModel)))
            {
                // Missing or unparsable retention values fall back to 0.
                retentionDaysForModel = 0;
            }
            model.RetentionInDays = retentionDaysForModel;
        }

        /// <summary>
        /// Transforms the given model to its endpoints acceptable structure and sends it to the endpoint
        /// </summary>
        public void SetDatabaseAuditingPolicy(DatabaseAuditingPolicyModel model, string storageEndpointSuffix)
        {
            if (!IsDatabaseInServiceTierForPolicy(model.ResourceGroupName, model.ServerName, model.DatabaseName))
            {
                throw new Exception(Properties.Resources.DatabaseNotInServiceTierForAuditingPolicy);
            }
            var parameters = PolicizeDatabaseAuditingModel(model, storageEndpointSuffix);
            Communicator.SetDatabaseAuditingPolicy(model.ResourceGroupName, model.ServerName, model.DatabaseName, parameters);
        }

        /// <summary>
        /// Transforms the given model to its endpoints acceptable structure and sends it to the endpoint
        /// </summary>
        public void SetDatabaseAuditingPolicy(DatabaseBlobAuditingPolicyModel model, string storageEndpointSuffix)
        {
            if (!IsDatabaseInServiceTierForPolicy(model.ResourceGroupName, model.ServerName, model.DatabaseName))
            {
                throw new Exception(Properties.Resources.DatabaseNotInServiceTierForAuditingPolicy);
            }
            var parameters = PolicizeBlobAuditingModel(model, storageEndpointSuffix);
            Communicator.SetDatabaseAuditingPolicy(model.ResourceGroupName, model.ServerName,
model.DatabaseName, parameters);
        }

        /// <summary>
        /// Transforms the given model to its endpoints acceptable structure and sends it to the endpoint
        /// </summary>
        public void SetServerAuditingPolicy(ServerAuditingPolicyModel model, string storageEndpointSuffix)
        {
            var parameters = PolicizeServerAuditingModel(model, storageEndpointSuffix);
            Communicator.SetServerAuditingPolicy(model.ResourceGroupName, model.ServerName, parameters);
        }

        /// <summary>
        /// Transforms the given model to its endpoints acceptable structure and sends it to the endpoint
        /// </summary>
        public void SetServerAuditingPolicy(ServerBlobAuditingPolicyModel model, string storageEndpointSuffix)
        {
            var parameters = PolicizeBlobAuditingModel(model, storageEndpointSuffix);
            Communicator.SetServerAuditingPolicy(model.ResourceGroupName, model.ServerName, parameters);
        }

        /// <summary>
        /// Returns true when the database's service tier supports an auditing policy
        /// (any edition other than None or Free).
        /// NOTE(review): an edition string that fails Enum.TryParse leaves 'edition' at
        /// the enum's default value - confirm DatabaseEdition's default is None so that
        /// unrecognized editions are rejected rather than accepted.
        /// </summary>
        private bool IsDatabaseInServiceTierForPolicy(string resourceGroupName, string serverName, string databaseName)
        {
            var dbCommunicator = new AzureSqlDatabaseCommunicator(Context);
            var database = dbCommunicator.Get(resourceGroupName, serverName, databaseName);
            DatabaseEdition edition;
            Enum.TryParse(database.Properties.Edition, true, out edition);
            if (edition != DatabaseEdition.None && edition != DatabaseEdition.Free)
            {
                return true;
            }
            return false;
        }

        /// <summary>
        /// Takes the cmdlets model object and transforms it to the policy as expected by the endpoint
        /// </summary>
        /// <param name="model">The AuditingPolicy model object</param>
        /// <param name="storageEndpointSuffix">The suffix of the storage endpoint</param>
        /// <returns>The communication model object</returns>
        private DatabaseAuditingPolicyCreateOrUpdateParameters PolicizeDatabaseAuditingModel(DatabaseAuditingPolicyModel model, string storageEndpointSuffix)
        {
            var updateParameters = new DatabaseAuditingPolicyCreateOrUpdateParameters();
            var properties = new DatabaseAuditingPolicyProperties();
            updateParameters.Properties = properties;
            properties.AuditingState = model.AuditState.ToString();
            properties.UseServerDefault = (model.UseServerDefault == UseServerDefaultOptions.Enabled) ?
                SecurityConstants.AuditingEndpoint.Enabled : SecurityConstants.AuditingEndpoint.Disabled;
            // Resolve the storage account, then derive every other storage-related
            // setting (resource group, subscription, endpoint, keys) from it.
            properties.StorageAccountName = ExtractStorageAccountName(model);
            properties.StorageAccountResourceGroupName = ExtractStorageAccountResourceGroup(properties.StorageAccountName);
            properties.StorageAccountSubscriptionId = ExtractStorageAccountSubscriptionId(properties.StorageAccountName);
            properties.StorageTableEndpoint = ExtractStorageAccountTableEndpoint(properties.StorageAccountName, storageEndpointSuffix);
            properties.StorageAccountKey = ExtractStorageAccountKey(properties.StorageAccountName, model, properties.StorageAccountResourceGroupName, StorageKeyKind.Primary);
            properties.StorageAccountSecondaryKey = ExtractStorageAccountKey(properties.StorageAccountName, model, properties.StorageAccountResourceGroupName, StorageKeyKind.Secondary);
            properties.EventTypesToAudit = ExtractEventTypes(model);
            properties.RetentionDays = model.RetentionInDays.ToString();
            properties.AuditLogsTableName = model.TableIdentifier;
            return updateParameters;
        }

        /// <summary>
        /// Takes the cmdlets model object and transforms it to the policy as expected by the endpoint
        /// </summary>
        /// <param name="model">The AuditingPolicy model object</param>
        /// <param name="storageEndpointSuffix">The suffix of the storage endpoint</param>
        /// <returns>The communication model object</returns>
        private BlobAuditingCreateOrUpdateParameters PolicizeBlobAuditingModel(BaseBlobAuditingPolicyModel model, string storageEndpointSuffix)
        {
            var updateParameters = new BlobAuditingCreateOrUpdateParameters();
            var properties = new BlobAuditingProperties();
            updateParameters.Properties = properties;
            properties.State = model.AuditState.ToString();
            // Storage settings are only resolved when auditing is being enabled and
            // storage interaction has not been explicitly suppressed.
            if (!IgnoreStorage && (model.AuditState == AuditStateType.Enabled))
            {
                properties.StorageEndpoint = ExtractStorageAccountName(model, storageEndpointSuffix);
                properties.StorageAccountAccessKey = ExtractStorageAccountKey(model.StorageAccountName);
                properties.IsStorageSecondaryKeyInUse = model.StorageKeyType == StorageKeyKind.Secondary;
                properties.StorageAccountSubscriptionId = ExtractStorageAccountSubscriptionId(model.StorageAccountName);
            }
            properties.AuditActionsAndGroups = ExtractAuditActionsAndGroups(model);
            if (model.RetentionInDays != null)
            {
                properties.RetentionDays = (int) model.RetentionInDays;
            }
            return updateParameters;
        }

        /// <summary>
        /// Combines the model's audit actions and audit action groups into the single
        /// list the endpoint expects, falling back to the service's default groups when
        /// the user specified neither.
        /// </summary>
        private static IList<string> ExtractAuditActionsAndGroups(BaseBlobAuditingPolicyModel model)
        {
            var dbPolicyModel = model as DatabaseBlobAuditingPolicyModel;
            var actionsAndGroups = new List<string>();
            if (dbPolicyModel != null)
            {
                // NOTE(review): assumes AuditAction is non-null for database models -
                // confirm against the model's initialization before relying on it.
                actionsAndGroups.AddRange(dbPolicyModel.AuditAction);
            }
            model.AuditActionGroup.ToList().ForEach(aag => actionsAndGroups.Add(aag.ToString()));
            if (actionsAndGroups.Count == 0) // default audit actions and groups in case nothing was defined by the user
            {
                actionsAndGroups.Add("SUCCESSFUL_DATABASE_AUTHENTICATION_GROUP");
                actionsAndGroups.Add("FAILED_DATABASE_AUTHENTICATION_GROUP");
                actionsAndGroups.Add("BATCH_COMPLETED_GROUP");
            }
            return actionsAndGroups;
        }

        /// <summary>
        /// Takes the cmdlets model object and transforms it to the policy as expected by the endpoint
        /// </summary>
        /// <param name="model">The AuditingPolicy model object</param>
        /// <param name="storageEndpointSuffix">The suffix of the storage endpoint</param>
        /// <returns>The communication model object</returns>
        private ServerAuditingPolicyCreateOrUpdateParameters PolicizeServerAuditingModel(ServerAuditingPolicyModel model, string storageEndpointSuffix)
        {
            var updateParameters = new ServerAuditingPolicyCreateOrUpdateParameters();
            var properties = new ServerAuditingPolicyProperties();
            updateParameters.Properties = properties;
            properties.AuditingState = model.AuditState.ToString();
            properties.StorageAccountName = ExtractStorageAccountName(model);
            properties.StorageAccountResourceGroupName =
ExtractStorageAccountResourceGroup(properties.StorageAccountName); properties.StorageAccountSubscriptionId = ExtractStorageAccountSubscriptionId(properties.StorageAccountName); properties.StorageTableEndpoint = ExtractStorageAccountTableEndpoint(properties.StorageAccountName, storageEndpointSuffix); properties.StorageAccountKey = ExtractStorageAccountKey(properties.StorageAccountName, model, properties.StorageAccountResourceGroupName, StorageKeyKind.Primary); properties.StorageAccountSecondaryKey = ExtractStorageAccountKey(properties.StorageAccountName, model, properties.StorageAccountResourceGroupName, StorageKeyKind.Secondary); properties.EventTypesToAudit = ExtractEventTypes(model); properties.RetentionDays = model.RetentionInDays.ToString(); properties.AuditLogsTableName = model.TableIdentifier; return updateParameters; } /// <summary> /// Extracts the storage account name from the given model /// </summary> private static string ExtractStorageAccountName(BaseBlobAuditingPolicyModel model, string endpointSuffix) { return string.Format("https://{0}.blob.{1}", model.StorageAccountName, endpointSuffix); } /// <summary> /// Extracts the storage account name from the given model /// </summary> private string ExtractStorageAccountName(BaseTableAuditingPolicyModel model) { string storageAccountName; if (model.StorageAccountName == FetchedStorageAccountName) // the user provided the same storage account that was given before { storageAccountName = FetchedStorageAccountName; } else if (model.StorageAccountName == null) // the user did not provided storage account for a policy for which such account is already defined { storageAccountName = FetchedStorageAccountName; } else // the user updates the name of the storage account { storageAccountName = model.StorageAccountName; } if (string.IsNullOrEmpty(storageAccountName) && (!IgnoreStorage)) // can happen if the user didn't provide account name for a policy that lacked it { throw new 
Exception(string.Format(Properties.Resources.NoStorageAccountWhenConfiguringAuditingPolicy)); } return storageAccountName; } /// <summary> /// Extracts the event types from the given model /// </summary> private static string ExtractEventTypes(BaseTableAuditingPolicyModel model) { if (model.EventType == null) { return null; } if (model.EventType.Any(t => t == AuditEventType.None)) { if (model.EventType.Count() == 1) { return string.Empty; } if (model.EventType.Any(t => t != AuditEventType.None)) { throw new Exception(Properties.Resources.InvalidEventTypeSet); } } return string.Join(",", model.EventType.Select(t => t.ToString())); } /// <summary> /// Extracts the storage account endpoint /// </summary> private string ExtractStorageAccountTableEndpoint(string storageName, string endpointSuffix) { if (IgnoreStorage || (storageName == FetchedStorageAccountName && FetchedStorageAccountTableEndpoint != null)) { return FetchedStorageAccountTableEndpoint; } return string.Format("https://{0}.table.{1}", storageName, endpointSuffix); } /// <summary> /// Extracts the storage account subscription id /// </summary> private string ExtractStorageAccountSubscriptionId(string storageName) { if (IgnoreStorage || (storageName == FetchedStorageAccountName && FetchedStorageAccountSubscription != null)) { return FetchedStorageAccountSubscription; } return Subscription.Id.ToString(); } /// <summary> /// Extracts the storage account resource group /// </summary> private string ExtractStorageAccountResourceGroup(string storageName) { if (IgnoreStorage || (storageName == FetchedStorageAccountName && FetchedStorageAccountResourceGroup != null)) { return FetchedStorageAccountResourceGroup; } return AzureCommunicator.GetStorageResourceGroup(storageName); } /// <summary> /// Extracts the storage account requested key /// </summary> private string ExtractStorageAccountKey(string storageName, BaseTableAuditingPolicyModel model, string storageAccountResourceGroup, StorageKeyKind keyType) { if 
(!IgnoreStorage && (model.StorageKeyType == keyType)) { return AzureCommunicator.GetStorageKeys(storageAccountResourceGroup, storageName)[keyType]; } return null; } /// <summary> /// Extracts the storage account requested key /// </summary> private string ExtractStorageAccountKey(string storageName) { return AzureCommunicator.GetStorageKeys(storageName)[StorageKeyKind.Primary]; } internal void ClearStorageDetailsCache() { FetchedStorageAccountName = null; FetchedStorageAccountResourceGroup = null; FetchedStorageAccountSubscription = null; FetchedStorageAccountTableEndpoint = null; } } }
using Bridge.Test.NUnit;

namespace Bridge.ClientTest.Batch3.BridgeIssues
{
    // Regression tests for Bridge issue #1341: Script.ToPlainObject must strip the
    // Bridge runtime members (getHashCode/toJSON/ctor/equals) from anonymous-type
    // instances, while plain anonymous-type creation keeps them. The string indexers
    // (e.g. o1["getHashCode"]) inspect the emitted JavaScript object's members.
    [Category(Constants.MODULE_ISSUES)]
    [TestFixture(TestNameFormat = "#1341 - {0}")]
    public class Bridge1341
    {
        // ToPlainObject results must expose only the data members, no Bridge machinery.
        [Test]
        public static void TestPlainObject()
        {
            var o1 = Script.ToPlainObject(new { A = 1 });
            Assert.NotNull(o1, "o1 not null");
            Assert.AreEqual(1, o1.A, "o1.A == 1");
            Assert.Null(o1["getHashCode"], "o1 has no getHashCode");
            Assert.Null(o1["toJSON"], "o1 has no toJSON");
            Assert.Null(o1["ctor"], "o1 has no ctor");
            Assert.Null(o1["equals"], "o1 has no equals");
            Assert.NotNull(o1["A"], "o1 has a");

            var o2 = Script.ToPlainObject(new { A = 1, B = "2" });
            Assert.NotNull(o2, "o2 not null");
            Assert.AreEqual(1, o2.A, "o2.A == 1");
            Assert.AreEqual("2", o2.B, "o2.B == \"2\"");

            var o3 = Script.ToPlainObject(new { A = 1, B = new SomeStructA() { Value1 = 1 } });
            Assert.NotNull(o3, "o3 not null");
            Assert.AreEqual(1, o3.A, "o3.A == 1");
            Assert.NotNull(o3.B, "o3.B not null");
            Assert.AreEqual(1, o3.B.Value1, "o3.B.Value1 == 1");

            var o4 = Script.ToPlainObject(new { A = 1, B = new SomeStructA() { Value1 = 1 } });
            Assert.NotNull(o4, "o4 not null");
            Assert.AreEqual(1, o4.A, "o4.A == 1");
            Assert.NotNull(o4.B, "o4.B not null");
            Assert.AreEqual(1, o4.B.Value1, "o4.B.Value1 == 1");

            // Nested class/struct members must survive the conversion intact.
            var o5 = Script.ToPlainObject(new { A = 1, B = new SomeClassB() { Value1 = 1, Value2 = new SomeStructA() { Value1 = 1 } } });
            Assert.NotNull(o5, "o5 not null");
            Assert.AreEqual(1, o5.A, "o5.A == 1");
            Assert.NotNull(o5.B, "o5.B not null");
            Assert.AreEqual(1, o5.B.Value1, "o5.B.Value1 == 1");
            Assert.NotNull(o5.B.Value2, "o5.B.Value2 not null");
            Assert.AreEqual(1, o5.B.Value2.Value1, "o5.B.Value2.Value1 == 1");
            Assert.Null(o5["getHashCode"], "o5 has no getHashCode");
            Assert.Null(o5["toJSON"], "o5 has no toJSON");
            Assert.Null(o5["$constructor"], "o5 has no $constructor");
            Assert.Null(o5["equals"], "o5 has no equals");
            Assert.NotNull(o5["A"], "o5 has a");
            Assert.NotNull(o5["B"], "o5 has b");
            Assert.NotNull(o5.B["Value1"], "o5.B has getValue1");
        }

        // Plain anonymous-type creation keeps the Bridge runtime members in place.
        [Test]
        public static void TestAnonymousTypeCreation()
        {
            var o1 = new { A = 1 };
            Assert.NotNull(o1, "o1 not null");
            Assert.AreEqual(1, o1.A, "o1.A == 1");
            Assert.NotNull(o1["getHashCode"], "o1 has getHashCode");
            Assert.NotNull(o1["toJSON"], "o1 has toJSON");
            Assert.NotNull(o1["ctor"], "o1 has ctor");
            Assert.NotNull(o1["equals"], "o1 has equals");

            var o2 = new { A = 1, B = "2" };
            Assert.NotNull(o2, "o2 not null");
            Assert.AreEqual(1, o2.A, "o2.A == 1");
            Assert.AreEqual("2", o2.B, "o2.B == \"2\"");

            var o3 = new { A = 1, B = new SomeStructA() { Value1 = 1 } };
            Assert.NotNull(o3, "o3 not null");
            Assert.AreEqual(1, o3.A, "o3.A == 1");
            Assert.NotNull(o3.B, "o3.B not null");
            Assert.AreEqual(1, o3.B.Value1, "o3.B.Value1 == 1");

            var o4 = new { A = 1, B = new SomeStructA() { Value1 = 1 } };
            Assert.NotNull(o4, "o4 not null");
            Assert.AreEqual(1, o4.A, "o4.A == 1");
            Assert.NotNull(o4.B, "o4.B not null");
            Assert.AreEqual(1, o4.B.Value1, "o4.B.Value1 == 1");

            var o5 = new { A = 1, B = new SomeClassB() { Value1 = 1, Value2 = new SomeStructA() { Value1 = 1 } } };
            Assert.NotNull(o5, "o5 not null");
            Assert.AreEqual(1, o5.A, "o5.A == 1");
            Assert.NotNull(o5.B, "o5.B not null");
            Assert.AreEqual(1, o5.B.Value1, "o5.B.Value1 == 1");
            Assert.NotNull(o5.B.Value2, "o5.B.Value2 not null");
            Assert.AreEqual(1, o5.B.Value2.Value1, "o5.B.Value2.Value1 == 1");
            Assert.NotNull(o5["getHashCode"], "o5 has getHashCode");
            Assert.NotNull(o5["toJSON"], "o5 has toJSON");
            Assert.NotNull(o5["ctor"], "o5 has ctor");
            Assert.NotNull(o5["equals"], "o5 has equals");
        }

        // Distinct struct types must hash differently even with identical field values.
        [Test]
        public static void TestDiffStructHashCode()
        {
            var s = new SomeStructA() { Value1 = 10 };
            var s1 = new SomeStructA1() { Value1 = 10 };
            Assert.AreNotEqual(s.GetHashCode(), s1.GetHashCode(), "Structs of diff types with same fields and values should give diff hash codes");
            var s2 = new SomeStructA2() { Value2 = 10 };
            Assert.AreNotEqual(s.GetHashCode(), s2.GetHashCode(), "Structs of diff types with same values should give diff hash codes");
        }

        // Distinct anonymous types must hash differently even with identical values.
        [Test]
        public static void TestDiffAnonymousTypesHashCode()
        {
            var s = new { Value1 = 10 };
            var s1 = new { Value2 = 10 };
            Assert.AreNotEqual(s.GetHashCode(), s1.GetHashCode(), "Same field values should give diff hash codes");
        }

        // Shared assertion matrix: values = { o1, o2 (equal to o1), o3 (different value),
        // o4 (different shape), o5 (same reference as o1), o6 (same reference as o3) }.
        private static void Test(object[] values)
        {
            var o1 = values[0];
            var o2 = values[1];
            var o3 = values[2];
            var o4 = values[3];
            var o5 = values[4];
            var o6 = values[5];
            Assert.AreEqual(o1.GetHashCode(), o2.GetHashCode(), "GetHashCode o1 == o2");
            Assert.AreNotEqual(o1.GetHashCode(), o3.GetHashCode(), "GetHashCode o1 != o3");
            Assert.AreNotEqual(o1.GetHashCode(), o4.GetHashCode(), "GetHashCode o1 != o4");
            Assert.AreEqual(o1.GetHashCode(), o5.GetHashCode(), "GetHashCode o1 == o5");
            Assert.AreNotEqual(o1.GetHashCode(), o6.GetHashCode(), "GetHashCode o1 != o6");
            Assert.True(o1.Equals(o2), "Equals o1 == o2");
            Assert.False(o1.Equals(o3), "Equals o1 != o3");
            Assert.False(o1.Equals(o4), "Equals o1 != o4");
            Assert.True(o1.Equals(o5), "Equals o1 == o5");
            Assert.False(o1.Equals(o6), "Equals o1 != o6");
            Assert.True(o2.Equals(o1), "Equals o2 == o1");
            Assert.False(o3.Equals(o1), "Equals o3 != o1");
            Assert.False(o4.Equals(o1), "Equals o4 != o1");
            Assert.True(o5.Equals(o1), "Equals o5 == o1");
            Assert.False(o6.Equals(o1), "Equals o6 != o1");
        }

        [Test]
        public static void Test1AnonymousType()
        {
            var o1 = new { A = 1 };
            var o2 = new { A = 1 };
            var o3 = new { A = 2 };
            var o4 = new { B = 1 };
            var o5 = o1;
            var o6 = o3;
            object[] values = new object[] { o1, o2, o3, o4, o5, o6 };
            Test(values);
        }

        [Test]
        public static void Test2AnonymousType()
        {
            var o1 = new { A = 1, B = "2" };
            var o2 = new { A = 1, B = "2" };
            var o3 = new { A = 1, B = "3" };
            var o4 = new { B = 1, C = "2" };
            var o5 = o1;
            var o6 = o3;
            object[] values = new object[] { o1, o2, o3, o4, o5, o6 };
            Test(values);
        }

        // NOTE(review): o3 here wraps a reference type (SomeClassA), so equality with o1
        // is by reference and o1 != o3 even though field values match.
        [Test]
        public static void Test3AnonymousType()
        {
            var o1 = new { A = 1, B = new SomeClassA() { Value1 = 1 } };
            var o2 = o1;
            var o3 = new { A = 1, B = new SomeClassA() { Value1 = 1 } };
            var o4 = new { B = 1, C = new SomeClassA() { Value1 = 1 } };
            var o5 = o1;
            var o6 = o3;
            object[] values = new object[] { o1, o2, o3, o4, o5, o6 };
            Test(values);
        }

        [Test]
        public static void Test4AnonymousType()
        {
            var o1 = new { A = 1, B = new SomeStructA() { Value1 = 1 } };
            var o2 = new { A = 1, B = new SomeStructA() { Value1 = 1 } };
            var o3 = new { A = 1, B = new SomeStructA() { Value1 = 2 } };
            var o4 = new { B = 1, A = new SomeStructA() { Value1 = 1 } };
            var o5 = o1;
            var o6 = o3;
            object[] values = new object[] { o1, o2, o3, o4, o5, o6 };
            Test(values);
        }

        [Test]
        public static void Test5AnonymousType()
        {
            var o1 = new { A = 1, B = new SomeClassB() { Value1 = 1, Value2 = new SomeStructA() { Value1 = 1 } } };
            var o2 = o1;
            var o3 = new { A = 1, B = new SomeClassB() { Value1 = 1, Value2 = new SomeStructA() { Value1 = 1 } } };
            var o4 = new { B = 1, C = new SomeClassB() { Value1 = 1, Value2 = new SomeStructA() { Value1 = 1 } } };
            var o5 = o1;
            var o6 = o3;
            object[] values = new object[] { o1, o2, o3, o4, o5, o6 };
            Test(values);
        }

        // Fixture helper types: minimal class/struct shapes used by the tests above.
        private class SomeClassA
        {
            public int Value1 { get; set; }
        }

        private struct SomeStructA
        {
            public int Value1 { get; set; }
        }

        private struct SomeStructA1
        {
            public int Value1 { get; set; }
        }

        private struct SomeStructA2
        {
            public int Value2 { get; set; }
        }

        private class SomeClassB
        {
            public int Value1 { get; set; }
            public SomeStructA Value2 { get; set; }
        }
    }
}
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE file in the project root for more information.
//

using System;
using System.Runtime.CompilerServices;
using System.Runtime.InteropServices;
using System.Runtime.Intrinsics.X86;
using System.Runtime.Intrinsics;

namespace IntelHardwareIntrinsicTest
{
    class Program
    {
        const int Pass = 100;
        const int Fail = 0;

        /// <summary>
        /// Verifies Avx.SetZeroVector256&lt;T&gt; for every supported element type:
        /// a buffer pre-filled with non-zero sentinel values must contain all-zero
        /// bits after the intrinsic's result is written over it.
        /// Returns Pass (100) when every type checks out (or AVX is unsupported),
        /// Fail (0) otherwise.
        /// </summary>
        static unsafe int Main(string[] args)
        {
            int testResult = Pass;

            if (Avx.IsSupported)
            {
                // Floating-point cases use NaN sentinels and compare raw bits:
                // NaN != NaN and -0.0 == 0.0 would defeat a value comparison.
                if (!TestZero(Filled(8, float.NaN), (x) => BitConverter.SingleToInt32Bits(x) == 0, "float"))
                    testResult = Fail;
                if (!TestZero(Filled(4, double.NaN), (x) => BitConverter.DoubleToInt64Bits(x) == 0, "double"))
                    testResult = Fail;

                // Integer cases use MaxValue sentinels and a plain value comparison.
                if (!TestZero(Filled(4, long.MaxValue), (x) => x == 0, "long"))
                    testResult = Fail;
                if (!TestZero(Filled(4, ulong.MaxValue), (x) => x == 0, "ulong"))
                    testResult = Fail;
                if (!TestZero(Filled(8, int.MaxValue), (x) => x == 0, "int"))
                    testResult = Fail;
                if (!TestZero(Filled(8, uint.MaxValue), (x) => x == 0, "uint"))
                    testResult = Fail;
                if (!TestZero(Filled(16, short.MaxValue), (x) => x == 0, "short"))
                    testResult = Fail;
                if (!TestZero(Filled(16, ushort.MaxValue), (x) => x == 0, "ushort"))
                    testResult = Fail;
                if (!TestZero(Filled(32, sbyte.MaxValue), (x) => x == 0, "sbyte"))
                    testResult = Fail;
                if (!TestZero(Filled(32, byte.MaxValue), (x) => x == 0, "byte"))
                    testResult = Fail;
            }

            return testResult;
        }

        /// <summary>
        /// Creates an array of <paramref name="count"/> copies of <paramref name="value"/>.
        /// The count is always the Vector256 element count for the element type.
        /// </summary>
        private static T[] Filled<T>(int count, T value)
        {
            var result = new T[count];
            for (int i = 0; i < count; i++)
            {
                result[i] = value;
            }
            return result;
        }

        /// <summary>
        /// Writes Avx.SetZeroVector256&lt;T&gt;() over the pinned sentinel buffer and
        /// verifies every element satisfies <paramref name="isZero"/>. On failure the
        /// buffer contents are dumped to the console (same diagnostics as before the
        /// refactoring) and false is returned.
        /// </summary>
        private static unsafe bool TestZero<T>(T[] sentinels, Func<T, bool> isZero, string typeName) where T : struct
        {
            using (TestTable<T> table = new TestTable<T>(sentinels))
            {
                var vf1 = Avx.SetZeroVector256<T>();
                Unsafe.Write(table.outArrayPtr, vf1);
                if (!table.CheckResult(isZero))
                {
                    Console.WriteLine("AVX SetZeroVector256 failed on " + typeName + ":");
                    foreach (var item in table.outArray)
                    {
                        Console.Write(item + ", ");
                    }
                    Console.WriteLine();
                    return false;
                }
            }
            return true;
        }

        /// <summary>
        /// Pins an output array (via GCHandle) so its address can be handed to
        /// Unsafe.Write; Dispose releases the pin.
        /// </summary>
        public unsafe struct TestTable<T> : IDisposable where T : struct
        {
            public T[] outArray;

            public void* outArrayPtr => outHandle.AddrOfPinnedObject().ToPointer();

            GCHandle outHandle;

            public TestTable(T[] a)
            {
                this.outArray = a;
                outHandle = GCHandle.Alloc(outArray, GCHandleType.Pinned);
            }

            /// <summary>Returns true when every element passes the supplied predicate.</summary>
            public bool CheckResult(Func<T, bool> check)
            {
                for (int i = 0; i < outArray.Length; i++)
                {
                    if (!check(outArray[i]))
                    {
                        return false;
                    }
                }
                return true;
            }

            public void Dispose()
            {
                outHandle.Free();
            }
        }
    }
}
// Copyright (c) Microsoft Corporation. All rights reserved.
// Licensed under the MIT License. See License.txt in the project root for
// license information.
//
// Code generated by Microsoft (R) AutoRest Code Generator 0.13.0.0
// Changes may cause incorrect behavior and will be lost if the code is
// regenerated.

namespace Microsoft.Azure.Management.Network
{
    using System;
    using System.Collections;
    using System.Collections.Generic;
    using System.Threading;
    using System.Threading.Tasks;
    using Microsoft.Rest;
    using Microsoft.Rest.Azure;
    using Models;

    // Synchronous and asynchronous convenience wrappers over IRoutesOperations.
    // NOTE(review): the sync methods block on the async implementation via
    // Task.Factory.StartNew(...).Unwrap().GetAwaiter().GetResult(), presumably to
    // run the continuation on the default scheduler rather than the caller's
    // synchronization context — confirm against the AutoRest template before changing.
    public static partial class RoutesOperationsExtensions
    {
        /// <summary>
        /// The delete route operation deletes the specified route from a route table.
        /// </summary>
        /// <param name='operations'>
        /// The operations group for this extension method.
        /// </param>
        /// <param name='resourceGroupName'>
        /// The name of the resource group.
        /// </param>
        /// <param name='routeTableName'>
        /// The name of the route table.
        /// </param>
        /// <param name='routeName'>
        /// The name of the route.
        /// </param>
        public static void Delete(this IRoutesOperations operations, string resourceGroupName, string routeTableName, string routeName)
        {
            Task.Factory.StartNew(s => ((IRoutesOperations)s).DeleteAsync(resourceGroupName, routeTableName, routeName), operations, CancellationToken.None, TaskCreationOptions.None, TaskScheduler.Default).Unwrap().GetAwaiter().GetResult();
        }

        /// <summary>
        /// The delete route operation deletes the specified route from a route table.
        /// </summary>
        /// <param name='operations'>
        /// The operations group for this extension method.
        /// </param>
        /// <param name='resourceGroupName'>
        /// The name of the resource group.
        /// </param>
        /// <param name='routeTableName'>
        /// The name of the route table.
        /// </param>
        /// <param name='routeName'>
        /// The name of the route.
        /// </param>
        /// <param name='cancellationToken'>
        /// The cancellation token.
        /// </param>
        public static async Task DeleteAsync(this IRoutesOperations operations, string resourceGroupName, string routeTableName, string routeName, CancellationToken cancellationToken = default(CancellationToken))
        {
            await operations.DeleteWithHttpMessagesAsync(resourceGroupName, routeTableName, routeName, null, cancellationToken).ConfigureAwait(false);
        }

        /// <summary>
        /// The delete route operation deletes the specified route from a route table.
        /// </summary>
        /// <param name='operations'>
        /// The operations group for this extension method.
        /// </param>
        /// <param name='resourceGroupName'>
        /// The name of the resource group.
        /// </param>
        /// <param name='routeTableName'>
        /// The name of the route table.
        /// </param>
        /// <param name='routeName'>
        /// The name of the route.
        /// </param>
        public static void BeginDelete(this IRoutesOperations operations, string resourceGroupName, string routeTableName, string routeName)
        {
            Task.Factory.StartNew(s => ((IRoutesOperations)s).BeginDeleteAsync(resourceGroupName, routeTableName, routeName), operations, CancellationToken.None, TaskCreationOptions.None, TaskScheduler.Default).Unwrap().GetAwaiter().GetResult();
        }

        /// <summary>
        /// The delete route operation deletes the specified route from a route table.
        /// </summary>
        /// <param name='operations'>
        /// The operations group for this extension method.
        /// </param>
        /// <param name='resourceGroupName'>
        /// The name of the resource group.
        /// </param>
        /// <param name='routeTableName'>
        /// The name of the route table.
        /// </param>
        /// <param name='routeName'>
        /// The name of the route.
        /// </param>
        /// <param name='cancellationToken'>
        /// The cancellation token.
        /// </param>
        public static async Task BeginDeleteAsync(this IRoutesOperations operations, string resourceGroupName, string routeTableName, string routeName, CancellationToken cancellationToken = default(CancellationToken))
        {
            await operations.BeginDeleteWithHttpMessagesAsync(resourceGroupName, routeTableName, routeName, null, cancellationToken).ConfigureAwait(false);
        }

        /// <summary>
        /// The Get route operation retrieves information about the specified route
        /// from the route table.
        /// </summary>
        /// <param name='operations'>
        /// The operations group for this extension method.
        /// </param>
        /// <param name='resourceGroupName'>
        /// The name of the resource group.
        /// </param>
        /// <param name='routeTableName'>
        /// The name of the route table.
        /// </param>
        /// <param name='routeName'>
        /// The name of the route.
        /// </param>
        public static Route Get(this IRoutesOperations operations, string resourceGroupName, string routeTableName, string routeName)
        {
            return Task.Factory.StartNew(s => ((IRoutesOperations)s).GetAsync(resourceGroupName, routeTableName, routeName), operations, CancellationToken.None, TaskCreationOptions.None, TaskScheduler.Default).Unwrap().GetAwaiter().GetResult();
        }

        /// <summary>
        /// The Get route operation retrieves information about the specified route
        /// from the route table.
        /// </summary>
        /// <param name='operations'>
        /// The operations group for this extension method.
        /// </param>
        /// <param name='resourceGroupName'>
        /// The name of the resource group.
        /// </param>
        /// <param name='routeTableName'>
        /// The name of the route table.
        /// </param>
        /// <param name='routeName'>
        /// The name of the route.
        /// </param>
        /// <param name='cancellationToken'>
        /// The cancellation token.
        /// </param>
        public static async Task<Route> GetAsync(this IRoutesOperations operations, string resourceGroupName, string routeTableName, string routeName, CancellationToken cancellationToken = default(CancellationToken))
        {
            AzureOperationResponse<Route> result = await operations.GetWithHttpMessagesAsync(resourceGroupName, routeTableName, routeName, null, cancellationToken).ConfigureAwait(false);
            return result.Body;
        }

        /// <summary>
        /// The Put route operation creates/updates a route in the specified route
        /// table
        /// </summary>
        /// <param name='operations'>
        /// The operations group for this extension method.
        /// </param>
        /// <param name='resourceGroupName'>
        /// The name of the resource group.
        /// </param>
        /// <param name='routeTableName'>
        /// The name of the route table.
        /// </param>
        /// <param name='routeName'>
        /// The name of the route.
        /// </param>
        /// <param name='routeParameters'>
        /// Parameters supplied to the create/update route operation
        /// </param>
        public static Route CreateOrUpdate(this IRoutesOperations operations, string resourceGroupName, string routeTableName, string routeName, Route routeParameters)
        {
            return Task.Factory.StartNew(s => ((IRoutesOperations)s).CreateOrUpdateAsync(resourceGroupName, routeTableName, routeName, routeParameters), operations, CancellationToken.None, TaskCreationOptions.None, TaskScheduler.Default).Unwrap().GetAwaiter().GetResult();
        }

        /// <summary>
        /// The Put route operation creates/updates a route in the specified route
        /// table
        /// </summary>
        /// <param name='operations'>
        /// The operations group for this extension method.
        /// </param>
        /// <param name='resourceGroupName'>
        /// The name of the resource group.
        /// </param>
        /// <param name='routeTableName'>
        /// The name of the route table.
        /// </param>
        /// <param name='routeName'>
        /// The name of the route.
        /// </param>
        /// <param name='routeParameters'>
        /// Parameters supplied to the create/update route operation
        /// </param>
        /// <param name='cancellationToken'>
        /// The cancellation token.
        /// </param>
        public static async Task<Route> CreateOrUpdateAsync(this IRoutesOperations operations, string resourceGroupName, string routeTableName, string routeName, Route routeParameters, CancellationToken cancellationToken = default(CancellationToken))
        {
            AzureOperationResponse<Route> result = await operations.CreateOrUpdateWithHttpMessagesAsync(resourceGroupName, routeTableName, routeName, routeParameters, null, cancellationToken).ConfigureAwait(false);
            return result.Body;
        }

        /// <summary>
        /// The Put route operation creates/updates a route in the specified route
        /// table
        /// </summary>
        /// <param name='operations'>
        /// The operations group for this extension method.
        /// </param>
        /// <param name='resourceGroupName'>
        /// The name of the resource group.
        /// </param>
        /// <param name='routeTableName'>
        /// The name of the route table.
        /// </param>
        /// <param name='routeName'>
        /// The name of the route.
        /// </param>
        /// <param name='routeParameters'>
        /// Parameters supplied to the create/update route operation
        /// </param>
        public static Route BeginCreateOrUpdate(this IRoutesOperations operations, string resourceGroupName, string routeTableName, string routeName, Route routeParameters)
        {
            return Task.Factory.StartNew(s => ((IRoutesOperations)s).BeginCreateOrUpdateAsync(resourceGroupName, routeTableName, routeName, routeParameters), operations, CancellationToken.None, TaskCreationOptions.None, TaskScheduler.Default).Unwrap().GetAwaiter().GetResult();
        }

        /// <summary>
        /// The Put route operation creates/updates a route in the specified route
        /// table
        /// </summary>
        /// <param name='operations'>
        /// The operations group for this extension method.
        /// </param>
        /// <param name='resourceGroupName'>
        /// The name of the resource group.
        /// </param>
        /// <param name='routeTableName'>
        /// The name of the route table.
        /// </param>
        /// <param name='routeName'>
        /// The name of the route.
        /// </param>
        /// <param name='routeParameters'>
        /// Parameters supplied to the create/update route operation
        /// </param>
        /// <param name='cancellationToken'>
        /// The cancellation token.
        /// </param>
        public static async Task<Route> BeginCreateOrUpdateAsync(this IRoutesOperations operations, string resourceGroupName, string routeTableName, string routeName, Route routeParameters, CancellationToken cancellationToken = default(CancellationToken))
        {
            AzureOperationResponse<Route> result = await operations.BeginCreateOrUpdateWithHttpMessagesAsync(resourceGroupName, routeTableName, routeName, routeParameters, null, cancellationToken).ConfigureAwait(false);
            return result.Body;
        }

        /// <summary>
        /// The List operation retrieves all the routes in a
        /// route table.
        /// </summary>
        /// <param name='operations'>
        /// The operations group for this extension method.
        /// </param>
        /// <param name='resourceGroupName'>
        /// The name of the resource group.
        /// </param>
        /// <param name='routeTableName'>
        /// The name of the route table.
        /// </param>
        public static IPage<Route> List(this IRoutesOperations operations, string resourceGroupName, string routeTableName)
        {
            return Task.Factory.StartNew(s => ((IRoutesOperations)s).ListAsync(resourceGroupName, routeTableName), operations, CancellationToken.None, TaskCreationOptions.None, TaskScheduler.Default).Unwrap().GetAwaiter().GetResult();
        }

        /// <summary>
        /// The List operation retrieves all the routes in a
        /// route table.
        /// </summary>
        /// <param name='operations'>
        /// The operations group for this extension method.
        /// </param>
        /// <param name='resourceGroupName'>
        /// The name of the resource group.
        /// </param>
        /// <param name='routeTableName'>
        /// The name of the route table.
        /// </param>
        /// <param name='cancellationToken'>
        /// The cancellation token.
        /// </param>
        public static async Task<IPage<Route>> ListAsync(this IRoutesOperations operations, string resourceGroupName, string routeTableName, CancellationToken cancellationToken = default(CancellationToken))
        {
            AzureOperationResponse<IPage<Route>> result = await operations.ListWithHttpMessagesAsync(resourceGroupName, routeTableName, null, cancellationToken).ConfigureAwait(false);
            return result.Body;
        }

        /// <summary>
        /// The List operation retrieves all the routes in a
        /// route table.
        /// </summary>
        /// <param name='operations'>
        /// The operations group for this extension method.
        /// </param>
        /// <param name='nextPageLink'>
        /// The NextLink from the previous successful call to List operation.
        /// </param>
        public static IPage<Route> ListNext(this IRoutesOperations operations, string nextPageLink)
        {
            return Task.Factory.StartNew(s => ((IRoutesOperations)s).ListNextAsync(nextPageLink), operations, CancellationToken.None, TaskCreationOptions.None, TaskScheduler.Default).Unwrap().GetAwaiter().GetResult();
        }

        /// <summary>
        /// The List operation retrieves all the routes in a
        /// route table.
        /// </summary>
        /// <param name='operations'>
        /// The operations group for this extension method.
        /// </param>
        /// <param name='nextPageLink'>
        /// The NextLink from the previous successful call to List operation.
        /// </param>
        /// <param name='cancellationToken'>
        /// The cancellation token.
        /// </param>
        public static async Task<IPage<Route>> ListNextAsync(this IRoutesOperations operations, string nextPageLink, CancellationToken cancellationToken = default(CancellationToken))
        {
            AzureOperationResponse<IPage<Route>> result = await operations.ListNextWithHttpMessagesAsync(nextPageLink, null, cancellationToken).ConfigureAwait(false);
            return result.Body;
        }
    }
}
// CodeContracts
//
// Copyright (c) Microsoft Corporation
//
// All rights reserved.
//
// MIT License
//
// Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions:
//
// The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED *AS IS*, WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.

#if !SILVERLIGHT

using System;
using System.Collections;
using System.Reflection;
using System.Diagnostics.Contracts;

namespace System.Security.Cryptography.X509Certificates
{
  // Contract reference class for X509CertificateCollection: method bodies are
  // stubs (`extern` or `return default(...)`); only the Contract.Requires /
  // Contract.Ensures annotations are meaningful to the CodeContracts checker.
  public class X509CertificateCollection : CollectionBase
  {
    // Initializes an empty collection.
    extern public X509CertificateCollection();

    // Initializes the collection from an array of certificates.
    // Contract: value must be non-null.
    public X509CertificateCollection(X509Certificate[] value)
    {
      Contract.Requires(value != null);
    }

    // Initializes the collection from another collection.
    // Contract: value must be non-null.
    public X509CertificateCollection(X509CertificateCollection value)
    {
      Contract.Requires(value != null);
    }

    // Indexer over the collection.
    // Contract: index must lie within [0, Count) for both get and set
    // (otherwise ArgumentOutOfRangeException at runtime).
    public X509Certificate this[int index]
    {
      get
      {
        Contract.Requires(index >= 0);
        Contract.Requires(index < this.Count);
        return default(X509Certificate);
      }
      set
      {
        Contract.Requires(index >= 0);
        Contract.Requires(index < this.Count);
      }
    }

    // Adds a certificate to the collection.
    // Contract: the returned insertion index lies within [0, Count).
    public int Add(X509Certificate value)
    {
      Contract.Ensures(Contract.Result<int>() >= 0);
      Contract.Ensures(Contract.Result<int>() < this.Count);
      return default(int);
    }

    // Appends an array of certificates. Contract: value must be non-null.
    public void AddRange(X509Certificate[] value)
    {
      Contract.Requires(value != null);
    }

    // Appends another collection's certificates. Contract: value non-null.
    public void AddRange(X509CertificateCollection value)
    {
      Contract.Requires(value != null);
    }

    // Returns true if the certificate is contained in this collection.
    [Pure]
    public bool Contains(X509Certificate value)
    {
      return default(bool);
    }

    // Copies the collection into 'array' starting at 'index'.
    // Contract: array non-null; index within [0, array.Length).
    public void CopyTo(X509Certificate[] array, int index)
    {
      Contract.Requires(array != null);
      //Contract.Requires(array.Rank == 1);
      Contract.Requires(index >= 0);
      Contract.Requires(index < array.Length);
    }

    // Returns a type-safe enumerator over the collection.
    // Contract: the result is never null.
    public new X509CertificateEnumerator GetEnumerator()
    {
      Contract.Ensures(Contract.Result<X509CertificateEnumerator>() != null);
      return default(X509CertificateEnumerator);
    }

    // Returns the index of the certificate, or -1 when absent.
    // Contract: the result lies within [-1, Count).
    [Pure]
    public int IndexOf(X509Certificate value)
    {
      Contract.Ensures(Contract.Result<int>() >= -1);
      Contract.Ensures(Contract.Result<int>() < this.Count);
      return default(int);
    }

    // Inserts 'value' at 'index'. Contract: index must be non-negative.
    public void Insert(int index, X509Certificate value)
    {
      Contract.Requires(index >= 0);
    }

    // Removes the certificate; at runtime throws ArgumentException when the
    // certificate is not present in the collection.
    extern public void Remove(X509Certificate value);

    // Enumerator stub for the contract reference assembly (interface
    // implementation intentionally commented out).
    public class X509CertificateEnumerator // : IEnumerator
    {
    }
  }
}

#endif
//*********************************************************
//
// Copyright (c) Microsoft. All rights reserved.
// This code is licensed under the MIT License (MIT).
// THIS CODE IS PROVIDED *AS IS* WITHOUT WARRANTY OF
// ANY KIND, EITHER EXPRESS OR IMPLIED, INCLUDING ANY
// IMPLIED WARRANTIES OF FITNESS FOR A PARTICULAR
// PURPOSE, MERCHANTABILITY, OR NON-INFRINGEMENT.
//
//*********************************************************

using System;
using System.Collections.Generic;
using System.Collections.ObjectModel;
using System.IO;
using System.Linq;
using System.Runtime.InteropServices.WindowsRuntime;
using System.Threading.Tasks;
using Windows.Devices.Enumeration;
using Windows.Devices.WiFiDirect;
using Windows.Networking;
using Windows.Networking.Sockets;
using Windows.Security.Cryptography;
using Windows.Storage.Streams;
using Windows.UI.Core;
using Windows.UI.Xaml;
using Windows.UI.Xaml.Controls;
using Windows.UI.Xaml.Navigation;

// The Blank Page item template is documented at http://go.microsoft.com/fwlink/?LinkId=234238

namespace SDKTemplate
{
    /// <summary>
    /// Wi-Fi Direct "connector" scenario page: advertises this device, watches
    /// for nearby Wi-Fi Direct devices, connects to a selected one (L2 then a
    /// TCP socket on L4), and exchanges text messages over that socket.
    /// </summary>
    public sealed partial class Scenario2_Connector : Page
    {
        private MainPage rootPage = MainPage.Current;

        // Active device watcher; null while not watching (used as the toggle
        // state for btnWatcher_Click).
        DeviceWatcher _deviceWatcher = null;
        // Set once the watcher has been started; never read in this file —
        // NOTE(review): possibly vestigial, confirm before removing.
        bool _fWatcherStarted = false;

        // Advertises this device so peers can discover us while we scan.
        WiFiDirectAdvertisementPublisher _publisher = new WiFiDirectAdvertisementPublisher();

        // Bound to the two ListViews in XAML (lvDiscoveredDevices / lvConnectedDevices).
        public ObservableCollection<DiscoveredDevice> DiscoveredDevices { get; } = new ObservableCollection<DiscoveredDevice>();
        public ObservableCollection<ConnectedDevice> ConnectedDevices { get; } = new ObservableCollection<ConnectedDevice>();

        public Scenario2_Connector()
        {
            this.InitializeComponent();
        }

        // Tear down the watcher when leaving the page so its event handlers do
        // not keep this page alive.
        protected override void OnNavigatedFrom(NavigationEventArgs e)
        {
            if (_deviceWatcher != null)
            {
                StopWatcher();
            }
        }

        // Unhooks all watcher handlers before stopping, then drops the watcher
        // (null signals "not watching" to btnWatcher_Click).
        private void StopWatcher()
        {
            _deviceWatcher.Added -= OnDeviceAdded;
            _deviceWatcher.Removed -= OnDeviceRemoved;
            _deviceWatcher.Updated -= OnDeviceUpdated;
            _deviceWatcher.EnumerationCompleted -= OnEnumerationCompleted;
            _deviceWatcher.Stopped -= OnStopped;

            _deviceWatcher.Stop();

            _deviceWatcher = null;
        }

        // Toggle button: starts advertisement + device watching, or stops both.
        private void btnWatcher_Click(object sender, RoutedEventArgs e)
        {
            if (_deviceWatcher == null)
            {
                _publisher.Start();
                if (_publisher.Status != WiFiDirectAdvertisementPublisherStatus.Started)
                {
                    rootPage.NotifyUser("Failed to start advertisement.", NotifyType.ErrorMessage);
                    return;
                }

                DiscoveredDevices.Clear();
                rootPage.NotifyUser("Finding Devices...", NotifyType.StatusMessage);

                String deviceSelector = WiFiDirectDevice.GetDeviceSelector(
                    Utils.GetSelectedItemTag<WiFiDirectDeviceSelectorType>(cmbDeviceSelector));

                // Request the information-elements property up front so
                // btnIe_Click can read it from the cached DeviceInformation.
                _deviceWatcher = DeviceInformation.CreateWatcher(deviceSelector, new string[] { "System.Devices.WiFiDirect.InformationElements" });

                _deviceWatcher.Added += OnDeviceAdded;
                _deviceWatcher.Removed += OnDeviceRemoved;
                _deviceWatcher.Updated += OnDeviceUpdated;
                _deviceWatcher.EnumerationCompleted += OnEnumerationCompleted;
                _deviceWatcher.Stopped += OnStopped;

                _deviceWatcher.Start();

                btnWatcher.Content = "Stop Watcher";
                _fWatcherStarted = true;
            }
            else
            {
                _publisher.Stop();

                btnWatcher.Content = "Start Watcher";
                StopWatcher();
                rootPage.NotifyUser("Device watcher stopped.", NotifyType.StatusMessage);
            }
        }

        #region DeviceWatcherEvents
        // Watcher callbacks arrive on a worker thread; marshal to the UI
        // thread before touching the bound observable collections.
        private async void OnDeviceAdded(DeviceWatcher deviceWatcher, DeviceInformation deviceInfo)
        {
            await Dispatcher.RunAsync(CoreDispatcherPriority.Normal, () =>
            {
                DiscoveredDevices.Add(new DiscoveredDevice(deviceInfo));
            });
        }

        private async void OnDeviceRemoved(DeviceWatcher deviceWatcher, DeviceInformationUpdate deviceInfoUpdate)
        {
            await Dispatcher.RunAsync(CoreDispatcherPriority.Normal, () =>
            {
                foreach (DiscoveredDevice discoveredDevice in DiscoveredDevices)
                {
                    if (discoveredDevice.DeviceInfo.Id == deviceInfoUpdate.Id)
                    {
                        // Break immediately: removing while enumerating is only
                        // safe because the loop exits right away.
                        DiscoveredDevices.Remove(discoveredDevice);
                        break;
                    }
                }
            });
        }

        private async void OnDeviceUpdated(DeviceWatcher deviceWatcher, DeviceInformationUpdate deviceInfoUpdate)
        {
            await Dispatcher.RunAsync(CoreDispatcherPriority.Normal, () =>
            {
                foreach (DiscoveredDevice discoveredDevice in DiscoveredDevices)
                {
                    if (discoveredDevice.DeviceInfo.Id == deviceInfoUpdate.Id)
                    {
                        discoveredDevice.UpdateDeviceInfo(deviceInfoUpdate);
                        break;
                    }
                }
            });
        }

        private void OnEnumerationCompleted(DeviceWatcher deviceWatcher, object o)
        {
            rootPage.NotifyUserFromBackground("DeviceWatcher enumeration completed", NotifyType.StatusMessage);
        }

        private void OnStopped(DeviceWatcher deviceWatcher, object o)
        {
            rootPage.NotifyUserFromBackground("DeviceWatcher stopped", NotifyType.StatusMessage);
        }
        #endregion

        // Dumps the selected device's Wi-Fi Direct information elements,
        // decoding the OUI (Microsoft / WFA / sample-custom) and, for the
        // custom OUI type, a length-prefixed UTF-8 payload string.
        // NOTE(review): unlike btnFromId_Click there is no null check on the
        // selection — clicking with nothing selected would throw; confirm the
        // button is disabled in that state.
        private void btnIe_Click(object sender, RoutedEventArgs e)
        {
            var discoveredDevice = (DiscoveredDevice)lvDiscoveredDevices.SelectedItem;

            IList<WiFiDirectInformationElement> informationElements = null;
            try
            {
                informationElements = WiFiDirectInformationElement.CreateFromDeviceInformation(discoveredDevice.DeviceInfo);
            }
            catch (Exception ex)
            {
                rootPage.NotifyUser("No Information element found: " + ex.Message, NotifyType.ErrorMessage);
            }

            if (informationElements != null)
            {
                StringWriter message = new StringWriter();
                foreach (WiFiDirectInformationElement informationElement in informationElements)
                {
                    string ouiName = CryptographicBuffer.EncodeToHexString(informationElement.Oui);
                    string value = string.Empty;
                    Byte[] bOui = informationElement.Oui.ToArray();

                    if (bOui.SequenceEqual(Globals.MsftOui))
                    {
                        // The format of Microsoft information elements is documented here:
                        // https://msdn.microsoft.com/en-us/library/dn392651.aspx
                        // with errata here:
                        // https://msdn.microsoft.com/en-us/library/mt242386.aspx
                        ouiName += " (Microsoft)";
                    }
                    else if (bOui.SequenceEqual(Globals.WfaOui))
                    {
                        ouiName += " (WFA)";
                    }
                    else if (bOui.SequenceEqual(Globals.CustomOui))
                    {
                        ouiName += " (Custom)";
                        if (informationElement.OuiType == Globals.CustomOuiType)
                        {
                            DataReader dataReader = DataReader.FromBuffer(informationElement.Value);
                            dataReader.UnicodeEncoding = UnicodeEncoding.Utf8;
                            dataReader.ByteOrder = ByteOrder.LittleEndian;

                            // Read the string (UInt32 length prefix, then UTF-8 bytes).
                            try
                            {
                                string data = dataReader.ReadString(dataReader.ReadUInt32());
                                value = $"Data: {data}";
                            }
                            catch (Exception)
                            {
                                value = "(Unable to parse)";
                            }
                        }
                    }
                    message.WriteLine($"OUI {ouiName}, Type {informationElement.OuiType} {value}");
                }
                message.Write($"Information elements found: {informationElements.Count}");
                rootPage.NotifyUser(message.ToString(), NotifyType.StatusMessage);
            }
        }

        // Pairs (if needed) and connects to the selected device, then opens a
        // TCP socket to the advertiser and starts a read loop on it.
        private async void btnFromId_Click(object sender, RoutedEventArgs e)
        {
            var discoveredDevice = (DiscoveredDevice)lvDiscoveredDevices.SelectedItem;
            if (discoveredDevice == null)
            {
                rootPage.NotifyUser("No device selected, please select one.", NotifyType.ErrorMessage);
                return;
            }

            rootPage.NotifyUser($"Connecting to {discoveredDevice.DeviceInfo.Name}...", NotifyType.StatusMessage);

            if (!discoveredDevice.DeviceInfo.Pairing.IsPaired)
            {
                if (!await connectionSettingsPanel.RequestPairDeviceAsync(discoveredDevice.DeviceInfo.Pairing))
                {
                    return;
                }
            }

            WiFiDirectDevice wfdDevice = null;
            try
            {
                // IMPORTANT: FromIdAsync needs to be called from the UI thread
                wfdDevice = await WiFiDirectDevice.FromIdAsync(discoveredDevice.DeviceInfo.Id);
            }
            catch (TaskCanceledException)
            {
                rootPage.NotifyUser("FromIdAsync was canceled by user", NotifyType.ErrorMessage);
                return;
            }

            // Register for the ConnectionStatusChanged event handler
            wfdDevice.ConnectionStatusChanged += OnConnectionStatusChanged;

            IReadOnlyList<EndpointPair> endpointPairs = wfdDevice.GetConnectionEndpointPairs();
            HostName remoteHostName = endpointPairs[0].RemoteHostName;

            rootPage.NotifyUser($"Devices connected on L2 layer, connecting to IP Address: {remoteHostName} Port: {Globals.strServerPort}", NotifyType.StatusMessage);

            // Wait for server to start listening on a socket
            await Task.Delay(2000);

            // Connect to Advertiser on L4 layer
            StreamSocket clientSocket = new StreamSocket();
            try
            {
                await clientSocket.ConnectAsync(remoteHostName, Globals.strServerPort);
                rootPage.NotifyUser("Connected with remote side on L4 layer", NotifyType.StatusMessage);
            }
            catch (Exception ex)
            {
                rootPage.NotifyUser($"Connect operation threw an exception: {ex.Message}", NotifyType.ErrorMessage);
                return;
            }

            SocketReaderWriter socketRW = new SocketReaderWriter(clientSocket, rootPage);

            string sessionId = Path.GetRandomFileName();
            ConnectedDevice connectedDevice = new ConnectedDevice(sessionId, wfdDevice, socketRW);
            ConnectedDevices.Add(connectedDevice);

            // The first message sent over the socket is the name of the connection.
            await socketRW.WriteMessageAsync(sessionId);

            // Pump incoming messages until the peer closes the connection
            // (ReadMessageAsync returning null ends the loop).
            while (await socketRW.ReadMessageAsync() != null)
            {
                // Keep reading messages
            }
        }

        private void OnConnectionStatusChanged(WiFiDirectDevice sender, object arg)
        {
            rootPage.NotifyUserFromBackground($"Connection status changed: {sender.ConnectionStatus}", NotifyType.StatusMessage);
        }

        // NOTE(review): the three handlers below assume a non-null selection —
        // confirm the buttons are disabled when nothing is selected.
        private async void btnSendMessage_Click(object sender, RoutedEventArgs e)
        {
            var connectedDevice = (ConnectedDevice)lvConnectedDevices.SelectedItem;
            await connectedDevice.SocketRW.WriteMessageAsync(txtSendMessage.Text);
        }

        private void btnClose_Click(object sender, RoutedEventArgs e)
        {
            var connectedDevice = (ConnectedDevice)lvConnectedDevices.SelectedItem;
            ConnectedDevices.Remove(connectedDevice);

            // Close socket and WiFiDirect object
            connectedDevice.Dispose();
        }

        private async void btnUnpair_Click(object sender, RoutedEventArgs e)
        {
            var discoveredDevice = (DiscoveredDevice)lvDiscoveredDevices.SelectedItem;
            DeviceUnpairingResult result = await discoveredDevice.DeviceInfo.Pairing.UnpairAsync();
            rootPage.NotifyUser($"Unpair result: {result.Status}", NotifyType.StatusMessage);
        }
    }
}
/*
 * Copyright (c) Contributors, http://opensimulator.org/
 * See CONTRIBUTORS.TXT for a full list of copyright holders.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions are met:
 *     * Redistributions of source code must retain the above copyright
 *       notice, this list of conditions and the following disclaimer.
 *     * Redistributions in binary form must reproduce the above copyright
 *       notice, this list of conditions and the following disclaimer in the
 *       documentation and/or other materials provided with the distribution.
 *     * Neither the name of the OpenSimulator Project nor the
 *       names of its contributors may be used to endorse or promote products
 *       derived from this software without specific prior written permission.
 *
 * THIS SOFTWARE IS PROVIDED BY THE DEVELOPERS ``AS IS'' AND ANY
 * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
 * WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
 * DISCLAIMED. IN NO EVENT SHALL THE CONTRIBUTORS BE LIABLE FOR ANY
 * DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
 * (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
 * LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
 * ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
 * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
 * SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 */

using OpenMetaverse;
using OpenSim.Framework;
using OpenSim.Region.Physics.Manager;

namespace OpenSim.Region.Physics.POSPlugin
{
    /// <summary>
    /// Avatar actor for the POS physics plugin: a mostly inert PhysicsActor
    /// that stores position, size and a target velocity, while stubbing out
    /// every dynamic feature (forces, PID, buoyancy, vehicles, events).
    /// </summary>
    public class POSCharacter : PhysicsActor
    {
        public Vector3 _size = Vector3.Zero;
        // Velocity requested via the Velocity setter; presumably integrated
        // into _velocity/_position by the plugin's scene step — TODO confirm.
        public Vector3 _target_velocity = Vector3.Zero;
        public Vector3 _velocity;
        private Vector3 _acceleration;
        private Vector3 _position;
        private bool flying;
        private bool isColliding;
        private Vector3 m_rotationalVelocity = Vector3.Zero;

        public POSCharacter()
        {
        }

        public override Vector3 Acceleration
        {
            get { return _acceleration; }
            set { _acceleration = value; }
        }

        // The APID (attractor-PID) knobs are ignored: setters discard values.
        public override bool APIDActive { set { return; } }
        public override float APIDDamping { set { return; } }
        public override float APIDStrength { set { return; } }
        public override Quaternion APIDTarget { set { return; } }

        // Buoyancy/mass/force-related members return fixed defaults.
        public override float Buoyancy
        {
            get { return 0f; }
            set { return; }
        }

        public override Vector3 CenterOfMass
        {
            get { return Vector3.Zero; }
        }

        public override bool CollidingGround
        {
            get { return false; }
            set { return; }
        }

        public override bool CollidingObj
        {
            get { return false; }
            set { return; }
        }

        public override float CollisionScore
        {
            get { return 0f; }
            set { }
        }

        public override bool FloatOnWater { set { return; } }

        public override bool Flying
        {
            get { return flying; }
            set { flying = value; }
        }

        public override Vector3 Force
        {
            get { return Vector3.Zero; }
            set { return; }
        }

        public override Vector3 GeometricCenter
        {
            get { return Vector3.Zero; }
        }

        public override bool Grabbed { set { return; } }

        public override bool IsColliding
        {
            get { return isColliding; }
            set { isColliding = value; }
        }

        // Never physical: this plugin moves avatars kinematically.
        public override bool IsPhysical
        {
            get { return false; }
            set { return; }
        }

        public override bool Kinematic
        {
            get { return true; }
            set { }
        }

        public override uint LocalID { set { return; } }

        public override float Mass
        {
            get { return 0f; }
        }

        public override Quaternion Orientation
        {
            get { return Quaternion.Identity; }
            set { }
        }

        public override int PhysicsActorType
        {
            get { return (int)ActorTypes.Agent; }
            set { return; }
        }

        // PID hover support is unimplemented: all setters discard values.
        public override bool PIDActive { set { return; } }
        public override bool PIDHoverActive { set { return; } }
        public override float PIDHoverHeight { set { return; } }
        public override float PIDHoverTau { set { return; } }
        public override PIDHoverType PIDHoverType { set { return; } }
        public override Vector3 PIDTarget { set { return; } }
        public override float PIDTau { set { return; } }

        public override Vector3 Position
        {
            get { return _position; }
            set { _position = value; }
        }

        public override Vector3 RotationalVelocity
        {
            get { return m_rotationalVelocity; }
            set { m_rotationalVelocity = value; }
        }

        public override bool Selected { set { return; } }

        public override bool SetAlwaysRun
        {
            get { return false; }
            set { return; }
        }

        public override PrimitiveBaseShape Shape { set { return; } }

        // Setter halves Z: _size.Z stores the avatar's half-height.
        public override Vector3 Size
        {
            get { return _size; }
            set
            {
                _size = value;
                _size.Z = _size.Z / 2.0f;
            }
        }

        public override bool Stopped
        {
            get { return false; }
        }

        public override bool ThrottleUpdates
        {
            get { return false; }
            set { return; }
        }

        public override Vector3 Torque
        {
            get { return Vector3.Zero; }
            set { return; }
        }

        public override int VehicleType
        {
            get { return 0; }
            set { return; }
        }

        // Getter reports the actual velocity; setter records a *target*
        // velocity rather than writing _velocity directly.
        public override Vector3 Velocity
        {
            get { return _velocity; }
            set { _target_velocity = value; }
        }

        // All dynamics/vehicle/event operations are deliberate no-ops.
        public override void AddAngularForce(Vector3 force, bool pushforce)
        {
        }

        public override void AddForce(Vector3 force, bool pushforce)
        {
        }

        public override void CrossingFailure()
        {
        }

        public override void delink()
        {
        }

        public override void link(PhysicsActor obj)
        {
        }

        public override void LockAngularMotion(Vector3 axis)
        {
        }

        public override void SetMomentum(Vector3 momentum)
        {
        }

        public override void SetVolumeDetect(int param)
        {
        }

        // No collision/update events are ever raised by this actor.
        public override bool SubscribedEvents()
        {
            return false;
        }

        public override void SubscribeEvents(int ms)
        {
        }

        public override void UnSubscribeEvents()
        {
        }

        public override void VehicleFlags(int param, bool remove)
        {
        }

        public override void VehicleFloatParam(int param, float value)
        {
        }

        public override void VehicleRotationParam(int param, Quaternion rotation)
        {
        }

        public override void VehicleVectorParam(int param, Vector3 value)
        {
        }
    }
}
using System;
using System.Security.Cryptography;
using System.Collections.Generic;
using System.Text;

/*
 * Do not refactor this class too much.
 * Should function as a reference implementation.
 *
 */
namespace SevenDigital.Api.Wrapper.EndpointResolution.OAuth
{
	public class OAuthBase
	{
		/// <summary>
		/// Provides a predefined set of algorithms that are supported officially by the protocol
		/// </summary>
		public enum SignatureTypes
		{
			HMACSHA1,
			PLAINTEXT,
			RSASHA1
		}

		/// <summary>
		/// Provides an internal structure to sort the query parameters
		/// </summary>
		protected class QueryParameter
		{
			private string name = null;
			private string value = null;

			public QueryParameter(string name, string value)
			{
				this.name = name;
				this.value = value;
			}

			public string Name
			{
				get { return name; }
			}

			public string Value
			{
				get { return value; }
			}
		}

		/// <summary>
		/// Comparer class used to perform the sorting of the query parameters.
		/// Orders by name, then by value for equal names (culture-sensitive
		/// string.Compare, matching the original reference behavior).
		/// </summary>
		protected class QueryParameterComparer : IComparer<QueryParameter>
		{
			public int Compare(QueryParameter x, QueryParameter y)
			{
				if (x.Name == y.Name)
				{
					return string.Compare(x.Value, y.Value);
				}
				else
				{
					return string.Compare(x.Name, y.Name);
				}
			}
		}

		public const string OAuthVersion = "1.0";
		protected const string OAuthParameterPrefix = "oauth_";
		// When true, the oauth_version parameter is included in requests.
		public bool includeVersion = true;

		//
		// List of known and used oauth parameters' names
		//
		public const string OAuthConsumerKeyKey = "oauth_consumer_key";
		public const string OAuthCallbackKey = "oauth_callback";
		public const string OAuthVersionKey = "oauth_version";
		public const string OAuthSignatureMethodKey = "oauth_signature_method";
		public const string OAuthSignatureKey = "oauth_signature";
		public const string OAuthTimestampKey = "oauth_timestamp";
		public const string OAuthNonceKey = "oauth_nonce";
		public const string OAuthTokenKey = "oauth_token";
		public const string OAuthTokenSecretKey = "oauth_token_secret";

		public const string HMACSHA1SignatureType = "HMAC-SHA1";
		public const string PlainTextSignatureType = "PLAINTEXT";
		public const string RSASHA1SignatureType = "RSA-SHA1";

		protected Random random = new Random();

		// Characters that must NOT be percent-encoded (RFC 3986 unreserved set).
		protected static string unreservedChars = "abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789-_.~";

		/// <summary>
		/// Helper function to compute a hash value
		/// </summary>
		/// <param name="hashAlgorithm">The hashing algorithm used. If that algorithm needs some initialization, like HMAC and its derivatives, they should be initialized prior to passing it to this function</param>
		/// <param name="data">The data to hash</param>
		/// <returns>a Base64 string of the hash value</returns>
		/// <exception cref="ArgumentNullException">When hashAlgorithm is null or data is null/empty</exception>
		private string ComputeHash(HashAlgorithm hashAlgorithm, string data)
		{
			if (hashAlgorithm == null)
			{
				throw new ArgumentNullException("hashAlgorithm");
			}

			if (string.IsNullOrEmpty(data))
			{
				throw new ArgumentNullException("data");
			}

			byte[] dataBuffer = System.Text.Encoding.UTF8.GetBytes(data);
			byte[] hashBytes = hashAlgorithm.ComputeHash(dataBuffer);

			return Convert.ToBase64String(hashBytes);
		}

		/// <summary>
		/// Internal function to cut out all non oauth query string parameters (all parameters not beginning with "oauth_")
		/// </summary>
		/// <param name="parameters">The query string part of the Url (a leading '?' is stripped)</param>
		/// <returns>A list of QueryParameter each containing the parameter name and value; a parameter without '=' gets an empty value</returns>
		private List<QueryParameter> GetQueryParameters(string parameters)
		{
			if (parameters.StartsWith("?"))
			{
				parameters = parameters.Remove(0, 1);
			}

			List<QueryParameter> result = new List<QueryParameter>();

			if (!string.IsNullOrEmpty(parameters))
			{
				string[] p = parameters.Split('&');
				foreach (string s in p)
				{
					if (!string.IsNullOrEmpty(s) && !s.StartsWith(OAuthParameterPrefix))
					{
						if (s.IndexOf('=') > -1)
						{
							string[] temp = s.Split('=');
							result.Add(new QueryParameter(temp[0], temp[1]));
						}
						else
						{
							result.Add(new QueryParameter(s, string.Empty));
						}
					}
				}
			}

			return result;
		}

		/// <summary>
		/// This is a different Url Encode implementation since the default .NET one outputs the percent encoding in lower case.
		/// While this is not a problem with the percent encoding spec, it is used in upper case throughout OAuth
		/// </summary>
		/// <param name="value">The value to Url encode</param>
		/// <returns>Returns a Url encoded string</returns>
		public static string UrlEncode(string value)
		{
			StringBuilder result = new StringBuilder();

			foreach (char symbol in value)
			{
				if (unreservedChars.IndexOf(symbol) != -1)
				{
					result.Append(symbol);
				}
				else
				{
					// char + string concatenates; {0:X2} yields the required
					// upper-case two-digit hex form.
					result.Append('%' + String.Format("{0:X2}", (int)symbol));
				}
			}

			return result.ToString();
		}

		/// <summary>
		/// Normalizes the request parameters according to the spec
		/// </summary>
		/// <param name="parameters">The list of parameters already sorted</param>
		/// <returns>a string representing the normalized parameters</returns>
		protected string NormalizeRequestParameters(IList<QueryParameter> parameters)
		{
			StringBuilder sb = new StringBuilder();
			QueryParameter p = null;
			for (int i = 0; i < parameters.Count; i++)
			{
				p = parameters[i];
				// Only oauth_* values are re-encoded here; other values are
				// assumed to already be percent-encoded by the caller —
				// NOTE(review): confirm callers uphold that assumption.
				if (p.Name.StartsWith("oauth"))
				{
					sb.AppendFormat("{0}={1}", p.Name, UrlEncode(p.Value));
				}
				else
				{
					sb.AppendFormat("{0}={1}", p.Name, p.Value);
				}

				if (i < parameters.Count - 1)
				{
					sb.Append("&");
				}
			}

			return sb.ToString();
		}

		/// <summary>
		/// Generate the signature base that is used to produce the signature
		/// </summary>
		/// <param name="url">The full url that needs to be signed including its non OAuth url parameters</param>
		/// <param name="consumerKey">The consumer key</param>
		/// <param name="token">The token, if available. If not available pass null or an empty string</param>
		/// <param name="tokenSecret">The token secret, if available. If not available pass null or an empty string</param>
		/// <param name="httpMethod">The http method used. Must be a valid HTTP method verb (POST,GET,PUT, etc)</param>
		/// <param name="signatureType">The signature type. To use the default values use <see cref="OAuthBase.SignatureTypes">OAuthBase.SignatureTypes</see>.</param>
		/// <returns>The signature base</returns>
		public string GenerateSignatureBase(Uri url, string consumerKey, string token, string tokenSecret, string httpMethod, IDictionary<string, string> postParams, string timeStamp, string nonce, string signatureType, out string normalizedUrl, out string normalizedRequestParameters, string oAuthVersion)
		{
			if (token == null)
			{
				token = string.Empty;
			}

			if (tokenSecret == null)
			{
				tokenSecret = string.Empty;
			}

			if (string.IsNullOrEmpty(consumerKey))
			{
				throw new ArgumentNullException("consumerKey");
			}

			if (string.IsNullOrEmpty(httpMethod))
			{
				throw new ArgumentNullException("httpMethod");
			}

			if (string.IsNullOrEmpty(signatureType))
			{
				throw new ArgumentNullException("signatureType");
			}

			normalizedUrl = null;
			normalizedRequestParameters = null;

			List<QueryParameter> parameters = GetQueryParameters(url.Query);
			if (!String.IsNullOrEmpty(oAuthVersion))
			{
				parameters.Add(new QueryParameter(OAuthVersionKey, oAuthVersion));
			}
			// NOTE(review): non-short-circuit '&' — harmless here because
			// httpMethod was validated above, but '&&' was probably intended.
			if (postParams != null & httpMethod.ToUpper() == "POST")
			{
				foreach (var key in postParams.Keys)
				{
					parameters.Add(new QueryParameter(key, postParams[key]));
				}
			}
			parameters.Add(new QueryParameter(OAuthNonceKey, nonce));
			parameters.Add(new QueryParameter(OAuthTimestampKey, timeStamp));
			parameters.Add(new QueryParameter(OAuthSignatureMethodKey, signatureType));
			parameters.Add(new QueryParameter(OAuthConsumerKeyKey, consumerKey));

			if (!string.IsNullOrEmpty(token))
			{
				parameters.Add(new QueryParameter(OAuthTokenKey, token));
			}

			parameters.Sort(new QueryParameterComparer());

			// Scheme://host, with the port appended only when non-default.
			normalizedUrl = string.Format("{0}://{1}", url.Scheme, url.Host);
			if (!((url.Scheme == "http" && url.Port == 80) || (url.Scheme == "https" && url.Port == 443)))
			{
				normalizedUrl += ":" + url.Port;
			}
			normalizedUrl += url.AbsolutePath;
			normalizedRequestParameters = NormalizeRequestParameters(parameters);

			// Signature base string: METHOD&encoded-url&encoded-parameters.
			StringBuilder signatureBase = new StringBuilder();
			signatureBase.AppendFormat("{0}&", httpMethod.ToUpper());
			signatureBase.AppendFormat("{0}&", UrlEncode(normalizedUrl));
			signatureBase.AppendFormat("{0}", UrlEncode(normalizedRequestParameters));

			return signatureBase.ToString();
		}

		/// <summary>
		/// Generate the signature value based on the given signature base and hash algorithm
		/// </summary>
		/// <param name="signatureBase">The signature based as produced by the GenerateSignatureBase method or by any other means</param>
		/// <param name="hash">The hash algorithm used to perform the hashing. If the hashing algorithm requires initialization or a key it should be set prior to calling this method</param>
		/// <returns>A base64 string of the hash value</returns>
		public string GenerateSignatureUsingHash(string signatureBase, HashAlgorithm hash)
		{
			return ComputeHash(hash, signatureBase);
		}

		/// <summary>
		/// Generates a signature using the HMAC-SHA1 algorithm
		/// </summary>
		/// <param name="url">The full url that needs to be signed including its non OAuth url parameters</param>
		/// <param name="consumerKey">The consumer key</param>
		/// <param name="consumerSecret">The consumer secret</param>
		/// <param name="token">The token, if available. If not available pass null or an empty string</param>
		/// <param name="tokenSecret">The token secret, if available. If not available pass null or an empty string</param>
		/// <param name="httpMethod">The http method used. Must be a valid HTTP method verb (POST,GET,PUT, etc)</param>
		/// <returns>A base64 string of the hash value</returns>
		public string GenerateSignature(Uri url, string consumerKey, string consumerSecret, string token, string tokenSecret, string httpMethod, string timeStamp, string nonce, out string normalizedUrl, out string normalizedRequestParameters, IDictionary<string, string> postParameters)
		{
			return GenerateSignature(url, consumerKey, consumerSecret, token, tokenSecret, httpMethod, timeStamp, nonce, SignatureTypes.HMACSHA1, out normalizedUrl, out normalizedRequestParameters, postParameters, OAuthVersion);
		}

		/// <summary>
		/// Generates a signature using the specified signatureType
		/// </summary>
		/// <param name="url">The full url that needs to be signed including its non OAuth url parameters</param>
		/// <param name="consumerKey">The consumer key</param>
		/// <param name="consumerSecret">The consumer secret</param>
		/// <param name="token">The token, if available. If not available pass null or an empty string</param>
		/// <param name="tokenSecret">The token secret, if available. If not available pass null or an empty string</param>
		/// <param name="httpMethod">The http method used. 
Must be a valid HTTP method verb (POST,GET,PUT, etc)</param> /// <param name="signatureType">The type of signature to use</param> /// <returns>A base64 string of the hash value</returns> public string GenerateSignature(Uri url, string consumerKey, string consumerSecret, string token, string tokenSecret, string httpMethod, string timeStamp, string nonce, SignatureTypes signatureType, out string normalizedUrl, out string normalizedRequestParameters, IDictionary<string, string> postParameters, string oAuthVersion) { normalizedUrl = null; normalizedRequestParameters = null; switch (signatureType) { case SignatureTypes.PLAINTEXT: return Uri.EscapeDataString(string.Format("{0}&{1}", consumerSecret, tokenSecret)); case SignatureTypes.HMACSHA1: string signatureBase = GenerateSignatureBase(url, consumerKey, token, tokenSecret, httpMethod, postParameters, timeStamp, nonce, HMACSHA1SignatureType, out normalizedUrl, out normalizedRequestParameters, oAuthVersion); HMACSHA1 hmacsha1 = new HMACSHA1(); hmacsha1.Key = Encoding.UTF8.GetBytes(string.Format("{0}&{1}", UrlEncode(consumerSecret), string.IsNullOrEmpty(tokenSecret) ? "" : UrlEncode(tokenSecret))); return GenerateSignatureUsingHash(signatureBase, hmacsha1); case SignatureTypes.RSASHA1: throw new NotImplementedException(); default: throw new ArgumentException("Unknown signature type", "signatureType"); } } /// <summary> /// Generate the timestamp for the signature /// </summary> /// <returns></returns> public virtual string GenerateTimeStamp() { // Default implementation of UNIX time of the current UTC time TimeSpan ts = DateTime.UtcNow - new DateTime(1970, 1, 1, 0, 0, 0, 0); return Convert.ToInt64(ts.TotalSeconds).ToString(); } /// <summary> /// Generate a nonce /// </summary> /// <returns></returns> public virtual string GenerateNonce() { return NonceGenerator.ThreadSafeNonce(); } } }
// --------------------------------------------------------------------------------------------------------------------
// <copyright file="ShoppingCartSpot.ascx.cs" company="Sitecore Corporation">
//   Copyright (c) Sitecore Corporation 1999-2015
// </copyright>
// <summary>
//   The ShoppingCartSpot user control.
// </summary>
// --------------------------------------------------------------------------------------------------------------------
// Copyright 2015 Sitecore Corporation A/S
// Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file
// except in compliance with the License. You may obtain a copy of the License at
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software distributed under the
// License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
// either express or implied. See the License for the specific language governing permissions
// and limitations under the License.
// -------------------------------------------------------------------------------------------
namespace Sitecore.Ecommerce.layouts.Ecommerce.UserControls
{
  using System;
  using System.Web.UI;
  using Analytics.Components;
  using Diagnostics;
  using DomainModel.Carts;
  using DomainModel.Configurations;
  using DomainModel.Products;
  using Sitecore.Globalization;
  using Utils;

  /// <summary>
  /// The ShoppingCartSpot user control.
  /// NOTE: every property below resolves its data through the Sitecore IoC container
  /// on each access (Context.Entity.GetConfiguration / GetInstance); nothing is cached
  /// on this control.
  /// </summary>
  public partial class ShoppingCartSpot : UserControl
  {
    /// <summary>
    /// Gets the settings.
    /// </summary>
    /// <value>The settings.</value>
    protected ShoppingCartSpotSettings Settings
    {
      get
      {
        return Sitecore.Ecommerce.Context.Entity.GetConfiguration<ShoppingCartSpotSettings>();
      }
    }

    /// <summary>
    /// Gets the general settings.
    /// </summary>
    /// <value>The general settings.</value>
    protected GeneralSettings GeneralSettings
    {
      get
      {
        return Sitecore.Ecommerce.Context.Entity.GetConfiguration<GeneralSettings>();
      }
    }

    /// <summary>
    /// Gets a value indicating whether the shopping cart contains at least one line.
    /// </summary>
    /// <value><c>true</c> if the cart has one or more lines; otherwise, <c>false</c>.</value>
    protected bool IsItemsInShoppingCart
    {
      get
      {
        return this.ShoppingCart.ShoppingCartLines.Count > 0;
      }
    }

    /// <summary>
    /// Gets the lit amount in ShoppingCart status line.
    /// </summary>
    /// <returns>The amount in ShoppingCart status text.</returns>
    protected string AmountInShoppingCartStatusText
    {
      get
      {
        int itemsInShoppingCart = this.ShoppingCart.ShoppingCartLines.Count;
        // Empty cart gets a dedicated dictionary phrase; otherwise "<n> <item-in-cart phrase>".
        return itemsInShoppingCart == 0 ? Translate.Text(Sitecore.Ecommerce.Examples.Texts.TheShoppingCartIsEmpty) : string.Format("{0} {1}", itemsInShoppingCart, Translate.Text(Sitecore.Ecommerce.Examples.Texts.ItemInShoppingCart));
      }
    }

    /// <summary>
    /// Gets a value indicating whether [show total sum in shopping cart].
    /// </summary>
    /// <value>
    /// <c>true</c> if [show total sum in shopping cart]; otherwise, <c>false</c>.
    /// </value>
    protected bool ShowTotalSumInShoppingCart
    {
      get
      {
        // The total is shown only when enabled in settings AND the cart has lines.
        bool visible = this.Settings.ShowTotal;
        if (!visible || this.ShoppingCart.ShoppingCartLines == null || this.ShoppingCart.ShoppingCartLines.Count <= 0)
        {
          visible = false;
        }

        return visible;
      }
    }

    /// <summary>
    /// Gets the lit total sum.
    /// </summary>
    /// <returns>
    /// The total sum, formatted for display; <c>null</c> when the cart is empty.
    /// </returns>
    protected string TotalSum
    {
      get
      {
        if (this.ShoppingCart.ShoppingCartLines.Count > 0)
        {
          // Settings decide whether the displayed total includes VAT.
          decimal price = this.Settings.ShowTotalIncVat ? this.ShoppingCart.Totals.PriceIncVat : this.ShoppingCart.Totals.PriceExVat;
          return MainUtil.FormatPrice(price, GeneralSettings.DisplayCurrencyOnPrices, this.Settings.PriceFormatString);
        }

        return null;
      }
    }

    /// <summary>
    /// Gets the total price inc vat.
    /// </summary>
    /// <value>The total price inc vat, or "-" if formatting fails.</value>
    protected string TotalPriceIncVat
    {
      get
      {
        try
        {
          return MainUtil.FormatPrice(this.ShoppingCart.Totals.TotalPriceIncVat);
        }
        catch (Exception exception)
        {
          // Best-effort display value: log and fall back to a placeholder.
          Log.Error(exception.Message, exception);
          return "-";
        }
      }
    }

    /// <summary>
    /// Gets a value indicating whether [show amount in shopping cart status line].
    /// </summary>
    /// <value>
    /// <c>true</c> if [show amount in shopping cart status line]; otherwise, <c>false</c>.
    /// </value>
    protected bool ShowAmountInShoppingCartStatusLine
    {
      get
      {
        // An empty cart always shows the status line (it carries the "cart is empty" text).
        return this.ShoppingCart.ShoppingCartLines.Count == 0 || this.Settings.ShowAmountInShoppingCartStatusLine;
      }
    }

    /// <summary>
    /// Gets a value indicating whether [show shopping cart items].
    /// </summary>
    /// <value>
    /// <c>true</c> if [show shopping cart items]; otherwise, <c>false</c>.
    /// </value>
    protected bool ShowShoppingCartItems
    {
      get
      {
        return this.ShoppingCart.ShoppingCartLines.Count > 0 && this.Settings.ShowShoppingCartItemLines;
      }
    }

    /// <summary>
    /// Gets the shopping cart instance (resolved from the IoC container on every access).
    /// </summary>
    protected ShoppingCart ShoppingCart
    {
      get
      {
        return Sitecore.Ecommerce.Context.Entity.GetInstance<ShoppingCart>();
      }
    }

    /// <summary>
    /// The page load event. Binds the cart lines to the repeater and hides the
    /// control for an empty cart unless settings force it visible.
    /// </summary>
    /// <param name="sender">The sender.</param>
    /// <param name="e">The event arguments.</param>
    protected void Page_Load(object sender, EventArgs e)
    {
      this.repShoppingCartList.DataSource = this.ShoppingCart.ShoppingCartLines;
      DataBind();
      bool alwaysShowShoppingCart = this.Settings.AlwaysShowShoppingCart;
      if (this.ShoppingCart.ShoppingCartLines.Count == 0)
      {
        Visible = alwaysShowShoppingCart;
      }
    }

    /// <summary>
    /// Gets the price text.
    /// </summary>
    /// <param name="dataItem">The data item (expected to be a <see cref="ProductLine"/>).</param>
    /// <returns>The ShoppingCart line item price, or "-" if the item is not a product line.</returns>
    protected string ShoppingCartLineItemPrice(object dataItem)
    {
      ProductLine productLine = dataItem as ProductLine;
      if (productLine == null)
      {
        Log.Warn("Product line is null.", this);
        return "-";
      }

      return this.Settings.ShowPriceIncVAT ? MainUtil.FormatPrice(productLine.Totals.PriceIncVat, false, this.Settings.PriceFormatString) : MainUtil.FormatPrice(productLine.Totals.PriceExVat, false, this.Settings.PriceFormatString);
    }

    /// <summary>
    /// Gets the lit total price text.
    /// </summary>
    /// <param name="dataItem">The data item (expected to be a <see cref="ProductLine"/>).</param>
    /// <returns>The ShoppingCart line total price, or "-" if the item is not a product line.</returns>
    protected string ShoppingCartLineTotalPrice(object dataItem)
    {
      ProductLine productLine = dataItem as ProductLine;
      if (productLine == null)
      {
        Log.Warn("Product line is null.", this);
        return "-";
      }

      return this.Settings.ShowPriceIncVAT ? MainUtil.FormatPrice(productLine.Totals.TotalPriceIncVat, false, this.Settings.PriceFormatString) : MainUtil.FormatPrice(productLine.Totals.TotalPriceExVat, false, this.Settings.PriceFormatString);
    }

    /// <summary>
    /// Gets the Friendly Url
    /// </summary>
    /// <param name="dataItem">The data item (expected to be a <see cref="ShoppingCartLine"/>).</param>
    /// <returns>Shopping cart item friendly url, tagged for analytics follow-list tracking.</returns>
    protected string ShoppingCartLineFriendlyUrl(object dataItem)
    {
      ShoppingCartLine shoppingCartLine = dataItem as ShoppingCartLine;
      if (shoppingCartLine == null)
      {
        Log.Warn("Product line is null.", this);
        return "-";
      }

      return AnalyticsUtil.AddFollowListToQueryString(shoppingCartLine.FriendlyUrl, "ShoppingCartSpot");
    }
  }
}
// Copyright (c) 2006-2009 Frank Laub // All rights reserved. // Redistribution and use in source and binary forms, with or without // modification, are permitted provided that the following conditions // are met: // 1. Redistributions of source code must retain the above copyright // notice, this list of conditions and the following disclaimer. // 2. Redistributions in binary form must reproduce the above copyright // notice, this list of conditions and the following disclaimer in the // documentation and/or other materials provided with the distribution. // 3. The name of the author may not be used to endorse or promote products // derived from this software without specific prior written permission. // // THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR // IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES // OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. // IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, // INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT // NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, // DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY // THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT // (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF // THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
using System;
using System.Collections.Generic;
using System.Text;
using System.Runtime.InteropServices;
using OpenSSL.Core;

namespace OpenSSL.X509
{
	/// <summary>
	/// Wraps the X509_STORE object
	/// </summary>
	public class X509Store : BaseReferenceType
	{
		#region X509_STORE
		// Managed mirror of OpenSSL's native X509_STORE struct; field order and
		// layout must match the native definition exactly (LayoutKind.Sequential).
		[StructLayout(LayoutKind.Sequential)]
		struct X509_STORE
		{
			/* The following is a cache of trusted certs */
			public int cache; /* if true, stash any hits */
			public IntPtr objs; //STACK_OF(X509_OBJECT) *objs; /* Cache of all objects */

			/* These are external lookup methods */
			public IntPtr get_cert_methods; //STACK_OF(X509_LOOKUP) *get_cert_methods;
			public IntPtr param; // X509_VERIFY_PARAM* param;

			/* Callbacks for various operations */
			public IntPtr verify; //int (*verify)(X509_STORE_CTX *ctx); /* called to verify a certificate */
			public IntPtr verify_cb; //int (*verify_cb)(int ok,X509_STORE_CTX *ctx); /* error callback */
			public IntPtr get_issuer; //int (*get_issuer)(X509 **issuer, X509_STORE_CTX *ctx, X509 *x); /* get issuers cert from ctx */
			public IntPtr check_issued; //int (*check_issued)(X509_STORE_CTX *ctx, X509 *x, X509 *issuer); /* check issued */
			public IntPtr check_revocation; //int (*check_revocation)(X509_STORE_CTX *ctx); /* Check revocation status of chain */
			public IntPtr get_crl; //int (*get_crl)(X509_STORE_CTX *ctx, X509_CRL **crl, X509 *x); /* retrieve CRL */
			public IntPtr check_crl; //int (*check_crl)(X509_STORE_CTX *ctx, X509_CRL *crl); /* Check CRL validity */
			public IntPtr cert_crl; //int (*cert_crl)(X509_STORE_CTX *ctx, X509_CRL *crl, X509 *x); /* Check certificate against CRL */
			public IntPtr cleanup; //int (*cleanup)(X509_STORE_CTX *ctx);

			#region CRYPTO_EX_DATA ex_data;
			public IntPtr ex_data_sk;
			public int ex_data_dummy;
			#endregion

			public int references;
		}
		#endregion

		#region Initialization
		/// <summary>
		/// Calls X509_STORE_new()
		/// </summary>
		public X509Store()
			: base(Native.ExpectNonNull(Native.X509_STORE_new()), true)
		{
		}

		/// <summary>
		/// Initializes the X509Store object with a pre-existing native X509_STORE pointer
		/// </summary>
		/// <param name="ptr">Native X509_STORE pointer.</param>
		/// <param name="takeOwnership">Whether this wrapper frees the pointer on dispose.</param>
		internal X509Store(IntPtr ptr, bool takeOwnership)
			: base(ptr, takeOwnership)
		{
		}

		/// <summary>
		/// Calls X509_STORE_new() and then adds the specified chain as trusted.
		/// </summary>
		/// <param name="chain">Certificates to add as trusted.</param>
		public X509Store(X509Chain chain)
			: this(chain, true)
		{
		}

		/// <summary>
		/// Calls X509_STORE_new() and then adds the specified chain as trusted.
		/// </summary>
		/// <param name="chain">Certificates to add as trusted.</param>
		/// <param name="takeOwnership">Whether this wrapper frees the native store on dispose.</param>
		public X509Store(X509Chain chain, bool takeOwnership)
			: base(Native.ExpectNonNull(Native.X509_STORE_new()), takeOwnership)
		{
			foreach (X509Certificate cert in chain)
			{
				this.AddTrusted(cert);
			}
		}
		#endregion

		#region Properties
		/// <summary>
		/// Wraps the <code>objs</code> member on the raw X509_STORE structure.
		/// Marshals the native struct on every access; the returned stack does not
		/// take ownership of the underlying native pointer.
		/// </summary>
		public Core.Stack<X509Object> Objects
		{
			get
			{
				X509_STORE raw = (X509_STORE)Marshal.PtrToStructure(this.ptr, typeof(X509_STORE));
				Core.Stack<X509Object> stack = new Core.Stack<X509Object>(raw.objs, false);
				return stack;
			}
		}

		/// <summary>
		/// Accessor to the untrusted list.
		/// NOTE(review): the chain held here is disposed by OnDispose() — if a caller
		/// assigns an externally owned chain via this setter, ownership transfers to
		/// this store; confirm callers expect that.
		/// </summary>
		public X509Chain Untrusted
		{
			get { return this.untrusted; }
			set { this.untrusted = value; }
		}
		#endregion

		#region Methods
		/// <summary>
		/// Returns the trusted state of the specified certificate
		/// </summary>
		/// <param name="cert">Certificate to verify.</param>
		/// <param name="error">Empty on success; otherwise the verification error string.</param>
		/// <returns>true when the certificate verifies against this store.</returns>
		public bool Verify(X509Certificate cert, out string error)
		{
			using (X509StoreContext ctx = new X509StoreContext())
			{
				// The context is initialized with this store's trusted certs plus the
				// accumulated untrusted chain.
				ctx.Init(this, cert, this.untrusted);
				if (ctx.Verify())
				{
					error = "";
					return true;
				}
				error = ctx.ErrorString;
			}
			return false;
		}

		/// <summary>
		/// Adds a chain to the trusted list.
		/// </summary>
		/// <param name="chain">Certificates to add as trusted.</param>
		public void AddTrusted(X509Chain chain)
		{
			foreach (X509Certificate cert in chain)
			{
				AddTrusted(cert);
			}
		}

		/// <summary>
		/// Adds a certificate to the trusted list, calls X509_STORE_add_cert()
		/// </summary>
		/// <param name="cert">Certificate to add as trusted.</param>
		public void AddTrusted(X509Certificate cert)
		{
			// Don't Addref here -- X509_STORE_add_cert increases the refcount of the certificate pointer
			Native.ExpectSuccess(Native.X509_STORE_add_cert(this.ptr, cert.Handle));
		}

		/// <summary>
		/// Add an untrusted certificate
		/// </summary>
		/// <param name="cert">Certificate to add to the untrusted chain.</param>
		public void AddUntrusted(X509Certificate cert)
		{
			this.untrusted.Add(cert);
		}
		#endregion

		#region Overrides
		/// <summary>
		/// Calls X509_STORE_free() and disposes the untrusted chain.
		/// </summary>
		protected override void OnDispose()
		{
			Native.X509_STORE_free(this.ptr);
			if (this.untrusted != null)
			{
				this.untrusted.Dispose();
				this.untrusted = null;
			}
		}

		// Lock category used by OpenSSL's CRYPTO locking for this native type.
		internal override CryptoLockTypes LockType
		{
			get { return CryptoLockTypes.CRYPTO_LOCK_X509_STORE; }
		}

		// Managed mirror type used when marshaling the raw native structure.
		internal override Type RawReferenceType
		{
			get { return typeof(X509_STORE); }
		}
		#endregion

		#region Fields
		private X509Chain untrusted = new X509Chain();
		#endregion
	}
}
// // ApplicationEvents.cs // // Author: // Michael Hutchinson <mhutchinson@novell.com> // // Copyright (c) 2010 Novell, Inc. (http://www.novell.com) // // Permission is hereby granted, free of charge, to any person obtaining a copy // of this software and associated documentation files (the "Software"), to deal // in the Software without restriction, including without limitation the rights // to use, copy, modify, merge, publish, distribute, sublicense, and/or sell // copies of the Software, and to permit persons to whom the Software is // furnished to do so, subject to the following conditions: // // The above copyright notice and this permission notice shall be included in // all copies or substantial portions of the Software. // // THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR // IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, // FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE // AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER // LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, // OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN // THE SOFTWARE. 
using System;
using System.Collections.Generic;

#pragma warning disable 0169

namespace OsxIntegration.Framework
{
	// Exposes classic Mac OS Carbon Apple Events (quit, reopen, open documents,
	// open URLs) as .NET events. A native Carbon handler is installed lazily on
	// first subscription and removed when the last subscriber detaches; all
	// subscribe/unsubscribe bookkeeping is serialized by lockObj.
	public static class ApplicationEvents
	{
		static object lockObj = new object ();

		#region Quit

		static EventHandler<ApplicationEventArgs> quit;
		static IntPtr quitHandlerRef = IntPtr.Zero;

		public static event EventHandler<ApplicationEventArgs> Quit {
			add {
				lock (lockObj) {
					quit += value;
					// Install the native handler only once, on first subscriber.
					if (quitHandlerRef == IntPtr.Zero)
						quitHandlerRef = Carbon.InstallApplicationEventHandler (HandleQuit, CarbonEventApple.QuitApplication);
				}
			}
			remove {
				lock (lockObj) {
					quit -= value;
					// Tear the native handler down when the last subscriber leaves.
					if (quit == null && quitHandlerRef != IntPtr.Zero) {
						Carbon.RemoveEventHandler (quitHandlerRef);
						quitHandlerRef = IntPtr.Zero;
					}
				}
			}
		}

		// Native callback: forwards to managed subscribers with a null sender.
		// NOTE(review): 'quit' is invoked without a null check — relies on the
		// handler only being installed while at least one subscriber exists.
		static CarbonEventHandlerStatus HandleQuit (IntPtr callRef, IntPtr eventRef, IntPtr user_data)
		{
			var args = new ApplicationEventArgs ();
			quit (null, args);
			return args.HandledStatus;
		}

		#endregion

		#region Reopen

		static EventHandler<ApplicationEventArgs> reopen;
		static IntPtr reopenHandlerRef = IntPtr.Zero;

		public static event EventHandler<ApplicationEventArgs> Reopen {
			add {
				lock (lockObj) {
					reopen += value;
					if (reopenHandlerRef == IntPtr.Zero)
						reopenHandlerRef = Carbon.InstallApplicationEventHandler (HandleReopen, CarbonEventApple.ReopenApplication);
				}
			}
			remove {
				lock (lockObj) {
					reopen -= value;
					if (reopen == null && reopenHandlerRef != IntPtr.Zero) {
						Carbon.RemoveEventHandler (reopenHandlerRef);
						reopenHandlerRef = IntPtr.Zero;
					}
				}
			}
		}

		static CarbonEventHandlerStatus HandleReopen (IntPtr callRef, IntPtr eventRef, IntPtr user_data)
		{
			var args = new ApplicationEventArgs ();
			reopen (null, args);
			return args.HandledStatus;
		}

		#endregion

		#region OpenDocuments

		static EventHandler<ApplicationDocumentEventArgs> openDocuments;
		static IntPtr openDocumentsHandlerRef = IntPtr.Zero;

		public static event EventHandler<ApplicationDocumentEventArgs> OpenDocuments {
			add {
				lock (lockObj) {
					openDocuments += value;
					if (openDocumentsHandlerRef == IntPtr.Zero)
						openDocumentsHandlerRef = Carbon.InstallApplicationEventHandler (HandleOpenDocuments, CarbonEventApple.OpenDocuments);
				}
			}
			remove {
				lock (lockObj) {
					openDocuments -= value;
					if (openDocuments == null && openDocumentsHandlerRef != IntPtr.Zero) {
						Carbon.RemoveEventHandler (openDocumentsHandlerRef);
						openDocumentsHandlerRef = IntPtr.Zero;
					}
				}
			}
		}

		// Extracts the file list from the native event; exceptions are logged and
		// reported as NotHandled so the native side can fall back.
		static CarbonEventHandlerStatus HandleOpenDocuments (IntPtr callRef, IntPtr eventRef, IntPtr user_data)
		{
			try {
				var docs = Carbon.GetFileListFromEventRef (eventRef);
				var args = new ApplicationDocumentEventArgs (docs);
				openDocuments (null, args);
				return args.HandledStatus;
			} catch (Exception ex) {
				System.Console.WriteLine (ex);
				return CarbonEventHandlerStatus.NotHandled;
			}
		}

		#endregion

		#region OpenUrls

		static EventHandler<ApplicationUrlEventArgs> openUrls;
		static IntPtr openUrlsHandlerRef = IntPtr.Zero;

		public static event EventHandler<ApplicationUrlEventArgs> OpenUrls {
			add {
				lock (lockObj) {
					openUrls += value;
					if (openUrlsHandlerRef == IntPtr.Zero)
						openUrlsHandlerRef = Carbon.InstallApplicationEventHandler (HandleOpenUrls,
							new CarbonEventTypeSpec[] {
								//For some reason GetUrl doesn't take CarbonEventClass.AppleEvent
								//need to use GURL, GURL
								new CarbonEventTypeSpec (CarbonEventClass.Internet, (int)CarbonEventApple.GetUrl)
							}
						);
				}
			}
			remove {
				lock (lockObj) {
					openUrls -= value;
					if (openUrls == null && openUrlsHandlerRef != IntPtr.Zero) {
						Carbon.RemoveEventHandler (openUrlsHandlerRef);
						openUrlsHandlerRef = IntPtr.Zero;
					}
				}
			}
		}

		static CarbonEventHandlerStatus HandleOpenUrls (IntPtr callRef, IntPtr eventRef, IntPtr user_data)
		{
			try {
				var urls = Carbon.GetUrlListFromEventRef (eventRef);
				var args = new ApplicationUrlEventArgs (urls);
				openUrls (null, args);
				return args.HandledStatus;
			} catch (Exception ex) {
				System.Console.WriteLine (ex);
				return CarbonEventHandlerStatus.NotHandled;
			}
		}

		#endregion
	}

	// Base args type: subscribers set Handled=true to tell the native event loop
	// the event was consumed.
	public class ApplicationEventArgs : EventArgs
	{
		public bool Handled { get; set; }

		// Translates the managed Handled flag to the Carbon status code.
		internal CarbonEventHandlerStatus HandledStatus {
			get {
				return Handled? CarbonEventHandlerStatus.Handled : CarbonEventHandlerStatus.NotHandled;
			}
		}
	}

	// Args for the OpenDocuments Apple Event; carries the file paths to open.
	public class ApplicationDocumentEventArgs : ApplicationEventArgs
	{
		public ApplicationDocumentEventArgs (IList<string> documents)
		{
			this.Documents = documents;
		}

		public IList<string> Documents { get; private set; }
	}

	// Args for the GetUrl Apple Event; carries the URLs to open.
	public class ApplicationUrlEventArgs : ApplicationEventArgs
	{
		public ApplicationUrlEventArgs (IList<string> urls)
		{
			this.Urls = urls;
		}

		public IList<string> Urls { get; private set; }
	}
}
using CrystalDecisions.CrystalReports.Engine;
using CrystalDecisions.Windows.Forms;
using DpSdkEngLib;
using DPSDKOPSLib;
using Microsoft.VisualBasic;
using System;
using System.Collections;
using System.Collections.Generic;
using System.Drawing;
using System.Diagnostics;
using System.Windows.Forms;
using System.Linq;
using System.Xml.Linq;
// ERROR: Not supported in C#: OptionDeclaration
namespace _4PosBackOffice.NET
{
	// Deposit stock-take adjustments form. NOTE(review): this is machine-converted
	// VB6 code; several conversion artifacts remain (see the UPGRADE_* and
	// "ERROR: Not supported in C#" markers) and the file likely requires manual
	// fixes before it compiles — e.g. 'ref' applied to string literals and
	// RecordCount (an int) used directly as a boolean condition.
	internal partial class frmDepositTake : System.Windows.Forms.Form
	{
		// Backing field for the recordset; the property wires/unwires ADO event
		// handlers so the VB6 "WithEvents" semantics are preserved.
		private ADODB.Recordset withEventsField_adoPrimaryRS;
		public ADODB.Recordset adoPrimaryRS
		{
			get { return withEventsField_adoPrimaryRS; }
			set
			{
				// Detach handlers from the old recordset before swapping it out.
				if (withEventsField_adoPrimaryRS != null)
				{
					withEventsField_adoPrimaryRS.MoveComplete -= adoPrimaryRS_MoveComplete;
					withEventsField_adoPrimaryRS.WillChangeRecord -= adoPrimaryRS_WillChangeRecord;
				}
				withEventsField_adoPrimaryRS = value;
				if (withEventsField_adoPrimaryRS != null)
				{
					withEventsField_adoPrimaryRS.MoveComplete += adoPrimaryRS_MoveComplete;
					withEventsField_adoPrimaryRS.WillChangeRecord += adoPrimaryRS_WillChangeRecord;
				}
			}
		}

		// VB6-era state flags (mb* = module boolean, mv* = module variant).
		bool mbChangedByCode;
		int mvBookMark;
		bool mbEditFlag;
		bool mbAddNewFlag;
		bool mbDataChanged;
		string gFilter;
		string gFilterSQL;
		private StdFormat.StdDataFormat fmtBooleanData;

		// Applies localized captions to the Print/Exit buttons and the form tooltip
		// from the shared language/help recordsets.
		private void loadLanguage()
		{
			//frmDepositTake = No Code [Deposit Stock Take Adjustments]
			//'rsLang.filter = "LanguageLayoutLnk_LanguageID=" & 0000
			//If rsLang.RecordCount Then frmDepositTake.Caption = rsLang("LanguageLayoutLnk_Description"): frmDepositTake.RightToLeft = rsLang("LanguageLayoutLnk_RightTL")
			//lblHeading = No Code [Deposit Stock Take List]
			//rsLang.filter = "LanguageLayoutLnk_LanguageID=" & 0000
			//If rsLang.RecordCount Then lblHeading.Caption = rsLang("LanguageLayoutLnk_Description"): lblHeading.RightToLeft = rsLang("LanguageLayoutLnk_RightTL")
			modRecordSet.rsLang.filter = "LanguageLayoutLnk_LanguageID=" + 1085;
			//Print|Checked
			if (modRecordSet.rsLang.RecordCount){cmdPrint.Text = modRecordSet.rsLang.Fields("LanguageLayoutLnk_Description").Value;cmdPrint.RightToLeft = modRecordSet.rsLang.Fields("LanguageLayoutLnk_RightTL").Value;}
			modRecordSet.rsLang.filter = "LanguageLayoutLnk_LanguageID=" + 1004;
			//Exit|Checked
			if (modRecordSet.rsLang.RecordCount){cmdClose.Text = modRecordSet.rsLang.Fields("LanguageLayoutLnk_Description").Value;cmdClose.RightToLeft = modRecordSet.rsLang.Fields("LanguageLayoutLnk_RightTL").Value;}
			modRecordSet.rsHelp.filter = "Help_Section=0 AND Help_Form='" + this.Name + "'";
			//UPGRADE_ISSUE: Form property frmDepositTake.ToolTip1 was not upgraded. Click for more: 'ms-help://MS.VSCC.v90/dv_commoner/local/redirect.htm?keyword="CC4C7EC0-C903-48FC-ACCC-81861D12DA4A"'
			if (modRecordSet.rsHelp.RecordCount) this.ToolTip1 = modRecordSet.rsHelp.Fields("Help_ContextID").Value;
		}

		// Entry point: snapshots current stock, loads the grid, and shows the
		// form modally.
		public void loadItem()
		{
			My.MyProject.Forms.frmStockTakeSnapshot.remoteSnapShot();
			System.Windows.Forms.Application.DoEvents();
			getNamespace();
			mbDataChanged = false;
			loadLanguage();
			ShowDialog();
		}

		// Opens the shared filter dialog and reloads the grid with the new filter.
		private void cmdFilter_Click()
		{
			My.MyProject.Forms.frmFilter.loadFilter(ref gFilter);
			getNamespace();
		}

		// Loads the deposit stock-take recordset and configures the grid columns.
		// NOTE(review): conversion artifact — getRS takes its SQL by ref, so the
		// string literal argument below is not valid C#.
		private void getNamespace()
		{
			adoPrimaryRS = modRecordSet.getRS(ref "SELECT Deposit.Deposit_Name, StockTakeDeposit_DepositTypeID, StockTakeDeposit.StockTakeDeposit_Quantity, StockTakeDeposit_DepositTypeID, StockTakeDeposit.StockTakeDeposit_WarehouseID, StockTakeDeposit.StockTakeDeposit_DepositID FROM Deposit INNER JOIN StockTakeDeposit ON Deposit.DepositID = StockTakeDeposit.StockTakeDeposit_DepositID Where (((StockTakeDeposit.StockTakeDeposit_WarehouseID) = 2)) ORDER BY Deposit.Deposit_Name, StockTakeDeposit.StockTakeDeposit_DepositTypeID;");
			//Display the list of Titles in the DataCombo
			grdDataGrid.DataSource = adoPrimaryRS;
			grdDataGrid.Columns[0].HeaderText = "Stock Name";
			grdDataGrid.Columns[0].DefaultCellStyle.Alignment = MSDataGridLib.AlignmentConstants.dbgLeft;
			grdDataGrid.Columns[0].Frozen = true;
			grdDataGrid.Columns[1].HeaderText = "Type";
			grdDataGrid.Columns[1].Frozen = true;
			grdDataGrid.Columns[1].Width = sizeConvertors.twipsToPixels(900, true);
			// Boolean formatter renders the deposit type flag as "Crate"/"Bottle".
			grdDataGrid.Columns[1].DefaultCellStyle.Format = fmtBooleanData;
			grdDataGrid.Columns[2].HeaderText = "Quantity";
			grdDataGrid.Columns[2].DefaultCellStyle.Alignment = MSDataGridLib.AlignmentConstants.dbgRight;
			grdDataGrid.Columns[2].Width = sizeConvertors.twipsToPixels(900, true);
			//UPGRADE_WARNING: Couldn't resolve default property of object grdDataGrid.Columns().DataFormat.Type. Click for more: 'ms-help://MS.VSCC.v90/dv_commoner/local/redirect.htm?keyword="6A50421D-15FE-4896-8A1B-2EC21E9037B2"'
			//grdDataGrid.Columns(2).DefaultCellStyle.FormatProvid = 1
			grdDataGrid.Columns[2].DefaultCellStyle.Format = "#,##0";
			grdDataGrid.Columns[2].Frozen = false;
			// ID/key columns are hidden; they are only used by the update handlers.
			grdDataGrid.Columns[5].Visible = false;
			grdDataGrid.Columns[3].Visible = false;
			grdDataGrid.Columns[4].Visible = false;
			frmDepositTake_Resize(this, new System.EventArgs());
			mbDataChanged = false;
		}

		// Prints the deposit stock-take report.
		private void cmdPrint_Click(System.Object eventSender, System.EventArgs eventArgs)
		{
			modApplication.report_DepositTake();
		}

		// Sets up the Crate/Bottle boolean display format used by the Type column.
		private void frmDepositTake_Load(System.Object eventSender, System.EventArgs eventArgs)
		{
			fmtBooleanData = new StdFormat.StdDataFormat();
			fmtBooleanData.Type = StdFormat.FormatType.fmtBoolean;
			fmtBooleanData.TrueValue = "Crate";
			fmtBooleanData.FalseValue = "Bottle";
			fmtBooleanData.NullValue = "";
		}

		// Escape closes the form (VB6 KeyAscii idiom preserved by the converter).
		private void frmDepositTake_KeyPress(System.Object eventSender, System.Windows.Forms.KeyPressEventArgs eventArgs)
		{
			short KeyAscii = Strings.Asc(eventArgs.KeyChar);
			if (KeyAscii == 27)
			{
				KeyAscii = 0;
				cmdClose_Click(cmdClose, new System.EventArgs());
			}
			eventArgs.KeyChar = Strings.Chr(KeyAscii);
			if (KeyAscii == 0)
			{
				eventArgs.Handled = true;
			}
		}

		//UPGRADE_WARNING: Event frmDepositTake.Resize may fire when form is initialized. Click for more: 'ms-help://MS.VSCC.v90/dv_commoner/local/redirect.htm?keyword="88B12AE1-6DE0-48A0-86F1-60C0686C026A"'
		private void frmDepositTake_Resize(System.Object eventSender, System.EventArgs eventArgs)
		{
			// ERROR: Not supported in C#: OnErrorStatement
			//This will resize the grid when the form is resized
			System.Windows.Forms.Application.DoEvents();
			grdDataGrid.Height = sizeConvertors.twipsToPixels(sizeConvertors.pixelToTwips(this.ClientRectangle.Height, false) - 30 - sizeConvertors.pixelToTwips(picButtons.Height, false), false);
			grdDataGrid.Columns[0].Width = sizeConvertors.twipsToPixels(sizeConvertors.pixelToTwips(grdDataGrid.Width, true) - 1800 - 580, true);
		}

		private void frmDepositTake_FormClosed(System.Object eventSender, System.Windows.Forms.FormClosedEventArgs eventArgs)
		{
			System.Windows.Forms.Cursor.Current = System.Windows.Forms.Cursors.Default;
		}

		private void adoPrimaryRS_MoveComplete(ADODB.EventReasonEnum adReason, ADODB.Error pError, ref ADODB.EventStatusEnum adStatus, ADODB.Recordset pRecordset)
		{
			//This will display the current record position for this recordset
		}

		// When a stock-take quantity is edited, push the delta into the warehouse
		// deposit link and the current day-end shrink figures.
		private void adoPrimaryRS_WillChangeRecord(ADODB.EventReasonEnum adReason, int cRecords, ref ADODB.EventStatusEnum adStatus, ADODB.Recordset pRecordset)
		{
			int lQuantity = 0;
			if (adoPrimaryRS.Fields("StockTakeDeposit_Quantity").OriginalValue != adoPrimaryRS.Fields("StockTakeDeposit_Quantity").Value)
			{
				// Delta between the edited value and the snapshot value.
				lQuantity = Convert.ToInt32(adoPrimaryRS.Fields("StockTakeDeposit_Quantity").Value) - Convert.ToInt32(adoPrimaryRS.Fields("StockTakeDeposit_Quantity").OriginalValue);
				modRecordSet.cnnDB.Execute("UPDATE WarehouseDepositItemLnk SET WarehouseDepositItemLnk.WarehouseDepositItemLnk_Quantity = [WarehouseDepositItemLnk]![WarehouseDepositItemLnk_Quantity]+(" + lQuantity + ") WHERE (((WarehouseDepositItemLnk.WarehouseDepositItemLnk_WarehouseID)=" + adoPrimaryRS.Fields("StockTakeDeposit_WarehouseID").Value + ") AND ((WarehouseDepositItemLnk.WarehouseDepositItemLnk_DepositID)=" + adoPrimaryRS.Fields("StockTakeDeposit_DepositID").Value + ") AND ((WarehouseDepositItemLnk.WarehouseDepositItemLnk_DepositTypeID)=" + adoPrimaryRS.Fields(1).Value + "));");
				modRecordSet.cnnDB.Execute("UPDATE Company INNER JOIN DayEndDepositItemLnk ON Company.Company_DayEndID = DayEndDepositItemLnk.DayEndDepositItemLnk_DayEndID SET DayEndDepositItemLnk.DayEndDepositItemLnk_QuantityShrink = [DayEndDepositItemLnk]![DayEndDepositItemLnk_QuantityShrink]+" + lQuantity + " WHERE (((DayEndDepositItemLnk.DayEndDeposittemLnk_DepositID)=" + adoPrimaryRS.Fields("StockTakeDeposit_DepositID").Value + ") AND ((DayEndDepositItemLnk.DayEndDeposittemLnk_DepositType)=" + adoPrimaryRS.Fields(1).Value + "));");
				doDiskFlush();
			}
		}

		// Disabled stub: the immediate 'return' below makes the remainder of this
		// method (registry lookup + .stk file export, ported from VB6) unreachable.
		// It is kept for reference alongside the commented-out VB6 original.
		private void doDiskFlush()
		{
			return;

			Scripting.FileSystemObject fso = new Scripting.FileSystemObject();
			int hkey = 0;
			int lRetVal = 0;
			string vValue = null;
			string lPath = null;
			ADODB.Recordset rs = default(ADODB.Recordset);
			int lID = 0;
			int lCompanyID = 0;
			string lString = null;
			string lKey = null;
			short lDepositType = 0;
			lID = adoPrimaryRS.Fields("StockTakeDeposit_DepositID").Value;
			lDepositType = adoPrimaryRS.Fields(1).Value;
			lRetVal = modUtilities.RegOpenKeyEx(modUtilities.HKEY_LOCAL_MACHINE, "Software\\4POS", 0, modUtilities.KEY_QUERY_VALUE, ref hkey);
			lRetVal = modUtilities.QueryValueEx(hkey, "master", ref vValue);
			modUtilities.RegCloseKey(hkey);
			if (string.IsNullOrEmpty(vValue))
			{
				return;
			}
			else
			{
				lPath = vValue;
			}
			rs = modRecordSet.getRS(ref "SELECT Company.CompanyID, DayEndDepositItemLnk.DayEndDeposittemLnk_DepositID, DayEndDepositItemLnk.DayEndDepositItemLnk_DayEndID, DayEndDepositItemLnk.DayEndDepositItemLnk_QuantityShrink, DayEndDepositItemLnk.DayEndDeposittemLnk_DepositType FROM Company INNER JOIN DayEndDepositItemLnk ON Company.Company_DayEndID = DayEndDepositItemLnk.DayEndDepositItemLnk_DayEndID WHERE (((DayEndDepositItemLnk.DayEndDeposittemLnk_DepositID)=" + lID + ") AND ((DayEndDepositItemLnk.DayEndDeposittemLnk_DepositType)=" + lDepositType + "));");
			//If rs.RecordCount Then
			//Key = rs("CompanyID") & "_" & rs("DayEndDeposittemLnk_DepositID") & "_" & rs("DayEndDepositItemLnk_DayEndID") & "_" & CInt(lDepositType + 1)
			//lCompanyID = rs("CompanyID")
			//If fso.FileExists(lPath & lCompanyID & "\" & Key & ".stk") Then fso.DeleteFile lPath & lCompanyID & "\" & Key & ".stk"
			//If rs("DayEndDepositItemLnk_QuantityShrink") Then
			//Set lTextstream = fso.OpenTextFile(lPath & lCompanyID & "\" & Key & ".stk", ForWriting, True)
			//lTextstream.Write rs("DayEndDepositItemLnk_QuantityShrink")
			//lTextstream.Close
			//End If
			//End If
		}

		// Cancels a pending edit/add and restores the previous row position.
		private void cmdCancel_Click()
		{
			// ERROR: Not supported in C#: OnErrorStatement
			mbEditFlag = false;
			mbAddNewFlag = false;
			adoPrimaryRS.CancelUpdate();
			if (mvBookMark > 0)
			{
				adoPrimaryRS.Bookmark = mvBookMark;
			}
			else
			{
				adoPrimaryRS.MoveFirst();
			}
			mbDataChanged = false;
		}

		//UPGRADE_NOTE: update was upgraded to update_Renamed. Click for more: 'ms-help://MS.VSCC.v90/dv_commoner/local/redirect.htm?keyword="A9E4979A-37FA-4718-9994-97DD76ED70A7"'
		// Commits pending batch changes. NOTE(review): the UpdateErr label is a
		// leftover from VB6 'On Error GoTo' and is currently unreachable.
		private void update_Renamed()
		{
			// ERROR: Not supported in C#: OnErrorStatement
			adoPrimaryRS.UpdateBatch(ADODB.AffectEnum.adAffectAll);
			if (mbAddNewFlag)
			{
				adoPrimaryRS.MoveLast();
				//move to the new record
			}
			mbEditFlag = false;
			mbAddNewFlag = false;
			mbDataChanged = false;
			return;
			UpdateErr:
			Interaction.MsgBox(Err().Description);
		}

		// Saves any pending changes, then closes the form.
		private void cmdClose_Click(System.Object eventSender, System.EventArgs eventArgs)
		{
			update_Renamed();
			this.Close();
		}

		// NOTE(review): goFirst/goLast labels are VB6 error-handler leftovers and
		// are unreachable in the converted code.
		private void goFirst()
		{
			// ERROR: Not supported in C#: OnErrorStatement
			adoPrimaryRS.MoveFirst();
			mbDataChanged = false;
			return;
			GoFirstError:
			Interaction.MsgBox(Err().Description);
		}

		private void goLast()
		{
			// ERROR: Not supported in C#: OnErrorStatement
			adoPrimaryRS.MoveLast();
			mbDataChanged = false;
			return;
			GoLastError:
			Interaction.MsgBox(Err().Description);
		}

		//Private Sub grdDataGrid_CellValueChanged(ByVal eventSender As System.Object, ByVal eventArgs As AxMSDataGridLib.DDataGridEvents_CellValueChangedEvent) Handles grdDataGrid.CellValueChanged
		//	If grdDataGrid.Columns(ColIndex).DataFormat.Format = "#,##0.00" Then
		//		grdDataGrid.Columns(ColIndex).DataFormat = 0
		//	End If
		//End Sub
	}
}
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE file in the project root for more information.

using System.Collections.Generic;
using System.Linq;
using System.Security.Cryptography.X509Certificates;
using Test.Cryptography;
using Xunit;

namespace System.Security.Cryptography.Pkcs.Tests
{
    /// <summary>
    /// Tests for adding and removing unsigned attributes (timestamp tokens and
    /// counter-signatures) on a <see cref="SignerInfo"/> inside a <see cref="SignedCms"/>,
    /// including round-tripping through Encode/Decode.
    /// </summary>
    public static partial class SignerInfoTests
    {
        // RFC 3161 id-aa-timeStampToken unsigned-attribute OID.
        private const string TokenAttributeOid = "1.2.840.113549.1.9.16.2.14";

        [Fact]
        public static void SignerInfo_AddUnsignedAttribute_Adds()
        {
            SignedCms cms = new SignedCms();
            cms.Decode(SignedDocuments.RsaPkcs1OneSignerIssuerAndSerialNumber);
            Assert.Equal(0, cms.SignerInfos[0].UnsignedAttributes.Count);

            // First value creates the attribute.
            AsnEncodedData attribute1 = CreateTimestampToken(1);
            cms.SignerInfos[0].AddUnsignedAttribute(attribute1);

            Assert.Equal(1, cms.SignerInfos[0].UnsignedAttributes.Count);
            Assert.Equal(1, cms.SignerInfos[0].UnsignedAttributes[0].Values.Count);
            VerifyAttributesAreEqual(cms.SignerInfos[0].UnsignedAttributes[0].Values[0], attribute1);

            // The attribute must survive an Encode/Decode round trip.
            ReReadSignedCms(ref cms);

            Assert.Equal(1, cms.SignerInfos[0].UnsignedAttributes.Count);
            Assert.Equal(1, cms.SignerInfos[0].UnsignedAttributes[0].Values.Count);
            VerifyAttributesAreEqual(cms.SignerInfos[0].UnsignedAttributes[0].Values[0], attribute1);

            // A second value with the same OID joins the existing attribute
            // rather than producing a second CryptographicAttributeObject.
            AsnEncodedData attribute2 = CreateTimestampToken(2);
            cms.SignerInfos[0].AddUnsignedAttribute(attribute2);

            var expectedAttributes = new List<AsnEncodedData>();
            expectedAttributes.Add(attribute1);
            expectedAttributes.Add(attribute2);

            Assert.Equal(1, cms.SignerInfos[0].UnsignedAttributes.Count);
            Assert.Equal(2, cms.SignerInfos[0].UnsignedAttributes[0].Values.Count);
            VerifyAttributesContainsAll(cms.SignerInfos[0].UnsignedAttributes, expectedAttributes);

            ReReadSignedCms(ref cms);

            Assert.Equal(1, cms.SignerInfos[0].UnsignedAttributes.Count);
            Assert.Equal(2, cms.SignerInfos[0].UnsignedAttributes[0].Values.Count);
            VerifyAttributesContainsAll(cms.SignerInfos[0].UnsignedAttributes, expectedAttributes);
        }

        [Fact]
        public static void SignerInfo_RemoveUnsignedAttribute_RemoveCounterSignature()
        {
            SignedCms cms = new SignedCms();
            cms.Decode(SignedDocuments.OneRsaSignerTwoRsaCounterSigners);

            Assert.Equal(2, cms.SignerInfos[0].UnsignedAttributes.Count);
            Assert.Equal(2, cms.SignerInfos[0].CounterSignerInfos.Count);

            byte[] secondSignerCounterSignature = cms.SignerInfos[0].CounterSignerInfos[1].GetSignature();

            // Removing the first counter-signature attribute must leave the
            // second counter-signer intact (it shifts into slot 0).
            cms.SignerInfos[0].RemoveUnsignedAttribute(cms.SignerInfos[0].UnsignedAttributes[0].Values[0]);

            Assert.Equal(1, cms.SignerInfos[0].UnsignedAttributes.Count);
            Assert.Equal(1, cms.SignerInfos[0].CounterSignerInfos.Count);
            Assert.Equal(secondSignerCounterSignature, cms.SignerInfos[0].CounterSignerInfos[0].GetSignature());

            ReReadSignedCms(ref cms);

            Assert.Equal(1, cms.SignerInfos[0].UnsignedAttributes.Count);
            Assert.Equal(1, cms.SignerInfos[0].CounterSignerInfos.Count);
            Assert.Equal(secondSignerCounterSignature, cms.SignerInfos[0].CounterSignerInfos[0].GetSignature());
        }

        [Theory]
        [MemberData(nameof(SignedDocumentsWithAttributesTestData))]
        public static void SignerInfo_RemoveUnsignedAttributes_RemoveAllAttributesFromBeginning(byte[] document)
        {
            SignedCms cms = new SignedCms();
            cms.Decode(document);

            List<AsnEncodedData> attributes = GetAllAsnEncodedDataFromAttributes(cms.SignerInfos[0].UnsignedAttributes);
            Assert.True(attributes.Count > 0);

            // BUGFIX: the previous implementation indexed with a growing `i`
            // while also calling RemoveAt(0) each iteration, so the loop ended
            // after removing only about half of the attributes. Draining from
            // the front until the list is empty removes all of them, as the
            // test name promises.
            while (attributes.Count > 0)
            {
                AsnEncodedData attribute = attributes[0];
                cms.SignerInfos[0].RemoveUnsignedAttribute(attribute);
                attributes.RemoveAt(0);

                VerifyAttributesContainsAll(cms.SignerInfos[0].UnsignedAttributes, attributes);

                ReReadSignedCms(ref cms);
                VerifyAttributesContainsAll(cms.SignerInfos[0].UnsignedAttributes, attributes);
            }
        }

        [Theory]
        [MemberData(nameof(SignedDocumentsWithAttributesTestData))]
        public static void SignerInfo_RemoveUnsignedAttributes_RemoveAllAttributesFromEnd(byte[] document)
        {
            SignedCms cms = new SignedCms();
            cms.Decode(document);

            List<AsnEncodedData> attributes = GetAllAsnEncodedDataFromAttributes(cms.SignerInfos[0].UnsignedAttributes);
            Assert.True(attributes.Count > 0);

            // Remove from the back so the downward index stays valid.
            for (int i = attributes.Count - 1; i >= 0; i--)
            {
                AsnEncodedData attribute = attributes[i];
                cms.SignerInfos[0].RemoveUnsignedAttribute(attribute);
                attributes.RemoveAt(i);

                VerifyAttributesContainsAll(cms.SignerInfos[0].UnsignedAttributes, attributes);

                ReReadSignedCms(ref cms);
                VerifyAttributesContainsAll(cms.SignerInfos[0].UnsignedAttributes, attributes);
            }
        }

        [Fact]
        public static void SignerInfo_RemoveUnsignedAttributes_RemoveWithNonMatchingOid()
        {
            SignedCms cms = new SignedCms();
            cms.Decode(SignedDocuments.OneRsaSignerTwoRsaCounterSigners);

            int numberOfAttributes = cms.SignerInfos[0].UnsignedAttributes.Count;
            Assert.NotEqual(0, numberOfAttributes);

            // Same raw data as a real attribute, but a bogus OID: removal must
            // fail and leave the message untouched.
            AsnEncodedData fakeAttribute = new AsnEncodedData(new Oid("1.2.3.4", "1.2.3.4"),
                cms.SignerInfos[0].UnsignedAttributes[0].Values[0].RawData);

            Assert.Throws<CryptographicException>(() => cms.SignerInfos[0].RemoveUnsignedAttribute(fakeAttribute));
            Assert.Equal(numberOfAttributes, cms.SignerInfos[0].UnsignedAttributes.Count);
        }

        [Fact]
        public static void SignerInfo_RemoveUnsignedAttributes_RemoveWithNonMatchingData()
        {
            SignedCms cms = new SignedCms();
            cms.Decode(SignedDocuments.OneRsaSignerTwoRsaCounterSigners);

            int numberOfAttributes = cms.SignerInfos[0].UnsignedAttributes.Count;
            Assert.NotEqual(0, numberOfAttributes);

            // Correct OID but truncated payload: removal must fail and leave
            // the message untouched.
            AsnEncodedData fakeAttribute = new AsnEncodedData(
                cms.SignerInfos[0].UnsignedAttributes[0].Oid,
                cms.SignerInfos[0].UnsignedAttributes[0].Values[0].RawData.Skip(1).ToArray());

            Assert.Throws<CryptographicException>(() => cms.SignerInfos[0].RemoveUnsignedAttribute(fakeAttribute));
            Assert.Equal(numberOfAttributes, cms.SignerInfos[0].UnsignedAttributes.Count);
        }

        [Fact]
        public static void SignerInfo_RemoveUnsignedAttributes_MultipleAttributeValues()
        {
            SignedCms cms = new SignedCms();
            cms.Decode(SignedDocuments.RsaPkcs1OneSignerIssuerAndSerialNumber);
            Assert.Equal(0, cms.SignerInfos[0].UnsignedAttributes.Count);

            AsnEncodedData attribute1 = CreateTimestampToken(1);
            AsnEncodedData attribute2 = CreateTimestampToken(2);
            cms.SignerInfos[0].AddUnsignedAttribute(attribute1);
            cms.SignerInfos[0].AddUnsignedAttribute(attribute2);

            Assert.Equal(1, cms.SignerInfos[0].UnsignedAttributes.Count);
            Assert.Equal(2, cms.SignerInfos[0].UnsignedAttributes[0].Values.Count);

            // Removing one value keeps the attribute with the remaining value;
            // removing the last value drops the whole attribute.
            cms.SignerInfos[0].RemoveUnsignedAttribute(attribute1);
            Assert.Equal(1, cms.SignerInfos[0].UnsignedAttributes.Count);
            Assert.Equal(1, cms.SignerInfos[0].UnsignedAttributes[0].Values.Count);
            Assert.True(AsnEncodedDataEqual(attribute2, cms.SignerInfos[0].UnsignedAttributes[0].Values[0]));

            cms.SignerInfos[0].RemoveUnsignedAttribute(attribute2);
            Assert.Equal(0, cms.SignerInfos[0].UnsignedAttributes.Count);
        }

        [Fact]
        public static void SignerInfo_AddRemoveUnsignedAttributes_JoinCounterSignaturesAttributesIntoOne()
        {
            byte[] message = { 1, 2, 3, 4, 5 };
            ContentInfo content = new ContentInfo(message);
            SignedCms cms = new SignedCms(content);

            using (X509Certificate2 signerCert = Certificates.RSA2048SignatureOnly.TryGetCertificateWithPrivateKey())
            {
                CmsSigner signer = new CmsSigner(SubjectIdentifierType.IssuerAndSerialNumber, signerCert);
                cms.ComputeSignature(signer);
            }

            using (X509Certificate2 counterSigner1cert = Certificates.Dsa1024.TryGetCertificateWithPrivateKey())
            {
                CmsSigner counterSigner = new CmsSigner(SubjectIdentifierType.IssuerAndSerialNumber, counterSigner1cert);
                counterSigner.IncludeOption = X509IncludeOption.EndCertOnly;
                // DSA cannot sign SHA-2 digests here, so force SHA-1.
                counterSigner.DigestAlgorithm = new Oid(Oids.Sha1, Oids.Sha1);
                cms.SignerInfos[0].ComputeCounterSignature(counterSigner);
            }

            using (X509Certificate2 counterSigner2cert = Certificates.ECDsaP256Win.TryGetCertificateWithPrivateKey())
            {
                CmsSigner counterSigner = new CmsSigner(SubjectIdentifierType.IssuerAndSerialNumber, counterSigner2cert);
                cms.SignerInfos[0].ComputeCounterSignature(counterSigner);
            }

            // ComputeCounterSignature produces one attribute per counter-signer.
            Assert.Equal(2, cms.SignerInfos[0].UnsignedAttributes.Count);
            Assert.Equal(1, cms.SignerInfos[0].UnsignedAttributes[0].Values.Count);
            Assert.Equal(1, cms.SignerInfos[0].UnsignedAttributes[1].Values.Count);
            cms.CheckSignature(true);

            // Remove-then-add merges the value into the surviving attribute of
            // the same OID, and the signatures must still verify afterwards.
            AsnEncodedData counterSignature = cms.SignerInfos[0].UnsignedAttributes[0].Values[0];
            cms.SignerInfos[0].RemoveUnsignedAttribute(counterSignature);
            cms.SignerInfos[0].AddUnsignedAttribute(counterSignature);

            Assert.Equal(1, cms.SignerInfos[0].UnsignedAttributes.Count);
            Assert.Equal(2, cms.SignerInfos[0].UnsignedAttributes[0].Values.Count);
            cms.CheckSignature(true);
        }

        // Asserts that every value in `attributes` appears in `expectedAttributes`
        // and that every expected attribute is matched at least once.
        private static void VerifyAttributesContainsAll(CryptographicAttributeObjectCollection attributes, List<AsnEncodedData> expectedAttributes)
        {
            var indices = new HashSet<int>();

            foreach (CryptographicAttributeObject attribute in attributes)
            {
                foreach (AsnEncodedData attributeValue in attribute.Values)
                {
                    int idx = FindAsnEncodedData(expectedAttributes, attributeValue);
                    Assert.NotEqual(-1, idx);
                    indices.Add(idx);
                }
            }

            Assert.Equal(expectedAttributes.Count, indices.Count);
        }

        // Linear search by OID + raw bytes; returns -1 when not found.
        private static int FindAsnEncodedData(List<AsnEncodedData> array, AsnEncodedData data)
        {
            for (int i = 0; i < array.Count; i++)
            {
                if (AsnEncodedDataEqual(array[i], data))
                {
                    return i;
                }
            }

            return -1;
        }

        // Flattens a CryptographicAttributeObjectCollection into its values.
        private static List<AsnEncodedData> GetAllAsnEncodedDataFromAttributes(CryptographicAttributeObjectCollection attributes)
        {
            var ret = new List<AsnEncodedData>();

            foreach (CryptographicAttributeObject attribute in attributes)
            {
                foreach (AsnEncodedData attributeValue in attribute.Values)
                {
                    ret.Add(attributeValue);
                }
            }

            return ret;
        }

        private static bool AsnEncodedDataEqual(AsnEncodedData a, AsnEncodedData b)
        {
            return a.Oid.Value == b.Oid.Value && a.RawData.SequenceEqual(b.RawData);
        }

        // Round-trips the message through Encode/Decode so tests can verify
        // that in-memory edits persist in the serialized form.
        private static void ReReadSignedCms(ref SignedCms cms)
        {
            byte[] bytes = cms.Encode();
            cms = new SignedCms();
            cms.Decode(bytes);
        }

        // Builds a minimal RFC 3161 timestamp-token attribute whose serial
        // number is the given byte, so distinct tokens can be told apart.
        private static AsnEncodedData CreateTimestampToken(byte serial)
        {
            Oid tokenOid = new Oid(TokenAttributeOid, TokenAttributeOid);
            Oid policyId = new Oid("0.0", "0.0");
            Oid hashAlgorithmId = new Oid(Oids.Sha256);

            var tokenInfo = new Rfc3161TimestampTokenInfo(
                policyId,
                hashAlgorithmId,
                new byte[256 / 8],
                new byte[] { (byte)serial },
                DateTimeOffset.UtcNow);

            return new AsnEncodedData(tokenOid, tokenInfo.Encode());
        }

        private static void VerifyAttributesAreEqual(AsnEncodedData actual, AsnEncodedData expected)
        {
            Assert.NotSame(expected.Oid, actual.Oid);
            Assert.Equal(expected.Oid.Value, actual.Oid.Value);

            // We need to decode bytes because DER and BER may encode the same information slightly differently
            Rfc3161TimestampTokenInfo expectedToken;
            Assert.True(Rfc3161TimestampTokenInfo.TryDecode(expected.RawData, out expectedToken, out _));

            Rfc3161TimestampTokenInfo actualToken;
            Assert.True(Rfc3161TimestampTokenInfo.TryDecode(actual.RawData, out actualToken, out _));

            Assert.Equal(expectedToken.GetSerialNumber().ByteArrayToHex(), actualToken.GetSerialNumber().ByteArrayToHex());
            Assert.Equal(expectedToken.Timestamp, actualToken.Timestamp);
            // Fixed expected/actual order: the literal expectation comes first.
            Assert.Equal(Oids.Sha256, expectedToken.HashAlgorithmId.Value);
            Assert.Equal(expectedToken.HashAlgorithmId.Value, actualToken.HashAlgorithmId.Value);
        }

        // Documents that carry at least one unsigned attribute on SignerInfos[0].
        public static IEnumerable<object[]> SignedDocumentsWithAttributesTestData()
        {
            yield return new object[] { SignedDocuments.CounterSignedRsaPkcs1OneSigner };
            yield return new object[] { SignedDocuments.NoSignatureSignedWithAttributesAndCounterSignature };
            yield return new object[] { SignedDocuments.OneRsaSignerTwoRsaCounterSigners };
            yield return new object[] { SignedDocuments.RsaPkcs1CounterSignedWithNoSignature };
            yield return new object[] { SignedDocuments.UnsortedSignerInfos };
        }
    }
}
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE file in the project root for more information.

using System;
using System.Data;
using System.IO;
using System.Data.ProviderBase;
using System.Data.Common;
using System.Text;

namespace System.Data.SqlClient
{
    /// <summary>
    /// Provider-specific metadata factory for SqlClient. Extends the XML-driven
    /// base collections with rows discovered at runtime from the connected
    /// server (CLR UDTs and table-valued parameter types).
    /// </summary>
    internal sealed class SqlMetaDataFactory : DbMetaDataFactory
    {
        // Normalized version strings are fixed-width "MM.mm.bbbb", which makes
        // ordinal string comparison equivalent to version comparison.
        private const string _serverVersionNormalized90 = "09.00.0000";
        private const string _serverVersionNormalized10 = "10.00.0000";

        public SqlMetaDataFactory(Stream XMLStream,
                                  string serverVersion,
                                  string serverVersionNormalized) :
            base(XMLStream, serverVersion, serverVersionNormalized)
        { }

        /// <summary>
        /// Appends one DataTypes row per CLR UDT registered on the server.
        /// The TypeName column is the assembly-qualified CLR type name.
        /// No-op for servers older than 9.0 (Yukon), which have no UDTs.
        /// </summary>
        private void addUDTsToDataTypesTable(DataTable dataTypesTable, SqlConnection connection, string ServerVersion)
        {
            const string sqlCommand =
                "select " +
                "assemblies.name, " +
                "types.assembly_class, " +
                "ASSEMBLYPROPERTY(assemblies.name, 'VersionMajor') as version_major, " +
                "ASSEMBLYPROPERTY(assemblies.name, 'VersionMinor') as version_minor, " +
                "ASSEMBLYPROPERTY(assemblies.name, 'VersionBuild') as version_build, " +
                "ASSEMBLYPROPERTY(assemblies.name, 'VersionRevision') as version_revision, " +
                "ASSEMBLYPROPERTY(assemblies.name, 'CultureInfo') as culture_info, " +
                "ASSEMBLYPROPERTY(assemblies.name, 'PublicKey') as public_key, " +
                "is_nullable, " +
                "is_fixed_length, " +
                "max_length " +
                "from sys.assemblies as assemblies join sys.assembly_types as types " +
                "on assemblies.assembly_id = types.assembly_id ";

            // pre 9.0/Yukon servers do not have UDTs
            if (0 > string.Compare(ServerVersion, _serverVersionNormalized90, StringComparison.OrdinalIgnoreCase))
            {
                return;
            }

            DataRow newRow = null;
            DataColumn providerDbtype = dataTypesTable.Columns[DbMetaDataColumnNames.ProviderDbType];
            DataColumn columnSize = dataTypesTable.Columns[DbMetaDataColumnNames.ColumnSize];
            DataColumn isFixedLength = dataTypesTable.Columns[DbMetaDataColumnNames.IsFixedLength];
            DataColumn isSearchable = dataTypesTable.Columns[DbMetaDataColumnNames.IsSearchable];
            DataColumn isLiteralSupported = dataTypesTable.Columns[DbMetaDataColumnNames.IsLiteralSupported];
            DataColumn typeName = dataTypesTable.Columns[DbMetaDataColumnNames.TypeName];
            DataColumn isNullable = dataTypesTable.Columns[DbMetaDataColumnNames.IsNullable];

            if ((providerDbtype == null) ||
                (columnSize == null) ||
                (isFixedLength == null) ||
                (isSearchable == null) ||
                (isLiteralSupported == null) ||
                (typeName == null) ||
                (isNullable == null))
            {
                throw ADP.InvalidXml();
            }

            // Ordinal positions of the columns in the SELECT list above.
            const int columnSizeIndex = 10;
            const int isFixedLengthIndex = 9;
            const int isNullableIndex = 8;
            const int assemblyNameIndex = 0;
            const int assemblyClassIndex = 1;
            const int versionMajorIndex = 2;
            const int versionMinorIndex = 3;
            const int versionBuildIndex = 4;
            const int versionRevisionIndex = 5;
            const int cultureInfoIndex = 6;
            const int publicKeyIndex = 7;

            // FIX: SqlCommand is IDisposable and was previously never disposed.
            using (SqlCommand command = connection.CreateCommand())
            {
                command.CommandText = sqlCommand;

                using (IDataReader reader = command.ExecuteReader())
                {
                    object[] values = new object[11];
                    while (reader.Read())
                    {
                        reader.GetValues(values);
                        newRow = dataTypesTable.NewRow();

                        newRow[providerDbtype] = SqlDbType.Udt;

                        if (values[columnSizeIndex] != DBNull.Value)
                        {
                            newRow[columnSize] = values[columnSizeIndex];
                        }

                        if (values[isFixedLengthIndex] != DBNull.Value)
                        {
                            newRow[isFixedLength] = values[isFixedLengthIndex];
                        }

                        newRow[isSearchable] = true;
                        newRow[isLiteralSupported] = false;
                        if (values[isNullableIndex] != DBNull.Value)
                        {
                            newRow[isNullable] = values[isNullableIndex];
                        }

                        // Only emit a row when the full assembly identity is
                        // available; otherwise the type name cannot be built.
                        if ((values[assemblyNameIndex] != DBNull.Value) &&
                            (values[assemblyClassIndex] != DBNull.Value) &&
                            (values[versionMajorIndex] != DBNull.Value) &&
                            (values[versionMinorIndex] != DBNull.Value) &&
                            (values[versionBuildIndex] != DBNull.Value) &&
                            (values[versionRevisionIndex] != DBNull.Value))
                        {
                            // Build "Class, Assembly, Version=a.b.c.d[, Culture=...][, PublicKeyToken=...]".
                            StringBuilder nameString = new StringBuilder();
                            nameString.Append(values[assemblyClassIndex].ToString());
                            nameString.Append(", ");
                            nameString.Append(values[assemblyNameIndex].ToString());
                            nameString.Append(", Version=");

                            nameString.Append(values[versionMajorIndex].ToString());
                            nameString.Append(".");
                            nameString.Append(values[versionMinorIndex].ToString());
                            nameString.Append(".");
                            nameString.Append(values[versionBuildIndex].ToString());
                            nameString.Append(".");
                            nameString.Append(values[versionRevisionIndex].ToString());

                            if (values[cultureInfoIndex] != DBNull.Value)
                            {
                                nameString.Append(", Culture=");
                                nameString.Append(values[cultureInfoIndex].ToString());
                            }

                            if (values[publicKeyIndex] != DBNull.Value)
                            {
                                nameString.Append(", PublicKeyToken=");

                                // Hex-encode the public key bytes.
                                StringBuilder resultString = new StringBuilder();
                                byte[] byteArrayValue = (byte[])values[publicKeyIndex];
                                foreach (byte b in byteArrayValue)
                                {
                                    resultString.Append(string.Format("{0,-2:x2}", b));
                                }
                                nameString.Append(resultString.ToString());
                            }

                            newRow[typeName] = nameString.ToString();
                            dataTypesTable.Rows.Add(newRow);
                            newRow.AcceptChanges();
                        } // if assembly name
                    } //end while
                } // end using reader
            } // end using command
        }

        /// <summary>
        /// Appends one DataTypes row per user-defined table type (TVP) on the
        /// server. No-op for servers older than 10.0, which have no table types.
        /// </summary>
        private void AddTVPsToDataTypesTable(DataTable dataTypesTable, SqlConnection connection, string ServerVersion)
        {
            const string sqlCommand =
                "select " +
                "name, " +
                "is_nullable, " +
                "max_length " +
                "from sys.types " +
                "where is_table_type = 1";

            // TODO: update this check once the server upgrades major version number!!!
            // pre 9.0/Yukon servers do not have Table types
            if (0 > string.Compare(ServerVersion, _serverVersionNormalized10, StringComparison.OrdinalIgnoreCase))
            {
                return;
            }

            DataRow newRow = null;
            DataColumn providerDbtype = dataTypesTable.Columns[DbMetaDataColumnNames.ProviderDbType];
            DataColumn columnSize = dataTypesTable.Columns[DbMetaDataColumnNames.ColumnSize];
            DataColumn isSearchable = dataTypesTable.Columns[DbMetaDataColumnNames.IsSearchable];
            DataColumn isLiteralSupported = dataTypesTable.Columns[DbMetaDataColumnNames.IsLiteralSupported];
            DataColumn typeName = dataTypesTable.Columns[DbMetaDataColumnNames.TypeName];
            DataColumn isNullable = dataTypesTable.Columns[DbMetaDataColumnNames.IsNullable];

            if ((providerDbtype == null) ||
                (columnSize == null) ||
                (isSearchable == null) ||
                (isLiteralSupported == null) ||
                (typeName == null) ||
                (isNullable == null))
            {
                throw ADP.InvalidXml();
            }

            // Ordinal positions of the columns in the SELECT list above.
            const int columnSizeIndex = 2;
            const int isNullableIndex = 1;
            const int typeNameIndex = 0;

            // FIX: SqlCommand is IDisposable and was previously never disposed.
            using (SqlCommand command = connection.CreateCommand())
            {
                command.CommandText = sqlCommand;

                using (IDataReader reader = command.ExecuteReader())
                {
                    // Right-sized to the three columns the query selects
                    // (previously over-allocated to 11 slots).
                    object[] values = new object[3];
                    while (reader.Read())
                    {
                        reader.GetValues(values);
                        newRow = dataTypesTable.NewRow();

                        newRow[providerDbtype] = SqlDbType.Structured;

                        if (values[columnSizeIndex] != DBNull.Value)
                        {
                            newRow[columnSize] = values[columnSizeIndex];
                        }

                        newRow[isSearchable] = false;
                        newRow[isLiteralSupported] = false;
                        if (values[isNullableIndex] != DBNull.Value)
                        {
                            newRow[isNullable] = values[isNullableIndex];
                        }

                        if (values[typeNameIndex] != DBNull.Value)
                        {
                            newRow[typeName] = values[typeNameIndex];
                            dataTypesTable.Rows.Add(newRow);
                            newRow.AcceptChanges();
                        } // if type name
                    } //end while
                } // end using reader
            } // end using command
        }

        /// <summary>
        /// Builds the DataTypes collection: the filtered XML-defined rows plus
        /// server-discovered UDT and TVP rows.
        /// </summary>
        private DataTable GetDataTypesTable(SqlConnection connection)
        {
            // verify the existence of the table in the data set
            DataTable dataTypesTable = CollectionDataSet.Tables[DbMetaDataCollectionNames.DataTypes];
            if (dataTypesTable == null)
            {
                throw ADP.UnableToBuildCollection(DbMetaDataCollectionNames.DataTypes);
            }

            // copy the table filtering out any rows that don't apply to the current version of the provider
            dataTypesTable = CloneAndFilterCollection(DbMetaDataCollectionNames.DataTypes, null);

            addUDTsToDataTypesTable(dataTypesTable, connection, ServerVersionNormalized);
            AddTVPsToDataTypesTable(dataTypesTable, connection, ServerVersionNormalized);

            dataTypesTable.AcceptChanges();
            return dataTypesTable;
        }

        /// <summary>
        /// Dispatches metadata-collection requests; only DataTypes is handled
        /// specially here, everything else falls through to the base XML data.
        /// </summary>
        /// <exception cref="ArgumentException">Restrictions supplied for DataTypes, which takes none.</exception>
        protected override DataTable PrepareCollection(string collectionName, string[] restrictions, DbConnection connection)
        {
            SqlConnection sqlConnection = (SqlConnection)connection;
            DataTable resultTable = null;

            if (collectionName == DbMetaDataCollectionNames.DataTypes)
            {
                if (ADP.IsEmptyArray(restrictions) == false)
                {
                    throw ADP.TooManyRestrictions(DbMetaDataCollectionNames.DataTypes);
                }
                resultTable = GetDataTypesTable(sqlConnection);
            }

            if (resultTable == null)
            {
                throw ADP.UnableToBuildCollection(collectionName);
            }

            return resultTable;
        }
    }
}
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE file in the project root for more information.

using System.ComponentModel;
using System.Diagnostics;
using System.IO;
using System.Runtime.CompilerServices;
using System.Text.Unicode;

namespace System.Text.Encodings.Web
{
    /// <summary>
    /// An abstraction representing various text encoders.
    /// </summary>
    /// <remarks>
    /// TextEncoder subclasses can be used to do HTML encoding, URI encoding, and JavaScript encoding.
    /// Instances of such subclasses can be accessed using <see cref="HtmlEncoder.Default"/>, <see cref="UrlEncoder.Default"/>, and <see cref="JavaScriptEncoder.Default"/>.
    /// </remarks>
    public abstract class TextEncoder
    {
        // The following pragma disables a warning complaining about non-CLS compliant members being abstract,
        // and wants me to mark the type as non-CLS compliant.
        // It is true that this type cannot be extended by all CLS compliant languages.
        // Having said that, if I marked the type as non-CLS all methods that take it as parameter will now have to be marked CLSCompliant(false),
        // yet consumption of concrete encoders is totally CLS compliant,
        // as it's mainly to be done by calling helper methods in TextEncoderExtensions class,
        // and so I think the warning is a bit too aggressive.

        /// <summary>
        /// Encodes a Unicode scalar into a buffer.
        /// </summary>
        /// <param name="unicodeScalar">Unicode scalar.</param>
        /// <param name="buffer">The destination of the encoded text.</param>
        /// <param name="bufferLength">Length of the destination <paramref name="buffer"/> in chars.</param>
        /// <param name="numberOfCharactersWritten">Number of characters written to the <paramref name="buffer"/>.</param>
        /// <returns>Returns false if <paramref name="bufferLength"/> is too small to fit the encoded text, otherwise returns true.</returns>
        /// <remarks>This method is seldom called directly. One of the TextEncoder.Encode overloads should be used instead.
        /// Implementations of <see cref="TextEncoder"/> need to be thread safe and stateless.
        /// </remarks>
#pragma warning disable 3011
        [CLSCompliant(false)]
        [EditorBrowsable(EditorBrowsableState.Never)]
        public unsafe abstract bool TryEncodeUnicodeScalar(int unicodeScalar, char* buffer, int bufferLength, out int numberOfCharactersWritten);

        // all subclasses have the same implementation of this method.
        // but this cannot be made virtual, because it will cause a virtual call to Encodes, and it destroys perf, i.e. makes common scenario 2x slower

        /// <summary>
        /// Finds index of the first character that needs to be encoded.
        /// </summary>
        /// <param name="text">The text buffer to search.</param>
        /// <param name="textLength">The number of characters in the <paramref name="text"/>.</param>
        /// <returns>Index of the first character to encode, or -1 when nothing in the buffer needs encoding.</returns>
        /// <remarks>This method is seldom called directly. It's used by higher level helper APIs.</remarks>
        [CLSCompliant(false)]
        [EditorBrowsable(EditorBrowsableState.Never)]
        public unsafe abstract int FindFirstCharacterToEncode(char* text, int textLength);
#pragma warning restore

        /// <summary>
        /// Determines if a given Unicode scalar will be encoded.
        /// </summary>
        /// <param name="unicodeScalar">Unicode scalar.</param>
        /// <returns>Returns true if the <paramref name="unicodeScalar"/> will be encoded by this encoder, otherwise returns false.</returns>
        [EditorBrowsable(EditorBrowsableState.Never)]
        public abstract bool WillEncode(int unicodeScalar);

        // this could be a field, but I am trying to make the abstraction pure.

        /// <summary>
        /// Maximum number of characters that this encoder can generate for each input character.
        /// </summary>
        /// <remarks>Used to size worst-case output buffers; implementations that
        /// under-report cause the Encode helpers below to throw.</remarks>
        [EditorBrowsable(EditorBrowsableState.Never)]
        public abstract int MaxOutputCharactersPerInputCharacter { get; }

        /// <summary>
        /// Encodes the supplied string and returns the encoded text as a new string.
        /// </summary>
        /// <param name="value">String to encode.</param>
        /// <returns>Encoded string; the original instance when nothing needed encoding.</returns>
        /// <exception cref="ArgumentNullException"><paramref name="value"/> is null.</exception>
        public virtual string Encode(string value)
        {
            if (value == null)
            {
                throw new ArgumentNullException(nameof(value));
            }

            unsafe
            {
                fixed (char* valuePointer = value)
                {
                    int firstCharacterToEncode = FindFirstCharacterToEncode(valuePointer, value.Length);

                    // Fast path: nothing to encode, return the input string itself.
                    if (firstCharacterToEncode == -1)
                    {
                        return value;
                    }

                    // Worst-case output size; small buffers live on the stack.
                    int bufferSize = MaxOutputCharactersPerInputCharacter * value.Length;

                    string result;
                    if (bufferSize < 1024)
                    {
                        char* wholebuffer = stackalloc char[bufferSize];
                        int totalWritten = EncodeIntoBuffer(wholebuffer, bufferSize, valuePointer, value.Length, firstCharacterToEncode);
                        result = new string(wholebuffer, 0, totalWritten);
                    }
                    else
                    {
                        char[] wholebuffer = new char[bufferSize];
                        fixed (char* buffer = &wholebuffer[0])
                        {
                            int totalWritten = EncodeIntoBuffer(buffer, bufferSize, valuePointer, value.Length, firstCharacterToEncode);
                            result = new string(wholebuffer, 0, totalWritten);
                        }
                    }

                    return result;
                }
            }
        }

        // NOTE: The order of the parameters to this method is a work around for https://github.com/dotnet/corefx/issues/4455
        // and the underlying Mono bug: https://bugzilla.xamarin.com/show_bug.cgi?id=36052.
        // If changing the signature of this method, ensure this issue isn't regressing on Mono.
        //
        // Copies value[0..firstCharacterToEncode) verbatim into buffer, then encodes
        // the remainder char-pair by char-pair (to keep surrogate pairs together).
        // Returns the total number of chars written to buffer.
        private unsafe int EncodeIntoBuffer(char* buffer, int bufferLength, char* value, int valueLength, int firstCharacterToEncode)
        {
            int totalWritten = 0;

            if (firstCharacterToEncode > 0)
            {
                // Chars are 2 bytes each, hence count + count for the byte size.
                int bytesToCopy = firstCharacterToEncode + firstCharacterToEncode;
                BufferInternal.MemoryCopy(value, buffer, bytesToCopy, bytesToCopy);
                totalWritten += firstCharacterToEncode;
                bufferLength -= firstCharacterToEncode;
                buffer += firstCharacterToEncode;
            }

            int valueIndex = firstCharacterToEncode;

            char firstChar = value[valueIndex];
            char secondChar = firstChar;
            bool wasSurrogatePair = false;
            int charsWritten;

            // this loop processes character pairs (in case they are surrogates).
            // there is an if block below to process single last character.
            int secondCharIndex;
            for (secondCharIndex = valueIndex + 1; secondCharIndex < valueLength; secondCharIndex++)
            {
                // After a consumed surrogate pair the index was advanced an extra
                // step, so re-read the preceding char instead of reusing secondChar.
                if (!wasSurrogatePair)
                {
                    firstChar = secondChar;
                }
                else
                {
                    firstChar = value[secondCharIndex - 1];
                }
                secondChar = value[secondCharIndex];

                if (!WillEncode(firstChar))
                {
                    wasSurrogatePair = false;
                    *buffer = firstChar;
                    buffer++;
                    bufferLength--;
                    totalWritten++;
                }
                else
                {
                    int nextScalar = UnicodeHelpers.GetScalarValueFromUtf16(firstChar, secondChar, out wasSurrogatePair);
                    if (!TryEncodeUnicodeScalar(nextScalar, buffer, bufferLength, out charsWritten))
                    {
                        throw new ArgumentException("Argument encoder does not implement MaxOutputCharsPerInputChar correctly.");
                    }

                    buffer += charsWritten;
                    bufferLength -= charsWritten;
                    totalWritten += charsWritten;
                    if (wasSurrogatePair)
                    {
                        // Both halves of the pair were consumed; skip the low surrogate.
                        secondCharIndex++;
                    }
                }
            }

            // Trailing character: only reached when the loop did not already consume
            // it as the low half of a surrogate pair (in that case secondCharIndex
            // overshoots valueLength). NOTE(review): this path encodes the last char
            // unconditionally, without a WillEncode check like the loop above —
            // presumably relying on TryEncodeUnicodeScalar handling it; confirm
            // against the concrete encoders before changing.
            if (secondCharIndex == valueLength)
            {
                firstChar = value[valueLength - 1];
                int nextScalar = UnicodeHelpers.GetScalarValueFromUtf16(firstChar, null, out wasSurrogatePair);
                if (!TryEncodeUnicodeScalar(nextScalar, buffer, bufferLength, out charsWritten))
                {
                    throw new ArgumentException("Argument encoder does not implement MaxOutputCharsPerInputChar correctly.");
                }

                buffer += charsWritten;
                bufferLength -= charsWritten;
                totalWritten += charsWritten;
            }

            return totalWritten;
        }

        /// <summary>
        /// Encodes the supplied string into a <see cref="TextWriter"/>.
        /// </summary>
        /// <param name="output">Encoded text is written to this output.</param>
        /// <param name="value">String to be encoded.</param>
        public void Encode(TextWriter output, string value)
        {
            Encode(output, value, 0, value.Length);
        }

        /// <summary>
        /// Encodes a substring into a <see cref="TextWriter"/>.
        /// </summary>
        /// <param name="output">Encoded text is written to this output.</param>
        /// <param name="value">String whose substring is to be encoded.</param>
        /// <param name="startIndex">The index where the substring starts.</param>
        /// <param name="characterCount">Number of characters in the substring.</param>
        /// <exception cref="ArgumentNullException"><paramref name="value"/> or <paramref name="output"/> is null.</exception>
        /// <exception cref="ArgumentOutOfRangeException">The substring range is outside <paramref name="value"/>.</exception>
        public virtual void Encode(TextWriter output, string value, int startIndex, int characterCount)
        {
            if (value == null)
            {
                throw new ArgumentNullException(nameof(value));
            }
            if (output == null)
            {
                throw new ArgumentNullException(nameof(output));
            }
            ValidateRanges(startIndex, characterCount, actualInputLength: value.Length);

            unsafe
            {
                fixed (char* valuePointer = value)
                {
                    char* substring = valuePointer + startIndex;
                    int firstIndexToEncode = FindFirstCharacterToEncode(substring, characterCount);

                    if (firstIndexToEncode == -1) // nothing to encode;
                    {
                        if (startIndex == 0 && characterCount == value.Length) // write whole string
                        {
                            output.Write(value);
                            return;
                        }
                        for (int i = 0; i < characterCount; i++) // write substring
                        {
                            output.Write(*substring);
                            substring++;
                        }
                        return;
                    }

                    // write prefix, then encode
                    for (int i = 0; i < firstIndexToEncode; i++)
                    {
                        output.Write(*substring);
                        substring++;
                    }

                    EncodeCore(output, substring, characterCount - firstIndexToEncode);
                }
            }
        }

        /// <summary>
        /// Encodes characters from an array into a <see cref="TextWriter"/>.
        /// </summary>
        /// <param name="output">Encoded text is written to the output.</param>
        /// <param name="value">Array of characters to be encoded.</param>
        /// <param name="startIndex">The index where the substring starts.</param>
        /// <param name="characterCount">Number of characters in the substring.</param>
        /// <exception cref="ArgumentNullException"><paramref name="value"/> or <paramref name="output"/> is null.</exception>
        /// <exception cref="ArgumentOutOfRangeException">The range is outside <paramref name="value"/>.</exception>
        public virtual void Encode(TextWriter output, char[] value, int startIndex, int characterCount)
        {
            if (value == null)
            {
                throw new ArgumentNullException(nameof(value));
            }
            if (output == null)
            {
                throw new ArgumentNullException(nameof(output));
            }
            ValidateRanges(startIndex, characterCount, actualInputLength: value.Length);

            unsafe
            {
                fixed (char* valuePointer = value)
                {
                    char* substring = valuePointer + startIndex;
                    int firstIndexToEncode = FindFirstCharacterToEncode(substring, characterCount);

                    if (firstIndexToEncode == -1) // nothing to encode;
                    {
                        if (startIndex == 0 && characterCount == value.Length) // write whole string
                        {
                            output.Write(value);
                            return;
                        }
                        for (int i = 0; i < characterCount; i++) // write substring
                        {
                            output.Write(*substring);
                            substring++;
                        }
                        return;
                    }

                    // write prefix, then encode
                    for (int i = 0; i < firstIndexToEncode; i++)
                    {
                        output.Write(*substring);
                        substring++;
                    }

                    EncodeCore(output, substring, characterCount - firstIndexToEncode);
                }
            }
        }

        // Streaming counterpart of EncodeIntoBuffer: encodes value[0..valueLength)
        // directly to the TextWriter, using a small per-scalar stack buffer.
        // Caller guarantees valueLength >= 1 (value points at the first char that
        // needs encoding).
        private unsafe void EncodeCore(TextWriter output, char* value, int valueLength)
        {
            Debug.Assert(value != null & output != null);
            Debug.Assert(valueLength >= 0);

            // One scalar at a time, so the worst case for a single input char suffices.
            int bufferLength = MaxOutputCharactersPerInputCharacter;
            char* buffer = stackalloc char[bufferLength];

            char firstChar = *value;
            char secondChar = firstChar;
            bool wasSurrogatePair = false;
            int charsWritten;

            // this loop processes character pairs (in case they are surrogates).
            // there is an if block below to process single last character.
            for (int secondCharIndex = 1; secondCharIndex < valueLength; secondCharIndex++)
            {
                if (!wasSurrogatePair)
                {
                    firstChar = secondChar;
                }
                else
                {
                    firstChar = value[secondCharIndex - 1];
                }
                secondChar = value[secondCharIndex];

                if (!WillEncode(firstChar))
                {
                    wasSurrogatePair = false;
                    output.Write(firstChar);
                }
                else
                {
                    int nextScalar = UnicodeHelpers.GetScalarValueFromUtf16(firstChar, secondChar, out wasSurrogatePair);
                    if (!TryEncodeUnicodeScalar(nextScalar, buffer, bufferLength, out charsWritten))
                    {
                        throw new ArgumentException("Argument encoder does not implement MaxOutputCharsPerInputChar correctly.");
                    }
                    Write(output, buffer, charsWritten);

                    if (wasSurrogatePair)
                    {
                        // Both halves of the pair were consumed; skip the low surrogate.
                        secondCharIndex++;
                    }
                }
            }

            // Trailing character, unless the final loop iteration already consumed
            // it as part of a surrogate pair. NOTE(review): as in EncodeIntoBuffer,
            // this path encodes the last char without a WillEncode check — confirm
            // intended before changing.
            if (!wasSurrogatePair)
            {
                firstChar = value[valueLength - 1];
                int nextScalar = UnicodeHelpers.GetScalarValueFromUtf16(firstChar, null, out wasSurrogatePair);
                if (!TryEncodeUnicodeScalar(nextScalar, buffer, bufferLength, out charsWritten))
                {
                    throw new ArgumentException("Argument encoder does not implement MaxOutputCharsPerInputChar correctly.");
                }
                Write(output, buffer, charsWritten);
            }
        }

        // Copies source into destination; returns false (writing nothing) when the
        // destination is too small. Helper for TryEncodeUnicodeScalar implementations.
        internal static unsafe bool TryCopyCharacters(char[] source, char* destination, int destinationLength, out int numberOfCharactersWritten)
        {
            Debug.Assert(source != null && destination != null && destinationLength >= 0);

            if (destinationLength < source.Length)
            {
                numberOfCharactersWritten = 0;
                return false;
            }

            for (int i = 0; i < source.Length; i++)
            {
                destination[i] = source[i];
            }

            numberOfCharactersWritten = source.Length;
            return true;
        }

        // Writes a BMP scalar as a single char; returns false when destination is full.
        [MethodImpl(MethodImplOptions.AggressiveInlining)]
        internal static unsafe bool TryWriteScalarAsChar(int unicodeScalar, char* destination, int destinationLength, out int numberOfCharactersWritten)
        {
            Debug.Assert(destination != null && destinationLength >= 0);

            Debug.Assert(unicodeScalar < ushort.MaxValue);
            if (destinationLength < 1)
            {
                numberOfCharactersWritten = 0;
                return false;
            }
            *destination = (char)unicodeScalar;
            numberOfCharactersWritten = 1;
            return true;
        }

        // Validates that [startIndex, startIndex + characterCount) lies within an
        // input of length actualInputLength.
        private static void ValidateRanges(int startIndex, int characterCount, int actualInputLength)
        {
            if (startIndex < 0 || startIndex > actualInputLength)
            {
                throw new ArgumentOutOfRangeException(nameof(startIndex));
            }
            if (characterCount < 0 || characterCount > (actualInputLength - startIndex))
            {
                throw new ArgumentOutOfRangeException(nameof(characterCount));
            }
        }

        // Writes inputLength chars from the raw pointer to the TextWriter one at a time.
        private static unsafe void Write(TextWriter output, char* input, int inputLength)
        {
            Debug.Assert(output != null && input != null && inputLength >= 0);

            while (inputLength-- > 0)
            {
                output.Write(*input);
                input++;
            }
        }
    }
}
// Copyright (c) Microsoft. All rights reserved.
// Licensed under the MIT license. See LICENSE file in the project root for full license information.
//

// ReadyToRun / NGen versioning test harness: each TestXxx method exercises one
// versioning-resilience scenario against types (MyClass, MyStruct, Assert, ...)
// defined in a companion assembly that is recompiled between runs.

using System;
using System.Collections.Generic;
using System.Threading.Tasks;
using System.Runtime.InteropServices;
using System.Runtime.CompilerServices;
#if CORECLR
using System.Runtime.Loader;
#endif
using System.Reflection;
using System.IO;

// Fixture types used to verify field layout across a versioning boundary.
class InstanceFieldTest : MyClass
{
    public int Value;
}

class InstanceFieldTest2 : InstanceFieldTest
{
    public int Value2;
}

[StructLayout(LayoutKind.Sequential)]
class InstanceFieldTestWithLayout : MyClassWithLayout
{
    public int Value;
}

// Base whose (struct) field grows between compilations of the companion assembly.
class GrowingBase
{
    MyGrowingStruct s;
}

class InheritingFromGrowingBase : GrowingBase
{
    public int x;
}

class Program
{
    // Virtual and interface dispatch through a versioned class.
    static void TestVirtualMethodCalls()
    {
        var o = new MyClass();
        Assert.AreEqual(o.VirtualMethod(), "Virtual method result");

        var iface = (IMyInterface)o;
        Assert.AreEqual(iface.InterfaceMethod(" "), "Interface result");
        Assert.AreEqual(MyClass.TestInterfaceMethod(iface, "+"), "Interface+result");
    }

    // Methods that moved to a base class / changed to virtual between versions.
    static void TestMovedVirtualMethods()
    {
        var o = new MyChildClass();

        Assert.AreEqual(o.MovedToBaseClass(), "MovedToBaseClass");
        Assert.AreEqual(o.ChangedToVirtual(), "ChangedToVirtual");

        if (!LLILCJitEnabled)
        {
            // Null receiver must still raise NullReferenceException for both call forms.
            o = null;

            try
            {
                o.MovedToBaseClass();
            }
            catch (NullReferenceException)
            {
                try
                {
                    o.ChangedToVirtual();
                }
                catch (NullReferenceException)
                {
                    return;
                }
            }

            Assert.AreEqual("NullReferenceException", "thrown");
        }
    }

    // Constrained call: boxing a struct and invoking Object.ToString().
    static void TestConstrainedMethodCalls()
    {
        using (MyStruct s = new MyStruct())
        {
            ((Object)s).ToString();
        }
    }

    // Constrained call without boxing (direct ToString on the struct).
    static void TestConstrainedMethodCalls_Unsupported()
    {
        MyStruct s = new MyStruct();
        s.ToString();
    }

    static void TestInterop()
    {
        // Verify both intra-module and inter-module PInvoke interop
        MyClass.GetTickCount();
        MyClass.TestInterop();
    }

    // Static + thread-static fields; expected values below account for the
    // mutations performed inside MyClass.TestStaticFields().
    static void TestStaticFields()
    {
        MyClass.StaticObjectField = 894;
        MyClass.StaticLongField = 4392854;
        MyClass.StaticNullableGuidField = new Guid("0D7E505F-E767-4FEF-AEEC-3243A3005673");
        MyClass.ThreadStaticStringField = "Hello";
        MyClass.ThreadStaticIntField = 735;
        MyClass.ThreadStaticDateTimeField = new DateTime(2011, 1, 1);

        MyClass.TestStaticFields();

#if false // TODO: Enable once LDFTN is supported
        Task.Run(() =>
        {
            MyClass.ThreadStaticStringField = "Garbage";
            MyClass.ThreadStaticIntField = 0xBAAD;
            MyClass.ThreadStaticDateTimeField = DateTime.Now;
        }).Wait();
#endif

        Assert.AreEqual(MyClass.StaticObjectField, 894 + 12345678 /* + 1234 */);
        Assert.AreEqual(MyClass.StaticLongField, (long)(4392854 * 456 /* * 45 */));
        Assert.AreEqual(MyClass.StaticNullableGuidField, null);
        Assert.AreEqual(MyClass.ThreadStaticStringField, "HelloWorld");
        Assert.AreEqual(MyClass.ThreadStaticIntField, 735/78);
        Assert.AreEqual(MyClass.ThreadStaticDateTimeField, new DateTime(2011, 1, 1) + new TimeSpan(123));
    }

    // Array initialized from pre-baked data blob; 1+2+...+512 == 1023.
    static void TestPreInitializedArray()
    {
        var a = new int[] { 1, 2, 4, 8, 16, 32, 64, 128, 256, 512 };

        int sum = 0;
        foreach (var e in a)
            sum += e;
        Assert.AreEqual(sum, 1023);
    }

    // Multi-dimensional array allocation and element access.
    static void TestMultiDimmArray()
    {
        var a = new int[2,3,4];
        a[0,1,2] = a[0,0,0] + a[1,1,1];
        a.ToString();
    }

    // Generic virtual dispatch; result string encodes all four type arguments.
    static void TestGenericVirtualMethod()
    {
        var o = new MyGeneric<String, Object>();
        Assert.AreEqual(o.GenericVirtualMethod<Program, IEnumerable<String>>(),
            "System.StringSystem.ObjectProgramSystem.Collections.Generic.IEnumerable`1[System.String]");
    }

    // Generic virtuals that moved to a base class / changed to virtual between versions.
    static void TestMovedGenericVirtualMethod()
    {
        var o = new MyChildGeneric<Object>();

        Assert.AreEqual(o.MovedToBaseClass<WeakReference>(), typeof(List<WeakReference>).ToString());
        Assert.AreEqual(o.ChangedToVirtual<WeakReference>(), typeof(List<WeakReference>).ToString());

        if (!LLILCJitEnabled)
        {
            // Null receiver must still raise NullReferenceException for both call forms.
            o = null;

            try
            {
                o.MovedToBaseClass<WeakReference>();
            }
            catch (NullReferenceException)
            {
                try
                {
                    o.ChangedToVirtual<WeakReference>();
                }
                catch (NullReferenceException)
                {
                    return;
                }
            }

            Assert.AreEqual("NullReferenceException", "thrown");
        }
    }

    // Field layout of a class hierarchy spanning the versioning boundary,
    // verified both by direct access and reflection.
    static void TestInstanceFields()
    {
        var t = new InstanceFieldTest2();
        t.Value = 123;
        t.Value2 = 234;
        t.InstanceField = 345;

        Assert.AreEqual(typeof(InstanceFieldTest).GetRuntimeField("Value").GetValue(t), 123);
        Assert.AreEqual(typeof(InstanceFieldTest2).GetRuntimeField("Value2").GetValue(t), 234);
        Assert.AreEqual(typeof(MyClass).GetRuntimeField("InstanceField").GetValue(t), 345);
    }

    static void TestInstanceFieldsWithLayout()
    {
        var t = new InstanceFieldTestWithLayout();
        t.Value = 123;

        Assert.AreEqual(typeof(InstanceFieldTestWithLayout).GetRuntimeField("Value").GetValue(t), 123);
    }

    static void TestInheritingFromGrowingBase()
    {
        var o = new InheritingFromGrowingBase();
        o.x = 6780;

        Assert.AreEqual(typeof(InheritingFromGrowingBase).GetRuntimeField("x").GetValue(o), 6780);
    }

    // NoInlining keeps the struct usage observable across the versioning boundary.
    [MethodImplAttribute(MethodImplOptions.NoInlining)]
    static void TestGrowingStruct()
    {
        MyGrowingStruct s = MyGrowingStruct.Construct();
        MyGrowingStruct.Check(ref s);
    }

    [MethodImplAttribute(MethodImplOptions.NoInlining)]
    static void TestChangingStruct()
    {
        MyChangingStruct s = MyChangingStruct.Construct();
        s.x++;
        MyChangingStruct.Check(ref s);
    }

    [MethodImplAttribute(MethodImplOptions.NoInlining)]
    static void TestChangingHFAStruct()
    {
        MyChangingHFAStruct s = MyChangingHFAStruct.Construct();
        MyChangingHFAStruct.Check(s);
    }

    [MethodImplAttribute(MethodImplOptions.NoInlining)]
    static void TestGetType()
    {
        new MyClass().GetType().ToString();
    }

#if CORECLR
    class MyLoadContext : AssemblyLoadContext
    {
        public MyLoadContext()
        {
        }

        public void TestMultipleLoads()
        {
            Assembly a = LoadFromAssemblyPath(Path.Combine(Directory.GetCurrentDirectory(), "NI", "test.ni.dll"));
            Assert.AreEqual(AssemblyLoadContext.GetLoadContext(a), this);
        }

        protected override Assembly Load(AssemblyName an)
        {
            throw new NotImplementedException();
        }
    }

    // Loading a native image into a second load context must fail with FileLoadException.
    static void TestMultipleLoads()
    {
        if (!LLILCJitEnabled)
        {
            try
            {
                new MyLoadContext().TestMultipleLoads();
            }
            catch (FileLoadException e)
            {
                Assert.AreEqual(e.ToString().Contains("Native image cannot be loaded multiple times"), true);
                return;
            }

            Assert.AreEqual("FileLoadException", "thrown");
        }
    }
#endif

    static void TestFieldLayoutNGenMixAndMatch()
    {
        // This test is verifying consistent field layout when ReadyToRun images are combined with NGen images
        // "ngen install /nodependencies main.exe" to exercise the interesting case
        var o = new ByteChildClass(67);
        Assert.AreEqual(o.ChildByte, (byte)67);
    }

    static void RunAllTests()
    {
        TestVirtualMethodCalls();
        TestMovedVirtualMethods();
        TestConstrainedMethodCalls();
        TestConstrainedMethodCalls_Unsupported();
        TestInterop();
        TestStaticFields();
        TestPreInitializedArray();
        TestMultiDimmArray();
        TestGenericVirtualMethod();
        TestMovedGenericVirtualMethod();
        TestInstanceFields();
        TestInstanceFieldsWithLayout();
        TestInheritingFromGrowingBase();
        TestGrowingStruct();
        TestChangingStruct();
        TestChangingHFAStruct();
        TestGetType();
#if CORECLR
        TestMultipleLoads();
#endif
        TestFieldLayoutNGenMixAndMatch();
    }

    static int Main()
    {
        // Code compiled by LLILC jit can't catch exceptions yet so the tests
        // don't throw them if LLILC jit is enabled. This should be removed once
        // exception catching is supported by LLILC jit.
        string AltJitName = System.Environment.GetEnvironmentVariable("complus_altjitname");
        LLILCJitEnabled =
            ((AltJitName != null) && AltJitName.ToLower().StartsWith("llilcjit") &&
             ((System.Environment.GetEnvironmentVariable("complus_altjit") != null) ||
              (System.Environment.GetEnvironmentVariable("complus_altjitngen") != null)));

        // Run all tests 3x times to exercise both slow and fast paths work
        for (int i = 0; i < 3; i++)
            RunAllTests();

        Console.WriteLine("PASSED");
        // Exit code 100 is the CoreCLR test-harness convention for "pass".
        return Assert.HasAssertFired ? 1 : 100;
    }

    static bool LLILCJitEnabled;
}
// Copyright 2020 The Tilt Brush Authors
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
//      http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

using System.Collections.Generic;
using UnityEngine;

namespace TiltBrush {

/// Base class for brushes that generate their geometry from a list of knots
/// (control points plus per-knot smoothing and geometry bookkeeping).
/// Subclasses implement ControlPointsChanged() to (re)build triangles for the
/// dirty tail of the knot list.
public abstract class GeometryBrush : BaseBrushScript {
  // TODO: change to class?
  public struct Knot {
    public PointerManager.ControlPoint point;
    /// Position, smoothed with a kernel of (.25, .5, .25)
    public Vector3 smoothedPos;
    /// Constant, associated with this knot
    public float smoothedPressure;
    /// Distance from previous knot to this knot, or 0 (if first).
    /// Mutated during geometry generation.
    public float length;

    /// Mutable, only valid if HasGeometry.
    /// Some subclasses choose to use this instead of nRight/nSurface
    /// TODO: remove nRight, nSurface and use this instead.
    public Quaternion qFrame;
    /// Mutable, associated with prev knot and this knot.
    /// Only valid if HasGeometry; unit-length.
    public Vector3 nRight;
    /// Mutable, associated with prev knot and this knot.
    /// Only valid if HasGeometry; unit-length.
    public Vector3 nSurface;

    /// First triangle used by this knot.
    /// Multiply by 3 and use as an index into m_geometry.m_tris2.
    /// Invariants:
    /// 0. knots[0].iTri == 0
    ///    This means that there's no geometry before the first knot.
    /// 0b. knots[0].nTri == 0
    ///    The first knot !HasGeometry. It's unclear whether we can relax this; but
    ///    currently, geometry generation can never happen for knot 0 because all
    ///    generators assume that there is a previous knot that they can use to create
    ///    a coordinate frame. Also, brushes likely assume that the 1st knot is the start
    ///    or a stroke (ie !prev.HasGeometry).
    /// 1. prev.iTri <= iTri <= prev.iTri + prev.nTri
    ///    This means the first triangle(s) might be shared with the prev knot,
    ///    but not further back than that.
    /// 2. iTri + nTri >= prev.iTri + prev.nTri
    ///    This means that the current knot's geometry doesn't end somewhere
    ///    in the middle of the last knot's geometry. That would have the effect
    ///    of shortening the stroke. The amount of geometry must be non-decreasing.
    /// 3. iTri == prev.iTri + prev.nTri when !HasGeometry
    ///    This means they point to the very end of the previous strip.
    public int iTri;
    /// First vertex used by this knot.
    /// Use as an index into m_geometry.m_verts2.
    /// Similar invariants to iTri (qv)
    public ushort iVert;
    /// Number of vertices/triangles in this chunk of geometry.
    /// Includes geometry shared with adjacent knots.
    public ushort nTri, nVert;

    /// Mutable, only valid HasGeometry.
    /// True if previous knot !HasGeometry
    public bool startsGeometry;
    /// Mutable, only valid HasGeometry.
    /// True if next knot !HasGeometry
    public bool endsGeometry;

    /// true iff there is geometry that ends at point.m_Pos.
    /// Used to detect the start/end of segments in a stroke.
    public bool HasGeometry {
      get { return nVert > 0; }
    }

    /// Because qFrame is only valid if HasGeometry
    public Quaternion? Frame {
      get { return HasGeometry ? (Quaternion?)qFrame : null; }
    }
  }

  // Distance for pressure to lerp to ~90% of its instantaneous value. Value in meters.
  static float kPressureSmoothWindowMeters_PS = .20f;

  /// Upper bound on the number of extra verts needed to add a new knot.
  protected int m_UpperBoundVertsPerKnot;

  /// If set, flipside geometry is automatically generated if you use
  /// the SetXxx APIs. Top- and bottom-side verts and tris are adjacent
  /// to each other in the vert and tri arrays (halving the effective
  /// size of the post-transform cache)
  protected bool m_bDoubleSided;
  protected readonly bool m_bSmoothPositions;
  protected bool m_bM11Compatibility;

  /// Number of verts that we try to stay under.
  /// When this is exceeded, the stroke is stopped and a new one started.
  protected readonly int m_SoftVertexLimit;

  /// Number of sides: 1 if single sided, 2 if double sided.
  /// Used for convenient indexing into vert and tri data
  protected int NS;

  protected List<Knot> m_knots;
  protected GeometryPool m_geometry;
  protected int m_CachedNumVerts;  // for use after we free m_geometry
  protected int m_CachedNumTris;

  /// The first control point that hasn't had geometry created for it yet,
  /// or null if geometry is fully up-to-date.
  protected int? m_FirstChangedControlPoint;

  public int NumVerts {
    get { return (m_geometry != null) ? m_geometry.m_Vertices.Count : m_CachedNumVerts; }
  }

  public int NumTris {
    get { return (m_geometry != null) ? m_geometry.m_Tris.Count : m_CachedNumTris; }
  }

  /// bDoubleSided
  ///   If set, the Set{Tri,Vert,UV,Tangent} APIs will automatically
  ///   create duplicate geometry for the other side. If you want to
  ///   take this setting from the brush descriptor, see
  ///   SetDoubleSided()
  ///
  /// upperBoundVertsPerKnot
  ///   Upper bound on (single-sided) number of verts needed to add
  ///   a new knot
  public GeometryBrush(
      bool bCanBatch,
      int upperBoundVertsPerKnot,
      bool bDoubleSided,
      bool bSmoothPositions = true)
      : base(bCanBatch: bCanBatch) {
    m_bDoubleSided = bDoubleSided;
    NS = (bDoubleSided ? 2 : 1);
    // Stored per-knot bound is double-sided-adjusted.
    m_UpperBoundVertsPerKnot = NS * upperBoundVertsPerKnot;
    m_knots = new List<Knot>();
    // TODO: make configurable by subclasses
    m_SoftVertexLimit = 9000;
    m_bSmoothPositions = bSmoothPositions;
  }

  // Useful for sanity-checking your new code, especially DecayBrush()
  protected void CheckKnotInvariants() {
#if DEBUG
    Knot k0 = m_knots[0];
    Debug.Assert(k0.iTri == 0 && k0.iVert == 0, "Invariant 0");
    Debug.Assert(k0.nTri == 0 && k0.nVert == 0, "Invariant 0b");
    for (int i = 1; i < m_knots.Count; ++i) {
      Knot prev = m_knots[i-1];
      Knot cur = m_knots[i];
      Debug.AssertFormat(prev.iTri <= cur.iTri, "starts before prev at {0}", i);
      Debug.AssertFormat(cur.iTri <= prev.iTri + prev.nTri, "non-contiguous at {0}", i);
      Debug.AssertFormat(cur.iTri + cur.nTri >= prev.iTri + prev.nTri, "shorten at {0}", i);
      Debug.AssertFormat(prev.iVert <= cur.iVert, "starts before prev at {0}", i);
      Debug.AssertFormat(cur.iVert <= prev.iVert + prev.nVert, "non-contiguous at {0}", i);
      Debug.AssertFormat(cur.iVert + cur.nVert >= prev.iVert + prev.nVert, "shorten at {0}", i);
    }
#endif
  }

  /// Helper for DecayBrush(); removes initial knots and their associated geometry.
  protected void RemoveInitialKnots(int knotsToShift) {
    if (knotsToShift == 0) { return; }
    m_knots.RemoveRange(0, knotsToShift);
    if (m_FirstChangedControlPoint.HasValue) {
      // Clamp to 1 because knot 0 never owns geometry (Invariant 0b).
      m_FirstChangedControlPoint = Mathf.Max(m_FirstChangedControlPoint.Value - knotsToShift, 1);
    }

    // Shift knots' pointers into geometry
    // Invariant 0 says 0th knot must start at i{Vert,Tri} = 0
    // Invariant 0b says that the 0th knot has no geometry
    ushort vertShift;
    int triShift;
    {
      Knot k0 = m_knots[0];
      vertShift = (ushort) (k0.iVert + k0.nVert);
      triShift = k0.iTri + k0.nTri;
      k0.iVert = 0; k0.nVert = 0;
      k0.iTri = 0;  k0.nTri = 0;
      m_knots[0] = k0;
    }
    for (int k = 1; k < m_knots.Count; k++) {
      Knot dupe = m_knots[k];
      // Invariant 1 says that vertShift is <= cur.iVert, so this is safe.
      dupe.iVert = (ushort) (m_knots[k].iVert - vertShift);
      dupe.iTri = m_knots[k].iTri - triShift;
      m_knots[k] = dupe;
    }
    CheckKnotInvariants();

    // Shift geometry
    m_geometry.ShiftForward(vertShift, triShift);
  }

  //
  // GeometryBrush API
  //

  /// Called when control points are changed and/or added.
  /// Subclass should assume that all knots >= iKnot have changed.
  /// It should regenerate geometry, indices, etc.
  /// iKnot will always be > 0, so there is guaranteed to always be a previous knot.
  abstract protected void ControlPointsChanged(int iKnot);

  /// Used to work out the distance of a point from a knot. Default implementation just uses the
  /// straight-line distance between the two.
  protected virtual float DistanceFromKnot(int knotIndex, Vector3 pos) {
    return (pos - m_knots[knotIndex].point.m_Pos).magnitude;
  }

  //
  // BaseBrushScript override API
  //

  override public bool AlwaysRebuildPreviewBrush() {
    return true;
  }

  override public int GetNumUsedVerts() { return NumVerts; }

  override public bool IsOutOfVerts() {
    // Check if we have room for one more stride's worth of verts.
    // This is undocumented, but in Unity, 0xffff is an invalid index
    int LAST_VALID_INDEX = 0xfffe;
    return (GetNumUsedVerts() + m_UpperBoundVertsPerKnot)-1 > LAST_VALID_INDEX;
  }

  override public bool ShouldCurrentLineEnd() {
    return (IsOutOfVerts() || NumVerts > m_SoftVertexLimit);
  }

  override public bool ShouldDiscard() {
    // TODO: This should discard if the last stroke is too short (ie., 3 or fewer knots
    // since two knots are automatically added at the beginning).
    return GetNumUsedVerts() <= 0;
  }

  override public void ResetBrushForPreview(TrTransform localPointerXf) {
    base.ResetBrushForPreview(localPointerXf);
    m_knots.Clear();
    Vector3 pos = localPointerXf.translation;
    Quaternion ori = localPointerXf.rotation;
    Knot knot = new Knot {
      point = new PointerManager.ControlPoint {
        // TODO: better value for pressure?
        m_Pos = pos, m_Orient = ori, m_Pressure = 1
      },
      length = 0,
      smoothedPos = pos
    };
    // Seed with two identical knots; knot 0 never carries geometry.
    m_knots.Add(knot);
    m_knots.Add(knot);
  }

  /// Set m_bDoubleSided according to settings in the descriptor
  protected void SetDoubleSided(TiltBrush.BrushDescriptor desc) {
    // Yuck. This class was authored assuming all this stuff was readonly,
    // which makes it awkward to now be able to set it from the descriptor
    if (desc.m_RenderBackfaces && !m_bDoubleSided) {
      // enable
      m_bDoubleSided = true;
      NS *= 2;
      m_UpperBoundVertsPerKnot *= 2;
    } else if (!desc.m_RenderBackfaces && m_bDoubleSided) {
      // disable
      m_bDoubleSided = false;
      NS /= 2;
      m_UpperBoundVertsPerKnot /= 2;
    }
  }

  protected override void InitBrush(BrushDescriptor desc, TrTransform localPointerXf) {
    base.InitBrush(desc, localPointerXf);
    m_bM11Compatibility = desc.m_M11Compatibility;
    m_geometry = GeometryPool.Allocate();
    m_knots.Clear();
    Vector3 pos = localPointerXf.translation;
    Quaternion ori = localPointerXf.rotation;
    Knot knot = new Knot {
      point = new PointerManager.ControlPoint {
        m_Pos = pos, m_Orient = ori, m_Pressure = 1
      },
      length = 0,
      smoothedPos = pos
    };
    // Seed with two identical knots; knot 0 never carries geometry.
    m_knots.Add(knot);
    m_knots.Add(knot);
    MeshFilter mf = GetComponent<MeshFilter>();
    mf.mesh = null;  // Force a new, empty, mf-owned mesh to be generated
    mf.mesh.MarkDynamic();
  }

  override public void DebugGetGeometry(
      out Vector3[] verts, out int nVerts,
      out Vector2[] uv0s,
      out int[] tris, out int nTris) {
    verts = m_geometry.m_Vertices.GetBackingArray();
    nVerts = m_geometry.m_Vertices.Count;
    if (m_geometry.Layout.texcoord0.size == 2) {
      uv0s = m_geometry.m_Texcoord0.v2.GetBackingArray();
    } else {
      uv0s = null;
    }
    tris = m_geometry.m_Tris.GetBackingArray();
    nTris = m_geometry.m_Tris.Count;
  }

  override public void FinalizeSolitaryBrush() {
    var mesh = GetComponent<MeshFilter>().mesh;
    m_geometry.CopyToMesh(mesh);
    // Cache counts before releasing the pool; NumVerts/NumTris fall back to these.
    m_CachedNumVerts = NumVerts;
    m_CachedNumTris = NumTris;
    GeometryPool.Free(m_geometry);
    m_geometry = null;
    mesh.RecalculateBounds();
  }

  override public BatchSubset FinalizeBatchedBrush() {
    var mgr = this.Canvas.BatchManager;
    return mgr.CreateSubset(m_Desc, m_geometry);
  }

  /// Don't necessarily have to use the master's information to update the mesh.
  override public void ApplyChangesToVisuals() {
    if (! m_geometry.VerifySizes()) {
      return;
    }
    if (m_FirstChangedControlPoint != null) {
      try {
        StatelessRng.BeginSaltReuseCheck();
        ControlPointsChanged(m_FirstChangedControlPoint.Value);
      } finally {
        StatelessRng.EndSaltReuseCheck();
      }
      m_FirstChangedControlPoint = null;
    }
    var mesh = GetComponent<MeshFilter>().mesh;
    m_geometry.CopyToMesh(mesh);
    mesh.RecalculateBounds();
  }

  override protected void InitUndoClone(GameObject clone) {
    var rMeshScript = clone.AddComponent<UndoMeshAnimScript>();
    rMeshScript.Init();
  }

  override protected bool UpdatePositionImpl(Vector3 pos, Quaternion ori, float pressure) {
    Debug.Assert(m_knots.Count >= 2);
    // XXX: we want to be passed the control point instead
    int iUpdate = m_knots.Count-1;
    Knot updated = m_knots[iUpdate];
    updated.point.m_Pos = pos;
    updated.point.m_Orient = ori;
    updated.point.m_Pressure = pressure;
    updated.point.m_TimestampMs = (uint)(App.Instance.CurrentSketchTime * 1000);
    updated.smoothedPos = pos;

    if (iUpdate < 2) {
      // Retroactively update the 0th knot with better pressure data.
      float initialPressure = m_bM11Compatibility || m_PreviewMode ? 0 : pressure;
      Knot initialKnot = m_knots[0];
      initialKnot.point.m_Pressure = initialPressure;
      initialKnot.smoothedPressure = initialPressure;
      m_knots[0] = initialKnot;
    } else if (m_bSmoothPositions) {
      // (.25, .5, .25) kernel applied to the previous knot's position.
      Knot middle = m_knots[iUpdate-1];
      Vector3 v0 = m_knots[iUpdate-2].point.m_Pos;
      Vector3 v1 = middle.point.m_Pos;
      Vector3 v2 = pos;
      middle.smoothedPos = (v0 + 2*v1 + v2) / 4;
      m_knots[iUpdate-1] = middle;
    }

    if (m_bSmoothPositions) {
      ApplySmoothing(m_knots[iUpdate - 1], ref updated);
    } else {
      updated.smoothedPressure = updated.point.m_Pressure;
    }
    m_knots[iUpdate] = updated;

    if (m_FirstChangedControlPoint.HasValue) {
      m_FirstChangedControlPoint = Mathf.Min(m_FirstChangedControlPoint.Value, iUpdate);
    } else {
      m_FirstChangedControlPoint = iUpdate;
    }

    // Keep the knot only once it has moved far enough from its predecessor.
    float lastLength = DistanceFromKnot(iUpdate - 1, updated.point.m_Pos);
    bool keep = (lastLength > GetSpawnInterval(updated.smoothedPressure));
    // TODO: change this to the way PointerScript keeps control points
    if (keep) {
      // Start a fresh (empty-geometry) knot at the end of the current one.
      Knot dupe = updated;
      dupe.iVert = (ushort)(updated.iVert + updated.nVert);
      dupe.nVert = 0;
      dupe.iTri = updated.iTri + updated.nTri;
      dupe.nTri = 0;
      m_knots.Add(dupe);
    }
    return keep;
  }

  //
  // Geometry-creation helpers
  //

  /// Set triangle and bottomside triangle.
  ///   iTri, iVert  pass knot.iTri, knot.iVert
  ///   tp           index of triangle pair
  ///   vp0, vp1, vp2  vertex pairs in that solid
  protected void SetTri(int iTri, int iVert, int tp, int vp0, int vp1, int vp2) {
    var tris = m_geometry.m_Tris;
    int i = (iTri + tp * NS) * 3;
    tris[i    ] = iVert + vp0 * NS;
    tris[i + 1] = iVert + vp1 * NS;
    tris[i + 2] = iVert + vp2 * NS;
    if (m_bDoubleSided) {
      // Backside triangle uses reversed winding and the odd-indexed verts.
      tris[i + 3] = iVert + vp2 * NS + 1;
      tris[i + 4] = iVert + vp1 * NS + 1;
      tris[i + 5] = iVert + vp0 * NS + 1;
    }
  }

  /// Set position, normal, color of the vertex pair at offset.
  ///   iVert  pass knot.iVert
  ///   vp     index of vert pair
  protected void SetVert(int iVert, int vp, Vector3 v, Vector3 n, Color32 c, float alpha) {
    c.a = (byte)(alpha * 255);
    int i = iVert + vp * NS;
    m_geometry.m_Vertices[i] = v;
    m_geometry.m_Normals[i] = n;
    m_geometry.m_Colors[i] = c;
    if (m_bDoubleSided) {
      // Backside vert shares the position but flips the normal.
      m_geometry.m_Vertices[i + 1] = v;
      m_geometry.m_Normals[i + 1] = -n;
      m_geometry.m_Colors[i + 1] = c;
    }
  }

  /// Set texcoord0 of the vertex pair at offset.
  ///   iVert  pass knot.iVert
  ///   vp     index of vert pair
  protected void SetUv0(int iVert, int vp, Vector2 data) {
    int i = iVert + vp * NS;
    m_geometry.m_Texcoord0.v2[i] = data;
    if (m_bDoubleSided) {
      m_geometry.m_Texcoord0.v2[i + 1] = data;
    }
  }

  /// Set texcoord0 of the vertex pair at offset.
  ///   iVert  pass knot.iVert
  ///   vp     index of vert pair
  protected void SetUv0(int iVert, int vp, Vector4 data) {
    int i = iVert + vp * NS;
    m_geometry.m_Texcoord0.v4[i] = data;
    if (m_bDoubleSided) {
      m_geometry.m_Texcoord0.v4[i + 1] = data;
    }
  }

  /// Set texcoord1 of the vertex pair at offset.
  ///   iVert  pass knot.iVert
  ///   vp     index of vert pair
  protected void SetUv1(int iVert, int vp, Vector3 data) {
    int i = iVert + vp * NS;
    m_geometry.m_Texcoord1.v3[i] = data;
    if (m_bDoubleSided) {
      m_geometry.m_Texcoord1.v3[i + 1] = data;
    }
  }

  /// Set texcoord1 of the vertex pair at offset.
  ///   iVert  pass knot.iVert
  ///   vp     index of vert pair
  protected void SetUv1(int iVert, int vp, Vector4 data) {
    int i = iVert + vp * NS;
    m_geometry.m_Texcoord1.v4[i] = data;
    if (m_bDoubleSided) {
      m_geometry.m_Texcoord1.v4[i + 1] = data;
    }
  }

  /// Set tangent of the vertex pair at offset.
  /// It will be made orthogonal to the existing normal.
  ///   iVert  pass knot.iVert
  ///   vp     index of vert pair
  protected void SetTangent(int iVert, int vp, Vector3 tangent, float w=1) {
    int i = iVert + vp * NS;
    Vector3 normal = m_geometry.m_Normals[i];
    // Gram-Schmidt: remove the normal component, then renormalize.
    Vector4 orthoTangent = (tangent - Vector3.Dot(tangent, normal) * normal).normalized;
    orthoTangent.w = w;
    m_geometry.m_Tangents[i] = orthoTangent;
    if (m_bDoubleSided) {
      orthoTangent.w = -w;
      m_geometry.m_Tangents[i + 1] = orthoTangent;
    }
  }

  /// Set size of geometry arrays to exactly fit what the knots need.
  /// Helper for ControlPointsChanged.
  protected void ResizeGeometry() {
    Knot k = m_knots[m_knots.Count-1];
    int nVerts = k.iVert + k.nVert;
    m_geometry.NumVerts = nVerts;
    int nTris = k.iTri + k.nTri;
    m_geometry.m_Tris.SetCount(nTris * 3);
  }

  /// Recompute tangents for all triangles. Does no blending.
  /// Verts that are part of more than one triangle use an
  /// arbitrarily-chosen triangle's tangent.
  ///
  /// Because it's brute-force, intended mainly for prototyping.
  protected void BruteForceRecomputeTangents(int iKnot0, List<Vector2> uvs) {
    for (int iTriIndex = m_knots[iKnot0].iTri*3;
         iTriIndex < m_geometry.m_Tris.Count;
         iTriIndex += 3) {
      int iv0 = m_geometry.m_Tris[iTriIndex    ];
      int iv1 = m_geometry.m_Tris[iTriIndex + 1];
      int iv2 = m_geometry.m_Tris[iTriIndex + 2];
      Vector3 vS, vT;
      ComputeST(m_geometry.m_Vertices, uvs, 0, iv0, iv1, iv2, out vS, out vT);
      SetTangent(0, iv0, vS);
      SetTangent(0, iv1, vS);
      SetTangent(0, iv2, vS);
    }
  }

  //
  // Internal utilities
  //

  // Update values in next that need smoothing (currently only smoothedPressure)
  protected void ApplySmoothing(Knot prev, ref Knot next) {
    float distance = Vector3.Distance(prev.point.m_Pos, next.point.m_Pos);
    float pressureSmoothWindowMeters_PS = m_bM11Compatibility
        ? 0.1f : kPressureSmoothWindowMeters_PS;
    float window = pressureSmoothWindowMeters_PS * App.METERS_TO_UNITS * POINTER_TO_LOCAL;
    // Exponential smoothing: k -> 0 as distance grows, weighting the new sample more.
    float k = Mathf.Pow(.1f, distance / window);
    next.smoothedPressure = k * prev.smoothedPressure + (1-k) * next.point.m_Pressure;
  }
}
}  // namespace TiltBrush
using System;
using System.Collections.Generic;
using System.Diagnostics;
using System.Linq;
using System.Linq.Expressions;
using System.Reflection;
using AutoMapper.Configuration;
using AutoMapper.Configuration.Conventions;

namespace AutoMapper
{
    /// <summary>
    /// Immutable snapshot of one profile's effective configuration: per-profile
    /// settings (falling back to the global <see cref="IConfiguration"/> where a
    /// profile leaves them unset), plus the type map configurations declared in it.
    /// </summary>
    [DebuggerDisplay("{Name}")]
    public class ProfileMap
    {
        private readonly IEnumerable<ITypeMapConfiguration> _typeMapConfigs;
        private readonly IEnumerable<ITypeMapConfiguration> _openTypeMapConfigs;
        // Lazily-built, cached reflection info per type.
        private readonly LockingConcurrentDictionary<Type, TypeDetails> _typeDetails;

        public ProfileMap(IProfileConfiguration profile)
            : this(profile, null)
        {
        }

        public ProfileMap(IProfileConfiguration profile, IConfiguration configuration)
        {
            _typeDetails = new LockingConcurrentDictionary<Type, TypeDetails>(TypeDetailsFactory);

            Name = profile.ProfileName;
            // Each setting: profile value wins, then global configuration, then a hard default.
            AllowNullCollections = profile.AllowNullCollections ?? configuration?.AllowNullCollections ?? false;
            AllowNullDestinationValues = profile.AllowNullDestinationValues ?? configuration?.AllowNullDestinationValues ?? true;
            EnableNullPropagationForQueryMapping = profile.EnableNullPropagationForQueryMapping ?? configuration?.EnableNullPropagationForQueryMapping ?? false;
            ConstructorMappingEnabled = profile.ConstructorMappingEnabled ?? configuration?.ConstructorMappingEnabled ?? true;
            ShouldMapField = profile.ShouldMapField ?? configuration?.ShouldMapField ?? (p => p.IsPublic());
            ShouldMapProperty = profile.ShouldMapProperty ?? configuration?.ShouldMapProperty ?? (p => p.IsPublic());
            ShouldMapMethod = profile.ShouldMapMethod ?? configuration?.ShouldMapMethod ?? (p => true);
            ShouldUseConstructor = profile.ShouldUseConstructor ?? configuration?.ShouldUseConstructor ?? (c => true);
            ValueTransformers = profile.ValueTransformers.Concat(configuration?.ValueTransformers ?? Enumerable.Empty<ValueTransformerConfiguration>()).ToArray();

            MemberConfigurations = profile.MemberConfigurations.Concat(configuration?.MemberConfigurations ?? Enumerable.Empty<IMemberConfiguration>()).ToArray();
            // Push the profile's naming conventions into the primary member configuration.
            MemberConfigurations.FirstOrDefault()?.AddMember<NameSplitMember>(_ => _.SourceMemberNamingConvention = profile.SourceMemberNamingConvention);
            MemberConfigurations.FirstOrDefault()?.AddMember<NameSplitMember>(_ => _.DestinationMemberNamingConvention = profile.DestinationMemberNamingConvention);
            GlobalIgnores = profile.GlobalIgnores.Concat(configuration?.GlobalIgnores ?? Enumerable.Empty<string>()).ToArray();
            SourceExtensionMethods = profile.SourceExtensionMethods.Concat(configuration?.SourceExtensionMethods ?? Enumerable.Empty<MethodInfo>()).ToArray();
            AllPropertyMapActions = profile.AllPropertyMapActions.Concat(configuration?.AllPropertyMapActions ?? Enumerable.Empty<Action<PropertyMap, IMemberConfigurationExpression>>()).ToArray();
            AllTypeMapActions = profile.AllTypeMapActions.Concat(configuration?.AllTypeMapActions ?? Enumerable.Empty<Action<TypeMap, IMappingExpression>>()).ToArray();

            // Flatten the prefix/postfix lists configured via PrePostfixName conventions.
            Prefixes =
                profile.MemberConfigurations
                    .Select(m => m.NameMapper)
                    .SelectMany(m => m.NamedMappers)
                    .OfType<PrePostfixName>()
                    .SelectMany(m => m.Prefixes)
                    .ToArray();

            Postfixes =
                profile.MemberConfigurations
                    .Select(m => m.NameMapper)
                    .SelectMany(m => m.NamedMappers)
                    .OfType<PrePostfixName>()
                    .SelectMany(m => m.Postfixes)
                    .ToArray();

            _typeMapConfigs = profile.TypeMapConfigs.ToArray();
            _openTypeMapConfigs = profile.OpenTypeMapConfigs.ToArray();
        }

        public bool AllowNullCollections { get; }
        public bool AllowNullDestinationValues { get; }
        public bool ConstructorMappingEnabled { get; }
        public bool EnableNullPropagationForQueryMapping { get; }
        public string Name { get; }
        public Func<FieldInfo, bool> ShouldMapField { get; }
        public Func<PropertyInfo, bool> ShouldMapProperty { get; }
        public Func<MethodInfo, bool> ShouldMapMethod { get; }
        public Func<ConstructorInfo, bool> ShouldUseConstructor { get; }
        public IEnumerable<Action<PropertyMap, IMemberConfigurationExpression>> AllPropertyMapActions { get; }
        public IEnumerable<Action<TypeMap, IMappingExpression>> AllTypeMapActions { get; }
        public IEnumerable<string> GlobalIgnores { get; }
        public IEnumerable<IMemberConfiguration> MemberConfigurations { get; }
        public IEnumerable<MethodInfo> SourceExtensionMethods { get; }
        public IEnumerable<string> Prefixes { get; }
        public IEnumerable<string> Postfixes { get; }
        public IEnumerable<ValueTransformerConfiguration> ValueTransformers { get; }

        public TypeDetails CreateTypeDetails(Type type) => _typeDetails.GetOrAdd(type);

        private TypeDetails TypeDetailsFactory(Type type) => new TypeDetails(type, this);

        /// Builds and registers a TypeMap for every configured pair (and its reverse map, if any).
        public void Register(IConfigurationProvider configurationProvider)
        {
            foreach (var config in _typeMapConfigs)
            {
                BuildTypeMap(configurationProvider, config);

                if (config.ReverseTypeMap != null)
                {
                    BuildTypeMap(configurationProvider, config.ReverseTypeMap);
                }
            }
        }

        /// Applies global actions and inheritance wiring to every registered TypeMap.
        public void Configure(IConfigurationProvider configurationProvider)
        {
            foreach (var typeMapConfiguration in _typeMapConfigs)
            {
                Configure(typeMapConfiguration, configurationProvider);

                if (typeMapConfiguration.ReverseTypeMap != null)
                {
                    Configure(typeMapConfiguration.ReverseTypeMap, configurationProvider);
                }
            }
        }

        private void BuildTypeMap(IConfigurationProvider configurationProvider, ITypeMapConfiguration config)
        {
            var typeMap = TypeMapFactory.CreateTypeMap(config.SourceType, config.DestinationType, this);

            config.Configure(typeMap);

            configurationProvider.RegisterTypeMap(typeMap);
        }

        private void Configure(ITypeMapConfiguration typeMapConfiguration, IConfigurationProvider configurationProvider)
        {
            var typeMap = configurationProvider.FindTypeMapFor(typeMapConfiguration.Types);
            Configure(typeMap, configurationProvider);
        }

        private void Configure(TypeMap typeMap, IConfigurationProvider configurationProvider)
        {
            // Run profile-wide ForAllMaps actions...
            foreach (var action in AllTypeMapActions)
            {
                var expression = new MappingExpression(typeMap.Types, typeMap.ConfiguredMemberList);

                action(typeMap, expression);

                expression.Configure(typeMap);
            }
            // ...and profile-wide ForAllPropertyMaps actions.
            foreach (var action in AllPropertyMapActions)
            {
                foreach (var propertyMap in typeMap.PropertyMaps)
                {
                    var memberExpression = new MappingExpression.MemberConfigurationExpression(propertyMap.DestinationMember, typeMap.SourceType);

                    action(propertyMap, memberExpression);

                    memberExpression.Configure(typeMap);
                }
            }

            ApplyBaseMaps(typeMap, typeMap, configurationProvider);
            ApplyDerivedMaps(typeMap, typeMap, configurationProvider);
            ApplyMemberMaps(typeMap, configurationProvider);
        }

        /// Instantiates a closed TypeMap from an open-generic configuration, closing any
        /// generic TypeConverter / DestinationTypeOverride with the concrete type arguments.
        public TypeMap CreateClosedGenericTypeMap(ITypeMapConfiguration openMapConfig, TypePair closedTypes, IConfigurationProvider configurationProvider)
        {
            var closedMap = TypeMapFactory.CreateTypeMap(closedTypes.SourceType, closedTypes.DestinationType, this);
            closedMap.IsClosedGeneric = true;
            openMapConfig.Configure(closedMap);
            Configure(closedMap, configurationProvider);
            if(closedMap.TypeConverterType != null)
            {
                // Collect the concrete type arguments from whichever side(s) were open.
                var typeParams =
                    (openMapConfig.SourceType.IsGenericTypeDefinition() ? closedTypes.SourceType.GetGenericArguments() : Type.EmptyTypes)
                        .Concat
                        (openMapConfig.DestinationType.IsGenericTypeDefinition() ? closedTypes.DestinationType.GetGenericArguments() : Type.EmptyTypes);

                var neededParameters = closedMap.TypeConverterType.GetGenericParameters().Length;
                closedMap.TypeConverterType = closedMap.TypeConverterType.MakeGenericType(typeParams.Take(neededParameters).ToArray());
            }
            if(closedMap.DestinationTypeOverride?.IsGenericTypeDefinition() == true)
            {
                var neededParameters = closedMap.DestinationTypeOverride.GetGenericParameters().Length;
                closedMap.DestinationTypeOverride = closedMap.DestinationTypeOverride.MakeGenericType(closedTypes.DestinationType.GetGenericArguments().Take(neededParameters).ToArray());
            }
            return closedMap;
        }

        /// Finds the open-generic configuration matching a closed pair, preferring the
        /// most specific destination (then source) match.
        public ITypeMapConfiguration GetGenericMap(TypePair closedTypes)
        {
            return _openTypeMapConfigs
                .SelectMany(tm => tm.ReverseTypeMap == null ? new[] { tm } : new[] { tm, tm.ReverseTypeMap })
                .Where(tm =>
                    tm.Types.SourceType.GetGenericTypeDefinitionIfGeneric() == closedTypes.SourceType.GetGenericTypeDefinitionIfGeneric() &&
                    tm.Types.DestinationType.GetGenericTypeDefinitionIfGeneric() == closedTypes.DestinationType.GetGenericTypeDefinitionIfGeneric())
                .OrderByDescending(tm => tm.DestinationType == closedTypes.DestinationType) // Favor more specific destination matches,
                .ThenByDescending(tm => tm.SourceType == closedTypes.SourceType) // then more specific source matches
                .FirstOrDefault();
        }

        // Walks IncludedBaseTypes transitively, wiring inheritance both ways.
        private void ApplyBaseMaps(TypeMap derivedMap, TypeMap currentMap, IConfigurationProvider configurationProvider)
        {
            foreach (var baseMap in configurationProvider.GetIncludedTypeMaps(currentMap.IncludedBaseTypes))
            {
                baseMap.IncludeDerivedTypes(currentMap.SourceType, currentMap.DestinationType);
                derivedMap.AddInheritedMap(baseMap);
                ApplyBaseMaps(derivedMap, baseMap, configurationProvider);
            }
        }

        private void ApplyMemberMaps(TypeMap mainMap, IConfigurationProvider configurationProvider)
        {
            AddMemberMaps(mainMap.IncludedMembers, mainMap, configurationProvider);
            AddMemberMaps(mainMap.GetUntypedIncludedMembers(), mainMap, configurationProvider);
        }

        private void AddMemberMaps(LambdaExpression[] includedMembers, TypeMap mainMap, IConfigurationProvider configurationProvider)
        {
            // Pair each IncludeMembers expression with the TypeMap for its member type.
            foreach(var includedMember in configurationProvider.GetIncludedTypeMaps(includedMembers.Select(m => new TypePair(m.Body.Type, mainMap.DestinationType)))
                .Zip(includedMembers, (memberMap, expression) => new IncludedMember(memberMap, expression)))
            {
                mainMap.AddMemberMap(includedMember);
            }
        }

        // Walks IncludedDerivedTypes transitively, wiring inheritance both ways.
        private void ApplyDerivedMaps(TypeMap baseMap, TypeMap typeMap, IConfigurationProvider configurationProvider)
        {
            foreach (var derivedMap in configurationProvider.GetIncludedTypeMaps(typeMap.IncludedDerivedTypes))
            {
                derivedMap.IncludeBaseTypes(typeMap.SourceType, typeMap.DestinationType);
                derivedMap.AddInheritedMap(baseMap);
                ApplyDerivedMaps(baseMap, derivedMap, configurationProvider);
            }
        }

        /// Builds a ConstructorMap by resolving each destination-ctor parameter against the
        /// source type; returns whether every parameter could be resolved.
        public bool MapDestinationCtorToSource(TypeMap typeMap, ConstructorInfo destCtor, TypeDetails sourceTypeInfo, List<ICtorParameterConfiguration> ctorParamConfigurations)
        {
            var ctorParameters = destCtor.GetParameters();

            if (ctorParameters.Length == 0 || !ConstructorMappingEnabled)
                return false;

            var ctorMap = new ConstructorMap(destCtor, typeMap);

            foreach (var parameter in ctorParameters)
            {
                var resolvers = new LinkedList<MemberInfo>();

                // NOTE(review): parameter.GetType() is the runtime type of the ParameterInfo
                // object (typeof(ParameterInfo)), not the parameter's declared type — this
                // looks like it was meant to be parameter.ParameterType. Name-based matching
                // appears to carry the resolution regardless; confirm before changing.
                var canResolve = MapDestinationPropertyToSource(sourceTypeInfo, destCtor.DeclaringType, parameter.GetType(), parameter.Name, resolvers);
                if ((!canResolve && parameter.IsOptional) || ctorParamConfigurations.Any(c => c.CtorParamName == parameter.Name))
                {
                    canResolve = true;
                }

                ctorMap.AddParameter(parameter, resolvers.ToArray(), canResolve);
            }

            typeMap.ConstructorMap = ctorMap;

            return ctorMap.CanResolve;
        }

        /// Asks each member configuration in turn to resolve a destination member name
        /// against the source type; fills 'members' with the resolver chain on success.
        public bool MapDestinationPropertyToSource(TypeDetails sourceTypeInfo, Type destType, Type destMemberType, string destMemberInfo, LinkedList<MemberInfo> members)
        {
            if (string.IsNullOrEmpty(destMemberInfo))
            {
                return false;
            }
            return MemberConfigurations.Any(_ => _.MapDestinationPropertyToSource(this, sourceTypeInfo, destType, destMemberType, destMemberInfo, members));
        }
    }

    /// <summary>
    /// Pairs a member TypeMap with the IncludeMembers expression that selected it.
    /// </summary>
    public readonly struct IncludedMember
    {
        public IncludedMember(TypeMap typeMap, LambdaExpression memberExpression)
        {
            TypeMap = typeMap;
            MemberExpression = memberExpression;
        }
        public TypeMap TypeMap { get; }
        public LambdaExpression MemberExpression { get; }
    }
}
// Copyright (c) Microsoft. All rights reserved.
// Licensed under the MIT license. See LICENSE file in the project root for full license information.

using Xunit;
using System;
using System.Collections.Generic;
using System.Linq;

namespace Test
{
    /// <summary>
    /// Tests for the PLINQ set operators — Union, Intersect and Distinct —
    /// in both unordered and ordered (AsOrdered) flavors.
    /// </summary>
    public class UnionIntersectDistinctTests
    {
        //
        // Union
        //

        [Fact]
        public static void RunUnionTests()
        {
            RunUnionTest1(0, 0);
            RunUnionTest1(1, 0);
            RunUnionTest1(0, 1);
            RunUnionTest1(4, 4);
            RunUnionTest1(1024, 4);
            RunUnionTest1(4, 1024);
            RunUnionTest1(1024, 1024);
            RunUnionTest1(1024 * 4, 1024);
            RunUnionTest1(1024, 1024 * 4);
            RunUnionTest1(1024 * 1024, 1024 * 1024);
        }

        /// <summary>
        /// Unions two name arrays (built by cycling through two fixed name lists) and
        /// verifies the result contains every input name exactly once.
        /// </summary>
        private static void RunUnionTest1(int leftDataSize, int rightDataSize)
        {
            string[] names1 = new string[] { "balmer", "duffy", "gates", "jobs", "silva", "brumme", "gray", "grover", "yedlin" };
            string[] names2 = new string[] { "balmer", "duffy", "gates", "essey", "crocker", "smith", "callahan", "jimbob", "beebop" };

            string method = string.Format("RunUnionTest1(leftSize={0}, rightSize={1}) - union of names: FAILED.", leftDataSize, rightDataSize);

            string[] leftData = new string[leftDataSize];
            for (int i = 0; i < leftDataSize; i++)
            {
                leftData[i] = names1[i % names1.Length];
            }

            string[] rightData = new string[rightDataSize];
            for (int i = 0; i < rightDataSize; i++)
            {
                rightData[i] = names2[i % names2.Length];
            }

            // Just get the union of the two sets. We expect every name in the left and right
            // to be found in the final set, with no dups.
            ParallelQuery<string> q = leftData.AsParallel().Union<string>(rightData.AsParallel());

            // Build a set of seen names, ensuring we don't see dups.
            // HashSet gives O(1) membership checks; the original List<string>.Contains
            // was O(n) per element, i.e. O(n^2) for the megabyte-sized inputs above.
            HashSet<string> seen = new HashSet<string>();
            foreach (string n in q)
            {
                // Ensure we haven't seen this name before (Add returns false on duplicates).
                if (!seen.Add(n))
                {
                    Assert.True(false, string.Format(method + " ** NotUnique: {0} is not unique, already seen (failure)", n));
                }
            }

            // Now ensure we saw all unique elements from both sources.
            foreach (string n in leftData)
            {
                if (!seen.Contains(n))
                {
                    Assert.True(false, string.Format(method + " ** NotSeen: {0} wasn't found in the query, though it was in the left data", n));
                }
            }

            foreach (string n in rightData)
            {
                if (!seen.Contains(n))
                {
                    Assert.True(false, string.Format(method + " ** NotSeen: {0} wasn't found in the query, though it was in the right data", n));
                }
            }
        }

        [Fact]
        public static void RunOrderedUnionTest1()
        {
            for (int len = 1; len <= 300; len += 3)
            {
                // Data is runs of zeros separated by the markers 1, 2, 1, 2, 3; the
                // distinct elements in encounter order are exactly 0, 1, 2, 3.
                var data = Enumerable.Repeat(0, len)
                    .Concat(new int[] { 1 }).Concat(Enumerable.Repeat(0, len))
                    .Concat(new int[] { 2 }).Concat(Enumerable.Repeat(0, len))
                    .Concat(new int[] { 1 }).Concat(Enumerable.Repeat(0, len))
                    .Concat(new int[] { 2 }).Concat(Enumerable.Repeat(0, len))
                    .Concat(new int[] { 3 }).Concat(Enumerable.Repeat(0, len));

                int[][] outputs =
                {
                    data.AsParallel().AsOrdered().Union(Enumerable.Empty<int>().AsParallel()).ToArray(),
                    Enumerable.Empty<int>().AsParallel().AsOrdered().Union(data.AsParallel().AsOrdered()).ToArray(),
                    data.AsParallel().AsOrdered().Union(data.AsParallel()).ToArray(),
                    Enumerable.Empty<int>().AsParallel().Union(data.AsParallel().AsOrdered()).OrderBy(i => i).ToArray(),
                    data.AsParallel().Union(data.AsParallel()).OrderBy(i => i).ToArray(),
                };

                foreach (var output in outputs)
                {
                    if (!Enumerable.Range(0, 4).SequenceEqual(output))
                    {
                        Assert.True(false, "RunOrderedUnionTest1: FAILED. ** Incorrect output");
                    }
                }
            }
        }

        //
        // Intersect
        //

        [Fact]
        public static void RunIntersectTests()
        {
            RunIntersectTest1(0, 0);
            RunIntersectTest1(1, 0);
            RunIntersectTest1(0, 1);
            RunIntersectTest1(4, 4);
            RunIntersectTest1(1024, 4);
            RunIntersectTest1(4, 1024);
            RunIntersectTest1(1024, 1024);
            RunIntersectTest1(1024 * 4, 1024);
            RunIntersectTest1(1024, 1024 * 4);
            RunIntersectTest1(1024 * 1024, 1024 * 1024);
        }

        /// <summary>
        /// Intersects two name arrays and verifies each result element is unique and
        /// present in both input sources.
        /// </summary>
        private static void RunIntersectTest1(int leftDataSize, int rightDataSize)
        {
            string[] names1 = new string[] { "balmer", "duffy", "gates", "jobs", "silva", "brumme", "gray", "grover", "yedlin" };
            string[] names2 = new string[] { "balmer", "duffy", "gates", "essey", "crocker", "smith", "callahan", "jimbob", "beebop" };

            string method = string.Format("RunIntersectTest1(leftSize={0}, rightSize={1}) - intersect of names", leftDataSize, rightDataSize);

            string[] leftData = new string[leftDataSize];
            for (int i = 0; i < leftDataSize; i++)
            {
                leftData[i] = names1[i % names1.Length];
            }

            string[] rightData = new string[rightDataSize];
            for (int i = 0; i < rightDataSize; i++)
            {
                rightData[i] = names2[i % names2.Length];
            }

            // Just get the intersection of the two sets. Every name in the output must
            // appear in both sources, with no dups.
            ParallelQuery<string> q = leftData.AsParallel().Intersect<string>(rightData.AsParallel());

            // Build a set of seen names, ensuring we don't see dups (HashSet for O(1) lookups).
            HashSet<string> seen = new HashSet<string>();
            foreach (string n in q)
            {
                // Ensure we haven't seen this name before.
                if (!seen.Add(n))
                {
                    Assert.True(false, string.Format(method + " ** FAILED. NotUnique: {0} is not unique, already seen (failure)", n));
                }

                // Ensure the data exists in both sources.
                if (Array.IndexOf(leftData, n) == -1)
                {
                    Assert.True(false, string.Format(method + " ** FAILED. NotInLeft: {0} isn't in the left data source", n));
                }

                if (Array.IndexOf(rightData, n) == -1)
                {
                    Assert.True(false, string.Format(method + " ** FAILED. NotInRight: {0} isn't in the right data source", n));
                }
            }
        }

        /// <summary>
        /// Unordered Intersect with a custom equality comparer.
        /// BUGFIX: this test previously called Except for both the expected and actual
        /// values, so Intersect was never exercised despite the test's name and summary.
        /// </summary>
        [Fact]
        public static void RunIntersectTest2()
        {
            string[] first = { "Tim", "Bob", "Mike", "Robert" };
            string[] second = { "ekiM", "bBo" };
            var comparer = new AnagramEqualityComparer();

            string[] expected = first.Intersect(second, comparer).ToArray();
            string[] actual = first.AsParallel().Intersect(second.AsParallel(), comparer).ToArray();

            // The query is unordered, so compare the two results as sets.
            Assert.True(expected.OrderBy(s => s).SequenceEqual(actual.OrderBy(s => s)), "RunIntersectTest2: FAILED");
        }

        [Fact]
        public static void RunOrderedIntersectTest1()
        {
            for (int len = 1; len <= 300; len += 3)
            {
                var data = Enumerable.Repeat(0, len)
                    .Concat(new int[] { 1 }).Concat(Enumerable.Repeat(0, len))
                    .Concat(new int[] { 2 }).Concat(Enumerable.Repeat(0, len))
                    .Concat(new int[] { 1 }).Concat(Enumerable.Repeat(0, len))
                    .Concat(new int[] { 2 }).Concat(Enumerable.Repeat(0, len))
                    .Concat(new int[] { 3 }).Concat(Enumerable.Repeat(0, len));

                var output = data.AsParallel().AsOrdered().Intersect(data.AsParallel()).ToArray();

                if (!Enumerable.Range(0, 4).SequenceEqual(output))
                {
                    Assert.True(false, "RunOrderedIntersectTest1: FAILED. ** Incorrect output");
                }
            }
        }

        /// <summary>
        /// Ordered Intersect with a custom equality comparer.
        /// BUGFIX: this test previously called Except for both the expected and actual
        /// values, so the ordered Intersect path was never exercised.
        /// </summary>
        [Fact]
        public static void RunOrderedIntersectTest2()
        {
            string[] first = { "Tim", "Bob", "Mike", "Robert" };
            string[] second = { "ekiM", "bBo" };
            var comparer = new AnagramEqualityComparer();

            string[] expected = first.Intersect(second, comparer).ToArray();
            string[] actual = first.AsParallel().AsOrdered().Intersect(second.AsParallel().AsOrdered(), comparer).ToArray();

            // AsOrdered preserves encounter order, so a positional comparison is valid.
            Assert.True(expected.SequenceEqual(actual), "RunOrderedIntersectTest2: FAILED.");
        }

        //
        // Distinct
        //

        [Fact]
        public static void RunDistinctTests()
        {
            RunDistinctTest1(0);
            RunDistinctTest1(1);
            RunDistinctTest1(4);
            RunDistinctTest1(1024);
            RunDistinctTest1(1024 * 4);
            RunDistinctTest1(1024 * 1024);
        }

        /// <summary>
        /// Applies Distinct to a cyclic name array and verifies the result has no
        /// duplicates and covers every input element.
        /// </summary>
        private static void RunDistinctTest1(int dataSize)
        {
            string[] names1 = new string[] { "balmer", "duffy", "gates", "jobs", "silva", "brumme", "gray", "grover", "yedlin" };

            string method = string.Format("RunDistinctTest1(dataSize={0}) - distinct names", dataSize);

            string[] data = new string[dataSize];
            for (int i = 0; i < dataSize; i++)
            {
                data[i] = names1[i % names1.Length];
            }

            // Find the distinct elements.
            ParallelQuery<string> q = data.AsParallel().Distinct<string>();

            // Build a set of seen names, ensuring we don't see dups (HashSet for O(1) lookups).
            HashSet<string> seen = new HashSet<string>();
            foreach (string n in q)
            {
                // Ensure we haven't seen this name before.
                if (!seen.Add(n))
                {
                    Assert.True(false, string.Format(method + " ** FAILED. NotUnique: {0} is not unique, already seen (failure)", n));
                }
            }

            // Now ensure we saw all elements at least once.
            foreach (string n in data)
            {
                if (!seen.Contains(n))
                {
                    Assert.True(false, string.Format(method + " ** FAILED. NotSeen: {0} wasn't found in the query, though it was in the data", n));
                }
            }
        }

        [Fact]
        public static void RunOrderedDistinctTest1()
        {
            for (int len = 1; len <= 300; len += 3)
            {
                var data = Enumerable.Repeat(0, len)
                    .Concat(new int[] { 1 }).Concat(Enumerable.Repeat(0, len))
                    .Concat(new int[] { 2 }).Concat(Enumerable.Repeat(0, len))
                    .Concat(new int[] { 1 }).Concat(Enumerable.Repeat(0, len))
                    .Concat(new int[] { 2 }).Concat(Enumerable.Repeat(0, len))
                    .Concat(new int[] { 3 }).Concat(Enumerable.Repeat(0, len));

                var output = data.AsParallel().AsOrdered().Distinct().ToArray();

                if (!Enumerable.Range(0, 4).SequenceEqual(output))
                {
                    Assert.True(false, "RunOrderedDistinctTest1: FAILED. ** Incorrect output");
                }
            }
        }

        /// <summary>
        /// A comparer that considers two strings equal if they are anagrams of each other.
        /// </summary>
        private class AnagramEqualityComparer : IEqualityComparer<string>
        {
            public bool Equals(string a, string b)
            {
                return a.ToCharArray().OrderBy(c => c).SequenceEqual(b.ToCharArray().OrderBy(c => c));
            }

            public int GetHashCode(string str)
            {
                // Hash the sorted characters so that anagrams collide, as required
                // for consistency with Equals.
                return new string(str.ToCharArray().OrderBy(c => c).ToArray()).GetHashCode();
            }
        }
    }
}
using System;
using System.Collections.Generic;
using System.IO;
using System.Linq;
using System.Text;
using UnityEngine;

/// <summary>
/// Static helpers for file and directory operations in a Unity project.
/// Platform behavior is selected with preprocessor symbols: on the web player,
/// files are emulated via SystemPrefUtil key/value storage; on Android,
/// StreamingAssets content is read through WWW because it lives inside the
/// compressed APK.
/// </summary>
public class FileSystemUtil
{
    /// <summary>
    /// Creates <paramref name="path"/> if it does not exist and creation is
    /// permitted by <see cref="DirectoryAllowed"/>. No-op on the web player.
    /// </summary>
    public static void CreateDirectoryIfNeededAndAllowed(string path)
    {
#if !UNITY_WEBPLAYER
        if (!Directory.Exists(path))
        {
            if (DirectoryAllowed(path))
            {
                LogUtil.Log("CreateDirectoryIfNeededAndAllowed:" + path);
                Directory.CreateDirectory(path);
            }
        }
#endif
    }

    /// <summary>
    /// Returns true when a directory may be created at <paramref name="path"/>:
    /// in a player build, only paths under Application.persistentDataPath are
    /// allowed; in the editor, any path is allowed.
    /// </summary>
    public static bool DirectoryAllowed(string path)
    {
        bool allowCreate = true;
#if !UNITY_WEBPLAYER
        if (path.IndexOf(Application.persistentDataPath) == -1 && !Application.isEditor)
        {
            allowCreate = false;
        }
#endif
        return allowCreate;
    }

    /// <summary>
    /// Copies files (and optionally subdirectories) from
    /// <paramref name="sourceDirName"/> to <paramref name="destDirName"/>,
    /// skipping .meta/.DS_Store files. Existing destination files are only
    /// overwritten in the editor.
    /// </summary>
    /// <param name="versioned">Carried through recursion; not otherwise read here.</param>
    public static void DirectoryCopy(string sourceDirName, string destDirName, bool copySubDirs, bool versioned)
    {
#if !UNITY_WEBPLAYER
        FileSystemUtil.EnsureDirectory(sourceDirName, false);
        FileSystemUtil.EnsureDirectory(destDirName, false);

        CreateDirectoryIfNeededAndAllowed(sourceDirName);

        DirectoryInfo dir = new DirectoryInfo(sourceDirName);
        DirectoryInfo[] dirs = dir.GetDirectories();

        if (!dir.Exists)
        {
            throw new DirectoryNotFoundException(
                "Source directory does not exist or could not be found: " + sourceDirName);
        }

        CreateDirectoryIfNeededAndAllowed(destDirName);

        FileInfo[] files = dir.GetFiles();

        LogUtil.Log("Directory Files: directory: " + destDirName);
        LogUtil.Log("files.Count:", files.Length);

        foreach (FileInfo file in files)
        {
            if (file.Extension != ".meta" && file.Extension != ".DS_Store")
            {
                string temppath = PathUtil.Combine(destDirName, file.Name);
                // Only overwrite an existing destination file when in the editor.
                if (!CheckFileExists(temppath) || Application.isEditor)
                {
                    LogUtil.Log("copying ship file: " + file.FullName);
                    LogUtil.Log("copying ship file to cache: " + temppath);
                    file.CopyTo(temppath, true);
                    ////SystemHelper.SetNoBackupFlag(temppath);
                }
            }
        }

        if (copySubDirs)
        {
            foreach (DirectoryInfo subdir in dirs)
            {
                string temppath = PathUtil.Combine(destDirName, subdir.Name);
                LogUtil.Log("Copying Directory: " + temppath);
                DirectoryCopy(subdir.FullName, temppath, copySubDirs, versioned);
            }
        }
#endif
    }

    /// <summary>
    /// Ensures the directory for <paramref name="filePath"/> exists, stripping a
    /// trailing file name first.
    /// </summary>
    public static void EnsureDirectory(string filePath)
    {
        EnsureDirectory(filePath, true);
    }

    /// <summary>
    /// Ensures a directory exists. When <paramref name="filterFileName"/> is true
    /// and the path appears to contain a file name (has a '.'), the file-name
    /// portion is removed before creating the directory.
    /// </summary>
    public static void EnsureDirectory(string filePath, bool filterFileName)
    {
        string directory = filePath;
        // Heuristic: a '.' anywhere in the path is treated as marking a file name.
        if (filePath.IndexOf('.') > -1 && filterFileName)
        {
            directory = filePath.Replace(Path.GetFileName(filePath), "");
        }
        CreateDirectoryIfNeededAndAllowed(directory);
    }

    /// <summary>
    /// Tests whether a file exists, per platform: Android StreamingAssets are
    /// probed through WWW (the APK is compressed), the web player consults
    /// SystemPrefUtil, and everything else uses File.Exists.
    /// </summary>
    public static bool CheckFileExists(string path)
    {
        bool exists = false;
#if UNITY_ANDROID
        if (!exists && path.Contains(Application.streamingAssetsPath))
        {
            // android stores streamingassets in a compressed file,
            // must use WWW to check if you can access it
            if (!path.Contains("file://"))
            {
                path = "file://" + path;
            }
            WWW file = new WWW(path);
            float currentTime = Time.time;
            float endTime = currentTime + 6f;
            // NOTE(review): Time.time only advances between frames, so this
            // busy-wait's timeout may never trigger while blocking the main
            // thread — confirm intended behavior before changing.
            while (!file.isDone && currentTime < endTime)
            {
                currentTime = Time.time;
            }
            int length = file.bytes.Length;
            LogUtil.Log("CheckFileExists: Android: path:" + path);
            LogUtil.Log("CheckFileExists: Android: file.bytes.length:" + length);
            if (file.bytes.Length > 0)
            {
                exists = true;
            }
        }
#elif UNITY_WEBPLAYER
        if (SystemPrefUtil.HasLocalSetting(path))
        {
            exists = true;
        }
#else
        exists = File.Exists(path);
#endif
        return exists;
    }

    /// <summary>Copies a file without overwriting an existing destination.</summary>
    public static void CopyFile(string dataFilePath, string persistenceFilePath)
    {
        CopyFile(dataFilePath, persistenceFilePath, false);
    }

    /// <summary>
    /// Copies <paramref name="dataFilePath"/> to <paramref name="persistenceFilePath"/>
    /// when the source exists and the destination is missing (or <paramref name="force"/>).
    /// On Android, StreamingAssets sources are read through WWW.
    /// </summary>
    public static void CopyFile(string dataFilePath, string persistenceFilePath, bool force)
    {
#if !UNITY_WEBPLAYER
        EnsureDirectory(dataFilePath);
        EnsureDirectory(persistenceFilePath);

        LogUtil.Log("dataFilePath: " + dataFilePath);
        LogUtil.Log("persistenceFilePath: " + persistenceFilePath);

        if (CheckFileExists(dataFilePath) && (!CheckFileExists(persistenceFilePath) || force))
        {
#if UNITY_ANDROID
            if (dataFilePath.Contains(Application.streamingAssetsPath))
            {
                // android stores streamingassets in a compressed file,
                // must use WWW to copy contents if you can access it
                if (!dataFilePath.Contains("file://"))
                {
                    dataFilePath = "file://" + dataFilePath;
                }
                WWW file = new WWW(dataFilePath);
                float currentTime = Time.time;
                float endTime = currentTime + 6f;
                // NOTE(review): see CheckFileExists — Time.time-based busy-wait.
                while (!file.isDone && currentTime < endTime)
                {
                    currentTime = Time.time;
                }
                int length = file.bytes.Length;
                LogUtil.Log("CopyFile: Android: dataFilePath:" + dataFilePath);
                LogUtil.Log("CopyFile: Android: persistenceFilePath:" + persistenceFilePath);
                LogUtil.Log("CopyFile: Android: file.bytes.length:" + length);
                if (file.bytes.Length > 0)
                {
                    // Save file contents to new location
                    FileSystemUtil.WriteAllBytes(persistenceFilePath, file.bytes);
                }
            }
            else
            {
                File.Copy(dataFilePath, persistenceFilePath, true);
            }
#else
            File.Copy(dataFilePath, persistenceFilePath, true);
#endif
            ////SystemHelper.SetNoBackupFlag(persistenceFilePath);
        }
#endif
    }

    /// <summary>Moves a file without overwriting an existing destination.</summary>
    public static void MoveFile(string dataFilePath, string persistenceFilePath)
    {
        MoveFile(dataFilePath, persistenceFilePath, false);
    }

    /// <summary>
    /// Moves <paramref name="dataFilePath"/> to <paramref name="persistenceFilePath"/>
    /// when the source exists and the destination is missing (or <paramref name="force"/>).
    /// On Android, StreamingAssets sources are copied (not removed) via WWW since
    /// the APK contents cannot be deleted.
    /// </summary>
    public static void MoveFile(string dataFilePath, string persistenceFilePath, bool force)
    {
#if !UNITY_WEBPLAYER
        EnsureDirectory(dataFilePath);
        EnsureDirectory(persistenceFilePath);

        if (CheckFileExists(dataFilePath) && (!CheckFileExists(persistenceFilePath) || force))
        {
#if UNITY_ANDROID
            if (dataFilePath.Contains(Application.streamingAssetsPath))
            {
                // android stores streamingassets in a compressed file,
                // must use WWW to copy contents if you can access it
                if (!dataFilePath.Contains("file://"))
                {
                    dataFilePath = "file://" + dataFilePath;
                }
                WWW file = new WWW(dataFilePath);
                float currentTime = Time.time;
                float endTime = currentTime + 6f;
                // NOTE(review): see CheckFileExists — Time.time-based busy-wait.
                while (!file.isDone && currentTime < endTime)
                {
                    currentTime = Time.time;
                }
                int length = file.bytes.Length;
                LogUtil.Log("CopyFile: Android: dataFilePath:" + dataFilePath);
                LogUtil.Log("CopyFile: Android: persistenceFilePath:" + persistenceFilePath);
                LogUtil.Log("CopyFile: Android: file.bytes.length:" + length);
                if (file.bytes.Length > 0)
                {
                    // Save file contents to new location
                    FileSystemUtil.WriteAllBytes(persistenceFilePath, file.bytes);
                }
            }
            else
            {
                File.Move(dataFilePath, persistenceFilePath);
            }
#else
            File.Move(dataFilePath, persistenceFilePath);
#endif
            //SystemHelper.SetNoBackupFlag(persistenceFilePath);
        }
#endif
    }

    /// <summary>Reads all bytes of a file (web player: UTF-8 bytes of the stored setting).</summary>
    public static byte[] ReadAllBytes(string fileName)
    {
#if !UNITY_WEBPLAYER
        return File.ReadAllBytes(fileName);
#else
        return System.Text.Encoding.UTF8.GetBytes(SystemPrefUtil.GetLocalSettingString(fileName));
#endif
    }

    /// <summary>Writes bytes to a file, creating parent directories as needed.</summary>
    public static void WriteAllBytes(string fileName, byte[] buffer)
    {
#if !UNITY_WEBPLAYER
        EnsureDirectory(fileName);
        File.WriteAllBytes(fileName, buffer);
        ////SystemHelper.SetNoBackupFlag(fileName);
#else
        SystemPrefUtil.SetLocalSettingString(fileName, System.Text.Encoding.UTF8.GetString(buffer));
        SystemPrefUtil.Save();
#endif
    }

    /// <summary>
    /// Reads a file's bytes via a stream, or returns null when the file is missing.
    /// </summary>
    public static byte[] ReadStream(string fileName)
    {
#if !UNITY_WEBPLAYER
        byte[] buffer = null;
        if (CheckFileExists(fileName))
        {
            // using-blocks guarantee the stream/reader are closed even on error
            // (the original leaked them if ReadBytes threw).
            using (FileStream fs = new FileStream(fileName, FileMode.OpenOrCreate, FileAccess.Read))
            using (BinaryReader br = new BinaryReader(fs))
            {
                long length = new FileInfo(fileName).Length;
                buffer = br.ReadBytes((int)length);
            }
        }
        return buffer;
#else
        return System.Text.Encoding.UTF8.GetBytes(SystemPrefUtil.GetLocalSettingString(fileName));
#endif
    }

    /// <summary>
    /// Writes raw bytes to a file.
    /// BUGFIX: the original passed the byte[] to StreamWriter.Write, which binds to
    /// Write(object) and writes the literal string "System.Byte[]" instead of the data.
    /// </summary>
    public static void WriteStream(string fileName, byte[] data)
    {
#if !UNITY_WEBPLAYER
        EnsureDirectory(fileName);
        File.WriteAllBytes(fileName, data);
        ////SystemHelper.SetNoBackupFlag(fileName);
#else
        SystemPrefUtil.SetLocalSettingString(fileName, System.Text.Encoding.UTF8.GetString(data));
        SystemPrefUtil.Save();
#endif
    }

    /// <summary>Reads a file's full text, or "" when it does not exist.</summary>
    public static string ReadString(string fileName)
    {
        string contents = "";
        if (CheckFileExists(fileName))
        {
#if UNITY_WEBPLAYER
            contents = SystemPrefUtil.GetLocalSettingString(fileName);
#else
            using (StreamReader sr = new StreamReader(fileName, true))
            {
                contents = sr.ReadToEnd();
            }
#endif
        }
        return contents;
    }

    /// <summary>Overwrites a file with the given text.</summary>
    public static void WriteString(string fileName, string data)
    {
        WriteString(fileName, data, false);
    }

    /// <summary>Writes (or appends) text to a file, creating parent directories as needed.</summary>
    public static void WriteString(string fileName, string data, bool append)
    {
#if UNITY_WEBPLAYER
        SystemPrefUtil.SetLocalSettingString(fileName, data);
        SystemPrefUtil.Save();
#else
        EnsureDirectory(fileName);
        using (StreamWriter sw = new StreamWriter(fileName, append))
        {
            sw.Write(data);
        }
        ////SystemHelper.SetNoBackupFlag(fileName);
#endif
    }

    /// <summary>Deletes a file if present (web player: blanks the stored setting).</summary>
    public static void RemoveFile(string file)
    {
        if (CheckFileExists(file))
        {
#if UNITY_WEBPLAYER
            SystemPrefUtil.SetLocalSettingString(file, "");
            SystemPrefUtil.Save();
#else
            File.Delete(file);
#endif
        }
    }

    /// <summary>
    /// Recursively deletes every file whose full path contains <paramref name="fileKey"/>.
    /// </summary>
    public static void RemoveFilesLikeRecursive(DirectoryInfo dirInfo, string fileKey)
    {
#if !UNITY_WEBPLAYER
        foreach (FileInfo fileInfo in dirInfo.GetFiles())
        {
            if (fileInfo.FullName.Contains(fileKey))
            {
                File.Delete(fileInfo.FullName);
            }
        }
        foreach (DirectoryInfo dirInfoItem in dirInfo.GetDirectories())
        {
            RemoveFilesLikeRecursive(dirInfoItem, fileKey);
        }
#endif
    }

    /// <summary>
    /// Recursively copies files matching <paramref name="filter"/> from the tree
    /// rooted at <paramref name="dirInfoFrom"/> into <paramref name="dirInfoTo"/>,
    /// skipping files whose extension is in <paramref name="excludeExts"/>.
    /// </summary>
    public static void CopyFilesLikeRecursive(
        DirectoryInfo dirInfoCurrent, DirectoryInfo dirInfoFrom, DirectoryInfo dirInfoTo,
        string filter, List<string> excludeExts)
    {
#if !UNITY_WEBPLAYER
        foreach (FileInfo fileInfo in dirInfoCurrent.GetFiles())
        {
            if (fileInfo.FullName.Contains(filter))
            {
                // Re-root the source path under the destination tree.
                string fileTo = fileInfo.FullName.Replace(dirInfoFrom.FullName, dirInfoTo.FullName);
                if (!CheckFileExtention(fileTo, excludeExts))
                {
                    string directoryTo = Path.GetDirectoryName(fileTo);
                    if (!Directory.Exists(directoryTo))
                    {
                        Directory.CreateDirectory(directoryTo);
                    }
                    File.Copy(fileInfo.FullName, fileTo, true);
                }
            }
        }
        foreach (DirectoryInfo dirInfoItem in dirInfoCurrent.GetDirectories())
        {
            CopyFilesLikeRecursive(dirInfoItem, dirInfoFrom, dirInfoTo, filter, excludeExts);
        }
#endif
    }

    /// <summary>
    /// Returns true when <paramref name="path"/> ends with any of the given
    /// extensions (case-insensitive).
    /// </summary>
    public static bool CheckFileExtention(string path, List<string> extensions)
    {
        foreach (string ext in extensions)
        {
            if (path.ToLower().EndsWith(ext.ToLower()))
            {
                return true;
            }
        }
        return false;
    }

    /// <summary>
    /// Recursively moves files matching <paramref name="filter"/> from the tree
    /// rooted at <paramref name="dirInfoFrom"/> into <paramref name="dirInfoTo"/>,
    /// skipping excluded extensions and replacing existing destinations.
    /// </summary>
    public static void MoveFilesLikeRecursive(
        DirectoryInfo dirInfoCurrent, DirectoryInfo dirInfoFrom, DirectoryInfo dirInfoTo,
        string filter, List<string> excludeExts)
    {
#if !UNITY_WEBPLAYER
        foreach (FileInfo fileInfo in dirInfoCurrent.GetFiles())
        {
            if (fileInfo.FullName.Contains(filter))
            {
                string fileTo = fileInfo.FullName.Replace(dirInfoFrom.FullName, dirInfoTo.FullName);
                if (!CheckFileExtention(fileTo, excludeExts))
                {
                    string directoryTo = Path.GetDirectoryName(fileTo);
                    if (!Directory.Exists(directoryTo))
                    {
                        Directory.CreateDirectory(directoryTo);
                    }
                    LogUtil.Log("fileTo:" + fileTo);
                    // File.Move throws if the destination exists, so delete it first.
                    if (CheckFileExists(fileTo))
                    {
                        File.Delete(fileTo);
                    }
                    File.Move(fileInfo.FullName, fileTo);
                }
            }
        }
        foreach (DirectoryInfo dirInfoItem in dirInfoCurrent.GetDirectories())
        {
            MoveFilesLikeRecursive(dirInfoItem, dirInfoFrom, dirInfoTo, filter, excludeExts);
        }
#endif
    }

    /// <summary>
    /// Depth-first deletes directories whose full path contains
    /// <paramref name="filterLike"/> but not <paramref name="filterNotLike"/>.
    /// </summary>
    public static void RemoveDirectoriesLikeRecursive(
        DirectoryInfo dirInfoCurrent, string filterLike, string filterNotLike)
    {
#if !UNITY_WEBPLAYER
        foreach (DirectoryInfo dirInfoItem in dirInfoCurrent.GetDirectories())
        {
            RemoveDirectoriesLikeRecursive(dirInfoItem, filterLike, filterNotLike);
        }
        if (dirInfoCurrent.FullName.Contains(filterLike)
            && !dirInfoCurrent.FullName.Contains(filterNotLike))
        {
            Directory.Delete(dirInfoCurrent.FullName, true);
        }
#endif
    }
}
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE file in the project root for more information.

using Xunit;

namespace System.Security.Cryptography.Rsa.Tests
{
    /// <summary>
    /// Runs the shared <see cref="EncryptDecrypt"/> test suite through the
    /// byte[]-based RSA.Encrypt/Decrypt overloads.
    /// </summary>
    public sealed class EncryptDecrypt_Array : EncryptDecrypt
    {
        // Dispatch to the array-based overloads under test.
        protected override byte[] Encrypt(RSA rsa, byte[] data, RSAEncryptionPadding padding) =>
            rsa.Encrypt(data, padding);
        protected override byte[] Decrypt(RSA rsa, byte[] data, RSAEncryptionPadding padding) =>
            rsa.Decrypt(data, padding);

        /// <summary>A null data argument must raise ArgumentNullException for both directions.</summary>
        [Fact]
        public void NullArray_Throws()
        {
            using (RSA rsa = RSAFactory.Create())
            {
                AssertExtensions.Throws<ArgumentNullException>(
                    "data", () => rsa.Encrypt(null, RSAEncryptionPadding.OaepSHA1));
                AssertExtensions.Throws<ArgumentNullException>(
                    "data", () => rsa.Decrypt(null, RSAEncryptionPadding.OaepSHA1));
            }
        }
    }

    /// <summary>
    /// RSA encrypt/decrypt tests, abstract over how Encrypt/Decrypt are invoked so the
    /// same cases can run against different API shapes (see EncryptDecrypt_Array).
    /// RSAFactory and TestData are test-support types defined elsewhere in this project.
    /// </summary>
    public abstract class EncryptDecrypt
    {
        // On full .NET Framework before 4.6.2, exporting an ephemeral key could fail;
        // gates RsaDecryptAfterExport via ConditionalFact.
        private static bool EphemeralKeysAreExportable =>
            !PlatformDetection.IsFullFramework || PlatformDetection.IsNetfx462OrNewer();

        // Implemented by derived classes to call the specific overload under test.
        protected abstract byte[] Encrypt(RSA rsa, byte[] data, RSAEncryptionPadding padding);
        protected abstract byte[] Decrypt(RSA rsa, byte[] data, RSAEncryptionPadding padding);

        /// <summary>A null padding argument must raise ArgumentNullException for both directions.</summary>
        [Fact]
        public void NullPadding_Throws()
        {
            using (RSA rsa = RSAFactory.Create())
            {
                AssertExtensions.Throws<ArgumentNullException>(
                    "padding", () => Encrypt(rsa, new byte[1], null));
                AssertExtensions.Throws<ArgumentNullException>(
                    "padding", () => Decrypt(rsa, new byte[1], null));
            }
        }

        /// <summary>
        /// Decrypts a saved OAEP-SHA1 ciphertext with the RSA-1024 test key and checks
        /// the known plaintext (TestData.HelloBytes).
        /// </summary>
        [Fact]
        public void DecryptSavedAnswer()
        {
            // Fixed ciphertext captured from a known-good implementation (128 bytes = 1024-bit key).
            byte[] cipherBytes =
            {
                0x35, 0x6F, 0x8F, 0x2C, 0x4D, 0x1A, 0xAC, 0x6D,
                0xE7, 0x52, 0xA5, 0xDF, 0x26, 0x54, 0xA6, 0x34,
                0xF5, 0xBB, 0x14, 0x26, 0x1C, 0xE4, 0xDC, 0xA2,
                0xD8, 0x4D, 0x8F, 0x1C, 0x55, 0xD4, 0xC7, 0xA7,
                0xF2, 0x3C, 0x99, 0x77, 0x9F, 0xE4, 0xB7, 0x34,
                0xA6, 0x28, 0xB2, 0xC4, 0xFB, 0x6F, 0x85, 0xCA,
                0x19, 0x21, 0xCA, 0xC1, 0xA7, 0x8D, 0xAE, 0x95,
                0xAB, 0x9B, 0xA9, 0x88, 0x5B, 0x44, 0xC6, 0x9B,
                0x44, 0x26, 0x71, 0x5D, 0x02, 0x3F, 0x43, 0x42,
                0xEF, 0x4E, 0xEE, 0x09, 0x87, 0xEF, 0xCD, 0xCF,
                0xF9, 0x88, 0x99, 0xE8, 0x49, 0xF7, 0x8F, 0x9B,
                0x59, 0x68, 0x20, 0xF3, 0xA7, 0xB2, 0x94, 0xA4,
                0x23, 0x70, 0x83, 0xD9, 0xAC, 0xE7, 0x5E, 0xEE,
                0xE9, 0x7B, 0xE4, 0x4F, 0x73, 0x2E, 0x9B, 0xD8,
                0x2A, 0x75, 0xFB, 0x6C, 0xB9, 0x39, 0x6D, 0x72,
                0x8A, 0x9C, 0xCD, 0x58, 0x1A, 0x27, 0x79, 0x97,
            };

            byte[] output;

            using (RSA rsa = RSAFactory.Create())
            {
                rsa.ImportParameters(TestData.RSA1024Params);
                output = Decrypt(rsa, cipherBytes, RSAEncryptionPadding.OaepSHA1);
            }

            Assert.Equal(TestData.HelloBytes, output);
        }

        /// <summary>
        /// Decrypting with only the public portion of the key must fail with a
        /// CryptographicException (no private key material available).
        /// </summary>
        [Fact]
        public void DecryptWithPublicKey_Fails()
        {
            // Same saved ciphertext as DecryptSavedAnswer.
            byte[] cipherBytes =
            {
                0x35, 0x6F, 0x8F, 0x2C, 0x4D, 0x1A, 0xAC, 0x6D,
                0xE7, 0x52, 0xA5, 0xDF, 0x26, 0x54, 0xA6, 0x34,
                0xF5, 0xBB, 0x14, 0x26, 0x1C, 0xE4, 0xDC, 0xA2,
                0xD8, 0x4D, 0x8F, 0x1C, 0x55, 0xD4, 0xC7, 0xA7,
                0xF2, 0x3C, 0x99, 0x77, 0x9F, 0xE4, 0xB7, 0x34,
                0xA6, 0x28, 0xB2, 0xC4, 0xFB, 0x6F, 0x85, 0xCA,
                0x19, 0x21, 0xCA, 0xC1, 0xA7, 0x8D, 0xAE, 0x95,
                0xAB, 0x9B, 0xA9, 0x88, 0x5B, 0x44, 0xC6, 0x9B,
                0x44, 0x26, 0x71, 0x5D, 0x02, 0x3F, 0x43, 0x42,
                0xEF, 0x4E, 0xEE, 0x09, 0x87, 0xEF, 0xCD, 0xCF,
                0xF9, 0x88, 0x99, 0xE8, 0x49, 0xF7, 0x8F, 0x9B,
                0x59, 0x68, 0x20, 0xF3, 0xA7, 0xB2, 0x94, 0xA4,
                0x23, 0x70, 0x83, 0xD9, 0xAC, 0xE7, 0x5E, 0xEE,
                0xE9, 0x7B, 0xE4, 0x4F, 0x73, 0x2E, 0x9B, 0xD8,
                0x2A, 0x75, 0xFB, 0x6C, 0xB9, 0x39, 0x6D, 0x72,
                0x8A, 0x9C, 0xCD, 0x58, 0x1A, 0x27, 0x79, 0x97,
            };

            using (RSA rsa = RSAFactory.Create())
            {
                RSAParameters parameters = TestData.RSA1024Params;

                // Strip the private components, leaving only Modulus + Exponent.
                RSAParameters pubParameters = new RSAParameters
                {
                    Modulus = parameters.Modulus,
                    Exponent = parameters.Exponent,
                };

                rsa.ImportParameters(pubParameters);

                Assert.ThrowsAny<CryptographicException>(
                    () => Decrypt(rsa, cipherBytes, RSAEncryptionPadding.OaepSHA1));
            }
        }

        /// <summary>
        /// Decrypts a saved OAEP-SHA384 ciphertext with the RSA-2048 test key. When the
        /// platform does not support SHA-2 OAEP, a CryptographicException is expected instead.
        /// </summary>
        [Fact]
        public void DecryptSavedAnswer_OaepSHA384()
        {
            // Fixed ciphertext (256 bytes = 2048-bit key); the plaintext is the key's DP value.
            byte[] cipherBytes =
            {
                0x50, 0x71, 0x9F, 0x24, 0x7F, 0x63, 0xB6, 0xF6,
                0xBE, 0xDB, 0x20, 0x5A, 0x79, 0xEB, 0x65, 0x04,
                0x84, 0x96, 0xBF, 0xFA, 0x7E, 0x87, 0x4D, 0x38,
                0x78, 0xA9, 0x9D, 0x13, 0xC7, 0x8F, 0x29, 0x8C,
                0xFE, 0x57, 0x05, 0xE0, 0xC4, 0xD4, 0x20, 0x21,
                0x8E, 0x12, 0xCD, 0xBB, 0xE2, 0x65, 0x78, 0x89,
                0x6D, 0x58, 0x86, 0x3B, 0x30, 0x6E, 0xE3, 0x18,
                0x89, 0xA4, 0xDF, 0x23, 0x47, 0x97, 0x55, 0x57,
                0x07, 0xCD, 0xCA, 0x88, 0xC0, 0x88, 0x07, 0x58,
                0x2D, 0x5D, 0x27, 0x06, 0x30, 0x2F, 0xD1, 0x42,
                0x75, 0x4A, 0x48, 0xB9, 0xAA, 0x93, 0x9E, 0x1A,
                0x3C, 0x9A, 0xD1, 0xCB, 0x09, 0xE2, 0x1A, 0x42,
                0x2B, 0x80, 0xEC, 0x09, 0xD1, 0x4D, 0x5D, 0xB7,
                0x8C, 0x2F, 0x69, 0x66, 0x0E, 0xEE, 0xE8, 0xCF,
                0x13, 0x76, 0xD0, 0xB7, 0x6E, 0x19, 0x22, 0x4D,
                0x50, 0x0B, 0x41, 0xDF, 0x3F, 0xF0, 0x69, 0xAD,
                0x8F, 0xE8, 0x6E, 0xC6, 0xBB, 0x55, 0x12, 0x24,
                0xE0, 0x30, 0x84, 0x75, 0xC5, 0x5C, 0x49, 0xB6,
                0xBC, 0xD2, 0x07, 0x80, 0x53, 0xF0, 0xB3, 0xFA,
                0xDA, 0x73, 0xD8, 0xB5, 0x68, 0xD0, 0xD9, 0x0B,
                0x02, 0xF5, 0x20, 0xAA, 0x81, 0xA0, 0x07, 0xA2,
                0x8A, 0x96, 0xD4, 0xE5, 0x37, 0xD9, 0x72, 0x05,
                0x07, 0x5B, 0xE8, 0xEC, 0x09, 0xCA, 0x92, 0xA6,
                0x63, 0xAC, 0x80, 0xC4, 0xB3, 0xEB, 0x00, 0x59,
                0x0C, 0xF8, 0x84, 0xCF, 0x7E, 0x5F, 0x44, 0x08,
                0x67, 0x08, 0x3D, 0x94, 0xBE, 0xBF, 0xBA, 0x90,
                0xC3, 0xB8, 0xBE, 0x62, 0xB4, 0x13, 0x50, 0x92,
                0x08, 0xB0, 0xA2, 0xA6, 0x9F, 0x61, 0x27, 0xEE,
                0xA7, 0x37, 0xC3, 0x21, 0x31, 0x41, 0xD7, 0x8E,
                0x19, 0xE5, 0x5E, 0x57, 0x69, 0x54, 0xAE, 0x74,
                0x13, 0x93, 0x13, 0xCC, 0x3B, 0x55, 0x0C, 0x4F,
                0x3E, 0xF6, 0x06, 0x78, 0x18, 0x46, 0x8A, 0x23,
            };

            byte[] output;

            using (RSA rsa = RSAFactory.Create())
            {
                rsa.ImportParameters(TestData.RSA2048Params);

                if (RSAFactory.SupportsSha2Oaep)
                {
                    output = Decrypt(rsa, cipherBytes, RSAEncryptionPadding.OaepSHA384);
                }
                else
                {
                    // Platform cannot do OAEP with SHA-2 digests: expect failure and stop.
                    Assert.ThrowsAny<CryptographicException>(
                        () => Decrypt(rsa, cipherBytes, RSAEncryptionPadding.OaepSHA384));

                    return;
                }
            }

            // The saved ciphertext encrypts the DP parameter of the same key.
            Assert.Equal(TestData.RSA2048Params.DP, output);
        }

        /// <summary>
        /// Decrypts a saved OAEP-SHA1 ciphertext with a key whose public exponent is
        /// non-standard (TestData.UnusualExponentParameters).
        /// </summary>
        [Fact]
        public void DecryptSavedAnswerUnusualExponent()
        {
            // Fixed ciphertext (128 bytes); plaintext is TestData.HelloBytes.
            byte[] cipherBytes =
            {
                0x55, 0x64, 0x05, 0xF7, 0xBF, 0x99, 0xD8, 0x07,
                0xD0, 0xAC, 0x1B, 0x1B, 0x60, 0x92, 0x57, 0x95,
                0x5D, 0xA4, 0x5B, 0x55, 0x0E, 0x12, 0x90, 0x24,
                0x86, 0x35, 0xEE, 0x6D, 0xB3, 0x46, 0x3A, 0xB0,
                0x3D, 0x67, 0xCF, 0xB3, 0xFA, 0x61, 0xBB, 0x90,
                0x6D, 0x6D, 0xF8, 0x90, 0x5D, 0x67, 0xD1, 0x8F,
                0x99, 0x6C, 0x31, 0xA2, 0x2C, 0x8E, 0x99, 0x7E,
                0x75, 0xC5, 0x26, 0x71, 0xD1, 0xB0, 0xA5, 0x41,
                0x67, 0x19, 0xF7, 0x40, 0x04, 0xBE, 0xB2, 0xC0,
                0x97, 0xFB, 0xF6, 0xD4, 0xEF, 0x48, 0x5B, 0x93,
                0x81, 0xF8, 0xE1, 0x6A, 0x0E, 0xA0, 0x74, 0x6B,
                0x99, 0xC6, 0x23, 0xF5, 0x02, 0xDE, 0x47, 0x49,
                0x1E, 0x9D, 0xAE, 0x55, 0x20, 0xB5, 0xDE, 0xA0,
                0x04, 0x32, 0x37, 0x4B, 0x24, 0xE4, 0x64, 0x1B,
                0x1B, 0x4B, 0xC0, 0xC7, 0x30, 0x08, 0xA6, 0xAE,
                0x50, 0x86, 0x08, 0x34, 0x70, 0xE5, 0xB0, 0x3B,
            };

            byte[] output;

            using (RSA rsa = RSAFactory.Create())
            {
                rsa.ImportParameters(TestData.UnusualExponentParameters);
                output = Decrypt(rsa, cipherBytes, RSAEncryptionPadding.OaepSHA1);
            }

            Assert.Equal(TestData.HelloBytes, output);
        }

        /// <summary>
        /// Encrypt-then-decrypt roundtrip with a freshly generated key: the ciphertext
        /// must differ from the plaintext, and the decryption must restore it.
        /// </summary>
        [Fact]
        public void RsaCryptRoundtrip()
        {
            byte[] crypt;
            byte[] output;

            using (RSA rsa = RSAFactory.Create())
            {
                crypt = Encrypt(rsa, TestData.HelloBytes, RSAEncryptionPadding.OaepSHA1);
                output = Decrypt(rsa, crypt, RSAEncryptionPadding.OaepSHA1);
            }

            Assert.NotEqual(crypt, output);
            Assert.Equal(TestData.HelloBytes, output);
        }

        /// <summary>
        /// Exporting the private parameters of an ephemeral key must not invalidate
        /// the key for subsequent decryption.
        /// </summary>
        [ConditionalFact(nameof(EphemeralKeysAreExportable))]
        public void RsaDecryptAfterExport()
        {
            byte[] output;

            using (RSA rsa = RSAFactory.Create())
            {
                byte[] crypt = Encrypt(rsa, TestData.HelloBytes, RSAEncryptionPadding.OaepSHA1);

                // Export the key, this should not clear/destroy the key.
                RSAParameters ignored = rsa.ExportParameters(true);
                output = Decrypt(rsa, crypt, RSAEncryptionPadding.OaepSHA1);
            }

            Assert.Equal(TestData.HelloBytes, output);
        }

        /// <summary>
        /// Roundtrip with a 16384-bit key; gracefully skips if the platform refuses
        /// a key that large. Ciphertext length must equal the key size in bytes.
        /// </summary>
        [Fact]
        public void LargeKeyCryptRoundtrip()
        {
            byte[] output;

            using (RSA rsa = RSAFactory.Create())
            {
                try
                {
                    rsa.ImportParameters(TestData.RSA16384Params);
                }
                catch (CryptographicException)
                {
                    // The key is pretty big, perhaps it was refused.
                    return;
                }

                byte[] crypt = Encrypt(rsa, TestData.HelloBytes, RSAEncryptionPadding.OaepSHA1);

                // RSA ciphertext is exactly the modulus size: KeySize bits == length * 8.
                Assert.Equal(rsa.KeySize, crypt.Length * 8);

                output = Decrypt(rsa, crypt, RSAEncryptionPadding.OaepSHA1);
            }

            Assert.Equal(TestData.HelloBytes, output);
        }

        /// <summary>Roundtrip with the unusual-exponent key.</summary>
        [Fact]
        public void UnusualExponentCryptRoundtrip()
        {
            byte[] crypt;
            byte[] output;

            using (RSA rsa = RSAFactory.Create())
            {
                rsa.ImportParameters(TestData.UnusualExponentParameters);
                crypt = Encrypt(rsa, TestData.HelloBytes, RSAEncryptionPadding.OaepSHA1);
                output = Decrypt(rsa, crypt, RSAEncryptionPadding.OaepSHA1);
            }

            Assert.NotEqual(crypt, output);
            Assert.Equal(TestData.HelloBytes, output);
        }

        /// <summary>
        /// The legacy EncryptValue/DecryptValue raw-RSA methods must throw
        /// NotSupportedException on this implementation.
        /// </summary>
        [Fact]
        public void NotSupportedValueMethods()
        {
            using (RSA rsa = RSAFactory.Create())
            {
                Assert.Throws<NotSupportedException>(() => rsa.DecryptValue(null));
                Assert.Throws<NotSupportedException>(() => rsa.EncryptValue(null));
            }
        }
    }
}
using System;
using NUnit.Framework;
using Pash.Implementation;
using System.Management.Automation.Runspaces;
using System.Management.Automation;
using System.Collections.Generic;
using System.Management.Automation.Language;
using TestPSSnapIn;
using System.Management.Automation.Provider;

namespace TestHost
{
    // Minimal provider used only so a ProviderInfo can be registered for drive tests.
    class DummyProvider : DriveCmdletProvider
    {
    }

    /// <summary>
    /// Tests scope resolution of variables, drives, functions, and aliases across a
    /// chain of nested SessionState instances: global -> script -> function -> local.
    /// Covers qualified-name parsing, private items, and scope-targeted set/get/remove.
    /// </summary>
    [TestFixture]
    public class SessionStateScopeTests
    {
        // Identifies one of the four nested scopes built in createScopes().
        public enum AvailableStates
        {
            Global,
            Script,
            Function,
            Local
        };

        private SessionState globalState;
        private SessionState scriptState;
        private SessionState functionState;
        private SessionState localState;
        // Maps each enum value to its SessionState so TestCases can address scopes by name.
        private Dictionary<AvailableStates, SessionState> states;
        private CommandManager hostCommandManager;
        private ProviderInfo dummyProvider;

        // Builds the scope chain and registers the dummy drive provider in the global scope.
        [SetUp]
        public void createScopes()
        {
            TestHost testHost = new TestHost(new TestHostUserInterface());
            Runspace hostRunspace = TestHost.CreateRunspace(testHost);
            globalState = hostRunspace.ExecutionContext.SessionState;
            var dummyProviderInfo = new ProviderInfo(globalState, typeof(DummyProvider), "DummyProvider", "", null);
            globalState.Provider.Add(dummyProviderInfo, hostRunspace.ExecutionContext);
            dummyProvider = globalState.Provider.GetOne("DummyProvider");
            scriptState = new SessionState(globalState);
            scriptState.IsScriptScope = true;
            functionState = new SessionState(scriptState);
            localState = new SessionState(functionState);
            states = new Dictionary<AvailableStates, SessionState>();
            states.Add(AvailableStates.Global, globalState);
            states.Add(AvailableStates.Script, scriptState);
            states.Add(AvailableStates.Function, functionState);
            states.Add(AvailableStates.Local, localState);
            hostCommandManager = new CommandManager(hostRunspace as LocalRunspace);
        }

        #region general scope related

        // Verifies parsing of scope-qualified names; unknown/numeric specifiers are
        // treated as part of the unqualified name, not as a scope.
        [TestCase("private:foo", "private", "foo")]
        [TestCase("local:foo", "local", "foo")]
        [TestCase("script:foo", "script", "foo")]
        [TestCase("global:foo", "global", "foo")]
        [TestCase("global:private:foo", "global", "private:foo")]
        [TestCase("0:foo", "", "0:foo")]
        [TestCase("1:foo", "", "1:foo")]
        [TestCase("-1:foo", "", "-1:foo")]
        [TestCase("bar:foo", "", "bar:foo")]
        public void QualifiedNameTest(string name, string specifier, string unqualifiedName)
        {
            var qualName = new SessionStateScope<PSVariable>.QualifiedName(name);
            Assert.AreEqual(specifier, qualName.ScopeSpecifier);
            Assert.AreEqual(unqualifiedName, qualName.UnqualifiedName);
        }

        #endregion

        #region variable related

        [TestCase("x", "f")] //correct x is fetched in general (from function scope)
        [TestCase("local:x", null)] //local scope has no variable x
        [TestCase("script:x", null)] //sx is private in the script scope
        [TestCase("global:x", "g")]
        [TestCase("y", "l")] //the overridden y in the local scope
        [TestCase("local:y", "l")] //also the local one, but explicitly
        [TestCase("script:y", "s")]
        [TestCase("global:y", "g")]
        [TestCase("z", "s")] //ignores the private z in function scope
        public void VariableAccessTest(string name, object expected)
        {
            globalState.PSVariable.Set(new PSVariable("x", "g"));
            globalState.PSVariable.Set(new PSVariable("y", "g"));
            scriptState.PSVariable.Set(new PSVariable("x", "s", ScopedItemOptions.Private));
            scriptState.PSVariable.Set(new PSVariable("y", "s"));
            scriptState.PSVariable.Set(new PSVariable("z", "s"));
            functionState.PSVariable.Set(new PSVariable("x", "f"));
            functionState.PSVariable.Set(new PSVariable("y", "f"));
            functionState.PSVariable.Set(new PSVariable("z", "f", ScopedItemOptions.Private));
            localState.PSVariable.Set(new PSVariable("y", "l"));
            Assert.AreEqual(expected, localState.PSVariable.GetValue(name));
        }

        // Sets variables through scope-qualified names from the local scope, then
        // reads "x" back from the given scope.
        [TestCase(AvailableStates.Global, "g", true)]
        [TestCase(AvailableStates.Script, "s", true)]
        [TestCase(AvailableStates.Function, "f", true)]
        [TestCase(AvailableStates.Local, "l", true)]
        [TestCase(AvailableStates.Local, "s", false)] //makes sure private setting works
        public void VariableSetTest(AvailableStates sessionState, object value, bool initLocal=true)
        {
            functionState.PSVariable.Set("private:x", "f");
            localState.PSVariable.Set("global:x", "g");
            localState.PSVariable.Set("script:x", "s");
            if (initLocal)
            {
                localState.PSVariable.Set("local:x", "l");
            }
            Assert.AreEqual(value, states[sessionState].PSVariable.GetValue("x"));
        }

        // Re-setting a variable must not flip its private flag in either direction:
        // "y" stays private (invisible to the child scope), "x" stays... per the
        // asserts, "x" becomes/remains visible with the later value "x1".
        [Test]
        public void VariableNoPrivacyChangeTest()
        {
            globalState.PSVariable.Set("private:y", "py0");
            globalState.PSVariable.Set("y", "py1");
            globalState.PSVariable.Set("x", "x0");
            globalState.PSVariable.Set("private:x", "x1");
            Assert.IsNull(scriptState.PSVariable.GetValue("y"));
            Assert.AreEqual("x1", scriptState.PSVariable.GetValue("x"));
        }

        // Removing by (possibly qualified) name removes the variable only from the
        // addressed scope; an unqualified name walks up to the nearest definition.
        [TestCase("global:x", AvailableStates.Global, true)]
        [TestCase("script:x", AvailableStates.Script, true)]
        [TestCase("local:x", AvailableStates.Local, true)]
        [TestCase("x", AvailableStates.Function, false)] //looks in parent scopes and removes the variable
        public void VariableRemoveTest(string variable, AvailableStates affectedState, bool initLocal)
        {
            globalState.PSVariable.Set("x", "g");
            scriptState.PSVariable.Set("x", "s");
            functionState.PSVariable.Set("x", "f");
            if (initLocal)
            {
                localState.PSVariable.Set("x", "l");
            }
            localState.PSVariable.Remove(variable);
            foreach (KeyValuePair<AvailableStates, SessionState> curState in states)
            {
                if (curState.Key == affectedState || (curState.Key == AvailableStates.Local && !initLocal))
                {
                    Assert.IsNull(curState.Value.PSVariable.Get("local:x"));
                }
                else
                {
                    Assert.IsNotNull(curState.Value.PSVariable.Get("local:x"));
                }
            }
        }

        // Removing by PSVariable object only affects the scope it is called on;
        // a second removal throws instead of falling back to parent scopes.
        [Test]
        public void VariableRemoveByObjectTest()
        {
            globalState.PSVariable.Set("x", "g");
            scriptState.PSVariable.Set("x", "s");
            var variable = new PSVariable("x");
            scriptState.PSVariable.Remove(variable);
            Assert.IsNull(scriptState.PSVariable.Get("local:x"));
            Assert.IsNotNull(globalState.PSVariable.Get("local:x"));
            try
            {
                scriptState.PSVariable.Remove(variable); //doesn't affect parent scopes (different to passing the name)
                Assert.True(false);
            }
            catch (ItemNotFoundException)
            {
            }
            Assert.IsNotNull(globalState.PSVariable.Get("local:x"));
        }

        #endregion

        #region drive related

        // Numeric scope specifiers count upward from local (0) to global (3);
        // out-of-range and unknown specifiers throw.
        [TestCase("4", null, ExpectedException=typeof(ArgumentOutOfRangeException))]
        [TestCase("foo", null, ExpectedException=typeof(ArgumentException))]
        [TestCase("3", AvailableStates.Global)]
        [TestCase("global", AvailableStates.Global)]
        [TestCase("2", AvailableStates.Script)]
        [TestCase("script", AvailableStates.Script)]
        [TestCase("1", AvailableStates.Function)]
        [TestCase("0", AvailableStates.Local)]
        [TestCase("local", AvailableStates.Local)]
        public void DriveNewTest(string scope, AvailableStates affectedState)
        {
            PSDriveInfo info = createDrive("test");
            localState.Drive.New(info, scope);
            Assert.AreEqual(info, states[affectedState].Drive.Get(info.Name));
        }

        [Test]
        public void DriveNewExistingTest()
        {
            PSDriveInfo info = createDrive("test");
            globalState.Drive.New(info, "local");
            scriptState.Drive.New(info, "local"); //overriding should work of course
            try
            {
                scriptState.Drive.New(info, "global"); //this shouldn't work, as the global scope has that drive
                Assert.True(false);
            }
            catch (SessionStateException)
            {
            }
        }

        // Removing a drive at a given scope leaves the drives of all other scopes intact.
        [TestCase("4", null, ExpectedException=typeof(ArgumentOutOfRangeException))]
        [TestCase("foo", null, ExpectedException=typeof(ArgumentException))]
        [TestCase("3", AvailableStates.Global)]
        [TestCase("global", AvailableStates.Global)]
        [TestCase("2", AvailableStates.Script)]
        [TestCase("script", AvailableStates.Script)]
        [TestCase("1", AvailableStates.Function)]
        [TestCase("0", AvailableStates.Local)]
        [TestCase("local", AvailableStates.Local)]
        public void DriveRemoveTest(string scope, AvailableStates affectedState)
        {
            Dictionary<AvailableStates, PSDriveInfo> driveInfos = new Dictionary<AvailableStates, PSDriveInfo>();
            foreach (var curState in states)
            {
                var info = createDrive(curState.Key.ToString());
                curState.Value.Drive.New(info, "local");
                driveInfos[curState.Key] = info;
            }
            localState.Drive.Remove(driveInfos[affectedState].Name, true, scope);
            foreach (var curState in states)
            {
                if (curState.Key == affectedState)
                {
                    Assert.AreEqual(0, curState.Value.Drive.GetAllAtScope("local").Count);
                }
                else
                {
                    Assert.AreEqual(driveInfos[curState.Key], curState.Value.Drive.Get(driveInfos[curState.Key].Name));
                }
            }
        }

        [Test]
        public void DriveRemoveNotExistingTest()
        {
            PSDriveInfo info = createDrive("test");
            Assert.Throws<DriveNotFoundException>(delegate {
                globalState.Drive.Remove(info.Name, true, "local");
            });
        }

        // GetAll merges all visible drives; a child-scope drive shadows a same-named
        // parent drive and must appear exactly once.
        [Test]
        public void DriveGetAllTest()
        {
            globalState.Drive.New(createDrive("override", "first"), "local");
            globalState.Drive.New(createDrive("global"), "local");
            scriptState.Drive.New(createDrive("script"), "local");
            functionState.Drive.New(createDrive("override", "second"), "local");
            var drives = localState.Drive.GetAll();
            Assert.AreEqual(3, drives.Count);
            bool found = false;
            foreach (var curDrive in drives)
            {
                if (curDrive.Name.Equals("override"))
                {
                    if (found) //make sure it's only one time in there
                    {
                        Assert.True(false);
                    }
                    Assert.AreEqual("second", curDrive.Description);
                    found = true;
                }
            }
            Assert.True(found);
        }

        // GetAllAtScope returns only the drives defined directly in the addressed scope.
        [TestCase("local", new string [] {})]
        [TestCase("0", new string [] {})]
        [TestCase("1", new string [] {"function"})]
        [TestCase("script", new string [] {"script1", "script2"})]
        [TestCase("2", new string [] {"script1", "script2"})]
        [TestCase("global", new string [] {"global1", "global2"})]
        [TestCase("3", new string [] {"global1", "global2"})]
        [TestCase("4", new string [] {}, ExpectedException=typeof(ArgumentOutOfRangeException))]
        public void DriveGetAllAtScopeTest(string scope, string[] expectedDescriptions)
        {
            globalState.Drive.New(createDrive("x", "global1"), "local");
            globalState.Drive.New(createDrive("y", "global2"), "local");
            scriptState.Drive.New(createDrive("x", "script1"), "local");
            scriptState.Drive.New(createDrive("y", "script2"), "local");
            functionState.Drive.New(createDrive("x", "function"), "local");
            var drives = localState.Drive.GetAllAtScope(scope);
            Assert.AreEqual(expectedDescriptions.Length, drives.Count);
            foreach (var curDrive in drives)
            {
                Assert.Contains(curDrive.Description, expectedDescriptions);
            }
        }

        [Test]
        public void DriveGetTest()
        {
            globalState.Drive.New(createDrive("override", "first"), "local");
            globalState.Drive.New(createDrive("global"), "local");
            functionState.Drive.New(createDrive("override", "second"), "local");
            var drive = localState.Drive.Get("override");
            Assert.AreEqual("override", drive.Name);
            Assert.AreEqual("second", drive.Description);
            Assert.AreEqual("global", localState.Drive.Get("global").Name);
            try
            {
                localState.Drive.Get("doesnt_exist");
                Assert.True(false);
            }
            catch (MethodInvocationException)
            {
            }
        }

        [TestCase("local", "local")]
        [TestCase("0", "local")]
        [TestCase("1", "function")]
        [TestCase("script", "script")]
        [TestCase("2", "script")]
        [TestCase("global", "global")]
        [TestCase("3", "global")]
        [TestCase("4", "", ExpectedException=typeof(ArgumentOutOfRangeException))]
        public void DriveGetAtScopeTest(string scope, string expectedDescription)
        {
            globalState.Drive.New(createDrive("x", "global"), "local");
            scriptState.Drive.New(createDrive("x", "script"), "local");
            functionState.Drive.New(createDrive("x", "function"), "local");
            localState.Drive.New(createDrive("x", "local"), "local");
            Assert.AreEqual(expectedDescription, localState.Drive.GetAtScope("x", scope).Description);
        }

        // NOTE(review): this method has no [Test] attribute, so NUnit never runs it.
        // This may be deliberate: the drives are created via createDrive() with the
        // "DummyProvider" provider, yet it queries GetAllForProvider("testProvider").
        // Confirm the intended provider name before enabling it.
        public void DriveGetAllForProviderTest()
        {
            globalState.Drive.New(createDrive("global", ""), "local");
            scriptState.Drive.New(createDrive("script", ""), "local");
            functionState.Drive.New(createDrive("function", ""), "local");
            localState.Drive.New(createDrive("local", ""), "local");
            var drives = localState.Drive.GetAllForProvider("testProvider");
            Assert.AreEqual(4, drives.Count);
            foreach (var curDrive in drives)
            {
                Assert.Contains(curDrive.Name, new string[] {"global", "script", "function", "local"});
            }
            drives = scriptState.Drive.GetAllForProvider("testProvider");
            Assert.AreEqual(2, drives.Count);
            foreach (var curDrive in drives)
            {
                Assert.Contains(curDrive.Name, new string[] {"global", "script"});
            }
            try
            {
                globalState.Drive.GetAllForProvider("doesnt_exist");
                Assert.True(false);
            }
            catch (MethodInvocationException)
            {
            }
        }

        // Helper: builds a drive backed by the registered dummy provider.
        private PSDriveInfo createDrive(string name, string descr="")
        {
            return new PSDriveInfo(name, dummyProvider, String.Empty, descr, null);
        }

        #endregion

        #region function related

        [TestCase("x", "f")] //correct x is fetched in general (from function scope)
        [TestCase("local:x", null)] //local scope has no function x
        [TestCase("script:x", null)] //x is private in the script scope
        [TestCase("global:x", "g")]
        [TestCase("y", "l")] //the overridden y in the local scope
        [TestCase("local:y", "l")] //also the local one, but explicitly
        [TestCase("script:y", "s")]
        [TestCase("global:y", "g")]
        [TestCase("z", "s")] //ignores the private z in function scope
        public void FunctionGetTest(string name, string expectedDescription)
        {
            globalState.Function.Set(createFunction("x", "g"));
            globalState.Function.Set(createFunction("y", "g"));
            scriptState.Function.Set(createFunction("x", "s", ScopedItemOptions.Private));
            scriptState.Function.Set(createFunction("y", "s"));
            scriptState.Function.Set(createFunction("z", "s"));
            functionState.Function.Set(createFunction("x", "f"));
            functionState.Function.Set(createFunction("y", "f"));
            functionState.Function.Set(createFunction("z", "f", ScopedItemOptions.Private));
            localState.Function.Set(createFunction("y", "l"));
            var info = localState.Function.Get(name);
            if (expectedDescription == null)
            {
                Assert.IsNull(info);
            }
            else
            {
                Assert.AreEqual(expectedDescription, info.Description);
            }
        }

        // Same shadowing semantics as DriveGetAllTest, but for functions.
        [Test]
        public void FunctionGetAllTest()
        {
            globalState.Function.Set(createFunction("override", "first"));
            globalState.Function.Set(createFunction("global"));
            scriptState.Function.Set(createFunction("script"));
            functionState.Function.Set(createFunction("override", "second"));
            var funs = localState.Function.GetAll();
            Assert.AreEqual(3, funs.Count);
            bool found = false;
            foreach (var curFun in funs)
            {
                if (curFun.Value.Name.Equals("override"))
                {
                    if (found) //make sure it's only one time in there
                    {
                        Assert.True(false);
                    }
                    Assert.AreEqual("second", curFun.Value.Description);
                    found = true;
                }
            }
            Assert.True(found);
        }

        [TestCase(AvailableStates.Global, "g", true)]
        [TestCase(AvailableStates.Script, "s", true)]
        [TestCase(AvailableStates.Function, "f", true)]
        [TestCase(AvailableStates.Local, "l", true)]
        [TestCase(AvailableStates.Local, "s", false)] //makes sure private setting works
        public void FunctionSetObjectTest(AvailableStates sessionState, object value, bool initLocal=true)
        {
            functionState.Function.Set("private:x", null, "f");
            localState.Function.Set("global:x", null, "g");
            localState.Function.Set("script:x", null, "s");
            if (initLocal)
            {
                localState.Function.Set("local:x", null, "l");
            }
            Assert.AreEqual(value, states[sessionState].Function.Get("x").Description);
        }

        [TestCase("global:x", AvailableStates.Global, true)]
        [TestCase("script:x", AvailableStates.Script, true)]
        [TestCase("local:x", AvailableStates.Local, true)]
        [TestCase("x", AvailableStates.Function, false)] //looks in parent scopes and removes the variable
        public void FunctionRemoveTest(string variable, AvailableStates affectedState, bool initLocal)
        {
            globalState.Function.Set("x", null, "g");
            scriptState.Function.Set("x", null, "s");
            functionState.Function.Set("x", null, "f");
            if (initLocal)
            {
                localState.Function.Set("x", null, "l");
            }
            localState.Function.Remove(variable);
            foreach (KeyValuePair<AvailableStates, SessionState> curState in states)
            {
                if (curState.Key == affectedState || (curState.Key == AvailableStates.Local && !initLocal))
                {
                    Assert.IsNull(curState.Value.Function.Get("local:x"));
                }
                else
                {
                    Assert.IsNotNull(curState.Value.Function.Get("local:x"));
                }
            }
        }

        // Helper: builds a FunctionInfo whose Description carries the scope marker.
        private FunctionInfo createFunction(string name, string description = "", ScopedItemOptions options = ScopedItemOptions.None)
        {
            var info = new FunctionInfo(name, null, null, options);
            info.Description = description;
            return info;
        }

        #endregion

        #region alias related

        [TestCase(AvailableStates.Global, true)] //the private one
        [TestCase(AvailableStates.Script, false)] //doesn't see the private one
        [TestCase(AvailableStates.Function, true)] //the non-private
        [TestCase(AvailableStates.Local, true)] //can see the non-private
        public void AliasExistsTest(AvailableStates affectedState, bool exists)
        {
            globalState.Alias.New(createAlias("test", "", ScopedItemOptions.Private), "local");
            functionState.Alias.New(createAlias("test", ""), "local");
            Assert.AreEqual(exists, states[affectedState].Alias.Exists("test"));
        }

        [TestCase("4", null, ExpectedException=typeof(ArgumentOutOfRangeException))]
        [TestCase("foo", null, ExpectedException=typeof(ArgumentException))]
        [TestCase("3", AvailableStates.Global)]
        [TestCase("global", AvailableStates.Global)]
        [TestCase("2", AvailableStates.Script)]
        [TestCase("script", AvailableStates.Script)]
        [TestCase("1", AvailableStates.Function)]
        [TestCase("0", AvailableStates.Local)]
        [TestCase("local", AvailableStates.Local)]
        public void AliasNewTest(string scope, AvailableStates affectedState)
        {
            AliasInfo info = createAlias("test");
            localState.Alias.New(info, scope);
            Assert.AreEqual(info, states[affectedState].Alias.Get(info.Name));
        }

        [TestCase("4", null, ExpectedException=typeof(ArgumentOutOfRangeException))]
        [TestCase("foo", null, ExpectedException=typeof(ArgumentException))]
        [TestCase("3", AvailableStates.Global)]
        [TestCase("global", AvailableStates.Global)]
        [TestCase("2", AvailableStates.Script)]
        [TestCase("script", AvailableStates.Script)]
        [TestCase("1", AvailableStates.Function)]
        [TestCase("0", AvailableStates.Local)]
        [TestCase("local", AvailableStates.Local)]
        public void AliasRemoveTest(string scope, AvailableStates affectedState)
        {
            Dictionary<AvailableStates, AliasInfo> aliasInfos = new Dictionary<AvailableStates, AliasInfo>();
            foreach (var curState in states)
            {
                var info = createAlias(curState.Key.ToString());
                curState.Value.Alias.New(info, "local");
                aliasInfos[curState.Key] = info;
            }
            localState.Alias.Remove(aliasInfos[affectedState].Name, scope);
            foreach (var curState in states)
            {
                if (curState.Key == affectedState)
                {
                    Assert.AreEqual(0, curState.Value.Alias.GetAllLocal().Count);
                }
                else
                {
                    Assert.AreEqual(aliasInfos[curState.Key], curState.Value.Alias.Get(aliasInfos[curState.Key].Name));
                }
            }
        }

        [Test]
        public void AliasGetAllTest()
        {
            globalState.Alias.New(createAlias("override", "first"), "local");
            globalState.Alias.New(createAlias("global"), "local");
            scriptState.Alias.New(createAlias("script"), "local");
            functionState.Alias.New(createAlias("override", "second"), "local");
            var drives = localState.Alias.GetAll();
            Assert.AreEqual(3, drives.Count);
            bool found = false;
            foreach (var curAlias in drives)
            {
                if (curAlias.Value.Name.Equals("override"))
                {
                    if (found) //make sure it's only one time in there
                    {
                        Assert.True(false);
                    }
                    Assert.AreEqual("second", curAlias.Value.Definition);
                    found = true;
                }
            }
            Assert.True(found);
        }

        // Unlike Drive.Get, Alias.Get returns null (instead of throwing) for unknown names.
        [Test]
        public void AliasGetTest()
        {
            globalState.Alias.New(createAlias("override", "first"), "local");
            globalState.Alias.New(createAlias("global"), "local");
            functionState.Alias.New(createAlias("override", "second"), "local");
            var alias = localState.Alias.Get("override");
            Assert.AreEqual("override", alias.Name);
            Assert.AreEqual("second", alias.Definition);
            Assert.AreEqual("global", localState.Alias.Get("global").Name);
            Assert.IsNull(localState.Alias.Get("doesnt_exist"));
        }

        [TestCase("local", "local")]
        [TestCase("0", "local")]
        [TestCase("1", "function")]
        [TestCase("script", "script")]
        [TestCase("2", "script")]
        [TestCase("global", "global")]
        [TestCase("3", "global")]
        [TestCase("4", "", ExpectedException=typeof(ArgumentOutOfRangeException))]
        public void AliasGetAtScopeTest(string scope, string expectedDefinition)
        {
            globalState.Alias.New(createAlias("x", "global"), "local");
            scriptState.Alias.New(createAlias("x", "script"), "local");
            functionState.Alias.New(createAlias("x", "function"), "local");
            localState.Alias.New(createAlias("x", "local"), "local");
            Assert.AreEqual(expectedDefinition, localState.Alias.GetAtScope("x", scope).Definition);
        }

        // Helper: builds an AliasInfo whose Definition carries the scope marker.
        private AliasInfo createAlias(string name, string definition = "", ScopedItemOptions options = ScopedItemOptions.None)
        {
            return new AliasInfo(name, definition, hostCommandManager, options);
        }

        #endregion
    }
}
// Copyright (c) Rotorz Limited. All rights reserved.
// Licensed under the MIT license. See LICENSE file in the project root.

using System;
using System.Collections.Generic;
using System.Collections.ObjectModel;
using System.Linq;
using UnityEditor;
using UnityEngine;

using Object = UnityEngine.Object;

namespace Rotorz.Tile.Editor
{
    /// <summary>
    /// Brush database provides easy access to brush and tileset records in editor scripts.
    /// </summary>
    public sealed class BrushDatabase : ScriptableObject
    {
        #region Singleton

        private static BrushDatabase s_Instance;

        /// <summary>
        /// Gets the one and only brush database instance.
        /// </summary>
        public static BrushDatabase Instance {
            get {
                if (s_Instance == null) {
                    Console.WriteLine("Loading brushes...");
                    // Reuse an instance surviving an assembly reload before creating a new one.
                    s_Instance = UnityEngine.Resources.FindObjectsOfTypeAll<BrushDatabase>().FirstOrDefault();
                    if (s_Instance == null) {
                        s_Instance = ScriptableObject.CreateInstance<BrushDatabase>();
                    }
                    else {
                        Console.WriteLine("Brush database already loaded.");
                    }
                }

                // Ensure that instance has been enabled.
                if (!s_Instance.hasEnabled) {
                    s_Instance.OnEnable();
                }

                return s_Instance;
            }
        }

        // Prevent instantiation!
        private BrushDatabase()
        {
        }

        #endregion


        /// <summary>
        /// The time (in ticks) when brush database was last scanned.
        /// </summary>
        internal static long s_TimeLastUpdated;


        /// <summary>
        /// Collection of brush records that are sorted by name.
        /// </summary>
        [NonSerialized]
        private BrushAssetRecord[] brushRecords = { };
        // Lazily created read-only wrapper over brushRecords (invalidated on rescan).
        [NonSerialized]
        private ReadOnlyCollection<BrushAssetRecord> brushRecordsReadOnly;

        /// <summary>
        /// Gets a read-only list of brush records that are sorted by display name.
        /// </summary>
        /// <value>
        /// List of brush records.
        /// </value>
        public ReadOnlyCollection<BrushAssetRecord> BrushRecords {
            get {
                if (this.brushRecordsReadOnly == null) {
                    this.brushRecordsReadOnly = new ReadOnlyCollection<BrushAssetRecord>(this.brushRecords);
                }
                return this.brushRecordsReadOnly;
            }
        }

        /// <summary>
        /// Collection of tileset records.
        /// </summary>
        [NonSerialized]
        private TilesetAssetRecord[] tilesetRecords = { };
        // Lazily created read-only wrapper over tilesetRecords (invalidated on rescan).
        [NonSerialized]
        private ReadOnlyCollection<TilesetAssetRecord> tilesetRecordsReadOnly;

        /// <summary>
        /// Gets a read-only list of tileset records that are sorted by display name.
        /// </summary>
        /// <value>
        /// List of tileset records.
        /// </value>
        public ReadOnlyCollection<TilesetAssetRecord> TilesetRecords {
            get {
                if (this.tilesetRecordsReadOnly == null) {
                    this.tilesetRecordsReadOnly = new ReadOnlyCollection<TilesetAssetRecord>(this.tilesetRecords);
                }
                return this.tilesetRecordsReadOnly;
            }
        }


        #region Setup

        // Sentinel persisted in hasInitialized; bumping it forces OnInitialize to
        // run again after an upgrade.
        private const int HasInitializedCode = 2;

        [SerializeField]
        private int hasInitialized;
        [NonSerialized]
        private bool hasEnabled;

        private void OnEnable()
        {
            // Guard: may be invoked both by Unity and explicitly from Instance.
            if (this.hasEnabled) {
                return;
            }
            this.hasEnabled = true;

            s_Instance = this;
            this.hideFlags = HideFlags.DontSave;

            if (this.hasInitialized < HasInitializedCode) {
                this.OnInitialize();
                this.hasInitialized = HasInitializedCode;
                Console.WriteLine("Initialized brush database.");
            }
            else {
                this.Rescan();
            }

            BrushDatabaseRescanProcessor.s_EnableScanner = true;
        }

        private void OnInitialize()
        {
            AssetDatabase.SaveAssets();
            this.Rescan();
        }

        #endregion


        #region Methods

        /// <summary>
        /// Rescan brush assets.
        /// </summary>
        public void Rescan()
        {
            Console.WriteLine("Scanning for brushes and tilesets...");

            var newRecords = new List<BrushAssetRecord>();
            // Local list; swapped into the field of the same name further below.
            var tilesetRecords = new List<TilesetAssetRecord>();

            // Read records for brushes.
            foreach (var guid in AssetDatabase.FindAssets("t:Rotorz.Tile.Brush")) {
                var assetPath = AssetDatabase.GUIDToAssetPath(guid);
                this.ScanBrush(assetPath, newRecords);
            }

            // Read records for tilesets.
            foreach (var guid in AssetDatabase.FindAssets("t:Rotorz.Tile.Tileset")) {
                var assetPath = AssetDatabase.GUIDToAssetPath(guid);
                var tilesetRecord = this.ScanAtlas(assetPath, newRecords);
                if (tilesetRecord != null) {
                    tilesetRecords.Add(tilesetRecord);
                }
            }

            // Sort prefabs by name.
            this.brushRecords = newRecords.ToArray();
            this.brushRecordsReadOnly = null;
            this.SortBrushRecords();

            // Sort tilesets by name.
            this.tilesetRecords = tilesetRecords.ToArray();
            this.tilesetRecordsReadOnly = null;
            this.SortTilesetRecords();

            // Note: Old brush records are retained until brushes have been rescanned.

            s_TimeLastUpdated = System.DateTime.Now.Ticks;
        }

        private void SortBrushRecords()
        {
            // Bump the change stamp so cached views know to refresh.
            ++s_TimeLastUpdated;
            Array.Sort(this.brushRecords, (x, y) => string.Compare(x.DisplayName, y.DisplayName));
        }

        private void SortTilesetRecords()
        {
            // Bump the change stamp so cached views know to refresh.
            ++s_TimeLastUpdated;
            Array.Sort(this.tilesetRecords, (x, y) => string.Compare(x.DisplayName, y.DisplayName));
        }

        /// <summary>
        /// Clear records for any missing brush and tileset assets.
        /// </summary>
        internal void ClearMissingRecords()
        {
            Console.WriteLine("Clearing missing brush/tileset records...");

            this.ClearMissingBrushRecords();
            this.ClearMissingTilesetRecords();

            ++s_TimeLastUpdated;
        }

        private void ClearMissingBrushRecords()
        {
            // Lazily build a filtered copy only once the first missing brush is found;
            // if none is missing the existing array is left untouched.
            List<BrushAssetRecord> brushRecords = null;

            for (int i = 0; i < this.brushRecords.Length; ++i) {
                var brushRecord = this.brushRecords[i];
                if (brushRecord.Brush == null) {
                    if (brushRecords == null) {
                        // Copy all prior non-missing brush records to new list.
                        brushRecords = new List<BrushAssetRecord>();
                        for (int j = 0; j < i; ++j) {
                            brushRecords.Add(this.brushRecords[j]);
                        }
                    }
                }
                else if (brushRecords != null) {
                    brushRecords.Add(brushRecord);
                }
            }

            if (brushRecords != null) {
                this.brushRecords = brushRecords.ToArray();
                this.brushRecordsReadOnly = new ReadOnlyCollection<BrushAssetRecord>(this.brushRecords);
            }
        }

        private void ClearMissingTilesetRecords()
        {
            var tilesetRecords = new List<TilesetAssetRecord>();
            var brushRecords = new List<BrushAssetRecord>();

            for (int i = 0; i < this.tilesetRecords.Length; ++i) {
                var tilesetRecord = this.tilesetRecords[i];
                if (tilesetRecord.Tileset == null) {
                    // Tileset asset itself is gone; drop the whole record.
                    continue;
                }

                tilesetRecords.Add(tilesetRecord);

                // Clear missing brush records from tileset.
                brushRecords.Clear();
                foreach (var brushRecord in tilesetRecord.BrushRecords) {
                    if (brushRecord.Brush != null) {
                        brushRecords.Add(brushRecord);
                    }
                }
                if (brushRecords.Count != tilesetRecord.BrushRecords.Count) {
                    tilesetRecord.SetBrushRecords(brushRecords.ToArray());
                }
            }

            if (tilesetRecords.Count != this.tilesetRecords.Length) {
                this.tilesetRecords = tilesetRecords.ToArray();
                this.tilesetRecordsReadOnly = new ReadOnlyCollection<TilesetAssetRecord>(this.tilesetRecords);
            }
        }

        /// <summary>
        /// Find index of brush record.
        /// </summary>
        /// <remarks>
        /// <para>This function is slightly slower than <see cref="FindRecord"/> since
        /// it verifies whether record entries have been replaced with a value of <c>null</c>.
        /// This is important when rescanning a project for brushes since records are
        /// recycled where possible.</para>
        /// </remarks>
        /// <param name="brush">The brush.</param>
        /// <returns>
        /// Zero-based index of brush record; otherwise a value of -1 if not found.
        /// </returns>
        private int FindRecordIndexWithNullChecks(Brush brush)
        {
            if (brush != null) {
                for (int i = 0; i < this.brushRecords.Length; ++i) {
                    if (this.brushRecords[i] != null && this.brushRecords[i].Brush == brush) {
                        return i;
                    }
                }
            }
            return -1;
        }

        // Recycles an existing record for the brush when present (updating its asset
        // path / master flag), otherwise creates a fresh one; also wakes the brush.
        private BrushAssetRecord AddBrushRecord(Object mainAsset, Brush brush, string assetPath, bool master)
        {
            BrushAssetRecord record;

            int recordIndex = this.FindRecordIndexWithNullChecks(brush);
            if (recordIndex != -1) {
                record = this.brushRecords[recordIndex];
                // Master and asset path may have changed.
                record.AssetPath = assetPath;
                record.isMaster = master;
                // Remove record from old list since we want to recycle it!
                this.brushRecords[recordIndex] = null;
            }
            else {
                // Create new record.
                record = new BrushAssetRecord(assetPath, mainAsset, brush, master);
            }

            // Make sure that brush is awake!
            if (!brush._ready) {
                brush._ready = true;
                brush.Awake();
            }

            return record;
        }

        // Adds a record for a stand-alone brush asset at the given path (if it is one).
        private void ScanBrush(string assetPath, IList<BrushAssetRecord> newRecords)
        {
            var brush = AssetDatabase.LoadMainAssetAtPath(assetPath) as Brush;
            if (brush == null) {
                return;
            }

            // Is this a master brush?
            bool master = assetPath.Contains("/Master/");

            var brushRecord = this.AddBrushRecord(brush, brush, assetPath, master);
            newRecords.Add(brushRecord);
        }

        // Builds a tileset record (plus records for its contained tileset brushes)
        // from the asset at the given path; returns null when it holds no tileset.
        private TilesetAssetRecord ScanAtlas(string assetPath, IList<BrushAssetRecord> newRecords)
        {
            Tileset tileset = null;

            // The tileset may be any of the sub-assets at this path.
            Object[] assets = AssetDatabase.LoadAllAssetsAtPath(assetPath);
            foreach (var asset in assets) {
                tileset = asset as Tileset;
                if (tileset != null) {
                    break;
                }
            }
            if (tileset == null) {
                return null;
            }

            // Is this a master brush?
            bool master = assetPath.Contains("/Master/");

            var tilesetRecord = new TilesetAssetRecord(assetPath, tileset, master);

            var brushRecords = new List<BrushAssetRecord>();

            foreach (var asset in assets) {
                var tilesetBrush = asset as TilesetBrush;
                if (tilesetBrush == null) {
                    continue;
                }

                var tilesetBrushRecord = this.AddBrushRecord(tileset, tilesetBrush, assetPath, master);
                newRecords.Add(tilesetBrushRecord);
                brushRecords.Add(tilesetBrushRecord);
            }

            // Sort atlas brush index by atlas index.
            brushRecords.Sort((x, y) => {
                int a = (x.Brush as TilesetBrush).tileIndex;
                int b = (y.Brush as TilesetBrush).tileIndex;
                // Fall back to name order when two brushes share a tile index.
                return (a == b)
                    ? x.DisplayName.CompareTo(y.DisplayName)
                    : a - b;
            });

            tilesetRecord.SetBrushRecords(brushRecords.ToArray());

            return tilesetRecord;
        }

        /// <summary>
        /// Index of record in sorted (by display name) list of brushes.
        /// </summary>
        /// <param name="brush">The brush.</param>
        /// <returns>
        /// Zero-based index of record; otherwise a value of -1 if not found.
        /// </returns>
        public int IndexOfRecord(Brush brush)
        {
            for (int i = 0; i < this.brushRecords.Length; ++i) {
                if (this.brushRecords[i].Brush == brush) {
                    return i;
                }
            }
            return -1;
        }

        /// <summary>
        /// Find record for specified brush asset.
        /// </summary>
        /// <param name="brush">The brush.</param>
        /// <returns>
        /// Brush record when found; otherwise a value of <c>null</c>.
        /// </returns>
        public BrushAssetRecord FindRecord(Brush brush)
        {
            if (brush != null) {
                for (int i = 0; i < this.brushRecords.Length; ++i) {
                    if (this.brushRecords[i].Brush == brush) {
                        return this.brushRecords[i];
                    }
                }
            }
            return null;
        }

        /// <summary>
        /// Rename brush asset.
        /// </summary>
        /// <param name="brush">The brush.</param>
        /// <param name="newName">New name for brush.</param>
        /// <returns>
        /// Name that was assigned to brush.
        /// </returns>
        /// <exception cref="System.ArgumentException">
        /// If unable to rename brush.
        /// </exception>
        public string RenameBrush(Brush brush, string newName)
        {
            newName = newName.Trim();
            // Blank input: keep the current name rather than failing.
            if (string.IsNullOrEmpty(newName)) {
                return brush.name;
            }

            var brushRecord = this.FindRecord(brush);
            if (brushRecord == null) {
                return newName;
            }

            string newAssetName = newName;

            // If renaming asset file itself.
            if (brushRecord.MainAsset == brushRecord.Brush) {
                // Return current brush name if an error occurred whilst renaming asset.
                string error = AssetDatabase.RenameAsset(brushRecord.AssetPath, newAssetName);
                if (!string.IsNullOrEmpty(error)) {
                    throw new ArgumentException(error.Contains("does already exist")
                        ? "Another asset already exists with specified name."
                        : error
                    );
                }
                brushRecord.AssetPath = AssetDatabase.GetAssetPath(brush);
            }
            else {
                // Does another brush already exist with the same name?
                var tileset = brushRecord.MainAsset as Tileset;
                if (tileset != null) {
                    var tilesetRecord = this.FindTilesetRecord(tileset);
                    if (tilesetRecord != null && !tilesetRecord.IsNameUnique(newName, brush)) {
                        throw new ArgumentException("Tileset already contains a brush with that name.");
                    }
                }
            }

            brush.name = newAssetName;
            EditorUtility.SetDirty(brush);

            // Renaming changes sort order of records.
            this.SortBrushRecords();

            return brush.name;
        }

        /// <summary>
        /// Rename tileset asset.
        /// </summary>
        /// <param name="tileset">The tileset.</param>
        /// <param name="newName">New name for tileset.</param>
        /// <returns>
        /// Name that was assigned to tileset.
        /// </returns>
        /// <exception cref="System.ArgumentException">
        /// If unable to rename tileset.
        /// </exception>
        public string RenameTileset(Tileset tileset, string newName)
        {
            newName = newName.Trim();
            // Blank input: keep the current name rather than failing.
            if (string.IsNullOrEmpty(newName)) {
                return tileset.name;
            }

            var tilesetRecord = this.FindTilesetRecord(tileset);
            if (tilesetRecord == null) {
                return newName;
            }

            // Return current tileset name if an error occurred whilst renaming asset.
            string error = AssetDatabase.RenameAsset(tilesetRecord.AssetPath, newName);
            if (!string.IsNullOrEmpty(error)) {
                throw new ArgumentException(error.Contains("does already exist")
                    ? "Another asset already exists with specified name."
                    : error
                );
            }

            tileset.name = newName;
            EditorUtility.SetDirty(tileset);

            // Force update of atlas record display name.
            tilesetRecord.AssetPath = "";
            tilesetRecord.AssetPath = AssetDatabase.GetAssetPath(tileset);

            this.SortTilesetRecords();

            AssetDatabase.SaveAssets();

            return tileset.name;
        }

        /// <summary>
        /// Find record for specified tileset.
        /// </summary>
        /// <param name="tileset">The tileset.</param>
        /// <returns>
        /// The tileset record.
        /// </returns>
        public TilesetAssetRecord FindTilesetRecord(Tileset tileset)
        {
            if (tileset != null) {
                for (int i = 0; i < this.tilesetRecords.Length; ++i) {
                    if (this.tilesetRecords[i].Tileset == tileset) {
                        return this.tilesetRecords[i];
                    }
                }
            }
            return null;
        }

        /// <summary>
        /// Gets a read-only list of brushes contained within tileset.
        /// </summary>
        /// <param name="tileset">The tileset.</param>
        /// <returns>
        /// List of tileset brushes.
        /// </returns>
        public IList<BrushAssetRecord> GetTilesetBrushes(Tileset tileset)
        {
            var tilesetRecord = this.FindTilesetRecord(tileset);
            if (tilesetRecord == null) {
                // Unknown tileset: empty list, never null.
                return new BrushAssetRecord[0];
            }
            return tilesetRecord.BrushRecords;
        }

        #endregion
    }
}
using System;
using System.Threading.Tasks;
using Plugin.Calendars.Abstractions;
using Plugin.Calendars;
using Plugin.Permissions;
using Plugin.Permissions.Abstractions;
using Xamarin.Forms;
using Microsoft.AppCenter.Crashes;
using Microsoft.AppCenter.Analytics;
using Xamarin.Essentials;

namespace DotNetRu.Clients.UI.News
{
    /// <summary>
    /// Bridges app events to the device calendar via Plugin.Calendars.
    /// The mapping between internal event IDs and the calendar provider's
    /// external event IDs is persisted in <see cref="Preferences"/>.
    /// </summary>
    public static class CalendarService
    {
        private const string CalendarName = "DotNetRu";

        // Preference key under which the external (device) event ID is stored.
        private static string GetEventIDKey(string eventID) => "calendar_" + eventID;

        private static void SaveExternalEventID(string eventID, string externalEventID) => Preferences.Set(GetEventIDKey(eventID), externalEventID);

        // string.Empty means "no external event recorded" for this event ID.
        private static string GetExternalEventID(string eventID) => Preferences.Get(GetEventIDKey(eventID), string.Empty);

        // Fix: remove the key entirely instead of storing an empty marker value,
        // so stale keys do not accumulate in preferences. Behavior is unchanged
        // for readers: GetExternalEventID still falls back to string.Empty.
        private static void RemoveExternalEventID(string eventID) => Preferences.Remove(GetEventIDKey(eventID));

        /// <summary>
        /// External ID of the calendar used by the app, or empty when none was created yet.
        /// </summary>
        public static string CalendarID
        {
            get => Preferences.Get(nameof(CalendarID), string.Empty);
            set => Preferences.Set(nameof(CalendarID), value);
        }

        /// <summary>
        /// Whether the app has ever touched the device calendar. Used by
        /// <see cref="HasReminderAsync"/> to avoid prompting for permissions
        /// on devices where the calendar was never used.
        /// </summary>
        public static bool WasCalendarUsed
        {
            get => Preferences.Get(nameof(WasCalendarUsed), false);
            set => Preferences.Set(nameof(WasCalendarUsed), value);
        }

        /// <summary>
        /// Returns <c>true</c> when a calendar event previously created for
        /// <paramref name="id"/> still exists in the device calendar.
        /// </summary>
        /// <param name="id">Internal event ID.</param>
        public static async Task<bool> HasReminderAsync(string id)
        {
            // Short-circuit before requesting permissions: if the calendar was
            // never used there is nothing to look up.
            if (!WasCalendarUsed)
            {
                return false;
            }

            var hasPermissions = await GetCalendarPermissionsAsync();
            if (!hasPermissions)
            {
                return false;
            }

            var externalId = GetExternalEventID(id);
            if (string.IsNullOrWhiteSpace(externalId))
            {
                return false;
            }

            try
            {
                var calEvent = await CrossCalendars.Current.GetEventByIdAsync(externalId);
                return calEvent != null;
            }
            catch (Exception ex)
            {
                Crashes.TrackError(ex);

                // The stored ID no longer resolves; drop it so future calls
                // do not keep hitting the failing lookup.
                RemoveExternalEventID(id);
            }

            return false;
        }

        /// <summary>
        /// Deletes the calendar event previously created for <paramref name="eventID"/>.
        /// </summary>
        /// <param name="eventID">Internal event ID.</param>
        /// <returns><c>true</c> when the event was removed.</returns>
        public static async Task<bool> RemoveCalendarEventAsync(string eventID)
        {
            var hasPermissions = await GetCalendarPermissionsAsync();
            if (!hasPermissions)
            {
                return false;
            }

            // Guard: nothing was ever added for this event. Previously this case
            // surfaced as a caught exception from GetEventByIdAsync; the observable
            // result (false) is unchanged, but no exception is used for control flow.
            var externalEventID = GetExternalEventID(eventID);
            if (string.IsNullOrWhiteSpace(externalEventID))
            {
                return false;
            }

            try
            {
                var calendar = await GetOrCreateCalendarAsync();
                var calendarEvent = await CrossCalendars.Current.GetEventByIdAsync(externalEventID);
                await CrossCalendars.Current.DeleteEventAsync(calendar, calendarEvent);
                RemoveExternalEventID(eventID);
            }
            catch (Exception ex)
            {
                Crashes.TrackError(ex);
                return false;
            }

            return true;
        }

        /// <summary>
        /// Adds (or updates) a calendar event for <paramref name="eventID"/> and
        /// records the resulting external ID for later lookup/removal.
        /// </summary>
        /// <param name="eventID">Internal event ID.</param>
        /// <param name="calendarEvent">Event payload to write to the calendar.</param>
        /// <returns><c>true</c> when the event was stored.</returns>
        public static async Task<bool> AddCalendarEventAsync(string eventID, CalendarEvent calendarEvent)
        {
            var hasPermissions = await GetCalendarPermissionsAsync();
            if (!hasPermissions)
            {
                return false;
            }

            try
            {
                var calendar = await GetOrCreateCalendarAsync();

                // GetOrCreateCalendarAsync returns null when no calendar could be
                // created; fail explicitly instead of via a thrown-and-caught
                // NullReferenceException (same observable result).
                if (calendar == null)
                {
                    return false;
                }

                await CrossCalendars.Current.AddOrUpdateEventAsync(calendar, calendarEvent);
                SaveExternalEventID(eventID, calendarEvent.ExternalID);

                // Fix: record that the calendar has been used so that
                // HasReminderAsync does not short-circuit to false for this event.
                WasCalendarUsed = true;
            }
            catch (Exception ex)
            {
                Crashes.TrackError(ex);
                return false;
            }

            return true;
        }

        /// <summary>
        /// Ensures the calendar permission is granted, requesting it when needed.
        /// </summary>
        /// <returns><c>true</c> when the permission is granted.</returns>
        public static async Task<bool> GetCalendarPermissionsAsync()
        {
            var status = await CrossPermissions.Current.CheckPermissionStatusAsync(Permission.Calendar);
            if (status != Plugin.Permissions.Abstractions.PermissionStatus.Granted)
            {
                var request = await CrossPermissions.Current.RequestPermissionsAsync(Permission.Calendar);

                if (!request.ContainsKey(Permission.Calendar) || request[Permission.Calendar] != Plugin.Permissions.Abstractions.PermissionStatus.Granted)
                {
                    Analytics.TrackEvent("Calendar Permission Denied");
                    return false;
                }
            }

            return true;
        }

        /// <summary>
        /// Resolves the calendar the app writes to, creating one when necessary.
        /// On Android the first editable device calendar is reused; elsewhere a
        /// dedicated "DotNetRu" calendar is looked up or created.
        /// </summary>
        /// <returns>The calendar, or <c>null</c> when none could be obtained.</returns>
        public static async Task<Calendar> GetOrCreateCalendarAsync()
        {
            // Fast path: the previously recorded calendar still exists.
            var id = CalendarID;
            if (!string.IsNullOrWhiteSpace(id))
            {
                try
                {
                    var calendar = await CrossCalendars.Current.GetCalendarByIdAsync(id);
                    if (calendar != null)
                    {
                        return calendar;
                    }
                }
                catch (Exception ex)
                {
                    Crashes.TrackError(ex);
                }
            }

            // if for some reason the calendar does not exist then simply create a new one.
            if (Device.RuntimePlatform == Device.Android)
            {
                // On android it is really hard to delete a calendar made by an app, so just add to default calendar.
                try
                {
                    var calendars = await CrossCalendars.Current.GetCalendarsAsync();
                    foreach (var calendar in calendars)
                    {
                        // find first calendar we can add stuff to
                        if (!calendar.CanEditEvents)
                        {
                            continue;
                        }

                        CalendarID = calendar.ExternalID;
                        return calendar;
                    }
                }
                catch (Exception ex)
                {
                    Crashes.TrackError(ex);
                }
            }
            else
            {
                // try to find app if already uninstalled for some reason
                try
                {
                    var calendars = await CrossCalendars.Current.GetCalendarsAsync();
                    foreach (var calendar in calendars)
                    {
                        // find first calendar we can add stuff to
                        if (calendar.CanEditEvents && calendar.Name == CalendarName)
                        {
                            CalendarID = calendar.ExternalID;
                            return calendar;
                        }
                    }
                }
                catch (Exception ex)
                {
                    Crashes.TrackError(ex);
                }
            }

            var appCalendar = new Calendar
            {
                Color = "#5c7cbc",
                Name = CalendarName
            };

            try
            {
                await CrossCalendars.Current.AddOrUpdateCalendarAsync(appCalendar);
                CalendarID = appCalendar.ExternalID;
                return appCalendar;
            }
            catch (Exception ex)
            {
                Crashes.TrackError(ex);
            }

            return null;
        }
    }
}
// Copyright (c) Charlie Poole, Rob Prouse and Contributors. MIT License - see LICENSE.txt

using NUnit.Framework.Internal;
using NUnit.Compatibility;
using System.Collections;
using System;
using System.Reflection;

namespace NUnit.Framework.Constraints
{
    /// <summary>
    /// Delegate used to delay evaluation of the actual value
    /// to be used in evaluating a constraint
    /// </summary>
    public delegate TActual ActualValueDelegate<TActual>();

    /// <summary>
    /// The Constraint class is the base of all built-in constraints
    /// within NUnit. It provides the operator overloads used to combine
    /// constraints.
    /// </summary>
    public abstract class Constraint : IConstraint
    {
        readonly Lazy<string> _displayName;

        /// <summary>
        /// Construct a constraint with optional arguments
        /// </summary>
        /// <param name="args">Arguments to be saved</param>
        protected Constraint(params object[] args)
        {
            Arguments = args;
            _displayName = new Lazy<string>(ComputeDisplayName);
        }

        // Derives the default display name from the concrete type name:
        // strips the generic arity suffix ("`1") and a trailing "Constraint".
        string ComputeDisplayName()
        {
            Type type = GetType();
            string name = type.Name;

            if (type.GetTypeInfo().IsGenericType)
                name = name.Substring(0, name.Length - 2);

            if (name.EndsWith("Constraint", StringComparison.Ordinal))
                name = name.Substring(0, name.Length - 10);

            return name;
        }

        /// <summary>
        /// The display name of this Constraint for use by ToString().
        /// The default value is the name of the constraint with
        /// trailing "Constraint" removed. Derived classes may set
        /// this to another name in their constructors.
        /// </summary>
        public virtual string DisplayName => _displayName.Value;

        /// <summary>
        /// The Description of what this constraint tests, for
        /// use in messages and in the ConstraintResult.
        /// </summary>
        public virtual string Description { get; protected set; }

        /// <summary>
        /// Arguments provided to this Constraint, for use in
        /// formatting the description.
        /// </summary>
        public object[] Arguments { get; }

        /// <summary>
        /// The ConstraintBuilder holding this constraint
        /// </summary>
        public ConstraintBuilder Builder { get; set; }

        /// <summary>
        /// Applies the constraint to an actual value, returning a ConstraintResult.
        /// </summary>
        /// <param name="actual">The value to be tested</param>
        /// <returns>A ConstraintResult</returns>
        public abstract ConstraintResult ApplyTo<TActual>(TActual actual);

        /// <summary>
        /// Applies the constraint to an ActualValueDelegate that returns
        /// the value to be tested. The default implementation simply evaluates
        /// the delegate but derived classes may override it to provide for
        /// delayed processing.
        /// </summary>
        /// <param name="del">An ActualValueDelegate</param>
        /// <returns>A ConstraintResult</returns>
        public virtual ConstraintResult ApplyTo<TActual>(ActualValueDelegate<TActual> del)
        {
            // Async delegates are awaited synchronously before the constraint runs.
            return AsyncToSyncAdapter.IsAsyncOperation(del)
                ? ApplyTo(AsyncToSyncAdapter.Await(() => del.Invoke()))
                : ApplyTo(GetTestObject(del));
        }

#pragma warning disable 3006
        /// <summary>
        /// Test whether the constraint is satisfied by a given reference.
        /// The default implementation simply dereferences the value but
        /// derived classes may override it to provide for delayed processing.
        /// </summary>
        /// <param name="actual">A reference to the value to be tested</param>
        /// <returns>A ConstraintResult</returns>
        public virtual ConstraintResult ApplyTo<TActual>(ref TActual actual)
        {
            return ApplyTo(actual);
        }
#pragma warning restore 3006

        /// <summary>
        /// Retrieves the value to be tested from an ActualValueDelegate.
        /// The default implementation simply evaluates the delegate but derived
        /// classes may override it to provide for delayed processing.
        /// </summary>
        /// <param name="del">An ActualValueDelegate</param>
        /// <returns>Delegate evaluation result</returns>
        protected virtual object GetTestObject<TActual>(ActualValueDelegate<TActual> del) => del();

        /// <summary>
        /// Default override of ToString returns the constraint DisplayName
        /// followed by any arguments within angle brackets.
        /// </summary>
        /// <returns></returns>
        public override string ToString()
        {
            string representation = GetStringRepresentation();
            return Builder == null ? representation : "<unresolved " + representation + ">";
        }

        /// <summary>
        /// Returns the string representation of this constraint
        /// </summary>
        protected virtual string GetStringRepresentation()
        {
            var buffer = new System.Text.StringBuilder();
            buffer.Append("<");
            buffer.Append(DisplayName.ToLower());

            foreach (object argument in Arguments)
            {
                buffer.Append(" ");
                buffer.Append(FormatArgument(argument));
            }

            buffer.Append(">");
            return buffer.ToString();
        }

        // Formats a single constraint argument: null -> "null", strings quoted,
        // everything else via invariant-culture formatting.
        private static string FormatArgument(object value)
        {
            if (value == null)
                return "null";

            string template = value is string ? "\"{0}\"" : "{0}";
            return string.Format(System.Globalization.CultureInfo.InvariantCulture, template, value);
        }

        /// <summary>
        /// This operator creates a constraint that is satisfied only if both
        /// argument constraints are satisfied.
        /// </summary>
        public static Constraint operator &(Constraint left, Constraint right)
        {
            return new AndConstraint(
                ((IResolveConstraint)left).Resolve(),
                ((IResolveConstraint)right).Resolve());
        }

        /// <summary>
        /// This operator creates a constraint that is satisfied if either
        /// of the argument constraints is satisfied.
        /// </summary>
        public static Constraint operator |(Constraint left, Constraint right)
        {
            return new OrConstraint(
                ((IResolveConstraint)left).Resolve(),
                ((IResolveConstraint)right).Resolve());
        }

        /// <summary>
        /// This operator creates a constraint that is satisfied if the
        /// argument constraint is not satisfied.
        /// </summary>
        public static Constraint operator !(Constraint constraint)
        {
            return new NotConstraint(((IResolveConstraint)constraint).Resolve());
        }

        /// <summary>
        /// Returns a ConstraintExpression by appending And
        /// to the current constraint.
        /// </summary>
        public ConstraintExpression And
        {
            get
            {
                var targetBuilder = Builder;
                if (targetBuilder == null)
                {
                    targetBuilder = new ConstraintBuilder();
                    targetBuilder.Append(this);
                }

                targetBuilder.Append(new AndOperator());
                return new ConstraintExpression(targetBuilder);
            }
        }

        /// <summary>
        /// Returns a ConstraintExpression by appending And
        /// to the current constraint.
        /// </summary>
        public ConstraintExpression With => And;

        /// <summary>
        /// Returns a ConstraintExpression by appending Or
        /// to the current constraint.
        /// </summary>
        public ConstraintExpression Or
        {
            get
            {
                var targetBuilder = Builder;
                if (targetBuilder == null)
                {
                    targetBuilder = new ConstraintBuilder();
                    targetBuilder.Append(this);
                }

                targetBuilder.Append(new OrOperator());
                return new ConstraintExpression(targetBuilder);
            }
        }

        /// <summary>
        /// Returns a DelayedConstraint.WithRawDelayInterval with the specified delay time.
        /// </summary>
        /// <param name="delay">The delay, which defaults to milliseconds.</param>
        /// <returns></returns>
        public DelayedConstraint.WithRawDelayInterval After(int delay)
        {
            var resolved = Builder == null ? this : Builder.Resolve();
            return new DelayedConstraint.WithRawDelayInterval(new DelayedConstraint(resolved, delay));
        }

        /// <summary>
        /// Returns a DelayedConstraint with the specified delay time
        /// and polling interval.
        /// </summary>
        /// <param name="delayInMilliseconds">The delay in milliseconds.</param>
        /// <param name="pollingInterval">The interval at which to test the constraint.</param>
        /// <returns></returns>
        public DelayedConstraint After(int delayInMilliseconds, int pollingInterval)
        {
            var resolved = Builder == null ? this : Builder.Resolve();
            return new DelayedConstraint(resolved, delayInMilliseconds, pollingInterval);
        }

        /// <summary>
        /// Resolves any pending operators and returns the resolved constraint.
        /// </summary>
        IConstraint IResolveConstraint.Resolve()
        {
            return Builder == null ? this : Builder.Resolve();
        }
    }
}
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using System.Runtime.Serialization;

///This file contains all the typed enums that the client rest api spec exposes.
///This file is automatically generated from https://github.com/elasticsearch/elasticsearch-rest-api-spec
///Generated of commit

namespace Elasticsearch.Net
{
    public enum Consistency
    {
        [EnumMember(Value = "one")] One,
        [EnumMember(Value = "quorum")] Quorum,
        [EnumMember(Value = "all")] All
    }

    public enum Replication
    {
        [EnumMember(Value = "sync")] Sync,
        [EnumMember(Value = "async")] Async
    }

    public enum Bytes
    {
        [EnumMember(Value = "b")] B,
        [EnumMember(Value = "k")] K,
        [EnumMember(Value = "m")] M,
        [EnumMember(Value = "g")] G
    }

    public enum Level
    {
        [EnumMember(Value = "cluster")] Cluster,
        [EnumMember(Value = "indices")] Indices,
        [EnumMember(Value = "shards")] Shards
    }

    public enum WaitForStatus
    {
        [EnumMember(Value = "green")] Green,
        [EnumMember(Value = "yellow")] Yellow,
        [EnumMember(Value = "red")] Red
    }

    public enum ExpandWildcards
    {
        [EnumMember(Value = "open")] Open,
        [EnumMember(Value = "closed")] Closed,
        [EnumMember(Value = "none")] None,
        [EnumMember(Value = "all")] All
    }

    public enum DefaultOperator
    {
        [EnumMember(Value = "AND")] And,
        [EnumMember(Value = "OR")] Or
    }

    public enum VersionType
    {
        [EnumMember(Value = "internal")] Internal,
        [EnumMember(Value = "external")] External,
        [EnumMember(Value = "external_gte")] ExternalGte,
        [EnumMember(Value = "force")] Force
    }

    public enum OpType
    {
        [EnumMember(Value = "index")] Index,
        [EnumMember(Value = "create")] Create
    }

    public enum Format
    {
        [EnumMember(Value = "detailed")] Detailed,
        [EnumMember(Value = "text")] Text
    }

    public enum SearchType
    {
        [EnumMember(Value = "query_then_fetch")] QueryThenFetch,
        [EnumMember(Value = "query_and_fetch")] QueryAndFetch,
        [EnumMember(Value = "dfs_query_then_fetch")] DfsQueryThenFetch,
        [EnumMember(Value = "dfs_query_and_fetch")] DfsQueryAndFetch,
        [EnumMember(Value = "count")] Count,
        [EnumMember(Value = "scan")] Scan
    }

    public enum ThreadType
    {
        [EnumMember(Value = "cpu")] Cpu,
        [EnumMember(Value = "wait")] Wait,
        [EnumMember(Value = "block")] Block
    }

    public enum PercolateFormat
    {
        [EnumMember(Value = "ids")] Ids
    }

    public enum SuggestMode
    {
        [EnumMember(Value = "missing")] Missing,
        [EnumMember(Value = "popular")] Popular,
        [EnumMember(Value = "always")] Always
    }

    public enum ClusterStateMetric
    {
        [EnumMember(Value = "_all")] All,
        [EnumMember(Value = "blocks")] Blocks,
        [EnumMember(Value = "metadata")] Metadata,
        [EnumMember(Value = "nodes")] Nodes,
        [EnumMember(Value = "routing_table")] RoutingTable,
        [EnumMember(Value = "routing_nodes")] RoutingNodes,
        [EnumMember(Value = "master_node")] MasterNode,
        [EnumMember(Value = "version")] Version
    }

    public enum IndicesStatsMetric
    {
        [EnumMember(Value = "_all")] All,
        [EnumMember(Value = "completion")] Completion,
        [EnumMember(Value = "docs")] Docs,
        [EnumMember(Value = "fielddata")] Fielddata,
        [EnumMember(Value = "filter_cache")] FilterCache,
        [EnumMember(Value = "flush")] Flush,
        [EnumMember(Value = "get")] Get,
        [EnumMember(Value = "id_cache")] IdCache,
        [EnumMember(Value = "indexing")] Indexing,
        [EnumMember(Value = "merge")] Merge,
        [EnumMember(Value = "percolate")] Percolate,
        [EnumMember(Value = "query_cache")] QueryCache,
        [EnumMember(Value = "refresh")] Refresh,
        [EnumMember(Value = "search")] Search,
        [EnumMember(Value = "segments")] Segments,
        [EnumMember(Value = "store")] Store,
        [EnumMember(Value = "warmer")] Warmer,
        [EnumMember(Value = "suggest")] Suggest
    }

    public enum NodesInfoMetric
    {
        [EnumMember(Value = "settings")] Settings,
        [EnumMember(Value = "os")] Os,
        [EnumMember(Value = "process")] Process,
        [EnumMember(Value = "jvm")] Jvm,
        [EnumMember(Value = "thread_pool")] ThreadPool,
        [EnumMember(Value = "network")] Network,
        [EnumMember(Value = "transport")] Transport,
        [EnumMember(Value = "http")] Http,
        [EnumMember(Value = "plugins")] Plugins
    }

    public enum NodesStatsMetric
    {
        [EnumMember(Value = "_all")] All,
        [EnumMember(Value = "breaker")] Breaker,
        [EnumMember(Value = "fs")] Fs,
        [EnumMember(Value = "http")] Http,
        [EnumMember(Value = "indices")] Indices,
        [EnumMember(Value = "jvm")] Jvm,
        [EnumMember(Value = "network")] Network,
        [EnumMember(Value = "os")] Os,
        [EnumMember(Value = "process")] Process,
        [EnumMember(Value = "thread_pool")] ThreadPool,
        [EnumMember(Value = "transport")] Transport
    }

    public enum NodesStatsIndexMetric
    {
        [EnumMember(Value = "_all")] All,
        [EnumMember(Value = "completion")] Completion,
        [EnumMember(Value = "docs")] Docs,
        [EnumMember(Value = "fielddata")] Fielddata,
        [EnumMember(Value = "filter_cache")] FilterCache,
        [EnumMember(Value = "flush")] Flush,
        [EnumMember(Value = "get")] Get,
        [EnumMember(Value = "id_cache")] IdCache,
        [EnumMember(Value = "indexing")] Indexing,
        [EnumMember(Value = "merge")] Merge,
        [EnumMember(Value = "percolate")] Percolate,
        [EnumMember(Value = "query_cache")] QueryCache,
        [EnumMember(Value = "refresh")] Refresh,
        [EnumMember(Value = "search")] Search,
        [EnumMember(Value = "segments")] Segments,
        [EnumMember(Value = "store")] Store,
        [EnumMember(Value = "warmer")] Warmer,
        [EnumMember(Value = "suggest")] Suggest
    }

    public static class KnownEnums
    {
        /// <summary>
        /// Maps a known enum value to the string the Elasticsearch REST API expects.
        /// Returns "UNKNOWNENUM" for enum types this class does not know about, and
        /// for values that are not defined members of a known enum type.
        /// </summary>
        public static string Resolve(Enum e)
        {
            string resolved = null;

            if (e is Consistency) resolved = ResolveConsistency((Consistency)e);
            else if (e is Replication) resolved = ResolveReplication((Replication)e);
            else if (e is Bytes) resolved = ResolveBytes((Bytes)e);
            else if (e is Level) resolved = ResolveLevel((Level)e);
            else if (e is WaitForStatus) resolved = ResolveWaitForStatus((WaitForStatus)e);
            else if (e is ExpandWildcards) resolved = ResolveExpandWildcards((ExpandWildcards)e);
            else if (e is DefaultOperator) resolved = ResolveDefaultOperator((DefaultOperator)e);
            else if (e is VersionType) resolved = ResolveVersionType((VersionType)e);
            else if (e is OpType) resolved = ResolveOpType((OpType)e);
            else if (e is Format) resolved = ResolveFormat((Format)e);
            else if (e is SearchType) resolved = ResolveSearchType((SearchType)e);
            else if (e is ThreadType) resolved = ResolveThreadType((ThreadType)e);
            else if (e is PercolateFormat) resolved = ResolvePercolateFormat((PercolateFormat)e);
            else if (e is SuggestMode) resolved = ResolveSuggestMode((SuggestMode)e);
            else if (e is ClusterStateMetric) resolved = ResolveClusterStateMetric((ClusterStateMetric)e);
            else if (e is IndicesStatsMetric) resolved = ResolveIndicesStatsMetric((IndicesStatsMetric)e);
            else if (e is NodesInfoMetric) resolved = ResolveNodesInfoMetric((NodesInfoMetric)e);
            else if (e is NodesStatsMetric) resolved = ResolveNodesStatsMetric((NodesStatsMetric)e);
            else if (e is NodesStatsIndexMetric) resolved = ResolveNodesStatsIndexMetric((NodesStatsIndexMetric)e);

            return resolved != null ? resolved : "UNKNOWNENUM";
        }

        // Each helper returns null for an undefined member, which the dispatcher
        // above turns into the "UNKNOWNENUM" sentinel (matching the original
        // fall-through behavior of the generated switch chain).

        private static string ResolveConsistency(Consistency value)
        {
            switch (value)
            {
                case Consistency.One: return "one";
                case Consistency.Quorum: return "quorum";
                case Consistency.All: return "all";
                default: return null;
            }
        }

        private static string ResolveReplication(Replication value)
        {
            switch (value)
            {
                case Replication.Sync: return "sync";
                case Replication.Async: return "async";
                default: return null;
            }
        }

        private static string ResolveBytes(Bytes value)
        {
            switch (value)
            {
                case Bytes.B: return "b";
                case Bytes.K: return "k";
                case Bytes.M: return "m";
                case Bytes.G: return "g";
                default: return null;
            }
        }

        private static string ResolveLevel(Level value)
        {
            switch (value)
            {
                case Level.Cluster: return "cluster";
                case Level.Indices: return "indices";
                case Level.Shards: return "shards";
                default: return null;
            }
        }

        private static string ResolveWaitForStatus(WaitForStatus value)
        {
            switch (value)
            {
                case WaitForStatus.Green: return "green";
                case WaitForStatus.Yellow: return "yellow";
                case WaitForStatus.Red: return "red";
                default: return null;
            }
        }

        private static string ResolveExpandWildcards(ExpandWildcards value)
        {
            switch (value)
            {
                case ExpandWildcards.Open: return "open";
                case ExpandWildcards.Closed: return "closed";
                case ExpandWildcards.None: return "none";
                case ExpandWildcards.All: return "all";
                default: return null;
            }
        }

        private static string ResolveDefaultOperator(DefaultOperator value)
        {
            switch (value)
            {
                case DefaultOperator.And: return "AND";
                case DefaultOperator.Or: return "OR";
                default: return null;
            }
        }

        private static string ResolveVersionType(VersionType value)
        {
            switch (value)
            {
                case VersionType.Internal: return "internal";
                case VersionType.External: return "external";
                case VersionType.ExternalGte: return "external_gte";
                case VersionType.Force: return "force";
                default: return null;
            }
        }

        private static string ResolveOpType(OpType value)
        {
            switch (value)
            {
                case OpType.Index: return "index";
                case OpType.Create: return "create";
                default: return null;
            }
        }

        private static string ResolveFormat(Format value)
        {
            switch (value)
            {
                case Format.Detailed: return "detailed";
                case Format.Text: return "text";
                default: return null;
            }
        }

        private static string ResolveSearchType(SearchType value)
        {
            switch (value)
            {
                case SearchType.QueryThenFetch: return "query_then_fetch";
                case SearchType.QueryAndFetch: return "query_and_fetch";
                case SearchType.DfsQueryThenFetch: return "dfs_query_then_fetch";
                case SearchType.DfsQueryAndFetch: return "dfs_query_and_fetch";
                case SearchType.Count: return "count";
                case SearchType.Scan: return "scan";
                default: return null;
            }
        }

        private static string ResolveThreadType(ThreadType value)
        {
            switch (value)
            {
                case ThreadType.Cpu: return "cpu";
                case ThreadType.Wait: return "wait";
                case ThreadType.Block: return "block";
                default: return null;
            }
        }

        private static string ResolvePercolateFormat(PercolateFormat value)
        {
            switch (value)
            {
                case PercolateFormat.Ids: return "ids";
                default: return null;
            }
        }

        private static string ResolveSuggestMode(SuggestMode value)
        {
            switch (value)
            {
                case SuggestMode.Missing: return "missing";
                case SuggestMode.Popular: return "popular";
                case SuggestMode.Always: return "always";
                default: return null;
            }
        }

        private static string ResolveClusterStateMetric(ClusterStateMetric value)
        {
            switch (value)
            {
                case ClusterStateMetric.All: return "_all";
                case ClusterStateMetric.Blocks: return "blocks";
                case ClusterStateMetric.Metadata: return "metadata";
                case ClusterStateMetric.Nodes: return "nodes";
                case ClusterStateMetric.RoutingTable: return "routing_table";
                case ClusterStateMetric.RoutingNodes: return "routing_nodes";
                case ClusterStateMetric.MasterNode: return "master_node";
                case ClusterStateMetric.Version: return "version";
                default: return null;
            }
        }

        private static string ResolveIndicesStatsMetric(IndicesStatsMetric value)
        {
            switch (value)
            {
                case IndicesStatsMetric.All: return "_all";
                case IndicesStatsMetric.Completion: return "completion";
                case IndicesStatsMetric.Docs: return "docs";
                case IndicesStatsMetric.Fielddata: return "fielddata";
                case IndicesStatsMetric.FilterCache: return "filter_cache";
                case IndicesStatsMetric.Flush: return "flush";
                case IndicesStatsMetric.Get: return "get";
                case IndicesStatsMetric.IdCache: return "id_cache";
                case IndicesStatsMetric.Indexing: return "indexing";
                case IndicesStatsMetric.Merge: return "merge";
                case IndicesStatsMetric.Percolate: return "percolate";
                case IndicesStatsMetric.QueryCache: return "query_cache";
                case IndicesStatsMetric.Refresh: return "refresh";
                case IndicesStatsMetric.Search: return "search";
                case IndicesStatsMetric.Segments: return "segments";
                case IndicesStatsMetric.Store: return "store";
                case IndicesStatsMetric.Warmer: return "warmer";
                case IndicesStatsMetric.Suggest: return "suggest";
                default: return null;
            }
        }

        private static string ResolveNodesInfoMetric(NodesInfoMetric value)
        {
            switch (value)
            {
                case NodesInfoMetric.Settings: return "settings";
                case NodesInfoMetric.Os: return "os";
                case NodesInfoMetric.Process: return "process";
                case NodesInfoMetric.Jvm: return "jvm";
                case NodesInfoMetric.ThreadPool: return "thread_pool";
                case NodesInfoMetric.Network: return "network";
                case NodesInfoMetric.Transport: return "transport";
                case NodesInfoMetric.Http: return "http";
                case NodesInfoMetric.Plugins: return "plugins";
                default: return null;
            }
        }

        private static string ResolveNodesStatsMetric(NodesStatsMetric value)
        {
            switch (value)
            {
                case NodesStatsMetric.All: return "_all";
                case NodesStatsMetric.Breaker: return "breaker";
                case NodesStatsMetric.Fs: return "fs";
                case NodesStatsMetric.Http: return "http";
                case NodesStatsMetric.Indices: return "indices";
                case NodesStatsMetric.Jvm: return "jvm";
                case NodesStatsMetric.Network: return "network";
                case NodesStatsMetric.Os: return "os";
                case NodesStatsMetric.Process: return "process";
                case NodesStatsMetric.ThreadPool: return "thread_pool";
                case NodesStatsMetric.Transport: return "transport";
                default: return null;
            }
        }

        private static string ResolveNodesStatsIndexMetric(NodesStatsIndexMetric value)
        {
            switch (value)
            {
                case NodesStatsIndexMetric.All: return "_all";
                case NodesStatsIndexMetric.Completion: return "completion";
                case NodesStatsIndexMetric.Docs: return "docs";
                case NodesStatsIndexMetric.Fielddata: return "fielddata";
                case NodesStatsIndexMetric.FilterCache: return "filter_cache";
                case NodesStatsIndexMetric.Flush: return "flush";
                case NodesStatsIndexMetric.Get: return "get";
                case NodesStatsIndexMetric.IdCache: return "id_cache";
                case NodesStatsIndexMetric.Indexing: return "indexing";
                case NodesStatsIndexMetric.Merge: return "merge";
                case NodesStatsIndexMetric.Percolate: return "percolate";
                case NodesStatsIndexMetric.QueryCache: return "query_cache";
                case NodesStatsIndexMetric.Refresh: return "refresh";
                case NodesStatsIndexMetric.Search: return "search";
                case NodesStatsIndexMetric.Segments: return "segments";
                case NodesStatsIndexMetric.Store: return "store";
                case NodesStatsIndexMetric.Warmer: return "warmer";
                case NodesStatsIndexMetric.Suggest: return "suggest";
                default: return null;
            }
        }
    }
}
using Cake.Common.Diagnostics;
using Cake.Core;
using CK.Text;
using CodeCake.Abstractions;
using CSemVer;
using NuGet.Common;
using NuGet.Configuration;
using NuGet.Credentials;
using NuGet.Packaging.Core;
using NuGet.Protocol;
using NuGet.Protocol.Core.Types;
using NuGet.Protocol.Plugins;
using NuGet.Versioning;
using System;
using System.Collections.Generic;
using System.Linq;
using System.Net.Http;
using System.Text;
using System.Threading;
using System.Threading.Tasks;

namespace CodeCake
{
    public partial class Build
    {
        /// <summary>
        /// Central NuGet plumbing for the build: package sources (settings + build-defined),
        /// credential providers, logging, and the base class for push feeds.
        /// </summary>
        public static class NuGetHelper
        {
            static readonly SourceCacheContext _sourceCache;
            static readonly List<Lazy<INuGetResourceProvider>> _providers;
            static readonly ISettings _settings;
            static readonly PackageProviderProxy _sourceProvider;
            static readonly List<VSTSFeed> _vstsFeeds;
            static ILogger _logger;

            /// <summary>
            /// Implements a IPackageSourceProvider that mixes sources from NuGet.config settings
            /// and sources that are used by the build chain.
            /// </summary>
            class PackageProviderProxy : IPackageSourceProvider
            {
                readonly IPackageSourceProvider _fromSettings;
                readonly Lazy<List<PackageSource>> _sources;
                int _definedSourceCount;

                public PackageProviderProxy( ISettings settings )
                {
                    _fromSettings = new PackageSourceProvider( settings );
                    _sources = new Lazy<List<PackageSource>>( () => new List<PackageSource>( _fromSettings.LoadPackageSources() ) );
                }

                /// <summary>
                /// Returns the existing remote source for <paramref name="urlV3"/> or registers
                /// a new one named "CCB-&lt;name&gt;".
                /// </summary>
                public PackageSource FindOrCreateFromUrl( string name, string urlV3 )
                {
                    if( string.IsNullOrEmpty( urlV3 ) || !urlV3.EndsWith( "/v3/index.json" ) )
                    {
                        throw new ArgumentException( "Feed requires a /v3/index.json url.", nameof( urlV3 ) );
                    }
                    if( string.IsNullOrWhiteSpace( name ) )
                    {
                        throw new ArgumentNullException( nameof( name ) );
                    }
                    var exists = _sources.Value.FirstOrDefault( s => !s.IsLocal && s.Source == urlV3 );
                    if( exists != null ) return exists;
                    exists = new PackageSource( urlV3, "CCB-" + name );
                    // Build-defined sources are kept before the settings-defined ones.
                    _sources.Value.Insert( _definedSourceCount++, exists );
                    return exists;
                }

                /// <summary>
                /// Returns the existing local source for <paramref name="localPath"/> or registers
                /// a new one named "CCB-&lt;last path part&gt;".
                /// </summary>
                public PackageSource FindOrCreateFromLocalPath( string localPath )
                {
                    if( string.IsNullOrWhiteSpace( localPath ) ) throw new ArgumentNullException( nameof( localPath ) );
                    NormalizedPath path = System.IO.Path.GetFullPath( localPath );
                    var exists = _sources.Value.FirstOrDefault( s => s.IsLocal && new NormalizedPath( s.Source ) == path );
                    if( exists != null ) return exists;
                    exists = new PackageSource( path, "CCB-" + path.LastPart );
                    _sources.Value.Insert( _definedSourceCount++, exists );
                    return exists;
                }

                string IPackageSourceProvider.ActivePackageSourceName => _fromSettings.ActivePackageSourceName;

                string IPackageSourceProvider.DefaultPushSource => _fromSettings.DefaultPushSource;

                event EventHandler IPackageSourceProvider.PackageSourcesChanged { add { } remove { } }

                /// <summary>
                /// Gets all the sources.
                /// </summary>
                /// <returns></returns>
                public IEnumerable<PackageSource> LoadPackageSources() => _sources.Value;

                bool IPackageSourceProvider.IsPackageSourceEnabled( string name ) => true;

                void IPackageSourceProvider.SaveActivePackageSource( PackageSource source )
                {
                    throw new NotSupportedException( "Should not be called in this scenario." );
                }

                void IPackageSourceProvider.SavePackageSources( IEnumerable<PackageSource> sources )
                {
                    throw new NotSupportedException( "Should not be called in this scenario." );
                }

                PackageSource IPackageSourceProvider.GetPackageSourceByName( string name ) => _sources.Value.FirstOrDefault( s => s.Name == name );

                PackageSource IPackageSourceProvider.GetPackageSourceBySource( string source ) => _sources.Value.FirstOrDefault( s => s.Source == source );

                void IPackageSourceProvider.RemovePackageSource( string name )
                {
                    throw new NotSupportedException( "Should not be called in this scenario." );
                }

                void IPackageSourceProvider.EnablePackageSource( string name )
                {
                    throw new NotSupportedException( "Should not be called in this scenario." );
                }

                void IPackageSourceProvider.DisablePackageSource( string name )
                {
                    throw new NotSupportedException( "Should not be called in this scenario." );
                }

                void IPackageSourceProvider.UpdatePackageSource( PackageSource source, bool updateCredentials, bool updateEnabled )
                {
                    throw new NotSupportedException( "Should not be called in this scenario." );
                }

                void IPackageSourceProvider.AddPackageSource( PackageSource source )
                {
                    throw new NotSupportedException( "Should not be called in this scenario." );
                }
            }

            static NuGetHelper()
            {
                _settings = Settings.LoadDefaultSettings( Environment.CurrentDirectory );
                _sourceProvider = new PackageProviderProxy( _settings );
                _vstsFeeds = new List<VSTSFeed>();
                // Setting "NoCache" (?) here is required to be able to retry a push after a
                // failure. Without it, the PUT is canceled.
                _sourceCache = new SourceCacheContext().WithRefreshCacheTrue();
                _providers = new List<Lazy<INuGetResourceProvider>>();
                _providers.AddRange( Repository.Provider.GetCoreV3() );
            }

            /// <summary>
            /// Thread-safe <see cref="ILogger"/> adapter that forwards NuGet log output
            /// to the Cake context.
            /// </summary>
            class Logger : ILogger
            {
                readonly ICakeContext _ctx;
                readonly object _lock;

                public Logger( ICakeContext ctx )
                {
                    _ctx = ctx;
                    _lock = new object();
                }

                public void LogDebug( string data ) { lock( _lock ) _ctx.Debug( $"NuGet: {data}" ); }
                public void LogVerbose( string data ) { lock( _lock ) _ctx.Verbose( $"NuGet: {data}" ); }
                public void LogInformation( string data ) { lock( _lock ) _ctx.Information( $"NuGet: {data}" ); }
                public void LogMinimal( string data ) { lock( _lock ) _ctx.Information( $"NuGet: {data}" ); }
                public void LogWarning( string data ) { lock( _lock ) _ctx.Warning( $"NuGet: {data}" ); }
                public void LogError( string data ) { lock( _lock ) _ctx.Error( $"NuGet: {data}" ); }
                public void LogSummary( string data ) { lock( _lock ) _ctx.Information( $"NuGet: {data}" ); }
                public void LogInformationSummary( string data ) { lock( _lock ) _ctx.Information( $"NuGet: {data}" ); }
                public void Log( LogLevel level, string data ) { lock( _lock ) _ctx.Information( $"NuGet ({level}): {data}" ); }
                public Task LogAsync( LogLevel level, string data )
                {
                    Log( level, data );
                    return System.Threading.Tasks.Task.CompletedTask;
                }

                public void Log( ILogMessage message )
                {
                    lock( _lock ) _ctx.Information( $"NuGet ({message.Level}) - Code: {message.Code} - Project: {message.ProjectPath} - {message.Message}" );
                }

                public Task LogAsync( ILogMessage message )
                {
                    Log( message );
                    return System.Threading.Tasks.Task.CompletedTask;
                }
            }

            // Lazily creates the shared logger, dumps the configured sources and wires
            // the NuGet credential service the first time it is called.
            static ILogger InitializeAndGetLogger( ICakeContext ctx )
            {
                if( _logger == null )
                {
                    ctx.Information( $"Initializing with sources:" );
                    foreach( var s in _sourceProvider.LoadPackageSources() )
                    {
                        ctx.Information( $"{s.Name} => {s.Source}" );
                    }
                    InitializeVSTSEnvironment( ctx );
                    _logger = new Logger( ctx );
                    var credProviders = new AsyncLazy<IEnumerable<ICredentialProvider>>( async () => await GetCredentialProvidersAsync( _logger ) );
                    HttpHandlerResourceV3.CredentialService = new Lazy<ICredentialService>(
                        () => new CredentialService(
                            providers: credProviders,
                            nonInteractive: true,
                            handlesDefaultCredentials: true ) );
                }
                return _logger;
            }

            static void InitializeVSTSEnvironment( ICakeContext ctx )
            {
                // Workaround for dev/NuGet.Client\src\NuGet.Core\NuGet.Protocol\Plugins\PluginFactory.cs line 161:
                // FileName = Environment.GetEnvironmentVariable("DOTNET_HOST_PATH"),
                // This line should be:
                // FileName = Environment.GetEnvironmentVariable("DOTNET_HOST_PATH") ?? "dotnet",
                //
                // Issue: https://github.com/NuGet/Home/issues/7438
                //
                Environment.SetEnvironmentVariable( "DOTNET_HOST_PATH", "dotnet" );

                // The VSS_NUGET_EXTERNAL_FEED_ENDPOINTS is used by Azure Credential Provider to handle authentication
                // for the feed.
                int count = 0;
                StringBuilder b = new StringBuilder( @"{""endpointCredentials"":[" );
                foreach( var f in _vstsFeeds )
                {
                    var azureFeedPAT = ctx.InteractiveEnvironmentVariable( f.SecretKeyName );
                    if( !String.IsNullOrEmpty( azureFeedPAT ) )
                    {
                        // Fix: endpoint objects must be comma separated, otherwise the
                        // generated JSON is invalid as soon as more than one VSTS feed
                        // has a configured PAT.
                        if( count > 0 ) b.Append( ',' );
                        ++count;
                        b.Append( @"{""endpoint"":""" ).AppendJSONEscaped( f.Url ).Append( @"""," )
                         .Append( @"""username"":""Unused"",""password"":""" ).AppendJSONEscaped( azureFeedPAT ).Append( @"""" )
                         .Append( "}" );
                    }
                }
                b.Append( "]}" );
                ctx.Information( $"Created {count} feed end point(s) in VSS_NUGET_EXTERNAL_FEED_ENDPOINTS." );
                Environment.SetEnvironmentVariable( "VSS_NUGET_EXTERNAL_FEED_ENDPOINTS", b.ToString() );
            }

            // Builds the secure-plugin credential providers (Azure Credential Provider et al.).
            static async Task<IEnumerable<ICredentialProvider>> GetCredentialProvidersAsync( ILogger logger )
            {
                var providers = new List<ICredentialProvider>();
                var securePluginProviders = await new SecurePluginCredentialProviderBuilder(
                                                        pluginManager: PluginManager.Instance,
                                                        canShowDialog: false,
                                                        logger: logger ).BuildAllAsync();
                providers.AddRange( securePluginProviders );
                return providers;
            }

            /// <summary>
            /// Base class for NuGet feeds.
            /// </summary>
            public abstract class NuGetFeed : ArtifactFeed
            {
                readonly PackageSource _packageSource;
                readonly SourceRepository _sourceRepository;
                readonly AsyncLazy<PackageUpdateResource> _updater;

                /// <summary>
                /// Initialize a new remote feed.
                /// Its final <see cref="Name"/> is the one of the existing feed if it appears in the existing
                /// sources (from NuGet configuration files) or "CCB-<paramref name="name"/>" if this is
                /// an unexisting source (CCB is for CodeCakeBuilder).
                /// </summary>
                /// <param name="type">The central NuGet handler.</param>
                /// <param name="name">Name of the feed.</param>
                /// <param name="urlV3">Must be a v3/index.json url otherwise an argument exception is thrown.</param>
                protected NuGetFeed( NuGetArtifactType type, string name, string urlV3 )
                    : this( type, _sourceProvider.FindOrCreateFromUrl( name, urlV3 ) )
                {
                    // VSTS feeds are tracked so that their PATs can be exposed to the
                    // Azure Credential Provider (see InitializeVSTSEnvironment).
                    if( this is VSTSFeed f ) _vstsFeeds.Add( f );
                }

                /// <summary>
                /// Initialize a new local feed.
                /// Its final <see cref="Name"/> is the one of the existing feed if it appears in the existing
                /// sources (from NuGet configuration files) or "CCB-GetDirectoryName(localPath)" if this is
                /// an unexisting source (CCB is for CodeCakeBuilder).
                /// </summary>
                /// <param name="type">The central NuGet handler.</param>
                /// <param name="localPath">Local path.</param>
                protected NuGetFeed( NuGetArtifactType type, string localPath )
                    : this( type, _sourceProvider.FindOrCreateFromLocalPath( localPath ) )
                {
                }

                NuGetFeed( NuGetArtifactType type, PackageSource s )
                    : base( type )
                {
                    _packageSource = s;
                    _sourceRepository = new SourceRepository( _packageSource, _providers );
                    _updater = new AsyncLazy<PackageUpdateResource>( async () =>
                    {
                        var r = await _sourceRepository.GetResourceAsync<PackageUpdateResource>();
                        // TODO: Update for next NuGet version?
                        // r.Settings = _settings;
                        return r;
                    } );
                }

                /// <summary>
                /// Must provide the secret key name that must be found in the environment variables.
                /// Without it push is skipped.
                /// </summary>
                public abstract string SecretKeyName { get; }

                /// <summary>
                /// The url of the source. Can be a local path.
                /// </summary>
                public string Url => _packageSource.Source;

                /// <summary>
                /// Gets whether this is a local feed (a directory).
                /// </summary>
                public bool IsLocal => _packageSource.IsLocal;

                /// <summary>
                /// Gets the source name.
                /// If the source appears in NuGet configuration files, it is the configured name of the source, otherwise
                /// it is prefixed with "CCB-" (CCB is for CodeCakeBuilder).
/// </summary> public override string Name => _packageSource.Name; /// <summary> /// Creates a list of push entries from a set of local artifacts into this feed. /// </summary> /// <param name="artifacts">Local artifacts.</param> /// <returns>The set of push into this feed.</returns> public override async Task<IEnumerable<ArtifactPush>> CreatePushListAsync( IEnumerable<ILocalArtifact> artifacts ) { var result = new List<ArtifactPush>(); var logger = InitializeAndGetLogger( Cake ); MetadataResource meta = await _sourceRepository.GetResourceAsync<MetadataResource>(); foreach( var p in artifacts ) { var pId = new PackageIdentity( p.ArtifactInstance.Artifact.Name, new NuGetVersion( p.ArtifactInstance.Version.ToNuGetPackageString() ) ); if( await meta.Exists( pId, _sourceCache, logger, CancellationToken.None ) ) { Cake.Debug( $" ==> Feed '{Name}' already contains {p.ArtifactInstance}." ); } else { Cake.Debug( $"Package {p.ArtifactInstance} must be published to remote feed '{Name}'." ); result.Add( new ArtifactPush( p, this ) ); } } return result; } /// <summary> /// Pushes a set of packages from .nupkg files that must exist in <see cref="CheckRepositoryInfo.ReleasesFolder"/>. /// </summary> /// <param name="pushes">The instances to push (that necessary target this feed).</param> /// <returns>The awaitable.</returns> public override async Task PushAsync( IEnumerable<ArtifactPush> pushes ) { string apiKey = null; if( !_packageSource.IsLocal ) { apiKey = ResolveAPIKey(); if( string.IsNullOrEmpty( apiKey ) ) { Cake.Information( $"Could not resolve API key. Push to '{Name}' => '{Url}' is skipped." ); return; } } Cake.Information( $"Pushing packages to '{Name}' => '{Url}'." ); var logger = InitializeAndGetLogger( Cake ); var updater = await _updater; foreach( var p in pushes ) { string packageString = p.Name + "." 
+ p.Version.ToNuGetPackageString(); var fullPath = ArtifactType.GlobalInfo.ReleasesFolder.AppendPart( packageString + ".nupkg" ); await updater.Push( fullPath, string.Empty, // no Symbol source. 20, //20 seconds timeout disableBuffering: false, getApiKey: endpoint => apiKey, getSymbolApiKey: symbolsEndpoint => null, noServiceEndpoint: false, log: logger ); } await OnAllArtifactsPushed( pushes ); } /// <summary> /// Called once all the packages are pushed. /// Does nothing at this level. /// </summary> /// <param name="pushes">The instances to push (that necessary target this feed).</param> /// <returns>The awaitable.</returns> protected virtual Task OnAllArtifactsPushed( IEnumerable<ArtifactPush> pushes ) { return System.Threading.Tasks.Task.CompletedTask; } /// <summary> /// Must resolve the API key required to push the package. /// </summary> /// <returns>The secret (that can be null or empty).</returns> protected abstract string ResolveAPIKey(); } } /// <summary> /// A basic VSTS feed uses "VSTS" for the API key and does not handle views. /// The https://github.com/Microsoft/artifacts-credprovider must be installed. /// A Personal Access Token, <see cref="SecretKeyName"/> environment variable /// must be defined and contains the token. /// If this SecretKeyName is not defined or empty, push is skipped. /// </summary> class VSTSFeed : NuGetHelper.NuGetFeed { string _azureFeedPAT; /// <summary> /// Initialize a new remote VSTS feed. /// </summary> /// <param name="name">Name of the feed.</param> /// <param name="urlV3">Must be a v3/index.json url otherwise an argument exception is thrown.</param> /// <param name="secretKeyName">The secret key name. 
When null or empty, push is skipped.</param> public VSTSFeed( NuGetArtifactType t, string name, string urlV3, string secretKeyName ) : base( t, name, urlV3 ) { SecretKeyName = secretKeyName; } /// <summary> /// Gets the name of the environment variable that must contain the /// Personal Access Token that allows push to this feed. /// The https://github.com/Microsoft/artifacts-credprovider VSS_NUGET_EXTERNAL_FEED_ENDPOINTS /// will be dynalically generated. /// </summary> public override string SecretKeyName { get; } /// <summary> /// Looks up for the <see cref="SecretKeyName"/> environment variable that is required to promote packages. /// If this variable is empty or not defined, push is skipped. /// </summary> /// <param name="ctx">The Cake context.</param> /// <returns>The "VSTS" API key or null to skip the push.</returns> protected override string ResolveAPIKey() { _azureFeedPAT = Cake.InteractiveEnvironmentVariable( SecretKeyName ); if( string.IsNullOrWhiteSpace( _azureFeedPAT ) ) { Cake.Warning( $"No {SecretKeyName} environment variable found." ); _azureFeedPAT = null; } // The API key for the Credential Provider must be "VSTS". return _azureFeedPAT != null ? "VSTS" : null; } } /// <summary> /// A SignatureVSTSFeed handles Stable, Latest, Preview and CI Azure feed views with /// package promotion based on the published version. /// The secret key name is built by <see cref="GetSecretKeyName"/>: /// "AZURE_FEED_" + Organization.ToUpperInvariant().Replace( '-', '_' ).Replace( ' ', '_' ) + "_PAT". /// </summary> class SignatureVSTSFeed : VSTSFeed { /// <summary> /// Builds the standardized secret key name from the organization name: this is /// the Personal Access Token that allows push packages. 
/// </summary> /// <param name="organization">Organization name.</param> /// <returns>The secret key name to use.</returns> public static string GetSecretKeyName( string organization ) => "AZURE_FEED_" + organization.ToUpperInvariant() .Replace( '-', '_' ) .Replace( ' ', '_' ) + "_PAT"; /// <summary> /// Initialize a new SignatureVSTSFeed. /// Its <see cref="NuGetHelper.Feed.Name"/> is set to "<paramref name="organization"/>-<paramref name="feedName"/>" /// (and may be prefixed with "CCB-" if it doesn't correspond to a source defined in the NuGet.config settings. /// </summary> /// <param name="organization">Name of the organization.</param> /// <param name="feedName">Identifier of the feed in Azure, inside the organization.</param> public SignatureVSTSFeed( NuGetArtifactType t, string organization, string feedName ) : base( t, organization + "-" + feedName, $"https://pkgs.dev.azure.com/{organization}/_packaging/{feedName}/nuget/v3/index.json", GetSecretKeyName( organization ) ) { Organization = organization; FeedName = feedName; } /// <summary> /// Gets the organization name. /// </summary> public string Organization { get; } /// <summary> /// Gets the feed identifier. /// </summary> public string FeedName { get; } /// <summary> /// Implements Package promotion in @CI, @Exploratory, @Preview, @Latest and @Stable views. 
/// </summary> /// <param name="ctx">The Cake context.</param> /// <param name="pushes">The set of artifacts to promote.</param> /// <returns>The awaitable.</returns> protected override async Task OnAllArtifactsPushed( IEnumerable<ArtifactPush> pushes ) { var basicAuth = Convert.ToBase64String( Encoding.ASCII.GetBytes( ":" + Cake.InteractiveEnvironmentVariable( SecretKeyName ) ) ); foreach( var p in pushes ) { foreach( var view in p.Version.PackageQuality.GetLabels() ) { using( HttpRequestMessage req = new HttpRequestMessage( HttpMethod.Post, $"https://pkgs.dev.azure.com/{Organization}/_apis/packaging/feeds/{FeedName}/nuget/packagesBatch?api-version=5.0-preview.1" ) ) { req.Headers.Authorization = new System.Net.Http.Headers.AuthenticationHeaderValue( "Basic", basicAuth ); var body = GetPromotionJSONBody( p.Name, p.Version.ToNuGetPackageString(), view.ToString() ); req.Content = new StringContent( body, Encoding.UTF8, "application/json" ); using( var m = await StandardGlobalInfo.SharedHttpClient.SendAsync( req ) ) { if( m.IsSuccessStatusCode ) { Cake.Information( $"Package '{p.Name}' promoted to view '@{view}'." ); } else { Cake.Error( $"Package '{p.Name}' promotion to view '@{view}' failed." ); // Throws! m.EnsureSuccessStatusCode(); } } } } } } string GetPromotionJSONBody( string packageName, string packageVersion, string viewId, bool npm = false ) { var bodyFormat = @"{ ""data"": { ""viewId"": ""{viewId}"" }, ""operation"": 0, ""packages"": [{ ""id"": ""{packageName}"", ""version"": ""{packageVersion}"", ""protocolType"": ""{NuGetOrNpm}"" }] }"; return bodyFormat.Replace( "{NuGetOrNpm}", npm ? "Npm" : "NuGet" ) .Replace( "{viewId}", viewId ) .Replace( "{packageName}", packageName ) .Replace( "{packageVersion}", packageVersion ); } } /// <summary> /// A remote feed where push is controlled by its <see cref="SecretKeyName"/>. /// </summary> class RemoteFeed : NuGetHelper.NuGetFeed { /// <summary> /// Initialize a new remote feed. 
/// The push is controlled by an API key name that is the name of an environment variable /// that must contain the actual API key to push packages. /// </summary> /// <param name="name">Name of the feed.</param> /// <param name="urlV3">Must be a v3/index.json url otherwise an argument exception is thrown.</param> /// <param name="secretKeyName">The secret key name.</param> public RemoteFeed( NuGetArtifactType t, string name, string urlV3, string secretKeyName ) : base( t, name, urlV3 ) { SecretKeyName = secretKeyName; } /// <summary> /// Gets or sets the push API key name. /// This is the environment variable name that must contain the NuGet API key required to push. /// </summary> public override string SecretKeyName { get; } /// <summary> /// Resolves the API key from <see cref="APIKeyName"/> environment variable. /// </summary> /// <param name="ctx">The Cake context.</param> /// <returns>The API key or null.</returns> protected override string ResolveAPIKey() { if( String.IsNullOrEmpty( SecretKeyName ) ) { Cake.Information( $"Remote feed '{Name}' APIKeyName is null or empty." ); return null; } return Cake.InteractiveEnvironmentVariable( SecretKeyName ); } } /// <summary> /// Local feed. Pushes are always possible. /// </summary> class NugetLocalFeed : NuGetHelper.NuGetFeed { public NugetLocalFeed( NuGetArtifactType t, string path ) : base( t, path ) { } public override string SecretKeyName => null; protected override string ResolveAPIKey() => null; } } }
using System;
using System.Collections.Generic;
using System.Linq;
using System.Threading;
using System.Threading.Tasks;
using Baseline;
using Marten.Linq;
using Marten.Linq.QueryHandlers;
using Marten.Schema;
using Marten.Services;
using Marten.Services.BatchQuerying;
using Npgsql;
using Remotion.Linq.Parsing.Structure;

namespace Marten
{
    /// <summary>
    /// Low level loading of a single document by id, returning the raw fetch result.
    /// </summary>
    public interface ILoader
    {
        FetchResult<T> LoadDocument<T>(object id) where T : class;
        Task<FetchResult<T>> LoadDocumentAsync<T>(object id, CancellationToken token) where T : class;
    }

    /// <summary>
    /// Read-only session against the document store: LINQ and SQL querying, single and
    /// batched document loading. Disposing the session disposes the managed connection.
    /// </summary>
    public class QuerySession : IQuerySession, ILoader
    {
        private readonly IDocumentSchema _schema;
        private readonly ISerializer _serializer;
        private readonly IManagedConnection _connection;
        private readonly IQueryParser _parser;
        private readonly IIdentityMap _identityMap;
        private bool _disposed;

        public QuerySession(IDocumentStore store, IDocumentSchema schema, ISerializer serializer, IManagedConnection connection, IQueryParser parser, IIdentityMap identityMap)
        {
            DocumentStore = store;
            _schema = schema;
            _serializer = serializer;
            _connection = connection;
            _parser = parser;
            _identityMap = identityMap;
        }

        public IDocumentStore DocumentStore { get; }

        // A fresh JsonLoader is handed out on every access.
        public IJsonLoader Json
        {
            get { return new JsonLoader(_connection, _schema); }
        }

        // Guard invoked by the public members; protected so derived sessions can reuse it.
        protected void assertNotDisposed()
        {
            if (_disposed) throw new ObjectDisposedException("This session has been disposed");
        }

        /// <summary>
        /// Starts a LINQ query against the documents of type T.
        /// </summary>
        public IMartenQueryable<T> Query<T>()
        {
            assertNotDisposed();
            var queryExecutor = new MartenQueryExecutor(_connection, _schema, _identityMap);
            var queryProvider = new MartenQueryProvider(typeof(MartenQueryable<>), _parser, queryExecutor);
            return new MartenQueryable<T>(queryProvider);
        }

        /// <summary>
        /// Runs a user supplied SQL query and materializes the results as T.
        /// </summary>
        public IList<T> Query<T>(string sql, params object[] parameters)
        {
            assertNotDisposed();
            var queryHandler = new UserSuppliedQueryHandler<T>(_schema, _serializer, sql, parameters);
            return _connection.Fetch(queryHandler, _identityMap.ForQuery(), null);
        }

        /// <summary>
        /// Async variant of the user supplied SQL query.
        /// </summary>
        public Task<IList<T>> QueryAsync<T>(string sql, CancellationToken token, params object[] parameters)
        {
            assertNotDisposed();
            var queryHandler = new UserSuppliedQueryHandler<T>(_schema, _serializer, sql, parameters);
            return _connection.FetchAsync(queryHandler, _identityMap.ForQuery(), null, token);
        }

        /// <summary>
        /// Creates a batched query that executes several queries in one round trip.
        /// </summary>
        public IBatchedQuery CreateBatchQuery()
        {
            assertNotDisposed();
            return new BatchedQuery(_connection, _schema, _identityMap.ForQuery(), this, _serializer);
        }

        // Storage lookup for the document type T.
        private IDocumentStorage storage<T>() => _schema.StorageFor(typeof(T));

        public FetchResult<T> LoadDocument<T>(object id) where T : class
        {
            assertNotDisposed();
            var docStorage = storage<T>();
            var resolver = docStorage.As<IResolver<T>>();
            var loaderCommand = docStorage.LoaderCommand(id);

            return _connection.Execute(loaderCommand, c =>
            {
                using (var reader = loaderCommand.ExecuteReader())
                {
                    return resolver.Fetch(reader, _serializer);
                }
            });
        }

        public Task<FetchResult<T>> LoadDocumentAsync<T>(object id, CancellationToken token) where T : class
        {
            assertNotDisposed();
            var docStorage = storage<T>();
            var resolver = docStorage.As<IResolver<T>>();
            var loaderCommand = docStorage.LoaderCommand(id);

            return _connection.ExecuteAsync(loaderCommand, async (c, tkn) =>
            {
                using (var reader = await loaderCommand.ExecuteReaderAsync(tkn).ConfigureAwait(false))
                {
                    return await resolver.FetchAsync(reader, _serializer, token).ConfigureAwait(false);
                }
            }, token);
        }

        public T Load<T>(string id) => load<T>(id);

        public Task<T> LoadAsync<T>(string id, CancellationToken token) => loadAsync<T>(id, token);

        public T Load<T>(ValueType id) => load<T>(id);

        // Resolves a single document through the identity map.
        private T load<T>(object id)
        {
            assertNotDisposed();
            return storage<T>().As<IResolver<T>>().Resolve(_identityMap, this, id);
        }

        private Task<T> loadAsync<T>(object id, CancellationToken token)
        {
            assertNotDisposed();
            return storage<T>().As<IResolver<T>>().ResolveAsync(_identityMap, this, token, id);
        }

        /// <summary>
        /// Entry point for loading several documents of type T by their ids.
        /// </summary>
        public ILoadByKeys<T> LoadMany<T>()
        {
            assertNotDisposed();
            return new LoadByKeys<T>(this);
        }

        public IList<T> LoadMany<T>(params string[] ids)
        {
            assertNotDisposed();
            return LoadMany<T>().ById(ids);
        }

        public IList<T> LoadMany<T>(params Guid[] ids)
        {
            assertNotDisposed();
            return LoadMany<T>().ById(ids);
        }

        public IList<T> LoadMany<T>(params int[] ids)
        {
            assertNotDisposed();
            return LoadMany<T>().ById(ids);
        }

        public IList<T> LoadMany<T>(params long[] ids)
        {
            assertNotDisposed();
            return LoadMany<T>().ById(ids);
        }

        public Task<IList<T>> LoadManyAsync<T>(params string[] ids) => LoadMany<T>().ByIdAsync(ids);

        public Task<IList<T>> LoadManyAsync<T>(params Guid[] ids) => LoadMany<T>().ByIdAsync(ids);

        public Task<IList<T>> LoadManyAsync<T>(params int[] ids) => LoadMany<T>().ByIdAsync(ids);

        public Task<IList<T>> LoadManyAsync<T>(params long[] ids) => LoadMany<T>().ByIdAsync(ids);

        public Task<IList<T>> LoadManyAsync<T>(CancellationToken token, params string[] ids) => LoadMany<T>().ByIdAsync(ids, token);

        public Task<IList<T>> LoadManyAsync<T>(CancellationToken token, params Guid[] ids) => LoadMany<T>().ByIdAsync(ids, token);

        public Task<IList<T>> LoadManyAsync<T>(CancellationToken token, params int[] ids) => LoadMany<T>().ByIdAsync(ids, token);

        public Task<IList<T>> LoadManyAsync<T>(CancellationToken token, params long[] ids) => LoadMany<T>().ByIdAsync(ids, token);

        // Splits the requested keys into identity-map hits and misses, fetches the
        // misses from the database and concatenates both sets.
        private class LoadByKeys<TDoc> : ILoadByKeys<TDoc>
        {
            private readonly QuerySession _parent;

            public LoadByKeys(QuerySession parent)
            {
                _parent = parent;
            }

            public IList<TDoc> ById<TKey>(params TKey[] keys)
            {
                var split = hitsAndMisses(keys);
                return concatDocuments(split.Item1, fetchDocuments(split.Item2));
            }

            public Task<IList<TDoc>> ByIdAsync<TKey>(params TKey[] keys) => ByIdAsync(keys, CancellationToken.None);

            public IList<TDoc> ById<TKey>(IEnumerable<TKey> keys) => ById(keys.ToArray());

            public async Task<IList<TDoc>> ByIdAsync<TKey>(IEnumerable<TKey> keys,
                CancellationToken token = default(CancellationToken))
            {
                var split = hitsAndMisses(keys.ToArray());
                var fetched = await fetchDocumentsAsync(split.Item2, token).ConfigureAwait(false);
                return concatDocuments(split.Item1, fetched);
            }

            // Cached documents come from the identity map; freshly fetched ones follow.
            private IList<TDoc> concatDocuments<TKey>(TKey[] hits, IEnumerable<TDoc> documents)
            {
                return hits
                    .Select(key => _parent._identityMap.Retrieve<TDoc>(key))
                    .Concat(documents)
                    .ToList();
            }

            // Item1 = keys already in the identity map, Item2 = keys that must be fetched.
            private Tuple<TKey[], TKey[]> hitsAndMisses<TKey>(TKey[] keys)
            {
                var cached = keys.Where(key => _parent._identityMap.Has<TDoc>(key)).ToArray();
                var remaining = keys.Where(key => !cached.Contains(key)).ToArray();
                return new Tuple<TKey[], TKey[]>(cached, remaining);
            }

            private IEnumerable<TDoc> fetchDocuments<TKey>(TKey[] keys)
            {
                var docStorage = _parent._schema.StorageFor(typeof(TDoc));
                var resolver = docStorage.As<IResolver<TDoc>>();
                var loadCommand = docStorage.LoadByArrayCommand(keys);
                var found = new List<TDoc>();

                _parent._connection.Execute(loadCommand, c =>
                {
                    using (var reader = loadCommand.ExecuteReader())
                    {
                        while (reader.Read())
                        {
                            found.Add(resolver.Resolve(0, reader, _parent._identityMap));
                        }
                    }
                });

                return found;
            }

            private async Task<IEnumerable<TDoc>> fetchDocumentsAsync<TKey>(TKey[] keys, CancellationToken token)
            {
                var docStorage = _parent._schema.StorageFor(typeof(TDoc));
                var resolver = docStorage.As<IResolver<TDoc>>();
                var loadCommand = docStorage.LoadByArrayCommand(keys);
                var found = new List<TDoc>();

                await _parent._connection.ExecuteAsync(loadCommand, async (conn, tkn) =>
                {
                    using (var reader = await loadCommand.ExecuteReaderAsync(tkn).ConfigureAwait(false))
                    {
                        while (await reader.ReadAsync(tkn).ConfigureAwait(false))
                        {
                            found.Add(resolver.Resolve(0, reader, _parent._identityMap));
                        }
                    }
                }, token).ConfigureAwait(false);

                return found;
            }
        }

        /// <summary>
        /// Executes a compiled query, reusing its cached query plan.
        /// </summary>
        public TOut Query<TDoc, TOut>(ICompiledQuery<TDoc, TOut> query)
        {
            assertNotDisposed();
            QueryStatistics stats;
            var compiledHandler = _schema.HandlerFactory.HandlerFor(query, out stats);
            return _connection.Fetch(compiledHandler, _identityMap.ForQuery(), stats);
        }

        /// <summary>
        /// Async variant of the compiled query execution.
        /// </summary>
        public Task<TOut> QueryAsync<TDoc, TOut>(ICompiledQuery<TDoc, TOut> query, CancellationToken token = new CancellationToken())
        {
            assertNotDisposed();
            QueryStatistics stats;
            var compiledHandler = _schema.HandlerFactory.HandlerFor(query, out stats);
            return _connection.FetchAsync(compiledHandler, _identityMap.ForQuery(), stats, token);
        }

        // Exposes the underlying Npgsql connection for advanced scenarios.
        public NpgsqlConnection Connection
        {
            get
            {
                assertNotDisposed();
                return _connection.Connection;
            }
        }

        // Per-session logger, stored on the managed connection.
        public IMartenSessionLogger Logger
        {
            get { return _connection.As<ManagedConnection>().Logger; }
            set { _connection.As<ManagedConnection>().Logger = value; }
        }

        // Number of commands issued through this session's connection.
        public int RequestCount
        {
            get { return _connection.RequestCount; }
        }

        public void Dispose()
        {
            _disposed = true;
            _connection.Dispose();
        }

        public T Load<T>(int id) => load<T>(id);

        public T Load<T>(long id) => load<T>(id);

        public T Load<T>(Guid id) => load<T>(id);

        public Task<T> LoadAsync<T>(int id, CancellationToken token = new CancellationToken()) => loadAsync<T>(id, token);

        public Task<T> LoadAsync<T>(long id, CancellationToken token = new CancellationToken()) => loadAsync<T>(id, token);

        public Task<T> LoadAsync<T>(Guid id, CancellationToken token = new CancellationToken()) => loadAsync<T>(id, token);
    }
}
// Copyright (c) Microsoft Corporation. All rights reserved.
// Licensed under the MIT License. See License.txt in the project root for license information.

using CoreFtp;
using Microsoft.Azure.Management.AppService.Fluent;
using Microsoft.Azure.Management.Fluent;
using Microsoft.Azure.Management.ResourceManager.Fluent;
using Microsoft.Azure.Management.ResourceManager.Fluent.Core;
using Microsoft.Azure.Management.Samples.Common;
using System;
using System.Diagnostics;
using System.IO;
using System.Net.Http;
using System.Threading;
using System.Threading.Tasks;

namespace ManageWebAppSourceControlAsync
{
    public class Program
    {
        // DNS suffix shared by every Azure web app.
        private const string Suffix = ".azurewebsites.net";

        /**
         * Azure App Service basic sample for managing web apps.
         * Note: you need to have the Git command line available on your PATH. The sample makes a direct call to 'git'.
         *  - Create 4 web apps under the same new app service plan:
         *    - Deploy to 1 using FTP
         *    - Deploy to 2 using local Git repository
         *    - Deploy to 3 using a publicly available Git repository
         *    - Deploy to 4 using a GitHub repository with continuous integration
         */
        public async static Task RunSampleAsync(IAzure azure)
        {
            // Randomized names so repeated runs do not collide.
            string app1Name = SdkContext.RandomResourceName("webapp1-", 20);
            string app2Name = SdkContext.RandomResourceName("webapp2-", 20);
            string app3Name = SdkContext.RandomResourceName("webapp3-", 20);
            string app4Name = SdkContext.RandomResourceName("webapp4-", 20);
            string app1Url = app1Name + Suffix;
            string app2Url = app2Name + Suffix;
            string app3Url = app3Name + Suffix;
            string app4Url = app4Name + Suffix;
            string rgName = SdkContext.RandomResourceName("rg1NEMV_", 24);

            try
            {
                //============================================================
                // Create a web app with a new app service plan

                Utilities.Log("Creating web app " + app1Name + " in resource group " + rgName + "...");

                var app1 = await azure.WebApps
                        .Define(app1Name)
                        .WithRegion(Region.USWest)
                        .WithNewResourceGroup(rgName)
                        .WithNewWindowsPlan(PricingTier.StandardS1)
                        .WithJavaVersion(JavaVersion.V8Newest)
                        .WithWebContainer(WebContainer.Tomcat8_0Newest)
                        .CreateAsync();

                Utilities.Log("Created web app " + app1.Name);
                Utilities.Print(app1);

                //============================================================
                // Deploy to app 1 through FTP

                Utilities.Log("Deploying helloworld.War to " + app1Name + " through FTP...");

                Utilities.UploadFileToWebApp(
                    await app1.GetPublishingProfileAsync(),
                    Path.Combine(Utilities.ProjectPath, "Asset", "helloworld.war"));

                Utilities.Log("Deployment helloworld.War to web app " + app1.Name + " completed");
                Utilities.Print(app1);

                // warm up
                Utilities.Log("Warming up " + app1Url + "/helloworld...");
                Utilities.CheckAddress("http://" + app1Url + "/helloworld");
                // NOTE(review): synchronous delay inside an async method — presumably acceptable
                // for a sample; confirm whether DelayProvider offers an awaitable variant.
                SdkContext.DelayProvider.Delay(5000);
                Utilities.Log("CURLing " + app1Url + "/helloworld...");
                Utilities.Log(Utilities.CheckAddress("http://" + app1Url + "/helloworld"));

                //============================================================
                // Create a second web app with local git source control

                Utilities.Log("Creating another web app " + app2Name + " in resource group " + rgName + "...");
                // Reuse the plan created implicitly for app 1.
                var plan = await azure.AppServices.AppServicePlans.GetByIdAsync(app1.AppServicePlanId);
                var app2 = await azure.WebApps
                        .Define(app2Name)
                        .WithExistingWindowsPlan(plan)
                        .WithExistingResourceGroup(rgName)
                        .WithLocalGitSourceControl()
                        .WithJavaVersion(JavaVersion.V8Newest)
                        .WithWebContainer(WebContainer.Tomcat8_0Newest)
                        .CreateAsync();

                Utilities.Log("Created web app " + app2.Name);
                Utilities.Print(app2);

                //============================================================
                // Deploy to app 2 through local Git

                Utilities.Log("Deploying a local Tomcat source to " + app2Name + " through Git...");

                var profile = await app2.GetPublishingProfileAsync();
                Utilities.DeployByGit(profile, "azure-samples-appservice-helloworld");

                Utilities.Log("Deployment to web app " + app2.Name + " completed");
                Utilities.Print(app2);

                // warm up
                Utilities.Log("Warming up " + app2Url + "/helloworld...");
                Utilities.CheckAddress("http://" + app2Url + "/helloworld");
                SdkContext.DelayProvider.Delay(5000);
                Utilities.Log("CURLing " + app2Url + "/helloworld...");
                Utilities.Log(Utilities.CheckAddress("http://" + app2Url + "/helloworld"));

                //============================================================
                // Create a 3rd web app with a public GitHub repo in Azure-Samples

                Utilities.Log("Creating another web app " + app3Name + "...");
                // NOTE(review): uses WithNewResourceGroup with the already-created rgName
                // (apps 2 and 4 use WithExistingResourceGroup) — verify this is intended.
                var app3 = await azure.WebApps
                        .Define(app3Name)
                        .WithExistingWindowsPlan(plan)
                        .WithNewResourceGroup(rgName)
                        .DefineSourceControl()
                            .WithPublicGitRepository("https://github.com/Azure-Samples/app-service-web-dotnet-get-started")
                            .WithBranch("master")
                            .Attach()
                        .CreateAsync();

                Utilities.Log("Created web app " + app3.Name);
                Utilities.Print(app3);

                // warm up
                Utilities.Log("Warming up " + app3Url + "...");
                Utilities.CheckAddress("http://" + app3Url);
                SdkContext.DelayProvider.Delay(5000);
                Utilities.Log("CURLing " + app3Url + "...");
                Utilities.Log(Utilities.CheckAddress("http://" + app3Url));

                //============================================================
                // Create a 4th web app with a personal GitHub repo and turn on continuous integration

                Utilities.Log("Creating another web app " + app4Name + "...");
                var app4 = await azure.WebApps
                        .Define(app4Name)
                        .WithExistingWindowsPlan(plan)
                        .WithExistingResourceGroup(rgName)
                        // Uncomment the following lines to turn on 4th scenario
                        //.DefineSourceControl()
                        //    .WithContinuouslyIntegratedGitHubRepository("username", "reponame")
                        //    .WithBranch("master")
                        //    .WithGitHubAccessToken("YOUR GITHUB PERSONAL TOKEN")
                        //    .Attach()
                        .CreateAsync();

                Utilities.Log("Created web app " + app4.Name);
                Utilities.Print(app4);

                // warm up
                Utilities.Log("Warming up " + app4Url + "...");
                Utilities.CheckAddress("http://" + app4Url);
                SdkContext.DelayProvider.Delay(5000);
                Utilities.Log("CURLing " + app4Url + "...");
                Utilities.Log(Utilities.CheckAddress("http://" + app4Url));
            }
            catch (FileNotFoundException)
            {
                // Raised by the direct 'git' invocation in Utilities.DeployByGit.
                Utilities.Log("Cannot find 'git' command line. Make sure Git is installed and the directory of git.exe is included in your PATH environment variable.");
            }
            finally
            {
                // Best-effort cleanup of everything created by the sample.
                try
                {
                    Utilities.Log("Deleting Resource Group: " + rgName);
                    await azure.ResourceGroups.DeleteByNameAsync(rgName);
                    Utilities.Log("Deleted Resource Group: " + rgName);
                }
                catch (NullReferenceException)
                {
                    Utilities.Log("Did not create any resources in Azure. No clean up is necessary");
                }
                catch (Exception g)
                {
                    Utilities.Log(g);
                }
            }
        }

        public static void Main(string[] args)
        {
            try
            {
                //=================================================================
                // Authenticate
                // Credentials file path comes from the AZURE_AUTH_LOCATION environment variable.
                var credentials = SdkContext.AzureCredentialsFactory.FromFile(Environment.GetEnvironmentVariable("AZURE_AUTH_LOCATION"));

                var azure = Azure
                    .Configure()
                    .WithLogLevel(HttpLoggingDelegatingHandler.Level.Basic)
                    .Authenticate(credentials)
                    .WithDefaultSubscription();

                // Print selected subscription
                Utilities.Log("Selected subscription: " + azure.SubscriptionId);

                // Sample entry point is async; Main blocks until it completes.
                RunSampleAsync(azure).GetAwaiter().GetResult();
            }
            catch (Exception e)
            {
                Utilities.Log(e);
            }
        }
    }
}
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE file in the project root for more information.

using System.Collections;
using System.Diagnostics.CodeAnalysis;
using System.Globalization;

namespace System.ComponentModel
{
    /// <summary>
    /// <para>Converts the value of an object into a different data type.</para>
    /// </summary>
    /// <remarks>
    /// The base implementation only supports conversion TO <see cref="string"/>
    /// (via <see cref="object.ToString"/>); every other conversion throws
    /// <see cref="NotSupportedException"/>. Derived converters override the
    /// virtual context-taking members to add real conversions.
    /// </remarks>
    public class TypeConverter
    {
        /// <summary>
        /// <para>Gets a value indicating whether this converter can convert an object in the
        /// given source type to the native type of the converter.</para>
        /// </summary>
        // Convenience overload: forwards with a null context.
        public bool CanConvertFrom(Type sourceType)
        {
            return CanConvertFrom(null, sourceType);
        }

        /// <summary>
        /// <para>Gets a value indicating whether this converter can
        /// convert an object in the given source type to the native type of the converter
        /// using the context.</para>
        /// </summary>
        // Base implementation supports no source types; overriders widen this.
        public virtual bool CanConvertFrom(ITypeDescriptorContext context, Type sourceType)
        {
            return false;
        }

        /// <summary>
        /// <para>Gets a value indicating whether this converter can
        /// convert an object to the given destination type using the context.</para>
        /// </summary>
        public bool CanConvertTo(Type destinationType)
        {
            return CanConvertTo(null, destinationType);
        }

        /// <summary>
        /// <para>Gets a value indicating whether this converter can
        /// convert an object to the given destination type using the context.</para>
        /// </summary>
        // Base implementation can always convert to string (see ConvertTo below).
        public virtual bool CanConvertTo(ITypeDescriptorContext context, Type destinationType)
        {
            return (destinationType == typeof(string));
        }

        /// <summary>
        /// <para>Converts the given value
        /// to the converter's native type.</para>
        /// </summary>
        // Uses the current (UI) culture when none is supplied.
        public object ConvertFrom(object value)
        {
            return ConvertFrom(null, CultureInfo.CurrentCulture, value);
        }

        /// <summary>
        /// <para>Converts the given object to the converter's native type.</para>
        /// </summary>
        /// <exception cref="NotSupportedException">Always thrown by this base implementation.</exception>
        public virtual object ConvertFrom(ITypeDescriptorContext context, CultureInfo culture, object value)
        {
            throw GetConvertFromException(value);
        }

        /// <summary>
        /// Converts the given string to the converter's native type using the invariant culture.
        /// </summary>
        public object ConvertFromInvariantString(string text)
        {
            return ConvertFromString(null, CultureInfo.InvariantCulture, text);
        }

        /// <summary>
        /// Converts the given string to the converter's native type using the invariant culture.
        /// </summary>
        public object ConvertFromInvariantString(ITypeDescriptorContext context, string text)
        {
            return ConvertFromString(context, CultureInfo.InvariantCulture, text);
        }

        /// <summary>
        /// <para>Converts the specified text into an object.</para>
        /// </summary>
        // NOTE: deliberately passes a null culture (unlike the context overload below,
        // which substitutes CurrentCulture) — preserved for compatibility.
        public object ConvertFromString(string text)
        {
            return ConvertFrom(null, null, text);
        }

        /// <summary>
        /// <para>Converts the specified text into an object.</para>
        /// </summary>
        public object ConvertFromString(ITypeDescriptorContext context, string text)
        {
            return ConvertFrom(context, CultureInfo.CurrentCulture, text);
        }

        /// <summary>
        /// <para>Converts the specified text into an object.</para>
        /// </summary>
        public object ConvertFromString(ITypeDescriptorContext context, CultureInfo culture, string text)
        {
            return ConvertFrom(context, culture, text);
        }

        /// <summary>
        /// <para>Converts the given
        /// value object to the specified destination type using the arguments.</para>
        /// </summary>
        public object ConvertTo(object value, Type destinationType)
        {
            return ConvertTo(null, null, value, destinationType);
        }

        /// <summary>
        /// <para>Converts the given value object to
        /// the specified destination type using the specified context and arguments.</para>
        /// </summary>
        /// <exception cref="ArgumentNullException"><paramref name="destinationType"/> is null.</exception>
        /// <exception cref="NotSupportedException">The destination type is not string.</exception>
        public virtual object ConvertTo(ITypeDescriptorContext context, CultureInfo culture, object value, Type destinationType)
        {
            if (destinationType == null)
            {
                throw new ArgumentNullException(nameof(destinationType));
            }

            if (destinationType == typeof(string))
            {
                if (value == null)
                {
                    // Null converts to the empty string, never to null.
                    return string.Empty;
                }

                // Honor an explicitly supplied, non-current culture via IFormattable;
                // otherwise fall through to plain ToString() (which already uses the
                // current culture for formattable types).
                if (culture != null && culture != CultureInfo.CurrentCulture)
                {
                    IFormattable formattable = value as IFormattable;
                    if (formattable != null)
                    {
                        return formattable.ToString(/* format = */ null, /* formatProvider = */ culture);
                    }
                }
                return value.ToString();
            }
            throw GetConvertToException(value, destinationType);
        }

        /// <summary>
        /// <para>Converts the specified value to a culture-invariant string representation.</para>
        /// </summary>
        public string ConvertToInvariantString(object value)
        {
            return ConvertToString(null, CultureInfo.InvariantCulture, value);
        }

        /// <summary>
        /// <para>Converts the specified value to a culture-invariant string representation.</para>
        /// </summary>
        [SuppressMessage("Microsoft.Security", "CA2122:DoNotIndirectlyExposeMethodsWithLinkDemands")]
        public string ConvertToInvariantString(ITypeDescriptorContext context, object value)
        {
            return ConvertToString(context, CultureInfo.InvariantCulture, value);
        }

        /// <summary>
        /// <para>Converts the specified value to a string representation.</para>
        /// </summary>
        public string ConvertToString(object value)
        {
            return (string)ConvertTo(null, CultureInfo.CurrentCulture, value, typeof(string));
        }

        /// <summary>
        /// <para>Converts the specified value to a string representation.</para>
        /// </summary>
        public string ConvertToString(ITypeDescriptorContext context, object value)
        {
            return (string)ConvertTo(context, CultureInfo.CurrentCulture, value, typeof(string));
        }

        /// <summary>
        /// <para>Converts the specified value to a string representation.</para>
        /// </summary>
        public string ConvertToString(ITypeDescriptorContext context, CultureInfo culture, object value)
        {
            return (string)ConvertTo(context, culture, value, typeof(string));
        }

        /// <summary>
        /// <para>Re-creates an <see cref='System.Object'/> given a set of property values for the object.</para>
        /// </summary>
        public object CreateInstance(IDictionary propertyValues)
        {
            return CreateInstance(null, propertyValues);
        }

        /// <summary>
        /// <para>Re-creates an <see cref='System.Object'/> given a set of property values for the object.</para>
        /// </summary>
        /// <returns>Null in this base implementation; overriders return the re-created instance.</returns>
        public virtual object CreateInstance(ITypeDescriptorContext context, IDictionary propertyValues)
        {
            return null;
        }

        /// <summary>
        /// <para>Gets a suitable exception to throw when a conversion cannot be performed.</para>
        /// </summary>
        /// <remarks>
        /// NOTE: despite the declared return type, this method THROWS the exception itself
        /// (a long-standing compatibility quirk); callers that write
        /// <c>throw GetConvertFromException(...)</c> never reach their own throw.
        /// </remarks>
        protected Exception GetConvertFromException(object value)
        {
            string valueTypeName;

            if (value == null)
            {
                valueTypeName = SR.Null;
            }
            else
            {
                valueTypeName = value.GetType().FullName;
            }

            throw new NotSupportedException(SR.Format(SR.ConvertFromException, GetType().Name, valueTypeName));
        }

        /// <summary>
        /// <para>Retrieves a suitable exception to throw when a conversion cannot
        /// be performed.</para>
        /// </summary>
        /// <remarks>Like <see cref="GetConvertFromException"/>, this throws rather than returns.</remarks>
        protected Exception GetConvertToException(object value, Type destinationType)
        {
            string valueTypeName;

            if (value == null)
            {
                valueTypeName = SR.Null;
            }
            else
            {
                valueTypeName = value.GetType().FullName;
            }

            throw new NotSupportedException(SR.Format(SR.ConvertToException, GetType().Name, valueTypeName, destinationType.FullName));
        }

        /// <summary>
        /// <para>
        /// Gets a value indicating whether changing a value on this object requires a call to
        /// <see cref='System.ComponentModel.TypeConverter.CreateInstance'/> to create a new value.
        /// </para>
        /// </summary>
        public bool GetCreateInstanceSupported()
        {
            return GetCreateInstanceSupported(null);
        }

        /// <summary>
        /// <para>
        /// Gets a value indicating whether changing a value on this object requires a call to
        /// <see cref='System.ComponentModel.TypeConverter.CreateInstance'/> to create a new value,
        /// using the specified context.
        /// </para>
        /// </summary>
        public virtual bool GetCreateInstanceSupported(ITypeDescriptorContext context)
        {
            return false;
        }

#if !NETSTANDARD10
        /// <summary>
        /// <para>Gets a collection of properties for the type of array specified by the value parameter.</para>
        /// </summary>
        public PropertyDescriptorCollection GetProperties(object value)
        {
            return GetProperties(null, value);
        }

        /// <summary>
        /// <para>
        /// Gets a collection of properties for the type of array specified by the value parameter using
        /// the specified context.
        /// </para>
        /// </summary>
        // Default attribute filter: browsable properties only.
        public PropertyDescriptorCollection GetProperties(ITypeDescriptorContext context, object value)
        {
            return GetProperties(context, value, new Attribute[] { BrowsableAttribute.Yes });
        }

        /// <summary>
        /// <para>
        /// Gets a collection of properties for the type of array specified by the value parameter using
        /// the specified context and attributes.
        /// </para>
        /// </summary>
        /// <returns>Null in this base implementation (no child properties).</returns>
        public virtual PropertyDescriptorCollection GetProperties(ITypeDescriptorContext context, object value, Attribute[] attributes)
        {
            return null;
        }
#endif // !NETSTANDARD10

        /// <summary>
        /// <para>Gets a value indicating whether this object supports properties.</para>
        /// </summary>
        public bool GetPropertiesSupported()
        {
            return GetPropertiesSupported(null);
        }

        /// <summary>
        /// <para>Gets a value indicating whether this object supports properties using the specified context.</para>
        /// </summary>
        public virtual bool GetPropertiesSupported(ITypeDescriptorContext context)
        {
            return false;
        }

        /// <summary>
        /// <para>Gets a collection of standard values for the data type this type converter is designed for.</para>
        /// </summary>
        public ICollection GetStandardValues()
        {
            return GetStandardValues(null);
        }

        /// <summary>
        /// <para>Gets a collection of standard values for the data type this type converter is designed for.</para>
        /// </summary>
        /// <returns>Null in this base implementation (no standard values).</returns>
        public virtual StandardValuesCollection GetStandardValues(ITypeDescriptorContext context)
        {
            return null;
        }

        /// <summary>
        /// <para>
        /// Gets a value indicating whether the collection of standard values returned from
        /// <see cref='System.ComponentModel.TypeConverter.GetStandardValues'/> is an exclusive list.
        /// </para>
        /// </summary>
        public bool GetStandardValuesExclusive()
        {
            return GetStandardValuesExclusive(null);
        }

        /// <summary>
        /// <para>
        /// Gets a value indicating whether the collection of standard values returned from
        /// <see cref='System.ComponentModel.TypeConverter.GetStandardValues'/> is an exclusive
        /// list of possible values, using the specified context.
        /// </para>
        /// </summary>
        public virtual bool GetStandardValuesExclusive(ITypeDescriptorContext context)
        {
            return false;
        }

        /// <summary>
        /// <para>
        /// Gets a value indicating whether this object supports a standard set of values
        /// that can be picked from a list.
        /// </para>
        /// </summary>
        public bool GetStandardValuesSupported()
        {
            return GetStandardValuesSupported(null);
        }

        /// <summary>
        /// <para>
        /// Gets a value indicating whether this object supports a standard set of values that can be picked
        /// from a list using the specified context.
        /// </para>
        /// </summary>
        public virtual bool GetStandardValuesSupported(ITypeDescriptorContext context)
        {
            return false;
        }

        /// <summary>
        /// <para>Gets a value indicating whether the given value object is valid for this type.</para>
        /// </summary>
        public bool IsValid(object value)
        {
            return IsValid(null, value);
        }

        /// <summary>
        /// <para>Gets a value indicating whether the given value object is valid for this type.</para>
        /// </summary>
        /// <remarks>
        /// Validity is probed by actually attempting the conversion; ANY exception from
        /// ConvertFrom is treated as "invalid" (deliberate catch-all, not an oversight).
        /// </remarks>
        public virtual bool IsValid(ITypeDescriptorContext context, object value)
        {
            bool isValid = true;
            try
            {
                // Because null doesn't have a type, so we couldn't pass this to CanConvertFrom.
                // Meanwhile, we couldn't silence null value here, such as type converter like
                // NullableConverter would consider null value as a valid value.
                if (value == null || CanConvertFrom(context, value.GetType()))
                {
                    ConvertFrom(context, CultureInfo.InvariantCulture, value);
                }
                else
                {
                    isValid = false;
                }
            }
            catch
            {
                isValid = false;
            }
            return isValid;
        }

#if !NETSTANDARD10
        /// <summary>
        /// <para>Sorts a collection of properties.</para>
        /// </summary>
        // Sorts in place and returns the same collection instance.
        protected PropertyDescriptorCollection SortProperties(PropertyDescriptorCollection props, string[] names)
        {
            props.Sort(names);
            return props;
        }

        /// <summary>
        /// <para>
        /// An <see langword='abstract '/> class that provides properties for objects that do not have properties.
        /// </para>
        /// </summary>
        protected abstract class SimplePropertyDescriptor : PropertyDescriptor
        {
            // Type that owns the described property.
            private Type _componentType;
            // Declared type of the described property.
            private Type _propertyType;

            /// <summary>
            /// <para>
            /// Initializes a new instance of the <see cref='System.ComponentModel.TypeConverter.SimplePropertyDescriptor'/> class.
            /// </para>
            /// </summary>
            protected SimplePropertyDescriptor(Type componentType, string name, Type propertyType) : this(componentType, name, propertyType, new Attribute[0])
            {
            }

            /// <summary>
            /// <para>
            /// Initializes a new instance of the <see cref='System.ComponentModel.TypeConverter.SimplePropertyDescriptor'/> class.
            /// </para>
            /// </summary>
            protected SimplePropertyDescriptor(Type componentType, string name, Type propertyType, Attribute[] attributes) : base(name, attributes)
            {
                _componentType = componentType;
                _propertyType = propertyType;
            }

            /// <summary>
            /// <para>Gets the type of the component this property description is bound to.</para>
            /// </summary>
            public override Type ComponentType
            {
                get
                {
                    return _componentType;
                }
            }

            /// <summary>
            /// <para>Gets a value indicating whether this property is read-only.</para>
            /// </summary>
            // Read-only iff the descriptor carries ReadOnlyAttribute.Yes.
            public override bool IsReadOnly
            {
                get
                {
                    return Attributes.Contains(ReadOnlyAttribute.Yes);
                }
            }

            /// <summary>
            /// <para>Gets the type of the property.</para>
            /// </summary>
            public override Type PropertyType
            {
                get
                {
                    return _propertyType;
                }
            }

            /// <summary>
            /// <para>
            /// Gets a value indicating whether resetting the component will change the value of the component.
            /// </para>
            /// </summary>
            // Resettable only when a DefaultValueAttribute is present and the current
            // value differs... NOTE(review): this actually returns true when the current
            // value EQUALS the default — preserved as-is for compatibility.
            public override bool CanResetValue(object component)
            {
                DefaultValueAttribute attr = (DefaultValueAttribute)Attributes[typeof(DefaultValueAttribute)];
                if (attr == null)
                {
                    return false;
                }
                return (attr.Value.Equals(GetValue(component)));
            }

            /// <summary>
            /// <para>Resets the value for this property of the component.</para>
            /// </summary>
            // No-op when there is no DefaultValueAttribute to reset to.
            public override void ResetValue(object component)
            {
                DefaultValueAttribute attr = (DefaultValueAttribute)Attributes[typeof(DefaultValueAttribute)];
                if (attr != null)
                {
                    SetValue(component, attr.Value);
                }
            }

            /// <summary>
            /// <para>Gets a value indicating whether the value of this property needs to be persisted.</para>
            /// </summary>
            public override bool ShouldSerializeValue(object component)
            {
                return false;
            }
        }
#endif // !NETSTANDARD10

        /// <summary>
        /// <para>Represents a collection of values.</para>
        /// </summary>
        public class StandardValuesCollection : ICollection
        {
            // Wrapped source collection (never null after construction).
            private ICollection _values;
            // Array view of _values, populated eagerly when the source IS an array,
            // or lazily by the indexer when the source supports neither Array nor IList.
            private Array _valueArray;

            /// <summary>
            /// <para>
            /// Initializes a new instance of the <see cref='System.ComponentModel.TypeConverter.StandardValuesCollection'/> class.
            /// </para>
            /// </summary>
            // A null source is normalized to an empty collection.
            public StandardValuesCollection(ICollection values)
            {
                if (values == null)
                {
                    values = new object[0];
                }

                Array a = values as Array;
                if (a != null)
                {
                    _valueArray = a;
                }

                _values = values;
            }

            /// <summary>
            /// <para>
            /// Gets the number of objects in the collection.
            /// </para>
            /// </summary>
            public int Count
            {
                get
                {
                    if (_valueArray != null)
                    {
                        return _valueArray.Length;
                    }
                    else
                    {
                        return _values.Count;
                    }
                }
            }

            /// <summary>
            /// <para>Gets the object at the specified index number.</para>
            /// </summary>
            // Fast paths: array access, then IList indexer. Otherwise the collection is
            // copied into an array once and cached for subsequent indexed reads.
            public object this[int index]
            {
                get
                {
                    if (_valueArray != null)
                    {
                        return _valueArray.GetValue(index);
                    }

                    IList list = _values as IList;
                    if (list != null)
                    {
                        return list[index];
                    }

                    // No other choice but to enumerate the collection.
                    //
                    _valueArray = new object[_values.Count];
                    _values.CopyTo(_valueArray, 0);
                    return _valueArray.GetValue(index);
                }
            }

            /// <summary>
            /// <para>
            /// Copies the contents of this collection to an array.
            /// </para>
            /// </summary>
            public void CopyTo(Array array, int index)
            {
                _values.CopyTo(array, index);
            }

            /// <summary>
            /// <para>
            /// Gets an enumerator for this collection.
            /// </para>
            /// </summary>
            public IEnumerator GetEnumerator()
            {
                return _values.GetEnumerator();
            }

            /// <internalonly/>
            /// <summary>
            /// Determines if this collection is synchronized. The ValidatorCollection is not synchronized for
            /// speed. Also, since it is read-only, there is no need to synchronize it.
            /// </summary>
            bool ICollection.IsSynchronized
            {
                get
                {
                    return false;
                }
            }

            /// <internalonly/>
            /// <summary>
            /// Retrieves the synchronization root for this collection. Because we are not synchronized,
            /// this returns null.
            /// </summary>
            object ICollection.SyncRoot
            {
                get
                {
                    return null;
                }
            }
        }
    }
}
// Copyright (c) Microsoft. All rights reserved. // Licensed under the MIT license. See LICENSE file in the project root for full license information. using System.Collections.Generic; using System.IO; using System.Text; using System.Diagnostics; using System.Globalization; namespace System.Xml { // Specifies formatting options for XmlTextWriter. internal enum Formatting { // No special formatting is done (this is the default). None, //This option causes child elements to be indented using the Indentation and IndentChar properties. // It only indents Element Content (http://www.w3.org/TR/1998/REC-xml-19980210#sec-element-content) // and not Mixed Content (http://www.w3.org/TR/1998/REC-xml-19980210#sec-mixed-content) // according to the XML 1.0 definitions of these terms. Indented, }; // Represents a writer that provides fast non-cached forward-only way of generating XML streams // containing XML documents that conform to the W3CExtensible Markup Language (XML) 1.0 specification // and the Namespaces in XML specification. [System.ComponentModel.EditorBrowsable(System.ComponentModel.EditorBrowsableState.Never)] internal class XmlTextWriter : XmlWriter { // // Private types // enum NamespaceState { Uninitialized, NotDeclaredButInScope, DeclaredButNotWrittenOut, DeclaredAndWrittenOut } struct TagInfo { internal string name; internal string prefix; internal string defaultNs; internal NamespaceState defaultNsState; internal XmlSpace xmlSpace; internal string xmlLang; internal int prevNsTop; internal int prefixCount; internal bool mixed; // whether to pretty print the contents of this element. 
internal void Init(int nsTop) { name = null; defaultNs = String.Empty; defaultNsState = NamespaceState.Uninitialized; xmlSpace = XmlSpace.None; xmlLang = null; prevNsTop = nsTop; prefixCount = 0; mixed = false; } } struct Namespace { internal string prefix; internal string ns; internal bool declared; internal int prevNsIndex; internal void Set(string prefix, string ns, bool declared) { this.prefix = prefix; this.ns = ns; this.declared = declared; this.prevNsIndex = -1; } } enum SpecialAttr { None, XmlSpace, XmlLang, XmlNs }; // State machine is working through autocomplete private enum State { Start, Prolog, PostDTD, Element, Attribute, Content, AttrOnly, Epilog, Error, Closed, } private enum Token { PI, Doctype, Comment, CData, StartElement, EndElement, LongEndElement, StartAttribute, EndAttribute, Content, Base64, RawData, Whitespace, Empty } // // Fields // // output TextWriter textWriter; XmlTextEncoder xmlEncoder; Encoding encoding; // formatting Formatting formatting; bool indented; // perf - faster to check a boolean. 
int indentation; char indentChar; // element stack TagInfo[] stack; int top; // state machine for AutoComplete State[] stateTable; State currentState; Token lastToken; // Base64 content XmlTextWriterBase64Encoder base64Encoder; // misc char quoteChar; char curQuoteChar; bool namespaces; SpecialAttr specialAttr; string prefixForXmlNs; bool flush; // namespaces Namespace[] nsStack; int nsTop; Dictionary<string, int> nsHashtable; bool useNsHashtable; // char types XmlCharType xmlCharType = XmlCharType.Instance; // // Constants and constant tables // const int NamespaceStackInitialSize = 8; #if DEBUG const int MaxNamespacesWalkCount = 3; #else const int MaxNamespacesWalkCount = 16; #endif static string[] stateName = { "Start", "Prolog", "PostDTD", "Element", "Attribute", "Content", "AttrOnly", "Epilog", "Error", "Closed", }; static string[] tokenName = { "PI", "Doctype", "Comment", "CData", "StartElement", "EndElement", "LongEndElement", "StartAttribute", "EndAttribute", "Content", "Base64", "RawData", "Whitespace", "Empty" }; static readonly State[] stateTableDefault = { // State.Start State.Prolog State.PostDTD State.Element State.Attribute State.Content State.AttrOnly State.Epilog // /* Token.PI */ State.Prolog, State.Prolog, State.PostDTD, State.Content, State.Content, State.Content, State.Error, State.Epilog, /* Token.Doctype */ State.PostDTD, State.PostDTD, State.Error, State.Error, State.Error, State.Error, State.Error, State.Error, /* Token.Comment */ State.Prolog, State.Prolog, State.PostDTD, State.Content, State.Content, State.Content, State.Error, State.Epilog, /* Token.CData */ State.Content, State.Content, State.Error, State.Content, State.Content, State.Content, State.Error, State.Epilog, /* Token.StartElement */ State.Element, State.Element, State.Element, State.Element, State.Element, State.Element, State.Error, State.Element, /* Token.EndElement */ State.Error, State.Error, State.Error, State.Content, State.Content, State.Content, State.Error, 
State.Error, /* Token.LongEndElement */ State.Error, State.Error, State.Error, State.Content, State.Content, State.Content, State.Error, State.Error, /* Token.StartAttribute */ State.AttrOnly, State.Error, State.Error, State.Attribute, State.Attribute, State.Error, State.Error, State.Error, /* Token.EndAttribute */ State.Error, State.Error, State.Error, State.Error, State.Element, State.Error, State.Epilog, State.Error, /* Token.Content */ State.Content, State.Content, State.Error, State.Content, State.Attribute, State.Content, State.Attribute, State.Epilog, /* Token.Base64 */ State.Content, State.Content, State.Error, State.Content, State.Attribute, State.Content, State.Attribute, State.Epilog, /* Token.RawData */ State.Prolog, State.Prolog, State.PostDTD, State.Content, State.Attribute, State.Content, State.Attribute, State.Epilog, /* Token.Whitespace */ State.Prolog, State.Prolog, State.PostDTD, State.Content, State.Attribute, State.Content, State.Attribute, State.Epilog, }; static readonly State[] stateTableDocument = { // State.Start State.Prolog State.PostDTD State.Element State.Attribute State.Content State.AttrOnly State.Epilog // /* Token.PI */ State.Error, State.Prolog, State.PostDTD, State.Content, State.Content, State.Content, State.Error, State.Epilog, /* Token.Doctype */ State.Error, State.PostDTD, State.Error, State.Error, State.Error, State.Error, State.Error, State.Error, /* Token.Comment */ State.Error, State.Prolog, State.PostDTD, State.Content, State.Content, State.Content, State.Error, State.Epilog, /* Token.CData */ State.Error, State.Error, State.Error, State.Content, State.Content, State.Content, State.Error, State.Error, /* Token.StartElement */ State.Error, State.Element, State.Element, State.Element, State.Element, State.Element, State.Error, State.Error, /* Token.EndElement */ State.Error, State.Error, State.Error, State.Content, State.Content, State.Content, State.Error, State.Error, /* Token.LongEndElement */ State.Error, State.Error, 
State.Error, State.Content, State.Content, State.Content, State.Error, State.Error, /* Token.StartAttribute */ State.Error, State.Error, State.Error, State.Attribute, State.Attribute, State.Error, State.Error, State.Error, /* Token.EndAttribute */ State.Error, State.Error, State.Error, State.Error, State.Element, State.Error, State.Error, State.Error, /* Token.Content */ State.Error, State.Error, State.Error, State.Content, State.Attribute, State.Content, State.Error, State.Error, /* Token.Base64 */ State.Error, State.Error, State.Error, State.Content, State.Attribute, State.Content, State.Error, State.Error, /* Token.RawData */ State.Error, State.Prolog, State.PostDTD, State.Content, State.Attribute, State.Content, State.Error, State.Epilog, /* Token.Whitespace */ State.Error, State.Prolog, State.PostDTD, State.Content, State.Attribute, State.Content, State.Error, State.Epilog, }; // // Constructors // internal XmlTextWriter() { namespaces = true; formatting = Formatting.None; indentation = 2; indentChar = ' '; // namespaces nsStack = new Namespace[NamespaceStackInitialSize]; nsTop = -1; // element stack stack = new TagInfo[10]; top = 0;// 0 is an empty sentanial element stack[top].Init(-1); quoteChar = '"'; stateTable = stateTableDefault; currentState = State.Start; lastToken = Token.Empty; } // Creates an instance of the XmlTextWriter class using the specified stream. public XmlTextWriter(Stream w, Encoding encoding) : this() { this.encoding = encoding; if (encoding != null) textWriter = new StreamWriter(w, encoding); else textWriter = new StreamWriter(w); xmlEncoder = new XmlTextEncoder(textWriter); xmlEncoder.QuoteChar = this.quoteChar; } // Creates an instance of the XmlTextWriter class using the specified TextWriter. public XmlTextWriter(TextWriter w) : this() { textWriter = w; encoding = w.Encoding; xmlEncoder = new XmlTextEncoder(w); xmlEncoder.QuoteChar = this.quoteChar; } // // XmlTextWriter properties // // Gets the XmlTextWriter base stream. 
public Stream BaseStream { get { StreamWriter streamWriter = textWriter as StreamWriter; return (streamWriter == null ? null : streamWriter.BaseStream); } } // Gets or sets a value indicating whether to do namespace support. public bool Namespaces { get { return this.namespaces; } set { if (this.currentState != State.Start) throw new InvalidOperationException(SR.Xml_NotInWriteState); this.namespaces = value; } } // Indicates how the output is formatted. public Formatting Formatting { get { return this.formatting; } set { this.formatting = value; this.indented = value == Formatting.Indented; } } // Gets or sets how many IndentChars to write for each level in the hierarchy when Formatting is set to "Indented". public int Indentation { get { return this.indentation; } set { if (value < 0) throw new ArgumentException(SR.Xml_InvalidIndentation); this.indentation = value; } } // Gets or sets which character to use for indenting when Formatting is set to "Indented". public char IndentChar { get { return this.indentChar; } set { this.indentChar = value; } } // Gets or sets which character to use to quote attribute values. public char QuoteChar { get { return this.quoteChar; } set { if (value != '"' && value != '\'') { throw new ArgumentException(SR.Xml_InvalidQuote); } this.quoteChar = value; this.xmlEncoder.QuoteChar = value; } } // // XmlWriter implementation // // Writes out the XML declaration with the version "1.0". public override void WriteStartDocument() { StartDocument(-1); } // Writes out the XML declaration with the version "1.0" and the standalone attribute. public override void WriteStartDocument(bool standalone) { StartDocument(standalone ? 1 : 0); } // Closes any open elements or attributes and puts the writer back in the Start state. 
public override void WriteEndDocument() { try { AutoCompleteAll(); if (this.currentState != State.Epilog) { if (this.currentState == State.Closed) { throw new ArgumentException(SR.Xml_ClosedOrError); } else { throw new ArgumentException(SR.Xml_NoRoot); } } this.stateTable = stateTableDefault; this.currentState = State.Start; this.lastToken = Token.Empty; } catch { currentState = State.Error; throw; } } // Writes out the DOCTYPE declaration with the specified name and optional attributes. public override void WriteDocType(string name, string pubid, string sysid, string subset) { try { ValidateName(name, false); AutoComplete(Token.Doctype); textWriter.Write("<!DOCTYPE "); textWriter.Write(name); if (pubid != null) { textWriter.Write(" PUBLIC " + quoteChar); textWriter.Write(pubid); textWriter.Write(quoteChar + " " + quoteChar); textWriter.Write(sysid); textWriter.Write(quoteChar); } else if (sysid != null) { textWriter.Write(" SYSTEM " + quoteChar); textWriter.Write(sysid); textWriter.Write(quoteChar); } if (subset != null) { textWriter.Write("["); textWriter.Write(subset); textWriter.Write("]"); } textWriter.Write('>'); } catch { currentState = State.Error; throw; } } // Writes out the specified start tag and associates it with the given namespace and prefix. public override void WriteStartElement(string prefix, string localName, string ns) { try { AutoComplete(Token.StartElement); PushStack(); textWriter.Write('<'); if (this.namespaces) { // Propagate default namespace and mix model down the stack. 
stack[top].defaultNs = stack[top - 1].defaultNs; if (stack[top - 1].defaultNsState != NamespaceState.Uninitialized) stack[top].defaultNsState = NamespaceState.NotDeclaredButInScope; stack[top].mixed = stack[top - 1].mixed; if (ns == null) { // use defined prefix if (prefix != null && prefix.Length != 0 && (LookupNamespace(prefix) == -1)) { throw new ArgumentException(SR.Xml_UndefPrefix); } } else { if (prefix == null) { string definedPrefix = FindPrefix(ns); if (definedPrefix != null) { prefix = definedPrefix; } else { PushNamespace(null, ns, false); // new default } } else if (prefix.Length == 0) { PushNamespace(null, ns, false); // new default } else { if (ns.Length == 0) { prefix = null; } VerifyPrefixXml(prefix, ns); PushNamespace(prefix, ns, false); // define } } stack[top].prefix = null; if (prefix != null && prefix.Length != 0) { stack[top].prefix = prefix; textWriter.Write(prefix); textWriter.Write(':'); } } else { if ((ns != null && ns.Length != 0) || (prefix != null && prefix.Length != 0)) { throw new ArgumentException(SR.Xml_NoNamespaces); } } stack[top].name = localName; textWriter.Write(localName); } catch { currentState = State.Error; throw; } } // Closes one element and pops the corresponding namespace scope. public override void WriteEndElement() { InternalWriteEndElement(false); } // Closes one element and pops the corresponding namespace scope. public override void WriteFullEndElement() { InternalWriteEndElement(true); } // Writes the start of an attribute. 
public override void WriteStartAttribute(string prefix, string localName, string ns) { try { AutoComplete(Token.StartAttribute); this.specialAttr = SpecialAttr.None; if (this.namespaces) { if (prefix != null && prefix.Length == 0) { prefix = null; } if (ns == XmlConst.ReservedNsXmlNs && prefix == null && localName != "xmlns") { prefix = "xmlns"; } if (prefix == "xml") { if (localName == "lang") { this.specialAttr = SpecialAttr.XmlLang; } else if (localName == "space") { this.specialAttr = SpecialAttr.XmlSpace; } } else if (prefix == "xmlns") { if (XmlConst.ReservedNsXmlNs != ns && ns != null) { throw new ArgumentException(SR.Xml_XmlnsBelongsToReservedNs); } if (localName == null || localName.Length == 0) { localName = prefix; prefix = null; this.prefixForXmlNs = null; } else { this.prefixForXmlNs = localName; } this.specialAttr = SpecialAttr.XmlNs; } else if (prefix == null && localName == "xmlns") { if (XmlConst.ReservedNsXmlNs != ns && ns != null) { // add the below line back in when DOM is fixed throw new ArgumentException(SR.Xml_XmlnsBelongsToReservedNs); } this.specialAttr = SpecialAttr.XmlNs; this.prefixForXmlNs = null; } else { if (ns == null) { // use defined prefix if (prefix != null && (LookupNamespace(prefix) == -1)) { throw new ArgumentException(SR.Xml_UndefPrefix); } } else if (ns.Length == 0) { // empty namespace require null prefix prefix = string.Empty; } else { // ns.Length != 0 VerifyPrefixXml(prefix, ns); if (prefix != null && LookupNamespaceInCurrentScope(prefix) != -1) { prefix = null; } // Now verify prefix validity string definedPrefix = FindPrefix(ns); if (definedPrefix != null && (prefix == null || prefix == definedPrefix)) { prefix = definedPrefix; } else { if (prefix == null) { prefix = GeneratePrefix(); // need a prefix if } PushNamespace(prefix, ns, false); } } } if (prefix != null && prefix.Length != 0) { textWriter.Write(prefix); textWriter.Write(':'); } } else { if ((ns != null && ns.Length != 0) || (prefix != null && prefix.Length 
!= 0)) { throw new ArgumentException(SR.Xml_NoNamespaces); } if (localName == "xml:lang") { this.specialAttr = SpecialAttr.XmlLang; } else if (localName == "xml:space") { this.specialAttr = SpecialAttr.XmlSpace; } } xmlEncoder.StartAttribute(this.specialAttr != SpecialAttr.None); textWriter.Write(localName); textWriter.Write('='); if (this.curQuoteChar != this.quoteChar) { this.curQuoteChar = this.quoteChar; xmlEncoder.QuoteChar = this.quoteChar; } textWriter.Write(this.curQuoteChar); } catch { currentState = State.Error; throw; } } // Closes the attribute opened by WriteStartAttribute. public override void WriteEndAttribute() { try { AutoComplete(Token.EndAttribute); } catch { currentState = State.Error; throw; } } // Writes out a &lt;![CDATA[...]]&gt; block containing the specified text. public override void WriteCData(string text) { try { AutoComplete(Token.CData); if (null != text && text.IndexOf("]]>", StringComparison.Ordinal) >= 0) { throw new ArgumentException(SR.Xml_InvalidCDataChars); } textWriter.Write("<![CDATA["); if (null != text) { xmlEncoder.WriteRawWithSurrogateChecking(text); } textWriter.Write("]]>"); } catch { currentState = State.Error; throw; } } // Writes out a comment <!--...--> containing the specified text. 
// Writes a comment. Rejects text containing "--" or ending in '-', both of
// which would produce malformed comment markup.
public override void WriteComment(string text) {
    try {
        if (null != text && (text.IndexOf("--", StringComparison.Ordinal) >= 0 || (text.Length != 0 && text[text.Length - 1] == '-'))) {
            throw new ArgumentException(SR.Xml_InvalidCommentChars);
        }
        AutoComplete(Token.Comment);
        textWriter.Write("<!--");
        if (null != text) {
            xmlEncoder.WriteRawWithSurrogateChecking(text);
        }
        textWriter.Write("-->");
    }
    catch {
        currentState = State.Error;
        throw;
    }
}

// Writes out a processing instruction with a space between the name and text as follows: <?name text?>
// Rejects "?>" inside the text and a duplicate "xml" declaration in document mode.
public override void WriteProcessingInstruction(string name, string text) {
    try {
        if (null != text && text.IndexOf("?>", StringComparison.Ordinal) >= 0) {
            throw new ArgumentException(SR.Xml_InvalidPiChars);
        }
        if (0 == String.Compare(name, "xml", StringComparison.OrdinalIgnoreCase) && this.stateTable == stateTableDocument) {
            throw new ArgumentException(SR.Xml_DupXmlDecl);
        }
        AutoComplete(Token.PI);
        InternalWriteProcessingInstruction(name, text);
    }
    catch {
        currentState = State.Error;
        throw;
    }
}

// Writes out an entity reference as follows: "&"+name+";".
public override void WriteEntityRef(string name) {
    try {
        ValidateName(name, false);
        AutoComplete(Token.Content);
        xmlEncoder.WriteEntityRef(name);
    }
    catch {
        currentState = State.Error;
        throw;
    }
}

// Forces the generation of a character entity for the specified Unicode character value.
public override void WriteCharEntity(char ch) {
    try {
        AutoComplete(Token.Content);
        xmlEncoder.WriteCharEntity(ch);
    }
    catch {
        currentState = State.Error;
        throw;
    }
}

// Writes out the given whitespace; null is treated as empty. Non-whitespace
// characters are rejected.
public override void WriteWhitespace(string ws) {
    try {
        if (null == ws) {
            ws = String.Empty;
        }
        if (!xmlCharType.IsOnlyWhitespace(ws)) {
            throw new ArgumentException(SR.Xml_NonWhitespace);
        }
        AutoComplete(Token.Whitespace);
        xmlEncoder.Write(ws);
    }
    catch {
        currentState = State.Error;
        throw;
    }
}

// Writes out the specified text content. Null or empty text is a no-op and does
// not advance the state machine.
public override void WriteString(string text) {
    try {
        if (null != text && text.Length != 0) {
            AutoComplete(Token.Content);
            xmlEncoder.Write(text);
        }
    }
    catch {
        currentState = State.Error;
        throw;
    }
}

// Writes out the specified surrogate pair as a character entity.
public override void WriteSurrogateCharEntity(char lowChar, char highChar) {
    try {
        AutoComplete(Token.Content);
        xmlEncoder.WriteSurrogateCharEntity(lowChar, highChar);
    }
    catch {
        currentState = State.Error;
        throw;
    }
}

// Writes out the specified text content from a character buffer slice.
public override void WriteChars(Char[] buffer, int index, int count) {
    try {
        AutoComplete(Token.Content);
        xmlEncoder.Write(buffer, index, count);
    }
    catch {
        currentState = State.Error;
        throw;
    }
}

// Writes raw markup from the specified character buffer.
public override void WriteRaw(Char[] buffer, int index, int count) {
    try {
        AutoComplete(Token.RawData);
        xmlEncoder.WriteRaw(buffer, index, count);
    }
    catch {
        currentState = State.Error;
        throw;
    }
}

// Writes raw markup from the specified character string.
public override void WriteRaw(String data) {
    try {
        AutoComplete(Token.RawData);
        xmlEncoder.WriteRawWithSurrogateChecking(data);
    }
    catch {
        currentState = State.Error;
        throw;
    }
}

// Encodes the specified binary bytes as base64 and writes out the resulting text.
// The 'flush' flag keeps consecutive WriteBase64 calls in one logical token run.
public override void WriteBase64(byte[] buffer, int index, int count) {
    try {
        if (!this.flush) {
            AutoComplete(Token.Base64);
        }
        this.flush = true;
        // No need for us to explicitly validate the args. The StreamWriter will do
        // it for us.
        if (null == this.base64Encoder) {
            // Lazily created; reused for the lifetime of the writer.
            this.base64Encoder = new XmlTextWriterBase64Encoder(xmlEncoder);
        }
        // Encode will call WriteRaw to write out the encoded characters
        this.base64Encoder.Encode(buffer, index, count);
    }
    catch {
        currentState = State.Error;
        throw;
    }
}

// Encodes the specified binary bytes as binhex and writes out the resulting text.
// Encodes the given bytes as binhex text content via BinHexEncoder.
public override void WriteBinHex(byte[] buffer, int index, int count) {
    try {
        AutoComplete(Token.Content);
        BinHexEncoder.Encode(buffer, index, count, this);
    }
    catch {
        currentState = State.Error;
        throw;
    }
}

// Returns the state of the XmlWriter, mapping the internal State enum onto the
// public WriteState values.
public override WriteState WriteState {
    get {
        switch (this.currentState) {
            case State.Start:
                return WriteState.Start;
            case State.Prolog:
            case State.PostDTD:
                return WriteState.Prolog;
            case State.Element:
                return WriteState.Element;
            case State.Attribute:
            case State.AttrOnly:
                return WriteState.Attribute;
            case State.Content:
            case State.Epilog:
                return WriteState.Content;
            case State.Error:
                return WriteState.Error;
            case State.Closed:
                return WriteState.Closed;
            default:
                Debug.Assert(false);
                return WriteState.Error;
        }
    }
}

// Disposes the XmlWriter and the underlying stream/TextWriter. Attempts to
// close any open elements first; errors during that auto-completion are
// deliberately ignored so Dispose never throws from cleanup.
protected override void Dispose(bool disposing) {
    if (disposing && this.currentState != State.Closed) {
        try {
            AutoCompleteAll();
        }
        catch {
            // never fail
        }
        finally {
            this.currentState = State.Closed;
            textWriter.Dispose();
        }
    }
    base.Dispose(disposing);
}

// Flushes whatever is in the buffer to the underlying stream/TextWriter and flushes the underlying stream/TextWriter.
public override void Flush() {
    textWriter.Flush();
}

// Writes out the specified name, ensuring it is a valid Name according to the XML specification
// (http://www.w3.org/TR/1998/REC-xml-19980210#NT-Name
public override void WriteName(string name) {
    try {
        AutoComplete(Token.Content);
        InternalWriteName(name, false);
    }
    catch {
        currentState = State.Error;
        throw;
    }
}

// Writes out the specified namespace-qualified name by looking up the prefix that is in scope for the given namespace.
// Outside of attribute content an unbound namespace is an error; inside an
// attribute a fresh prefix is generated and declared instead.
public override void WriteQualifiedName(string localName, string ns) {
    try {
        AutoComplete(Token.Content);
        if (this.namespaces) {
            if (ns != null && ns.Length != 0 && ns != stack[top].defaultNs) {
                string prefix = FindPrefix(ns);
                if (prefix == null) {
                    if (this.currentState != State.Attribute) {
                        throw new ArgumentException(SR.Format(SR.Xml_UndefNamespace, ns));
                    }
                    prefix = GeneratePrefix(); // need a prefix if
                    PushNamespace(prefix, ns, false);
                }
                if (prefix.Length != 0) {
                    InternalWriteName(prefix, true);
                    textWriter.Write(':');
                }
            }
        }
        else if (ns != null && ns.Length != 0) {
            throw new ArgumentException(SR.Xml_NoNamespaces);
        }
        InternalWriteName(localName, true);
    }
    catch {
        currentState = State.Error;
        throw;
    }
}

// Returns the closest prefix defined in the current namespace scope for the specified namespace URI.
// The default namespace maps to the empty-string prefix.
public override string LookupPrefix(string ns) {
    if (ns == null || ns.Length == 0) {
        throw new ArgumentException(SR.Xml_EmptyName);
    }
    string s = FindPrefix(ns);
    if (s == null && ns == stack[top].defaultNs) {
        s = string.Empty;
    }
    return s;
}

// Gets an XmlSpace representing the current xml:space scope, walking the
// element stack outward until an explicit value is found.
public override XmlSpace XmlSpace {
    get {
        for (int i = top; i > 0; i--) {
            XmlSpace xs = stack[i].xmlSpace;
            if (xs != XmlSpace.None)
                return xs;
        }
        return XmlSpace.None;
    }
}

// Gets the current xml:lang scope, walking the element stack outward.
public override string XmlLang {
    get {
        for (int i = top; i > 0; i--) {
            String xlang = stack[i].xmlLang;
            if (xlang != null)
                return xlang;
        }
        return null;
    }
}

// Writes out the specified name, ensuring it is a valid NmToken
// according to the XML specification (http://www.w3.org/TR/1998/REC-xml-19980210#NT-Name).
// Writes the given NmToken after validating it (no namespace handling).
public override void WriteNmToken(string name) {
    try {
        AutoComplete(Token.Content);
        if (name == null || name.Length == 0) {
            throw new ArgumentException(SR.Xml_EmptyName);
        }
        if (!ValidateNames.IsNmtokenNoNamespaces(name)) {
            throw new ArgumentException(SR.Format(SR.Xml_InvalidNameChars, name));
        }
        textWriter.Write(name);
    }
    catch {
        currentState = State.Error;
        throw;
    }
}

//
// Private implementation methods
//
// Emits the XML declaration "<?xml version=... ?>"; standalone < 0 omits the
// standalone pseudo-attribute, 0 writes "no", positive writes "yes". Switches
// the writer into document-mode state transitions.
void StartDocument(int standalone) {
    try {
        if (this.currentState != State.Start) {
            throw new InvalidOperationException(SR.Xml_NotTheFirst);
        }
        this.stateTable = stateTableDocument;
        this.currentState = State.Prolog;
        StringBuilder bufBld = new StringBuilder(128);
        bufBld.Append("version=" + quoteChar + "1.0" + quoteChar);
        if (this.encoding != null) {
            bufBld.Append(" encoding=");
            bufBld.Append(quoteChar);
            bufBld.Append(this.encoding.WebName);
            bufBld.Append(quoteChar);
        }
        if (standalone >= 0) {
            bufBld.Append(" standalone=");
            bufBld.Append(quoteChar);
            bufBld.Append(standalone == 0 ? "no" : "yes");
            bufBld.Append(quoteChar);
        }
        InternalWriteProcessingInstruction("xml", bufBld.ToString());
    }
    catch {
        currentState = State.Error;
        throw;
    }
}

// Core state machine: validates that 'token' is legal in the current state
// (via the 8-column state table), emits any markup needed to leave the current
// state (closing attribute quotes, finishing start tags, flushing encoders,
// indenting), then records the new state and last token.
void AutoComplete(Token token) {
    if (this.currentState == State.Closed) {
        throw new InvalidOperationException(SR.Xml_Closed);
    }
    else if (this.currentState == State.Error) {
        throw new InvalidOperationException(SR.Format(SR.Xml_WrongToken, tokenName[(int)token], stateName[(int)State.Error]));
    }
    State newState = this.stateTable[(int)token * 8 + (int)this.currentState];
    if (newState == State.Error) {
        throw new InvalidOperationException(SR.Format(SR.Xml_WrongToken, tokenName[(int)token], stateName[(int)this.currentState]));
    }
    switch (token) {
        case Token.Doctype:
            if (this.indented && this.currentState != State.Start) {
                Indent(false);
            }
            break;
        case Token.StartElement:
        case Token.Comment:
        case Token.PI:
        case Token.CData:
            if (this.currentState == State.Attribute) {
                WriteEndAttributeQuote();
                WriteEndStartTag(false);
            }
            else if (this.currentState == State.Element) {
                WriteEndStartTag(false);
            }
            // CDATA marks the element as mixed content and suppresses indenting.
            if (token == Token.CData) {
                stack[top].mixed = true;
            }
            else if (this.indented && this.currentState != State.Start) {
                Indent(false);
            }
            break;
        case Token.EndElement:
        case Token.LongEndElement:
            if (this.flush) {
                FlushEncoders();
            }
            if (this.currentState == State.Attribute) {
                WriteEndAttributeQuote();
            }
            if (this.currentState == State.Content) {
                // Element has content: always needs the full </name> form.
                token = Token.LongEndElement;
            }
            else {
                WriteEndStartTag(token == Token.EndElement);
            }
            if (stateTableDocument == this.stateTable && top == 1) {
                newState = State.Epilog;
            }
            break;
        case Token.StartAttribute:
            if (this.flush) {
                FlushEncoders();
            }
            if (this.currentState == State.Attribute) {
                WriteEndAttributeQuote();
                textWriter.Write(' ');
            }
            else if (this.currentState == State.Element) {
                textWriter.Write(' ');
            }
            break;
        case Token.EndAttribute:
            if (this.flush) {
                FlushEncoders();
            }
            WriteEndAttributeQuote();
            break;
        case Token.Whitespace:
        case Token.Content:
        case Token.RawData:
        case Token.Base64:
            // Base64 keeps its encoder open; everything else flushes it.
            if (token != Token.Base64 && this.flush) {
                FlushEncoders();
            }
            if (this.currentState == State.Element && this.lastToken != Token.Content) {
                WriteEndStartTag(false);
            }
            if (newState == State.Content) {
                stack[top].mixed = true;
            }
            break;
        default:
            throw new InvalidOperationException(SR.Xml_InvalidOperation);
    }
    this.currentState = newState;
    this.lastToken = token;
}

// Closes every open element (used by Dispose).
void AutoCompleteAll() {
    if (this.flush) {
        FlushEncoders();
    }
    while (top > 0) {
        WriteEndElement();
    }
}

// Writes an end tag for the element on top of the stack; longFormat selects
// </name> over the empty-element short form. Pops the element's namespace
// declarations from the scope stack.
void InternalWriteEndElement(bool longFormat) {
    try {
        if (top <= 0) {
            throw new InvalidOperationException(SR.Xml_NoStartTag);
        }
        // if we are in the element, we need to close it.
        AutoComplete(longFormat ? Token.LongEndElement : Token.EndElement);
        if (this.lastToken == Token.LongEndElement) {
            if (this.indented) {
                Indent(true);
            }
            textWriter.Write('<');
            textWriter.Write('/');
            if (this.namespaces && stack[top].prefix != null) {
                textWriter.Write(stack[top].prefix);
                textWriter.Write(':');
            }
            textWriter.Write(stack[top].name);
            textWriter.Write('>');
        }
        // pop namespaces
        int prevNsTop = stack[top].prevNsTop;
        if (useNsHashtable && prevNsTop < nsTop) {
            PopNamespaces(prevNsTop + 1, nsTop);
        }
        nsTop = prevNsTop;
        top--;
    }
    catch {
        currentState = State.Error;
        throw;
    }
}

// Finishes the current start tag: writes any pending xmlns declarations for
// this element, then ">" (or " />" when empty is true).
void WriteEndStartTag(bool empty) {
    xmlEncoder.StartAttribute(false);
    for (int i = nsTop; i > stack[top].prevNsTop; i--) {
        if (!nsStack[i].declared) {
            textWriter.Write(" xmlns");
            textWriter.Write(':');
            textWriter.Write(nsStack[i].prefix);
            textWriter.Write('=');
            textWriter.Write(this.quoteChar);
            xmlEncoder.Write(nsStack[i].ns);
            textWriter.Write(this.quoteChar);
        }
    }
    // Default
    if ((stack[top].defaultNs != stack[top - 1].defaultNs) && (stack[top].defaultNsState == NamespaceState.DeclaredButNotWrittenOut)) {
        textWriter.Write(" xmlns");
        textWriter.Write('=');
        textWriter.Write(this.quoteChar);
        xmlEncoder.Write(stack[top].defaultNs);
        textWriter.Write(this.quoteChar);
        stack[top].defaultNsState = NamespaceState.DeclaredAndWrittenOut;
    }
    xmlEncoder.EndAttribute();
    if (empty) {
        textWriter.Write(" /");
    }
    textWriter.Write('>');
}

// Writes the closing quote of the current attribute, first applying any
// special-attribute side effects (xml:lang / xml:space / xmlns).
void WriteEndAttributeQuote() {
    if (this.specialAttr != SpecialAttr.None) {
        // Ok, now to handle xmlspace, etc.
        HandleSpecialAttribute();
    }
    xmlEncoder.EndAttribute();
    textWriter.Write(this.curQuoteChar);
}

// pretty printing: writes a newline plus indentation for the current depth;
// suppressed inside mixed-content elements.
void Indent(bool beforeEndElement) {
    if (top == 0) {
        textWriter.WriteLine();
    }
    else if (!stack[top].mixed) {
        textWriter.WriteLine();
        int i = beforeEndElement ? top - 1 : top;
        for (i *= this.indentation; i > 0; i--) {
            textWriter.Write(this.indentChar);
        }
    }
}

// pushes new namespace scope, and returns generated prefix, if one
// was needed to resolve conflicts.
// Records a prefix->namespace binding for the current element scope. A null
// prefix targets the default namespace; 'declared' marks bindings that the
// caller has already written out (vs. ones WriteEndStartTag must still emit).
void PushNamespace(string prefix, string ns, bool declared) {
    if (XmlConst.ReservedNsXmlNs == ns) {
        throw new ArgumentException(SR.Xml_CanNotBindToReservedNamespace);
    }
    if (prefix == null) {
        switch (stack[top].defaultNsState) {
            case NamespaceState.DeclaredButNotWrittenOut:
                Debug.Assert(declared == true, "Unexpected situation!!");
                // the first namespace that the user gave us is what we
                // like to keep.
                break;
            case NamespaceState.Uninitialized:
            case NamespaceState.NotDeclaredButInScope:
                // we now got a brand new namespace that we need to remember
                stack[top].defaultNs = ns;
                break;
            default:
                Debug.Assert(false, "Should have never come here");
                return;
        }
        stack[top].defaultNsState = (declared ? NamespaceState.DeclaredAndWrittenOut : NamespaceState.DeclaredButNotWrittenOut);
    }
    else {
        if (prefix.Length != 0 && ns.Length == 0) {
            throw new ArgumentException(SR.Xml_PrefixForEmptyNs);
        }
        int existingNsIndex = LookupNamespace(prefix);
        if (existingNsIndex != -1 && nsStack[existingNsIndex].ns == ns) {
            // it is already in scope.
            if (declared) {
                nsStack[existingNsIndex].declared = true;
            }
        }
        else {
            // see if prefix conflicts for the current element
            if (declared) {
                if (existingNsIndex != -1 && existingNsIndex > stack[top].prevNsTop) {
                    nsStack[existingNsIndex].declared = true; // old one is silenced now
                }
            }
            AddNamespace(prefix, ns, declared);
        }
    }
}

// Appends a binding to the namespace stack, growing it as needed. Switches to
// a hashtable-backed lookup once MaxNamespacesWalkCount entries accumulate.
void AddNamespace(string prefix, string ns, bool declared) {
    int nsIndex = ++nsTop;
    if (nsIndex == nsStack.Length) {
        Namespace[] newStack = new Namespace[nsIndex * 2];
        Array.Copy(nsStack, newStack, nsIndex);
        nsStack = newStack;
    }
    nsStack[nsIndex].Set(prefix, ns, declared);
    if (useNsHashtable) {
        AddToNamespaceHashtable(nsIndex);
    }
    else if (nsIndex == MaxNamespacesWalkCount) {
        // add all
        nsHashtable = new Dictionary<string, int>(new SecureStringHasher());
        for (int i = 0; i <= nsIndex; i++) {
            AddToNamespaceHashtable(i);
        }
        useNsHashtable = true;
    }
}

// Maps prefix -> stack index in the hashtable, chaining any shadowed binding
// through prevNsIndex so it can be restored on pop.
void AddToNamespaceHashtable(int namespaceIndex) {
    string prefix = nsStack[namespaceIndex].prefix;
    int existingNsIndex;
    if (nsHashtable.TryGetValue(prefix, out existingNsIndex)) {
        nsStack[namespaceIndex].prevNsIndex = existingNsIndex;
    }
    nsHashtable[prefix] = namespaceIndex;
}

// Removes bindings [indexFrom..indexTo] from the hashtable, restoring any
// binding each one shadowed.
private void PopNamespaces(int indexFrom, int indexTo) {
    Debug.Assert(useNsHashtable);
    for (int i = indexTo; i >= indexFrom; i--) {
        Debug.Assert(nsHashtable.ContainsKey(nsStack[i].prefix));
        if (nsStack[i].prevNsIndex == -1) {
            nsHashtable.Remove(nsStack[i].prefix);
        }
        else {
            nsHashtable[nsStack[i].prefix] = nsStack[i].prevNsIndex;
        }
    }
}

// Generates a prefix of the form "d{depth}p{counter}" unique within the
// current element.
string GeneratePrefix() {
    int temp = stack[top].prefixCount++ + 1;
    return "d" + top.ToString("d", CultureInfo.InvariantCulture)
        + "p" + temp.ToString("d", CultureInfo.InvariantCulture);
}

// Emits "<?name text?>" after validating the PI target name.
void InternalWriteProcessingInstruction(string name, string text) {
    textWriter.Write("<?");
    ValidateName(name, false);
    textWriter.Write(name);
    textWriter.Write(' ');
    if (null != text) {
        xmlEncoder.WriteRawWithSurrogateChecking(text);
    }
    textWriter.Write("?>");
}

// Returns the index of the innermost binding for 'prefix', or -1 if none.
int LookupNamespace(string prefix) {
    if (useNsHashtable) {
        int nsIndex;
        if (nsHashtable.TryGetValue(prefix, out nsIndex)) {
            return nsIndex;
        }
    }
    else {
        for (int i = nsTop; i >= 0; i--) {
            if (nsStack[i].prefix == prefix) {
                return i;
            }
        }
    }
    return -1;
}

// Like LookupNamespace but only considers bindings made by the current
// element (indexes above stack[top].prevNsTop).
int LookupNamespaceInCurrentScope(string prefix) {
    if (useNsHashtable) {
        int nsIndex;
        if (nsHashtable.TryGetValue(prefix, out nsIndex)) {
            if (nsIndex > stack[top].prevNsTop) {
                return nsIndex;
            }
        }
    }
    else {
        for (int i = nsTop; i > stack[top].prevNsTop; i--) {
            if (nsStack[i].prefix == prefix) {
                return i;
            }
        }
    }
    return -1;
}

// Returns the innermost prefix bound to 'ns' that has not been shadowed by a
// more recent binding of the same prefix; null when no usable prefix exists.
string FindPrefix(string ns) {
    for (int i = nsTop; i >= 0; i--) {
        if (nsStack[i].ns == ns) {
            if (LookupNamespace(nsStack[i].prefix) == i) {
                return nsStack[i].prefix;
            }
        }
    }
    return null;
}

// There are three kind of strings we write out - Name, LocalName and Prefix.
// Both LocalName and Prefix can be represented with NCName == false and Name
// can be represented as NCName == true
void InternalWriteName(string name, bool isNCName) {
    ValidateName(name, isNCName);
    textWriter.Write(name);
}

// This method is used for validation of the DOCTYPE, processing instruction and entity names plus names
// written out by the user via WriteName and WriteQualifiedName.
// Unfortunatelly the names of elements and attributes are not validated by the XmlTextWriter.
// Also this method does not check wheather the character after ':' is a valid start name character. It accepts
// all valid name characters at that position. This can't be changed because of backwards compatibility.
private unsafe void ValidateName(string name, bool isNCName) {
    if (name == null || name.Length == 0) {
        throw new ArgumentException(SR.Xml_EmptyName);
    }
    int nameLength = name.Length;
    // Namespaces supported
    if (namespaces) {
        // We can't use ValidateNames.ParseQName here because of backwards compatibility bug we need to preserve.
        // The bug is that the character after ':' is validated only as a NCName characters instead of NCStartName.
        int colonPosition = -1;
        // Parse NCName (may be prefix, may be local name)
        int position = ValidateNames.ParseNCName(name);
    Continue:
        if (position == nameLength) {
            return;
        }
        // we have prefix:localName
        if (name[position] == ':') {
            if (!isNCName) {
                // first colon in qname
                if (colonPosition == -1) {
                    // make sure it is not the first or last characters
                    if (position > 0 && position + 1 < nameLength) {
                        colonPosition = position;
                        // Because of the back-compat bug (described above) parse the rest as Nmtoken
                        position++;
                        position += ValidateNames.ParseNmtoken(name, position);
                        goto Continue;
                    }
                }
            }
        }
    }
    // Namespaces not supported
    else {
        if (ValidateNames.IsNameNoNamespaces(name)) {
            return;
        }
    }
    throw new ArgumentException(SR.Format(SR.Xml_InvalidNameChars, name));
}

// Applies the side effect of a just-completed special attribute: updates the
// xml:lang / xml:space scope on the element stack, or pushes an xmlns binding.
void HandleSpecialAttribute() {
    string value = xmlEncoder.AttributeValue;
    switch (this.specialAttr) {
        case SpecialAttr.XmlLang:
            stack[top].xmlLang = value;
            break;
        case SpecialAttr.XmlSpace:
            // validate XmlSpace attribute
            value = XmlConvertEx.TrimString(value);
            if (value == "default") {
                stack[top].xmlSpace = XmlSpace.Default;
            }
            else if (value == "preserve") {
                stack[top].xmlSpace = XmlSpace.Preserve;
            }
            else {
                throw new ArgumentException(SR.Format(SR.Xml_InvalidXmlSpace, value));
            }
            break;
        case SpecialAttr.XmlNs:
            VerifyPrefixXml(this.prefixForXmlNs, value);
            PushNamespace(this.prefixForXmlNs, value, true);
            break;
    }
}

// Rejects any case-variant of the prefix "xml" bound to anything other than
// the reserved XML namespace.
void VerifyPrefixXml(string prefix, string ns) {
    if (prefix != null && prefix.Length == 3) {
        if (
            (prefix[0] == 'x' || prefix[0] == 'X') &&
            (prefix[1] == 'm' || prefix[1] == 'M') &&
            (prefix[2] == 'l' || prefix[2] == 'L')
        ) {
            if (XmlConst.ReservedNsXml != ns) {
                throw new ArgumentException(SR.Xml_InvalidPrefix);
            }
        }
    }
}

// Grows (if needed) and advances the element TagInfo stack for a new element.
void PushStack() {
    if (top == stack.Length - 1) {
        TagInfo[] na = new TagInfo[stack.Length + 10];
        if (top > 0) Array.Copy(stack, na, top + 1);
        stack = na;
    }
    top++; // Move up stack
    stack[top].Init(nsTop);
}

// Flushes a pending base64 run and clears the 'flush' flag.
void FlushEncoders() {
    if (null != this.base64Encoder) {
        // The Flush will call WriteRaw to write out the rest of the encoded characters
        this.base64Encoder.Flush();
    }
    this.flush = false;
}
}
}
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE file in the project root for more information.

// The native type of Vector3 is struct {float x,y,z} whose size is 12 bytes. RyuJit uses 16-byte
// register or stack location to store a Vector3 variable with the assumptions below. New testcases
// are added to check whether:
//
// - RyuJit correctly generates code and memory layout that matches the native side.
//
// - RyuJIt back-end assumptions about Vector3 types are satisfied.
//
//   - Assumption1: Vector3 type args passed in registers or on stack is rounded to POINTER_SIZE
//     and hence on 64-bit targets it can be read/written as if it were TYP_SIMD16.
//
//   - Assumption2: Vector3 args passed in registers (e.g. unix) or on stack have their upper
//     4-bytes being zero. Similarly Vector3 return type value returned from a method will have
//     its upper 4-bytes zeroed out

using System;
using System.Diagnostics;
using System.Numerics;
using System.Runtime.InteropServices;
using System.Text;

// Two adjacent Vector3 fields; the native counterpart expects 12-byte packing.
public struct DT {
    public Vector3 a;
    public Vector3 b;
};

// Mixed struct: int + nested Vector3 pair + fixed ANSI string + trailing Vector3.
[StructLayout(LayoutKind.Sequential, CharSet = CharSet.Ansi)]
public struct ComplexDT {
    public int iv;
    public DT vecs;
    [MarshalAs(UnmanagedType.ByValTStr, SizeConst=256)]
    public string str;
    public Vector3 v3;
};

class PInvokeTest {
    [DllImport(@"Vector3TestNative", CallingConvention = CallingConvention.StdCall)]
    public static extern int nativeCall_PInvoke_CheckVector3Size();

    [DllImport(@"Vector3TestNative", CallingConvention = CallingConvention.StdCall)]
    public static extern float nativeCall_PInvoke_Vector3Arg(
        int i,
        Vector3 v1,
        [MarshalAs(UnmanagedType.LPStr)] string s,
        Vector3 v2);

    [DllImport(@"Vector3TestNative", CallingConvention = CallingConvention.StdCall)]
    public static extern float nativeCall_PInvoke_Vector3Arg_Unix(
        Vector3 v3f32_xmm0,
        float f32_xmm2,
        float f32_xmm3,
        float f32_xmm4,
        float f32_xmm5,
        float f32_xmm6,
        float f32_xmm7,
        float f32_mem0,
        Vector3 v3f32_mem1,
        float f32_mem2,
        float f32_mem3);

    [DllImport(@"Vector3TestNative", CallingConvention = CallingConvention.StdCall)]
    public static extern float nativeCall_PInvoke_Vector3Arg_Unix2(
        Vector3 v3f32_xmm0,
        float f32_xmm2,
        float f32_xmm3,
        float f32_xmm4,
        float f32_xmm5,
        float f32_xmm6,
        float f32_xmm7,
        float f32_mem0,
        Vector3 v3f32_mem1,
        float f32_mem2,
        float f32_mem3,
        Vector3 v3f32_mem4,
        float f32_mem5);

    [DllImport(@"Vector3TestNative", CallingConvention = CallingConvention.StdCall)]
    public static extern Vector3 nativeCall_PInvoke_Vector3Ret();

    [DllImport(@"Vector3TestNative", CallingConvention = CallingConvention.StdCall)]
    public static extern float nativeCall_PInvoke_Vector3Array(Vector3[] v_array);

    [DllImport(@"Vector3TestNative", CallingConvention = CallingConvention.StdCall)]
    public static extern DT nativeCall_PInvoke_Vector3InStruct(DT d);

    [DllImport(@"Vector3TestNative", CallingConvention = CallingConvention.StdCall)]
    public static extern void nativeCall_PInvoke_Vector3InComplexStruct(ref ComplexDT cdt);

    // Runs all forward P/Invoke Vector3 marshaling scenarios; returns true when
    // every native call produced the expected value.
    public static bool test() {
        // Expected return value is 12 bytes.
        if (nativeCall_PInvoke_CheckVector3Size() != 12) {
            Console.WriteLine("The size of native Vector3 type is not 12 bytes");
            return false;
        }

        // Argument passing test.
        // The native code accesses only 12 bytes for each Vector object.
        {
            int iv = 123;
            Vector3 v1 = new Vector3(1,2,3);
            string str = "abcdefg";
            Vector3 v2 = new Vector3(10,11,12);
            // Expected return value = 1 + 2 + 3 + 10 + 11 + 12 = 39
            if (nativeCall_PInvoke_Vector3Arg(iv, v1, str, v2) != 39) {
                Console.WriteLine("PInvoke Vector3Arg test failed");
                return false;
            }
        }

        // Argument passing test for Unix.
        // Few arguments are passed onto stack.
        {
            Vector3 v1 = new Vector3(1, 2, 3);
            Vector3 v2 = new Vector3(10, 20, 30);
            float f0 = 100, f1 = 101, f2 = 102, f3 = 103, f4 = 104, f5 = 105, f6 = 106, f7 = 107, f8 = 108;
            float sum = nativeCall_PInvoke_Vector3Arg_Unix(
                v1,                         // register
                f0, f1, f2, f3, f4, f5,     // register
                f6, v2,                     // stack
                f7, f8);                    // stack
            if (sum != 1002) {
                Console.WriteLine("PInvoke Vector3Arg_Unix test failed");
                return false;
            }
        }

        // Argument passing test for Unix.
        // Few arguments are passed onto stack.
        {
            Vector3 v1 = new Vector3(1, 2, 3);
            Vector3 v2 = new Vector3(4, 5, 6);
            Vector3 v3 = new Vector3(7, 8, 9);
            float f0 = 100, f1 = 101, f2 = 102, f3 = 103, f4 = 104, f5 = 105, f6 = 106, f7 = 107, f8 = 108, f9 = 109;
            float sum = nativeCall_PInvoke_Vector3Arg_Unix2(
                v1,                         // register
                f0, f1, f2, f3, f4, f5,     // register
                f6, v2,                     // stack
                f7, f8,                     // stack
                v3,                         // stack
                f9);                        // stack
            if (sum != 1090) {
                Console.WriteLine("PInvoke Vector3Arg_Unix2 test failed");
                return false;
            }
        }

        // Return test
        {
            Vector3 ret = nativeCall_PInvoke_Vector3Ret();
            // Expected return value = (1, 2, 3) dot (1, 2, 3) = 14
            float sum = Vector3.Dot(ret, ret);
            if (sum != 14) {
                Console.WriteLine("PInvoke Vector3Ret test failed");
                return false;
            }
        }

        // Array argument test.
        // Both the managed and native code assumes 12 bytes for each element.
        {
            Vector3[] v3_array = new Vector3[2];
            v3_array[0].X = 1;
            v3_array[0].Y = 2;
            v3_array[0].Z = 3;
            v3_array[1].X = 5;
            v3_array[1].Y = 6;
            v3_array[1].Z = 7;
            // Expected return value = 1 + 2 + 3 + 5 + 6 + 7 = 24
            if (nativeCall_PInvoke_Vector3Array(v3_array) != 24) {
                Console.WriteLine("PInvoke Vector3Array test failed");
                return false;
            }
        }

        // Structure pass and return test.
        // Both the managed and native side use 12 bytes for each Vector3 object.
        // Dot product makes sure that the backend assumption 1 and 2 are met.
        {
            DT data = new DT();
            data.a = new Vector3(1,2,3);
            data.b = new Vector3(5,6,7);
            DT ret = nativeCall_PInvoke_Vector3InStruct(data);
            // Expected return value = (2, 3, 4) dot (6, 7, 8) = 12 + 21 + 32 = 65
            float sum = Vector3.Dot(ret.a, ret.b);
            if (sum != 65) {
                Console.WriteLine("PInvoke Vector3InStruct test failed");
                return false;
            }
        }

        // Complex struct test
        // Dot product makes sure that the backend assumption 1 and 2 are met.
        {
            ComplexDT cdt = new ComplexDT();
            cdt.iv = 99;
            cdt.str = "arg_string";
            cdt.vecs.a = new Vector3(1,2,3);
            cdt.vecs.b = new Vector3(5,6,7);
            cdt.v3 = new Vector3(10, 20, 30);
            nativeCall_PInvoke_Vector3InComplexStruct(ref cdt);
            Console.WriteLine(" Managed ival: {0}", cdt.iv);
            Console.WriteLine(" Managed Vector3 v1: ({0} {1} {2})", cdt.vecs.a.X, cdt.vecs.a.Y, cdt.vecs.a.Z);
            Console.WriteLine(" Managed Vector3 v2: ({0} {1} {2})", cdt.vecs.b.X, cdt.vecs.b.Y, cdt.vecs.b.Z);
            Console.WriteLine(" Managed Vector3 v3: ({0} {1} {2})", cdt.v3.X, cdt.v3.Y, cdt.v3.Z);
            Console.WriteLine(" Managed string arg: {0}", cdt.str);
            // (2, 3, 4) dot (6, 7, 8) = 12 + 21 + 32 = 65
            float t0 = Vector3.Dot(cdt.vecs.a, cdt.vecs.b);
            // (6, 7, 8) dot (11, 21, 31) = 66 + 147 + 248 = 461
            float t1 = Vector3.Dot(cdt.vecs.b, cdt.v3);
            // (11, 21, 31) dot (2, 3, 4) = 209
            float t2 = Vector3.Dot(cdt.v3, cdt.vecs.a);
            float sum = t0 + t1 + t2;
            Console.WriteLine(" Managed Sum = {0}", sum);
            if ((sum != 735) || (cdt.iv != 100) || (cdt.str.ToString() != "ret_string")) {
                Console.WriteLine("PInvoke Vector3InStruct test failed");
                return false;
            }
        }

        Console.WriteLine("All PInvoke testcases passed");
        return true;
    }
}

class RPInvokeTest {
    public delegate void CallBackDelegate_RPInvoke_Vector3Arg(
        int i,
        Vector3 v1,
        [MarshalAs(UnmanagedType.LPStr)] string s,
        Vector3 v2);

    public delegate void CallBackDelegate_RPInvoke_Vector3Arg_Unix(
        Vector3 v3f32_xmm0,
        float f32_xmm2,
        float f32_xmm3,
        float f32_xmm4,
        float f32_xmm5,
        float f32_xmm6,
        float f32_xmm7,
        float f32_mem0,
        Vector3 v3f32_mem1,
        float f32_mem2,
        float f32_mem3);

    public delegate void CallBackDelegate_RPInvoke_Vector3Arg_Unix2(
        Vector3 v3f32_xmm0,
        float f32_xmm2,
        float f32_xmm3,
        float f32_xmm4,
        float f32_xmm5,
        float f32_xmm6,
        float f32_xmm7,
        float f32_mem0,
        Vector3 v3f32_mem1,
        float f32_mem2,
        float f32_mem3,
        Vector3 v3f32_mem4,
        float f32_mem5);

    public delegate Vector3 CallBackDelegate_RPInvoke_Vector3Ret();

    public delegate void CallBackDelegate_RPInvoke_Vector3Array(
        [In, MarshalAs(UnmanagedType.LPArray, SizeParamIndex=1)] Vector3[] v,
        int size);

    public delegate void CallBackDelegate_RPInvoke_Vector3InStruct(
        DT v);

    public delegate void CallBackDelegate_RPInvoke_Vector3InComplexStruct(
        ref ComplexDT v);

    [DllImport(@"Vector3TestNative", CallingConvention = CallingConvention.StdCall)]
    public static extern void nativeCall_RPInvoke_Vector3Arg(
        CallBackDelegate_RPInvoke_Vector3Arg callBack);

    [DllImport(@"Vector3TestNative", CallingConvention = CallingConvention.StdCall)]
    public static extern void nativeCall_RPInvoke_Vector3Arg_Unix(
        CallBackDelegate_RPInvoke_Vector3Arg_Unix callBack);

    [DllImport(@"Vector3TestNative", CallingConvention = CallingConvention.StdCall)]
    public static extern void nativeCall_RPInvoke_Vector3Arg_Unix2(
        CallBackDelegate_RPInvoke_Vector3Arg_Unix2 callBack);

    [DllImport(@"Vector3TestNative", CallingConvention = CallingConvention.StdCall)]
    public static extern bool nativeCall_RPInvoke_Vector3Ret(
        CallBackDelegate_RPInvoke_Vector3Ret callBack);

    [DllImport(@"Vector3TestNative", CallingConvention = CallingConvention.StdCall)]
    public static extern void nativeCall_RPInvoke_Vector3Array(
        CallBackDelegate_RPInvoke_Vector3Array callBack, int v);

    [DllImport(@"Vector3TestNative", CallingConvention = CallingConvention.StdCall)]
    public static extern void nativeCall_RPInvoke_Vector3InStruct(
        CallBackDelegate_RPInvoke_Vector3InStruct callBack, int v);

    [DllImport(@"Vector3TestNative", CallingConvention = CallingConvention.StdCall)]
    public static extern bool nativeCall_RPInvoke_Vector3InComplexStruct(
        CallBackDelegate_RPInvoke_Vector3InComplexStruct callBack);

    // Set by each callback; read by the driver to decide pass/fail.
    static bool result = false;
    static float x,y,z;

    // Argument pass test
    // Test if the managed side correctly reads 12-byte Vector3 argument from the native side
    // and meets the backend assumption 1 and 2.
    static void callBack_RPInvoke_Vector3Arg(
        int i,
        Vector3 v1,
        [MarshalAs(UnmanagedType.LPStr)] string s,
        Vector3 v2)
    {
        // sum = (1, 2, 3) dot (1, 2, 3) = 14
        float sum0 = Vector3.Dot(v1, v1);
        // sum = (10, 20, 30) dot (10, 20, 30) = 1400
        float sum1 = Vector3.Dot(v2, v2);
        // sum = (10, 20, 30) dot (1, 2, 3) = 140
        float sum2 = Vector3.Dot(v2, v1);
        Console.WriteLine("callBack_RPInvoke_Vector3Arg:");
        Console.WriteLine(" iVal {0}", i);
        Console.WriteLine(" Sum0,1,2 = {0}, {1}, {2}", sum0, sum1, sum2);
        Console.WriteLine(" str {0}", s);
        result = (sum0 == 14) && (sum1 == 1400) && (sum2 == 140) && (s == "abcdefg") && (i == 123);
    }

    // Argument test for Unix
    // Some arguments are mapped onto stack
    static void callBack_RPInvoke_Vector3Arg_Unix(
        Vector3 v3f32_xmm0,
        float f32_xmm2,
        float f32_xmm3,
        float f32_xmm4,
        float f32_xmm5,
        float f32_xmm6,
        float f32_xmm7,
        float f32_mem0,
        Vector3 v3f32_mem0,
        float f32_mem1,
        float f32_mem2)
    {
        // sum = (1, 2, 3) dot (1, 2, 3) = 14
        float sum0 = Vector3.Dot(v3f32_xmm0, v3f32_xmm0);
        // sum = (10, 20, 30) dot (10, 20, 30) = 1400
        float sum1 = Vector3.Dot(v3f32_mem0, v3f32_mem0);
        // sum = (1, 2, 3) dot (10, 20, 30) = 140
        float sum2 = Vector3.Dot(v3f32_xmm0, v3f32_mem0);
        // sum = 100 + 101 + 102 + 103 + 104 + 105 + 106 + 107 + 108 = 936
        float sum3 = f32_xmm2 + f32_xmm3 + f32_xmm4 + f32_xmm5 + f32_xmm6 + f32_xmm7 + f32_mem0 + f32_mem1 + f32_mem2;
        Console.WriteLine("callBack_RPInvoke_Vector3Arg_Unix:");
        Console.WriteLine(" {0}, {1}, {2}", v3f32_xmm0.X, v3f32_xmm0.Y, v3f32_xmm0.Z);
        Console.WriteLine(" {0}, {1}, {2}", v3f32_mem0.X, v3f32_mem0.Y, v3f32_mem0.Z);
        Console.WriteLine(" Sum0,1,2,3 = {0}, {1}, {2}, {3}", sum0, sum1, sum2, sum3);
        result = (sum0 == 14) && (sum1 == 1400) && (sum2 == 140) && (sum3==936);
    }

    // Argument test for Unix
    // Some arguments are mapped onto stack
    static void callBack_RPInvoke_Vector3Arg_Unix2(
        Vector3 v3f32_xmm0,
        float f32_xmm2,
        float f32_xmm3,
        float f32_xmm4,
        float f32_xmm5,
        float f32_xmm6,
        float f32_xmm7,
        float f32_mem0,
        Vector3 v3f32_mem0,
        float f32_mem1,
        float f32_mem2,
        Vector3 v3f32_mem3,
        float f32_mem4)
    {
        // sum = (1, 2, 3) dot (1, 2, 3) = 14
        float sum0 = Vector3.Dot(v3f32_xmm0, v3f32_xmm0);
        // sum = (4, 5, 6) dot (4, 5, 6) = 77
        float sum1 = Vector3.Dot(v3f32_mem0, v3f32_mem0);
        // sum = (7, 8, 9) dot (7, 8, 9) = 194
        float sum2 = Vector3.Dot(v3f32_mem3, v3f32_mem3);
        // sum = (1, 2, 3) dot (4, 5, 6) = 32
        float sum3 = Vector3.Dot(v3f32_xmm0, v3f32_mem0);
        // sum = (4, 5, 6) dot (7, 8, 9) = 122
        float sum4 = Vector3.Dot(v3f32_mem0, v3f32_mem3);
        // sum = 100 + 101 + 102 + 103 + 104 + 105 + 106 + 107 + 108 + 109 = 1045
        float sum5 = f32_xmm2 + f32_xmm3 + f32_xmm4 + f32_xmm5 + f32_xmm6 + f32_xmm7 + f32_mem0 + f32_mem1 + f32_mem2 + f32_mem4;
        Console.WriteLine("callBack_RPInvoke_Vector3Arg_Unix2:");
        Console.WriteLine(" {0}, {1}, {2}", v3f32_xmm0.X, v3f32_xmm0.Y, v3f32_xmm0.Z);
        Console.WriteLine(" {0}, {1}, {2}", v3f32_mem0.X, v3f32_mem0.Y, v3f32_mem0.Z);
        Console.WriteLine(" {0}, {1}, {2}", v3f32_mem3.X, v3f32_mem3.Y, v3f32_mem3.Z);
        Console.WriteLine(" Sum0,1,2,3,4,5 = {0}, {1}, {2}, {3}, {4}, {5}", sum0, sum1, sum2, sum3, sum4, sum5);
        result = (sum0 == 14) && (sum1 == 77) && (sum2 == 194) && (sum3 == 32) && (sum4 == 122) && (sum5 == 1045);
    }

    // Return test.
    static Vector3 callBack_RPInvoke_Vector3Ret()
    {
        Vector3 tmp = new Vector3(1, 2, 3);
        return tmp;
    }

    // Test if the managed side correctly reads an array of 12-byte Vector3 elements
    // from the native side and meets the backend assumptions.
static void callBack_RPInvoke_Vector3Array(
    [In, MarshalAs(UnmanagedType.LPArray, SizeParamIndex=1)] Vector3[] v,
    int size)
{
    // sum0 = (2,3,4) dot (2,3,4) = 4 + 9 + 16 = 29
    float sum0 = Vector3.Dot(v[0], v[0]);
    // sum1 = (11,21,31) dot (11,21,31) = 121 + 441 + 961 = 1523
    float sum1 = Vector3.Dot(v[1], v[1]);
    // sum2 = (11,21,31) dot (2,3,4) = 22 + 63 + 124 = 209
    float sum2 = Vector3.Dot(v[0], v[1]);

    Console.WriteLine("callBack_RPInvoke_Vector3Array:");
    Console.WriteLine(" Sum0 = {0} Sum1 = {1} Sum2 = {2}", sum0, sum1, sum2);

    result = (sum0 == 29) && (sum1 == 1523) && (sum2 == 209);
}

// Test if the managed side correctly reads 12-byte Vector objects in a struct and
// meet the backend assumptions.
static void callBack_RPInvoke_Vector3InStruct(DT v)
{
    // sum0 = (2,3,4) dot (2,3,4) = 29
    float sum0 = Vector3.Dot(v.a, v.a);
    // sum1 = (11,21,31) dot (11,21,31) = 121 + 441 + 961 = 1523
    float sum1 = Vector3.Dot(v.b, v.b);
    // sum2 = (2,3,4) dot (11,21,31) = 209
    float sum2 = Vector3.Dot(v.a, v.b);

    Console.WriteLine("callBack_RPInvoke_Vector3InStruct:");
    Console.WriteLine(" Sum0 = {0} Sum1 = {1} Sum2 = {2}", sum0, sum1, sum2);

    // BUGFIX: the original wrote "(sum1 == 1523) == (sum2 == 209)". Because == binds
    // tighter than &&, that compared the two boolean sub-results, so the test still
    // passed when sum1 AND sum2 were BOTH wrong. All three checks must hold.
    result = (sum0 == 29) && (sum1 == 1523) && (sum2 == 209);
}

// Complex struct type test: the struct is passed by reference, mutated in place
// (+1 on every scalar/vector component, string replaced), and the native side
// verifies the values written back.
static void callBack_RPInvoke_Vector3InComplexStruct(ref ComplexDT arg)
{
    Console.WriteLine("callBack_RPInvoke_Vector3InComplexStruct");
    Console.WriteLine(" Arg ival: {0}", arg.iv);
    Console.WriteLine(" Arg Vector3 v1: ({0} {1} {2})", arg.vecs.a.X, arg.vecs.a.Y, arg.vecs.a.Z);
    Console.WriteLine(" Arg Vector3 v2: ({0} {1} {2})", arg.vecs.b.X, arg.vecs.b.Y, arg.vecs.b.Z);
    Console.WriteLine(" Arg Vector3 v3: ({0} {1} {2})", arg.v3.X, arg.v3.Y, arg.v3.Z);
    Console.WriteLine(" Arg string arg: {0}", arg.str);

    // Increment every float component so the native side can detect that the
    // managed callback both read and wrote the struct correctly.
    arg.vecs.a.X = arg.vecs.a.X + 1;
    arg.vecs.a.Y = arg.vecs.a.Y + 1;
    arg.vecs.a.Z = arg.vecs.a.Z + 1;
    arg.vecs.b.X = arg.vecs.b.X + 1;
    arg.vecs.b.Y = arg.vecs.b.Y + 1;
    arg.vecs.b.Z = arg.vecs.b.Z + 1;
    arg.v3.X = arg.v3.X + 1;
    arg.v3.Y = arg.v3.Y + 1;
    arg.v3.Z = arg.v3.Z + 1;
    arg.iv = arg.iv + 1;
    arg.str = "ret_string";

    Console.WriteLine(" Return ival: {0}", arg.iv);
    Console.WriteLine(" Return Vector3 v1: ({0} {1} {2})", arg.vecs.a.X, arg.vecs.a.Y, arg.vecs.a.Z);
    Console.WriteLine(" Return Vector3 v2: ({0} {1} {2})", arg.vecs.b.X, arg.vecs.b.Y, arg.vecs.b.Z);
    Console.WriteLine(" Return Vector3 v3: ({0} {1} {2})", arg.v3.X, arg.v3.Y, arg.v3.Z);
    Console.WriteLine(" Return string arg: {0}", arg.str);

    float sum = arg.vecs.a.X + arg.vecs.a.Y + arg.vecs.a.Z
        + arg.vecs.b.X + arg.vecs.b.Y + arg.vecs.b.Z
        + arg.v3.X + arg.v3.Y + arg.v3.Z;
    Console.WriteLine(" Sum of all return float scalar values = {0}", sum);
}

/// <summary>
/// Runs every reverse-P/Invoke Vector3 test case in sequence, stopping at the
/// first failure. Each native call invokes the matching managed callback, which
/// records its verdict in <c>result</c>.
/// </summary>
/// <returns>true when all test cases pass; false on the first failure.</returns>
public static bool test()
{
    // Array/struct tests marshal a fixed number of elements; 1 matches the
    // SizeParamIndex contract used by the native side.
    int size = 1;

    nativeCall_RPInvoke_Vector3Arg(callBack_RPInvoke_Vector3Arg);
    if (!result)
    {
        Console.WriteLine("RPInvoke Vector3Arg test failed");
        return false;
    }

    nativeCall_RPInvoke_Vector3Arg_Unix(callBack_RPInvoke_Vector3Arg_Unix);
    if (!result)
    {
        Console.WriteLine("RPInvoke Vector3Arg_Unix test failed");
        return false;
    }

    nativeCall_RPInvoke_Vector3Arg_Unix2(callBack_RPInvoke_Vector3Arg_Unix2);
    if (!result)
    {
        Console.WriteLine("RPInvoke Vector3Arg_Unix2 test failed");
        return false;
    }

    result = nativeCall_RPInvoke_Vector3Ret(callBack_RPInvoke_Vector3Ret);
    if (!result)
    {
        Console.WriteLine("RPInvoke Vector3Ret test failed");
        return false;
    }

    nativeCall_RPInvoke_Vector3Array(callBack_RPInvoke_Vector3Array, size);
    if (!result)
    {
        Console.WriteLine("RPInvoke Vector3Array test failed");
        return false;
    }

    nativeCall_RPInvoke_Vector3InStruct(callBack_RPInvoke_Vector3InStruct, size);
    if (!result)
    {
        Console.WriteLine("RPInvoke Vector3InStruct test failed");
        return false;
    }

    result = nativeCall_RPInvoke_Vector3InComplexStruct(callBack_RPInvoke_Vector3InComplexStruct);
    if (!result)
    {
        Console.WriteLine("RPInvoke Vector3InComplexStruct test failed");
        return false;
    }

    Console.WriteLine("All RPInvoke testcases passed");
    return true;
}
}

class Test
{
    /// <summary>
    /// Test entry point: 100 = success, 101 = failure (standard CLR test exit codes).
    /// </summary>
    public static int Main()
    {
        if (!PInvokeTest.test())
        {
            return 101;
        }
        if (!RPInvokeTest.test())
        {
            return 101;
        }
        return 100;
    }
}
using System.Text.RegularExpressions;
using System.Diagnostics;
using System;
using System.Drawing.Drawing2D;
using System.Windows.Forms;
using System.Collections;
using System.Drawing;
using Microsoft.VisualBasic;
using System.Data.SqlClient;
using System.Data;
using System.Collections.Generic;
using WeifenLuo.WinFormsUI;
using Microsoft.Win32;
using WeifenLuo;
using System.Collections.Specialized;

namespace SoftLogik.Win.UI
{
    /// <summary>
    /// Dynamically determine and set a tab order for a container and children according to a given strategy.
    /// </summary>
    public class VisualTabOrderManager
    {
        /// <summary>
        /// Orders two controls by screen position under the selected tab scheme.
        /// </summary>
        private class TabSchemeComparer : IComparer
        {
            private TabScheme comparisonScheme;

            /// <summary>
            /// Create a tab scheme comparer that uses the given scheme.
            /// </summary>
            public TabSchemeComparer(TabScheme scheme)
            {
                comparisonScheme = scheme;
            }

            #region IComparer Members

            public virtual int Compare(object x, object y)
            {
                Control first = (Control) x;
                Control second = (Control) y;

                if ((first == null) || (second == null))
                {
                    Debug.Assert(false, "Attempting to compare a non-control");
                    return 0;
                }

                //// AcrossFirst: order by row (Top) first, then by column (Left).
                //// DownFirst:   order by column (Left) first, then by row (Top).
                int primary;
                int tieBreak;
                if (comparisonScheme == TabScheme.AcrossFirst)
                {
                    primary = first.Top.CompareTo(second.Top);
                    tieBreak = first.Left.CompareTo(second.Left);
                }
                else
                {
                    primary = first.Left.CompareTo(second.Left);
                    tieBreak = first.Top.CompareTo(second.Top);
                }
                return (primary != 0) ? primary : tieBreak;
            }

            #endregion
        }

        /// <summary>
        /// The container whose tab order we manage.
        /// </summary>
        private Control container;

        /// <summary>
        /// Hash of controls to schemes so that individual containers can have different ordering
        /// strategies than their parents.
        /// </summary>
        private Hashtable schemeOverrides;

        /// <summary>
        /// The tab index we start numbering from when the tab order is applied.
        /// </summary>
        private int curTabIndex = 0;

        /// <summary>
        /// The general tab-ordering strategy (i.e. whether we tab across rows first, or down columns).
        /// </summary>
        public enum TabScheme
        {
            None,
            AcrossFirst,
            DownFirst
        }

        /// <summary>
        /// Constructor
        /// </summary>
        /// <param name="container">The container whose tab order we manage.</param>
        public VisualTabOrderManager(Control container)
        {
            this.container = container;
            this.curTabIndex = 0;
            this.schemeOverrides = new Hashtable();
        }

        /// <summary>
        /// Construct a tab order manager that starts numbering at the given tab index.
        /// </summary>
        /// <param name="container">The container whose tab order we manage.</param>
        /// <param name="curTabIndex">Where to start numbering.</param>
        /// <param name="schemeOverrides">List of controls with explicitly defined schemes.</param>
        private VisualTabOrderManager(Control container, int curTabIndex, Hashtable schemeOverrides)
        {
            this.container = container;
            this.curTabIndex = curTabIndex;
            this.schemeOverrides = schemeOverrides;
        }

        /// <summary>
        /// Explicitly set a tab order scheme for a given (presumably container) control.
        /// </summary>
        /// <param name="c">The control to set the scheme for.</param>
        /// <param name="scheme">The requested scheme.</param>
        public void SetSchemeForControl(Control c, TabScheme scheme)
        {
            schemeOverrides[c] = scheme;
        }

        /// <summary>
        /// Recursively set the tab order on this container and all of its children.
        /// </summary>
        /// <param name="scheme">The tab ordering strategy to apply.</param>
        /// <returns>The next tab index to be used.</returns>
        public int SetTabOrder(TabScheme scheme)
        {
            //// Tab order isn't important enough to ever cause a crash, so replace any exceptions
            //// with assertions.
            try
            {
                ArrayList ordered = new ArrayList();
                ordered.AddRange(container.Controls);
                ordered.Sort(new TabSchemeComparer(scheme));

                foreach (Control child in ordered)
                {
                    child.TabIndex = curTabIndex;
                    curTabIndex++;

                    if (child.Controls.Count > 0)
                    {
                        //// The child is itself a container -- recurse, honoring any
                        //// per-control scheme override.
                        TabScheme childScheme = scheme;
                        if (schemeOverrides.Contains(child))
                        {
                            childScheme = (TabScheme) (schemeOverrides[child]);
                        }
                        curTabIndex = (new VisualTabOrderManager(child, curTabIndex, schemeOverrides)).SetTabOrder(childScheme);
                    }
                }

                return curTabIndex;
            }
            catch (Exception e)
            {
                Debug.Assert(false, "Exception in TabOrderManager.SetTabOrder: " + e.Message);
                return 0;
            }
        }
    }
}
using System;
using System.Collections;
using System.ComponentModel;
using System.Data;
using System.Drawing;
using System.Web;
using System.Web.SessionState;
using System.Web.UI;
using System.Web.UI.WebControls;
using System.Web.UI.HtmlControls;
using System.Data.SqlClient;

namespace WebApplication2
{
    /// <summary>
    /// Summary description for frmEmergencyProcedures.
    /// Lists the processes belonging to the profile/service/deliverable selected in
    /// Session state, and routes the grid's Update/Staff/Services/Other/Remove
    /// commands to the corresponding maintenance pages.
    /// </summary>
    public partial class frmProfileSEProcs : System.Web.UI.Page
    {
        // Base redirect URL and connection string come from web.config appSettings.
        private static string strURL = System.Configuration.ConfigurationSettings.AppSettings["local_url"];
        private static string strDB = System.Configuration.ConfigurationSettings.AppSettings["local_db"];
        protected System.Web.UI.WebControls.Label Label2;
        // NOTE(review): one connection per page instance, opened/closed around each
        // command below; SqlDataAdapter.Fill manages open/close itself.
        public SqlConnection epsDbConn=new SqlConnection(strDB);

        protected void Page_Load(object sender, System.EventArgs e)
        {
            // Put user code to initialize the page here
            Load_Procedures();
        }

        #region Web Form Designer generated code
        override protected void OnInit(EventArgs e)
        {
            //
            // CODEGEN: This call is required by the ASP.NET Web Form Designer.
            //
            InitializeComponent();
            base.OnInit(e);
        }

        /// <summary>
        /// Required method for Designer support - do not modify
        /// the contents of this method with the code editor.
        /// </summary>
        private void InitializeComponent()
        {
            this.DataGrid1.ItemCommand += new System.Web.UI.WebControls.DataGridCommandEventHandler(this.DataGrid1_ItemCommand);
        }
        #endregion

        // Populates the header labels from Session state and (on first load) binds
        // the grid. Assumes "ProfilesName", "ServiceName" and "EventsName" are
        // already in Session — set by the calling page; TODO confirm.
        private void Load_Procedures()
        {
            Session["TableFlag"] = 2;
            lblProfilesName.Text = "Business Profile for: " + Session["ProfilesName"].ToString();
            lblServiceName.Text = "Service Delivered: " + Session["ServiceName"].ToString();
            lblDeliverableName.Text = "Deliverable: " + Session["EventsName"].ToString();
            // NOTE(review): "difference processes" looks like a typo for "different
            // processes", but it is user-facing text — change it deliberately, not here.
            lblContents1.Text="Listed below are difference processes that are undertaken at different"
                + " stages of preparation or follow-up to an event '" + Session["EventsName"].ToString()
                + "' for service '" + Session["ServiceName"].ToString() + "'";
            lblContents2.Text="Each process has certain basic characteristics, including the level of detail"
                + " at which it is budgeted and progress monitored, service standards, and so on. Click on the"
                + " button titled 'Update' to maintain these characteristics for that process.";
            lblContents3.Text="A given process is delivered in a series of steps."
                + " Click on the button titled 'Timetables' to identify and provide details about"
                + " steps for each process."
                ;
            if (!IsPostBack)
            {
                loadData();
            }
        }

        // Fetches the process rows for the current ProfileSEventsId via stored
        // procedure and binds them to the grid; the DataSet is cached in Session.
        private void loadData ()
        {
            SqlCommand cmd=new SqlCommand();
            cmd.CommandType=CommandType.StoredProcedure;
            cmd.CommandText="wms_RetrieveProfileSEProcs";
            cmd.Connection=this.epsDbConn;
            cmd.Parameters.Add ("@ProfileSEventsId",SqlDbType.Int);
            // Session value is passed as string; ADO.NET converts it to Int on send.
            cmd.Parameters["@ProfileSEventsId"].Value=Session["ProfileSEventsId"].ToString();
            DataSet ds=new DataSet();
            SqlDataAdapter da=new SqlDataAdapter(cmd);
            da.Fill(ds,"ProfileSEProcs");
            Session["ds"] = ds;
            DataGrid1.DataSource=ds;
            DataGrid1.DataBind();
            refreshGrid();
        }

        // Persists any edited sequence numbers, then navigates to the "add process"
        // picker page.
        protected void btnAdd_Click(object sender, System.EventArgs e)
        {
            Session["btnAction"]="Add";
            //Session["CUPSEP"]="frmProfileSEProcs";
            updateGrid();
            //Response.Redirect (strURL + "frmUpdProfileSEProcs.aspx?");
            Session["CProcsAll"]="frmProfileSEProcs";
            Response.Redirect(strURL + "frmProcsAll.aspx?");
        }

        protected void btnExit_Click(object sender, System.EventArgs e)
        {
            Exit();
        }

        // Saves pending sequence edits, then returns to the caller page recorded
        // in Session["CPSEProcs"].
        private void Exit()
        {
            updateGrid();
            Response.Redirect (strURL + Session["CPSEProcs"].ToString() + ".aspx?");
        }

        // Copies each row's sequence number into its editable TextBox,
        // defaulting empty cells ("&nbsp;") to 99 so they sort last.
        private void refreshGrid()
        {
            foreach (DataGridItem i in DataGrid1.Items)
            {
                TextBox tb = (TextBox) (i.Cells[2].FindControl("txtSeq"));
                Button btSt = (Button) (i.Cells[4].FindControl("btnStaff"));
                Button btSe = (Button) (i.Cells[4].FindControl("btnServices"));
                Button btRe = (Button) (i.Cells[4].FindControl("btnOther"));
                //Button btTimetables = (Button) (i.Cells[4].FindControl("btnTimetables"));
                if (i.Cells[1].Text == "&nbsp;")
                {
                    tb.Text="99";
                }
                else
                    tb.Text=i.Cells[1].Text;
                /*if (i.Cells[6].Text == "1")
                {
                }
                else
                {
                    btTimetables.Enabled = false;
                    btTimetables.BackColor=Color.Transparent;
                    btTimetables.Text="";
                }*/
            }
        }

        // Writes each row's (possibly edited) sequence number back via stored
        // procedure. An empty TextBox sends no @Seq parameter — the procedure is
        // presumably written to tolerate that; TODO confirm.
        private void updateGrid()
        {
            foreach (DataGridItem i in DataGrid1.Items)
            {
                TextBox tb = (TextBox) (i.Cells[2].FindControl("txtSeq"));
                {
                    SqlCommand cmd=new SqlCommand();
                    cmd.CommandType=CommandType.StoredProcedure;
                    cmd.CommandText="wms_UpdateProfileSEProcsSeqNo";
                    cmd.Connection=this.epsDbConn;
                    cmd.Parameters.Add("@Id", SqlDbType.Int);
                    cmd.Parameters ["@Id"].Value=i.Cells[0].Text;
                    if (tb.Text != "")
                    {
                        cmd.Parameters.Add("@Seq", SqlDbType.Int);
                        cmd.Parameters["@Seq"].Value = Int32.Parse(tb.Text);
                    }
                    cmd.Connection.Open();
                    cmd.ExecuteNonQuery();
                    cmd.Connection.Close();
                }
            }
        }

        // Dispatches a grid row command: saves sequence edits first, stashes the
        // row's keys in Session, then redirects (or deletes and rebinds for Remove).
        private void DataGrid1_ItemCommand(object source, System.Web.UI.WebControls.DataGridCommandEventArgs e)
        {
            updateGrid();
            Session["ProcsId"] = e.Item.Cells[8].Text;
            if (e.CommandName == "Update")
            {
                Session["CUPSEP"]="frmProfileSEProcs";
                Session["btnAction"]="Update";
                Session["ProcessName"]=e.Item.Cells[3].Text;
                Session["Id"]=e.Item.Cells[0].Text;
                Response.Redirect (strURL + "frmUpdProfileSEProcs.aspx?");
            }
            else if (e.CommandName == "Staff")
            {
                Session["CPSEPStaff"]="frmProfileSEProcs";
                Session["ProcessName"]=e.Item.Cells[3].Text;
                Session["PSEPID"]=e.Item.Cells[0].Text;
                Response.Redirect (strURL + "frmPSEPStaff.aspx?");
            }
            else if (e.CommandName == "Services")
            {
                // RType 1 = services resources; 0 = other resources (same page).
                Session["CPSEPO"] = "frmProfileSEProcs";
                Session["ProcessName"] = e.Item.Cells[3].Text;
                Session["PSEPID"] = e.Item.Cells[0].Text;
                Session["RType"] = 1;
                Response.Redirect(strURL + "frmPSEPO.aspx?");
            }
            else if (e.CommandName == "Other")
            {
                Session["CPSEPO"] = "frmProfileSEProcs";
                Session["ProcessName"] = e.Item.Cells[3].Text;
                Session["PSEPID"] = e.Item.Cells[0].Text;
                Session["RType"] = 0;
                Response.Redirect(strURL + "frmPSEPO.aspx?");
            }
            else if (e.CommandName == "Remove")
            {
                SqlCommand cmd=new SqlCommand();
                cmd.CommandType=CommandType.StoredProcedure;
                cmd.CommandText="wms_DeleteProfileSEProcs";
                cmd.Connection=this.epsDbConn;
                cmd.Parameters.Add ("@Id",SqlDbType.Int);
                cmd.Parameters["@Id"].Value=e.Item.Cells[0].Text;
                cmd.Connection.Open();
                cmd.ExecuteNonQuery();
                cmd.Connection.Close();
                loadData();
            }
        }

        protected void btnSignoff_Click(object sender, System.EventArgs e)
        {
            updateGrid();
            Response.Redirect (strURL + "frmEnd.aspx?");
        }
    }
}
using System;
using System.Collections;
using System.Collections.Generic;
using System.Collections.ObjectModel;
using System.Collections.Specialized;
using System.Linq;
using System.Runtime.CompilerServices;

namespace Xamarin.Forms
{
    /// <summary>
    /// Base page that hosts multiple child pages of type <typeparamref name="T"/>,
    /// either populated directly via <see cref="Children"/> or generated from
    /// <see cref="ItemsSource"/> + <see cref="ItemTemplate"/>. Keeps the child
    /// collection, the templated items and the Index attached property in sync.
    /// </summary>
    [ContentProperty("Children")]
    public abstract class MultiPage<T> : Page, IViewContainer<T>, IPageContainer<T>, IItemsView<T> where T : Page
    {
        public static readonly BindableProperty ItemsSourceProperty = BindableProperty.Create("ItemsSource", typeof(IEnumerable), typeof(MultiPage<>), null);

        public static readonly BindableProperty ItemTemplateProperty = BindableProperty.Create("ItemTemplate", typeof(DataTemplate), typeof(MultiPage<>), null);

        public static readonly BindableProperty SelectedItemProperty = BindableProperty.Create("SelectedItem", typeof(object), typeof(MultiPage<>), null, BindingMode.TwoWay);

        // Attached per-page position within this MultiPage; -1 = not assigned.
        internal static readonly BindableProperty IndexProperty = BindableProperty.Create("Index", typeof(int), typeof(Page), -1);

        readonly ElementCollection<T> _children;
        readonly TemplatedItemsList<MultiPage<T>, T> _templatedItems;

        T _current;

        ObservableCollection<Element> InternalChildren => ((IPageController)this).InternalChildren;

        protected MultiPage()
        {
            _templatedItems = new TemplatedItemsList<MultiPage<T>, T>(this, ItemsSourceProperty, ItemTemplateProperty);
            _templatedItems.CollectionChanged += OnTemplatedItemsChanged;

            _children = new ElementCollection<T>(InternalChildren);
            InternalChildren.CollectionChanged += OnChildrenChanged;
        }

        public IEnumerable ItemsSource
        {
            get { return (IEnumerable)GetValue(ItemsSourceProperty); }
            set { SetValue(ItemsSourceProperty, value); }
        }

        public DataTemplate ItemTemplate
        {
            get { return (DataTemplate)GetValue(ItemTemplateProperty); }
            set { SetValue(ItemTemplateProperty, value); }
        }

        public object SelectedItem
        {
            get { return GetValue(SelectedItemProperty); }
            set { SetValue(SelectedItemProperty, value); }
        }

        // Explicit IItemsView<T> plumbing: forward to the protected template methods.
        T IItemsView<T>.CreateDefault(object item)
        {
            return CreateDefault(item);
        }

        void IItemsView<T>.SetupContent(T content, int index)
        {
            SetupContent(content, index);
        }

        void IItemsView<T>.UnhookContent(T content)
        {
            UnhookContent(content);
        }

        /// <summary>
        /// The page currently displayed. Setting it raises property-change
        /// notifications and <see cref="CurrentPageChanged"/>.
        /// </summary>
        public T CurrentPage
        {
            get { return _current; }
            set
            {
                if (_current == value)
                    return;

                OnPropertyChanging();
                _current = value;
                OnPropertyChanged();
                OnCurrentPageChanged();
            }
        }

        public IList<T> Children
        {
            get { return _children; }
        }

        public event EventHandler CurrentPageChanged;

        public event NotifyCollectionChangedEventHandler PagesChanged;

        protected abstract T CreateDefault(object item);

        protected override bool OnBackButtonPressed()
        {
            // Give the active child page first chance at handling back navigation.
            if (CurrentPage != null)
            {
                bool handled = CurrentPage.SendBackButtonPressed();
                if (handled)
                    return true;
            }

            return base.OnBackButtonPressed();
        }

        protected override void OnChildAdded(Element child)
        {
            base.OnChildAdded(child);
            ForceLayout();
        }

        protected virtual void OnCurrentPageChanged()
        {
            EventHandler changed = CurrentPageChanged;
            if (changed != null)
                changed(this, EventArgs.Empty);
        }

        protected virtual void OnPagesChanged(NotifyCollectionChangedEventArgs e)
        {
            NotifyCollectionChangedEventHandler handler = PagesChanged;
            if (handler != null)
                handler(this, e);
        }

        protected override void OnPropertyChanged([CallerMemberName] string propertyName = null)
        {
            // Children are read-only while templated from ItemsSource; keep
            // SelectedItem and CurrentPage mirroring each other in that mode.
            if (propertyName == ItemsSourceProperty.PropertyName)
                _children.IsReadOnly = ItemsSource != null;
            else if (propertyName == SelectedItemProperty.PropertyName)
            {
                UpdateCurrentPage();
            }
            else if (propertyName == "CurrentPage" && ItemsSource != null)
            {
                if (CurrentPage == null)
                {
                    SelectedItem = null;
                }
                else
                {
                    int index = _templatedItems.IndexOf(CurrentPage);
                    SelectedItem = index != -1 ? _templatedItems.ListProxy[index] : null;
                }
            }

            base.OnPropertyChanged(propertyName);
        }

        protected virtual void SetupContent(T content, int index)
        {
        }

        protected virtual void UnhookContent(T content)
        {
        }

        internal static int GetIndex(T page)
        {
            if (page == null)
                throw new ArgumentNullException("page");

            return (int)page.GetValue(IndexProperty);
        }

        internal T GetPageByIndex(int index)
        {
            foreach (T page in InternalChildren)
            {
                if (index == GetIndex(page))
                    return page;
            }
            return null;
        }

        internal static void SetIndex(Page page, int index)
        {
            if (page == null)
                throw new ArgumentNullException("page");

            page.SetValue(IndexProperty, index);
        }

        void OnChildrenChanged(object sender, NotifyCollectionChangedEventArgs e)
        {
            // Only handle direct Children edits; templated mode is driven by
            // OnTemplatedItemsChanged instead.
            if (Children.IsReadOnly)
                return;

            var i = 0;
            foreach (T page in Children)
                SetIndex(page, i++);

            OnPagesChanged(e);

            if (CurrentPage == null || Children.IndexOf(CurrentPage) == -1)
                CurrentPage = Children.FirstOrDefault();
        }

        // Mirrors templated-items collection changes into InternalChildren while
        // keeping every page's Index attached property consistent. The index
        // arithmetic below is order-sensitive: indices of displaced pages are
        // rewritten BEFORE pages are inserted/removed.
        void OnTemplatedItemsChanged(object sender, NotifyCollectionChangedEventArgs e)
        {
            switch (e.Action)
            {
                case NotifyCollectionChangedAction.Add:
                    // A negative starting index means positions are unknown; rebuild.
                    if (e.NewStartingIndex < 0)
                        goto case NotifyCollectionChangedAction.Reset;

                    for (int i = e.NewStartingIndex; i < Children.Count; i++)
                        SetIndex((T)InternalChildren[i], i + e.NewItems.Count);

                    for (var i = 0; i < e.NewItems.Count; i++)
                    {
                        var page = (T)e.NewItems[i];
                        page.Owned = true;
                        int index = i + e.NewStartingIndex;
                        SetIndex(page, index);
                        InternalChildren.Insert(index, (T)e.NewItems[i]);
                    }
                    break;

                case NotifyCollectionChangedAction.Remove:
                    if (e.OldStartingIndex < 0)
                        goto case NotifyCollectionChangedAction.Reset;

                    int removeIndex = e.OldStartingIndex;
                    for (int i = removeIndex + e.OldItems.Count; i < Children.Count; i++)
                        SetIndex((T)InternalChildren[i], removeIndex++);

                    for (var i = 0; i < e.OldItems.Count; i++)
                    {
                        Element element = InternalChildren[e.OldStartingIndex];
                        InternalChildren.RemoveAt(e.OldStartingIndex);
                        element.Owned = false;
                    }
                    break;

                case NotifyCollectionChangedAction.Move:
                    if (e.NewStartingIndex < 0 || e.OldStartingIndex < 0)
                        goto case NotifyCollectionChangedAction.Reset;

                    if (e.NewStartingIndex == e.OldStartingIndex)
                        return;

                    bool movingForward = e.OldStartingIndex < e.NewStartingIndex;

                    if (movingForward)
                    {
                        // Shift the pages between old and new position back by one block.
                        int moveIndex = e.OldStartingIndex;
                        for (int i = moveIndex + e.OldItems.Count; i <= e.NewStartingIndex; i++)
                            SetIndex((T)InternalChildren[i], moveIndex++);
                    }
                    else
                    {
                        // Shift the pages between new and old position forward.
                        for (var i = 0; i < e.OldStartingIndex - e.NewStartingIndex; i++)
                        {
                            var page = (T)InternalChildren[i + e.NewStartingIndex];
                            SetIndex(page, GetIndex(page) + e.OldItems.Count);
                        }
                    }

                    for (var i = 0; i < e.OldItems.Count; i++)
                        InternalChildren.RemoveAt(e.OldStartingIndex);

                    int insertIndex = e.NewStartingIndex;
                    if (movingForward)
                        insertIndex -= e.OldItems.Count - 1;

                    for (var i = 0; i < e.OldItems.Count; i++)
                    {
                        var page = (T)e.OldItems[i];
                        SetIndex(page, insertIndex + i);
                        InternalChildren.Insert(insertIndex + i, page);
                    }
                    break;

                case NotifyCollectionChangedAction.Replace:
                    if (e.OldStartingIndex < 0)
                        goto case NotifyCollectionChangedAction.Reset;

                    for (int i = e.OldStartingIndex; i - e.OldStartingIndex < e.OldItems.Count; i++)
                    {
                        Element element = InternalChildren[i];
                        InternalChildren.RemoveAt(i);
                        element.Owned = false;

                        T page = _templatedItems.GetOrCreateContent(i, e.NewItems[i - e.OldStartingIndex]);
                        page.Owned = true;
                        SetIndex(page, i);
                        InternalChildren.Insert(i, page);
                    }
                    break;

                case NotifyCollectionChangedAction.Reset:
                    Reset();
                    // Reset raises its own PagesChanged/UpdateCurrentPage.
                    return;
            }

            OnPagesChanged(e);
            UpdateCurrentPage();
        }

        // Rebuilds InternalChildren from scratch out of the templated items and
        // re-selects a valid SelectedItem/CurrentPage, batched so renderers see a
        // single consistent update.
        void Reset()
        {
            List<Element> snapshot = InternalChildren.ToList();

            InternalChildren.Clear();

            foreach (Element element in snapshot)
                element.Owned = false;

            for (var i = 0; i < _templatedItems.Count; i++)
            {
                T page = _templatedItems.GetOrCreateContent(i, _templatedItems.ListProxy[i]);
                page.Owned = true;
                SetIndex(page, i);
                InternalChildren.Add(page);
            }

            var currentNeedsUpdate = true;

            BatchBegin();
            if (ItemsSource != null)
            {
                object selected = SelectedItem;
                if (selected == null || !ItemsSource.Cast<object>().Contains(selected))
                {
                    // Setting SelectedItem already drives UpdateCurrentPage via
                    // OnPropertyChanged, so skip the explicit call below.
                    SelectedItem = ItemsSource.Cast<object>().FirstOrDefault();
                    currentNeedsUpdate = false;
                }
            }

            if (currentNeedsUpdate)
                UpdateCurrentPage();

            OnPagesChanged(new NotifyCollectionChangedEventArgs(NotifyCollectionChangedAction.Reset));
            BatchCommit();
        }

        // Resolves CurrentPage from SelectedItem (templated mode) or directly
        // (when SelectedItem is itself a page).
        void UpdateCurrentPage()
        {
            if (ItemsSource != null)
            {
                int index = _templatedItems.ListProxy.IndexOf(SelectedItem);
                if (index == -1)
                    CurrentPage = (T)InternalChildren.FirstOrDefault();
                else
                    CurrentPage = _templatedItems.GetOrCreateContent(index, SelectedItem);
            }
            else if (SelectedItem is T)
                CurrentPage = (T)SelectedItem;
        }
    }
}
namespace Lucas.Solutions.Migrations
{
    using System;
    using System.Data.Entity.Migrations;

    /// <summary>
    /// Initial Entity Framework Code First migration: creates the ASP.NET Identity
    /// tables (roles, users, claims, logins) and the Work* domain tables (hosts,
    /// tasks, parties, traces, transfers). Scaffolded code — regenerate with
    /// Add-Migration rather than editing table definitions by hand.
    /// </summary>
    public partial class Initial : DbMigration
    {
        public override void Up()
        {
            // --- ASP.NET Identity tables ---
            CreateTable(
                "dbo.AspNetRoles",
                c => new
                    {
                        Id = c.String(nullable: false, maxLength: 128),
                        Name = c.String(nullable: false, maxLength: 256),
                    })
                .PrimaryKey(t => t.Id)
                .Index(t => t.Name, unique: true, name: "RoleNameIndex");

            CreateTable(
                "dbo.AspNetUserRoles",
                c => new
                    {
                        UserId = c.String(nullable: false, maxLength: 128),
                        RoleId = c.String(nullable: false, maxLength: 128),
                    })
                .PrimaryKey(t => new { t.UserId, t.RoleId })
                .ForeignKey("dbo.AspNetRoles", t => t.RoleId, cascadeDelete: true)
                .ForeignKey("dbo.AspNetUsers", t => t.UserId, cascadeDelete: true)
                .Index(t => t.UserId)
                .Index(t => t.RoleId);

            CreateTable(
                "dbo.AspNetUsers",
                c => new
                    {
                        Id = c.String(nullable: false, maxLength: 128),
                        Department = c.String(),
                        FullName = c.String(),
                        Email = c.String(maxLength: 256),
                        EmailConfirmed = c.Boolean(nullable: false),
                        PasswordHash = c.String(),
                        SecurityStamp = c.String(),
                        PhoneNumber = c.String(),
                        PhoneNumberConfirmed = c.Boolean(nullable: false),
                        TwoFactorEnabled = c.Boolean(nullable: false),
                        LockoutEndDateUtc = c.DateTime(),
                        LockoutEnabled = c.Boolean(nullable: false),
                        AccessFailedCount = c.Int(nullable: false),
                        UserName = c.String(nullable: false, maxLength: 256),
                    })
                .PrimaryKey(t => t.Id)
                .Index(t => t.UserName, unique: true, name: "UserNameIndex");

            CreateTable(
                "dbo.AspNetUserClaims",
                c => new
                    {
                        Id = c.Int(nullable: false, identity: true),
                        UserId = c.String(nullable: false, maxLength: 128),
                        ClaimType = c.String(),
                        ClaimValue = c.String(),
                    })
                .PrimaryKey(t => t.Id)
                .ForeignKey("dbo.AspNetUsers", t => t.UserId, cascadeDelete: true)
                .Index(t => t.UserId);

            CreateTable(
                "dbo.AspNetUserLogins",
                c => new
                    {
                        LoginProvider = c.String(nullable: false, maxLength: 128),
                        ProviderKey = c.String(nullable: false, maxLength: 128),
                        UserId = c.String(nullable: false, maxLength: 128),
                    })
                .PrimaryKey(t => new { t.LoginProvider, t.ProviderKey, t.UserId })
                .ForeignKey("dbo.AspNetUsers", t => t.UserId, cascadeDelete: true)
                .Index(t => t.UserId);

            // --- Work domain tables ---
            CreateTable(
                "dbo.WorkHost",
                c => new
                    {
                        Id = c.Int(nullable: false, identity: true),
                        Address = c.String(nullable: false, maxLength: 64, unicode: false),
                        Credential = c.String(maxLength: 64, unicode: false),
                        Password = c.String(maxLength: 64, unicode: false),
                        Port = c.String(),
                        Protocol = c.Int(nullable: false),
                        Summary = c.String(maxLength: 128),
                    })
                .PrimaryKey(t => t.Id)
                .Index(t => t.Address, unique: true, name: "WorkHostAddress");

            CreateTable(
                "dbo.WorkTask",
                c => new
                    {
                        Id = c.Int(nullable: false, identity: true),
                        Name = c.String(nullable: false, maxLength: 32),
                        Start = c.String(),
                        State = c.Byte(nullable: false),
                        Summary = c.String(maxLength: 128),
                        Type = c.String(nullable: false, maxLength: 16, unicode: false),
                    })
                .PrimaryKey(t => t.Id)
                .Index(t => t.Name, unique: true, name: "WorkTaskName")
                .Index(t => t.Type, name: "WorkTaskType");

            CreateTable(
                "dbo.WorkParty",
                c => new
                    {
                        Id = c.Int(nullable: false, identity: true),
                        Credential = c.String(maxLength: 64, unicode: false),
                        Direction = c.Byte(nullable: false),
                        Email = c.String(nullable: false, maxLength: 64, unicode: false),
                        HostId = c.Int(nullable: false),
                        Name = c.String(nullable: false, maxLength: 32),
                        Password = c.String(maxLength: 64, unicode: false),
                        Path = c.String(nullable: false, maxLength: 64, unicode: false),
                        TransferId = c.Int(nullable: false),
                        Summary = c.String(maxLength: 128),
                    })
                .PrimaryKey(t => t.Id)
                .ForeignKey("dbo.WorkHost", t => t.HostId)
                .ForeignKey("dbo.WorkTransfer", t => t.TransferId)
                .Index(t => t.Email, name: "WorkPartyEmail")
                .Index(t => t.HostId)
                .Index(t => t.Name, unique: true, name: "WorkPartyName")
                .Index(t => t.TransferId);

            CreateTable(
                "dbo.WorkTrace",
                c => new
                    {
                        Id = c.Int(nullable: false, identity: true),
                        Duration = c.Time(nullable: false, precision: 7),
                        Message = c.String(),
                        Start = c.DateTimeOffset(nullable: false, precision: 7),
                        Success = c.Boolean(nullable: false),
                        Type = c.String(nullable: false, maxLength: 16, unicode: false),
                    })
                .PrimaryKey(t => t.Id)
                .Index(t => t.Type, name: "WorkTraceType");

            // Shared-primary-key (table-per-type) hierarchy:
            // WorkTransfer -> WorkTask, WorkTransferTrace -> WorkTrace,
            // WorkOutgoingTrace / WorkIncomingTrace -> WorkTransferTrace.
            CreateTable(
                "dbo.WorkTransfer",
                c => new
                    {
                        Id = c.Int(nullable: false),
                    })
                .PrimaryKey(t => t.Id)
                .ForeignKey("dbo.WorkTask", t => t.Id)
                .Index(t => t.Id);

            CreateTable(
                "dbo.WorkTransferTrace",
                c => new
                    {
                        Id = c.Int(nullable: false),
                        Direction = c.Byte(nullable: false),
                        File = c.String(),
                        PartyId = c.Int(nullable: false),
                        Size = c.Long(nullable: false),
                        TransferId = c.Int(nullable: false),
                    })
                .PrimaryKey(t => t.Id)
                .ForeignKey("dbo.WorkTrace", t => t.Id)
                .ForeignKey("dbo.WorkParty", t => t.PartyId, cascadeDelete: true)
                .ForeignKey("dbo.WorkTransfer", t => t.TransferId)
                .Index(t => t.Id)
                .Index(t => t.PartyId)
                .Index(t => t.TransferId);

            CreateTable(
                "dbo.WorkOutgoingTrace",
                c => new
                    {
                        Id = c.Int(nullable: false),
                    })
                .PrimaryKey(t => t.Id)
                .ForeignKey("dbo.WorkTransferTrace", t => t.Id)
                .Index(t => t.Id);

            CreateTable(
                "dbo.WorkIncomingTrace",
                c => new
                    {
                        Id = c.Int(nullable: false),
                        SenderId = c.Int(nullable: false),
                    })
                .PrimaryKey(t => t.Id)
                .ForeignKey("dbo.WorkTransferTrace", t => t.Id)
                .ForeignKey("dbo.WorkOutgoingTrace", t => t.SenderId)
                .Index(t => t.Id)
                .Index(t => t.SenderId);
        }

        public override void Down()
        {
            // Exact reverse of Up(): foreign keys first, then indexes, then tables,
            // in dependency order.
            DropForeignKey("dbo.WorkIncomingTrace", "SenderId", "dbo.WorkOutgoingTrace");
            DropForeignKey("dbo.WorkIncomingTrace", "Id", "dbo.WorkTransferTrace");
            DropForeignKey("dbo.WorkOutgoingTrace", "Id", "dbo.WorkTransferTrace");
            DropForeignKey("dbo.WorkTransferTrace", "TransferId", "dbo.WorkTransfer");
            DropForeignKey("dbo.WorkTransferTrace", "PartyId", "dbo.WorkParty");
            DropForeignKey("dbo.WorkTransferTrace", "Id", "dbo.WorkTrace");
            DropForeignKey("dbo.WorkTransfer", "Id", "dbo.WorkTask");
            DropForeignKey("dbo.WorkParty", "TransferId", "dbo.WorkTransfer");
            DropForeignKey("dbo.WorkParty", "HostId", "dbo.WorkHost");
            DropForeignKey("dbo.AspNetUserRoles", "UserId", "dbo.AspNetUsers");
            DropForeignKey("dbo.AspNetUserLogins", "UserId", "dbo.AspNetUsers");
            DropForeignKey("dbo.AspNetUserClaims", "UserId", "dbo.AspNetUsers");
            DropForeignKey("dbo.AspNetUserRoles", "RoleId", "dbo.AspNetRoles");
            DropIndex("dbo.WorkIncomingTrace", new[] { "SenderId" });
            DropIndex("dbo.WorkIncomingTrace", new[] { "Id" });
            DropIndex("dbo.WorkOutgoingTrace", new[] { "Id" });
            DropIndex("dbo.WorkTransferTrace", new[] { "TransferId" });
            DropIndex("dbo.WorkTransferTrace", new[] { "PartyId" });
            DropIndex("dbo.WorkTransferTrace", new[] { "Id" });
            DropIndex("dbo.WorkTransfer", new[] { "Id" });
            DropIndex("dbo.WorkTrace", "WorkTraceType");
            DropIndex("dbo.WorkParty", new[] { "TransferId" });
            DropIndex("dbo.WorkParty", "WorkPartyName");
            DropIndex("dbo.WorkParty", new[] { "HostId" });
            DropIndex("dbo.WorkParty", "WorkPartyEmail");
            DropIndex("dbo.WorkTask", "WorkTaskType");
            DropIndex("dbo.WorkTask", "WorkTaskName");
            DropIndex("dbo.WorkHost", "WorkHostAddress");
            DropIndex("dbo.AspNetUserLogins", new[] { "UserId" });
            DropIndex("dbo.AspNetUserClaims", new[] { "UserId" });
            DropIndex("dbo.AspNetUsers", "UserNameIndex");
            DropIndex("dbo.AspNetUserRoles", new[] { "RoleId" });
            DropIndex("dbo.AspNetUserRoles", new[] { "UserId" });
            DropIndex("dbo.AspNetRoles", "RoleNameIndex");
            DropTable("dbo.WorkIncomingTrace");
            DropTable("dbo.WorkOutgoingTrace");
            DropTable("dbo.WorkTransferTrace");
            DropTable("dbo.WorkTransfer");
            DropTable("dbo.WorkTrace");
            DropTable("dbo.WorkParty");
            DropTable("dbo.WorkTask");
            DropTable("dbo.WorkHost");
            DropTable("dbo.AspNetUserLogins");
            DropTable("dbo.AspNetUserClaims");
            DropTable("dbo.AspNetUsers");
            DropTable("dbo.AspNetUserRoles");
            DropTable("dbo.AspNetRoles");
        }
    }
}
using System;
using System.Collections.Generic;
using System.Text;

namespace Jovian.JavaScriptPro
{
    /// <summary>
    /// A lexical scope used while rewriting JavaScript identifiers. Scopes form a
    /// chain through <see cref="Parent"/>; the root scope (Parent == null) owns
    /// the optional <see cref="ImageResolve"/> used to resolve image/CSS/JS URLs.
    /// </summary>
    class Environment
    {
        public Environment Parent;

        // Maps an identifier declared in this scope to its numeric slot;
        // the rewritten name is "_<slot>".
        SortedList<string, int> Mapping;

        // Number of identifiers declared directly in this scope.
        public int Declared;

        // When set on the root scope, the built-in name table in require() is bypassed.
        public bool ReWriteOff = false;

        // NOTE(review): method name is a typo (presumably "Grow"/"TurnOff"), kept
        // as-is because Singleton.RewriteOff — and possibly other assembly
        // callers — depend on it.
        public void Grwo(bool v)
        {
            ReWriteOff = v;
        }

        /// <summary>True when this is the root (global) scope.</summary>
        public bool Global()
        {
            return Parent == null;
        }

        ImageResolve ImgRes = null;

        /// <summary>Resolves an image URL through the root scope's resolver, if any.</summary>
        public string ResolveImage(string x)
        {
            if (Parent != null)
                return Parent.ResolveImage(x);
            return ImgRes != null ? ImgRes.ResolveImageURL(x) : x;
        }

        /// <summary>Resolves a CSS URL through the root scope's resolver, if any.</summary>
        public string ResolveCSS(string x)
        {
            if (Parent != null)
                return Parent.ResolveCSS(x);
            return ImgRes != null ? ImgRes.ResolveCSS(x) : x;
        }

        /// <summary>Resolves a JS URL through the root scope's resolver, if any.</summary>
        public string ResolveJS(string x)
        {
            if (Parent != null)
                return Parent.ResolveJS(x);
            return ImgRes != null ? ImgRes.ResolveJS(x) : x;
        }

        /// <summary>Creates the root scope with an optional URL resolver.</summary>
        public Environment(ImageResolve ir)
        {
            ImgRes = ir;
            Parent = null;
            Mapping = new SortedList<string, int>();
            Declared = 0;
        }

        /// <summary>Creates a child scope of <paramref name="P"/>.</summary>
        public Environment(Environment P)
        {
            Parent = P;
            Mapping = new SortedList<string, int>();
            Declared = 0;
        }

        /// <summary>
        /// Next free slot number: the total count of identifiers declared in this
        /// scope plus all ancestor scopes.
        /// </summary>
        public int NewKey()
        {
            int p = (Parent != null) ? Parent.NewKey() : 0;
            return p + Declared;
        }

        /// <summary>
        /// Declares an identifier in this scope and returns its rewritten name.
        /// Global declarations are returned unchanged (they keep their names).
        /// </summary>
        public string declare(string x)
        {
            if (Parent == null)
                return x;
            int existing;
            if (Mapping.TryGetValue(x, out existing))
            {
                return "_" + existing;
            }
            int key = NewKey();
            Declared++;
            Mapping.Add(x, key);
            return "_" + key;
        }

        // Built-in names recognized at the root scope, keyed by the upper-cased,
        // trimmed identifier. Consulted by require() unless ReWriteOff is set.
        static readonly Dictionary<string, string> BuiltinNames = new Dictionary<string, string>
        {
            { "SM_VALLINK", "s99" },
            { "GETVALUE", "s00" },
            { "FORCEVALUE", "s01" },
            { "SIMPLEVALUESTATE", "s02" },
            { "ISVISIBLE", "s03" },
            { "HIDELAYER", "s04" },
            { "SHOWLAYER", "s05" },
            { "TOGGLELAYER", "s06" },
            { "DOMPREFIX", "s07" },
            { "CTXID", "s08" },
            { "INCREMENTCURSOR", "s09" },
            { "DECREMENTCURSOR", "s10" },
            { "JUMPCURSOR", "s11" },
            { "ISCURSORHEAD", "s12" },
            { "ISCURSORTAIL", "s13" },
            { "TOOLTIPW", "s14" },
            { "TOOLTIP", "s15" },
            { "TOOLTIPID", "s16" },
            { "INVALIDATESOFT", "s17" },
            { "INVALIDATEHARD", "s18" },
            { "REPLACEALL", "s98" },
            { "ISWHITESPACE", "s97" },
            { "LISPLEX", "s96" },
            { "LISPPARSE", "s95" },
            { "EVENTCODE", "s80" },
            { "IMAGEPNG", "iP" },
            { "IMAGEJPEG", "iJ" },
            { "GETGROUPASOBJECT", "s70" },
        };

        /// <summary>
        /// Looks up an identifier, walking outward through the scope chain. At the
        /// root, well-known built-in names are mapped to their short aliases
        /// (unless rewriting is turned off); any other name is returned unchanged.
        /// </summary>
        public string require(string x)
        {
            int slot;
            if (Mapping.TryGetValue(x, out slot))
            {
                return "_" + slot;
            }
            if (Parent != null)
            {
                return Parent.require(x);
            }
            if (!ReWriteOff)
            {
                string y = x.ToUpper().Trim();
                string alias;
                if (BuiltinNames.TryGetValue(y, out alias))
                    return alias;
            }
            return x;
        }
    }

    public enum OperationalMode
    {
        Simple,
        RewriteFunctionals,
        RewriteVariables
    }

    /// <summary>
    /// Static façade over the current <see cref="Environment"/> scope chain used
    /// by the rewriter. <see cref="Begin"/> must be called before any other member.
    /// </summary>
    public class Singleton
    {
        private static Environment E;
        private static OperationalMode M;

        // When false, RewriteOff() requests are ignored.
        public static bool CanTurnOffRewrite = true;

        /// <summary>Enters a new child scope.</summary>
        public static void Push()
        {
            E = new Environment(E);
        }

        /// <summary>Leaves the current scope, returning to its parent.</summary>
        public static void Pop()
        {
            E = E.Parent;
        }

        /// <summary>True when functional-name rewriting is active.</summary>
        public static bool rFunction()
        {
            return M == OperationalMode.RewriteFunctionals;
        }

        /// <summary>Toggles built-in name rewriting, if the feature is allowed.</summary>
        public static void RewriteOff(bool v)
        {
            if (CanTurnOffRewrite)
                E.Grwo(v);
        }

        /// <summary>Declares an identifier in the current scope (no-op unless rewriting variables).</summary>
        public static string declare(string x)
        {
            return (M == OperationalMode.RewriteVariables) ? E.declare(x) : x;
        }

        /// <summary>Resolves an identifier through the current scope (no-op unless rewriting variables).</summary>
        public static string require(string x)
        {
            return (M == OperationalMode.RewriteVariables) ? E.require(x) : x;
        }

        /// <summary>Resets the rewriter with a fresh root scope and operating mode.</summary>
        public static void Begin(OperationalMode mode, ImageResolve ir)
        {
            E = new Environment(ir);
            M = mode;
        }

        public static string Resolve(string X)
        {
            return E.ResolveImage(X);
        }

        public static string ResolveCSS(string X)
        {
            return E.ResolveCSS(X);
        }

        public static string ResolveJS(string X)
        {
            return E.ResolveJS(X);
        }
    }
}
// Licensed to the .NET Foundation under one or more agreements. // The .NET Foundation licenses this file to you under the MIT license. // See the LICENSE file in the project root for more information. /*============================================================ ** ** Purpose: Unsafe code that uses pointers should use ** SafePointer to fix subtle lifetime problems with the ** underlying resource. ** ===========================================================*/ // Design points: // *) Avoid handle-recycling problems (including ones triggered via // resurrection attacks) for all accesses via pointers. This requires tying // together the lifetime of the unmanaged resource with the code that reads // from that resource, in a package that uses synchronization to enforce // the correct semantics during finalization. We're using SafeHandle's // ref count as a gate on whether the pointer can be dereferenced because that // controls the lifetime of the resource. // // *) Keep the penalties for using this class small, both in terms of space // and time. Having multiple threads reading from a memory mapped file // will already require 2 additional interlocked operations. If we add in // a "current position" concept, that requires additional space in memory and // synchronization. Since the position in memory is often (but not always) // something that can be stored on the stack, we can save some memory by // excluding it from this object. However, avoiding the need for // synchronization is a more significant win. This design allows multiple // threads to read and write memory simultaneously without locks (as long as // you don't write to a region of memory that overlaps with what another // thread is accessing). // // *) Space-wise, we use the following memory, including SafeHandle's fields: // Object Header MT* handle int bool bool <2 pad bytes> length // On 32 bit platforms: 24 bytes. On 64 bit platforms: 40 bytes. 
// (We can safe 4 bytes on x86 only by shrinking SafeHandle) // // *) Wrapping a SafeHandle would have been a nice solution, but without an // ordering between critical finalizable objects, it would have required // changes to each SafeHandle subclass to opt in to being usable from a // SafeBuffer (or some clever exposure of SafeHandle's state fields and a // way of forcing ReleaseHandle to run even after the SafeHandle has been // finalized with a ref count > 1). We can use less memory and create fewer // objects by simply inserting a SafeBuffer into the class hierarchy. // // *) In an ideal world, we could get marshaling support for SafeBuffer that // would allow us to annotate a P/Invoke declaration, saying this parameter // specifies the length of the buffer, and the units of that length are X. // P/Invoke would then pass that size parameter to SafeBuffer. // [DllImport(...)] // static extern SafeMemoryHandle AllocCharBuffer(int numChars); // If we could put an attribute on the SafeMemoryHandle saying numChars is // the element length, and it must be multiplied by 2 to get to the byte // length, we can simplify the usage model for SafeBuffer. // // *) This class could benefit from a constraint saying T is a value type // containing no GC references. // Implementation notes: // *) The Initialize method must be called before you use any instance of // a SafeBuffer. To avoid race conditions when storing SafeBuffers in statics, // you either need to take a lock when publishing the SafeBuffer, or you // need to create a local, initialize the SafeBuffer, then assign to the // static variable (perhaps using Interlocked.CompareExchange). Of course, // assignments in a static class constructor are under a lock implicitly. 
namespace System.Runtime.InteropServices
{
    using System;
    using System.Security.Permissions;
    using System.Runtime.InteropServices;
    using System.Runtime.CompilerServices;
    using System.Runtime.ConstrainedExecution;
    using System.Runtime.Versioning;
    using Microsoft.Win32.SafeHandles;
    using System.Diagnostics.Contracts;

    [System.Security.SecurityCritical]
    public abstract unsafe class SafeBuffer : SafeHandleZeroOrMinusOneIsInvalid
    {
        // Steal UIntPtr.MaxValue as our uninitialized value.
        private static readonly UIntPtr Uninitialized = (UIntPtr.Size == 4) ?
            ((UIntPtr)UInt32.MaxValue) : ((UIntPtr)UInt64.MaxValue);

        private UIntPtr _numBytes;

        protected SafeBuffer(bool ownsHandle) : base(ownsHandle)
        {
            _numBytes = Uninitialized;
        }

        /// <summary>
        /// Specifies the size of the region of memory, in bytes.  Must be
        /// called before using the SafeBuffer.
        /// </summary>
        /// <param name="numBytes">Number of valid bytes in memory.</param>
        [CLSCompliant(false)]
        public void Initialize(ulong numBytes)
        {
            // Note: numBytes is unsigned, so a negative-value check would be dead code.
            if (IntPtr.Size == 4 && numBytes > UInt32.MaxValue)
                throw new ArgumentOutOfRangeException("numBytes", Environment.GetResourceString("ArgumentOutOfRange_AddressSpace"));
            Contract.EndContractBlock();

            if (numBytes >= (ulong)Uninitialized)
                throw new ArgumentOutOfRangeException("numBytes", Environment.GetResourceString("ArgumentOutOfRange_UIntPtrMax-1"));

            _numBytes = (UIntPtr)numBytes;
        }

        /// <summary>
        /// Specifies the size of the region in memory, as the number of
        /// elements in an array.  Must be called before using the SafeBuffer.
        /// </summary>
        [CLSCompliant(false)]
        public void Initialize(uint numElements, uint sizeOfEachElement)
        {
            // Bug fix: the multiplication must be done in 64-bit arithmetic.
            // numElements * sizeOfEachElement in uint arithmetic silently wraps,
            // which would let an oversized buffer slip past both range checks
            // below. (The old negative-value checks on unsigned parameters were
            // dead code and have been removed.)
            ulong totalBytes = (ulong)numElements * sizeOfEachElement;

            if (IntPtr.Size == 4 && totalBytes > UInt32.MaxValue)
                throw new ArgumentOutOfRangeException("numElements", Environment.GetResourceString("ArgumentOutOfRange_AddressSpace"));
            Contract.EndContractBlock();

            if (totalBytes >= (ulong)Uninitialized)
                throw new ArgumentOutOfRangeException("numElements", Environment.GetResourceString("ArgumentOutOfRange_UIntPtrMax-1"));

            _numBytes = (UIntPtr)totalBytes;
        }

        /// <summary>
        /// Specifies the size of the region in memory, as the number of
        /// elements in an array.  Must be called before using the SafeBuffer.
        /// </summary>
        [CLSCompliant(false)]
        public void Initialize<T>(uint numElements) where T : struct
        {
            Initialize(numElements, Marshal.AlignedSizeOf<T>());
        }

        // Callers should ensure that they check whether the pointer ref param
        // is null when AcquirePointer returns.  If it is not null, they must
        // call ReleasePointer in a CER.  This method calls DangerousAddRef
        // & exposes the pointer.  Unlike Read, it does not alter the "current
        // position" of the pointer.  Here's how to use it:
        //
        //        byte* pointer = null;
        //        RuntimeHelpers.PrepareConstrainedRegions();
        //        try {
        //            safeBuffer.AcquirePointer(ref pointer);
        //            // Use pointer here, with your own bounds checking
        //        }
        //        finally {
        //            if (pointer != null)
        //                safeBuffer.ReleasePointer();
        //        }
        //
        // Note: If you cast this byte* to a T*, you have to worry about
        // whether your pointer is aligned.  Additionally, you must take
        // responsibility for all bounds checking with this pointer.

        /// <summary>
        /// Obtain the pointer from a SafeBuffer for a block of code,
        /// with the express responsibility for bounds checking and calling
        /// ReleasePointer later within a CER to ensure the pointer can be
        /// freed later.  This method either completes successfully or
        /// throws an exception and returns with pointer set to null.
        /// </summary>
        /// <param name="pointer">A byte*, passed by reference, to receive
        /// the pointer from within the SafeBuffer.  You must set
        /// pointer to null before calling this method.</param>
        [CLSCompliant(false)]
        [ReliabilityContract(Consistency.WillNotCorruptState, Cer.MayFail)]
        public void AcquirePointer(ref byte* pointer)
        {
            if (_numBytes == Uninitialized)
                throw NotInitialized();

            pointer = null;
            // The empty try/populated finally pattern guarantees the AddRef and
            // pointer assignment run atomically with respect to thread aborts.
            RuntimeHelpers.PrepareConstrainedRegions();
            try
            {
            }
            finally
            {
                bool junk = false;
                DangerousAddRef(ref junk);
                pointer = (byte*)handle;
            }
        }

        [ReliabilityContract(Consistency.WillNotCorruptState, Cer.Success)]
        public void ReleasePointer()
        {
            if (_numBytes == Uninitialized)
                throw NotInitialized();

            DangerousRelease();
        }

        /// <summary>
        /// Read a value type from memory at the given offset.  This is
        /// equivalent to:  return *(T*)(bytePtr + byteOffset);
        /// </summary>
        /// <typeparam name="T">The value type to read</typeparam>
        /// <param name="byteOffset">Where to start reading from memory.  You
        /// may have to consider alignment.</param>
        /// <returns>An instance of T read from memory.</returns>
        [CLSCompliant(false)]
        [ReliabilityContract(Consistency.WillNotCorruptState, Cer.MayFail)]
        public T Read<T>(ulong byteOffset) where T : struct
        {
            if (_numBytes == Uninitialized)
                throw NotInitialized();

            uint sizeofT = Marshal.SizeOfType(typeof(T));
            byte* ptr = (byte*)handle + byteOffset;
            SpaceCheck(ptr, sizeofT);

            // return *(T*) (_ptr + byteOffset);
            T value;
            bool mustCallRelease = false;
            RuntimeHelpers.PrepareConstrainedRegions();
            try
            {
                DangerousAddRef(ref mustCallRelease);

                GenericPtrToStructure<T>(ptr, out value, sizeofT);
            }
            finally
            {
                if (mustCallRelease)
                    DangerousRelease();
            }
            return value;
        }

        [CLSCompliant(false)]
        [ReliabilityContract(Consistency.WillNotCorruptState, Cer.MayFail)]
        public void ReadArray<T>(ulong byteOffset, T[] array, int index, int count)
            where T : struct
        {
            if (array == null)
                throw new ArgumentNullException("array", Environment.GetResourceString("ArgumentNull_Buffer"));
            if (index < 0)
                throw new ArgumentOutOfRangeException("index", Environment.GetResourceString("ArgumentOutOfRange_NeedNonNegNum"));
            if (count < 0)
                throw new ArgumentOutOfRangeException("count", Environment.GetResourceString("ArgumentOutOfRange_NeedNonNegNum"));
            if (array.Length - index < count)
                throw new ArgumentException(Environment.GetResourceString("Argument_InvalidOffLen"));
            Contract.EndContractBlock();

            if (_numBytes == Uninitialized)
                throw NotInitialized();

            uint sizeofT = Marshal.SizeOfType(typeof(T));
            uint alignedSizeofT = Marshal.AlignedSizeOf<T>();
            byte* ptr = (byte*)handle + byteOffset;
            // Elements are laid out at aligned strides; the checked multiply
            // here widens to long before casting, so it cannot silently wrap.
            SpaceCheck(ptr, checked((ulong)(alignedSizeofT * count)));

            bool mustCallRelease = false;
            RuntimeHelpers.PrepareConstrainedRegions();
            try
            {
                DangerousAddRef(ref mustCallRelease);

                for (int i = 0; i < count; i++)
                    unsafe
                    {
                        GenericPtrToStructure<T>(ptr + alignedSizeofT * i, out array[i + index], sizeofT);
                    }
            }
            finally
            {
                if (mustCallRelease)
                    DangerousRelease();
            }
        }

        /// <summary>
        /// Write a value type to memory at the given offset.  This is
        /// equivalent to:  *(T*)(bytePtr + byteOffset) = value;
        /// </summary>
        /// <typeparam name="T">The type of the value type to write to memory.</typeparam>
        /// <param name="byteOffset">The location in memory to write to.  You
        /// may have to consider alignment.</param>
        /// <param name="value">The value type to write to memory.</param>
        [CLSCompliant(false)]
        [ReliabilityContract(Consistency.WillNotCorruptState, Cer.MayFail)]
        public void Write<T>(ulong byteOffset, T value) where T : struct
        {
            if (_numBytes == Uninitialized)
                throw NotInitialized();

            uint sizeofT = Marshal.SizeOfType(typeof(T));
            byte* ptr = (byte*)handle + byteOffset;
            SpaceCheck(ptr, sizeofT);

            // *((T*) (_ptr + byteOffset)) = value;
            bool mustCallRelease = false;
            RuntimeHelpers.PrepareConstrainedRegions();
            try
            {
                DangerousAddRef(ref mustCallRelease);

                GenericStructureToPtr(ref value, ptr, sizeofT);
            }
            finally
            {
                if (mustCallRelease)
                    DangerousRelease();
            }
        }

        [CLSCompliant(false)]
        [ReliabilityContract(Consistency.WillNotCorruptState, Cer.MayFail)]
        public void WriteArray<T>(ulong byteOffset, T[] array, int index, int count)
            where T : struct
        {
            if (array == null)
                throw new ArgumentNullException("array", Environment.GetResourceString("ArgumentNull_Buffer"));
            if (index < 0)
                throw new ArgumentOutOfRangeException("index", Environment.GetResourceString("ArgumentOutOfRange_NeedNonNegNum"));
            if (count < 0)
                throw new ArgumentOutOfRangeException("count", Environment.GetResourceString("ArgumentOutOfRange_NeedNonNegNum"));
            if (array.Length - index < count)
                throw new ArgumentException(Environment.GetResourceString("Argument_InvalidOffLen"));
            Contract.EndContractBlock();

            if (_numBytes == Uninitialized)
                throw NotInitialized();

            uint sizeofT = Marshal.SizeOfType(typeof(T));
            uint alignedSizeofT = Marshal.AlignedSizeOf<T>();
            byte* ptr = (byte*)handle + byteOffset;
            SpaceCheck(ptr, checked((ulong)(alignedSizeofT * count)));

            bool mustCallRelease = false;
            RuntimeHelpers.PrepareConstrainedRegions();
            try
            {
                DangerousAddRef(ref mustCallRelease);

                for (int i = 0; i < count; i++)
                    unsafe
                    {
                        GenericStructureToPtr(ref array[i + index], ptr + alignedSizeofT * i, sizeofT);
                    }
            }
            finally
            {
                if (mustCallRelease)
                    DangerousRelease();
            }
        }

        /// <summary>
        /// Returns the number of bytes in the memory region.
        /// </summary>
        [CLSCompliant(false)]
        public ulong ByteLength
        {
            [ReliabilityContract(Consistency.WillNotCorruptState, Cer.Success)]
            get
            {
                if (_numBytes == Uninitialized)
                    throw NotInitialized();

                return (ulong)_numBytes;
            }
        }

        /* No indexer.  The perf would be misleadingly bad.  People should use
         * AcquirePointer and ReleasePointer instead.  */

        [ReliabilityContract(Consistency.WillNotCorruptState, Cer.Success)]
        private void SpaceCheck(byte* ptr, ulong sizeInBytes)
        {
            // Two checks so the subtraction below cannot underflow.
            if ((ulong)_numBytes < sizeInBytes)
                NotEnoughRoom();
            if ((ulong)(ptr - (byte*)handle) > ((ulong)_numBytes) - sizeInBytes)
                NotEnoughRoom();
        }

        [ReliabilityContract(Consistency.WillNotCorruptState, Cer.Success)]
        private static void NotEnoughRoom()
        {
            throw new ArgumentException(Environment.GetResourceString("Arg_BufferTooSmall"));
        }

        [ReliabilityContract(Consistency.WillNotCorruptState, Cer.Success)]
        private static InvalidOperationException NotInitialized()
        {
            Contract.Assert(false, "Uninitialized SafeBuffer!  Someone needs to call Initialize before using this instance!");
            return new InvalidOperationException(Environment.GetResourceString("InvalidOperation_MustCallInitialize"));
        }

        // FCALL limitations mean we can't have generic FCALL methods.  However, we
        // can pass TypedReferences to FCALL methods.
        [ReliabilityContract(Consistency.WillNotCorruptState, Cer.Success)]
        internal static void GenericPtrToStructure<T>(byte* ptr, out T structure, uint sizeofT) where T : struct
        {
            structure = default(T);  // Dummy assignment to silence the compiler
            PtrToStructureNative(ptr, __makeref(structure), sizeofT);
        }

        [MethodImpl(MethodImplOptions.InternalCall)]
        [ReliabilityContract(Consistency.WillNotCorruptState, Cer.Success)]
        private static extern void PtrToStructureNative(byte* ptr, /*out T*/ TypedReference structure, uint sizeofT);

        [ReliabilityContract(Consistency.WillNotCorruptState, Cer.Success)]
        internal static void GenericStructureToPtr<T>(ref T structure, byte* ptr, uint sizeofT) where T : struct
        {
            StructureToPtrNative(__makeref(structure), ptr, sizeofT);
        }

        [MethodImpl(MethodImplOptions.InternalCall)]
        [ReliabilityContract(Consistency.WillNotCorruptState, Cer.Success)]
        private static extern void StructureToPtrNative(/*ref T*/ TypedReference structure, byte* ptr, uint sizeofT);
    }
}
using MatterHackers.Agg.VertexSource;
using MatterHackers.VectorMath;

//----------------------------------------------------------------------------
// Anti-Grain Geometry - Version 2.4
// Copyright (C) 2002-2005 Maxim Shemanarev (http://www.antigrain.com)
//
// Permission to copy, use, modify, sell and distribute this software
// is granted provided this copyright notice appears in all copies.
// This software is provided "as is" without express or implied
// warranty, and with no claim as to its suitability for any purpose.
//
//----------------------------------------------------------------------------
// Contact: mcseem@antigrain.com
//          mcseemagg@yahoo.com
//          http://www.antigrain.com
//----------------------------------------------------------------------------
//
// classes bezier_ctrl_impl, bezier_ctrl
//
//----------------------------------------------------------------------------
using System;
using System.Collections.Generic;

namespace MatterHackers.Agg.UI
{
    //--------------------------------------------------------bezier_ctrl_impl
    /// <summary>
    /// Interactive cubic Bezier editor. Renders seven vertex-source paths:
    /// two control lines (0, 1), the curve itself (2), and four draggable
    /// control points (3-6). Dragging is delegated to an inner polygon_ctrl_impl.
    /// </summary>
    public class bezier_ctrl_impl : SimpleVertexSourceWidget
    {
        private Curve4 m_curve = new Curve4();
        private VertexSource.Ellipse m_ellipse;
        private Stroke m_stroke;
        private polygon_ctrl_impl m_poly;
        private int m_idx;

        public bezier_ctrl_impl()
            : base(new Vector2(0, 0))
        {
            // m_curve is created by its field initializer, so it is valid here.
            m_stroke = new Stroke(m_curve);
            m_poly = new polygon_ctrl_impl(4, 5.0);
            m_idx = 0;
            m_ellipse = new MatterHackers.Agg.VertexSource.Ellipse();

            m_poly.in_polygon_check(false);
            // Default control polygon: a gentle S-like arrangement.
            m_poly.SetXN(0, 100.0);
            m_poly.SetYN(0, 0.0);
            m_poly.SetXN(1, 100.0);
            m_poly.SetYN(1, 50.0);
            m_poly.SetXN(2, 50.0);
            m_poly.SetYN(2, 100.0);
            m_poly.SetXN(3, 0.0);
            m_poly.SetYN(3, 100.0);
        }

        /// <summary>Sets all four control points and re-initializes the curve.</summary>
        public void curve(double x1, double y1, double x2, double y2,
                          double x3, double y3, double x4, double y4)
        {
            m_poly.SetXN(0, x1);
            m_poly.SetYN(0, y1);
            m_poly.SetXN(1, x2);
            m_poly.SetYN(1, y2);
            m_poly.SetXN(2, x3);
            m_poly.SetYN(2, y3);
            m_poly.SetXN(3, x4);
            m_poly.SetYN(3, y4);
            curve();
        }

        /// <summary>Re-initializes the curve from the current control points and returns it.</summary>
        public Curve4 curve()
        {
            m_curve.init(m_poly.GetXN(0), m_poly.GetYN(0),
                         m_poly.GetXN(1), m_poly.GetYN(1),
                         m_poly.GetXN(2), m_poly.GetYN(2),
                         m_poly.GetXN(3), m_poly.GetYN(3));
            return m_curve;
        }

        // Control-point accessors.
        public double x1() { return m_poly.GetXN(0); }
        public double y1() { return m_poly.GetYN(0); }
        public double x2() { return m_poly.GetXN(1); }
        public double y2() { return m_poly.GetYN(1); }
        public double x3() { return m_poly.GetXN(2); }
        public double y3() { return m_poly.GetYN(2); }
        public double x4() { return m_poly.GetXN(3); }
        public double y4() { return m_poly.GetYN(3); }

        public void x1(double x) { m_poly.SetXN(0, x); }
        public void y1(double y) { m_poly.SetYN(0, y); }
        public void x2(double x) { m_poly.SetXN(1, x); }
        public void y2(double y) { m_poly.SetYN(1, y); }
        public void x3(double x) { m_poly.SetXN(2, x); }
        public void y3(double y) { m_poly.SetYN(2, y); }
        public void x4(double x) { m_poly.SetXN(3, x); }
        public void y4(double y) { m_poly.SetYN(3, y); }

        public void line_width(double w) { m_stroke.Width = w; }
        public double line_width() { return m_stroke.Width; }

        public void point_radius(double r) { m_poly.point_radius(r); }
        public double point_radius() { return m_poly.point_radius(); }

        public override void OnMouseDown(MouseEventArgs mouseEvent)
        {
            double x = mouseEvent.X;
            double y = mouseEvent.Y;
            ParentToChildTransform.inverse_transform(ref x, ref y);
            m_poly.OnMouseDown(new MouseEventArgs(mouseEvent, x, y));
            Invalidate();
            base.OnMouseDown(mouseEvent);
        }

        public override void OnMouseUp(MouseEventArgs mouseEvent)
        {
            double x = mouseEvent.X;
            double y = mouseEvent.Y;
            // NOTE(review): unlike OnMouseDown/OnMouseMove, no inverse transform
            // is applied here; preserved as-is — confirm whether intentional.
            m_poly.OnMouseUp(new MouseEventArgs(mouseEvent, x, y));
            Invalidate();
            base.OnMouseUp(mouseEvent);
        }

        public override void OnMouseMove(MouseEventArgs mouseEvent)
        {
            double x = mouseEvent.X;
            double y = mouseEvent.Y;
            ParentToChildTransform.inverse_transform(ref x, ref y);
            m_poly.OnMouseMove(new MouseEventArgs(mouseEvent, x, y));
            Invalidate();
            BoundsRelativeToParent = m_poly.BoundsRelativeToParent;
            Invalidate();
            base.OnMouseMove(mouseEvent);
        }

        public override void OnKeyDown(KeyEventArgs keyEvent)
        {
            // this must be called first to ensure we get the correct Handled state
            base.OnKeyDown(keyEvent);
            if (!keyEvent.Handled)
            {
                m_poly.OnKeyDown(keyEvent);
            }
        }

        // Vertex source interface
        public override int num_paths()
        {
            return 7;
        }

        public override IEnumerable<VertexData> Vertices()
        {
            throw new NotImplementedException();
        }

        public override void rewind(int idx)
        {
            m_poly.rewind(0);
            m_idx = idx;
            m_curve.approximation_scale(1);
            switch (idx)
            {
                default:
                case 0:                 // Control line 1
                    m_curve.init(m_poly.GetXN(0), m_poly.GetYN(0),
                                 (m_poly.GetXN(0) + m_poly.GetXN(1)) * 0.5,
                                 (m_poly.GetYN(0) + m_poly.GetYN(1)) * 0.5,
                                 (m_poly.GetXN(0) + m_poly.GetXN(1)) * 0.5,
                                 (m_poly.GetYN(0) + m_poly.GetYN(1)) * 0.5,
                                 m_poly.GetXN(1), m_poly.GetYN(1));
                    m_stroke.rewind(0);
                    break;

                case 1:                 // Control line 2
                    m_curve.init(m_poly.GetXN(2), m_poly.GetYN(2),
                                 (m_poly.GetXN(2) + m_poly.GetXN(3)) * 0.5,
                                 (m_poly.GetYN(2) + m_poly.GetYN(3)) * 0.5,
                                 (m_poly.GetXN(2) + m_poly.GetXN(3)) * 0.5,
                                 (m_poly.GetYN(2) + m_poly.GetYN(3)) * 0.5,
                                 m_poly.GetXN(3), m_poly.GetYN(3));
                    m_stroke.rewind(0);
                    break;

                case 2:                 // Curve itself
                    m_curve.init(m_poly.GetXN(0), m_poly.GetYN(0),
                                 m_poly.GetXN(1), m_poly.GetYN(1),
                                 m_poly.GetXN(2), m_poly.GetYN(2),
                                 m_poly.GetXN(3), m_poly.GetYN(3));
                    m_stroke.rewind(0);
                    break;

                case 3:                 // Point 1
                    m_ellipse.init(m_poly.GetXN(0), m_poly.GetYN(0), point_radius(), point_radius(), 20);
                    m_ellipse.rewind(0);
                    break;

                case 4:                 // Point 2
                    m_ellipse.init(m_poly.GetXN(1), m_poly.GetYN(1), point_radius(), point_radius(), 20);
                    m_ellipse.rewind(0);
                    break;

                case 5:                 // Point 3
                    m_ellipse.init(m_poly.GetXN(2), m_poly.GetYN(2), point_radius(), point_radius(), 20);
                    m_ellipse.rewind(0);
                    break;

                case 6:                 // Point 4
                    m_ellipse.init(m_poly.GetXN(3), m_poly.GetYN(3), point_radius(), point_radius(), 20);
                    m_ellipse.rewind(0);
                    break;
            }
        }

        public override ShapePath.FlagsAndCommand vertex(out double x, out double y)
        {
            x = 0;
            y = 0;
            ShapePath.FlagsAndCommand cmd = ShapePath.FlagsAndCommand.Stop;
            switch (m_idx)
            {
                case 0:
                case 1:
                case 2:
                    cmd = m_stroke.vertex(out x, out y);
                    break;

                case 3:
                case 4:
                case 5:
                case 6:
                case 7: // NOTE(review): unreachable — num_paths() is 7, so valid
                        // indices are 0..6; kept for parity with upstream AGG.
                    cmd = m_ellipse.vertex(out x, out y);
                    break;
            }

            if (!ShapePath.is_stop(cmd))
            {
                ParentToChildTransform.transform(ref x, ref y);
            }

            return cmd;
        }
    };

    //----------------------------------------------------------bezier_ctrl
    //template<class IColorType>
    /// <summary>Colored bezier_ctrl_impl: adds a single line color.</summary>
    public class bezier_ctrl : bezier_ctrl_impl
    {
        private ColorF m_color;

        public bezier_ctrl()
        {
            m_color = new ColorF(0.0, 0.0, 0.0);
        }

        public void line_color(IColorType c)
        {
            m_color = c.ToColorF();
        }

        public override IColorType color(int i)
        {
            return m_color;
        }
    };

    //--------------------------------------------------------curve3_ctrl_impl
    /// <summary>
    /// Interactive quadratic Bezier editor. Renders six vertex-source paths:
    /// two control lines (0, 1), the curve itself (2), and three draggable
    /// control points (3-5).
    /// </summary>
    public class curve3_ctrl_impl : SimpleVertexSourceWidget
    {
        private Curve3 m_curve;
        private VertexSource.Ellipse m_ellipse;
        private Stroke m_stroke;
        private polygon_ctrl_impl m_poly;
        private int m_idx;

        public curve3_ctrl_impl()
            : base(new Vector2())
        {
            // Bug fix: m_curve must be constructed before the Stroke that wraps
            // it. Previously "new Stroke(m_curve)" ran while m_curve was still
            // null (this field has no initializer, unlike bezier_ctrl_impl's),
            // so the stroke was built around a null curve.
            m_curve = new Curve3();
            m_stroke = new Stroke(m_curve);
            m_poly = new polygon_ctrl_impl(3, 5.0);
            m_idx = 0;
            m_ellipse = new MatterHackers.Agg.VertexSource.Ellipse();

            m_poly.in_polygon_check(false);
            m_poly.SetXN(0, 100.0);
            m_poly.SetYN(0, 0.0);
            m_poly.SetXN(1, 100.0);
            m_poly.SetYN(1, 50.0);
            m_poly.SetXN(2, 50.0);
            m_poly.SetYN(2, 100.0);
        }

        /// <summary>Sets all three control points and re-initializes the curve.</summary>
        public void curve(double x1, double y1, double x2, double y2, double x3, double y3)
        {
            m_poly.SetXN(0, x1);
            m_poly.SetYN(0, y1);
            m_poly.SetXN(1, x2);
            m_poly.SetYN(1, y2);
            m_poly.SetXN(2, x3);
            m_poly.SetYN(2, y3);
            curve();
        }

        /// <summary>Re-initializes the curve from the current control points and returns it.</summary>
        public Curve3 curve()
        {
            m_curve.init(m_poly.GetXN(0), m_poly.GetYN(0),
                         m_poly.GetXN(1), m_poly.GetYN(1),
                         m_poly.GetXN(2), m_poly.GetYN(2));
            return m_curve;
        }

        // Control-point accessors (private, matching the original port).
        private double x1() { return m_poly.GetXN(0); }
        private double y1() { return m_poly.GetYN(0); }
        private double x2() { return m_poly.GetXN(1); }
        private double y2() { return m_poly.GetYN(1); }
        private double x3() { return m_poly.GetXN(2); }
        private double y3() { return m_poly.GetYN(2); }

        private void x1(double x) { m_poly.SetXN(0, x); }
        private void y1(double y) { m_poly.SetYN(0, y); }
        private void x2(double x) { m_poly.SetXN(1, x); }
        private void y2(double y) { m_poly.SetYN(1, y); }
        private void x3(double x) { m_poly.SetXN(2, x); }
        private void y3(double y) { m_poly.SetYN(2, y); }

        private void line_width(double w) { m_stroke.Width = w; }
        private double line_width() { return m_stroke.Width; }

        private void point_radius(double r) { m_poly.point_radius(r); }
        private double point_radius() { return m_poly.point_radius(); }

        public override bool PositionWithinLocalBounds(double x, double y)
        {
            return false;
        }

        public override void OnMouseDown(MouseEventArgs mouseEvent)
        {
            double x = mouseEvent.X;
            double y = mouseEvent.Y;
            ParentToChildTransform.inverse_transform(ref x, ref y);
            m_poly.OnMouseDown(new MouseEventArgs(mouseEvent, x, y));
        }

        public override void OnMouseUp(MouseEventArgs mouseEvent)
        {
            double x = mouseEvent.X;
            double y = mouseEvent.Y;
            // NOTE(review): no inverse transform here, mirroring bezier_ctrl_impl.
            m_poly.OnMouseUp(new MouseEventArgs(mouseEvent, x, y));
        }

        public override void OnMouseMove(MouseEventArgs mouseEvent)
        {
            double x = mouseEvent.X;
            double y = mouseEvent.Y;
            ParentToChildTransform.inverse_transform(ref x, ref y);
            m_poly.OnMouseMove(new MouseEventArgs(mouseEvent, x, y));
        }

        public override void OnKeyDown(KeyEventArgs keyEvent)
        {
            // this must be called first to ensure we get the correct Handled state
            base.OnKeyDown(keyEvent);
            if (!keyEvent.Handled)
            {
                m_poly.OnKeyDown(keyEvent);
            }
        }

        // Vertex source interface
        public override int num_paths()
        {
            return 6;
        }

        public override IEnumerable<VertexData> Vertices()
        {
            throw new NotImplementedException();
        }

        public override void rewind(int idx)
        {
            m_idx = idx;

            switch (idx)
            {
                default:
                case 0:                 // Control line
                    m_curve.init(m_poly.GetXN(0), m_poly.GetYN(0),
                                 (m_poly.GetXN(0) + m_poly.GetXN(1)) * 0.5,
                                 (m_poly.GetYN(0) + m_poly.GetYN(1)) * 0.5,
                                 m_poly.GetXN(1), m_poly.GetYN(1));
                    m_stroke.rewind(0);
                    break;

                case 1:                 // Control line 2
                    m_curve.init(m_poly.GetXN(1), m_poly.GetYN(1),
                                 (m_poly.GetXN(1) + m_poly.GetXN(2)) * 0.5,
                                 (m_poly.GetYN(1) + m_poly.GetYN(2)) * 0.5,
                                 m_poly.GetXN(2), m_poly.GetYN(2));
                    m_stroke.rewind(0);
                    break;

                case 2:                 // Curve itself
                    m_curve.init(m_poly.GetXN(0), m_poly.GetYN(0),
                                 m_poly.GetXN(1), m_poly.GetYN(1),
                                 m_poly.GetXN(2), m_poly.GetYN(2));
                    m_stroke.rewind(0);
                    break;

                case 3:                 // Point 1
                    m_ellipse.init(m_poly.GetXN(0), m_poly.GetYN(0), point_radius(), point_radius(), 20);
                    m_ellipse.rewind(0);
                    break;

                case 4:                 // Point 2
                    m_ellipse.init(m_poly.GetXN(1), m_poly.GetYN(1), point_radius(), point_radius(), 20);
                    m_ellipse.rewind(0);
                    break;

                case 5:                 // Point 3
                    m_ellipse.init(m_poly.GetXN(2), m_poly.GetYN(2), point_radius(), point_radius(), 20);
                    m_ellipse.rewind(0);
                    break;
            }
        }

        public override ShapePath.FlagsAndCommand vertex(out double x, out double y)
        {
            x = 0;
            y = 0;
            ShapePath.FlagsAndCommand cmd = ShapePath.FlagsAndCommand.Stop;
            switch (m_idx)
            {
                case 0:
                case 1:
                case 2:
                    cmd = m_stroke.vertex(out x, out y);
                    break;

                case 3:
                case 4:
                case 5:
                case 6: // NOTE(review): unreachable — num_paths() is 6, so valid
                        // indices are 0..5; kept for parity with upstream AGG.
                    cmd = m_ellipse.vertex(out x, out y);
                    break;
            }

            if (!ShapePath.is_stop(cmd))
            {
                ParentToChildTransform.transform(ref x, ref y);
            }

            return cmd;
        }
    };

    //----------------------------------------------------------curve3_ctrl
    //template<class IColorType>
    /// <summary>Colored curve3_ctrl_impl: adds a single line color.</summary>
    public class curve3_ctrl : curve3_ctrl_impl
    {
        private IColorType m_color;

        public curve3_ctrl()
        {
            m_color = new ColorF(0.0, 0.0, 0.0);
        }

        public void line_color(IColorType c)
        {
            m_color = c;
        }

        public override IColorType color(int i)
        {
            return m_color;
        }
    };
}
/* insert license info here */
using System;
using System.Collections;

namespace Business.Data.AutoAnalizador
{
    /// <summary>
    /// Generated by MyGeneration using the NHibernate Object Mapping template
    /// </summary>
    [Serializable]
    public sealed class MindrayProtocolo : Business.BaseDataAccess
    {
        #region Private Members
        private bool m_isChanged;
        private int m_idmindrayprotocolo;
        private int m_iddetalleprotocolo;
        private string m_numeroprotocolo;
        private DateTime m_fechaprotocolo;
        private string m_tipomuestra;
        private int m_iditemmindray;
        private string m_paciente;
        private DateTime m_fechanacimiento;
        private string m_sexo;
        private string m_sectorsolicitante;
        private bool m_urgente;
        private bool m_estado;
        #endregion

        #region Default ( Empty ) Class Constuctor
        /// <summary>
        /// default constructor
        /// </summary>
        public MindrayProtocolo()
        {
            m_idmindrayprotocolo = 0;
            m_iddetalleprotocolo = 0;
            m_numeroprotocolo = String.Empty;
            m_fechaprotocolo = DateTime.MinValue;
            m_tipomuestra = String.Empty;
            m_iditemmindray = 0;
            m_paciente = String.Empty;
            m_fechanacimiento = DateTime.MinValue;
            m_sexo = String.Empty;
            m_sectorsolicitante = String.Empty;
            m_urgente = false;
            m_estado = false;
        }
        #endregion // End of Default ( Empty ) Class Constuctor

        #region Required Fields Only Constructor
        /// <summary>
        /// required (not null) fields only constructor
        /// </summary>
        public MindrayProtocolo(
            int iddetalleprotocolo,
            string numeroprotocolo,
            DateTime fechaprotocolo,
            string tipomuestra,
            int iditemmindray,
            string paciente,
            DateTime fechanacimiento,
            string sexo,
            string sectorsolicitante,
            bool urgente,
            bool estado)
            : this()
        {
            m_iddetalleprotocolo = iddetalleprotocolo;
            m_numeroprotocolo = numeroprotocolo;
            m_fechaprotocolo = fechaprotocolo;
            m_tipomuestra = tipomuestra;
            m_iditemmindray = iditemmindray;
            m_paciente = paciente;
            m_fechanacimiento = fechanacimiento;
            m_sexo = sexo;
            m_sectorsolicitante = sectorsolicitante;
            m_urgente = urgente;
            m_estado = estado;
        }
        #endregion // End Required Fields Only Constructor

        /// <summary>
        /// Validates and assigns a length-limited, non-null string field,
        /// tracking the change flag. Bug fix: the original setters passed the
        /// message text in the paramName slot of
        /// ArgumentOutOfRangeException(paramName, actualValue, message); the
        /// arguments are now in the correct order.
        /// </summary>
        private void SetString(ref string field, string value, int maxLength, string propertyName)
        {
            if (value == null)
                throw new ArgumentOutOfRangeException(propertyName, value,
                    "Null value not allowed for " + propertyName);
            if (value.Length > maxLength)
                throw new ArgumentOutOfRangeException(propertyName, value,
                    "Invalid value for " + propertyName + ": maximum length is " + maxLength);
            m_isChanged |= (field != value);
            field = value;
        }

        #region Public Properties
        /// <summary>
        ///
        /// </summary>
        public int IdMindrayProtocolo
        {
            get { return m_idmindrayprotocolo; }
            set
            {
                m_isChanged |= (m_idmindrayprotocolo != value);
                m_idmindrayprotocolo = value;
            }
        }

        /// <summary>
        ///
        /// </summary>
        public int IddetalleProtocolo
        {
            get { return m_iddetalleprotocolo; }
            set
            {
                m_isChanged |= (m_iddetalleprotocolo != value);
                m_iddetalleprotocolo = value;
            }
        }

        /// <summary>
        /// Non-null, at most 50 characters.
        /// </summary>
        public string NumeroProtocolo
        {
            get { return m_numeroprotocolo; }
            set { SetString(ref m_numeroprotocolo, value, 50, "NumeroProtocolo"); }
        }

        /// <summary>
        ///
        /// </summary>
        public DateTime FechaProtocolo
        {
            get { return m_fechaprotocolo; }
            set
            {
                m_isChanged |= (m_fechaprotocolo != value);
                m_fechaprotocolo = value;
            }
        }

        /// <summary>
        /// Non-null, at most 50 characters.
        /// </summary>
        public string TipoMuestra
        {
            get { return m_tipomuestra; }
            set { SetString(ref m_tipomuestra, value, 50, "TipoMuestra"); }
        }

        /// <summary>
        ///
        /// </summary>
        public int IditemMindray
        {
            get { return m_iditemmindray; }
            set
            {
                m_isChanged |= (m_iditemmindray != value);
                m_iditemmindray = value;
            }
        }

        /// <summary>
        /// Non-null, at most 200 characters.
        /// </summary>
        public string Paciente
        {
            get { return m_paciente; }
            set { SetString(ref m_paciente, value, 200, "Paciente"); }
        }

        /// <summary>
        ///
        /// </summary>
        public DateTime FechaNacimiento
        {
            get { return m_fechanacimiento; }
            set
            {
                m_isChanged |= (m_fechanacimiento != value);
                m_fechanacimiento = value;
            }
        }

        /// <summary>
        /// Non-null, at most 1 character.
        /// </summary>
        public string Sexo
        {
            get { return m_sexo; }
            set { SetString(ref m_sexo, value, 1, "Sexo"); }
        }

        /// <summary>
        /// Non-null, at most 150 characters.
        /// </summary>
        public string SectorSolicitante
        {
            get { return m_sectorsolicitante; }
            set { SetString(ref m_sectorsolicitante, value, 150, "SectorSolicitante"); }
        }

        /// <summary>
        ///
        /// </summary>
        public bool Urgente
        {
            get { return m_urgente; }
            set
            {
                m_isChanged |= (m_urgente != value);
                m_urgente = value;
            }
        }

        /// <summary>
        ///
        /// </summary>
        public bool Estado
        {
            get { return m_estado; }
            set
            {
                m_isChanged |= (m_estado != value);
                m_estado = value;
            }
        }

        /// <summary>
        /// Returns whether or not the object has changed it's values.
        /// </summary>
        public bool IsChanged
        {
            get { return m_isChanged; }
        }
        #endregion
    }
}
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE file in the project root for more information.

using System.Diagnostics;
using System.IO.PortsTests;
using System.Text;
using System.Threading;
using System.Threading.Tasks;
using Legacy.Support;
using Xunit;
using Microsoft.DotNet.XUnitExtensions;

namespace System.IO.Ports.Tests
{
    // Generic tests for SerialPort.ReadTo: exception behavior when the port is not open,
    // ReadTimeout accuracy, parity-replace handling and BytesToRead bookkeeping.
    // Many cases need real hardware (one serial port or a null-modem pair).
    public class ReadTo_Generic : PortsTest
    {
        // Set bounds for random timeout values.
        // If the min is too low, read will not time out accurately and the test case will fail.
        private const int minRandomTimeout = 250;

        // If the max is too large then the test case will take forever to run.
        private const int maxRandomTimeout = 2000;

        // If the percentage difference between the expected timeout and the actual timeout
        // found through Stopwatch is greater than this then the timeout value was not correctly
        // honored by the read method and the test case fails.
        private const double maxPercentageDifference = .15;

        // The number of random bytes to receive for parity testing.
        private const int numRndBytesParity = 8;

        // The number of random bytes to receive for BytesToRead testing.
        private const int numRndBytesToRead = 16;

        // The number of new lines to insert into the string, not including the one at the end,
        // for BytesToRead testing.
        private const int DEFAULT_NUMBER_NEW_LINES = 2;

        private const byte DEFAULT_NEW_LINE = (byte)'\n';

        // Number of timed ReadTo attempts averaged in VerifyTimeout.
        private const int NUM_TRYS = 5;

        #region Test Cases

        [Fact]
        public void ReadWithoutOpen()
        {
            using (SerialPort com = new SerialPort())
            {
                Debug.WriteLine("Verifying read method throws exception without a call to Open()");
                VerifyReadException(com, typeof(InvalidOperationException));
            }
        }

        [ConditionalFact(nameof(HasOneSerialPort))]
        public void ReadAfterFailedOpen()
        {
            using (SerialPort com = new SerialPort("BAD_PORT_NAME"))
            {
                Debug.WriteLine("Verifying read method throws exception with a failed call to Open()");

                // Since the PortName is set to a bad port name, Open will throw an exception;
                // however we don't care which one, since we are verifying a read method.
                Assert.ThrowsAny<Exception>(() => com.Open());
                VerifyReadException(com, typeof(InvalidOperationException));
            }
        }

        [ConditionalFact(nameof(HasOneSerialPort))]
        public void ReadAfterClose()
        {
            using (SerialPort com = new SerialPort(TCSupport.LocalMachineSerialInfo.FirstAvailablePortName))
            {
                Debug.WriteLine("Verifying read method throws exception after a call to Cloes()");
                com.Open();
                com.Close();
                VerifyReadException(com, typeof(InvalidOperationException));
            }
        }

        [Trait(XunitConstants.Category, XunitConstants.IgnoreForCI)] // Timing-sensitive
        [ConditionalFact(nameof(HasOneSerialPort))]
        public void Timeout()
        {
            using (SerialPort com = new SerialPort(TCSupport.LocalMachineSerialInfo.FirstAvailablePortName))
            {
                Random rndGen = new Random(-55);
                com.ReadTimeout = rndGen.Next(minRandomTimeout, maxRandomTimeout);
                Debug.WriteLine("Verifying ReadTimeout={0}", com.ReadTimeout);
                com.Open();

                VerifyTimeout(com);
            }
        }

        [Trait(XunitConstants.Category, XunitConstants.IgnoreForCI)] // Timing-sensitive
        [ConditionalFact(nameof(HasOneSerialPort))]
        public void SuccessiveReadTimeoutNoData()
        {
            using (SerialPort com = new SerialPort(TCSupport.LocalMachineSerialInfo.FirstAvailablePortName))
            {
                Random rndGen = new Random(-55);
                com.ReadTimeout = rndGen.Next(minRandomTimeout, maxRandomTimeout);
                // com.Encoding = new System.Text.UTF7Encoding();
                com.Encoding = Encoding.Unicode;
                Debug.WriteLine("Verifying ReadTimeout={0} with successive call to read method and no data", com.ReadTimeout);
                com.Open();

                // First timeout, then verify subsequent reads also time out accurately.
                Assert.Throws<TimeoutException>(() => com.ReadTo(com.NewLine));

                VerifyTimeout(com);
            }
        }

        [ConditionalFact(nameof(HasNullModem))]
        public void SuccessiveReadTimeoutSomeData()
        {
            using (SerialPort com1 = new SerialPort(TCSupport.LocalMachineSerialInfo.FirstAvailablePortName))
            {
                Random rndGen = new Random(-55);
                var t = new Task(WriteToCom1);

                com1.ReadTimeout = rndGen.Next(minRandomTimeout, maxRandomTimeout);
                com1.Encoding = new UTF8Encoding();
                Debug.WriteLine("Verifying ReadTimeout={0} with successive call to read method and some data being received in the first call", com1.ReadTimeout);
                com1.Open();

                // Call WriteToCom1 asynchronously; this will write to com1 some time before
                // the following call to a read method times out.
                t.Start();

                try
                {
                    com1.ReadTo(com1.NewLine);
                }
                catch (TimeoutException)
                {
                    // Expected: the peer may not write a full NewLine before the timeout elapses.
                }

                TCSupport.WaitForTaskCompletion(t);

                // Make sure there are no bytes in the buffer so the next call to read will time out.
                com1.DiscardInBuffer();
                VerifyTimeout(com1);
            }
        }

        // Helper for SuccessiveReadTimeoutSomeData: sleeps a random period, then writes an
        // empty line to the second port of the null-modem pair.
        private void WriteToCom1()
        {
            using (SerialPort com2 = new SerialPort(TCSupport.LocalMachineSerialInfo.SecondAvailablePortName))
            {
                Random rndGen = new Random(-55);
                int sleepPeriod = rndGen.Next(minRandomTimeout, maxRandomTimeout / 2);

                // Sleep some random period with a maximum duration of half the largest possible
                // timeout value for a read method on COM1.
                Thread.Sleep(sleepPeriod);
                com2.Open();
                com2.WriteLine("");

                if (com2.IsOpen)
                    com2.Close();
            }
        }

        [KnownFailure]
        [ConditionalFact(nameof(HasNullModem))]
        public void DefaultParityReplaceByte()
        {
            // -1 means "use the port's default ParityReplace byte".
            VerifyParityReplaceByte(-1, numRndBytesParity - 2);
        }

        [KnownFailure]
        [ConditionalFact(nameof(HasNullModem))]
        public void NoParityReplaceByte()
        {
            Random rndGen = new Random(-55);
            // '\0' disables parity replacement; the raw (corrupted) byte should come through.
            VerifyParityReplaceByte('\0', rndGen.Next(0, numRndBytesParity - 1));
        }

        [KnownFailure]
        [ConditionalFact(nameof(HasNullModem))]
        public void RNDParityReplaceByte()
        {
            Random rndGen = new Random(-55);
            VerifyParityReplaceByte(rndGen.Next(0, 128), 0);
        }

        [KnownFailure]
        [ConditionalFact(nameof(HasNullModem))]
        public void ParityErrorOnLastByte()
        {
            using (SerialPort com1 = new SerialPort(TCSupport.LocalMachineSerialInfo.FirstAvailablePortName))
            using (SerialPort com2 = new SerialPort(TCSupport.LocalMachineSerialInfo.SecondAvailablePortName))
            {
                Random rndGen = new Random(15);
                byte[] bytesToWrite = new byte[numRndBytesParity];
                char[] expectedChars = new char[numRndBytesParity];

                /* 1 Additional character gets added to the input buffer when the parity error
                   occurs on the last byte of a stream. We are verifying that besides this,
                   everything gets read in correctly. See NDP Whidbey: 24216 for more info. */
                Debug.WriteLine("Verifying default ParityReplace byte with a parity errro on the last byte");

                // Generate random characters without a parity error.
                for (int i = 0; i < bytesToWrite.Length; i++)
                {
                    byte randByte = (byte)rndGen.Next(0, 128);

                    bytesToWrite[i] = randByte;
                    expectedChars[i] = (char)randByte;
                }

                // Create a parity error on the last byte.
                bytesToWrite[bytesToWrite.Length - 1] = (byte)(bytesToWrite[bytesToWrite.Length - 1] | 0x80);
                // Set the last expected char to be the ParityReplace byte.
                expectedChars[expectedChars.Length - 1] = (char)com1.ParityReplace;

                com1.Parity = Parity.Space;
                com1.DataBits = 7;
                com1.ReadTimeout = 250;

                com1.Open();
                com2.Open();

                com2.Write(bytesToWrite, 0, bytesToWrite.Length);
                com2.Write(com1.NewLine);

                TCSupport.WaitForReadBufferToLoad(com1, bytesToWrite.Length + com1.NewLine.Length);

                string strRead = com1.ReadTo(com1.NewLine);
                char[] actualChars = strRead.ToCharArray();

                Assert.Equal(expectedChars, actualChars);

                if (1 < com1.BytesToRead)
                {
                    Fail("ERROR!!!: Expected BytesToRead=0 actual={0}", com1.BytesToRead);
                    Debug.WriteLine("ByteRead={0}, {1}", com1.ReadByte(), bytesToWrite[bytesToWrite.Length - 1]);
                }

                com1.DiscardInBuffer();

                // Re-run with a clean (no parity error) last byte to make sure the port still works.
                bytesToWrite[bytesToWrite.Length - 1] = (byte)'\n';
                expectedChars[expectedChars.Length - 1] = (char)bytesToWrite[bytesToWrite.Length - 1];

                VerifyRead(com1, com2, bytesToWrite, expectedChars);
            }
        }

        [ConditionalFact(nameof(HasNullModem))]
        public void BytesToRead_RND_Buffer_Size()
        {
            Random rndGen = new Random(-55);

            VerifyBytesToRead(rndGen.Next(1, 2 * numRndBytesToRead));
        }

        [ConditionalFact(nameof(HasNullModem))]
        public void BytesToRead_1_Buffer_Size()
        {
            VerifyBytesToRead(1);
        }

        [ConditionalFact(nameof(HasNullModem))]
        public void BytesToRead_Equal_Buffer_Size()
        {
            VerifyBytesToRead(numRndBytesToRead);
        }
        #endregion

        #region Verification for Test Cases

        // Times NUM_TRYS ReadTo calls that are expected to time out, averages the elapsed
        // milliseconds and fails when the average deviates from ReadTimeout by more than
        // maxPercentageDifference.
        private void VerifyTimeout(SerialPort com)
        {
            Stopwatch timer = new Stopwatch();
            int expectedTime = com.ReadTimeout;
            int actualTime = 0;
            double percentageDifference;

            // Warm-up read; not timed.
            Assert.Throws<TimeoutException>(() => com.ReadTo(com.NewLine));

            // Raise priority to reduce scheduling noise while timing.
            Thread.CurrentThread.Priority = ThreadPriority.Highest;

            for (int i = 0; i < NUM_TRYS; i++)
            {
                timer.Start();

                Assert.Throws<TimeoutException>(() => com.ReadTo(com.NewLine));

                timer.Stop();
                actualTime += (int)timer.ElapsedMilliseconds;
                timer.Reset();
            }

            Thread.CurrentThread.Priority = ThreadPriority.Normal;
            actualTime /= NUM_TRYS;
            percentageDifference = Math.Abs((expectedTime - actualTime) / (double)expectedTime);

            // Verify that the percentage difference between the expected and actual timeout
            // is less than maxPercentageDifference.
            if (maxPercentageDifference < percentageDifference)
            {
                Fail("ERROR!!!: The read method timedout in {0} expected {1} percentage difference: {2}", actualTime, expectedTime, percentageDifference);
            }
        }

        // Asserts that ReadTo throws exactly the given exception type on this port.
        private void VerifyReadException(SerialPort com, Type expectedException)
        {
            Assert.Throws(expectedException, () => com.ReadTo(com.NewLine));
        }

        // Writes random 7-bit data with one forced parity error at parityErrorIndex and
        // verifies the byte read back matches the expected replacement behavior.
        // parityReplace: -1 = port default, '\0' = replacement disabled, else the replacement byte.
        private void VerifyParityReplaceByte(int parityReplace, int parityErrorIndex)
        {
            using (SerialPort com1 = new SerialPort(TCSupport.LocalMachineSerialInfo.FirstAvailablePortName))
            using (SerialPort com2 = new SerialPort(TCSupport.LocalMachineSerialInfo.SecondAvailablePortName))
            {
                Random rndGen = new Random(-55);
                byte[] bytesToWrite = new byte[numRndBytesParity + 1]; // Plus one to accommodate the NewLine byte
                char[] expectedChars = new char[numRndBytesParity + 1]; // Plus one to accommodate the NewLine byte
                byte expectedByte;

                // Generate random characters without a parity error.
                for (int i = 0; i < numRndBytesParity; i++)
                {
                    byte randByte = (byte)rndGen.Next(0, 128);

                    bytesToWrite[i] = randByte;
                    expectedChars[i] = (char)randByte;
                }

                if (-1 == parityReplace)
                {
                    // If parityReplace is -1 we should just use the default value.
                    expectedByte = com1.ParityReplace;
                }
                else if ('\0' == parityReplace)
                {
                    // If parityReplace is the null character, parity replacement should not occur.
                    com1.ParityReplace = (byte)parityReplace;
                    expectedByte = bytesToWrite[parityErrorIndex];
                }
                else
                {
                    // Else parityReplace was set to a value and we should expect this value to be
                    // returned on a parity error.
                    com1.ParityReplace = (byte)parityReplace;
                    expectedByte = (byte)parityReplace;
                }

                // Create a parity error by setting the highest order bit to true.
                bytesToWrite[parityErrorIndex] = (byte)(bytesToWrite[parityErrorIndex] | 0x80);
                expectedChars[parityErrorIndex] = (char)expectedByte;

                Debug.WriteLine("Verifying ParityReplace={0} with an ParityError at: {1} ", com1.ParityReplace, parityErrorIndex);

                com1.Parity = Parity.Space;
                com1.DataBits = 7;
                com1.Open();
                com2.Open();

                bytesToWrite[numRndBytesParity] = DEFAULT_NEW_LINE;
                expectedChars[numRndBytesParity] = (char)DEFAULT_NEW_LINE;

                VerifyRead(com1, com2, bytesToWrite, expectedChars);
            }
        }

        private void VerifyBytesToRead(int numBytesRead)
        {
            VerifyBytesToRead(numBytesRead, DEFAULT_NUMBER_NEW_LINES);
        }

        // Sends numBytesRead random bytes (ASCII-decoded) containing numNewLines newline
        // separators, plus a trailing newline, then verifies ReadTo/BytesToRead behavior.
        private void VerifyBytesToRead(int numBytesRead, int numNewLines)
        {
            using (SerialPort com1 = new SerialPort(TCSupport.LocalMachineSerialInfo.FirstAvailablePortName))
            using (SerialPort com2 = new SerialPort(TCSupport.LocalMachineSerialInfo.SecondAvailablePortName))
            {
                Random rndGen = new Random(-55);
                byte[] bytesToWrite = new byte[numBytesRead + 1]; // Plus one to accommodate the NewLine byte
                ASCIIEncoding encoding = new ASCIIEncoding();

                // Generate random characters.
                for (int i = 0; i < numBytesRead; i++)
                {
                    byte randByte = (byte)rndGen.Next(0, 256);

                    bytesToWrite[i] = randByte;
                }

                char[] expectedChars = encoding.GetChars(bytesToWrite, 0, bytesToWrite.Length);

                // Sprinkle newlines at random positions (duplicates possible, so the actual
                // number of distinct newlines may be less than numNewLines).
                for (int i = 0; i < numNewLines; i++)
                {
                    int newLineIndex;

                    newLineIndex = rndGen.Next(0, numBytesRead);
                    bytesToWrite[newLineIndex] = (byte)'\n';
                    expectedChars[newLineIndex] = (char)'\n';
                }

                Debug.WriteLine("Verifying BytesToRead with a buffer of: {0} ", numBytesRead);
                com1.Open();
                com2.Open();

                bytesToWrite[numBytesRead] = DEFAULT_NEW_LINE;
                expectedChars[numBytesRead] = (char)DEFAULT_NEW_LINE;

                VerifyRead(com1, com2, bytesToWrite, expectedChars);
            }
        }

        // Writes bytesToWrite on com2, then repeatedly calls com1.ReadTo(NewLine) until it
        // times out, checking each segment's length against the expected newline positions,
        // reassembling the data into actualChars and verifying BytesToRead after each read.
        private void VerifyRead(SerialPort com1, SerialPort com2, byte[] bytesToWrite, char[] expectedChars)
        {
            char[] actualChars = new char[expectedChars.Length];
            int totalBytesRead;
            int totalCharsRead;
            int bytesToRead; // NOTE(review): assigned but never read afterwards — kept for fidelity.
            int lastIndexOfNewLine = -1;

            com2.Write(bytesToWrite, 0, bytesToWrite.Length);
            com1.ReadTimeout = 250;

            TCSupport.WaitForReadBufferToLoad(com1, bytesToWrite.Length);

            totalBytesRead = 0;
            totalCharsRead = 0;

            bytesToRead = com1.BytesToRead;

            while (true)
            {
                string rcvString;
                try
                {
                    rcvString = com1.ReadTo(com1.NewLine);
                }
                catch (TimeoutException)
                {
                    // No more complete lines in the buffer — we're done reading.
                    break;
                }

                // While there are more characters to be read.
                char[] rcvBuffer = rcvString.ToCharArray();
                int charsRead = rcvBuffer.Length;
                int bytesRead = com1.Encoding.GetByteCount(rcvBuffer, 0, charsRead);

                int indexOfNewLine = Array.IndexOf(expectedChars, (char)DEFAULT_NEW_LINE, lastIndexOfNewLine + 1);

                if (indexOfNewLine - (lastIndexOfNewLine + 1) != charsRead)
                {
                    // We have not read all of the characters that we should have.
                    Fail("ERROR!!!: Read did not return all of the characters that were in SerialPort buffer");
                    Debug.WriteLine("indexOfNewLine={0} lastIndexOfNewLine={1} charsRead={2}", indexOfNewLine, lastIndexOfNewLine, charsRead);
                }

                if (expectedChars.Length < totalCharsRead + charsRead)
                {
                    // We have read in more characters than we expect.
                    Fail("ERROR!!!: We have received more characters then were sent");
                }

                Array.Copy(rcvBuffer, 0, actualChars, totalCharsRead, charsRead);
                actualChars[totalCharsRead + charsRead] = (char)DEFAULT_NEW_LINE; // Add the NewLine char into actualChars

                totalBytesRead += bytesRead + 1; // Plus 1 because we read the NewLine char
                totalCharsRead += charsRead + 1; // Plus 1 because we read the NewLine char

                lastIndexOfNewLine = indexOfNewLine;

                if (bytesToWrite.Length - totalBytesRead != com1.BytesToRead)
                {
                    Fail("ERROR!!!: Expected BytesToRead={0} actual={1}", bytesToWrite.Length - totalBytesRead, com1.BytesToRead);
                }

                bytesToRead = com1.BytesToRead;
            } // End while there are more characters to read

            Assert.Equal(expectedChars, actualChars);
        }
        #endregion
    }
}
using System;
using System.Collections.Generic;
using System.Linq;
using Microsoft.Extensions.Logging;
using NPoco;
using Umbraco.Cms.Core;
using Umbraco.Cms.Core.Cache;
using Umbraco.Cms.Core.Models;
using Umbraco.Cms.Core.Persistence.Querying;
using Umbraco.Cms.Core.Persistence.Repositories;
using Umbraco.Cms.Core.Scoping;
using Umbraco.Cms.Core.Strings;
using Umbraco.Cms.Infrastructure.Persistence.Dtos;
using Umbraco.Cms.Infrastructure.Persistence.Factories;
using Umbraco.Cms.Infrastructure.Persistence.Querying;
using Umbraco.Extensions;

namespace Umbraco.Cms.Infrastructure.Persistence.Repositories.Implement
{
    /// <summary>
    /// Represents a repository for doing CRUD operations for <see cref="IMemberType"/>.
    /// Uses a full-data-set cache policy: all member types are cached together and
    /// individual lookups filter the cached set.
    /// </summary>
    internal class MemberTypeRepository : ContentTypeRepositoryBase<IMemberType>, IMemberTypeRepository
    {
        private readonly IShortStringHelper _shortStringHelper;

        public MemberTypeRepository(IScopeAccessor scopeAccessor, AppCaches cache, ILogger<MemberTypeRepository> logger,
            IContentTypeCommonRepository commonRepository, ILanguageRepository languageRepository, IShortStringHelper shortStringHelper)
            : base(scopeAccessor, cache, logger, commonRepository, languageRepository, shortStringHelper)
        {
            _shortStringHelper = shortStringHelper;
        }

        protected override bool SupportsPublishing => MemberType.SupportsPublishingConst;

        protected override IRepositoryCachePolicy<IMemberType, int> CreateCachePolicy()
        {
            return new FullDataSetRepositoryCachePolicy<IMemberType, int>(GlobalIsolatedCache, ScopeAccessor, GetEntityId, /*expires:*/ true);
        }

        // every GetExists method goes cachePolicy.GetSomething which in turns goes PerformGetAll,
        // since this is a FullDataSet policy - and everything is cached
        // so here,
        // every PerformGet/Exists just GetMany() and then filters
        // except PerformGetAll which is the one really doing the job

        protected override IMemberType PerformGet(int id)
            => GetMany().FirstOrDefault(x => x.Id == id);

        protected override IMemberType PerformGet(Guid id)
            => GetMany().FirstOrDefault(x => x.Key == id);

        protected override IEnumerable<IMemberType> PerformGetAll(params Guid[] ids)
        {
            // Empty ids means "get everything"; otherwise filter the cached set by key.
            var all = GetMany();
            return ids.Any() ? all.Where(x => ids.Contains(x.Key)) : all;
        }

        protected override bool PerformExists(Guid id)
            => GetMany().FirstOrDefault(x => x.Key == id) != null;

        protected override IMemberType PerformGet(string alias)
            => GetMany().FirstOrDefault(x => x.Alias.InvariantEquals(alias));

        protected override IEnumerable<IMemberType> GetAllWithFullCachePolicy()
        {
            return CommonRepository.GetAllTypes().OfType<IMemberType>();
        }

        protected override IEnumerable<IMemberType> PerformGetByQuery(IQuery<IMemberType> query)
        {
            // Translate the query into SQL against the subquery, fetch matching node ids,
            // then resolve the entities from the (cached) full set.
            var subQuery = GetSubquery();
            var translator = new SqlTranslator<IMemberType>(subQuery, query);
            var subSql = translator.Translate();
            var sql = GetBaseQuery(false)
                .WhereIn<NodeDto>(x => x.NodeId, subSql)
                .OrderBy<NodeDto>(x => x.SortOrder);
            var ids = Database.Fetch<int>(sql).Distinct().ToArray();

            return ids.Length > 0 ? GetMany(ids).OrderBy(x => x.Name) : Enumerable.Empty<IMemberType>();
        }

        protected override Sql<ISqlContext> GetBaseQuery(bool isCount)
        {
            if (isCount)
            {
                return Sql()
                    .SelectCount()
                    .From<NodeDto>()
                    .InnerJoin<ContentTypeDto>().On<ContentTypeDto, NodeDto>(left => left.NodeId, right => right.NodeId)
                    .Where<NodeDto>(x => x.NodeObjectType == NodeObjectTypeId);
            }

            var sql = Sql()
                .Select<NodeDto>(x => x.NodeId)
                .From<NodeDto>()
                .InnerJoin<ContentTypeDto>().On<ContentTypeDto, NodeDto>(left => left.NodeId, right => right.NodeId)
                .LeftJoin<PropertyTypeDto>().On<PropertyTypeDto, NodeDto>(left => left.ContentTypeId, right => right.NodeId)
                .LeftJoin<MemberPropertyTypeDto>().On<MemberPropertyTypeDto, PropertyTypeDto>(left => left.PropertyTypeId, right => right.Id)
                .LeftJoin<DataTypeDto>().On<DataTypeDto, PropertyTypeDto>(left => left.NodeId, right => right.DataTypeId)
                .LeftJoin<PropertyTypeGroupDto>().On<PropertyTypeGroupDto, NodeDto>(left => left.ContentTypeNodeId, right => right.NodeId)
                .Where<NodeDto>(x => x.NodeObjectType == NodeObjectTypeId);
            return sql;
        }

        // Subquery selecting distinct member-type node ids; used as the inner query when
        // translating an IQuery<IMemberType> in PerformGetByQuery.
        protected Sql<ISqlContext> GetSubquery()
        {
            var sql = Sql()
                .Select("DISTINCT(umbracoNode.id)")
                .From<NodeDto>()
                .InnerJoin<ContentTypeDto>().On<ContentTypeDto, NodeDto>(left => left.NodeId, right => right.NodeId)
                .LeftJoin<PropertyTypeDto>().On<PropertyTypeDto, NodeDto>(left => left.ContentTypeId, right => right.NodeId)
                .LeftJoin<MemberPropertyTypeDto>().On<MemberPropertyTypeDto, PropertyTypeDto>(left => left.PropertyTypeId, right => right.Id)
                .LeftJoin<DataTypeDto>().On<DataTypeDto, PropertyTypeDto>(left => left.NodeId, right => right.DataTypeId)
                .LeftJoin<PropertyTypeGroupDto>().On<PropertyTypeGroupDto, NodeDto>(left => left.ContentTypeNodeId, right => right.NodeId)
                .Where<NodeDto>(x => x.NodeObjectType == NodeObjectTypeId);
            return sql;
        }

        protected override string GetBaseWhereClause()
        {
            return $"{Constants.DatabaseSchema.Tables.Node}.id = @id";
        }

        protected override IEnumerable<string> GetDeleteClauses()
        {
            // Delete child-table rows first, then the content type, then the node itself.
            var l = (List<string>) base.GetDeleteClauses(); // we know it's a list
            l.Add("DELETE FROM cmsMemberType WHERE NodeId = @id");
            l.Add("DELETE FROM cmsContentType WHERE nodeId = @id");
            l.Add("DELETE FROM umbracoNode WHERE id = @id");
            return l;
        }

        protected override Guid NodeObjectTypeId => Cms.Core.Constants.ObjectTypes.MemberType;

        protected override void PersistNewItem(IMemberType entity)
        {
            ValidateAlias(entity);

            entity.AddingEntity();

            // Set a default icon if one is not specified.
            if (entity.Icon.IsNullOrWhiteSpace())
            {
                entity.Icon = Cms.Core.Constants.Icons.Member;
            }

            // By convention we add 9 standard PropertyTypes to an Umbraco MemberType.
            entity.AddPropertyGroup(Cms.Core.Constants.Conventions.Member.StandardPropertiesGroupAlias, Cms.Core.Constants.Conventions.Member.StandardPropertiesGroupName);
            var standardPropertyTypes = ConventionsHelper.GetStandardPropertyTypeStubs(_shortStringHelper);
            foreach (var standardPropertyType in standardPropertyTypes)
            {
                entity.AddPropertyType(standardPropertyType.Value, Cms.Core.Constants.Conventions.Member.StandardPropertiesGroupAlias, Cms.Core.Constants.Conventions.Member.StandardPropertiesGroupName);
            }

            EnsureExplicitDataTypeForBuiltInProperties(entity);
            PersistNewBaseContentType(entity);

            // Handles the MemberTypeDto (cmsMemberType table).
            var memberTypeDtos = ContentTypeFactory.BuildMemberPropertyTypeDtos(entity);
            foreach (var memberTypeDto in memberTypeDtos)
            {
                Database.Insert(memberTypeDto);
            }

            entity.ResetDirtyProperties();
        }

        protected override void PersistUpdatedItem(IMemberType entity)
        {
            ValidateAlias(entity);

            // Updates Modified date.
            entity.UpdatingEntity();

            // Look up parent to get and set the correct Path if ParentId has changed.
            if (entity.IsPropertyDirty("ParentId"))
            {
                var parent = Database.First<NodeDto>("WHERE id = @ParentId", new { ParentId = entity.ParentId });
                entity.Path = string.Concat(parent.Path, ",", entity.Id);
                entity.Level = parent.Level + 1;
                var maxSortOrder = Database.ExecuteScalar<int>(
                    "SELECT coalesce(max(sortOrder),0) FROM umbracoNode WHERE parentid = @ParentId AND nodeObjectType = @NodeObjectType",
                    new { ParentId = entity.ParentId, NodeObjectType = NodeObjectTypeId });
                entity.SortOrder = maxSortOrder + 1;
            }

            EnsureExplicitDataTypeForBuiltInProperties(entity);
            PersistUpdatedBaseContentType(entity);

            // Remove and insert - handle cmsMemberType table.
            Database.Delete<MemberPropertyTypeDto>("WHERE NodeId = @Id", new { Id = entity.Id });
            var memberTypeDtos = ContentTypeFactory.BuildMemberPropertyTypeDtos(entity);
            foreach (var memberTypeDto in memberTypeDtos)
            {
                Database.Insert(memberTypeDto);
            }

            entity.ResetDirtyProperties();
        }

        /// <summary>
        /// Override so we can specify explicit db type's on any property types that are built-in.
        /// </summary>
        /// <param name="propertyEditorAlias"></param>
        /// <param name="storageType"></param>
        /// <param name="propertyTypeAlias"></param>
        /// <returns></returns>
        protected override PropertyType CreatePropertyType(string propertyEditorAlias, ValueStorageType storageType, string propertyTypeAlias)
        {
            // Custom property type constructor logic to set explicit dbtypes for built-in properties.
            var builtinProperties = ConventionsHelper.GetStandardPropertyTypeStubs(_shortStringHelper);
            var readonlyStorageType = builtinProperties.TryGetValue(propertyTypeAlias, out var propertyType);
            storageType = readonlyStorageType ? propertyType.ValueStorageType : storageType;
            return new PropertyType(_shortStringHelper, propertyEditorAlias, storageType, readonlyStorageType, propertyTypeAlias);
        }

        /// <summary>
        /// Ensure that all the built-in membership provider properties have their correct data type
        /// and property editors assigned. This occurs prior to saving so that the correct values are persisted.
        /// </summary>
        /// <param name="memberType"></param>
        private void EnsureExplicitDataTypeForBuiltInProperties(IContentTypeBase memberType)
        {
            var builtinProperties = ConventionsHelper.GetStandardPropertyTypeStubs(_shortStringHelper);

            foreach (var propertyType in memberType.PropertyTypes)
            {
                if (builtinProperties.ContainsKey(propertyType.Alias))
                {
                    // This resets its current data type reference which will be re-assigned
                    // based on the property editor assigned on the next line.
                    if (builtinProperties.TryGetValue(propertyType.Alias, out var propDefinition) && propDefinition != null)
                    {
                        propertyType.DataTypeId = propDefinition.DataTypeId;
                        propertyType.DataTypeKey = propDefinition.DataTypeKey;
                    }
                    else
                    {
                        propertyType.DataTypeId = 0;
                        propertyType.DataTypeKey = default;
                    }
                }
            }
        }
    }
}
// Copyright (c) Microsoft. All rights reserved.
// Licensed under the MIT license. See LICENSE file in the project root for full license information.

using System;
using System.Reflection;
using System.ServiceModel;
using System.ServiceModel.Channels;
using System.Text;
using System.Threading.Tasks;
using Xunit;

// Tests for ChannelFactory<T>: channel creation, identity/equality semantics,
// CommunicationState transitions, async open/close, and AllowCookies behavior.
// All endpoints use FakeAddress — no messages are actually sent.
public class ChannelFactoryTest
{
    [Fact]
    public static void CreateChannel_Of_IRequestChannel_Using_CustomBinding()
    {
        ChannelFactory<IRequestChannel> factory = null;
        ChannelFactory<IRequestChannel> factory2 = null;
        IRequestChannel channel = null;
        IRequestChannel channel2 = null;

        try
        {
            CustomBinding binding = new CustomBinding(new BindingElement[]
            {
                new TextMessageEncodingBindingElement(MessageVersion.Default, Encoding.UTF8),
                new HttpTransportBindingElement()
            });

            EndpointAddress endpointAddress = new EndpointAddress(FakeAddress.HttpAddress);

            // Create the channel factory for the request-reply message exchange pattern.
            factory = new ChannelFactory<IRequestChannel>(binding, endpointAddress);
            factory2 = new ChannelFactory<IRequestChannel>(binding, endpointAddress);

            // Create the channel.
            channel = factory.CreateChannel();
            Assert.True(typeof(IRequestChannel).GetTypeInfo().IsAssignableFrom(channel.GetType().GetTypeInfo()),
                String.Format("Channel type '{0}' was not assignable to '{1}'", channel.GetType(), typeof(IRequestChannel)));

            channel2 = factory2.CreateChannel();
            Assert.True(typeof(IRequestChannel).GetTypeInfo().IsAssignableFrom(channel2.GetType().GetTypeInfo()),
                String.Format("Channel type '{0}' was not assignable to '{1}'", channel2.GetType(), typeof(IRequestChannel)));

            // Validate ToString()
            string toStringResult = channel.ToString();
            string toStringExpected = "System.ServiceModel.Channels.IRequestChannel";
            Assert.Equal<string>(toStringExpected, toStringResult);

            // Validate Equals()
            Assert.StrictEqual<IRequestChannel>(channel, channel);

            // Validate Equals(other channel) negative
            Assert.NotStrictEqual<IRequestChannel>(channel, channel2);

            // Validate Equals("other") negative
            Assert.NotStrictEqual<object>(channel, "other");

            // Validate Equals(null) negative
            Assert.NotStrictEqual<IRequestChannel>(channel, null);
        }
        finally
        {
            if (factory != null)
            {
                factory.Close();
            }

            if (factory2 != null)
            {
                factory2.Close();
            }
        }
    }

    [Fact]
    public static void CreateChannel_Of_IRequestChannel_Using_BasicHttpBinding_Creates_Unique_Instances()
    {
        ChannelFactory<IRequestChannel> factory = null;
        ChannelFactory<IRequestChannel> factory2 = null;
        IRequestChannel channel = null;
        IRequestChannel channel2 = null;

        try
        {
            BasicHttpBinding binding = new BasicHttpBinding();
            EndpointAddress endpointAddress = new EndpointAddress(FakeAddress.HttpAddress);

            // Create the channel factory for the request-reply message exchange pattern.
            factory = new ChannelFactory<IRequestChannel>(binding, endpointAddress);
            factory2 = new ChannelFactory<IRequestChannel>(binding, endpointAddress);

            // Create the channel.
            channel = factory.CreateChannel();
            Assert.True(typeof(IRequestChannel).GetTypeInfo().IsAssignableFrom(channel.GetType().GetTypeInfo()),
                String.Format("Channel type '{0}' was not assignable to '{1}'", channel.GetType(), typeof(IRequestChannel)));

            channel2 = factory2.CreateChannel();
            Assert.True(typeof(IRequestChannel).GetTypeInfo().IsAssignableFrom(channel2.GetType().GetTypeInfo()),
                String.Format("Channel type '{0}' was not assignable to '{1}'", channel2.GetType(), typeof(IRequestChannel)));

            // Validate ToString()
            string toStringResult = channel.ToString();
            string toStringExpected = "System.ServiceModel.Channels.IRequestChannel";
            Assert.Equal<string>(toStringExpected, toStringResult);

            // Validate Equals()
            Assert.StrictEqual<IRequestChannel>(channel, channel);

            // Validate Equals(other channel) negative
            Assert.NotStrictEqual<IRequestChannel>(channel, channel2);

            // Validate Equals("other") negative
            Assert.NotStrictEqual<object>(channel, "other");

            // Validate Equals(null) negative
            Assert.NotStrictEqual<IRequestChannel>(channel, null);
        }
        finally
        {
            if (factory != null)
            {
                factory.Close();
            }

            if (factory2 != null)
            {
                factory2.Close();
            }
        }
    }

    [Fact]
    public static void ChannelFactory_Verify_CommunicationStates()
    {
        ChannelFactory<IRequestChannel> factory = null;
        IRequestChannel channel = null;

        try
        {
            BasicHttpBinding binding = new BasicHttpBinding();
            EndpointAddress endpointAddress = new EndpointAddress(FakeAddress.HttpAddress);

            // Create the channel factory for the request-reply message exchange pattern.
            factory = new ChannelFactory<IRequestChannel>(binding, endpointAddress);
            Assert.Equal(CommunicationState.Created, factory.State);

            // Create the channel; this implicitly opens the factory.
            channel = factory.CreateChannel();
            Assert.True(typeof(IRequestChannel).GetTypeInfo().IsAssignableFrom(channel.GetType().GetTypeInfo()),
                String.Format("Channel type '{0}' was not assignable to '{1}'", channel.GetType(), typeof(IRequestChannel)));
            Assert.Equal(CommunicationState.Opened, factory.State);

            // Validate ToString()
            string toStringResult = channel.ToString();
            string toStringExpected = "System.ServiceModel.Channels.IRequestChannel";
            Assert.Equal<string>(toStringExpected, toStringResult);

            factory.Close();
            Assert.Equal(CommunicationState.Closed, factory.State);
        }
        finally
        {
            if (factory != null)
            {
                // check that there are no effects after calling Abort after Close
                factory.Abort();
                Assert.Equal(CommunicationState.Closed, factory.State);

                // check that there are no effects after calling Close again
                factory.Close();
                Assert.Equal(CommunicationState.Closed, factory.State);
            }
        }
    }

    [Fact]
    // Create the channel factory using BasicHttpBinding and open the channel using a user generated interface
    public static void CreateChannel_Of_Typed_Proxy_Using_BasicHttpBinding()
    {
        ChannelFactory<IWcfServiceGenerated> factory = null;

        try
        {
            BasicHttpBinding binding = new BasicHttpBinding();

            // Create the channel factory
            factory = new ChannelFactory<IWcfServiceGenerated>(binding, new EndpointAddress(FakeAddress.HttpAddress));
            factory.Open();

            // Create the channel.
            IWcfServiceGenerated channel = factory.CreateChannel();

            // NOTE(review): the failure message references typeof(IRequestChannel) although the
            // assertion checks IWcfServiceGenerated — looks like a copy/paste slip in the message only.
            Assert.True(typeof(IWcfServiceGenerated).GetTypeInfo().IsAssignableFrom(channel.GetType().GetTypeInfo()),
                String.Format("Channel type '{0}' was not assignable to '{1}'", channel.GetType(), typeof(IRequestChannel)));
        }
        finally
        {
            if (factory != null)
            {
                factory.Close();
            }
        }
    }

    [Fact]
    public static void ChannelFactory_Async_Open_Close()
    {
        ChannelFactory<IRequestChannel> factory = null;

        try
        {
            BasicHttpBinding binding = new BasicHttpBinding();

            // Create the channel factory
            factory = new ChannelFactory<IRequestChannel>(binding, new EndpointAddress(FakeAddress.HttpAddress));
            Assert.True(CommunicationState.Created == factory.State,
                string.Format("factory.State - Expected: {0}, Actual: {1}.", CommunicationState.Created, factory.State));

            // Exercise the APM Begin/End pairs via Task.Factory.FromAsync.
            Task.Factory.FromAsync(factory.BeginOpen(null, null), factory.EndOpen).GetAwaiter().GetResult();
            Assert.True(CommunicationState.Opened == factory.State,
                string.Format("factory.State - Expected: {0}, Actual: {1}.", CommunicationState.Opened, factory.State));

            Task.Factory.FromAsync(factory.BeginClose(null, null), factory.EndClose).GetAwaiter().GetResult();
            Assert.True(CommunicationState.Closed == factory.State,
                string.Format("factory.State - Expected: {0}, Actual: {1}.", CommunicationState.Closed, factory.State));
        }
        finally
        {
            if (factory != null && factory.State != CommunicationState.Closed)
            {
                factory.Abort();
            }
        }
    }

    [Theory]
    [InlineData(true)]
    [InlineData(false)]
    public static void ChannelFactory_AllowCookies(bool allowCookies)
    {
        ChannelFactory<IWcfService> factory = null;

        try
        {
            factory = new ChannelFactory<IWcfService>(
                new BasicHttpBinding()
                {
                    AllowCookies = allowCookies
                },
                new EndpointAddress(FakeAddress.HttpAddress));

            IWcfService serviceProxy = factory.CreateChannel();

            // The cookie container manager is only available when AllowCookies is enabled.
            IHttpCookieContainerManager cookieManager = ((IChannel)serviceProxy).GetProperty<IHttpCookieContainerManager>();
            Assert.True(allowCookies == (cookieManager != null),
                string.Format("AllowCookies was '{0}', 'cookieManager != null' was expected to be '{0}', but it was '{1}'.", allowCookies, cookieManager != null));

            if (allowCookies)
            {
                Assert.True(allowCookies == (cookieManager.CookieContainer != null),
                    string.Format("AllowCookies was '{0}', 'cookieManager.CookieContainer != null' was expected to be '{0}', but it was '{1}'.", allowCookies, cookieManager != null));
            }
        }
        finally
        {
            if (factory != null)
            {
                factory.Close();
            }
        }
    }
}
// Copyright (c) The Avalonia Project. All rights reserved.
// Licensed under the MIT license. See licence.md file in the project root for full license information.

using System;
using System.Collections.Generic;
using Avalonia.Platform;

namespace Avalonia.Media
{
    /// <summary>
    /// Represents a piece of text with formatting.
    /// </summary>
    /// <remarks>
    /// All formatting is fixed at construction time; the only mutable state is
    /// <see cref="Constraint"/>. Work is delegated to a platform-specific
    /// <see cref="IFormattedTextImpl"/> obtained from the registered
    /// <see cref="IPlatformRenderInterface"/>.
    /// </remarks>
    public class FormattedText : AvaloniaDisposable
    {
        /// <summary>
        /// Initializes a new instance of the <see cref="FormattedText"/> class.
        /// </summary>
        /// <param name="text">The text.</param>
        /// <param name="fontFamilyName">The font family.</param>
        /// <param name="fontSize">The font size. Must be greater than 0.</param>
        /// <param name="fontStyle">The font style.</param>
        /// <param name="textAlignment">The text alignment.</param>
        /// <param name="fontWeight">The font weight. Must be greater than 0.</param>
        /// <param name="wrapping">The text wrapping mode.</param>
        /// <exception cref="ArgumentNullException"><paramref name="text"/> or <paramref name="fontFamilyName"/> is null.</exception>
        /// <exception cref="ArgumentException"><paramref name="fontSize"/> or <paramref name="fontWeight"/> is not positive.</exception>
        /// <exception cref="InvalidOperationException">No <see cref="IPlatformRenderInterface"/> is registered.</exception>
        public FormattedText(
            string text,
            string fontFamilyName,
            double fontSize,
            FontStyle fontStyle = FontStyle.Normal,
            TextAlignment textAlignment = TextAlignment.Left,
            FontWeight fontWeight = FontWeight.Normal,
            TextWrapping wrapping = TextWrapping.Wrap)
        {
            Contract.Requires<ArgumentNullException>(text != null);
            Contract.Requires<ArgumentNullException>(fontFamilyName != null);

            if (fontSize <= 0)
            {
                throw new ArgumentException("FontSize must be greater than 0", nameof(fontSize));
            }

            if (fontWeight <= 0)
            {
                throw new ArgumentException("FontWeight must be greater than 0", nameof(fontWeight));
            }

            Text = text;
            FontFamilyName = fontFamilyName;
            FontSize = fontSize;
            FontStyle = fontStyle;
            FontWeight = fontWeight;
            TextAlignment = textAlignment;
            Wrapping = wrapping;

            var platform = AvaloniaLocator.Current.GetService<IPlatformRenderInterface>();

            if (platform == null)
            {
                // InvalidOperationException (instead of the previous bare Exception) is the
                // specific standard type for "required service not registered"; it still
                // derives from Exception, so existing catch blocks keep working.
                throw new InvalidOperationException("Could not create FormattedText: IPlatformRenderInterface not registered.");
            }

            PlatformImpl = platform.CreateFormattedText(
                text,
                fontFamilyName,
                fontSize,
                fontStyle,
                textAlignment,
                fontWeight,
                wrapping);
        }

        /// <summary>
        /// Gets or sets the constraint of the text.
        /// </summary>
        public Size Constraint
        {
            get
            {
                CheckDisposed();
                return PlatformImpl.Constraint;
            }

            set
            {
                CheckDisposed();
                PlatformImpl.Constraint = value;
            }
        }

        /// <summary>
        /// Gets the font family.
        /// </summary>
        public string FontFamilyName { get; }

        /// <summary>
        /// Gets the font size.
        /// </summary>
        public double FontSize { get; }

        /// <summary>
        /// Gets the font style.
        /// </summary>
        public FontStyle FontStyle { get; }

        /// <summary>
        /// Gets the font weight.
        /// </summary>
        public FontWeight FontWeight { get; }

        /// <summary>
        /// Gets the text.
        /// </summary>
        public string Text { get; }

        /// <summary>
        /// Gets platform-specific platform implementation.
        /// </summary>
        public IFormattedTextImpl PlatformImpl { get; }

        /// <summary>
        /// Gets the text alignment.
        /// </summary>
        public TextAlignment TextAlignment { get; }

        /// <summary>
        /// Gets the text wrapping.
        /// </summary>
        public TextWrapping Wrapping { get; }

        /// <summary>
        /// Disposes of unmanaged resources associated with the formatted text.
        /// </summary>
        protected override void DoDispose()
        {
            PlatformImpl.Dispose();
        }

        /// <summary>
        /// Gets the lines in the text.
        /// </summary>
        /// <returns>
        /// A collection of <see cref="FormattedTextLine"/> objects.
        /// </returns>
        public IEnumerable<FormattedTextLine> GetLines()
        {
            CheckDisposed();
            return PlatformImpl.GetLines();
        }

        /// <summary>
        /// Hit tests a point in the text.
        /// </summary>
        /// <param name="point">The point.</param>
        /// <returns>
        /// A <see cref="TextHitTestResult"/> describing the result of the hit test.
        /// </returns>
        public TextHitTestResult HitTestPoint(Point point)
        {
            CheckDisposed();
            return PlatformImpl.HitTestPoint(point);
        }

        /// <summary>
        /// Gets the bounds rectangle that the specified character occupies.
        /// </summary>
        /// <param name="index">The index of the character.</param>
        /// <returns>The character bounds.</returns>
        public Rect HitTestTextPosition(int index)
        {
            CheckDisposed();
            return PlatformImpl.HitTestTextPosition(index);
        }

        /// <summary>
        /// Gets the bounds rectangles that the specified text range occupies.
        /// </summary>
        /// <param name="index">The index of the first character.</param>
        /// <param name="length">The number of characters in the text range.</param>
        /// <returns>The character bounds.</returns>
        public IEnumerable<Rect> HitTestTextRange(int index, int length)
        {
            CheckDisposed();
            return PlatformImpl.HitTestTextRange(index, length);
        }

        /// <summary>
        /// Gets the size of the text, taking <see cref="Constraint"/> into account.
        /// </summary>
        /// <returns>The bounds box of the text.</returns>
        public Size Measure()
        {
            CheckDisposed();
            return PlatformImpl.Measure();
        }

        /// <summary>
        /// Sets the foreground brush for the specified text range.
        /// </summary>
        /// <param name="brush">The brush.</param>
        /// <param name="startIndex">The start of the text range.</param>
        /// <param name="length">The length of the text range.</param>
        public void SetForegroundBrush(IBrush brush, int startIndex, int length)
        {
            CheckDisposed();
            PlatformImpl.SetForegroundBrush(brush, startIndex, length);
        }
    }
}
// ********************************************************************************************************
// Product Name: DotSpatial.Symbology.dll
// Description:  Contains the business logic for symbology layers and symbol categories.
// ********************************************************************************************************
// The contents of this file are subject to the MIT License (MIT)
// you may not use this file except in compliance with the License. You may obtain a copy of the License at
// http://dotspatial.codeplex.com/license
//
// Software distributed under the License is distributed on an "AS IS" basis, WITHOUT WARRANTY OF
// ANY KIND, either expressed or implied. See the License for the specific language governing rights and
// limitations under the License.
//
// The Original Code is from MapWindow.dll version 6.0
//
// The Initial Developer of this Original Code is Ted Dunsford. Created 2/20/2009 2:45:18 PM
//
// Contributor(s): (Open source contributors should list themselves and their modifications here).
//
// ********************************************************************************************************

using System;
using System.Collections.Generic;
using System.ComponentModel;
using System.Drawing;
using System.Linq;
using DotSpatial.NTSExtension;
using DotSpatial.Serialization;

namespace DotSpatial.Symbology
{
    /// <summary>
    /// PointSchemeCategory. A feature category specialized for point features: every
    /// constructor builds a regular <see cref="Symbolizer"/> plus a matching
    /// <see cref="SelectionSymbolizer"/> tinted cyan so selected points stand out.
    /// </summary>
    [Serializable]
    public class PointCategory : FeatureCategory, IPointCategory
    {
        #region Private Variables

        #endregion

        #region Constructors

        /// <summary>
        /// Creates a new instance of PointSchemeCategory
        /// </summary>
        public PointCategory()
        {
            Symbolizer = new PointSymbolizer();
            SelectionSymbolizer = new PointSymbolizer(true);
        }

        /// <summary>
        /// Creates a new instance of a default point scheme category where the geographic symbol size has been
        /// scaled to the specified extent.
        /// </summary>
        /// <param name="extent">The geographic extent that is 100 times wider than the geographic size of the points.</param>
        public PointCategory(IRectangle extent)
        {
            Symbolizer = new PointSymbolizer(false, extent);
            SelectionSymbolizer = new PointSymbolizer(true, extent);
        }

        /// <summary>
        /// Creates a new category based on a symbolizer, and uses the same symbolizer, but with a fill and border color of light cyan
        /// for the selection symbolizer
        /// </summary>
        /// <param name="pointSymbolizer">The symbolizer to use in order to create a category</param>
        public PointCategory(IPointSymbolizer pointSymbolizer)
        {
            Symbolizer = pointSymbolizer;

            // Copy first so the caller's symbolizer is never recolored.
            SelectionSymbolizer = pointSymbolizer.Copy();
            SelectionSymbolizer.SetFillColor(Color.Cyan);
        }

        /// <summary>
        /// Creates a simple point category where the symbolizer is based on the simple characteristics.
        /// The selection symbolizer has the same shape and size, but will be colored cyan.
        /// </summary>
        /// <param name="color">The color of the regular symbolizer</param>
        /// <param name="shape">The shape of the regular symbolizer</param>
        /// <param name="size">the size of the regular symbolizer</param>
        public PointCategory(Color color, PointShape shape, double size)
        {
            Symbolizer = new PointSymbolizer(color, shape, size);
            SelectionSymbolizer = new PointSymbolizer(Color.Cyan, shape, size);
        }

        /// <summary>
        /// Creates a new Point Category based on the specified character
        /// </summary>
        /// <param name="character">The character to use for the symbol</param>
        /// <param name="fontFamilyName">The font family name to use as the font</param>
        /// <param name="color">The color of the character</param>
        /// <param name="size">The size of the symbol</param>
        public PointCategory(char character, string fontFamilyName, Color color, double size)
        {
            Symbolizer = new PointSymbolizer(character, fontFamilyName, color, size);
            SelectionSymbolizer = new PointSymbolizer(character, fontFamilyName, Color.Cyan, size);
        }

        /// <summary>
        /// Creates a category where the picture is used for the symbol, and a selected
        /// symbol is created as the same symbol but with a cyan border.
        /// </summary>
        /// <param name="picture">The image to use</param>
        /// <param name="size">The size of the symbol</param>
        public PointCategory(Image picture, double size)
        {
            Symbolizer = new PointSymbolizer(picture, size);
            PictureSymbol ps = new PictureSymbol(picture, size)
            {
                OutlineColor = Color.Cyan,
                OutlineWidth = 2,
                OutlineOpacity = 1f
            };
            SelectionSymbolizer = new PointSymbolizer(ps);
        }

        /// <summary>
        /// Creates a category from the single symbol specified. If the symbol is colorable,
        /// the color of the selection symbol will be duplicated, but set to cyan.
        /// </summary>
        /// <param name="symbol">The symbol to build the category from.</param>
        public PointCategory(ISymbol symbol)
        {
            Symbolizer = new PointSymbolizer(symbol);

            // Recolor the copy only; the caller's symbol must stay untouched.
            ISymbol copy = symbol.Copy();
            IColorable c = copy as IColorable;
            if (c != null)
            {
                c.Color = Color.Cyan;
            }

            SelectionSymbolizer = new PointSymbolizer(copy);
        }

        /// <summary>
        /// Creates a new Point Category from the list of symbols
        /// </summary>
        /// <param name="symbols">The symbols used for the regular symbolizer; a cloned, cyan-tinted set is used for selection.</param>
        public PointCategory(IEnumerable<ISymbol> symbols)
        {
            // Materialize once so the sequence is not enumerated multiple times.
            List<ISymbol> source = symbols.ToList();
            Symbolizer = new PointSymbolizer(source);
            List<ISymbol> copy = source.CloneList();
            if (copy.Any())
            {
                // BUG FIX: previously this tinted symbols.Last() — the caller's original
                // symbol — mutating caller state while leaving the selection copy untinted.
                // The cyan tint belongs on the cloned symbol used by SelectionSymbolizer.
                IColorable c = copy.Last() as IColorable;
                if (c != null)
                {
                    c.Color = Color.Cyan;
                }
            }

            SelectionSymbolizer = new PointSymbolizer(copy);
        }

        #endregion

        #region Methods

        /// <summary>
        /// This gets a single color that attempts to represent the specified
        /// category. For polygons, for example, this is the fill color (or central fill color)
        /// of the top pattern. If an image is being used, the color will be gray.
        /// </summary>
        /// <returns>The System.Color that can be used as an approximation to represent this category.</returns>
        public override Color GetColor()
        {
            if (Symbolizer == null || Symbolizer.Symbols == null || Symbolizer.Symbols.Count == 0) return Color.Gray;
            ISymbol p = Symbolizer.Symbols[0];
            return p.GetColor();
        }

        /// <summary>
        /// Sets the Color of the top symbol in the symbols.
        /// </summary>
        /// <param name="color">The color to set the point.</param>
        public override void SetColor(Color color)
        {
            if (Symbolizer == null) return;
            Symbolizer.SetFillColor(color);
        }

        #endregion

        #region Properties

        /// <summary>
        /// Gets or sets the symbolizer for this category
        /// </summary>
        [Description("Gets or sets the symbolizer for this category")]
        public new IPointSymbolizer Symbolizer
        {
            get { return base.Symbolizer as IPointSymbolizer; }
            set { base.Symbolizer = value; }
        }

        /// <summary>
        /// Gets the legend symbol size of the symbolizer for this category
        /// </summary>
        /// <returns>The size of the symbol shown in the legend.</returns>
        public override Size GetLegendSymbolSize()
        {
            return Symbolizer.GetLegendSymbolSize();
        }

        /// <summary>
        /// Gets or sets the symbolizer to use to draw selected features from this category.
        /// </summary>
        [Description("Gets or sets the symbolizer to use to draw selected features from this category.")]
        public new IPointSymbolizer SelectionSymbolizer
        {
            get { return base.SelectionSymbolizer as IPointSymbolizer; }
            set { base.SelectionSymbolizer = value; }
        }

        #endregion
    }
}
using System.Collections.Generic;
using System.Linq;
using Microsoft.AspNetCore.Html;
using Microsoft.AspNetCore.Mvc.Rendering;
using Microsoft.AspNetCore.Razor.TagHelpers;

namespace BootstrapTagHelpers.Extensions {
    using System;
    using System.Threading.Tasks;

    /// <summary>
    ///     Helpers for manipulating <see cref="TagHelperOutput" />: attribute merging, css
    ///     class/style handling, and wrapping content or the whole element with extra markup.
    /// </summary>
    public static class TagHelperOutputExtensions {
        /// <summary>
        ///     Adds an attribute to the Attributes collection. Existing Attributes are overwritten.
        /// </summary>
        public static void MergeAttribute(this TagHelperOutput output, string key, object value) {
            output.Attributes.SetAttribute(key, value);
        }

        /// <summary>
        ///     Adds an aria attribute
        /// </summary>
        /// <param name="name">Name of the attribute. "aria-" is prepended.</param>
        /// <param name="value">Value of the attribute.</param>
        public static void AddAriaAttribute(this TagHelperOutput output, string name, object value) {
            output.MergeAttribute("aria-" + name, value);
        }

        /// <summary>
        ///     Adds a data attribute
        /// </summary>
        /// <param name="name">Name of the attribute. "data-" is prepended.</param>
        /// <param name="value">Value of the attribute.</param>
        public static void AddDataAttribute(this TagHelperOutput output, string name, object value) {
            output.MergeAttribute("data-" + name, value);
        }

        /// <summary>
        ///     Adds an attribute to the Attributes collection. Existing Attributes are overwritten.
        /// </summary>
        public static void MergeAttribute(this TagHelperOutput output, string key, string value) {
            MergeAttribute(output, key, value, false);
        }

        /// <summary>
        ///     Adds an attribute to the Attributes collection. Existing Attributes are overwritten.
        /// </summary>
        /// <param name="appendText">
        ///     If true value will be added to an existing attribute. If false existing attributes are
        ///     overwritten
        /// </param>
        public static void MergeAttribute(this TagHelperOutput output, string key, string value, bool appendText) {
            MergeAttribute(output, key, value, appendText, null);
        }

        /// <summary>
        ///     Adds an attribute to the Attributes collection, appending to an existing value when
        ///     <paramref name="separator" /> is non-null; otherwise existing attributes are overwritten.
        /// </summary>
        /// <param name="separator">Is inserted between the old value and the appended value</param>
        public static void MergeAttribute(this TagHelperOutput output, string key, string value, string separator) {
            MergeAttribute(output, key, value, separator != null, separator);
        }

        /// <summary>
        ///     Adds an attribute to the Attributes collection. Existing Attributes are overwritten
        ///     unless <paramref name="appendText" /> is true.
        /// </summary>
        /// <param name="appendText">
        ///     If true value will be added to an existing attribute. If false existing attributes are
        ///     overwritten
        /// </param>
        /// <param name="separator">Is inserted between the old value and the appended value</param>
        public static void MergeAttribute(this TagHelperOutput output, string key, string value, bool appendText,
                                          string separator) {
            if (appendText && output.Attributes.ContainsName(key)) {
                // BUG FIX: previously the TagHelperAttribute object itself was concatenated
                // (output.Attributes[key] + separator + value), which stringifies the attribute
                // rather than its value. Append to the attribute's Value instead.
                object existing = output.Attributes[key]?.Value;
                output.Attributes.SetAttribute(key, existing == null ? value : existing + separator + value);
            }
            else
                output.Attributes.SetAttribute(key, value);
        }

        /// <summary>
        ///     Adds a css class if not already added
        /// </summary>
        public static void AddCssClass(this TagHelperOutput output, string cssClass) {
            AddCssClass(output, new[] {cssClass});
        }

        /// <summary>
        ///     Adds css classes if not already existing
        /// </summary>
        public static void AddCssClass(this TagHelperOutput output, IEnumerable<string> cssClasses) {
            // BUG FIX: the second condition previously tested the attribute object
            // (always non-null when ContainsName is true) instead of its Value, making the
            // "else if" branch unreachable and risking an NRE on a null-valued class attribute.
            if (output.Attributes.ContainsName("class") && output.Attributes["class"].Value != null) {
                List<string> classes = output.Attributes["class"].Value.ToString().Split(' ').ToList();
                foreach (string cssClass in cssClasses.Where(cssClass => !classes.Contains(cssClass)))
                    classes.Add(cssClass);
                output.Attributes.SetAttribute("class", string.Join(" ", classes));
            }
            else if (output.Attributes.ContainsName("class"))
                output.Attributes.SetAttribute("class", string.Join(" ", cssClasses));
            else
                output.Attributes.Add("class", string.Join(" ", cssClasses));
        }

        /// <summary>
        ///     Removes a css class; the class attribute is removed entirely when no classes remain.
        /// </summary>
        public static void RemoveCssClass(this TagHelperOutput output, string cssClass) {
            if (!output.Attributes.ContainsName("class"))
                return;
            // Guard against a null-valued class attribute (nothing to remove).
            string current = output.Attributes["class"].Value?.ToString();
            if (string.IsNullOrEmpty(current))
                return;
            List<string> classes = current.Split(' ').ToList();
            classes.Remove(cssClass);
            if (classes.Count == 0)
                output.Attributes.RemoveAll("class");
            else
                output.Attributes.SetAttribute("class", string.Join(" ", classes));
        }

        /// <summary>
        ///     Adds a style entry, preserving any styles already present.
        /// </summary>
        public static void AddCssStyle(this TagHelperOutput output, string name, string value) {
            string entry = name + ": " + value + ";";
            if (output.Attributes.ContainsName("style")) {
                string current = output.Attributes["style"].Value?.ToString();
                if (string.IsNullOrEmpty(current))
                    output.Attributes.SetAttribute("style", entry);
                else
                    // BUG FIX: previously the existing style text was discarded — the attribute
                    // was set to only the separator plus the new entry. Keep the current value
                    // and append, inserting ";" only when the current text does not end with one.
                    output.Attributes.SetAttribute("style", current + (current.EndsWith(";") ? " " : "; ") + entry);
            }
            else
                output.Attributes.Add("style", entry);
        }

        /// <summary>
        ///     Converts a <paramref name="output" /> into a <see cref="TagHelperContent" />
        /// </summary>
        public static TagHelperContent ToTagHelperContent(this TagHelperOutput output) {
            var content = new DefaultTagHelperContent();
            content.AppendHtml(output.PreElement);
            var builder = new TagBuilder(output.TagName);
            foreach (TagHelperAttribute attribute in output.Attributes)
                builder.Attributes.Add(attribute.Name,
                                       attribute.ValueStyle == HtmlAttributeValueStyle.Minimized
                                           ? null
                                           : attribute.Value?.ToString());
            if (output.TagMode == TagMode.SelfClosing) {
                builder.TagRenderMode = TagRenderMode.SelfClosing;
                content.AppendHtml(builder);
            }
            else {
                builder.TagRenderMode = TagRenderMode.StartTag;
                content.AppendHtml(builder);
                content.AppendHtml(output.PreContent);
                content.AppendHtml(output.Content);
                content.AppendHtml(output.PostContent);
                if (output.TagMode == TagMode.StartTagAndEndTag)
                    content.AppendHtml($"</{output.TagName}>");
            }
            content.AppendHtml(output.PostElement);
            return content;
        }

        /// <summary>
        ///     Wraps a <paramref name="builder" /> around the content of the <paramref name="output" /> using
        ///     <see cref="TagHelperOutput.PreContent" /> and <see cref="TagHelperOutput.PostContent" />. All content that is
        ///     inside the <paramref name="output" /> will be inside of the <paramref name="builder" />.
        ///     <see cref="TagBuilder.InnerHtml" /> will not be included.
        /// </summary>
        public static void WrapContentOutside(this TagHelperOutput output, TagBuilder builder) {
            builder.TagRenderMode = TagRenderMode.StartTag;
            WrapContentOutside(output, builder, new TagBuilder(builder.TagName) {TagRenderMode = TagRenderMode.EndTag});
        }

        /// <summary>
        ///     Wraps <paramref name="startTag" /> and <paramref name="endTag" /> around the content of the
        ///     <paramref name="output" /> using <see cref="TagHelperOutput.PreContent" /> and
        ///     <see cref="TagHelperOutput.PostContent" />. All content that is inside the <paramref name="output" />
        ///     will be inside of the <see cref="IHtmlContent" />s.
        /// </summary>
        public static void WrapContentOutside(this TagHelperOutput output, IHtmlContent startTag, IHtmlContent endTag) {
            output.PreContent.Prepend(startTag);
            output.PostContent.AppendHtml(endTag);
        }

        /// <summary>
        ///     Wraps <paramref name="startTag" /> and <paramref name="endTag" /> around the content of the
        ///     <paramref name="output" /> using <see cref="TagHelperOutput.PreContent" /> and
        ///     <see cref="TagHelperOutput.PostContent" />. All content that is inside the <paramref name="output" />
        ///     will be inside of the <see cref="string" />s.
        /// </summary>
        public static void WrapContentOutside(this TagHelperOutput output, string startTag, string endTag) {
            output.PreContent.Prepend(startTag);
            output.PostContent.Append(endTag);
        }

        /// <summary>
        ///     Wraps <paramref name="startTag" /> and <paramref name="endTag" /> around the content of the
        ///     <paramref name="output" /> using <see cref="TagHelperOutput.PreContent" /> and
        ///     <see cref="TagHelperOutput.PostContent" />. All content that is inside the <paramref name="output" />
        ///     will be inside of the <see cref="string" />s. <paramref name="startTag" /> and
        ///     <paramref name="endTag" /> will not be encoded.
        /// </summary>
        public static void WrapHtmlContentOutside(this TagHelperOutput output, string startTag, string endTag) {
            output.PreContent.PrependHtml(startTag);
            output.PostContent.AppendHtml(endTag);
        }

        /// <summary>
        ///     Wraps a <paramref name="builder" /> around the content of the <paramref name="output" /> using
        ///     <see cref="TagHelperOutput.PreContent" /> and <see cref="TagHelperOutput.PostContent" />. The current contents of
        ///     <see cref="TagHelperOutput.PreContent" /> and <see cref="TagHelperOutput.PostContent" /> will be outside.
        /// </summary>
        public static void WrapContentInside(this TagHelperOutput output, TagBuilder builder) {
            builder.TagRenderMode = TagRenderMode.StartTag;
            WrapContentInside(output, builder, new TagBuilder(builder.TagName) {TagRenderMode = TagRenderMode.EndTag});
        }

        /// <summary>
        ///     Wraps <paramref name="startTag" /> and <paramref name="endTag" /> around the content of the
        ///     <paramref name="output" /> using <see cref="TagHelperOutput.PreContent" /> and
        ///     <see cref="TagHelperOutput.PostContent" />. <see cref="TagBuilder.InnerHtml" /> will not be included.
        ///     The current contents of <see cref="TagHelperOutput.PreContent" /> and
        ///     <see cref="TagHelperOutput.PostContent" /> will be outside.
        /// </summary>
        public static void WrapContentInside(this TagHelperOutput output, IHtmlContent startTag, IHtmlContent endTag) {
            output.PreContent.AppendHtml(startTag);
            output.PostContent.Prepend(endTag);
        }

        /// <summary>
        ///     Wraps <paramref name="startTag" /> and <paramref name="endTag" /> around the content of the
        ///     <paramref name="output" /> using <see cref="TagHelperOutput.PreContent" /> and
        ///     <see cref="TagHelperOutput.PostContent" />. The current contents of
        ///     <see cref="TagHelperOutput.PreContent" /> and <see cref="TagHelperOutput.PostContent" /> will be outside.
        /// </summary>
        public static void WrapContentInside(this TagHelperOutput output, string startTag, string endTag) {
            output.PreContent.Append(startTag);
            output.PostContent.Prepend(endTag);
        }

        /// <summary>
        ///     Wraps <paramref name="startTag" /> and <paramref name="endTag" /> around the content of the
        ///     <paramref name="output" /> using <see cref="TagHelperOutput.PreContent" /> and
        ///     <see cref="TagHelperOutput.PostContent" />. The current contents of
        ///     <see cref="TagHelperOutput.PreContent" /> and <see cref="TagHelperOutput.PostContent" /> will be outside.
        ///     <paramref name="startTag" /> and <paramref name="endTag" /> will not be encoded.
        /// </summary>
        public static void WrapHtmlContentInside(this TagHelperOutput output, string startTag, string endTag) {
            output.PreContent.AppendHtml(startTag);
            output.PostContent.PrependHtml(endTag);
        }

        /// <summary>
        ///     Wraps a <paramref name="builder" /> around the element of the <paramref name="output" /> using
        ///     <see cref="TagHelperOutput.PreElement" /> and <see cref="TagHelperOutput.PostElement" />. The current contents of
        ///     <see cref="TagHelperOutput.PreElement" /> and <see cref="TagHelperOutput.PostElement" /> will be inside.
        ///     <see cref="TagBuilder.InnerHtml" /> will not be included.
        /// </summary>
        public static void WrapOutside(this TagHelperOutput output, TagBuilder builder) {
            builder.TagRenderMode = TagRenderMode.StartTag;
            WrapOutside(output, builder, new TagBuilder(builder.TagName) {TagRenderMode = TagRenderMode.EndTag});
        }

        /// <summary>
        ///     Wraps <paramref name="startTag" /> and <paramref name="endTag" /> around the element of the
        ///     <paramref name="output" /> using <see cref="TagHelperOutput.PreElement" /> and
        ///     <see cref="TagHelperOutput.PostElement" />. The current contents of
        ///     <see cref="TagHelperOutput.PreElement" /> and <see cref="TagHelperOutput.PostElement" /> will be inside.
        /// </summary>
        public static void WrapOutside(this TagHelperOutput output, IHtmlContent startTag, IHtmlContent endTag) {
            output.PreElement.Prepend(startTag);
            output.PostElement.AppendHtml(endTag);
        }

        /// <summary>
        ///     Wraps <paramref name="startTag" /> and <paramref name="endTag" /> around the element of the
        ///     <paramref name="output" /> using <see cref="TagHelperOutput.PreElement" /> and
        ///     <see cref="TagHelperOutput.PostElement" />. The current contents of
        ///     <see cref="TagHelperOutput.PreElement" /> and <see cref="TagHelperOutput.PostElement" /> will be inside.
        /// </summary>
        public static void WrapOutside(this TagHelperOutput output, string startTag, string endTag) {
            output.PreElement.Prepend(startTag);
            output.PostElement.Append(endTag);
        }

        /// <summary>
        ///     Wraps <paramref name="startTag" /> and <paramref name="endTag" /> around the element of the
        ///     <paramref name="output" /> using <see cref="TagHelperOutput.PreElement" /> and
        ///     <see cref="TagHelperOutput.PostElement" />. The current contents of
        ///     <see cref="TagHelperOutput.PreElement" /> and <see cref="TagHelperOutput.PostElement" /> will be inside.
        ///     <paramref name="startTag" /> and <paramref name="endTag" /> will not be encoded.
        /// </summary>
        public static void WrapHtmlOutside(this TagHelperOutput output, string startTag, string endTag) {
            output.PreElement.PrependHtml(startTag);
            output.PostElement.AppendHtml(endTag);
        }

        /// <summary>
        ///     Wraps a <paramref name="builder" /> around the element of the <paramref name="output" /> using
        ///     <see cref="TagHelperOutput.PreElement" /> and <see cref="TagHelperOutput.PostElement" />. The current contents of
        ///     <see cref="TagHelperOutput.PreElement" /> and <see cref="TagHelperOutput.PostElement" /> will be outside.
        ///     <see cref="TagBuilder.InnerHtml" /> will not be included.
        /// </summary>
        public static void WrapInside(this TagHelperOutput output, TagBuilder builder) {
            builder.TagRenderMode = TagRenderMode.StartTag;
            WrapInside(output, builder, new TagBuilder(builder.TagName) {TagRenderMode = TagRenderMode.EndTag});
        }

        /// <summary>
        ///     Wraps <paramref name="startTag" /> and <paramref name="endTag" /> around the element of the
        ///     <paramref name="output" /> using <see cref="TagHelperOutput.PreElement" /> and
        ///     <see cref="TagHelperOutput.PostElement" />. The current contents of
        ///     <see cref="TagHelperOutput.PreElement" /> and <see cref="TagHelperOutput.PostElement" /> will be outside.
        /// </summary>
        public static void WrapInside(this TagHelperOutput output, IHtmlContent startTag, IHtmlContent endTag) {
            output.PreElement.AppendHtml(startTag);
            output.PostElement.Prepend(endTag);
        }

        /// <summary>
        ///     Wraps <paramref name="startTag" /> and <paramref name="endTag" /> around the element of the
        ///     <paramref name="output" /> using <see cref="TagHelperOutput.PreElement" /> and
        ///     <see cref="TagHelperOutput.PostElement" />. The current contents of
        ///     <see cref="TagHelperOutput.PreElement" /> and <see cref="TagHelperOutput.PostElement" /> will be outside.
        /// </summary>
        public static void WrapInside(this TagHelperOutput output, string startTag, string endTag) {
            output.PreElement.Append(startTag);
            output.PostElement.Prepend(endTag);
        }

        /// <summary>
        ///     Wraps <paramref name="startTag" /> and <paramref name="endTag" /> around the element of the
        ///     <paramref name="output" /> using <see cref="TagHelperOutput.PreElement" /> and
        ///     <see cref="TagHelperOutput.PostElement" />. The current contents of
        ///     <see cref="TagHelperOutput.PreElement" /> and <see cref="TagHelperOutput.PostElement" /> will be outside.
        ///     <paramref name="startTag" /> and <paramref name="endTag" /> will not be encoded.
        /// </summary>
        public static void WrapHtmlInside(this TagHelperOutput output, string startTag, string endTag) {
            output.PreElement.AppendHtml(startTag);
            output.PostElement.PrependHtml(endTag);
        }

        /// <summary>
        ///     Resolves the child content of the tag helper and stores it in <see cref="TagHelperOutput.Content" />.
        /// </summary>
        public static async Task LoadChildContentAsync(this TagHelperOutput output) {
            output.Content.SetHtmlContent(await output.GetChildContentAsync() ?? new DefaultTagHelperContent());
        }

        /// <summary>
        ///     Resolves the child content of the tag helper and stores it in <see cref="TagHelperOutput.Content" />.
        /// </summary>
        /// <param name="useCachedResult">If true a previously resolved child content is reused.</param>
        public static async Task LoadChildContentAsync(this TagHelperOutput output, bool useCachedResult) {
            output.Content.SetHtmlContent(await output.GetChildContentAsync(useCachedResult));
        }
    }
}
// Copyright (c) Microsoft Corporation. All rights reserved. // Licensed under the MIT License. See License.txt in the project root for // license information. // // Code generated by Microsoft (R) AutoRest Code Generator 1.0.0.0 // Changes may cause incorrect behavior and will be lost if the code is // regenerated. namespace Microsoft.Azure.Management.Insights { using System.Linq; using Microsoft.Rest; using Microsoft.Rest.Azure; using Models; /// <summary> /// AlertRulesOperations operations. /// </summary> internal partial class AlertRulesOperations : Microsoft.Rest.IServiceOperations<InsightsManagementClient>, IAlertRulesOperations { /// <summary> /// Initializes a new instance of the AlertRulesOperations class. /// </summary> /// <param name='client'> /// Reference to the service client. /// </param> /// <exception cref="System.ArgumentNullException"> /// Thrown when a required parameter is null /// </exception> internal AlertRulesOperations(InsightsManagementClient client) { if (client == null) { throw new System.ArgumentNullException("client"); } this.Client = client; } /// <summary> /// Gets a reference to the InsightsManagementClient /// </summary> public InsightsManagementClient Client { get; private set; } /// <summary> /// Creates or updates an alert rule. /// Request method: PUT Request URI: /// https://management.azure.com/subscriptions/{subscription-id}/resourceGroups/{resource-group-name}/providers/microsoft.insights/alertRules/{alert-rule-name}?api-version={api-version} /// </summary> /// <param name='resourceGroupName'> /// The name of the resource group. /// </param> /// <param name='ruleName'> /// The name of the rule. /// </param> /// <param name='parameters'> /// The parameters of the rule to create or update. /// </param> /// <param name='customHeaders'> /// Headers that will be added to request. /// </param> /// <param name='cancellationToken'> /// The cancellation token. 
/// </param>
/// <exception cref="Microsoft.Rest.Azure.CloudException">
/// Thrown when the operation returned an invalid status code
/// </exception>
/// <exception cref="Microsoft.Rest.SerializationException">
/// Thrown when unable to deserialize the response
/// </exception>
/// <exception cref="Microsoft.Rest.ValidationException">
/// Thrown when a required parameter is null
/// </exception>
/// <return>
/// A response object containing the response body and response headers.
/// </return>
public async System.Threading.Tasks.Task<Microsoft.Rest.Azure.AzureOperationResponse<AlertRuleResource>> CreateOrUpdateWithHttpMessagesAsync(string resourceGroupName, string ruleName, AlertRuleResource parameters, System.Collections.Generic.Dictionary<string, System.Collections.Generic.List<string>> customHeaders = null, System.Threading.CancellationToken cancellationToken = default(System.Threading.CancellationToken))
{
    // Validate required parameters before any network work.
    if (resourceGroupName == null)
    {
        throw new Microsoft.Rest.ValidationException(Microsoft.Rest.ValidationRules.CannotBeNull, "resourceGroupName");
    }
    if (ruleName == null)
    {
        throw new Microsoft.Rest.ValidationException(Microsoft.Rest.ValidationRules.CannotBeNull, "ruleName");
    }
    if (parameters == null)
    {
        throw new Microsoft.Rest.ValidationException(Microsoft.Rest.ValidationRules.CannotBeNull, "parameters");
    }
    // NOTE(review): this guard is redundant — parameters cannot be null past the
    // throw above. Harmless generator artifact; fix in the AutoRest template, not here.
    if (parameters != null)
    {
        parameters.Validate();
    }
    if (this.Client.SubscriptionId == null)
    {
        throw new Microsoft.Rest.ValidationException(Microsoft.Rest.ValidationRules.CannotBeNull, "this.Client.SubscriptionId");
    }
    string apiVersion = "2016-03-01";
    // Tracing
    bool _shouldTrace = Microsoft.Rest.ServiceClientTracing.IsEnabled;
    string _invocationId = null;
    if (_shouldTrace)
    {
        _invocationId = Microsoft.Rest.ServiceClientTracing.NextInvocationId.ToString();
        System.Collections.Generic.Dictionary<string, object> tracingParameters = new System.Collections.Generic.Dictionary<string, object>();
        tracingParameters.Add("resourceGroupName", resourceGroupName);
        tracingParameters.Add("ruleName", ruleName);
        tracingParameters.Add("apiVersion", apiVersion);
        tracingParameters.Add("parameters", parameters);
        tracingParameters.Add("cancellationToken", cancellationToken);
        Microsoft.Rest.ServiceClientTracing.Enter(_invocationId, this, "CreateOrUpdate", tracingParameters);
    }
    // Construct URL: path parameters are escaped and substituted into the template.
    var _baseUrl = this.Client.BaseUri.AbsoluteUri;
    var _url = new System.Uri(new System.Uri(_baseUrl + (_baseUrl.EndsWith("/") ? "" : "/")), "subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/microsoft.insights/alertrules/{ruleName}").ToString();
    _url = _url.Replace("{resourceGroupName}", System.Uri.EscapeDataString(resourceGroupName));
    _url = _url.Replace("{ruleName}", System.Uri.EscapeDataString(ruleName));
    _url = _url.Replace("{subscriptionId}", System.Uri.EscapeDataString(this.Client.SubscriptionId));
    System.Collections.Generic.List<string> _queryParameters = new System.Collections.Generic.List<string>();
    if (apiVersion != null)
    {
        _queryParameters.Add(string.Format("api-version={0}", System.Uri.EscapeDataString(apiVersion)));
    }
    if (_queryParameters.Count > 0)
    {
        _url += (_url.Contains("?") ? "&" : "?") + string.Join("&", _queryParameters);
    }
    // Create HTTP transport objects
    System.Net.Http.HttpRequestMessage _httpRequest = new System.Net.Http.HttpRequestMessage();
    System.Net.Http.HttpResponseMessage _httpResponse = null;
    _httpRequest.Method = new System.Net.Http.HttpMethod("PUT");
    _httpRequest.RequestUri = new System.Uri(_url);
    // Set Headers: custom headers override any default with the same key.
    if (this.Client.GenerateClientRequestId != null && this.Client.GenerateClientRequestId.Value)
    {
        _httpRequest.Headers.TryAddWithoutValidation("x-ms-client-request-id", System.Guid.NewGuid().ToString());
    }
    if (this.Client.AcceptLanguage != null)
    {
        if (_httpRequest.Headers.Contains("accept-language"))
        {
            _httpRequest.Headers.Remove("accept-language");
        }
        _httpRequest.Headers.TryAddWithoutValidation("accept-language", this.Client.AcceptLanguage);
    }
    if (customHeaders != null)
    {
        foreach (var _header in customHeaders)
        {
            if (_httpRequest.Headers.Contains(_header.Key))
            {
                _httpRequest.Headers.Remove(_header.Key);
            }
            _httpRequest.Headers.TryAddWithoutValidation(_header.Key, _header.Value);
        }
    }
    // Serialize Request: JSON body using the client's serializer settings.
    string _requestContent = null;
    if (parameters != null)
    {
        _requestContent = Microsoft.Rest.Serialization.SafeJsonConvert.SerializeObject(parameters, this.Client.SerializationSettings);
        _httpRequest.Content = new System.Net.Http.StringContent(_requestContent, System.Text.Encoding.UTF8);
        _httpRequest.Content.Headers.ContentType = System.Net.Http.Headers.MediaTypeHeaderValue.Parse("application/json; charset=utf-8");
    }
    // Set Credentials: lets the credential object sign/augment the request.
    if (this.Client.Credentials != null)
    {
        cancellationToken.ThrowIfCancellationRequested();
        await this.Client.Credentials.ProcessHttpRequestAsync(_httpRequest, cancellationToken).ConfigureAwait(false);
    }
    // Send Request
    if (_shouldTrace)
    {
        Microsoft.Rest.ServiceClientTracing.SendRequest(_invocationId, _httpRequest);
    }
    cancellationToken.ThrowIfCancellationRequested();
    _httpResponse = await this.Client.HttpClient.SendAsync(_httpRequest, cancellationToken).ConfigureAwait(false);
    if (_shouldTrace)
    {
        Microsoft.Rest.ServiceClientTracing.ReceiveResponse(_invocationId, _httpResponse);
    }
    System.Net.HttpStatusCode _statusCode = _httpResponse.StatusCode;
    cancellationToken.ThrowIfCancellationRequested();
    string _responseContent = null;
    // Anything other than 200 (updated) / 201 (created) is surfaced as CloudException;
    // request/response are disposed on the failure path only.
    if ((int)_statusCode != 200 && (int)_statusCode != 201)
    {
        var ex = new Microsoft.Rest.Azure.CloudException(string.Format("Operation returned an invalid status code '{0}'", _statusCode));
        try
        {
            _responseContent = await _httpResponse.Content.ReadAsStringAsync().ConfigureAwait(false);
            CloudError _errorBody = Microsoft.Rest.Serialization.SafeJsonConvert.DeserializeObject<CloudError>(_responseContent, this.Client.DeserializationSettings);
            if (_errorBody != null)
            {
                ex = new Microsoft.Rest.Azure.CloudException(_errorBody.Message);
                ex.Body = _errorBody;
            }
        }
        catch (Newtonsoft.Json.JsonException)
        {
            // Ignore the exception: fall back to the generic status-code message.
        }
        ex.Request = new Microsoft.Rest.HttpRequestMessageWrapper(_httpRequest, _requestContent);
        ex.Response = new Microsoft.Rest.HttpResponseMessageWrapper(_httpResponse, _responseContent);
        if (_httpResponse.Headers.Contains("x-ms-request-id"))
        {
            ex.RequestId = _httpResponse.Headers.GetValues("x-ms-request-id").FirstOrDefault();
        }
        if (_shouldTrace)
        {
            Microsoft.Rest.ServiceClientTracing.Error(_invocationId, ex);
        }
        _httpRequest.Dispose();
        if (_httpResponse != null)
        {
            _httpResponse.Dispose();
        }
        throw ex;
    }
    // Create Result: on success the caller owns disposal of request/response.
    var _result = new Microsoft.Rest.Azure.AzureOperationResponse<AlertRuleResource>();
    _result.Request = _httpRequest;
    _result.Response = _httpResponse;
    if (_httpResponse.Headers.Contains("x-ms-request-id"))
    {
        _result.RequestId = _httpResponse.Headers.GetValues("x-ms-request-id").FirstOrDefault();
    }
    // Deserialize Response
    if ((int)_statusCode == 200)
    {
        _responseContent = await _httpResponse.Content.ReadAsStringAsync().ConfigureAwait(false);
        try
        {
            _result.Body = Microsoft.Rest.Serialization.SafeJsonConvert.DeserializeObject<AlertRuleResource>(_responseContent, this.Client.DeserializationSettings);
        }
        catch (Newtonsoft.Json.JsonException ex)
        {
            _httpRequest.Dispose();
            if (_httpResponse != null)
            {
                _httpResponse.Dispose();
            }
            throw new Microsoft.Rest.SerializationException("Unable to deserialize the response.", _responseContent, ex);
        }
    }
    // Deserialize Response
    if ((int)_statusCode == 201)
    {
        _responseContent = await _httpResponse.Content.ReadAsStringAsync().ConfigureAwait(false);
        try
        {
            _result.Body = Microsoft.Rest.Serialization.SafeJsonConvert.DeserializeObject<AlertRuleResource>(_responseContent, this.Client.DeserializationSettings);
        }
        catch (Newtonsoft.Json.JsonException ex)
        {
            _httpRequest.Dispose();
            if (_httpResponse != null)
            {
                _httpResponse.Dispose();
            }
            throw new Microsoft.Rest.SerializationException("Unable to deserialize the response.", _responseContent, ex);
        }
    }
    if (_shouldTrace)
    {
        Microsoft.Rest.ServiceClientTracing.Exit(_invocationId, _result);
    }
    return _result;
}

/// <summary>
/// Deletes an alert rule
/// </summary>
/// <param name='resourceGroupName'>
/// The name of the resource group.
/// </param>
/// <param name='ruleName'>
/// The name of the rule.
/// </param>
/// <param name='customHeaders'>
/// Headers that will be added to request.
/// </param>
/// <param name='cancellationToken'>
/// The cancellation token.
/// </param>
/// <exception cref="Microsoft.Rest.Azure.CloudException">
/// Thrown when the operation returned an invalid status code
/// </exception>
/// <exception cref="Microsoft.Rest.ValidationException">
/// Thrown when a required parameter is null
/// </exception>
/// <return>
/// A response object containing the response body and response headers.
/// </return>
public async System.Threading.Tasks.Task<Microsoft.Rest.Azure.AzureOperationResponse> DeleteWithHttpMessagesAsync(string resourceGroupName, string ruleName, System.Collections.Generic.Dictionary<string, System.Collections.Generic.List<string>> customHeaders = null, System.Threading.CancellationToken cancellationToken = default(System.Threading.CancellationToken))
{
    // Validate required parameters before any network work.
    if (resourceGroupName == null)
    {
        throw new Microsoft.Rest.ValidationException(Microsoft.Rest.ValidationRules.CannotBeNull, "resourceGroupName");
    }
    if (ruleName == null)
    {
        throw new Microsoft.Rest.ValidationException(Microsoft.Rest.ValidationRules.CannotBeNull, "ruleName");
    }
    if (this.Client.SubscriptionId == null)
    {
        throw new Microsoft.Rest.ValidationException(Microsoft.Rest.ValidationRules.CannotBeNull, "this.Client.SubscriptionId");
    }
    string apiVersion = "2016-03-01";
    // Tracing
    bool _shouldTrace = Microsoft.Rest.ServiceClientTracing.IsEnabled;
    string _invocationId = null;
    if (_shouldTrace)
    {
        _invocationId = Microsoft.Rest.ServiceClientTracing.NextInvocationId.ToString();
        System.Collections.Generic.Dictionary<string, object> tracingParameters = new System.Collections.Generic.Dictionary<string, object>();
        tracingParameters.Add("resourceGroupName", resourceGroupName);
        tracingParameters.Add("ruleName", ruleName);
        tracingParameters.Add("apiVersion", apiVersion);
        tracingParameters.Add("cancellationToken", cancellationToken);
        Microsoft.Rest.ServiceClientTracing.Enter(_invocationId, this, "Delete", tracingParameters);
    }
    // Construct URL: path parameters are escaped and substituted into the template.
    var _baseUrl = this.Client.BaseUri.AbsoluteUri;
    var _url = new System.Uri(new System.Uri(_baseUrl + (_baseUrl.EndsWith("/") ? "" : "/")), "subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/microsoft.insights/alertrules/{ruleName}").ToString();
    _url = _url.Replace("{resourceGroupName}", System.Uri.EscapeDataString(resourceGroupName));
    _url = _url.Replace("{ruleName}", System.Uri.EscapeDataString(ruleName));
    _url = _url.Replace("{subscriptionId}", System.Uri.EscapeDataString(this.Client.SubscriptionId));
    System.Collections.Generic.List<string> _queryParameters = new System.Collections.Generic.List<string>();
    if (apiVersion != null)
    {
        _queryParameters.Add(string.Format("api-version={0}", System.Uri.EscapeDataString(apiVersion)));
    }
    if (_queryParameters.Count > 0)
    {
        _url += (_url.Contains("?") ? "&" : "?") + string.Join("&", _queryParameters);
    }
    // Create HTTP transport objects
    System.Net.Http.HttpRequestMessage _httpRequest = new System.Net.Http.HttpRequestMessage();
    System.Net.Http.HttpResponseMessage _httpResponse = null;
    _httpRequest.Method = new System.Net.Http.HttpMethod("DELETE");
    _httpRequest.RequestUri = new System.Uri(_url);
    // Set Headers: custom headers override any default with the same key.
    if (this.Client.GenerateClientRequestId != null && this.Client.GenerateClientRequestId.Value)
    {
        _httpRequest.Headers.TryAddWithoutValidation("x-ms-client-request-id", System.Guid.NewGuid().ToString());
    }
    if (this.Client.AcceptLanguage != null)
    {
        if (_httpRequest.Headers.Contains("accept-language"))
        {
            _httpRequest.Headers.Remove("accept-language");
        }
        _httpRequest.Headers.TryAddWithoutValidation("accept-language", this.Client.AcceptLanguage);
    }
    if (customHeaders != null)
    {
        foreach (var _header in customHeaders)
        {
            if (_httpRequest.Headers.Contains(_header.Key))
            {
                _httpRequest.Headers.Remove(_header.Key);
            }
            _httpRequest.Headers.TryAddWithoutValidation(_header.Key, _header.Value);
        }
    }
    // Serialize Request: DELETE carries no body.
    string _requestContent = null;
    // Set Credentials
    if (this.Client.Credentials != null)
    {
        cancellationToken.ThrowIfCancellationRequested();
        await this.Client.Credentials.ProcessHttpRequestAsync(_httpRequest, cancellationToken).ConfigureAwait(false);
    }
    // Send Request
    if (_shouldTrace)
    {
        Microsoft.Rest.ServiceClientTracing.SendRequest(_invocationId, _httpRequest);
    }
    cancellationToken.ThrowIfCancellationRequested();
    _httpResponse = await this.Client.HttpClient.SendAsync(_httpRequest, cancellationToken).ConfigureAwait(false);
    if (_shouldTrace)
    {
        Microsoft.Rest.ServiceClientTracing.ReceiveResponse(_invocationId, _httpResponse);
    }
    System.Net.HttpStatusCode _statusCode = _httpResponse.StatusCode;
    cancellationToken.ThrowIfCancellationRequested();
    string _responseContent = null;
    // 204 (no content) and 200 are both success for delete. Unlike the other
    // operations, the error body is attached raw here — no CloudError parse.
    if ((int)_statusCode != 204 && (int)_statusCode != 200)
    {
        var ex = new Microsoft.Rest.Azure.CloudException(string.Format("Operation returned an invalid status code '{0}'", _statusCode));
        if (_httpResponse.Content != null)
        {
            _responseContent = await _httpResponse.Content.ReadAsStringAsync().ConfigureAwait(false);
        }
        else
        {
            _responseContent = string.Empty;
        }
        ex.Request = new Microsoft.Rest.HttpRequestMessageWrapper(_httpRequest, _requestContent);
        ex.Response = new Microsoft.Rest.HttpResponseMessageWrapper(_httpResponse, _responseContent);
        if (_httpResponse.Headers.Contains("x-ms-request-id"))
        {
            ex.RequestId = _httpResponse.Headers.GetValues("x-ms-request-id").FirstOrDefault();
        }
        if (_shouldTrace)
        {
            Microsoft.Rest.ServiceClientTracing.Error(_invocationId, ex);
        }
        _httpRequest.Dispose();
        if (_httpResponse != null)
        {
            _httpResponse.Dispose();
        }
        throw ex;
    }
    // Create Result: on success the caller owns disposal of request/response.
    var _result = new Microsoft.Rest.Azure.AzureOperationResponse();
    _result.Request = _httpRequest;
    _result.Response = _httpResponse;
    if (_httpResponse.Headers.Contains("x-ms-request-id"))
    {
        _result.RequestId = _httpResponse.Headers.GetValues("x-ms-request-id").FirstOrDefault();
    }
    if (_shouldTrace)
    {
        Microsoft.Rest.ServiceClientTracing.Exit(_invocationId, _result);
    }
    return _result;
}

/// <summary>
/// Gets an alert rule
/// </summary>
/// <param name='resourceGroupName'>
/// The name of the resource group.
/// </param>
/// <param name='ruleName'>
/// The name of the rule.
/// </param>
/// <param name='customHeaders'>
/// Headers that will be added to request.
/// </param>
/// <param name='cancellationToken'>
/// The cancellation token.
/// </param>
/// <exception cref="Microsoft.Rest.Azure.CloudException">
/// Thrown when the operation returned an invalid status code
/// </exception>
/// <exception cref="Microsoft.Rest.SerializationException">
/// Thrown when unable to deserialize the response
/// </exception>
/// <exception cref="Microsoft.Rest.ValidationException">
/// Thrown when a required parameter is null
/// </exception>
/// <return>
/// A response object containing the response body and response headers.
/// </return>
public async System.Threading.Tasks.Task<Microsoft.Rest.Azure.AzureOperationResponse<AlertRuleResource>> GetWithHttpMessagesAsync(string resourceGroupName, string ruleName, System.Collections.Generic.Dictionary<string, System.Collections.Generic.List<string>> customHeaders = null, System.Threading.CancellationToken cancellationToken = default(System.Threading.CancellationToken))
{
    // Validate required parameters before any network work.
    if (resourceGroupName == null)
    {
        throw new Microsoft.Rest.ValidationException(Microsoft.Rest.ValidationRules.CannotBeNull, "resourceGroupName");
    }
    if (ruleName == null)
    {
        throw new Microsoft.Rest.ValidationException(Microsoft.Rest.ValidationRules.CannotBeNull, "ruleName");
    }
    if (this.Client.SubscriptionId == null)
    {
        throw new Microsoft.Rest.ValidationException(Microsoft.Rest.ValidationRules.CannotBeNull, "this.Client.SubscriptionId");
    }
    string apiVersion = "2016-03-01";
    // Tracing
    bool _shouldTrace = Microsoft.Rest.ServiceClientTracing.IsEnabled;
    string _invocationId = null;
    if (_shouldTrace)
    {
        _invocationId = Microsoft.Rest.ServiceClientTracing.NextInvocationId.ToString();
        System.Collections.Generic.Dictionary<string, object> tracingParameters = new System.Collections.Generic.Dictionary<string, object>();
        tracingParameters.Add("resourceGroupName", resourceGroupName);
        tracingParameters.Add("ruleName", ruleName);
        tracingParameters.Add("apiVersion", apiVersion);
        tracingParameters.Add("cancellationToken", cancellationToken);
        Microsoft.Rest.ServiceClientTracing.Enter(_invocationId, this, "Get", tracingParameters);
    }
    // Construct URL: path parameters are escaped and substituted into the template.
    var _baseUrl = this.Client.BaseUri.AbsoluteUri;
    var _url = new System.Uri(new System.Uri(_baseUrl + (_baseUrl.EndsWith("/") ? "" : "/")), "subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/microsoft.insights/alertrules/{ruleName}").ToString();
    _url = _url.Replace("{resourceGroupName}", System.Uri.EscapeDataString(resourceGroupName));
    _url = _url.Replace("{ruleName}", System.Uri.EscapeDataString(ruleName));
    _url = _url.Replace("{subscriptionId}", System.Uri.EscapeDataString(this.Client.SubscriptionId));
    System.Collections.Generic.List<string> _queryParameters = new System.Collections.Generic.List<string>();
    if (apiVersion != null)
    {
        _queryParameters.Add(string.Format("api-version={0}", System.Uri.EscapeDataString(apiVersion)));
    }
    if (_queryParameters.Count > 0)
    {
        _url += (_url.Contains("?") ? "&" : "?") + string.Join("&", _queryParameters);
    }
    // Create HTTP transport objects
    System.Net.Http.HttpRequestMessage _httpRequest = new System.Net.Http.HttpRequestMessage();
    System.Net.Http.HttpResponseMessage _httpResponse = null;
    _httpRequest.Method = new System.Net.Http.HttpMethod("GET");
    _httpRequest.RequestUri = new System.Uri(_url);
    // Set Headers: custom headers override any default with the same key.
    if (this.Client.GenerateClientRequestId != null && this.Client.GenerateClientRequestId.Value)
    {
        _httpRequest.Headers.TryAddWithoutValidation("x-ms-client-request-id", System.Guid.NewGuid().ToString());
    }
    if (this.Client.AcceptLanguage != null)
    {
        if (_httpRequest.Headers.Contains("accept-language"))
        {
            _httpRequest.Headers.Remove("accept-language");
        }
        _httpRequest.Headers.TryAddWithoutValidation("accept-language", this.Client.AcceptLanguage);
    }
    if (customHeaders != null)
    {
        foreach (var _header in customHeaders)
        {
            if (_httpRequest.Headers.Contains(_header.Key))
            {
                _httpRequest.Headers.Remove(_header.Key);
            }
            _httpRequest.Headers.TryAddWithoutValidation(_header.Key, _header.Value);
        }
    }
    // Serialize Request: GET carries no body.
    string _requestContent = null;
    // Set Credentials
    if (this.Client.Credentials != null)
    {
        cancellationToken.ThrowIfCancellationRequested();
        await this.Client.Credentials.ProcessHttpRequestAsync(_httpRequest, cancellationToken).ConfigureAwait(false);
    }
    // Send Request
    if (_shouldTrace)
    {
        Microsoft.Rest.ServiceClientTracing.SendRequest(_invocationId, _httpRequest);
    }
    cancellationToken.ThrowIfCancellationRequested();
    _httpResponse = await this.Client.HttpClient.SendAsync(_httpRequest, cancellationToken).ConfigureAwait(false);
    if (_shouldTrace)
    {
        Microsoft.Rest.ServiceClientTracing.ReceiveResponse(_invocationId, _httpResponse);
    }
    System.Net.HttpStatusCode _statusCode = _httpResponse.StatusCode;
    cancellationToken.ThrowIfCancellationRequested();
    string _responseContent = null;
    // Non-200 responses are surfaced as CloudException with the parsed error body
    // when possible; request/response are disposed on the failure path only.
    if ((int)_statusCode != 200)
    {
        var ex = new Microsoft.Rest.Azure.CloudException(string.Format("Operation returned an invalid status code '{0}'", _statusCode));
        try
        {
            _responseContent = await _httpResponse.Content.ReadAsStringAsync().ConfigureAwait(false);
            CloudError _errorBody = Microsoft.Rest.Serialization.SafeJsonConvert.DeserializeObject<CloudError>(_responseContent, this.Client.DeserializationSettings);
            if (_errorBody != null)
            {
                ex = new Microsoft.Rest.Azure.CloudException(_errorBody.Message);
                ex.Body = _errorBody;
            }
        }
        catch (Newtonsoft.Json.JsonException)
        {
            // Ignore the exception: fall back to the generic status-code message.
        }
        ex.Request = new Microsoft.Rest.HttpRequestMessageWrapper(_httpRequest, _requestContent);
        ex.Response = new Microsoft.Rest.HttpResponseMessageWrapper(_httpResponse, _responseContent);
        if (_httpResponse.Headers.Contains("x-ms-request-id"))
        {
            ex.RequestId = _httpResponse.Headers.GetValues("x-ms-request-id").FirstOrDefault();
        }
        if (_shouldTrace)
        {
            Microsoft.Rest.ServiceClientTracing.Error(_invocationId, ex);
        }
        _httpRequest.Dispose();
        if (_httpResponse != null)
        {
            _httpResponse.Dispose();
        }
        throw ex;
    }
    // Create Result: on success the caller owns disposal of request/response.
    var _result = new Microsoft.Rest.Azure.AzureOperationResponse<AlertRuleResource>();
    _result.Request = _httpRequest;
    _result.Response = _httpResponse;
    if (_httpResponse.Headers.Contains("x-ms-request-id"))
    {
        _result.RequestId = _httpResponse.Headers.GetValues("x-ms-request-id").FirstOrDefault();
    }
    // Deserialize Response
    if ((int)_statusCode == 200)
    {
        _responseContent = await _httpResponse.Content.ReadAsStringAsync().ConfigureAwait(false);
        try
        {
            _result.Body = Microsoft.Rest.Serialization.SafeJsonConvert.DeserializeObject<AlertRuleResource>(_responseContent, this.Client.DeserializationSettings);
        }
        catch (Newtonsoft.Json.JsonException ex)
        {
            _httpRequest.Dispose();
            if (_httpResponse != null)
            {
                _httpResponse.Dispose();
            }
            throw new Microsoft.Rest.SerializationException("Unable to deserialize the response.", _responseContent, ex);
        }
    }
    if (_shouldTrace)
    {
        Microsoft.Rest.ServiceClientTracing.Exit(_invocationId, _result);
    }
    return _result;
}

/// <summary>
/// List the alert rules within a resource group.
/// </summary>
/// <param name='resourceGroupName'>
/// The name of the resource group.
/// </param>
/// <param name='odataQuery'>
/// OData parameters to apply to the operation.
/// </param>
/// <param name='customHeaders'>
/// Headers that will be added to request.
/// </param>
/// <param name='cancellationToken'>
/// The cancellation token.
/// </param>
/// <exception cref="Microsoft.Rest.Azure.CloudException">
/// Thrown when the operation returned an invalid status code
/// </exception>
/// <exception cref="Microsoft.Rest.SerializationException">
/// Thrown when unable to deserialize the response
/// </exception>
/// <exception cref="Microsoft.Rest.ValidationException">
/// Thrown when a required parameter is null
/// </exception>
/// <return>
/// A response object containing the response body and response headers.
/// </return>
public async System.Threading.Tasks.Task<Microsoft.Rest.Azure.AzureOperationResponse<System.Collections.Generic.IEnumerable<AlertRuleResource>>> ListByResourceGroupWithHttpMessagesAsync(string resourceGroupName, Microsoft.Rest.Azure.OData.ODataQuery<AlertRuleResource> odataQuery = default(Microsoft.Rest.Azure.OData.ODataQuery<AlertRuleResource>), System.Collections.Generic.Dictionary<string, System.Collections.Generic.List<string>> customHeaders = null, System.Threading.CancellationToken cancellationToken = default(System.Threading.CancellationToken))
{
    // Validate required parameters before any network work.
    if (resourceGroupName == null)
    {
        throw new Microsoft.Rest.ValidationException(Microsoft.Rest.ValidationRules.CannotBeNull, "resourceGroupName");
    }
    if (this.Client.SubscriptionId == null)
    {
        throw new Microsoft.Rest.ValidationException(Microsoft.Rest.ValidationRules.CannotBeNull, "this.Client.SubscriptionId");
    }
    string apiVersion = "2016-03-01";
    // Tracing
    bool _shouldTrace = Microsoft.Rest.ServiceClientTracing.IsEnabled;
    string _invocationId = null;
    if (_shouldTrace)
    {
        _invocationId = Microsoft.Rest.ServiceClientTracing.NextInvocationId.ToString();
        System.Collections.Generic.Dictionary<string, object> tracingParameters = new System.Collections.Generic.Dictionary<string, object>();
        tracingParameters.Add("odataQuery", odataQuery);
        tracingParameters.Add("resourceGroupName", resourceGroupName);
        tracingParameters.Add("apiVersion", apiVersion);
        tracingParameters.Add("cancellationToken", cancellationToken);
        Microsoft.Rest.ServiceClientTracing.Enter(_invocationId, this, "ListByResourceGroup", tracingParameters);
    }
    // Construct URL: collection endpoint, no rule name in the path.
    var _baseUrl = this.Client.BaseUri.AbsoluteUri;
    var _url = new System.Uri(new System.Uri(_baseUrl + (_baseUrl.EndsWith("/") ? "" : "/")), "subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/microsoft.insights/alertrules").ToString();
    _url = _url.Replace("{resourceGroupName}", System.Uri.EscapeDataString(resourceGroupName));
    _url = _url.Replace("{subscriptionId}", System.Uri.EscapeDataString(this.Client.SubscriptionId));
    System.Collections.Generic.List<string> _queryParameters = new System.Collections.Generic.List<string>();
    // The ODataQuery renders itself as a ready-made query-string fragment.
    if (odataQuery != null)
    {
        var _odataFilter = odataQuery.ToString();
        if (!string.IsNullOrEmpty(_odataFilter))
        {
            _queryParameters.Add(_odataFilter);
        }
    }
    if (apiVersion != null)
    {
        _queryParameters.Add(string.Format("api-version={0}", System.Uri.EscapeDataString(apiVersion)));
    }
    if (_queryParameters.Count > 0)
    {
        _url += (_url.Contains("?") ? "&" : "?") + string.Join("&", _queryParameters);
    }
    // Create HTTP transport objects
    System.Net.Http.HttpRequestMessage _httpRequest = new System.Net.Http.HttpRequestMessage();
    System.Net.Http.HttpResponseMessage _httpResponse = null;
    _httpRequest.Method = new System.Net.Http.HttpMethod("GET");
    _httpRequest.RequestUri = new System.Uri(_url);
    // Set Headers: custom headers override any default with the same key.
    if (this.Client.GenerateClientRequestId != null && this.Client.GenerateClientRequestId.Value)
    {
        _httpRequest.Headers.TryAddWithoutValidation("x-ms-client-request-id", System.Guid.NewGuid().ToString());
    }
    if (this.Client.AcceptLanguage != null)
    {
        if (_httpRequest.Headers.Contains("accept-language"))
        {
            _httpRequest.Headers.Remove("accept-language");
        }
        _httpRequest.Headers.TryAddWithoutValidation("accept-language", this.Client.AcceptLanguage);
    }
    if (customHeaders != null)
    {
        foreach (var _header in customHeaders)
        {
            if (_httpRequest.Headers.Contains(_header.Key))
            {
                _httpRequest.Headers.Remove(_header.Key);
            }
            _httpRequest.Headers.TryAddWithoutValidation(_header.Key, _header.Value);
        }
    }
    // Serialize Request: GET carries no body.
    string _requestContent = null;
    // Set Credentials
    if (this.Client.Credentials != null)
    {
        cancellationToken.ThrowIfCancellationRequested();
        await this.Client.Credentials.ProcessHttpRequestAsync(_httpRequest, cancellationToken).ConfigureAwait(false);
    }
    // Send Request
    if (_shouldTrace)
    {
        Microsoft.Rest.ServiceClientTracing.SendRequest(_invocationId, _httpRequest);
    }
    cancellationToken.ThrowIfCancellationRequested();
    _httpResponse = await this.Client.HttpClient.SendAsync(_httpRequest, cancellationToken).ConfigureAwait(false);
    if (_shouldTrace)
    {
        Microsoft.Rest.ServiceClientTracing.ReceiveResponse(_invocationId, _httpResponse);
    }
    System.Net.HttpStatusCode _statusCode = _httpResponse.StatusCode;
    cancellationToken.ThrowIfCancellationRequested();
    string _responseContent = null;
    // Non-200 responses are surfaced as CloudException with the parsed error body
    // when possible; request/response are disposed on the failure path only.
    if ((int)_statusCode != 200)
    {
        var ex = new Microsoft.Rest.Azure.CloudException(string.Format("Operation returned an invalid status code '{0}'", _statusCode));
        try
        {
            _responseContent = await _httpResponse.Content.ReadAsStringAsync().ConfigureAwait(false);
            CloudError _errorBody = Microsoft.Rest.Serialization.SafeJsonConvert.DeserializeObject<CloudError>(_responseContent, this.Client.DeserializationSettings);
            if (_errorBody != null)
            {
                ex = new Microsoft.Rest.Azure.CloudException(_errorBody.Message);
                ex.Body = _errorBody;
            }
        }
        catch (Newtonsoft.Json.JsonException)
        {
            // Ignore the exception: fall back to the generic status-code message.
        }
        ex.Request = new Microsoft.Rest.HttpRequestMessageWrapper(_httpRequest, _requestContent);
        ex.Response = new Microsoft.Rest.HttpResponseMessageWrapper(_httpResponse, _responseContent);
        if (_httpResponse.Headers.Contains("x-ms-request-id"))
        {
            ex.RequestId = _httpResponse.Headers.GetValues("x-ms-request-id").FirstOrDefault();
        }
        if (_shouldTrace)
        {
            Microsoft.Rest.ServiceClientTracing.Error(_invocationId, ex);
        }
        _httpRequest.Dispose();
        if (_httpResponse != null)
        {
            _httpResponse.Dispose();
        }
        throw ex;
    }
    // Create Result: on success the caller owns disposal of request/response.
    var _result = new Microsoft.Rest.Azure.AzureOperationResponse<System.Collections.Generic.IEnumerable<AlertRuleResource>>();
    _result.Request = _httpRequest;
    _result.Response = _httpResponse;
    if (_httpResponse.Headers.Contains("x-ms-request-id"))
    {
        _result.RequestId = _httpResponse.Headers.GetValues("x-ms-request-id").FirstOrDefault();
    }
    // Deserialize Response: the payload is deserialized as a Page1 wrapper, which
    // is assignable to IEnumerable<AlertRuleResource> (single page; no follow-up
    // next-link call is made here).
    if ((int)_statusCode == 200)
    {
        _responseContent = await _httpResponse.Content.ReadAsStringAsync().ConfigureAwait(false);
        try
        {
            _result.Body = Microsoft.Rest.Serialization.SafeJsonConvert.DeserializeObject<Page1<AlertRuleResource>>(_responseContent, this.Client.DeserializationSettings);
        }
        catch (Newtonsoft.Json.JsonException ex)
        {
            _httpRequest.Dispose();
            if (_httpResponse != null)
            {
                _httpResponse.Dispose();
            }
            throw new Microsoft.Rest.SerializationException("Unable to deserialize the response.", _responseContent, ex);
        }
    }
    if (_shouldTrace)
    {
        Microsoft.Rest.ServiceClientTracing.Exit(_invocationId, _result);
    }
    return _result;
}
}
}
// Copyright (c) Microsoft. All rights reserved.
// Licensed under the MIT license. See LICENSE file in the project root for full license information.

using System;
using System.Diagnostics;
using System.IO;
using System.Net.Http;
using System.Net.Http.Headers;
using System.Runtime.InteropServices;
using System.Text;
using System.Threading;
using System.Threading.Tasks;

using SafeCurlHandle = Interop.libcurl.SafeCurlHandle;
using SafeCurlMultiHandle = Interop.libcurl.SafeCurlMultiHandle;
using SafeCurlSlistHandle = Interop.libcurl.SafeCurlSlistHandle;
using CURLoption = Interop.libcurl.CURLoption;
using CURLMoption = Interop.libcurl.CURLMoption;
using CURLcode = Interop.libcurl.CURLcode;
using CURLMcode = Interop.libcurl.CURLMcode;
using CURLINFO = Interop.libcurl.CURLINFO;
using CurlVersionInfoData = Interop.libcurl.curl_version_info_data;
using CurlFeatures = Interop.libcurl.CURL_VERSION_Features;
using CURLProxyType = Interop.libcurl.curl_proxytype;
using size_t = System.IntPtr;

namespace System.Net.Http
{
    /// <summary>
    /// <see cref="HttpMessageHandler"/> implementation backed by libcurl's "multi"
    /// interface. One shared multi handle drives all requests; each request gets its
    /// own easy handle whose lifetime is tracked through a
    /// <see cref="RequestCompletionSource"/> pinned via a <see cref="GCHandle"/>.
    /// The native socket/timer callbacks referenced here (s_socketCallback,
    /// s_multiTimerCallback, s_receiveHeadersCallback, ...) live in the other part
    /// of this partial class.
    /// </summary>
    internal partial class CurlHandler : HttpMessageHandler
    {
        #region Constants

        private const string UriSchemeHttp = "http";
        private const string UriSchemeHttps = "https";
        private const string EncodingNameGzip = "gzip";
        private const string EncodingNameDeflate = "deflate";
        // The order in which libcurl goes over authentication schemes.
        private readonly static string[] AuthenticationSchemes = { "Negotiate", "Digest", "Basic" };
        private static readonly string[] s_headerDelimiters = new string[] { "\r\n" };
        private const int s_requestBufferSize = 16384; // Default used by libcurl
        // "TransferEncoding:" with no value tells libcurl to suppress the header entirely.
        private const string NoTransferEncoding = HttpKnownHeaderNames.TransferEncoding + ":";
        private readonly static CurlVersionInfoData curlVersionInfoData;
        private const int CurlAge = 5;
        private const int MinCurlAge = 3;

        #endregion

        #region Fields

        private static readonly bool _supportsAutomaticDecompression;
        private static readonly bool _supportsSSL;
        private volatile bool _anyOperationStarted;
        private volatile bool _disposed;
        private IWebProxy _proxy = null;
        private ICredentials _serverCredentials = null;
        private ProxyUsePolicy _proxyPolicy = ProxyUsePolicy.UseDefaultProxy;
        private DecompressionMethods _automaticDecompression = DecompressionMethods.GZip | DecompressionMethods.Deflate;
        private SafeCurlMultiHandle _multiHandle;
        private GCHandle _multiHandlePtr = new GCHandle();
        private CookieContainer _cookieContainer = null;
        private bool _useCookie = false;
        private bool _automaticRedirection = true;
        private int _maxAutomaticRedirections = 50;

        #endregion

        /// <summary>
        /// Performs one-time libcurl initialization for the process and caches the
        /// feature flags (SSL, zlib) that gate handler capabilities.
        /// </summary>
        static CurlHandler()
        {
            int result = Interop.libcurl.curl_global_init(Interop.libcurl.CurlGlobalFlags.CURL_GLOBAL_ALL);
            if (result != CURLcode.CURLE_OK)
            {
                throw new InvalidOperationException("Cannot use libcurl in this process");
            }

            curlVersionInfoData = Marshal.PtrToStructure<CurlVersionInfoData>(Interop.libcurl.curl_version_info(CurlAge));
            if (curlVersionInfoData.age < MinCurlAge)
            {
                throw new InvalidOperationException(SR.net_http_unix_https_libcurl_too_old);
            }

            _supportsSSL = (CurlFeatures.CURL_VERSION_SSL & curlVersionInfoData.features) != 0;
            _supportsAutomaticDecompression = (CurlFeatures.CURL_VERSION_LIBZ & curlVersionInfoData.features) != 0;
        }

        /// <summary>
        /// Creates the shared multi handle and wires up its socket/timer callbacks.
        /// </summary>
        internal CurlHandler()
        {
            _multiHandle = Interop.libcurl.curl_multi_init();
            if (_multiHandle.IsInvalid)
            {
                throw new HttpRequestException(SR.net_http_client_execution_error);
            }
            SetCurlMultiOptions();
        }

        #region Properties

        /// <summary>Whether libcurl follows HTTP redirects automatically.</summary>
        internal bool AutomaticRedirection
        {
            get
            {
                return _automaticRedirection;
            }
            set
            {
                CheckDisposedOrStarted();
                _automaticRedirection = value;
            }
        }

        internal bool SupportsProxy
        {
            get
            {
                return true;
            }
        }

        /// <summary>
        /// Maps the boolean HttpClientHandler-style setting onto the tri-state
        /// <see cref="ProxyUsePolicy"/>.
        /// </summary>
        internal bool UseProxy
        {
            get
            {
                return _proxyPolicy != ProxyUsePolicy.DoNotUseProxy;
            }
            set
            {
                CheckDisposedOrStarted();
                if (value)
                {
                    _proxyPolicy = ProxyUsePolicy.UseCustomProxy;
                }
                else
                {
                    _proxyPolicy = ProxyUsePolicy.DoNotUseProxy;
                }
            }
        }

        internal IWebProxy Proxy
        {
            get
            {
                return _proxy;
            }
            set
            {
                CheckDisposedOrStarted();
                _proxy = value;
            }
        }

        /// <summary>Credentials used for server (not proxy) authentication.</summary>
        internal ICredentials Credentials
        {
            get
            {
                return _serverCredentials;
            }
            set
            {
                CheckDisposedOrStarted();
                _serverCredentials = value;
            }
        }

        /// <summary>Only manual client certificates are supported on this platform.</summary>
        internal ClientCertificateOption ClientCertificateOptions
        {
            get
            {
                return ClientCertificateOption.Manual;
            }
            set
            {
                if (ClientCertificateOption.Manual != value)
                {
                    throw new PlatformNotSupportedException(SR.net_http_unix_invalid_client_cert_option);
                }
            }
        }

        internal bool SupportsAutomaticDecompression
        {
            get
            {
                return _supportsAutomaticDecompression;
            }
        }

        internal DecompressionMethods AutomaticDecompression
        {
            get
            {
                return _automaticDecompression;
            }
            set
            {
                CheckDisposedOrStarted();
                _automaticDecompression = value;
            }
        }

        internal bool UseCookie
        {
            get
            {
                return _useCookie;
            }
            set
            {
                CheckDisposedOrStarted();
                _useCookie = value;
            }
        }

        internal CookieContainer CookieContainer
        {
            get
            {
                return _cookieContainer;
            }
            set
            {
                CheckDisposedOrStarted();
                _cookieContainer = value;
            }
        }

        /// <summary>Maximum redirects libcurl is allowed to follow; must be positive.</summary>
        internal int MaxAutomaticRedirections
        {
            get
            {
                return _maxAutomaticRedirections;
            }
            set
            {
                if (value <= 0)
                {
                    throw new ArgumentOutOfRangeException(
                        "value",
                        value,
                        string.Format(SR.net_http_value_must_be_greater_than, 0));
                }
                CheckDisposedOrStarted();
                _maxAutomaticRedirections = value;
            }
        }

        #endregion

        protected override void Dispose(bool disposing)
        {
            if (disposing && !_disposed)
            {
                _disposed = true;
                if (_multiHandlePtr.IsAllocated)
                {
                    _multiHandlePtr.Free();
                }

                // FIX: previously the field was only nulled out, leaking the native
                // multi handle until finalization. Dispose it explicitly; SafeHandle
                // ref-counting keeps the underlying handle alive for any in-flight
                // request that still holds a DangerousAddRef on it.
                SafeCurlMultiHandle multiHandle = _multiHandle;
                _multiHandle = null;
                if (multiHandle != null)
                {
                    multiHandle.Dispose();
                }
            }

            base.Dispose(disposing);
        }

        /// <summary>
        /// Validates the request, snapshots handler settings into a
        /// <see cref="RequestCompletionSource"/>, and kicks off the async request.
        /// </summary>
        protected internal override Task<HttpResponseMessage> SendAsync(
            HttpRequestMessage request,
            CancellationToken cancellationToken)
        {
            if (request == null)
            {
                throw new ArgumentNullException("request", SR.net_http_handler_norequest);
            }

            if ((request.RequestUri.Scheme != UriSchemeHttp) && (request.RequestUri.Scheme != UriSchemeHttps))
            {
                throw NotImplemented.ByDesignWithMessage(SR.net_http_client_http_baseaddress_required);
            }

            if (request.RequestUri.Scheme == UriSchemeHttps && !_supportsSSL)
            {
                throw new PlatformNotSupportedException(SR.net_http_unix_https_support_unavailable_libcurl);
            }

            if (request.Headers.TransferEncodingChunked.GetValueOrDefault() && (request.Content == null))
            {
                throw new InvalidOperationException(SR.net_http_chunked_not_allowed_with_empty_content);
            }

            // TODO: Check that SendAsync is not being called again for same request object.
            //       Probably fix is needed in WinHttpHandler as well

            CheckDisposed();

            SetOperationStarted();

            if (cancellationToken.IsCancellationRequested)
            {
                return Task.FromCanceled<HttpResponseMessage>(cancellationToken);
            }

            // Create RequestCompletionSource object and save current values of handler settings.
            RequestCompletionSource state = new RequestCompletionSource
            {
                CancellationToken = cancellationToken,
                RequestMessage = request,
            };

            BeginRequest(state);
            return state.Task;
        }

        #region Private methods

        // async void is deliberate: completion flows through 'state' (a
        // TaskCompletionSource); all exceptions are routed to it via
        // HandleAsyncException so nothing can escape unobserved.
        private async void BeginRequest(RequestCompletionSource state)
        {
            SafeCurlHandle requestHandle = new SafeCurlHandle();
            GCHandle stateHandle = new GCHandle();
            bool needCleanup = false;

            try
            {
                // Prepare context objects
                state.ResponseMessage = new CurlResponseMessage(state.RequestMessage);

                stateHandle = GCHandle.Alloc(state);
                requestHandle = CreateRequestHandle(state, stateHandle);
                state.RequestHandle = requestHandle;
                needCleanup = true;

                if (state.CancellationToken.IsCancellationRequested)
                {
                    state.TrySetCanceled(state.CancellationToken);
                    return;
                }

                if (state.RequestMessage.Content != null)
                {
                    Stream requestContentStream =
                        await state.RequestMessage.Content.ReadAsStreamAsync().ConfigureAwait(false);
                    if (state.CancellationToken.IsCancellationRequested)
                    {
                        state.TrySetCanceled(state.CancellationToken);
                        return;
                    }
                    state.RequestContentStream = requestContentStream;
                    state.RequestContentBuffer = new byte[s_requestBufferSize];
                }

                AddEasyHandle(state);
                needCleanup = false;
            }
            catch (Exception ex)
            {
                HandleAsyncException(state, ex);
            }
            finally
            {
                if (needCleanup)
                {
                    // Request never made it onto the multi stack; tear everything down here.
                    RemoveEasyHandle(_multiHandle, stateHandle, false);
                }
                else if (state.Task.IsCompleted)
                {
                    if (stateHandle.IsAllocated)
                    {
                        stateHandle.Free();
                    }
                    if (!requestHandle.IsInvalid)
                    {
                        SafeCurlHandle.DisposeAndClearHandle(ref requestHandle);
                    }
                }
            }
        }

        /// <summary>
        /// Invoked when libcurl reports a transfer finished; completes the task with
        /// the response, an error, or cancellation, then releases native resources.
        /// </summary>
        private static void EndRequest(SafeCurlMultiHandle multiHandle, IntPtr statePtr, int result)
        {
            GCHandle stateHandle = GCHandle.FromIntPtr(statePtr);
            RequestCompletionSource state = (RequestCompletionSource)stateHandle.Target;
            try
            {
                // No more callbacks so no more data
                state.ResponseMessage.ContentStream.SignalComplete();

                if (CURLcode.CURLE_OK == result)
                {
                    state.TrySetResult(state.ResponseMessage);
                }
                else
                {
                    state.TrySetException(new HttpRequestException(SR.net_http_client_execution_error,
                        GetCurlException(result)));
                }
            }
            catch (Exception ex)
            {
                HandleAsyncException(state, ex);
            }
            finally
            {
                RemoveEasyHandle(multiHandle, stateHandle, true);
            }
        }

        // Once any request has started, handler configuration becomes immutable
        // (see CheckDisposedOrStarted).
        private void SetOperationStarted()
        {
            if (!_anyOperationStarted)
            {
                _anyOperationStarted = true;
            }
        }

        /// <summary>
        /// Allocates and configures an easy handle for a single request: URL,
        /// redirects, upload/HEAD flags, callbacks, decompression, proxy, cookies
        /// and headers.
        /// </summary>
        private SafeCurlHandle CreateRequestHandle(RequestCompletionSource state, GCHandle stateHandle)
        {
            // TODO: If this impacts perf, optimize using a handle pool
            SafeCurlHandle requestHandle = Interop.libcurl.curl_easy_init();
            if (requestHandle.IsInvalid)
            {
                throw new HttpRequestException(SR.net_http_client_execution_error);
            }

            SetCurlOption(requestHandle, CURLoption.CURLOPT_URL, state.RequestMessage.RequestUri.AbsoluteUri);
            if (_automaticRedirection)
            {
                SetCurlOption(requestHandle, CURLoption.CURLOPT_FOLLOWLOCATION, 1L);

                // Set maximum automatic redirection option
                SetCurlOption(requestHandle, CURLoption.CURLOPT_MAXREDIRS, _maxAutomaticRedirections);
            }

            if (state.RequestMessage.Content != null)
            {
                SetCurlOption(requestHandle, CURLoption.CURLOPT_UPLOAD, 1L);
            }

            if (state.RequestMessage.Method == HttpMethod.Head)
            {
                SetCurlOption(requestHandle, CURLoption.CURLOPT_NOBODY, 1L);
            }

            // The pinned state object is the correlation cookie passed back into
            // every native callback for this transfer.
            IntPtr statePtr = GCHandle.ToIntPtr(stateHandle);
            SetCurlOption(requestHandle, CURLoption.CURLOPT_PRIVATE, statePtr);

            SetCurlCallbacks(requestHandle, state.RequestMessage, statePtr);

            if (_supportsAutomaticDecompression)
            {
                SetRequestHandleDecompressionOptions(requestHandle);
            }

            SetProxyOptions(requestHandle, state.RequestMessage.RequestUri);

            SetCookieOption(requestHandle, state.RequestMessage.RequestUri);

            state.RequestHeaderHandle = SetRequestHeaders(requestHandle, state.RequestMessage);

            // TODO: Handle other options

            return requestHandle;
        }

        // Translates the DecompressionMethods flags into a libcurl Accept-Encoding value.
        private void SetRequestHandleDecompressionOptions(SafeCurlHandle requestHandle)
        {
            bool gzip = (AutomaticDecompression & DecompressionMethods.GZip) != 0;
            bool deflate = (AutomaticDecompression & DecompressionMethods.Deflate) != 0;
            if (gzip || deflate)
            {
                string encoding = (gzip && deflate) ?
                                   EncodingNameGzip + "," + EncodingNameDeflate :
                                   gzip ? EncodingNameGzip :
                                   EncodingNameDeflate;
                SetCurlOption(requestHandle, CURLoption.CURLOPT_ACCEPTENCODING, encoding);
            }
        }

        /// <summary>
        /// Applies proxy settings. An empty CURLOPT_PROXY string explicitly disables
        /// libcurl's environment-variable proxy detection; leaving it unset lets the
        /// environment default apply.
        /// </summary>
        private void SetProxyOptions(SafeCurlHandle requestHandle, Uri requestUri)
        {
            if (_proxyPolicy == ProxyUsePolicy.DoNotUseProxy)
            {
                SetCurlOption(requestHandle, CURLoption.CURLOPT_PROXY, string.Empty);
                return;
            }

            if ((_proxyPolicy == ProxyUsePolicy.UseDefaultProxy) || (Proxy == null))
            {
                return;
            }

            Debug.Assert((Proxy != null) && (_proxyPolicy == ProxyUsePolicy.UseCustomProxy));
            if (Proxy.IsBypassed(requestUri))
            {
                SetCurlOption(requestHandle, CURLoption.CURLOPT_PROXY, string.Empty);
                return;
            }

            var proxyUri = Proxy.GetProxy(requestUri);
            if (proxyUri == null)
            {
                return;
            }

            SetCurlOption(requestHandle, CURLoption.CURLOPT_PROXYTYPE, CURLProxyType.CURLPROXY_HTTP);
            SetCurlOption(requestHandle, CURLoption.CURLOPT_PROXY, proxyUri.AbsoluteUri);
            SetCurlOption(requestHandle, CURLoption.CURLOPT_PROXYPORT, proxyUri.Port);

            NetworkCredential credentials = GetCredentials(Proxy.Credentials, requestUri);
            if (credentials != null)
            {
                if (string.IsNullOrEmpty(credentials.UserName))
                {
                    throw new ArgumentException(SR.net_http_argument_empty_string, "UserName");
                }

                // libcurl expects "[domain\]user:password".
                string credentialText;
                if (string.IsNullOrEmpty(credentials.Domain))
                {
                    credentialText = string.Format("{0}:{1}", credentials.UserName, credentials.Password);
                }
                else
                {
                    credentialText = string.Format("{2}\\{0}:{1}", credentials.UserName, credentials.Password, credentials.Domain);
                }
                SetCurlOption(requestHandle, CURLoption.CURLOPT_PROXYUSERPWD, credentialText);
            }
        }

        // Sends the container's cookies for this URI via CURLOPT_COOKIE, if enabled.
        private void SetCookieOption(SafeCurlHandle requestHandle, Uri requestUri)
        {
            if (!_useCookie)
            {
                return;
            }
            else if (_cookieContainer == null)
            {
                throw new InvalidOperationException(SR.net_http_invalid_cookiecontainer);
            }

            string cookieValues = _cookieContainer.GetCookieHeader(requestUri);
            if (cookieValues != null)
            {
                SetCurlOption(requestHandle, CURLoption.CURLOPT_COOKIE, cookieValues);
            }
        }

        // Probes the credential cache for each scheme in libcurl's preference order.
        private NetworkCredential GetCredentials(ICredentials proxyCredentials, Uri requestUri)
        {
            if (proxyCredentials == null)
            {
                return null;
            }

            foreach (var authScheme in AuthenticationSchemes)
            {
                NetworkCredential proxyCreds = proxyCredentials.GetCredential(requestUri, authScheme);
                if (proxyCreds != null)
                {
                    return proxyCreds;
                }
            }
            return null;
        }

        /// <summary>
        /// Routes a failure to the request's completion source, preferring
        /// cancellation over exceptions and wrapping unexpected exceptions in
        /// <see cref="HttpRequestException"/>.
        /// </summary>
        private static void HandleAsyncException(RequestCompletionSource state, Exception ex)
        {
            if ((null == ex) && state.CancellationToken.IsCancellationRequested)
            {
                state.TrySetCanceled(state.CancellationToken);
            }
            if (null == ex)
            {
                return;
            }

            var oce = (ex as OperationCanceledException);
            if (oce != null)
            {
                // If the exception was due to the cancellation token being canceled, throw cancellation exception.
                Debug.Assert(state.CancellationToken.IsCancellationRequested);
                state.TrySetCanceled(oce.CancellationToken);
            }
            else if (ex is HttpRequestException)
            {
                state.TrySetException(ex);
            }
            else
            {
                state.TrySetException(new HttpRequestException(SR.net_http_client_execution_error, ex));
            }
        }

        private void CheckDisposed()
        {
            if (_disposed)
            {
                throw new ObjectDisposedException(GetType().FullName);
            }
        }

        private void CheckDisposedOrStarted()
        {
            CheckDisposed();
            if (_anyOperationStarted)
            {
                throw new InvalidOperationException(SR.net_http_operation_started);
            }
        }

        // isMulti selects between the easy- and multi-interface error-string tables.
        private static string GetCurlErrorString(int code, bool isMulti = false)
        {
            IntPtr ptr = isMulti ? Interop.libcurl.curl_multi_strerror(code) :
                Interop.libcurl.curl_easy_strerror(code);
            return Marshal.PtrToStringAnsi(ptr);
        }

        private static Exception GetCurlException(int code, bool isMulti = false)
        {
            return new Exception(GetCurlErrorString(code, isMulti));
        }

        /// <summary>
        /// Wires the native header/body/read/ioctl callbacks for this transfer.
        /// HEAD requests get no write callback; requests without content get no
        /// read/ioctl callbacks.
        /// </summary>
        private void SetCurlCallbacks(SafeCurlHandle requestHandle, HttpRequestMessage request, IntPtr stateHandle)
        {
            SetCurlOption(requestHandle, CURLoption.CURLOPT_HEADERDATA, stateHandle);
            SetCurlOption(requestHandle, CURLoption.CURLOPT_HEADERFUNCTION, s_receiveHeadersCallback);
            if (request.Method != HttpMethod.Head)
            {
                SetCurlOption(requestHandle, CURLoption.CURLOPT_WRITEDATA, stateHandle);
                unsafe
                {
                    SetCurlOption(requestHandle, CURLoption.CURLOPT_WRITEFUNCTION, s_receiveBodyCallback);
                }
            }
            if (request.Content != null)
            {
                SetCurlOption(requestHandle, CURLoption.CURLOPT_READDATA, stateHandle);
                SetCurlOption(requestHandle, CURLoption.CURLOPT_READFUNCTION, s_sendCallback);
                SetCurlOption(requestHandle, CURLoption.CURLOPT_IOCTLDATA, stateHandle);
                SetCurlOption(requestHandle, CURLoption.CURLOPT_IOCTLFUNCTION, s_sendIoCtlCallback);
            }
        }

        // --- curl_easy_setopt wrappers: each overload marshals a different value
        // --- type and converts a failure code into HttpRequestException.

        private void SetCurlOption(SafeCurlHandle handle, int option, string value)
        {
            int result = Interop.libcurl.curl_easy_setopt(handle, option, value);
            if (result != CURLcode.CURLE_OK)
            {
                throw new HttpRequestException(SR.net_http_client_execution_error,
                    GetCurlException(result));
            }
        }

        private void SetCurlOption(SafeCurlHandle handle, int option, long value)
        {
            int result = Interop.libcurl.curl_easy_setopt(handle, option, value);
            if (result != CURLcode.CURLE_OK)
            {
                throw new HttpRequestException(SR.net_http_client_execution_error,
                    GetCurlException(result));
            }
        }

        private void SetCurlOption(SafeCurlHandle handle, int option, Interop.libcurl.curl_readwrite_callback value)
        {
            int result = Interop.libcurl.curl_easy_setopt(handle, option, value);
            if (result != CURLcode.CURLE_OK)
            {
                throw new HttpRequestException(SR.net_http_client_execution_error,
                    GetCurlException(result));
            }
        }

        private unsafe void SetCurlOption(SafeCurlHandle handle, int option, Interop.libcurl.curl_unsafe_write_callback value)
        {
            int result = Interop.libcurl.curl_easy_setopt(handle, option, value);
            if (result != CURLcode.CURLE_OK)
            {
                throw new HttpRequestException(SR.net_http_client_execution_error,
                    GetCurlException(result));
            }
        }

        private void SetCurlOption(SafeCurlHandle handle, int option, Interop.libcurl.curl_ioctl_callback value)
        {
            int result = Interop.libcurl.curl_easy_setopt(handle, option, value);
            if (result != CURLcode.CURLE_OK)
            {
                throw new HttpRequestException(SR.net_http_client_execution_error,
                    GetCurlException(result));
            }
        }

        private void SetCurlOption(SafeCurlHandle handle, int option, IntPtr value)
        {
            int result = Interop.libcurl.curl_easy_setopt(handle, option, value);
            if (result != CURLcode.CURLE_OK)
            {
                throw new HttpRequestException(SR.net_http_client_execution_error,
                    GetCurlException(result));
            }
        }

        /// <summary>
        /// Registers the socket and timer callbacks on the multi handle; the GCHandle
        /// to the multi handle itself is passed as the timer callback context.
        /// </summary>
        private void SetCurlMultiOptions()
        {
            _multiHandlePtr = GCHandle.Alloc(_multiHandle);
            IntPtr callbackContext = GCHandle.ToIntPtr(_multiHandlePtr);
            int result = Interop.libcurl.curl_multi_setopt(_multiHandle, CURLMoption.CURLMOPT_SOCKETFUNCTION, s_socketCallback);
            if (result == CURLMcode.CURLM_OK)
            {
                result = Interop.libcurl.curl_multi_setopt(_multiHandle, CURLMoption.CURLMOPT_TIMERFUNCTION, s_multiTimerCallback);
            }
            if (result == CURLMcode.CURLM_OK)
            {
                result = Interop.libcurl.curl_multi_setopt(_multiHandle, CURLMoption.CURLMOPT_TIMERDATA, callbackContext);
            }
            if (result != CURLMcode.CURLM_OK)
            {
                throw new HttpRequestException(SR.net_http_client_execution_error,
                    GetCurlException(result, true));
            }
        }

        /// <summary>
        /// Builds the curl_slist of request (and content) headers and attaches it via
        /// CURLOPT_HTTPHEADER. The returned safe handle must stay alive for the
        /// duration of the transfer (freed in RemoveEasyHandle).
        /// </summary>
        private SafeCurlSlistHandle SetRequestHeaders(SafeCurlHandle handle, HttpRequestMessage request)
        {
            SafeCurlSlistHandle retVal = new SafeCurlSlistHandle();
            if (request.Headers == null)
            {
                return retVal;
            }

            HttpHeaders contentHeaders = null;
            if (request.Content != null)
            {
                SetChunkedModeForSend(request);

                // TODO: Content-Length header isn't getting correctly placed using ToString()
                //       This is a bug in HttpContentHeaders that needs to be fixed.
                if (request.Content.Headers.ContentLength.HasValue)
                {
                    long contentLength = request.Content.Headers.ContentLength.Value;
                    request.Content.Headers.ContentLength = null;
                    request.Content.Headers.ContentLength = contentLength;
                }
                contentHeaders = request.Content.Headers;
            }

            string[] allHeaders = HeaderUtilities.DumpHeaders(request.Headers, contentHeaders)
                .Split(s_headerDelimiters, StringSplitOptions.RemoveEmptyEntries);
            bool gotReference = false;
            try
            {
                retVal.DangerousAddRef(ref gotReference);
                IntPtr rawHandle = IntPtr.Zero;
                for (int i = 0; i < allHeaders.Length; i++)
                {
                    string header = allHeaders[i].Trim();
                    // DumpHeaders brackets its output with '{' and '}' lines; skip them.
                    if (header.Equals("{") || header.Equals("}"))
                    {
                        continue;
                    }
                    rawHandle = Interop.libcurl.curl_slist_append(rawHandle, header);
                    retVal.SetHandle(rawHandle);
                }

                // Since libcurl always adds a Transfer-Encoding header, we need to explicitly block
                // it if caller specifically does not want to set the header
                if (request.Headers.TransferEncodingChunked.HasValue && !request.Headers.TransferEncodingChunked.Value)
                {
                    rawHandle = Interop.libcurl.curl_slist_append(rawHandle, NoTransferEncoding);
                    retVal.SetHandle(rawHandle);
                }

                if (!retVal.IsInvalid)
                {
                    SetCurlOption(handle, CURLoption.CURLOPT_HTTPHEADER, rawHandle);
                }
            }
            finally
            {
                if (gotReference)
                {
                    retVal.DangerousRelease();
                }
            }

            return retVal;
        }

        /// <summary>
        /// Resolves the conflict between 'Content-Length' and
        /// 'Transfer-Encoding: chunked' semantics: libcurl adds a Transfer-Encoding
        /// header by default and the request fails if both are set.
        /// </summary>
        private static void SetChunkedModeForSend(HttpRequestMessage request)
        {
            bool chunkedMode = request.Headers.TransferEncodingChunked.GetValueOrDefault();
            HttpContent requestContent = request.Content;
            Debug.Assert(requestContent != null);

            if (requestContent.Headers.ContentLength.HasValue)
            {
                if (chunkedMode)
                {
                    // Same behaviour as WinHttpHandler
                    requestContent.Headers.ContentLength = null;
                }
                else
                {
                    // Prevent libcurl from adding Transfer-Encoding header
                    request.Headers.TransferEncodingChunked = false;
                }
            }
        }

        /// <summary>
        /// Hands the easy handle to the multi handle so libcurl starts driving it.
        /// The ref counts taken here are deliberately released only in
        /// RemoveEasyHandle, when the transfer finishes.
        /// </summary>
        private void AddEasyHandle(RequestCompletionSource state)
        {
            bool gotReference = false;
            SafeCurlHandle requestHandle = state.RequestHandle;
            try
            {
                requestHandle.DangerousAddRef(ref gotReference);
                lock (_multiHandle)
                {
                    int result = Interop.libcurl.curl_multi_add_handle(_multiHandle, requestHandle);
                    // FIX: curl_multi_add_handle returns a CURLMcode; compare against
                    // CURLM_OK rather than the easy-interface CURLE_OK constant.
                    if (result != CURLMcode.CURLM_OK)
                    {
                        throw new HttpRequestException(SR.net_http_client_execution_error,
                            GetCurlException(result, true));
                    }
                }
                state.SessionHandle = _multiHandle;

                // Note that we are deliberately not decreasing the ref counts of
                // the multi and easy handles since that will be done in RemoveEasyHandle
                // when the request is completed and the handles are used in an
                // unmanaged context till then
                // TODO: Investigate if SafeCurlHandle is really useful since we are not
                //       avoiding any leaks due to the ref count increment
            }
            catch (Exception)
            {
                if (gotReference)
                {
                    requestHandle.DangerousRelease();
                }
                throw;
            }
        }

        /// <summary>
        /// Detaches the easy handle from the multi stack (if attached), releases the
        /// header slist and the easy handle, and frees the pinned state GCHandle.
        /// </summary>
        private static void RemoveEasyHandle(SafeCurlMultiHandle multiHandle, GCHandle stateHandle, bool onMultiStack)
        {
            RequestCompletionSource state = (RequestCompletionSource)stateHandle.Target;
            SafeCurlHandle requestHandle = state.RequestHandle;

            if (onMultiStack)
            {
                lock (multiHandle)
                {
                    Interop.libcurl.curl_multi_remove_handle(multiHandle, requestHandle);
                }
                state.SessionHandle = null;
                requestHandle.DangerousRelease();
            }

            if (!state.RequestHeaderHandle.IsInvalid)
            {
                SafeCurlSlistHandle headerHandle = state.RequestHeaderHandle;
                SafeCurlSlistHandle.DisposeAndClearHandle(ref headerHandle);
            }

            SafeCurlHandle.DisposeAndClearHandle(ref requestHandle);

            stateHandle.Free();
        }

        #endregion

        /// <summary>
        /// Per-request state bag: the completion source plus every handle and buffer
        /// the native callbacks need for the lifetime of the transfer.
        /// </summary>
        private sealed class RequestCompletionSource : TaskCompletionSource<HttpResponseMessage>
        {
            public CancellationToken CancellationToken { get; set; }

            public HttpRequestMessage RequestMessage { get; set; }

            public CurlResponseMessage ResponseMessage { get; set; }

            public SafeCurlMultiHandle SessionHandle { get; set; }

            public SafeCurlHandle RequestHandle { get; set; }

            public SafeCurlSlistHandle RequestHeaderHandle { get; set; }

            public Stream RequestContentStream { get; set; }

            public byte[] RequestContentBuffer { get; set; }
        }

        private enum ProxyUsePolicy
        {
            DoNotUseProxy = 0, // Do not use proxy. Ignores the value set in the environment.
            UseDefaultProxy = 1, // Do not set the proxy parameter. Use the value of environment variable, if any.
            UseCustomProxy = 2  // Use The proxy specified by the user.
        }
    }
}
// Copyright 2007-2016 Chris Patterson, Dru Sellers, Travis Smith, et. al.
//
// Licensed under the Apache License, Version 2.0 (the "License"); you may not use
// this file except in compliance with the License. You may obtain a copy of the
// License at
//
//     http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software distributed
// under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
// CONDITIONS OF ANY KIND, either express or implied. See the License for the
// specific language governing permissions and limitations under the License.
namespace MassTransit.TestFramework
{
    using System;
    using System.Threading.Tasks;
    using NUnit.Framework;
    using Testing;
    using Transports.InMemory;
    using Util;


    /// <summary>
    /// NUnit base fixture that runs tests against an in-memory bus. The bus can be
    /// created once per fixture (the default) or once per test, controlled by the
    /// <c>busPerTest</c> constructor argument.
    /// </summary>
    [TestFixture]
    public class InMemoryTestFixture :
        BusTestFixture
    {
        readonly IBusCreationScope _scope;

        public InMemoryTestFixture(bool busPerTest = false)
            : this(new InMemoryTestHarness(), busPerTest)
        {
        }

        public InMemoryTestFixture(InMemoryTestHarness harness, bool busPerTest = false)
            : base(harness)
        {
            InMemoryTestHarness = harness;

            // Per-test scope starts/stops the bus around each test; per-fixture
            // scope does it once around the whole fixture.
            _scope = busPerTest
                ? (IBusCreationScope)new PerTestBusCreationScope(SetupBus, TeardownBus)
                : new PerTestFixtureBusCreationScope(SetupBus, TeardownBus);

            InMemoryTestHarness.OnConnectObservers += ConnectObservers;
            InMemoryTestHarness.OnConfigureInMemoryBus += ConfigureInMemoryBus;
            InMemoryTestHarness.OnConfigureInMemoryReceiveEndpoint += ConfigureInMemoryReceiveEndpoint;
        }

        protected InMemoryTestHarness InMemoryTestHarness { get; }

        protected string InputQueueName => InMemoryTestHarness.InputQueueName;

        protected Uri BaseAddress => InMemoryTestHarness.BaseAddress;

        protected IInMemoryHost Host => InMemoryTestHarness.Host;

        /// <summary>
        /// The sending endpoint for the InputQueue
        /// </summary>
        protected ISendEndpoint InputQueueSendEndpoint => InMemoryTestHarness.InputQueueSendEndpoint;

        /// <summary>
        /// The sending endpoint for the Bus
        /// </summary>
        protected ISendEndpoint BusSendEndpoint => InMemoryTestHarness.BusSendEndpoint;

        protected Uri BusAddress => InMemoryTestHarness.BusAddress;

        protected Uri InputQueueAddress => InMemoryTestHarness.InputQueueAddress;

        [SetUp]
        public Task SetupInMemoryTest()
        {
            return _scope.TestSetup();
        }

        [TearDown]
        public Task TearDownInMemoryTest()
        {
            return _scope.TestTeardown();
        }

        [OneTimeSetUp]
        public Task SetupInMemoryTestFixture()
        {
            return _scope.TestFixtureSetup();
        }

        [OneTimeTearDown]
        public async Task TearDownInMemoryTestFixture()
        {
            await _scope.TestFixtureTeardown().ConfigureAwait(false);

            InMemoryTestHarness.Dispose();
        }

        protected IRequestClient<TRequest, TResponse> CreateRequestClient<TRequest, TResponse>()
            where TRequest : class
            where TResponse : class
        {
            return InMemoryTestHarness.CreateRequestClient<TRequest, TResponse>();
        }

        protected Task<ISendEndpoint> GetSendEndpoint(Uri address)
        {
            return InMemoryTestHarness.GetSendEndpoint(address);
        }

        Task SetupBus()
        {
            return InMemoryTestHarness.Start();
        }

        Task TeardownBus()
        {
            return InMemoryTestHarness.Stop();
        }

        // Overridden by derived fixtures to customize the bus configuration.
        protected virtual void ConfigureInMemoryBus(IInMemoryBusFactoryConfigurator configurator)
        {
        }

        // Overridden by derived fixtures to customize the input queue endpoint.
        protected virtual void ConfigureInMemoryReceiveEndpoint(IInMemoryReceiveEndpointConfigurator configurator)
        {
        }

        /// <summary>
        /// Connects a temporary receive endpoint that completes the returned task
        /// when a message of type T is published.
        /// </summary>
        protected Task<ConsumeContext<T>> ConnectPublishHandler<T>()
            where T : class
        {
            Task<ConsumeContext<T>> result = null;

            Host.ConnectReceiveEndpoint(NewId.NextGuid().ToString(), context =>
            {
                result = Handled<T>(context);
            });

            return result;
        }

        /// <summary>
        /// Connects a temporary receive endpoint that completes the returned task
        /// when a published message of type T satisfies <paramref name="filter"/>.
        /// </summary>
        protected Task<ConsumeContext<T>> ConnectPublishHandler<T>(Func<ConsumeContext<T>, bool> filter)
            where T : class
        {
            Task<ConsumeContext<T>> result = null;

            Host.ConnectReceiveEndpoint(NewId.NextGuid().ToString(), context =>
            {
                result = Handled<T>(context, filter);
            });

            return result;
        }


        interface IBusCreationScope
        {
            Task TestFixtureSetup();
            Task TestSetup();
            Task TestTeardown();
            Task TestFixtureTeardown();
        }


        // Bus lives for the whole fixture: start/stop only in the one-time hooks.
        class PerTestFixtureBusCreationScope :
            IBusCreationScope
        {
            readonly Func<Task> _start;
            readonly Func<Task> _stop;

            public PerTestFixtureBusCreationScope(Func<Task> setupBus, Func<Task> teardownBus)
            {
                _start = setupBus;
                _stop = teardownBus;
            }

            public Task TestFixtureSetup() => _start();

            public Task TestSetup() => TaskUtil.Completed;

            public Task TestTeardown() => TaskUtil.Completed;

            public Task TestFixtureTeardown() => _stop();
        }


        // Bus is recreated per test: start/stop in the per-test hooks.
        class PerTestBusCreationScope :
            IBusCreationScope
        {
            readonly Func<Task> _start;
            readonly Func<Task> _stop;

            public PerTestBusCreationScope(Func<Task> setupBus, Func<Task> teardownBus)
            {
                _start = setupBus;
                _stop = teardownBus;
            }

            public Task TestFixtureSetup() => TaskUtil.Completed;

            public Task TestSetup() => _start();

            public Task TestTeardown() => _stop();

            public Task TestFixtureTeardown() => TaskUtil.Completed;
        }
    }
}
// SF API version v50.0
// Custom fields included: False
// Relationship objects included: True

using System;
using NetCoreForce.Client.Models;
using NetCoreForce.Client.Attributes;
using Newtonsoft.Json;

namespace NetCoreForce.Models
{
    ///<summary>
    /// Matching Rule
    ///<para>SObject Name: MatchingRule</para>
    ///<para>Custom Object: False</para>
    ///</summary>
    public class SfMatchingRule : SObject
    {
        ///<summary>Salesforce API object type name for this model.</summary>
        [JsonIgnore]
        public static string SObjectTypeName => "MatchingRule";

        ///<summary>
        /// Matching Rule ID
        /// <para>Name: Id</para>
        /// <para>SF Type: id</para>
        /// <para>Nillable: False</para>
        ///</summary>
        [JsonProperty(PropertyName = "id")]
        [Updateable(false)]
        [Createable(false)]
        public string Id { get; set; }

        ///<summary>
        /// Deleted
        /// <para>Name: IsDeleted</para>
        /// <para>SF Type: boolean</para>
        /// <para>Nillable: False</para>
        ///</summary>
        [JsonProperty(PropertyName = "isDeleted")]
        [Updateable(false)]
        [Createable(false)]
        public bool? IsDeleted { get; set; }

        ///<summary>
        /// Custom Object Definition ID
        /// <para>Name: SobjectType</para>
        /// <para>SF Type: picklist</para>
        /// <para>Nillable: False</para>
        ///</summary>
        [JsonProperty(PropertyName = "sobjectType")]
        [Updateable(false)]
        [Createable(false)]
        public string SobjectType { get; set; }

        ///<summary>
        /// Unique Name
        /// <para>Name: DeveloperName</para>
        /// <para>SF Type: string</para>
        /// <para>Nillable: False</para>
        ///</summary>
        [JsonProperty(PropertyName = "developerName")]
        [Updateable(false)]
        [Createable(false)]
        public string DeveloperName { get; set; }

        ///<summary>
        /// Master Language
        /// <para>Name: Language</para>
        /// <para>SF Type: picklist</para>
        /// <para>Nillable: False</para>
        ///</summary>
        [JsonProperty(PropertyName = "language")]
        [Updateable(false)]
        [Createable(false)]
        public string Language { get; set; }

        ///<summary>
        /// Rule Name
        /// <para>Name: MasterLabel</para>
        /// <para>SF Type: string</para>
        /// <para>Nillable: False</para>
        ///</summary>
        [JsonProperty(PropertyName = "masterLabel")]
        [Updateable(false)]
        [Createable(false)]
        public string MasterLabel { get; set; }

        ///<summary>
        /// Namespace Prefix
        /// <para>Name: NamespacePrefix</para>
        /// <para>SF Type: string</para>
        /// <para>Nillable: True</para>
        ///</summary>
        [JsonProperty(PropertyName = "namespacePrefix")]
        [Updateable(false)]
        [Createable(false)]
        public string NamespacePrefix { get; set; }

        ///<summary>
        /// Created Date
        /// <para>Name: CreatedDate</para>
        /// <para>SF Type: datetime</para>
        /// <para>Nillable: False</para>
        ///</summary>
        [JsonProperty(PropertyName = "createdDate")]
        [Updateable(false)]
        [Createable(false)]
        public DateTimeOffset? CreatedDate { get; set; }

        ///<summary>
        /// Created By ID
        /// <para>Name: CreatedById</para>
        /// <para>SF Type: reference</para>
        /// <para>Nillable: False</para>
        ///</summary>
        [JsonProperty(PropertyName = "createdById")]
        [Updateable(false)]
        [Createable(false)]
        public string CreatedById { get; set; }

        ///<summary>
        /// ReferenceTo: User
        /// <para>RelationshipName: CreatedBy</para>
        ///</summary>
        [JsonProperty(PropertyName = "createdBy")]
        [Updateable(false)]
        [Createable(false)]
        public SfUser CreatedBy { get; set; }

        ///<summary>
        /// Last Modified Date
        /// <para>Name: LastModifiedDate</para>
        /// <para>SF Type: datetime</para>
        /// <para>Nillable: False</para>
        ///</summary>
        [JsonProperty(PropertyName = "lastModifiedDate")]
        [Updateable(false)]
        [Createable(false)]
        public DateTimeOffset? LastModifiedDate { get; set; }

        ///<summary>
        /// Last Modified By ID
        /// <para>Name: LastModifiedById</para>
        /// <para>SF Type: reference</para>
        /// <para>Nillable: False</para>
        ///</summary>
        [JsonProperty(PropertyName = "lastModifiedById")]
        [Updateable(false)]
        [Createable(false)]
        public string LastModifiedById { get; set; }

        ///<summary>
        /// ReferenceTo: User
        /// <para>RelationshipName: LastModifiedBy</para>
        ///</summary>
        [JsonProperty(PropertyName = "lastModifiedBy")]
        [Updateable(false)]
        [Createable(false)]
        public SfUser LastModifiedBy { get; set; }

        ///<summary>
        /// System Modstamp
        /// <para>Name: SystemModstamp</para>
        /// <para>SF Type: datetime</para>
        /// <para>Nillable: False</para>
        ///</summary>
        [JsonProperty(PropertyName = "systemModstamp")]
        [Updateable(false)]
        [Createable(false)]
        public DateTimeOffset? SystemModstamp { get; set; }

        ///<summary>
        /// Custom Object Definition ID
        /// <para>Name: MatchEngine</para>
        /// <para>SF Type: picklist</para>
        /// <para>Nillable: True</para>
        ///</summary>
        [JsonProperty(PropertyName = "matchEngine")]
        [Updateable(false)]
        [Createable(false)]
        public string MatchEngine { get; set; }

        ///<summary>
        /// Advanced Logic
        /// <para>Name: BooleanFilter</para>
        /// <para>SF Type: string</para>
        /// <para>Nillable: True</para>
        ///</summary>
        [JsonProperty(PropertyName = "booleanFilter")]
        [Updateable(false)]
        [Createable(false)]
        public string BooleanFilter { get; set; }

        ///<summary>
        /// Description
        /// <para>Name: Description</para>
        /// <para>SF Type: textarea</para>
        /// <para>Nillable: True</para>
        ///</summary>
        [JsonProperty(PropertyName = "description")]
        [Updateable(false)]
        [Createable(false)]
        public string Description { get; set; }

        ///<summary>
        /// Status
        /// <para>Name: RuleStatus</para>
        /// <para>SF Type: picklist</para>
        /// <para>Nillable: False</para>
        ///</summary>
        [JsonProperty(PropertyName = "ruleStatus")]
        [Updateable(false)]
        [Createable(false)]
        public string RuleStatus { get; set; }

        ///<summary>
        /// Object Subtype
        /// <para>Name: SobjectSubtype</para>
        /// <para>SF Type: picklist</para>
        /// <para>Nillable: True</para>
        ///</summary>
        [JsonProperty(PropertyName = "sobjectSubtype")]
        [Updateable(false)]
        [Createable(false)]
        public string SobjectSubtype { get; set; }
    }
}
using System;
using System.Collections.Generic;
using System.Diagnostics;
using System.Globalization;
using System.Linq;
using NuGet.Versioning;

namespace TIKSN.Versioning
{
    /// <summary>
    /// Represents a release version (a <see cref="System.Version"/>) combined with a pre-release
    /// <see cref="Milestone"/>, an optional pre-release number and an optional release date.
    /// Convertible to and from <see cref="NuGetVersion"/> and <see cref="SemanticVersion"/>.
    /// Ordering compares release, then milestone, then pre-release number; the release date
    /// is carried as metadata only and never participates in comparison or equality.
    /// </summary>
    public sealed class Version : IComparable<Version>, IEquatable<Version>
    {
        private const Milestone DefaultMilestone = Milestone.Release;

        // -1 is the sentinel for "no pre-release number", mirroring System.Version's
        // convention for undefined Build/Revision components.
        private const int DefaultPrereleaseNumber = -1;

        private int prereleaseNumber;

        /// <summary>
        /// Core constructor that every public overload chains to, so field assignment and
        /// validation live in exactly one place. The explicit (DateTimeOffset?) casts at the
        /// call sites keep overload resolution pointed here rather than at a public overload.
        /// </summary>
        private Version(System.Version release, Milestone milestone, int prereleaseNumber, DateTimeOffset? releaseDate)
        {
            this.Release = release;
            this.Milestone = milestone;
            this.PrereleaseNumber = prereleaseNumber; // property setter validates value >= -1
            this.ReleaseDate = releaseDate;
            this.ValidateMilestoneAndPrerelease();
        }

        public Version(int ReleaseMajor, int ReleaseMinor)
            : this(new System.Version(ReleaseMajor, ReleaseMinor), DefaultMilestone, DefaultPrereleaseNumber, (DateTimeOffset?)null) { }

        public Version(int ReleaseMajor, int ReleaseMinor, DateTimeOffset ReleaseDate)
            : this(new System.Version(ReleaseMajor, ReleaseMinor), DefaultMilestone, DefaultPrereleaseNumber, (DateTimeOffset?)ReleaseDate) { }

        public Version(int ReleaseMajor, int ReleaseMinor, int ReleaseBuild)
            : this(new System.Version(ReleaseMajor, ReleaseMinor, ReleaseBuild), DefaultMilestone, DefaultPrereleaseNumber, (DateTimeOffset?)null) { }

        public Version(int ReleaseMajor, int ReleaseMinor, int ReleaseBuild, DateTimeOffset ReleaseDate)
            : this(new System.Version(ReleaseMajor, ReleaseMinor, ReleaseBuild), DefaultMilestone, DefaultPrereleaseNumber, (DateTimeOffset?)ReleaseDate) { }

        public Version(int ReleaseMajor, int ReleaseMinor, int ReleaseBuild, int ReleaseRevision)
            : this(new System.Version(ReleaseMajor, ReleaseMinor, ReleaseBuild, ReleaseRevision), DefaultMilestone, DefaultPrereleaseNumber, (DateTimeOffset?)null) { }

        public Version(int ReleaseMajor, int ReleaseMinor, int ReleaseBuild, int ReleaseRevision, DateTimeOffset ReleaseDate)
            : this(new System.Version(ReleaseMajor, ReleaseMinor, ReleaseBuild, ReleaseRevision), DefaultMilestone, DefaultPrereleaseNumber, (DateTimeOffset?)ReleaseDate) { }

        public Version(System.Version Release)
            : this(Release, DefaultMilestone, DefaultPrereleaseNumber, (DateTimeOffset?)null) { }

        public Version(System.Version Release, DateTimeOffset ReleaseDate)
            : this(Release, DefaultMilestone, DefaultPrereleaseNumber, (DateTimeOffset?)ReleaseDate) { }

        public Version(int ReleaseMajor, int ReleaseMinor, Milestone Milestone)
            : this(new System.Version(ReleaseMajor, ReleaseMinor), Milestone, DefaultPrereleaseNumber, (DateTimeOffset?)null) { }

        public Version(int ReleaseMajor, int ReleaseMinor, Milestone Milestone, DateTimeOffset ReleaseDate)
            : this(new System.Version(ReleaseMajor, ReleaseMinor), Milestone, DefaultPrereleaseNumber, (DateTimeOffset?)ReleaseDate) { }

        public Version(int ReleaseMajor, int ReleaseMinor, int ReleaseBuild, Milestone Milestone)
            : this(new System.Version(ReleaseMajor, ReleaseMinor, ReleaseBuild), Milestone, DefaultPrereleaseNumber, (DateTimeOffset?)null) { }

        public Version(int ReleaseMajor, int ReleaseMinor, int ReleaseBuild, Milestone Milestone, DateTimeOffset ReleaseDate)
            : this(new System.Version(ReleaseMajor, ReleaseMinor, ReleaseBuild), Milestone, DefaultPrereleaseNumber, (DateTimeOffset?)ReleaseDate) { }

        public Version(int ReleaseMajor, int ReleaseMinor, int ReleaseBuild, int ReleaseRevision, Milestone Milestone)
            : this(new System.Version(ReleaseMajor, ReleaseMinor, ReleaseBuild, ReleaseRevision), Milestone, DefaultPrereleaseNumber, (DateTimeOffset?)null) { }

        public Version(int ReleaseMajor, int ReleaseMinor, int ReleaseBuild, int ReleaseRevision, Milestone Milestone, DateTimeOffset ReleaseDate)
            : this(new System.Version(ReleaseMajor, ReleaseMinor, ReleaseBuild, ReleaseRevision), Milestone, DefaultPrereleaseNumber, (DateTimeOffset?)ReleaseDate) { }

        public Version(System.Version Release, Milestone Milestone)
            : this(Release, Milestone, DefaultPrereleaseNumber, (DateTimeOffset?)null) { }

        public Version(System.Version Release, Milestone Milestone, DateTimeOffset ReleaseDate)
            : this(Release, Milestone, DefaultPrereleaseNumber, (DateTimeOffset?)ReleaseDate) { }

        public Version(int ReleaseMajor, int ReleaseMinor, Milestone Milestone, int PrereleaseNumber)
            : this(new System.Version(ReleaseMajor, ReleaseMinor), Milestone, PrereleaseNumber, (DateTimeOffset?)null) { }

        public Version(int ReleaseMajor, int ReleaseMinor, Milestone Milestone, int PrereleaseNumber, DateTimeOffset ReleaseDate)
            : this(new System.Version(ReleaseMajor, ReleaseMinor), Milestone, PrereleaseNumber, (DateTimeOffset?)ReleaseDate) { }

        public Version(int ReleaseMajor, int ReleaseMinor, int ReleaseBuild, Milestone Milestone, int PrereleaseNumber)
            : this(new System.Version(ReleaseMajor, ReleaseMinor, ReleaseBuild), Milestone, PrereleaseNumber, (DateTimeOffset?)null) { }

        public Version(int ReleaseMajor, int ReleaseMinor, int ReleaseBuild, Milestone Milestone, int PrereleaseNumber, DateTimeOffset ReleaseDate)
            : this(new System.Version(ReleaseMajor, ReleaseMinor, ReleaseBuild), Milestone, PrereleaseNumber, (DateTimeOffset?)ReleaseDate) { }

        public Version(int ReleaseMajor, int ReleaseMinor, int ReleaseBuild, int ReleaseRevision, Milestone Milestone, int PrereleaseNumber)
            : this(new System.Version(ReleaseMajor, ReleaseMinor, ReleaseBuild, ReleaseRevision), Milestone, PrereleaseNumber, (DateTimeOffset?)null) { }

        public Version(int ReleaseMajor, int ReleaseMinor, int ReleaseBuild, int ReleaseRevision, Milestone Milestone, int PrereleaseNumber, DateTimeOffset ReleaseDate)
            : this(new System.Version(ReleaseMajor, ReleaseMinor, ReleaseBuild, ReleaseRevision), Milestone, PrereleaseNumber, (DateTimeOffset?)ReleaseDate) { }

        public Version(System.Version Release, Milestone Milestone, int PrereleaseNumber)
            : this(Release, Milestone, PrereleaseNumber, (DateTimeOffset?)null) { }

        public Version(System.Version Release, Milestone Milestone, int PrereleaseNumber, DateTimeOffset ReleaseDate)
            : this(Release, Milestone, PrereleaseNumber, (DateTimeOffset?)ReleaseDate) { }

        /// <summary>Gets the pre-release milestone of this version.</summary>
        public Milestone Milestone { get; }

        /// <summary>
        /// Gets the pre-release number, or -1 when the version carries none.
        /// </summary>
        public int PrereleaseNumber
        {
            get => this.prereleaseNumber;

            private set
            {
                // -1 (the "undefined" sentinel) is the smallest legal value.
                if (value < -1)
                {
                    throw new ArgumentOutOfRangeException(nameof(this.PrereleaseNumber));
                }

                this.prereleaseNumber = value;
            }
        }

        /// <summary>Gets the release number quadruple.</summary>
        public System.Version Release { get; }

        /// <summary>
        /// Gets the optional release date; round-trips through version metadata and is
        /// deliberately excluded from comparison, equality and hashing.
        /// </summary>
        public DateTimeOffset? ReleaseDate { get; }

        /// <summary>Gets whether this version is stable (release milestone) or unstable.</summary>
        public Stability Stability => this.Milestone == Milestone.Release ? Stability.Stable : Stability.Unstable;

        /// <summary>
        /// Compares by release, then milestone, then pre-release number.
        /// A non-null instance sorts after null, per the <see cref="IComparable{T}"/> convention.
        /// (Fix: previously dereferenced <paramref name="that"/> unconditionally.)
        /// </summary>
        public int CompareTo(Version that)
        {
            if (ReferenceEquals(this, that))
            {
                return 0;
            }

            if (that is null)
            {
                return 1;
            }

            var releaseComparison = this.Release.CompareTo(that.Release);
            if (releaseComparison != 0)
            {
                return releaseComparison;
            }

            var milestoneComparison = ((int)this.Milestone).CompareTo((int)that.Milestone);
            if (milestoneComparison != 0)
            {
                return milestoneComparison;
            }

            return this.prereleaseNumber.CompareTo(that.prereleaseNumber);
        }

        /// <summary>Value equality in terms of <see cref="CompareTo(Version)"/>.</summary>
        public bool Equals(Version that)
        {
            if (that is null)
            {
                return false;
            }

            return this.CompareTo(that) == 0;
        }

        /// <summary>
        /// Converts to a <see cref="NuGetVersion"/>, mapping milestone/pre-release number to
        /// release labels and the release date to metadata.
        /// </summary>
        public static explicit operator NuGetVersion(Version version)
        {
            var releaseLabels = GetReleaseLabels(version);
            var metadata = GetMetadata(version);

            if (version.Release.Revision >= 0)
            {
                return new NuGetVersion(version.Release.Major, version.Release.Minor, version.Release.Build,
                    version.Release.Revision, releaseLabels, metadata);
            }

            if (version.Release.Build >= 0)
            {
                return new NuGetVersion(version.Release.Major, version.Release.Minor, version.Release.Build,
                    releaseLabels, metadata);
            }

            // No build component defined: SemVer requires a patch, so it defaults to 0.
            return new NuGetVersion(version.Release.Major, version.Release.Minor, 0, releaseLabels, metadata);
        }

        /// <summary>
        /// Converts to a <see cref="SemanticVersion"/>.
        /// </summary>
        /// <exception cref="FormatException">When the release has a revision component, which SemVer cannot express.</exception>
        public static explicit operator SemanticVersion(Version version)
        {
            var releaseLabels = GetReleaseLabels(version);
            var metadata = GetMetadata(version);

            if (version.Release.Revision != -1)
            {
                throw new FormatException("There is no revision in semantic version.");
            }

            if (version.Release.Build >= 0)
            {
                return new SemanticVersion(version.Release.Major, version.Release.Minor, version.Release.Build,
                    releaseLabels, metadata);
            }

            return new SemanticVersion(version.Release.Major, version.Release.Minor, 0, releaseLabels, metadata);
        }

        /// <summary>Converts from a <see cref="NuGetVersion"/>, reading the release date back out of metadata.</summary>
        public static explicit operator Version(NuGetVersion nuGetVersion)
        {
            var (milestone, prereleaseNumber) =
                GetMilestoneAndPrereleaseNumber(nuGetVersion.IsPrerelease, nuGetVersion.ReleaseLabels.ToArray());

            if (nuGetVersion.HasMetadata)
            {
                return new Version(nuGetVersion.Version, milestone, prereleaseNumber, GetReleaseDate(nuGetVersion.Metadata));
            }

            return new Version(nuGetVersion.Version, milestone, prereleaseNumber);
        }

        /// <summary>Converts from a <see cref="SemanticVersion"/>, reading the release date back out of metadata.</summary>
        public static explicit operator Version(SemanticVersion semanticVersion)
        {
            var (milestone, prereleaseNumber) =
                GetMilestoneAndPrereleaseNumber(semanticVersion.IsPrerelease, semanticVersion.ReleaseLabels.ToArray());

            if (semanticVersion.HasMetadata)
            {
                return new Version(semanticVersion.Major, semanticVersion.Minor, semanticVersion.Patch,
                    milestone, prereleaseNumber, GetReleaseDate(semanticVersion.Metadata));
            }

            return new Version(semanticVersion.Major, semanticVersion.Minor, semanticVersion.Patch,
                milestone, prereleaseNumber);
        }

        // Comparison operators are null-safe (fix: previously threw NullReferenceException
        // when the left operand was null). Null sorts before every non-null instance.
        public static bool operator ==(Version v1, Version v2) => v1 is null ? v2 is null : v1.CompareTo(v2) == 0;

        public static bool operator !=(Version v1, Version v2) => !(v1 == v2);

        public static bool operator <(Version v1, Version v2) => v1 is null ? !(v2 is null) : v1.CompareTo(v2) < 0;

        public static bool operator <=(Version v1, Version v2) => v1 is null || v1.CompareTo(v2) <= 0;

        public static bool operator >(Version v1, Version v2) => v2 < v1;

        public static bool operator >=(Version v1, Version v2) => v2 <= v1;

        /// <summary>Returns the release in full "major.minor[.build[.revision]]" form.</summary>
        public string ToLongReleaseString() => this.Release.ToString();

        /// <summary>
        /// Returns the release with trailing zero components trimmed
        /// (e.g. 1.0 renders as "1", 1.2.0 as "1.2").
        /// </summary>
        public string ToShortReleaseString()
        {
            if (this.Release.Revision > 0)
            {
                return this.Release.ToString(4);
            }

            if (this.Release.Build > 0)
            {
                return this.Release.ToString(3);
            }

            if (this.Release.Minor > 0)
            {
                return this.Release.ToString(2);
            }

            return this.Release.ToString(1);
        }

        /// <summary>
        /// Returns the short release string, with "-milestone[.number]" appended for pre-releases.
        /// </summary>
        public override string ToString()
        {
            if (this.prereleaseNumber == DefaultPrereleaseNumber && this.Milestone == DefaultMilestone)
            {
                return this.ToShortReleaseString();
            }

            return $"{this.ToShortReleaseString()}-{string.Join(".", GetReleaseLabels(this))}";
        }

        // Metadata is the release date in the invariant, sortable ("s") format, or null when absent.
        private static string GetMetadata(Version version) =>
            version.ReleaseDate?.ToString("s", CultureInfo.InvariantCulture);

        /// <summary>
        /// Parses the milestone tag and optional pre-release number out of version release labels.
        /// </summary>
        /// <exception cref="FormatException">When there are more than two labels or the tag is unknown.</exception>
        private static (Milestone milestone, int prereleaseNumber) GetMilestoneAndPrereleaseNumber(
            bool isPrerelease, string[] releaseLabels)
        {
            if (!isPrerelease || releaseLabels.Length == 0)
            {
                return (Milestone.Release, DefaultPrereleaseNumber);
            }

            if (releaseLabels.Length > 2)
            {
                throw new FormatException("Release labels passed are more than 2.");
            }

            var prereleaseNumber = releaseLabels.Length == 2
                ? int.Parse(releaseLabels[1], CultureInfo.InvariantCulture)
                : DefaultPrereleaseNumber;

            var milestoneTag = releaseLabels.ElementAt(0);

            return milestoneTag.ToLowerInvariant() switch
            {
                "alpha" => (Milestone.Alpha, prereleaseNumber),
                "beta" => (Milestone.Beta, prereleaseNumber),
                "rc" => (Milestone.ReleaseCandidate, prereleaseNumber),
                _ => throw new FormatException($"Unknown milestone tag '{milestoneTag}'."),
            };
        }

        // Metadata is produced by GetMetadata with the invariant "s" format, so parse invariantly too.
        private static DateTimeOffset GetReleaseDate(string metadata) =>
            DateTimeOffset.Parse(metadata, CultureInfo.InvariantCulture);

        /// <summary>
        /// Builds the release-label sequence: empty for stable versions, otherwise
        /// the milestone tag, optionally followed by the pre-release number.
        /// </summary>
        private static IEnumerable<string> GetReleaseLabels(Version version)
        {
            if (version.Stability == Stability.Stable)
            {
                return Array.Empty<string>();
            }

            var milestoneTag = version.Milestone switch
            {
                Milestone.Alpha => "alpha",
                Milestone.Beta => "beta",
                Milestone.ReleaseCandidate => "rc",
                Milestone.Release => null,
                _ => throw new NotSupportedException("Unsupported milestone name."),
            };

            if (version.prereleaseNumber == DefaultPrereleaseNumber)
            {
                return new[] { milestoneTag };
            }

            return new[] { milestoneTag, version.prereleaseNumber.ToString() };
        }

        /// <summary>
        /// Rejects a stable version carrying a pre-release number, and any milestone value
        /// outside the <see cref="Milestone"/> enumeration.
        /// </summary>
        private void ValidateMilestoneAndPrerelease()
        {
            if (this.Stability == Stability.Stable && this.prereleaseNumber != DefaultPrereleaseNumber)
            {
                throw new FormatException("Stable version cannot have pre-release number.");
            }

            if (!Enum.IsDefined(typeof(Milestone), this.Milestone))
            {
                throw new NotSupportedException($"Milestone '{this.Milestone}' value is not supported.");
            }
        }

        /// <summary>
        /// Value equality consistent with <see cref="Equals(Version)"/>.
        /// (Fix: previously threw NotImplementedException for non-reference-equal arguments.)
        /// </summary>
        public override bool Equals(object obj) => obj is Version that && this.Equals(that);

        /// <summary>
        /// Hash over the fields that participate in equality (release, milestone, pre-release
        /// number); the release date is excluded, matching <see cref="CompareTo(Version)"/>.
        /// (Fix: previously threw NotImplementedException.)
        /// </summary>
        public override int GetHashCode()
        {
            unchecked
            {
                var hash = this.Release is null ? 0 : this.Release.GetHashCode();
                hash = (hash * 397) ^ (int)this.Milestone;
                hash = (hash * 397) ^ this.prereleaseNumber;
                return hash;
            }
        }

        // TODO(review): operator-alternative stubs (CA2225-style); their intended semantics
        // with two Version parameters are unclear from this file, so they remain unimplemented.
        public static NuGetVersion ToNuGetVersion(Version left, Version right) => throw new NotImplementedException();

        public static SemanticVersion ToSemanticVersion(Version left, Version right) => throw new NotImplementedException();

        public static Version ToVersion(Version left, Version right) => throw new NotImplementedException();
    }
}
using System;
using System.Collections;
using System.Collections.Generic;
using System.Diagnostics.CodeAnalysis;
using System.Globalization;
using System.Linq;
using System.Reflection;

namespace ProCultura.Web.Api.Areas.HelpPage
{
    /// <summary>
    /// This class will create an object of a given type and populate it with sample data.
    /// </summary>
    public class ObjectGenerator
    {
        // Number of elements generated for arrays, collections, dictionaries and queryables.
        private const int DefaultCollectionSize = 3;
        // Produces deterministic-ish sample values for primitive/simple types.
        private readonly SimpleTypeObjectGenerator SimpleObjectGenerator = new SimpleTypeObjectGenerator();

        /// <summary>
        /// Generates an object for a given type. The type needs to be public, have a public default constructor and settable public properties/fields. Currently it supports the following types:
        /// Simple types: <see cref="int"/>, <see cref="string"/>, <see cref="Enum"/>, <see cref="DateTime"/>, <see cref="Uri"/>, etc.
        /// Complex types: POCO types.
        /// Nullables: <see cref="Nullable{T}"/>.
        /// Arrays: arrays of simple types or complex types.
        /// Key value pairs: <see cref="KeyValuePair{TKey,TValue}"/>
        /// Tuples: <see cref="Tuple{T1}"/>, <see cref="Tuple{T1,T2}"/>, etc
        /// Dictionaries: <see cref="IDictionary{TKey,TValue}"/> or anything deriving from <see cref="IDictionary{TKey,TValue}"/>.
        /// Collections: <see cref="IList{T}"/>, <see cref="IEnumerable{T}"/>, <see cref="ICollection{T}"/>, <see cref="IList"/>, <see cref="IEnumerable"/>, <see cref="ICollection"/> or anything deriving from <see cref="ICollection{T}"/> or <see cref="IList"/>.
        /// Queryables: <see cref="IQueryable"/>, <see cref="IQueryable{T}"/>.
        /// </summary>
        /// <param name="type">The type.</param>
        /// <returns>An object of the given type.</returns>
        public object GenerateObject(Type type)
        {
            // Fresh reference map per top-level call; it breaks circular object graphs below.
            return GenerateObject(type, new Dictionary<Type, object>());
        }

        // Dispatches on the shape of the type; order matters (e.g. generic types are handled
        // before the non-generic IDictionary/IList fallbacks). Returns null on any failure.
        [SuppressMessage("Microsoft.Design", "CA1031:DoNotCatchGeneralExceptionTypes", Justification = "Here we just want to return null if anything goes wrong.")]
        private object GenerateObject(Type type, Dictionary<Type, object> createdObjectReferences)
        {
            try
            {
                if (SimpleTypeObjectGenerator.CanGenerateObject(type))
                {
                    return SimpleObjectGenerator.GenerateObject(type);
                }

                if (type.IsArray)
                {
                    return GenerateArray(type, DefaultCollectionSize, createdObjectReferences);
                }

                if (type.IsGenericType)
                {
                    return GenerateGenericType(type, DefaultCollectionSize, createdObjectReferences);
                }

                if (type == typeof(IDictionary))
                {
                    return GenerateDictionary(typeof(Hashtable), DefaultCollectionSize, createdObjectReferences);
                }

                if (typeof(IDictionary).IsAssignableFrom(type))
                {
                    return GenerateDictionary(type, DefaultCollectionSize, createdObjectReferences);
                }

                if (type == typeof(IList) ||
                    type == typeof(IEnumerable) ||
                    type == typeof(ICollection))
                {
                    return GenerateCollection(typeof(ArrayList), DefaultCollectionSize, createdObjectReferences);
                }

                if (typeof(IList).IsAssignableFrom(type))
                {
                    return GenerateCollection(type, DefaultCollectionSize, createdObjectReferences);
                }

                if (type == typeof(IQueryable))
                {
                    return GenerateQueryable(type, DefaultCollectionSize, createdObjectReferences);
                }

                if (type.IsEnum)
                {
                    return GenerateEnum(type);
                }

                if (type.IsPublic || type.IsNestedPublic)
                {
                    return GenerateComplexObject(type, createdObjectReferences);
                }
            }
            catch
            {
                // Returns null if anything fails
                return null;
            }

            return null;
        }

        // Handles closed generic types: Nullable<T>, KeyValuePair<,>, Tuple<...>, the common
        // generic collection/dictionary/queryable interfaces, then falls back to POCO generation.
        private static object GenerateGenericType(Type type, int collectionSize, Dictionary<Type, object> createdObjectReferences)
        {
            Type genericTypeDefinition = type.GetGenericTypeDefinition();
            if (genericTypeDefinition == typeof(Nullable<>))
            {
                return GenerateNullable(type, createdObjectReferences);
            }

            if (genericTypeDefinition == typeof(KeyValuePair<,>))
            {
                return GenerateKeyValuePair(type, createdObjectReferences);
            }

            if (IsTuple(genericTypeDefinition))
            {
                return GenerateTuple(type, createdObjectReferences);
            }

            Type[] genericArguments = type.GetGenericArguments();
            if (genericArguments.Length == 1)
            {
                if (genericTypeDefinition == typeof(IList<>) ||
                    genericTypeDefinition == typeof(IEnumerable<>) ||
                    genericTypeDefinition == typeof(ICollection<>))
                {
                    // Interface requested: materialize it as a List<T>.
                    Type collectionType = typeof(List<>).MakeGenericType(genericArguments);
                    return GenerateCollection(collectionType, collectionSize, createdObjectReferences);
                }

                if (genericTypeDefinition == typeof(IQueryable<>))
                {
                    return GenerateQueryable(type, collectionSize, createdObjectReferences);
                }

                Type closedCollectionType = typeof(ICollection<>).MakeGenericType(genericArguments[0]);
                if (closedCollectionType.IsAssignableFrom(type))
                {
                    return GenerateCollection(type, collectionSize, createdObjectReferences);
                }
            }

            if (genericArguments.Length == 2)
            {
                if (genericTypeDefinition == typeof(IDictionary<,>))
                {
                    Type dictionaryType = typeof(Dictionary<,>).MakeGenericType(genericArguments);
                    return GenerateDictionary(dictionaryType, collectionSize, createdObjectReferences);
                }

                Type closedDictionaryType = typeof(IDictionary<,>).MakeGenericType(genericArguments[0], genericArguments[1]);
                if (closedDictionaryType.IsAssignableFrom(type))
                {
                    return GenerateDictionary(type, collectionSize, createdObjectReferences);
                }
            }

            if (type.IsPublic || type.IsNestedPublic)
            {
                return GenerateComplexObject(type, createdObjectReferences);
            }

            return null;
        }

        // Generates one value per tuple component; returns null only when every component failed.
        private static object GenerateTuple(Type type, Dictionary<Type, object> createdObjectReferences)
        {
            Type[] genericArgs = type.GetGenericArguments();
            object[] parameterValues = new object[genericArgs.Length];
            bool failedToCreateTuple = true;
            ObjectGenerator objectGenerator = new ObjectGenerator();
            for (int i = 0; i < genericArgs.Length; i++)
            {
                parameterValues[i] = objectGenerator.GenerateObject(genericArgs[i], createdObjectReferences);
                failedToCreateTuple &= parameterValues[i] == null;
            }
            if (failedToCreateTuple)
            {
                return null;
            }
            object result = Activator.CreateInstance(type, parameterValues);
            return result;
        }

        // True for any of the eight Tuple<> arities.
        private static bool IsTuple(Type genericTypeDefinition)
        {
            return genericTypeDefinition == typeof(Tuple<>) ||
                genericTypeDefinition == typeof(Tuple<,>) ||
                genericTypeDefinition == typeof(Tuple<,,>) ||
                genericTypeDefinition == typeof(Tuple<,,,>) ||
                genericTypeDefinition == typeof(Tuple<,,,,>) ||
                genericTypeDefinition == typeof(Tuple<,,,,,>) ||
                genericTypeDefinition == typeof(Tuple<,,,,,,>) ||
                genericTypeDefinition == typeof(Tuple<,,,,,,,>);
        }

        // Generates a KeyValuePair<K,V>; null only when both key and value generation failed.
        private static object GenerateKeyValuePair(Type keyValuePairType, Dictionary<Type, object> createdObjectReferences)
        {
            Type[] genericArgs = keyValuePairType.GetGenericArguments();
            Type typeK = genericArgs[0];
            Type typeV = genericArgs[1];
            ObjectGenerator objectGenerator = new ObjectGenerator();
            object keyObject = objectGenerator.GenerateObject(typeK, createdObjectReferences);
            object valueObject = objectGenerator.GenerateObject(typeV, createdObjectReferences);
            if (keyObject == null && valueObject == null)
            {
                // Failed to create key and values
                return null;
            }
            object result = Activator.CreateInstance(keyValuePairType, keyObject, valueObject);
            return result;
        }

        // Builds a fixed-size array of the element type; null when no element could be generated.
        private static object GenerateArray(Type arrayType, int size, Dictionary<Type, object> createdObjectReferences)
        {
            Type type = arrayType.GetElementType();
            Array result = Array.CreateInstance(type, size);
            bool areAllElementsNull = true;
            ObjectGenerator objectGenerator = new ObjectGenerator();
            for (int i = 0; i < size; i++)
            {
                object element = objectGenerator.GenerateObject(type, createdObjectReferences);
                result.SetValue(element, i);
                areAllElementsNull &= element == null;
            }

            if (areAllElementsNull)
            {
                return null;
            }

            return result;
        }

        // Populates a dictionary via its Add/Contains(Key) methods, found by reflection so
        // both generic and non-generic dictionary types work.
        private static object GenerateDictionary(Type dictionaryType, int size, Dictionary<Type, object> createdObjectReferences)
        {
            Type typeK = typeof(object);
            Type typeV = typeof(object);
            if (dictionaryType.IsGenericType)
            {
                Type[] genericArgs = dictionaryType.GetGenericArguments();
                typeK = genericArgs[0];
                typeV = genericArgs[1];
            }

            object result = Activator.CreateInstance(dictionaryType);
            MethodInfo addMethod = dictionaryType.GetMethod("Add") ?? dictionaryType.GetMethod("TryAdd");
            MethodInfo containsMethod = dictionaryType.GetMethod("Contains") ?? dictionaryType.GetMethod("ContainsKey");
            ObjectGenerator objectGenerator = new ObjectGenerator();
            for (int i = 0; i < size; i++)
            {
                object newKey = objectGenerator.GenerateObject(typeK, createdObjectReferences);
                if (newKey == null)
                {
                    // Cannot generate a valid key
                    return null;
                }

                // Skip duplicate keys rather than letting Add throw.
                bool containsKey = (bool)containsMethod.Invoke(result, new object[] { newKey });
                if (!containsKey)
                {
                    object newValue = objectGenerator.GenerateObject(typeV, createdObjectReferences);
                    addMethod.Invoke(result, new object[] { newKey, newValue });
                }
            }

            return result;
        }

        // Picks the first declared enum member as the sample value.
        private static object GenerateEnum(Type enumType)
        {
            Array possibleValues = Enum.GetValues(enumType);
            if (possibleValues.Length > 0)
            {
                return possibleValues.GetValue(0);
            }
            return null;
        }

        // Builds a list (or object[] for the non-generic case) and wraps it with AsQueryable.
        private static object GenerateQueryable(Type queryableType, int size, Dictionary<Type, object> createdObjectReferences)
        {
            bool isGeneric = queryableType.IsGenericType;
            object list;
            if (isGeneric)
            {
                Type listType = typeof(List<>).MakeGenericType(queryableType.GetGenericArguments());
                list = GenerateCollection(listType, size, createdObjectReferences);
            }
            else
            {
                list = GenerateArray(typeof(object[]), size, createdObjectReferences);
            }
            if (list == null)
            {
                return null;
            }
            if (isGeneric)
            {
                // Bind Queryable.AsQueryable<T> via its IEnumerable<T> overload.
                Type argumentType = typeof(IEnumerable<>).MakeGenericType(queryableType.GetGenericArguments());
                MethodInfo asQueryableMethod = typeof(Queryable).GetMethod("AsQueryable", new[] { argumentType });
                return asQueryableMethod.Invoke(null, new[] { list });
            }

            return Queryable.AsQueryable((IEnumerable)list);
        }

        // Instantiates the collection type and fills it through its Add method;
        // null when no element could be generated at all.
        private static object GenerateCollection(Type collectionType, int size, Dictionary<Type, object> createdObjectReferences)
        {
            Type type = collectionType.IsGenericType ?
                collectionType.GetGenericArguments()[0] :
                typeof(object);
            object result = Activator.CreateInstance(collectionType);
            MethodInfo addMethod = collectionType.GetMethod("Add");
            bool areAllElementsNull = true;
            ObjectGenerator objectGenerator = new ObjectGenerator();
            for (int i = 0; i < size; i++)
            {
                object element = objectGenerator.GenerateObject(type, createdObjectReferences);
                addMethod.Invoke(result, new object[] { element });
                areAllElementsNull &= element == null;
            }

            if (areAllElementsNull)
            {
                return null;
            }

            return result;
        }

        // Nullable<T> is represented by a sample of its underlying type (boxing handles the wrap).
        private static object GenerateNullable(Type nullableType, Dictionary<Type, object> createdObjectReferences)
        {
            Type type = nullableType.GetGenericArguments()[0];
            ObjectGenerator objectGenerator = new ObjectGenerator();
            return objectGenerator.GenerateObject(type, createdObjectReferences);
        }

        // Creates a POCO via its default constructor and fills its public settable members.
        // The createdObjectReferences map returns the already-built instance for repeated types,
        // which also terminates circular references.
        private static object GenerateComplexObject(Type type, Dictionary<Type, object> createdObjectReferences)
        {
            object result = null;

            if (createdObjectReferences.TryGetValue(type, out result))
            {
                // The object has been created already, just return it. This will handle the circular reference case.
                return result;
            }

            if (type.IsValueType)
            {
                result = Activator.CreateInstance(type);
            }
            else
            {
                ConstructorInfo defaultCtor = type.GetConstructor(Type.EmptyTypes);
                if (defaultCtor == null)
                {
                    // Cannot instantiate the type because it doesn't have a default constructor
                    return null;
                }

                result = defaultCtor.Invoke(new object[0]);
            }
            createdObjectReferences.Add(type, result);
            SetPublicProperties(type, result, createdObjectReferences);
            SetPublicFields(type, result, createdObjectReferences);
            return result;
        }

        // Assigns generated sample values to all writable public instance properties.
        private static void SetPublicProperties(Type type, object obj, Dictionary<Type, object> createdObjectReferences)
        {
            PropertyInfo[] properties = type.GetProperties(BindingFlags.Public | BindingFlags.Instance);
            ObjectGenerator objectGenerator = new ObjectGenerator();
            foreach (PropertyInfo property in properties)
            {
                if (property.CanWrite)
                {
                    object propertyValue = objectGenerator.GenerateObject(property.PropertyType, createdObjectReferences);
                    property.SetValue(obj, propertyValue, null);
                }
            }
        }

        // Assigns generated sample values to all public instance fields.
        private static void SetPublicFields(Type type, object obj, Dictionary<Type, object> createdObjectReferences)
        {
            FieldInfo[] fields = type.GetFields(BindingFlags.Public | BindingFlags.Instance);
            ObjectGenerator objectGenerator = new ObjectGenerator();
            foreach (FieldInfo field in fields)
            {
                object fieldValue = objectGenerator.GenerateObject(field.FieldType, createdObjectReferences);
                field.SetValue(obj, fieldValue);
            }
        }

        // Factory table of sample-value generators for simple BCL types; the incrementing
        // index makes successive samples of the same type distinguishable.
        private class SimpleTypeObjectGenerator
        {
            private long _index = 0;
            private static readonly Dictionary<Type, Func<long, object>> DefaultGenerators = InitializeGenerators();

            [SuppressMessage("Microsoft.Maintainability", "CA1502:AvoidExcessiveComplexity", Justification = "These are simple type factories and cannot be split up.")]
            private static Dictionary<Type, Func<long, object>> InitializeGenerators()
            {
                return new Dictionary<Type, Func<long, object>>
                {
                    { typeof(Boolean), index => true },
                    { typeof(Byte), index => (Byte)64 },
                    { typeof(Char), index => (Char)65 },
                    { typeof(DateTime), index => DateTime.Now },
                    { typeof(DateTimeOffset), index => new DateTimeOffset(DateTime.Now) },
                    { typeof(DBNull), index => DBNull.Value },
                    { typeof(Decimal), index => (Decimal)index },
                    { typeof(Double), index => (Double)(index + 0.1) },
                    { typeof(Guid), index => Guid.NewGuid() },
                    { typeof(Int16), index => (Int16)(index % Int16.MaxValue) },
                    { typeof(Int32), index => (Int32)(index % Int32.MaxValue) },
                    { typeof(Int64), index => (Int64)index },
                    { typeof(Object), index => new object() },
                    { typeof(SByte), index => (SByte)64 },
                    { typeof(Single), index => (Single)(index + 0.1) },
                    {
                        typeof(String), index =>
                        {
                            return String.Format(CultureInfo.CurrentCulture, "sample string {0}", index);
                        }
                    },
                    {
                        typeof(TimeSpan), index =>
                        {
                            return TimeSpan.FromTicks(1234567);
                        }
                    },
                    { typeof(UInt16), index => (UInt16)(index % UInt16.MaxValue) },
                    { typeof(UInt32), index => (UInt32)(index % UInt32.MaxValue) },
                    { typeof(UInt64), index => (UInt64)index },
                    {
                        typeof(Uri), index =>
                        {
                            return new Uri(String.Format(CultureInfo.CurrentCulture, "http://webapihelppage{0}.com", index));
                        }
                    },
                };
            }

            public static bool CanGenerateObject(Type type)
            {
                return DefaultGenerators.ContainsKey(type);
            }

            public object GenerateObject(Type type)
            {
                return DefaultGenerators[type](++_index);
            }
        }
    }
}
// Copyright (c) Microsoft Corporation. All rights reserved.
// Licensed under the MIT License. See License.txt in the project root for
// license information.
//
// Code generated by Microsoft (R) AutoRest Code Generator.
// Changes may cause incorrect behavior and will be lost if the code is
// regenerated.

namespace Microsoft.Azure.Management.Search
{
    using System;
    using System.Linq;
    using System.Collections.Generic;
    using System.Net;
    using System.Net.Http;
    using System.Net.Http.Headers;
    using System.Text;
    using System.Text.RegularExpressions;
    using System.Threading;
    using System.Threading.Tasks;
    using Microsoft.Rest;
    using Microsoft.Rest.Serialization;
    using Newtonsoft.Json;
    using Microsoft.Rest.Azure;
    using Models;

    /// <summary>
    /// AdminKeysOperations operations.
    /// </summary>
    internal partial class AdminKeysOperations : IServiceOperations<SearchManagementClient>, IAdminKeysOperations
    {
        /// <summary>
        /// Initializes a new instance of the AdminKeysOperations class.
        /// </summary>
        /// <param name='client'>
        /// Reference to the service client.
        /// </param>
        /// <exception cref="ArgumentNullException">
        /// Thrown when a required parameter is null
        /// </exception>
        internal AdminKeysOperations(SearchManagementClient client)
        {
            // Every operation on this class goes through the service client,
            // so a null reference is rejected eagerly here.
            if (client == null)
            {
                throw new ArgumentNullException("client");
            }
            this.Client = client;
        }

        /// <summary>
        /// Gets a reference to the SearchManagementClient
        /// </summary>
        public SearchManagementClient Client { get; private set; }

        /// <summary>
        /// Returns the primary and secondary API keys for the given Azure Search
        /// service.
        /// <see href="https://msdn.microsoft.com/library/azure/dn832685.aspx" />
        /// </summary>
        /// <param name='resourceGroupName'>
        /// The name of the resource group within the current subscription.
        /// </param>
        /// <param name='serviceName'>
        /// The name of the Search service for which to list admin keys.
        /// </param>
        /// <param name='customHeaders'>
        /// Headers that will be added to request.
/// </param> /// <param name='cancellationToken'> /// The cancellation token. /// </param> /// <exception cref="CloudException"> /// Thrown when the operation returned an invalid status code /// </exception> /// <exception cref="SerializationException"> /// Thrown when unable to deserialize the response /// </exception> /// <exception cref="ValidationException"> /// Thrown when a required parameter is null /// </exception> /// <return> /// A response object containing the response body and response headers. /// </return> public async Task<AzureOperationResponse<AdminKeyResult>> ListWithHttpMessagesAsync(string resourceGroupName, string serviceName, Dictionary<string, List<string>> customHeaders = null, CancellationToken cancellationToken = default(CancellationToken)) { if (resourceGroupName == null) { throw new ValidationException(ValidationRules.CannotBeNull, "resourceGroupName"); } if (serviceName == null) { throw new ValidationException(ValidationRules.CannotBeNull, "serviceName"); } if (this.Client.ApiVersion == null) { throw new ValidationException(ValidationRules.CannotBeNull, "this.Client.ApiVersion"); } if (this.Client.SubscriptionId == null) { throw new ValidationException(ValidationRules.CannotBeNull, "this.Client.SubscriptionId"); } // Tracing bool _shouldTrace = ServiceClientTracing.IsEnabled; string _invocationId = null; if (_shouldTrace) { _invocationId = ServiceClientTracing.NextInvocationId.ToString(); Dictionary<string, object> tracingParameters = new Dictionary<string, object>(); tracingParameters.Add("resourceGroupName", resourceGroupName); tracingParameters.Add("serviceName", serviceName); tracingParameters.Add("cancellationToken", cancellationToken); ServiceClientTracing.Enter(_invocationId, this, "List", tracingParameters); } // Construct URL var _baseUrl = this.Client.BaseUri.AbsoluteUri; var _url = new Uri(new Uri(_baseUrl + (_baseUrl.EndsWith("/") ? 
"" : "/")), "subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Search/searchServices/{serviceName}/listAdminKeys").ToString(); _url = _url.Replace("{resourceGroupName}", Uri.EscapeDataString(resourceGroupName)); _url = _url.Replace("{serviceName}", Uri.EscapeDataString(serviceName)); _url = _url.Replace("{subscriptionId}", Uri.EscapeDataString(this.Client.SubscriptionId)); List<string> _queryParameters = new List<string>(); if (this.Client.ApiVersion != null) { _queryParameters.Add(string.Format("api-version={0}", Uri.EscapeDataString(this.Client.ApiVersion))); } if (_queryParameters.Count > 0) { _url += "?" + string.Join("&", _queryParameters); } // Create HTTP transport objects HttpRequestMessage _httpRequest = new HttpRequestMessage(); HttpResponseMessage _httpResponse = null; _httpRequest.Method = new HttpMethod("POST"); _httpRequest.RequestUri = new Uri(_url); // Set Headers if (this.Client.GenerateClientRequestId != null && this.Client.GenerateClientRequestId.Value) { _httpRequest.Headers.TryAddWithoutValidation("x-ms-client-request-id", Guid.NewGuid().ToString()); } if (this.Client.AcceptLanguage != null) { if (_httpRequest.Headers.Contains("accept-language")) { _httpRequest.Headers.Remove("accept-language"); } _httpRequest.Headers.TryAddWithoutValidation("accept-language", this.Client.AcceptLanguage); } if (customHeaders != null) { foreach(var _header in customHeaders) { if (_httpRequest.Headers.Contains(_header.Key)) { _httpRequest.Headers.Remove(_header.Key); } _httpRequest.Headers.TryAddWithoutValidation(_header.Key, _header.Value); } } // Serialize Request string _requestContent = null; // Set Credentials if (this.Client.Credentials != null) { cancellationToken.ThrowIfCancellationRequested(); await this.Client.Credentials.ProcessHttpRequestAsync(_httpRequest, cancellationToken).ConfigureAwait(false); } // Send Request if (_shouldTrace) { ServiceClientTracing.SendRequest(_invocationId, _httpRequest); } 
cancellationToken.ThrowIfCancellationRequested(); _httpResponse = await this.Client.HttpClient.SendAsync(_httpRequest, cancellationToken).ConfigureAwait(false); if (_shouldTrace) { ServiceClientTracing.ReceiveResponse(_invocationId, _httpResponse); } HttpStatusCode _statusCode = _httpResponse.StatusCode; cancellationToken.ThrowIfCancellationRequested(); string _responseContent = null; if ((int)_statusCode != 200) { var ex = new CloudException(string.Format("Operation returned an invalid status code '{0}'", _statusCode)); try { _responseContent = await _httpResponse.Content.ReadAsStringAsync().ConfigureAwait(false); CloudError _errorBody = SafeJsonConvert.DeserializeObject<CloudError>(_responseContent, this.Client.DeserializationSettings); if (_errorBody != null) { ex = new CloudException(_errorBody.Message); ex.Body = _errorBody; } } catch (JsonException) { // Ignore the exception } ex.Request = new HttpRequestMessageWrapper(_httpRequest, _requestContent); ex.Response = new HttpResponseMessageWrapper(_httpResponse, _responseContent); if (_httpResponse.Headers.Contains("x-ms-request-id")) { ex.RequestId = _httpResponse.Headers.GetValues("x-ms-request-id").FirstOrDefault(); } if (_shouldTrace) { ServiceClientTracing.Error(_invocationId, ex); } _httpRequest.Dispose(); if (_httpResponse != null) { _httpResponse.Dispose(); } throw ex; } // Create Result var _result = new AzureOperationResponse<AdminKeyResult>(); _result.Request = _httpRequest; _result.Response = _httpResponse; if (_httpResponse.Headers.Contains("x-ms-request-id")) { _result.RequestId = _httpResponse.Headers.GetValues("x-ms-request-id").FirstOrDefault(); } // Deserialize Response if ((int)_statusCode == 200) { _responseContent = await _httpResponse.Content.ReadAsStringAsync().ConfigureAwait(false); try { _result.Body = SafeJsonConvert.DeserializeObject<AdminKeyResult>(_responseContent, this.Client.DeserializationSettings); } catch (JsonException ex) { _httpRequest.Dispose(); if (_httpResponse != null) { 
_httpResponse.Dispose(); } throw new SerializationException("Unable to deserialize the response.", _responseContent, ex); } } if (_shouldTrace) { ServiceClientTracing.Exit(_invocationId, _result); } return _result; } } }
// Copyright (c) Microsoft. All Rights Reserved. Licensed under the Apache License, Version 2.0. See License.txt in the project root for license information.

using System.Collections.Concurrent;
using System.Collections.Generic;
using System.Collections.Immutable;
using System.Threading;
using Microsoft.CodeAnalysis.CodeStyle;
using Microsoft.CodeAnalysis.Diagnostics;

namespace Microsoft.CodeAnalysis.UseAutoProperty
{
    internal static class Constants
    {
        // Diagnostic-property key used to carry the property's fully-qualified
        // display string from the analyzer to the code fix (see AnalyzeProperty).
        public const string SymbolEquivalenceKey = nameof(SymbolEquivalenceKey);
    }

    /// <summary>
    /// Language-agnostic core of the "use auto property" analyzer: finds a
    /// property whose getter (and optional setter) trivially reads/writes a
    /// single backing field, and reports diagnostics that let a code fix
    /// replace the pair with an auto property. Language-specific syntax
    /// questions are delegated to the abstract members.
    /// </summary>
    internal abstract class AbstractUseAutoPropertyAnalyzer<TPropertyDeclaration, TFieldDeclaration, TVariableDeclarator, TExpression> : AbstractCodeStyleDiagnosticAnalyzer
        where TPropertyDeclaration : SyntaxNode
        where TFieldDeclaration : SyntaxNode
        where TVariableDeclarator : SyntaxNode
        where TExpression : SyntaxNode
    {
        private static readonly LocalizableString s_title = new LocalizableResourceString(nameof(FeaturesResources.Use_auto_property), FeaturesResources.ResourceManager, typeof(FeaturesResources));

        protected AbstractUseAutoPropertyAnalyzer()
            : base(IDEDiagnosticIds.UseAutoPropertyDiagnosticId, s_title, s_title)
        {
        }

        // Lets the language-specific subclass record fields that must never be
        // converted (collected into the shared bag during the compilation).
        protected abstract void RegisterIneligibleFieldsAction(CompilationStartAnalysisContext context, ConcurrentBag<IFieldSymbol> ineligibleFields);
        // Whether the target language/compilation supports read-only auto properties.
        protected abstract bool SupportsReadOnlyProperties(Compilation compilation);
        // Whether the target language/compilation supports auto-property initializers.
        protected abstract bool SupportsPropertyInitializer(Compilation compilation);
        // Returns the initializer expression of a field declarator, if any.
        protected abstract TExpression GetFieldInitializer(TVariableDeclarator variable, CancellationToken cancellationToken);
        // Returns the single expression a trivial getter returns, or null.
        protected abstract TExpression GetGetterExpression(IMethodSymbol getMethod, CancellationToken cancellationToken);
        // Returns the field-access target of a trivial setter, or null.
        protected abstract TExpression GetSetterExpression(IMethodSymbol setMethod, SemanticModel semanticModel, CancellationToken cancellationToken);
        // Returns the syntax node to fade out when the field is removed.
        protected abstract SyntaxNode GetNodeToFade(TFieldDeclaration fieldDeclaration, TVariableDeclarator variableDeclarator);

        // Collects per-property candidates and language-reported ineligible fields
        // during the compilation, then reconciles them at compilation end.
        protected sealed override void InitializeWorker(AnalysisContext context)
            => context.RegisterCompilationStartAction(csac =>
            {
                var analysisResults = new ConcurrentBag<AnalysisResult>();
                var ineligibleFields = new ConcurrentBag<IFieldSymbol>();

                csac.RegisterSymbolAction(sac => AnalyzeProperty(analysisResults, sac), SymbolKind.Property);
                RegisterIneligibleFieldsAction(csac, ineligibleFields);

                csac.RegisterCompilationEndAction(cac => Process(analysisResults, ineligibleFields, cac));
            });

        /// <summary>
        /// Examines one property symbol and, if it is a viable candidate for
        /// conversion to an auto property, records an <see cref="AnalysisResult"/>.
        /// Each early return below is a disqualifying condition.
        /// </summary>
        private void AnalyzeProperty(ConcurrentBag<AnalysisResult> analysisResults, SymbolAnalysisContext symbolContext)
        {
            var property = (IPropertySymbol)symbolContext.Symbol;
            if (property.IsIndexer)
            {
                return;
            }

            // The property can't be virtual.  We don't know if it is overridden somewhere.  If it
            // is, then calls to it may not actually assign to the field.
            if (property.IsVirtual || property.IsOverride || property.IsSealed)
            {
                return;
            }

            if (property.IsWithEvents)
            {
                return;
            }

            if (property.Parameters.Length > 0)
            {
                return;
            }

            // Need at least a getter.
            if (property.GetMethod == null)
            {
                return;
            }

            var containingType = property.ContainingType;
            if (containingType == null)
            {
                return;
            }

            // Partial declarations (more than one syntax reference) are not handled.
            var declarations = property.DeclaringSyntaxReferences;
            if (declarations.Length != 1)
            {
                return;
            }

            var cancellationToken = symbolContext.CancellationToken;
            var propertyDeclaration = property.DeclaringSyntaxReferences[0].GetSyntax(cancellationToken).FirstAncestorOrSelf<TPropertyDeclaration>();
            if (propertyDeclaration == null)
            {
                return;
            }

            // The getter must be a trivial read of exactly one field.
            var semanticModel = symbolContext.Compilation.GetSemanticModel(propertyDeclaration.SyntaxTree);
            var getterField = GetGetterField(semanticModel, property.GetMethod, cancellationToken);
            if (getterField == null)
            {
                return;
            }

            // If the user made the field readonly, we only want to convert it to a property if we
            // can keep it readonly.
            if (getterField.IsReadOnly && !SupportsReadOnlyProperties(symbolContext.Compilation))
            {
                return;
            }

            if (!containingType.Equals(getterField.ContainingType))
            {
                // Field and property have to be in the same type.
                return;
            }

            // Property and field have to agree on type.
            if (!property.Type.Equals(getterField.Type))
            {
                return;
            }

            // Don't want to remove constants.
            if (getterField.IsConst)
            {
                return;
            }

            if (getterField.DeclaringSyntaxReferences.Length != 1)
            {
                return;
            }

            // Field and property should match in static-ness
            if (getterField.IsStatic != property.IsStatic)
            {
                return;
            }

            // A setter is optional though.
            var setMethod = property.SetMethod;
            if (setMethod != null)
            {
                var setterField = GetSetterField(semanticModel, containingType, setMethod, cancellationToken);
                if (setterField != getterField)
                {
                    // If there is a getter and a setter, they both need to agree on which field they are
                    // writing to.
                    return;
                }
            }

            var fieldReference = getterField.DeclaringSyntaxReferences[0];
            var variableDeclarator = fieldReference.GetSyntax(symbolContext.CancellationToken) as TVariableDeclarator;
            if (variableDeclarator == null)
            {
                return;
            }

            // A field initializer would become a property initializer; bail if the
            // language version doesn't support those.
            var initializer = GetFieldInitializer(variableDeclarator, cancellationToken);
            if (initializer != null && !SupportsPropertyInitializer(symbolContext.Compilation))
            {
                return;
            }

            var fieldDeclaration = variableDeclarator?.Parent?.Parent as TFieldDeclaration;
            if (fieldDeclaration == null)
            {
                return;
            }

            // Can't remove the field if it has attributes on it.
            if (getterField.GetAttributes().Length > 0)
            {
                return;
            }

            // Looks like a viable property/field to convert into an auto property.
            analysisResults.Add(new AnalysisResult(property, getterField, propertyDeclaration, fieldDeclaration, variableDeclarator, property.ToDisplayString(SymbolDisplayFormat.FullyQualifiedFormat)));
        }

        // Resolves the field a trivial setter writes to, or null.
        private IFieldSymbol GetSetterField(SemanticModel semanticModel, ISymbol containingType, IMethodSymbol setMethod, CancellationToken cancellationToken)
        {
            return CheckFieldAccessExpression(semanticModel, GetSetterExpression(setMethod, semanticModel, cancellationToken));
        }

        // Resolves the field a trivial getter reads from, or null.
        private IFieldSymbol GetGetterField(SemanticModel semanticModel, IMethodSymbol getMethod, CancellationToken cancellationToken)
        {
            return CheckFieldAccessExpression(semanticModel, GetGetterExpression(getMethod, cancellationToken));
        }

        // Returns the expression's bound field symbol when it binds to a field
        // with exactly one declaration; otherwise null.
        private IFieldSymbol CheckFieldAccessExpression(SemanticModel semanticModel, TExpression expression)
        {
            if (expression == null)
            {
                return null;
            }

            var symbolInfo = semanticModel.GetSymbolInfo(expression);
            if (symbolInfo.Symbol == null || symbolInfo.Symbol.Kind != SymbolKind.Field)
            {
                return null;
            }

            var field = (IFieldSymbol)symbolInfo.Symbol;
            if (field.DeclaringSyntaxReferences.Length > 1)
            {
                return null;
            }

            return field;
        }

        // Compilation-end pass: drops candidates whose field was reported
        // ineligible, then reports diagnostics for the rest.
        private void Process(ConcurrentBag<AnalysisResult> analysisResults, ConcurrentBag<IFieldSymbol> ineligibleFields, CompilationAnalysisContext compilationContext)
        {
            var ineligibleFieldsSet = new HashSet<IFieldSymbol>(ineligibleFields);
            foreach (var result in analysisResults)
            {
                var field = result.Field;
                if (ineligibleFieldsSet.Contains(field))
                {
                    continue;
                }

                Process(result, compilationContext);
            }
        }

        // Reports the fade-out diagnostic plus the paired hidden diagnostics for
        // a single accepted candidate.
        private void Process(AnalysisResult result, CompilationAnalysisContext compilationContext)
        {
            // Check if there are additional reasons we think this field might be ineligible for
            // replacing with an auto prop.
            if (!IsEligibleHeuristic(result.Field, result.PropertyDeclaration, compilationContext.Compilation, compilationContext.CancellationToken))
            {
                return;
            }

            var propertyDeclaration = result.PropertyDeclaration;
            var variableDeclarator = result.VariableDeclarator;
            var nodeToFade = GetNodeToFade(result.FieldDeclaration, variableDeclarator);

            var properties = ImmutableDictionary<string, string>.Empty.Add(
                Constants.SymbolEquivalenceKey, result.SymbolEquivalenceKey);

            // Fade out the field/variable we are going to remove.
            var diagnostic1 = Diagnostic.Create(UnnecessaryWithoutSuggestionDescriptor, nodeToFade.GetLocation());
            compilationContext.ReportDiagnostic(diagnostic1);

            // Now add diagnostics to both the field and the property saying we can convert it to
            // an auto property.  For each diagnostic store both location so we can easily retrieve
            // them when performing the code fix.
            IEnumerable<Location> additionalLocations = new Location[] { propertyDeclaration.GetLocation(), variableDeclarator.GetLocation() };

            var diagnostic2 = Diagnostic.Create(HiddenDescriptor, propertyDeclaration.GetLocation(), additionalLocations, properties);
            compilationContext.ReportDiagnostic(diagnostic2);

            var diagnostic3 = Diagnostic.Create(HiddenDescriptor, nodeToFade.GetLocation(), additionalLocations, properties);
            compilationContext.ReportDiagnostic(diagnostic3);
        }

        // Extension point for extra language-specific rejection logic; the base
        // implementation accepts everything.
        protected virtual bool IsEligibleHeuristic(IFieldSymbol field, TPropertyDeclaration propertyDeclaration, Compilation compilation, CancellationToken cancellationToken)
        {
            return true;
        }

        // Immutable carrier for one property/field candidate discovered during
        // the symbol pass and consumed at compilation end.
        internal class AnalysisResult
        {
            public readonly IPropertySymbol Property;
            public readonly IFieldSymbol Field;
            public readonly TPropertyDeclaration PropertyDeclaration;
            public readonly TFieldDeclaration FieldDeclaration;
            public readonly TVariableDeclarator VariableDeclarator;
            public readonly string SymbolEquivalenceKey;

            public AnalysisResult(
                IPropertySymbol property,
                IFieldSymbol field,
                TPropertyDeclaration propertyDeclaration,
                TFieldDeclaration fieldDeclaration,
                TVariableDeclarator variableDeclarator,
                string symbolEquivalenceKey)
            {
                Property = property;
                Field = field;
                PropertyDeclaration = propertyDeclaration;
                FieldDeclaration = fieldDeclaration;
                VariableDeclarator = variableDeclarator;
                SymbolEquivalenceKey = symbolEquivalenceKey;
            }
        }
    }
}
// Copyright (c) Microsoft Corporation. All rights reserved. // Licensed under the MIT License. See License.txt in the project root for // license information. // // Code generated by Microsoft (R) AutoRest Code Generator 0.17.0.0 // Changes may cause incorrect behavior and will be lost if the code is // regenerated. namespace Microsoft.Azure.Management.ServerManagement { using Microsoft.Rest.Azure; using Models; /// <summary> /// GatewayOperations operations. /// </summary> public partial interface IGatewayOperations { /// <summary> /// Creates or updates a ManagementService gateway. /// </summary> /// <param name='resourceGroupName'> /// The resource group name uniquely identifies the resource group /// within the user subscriptionId. /// </param> /// <param name='gatewayName'> /// The gateway name (256 characters maximum). /// </param> /// <param name='location'> /// location of the resource /// </param> /// <param name='tags'> /// resource tags /// </param> /// <param name='upgradeMode'> /// The upgradeMode property gives the flexibility to gateway to auto /// upgrade itself. If properties value not specified, then we assume /// upgradeMode = Automatic. Possible values include: 'Manual', /// 'Automatic' /// </param> /// <param name='customHeaders'> /// The headers that will be added to request. /// </param> /// <param name='cancellationToken'> /// The cancellation token. 
/// </param> /// <exception cref="ErrorException"> /// Thrown when the operation returned an invalid status code /// </exception> /// <exception cref="Microsoft.Rest.SerializationException"> /// Thrown when unable to deserialize the response /// </exception> /// <exception cref="Microsoft.Rest.ValidationException"> /// Thrown when a required parameter is null /// </exception> System.Threading.Tasks.Task<Microsoft.Rest.Azure.AzureOperationResponse<GatewayResource>> CreateWithHttpMessagesAsync(string resourceGroupName, string gatewayName, string location = default(string), object tags = default(object), UpgradeMode? upgradeMode = default(UpgradeMode?), System.Collections.Generic.Dictionary<string, System.Collections.Generic.List<string>> customHeaders = null, System.Threading.CancellationToken cancellationToken = default(System.Threading.CancellationToken)); /// <summary> /// Creates or updates a ManagementService gateway. /// </summary> /// <param name='resourceGroupName'> /// The resource group name uniquely identifies the resource group /// within the user subscriptionId. /// </param> /// <param name='gatewayName'> /// The gateway name (256 characters maximum). /// </param> /// <param name='location'> /// location of the resource /// </param> /// <param name='tags'> /// resource tags /// </param> /// <param name='upgradeMode'> /// The upgradeMode property gives the flexibility to gateway to auto /// upgrade itself. If properties value not specified, then we assume /// upgradeMode = Automatic. Possible values include: 'Manual', /// 'Automatic' /// </param> /// <param name='customHeaders'> /// The headers that will be added to request. /// </param> /// <param name='cancellationToken'> /// The cancellation token. 
/// </param> /// <exception cref="ErrorException"> /// Thrown when the operation returned an invalid status code /// </exception> /// <exception cref="Microsoft.Rest.SerializationException"> /// Thrown when unable to deserialize the response /// </exception> /// <exception cref="Microsoft.Rest.ValidationException"> /// Thrown when a required parameter is null /// </exception> System.Threading.Tasks.Task<Microsoft.Rest.Azure.AzureOperationResponse<GatewayResource>> BeginCreateWithHttpMessagesAsync(string resourceGroupName, string gatewayName, string location = default(string), object tags = default(object), UpgradeMode? upgradeMode = default(UpgradeMode?), System.Collections.Generic.Dictionary<string, System.Collections.Generic.List<string>> customHeaders = null, System.Threading.CancellationToken cancellationToken = default(System.Threading.CancellationToken)); /// <summary> /// Updates a gateway belonging to a resource group. /// </summary> /// <param name='resourceGroupName'> /// The resource group name uniquely identifies the resource group /// within the user subscriptionId. /// </param> /// <param name='gatewayName'> /// The gateway name (256 characters maximum). /// </param> /// <param name='location'> /// location of the resource /// </param> /// <param name='tags'> /// resource tags /// </param> /// <param name='upgradeMode'> /// The upgradeMode property gives the flexibility to gateway to auto /// upgrade itself. If properties value not specified, then we assume /// upgradeMode = Automatic. Possible values include: 'Manual', /// 'Automatic' /// </param> /// <param name='customHeaders'> /// The headers that will be added to request. /// </param> /// <param name='cancellationToken'> /// The cancellation token. 
/// </param> /// <exception cref="ErrorException"> /// Thrown when the operation returned an invalid status code /// </exception> /// <exception cref="Microsoft.Rest.SerializationException"> /// Thrown when unable to deserialize the response /// </exception> /// <exception cref="Microsoft.Rest.ValidationException"> /// Thrown when a required parameter is null /// </exception> System.Threading.Tasks.Task<Microsoft.Rest.Azure.AzureOperationResponse<GatewayResource>> UpdateWithHttpMessagesAsync(string resourceGroupName, string gatewayName, string location = default(string), object tags = default(object), UpgradeMode? upgradeMode = default(UpgradeMode?), System.Collections.Generic.Dictionary<string, System.Collections.Generic.List<string>> customHeaders = null, System.Threading.CancellationToken cancellationToken = default(System.Threading.CancellationToken)); /// <summary> /// Updates a gateway belonging to a resource group. /// </summary> /// <param name='resourceGroupName'> /// The resource group name uniquely identifies the resource group /// within the user subscriptionId. /// </param> /// <param name='gatewayName'> /// The gateway name (256 characters maximum). /// </param> /// <param name='location'> /// location of the resource /// </param> /// <param name='tags'> /// resource tags /// </param> /// <param name='upgradeMode'> /// The upgradeMode property gives the flexibility to gateway to auto /// upgrade itself. If properties value not specified, then we assume /// upgradeMode = Automatic. Possible values include: 'Manual', /// 'Automatic' /// </param> /// <param name='customHeaders'> /// The headers that will be added to request. /// </param> /// <param name='cancellationToken'> /// The cancellation token. 
/// </param> /// <exception cref="ErrorException"> /// Thrown when the operation returned an invalid status code /// </exception> /// <exception cref="Microsoft.Rest.SerializationException"> /// Thrown when unable to deserialize the response /// </exception> /// <exception cref="Microsoft.Rest.ValidationException"> /// Thrown when a required parameter is null /// </exception> System.Threading.Tasks.Task<Microsoft.Rest.Azure.AzureOperationResponse<GatewayResource>> BeginUpdateWithHttpMessagesAsync(string resourceGroupName, string gatewayName, string location = default(string), object tags = default(object), UpgradeMode? upgradeMode = default(UpgradeMode?), System.Collections.Generic.Dictionary<string, System.Collections.Generic.List<string>> customHeaders = null, System.Threading.CancellationToken cancellationToken = default(System.Threading.CancellationToken)); /// <summary> /// Deletes a gateway from a resource group. /// </summary> /// <param name='resourceGroupName'> /// The resource group name uniquely identifies the resource group /// within the user subscriptionId. /// </param> /// <param name='gatewayName'> /// The gateway name (256 characters maximum). /// </param> /// <param name='customHeaders'> /// The headers that will be added to request. /// </param> /// <param name='cancellationToken'> /// The cancellation token. 
/// </param> /// <exception cref="ErrorException"> /// Thrown when the operation returned an invalid status code /// </exception> /// <exception cref="Microsoft.Rest.ValidationException"> /// Thrown when a required parameter is null /// </exception> System.Threading.Tasks.Task<Microsoft.Rest.Azure.AzureOperationResponse> DeleteWithHttpMessagesAsync(string resourceGroupName, string gatewayName, System.Collections.Generic.Dictionary<string, System.Collections.Generic.List<string>> customHeaders = null, System.Threading.CancellationToken cancellationToken = default(System.Threading.CancellationToken)); /// <summary> /// Returns a gateway /// </summary> /// <param name='resourceGroupName'> /// The resource group name uniquely identifies the resource group /// within the user subscriptionId. /// </param> /// <param name='gatewayName'> /// The gateway name (256 characters maximum) /// </param> /// <param name='expand'> /// Gets subscription credentials which uniquely identify Microsoft /// Azure subscription. The subscription ID forms part of the URI for /// every service call. Possible values include: 'status', 'download' /// </param> /// <param name='customHeaders'> /// The headers that will be added to request. /// </param> /// <param name='cancellationToken'> /// The cancellation token. /// </param> /// <exception cref="ErrorException"> /// Thrown when the operation returned an invalid status code /// </exception> /// <exception cref="Microsoft.Rest.SerializationException"> /// Thrown when unable to deserialize the response /// </exception> /// <exception cref="Microsoft.Rest.ValidationException"> /// Thrown when a required parameter is null /// </exception> System.Threading.Tasks.Task<Microsoft.Rest.Azure.AzureOperationResponse<GatewayResource>> GetWithHttpMessagesAsync(string resourceGroupName, string gatewayName, GatewayExpandOption? 
expand = default(GatewayExpandOption?), System.Collections.Generic.Dictionary<string, System.Collections.Generic.List<string>> customHeaders = null, System.Threading.CancellationToken cancellationToken = default(System.Threading.CancellationToken)); /// <summary> /// Returns gateways in a subscription /// </summary> /// <param name='customHeaders'> /// The headers that will be added to request. /// </param> /// <param name='cancellationToken'> /// The cancellation token. /// </param> /// <exception cref="ErrorException"> /// Thrown when the operation returned an invalid status code /// </exception> /// <exception cref="Microsoft.Rest.SerializationException"> /// Thrown when unable to deserialize the response /// </exception> /// <exception cref="Microsoft.Rest.ValidationException"> /// Thrown when a required parameter is null /// </exception> System.Threading.Tasks.Task<Microsoft.Rest.Azure.AzureOperationResponse<Microsoft.Rest.Azure.IPage<GatewayResource>>> ListWithHttpMessagesAsync(System.Collections.Generic.Dictionary<string, System.Collections.Generic.List<string>> customHeaders = null, System.Threading.CancellationToken cancellationToken = default(System.Threading.CancellationToken)); /// <summary> /// Returns gateways in a resource group /// </summary> /// <param name='resourceGroupName'> /// The resource group name uniquely identifies the resource group /// within the user subscriptionId. /// </param> /// <param name='customHeaders'> /// The headers that will be added to request. /// </param> /// <param name='cancellationToken'> /// The cancellation token. 
/// </param> /// <exception cref="ErrorException"> /// Thrown when the operation returned an invalid status code /// </exception> /// <exception cref="Microsoft.Rest.SerializationException"> /// Thrown when unable to deserialize the response /// </exception> /// <exception cref="Microsoft.Rest.ValidationException"> /// Thrown when a required parameter is null /// </exception> System.Threading.Tasks.Task<Microsoft.Rest.Azure.AzureOperationResponse<Microsoft.Rest.Azure.IPage<GatewayResource>>> ListForResourceGroupWithHttpMessagesAsync(string resourceGroupName, System.Collections.Generic.Dictionary<string, System.Collections.Generic.List<string>> customHeaders = null, System.Threading.CancellationToken cancellationToken = default(System.Threading.CancellationToken)); /// <summary> /// Upgrade a gateway /// </summary> /// <param name='resourceGroupName'> /// The resource group name uniquely identifies the resource group /// within the user subscriptionId. /// </param> /// <param name='gatewayName'> /// The gateway name (256 characters maximum). /// </param> /// <param name='customHeaders'> /// The headers that will be added to request. /// </param> /// <param name='cancellationToken'> /// The cancellation token. 
/// </param> /// <exception cref="ErrorException"> /// Thrown when the operation returned an invalid status code /// </exception> /// <exception cref="Microsoft.Rest.ValidationException"> /// Thrown when a required parameter is null /// </exception> System.Threading.Tasks.Task<Microsoft.Rest.Azure.AzureOperationResponse> UpgradeWithHttpMessagesAsync(string resourceGroupName, string gatewayName, System.Collections.Generic.Dictionary<string, System.Collections.Generic.List<string>> customHeaders = null, System.Threading.CancellationToken cancellationToken = default(System.Threading.CancellationToken)); /// <summary> /// Upgrade a gateway /// </summary> /// <param name='resourceGroupName'> /// The resource group name uniquely identifies the resource group /// within the user subscriptionId. /// </param> /// <param name='gatewayName'> /// The gateway name (256 characters maximum). /// </param> /// <param name='customHeaders'> /// The headers that will be added to request. /// </param> /// <param name='cancellationToken'> /// The cancellation token. /// </param> /// <exception cref="ErrorException"> /// Thrown when the operation returned an invalid status code /// </exception> /// <exception cref="Microsoft.Rest.ValidationException"> /// Thrown when a required parameter is null /// </exception> System.Threading.Tasks.Task<Microsoft.Rest.Azure.AzureOperationResponse> BeginUpgradeWithHttpMessagesAsync(string resourceGroupName, string gatewayName, System.Collections.Generic.Dictionary<string, System.Collections.Generic.List<string>> customHeaders = null, System.Threading.CancellationToken cancellationToken = default(System.Threading.CancellationToken)); /// <summary> /// Regenerate a gateway's profile /// </summary> /// <param name='resourceGroupName'> /// The resource group name uniquely identifies the resource group /// within the user subscriptionId. /// </param> /// <param name='gatewayName'> /// The gateway name (256 characters maximum). 
/// </param> /// <param name='customHeaders'> /// The headers that will be added to request. /// </param> /// <param name='cancellationToken'> /// The cancellation token. /// </param> /// <exception cref="ErrorException"> /// Thrown when the operation returned an invalid status code /// </exception> /// <exception cref="Microsoft.Rest.ValidationException"> /// Thrown when a required parameter is null /// </exception> System.Threading.Tasks.Task<Microsoft.Rest.Azure.AzureOperationResponse> RegenerateProfileWithHttpMessagesAsync(string resourceGroupName, string gatewayName, System.Collections.Generic.Dictionary<string, System.Collections.Generic.List<string>> customHeaders = null, System.Threading.CancellationToken cancellationToken = default(System.Threading.CancellationToken)); /// <summary> /// Regenerate a gateway's profile /// </summary> /// <param name='resourceGroupName'> /// The resource group name uniquely identifies the resource group /// within the user subscriptionId. /// </param> /// <param name='gatewayName'> /// The gateway name (256 characters maximum). /// </param> /// <param name='customHeaders'> /// The headers that will be added to request. /// </param> /// <param name='cancellationToken'> /// The cancellation token. 
/// </param> /// <exception cref="ErrorException"> /// Thrown when the operation returned an invalid status code /// </exception> /// <exception cref="Microsoft.Rest.ValidationException"> /// Thrown when a required parameter is null /// </exception> System.Threading.Tasks.Task<Microsoft.Rest.Azure.AzureOperationResponse> BeginRegenerateProfileWithHttpMessagesAsync(string resourceGroupName, string gatewayName, System.Collections.Generic.Dictionary<string, System.Collections.Generic.List<string>> customHeaders = null, System.Threading.CancellationToken cancellationToken = default(System.Threading.CancellationToken)); /// <summary> /// Gets a gateway profile /// </summary> /// <param name='resourceGroupName'> /// The resource group name uniquely identifies the resource group /// within the user subscriptionId. /// </param> /// <param name='gatewayName'> /// The gateway name (256 characters maximum). /// </param> /// <param name='customHeaders'> /// The headers that will be added to request. /// </param> /// <param name='cancellationToken'> /// The cancellation token. 
/// </param> /// <exception cref="ErrorException"> /// Thrown when the operation returned an invalid status code /// </exception> /// <exception cref="Microsoft.Rest.SerializationException"> /// Thrown when unable to deserialize the response /// </exception> /// <exception cref="Microsoft.Rest.ValidationException"> /// Thrown when a required parameter is null /// </exception> System.Threading.Tasks.Task<Microsoft.Rest.Azure.AzureOperationResponse<GatewayProfile>> GetProfileWithHttpMessagesAsync(string resourceGroupName, string gatewayName, System.Collections.Generic.Dictionary<string, System.Collections.Generic.List<string>> customHeaders = null, System.Threading.CancellationToken cancellationToken = default(System.Threading.CancellationToken)); /// <summary> /// Gets a gateway profile /// </summary> /// <param name='resourceGroupName'> /// The resource group name uniquely identifies the resource group /// within the user subscriptionId. /// </param> /// <param name='gatewayName'> /// The gateway name (256 characters maximum). /// </param> /// <param name='customHeaders'> /// The headers that will be added to request. /// </param> /// <param name='cancellationToken'> /// The cancellation token. 
/// </param> /// <exception cref="ErrorException"> /// Thrown when the operation returned an invalid status code /// </exception> /// <exception cref="Microsoft.Rest.SerializationException"> /// Thrown when unable to deserialize the response /// </exception> /// <exception cref="Microsoft.Rest.ValidationException"> /// Thrown when a required parameter is null /// </exception> System.Threading.Tasks.Task<Microsoft.Rest.Azure.AzureOperationResponse<GatewayProfile>> BeginGetProfileWithHttpMessagesAsync(string resourceGroupName, string gatewayName, System.Collections.Generic.Dictionary<string, System.Collections.Generic.List<string>> customHeaders = null, System.Threading.CancellationToken cancellationToken = default(System.Threading.CancellationToken)); /// <summary> /// Returns gateways in a subscription /// </summary> /// <param name='nextPageLink'> /// The NextLink from the previous successful call to List operation. /// </param> /// <param name='customHeaders'> /// The headers that will be added to request. /// </param> /// <param name='cancellationToken'> /// The cancellation token. 
/// </param> /// <exception cref="ErrorException"> /// Thrown when the operation returned an invalid status code /// </exception> /// <exception cref="Microsoft.Rest.SerializationException"> /// Thrown when unable to deserialize the response /// </exception> /// <exception cref="Microsoft.Rest.ValidationException"> /// Thrown when a required parameter is null /// </exception> System.Threading.Tasks.Task<Microsoft.Rest.Azure.AzureOperationResponse<Microsoft.Rest.Azure.IPage<GatewayResource>>> ListNextWithHttpMessagesAsync(string nextPageLink, System.Collections.Generic.Dictionary<string, System.Collections.Generic.List<string>> customHeaders = null, System.Threading.CancellationToken cancellationToken = default(System.Threading.CancellationToken)); /// <summary> /// Returns gateways in a resource group /// </summary> /// <param name='nextPageLink'> /// The NextLink from the previous successful call to List operation. /// </param> /// <param name='customHeaders'> /// The headers that will be added to request. /// </param> /// <param name='cancellationToken'> /// The cancellation token. /// </param> /// <exception cref="ErrorException"> /// Thrown when the operation returned an invalid status code /// </exception> /// <exception cref="Microsoft.Rest.SerializationException"> /// Thrown when unable to deserialize the response /// </exception> /// <exception cref="Microsoft.Rest.ValidationException"> /// Thrown when a required parameter is null /// </exception> System.Threading.Tasks.Task<Microsoft.Rest.Azure.AzureOperationResponse<Microsoft.Rest.Azure.IPage<GatewayResource>>> ListForResourceGroupNextWithHttpMessagesAsync(string nextPageLink, System.Collections.Generic.Dictionary<string, System.Collections.Generic.List<string>> customHeaders = null, System.Threading.CancellationToken cancellationToken = default(System.Threading.CancellationToken)); } }
using System; using Org.BouncyCastle.Crypto.Parameters; using Org.BouncyCastle.Crypto.Utilities; namespace Org.BouncyCastle.Crypto.Engines { /** * A class that provides Blowfish key encryption operations, * such as encoding data and generating keys. * All the algorithms herein are from Applied Cryptography * and implement a simplified cryptography interface. */ public sealed class BlowfishEngine : IBlockCipher { private readonly static uint[] KP = { 0x243F6A88, 0x85A308D3, 0x13198A2E, 0x03707344, 0xA4093822, 0x299F31D0, 0x082EFA98, 0xEC4E6C89, 0x452821E6, 0x38D01377, 0xBE5466CF, 0x34E90C6C, 0xC0AC29B7, 0xC97C50DD, 0x3F84D5B5, 0xB5470917, 0x9216D5D9, 0x8979FB1B }, KS0 = { 0xD1310BA6, 0x98DFB5AC, 0x2FFD72DB, 0xD01ADFB7, 0xB8E1AFED, 0x6A267E96, 0xBA7C9045, 0xF12C7F99, 0x24A19947, 0xB3916CF7, 0x0801F2E2, 0x858EFC16, 0x636920D8, 0x71574E69, 0xA458FEA3, 0xF4933D7E, 0x0D95748F, 0x728EB658, 0x718BCD58, 0x82154AEE, 0x7B54A41D, 0xC25A59B5, 0x9C30D539, 0x2AF26013, 0xC5D1B023, 0x286085F0, 0xCA417918, 0xB8DB38EF, 0x8E79DCB0, 0x603A180E, 0x6C9E0E8B, 0xB01E8A3E, 0xD71577C1, 0xBD314B27, 0x78AF2FDA, 0x55605C60, 0xE65525F3, 0xAA55AB94, 0x57489862, 0x63E81440, 0x55CA396A, 0x2AAB10B6, 0xB4CC5C34, 0x1141E8CE, 0xA15486AF, 0x7C72E993, 0xB3EE1411, 0x636FBC2A, 0x2BA9C55D, 0x741831F6, 0xCE5C3E16, 0x9B87931E, 0xAFD6BA33, 0x6C24CF5C, 0x7A325381, 0x28958677, 0x3B8F4898, 0x6B4BB9AF, 0xC4BFE81B, 0x66282193, 0x61D809CC, 0xFB21A991, 0x487CAC60, 0x5DEC8032, 0xEF845D5D, 0xE98575B1, 0xDC262302, 0xEB651B88, 0x23893E81, 0xD396ACC5, 0x0F6D6FF3, 0x83F44239, 0x2E0B4482, 0xA4842004, 0x69C8F04A, 0x9E1F9B5E, 0x21C66842, 0xF6E96C9A, 0x670C9C61, 0xABD388F0, 0x6A51A0D2, 0xD8542F68, 0x960FA728, 0xAB5133A3, 0x6EEF0B6C, 0x137A3BE4, 0xBA3BF050, 0x7EFB2A98, 0xA1F1651D, 0x39AF0176, 0x66CA593E, 0x82430E88, 0x8CEE8619, 0x456F9FB4, 0x7D84A5C3, 0x3B8B5EBE, 0xE06F75D8, 0x85C12073, 0x401A449F, 0x56C16AA6, 0x4ED3AA62, 0x363F7706, 0x1BFEDF72, 0x429B023D, 0x37D0D724, 0xD00A1248, 0xDB0FEAD3, 0x49F1C09B, 0x075372C9, 0x80991B7B, 
0x25D479D8, 0xF6E8DEF7, 0xE3FE501A, 0xB6794C3B, 0x976CE0BD, 0x04C006BA, 0xC1A94FB6, 0x409F60C4, 0x5E5C9EC2, 0x196A2463, 0x68FB6FAF, 0x3E6C53B5, 0x1339B2EB, 0x3B52EC6F, 0x6DFC511F, 0x9B30952C, 0xCC814544, 0xAF5EBD09, 0xBEE3D004, 0xDE334AFD, 0x660F2807, 0x192E4BB3, 0xC0CBA857, 0x45C8740F, 0xD20B5F39, 0xB9D3FBDB, 0x5579C0BD, 0x1A60320A, 0xD6A100C6, 0x402C7279, 0x679F25FE, 0xFB1FA3CC, 0x8EA5E9F8, 0xDB3222F8, 0x3C7516DF, 0xFD616B15, 0x2F501EC8, 0xAD0552AB, 0x323DB5FA, 0xFD238760, 0x53317B48, 0x3E00DF82, 0x9E5C57BB, 0xCA6F8CA0, 0x1A87562E, 0xDF1769DB, 0xD542A8F6, 0x287EFFC3, 0xAC6732C6, 0x8C4F5573, 0x695B27B0, 0xBBCA58C8, 0xE1FFA35D, 0xB8F011A0, 0x10FA3D98, 0xFD2183B8, 0x4AFCB56C, 0x2DD1D35B, 0x9A53E479, 0xB6F84565, 0xD28E49BC, 0x4BFB9790, 0xE1DDF2DA, 0xA4CB7E33, 0x62FB1341, 0xCEE4C6E8, 0xEF20CADA, 0x36774C01, 0xD07E9EFE, 0x2BF11FB4, 0x95DBDA4D, 0xAE909198, 0xEAAD8E71, 0x6B93D5A0, 0xD08ED1D0, 0xAFC725E0, 0x8E3C5B2F, 0x8E7594B7, 0x8FF6E2FB, 0xF2122B64, 0x8888B812, 0x900DF01C, 0x4FAD5EA0, 0x688FC31C, 0xD1CFF191, 0xB3A8C1AD, 0x2F2F2218, 0xBE0E1777, 0xEA752DFE, 0x8B021FA1, 0xE5A0CC0F, 0xB56F74E8, 0x18ACF3D6, 0xCE89E299, 0xB4A84FE0, 0xFD13E0B7, 0x7CC43B81, 0xD2ADA8D9, 0x165FA266, 0x80957705, 0x93CC7314, 0x211A1477, 0xE6AD2065, 0x77B5FA86, 0xC75442F5, 0xFB9D35CF, 0xEBCDAF0C, 0x7B3E89A0, 0xD6411BD3, 0xAE1E7E49, 0x00250E2D, 0x2071B35E, 0x226800BB, 0x57B8E0AF, 0x2464369B, 0xF009B91E, 0x5563911D, 0x59DFA6AA, 0x78C14389, 0xD95A537F, 0x207D5BA2, 0x02E5B9C5, 0x83260376, 0x6295CFA9, 0x11C81968, 0x4E734A41, 0xB3472DCA, 0x7B14A94A, 0x1B510052, 0x9A532915, 0xD60F573F, 0xBC9BC6E4, 0x2B60A476, 0x81E67400, 0x08BA6FB5, 0x571BE91F, 0xF296EC6B, 0x2A0DD915, 0xB6636521, 0xE7B9F9B6, 0xFF34052E, 0xC5855664, 0x53B02D5D, 0xA99F8FA1, 0x08BA4799, 0x6E85076A }, KS1 = { 0x4B7A70E9, 0xB5B32944, 0xDB75092E, 0xC4192623, 0xAD6EA6B0, 0x49A7DF7D, 0x9CEE60B8, 0x8FEDB266, 0xECAA8C71, 0x699A17FF, 0x5664526C, 0xC2B19EE1, 0x193602A5, 0x75094C29, 0xA0591340, 0xE4183A3E, 0x3F54989A, 0x5B429D65, 0x6B8FE4D6, 
0x99F73FD6, 0xA1D29C07, 0xEFE830F5, 0x4D2D38E6, 0xF0255DC1, 0x4CDD2086, 0x8470EB26, 0x6382E9C6, 0x021ECC5E, 0x09686B3F, 0x3EBAEFC9, 0x3C971814, 0x6B6A70A1, 0x687F3584, 0x52A0E286, 0xB79C5305, 0xAA500737, 0x3E07841C, 0x7FDEAE5C, 0x8E7D44EC, 0x5716F2B8, 0xB03ADA37, 0xF0500C0D, 0xF01C1F04, 0x0200B3FF, 0xAE0CF51A, 0x3CB574B2, 0x25837A58, 0xDC0921BD, 0xD19113F9, 0x7CA92FF6, 0x94324773, 0x22F54701, 0x3AE5E581, 0x37C2DADC, 0xC8B57634, 0x9AF3DDA7, 0xA9446146, 0x0FD0030E, 0xECC8C73E, 0xA4751E41, 0xE238CD99, 0x3BEA0E2F, 0x3280BBA1, 0x183EB331, 0x4E548B38, 0x4F6DB908, 0x6F420D03, 0xF60A04BF, 0x2CB81290, 0x24977C79, 0x5679B072, 0xBCAF89AF, 0xDE9A771F, 0xD9930810, 0xB38BAE12, 0xDCCF3F2E, 0x5512721F, 0x2E6B7124, 0x501ADDE6, 0x9F84CD87, 0x7A584718, 0x7408DA17, 0xBC9F9ABC, 0xE94B7D8C, 0xEC7AEC3A, 0xDB851DFA, 0x63094366, 0xC464C3D2, 0xEF1C1847, 0x3215D908, 0xDD433B37, 0x24C2BA16, 0x12A14D43, 0x2A65C451, 0x50940002, 0x133AE4DD, 0x71DFF89E, 0x10314E55, 0x81AC77D6, 0x5F11199B, 0x043556F1, 0xD7A3C76B, 0x3C11183B, 0x5924A509, 0xF28FE6ED, 0x97F1FBFA, 0x9EBABF2C, 0x1E153C6E, 0x86E34570, 0xEAE96FB1, 0x860E5E0A, 0x5A3E2AB3, 0x771FE71C, 0x4E3D06FA, 0x2965DCB9, 0x99E71D0F, 0x803E89D6, 0x5266C825, 0x2E4CC978, 0x9C10B36A, 0xC6150EBA, 0x94E2EA78, 0xA5FC3C53, 0x1E0A2DF4, 0xF2F74EA7, 0x361D2B3D, 0x1939260F, 0x19C27960, 0x5223A708, 0xF71312B6, 0xEBADFE6E, 0xEAC31F66, 0xE3BC4595, 0xA67BC883, 0xB17F37D1, 0x018CFF28, 0xC332DDEF, 0xBE6C5AA5, 0x65582185, 0x68AB9802, 0xEECEA50F, 0xDB2F953B, 0x2AEF7DAD, 0x5B6E2F84, 0x1521B628, 0x29076170, 0xECDD4775, 0x619F1510, 0x13CCA830, 0xEB61BD96, 0x0334FE1E, 0xAA0363CF, 0xB5735C90, 0x4C70A239, 0xD59E9E0B, 0xCBAADE14, 0xEECC86BC, 0x60622CA7, 0x9CAB5CAB, 0xB2F3846E, 0x648B1EAF, 0x19BDF0CA, 0xA02369B9, 0x655ABB50, 0x40685A32, 0x3C2AB4B3, 0x319EE9D5, 0xC021B8F7, 0x9B540B19, 0x875FA099, 0x95F7997E, 0x623D7DA8, 0xF837889A, 0x97E32D77, 0x11ED935F, 0x16681281, 0x0E358829, 0xC7E61FD6, 0x96DEDFA1, 0x7858BA99, 0x57F584A5, 0x1B227263, 0x9B83C3FF, 0x1AC24696, 0xCDB30AEB, 
0x532E3054, 0x8FD948E4, 0x6DBC3128, 0x58EBF2EF, 0x34C6FFEA, 0xFE28ED61, 0xEE7C3C73, 0x5D4A14D9, 0xE864B7E3, 0x42105D14, 0x203E13E0, 0x45EEE2B6, 0xA3AAABEA, 0xDB6C4F15, 0xFACB4FD0, 0xC742F442, 0xEF6ABBB5, 0x654F3B1D, 0x41CD2105, 0xD81E799E, 0x86854DC7, 0xE44B476A, 0x3D816250, 0xCF62A1F2, 0x5B8D2646, 0xFC8883A0, 0xC1C7B6A3, 0x7F1524C3, 0x69CB7492, 0x47848A0B, 0x5692B285, 0x095BBF00, 0xAD19489D, 0x1462B174, 0x23820E00, 0x58428D2A, 0x0C55F5EA, 0x1DADF43E, 0x233F7061, 0x3372F092, 0x8D937E41, 0xD65FECF1, 0x6C223BDB, 0x7CDE3759, 0xCBEE7460, 0x4085F2A7, 0xCE77326E, 0xA6078084, 0x19F8509E, 0xE8EFD855, 0x61D99735, 0xA969A7AA, 0xC50C06C2, 0x5A04ABFC, 0x800BCADC, 0x9E447A2E, 0xC3453484, 0xFDD56705, 0x0E1E9EC9, 0xDB73DBD3, 0x105588CD, 0x675FDA79, 0xE3674340, 0xC5C43465, 0x713E38D8, 0x3D28F89E, 0xF16DFF20, 0x153E21E7, 0x8FB03D4A, 0xE6E39F2B, 0xDB83ADF7 }, KS2 = { 0xE93D5A68, 0x948140F7, 0xF64C261C, 0x94692934, 0x411520F7, 0x7602D4F7, 0xBCF46B2E, 0xD4A20068, 0xD4082471, 0x3320F46A, 0x43B7D4B7, 0x500061AF, 0x1E39F62E, 0x97244546, 0x14214F74, 0xBF8B8840, 0x4D95FC1D, 0x96B591AF, 0x70F4DDD3, 0x66A02F45, 0xBFBC09EC, 0x03BD9785, 0x7FAC6DD0, 0x31CB8504, 0x96EB27B3, 0x55FD3941, 0xDA2547E6, 0xABCA0A9A, 0x28507825, 0x530429F4, 0x0A2C86DA, 0xE9B66DFB, 0x68DC1462, 0xD7486900, 0x680EC0A4, 0x27A18DEE, 0x4F3FFEA2, 0xE887AD8C, 0xB58CE006, 0x7AF4D6B6, 0xAACE1E7C, 0xD3375FEC, 0xCE78A399, 0x406B2A42, 0x20FE9E35, 0xD9F385B9, 0xEE39D7AB, 0x3B124E8B, 0x1DC9FAF7, 0x4B6D1856, 0x26A36631, 0xEAE397B2, 0x3A6EFA74, 0xDD5B4332, 0x6841E7F7, 0xCA7820FB, 0xFB0AF54E, 0xD8FEB397, 0x454056AC, 0xBA489527, 0x55533A3A, 0x20838D87, 0xFE6BA9B7, 0xD096954B, 0x55A867BC, 0xA1159A58, 0xCCA92963, 0x99E1DB33, 0xA62A4A56, 0x3F3125F9, 0x5EF47E1C, 0x9029317C, 0xFDF8E802, 0x04272F70, 0x80BB155C, 0x05282CE3, 0x95C11548, 0xE4C66D22, 0x48C1133F, 0xC70F86DC, 0x07F9C9EE, 0x41041F0F, 0x404779A4, 0x5D886E17, 0x325F51EB, 0xD59BC0D1, 0xF2BCC18F, 0x41113564, 0x257B7834, 0x602A9C60, 0xDFF8E8A3, 0x1F636C1B, 0x0E12B4C2, 0x02E1329E, 
0xAF664FD1, 0xCAD18115, 0x6B2395E0, 0x333E92E1, 0x3B240B62, 0xEEBEB922, 0x85B2A20E, 0xE6BA0D99, 0xDE720C8C, 0x2DA2F728, 0xD0127845, 0x95B794FD, 0x647D0862, 0xE7CCF5F0, 0x5449A36F, 0x877D48FA, 0xC39DFD27, 0xF33E8D1E, 0x0A476341, 0x992EFF74, 0x3A6F6EAB, 0xF4F8FD37, 0xA812DC60, 0xA1EBDDF8, 0x991BE14C, 0xDB6E6B0D, 0xC67B5510, 0x6D672C37, 0x2765D43B, 0xDCD0E804, 0xF1290DC7, 0xCC00FFA3, 0xB5390F92, 0x690FED0B, 0x667B9FFB, 0xCEDB7D9C, 0xA091CF0B, 0xD9155EA3, 0xBB132F88, 0x515BAD24, 0x7B9479BF, 0x763BD6EB, 0x37392EB3, 0xCC115979, 0x8026E297, 0xF42E312D, 0x6842ADA7, 0xC66A2B3B, 0x12754CCC, 0x782EF11C, 0x6A124237, 0xB79251E7, 0x06A1BBE6, 0x4BFB6350, 0x1A6B1018, 0x11CAEDFA, 0x3D25BDD8, 0xE2E1C3C9, 0x44421659, 0x0A121386, 0xD90CEC6E, 0xD5ABEA2A, 0x64AF674E, 0xDA86A85F, 0xBEBFE988, 0x64E4C3FE, 0x9DBC8057, 0xF0F7C086, 0x60787BF8, 0x6003604D, 0xD1FD8346, 0xF6381FB0, 0x7745AE04, 0xD736FCCC, 0x83426B33, 0xF01EAB71, 0xB0804187, 0x3C005E5F, 0x77A057BE, 0xBDE8AE24, 0x55464299, 0xBF582E61, 0x4E58F48F, 0xF2DDFDA2, 0xF474EF38, 0x8789BDC2, 0x5366F9C3, 0xC8B38E74, 0xB475F255, 0x46FCD9B9, 0x7AEB2661, 0x8B1DDF84, 0x846A0E79, 0x915F95E2, 0x466E598E, 0x20B45770, 0x8CD55591, 0xC902DE4C, 0xB90BACE1, 0xBB8205D0, 0x11A86248, 0x7574A99E, 0xB77F19B6, 0xE0A9DC09, 0x662D09A1, 0xC4324633, 0xE85A1F02, 0x09F0BE8C, 0x4A99A025, 0x1D6EFE10, 0x1AB93D1D, 0x0BA5A4DF, 0xA186F20F, 0x2868F169, 0xDCB7DA83, 0x573906FE, 0xA1E2CE9B, 0x4FCD7F52, 0x50115E01, 0xA70683FA, 0xA002B5C4, 0x0DE6D027, 0x9AF88C27, 0x773F8641, 0xC3604C06, 0x61A806B5, 0xF0177A28, 0xC0F586E0, 0x006058AA, 0x30DC7D62, 0x11E69ED7, 0x2338EA63, 0x53C2DD94, 0xC2C21634, 0xBBCBEE56, 0x90BCB6DE, 0xEBFC7DA1, 0xCE591D76, 0x6F05E409, 0x4B7C0188, 0x39720A3D, 0x7C927C24, 0x86E3725F, 0x724D9DB9, 0x1AC15BB4, 0xD39EB8FC, 0xED545578, 0x08FCA5B5, 0xD83D7CD3, 0x4DAD0FC4, 0x1E50EF5E, 0xB161E6F8, 0xA28514D9, 0x6C51133C, 0x6FD5C7E7, 0x56E14EC4, 0x362ABFCE, 0xDDC6C837, 0xD79A3234, 0x92638212, 0x670EFA8E, 0x406000E0 }, KS3 = { 0x3A39CE37, 0xD3FAF5CF, 0xABC27737, 
0x5AC52D1B, 0x5CB0679E, 0x4FA33742, 0xD3822740, 0x99BC9BBE, 0xD5118E9D, 0xBF0F7315, 0xD62D1C7E, 0xC700C47B, 0xB78C1B6B, 0x21A19045, 0xB26EB1BE, 0x6A366EB4, 0x5748AB2F, 0xBC946E79, 0xC6A376D2, 0x6549C2C8, 0x530FF8EE, 0x468DDE7D, 0xD5730A1D, 0x4CD04DC6, 0x2939BBDB, 0xA9BA4650, 0xAC9526E8, 0xBE5EE304, 0xA1FAD5F0, 0x6A2D519A, 0x63EF8CE2, 0x9A86EE22, 0xC089C2B8, 0x43242EF6, 0xA51E03AA, 0x9CF2D0A4, 0x83C061BA, 0x9BE96A4D, 0x8FE51550, 0xBA645BD6, 0x2826A2F9, 0xA73A3AE1, 0x4BA99586, 0xEF5562E9, 0xC72FEFD3, 0xF752F7DA, 0x3F046F69, 0x77FA0A59, 0x80E4A915, 0x87B08601, 0x9B09E6AD, 0x3B3EE593, 0xE990FD5A, 0x9E34D797, 0x2CF0B7D9, 0x022B8B51, 0x96D5AC3A, 0x017DA67D, 0xD1CF3ED6, 0x7C7D2D28, 0x1F9F25CF, 0xADF2B89B, 0x5AD6B472, 0x5A88F54C, 0xE029AC71, 0xE019A5E6, 0x47B0ACFD, 0xED93FA9B, 0xE8D3C48D, 0x283B57CC, 0xF8D56629, 0x79132E28, 0x785F0191, 0xED756055, 0xF7960E44, 0xE3D35E8C, 0x15056DD4, 0x88F46DBA, 0x03A16125, 0x0564F0BD, 0xC3EB9E15, 0x3C9057A2, 0x97271AEC, 0xA93A072A, 0x1B3F6D9B, 0x1E6321F5, 0xF59C66FB, 0x26DCF319, 0x7533D928, 0xB155FDF5, 0x03563482, 0x8ABA3CBB, 0x28517711, 0xC20AD9F8, 0xABCC5167, 0xCCAD925F, 0x4DE81751, 0x3830DC8E, 0x379D5862, 0x9320F991, 0xEA7A90C2, 0xFB3E7BCE, 0x5121CE64, 0x774FBE32, 0xA8B6E37E, 0xC3293D46, 0x48DE5369, 0x6413E680, 0xA2AE0810, 0xDD6DB224, 0x69852DFD, 0x09072166, 0xB39A460A, 0x6445C0DD, 0x586CDECF, 0x1C20C8AE, 0x5BBEF7DD, 0x1B588D40, 0xCCD2017F, 0x6BB4E3BB, 0xDDA26A7E, 0x3A59FF45, 0x3E350A44, 0xBCB4CDD5, 0x72EACEA8, 0xFA6484BB, 0x8D6612AE, 0xBF3C6F47, 0xD29BE463, 0x542F5D9E, 0xAEC2771B, 0xF64E6370, 0x740E0D8D, 0xE75B1357, 0xF8721671, 0xAF537D5D, 0x4040CB08, 0x4EB4E2CC, 0x34D2466A, 0x0115AF84, 0xE1B00428, 0x95983A1D, 0x06B89FB4, 0xCE6EA048, 0x6F3F3B82, 0x3520AB82, 0x011A1D4B, 0x277227F8, 0x611560B1, 0xE7933FDC, 0xBB3A792B, 0x344525BD, 0xA08839E1, 0x51CE794B, 0x2F32C9B7, 0xA01FBAC9, 0xE01CC87E, 0xBCC7D1F6, 0xCF0111C3, 0xA1E8AAC7, 0x1A908749, 0xD44FBD9A, 0xD0DADECB, 0xD50ADA38, 0x0339C32A, 0xC6913667, 0x8DF9317C, 0xE0B12B4F, 0xF79E59B7, 
0x43F5BB3A, 0xF2D519FF, 0x27D9459C, 0xBF97222C, 0x15E6FC2A, 0x0F91FC71, 0x9B941525, 0xFAE59361, 0xCEB69CEB, 0xC2A86459, 0x12BAA8D1, 0xB6C1075E, 0xE3056A0C, 0x10D25065, 0xCB03A442, 0xE0EC6E0E, 0x1698DB3B, 0x4C98A0BE, 0x3278E964, 0x9F1F9532, 0xE0D392DF, 0xD3A0342B, 0x8971F21E, 0x1B0A7441, 0x4BA3348C, 0xC5BE7120, 0xC37632D8, 0xDF359F8D, 0x9B992F2E, 0xE60B6F47, 0x0FE3F11D, 0xE54CDA54, 0x1EDAD891, 0xCE6279CF, 0xCD3E7E6F, 0x1618B166, 0xFD2C1D05, 0x848FD2C5, 0xF6FB2299, 0xF523F357, 0xA6327623, 0x93A83531, 0x56CCCD02, 0xACF08162, 0x5A75EBB5, 0x6E163697, 0x88D273CC, 0xDE966292, 0x81B949D0, 0x4C50901B, 0x71C65614, 0xE6C6C7BD, 0x327A140A, 0x45E1D006, 0xC3F27B9A, 0xC9AA53FD, 0x62A80F00, 0xBB25BFE2, 0x35BDD2F6, 0x71126905, 0xB2040222, 0xB6CBCF7C, 0xCD769C2B, 0x53113EC0, 0x1640E3D3, 0x38ABBD60, 0x2547ADF0, 0xBA38209C, 0xF746CE76, 0x77AFA1C5, 0x20756060, 0x85CBFE4E, 0x8AE88DD8, 0x7AAAF9B0, 0x4CF9AA7E, 0x1948C25C, 0x02FB8A8C, 0x01C36AE4, 0xD6EBE1F9, 0x90D4F869, 0xA65CDEA0, 0x3F09252D, 0xC208E69F, 0xB74E6132, 0xCE77E25B, 0x578FDFE3, 0x3AC372E6 }; //==================================== // Useful constants //==================================== private static readonly int ROUNDS = 16; private const int BLOCK_SIZE = 8; // bytes = 64 bits private static readonly int SBOX_SK = 256; private static readonly int P_SZ = ROUNDS+2; private readonly uint[] S0, S1, S2, S3; // the s-boxes private readonly uint[] P; // the p-array private bool encrypting; private byte[] workingKey; public BlowfishEngine() { S0 = new uint[SBOX_SK]; S1 = new uint[SBOX_SK]; S2 = new uint[SBOX_SK]; S3 = new uint[SBOX_SK]; P = new uint[P_SZ]; } /** * initialise a Blowfish cipher. * * @param forEncryption whether or not we are for encryption. * @param parameters the parameters required to set up the cipher. * @exception ArgumentException if the parameters argument is * inappropriate. 
*/
public void Init(
    bool forEncryption,
    ICipherParameters parameters)
{
    // Blowfish takes only a raw KeyParameter; any other parameter type is a caller error.
    if (!(parameters is KeyParameter))
        throw new ArgumentException("invalid parameter passed to Blowfish init - " + parameters.GetType().ToString());

    this.encrypting = forEncryption;
    this.workingKey = ((KeyParameter)parameters).GetKey();

    // Run the (comparatively expensive) key schedule once, up front.
    SetKey(this.workingKey);
}

public string AlgorithmName
{
    get { return "Blowfish"; }
}

public bool IsPartialBlockOkay
{
    get { return false; }
}

/**
 * Process exactly one 8-byte block in the direction fixed by Init.
 *
 * @param input the input buffer.
 * @param inOff offset of the block within input.
 * @param output the output buffer.
 * @param outOff offset of the block within output.
 * @return the number of bytes processed (always BLOCK_SIZE).
 * @exception InvalidOperationException if Init has not been called.
 */
public int ProcessBlock(
    byte[] input,
    int inOff,
    byte[] output,
    int outOff)
{
    if (workingKey == null)
        throw new InvalidOperationException("Blowfish not initialised");

    Check.DataLength(input, inOff, BLOCK_SIZE, "input buffer too short");
    Check.OutputLength(output, outOff, BLOCK_SIZE, "output buffer too short");

    if (encrypting)
    {
        EncryptBlock(input, inOff, output, outOff);
    }
    else
    {
        DecryptBlock(input, inOff, output, outOff);
    }

    return BLOCK_SIZE;
}

// Blowfish in ECB mode keeps no inter-block state, so there is nothing to reset.
public void Reset()
{
}

public int GetBlockSize()
{
    return BLOCK_SIZE;
}

//==================================
// Private Implementation
//==================================

// The Blowfish F-function: the four bytes of x index the four S-boxes,
// combined as ((S0 + S1) ^ S2) + S3 (additions mod 2^32).
private uint F(uint x)
{
    return (((S0[x >> 24] + S1[(x >> 16) & 0xff]) ^ S2[(x >> 8) & 0xff]) + S3[x & 0xff]);
}

/**
 * apply the encryption cycle to each value pair in the table.
 * Each successive ciphertext pair overwrites the next two table slots;
 * used by SetKey to derive the final P-array and S-boxes.
 */
private void ProcessTable(
    uint xl,
    uint xr,
    uint[] table)
{
    int size = table.Length;

    for (int s = 0; s < size; s += 2)
    {
        // One full Blowfish encryption of (xl, xr) with the current subkeys.
        xl ^= P[0];

        for (int i = 1; i < ROUNDS; i += 2)
        {
            xr ^= F(xl) ^ P[i];
            xl ^= F(xr) ^ P[i + 1];
        }

        xr ^= P[ROUNDS + 1];

        table[s] = xr;
        table[s + 1] = xl;

        xr = xl;        // end of cycle swap
        xl = table[s];
    }
}

// Expands the user key into the P-array and the four S-boxes.
private void SetKey(byte[] key)
{
    /*
     * - comments are from _Applied Crypto_, Schneier, p338
     * please be careful comparing the two, AC numbers the
     * arrays from 1, the enclosed code from 0.
     *
     * (1)
     * Initialise the S-boxes and the P-array, with a fixed string
     * This string contains the hexadecimal digits of pi (3.141...)
     */
    Array.Copy(KS0, 0, S0, 0, SBOX_SK);
    Array.Copy(KS1, 0, S1, 0, SBOX_SK);
    Array.Copy(KS2, 0, S2, 0, SBOX_SK);
    Array.Copy(KS3, 0, S3, 0, SBOX_SK);
    Array.Copy(KP, 0, P, 0, P_SZ);

    /*
     * (2)
     * Now, XOR P[0] with the first 32 bits of the key, XOR P[1] with the
     * second 32-bits of the key, and so on for all bits of the key
     * (up to P[17]). Repeatedly cycle through the key bits until the
     * entire P-array has been XOR-ed with the key bits
     */
    int keyLength = key.Length;
    int keyIndex = 0;

    for (int i=0; i < P_SZ; i++)
    {
        // Get the 32 bits of the key, in 4 * 8 bit chunks
        uint data = 0x0000000;
        for (int j=0; j < 4; j++)
        {
            // create a 32 bit block
            data = (data << 8) | (uint)key[keyIndex++];

            // wrap when we get to the end of the key
            if (keyIndex >= keyLength)
            {
                keyIndex = 0;
            }
        }
        // XOR the newly created 32 bit chunk onto the P-array
        P[i] ^= data;
    }

    /*
     * (3)
     * Encrypt the all-zero string with the Blowfish algorithm, using
     * the subkeys described in (1) and (2)
     *
     * (4)
     * Replace P1 and P2 with the output of step (3)
     *
     * (5)
     * Encrypt the output of step(3) using the Blowfish algorithm,
     * with the modified subkeys.
     *
     * (6)
     * Replace P3 and P4 with the output of step (5)
     *
     * (7)
     * Continue the process, replacing all elements of the P-array
     * and then all four S-boxes in order, with the output of the
     * continuously changing Blowfish algorithm
     */
    ProcessTable(0, 0, P);
    ProcessTable(P[P_SZ - 2], P[P_SZ - 1], S0);
    ProcessTable(S0[SBOX_SK - 2], S0[SBOX_SK - 1], S1);
    ProcessTable(S1[SBOX_SK - 2], S1[SBOX_SK - 1], S2);
    ProcessTable(S2[SBOX_SK - 2], S2[SBOX_SK - 1], S3);
}

/**
 * Encrypt the given input starting at the given offset and place
 * the result in the provided buffer starting at the given offset.
 * The input will be an exact multiple of our blocksize.
*/
private void EncryptBlock(
    byte[] src,
    int srcIndex,
    byte[] dst,
    int dstIndex)
{
    // Big-endian load of the 64-bit block as two 32-bit halves.
    uint xl = Pack.BE_To_UInt32(src, srcIndex);
    uint xr = Pack.BE_To_UInt32(src, srcIndex+4);

    xl ^= P[0];

    // 16 Feistel rounds, two per iteration, applying subkeys P[1]..P[16].
    for (int i = 1; i < ROUNDS; i += 2)
    {
        xr ^= F(xl) ^ P[i];
        xl ^= F(xr) ^ P[i + 1];
    }

    xr ^= P[ROUNDS + 1];

    // The halves are stored swapped (xr first) — this is the final
    // half-swap of the Feistel network.
    Pack.UInt32_To_BE(xr, dst, dstIndex);
    Pack.UInt32_To_BE(xl, dst, dstIndex + 4);
}

/**
 * Decrypt the given input starting at the given offset and place
 * the result in the provided buffer starting at the given offset.
 * The input will be an exact multiple of our blocksize.
 */
private void DecryptBlock(
    byte[] src,
    int srcIndex,
    byte[] dst,
    int dstIndex)
{
    uint xl = Pack.BE_To_UInt32(src, srcIndex);
    uint xr = Pack.BE_To_UInt32(src, srcIndex + 4);

    // Same network as EncryptBlock, but with the subkeys applied in
    // reverse order: P[17], then P[16]..P[1] pairwise, finally P[0].
    xl ^= P[ROUNDS + 1];

    for (int i = ROUNDS; i > 0 ; i -= 2)
    {
        xr ^= F(xl) ^ P[i];
        xl ^= F(xr) ^ P[i - 1];
    }

    xr ^= P[0];

    Pack.UInt32_To_BE(xr, dst, dstIndex);
    Pack.UInt32_To_BE(xl, dst, dstIndex + 4);
}
}
}
// Copyright (c) Microsoft Corporation. All rights reserved.
// Licensed under the MIT License. See License.txt in the project root for
// license information.
//
// Code generated by Microsoft (R) AutoRest Code Generator 0.14.0.0
// Changes may cause incorrect behavior and will be lost if the code is
// regenerated.

namespace Fixtures.AcceptanceTestsBodyDateTime
{
    using System;
    using System.Collections;
    using System.Collections.Generic;
    using System.Threading;
    using System.Threading.Tasks;
    using Microsoft.Rest;
    using Models;

    /// <summary>
    /// Convenience wrappers over the <see cref="IDatetime" /> operations.
    /// The synchronous variants block on their Async counterparts
    /// (generated pattern); prefer the Async methods in async code.
    /// </summary>
    public static partial class DatetimeExtensions
    {
        /// <summary>
        /// Get null datetime value
        /// </summary>
        /// <param name='operations'>
        /// The operations group for this extension method.
        /// </param>
        public static DateTime? GetNull(this IDatetime operations)
        {
            return Task.Factory.StartNew(s => ((IDatetime)s).GetNullAsync(), operations, CancellationToken.None, TaskCreationOptions.None, TaskScheduler.Default).Unwrap().GetAwaiter().GetResult();
        }

        /// <summary>
        /// Get null datetime value
        /// </summary>
        /// <param name='operations'>
        /// The operations group for this extension method.
        /// </param>
        /// <param name='cancellationToken'>
        /// The cancellation token.
        /// </param>
        public static async Task<DateTime?> GetNullAsync( this IDatetime operations, CancellationToken cancellationToken = default(CancellationToken))
        {
            var _result = await operations.GetNullWithHttpMessagesAsync(null, cancellationToken).ConfigureAwait(false);
            return _result.Body;
        }

        /// <summary>
        /// Get invalid datetime value
        /// </summary>
        /// <param name='operations'>
        /// The operations group for this extension method.
        /// </param>
        public static DateTime? GetInvalid(this IDatetime operations)
        {
            return Task.Factory.StartNew(s => ((IDatetime)s).GetInvalidAsync(), operations, CancellationToken.None, TaskCreationOptions.None, TaskScheduler.Default).Unwrap().GetAwaiter().GetResult();
        }

        /// <summary>
        /// Get invalid datetime value
        /// </summary>
        /// <param name='operations'>
        /// The operations group for this extension method.
        /// </param>
        /// <param name='cancellationToken'>
        /// The cancellation token.
        /// </param>
        public static async Task<DateTime?> GetInvalidAsync( this IDatetime operations, CancellationToken cancellationToken = default(CancellationToken))
        {
            var _result = await operations.GetInvalidWithHttpMessagesAsync(null, cancellationToken).ConfigureAwait(false);
            return _result.Body;
        }

        /// <summary>
        /// Get overflow datetime value
        /// </summary>
        /// <param name='operations'>
        /// The operations group for this extension method.
        /// </param>
        public static DateTime? GetOverflow(this IDatetime operations)
        {
            return Task.Factory.StartNew(s => ((IDatetime)s).GetOverflowAsync(), operations, CancellationToken.None, TaskCreationOptions.None, TaskScheduler.Default).Unwrap().GetAwaiter().GetResult();
        }

        /// <summary>
        /// Get overflow datetime value
        /// </summary>
        /// <param name='operations'>
        /// The operations group for this extension method.
        /// </param>
        /// <param name='cancellationToken'>
        /// The cancellation token.
        /// </param>
        public static async Task<DateTime?> GetOverflowAsync( this IDatetime operations, CancellationToken cancellationToken = default(CancellationToken))
        {
            var _result = await operations.GetOverflowWithHttpMessagesAsync(null, cancellationToken).ConfigureAwait(false);
            return _result.Body;
        }

        /// <summary>
        /// Get underflow datetime value
        /// </summary>
        /// <param name='operations'>
        /// The operations group for this extension method.
        /// </param>
        public static DateTime? GetUnderflow(this IDatetime operations)
        {
            return Task.Factory.StartNew(s => ((IDatetime)s).GetUnderflowAsync(), operations, CancellationToken.None, TaskCreationOptions.None, TaskScheduler.Default).Unwrap().GetAwaiter().GetResult();
        }

        /// <summary>
        /// Get underflow datetime value
        /// </summary>
        /// <param name='operations'>
        /// The operations group for this extension method.
        /// </param>
        /// <param name='cancellationToken'>
        /// The cancellation token.
        /// </param>
        public static async Task<DateTime?> GetUnderflowAsync( this IDatetime operations, CancellationToken cancellationToken = default(CancellationToken))
        {
            var _result = await operations.GetUnderflowWithHttpMessagesAsync(null, cancellationToken).ConfigureAwait(false);
            return _result.Body;
        }

        /// <summary>
        /// Put max datetime value 9999-12-31T23:59:59.9999999Z
        /// </summary>
        /// <param name='operations'>
        /// The operations group for this extension method.
        /// </param>
        /// <param name='datetimeBody'>
        /// The datetime value to send.
        /// </param>
        public static void PutUtcMaxDateTime(this IDatetime operations, DateTime? datetimeBody)
        {
            Task.Factory.StartNew(s => ((IDatetime)s).PutUtcMaxDateTimeAsync(datetimeBody), operations, CancellationToken.None, TaskCreationOptions.None, TaskScheduler.Default).Unwrap().GetAwaiter().GetResult();
        }

        /// <summary>
        /// Put max datetime value 9999-12-31T23:59:59.9999999Z
        /// </summary>
        /// <param name='operations'>
        /// The operations group for this extension method.
        /// </param>
        /// <param name='datetimeBody'>
        /// The datetime value to send.
        /// </param>
        /// <param name='cancellationToken'>
        /// The cancellation token.
        /// </param>
        public static async Task PutUtcMaxDateTimeAsync( this IDatetime operations, DateTime? datetimeBody, CancellationToken cancellationToken = default(CancellationToken))
        {
            await operations.PutUtcMaxDateTimeWithHttpMessagesAsync(datetimeBody, null, cancellationToken).ConfigureAwait(false);
        }

        /// <summary>
        /// Get max datetime value 9999-12-31t23:59:59.9999999z
        /// </summary>
        /// <param name='operations'>
        /// The operations group for this extension method.
        /// </param>
        public static DateTime? GetUtcLowercaseMaxDateTime(this IDatetime operations)
        {
            return Task.Factory.StartNew(s => ((IDatetime)s).GetUtcLowercaseMaxDateTimeAsync(), operations, CancellationToken.None, TaskCreationOptions.None, TaskScheduler.Default).Unwrap().GetAwaiter().GetResult();
        }

        /// <summary>
        /// Get max datetime value 9999-12-31t23:59:59.9999999z
        /// </summary>
        /// <param name='operations'>
        /// The operations group for this extension method.
        /// </param>
        /// <param name='cancellationToken'>
        /// The cancellation token.
        /// </param>
        public static async Task<DateTime?> GetUtcLowercaseMaxDateTimeAsync( this IDatetime operations, CancellationToken cancellationToken = default(CancellationToken))
        {
            var _result = await operations.GetUtcLowercaseMaxDateTimeWithHttpMessagesAsync(null, cancellationToken).ConfigureAwait(false);
            return _result.Body;
        }

        /// <summary>
        /// Get max datetime value 9999-12-31T23:59:59.9999999Z
        /// </summary>
        /// <param name='operations'>
        /// The operations group for this extension method.
        /// </param>
        public static DateTime? GetUtcUppercaseMaxDateTime(this IDatetime operations)
        {
            return Task.Factory.StartNew(s => ((IDatetime)s).GetUtcUppercaseMaxDateTimeAsync(), operations, CancellationToken.None, TaskCreationOptions.None, TaskScheduler.Default).Unwrap().GetAwaiter().GetResult();
        }

        /// <summary>
        /// Get max datetime value 9999-12-31T23:59:59.9999999Z
        /// </summary>
        /// <param name='operations'>
        /// The operations group for this extension method.
        /// </param>
        /// <param name='cancellationToken'>
        /// The cancellation token.
        /// </param>
        public static async Task<DateTime?> GetUtcUppercaseMaxDateTimeAsync( this IDatetime operations, CancellationToken cancellationToken = default(CancellationToken))
        {
            var _result = await operations.GetUtcUppercaseMaxDateTimeWithHttpMessagesAsync(null, cancellationToken).ConfigureAwait(false);
            return _result.Body;
        }

        /// <summary>
        /// Put max datetime value with positive numoffset
        /// 9999-12-31t23:59:59.9999999+14:00
        /// </summary>
        /// <param name='operations'>
        /// The operations group for this extension method.
        /// </param>
        /// <param name='datetimeBody'>
        /// The datetime value to send.
        /// </param>
        public static void PutLocalPositiveOffsetMaxDateTime(this IDatetime operations, DateTime? datetimeBody)
        {
            Task.Factory.StartNew(s => ((IDatetime)s).PutLocalPositiveOffsetMaxDateTimeAsync(datetimeBody), operations, CancellationToken.None, TaskCreationOptions.None, TaskScheduler.Default).Unwrap().GetAwaiter().GetResult();
        }

        /// <summary>
        /// Put max datetime value with positive numoffset
        /// 9999-12-31t23:59:59.9999999+14:00
        /// </summary>
        /// <param name='operations'>
        /// The operations group for this extension method.
        /// </param>
        /// <param name='datetimeBody'>
        /// The datetime value to send.
        /// </param>
        /// <param name='cancellationToken'>
        /// The cancellation token.
        /// </param>
        public static async Task PutLocalPositiveOffsetMaxDateTimeAsync( this IDatetime operations, DateTime? datetimeBody, CancellationToken cancellationToken = default(CancellationToken))
        {
            await operations.PutLocalPositiveOffsetMaxDateTimeWithHttpMessagesAsync(datetimeBody, null, cancellationToken).ConfigureAwait(false);
        }

        /// <summary>
        /// Get max datetime value with positive num offset
        /// 9999-12-31t23:59:59.9999999+14:00
        /// </summary>
        /// <param name='operations'>
        /// The operations group for this extension method.
        /// </param>
        public static DateTime? GetLocalPositiveOffsetLowercaseMaxDateTime(this IDatetime operations)
        {
            return Task.Factory.StartNew(s => ((IDatetime)s).GetLocalPositiveOffsetLowercaseMaxDateTimeAsync(), operations, CancellationToken.None, TaskCreationOptions.None, TaskScheduler.Default).Unwrap().GetAwaiter().GetResult();
        }

        /// <summary>
        /// Get max datetime value with positive num offset
        /// 9999-12-31t23:59:59.9999999+14:00
        /// </summary>
        /// <param name='operations'>
        /// The operations group for this extension method.
        /// </param>
        /// <param name='cancellationToken'>
        /// The cancellation token.
        /// </param>
        public static async Task<DateTime?> GetLocalPositiveOffsetLowercaseMaxDateTimeAsync( this IDatetime operations, CancellationToken cancellationToken = default(CancellationToken))
        {
            var _result = await operations.GetLocalPositiveOffsetLowercaseMaxDateTimeWithHttpMessagesAsync(null, cancellationToken).ConfigureAwait(false);
            return _result.Body;
        }

        /// <summary>
        /// Get max datetime value with positive num offset
        /// 9999-12-31T23:59:59.9999999+14:00
        /// </summary>
        /// <param name='operations'>
        /// The operations group for this extension method.
        /// </param>
        public static DateTime? GetLocalPositiveOffsetUppercaseMaxDateTime(this IDatetime operations)
        {
            return Task.Factory.StartNew(s => ((IDatetime)s).GetLocalPositiveOffsetUppercaseMaxDateTimeAsync(), operations, CancellationToken.None, TaskCreationOptions.None, TaskScheduler.Default).Unwrap().GetAwaiter().GetResult();
        }

        /// <summary>
        /// Get max datetime value with positive num offset
        /// 9999-12-31T23:59:59.9999999+14:00
        /// </summary>
        /// <param name='operations'>
        /// The operations group for this extension method.
        /// </param>
        /// <param name='cancellationToken'>
        /// The cancellation token.
        /// </param>
        public static async Task<DateTime?> GetLocalPositiveOffsetUppercaseMaxDateTimeAsync( this IDatetime operations, CancellationToken cancellationToken = default(CancellationToken))
        {
            var _result = await operations.GetLocalPositiveOffsetUppercaseMaxDateTimeWithHttpMessagesAsync(null, cancellationToken).ConfigureAwait(false);
            return _result.Body;
        }

        /// <summary>
        /// Put max datetime value with negative numoffset
        /// 9999-12-31t23:59:59.9999999-14:00
        /// </summary>
        /// <param name='operations'>
        /// The operations group for this extension method.
        /// </param>
        /// <param name='datetimeBody'>
        /// The datetime value to send.
        /// </param>
        public static void PutLocalNegativeOffsetMaxDateTime(this IDatetime operations, DateTime? datetimeBody)
        {
            Task.Factory.StartNew(s => ((IDatetime)s).PutLocalNegativeOffsetMaxDateTimeAsync(datetimeBody), operations, CancellationToken.None, TaskCreationOptions.None, TaskScheduler.Default).Unwrap().GetAwaiter().GetResult();
        }

        /// <summary>
        /// Put max datetime value with negative numoffset
        /// 9999-12-31t23:59:59.9999999-14:00
        /// </summary>
        /// <param name='operations'>
        /// The operations group for this extension method.
        /// </param>
        /// <param name='datetimeBody'>
        /// The datetime value to send.
        /// </param>
        /// <param name='cancellationToken'>
        /// The cancellation token.
        /// </param>
        public static async Task PutLocalNegativeOffsetMaxDateTimeAsync( this IDatetime operations, DateTime? datetimeBody, CancellationToken cancellationToken = default(CancellationToken))
        {
            await operations.PutLocalNegativeOffsetMaxDateTimeWithHttpMessagesAsync(datetimeBody, null, cancellationToken).ConfigureAwait(false);
        }

        /// <summary>
        /// Get max datetime value with negative num offset
        /// 9999-12-31T23:59:59.9999999-14:00
        /// </summary>
        /// <param name='operations'>
        /// The operations group for this extension method.
        /// </param>
        public static DateTime? GetLocalNegativeOffsetUppercaseMaxDateTime(this IDatetime operations)
        {
            return Task.Factory.StartNew(s => ((IDatetime)s).GetLocalNegativeOffsetUppercaseMaxDateTimeAsync(), operations, CancellationToken.None, TaskCreationOptions.None, TaskScheduler.Default).Unwrap().GetAwaiter().GetResult();
        }

        /// <summary>
        /// Get max datetime value with negative num offset
        /// 9999-12-31T23:59:59.9999999-14:00
        /// </summary>
        /// <param name='operations'>
        /// The operations group for this extension method.
        /// </param>
        /// <param name='cancellationToken'>
        /// The cancellation token.
        /// </param>
        public static async Task<DateTime?> GetLocalNegativeOffsetUppercaseMaxDateTimeAsync( this IDatetime operations, CancellationToken cancellationToken = default(CancellationToken))
        {
            var _result = await operations.GetLocalNegativeOffsetUppercaseMaxDateTimeWithHttpMessagesAsync(null, cancellationToken).ConfigureAwait(false);
            return _result.Body;
        }

        /// <summary>
        /// Get max datetime value with negative num offset
        /// 9999-12-31t23:59:59.9999999-14:00
        /// </summary>
        /// <param name='operations'>
        /// The operations group for this extension method.
        /// </param>
        public static DateTime? GetLocalNegativeOffsetLowercaseMaxDateTime(this IDatetime operations)
        {
            return Task.Factory.StartNew(s => ((IDatetime)s).GetLocalNegativeOffsetLowercaseMaxDateTimeAsync(), operations, CancellationToken.None, TaskCreationOptions.None, TaskScheduler.Default).Unwrap().GetAwaiter().GetResult();
        }

        /// <summary>
        /// Get max datetime value with negative num offset
        /// 9999-12-31t23:59:59.9999999-14:00
        /// </summary>
        /// <param name='operations'>
        /// The operations group for this extension method.
        /// </param>
        /// <param name='cancellationToken'>
        /// The cancellation token.
/// </param> public static async Task<DateTime?> GetLocalNegativeOffsetLowercaseMaxDateTimeAsync( this IDatetime operations, CancellationToken cancellationToken = default(CancellationToken)) { var _result = await operations.GetLocalNegativeOffsetLowercaseMaxDateTimeWithHttpMessagesAsync(null, cancellationToken).ConfigureAwait(false); return _result.Body; } /// <summary> /// Put min datetime value 0001-01-01T00:00:00Z /// </summary> /// <param name='operations'> /// The operations group for this extension method. /// </param> /// <param name='datetimeBody'> /// </param> public static void PutUtcMinDateTime(this IDatetime operations, DateTime? datetimeBody) { Task.Factory.StartNew(s => ((IDatetime)s).PutUtcMinDateTimeAsync(datetimeBody), operations, CancellationToken.None, TaskCreationOptions.None, TaskScheduler.Default).Unwrap().GetAwaiter().GetResult(); } /// <summary> /// Put min datetime value 0001-01-01T00:00:00Z /// </summary> /// <param name='operations'> /// The operations group for this extension method. /// </param> /// <param name='datetimeBody'> /// </param> /// <param name='cancellationToken'> /// The cancellation token. /// </param> public static async Task PutUtcMinDateTimeAsync( this IDatetime operations, DateTime? datetimeBody, CancellationToken cancellationToken = default(CancellationToken)) { await operations.PutUtcMinDateTimeWithHttpMessagesAsync(datetimeBody, null, cancellationToken).ConfigureAwait(false); } /// <summary> /// Get min datetime value 0001-01-01T00:00:00Z /// </summary> /// <param name='operations'> /// The operations group for this extension method. /// </param> public static DateTime? 
GetUtcMinDateTime(this IDatetime operations) { return Task.Factory.StartNew(s => ((IDatetime)s).GetUtcMinDateTimeAsync(), operations, CancellationToken.None, TaskCreationOptions.None, TaskScheduler.Default).Unwrap().GetAwaiter().GetResult(); } /// <summary> /// Get min datetime value 0001-01-01T00:00:00Z /// </summary> /// <param name='operations'> /// The operations group for this extension method. /// </param> /// <param name='cancellationToken'> /// The cancellation token. /// </param> public static async Task<DateTime?> GetUtcMinDateTimeAsync( this IDatetime operations, CancellationToken cancellationToken = default(CancellationToken)) { var _result = await operations.GetUtcMinDateTimeWithHttpMessagesAsync(null, cancellationToken).ConfigureAwait(false); return _result.Body; } /// <summary> /// Put min datetime value 0001-01-01T00:00:00+14:00 /// </summary> /// <param name='operations'> /// The operations group for this extension method. /// </param> /// <param name='datetimeBody'> /// </param> public static void PutLocalPositiveOffsetMinDateTime(this IDatetime operations, DateTime? datetimeBody) { Task.Factory.StartNew(s => ((IDatetime)s).PutLocalPositiveOffsetMinDateTimeAsync(datetimeBody), operations, CancellationToken.None, TaskCreationOptions.None, TaskScheduler.Default).Unwrap().GetAwaiter().GetResult(); } /// <summary> /// Put min datetime value 0001-01-01T00:00:00+14:00 /// </summary> /// <param name='operations'> /// The operations group for this extension method. /// </param> /// <param name='datetimeBody'> /// </param> /// <param name='cancellationToken'> /// The cancellation token. /// </param> public static async Task PutLocalPositiveOffsetMinDateTimeAsync( this IDatetime operations, DateTime? 
datetimeBody, CancellationToken cancellationToken = default(CancellationToken)) { await operations.PutLocalPositiveOffsetMinDateTimeWithHttpMessagesAsync(datetimeBody, null, cancellationToken).ConfigureAwait(false); } /// <summary> /// Get min datetime value 0001-01-01T00:00:00+14:00 /// </summary> /// <param name='operations'> /// The operations group for this extension method. /// </param> public static DateTime? GetLocalPositiveOffsetMinDateTime(this IDatetime operations) { return Task.Factory.StartNew(s => ((IDatetime)s).GetLocalPositiveOffsetMinDateTimeAsync(), operations, CancellationToken.None, TaskCreationOptions.None, TaskScheduler.Default).Unwrap().GetAwaiter().GetResult(); } /// <summary> /// Get min datetime value 0001-01-01T00:00:00+14:00 /// </summary> /// <param name='operations'> /// The operations group for this extension method. /// </param> /// <param name='cancellationToken'> /// The cancellation token. /// </param> public static async Task<DateTime?> GetLocalPositiveOffsetMinDateTimeAsync( this IDatetime operations, CancellationToken cancellationToken = default(CancellationToken)) { var _result = await operations.GetLocalPositiveOffsetMinDateTimeWithHttpMessagesAsync(null, cancellationToken).ConfigureAwait(false); return _result.Body; } /// <summary> /// Put min datetime value 0001-01-01T00:00:00-14:00 /// </summary> /// <param name='operations'> /// The operations group for this extension method. /// </param> /// <param name='datetimeBody'> /// </param> public static void PutLocalNegativeOffsetMinDateTime(this IDatetime operations, DateTime? datetimeBody) { Task.Factory.StartNew(s => ((IDatetime)s).PutLocalNegativeOffsetMinDateTimeAsync(datetimeBody), operations, CancellationToken.None, TaskCreationOptions.None, TaskScheduler.Default).Unwrap().GetAwaiter().GetResult(); } /// <summary> /// Put min datetime value 0001-01-01T00:00:00-14:00 /// </summary> /// <param name='operations'> /// The operations group for this extension method. 
/// </param> /// <param name='datetimeBody'> /// </param> /// <param name='cancellationToken'> /// The cancellation token. /// </param> public static async Task PutLocalNegativeOffsetMinDateTimeAsync( this IDatetime operations, DateTime? datetimeBody, CancellationToken cancellationToken = default(CancellationToken)) { await operations.PutLocalNegativeOffsetMinDateTimeWithHttpMessagesAsync(datetimeBody, null, cancellationToken).ConfigureAwait(false); } /// <summary> /// Get min datetime value 0001-01-01T00:00:00-14:00 /// </summary> /// <param name='operations'> /// The operations group for this extension method. /// </param> public static DateTime? GetLocalNegativeOffsetMinDateTime(this IDatetime operations) { return Task.Factory.StartNew(s => ((IDatetime)s).GetLocalNegativeOffsetMinDateTimeAsync(), operations, CancellationToken.None, TaskCreationOptions.None, TaskScheduler.Default).Unwrap().GetAwaiter().GetResult(); } /// <summary> /// Get min datetime value 0001-01-01T00:00:00-14:00 /// </summary> /// <param name='operations'> /// The operations group for this extension method. /// </param> /// <param name='cancellationToken'> /// The cancellation token. /// </param> public static async Task<DateTime?> GetLocalNegativeOffsetMinDateTimeAsync( this IDatetime operations, CancellationToken cancellationToken = default(CancellationToken)) { var _result = await operations.GetLocalNegativeOffsetMinDateTimeWithHttpMessagesAsync(null, cancellationToken).ConfigureAwait(false); return _result.Body; } } }
namespace PTWisej
{
    // Designer-generated half of the ResourceEdit partial class (Wisej UI).
    partial class ResourceEdit
    {
        /// <summary>
        /// Required designer variable.
        /// </summary>
        private System.ComponentModel.IContainer components = null;

        /// <summary>
        /// Clean up any resources being used.
        /// </summary>
        /// <param name="disposing">true if managed resources should be disposed; otherwise, false.</param>
        protected override void Dispose(bool disposing)
        {
            if (disposing && (components != null))
            {
                components.Dispose();
            }
            base.Dispose(disposing);
        }

        #region Component Designer generated code

        /// <summary>
        /// Required method for Designer support - do not modify
        /// the contents of this method with the code editor.
        /// </summary>
        private void InitializeComponent()
        {
            this.components = new System.ComponentModel.Container();
            Wisej.Web.Label FirstNameLabel;
            Wisej.Web.Label IdLabel;
            Wisej.Web.Label LastNameLabel;
            this.CloseButton = new Wisej.Web.Button();
            this.ApplyButton = new Wisej.Web.Button();
            this.Cancel_Button = new Wisej.Web.Button();
            this.OKButton = new Wisej.Web.Button();
            this.GroupBox1 = new Wisej.Web.GroupBox();
            this.AssignmentsDataGridView = new Wisej.Web.DataGridView();
            this.ProjectId = new Wisej.Web.DataGridViewTextBoxColumn();
            this.ProjectName = new Wisej.Web.DataGridViewLinkColumn();
            this.Assigned = new Wisej.Web.DataGridViewTextBoxColumn();
            this.Role = new Wisej.Web.DataGridViewComboBoxColumn();
            this.RoleListBindingSource = new Wisej.Web.BindingSource(this.components);
            this.AssignmentsBindingSource = new Wisej.Web.BindingSource(this.components);
            this.ResourceBindingSource = new Wisej.Web.BindingSource(this.components);
            this.UnassignButton = new Wisej.Web.Button();
            this.AssignButton = new Wisej.Web.Button();
            this.FirstNameTextBox = new Wisej.Web.TextBox();
            this.IdLabel1 = new Wisej.Web.Label();
            this.LastNameTextBox = new Wisej.Web.TextBox();
            this.ErrorProvider1 = new Wisej.Web.ErrorProvider(this.components);
            this.ReadWriteAuthorization1 = new CslaContrib.WisejWeb.ReadWriteAuthorization(this.components);
            this.RefreshButton = new Wisej.Web.Button();
            FirstNameLabel = new Wisej.Web.Label();
            IdLabel = new Wisej.Web.Label();
            LastNameLabel = new Wisej.Web.Label();
            this.GroupBox1.SuspendLayout();
            ((System.ComponentModel.ISupportInitialize)(this.AssignmentsDataGridView)).BeginInit();
            ((System.ComponentModel.ISupportInitialize)(this.RoleListBindingSource)).BeginInit();
            ((System.ComponentModel.ISupportInitialize)(this.AssignmentsBindingSource)).BeginInit();
            ((System.ComponentModel.ISupportInitialize)(this.ResourceBindingSource)).BeginInit();
            ((System.ComponentModel.ISupportInitialize)(this.ErrorProvider1)).BeginInit();
            this.SuspendLayout();
            //
            // FirstNameLabel
            //
            this.ReadWriteAuthorization1.SetApplyAuthorization(FirstNameLabel, false);
            FirstNameLabel.AutoSize = true;
            FirstNameLabel.Location = new System.Drawing.Point(13, 42);
            FirstNameLabel.Name = "FirstNameLabel";
            FirstNameLabel.Size = new System.Drawing.Size(63, 14);
            FirstNameLabel.TabIndex = 2;
            FirstNameLabel.Text = "First Name:";
            //
            // IdLabel
            //
            this.ReadWriteAuthorization1.SetApplyAuthorization(IdLabel, false);
            IdLabel.AutoSize = true;
            IdLabel.Location = new System.Drawing.Point(13, 13);
            IdLabel.Name = "IdLabel";
            IdLabel.Size = new System.Drawing.Size(21, 14);
            IdLabel.TabIndex = 0;
            IdLabel.Text = "Id:";
            //
            // LastNameLabel
            //
            this.ReadWriteAuthorization1.SetApplyAuthorization(LastNameLabel, false);
            LastNameLabel.AutoSize = true;
            LastNameLabel.Location = new System.Drawing.Point(13, 68);
            LastNameLabel.Name = "LastNameLabel";
            LastNameLabel.Size = new System.Drawing.Size(62, 14);
            LastNameLabel.TabIndex = 4;
            LastNameLabel.Text = "Last Name:";
            //
            // CloseButton
            //
            this.CloseButton.Anchor = ((Wisej.Web.AnchorStyles)((Wisej.Web.AnchorStyles.Top | Wisej.Web.AnchorStyles.Right)));
            this.ReadWriteAuthorization1.SetApplyAuthorization(this.CloseButton, false);
            this.CloseButton.DialogResult = Wisej.Web.DialogResult.Cancel;
            this.CloseButton.Location = new System.Drawing.Point(814, 100);
            this.CloseButton.Name = "CloseButton";
            this.CloseButton.Size = new System.Drawing.Size(75, 23);
            this.CloseButton.TabIndex = 10;
            this.CloseButton.Text = "Close";
            this.CloseButton.Click += new System.EventHandler(this.CloseButton_Click);
            //
            // ApplyButton
            //
            this.ApplyButton.Anchor = ((Wisej.Web.AnchorStyles)((Wisej.Web.AnchorStyles.Top | Wisej.Web.AnchorStyles.Right)));
            this.ReadWriteAuthorization1.SetApplyAuthorization(this.ApplyButton, false);
            this.ApplyButton.DialogResult = Wisej.Web.DialogResult.Cancel;
            this.ApplyButton.Location = new System.Drawing.Point(814, 42);
            this.ApplyButton.Name = "ApplyButton";
            this.ApplyButton.Size = new System.Drawing.Size(75, 23);
            this.ApplyButton.TabIndex = 8;
            this.ApplyButton.Text = "Apply";
            this.ApplyButton.Click += new System.EventHandler(this.ApplyButton_Click);
            //
            // Cancel_Button
            //
            this.Cancel_Button.Anchor = ((Wisej.Web.AnchorStyles)((Wisej.Web.AnchorStyles.Top | Wisej.Web.AnchorStyles.Right)));
            this.ReadWriteAuthorization1.SetApplyAuthorization(this.Cancel_Button, false);
            this.Cancel_Button.DialogResult = Wisej.Web.DialogResult.Cancel;
            this.Cancel_Button.Location = new System.Drawing.Point(814, 71);
            this.Cancel_Button.Name = "Cancel_Button";
            this.Cancel_Button.Size = new System.Drawing.Size(75, 23);
            this.Cancel_Button.TabIndex = 9;
            this.Cancel_Button.Text = "Cancel";
            this.Cancel_Button.Click += new System.EventHandler(this.Cancel_Button_Click);
            //
            // OKButton
            //
            this.OKButton.Anchor = ((Wisej.Web.AnchorStyles)((Wisej.Web.AnchorStyles.Top | Wisej.Web.AnchorStyles.Right)));
            this.ReadWriteAuthorization1.SetApplyAuthorization(this.OKButton, false);
            this.OKButton.Location = new System.Drawing.Point(814, 13);
            this.OKButton.Name = "OKButton";
            this.OKButton.Size = new System.Drawing.Size(75, 23);
            this.OKButton.TabIndex = 7;
            this.OKButton.Text = "OK";
            this.OKButton.Click += new System.EventHandler(this.OKButton_Click);
            //
            // GroupBox1
            //
            this.GroupBox1.Anchor = ((Wisej.Web.AnchorStyles)((((Wisej.Web.AnchorStyles.Top | Wisej.Web.AnchorStyles.Bottom) | Wisej.Web.AnchorStyles.Left) | Wisej.Web.AnchorStyles.Right)));
            this.ReadWriteAuthorization1.SetApplyAuthorization(this.GroupBox1, false);
            this.GroupBox1.Controls.Add(this.AssignmentsDataGridView);
            this.GroupBox1.Controls.Add(this.UnassignButton);
            this.GroupBox1.Controls.Add(this.AssignButton);
            this.GroupBox1.Location = new System.Drawing.Point(16, 91);
            this.GroupBox1.Name = "GroupBox1";
            this.GroupBox1.Size = new System.Drawing.Size(762, 310);
            this.GroupBox1.TabIndex = 6;
            this.GroupBox1.Text = "Assigned projects";
            //
            // AssignmentsDataGridView
            //
            this.AssignmentsDataGridView.Anchor = ((Wisej.Web.AnchorStyles)((((Wisej.Web.AnchorStyles.Top | Wisej.Web.AnchorStyles.Bottom) | Wisej.Web.AnchorStyles.Left) | Wisej.Web.AnchorStyles.Right)));
            this.ReadWriteAuthorization1.SetApplyAuthorization(this.AssignmentsDataGridView, true);
            this.AssignmentsDataGridView.AutoGenerateColumns = false;
            this.AssignmentsDataGridView.AutoSizeColumnsMode = Wisej.Web.DataGridViewAutoSizeColumnsMode.AllCells;
            this.AssignmentsDataGridView.Columns.AddRange(new Wisej.Web.DataGridViewColumn[] {
            this.ProjectId,
            this.ProjectName,
            this.Assigned,
            this.Role});
            this.AssignmentsDataGridView.DataSource = this.AssignmentsBindingSource;
            this.AssignmentsDataGridView.Location = new System.Drawing.Point(6, 19);
            this.AssignmentsDataGridView.MultiSelect = false;
            this.AssignmentsDataGridView.Name = "AssignmentsDataGridView";
            this.AssignmentsDataGridView.RowHeadersVisible = false;
            this.AssignmentsDataGridView.SelectionMode = Wisej.Web.DataGridViewSelectionMode.FullRowSelect;
            this.AssignmentsDataGridView.ShowColumnVisibilityMenu = false;
            this.AssignmentsDataGridView.Size = new System.Drawing.Size(669, 285);
            this.AssignmentsDataGridView.TabIndex = 0;
            this.AssignmentsDataGridView.CellClick += new Wisej.Web.DataGridViewCellEventHandler(this.AssignmentsDataGridView_CellContentClick);
            //
            // ProjectId
            //
            this.ProjectId.DataPropertyName = "ProjectId";
            this.ProjectId.HeaderText = "ProjectId";
            this.ProjectId.Name = "ProjectId";
            this.ProjectId.ReadOnly = true;
            this.ProjectId.Visible = false;
            //
            // ProjectName
            //
            this.ProjectName.AutoSizeMode = Wisej.Web.DataGridViewAutoSizeColumnMode.AllCells;
            this.ProjectName.DataPropertyName = "ProjectName";
            this.ProjectName.FillWeight = 200F;
            this.ProjectName.HeaderText = "Project Name";
            this.ProjectName.MinimumWidth = 50;
            this.ProjectName.Name = "ProjectName";
            this.ProjectName.ReadOnly = true;
            this.ProjectName.Resizable = Wisej.Web.DataGridViewTriState.True;
            this.ProjectName.SortMode = Wisej.Web.DataGridViewColumnSortMode.Automatic;
            this.ProjectName.Text = "";
            //
            // Assigned
            //
            this.Assigned.DataPropertyName = "Assigned";
            this.Assigned.HeaderText = "Assigned";
            this.Assigned.MinimumWidth = 50;
            this.Assigned.Name = "Assigned";
            this.Assigned.ReadOnly = true;
            //
            // Role
            //
            this.Role.AutoSizeMode = Wisej.Web.DataGridViewAutoSizeColumnMode.AllCells;
            this.Role.DataPropertyName = "Role";
            this.Role.DataSource = this.RoleListBindingSource;
            this.Role.DisplayMember = "Value";
            this.Role.DropDownStyle = Wisej.Web.ComboBoxStyle.DropDownList;
            this.Role.FillWeight = 200F;
            this.Role.HeaderText = "Role";
            this.Role.MinimumWidth = 50;
            this.Role.Name = "Role";
            this.Role.ValueMember = "Key";
            //
            // RoleListBindingSource
            //
            this.RoleListBindingSource.DataSource = typeof(ProjectTracker.Library.RoleList);
            //
            // AssignmentsBindingSource
            //
            this.AssignmentsBindingSource.DataMember = "Assignments";
            this.AssignmentsBindingSource.DataSource = this.ResourceBindingSource;
            //
            // ResourceBindingSource
            //
            this.ResourceBindingSource.DataSource = typeof(ProjectTracker.Library.ResourceEdit);
            this.ResourceBindingSource.RefreshValueOnChange = true;
            //
            // UnassignButton
            //
            this.UnassignButton.Anchor = ((Wisej.Web.AnchorStyles)((Wisej.Web.AnchorStyles.Top | Wisej.Web.AnchorStyles.Right)));
            this.ReadWriteAuthorization1.SetApplyAuthorization(this.UnassignButton, false);
            this.UnassignButton.Location = new System.Drawing.Point(681, 48);
            this.UnassignButton.Name = "UnassignButton";
            this.UnassignButton.Size = new System.Drawing.Size(75, 23);
            this.UnassignButton.TabIndex = 2;
            this.UnassignButton.Text = "Unassign";
            this.UnassignButton.Click += new System.EventHandler(this.UnassignButton_Click);
            //
            // AssignButton
            //
            this.AssignButton.Anchor = ((Wisej.Web.AnchorStyles)((Wisej.Web.AnchorStyles.Top | Wisej.Web.AnchorStyles.Right)));
            this.ReadWriteAuthorization1.SetApplyAuthorization(this.AssignButton, false);
            this.AssignButton.Location = new System.Drawing.Point(681, 19);
            this.AssignButton.Name = "AssignButton";
            this.AssignButton.Size = new System.Drawing.Size(75, 23);
            this.AssignButton.TabIndex = 1;
            this.AssignButton.Text = "Assign";
            this.AssignButton.Click += new System.EventHandler(this.AssignButton_Click);
            //
            // FirstNameTextBox
            //
            this.FirstNameTextBox.Anchor = ((Wisej.Web.AnchorStyles)(((Wisej.Web.AnchorStyles.Top | Wisej.Web.AnchorStyles.Left) | Wisej.Web.AnchorStyles.Right)));
            this.ReadWriteAuthorization1.SetApplyAuthorization(this.FirstNameTextBox, true);
            this.FirstNameTextBox.DataBindings.Add(new Wisej.Web.Binding("Text", this.ResourceBindingSource, "FirstName", true));
            this.FirstNameTextBox.Location = new System.Drawing.Point(80, 39);
            this.FirstNameTextBox.Name = "FirstNameTextBox";
            this.FirstNameTextBox.Size = new System.Drawing.Size(698, 20);
            this.FirstNameTextBox.TabIndex = 3;
            //
            // IdLabel1
            //
            this.IdLabel1.Anchor = ((Wisej.Web.AnchorStyles)(((Wisej.Web.AnchorStyles.Top | Wisej.Web.AnchorStyles.Left) | Wisej.Web.AnchorStyles.Right)));
            this.ReadWriteAuthorization1.SetApplyAuthorization(this.IdLabel1, true);
            this.IdLabel1.DataBindings.Add(new Wisej.Web.Binding("Text", this.ResourceBindingSource, "Id", true));
            this.IdLabel1.Location = new System.Drawing.Point(80, 13);
            this.IdLabel1.Name = "IdLabel1";
            this.IdLabel1.Size = new System.Drawing.Size(698, 23);
            this.IdLabel1.TabIndex = 1;
            //
            // LastNameTextBox
            //
            this.LastNameTextBox.Anchor = ((Wisej.Web.AnchorStyles)(((Wisej.Web.AnchorStyles.Top | Wisej.Web.AnchorStyles.Left) | Wisej.Web.AnchorStyles.Right)));
            this.ReadWriteAuthorization1.SetApplyAuthorization(this.LastNameTextBox, true);
            this.LastNameTextBox.DataBindings.Add(new Wisej.Web.Binding("Text", this.ResourceBindingSource, "LastName", true));
            this.LastNameTextBox.Location = new System.Drawing.Point(80, 65);
            this.LastNameTextBox.Name = "LastNameTextBox";
            this.LastNameTextBox.Size = new System.Drawing.Size(698, 20);
            this.LastNameTextBox.TabIndex = 5;
            //
            // ErrorProvider1
            //
            this.ErrorProvider1.BlinkStyle = Wisej.Web.ErrorBlinkStyle.NeverBlink;
            this.ErrorProvider1.ContainerControl = this;
            this.ErrorProvider1.DataSource = this.ResourceBindingSource;
            //
            // RefreshButton
            //
            this.RefreshButton.Anchor = ((Wisej.Web.AnchorStyles)((Wisej.Web.AnchorStyles.Top | Wisej.Web.AnchorStyles.Right)));
            this.ReadWriteAuthorization1.SetApplyAuthorization(this.RefreshButton, false);
            this.RefreshButton.Location = new System.Drawing.Point(814, 129);
            this.RefreshButton.Name = "RefreshButton";
            this.RefreshButton.Size = new System.Drawing.Size(75, 23);
            this.RefreshButton.TabIndex = 11;
            this.RefreshButton.Text = "Refresh";
            this.RefreshButton.Click += new System.EventHandler(this.RefreshButton_Click);
            //
            // ResourceEdit
            //
            this.ReadWriteAuthorization1.SetApplyAuthorization(this, false);
            this.Controls.Add(this.RefreshButton);
            this.Controls.Add(this.CloseButton);
            this.Controls.Add(this.ApplyButton);
            this.Controls.Add(this.Cancel_Button);
            this.Controls.Add(this.OKButton);
            this.Controls.Add(this.GroupBox1);
            this.Controls.Add(FirstNameLabel);
            this.Controls.Add(this.FirstNameTextBox);
            this.Controls.Add(IdLabel);
            this.Controls.Add(this.IdLabel1);
            this.Controls.Add(LastNameLabel);
            this.Controls.Add(this.LastNameTextBox);
            this.Name = "ResourceEdit";
            this.Size = new System.Drawing.Size(901, 431);
            this.Load += new System.EventHandler(this.ResourceEdit_Load);
            this.GroupBox1.ResumeLayout(false);
            ((System.ComponentModel.ISupportInitialize)(this.AssignmentsDataGridView)).EndInit();
            ((System.ComponentModel.ISupportInitialize)(this.RoleListBindingSource)).EndInit();
            ((System.ComponentModel.ISupportInitialize)(this.AssignmentsBindingSource)).EndInit();
            ((System.ComponentModel.ISupportInitialize)(this.ResourceBindingSource)).EndInit();
            ((System.ComponentModel.ISupportInitialize)(this.ErrorProvider1)).EndInit();
            this.ResumeLayout(false);
            this.PerformLayout();
        }

        #endregion

        // Designer-managed control fields; referenced from the hand-written half of the partial class.
        internal Wisej.Web.Button CloseButton;
        internal Wisej.Web.Button ApplyButton;
        internal Wisej.Web.Button Cancel_Button;
        internal Wisej.Web.Button OKButton;
        internal Wisej.Web.GroupBox GroupBox1;
        internal Wisej.Web.Button UnassignButton;
        internal Wisej.Web.Button AssignButton;
        internal Wisej.Web.TextBox FirstNameTextBox;
        internal Wisej.Web.Label IdLabel1;
        internal Wisej.Web.TextBox LastNameTextBox;
        internal CslaContrib.WisejWeb.ReadWriteAuthorization ReadWriteAuthorization1;
        internal Wisej.Web.DataGridView AssignmentsDataGridView;
        internal Wisej.Web.BindingSource RoleListBindingSource;
        internal Wisej.Web.BindingSource AssignmentsBindingSource;
        internal Wisej.Web.BindingSource ResourceBindingSource;
        internal Wisej.Web.ErrorProvider ErrorProvider1;
        private Wisej.Web.DataGridViewTextBoxColumn ProjectId;
        private Wisej.Web.DataGridViewLinkColumn ProjectName;
        private Wisej.Web.DataGridViewTextBoxColumn Assigned;
        private Wisej.Web.DataGridViewComboBoxColumn Role;
        internal Wisej.Web.Button RefreshButton;
    }
}
using System;
using System.Collections.Generic;
using System.Runtime.CompilerServices;
using System.Threading;
using System.Xml;
using Umbraco.Core;
using Umbraco.Core.Models;
using Umbraco.Core.Models.EntityBase;
using Umbraco.Core.Persistence;
using umbraco.cms.businesslogic.web;
using umbraco.DataLayer;
using umbraco.BusinessLogic;
using System.IO;
using System.Text.RegularExpressions;
using System.ComponentModel;
using Umbraco.Core.IO;
using System.Collections;
using umbraco.cms.businesslogic.task;
using umbraco.cms.businesslogic.workflow;
using umbraco.cms.businesslogic.Tags;
using File = System.IO.File;
using Media = umbraco.cms.businesslogic.media.Media;
using Tag = umbraco.cms.businesslogic.Tags.Tag;
using Notification = umbraco.cms.businesslogic.workflow.Notification;
using Task = umbraco.cms.businesslogic.task.Task;

namespace umbraco.cms.businesslogic
{
    /// <summary>
    /// CMSNode class serves as the base class for many of the other components in the cms.businesslogic.xx namespaces.
    /// Providing the basic hierarchical data structure and properties Text (name), Creator, Createdate, updatedate etc.
    /// which are shared by most umbraco objects.
    ///
    /// The child classes are required to implement an identifier (Guid) which is used as the objecttype identifier, for
    /// distinguishing the different types of CMSNodes (ex. Documents/Medias/Stylesheets/documenttypes and so forth).
    /// </summary>
    [Obsolete("Obsolete, This class will eventually be phased out", false)]
    public class CMSNode : BusinessLogic.console.IconI
    {
        #region Private Members

        // Backing fields for the node's row in the umbracoNode table.
        private string _text;
        private int _id = 0;
        private Guid _uniqueID;
        private int _parentid;
        private Guid _nodeObjectType;
        private int _level;
        private string _path;
        private bool _hasChildren;
        private int _sortOrder;
        private int _userId;
        private DateTime _createDate;
        private bool _hasChildrenInitialized;
        // Default tree icon used when no specific icon is set.
        private string m_image = "default.png";
        // Tri-state: null = trashed flag not yet loaded from the database.
        private bool? _isTrashed = null;
        protected IUmbracoEntity Entity;

        #endregion

        #region Private static

        private static readonly string DefaultIconCssFile = IOHelper.MapPath(SystemDirectories.UmbracoClient + "/Tree/treeIcons.css");
        private static readonly List<string> InternalDefaultIconClasses = new List<string>();
        private static readonly ReaderWriterLockSlim Locker = new ReaderWriterLockSlim();

        // Parses the tree-icon CSS file and collects every selector into InternalDefaultIconClasses.
        // Called lazily (under a write lock) from the DefaultIconClasses property.
        // NOTE(review): the StreamReader is not disposed if ReadLine throws (no using block), and the
        // line-by-line string concatenation is O(n^2); ReadToEnd or a StringBuilder would be safer/cheaper.
        private static void InitializeIconClasses()
        {
            StreamReader re = File.OpenText(DefaultIconCssFile);
            string content = string.Empty;
            string input = null;
            while ((input = re.ReadLine()) != null)
            {
                content += input.Replace("\n", "") + "\n";
            }
            re.Close();

            // parse the classes
            var m = Regex.Matches(content, "([^{]*){([^}]*)}", RegexOptions.IgnoreCase | RegexOptions.IgnorePatternWhitespace);
            foreach (Match match in m)
            {
                var groups = match.Groups;
                var cssClass = groups[1].Value.Replace("\n", "").Replace("\r", "").Trim().Trim(Environment.NewLine.ToCharArray());
                if (string.IsNullOrEmpty(cssClass) == false)
                {
                    InternalDefaultIconClasses.Add(cssClass);
                }
            }
        }

        // SQL to load a single umbracoNode row by id.
        private const string SqlSingle = "SELECT id, createDate, trashed, parentId, nodeObjectType, nodeUser, level, path, sortOrder, uniqueID, text FROM umbracoNode WHERE id = @id";
        // SQL to load all descendants of a node; {0} is the ancestor id matched inside the comma-separated path.
        private const string SqlDescendants = @" SELECT id, createDate, trashed, parentId, nodeObjectType, nodeUser, level, path, sortOrder, uniqueID, text FROM umbracoNode WHERE path LIKE '%,{0},%'";

        #endregion

        #region Public static

        /// <summary>
        /// Get a count on all CMSNodes given the objecttype
        /// </summary>
        /// <param name="objectType">The objecttype identifier</param>
        /// <returns>
        /// The number of CMSNodes of the given objecttype
        /// </returns>
        public static int CountByObjectType(Guid objectType)
        {
            return SqlHelper.ExecuteScalar<int>("SELECT COUNT(*) from umbracoNode WHERE nodeObjectType = @type", SqlHelper.CreateParameter("@type", objectType));
        }

        /// <summary>
        /// Number of ancestors of the current CMSNode
        /// </summary>
        /// <param name="Id">The CMSNode Id</param>
        /// <returns>
        /// The number of ancestors from the given CMSNode
        /// </returns>
        // NOTE(review): Id is concatenated into the SQL text; safe only because it is an int —
        // keep it parameterized like the other queries if this is ever changed to a string key.
        public static int CountSubs(int Id)
        {
            return SqlHelper.ExecuteScalar<int>("SELECT COUNT(*) FROM umbracoNode WHERE ','+path+',' LIKE '%," + Id.ToString() + ",%'");
        }

        /// <summary>
        /// Returns the number of leaf nodes from the newParent id for a given object type
        /// </summary>
        /// <param name="parentId"></param>
        /// <param name="objectType"></param>
        /// <returns></returns>
        public static int CountLeafNodes(int parentId, Guid objectType)
        {
            return SqlHelper.ExecuteScalar<int>("Select count(uniqueID) from umbracoNode where nodeObjectType = @type And parentId = @parentId",
                SqlHelper.CreateParameter("@type", objectType),
                SqlHelper.CreateParameter("@parentId", parentId));
        }

        /// <summary>
        /// Gets the default icon classes.
        /// </summary>
        /// <value>The default icon classes.</value>
        public static List<string> DefaultIconClasses
        {
            get
            {
                // Lazily populate the cache; upgrade to a write lock only on the first call.
                using (var l = new UpgradeableReadLock(Locker))
                {
                    if (InternalDefaultIconClasses.Count == 0)
                    {
                        l.UpgradeToWriteLock();
                        InitializeIconClasses();
                    }
                    return InternalDefaultIconClasses;
                }
            }
        }

        /// <summary>
        /// Method for checking if a CMSNode exits with the given Guid
        /// </summary>
        /// <param name="uniqueID">Identifier</param>
        /// <returns>True if there is a CMSNode with the given Guid</returns>
        // NOTE(review): the query references "@uniqueID" but the parameter is created as "@uniqueId" —
        // harmless where the provider matches parameter names case-insensitively (SQL Server), but worth aligning.
        public static bool IsNode(Guid uniqueID)
        {
            return (SqlHelper.ExecuteScalar<int>("select count(id) from umbracoNode where uniqueID = @uniqueID", SqlHelper.CreateParameter("@uniqueId", uniqueID)) > 0);
        }

        /// <summary>
        /// Method for checking if a CMSNode exits with the given id
        /// </summary>
        /// <param name="Id">Identifier</param>
        /// <returns>True if there is a CMSNode with the given id</returns>
        public static bool IsNode(int Id)
        {
            return (SqlHelper.ExecuteScalar<int>("select count(id) from umbracoNode where id = @id", SqlHelper.CreateParameter("@id", Id)) > 0);
        }

        /// <summary>
        /// Retrieve a list of the unique id's of all CMSNodes given the objecttype
        /// </summary>
        /// <param name="objectType">The objecttype identifier</param>
        /// <returns>
        /// A list of all unique identifiers which each are associated to a CMSNode
        /// </returns>
        // NOTE(review): the reader is closed manually rather than via using — it leaks if Read/GetGuid throws.
        public static Guid[] getAllUniquesFromObjectType(Guid objectType)
        {
            IRecordsReader dr = SqlHelper.ExecuteReader("Select uniqueID from umbracoNode where nodeObjectType = @type", SqlHelper.CreateParameter("@type", objectType));
            System.Collections.ArrayList tmp = new System.Collections.ArrayList();
            while (dr.Read()) tmp.Add(dr.GetGuid("uniqueID"));
            dr.Close();
            Guid[] retval = new Guid[tmp.Count];
            for (int i = 0; i < tmp.Count; i++) retval[i] = (Guid)tmp[i];
            return retval;
        }

        /// <summary>
        /// Retrieve a list of the node id's of all CMSNodes given the objecttype
        /// </summary>
        /// <param name="objectType">The objecttype identifier</param>
        /// <returns>
        /// A list of all node ids which each are associated to a CMSNode
        /// </returns>
        public static int[] getAllUniqueNodeIdsFromObjectType(Guid objectType)
        {
            IRecordsReader dr = SqlHelper.ExecuteReader("Select id from umbracoNode where nodeObjectType = @type", SqlHelper.CreateParameter("@type", objectType));
            System.Collections.ArrayList tmp = new System.Collections.ArrayList();
            while (dr.Read()) tmp.Add(dr.GetInt("id"));
            dr.Close();
            return (int[])tmp.ToArray(typeof(int));
        }

        /// <summary>
        /// Retrieves the top level nodes in the hierarchy
        /// </summary>
        /// <param name="ObjectType">The Guid identifier of the type of objects</param>
        /// <returns>
        /// A list of all top level nodes given the objecttype
        /// </returns>
        public static Guid[] TopMostNodeIds(Guid ObjectType)
        {
            IRecordsReader dr = SqlHelper.ExecuteReader("Select uniqueID from umbracoNode where nodeObjectType = @type And parentId = -1 order by sortOrder", SqlHelper.CreateParameter("@type", ObjectType));
            System.Collections.ArrayList tmp = new System.Collections.ArrayList();
            while (dr.Read()) tmp.Add(dr.GetGuid("uniqueID"));
            dr.Close();
            Guid[] retval = new Guid[tmp.Count];
            for (int i = 0; i < tmp.Count; i++) retval[i] = (Guid)tmp[i];
            return retval;
        }

        #endregion

        #region Protected static

        /// <summary>
        /// Given the protected modifier the CMSNode.MakeNew method can only be accessed by
        /// derived classes &gt; who by definition knows of its own objectType.
        /// </summary>
        /// <param name="parentId">The newParent CMSNode id</param>
        /// <param name="objectType">The objecttype identifier</param>
        /// <param name="userId">Creator</param>
        /// <param name="level">The level in the tree hieararchy</param>
        /// <param name="text">The name of the CMSNode</param>
        /// <param name="uniqueID">The unique identifier</param>
        /// <returns></returns>
        protected static CMSNode MakeNew(int parentId, Guid objectType, int userId, int level, string text, Guid uniqueID)
        {
            CMSNode parent = null;
            string path = "";
            int sortOrder = 0;

            // Root nodes (level 0) get path "-1"; child nodes inherit the parent's path and the next sort order.
            if (level > 0)
            {
                parent = new CMSNode(parentId);
                sortOrder = GetNewDocumentSortOrder(parentId);
                path = parent.Path;
            }
            else
                path = "-1";

            // Ruben 8/1/2007: I replace this with a parameterized version.
            // But does anyone know what the 'level++' is supposed to be doing there?
            // Nothing obviously, since it's a postfix.
            SqlHelper.ExecuteNonQuery("INSERT INTO umbracoNode(trashed, parentID, nodeObjectType, nodeUser, level, path, sortOrder, uniqueID, text, createDate) VALUES(@trashed, @parentID, @nodeObjectType, @nodeUser, @level, @path, @sortOrder, @uniqueID, @text, @createDate)",
                SqlHelper.CreateParameter("@trashed", 0),
                SqlHelper.CreateParameter("@parentID", parentId),
                SqlHelper.CreateParameter("@nodeObjectType", objectType),
                SqlHelper.CreateParameter("@nodeUser", userId),
                SqlHelper.CreateParameter("@level", level++),
                SqlHelper.CreateParameter("@path", path),
                SqlHelper.CreateParameter("@sortOrder", sortOrder),
                SqlHelper.CreateParameter("@uniqueID", uniqueID),
                SqlHelper.CreateParameter("@text", text),
                SqlHelper.CreateParameter("@createDate", DateTime.Now));

            // Re-read the freshly inserted row and complete its materialized path with its own id.
            CMSNode retVal = new CMSNode(uniqueID);
            retVal.Path = path + "," + retVal.Id.ToString();

            // NH 4.7.1 duplicate permissions because of refactor
            if (parent != null)
            {
                IEnumerable<Permission> permissions = Permission.GetNodePermissions(parent);
                foreach (Permission p in permissions)
                {
                    Permission.MakeNew(User.GetUser(p.UserId), retVal, p.PermissionId);
                }
            }

            //event
            NewEventArgs e = new NewEventArgs();
            retVal.FireAfterNew(e);

            return retVal;
        }

        // Returns MAX(sortOrder)+1 among the parent's existing Document children (0 when there are none).
        private static int GetNewDocumentSortOrder(int parentId)
        {
            var sortOrder = 0;
            using (IRecordsReader dr = SqlHelper.ExecuteReader(
                "SELECT MAX(sortOrder) AS sortOrder FROM umbracoNode WHERE parentID = @parentID AND nodeObjectType = @GuidForNodesOfTypeDocument",
                SqlHelper.CreateParameter("@parentID", parentId),
                SqlHelper.CreateParameter("@GuidForNodesOfTypeDocument", Document._objectType)))
            {
                while (dr.Read())
                    sortOrder = dr.GetInt("sortOrder") + 1;
            }
            return sortOrder;
        }

        /// <summary>
        /// Retrieve a list of the id's of all CMSNodes given the objecttype and the first letter of the name.
/// </summary>
/// <param name="objectType">The objecttype identifier</param>
/// <param name="letter">Firstletter</param>
/// <returns>
/// A list of all CMSNodes which has the objecttype and a name that starts with the given letter
/// </returns>
protected static int[] getUniquesFromObjectTypeAndFirstLetter(Guid objectType, char letter)
{
    // "letter%" LIKE-pattern: matches names starting with the given character.
    using (IRecordsReader dr = SqlHelper.ExecuteReader("Select id from umbracoNode where nodeObjectType = @objectType AND text like @letter",
        SqlHelper.CreateParameter("@objectType", objectType),
        SqlHelper.CreateParameter("@letter", letter.ToString() + "%")))
    {
        List<int> tmp = new List<int>();
        while (dr.Read())
            tmp.Add(dr.GetInt("id"));
        return tmp.ToArray();
    }
}

/// <summary>
/// Gets the SQL helper.
/// </summary>
/// <value>The SQL helper.</value>
[Obsolete("Obsolete, For querying the database use the new UmbracoDatabase object ApplicationContext.Current.DatabaseContext.Database", false)]
protected static ISqlHelper SqlHelper
{
    get { return Application.SqlHelper; }
}

// Shortcut to the newer database abstraction, used by newer code paths.
internal static UmbracoDatabase Database
{
    get { return ApplicationContext.Current.DatabaseContext.Database; }
}

#endregion

#region Constructors

/// <summary>
/// Empty constructor that is not supported
/// ...why is it here? (Kept so that reflection/serialization callers get an explicit failure.)
/// </summary>
public CMSNode()
{
    throw new NotSupportedException();
}

/// <summary>
/// Initializes a new instance of the <see cref="CMSNode"/> class.
/// </summary>
/// <param name="Id">The id.</param>
public CMSNode(int Id)
{
    _id = Id;
    setupNode();
}

/// <summary>
/// This is purely for a hackity hack hack hack in order to make the new Document(id, version) constructor work because
/// the Version property needs to be set on the object before setupNode is called, otherwise it never works! this allows
/// inheritors to set default data before setupNode() is called.
/// </summary>
/// <param name="id"></param>
/// <param name="ctorArgs">Opaque constructor arguments forwarded to PreSetupNode for subclasses to consume.</param>
internal CMSNode(int id, object[] ctorArgs)
{
    _id = id;
    PreSetupNode(ctorArgs);
}

/// <summary>
/// Initializes a new instance of the <see cref="CMSNode"/> class.
/// </summary>
/// <param name="id">The id.</param>
/// <param name="noSetup">if set to <c>true</c> [no setup] — skips the database load in setupNode().</param>
public CMSNode(int id, bool noSetup)
{
    _id = id;
    if (!noSetup)
        setupNode();
}

/// <summary>
/// Initializes a new instance of the <see cref="CMSNode"/> class, resolving the integer id from the Guid.
/// </summary>
/// <param name="uniqueID">The unique ID.</param>
public CMSNode(Guid uniqueID)
{
    _id = SqlHelper.ExecuteScalar<int>("SELECT id FROM umbracoNode WHERE uniqueID = @uniqueId",
        SqlHelper.CreateParameter("@uniqueId", uniqueID));
    setupNode();
}

// Same Guid-based lookup as above, but optionally skipping the full node load.
public CMSNode(Guid uniqueID, bool noSetup)
{
    _id = SqlHelper.ExecuteScalar<int>("SELECT id FROM umbracoNode WHERE uniqueID = @uniqueId",
        SqlHelper.CreateParameter("@uniqueId", uniqueID));
    if (!noSetup)
        setupNode();
}

// Constructs directly from an open data reader row (no extra database round-trip).
protected internal CMSNode(IRecordsReader reader)
{
    _id = reader.GetInt("id");
    PopulateCMSNodeFromReader(reader);
}

// Wraps a new-API entity; keeps the entity reference so property setters can mirror changes onto it.
protected internal CMSNode(IUmbracoEntity entity)
{
    _id = entity.Id;
    Entity = entity;
}

// Minimal wrap: only the id is taken from the entity; no state is loaded.
protected internal CMSNode(IEntity entity)
{
    _id = entity.Id;
}

#endregion

#region Public Methods

/// <summary>
/// Ensures uniqueness by id
/// </summary>
/// <param name="obj"></param>
/// <returns></returns>
public override bool Equals(object obj)
{
    var l = obj as CMSNode;
    if (l != null)
    {
        return this._id.Equals(l._id);
    }
    return false;
}

/// <summary>
/// Ensures uniqueness by id
/// </summary>
/// <returns></returns>
public override int GetHashCode()
{
    return _id.GetHashCode();
}

/// <summary>
/// An xml representation of the CMSNOde
/// </summary>
/// <param name="xd">Xmldocument context</param>
/// <param name="Deep">If true the xml will append the CMSNodes child xml</param>
/// <returns>The CMSNode Xmlrepresentation</returns>
public virtual XmlNode ToXml(XmlDocument xd, bool Deep)
{
    XmlNode x = xd.CreateNode(XmlNodeType.Element, "node", "");
    XmlPopulate(xd, x, Deep);
    return x;
}

/// <summary>
/// Returns the preview xml for this node, generating and persisting it first if none exists yet.
/// </summary>
public virtual XmlNode ToPreviewXml(XmlDocument xd)
{
    // If xml already exists
    if (!PreviewExists(UniqueId))
    {
        SavePreviewXml(ToXml(xd, false), UniqueId);
    }
    return GetPreviewXml(xd, UniqueId);
}

/// <summary>
/// Loads the preview xml rows for this node's subtree (or only its descendants when
/// <paramref name="childrenOnly"/> is true), ordered by level then sortOrder.
/// </summary>
public virtual List<CMSPreviewNode> GetNodesForPreview(bool childrenOnly)
{
    List<CMSPreviewNode> nodes = new List<CMSPreviewNode>();
    // NOTE(review): the path is injected via String.Format rather than a SQL parameter,
    // and the SELECT list does not project uniqueID even though the reader below calls
    // dr.GetGuid("uniqueID") — that looks like it would fail at runtime; verify.
    string sql = @"select umbracoNode.id, umbracoNode.parentId, umbracoNode.level, umbracoNode.sortOrder, cmsPreviewXml.xml from umbracoNode inner join cmsPreviewXml on cmsPreviewXml.nodeId = umbracoNode.id where trashed = 0 and path like '{0}' order by level,sortOrder";
    string pathExp = childrenOnly ? Path + ",%" : Path;
    IRecordsReader dr = SqlHelper.ExecuteReader(String.Format(sql, pathExp));
    while (dr.Read())
        nodes.Add(new CMSPreviewNode(dr.GetInt("id"), dr.GetGuid("uniqueID"), dr.GetInt("parentId"), dr.GetShort("level"), dr.GetInt("sortOrder"), dr.GetString("xml"), false));
    dr.Close();
    return nodes;
}

/// <summary>
/// Used to persist object changes to the database. In Version3.0 it's just a stub for future compatibility
/// </summary>
public virtual void Save()
{
    SaveEventArgs e = new SaveEventArgs();
    this.FireBeforeSave(e);
    if (!e.Cancel)
    {
        //In the future there will be SQL stuff happening here...
        this.FireAfterSave(e);
    }
}

// Diagnostic representation; falls back to the default ToString() for uninitialized nodes.
public override string ToString()
{
    if (Id != int.MinValue || !string.IsNullOrEmpty(Text))
    {
        return string.Format("{{ Id: {0}, Text: {1}, ParentId: {2} }}", Id, Text, _parentid);
    }
    return base.ToString();
}

/// <summary>
/// Re-parents this node under <paramref name="newParent"/>, updating sortOrder, level, path and the
/// trashed flag as needed, regenerating xml for document/media nodes, and recursively moving children.
/// NOTE: each property assignment below (Parent, sortOrder, Level, Path, IsTrashed) issues its own
/// SQL update, so statement order here is significant.
/// </summary>
private void Move(CMSNode newParent)
{
    MoveEventArgs e = new MoveEventArgs();
    FireBeforeMove(e);
    if (!e.Cancel)
    {
        //first we need to establish if the node already exists under the newParent node
        //var isNewParentInPath = (Path.Contains("," + newParent.Id + ","));
        //if it's the same newParent, we can save some SQL calls since we know these wont change.
        //level and path might change even if it's the same newParent because the newParent could be moving somewhere.
        if (ParentId != newParent.Id)
        {
            // Append at the end of the new parent's children.
            int maxSortOrder = SqlHelper.ExecuteScalar<int>("select coalesce(max(sortOrder),0) from umbracoNode where parentid = @parentId",
                SqlHelper.CreateParameter("@parentId", newParent.Id));
            this.Parent = newParent;
            this.sortOrder = maxSortOrder + 1;
        }
        //detect if we have moved, then update the level and path
        // issue: http://issues.umbraco.org/issue/U4-1579
        if (this.Path != newParent.Path + "," + this.Id.ToString())
        {
            this.Level = newParent.Level + 1;
            this.Path = newParent.Path + "," + this.Id.ToString();
        }
        //this code block should not be here but since the class structure is very poor and doesn't use
        //overrides (instead using shadows/new) for the Children property, when iterating over the children
        //and calling Move(), the super classes overridden OnMove or Move methods never get fired, so
        //we now need to hard code this here :(
        if (Path.Contains("," + ((int)RecycleBin.RecycleBinType.Content).ToString() + ",")
            || Path.Contains("," + ((int)RecycleBin.RecycleBinType.Media).ToString() + ","))
        {
            //if we've moved this to the recyle bin, we need to update the trashed property
            if (!IsTrashed) IsTrashed = true; //don't update if it's not necessary
        }
        else
        {
            if (IsTrashed) IsTrashed = false; //don't update if it's not necessary
        }
        //make sure the node type is a document/media, if it is a recycle bin then this will not be equal
        if (!IsTrashed && newParent.nodeObjectType == Document._objectType)
        {
            // regenerate the xml of the current document
            var movedDocument = new Document(this.Id);
            movedDocument.XmlGenerate(new XmlDocument());
            //regenerate the xml for the newParent node
            var parentDocument = new Document(newParent.Id);
            parentDocument.XmlGenerate(new XmlDocument());
        }
        else if (!IsTrashed && newParent.nodeObjectType == Media._objectType)
        {
            //regenerate the xml for the newParent node
            var m = new Media(newParent.Id);
            m.XmlGenerate(new XmlDocument());
        }
        // Recursively move children under this node so their level/path/trashed state are refreshed.
        var children = this.Children;
        foreach (CMSNode c in children)
        {
            c.Move(this);
        }
        FireAfterMove(e);
    }
}

/// <summary>
/// Moves the CMSNode from the current position in the hierarchy to the target
/// </summary>
/// <param name="newParentId">Target CMSNode id</param>
[Obsolete("Obsolete, Use Umbraco.Core.Services.ContentService.Move() or Umbraco.Core.Services.MediaService.Move()", false)]
public virtual void Move(int newParentId)
{
    CMSNode parent = new CMSNode(newParentId);
    Move(parent);
}

/// <summary>
/// Deletes this instance: removes relations, tasks, notifications, permissions and tag
/// associations before deleting the umbracoNode row itself. Cancelable via BeforeDelete.
/// </summary>
public virtual void delete()
{
    DeleteEventArgs e = new DeleteEventArgs();
    FireBeforeDelete(e);
    if (!e.Cancel)
    {
        // remove relations
        var rels = Relations;
        foreach (relation.Relation rel in rels)
        {
            rel.Delete();
        }
        //removes tasks
        foreach (Task t in Tasks)
        {
            t.Delete();
        }
        //remove notifications
        Notification.DeleteNotifications(this);
        //remove permissions
        Permission.DeletePermissions(this);
        //removes tag associations (i know the key is set to cascade but do it anyways)
        Tag.RemoveTagsFromNode(this.Id);
        SqlHelper.ExecuteNonQuery("DELETE FROM umbracoNode WHERE uniqueID= @uniqueId",
            SqlHelper.CreateParameter("@uniqueId", _uniqueID));
        FireAfterDelete(e);
    }
}

/// <summary>
/// Does the current CMSNode have any child nodes.
/// </summary>
/// <value>
/// <c>true</c> if this instance has children; otherwise, <c>false</c>.
/// </value>
public virtual bool HasChildren
{
    get
    {
        // Lazily computed once per instance; the setter caches the value and marks it initialized.
        if (!_hasChildrenInitialized)
        {
            int tmpChildrenCount = SqlHelper.ExecuteScalar<int>("select count(id) from umbracoNode where ParentId = @id",
                SqlHelper.CreateParameter("@id", Id));
            HasChildren = (tmpChildrenCount > 0);
        }
        return _hasChildren;
    }
    set
    {
        _hasChildrenInitialized = true;
        _hasChildren = value;
    }
}

/// <summary>
/// Returns all descendant nodes from this node.
/// </summary>
/// <returns></returns>
/// <remarks>
/// This doesn't return a strongly typed IEnumerable object so that we can override in in super clases
/// and since this class isn't a generic (thought it should be) this is not strongly typed.
/// </remarks>
public virtual IEnumerable GetDescendants()
{
    var descendants = new List<CMSNode>();
    // SqlDescendants is a format string declared elsewhere in this class; Id is substituted in.
    using (IRecordsReader dr = SqlHelper.ExecuteReader(string.Format(SqlDescendants, Id)))
    {
        while (dr.Read())
        {
            // noSetup = true: the row data is populated directly from the reader, avoiding a per-node query.
            var node = new CMSNode(dr.GetInt("id"), true);
            node.PopulateCMSNodeFromReader(dr);
            descendants.Add(node);
        }
    }
    return descendants;
}

#endregion

#region Public properties

/// <summary>
/// Determines if the node is in the recycle bin.
/// This is only relavent for node types that support a recyle bin (such as Document/Media)
/// </summary>
public virtual bool IsTrashed
{
    get
    {
        // Lazily loaded; cached in the nullable backing field.
        if (!_isTrashed.HasValue)
        {
            _isTrashed = Convert.ToBoolean(SqlHelper.ExecuteScalar<object>("SELECT trashed FROM umbracoNode where id=@id",
                SqlHelper.CreateParameter("@id", this.Id)));
        }
        return _isTrashed.Value;
    }
    set
    {
        // Setter persists immediately.
        _isTrashed = value;
        SqlHelper.ExecuteNonQuery("update umbracoNode set trashed = @trashed where id = @id",
            SqlHelper.CreateParameter("@trashed", value),
            SqlHelper.CreateParameter("@id", this.Id));
    }
}

/// <summary>
/// Gets or sets the sort order.
/// </summary>
/// <value>The sort order.</value>
public virtual int sortOrder
{
    get { return _sortOrder; }
    set
    {
        _sortOrder = value;
        // NOTE(review): value and Id are concatenated into the SQL text rather than parameterized
        // (both are ints, so not injectable, but inconsistent with the parameterized setters above).
        SqlHelper.ExecuteNonQuery("update umbracoNode set sortOrder = '" + value + "' where id = " + this.Id.ToString());
        // Mirror the change onto the wrapped new-API entity, if one is attached.
        if (Entity != null)
            Entity.SortOrder = value;
    }
}

/// <summary>
/// Gets or sets the create date time.
/// </summary>
/// <value>The create date time.</value>
public virtual DateTime CreateDateTime
{
    get { return _createDate; }
    set
    {
        _createDate = value;
        SqlHelper.ExecuteNonQuery("update umbracoNode set createDate = @createDate where id = " + this.Id.ToString(),
            SqlHelper.CreateParameter("@createDate", _createDate));
    }
}

/// <summary>
/// Gets the creator
/// </summary>
/// <value>The user.</value>
public BusinessLogic.User User
{
    get
    {
        return BusinessLogic.User.GetUser(_userId);
    }
}

/// <summary>
/// Gets the id.
/// </summary>
/// <value>The id.</value>
public int Id
{
    get { return _id; }
}

/// <summary>
/// Get the newParent id of the node
/// </summary>
public virtual int ParentId
{
    get { return _parentid; }
    internal set { _parentid = value; }
}

/// <summary>
/// Given the hierarchical tree structure a CMSNode has only one newParent but can have many children
/// </summary>
/// <value>The newParent.</value>
public CMSNode Parent
{
    get
    {
        // Level-1 nodes are roots; they have no parent node to construct.
        if (Level == 1) throw new ArgumentException("No newParent node");
        return new CMSNode(_parentid);
    }
    set
    {
        _parentid = value.Id;
        SqlHelper.ExecuteNonQuery("update umbracoNode set parentId = " + value.Id.ToString() + " where id = " + this.Id.ToString());
        if (Entity != null)
            Entity.ParentId = value.Id;
    }
}

/// <summary>
/// An comma separated string consisting of integer node id's
/// that indicates the path from the topmost node to the given node
/// </summary>
/// <value>The path.</value>
public virtual string Path
{
    get { return _path; }
    set
    {
        _path = value;
        // NOTE(review): _path is concatenated into the SQL text; paths are internally generated
        // ("-1,123,456"-style), but parameterizing would be safer — confirm before changing.
        SqlHelper.ExecuteNonQuery("update umbracoNode set path = '" + _path + "' where id = " + this.Id.ToString());
        if (Entity != null)
            Entity.Path = value;
    }
}

/// <summary>
/// Returns an integer value that indicates in which level of the
/// tree structure the given node is
/// </summary>
/// <value>The level.</value>
public virtual int Level
{
    get { return _level; }
    set
    {
        _level = value;
        SqlHelper.ExecuteNonQuery("update umbracoNode set level = " + _level.ToString() + " where id = " + this.Id.ToString());
        if (Entity != null)
            Entity.Level = value;
    }
}

/// <summary>
/// All CMSNodes has an objecttype ie. Webpage, StyleSheet etc., used to distinguish between the different
/// object types for for fast loading children to the tree.
/// </summary>
/// <value>The type of the node object.</value>
public Guid nodeObjectType
{
    get { return _nodeObjectType; }
}

/// <summary>
/// Besides the hierarchy it's possible to relate one CMSNode to another, use this for alternative
/// non-strict hierarchy
/// </summary>
/// <value>The relations.</value>
public relation.Relation[] Relations
{
    get { return relation.Relation.GetRelations(this.Id); }
}

/// <summary>
/// Returns all tasks associated with this node
/// </summary>
public Tasks Tasks
{
    get { return Task.GetTasks(this.Id); }
}

// Number of direct children of any object type (unlike Children, which filters by nodeObjectType).
public virtual int ChildCount
{
    get
    {
        return SqlHelper.ExecuteScalar<int>("SELECT COUNT(*) FROM umbracoNode where ParentID = @parentId",
            SqlHelper.CreateParameter("@parentId", this.Id));
    }
}

/// <summary>
/// The basic recursive tree pattern
/// </summary>
/// <value>The children.</value>
public virtual BusinessLogic.console.IconI[] Children
{
    get
    {
        // Only children sharing this node's nodeObjectType are returned, in sortOrder.
        System.Collections.ArrayList tmp = new System.Collections.ArrayList();
        using (IRecordsReader dr = SqlHelper.ExecuteReader("SELECT id, createDate, trashed, parentId, nodeObjectType, nodeUser, level, path, sortOrder, uniqueID, text FROM umbracoNode WHERE ParentID = @ParentID AND nodeObjectType = @type order by sortOrder",
            SqlHelper.CreateParameter("@type", this.nodeObjectType),
            // NOTE(review): parameter name lacks the '@' prefix used everywhere else — presumably
            // the helper tolerates this; confirm.
            SqlHelper.CreateParameter("ParentID", this.Id)))
        {
            while (dr.Read())
            {
                tmp.Add(new CMSNode(dr));
            }
        }
        CMSNode[] retval = new CMSNode[tmp.Count];
        for (int i = 0; i < tmp.Count; i++)
        {
            retval[i] = (CMSNode)tmp[i];
        }
        return retval;
    }
}

/// <summary>
/// Retrieve all CMSNodes in the umbraco installation
/// Use with care.
/// </summary>
/// <value>The children of all object types.</value>
public BusinessLogic.console.IconI[] ChildrenOfAllObjectTypes
{
    get
    {
        // Unlike Children, no nodeObjectType filter — every direct child is returned,
        // and each one triggers its own database load via the CMSNode(int) constructor.
        System.Collections.ArrayList tmp = new System.Collections.ArrayList();
        IRecordsReader dr = SqlHelper.ExecuteReader("select id from umbracoNode where ParentID = " + this.Id + " order by sortOrder");
        while (dr.Read())
            tmp.Add(dr.GetInt("Id"));
        dr.Close();
        CMSNode[] retval = new CMSNode[tmp.Count];
        for (int i = 0; i < tmp.Count; i++)
            retval[i] = new CMSNode((int)tmp[i]);
        return retval;
    }
}

#region IconI members

// Unique identifier of the given node
/// <summary>
/// Unique identifier of the CMSNode, used when locating data.
/// </summary>
public Guid UniqueId
{
    get { return _uniqueID; }
}

/// <summary>
/// Human readable name/label
/// </summary>
public virtual string Text
{
    get { return _text; }
    set
    {
        // The cached value keeps the raw input, but the persisted value is trimmed.
        _text = value;
        SqlHelper.ExecuteNonQuery("UPDATE umbracoNode SET text = @text WHERE id = @id",
            SqlHelper.CreateParameter("@text", value.Trim()),
            SqlHelper.CreateParameter("@id", this.Id));
        if (Entity != null)
            Entity.Name = value;
    }
}

/// <summary>
/// The menu items used in the tree view
/// </summary>
[Obsolete("this is not used anywhere")]
public virtual BusinessLogic.console.MenuItemI[] MenuItems
{
    get { return new BusinessLogic.console.MenuItemI[0]; }
}

/// <summary>
/// Not implemented, always returns "about:blank"
/// </summary>
public virtual string DefaultEditorURL
{
    get { return "about:blank"; }
}

/// <summary>
/// The icon in the tree
/// </summary>
public virtual string Image
{
    get { return m_image; }
    set { m_image = value; }
}

/// <summary>
/// The "open/active" icon in the tree
/// </summary>
public virtual string OpenImage
{
    get { return ""; }
}

#endregion

#endregion

#region Protected methods

/// <summary>
/// This allows inheritors to set the underlying text property without persisting the change to the database.
/// </summary>
/// <param name="txt"></param>
protected void SetText(string txt)
{
    _text = txt;
    if (Entity != null)
        Entity.Name = txt;
}

/// <summary>
/// This is purely for a hackity hack hack hack in order to make the new Document(id, version) constructor work because
/// the Version property needs to be set on the object before setupNode is called, otherwise it never works!
/// </summary>
/// <param name="ctorArgs"></param>
internal virtual void PreSetupNode(params object[] ctorArgs)
{
    //if people want to override then awesome but then we call setupNode so they need to ensure
    // to call base.PreSetupNode
    setupNode();
}

/// <summary>
/// Sets up the internal data of the CMSNode, used by the various constructors
/// </summary>
/// <exception cref="ArgumentException">Thrown when no umbracoNode row exists for this Id.</exception>
protected virtual void setupNode()
{
    // SqlSingle is the single-row query string declared elsewhere in this class.
    using (IRecordsReader dr = SqlHelper.ExecuteReader(SqlSingle,
        SqlHelper.CreateParameter("@id", this.Id)))
    {
        if (dr.Read())
        {
            PopulateCMSNodeFromReader(dr);
        }
        else
        {
            throw new ArgumentException(string.Format("No node exists with id '{0}'", Id));
        }
    }
}

/// <summary>
/// Sets up the node for the content tree, this makes no database calls, just sets the underlying properties
/// </summary>
/// <param name="uniqueID">The unique ID.</param>
/// <param name="nodeObjectType">Type of the node object.</param>
/// <param name="leve">The level. (Parameter name is a historical typo; kept for named-argument compatibility.)</param>
/// <param name="parentId">The newParent id.</param>
/// <param name="userId">The user id.</param>
/// <param name="path">The path.</param>
/// <param name="text">The text.</param>
/// <param name="createDate">The create date.</param>
/// <param name="hasChildren">if set to <c>true</c> [has children].</param>
protected void SetupNodeForTree(Guid uniqueID, Guid nodeObjectType, int leve, int parentId, int userId, string path,
    string text, DateTime createDate, bool hasChildren)
{
    _uniqueID = uniqueID;
    _nodeObjectType = nodeObjectType;
    _level = leve;
    _parentid = parentId;
    _userId = userId;
    _path = path;
    _text = text;
    _createDate = createDate;
    HasChildren = hasChildren;
}
/// <summary>
/// Updates the temp path for the content tree.
/// </summary>
/// <param name="Path">The path.</param>
protected void UpdateTempPathForTree(string Path)
{
    // In-memory only: unlike the Path property setter, this does not persist to the database.
    this._path = Path;
}

/// <summary>
/// Loads the stored preview xml for the given version of this node and imports it into
/// the supplied document's context.
/// </summary>
/// <param name="xd">Target document whose context the node is imported into.</param>
/// <param name="version">Version identifier of the preview xml row.</param>
protected virtual XmlNode GetPreviewXml(XmlDocument xd, Guid version)
{
    XmlDocument xmlDoc = new XmlDocument();
    using (XmlReader xmlRdr = SqlHelper.ExecuteXmlReader(
        "select xml from cmsPreviewXml where nodeID = @nodeId and versionId = @versionId",
        SqlHelper.CreateParameter("@nodeId", Id),
        SqlHelper.CreateParameter("@versionId", version)))
    {
        xmlDoc.Load(xmlRdr);
    }
    return xd.ImportNode(xmlDoc.FirstChild, true);
}

// True when a cmsPreviewXml row exists for this node and the given version.
protected internal virtual bool PreviewExists(Guid versionId)
{
    return (SqlHelper.ExecuteScalar<int>("SELECT COUNT(nodeId) FROM cmsPreviewXml WHERE nodeId=@nodeId and versionId = @versionId",
        SqlHelper.CreateParameter("@nodeId", Id),
        SqlHelper.CreateParameter("@versionId", versionId)) != 0);
}

/// <summary>
/// This needs to be synchronized since we are doing multiple sql operations in one method
/// (the exists-check and the subsequent UPDATE/INSERT would otherwise race).
/// </summary>
/// <param name="x"></param>
/// <param name="versionId"></param>
[MethodImpl(MethodImplOptions.Synchronized)]
protected void SavePreviewXml(XmlNode x, Guid versionId)
{
    // Upsert: choose UPDATE or INSERT depending on whether a row already exists.
    string sql = PreviewExists(versionId)
        ? "UPDATE cmsPreviewXml SET xml = @xml, timestamp = @timestamp WHERE nodeId=@nodeId AND versionId = @versionId"
        : "INSERT INTO cmsPreviewXml(nodeId, versionId, timestamp, xml) VALUES (@nodeId, @versionId, @timestamp, @xml)";
    SqlHelper.ExecuteNonQuery(sql,
        SqlHelper.CreateParameter("@nodeId", Id),
        SqlHelper.CreateParameter("@versionId", versionId),
        SqlHelper.CreateParameter("@timestamp", DateTime.Now),
        SqlHelper.CreateParameter("@xml", x.OuterXml));
}

/// <summary>
/// Copies one umbracoNode data-reader row into this instance's backing fields.
/// </summary>
/// <param name="dr">An open reader positioned on the row to read.</param>
protected void PopulateCMSNodeFromReader(IRecordsReader dr)
{
    // testing purposes only > original umbraco data hasn't any unique values ;)
    // And we need to have a newParent in order to create a new node ..
    // Should automatically add an unique value if no exists (or throw a decent exception)
    if (dr.IsNull("uniqueID")) _uniqueID = Guid.NewGuid();
    else _uniqueID = dr.GetGuid("uniqueID");
    _nodeObjectType = dr.GetGuid("nodeObjectType");
    _level = dr.GetShort("level");
    _path = dr.GetString("path");
    _parentid = dr.GetInt("parentId");
    _text = dr.GetString("text");
    _sortOrder = dr.GetInt("sortOrder");
    _userId = dr.GetInt("nodeUser");
    _createDate = dr.GetDateTime("createDate");
    _isTrashed = dr.GetBoolean("trashed");
}

/// <summary>
/// Copies the state of a new-API entity into this legacy instance and retains the entity
/// reference so property setters can mirror changes back onto it.
/// </summary>
internal protected void PopulateCMSNodeFromUmbracoEntity(IUmbracoEntity content, Guid objectType)
{
    _uniqueID = content.Key;
    _nodeObjectType = objectType;
    _level = content.Level;
    _path = content.Path;
    _parentid = content.ParentId;
    _text = content.Name;
    _sortOrder = content.SortOrder;
    _userId = content.CreatorId;
    _createDate = content.CreateDate;
    _isTrashed = content.Trashed;
    Entity = content;
}

/// <summary>
/// Partial populate from an aggregate root: only key, object type and create date are copied
/// (IAggregateRoot does not expose the tree-related members).
/// </summary>
internal protected void PopulateCMSNodeFromUmbracoEntity(IAggregateRoot content, Guid objectType)
{
    _uniqueID = content.Key;
    _nodeObjectType = objectType;
    _createDate = content.CreateDate;
}

#endregion

#region Private Methods

/// <summary>
/// Writes this node's attributes (and, when Deep, its children's xml) onto the given element.
/// Shared by ToXml/ToPreviewXml.
/// </summary>
private void XmlPopulate(XmlDocument xd, XmlNode x, bool Deep)
{
    // attributes
    x.Attributes.Append(xmlHelper.addAttribute(xd, "id", this.Id.ToString()));
    x.Attributes.Append(xmlHelper.addAttribute(xd, "key", this.UniqueId.ToString()));
    // Level-1 nodes are roots and get the sentinel parentID of -1 (Parent would throw for them).
    if (this.Level > 1)
        x.Attributes.Append(xmlHelper.addAttribute(xd, "parentID", this.Parent.Id.ToString()));
    else
        x.Attributes.Append(xmlHelper.addAttribute(xd, "parentID", "-1"));
    x.Attributes.Append(xmlHelper.addAttribute(xd, "level", this.Level.ToString()));
    x.Attributes.Append(xmlHelper.addAttribute(xd, "writerID", this.User.Id.ToString()));
    x.Attributes.Append(xmlHelper.addAttribute(xd, "sortOrder", this.sortOrder.ToString()));
    // "s" = sortable ISO 8601 date format.
    x.Attributes.Append(xmlHelper.addAttribute(xd, "createDate", this.CreateDateTime.ToString("s")));
    x.Attributes.Append(xmlHelper.addAttribute(xd, "nodeName", this.Text));
    x.Attributes.Append(xmlHelper.addAttribute(xd, "path", this.Path));
    if (Deep)
    {
        //store children array here because iterating over an Array property object is very inneficient.
        var children = this.Children;
        foreach (Content c in children)
            x.AppendChild(c.ToXml(xd, true));
    }
}

#endregion

#region Events

/// <summary>
/// Calls the subscribers of a cancelable event handler,
/// stopping at the event handler which cancels the event (if any).
/// </summary>
/// <typeparam name="T">Type of the event arguments.</typeparam>
/// <param name="cancelableEvent">The event to fire.</param>
/// <param name="sender">Sender of the event.</param>
/// <param name="eventArgs">Event arguments.</param>
protected virtual void FireCancelableEvent<T>(EventHandler<T> cancelableEvent, object sender, T eventArgs) where T : CancelEventArgs
{
    if (cancelableEvent != null)
    {
        // Invoke subscribers one at a time so a Cancel set by one stops the rest.
        foreach (Delegate invocation in cancelableEvent.GetInvocationList())
        {
            invocation.DynamicInvoke(sender, eventArgs);
            if (eventArgs.Cancel)
                break;
        }
    }
}

/// <summary>
/// Occurs before a node is saved.
/// </summary>
public static event EventHandler<SaveEventArgs> BeforeSave;

/// <summary>
/// Raises the <see cref="E:BeforeSave"/> event.
/// </summary>
/// <param name="e">The <see cref="System.EventArgs"/> instance containing the event data.</param>
protected virtual void FireBeforeSave(SaveEventArgs e)
{
    FireCancelableEvent(BeforeSave, this, e);
}

/// <summary>
/// Occurs after a node is saved.
/// </summary>
public static event EventHandler<SaveEventArgs> AfterSave;

/// <summary>
/// Raises the <see cref="E:AfterSave"/> event.
/// </summary>
/// <param name="e">The <see cref="System.EventArgs"/> instance containing the event data.</param>
protected virtual void FireAfterSave(SaveEventArgs e)
{
    if (AfterSave != null)
        AfterSave(this, e);
}

/// <summary>
/// Occurs after a new node is created.
/// </summary>
public static event EventHandler<NewEventArgs> AfterNew;

/// <summary>
/// Raises the <see cref="E:AfterNew"/> event.
/// </summary>
/// <param name="e">The <see cref="System.EventArgs"/> instance containing the event data.</param>
protected virtual void FireAfterNew(NewEventArgs e)
{
    if (AfterNew != null)
        AfterNew(this, e);
}

/// <summary>
/// Occurs before a node is deleted.
/// </summary>
public static event EventHandler<DeleteEventArgs> BeforeDelete;

/// <summary>
/// Raises the <see cref="E:BeforeDelete"/> event.
/// </summary>
/// <param name="e">The <see cref="System.EventArgs"/> instance containing the event data.</param>
protected virtual void FireBeforeDelete(DeleteEventArgs e)
{
    FireCancelableEvent(BeforeDelete, this, e);
}

/// <summary>
/// Occurs after a node is deleted.
/// </summary>
public static event EventHandler<DeleteEventArgs> AfterDelete;

/// <summary>
/// Raises the <see cref="E:AfterDelete"/> event.
/// </summary>
/// <param name="e">The <see cref="System.EventArgs"/> instance containing the event data.</param>
protected virtual void FireAfterDelete(DeleteEventArgs e)
{
    if (AfterDelete != null)
        AfterDelete(this, e);
}

/// <summary>
/// Occurs before a node is moved.
/// </summary>
public static event EventHandler<MoveEventArgs> BeforeMove;

/// <summary>
/// Raises the <see cref="E:BeforeMove"/> event.
/// </summary>
/// <param name="e">The <see cref="System.EventArgs"/> instance containing the event data.</param>
protected virtual void FireBeforeMove(MoveEventArgs e)
{
    FireCancelableEvent(BeforeMove, this, e);
}

/// <summary>
/// Occurs after a node is moved.
/// </summary>
public static event EventHandler<MoveEventArgs> AfterMove;

/// <summary>
/// Raises the <see cref="E:AfterMove"/> event.
/// </summary>
/// <param name="e">The <see cref="System.EventArgs"/> instance containing the event data.</param>
protected virtual void FireAfterMove(MoveEventArgs e)
{
    if (AfterMove != null)
        AfterMove(this, e);
}

#endregion
}
}
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE file in the project root for more information.

using System.IO;
using System.Linq;
using System.Globalization;
using System.Collections;
using System.Collections.Generic;
using System.Security.Cryptography;
using System.Runtime.InteropServices;
using System.Text;
using System.Security.Cryptography.Pkcs;
using System.Security.Cryptography.Xml;
using System.Security.Cryptography.X509Certificates;

using Xunit;
using Test.Cryptography;

namespace System.Security.Cryptography.Pkcs.Tests
{
    // Unit tests for CryptographicAttributeObjectCollection: construction,
    // Add/Remove semantics (including OID folding), indexing, enumeration,
    // and CopyTo error behavior.
    public static class CryptographicAttributeObjectCollectionTests
    {
        [Fact]
        public static void Nullary()
        {
            // A default-constructed collection is empty.
            CryptographicAttributeObjectCollection c = new CryptographicAttributeObjectCollection();
            AssertEquals(c, Array.Empty<CryptographicAttributeObject>());
        }

        [Fact]
        public static void Oneary()
        {
            // The single-argument constructor seeds the collection with that attribute.
            CryptographicAttributeObject a0 = s_ca0;
            CryptographicAttributeObjectCollection c = new CryptographicAttributeObjectCollection(a0);
            AssertEquals(c, new CryptographicAttributeObject[] { a0 });
        }

        [Fact]
        public static void Add()
        {
            CryptographicAttributeObject a0 = s_ca0;
            CryptographicAttributeObject a1 = s_ca1;
            CryptographicAttributeObject a2 = s_ca2;

            CryptographicAttributeObjectCollection c = new CryptographicAttributeObjectCollection();
            int index;
            // Attributes with distinct OIDs are appended; Add returns the insertion index.
            index = c.Add(a0);
            Assert.Equal(0, index);
            index = c.Add(a1);
            Assert.Equal(1, index);
            index = c.Add(a2);
            Assert.Equal(2, index);
            AssertEquals(c, new CryptographicAttributeObject[] { a0, a1, a2 });
        }

        [Fact]
        public static void AddFold()
        {
            // Two values sharing the same OID "fold" into a single attribute entry,
            // so both Add calls report index 0.
            AsnEncodedData dd1 = new Pkcs9DocumentDescription("My Description 1");
            AsnEncodedData dd2 = new Pkcs9DocumentDescription("My Description 2");

            CryptographicAttributeObjectCollection c = new CryptographicAttributeObjectCollection();
            int index;
            index = c.Add(dd1);
            Assert.Equal(0, index);
            index = c.Add(dd2);
            Assert.Equal(0, index);

            AsnEncodedDataCollection expectedValues = new AsnEncodedDataCollection();
            expectedValues.Add(dd1);
            expectedValues.Add(dd2);
            CryptographicAttributeObject expected = new CryptographicAttributeObject(dd1.Oid, expectedValues);
            AssertEquals(c, new CryptographicAttributeObject[] { expected });
        }

        [Fact]
        public static void Remove()
        {
            CryptographicAttributeObject a0 = s_ca0;
            CryptographicAttributeObject a1 = s_ca1;
            CryptographicAttributeObject a2 = s_ca2;

            CryptographicAttributeObjectCollection c = new CryptographicAttributeObjectCollection();
            int index;
            index = c.Add(a0);
            Assert.Equal(0, index);
            index = c.Add(a1);
            Assert.Equal(1, index);
            index = c.Add(a2);
            Assert.Equal(2, index);

            // Removing the middle item shifts later items down; the rest survive.
            c.Remove(a1);
            AssertEquals(c, new CryptographicAttributeObject[] { a0, a2 });
        }

        [Fact]
        public static void AddNegative()
        {
            // Both Add overloads reject null.
            CryptographicAttributeObjectCollection c = new CryptographicAttributeObjectCollection();
            Assert.Throws<ArgumentNullException>(() => c.Add((CryptographicAttributeObject)null));
            Assert.Throws<ArgumentNullException>(() => c.Add((AsnEncodedData)null));
        }

        [Fact]
        public static void RemoveNegative()
        {
            CryptographicAttributeObjectCollection c = new CryptographicAttributeObjectCollection();
            Assert.Throws<ArgumentNullException>(() => c.Remove(null));
        }

        [Fact]
        public static void RemoveNonExistent()
        {
            CryptographicAttributeObjectCollection c = new CryptographicAttributeObjectCollection();
            CryptographicAttributeObject a0 = s_ca0;
            c.Remove(a0);  // You can "remove" items that aren't in the collection - this is defined as a NOP.
        }

        [Fact]
        public static void IndexOutOfBounds()
        {
            CryptographicAttributeObject a0 = s_ca0;
            CryptographicAttributeObject a1 = s_ca1;
            CryptographicAttributeObject a2 = s_ca2;

            CryptographicAttributeObjectCollection c = new CryptographicAttributeObjectCollection();
            c.Add(a0);
            c.Add(a1);
            c.Add(a2);

            // Indexer must range-check on both ends (count is 3).
            object ignore = null;
            Assert.Throws<ArgumentOutOfRangeException>(() => ignore = c[-1]);
            Assert.Throws<ArgumentOutOfRangeException>(() => ignore = c[3]);
        }

        [Fact]
        public static void CopyExceptions()
        {
            CryptographicAttributeObject a0 = s_ca0;
            CryptographicAttributeObject a1 = s_ca1;
            CryptographicAttributeObject a2 = s_ca2;

            CryptographicAttributeObjectCollection c = new CryptographicAttributeObjectCollection();
            c.Add(a0);
            c.Add(a1);
            c.Add(a2);

            // Strongly-typed CopyTo: null array, bad index, and insufficient space.
            CryptographicAttributeObject[] a = new CryptographicAttributeObject[3];
            Assert.Throws<ArgumentNullException>(() => c.CopyTo(null, 0));
            Assert.Throws<ArgumentOutOfRangeException>(() => c.CopyTo(a, -1));
            Assert.Throws<ArgumentOutOfRangeException>(() => c.CopyTo(a, 3));
            AssertExtensions.Throws<ArgumentException>(null, () => c.CopyTo(a, 1));

            // Explicit ICollection.CopyTo: same checks, plus rank and element-type mismatches.
            ICollection ic = c;
            Assert.Throws<ArgumentNullException>(() => ic.CopyTo(null, 0));
            Assert.Throws<ArgumentOutOfRangeException>(() => ic.CopyTo(a, -1));
            Assert.Throws<ArgumentOutOfRangeException>(() => ic.CopyTo(a, 3));
            AssertExtensions.Throws<ArgumentException>(null, () => ic.CopyTo(a, 1));
            AssertExtensions.Throws<ArgumentException>(null, () => ic.CopyTo(new CryptographicAttributeObject[2, 2], 0));
            Assert.Throws<InvalidCastException>(() => ic.CopyTo(new int[10], 0));

            if (PlatformDetection.IsNonZeroLowerBoundArraySupported)
            {
                // Array has non-zero lower bound
                Array array = Array.CreateInstance(typeof(object), new int[] { 10 }, new int[] { 10 });
                Assert.Throws<IndexOutOfRangeException>(() => ic.CopyTo(array, 0));
            }
        }

        // Validates the collection against an expected item list through every read
        // path: Count, indexer, foreach, both GetEnumerator flavors, and both
        // CopyTo overloads (with an offset, verifying untouched slots stay null).
        private static void AssertEquals(CryptographicAttributeObjectCollection c, IList<CryptographicAttributeObject> expected)
        {
            Assert.Equal(expected.Count, c.Count);

            for (int i = 0; i < c.Count; i++)
            {
                Assert.Equal(expected[i], c[i], s_CryptographicAttributeObjectComparer);
            }

            int index = 0;
            foreach (CryptographicAttributeObject a in c)
            {
                Assert.Equal(expected[index++], a, s_CryptographicAttributeObjectComparer);
            }
            Assert.Equal(c.Count, index);

            ValidateEnumerator(c.GetEnumerator(), expected);
            ValidateEnumerator(((ICollection)c).GetEnumerator(), expected);

            {
                CryptographicAttributeObject[] dumped = new CryptographicAttributeObject[c.Count + 3];
                c.CopyTo(dumped, 2);
                Assert.Null(dumped[0]);
                Assert.Null(dumped[1]);
                Assert.Null(dumped[dumped.Length - 1]);
                for (int i = 0; i < expected.Count; i++)
                {
                    Assert.Equal(expected[i], dumped[i + 2], s_CryptographicAttributeObjectComparer);
                }
            }

            {
                CryptographicAttributeObject[] dumped = new CryptographicAttributeObject[c.Count + 3];
                ((ICollection)c).CopyTo(dumped, 2);
                Assert.Null(dumped[0]);
                Assert.Null(dumped[1]);
                Assert.Null(dumped[dumped.Length - 1]);
                for (int i = 0; i < expected.Count; i++)
                {
                    Assert.Equal(expected[i], dumped[i + 2], s_CryptographicAttributeObjectComparer);
                }
            }
        }

        // Walks the enumerator item-by-item against the expected list and
        // confirms it reports exhaustion exactly at the end.
        private static void ValidateEnumerator(IEnumerator enumerator, IList<CryptographicAttributeObject> expected)
        {
            foreach (CryptographicAttributeObject e in expected)
            {
                Assert.True(enumerator.MoveNext());
                CryptographicAttributeObject actual = (CryptographicAttributeObject)(enumerator.Current);
                Assert.Equal(e, actual, s_CryptographicAttributeObjectComparer);
            }
            Assert.False(enumerator.MoveNext());
        }

        // Deep structural equality: same attribute OID, same value count, and each
        // value matches by OID and raw encoded bytes (in order).
        private sealed class CryptographicEqualityComparer : IEqualityComparer<CryptographicAttributeObject>
        {
            public bool Equals(CryptographicAttributeObject x, CryptographicAttributeObject y)
            {
                if (x.Oid.Value != y.Oid.Value)
                    return false;

                AsnEncodedDataCollection xv = x.Values;
                AsnEncodedDataCollection yv = y.Values;
                if (xv.Count != yv.Count)
                    return false;
                for (int i = 0; i < xv.Count; i++)
                {
                    AsnEncodedData xa = xv[i];
                    AsnEncodedData ya = yv[i];
                    if (xa.Oid.Value != ya.Oid.Value)
                        return false;
                    if (!xa.RawData.SequenceEqual(ya.RawData))
                        return false;
                }
                return true;
            }

            // Constant hash is acceptable: this comparer is only used for equality
            // assertions, never for hashing containers.
            public int GetHashCode(CryptographicAttributeObject obj)
            {
                return 1;
            }
        }

        // Shared fixture attributes: three attributes with distinct OIDs.
        private static readonly CryptographicAttributeObject s_ca0 = new CryptographicAttributeObject(new Oid(Oids.DocumentName), new AsnEncodedDataCollection(new Pkcs9DocumentName("My Name")));
        private static readonly CryptographicAttributeObject s_ca1 = new CryptographicAttributeObject(new Oid(Oids.DocumentDescription), new AsnEncodedDataCollection(new Pkcs9DocumentDescription("My Description")));
        private static readonly CryptographicAttributeObject s_ca2 = new CryptographicAttributeObject(new Oid(Oids.SigningTime), new AsnEncodedDataCollection(new Pkcs9SigningTime(new DateTime(2015, 4, 1, 12, 30, 20))));

        private static readonly CryptographicEqualityComparer s_CryptographicAttributeObjectComparer = new CryptographicEqualityComparer();
    }
}
using System;
using System.Collections.Generic;
using System.Diagnostics;
using System.Linq;
using System.Text;

namespace AmmySidekick
{
    /// <summary>
    /// Incremental (byte-at-a-time) parser for the sidekick wire protocol.
    /// Frame layout: 0xBE 0xEF | messageCount (1 byte) | for each message:
    ///   targetIdLen (int32 via BitConverter) | targetId (UTF-16) |
    ///   messageLen (int32) | payload | Fletcher-16 checksum (uint16) |
    ///   propertiesLength (uint16) | optional property list (UTF-16)
    /// terminated by a 0xFF footer. A complete, checksum-valid frame raises
    /// <see cref="MessageReceived"/> with all decoded messages.
    /// Any invalid length or checksum mismatch drops the frame and
    /// resynchronizes on the next 0xBE header byte.
    /// </summary>
    public class ListenerParser
    {
        /// <summary>Raised when a complete frame has been parsed successfully.</summary>
        public event EventHandler<ListenerParserEventArgs> MessageReceived;

        // Parser states, in the order fields arrive on the wire.
        private enum ParserState { Init, Header, NumberOfMessages, TargetIdLen, TargetId, MessageLen, Message, Checksum, PropertiesLength, PropertyList, Footer };

        private readonly List<byte> _buffer = new List<byte>(); // accumulates bytes of the field currently being read
        private ParserState _parserState = ParserState.Init;
        private int _targetIdLen;          // byte length of the UTF-16 target id
        private string _targetId;
        private int _messageLen;           // byte length of the payload
        private byte[] _message;
        private ushort _expectedChecksum;  // Fletcher-16 computed over the payload as it is read
        private byte _numberOfMessages;    // messages remaining in the current frame
        private List<Message> _messages;   // messages decoded so far for the current frame
        private ushort _propertiesLength;  // byte length of the optional property list

        /// <summary>
        /// Feeds a single byte into the state machine.
        /// </summary>
        /// <param name="b">Next byte from the stream.</param>
        public void Feed(byte b)
        {
            switch (_parserState)
            {
                case ParserState.Init:
                    if (b == 0xbe)
                    {
                        _messages = new List<Message>();
                        _parserState = ParserState.Header;
                    }
                    break;
                case ParserState.Header:
                    if (b == 0xef)
                    {
                        _parserState = ParserState.NumberOfMessages;
                    }
                    break;
                case ParserState.NumberOfMessages:
                    _numberOfMessages = b;
                    _parserState = ParserState.TargetIdLen;
                    break;
                case ParserState.TargetIdLen:
                    _buffer.Add(b);
                    if (_buffer.Count == 4)
                    {
                        _targetIdLen = BitConverter.ToInt32(_buffer.ToArray(), 0);
                        _buffer.Clear();
                        // Sanity-check the length so corrupt input cannot make us buffer forever.
                        if (_targetIdLen <= 0 || _targetIdLen > 10000)
                            _parserState = ParserState.Init;
                        else
                            _parserState = ParserState.TargetId;
                    }
                    break;
                case ParserState.TargetId:
                    _buffer.Add(b);
                    if (_buffer.Count == _targetIdLen)
                    {
                        _targetId = Encoding.Unicode.GetString(_buffer.ToArray(), 0, _buffer.Count);
                        _buffer.Clear();
                        _parserState = ParserState.MessageLen;
                    }
                    break;
                case ParserState.MessageLen:
                    _buffer.Add(b);
                    if (_buffer.Count == 4)
                    {
                        // Buffer holds exactly 4 bytes here, so ToArray() suffices
                        // (the previous Take(4).ToArray() was redundant).
                        _messageLen = BitConverter.ToInt32(_buffer.ToArray(), 0);
                        _buffer.Clear();
                        // Reject non-positive or oversized (> 1 MiB) payloads and resynchronize.
                        // (Removed a dead store that pre-assigned the Message state before this check.)
                        if (_messageLen <= 0 || _messageLen > 1024 * 1024)
                        {
                            _parserState = ParserState.Init;
                        }
                        else
                        {
                            _parserState = ParserState.Message;
                        }
                    }
                    break;
                case ParserState.Message:
                    _buffer.Add(b);
                    if (_buffer.Count == _messageLen)
                    {
                        var bufferArray = _buffer.ToArray();
                        _message = bufferArray;
                        _expectedChecksum = Fletcher16(bufferArray);
                        _buffer.Clear();
                        _parserState = ParserState.Checksum;
                    }
                    break;
                case ParserState.Checksum:
                    _buffer.Add(b);
                    if (_buffer.Count == 2)
                    {
                        var checksum = BitConverter.ToUInt16(_buffer.ToArray(), 0);
                        _buffer.Clear();
                        if (_expectedChecksum == checksum)
                        {
                            _messages.Add(new Message { TargetId = _targetId, Buffer = _message });
                            _parserState = ParserState.PropertiesLength;
                        }
                        else
                            _parserState = ParserState.Init; // corrupt payload: drop the whole frame
                    }
                    break;
                case ParserState.PropertiesLength:
                    _buffer.Add(b);
                    if (_buffer.Count == 2)
                    {
                        _propertiesLength = BitConverter.ToUInt16(_buffer.ToArray(), 0);
                        _buffer.Clear();
                        if (_propertiesLength > 0)
                            _parserState = ParserState.PropertyList;
                        else if (_numberOfMessages > 1)
                        {
                            // No property list for this message, and more messages follow.
                            _numberOfMessages--;
                            _parserState = ParserState.TargetIdLen;
                        }
                        else
                            _parserState = ParserState.Footer;
                    }
                    break;
                case ParserState.PropertyList:
                    _buffer.Add(b);
                    if (_buffer.Count == _propertiesLength)
                    {
                        _messages.Last().PropertyList = Encoding.Unicode.GetString(_buffer.ToArray(), 0, _buffer.Count);
                        _buffer.Clear();
                        if (--_numberOfMessages > 0)
                            _parserState = ParserState.TargetIdLen;
                        else
                            _parserState = ParserState.Footer;
                    }
                    break;
                case ParserState.Footer:
                    if (b == 0xff)
                    {
                        // Copy the delegate before invoking to be safe against
                        // subscribers detaching concurrently.
                        var evt = MessageReceived;
                        if (evt != null)
                            evt(this, new ListenerParserEventArgs { Messages = _messages });
                    }
                    _parserState = ParserState.Init; // frame finished (or footer missing): start over
                    break;
                default:
                    throw new ArgumentOutOfRangeException();
            }
        }

        /// <summary>
        /// Computes the Fletcher-16 checksum of <paramref name="data"/>:
        /// two mod-255 running sums packed as (sum2 &lt;&lt; 8) | sum1.
        /// </summary>
        /// <param name="data">Bytes to checksum.</param>
        /// <returns>The 16-bit Fletcher checksum.</returns>
        public ushort Fletcher16(byte[] data)
        {
            ushort sum1 = 0;
            ushort sum2 = 0;
            for (var index = 0; index < data.Length; ++index)
            {
                sum1 = (ushort)((sum1 + data[index]) % 255);
                sum2 = (ushort)((sum2 + sum1) % 255);
            }
            return (ushort)((sum2 << 8) | sum1);
        }
    }

    /// <summary>A single decoded protocol message.</summary>
    public class Message
    {
        public string TargetId { get; set; }     // UTF-16 identifier of the target
        public byte[] Buffer { get; set; }       // raw payload bytes
        public string PropertyList { get; set; } // optional property list (null when absent)
    }
}
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License. You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

namespace Apache.Ignite.Core.Tests.Client
{
    using System;
    using System.Collections.Generic;
    using System.IO;
    using System.Linq;
    using System.Net;
    using System.Security.Authentication;
    using System.Text.RegularExpressions;
    using Apache.Ignite.Core.Binary;
    using Apache.Ignite.Core.Cache;
    using Apache.Ignite.Core.Client;
    using Apache.Ignite.Core.Client.Cache;
    using Apache.Ignite.Core.Impl.Client;
    using Apache.Ignite.Core.Log;
    using Apache.Ignite.Core.Tests.Client.Cache;
    using NUnit.Framework;

    /// <summary>
    /// Base class for client tests: starts a configurable number of server nodes
    /// and one thin client for the fixture, and clears caches/loggers per test.
    /// </summary>
    public class ClientTestBase
    {
        /** Cache name. */
        protected const string CacheName = "cache";

        /** */
        protected const string RequestNamePrefixCache = "cache.ClientCache";

        /** */
        protected const string RequestNamePrefixStreamer = "streamer.ClientDataStreamer";

        /** Grid count. */
        private readonly int _gridCount = 1;

        /** SSL. */
        private readonly bool _enableSsl;

        /** Partition Awareness. */
        private readonly bool _enablePartitionAwareness;

        /** Enable logging to a list logger for checks and assertions. */
        private readonly bool _enableServerListLogging;

        /** Server list log levels. */
        private readonly LogLevel[] _serverListLoggerLevels;

        /** */
        protected readonly bool _useBinaryArray;

        /// <summary>
        /// Initializes a new instance of the <see cref="ClientTestBase"/> class.
        /// </summary>
        public ClientTestBase()
        {
            // No-op.
        }

        /// <summary>
        /// Initializes a new instance of the <see cref="ClientTestBase"/> class.
        /// </summary>
        public ClientTestBase(
            int gridCount,
            bool enableSsl = false,
            bool enablePartitionAwareness = false,
            bool enableServerListLogging = false,
            LogLevel[] serverListLoggerLevels = null,
            bool useBinaryArray = false)
        {
            _gridCount = gridCount;
            _enableSsl = enableSsl;
            _enablePartitionAwareness = enablePartitionAwareness;
            _enableServerListLogging = enableServerListLogging;

            _serverListLoggerLevels = serverListLoggerLevels ?? new[] { LogLevel.Debug, LogLevel.Warn, LogLevel.Error };

            _useBinaryArray = useBinaryArray;
        }

        /// <summary>
        /// Fixture set up: starts the server grid(s), then connects the thin client.
        /// </summary>
        [TestFixtureSetUp]
        public virtual void FixtureSetUp()
        {
            var cfg = GetIgniteConfiguration();
            Ignition.Start(cfg);

            // Additional nodes get their index as the instance name;
            // the first node keeps the default (null) name.
            for (var i = 1; i < _gridCount; i++)
            {
                cfg = GetIgniteConfiguration();
                cfg.IgniteInstanceName = i.ToString();

                Ignition.Start(cfg);
            }

            Client = GetClient();
        }

        /// <summary>
        /// Fixture tear down.
        /// </summary>
        [TestFixtureTearDown]
        public void FixtureTearDown()
        {
            // Dispose the client first, while the server nodes it is connected to
            // are still alive, so it can close its connections cleanly; only then
            // stop all server nodes. (Previously the order was reversed.)
            Client.Dispose();
            Ignition.StopAll(true);
        }

        /// <summary>
        /// Sets up the test: empties the cache and resets captured logs.
        /// </summary>
        [SetUp]
        public virtual void TestSetUp()
        {
            var cache = GetCache<int>();
            cache.RemoveAll();
            cache.Clear();

            Assert.AreEqual(0, cache.GetSize(CachePeekMode.All));
            Assert.AreEqual(0, GetClientCache<int>().GetSize(CachePeekMode.All));

            ClearLoggers();
        }

        /// <summary>
        /// Gets the client.
        /// </summary>
        public IIgniteClient Client { get; set; }

        /// <summary>
        /// Gets Ignite.
        /// </summary>
        protected static IIgnite GetIgnite(int? idx = null)
        {
            if (idx == null)
            {
                // The first node was started without an instance name.
                return Ignition.GetAll().First(i => i.Name == null);
            }

            return Ignition.GetIgnite(idx.ToString());
        }

        /// <summary>
        /// Gets the cache.
        /// </summary>
        protected static ICache<int, T> GetCache<T>()
        {
            return Ignition.GetAll().First().GetOrCreateCache<int, T>(CacheName);
        }

        /// <summary>
        /// Gets the client cache.
        /// </summary>
        protected ICacheClient<int, T> GetClientCache<T>()
        {
            return GetClientCache<int, T>();
        }

        /// <summary>
        /// Gets the client cache.
        /// </summary>
        protected virtual ICacheClient<TK, TV> GetClientCache<TK, TV>(string cacheName = CacheName)
        {
            return Client.GetCache<TK, TV>(cacheName ?? CacheName);
        }

        /// <summary>
        /// Gets the client.
        /// </summary>
        protected IIgniteClient GetClient()
        {
            return Ignition.StartClient(GetClientConfiguration());
        }

        /// <summary>
        /// Gets the client configuration.
        /// </summary>
        protected virtual IgniteClientConfiguration GetClientConfiguration()
        {
            // SSL-enabled servers listen on a dedicated port (see server-with-ssl.xml).
            var port = _enableSsl ? 11110 : IgniteClientConfiguration.DefaultPort;

            return new IgniteClientConfiguration
            {
                Endpoints = new List<string> {IPAddress.Loopback + ":" + port},
                SocketTimeout = TimeSpan.FromSeconds(15),
                Logger = new ListLogger(new ConsoleLogger {MinLevel = LogLevel.Trace}),
                SslStreamFactory = _enableSsl
                    ? new SslStreamFactory
                    {
                        CertificatePath = Path.Combine("Config", "Client", "thin-client-cert.pfx"),
                        CertificatePassword = "123456",
                        SkipServerCertificateValidation = true,
                        CheckCertificateRevocation = true,
                        SslProtocols = SslProtocols.Tls12
                    }
                    : null,
                EnablePartitionAwareness = _enablePartitionAwareness
            };
        }

        /// <summary>
        /// Gets the Ignite configuration.
        /// </summary>
        protected virtual IgniteConfiguration GetIgniteConfiguration()
        {
            return new IgniteConfiguration(TestUtils.GetTestConfiguration())
            {
                Logger = _enableServerListLogging
                    ? (ILogger) new ListLogger(new TestUtils.TestContextLogger())
                    {
                        EnabledLevels = _serverListLoggerLevels
                    }
                    : new TestUtils.TestContextLogger(),
                SpringConfigUrl = _enableSsl ? Path.Combine("Config", "Client", "server-with-ssl.xml") : null,
                RedirectJavaConsoleOutput = false,
                LifecycleHandlers = _useBinaryArray ? new[] { new SetUseBinaryArray() } : null
            };
        }

        /// <summary>
        /// Converts object to binary form.
        /// </summary>
        private IBinaryObject ToBinary(object o)
        {
            return Client.GetBinary().ToBinary<IBinaryObject>(o);
        }

        /// <summary>
        /// Gets the binary cache.
        /// </summary>
        protected ICacheClient<int, IBinaryObject> GetBinaryCache()
        {
            return Client.GetCache<int, Person>(CacheName).WithKeepBinary<int, IBinaryObject>();
        }

        /// <summary>
        /// Gets the binary key cache.
        /// </summary>
        protected ICacheClient<IBinaryObject, int> GetBinaryKeyCache()
        {
            return Client.GetCache<Person, int>(CacheName).WithKeepBinary<IBinaryObject, int>();
        }

        /// <summary>
        /// Gets the binary key-val cache.
        /// </summary>
        protected ICacheClient<IBinaryObject, IBinaryObject> GetBinaryKeyValCache()
        {
            return Client.GetCache<Person, Person>(CacheName).WithKeepBinary<IBinaryObject, IBinaryObject>();
        }

        /// <summary>
        /// Gets the binary person.
        /// </summary>
        protected IBinaryObject GetBinaryPerson(int id)
        {
            return ToBinary(new Person(id) { DateTime = DateTime.MinValue.ToUniversalTime() });
        }

        /// <summary>
        /// Gets the logs.
        /// </summary>
        protected static List<ListLogger.Entry> GetLogs(IIgniteClient client)
        {
            var igniteClient = (IgniteClient) client;
            var logger = igniteClient.GetConfiguration().Logger;
            var listLogger = (ListLogger) logger;
            return listLogger.Entries;
        }

        /// <summary>
        /// Gets client request names for a given server node.
        /// </summary>
        protected static IEnumerable<string> GetServerRequestNames(int serverIndex = 0, string prefix = null)
        {
            var instanceName = serverIndex == 0 ? null : serverIndex.ToString();
            var grid = Ignition.GetIgnite(instanceName);
            var logger = (ListLogger) grid.Logger;

            return GetServerRequestNames(logger, prefix);
        }

        /// <summary>
        /// Gets client request names from a given logger.
        /// </summary>
        protected static IEnumerable<string> GetServerRequestNames(ListLogger logger, string prefix = null)
        {
            // Full request class name examples:
            // org.apache.ignite.internal.processors.platform.client.binary.ClientBinaryTypeGetRequest
            // org.apache.ignite.internal.processors.platform.client.cache.ClientCacheGetRequest
            var messageRegex = new Regex(
                @"Client request received \[reqId=\d+, addr=/127.0.0.1:\d+, " +
                @"req=org\.apache\.ignite\.internal\.processors\.platform\.client\..*?" +
                prefix +
                @"(\w+)Request@");

            return logger.Entries
                .Select(m => messageRegex.Match(m.Message))
                .Where(m => m.Success)
                .Select(m => m.Groups[1].Value);
        }

        /// <summary>
        /// Gets client request names from all server nodes.
        /// </summary>
        protected static IEnumerable<string> GetAllServerRequestNames(string prefix = null)
        {
            return GetLoggers().SelectMany(l => GetServerRequestNames(l, prefix));
        }

        /// <summary>
        /// Gets loggers from all server nodes.
        /// </summary>
        protected static IEnumerable<ListLogger> GetLoggers()
        {
            // Ordered by name for deterministic iteration; non-ListLogger nodes are skipped.
            return Ignition.GetAll()
                .OrderBy(i => i.Name)
                .Select(i => i.Logger)
                .OfType<ListLogger>();
        }

        /// <summary>
        /// Clears loggers of all server nodes.
        /// </summary>
        protected static void ClearLoggers()
        {
            foreach (var logger in GetLoggers())
            {
                logger.Clear();
            }
        }

        /// <summary>
        /// Asserts the client configs are equal.
        /// </summary>
        public static void AssertClientConfigsAreEqual(CacheClientConfiguration cfg, CacheClientConfiguration cfg2)
        {
            if (cfg2.QueryEntities != null)
            {
                // Remove identical aliases which are added during config roundtrip.
                foreach (var e in cfg2.QueryEntities)
                {
                    e.Aliases = e.Aliases.Where(x => x.Alias != x.FullName).ToArray();
                }
            }

            HashSet<string> ignoredProps = null;

            if (cfg.ExpiryPolicyFactory != null && cfg2.ExpiryPolicyFactory != null)
            {
                // Factories themselves don't compare reflectively; compare their products instead.
                ignoredProps = new HashSet<string> {"ExpiryPolicyFactory"};

                AssertExtensions.ReflectionEqual(cfg.ExpiryPolicyFactory.CreateInstance(),
                    cfg2.ExpiryPolicyFactory.CreateInstance());
            }

            AssertExtensions.ReflectionEqual(cfg, cfg2, ignoredProperties : ignoredProps);
        }
    }
}
/* ==================================================================== */ using System; using System.Drawing; using System.Collections; using System.Collections.Generic; using System.ComponentModel; using System.Windows.Forms; using System.Xml; namespace Oranikle.ReportDesigner { internal enum PropertyTypeEnum { Report, DataSets, ReportItems, Grouping, ChartLegend, CategoryAxis, ValueAxis, ChartTitle, CategoryAxisTitle, ValueAxisTitle, TableGroup, ValueAxis2Title// 20022008 AJM GJL } /// <summary> /// Summary description for PropertyDialog. /// </summary> internal class PropertyDialog : System.Windows.Forms.Form { private DesignXmlDraw _Draw; // design draw private List<XmlNode> _Nodes; // selected nodes private PropertyTypeEnum _Type; private bool _Changed=false; private bool _Delete=false; private XmlNode _TableColumn=null; // when table this is the current table column private XmlNode _TableRow=null; // when table this is the current table row private List<UserControl> _TabPanels = new List<UserControl>(); // list of IProperty controls private System.Windows.Forms.Panel panel1; private Oranikle.Studio.Controls.StyledButton bCancel; private Oranikle.Studio.Controls.StyledButton bOK; private Oranikle.Studio.Controls.StyledButton bApply; private Oranikle.Studio.Controls.CtrlStyledTab tcProps; private Oranikle.Studio.Controls.StyledButton bDelete; /// <summary> /// Required designer variable. 
/// </summary> private System.ComponentModel.Container components = null; internal PropertyDialog(DesignXmlDraw dxDraw, List<XmlNode> sNodes, PropertyTypeEnum type) : this(dxDraw, sNodes, type, null, null) {} internal PropertyDialog(DesignXmlDraw dxDraw, List<XmlNode> sNodes, PropertyTypeEnum type, XmlNode tcNode, XmlNode trNode) { this._Draw = dxDraw; this._Nodes = sNodes; this._Type = type; _TableColumn = tcNode; _TableRow = trNode; // // Required for Windows Form Designer support // InitializeComponent(); // Add the controls for the selected ReportItems switch (_Type) { case PropertyTypeEnum.Report: BuildReportTabs(); break; case PropertyTypeEnum.DataSets: BuildDataSetsTabs(); break; case PropertyTypeEnum.Grouping: BuildGroupingTabs(); break; case PropertyTypeEnum.ChartLegend: BuildChartLegendTabs(); break; case PropertyTypeEnum.CategoryAxis: case PropertyTypeEnum.ValueAxis: BuildChartAxisTabs(type); break; case PropertyTypeEnum.ChartTitle: case PropertyTypeEnum.CategoryAxisTitle: case PropertyTypeEnum.ValueAxisTitle: case PropertyTypeEnum.ValueAxis2Title:// 20022008 AJM GJL BuildTitle(type); break; case PropertyTypeEnum.ReportItems: default: BuildReportItemTabs(); break; } } internal bool Changed { get {return _Changed; } } internal bool Delete { get {return _Delete; } } private void BuildReportTabs() { this.Text = "Report Properties"; ReportCtl rc = new ReportCtl(_Draw); AddTab("Report", rc); ReportParameterCtl pc = new ReportParameterCtl(_Draw); AddTab("Parameters", pc); ReportXmlCtl xc = new ReportXmlCtl(_Draw); AddTab("XML Rendering", xc); BodyCtl bc = new BodyCtl(_Draw); AddTab("Body", bc); CodeCtl cc = new CodeCtl(_Draw); AddTab("Code", cc); ModulesClassesCtl mc = new ModulesClassesCtl(_Draw); AddTab("Modules/Classes", mc); return; } private void BuildDataSetsTabs() { bDelete.Visible = true; this.Text = "DataSet"; XmlNode aNode; if (_Nodes != null && _Nodes.Count > 0) aNode = _Nodes[0]; else aNode = null; DataSetsCtl dsc = new DataSetsCtl(_Draw, aNode); 
AddTab("DataSet", dsc); QueryParametersCtl qp = new QueryParametersCtl(_Draw, dsc.DSV); AddTab("Query Parameters", qp); FiltersCtl fc = new FiltersCtl(_Draw, aNode); AddTab("Filters", fc); DataSetRowsCtl dsrc = new DataSetRowsCtl(_Draw, aNode, dsc.DSV); AddTab("Data", dsrc); return; } private void BuildGroupingTabs() { XmlNode aNode = _Nodes[0]; if (aNode.Name == "DynamicSeries") { this.Text = "Series Grouping"; } else if (aNode.Name == "DynamicCategories") { this.Text = "Category Grouping"; } else { this.Text = "Grouping and Sorting"; } GroupingCtl gc = new GroupingCtl(_Draw, aNode); AddTab("Grouping", gc); SortingCtl sc = new SortingCtl(_Draw, aNode); AddTab("Sorting", sc); // We have to create a grouping here but will need to kill it if no definition follows it XmlNode gNode = _Draw.GetCreateNamedChildNode(aNode, "Grouping"); FiltersCtl fc = new FiltersCtl(_Draw, gNode); AddTab("Filters", fc); return; } private void BuildReportItemTabs() { XmlNode aNode = _Nodes[0]; // Determine if all nodes are the same type string type = aNode.Name; if (type == "CustomReportItem") { // For customReportItems we use the type that is a parameter string t = _Draw.GetElementValue(aNode, "Type", ""); if (t.Length > 0) type = t; } foreach (XmlNode pNode in this._Nodes) { // For customReportItems we use the type that is a parameter string t = pNode.Name; if (t == "CustomReportItem") { t = _Draw.GetElementValue(aNode, "Type", ""); if (t.Length == 0) // Shouldn't happen t = pNode.Name; } if (t != type) type = ""; // Not all nodes have the same type } EnsureStyle(); // Make sure we have Style nodes for all the report items if (_Nodes.Count > 1) this.Text = "Group Selection Properties"; else { string name = _Draw.GetElementAttribute(aNode, "Name", ""); this.Text = string.Format("{0} {1} Properties", type.Replace("fyi:",""), name); } // Create all the tabs if (type == "Textbox") { StyleTextCtl stc = new StyleTextCtl(_Draw, this._Nodes); AddTab("Text", stc); } else if (type == "List") { 
ListCtl lc = new ListCtl(_Draw, this._Nodes); AddTab("List", lc); if (_Nodes.Count == 1) { XmlNode l = _Nodes[0]; FiltersCtl fc = new FiltersCtl(_Draw, l); AddTab("Filters", fc); SortingCtl srtc = new SortingCtl(_Draw, l); AddTab("Sorting", srtc); } } else if (type == "Chart") { ChartCtl cc = new ChartCtl(_Draw, this._Nodes); AddTab("Chart", cc); // 05122007 AJM & GJL Create a new StaticSeriesCtl tab StaticSeriesCtl ssc = new StaticSeriesCtl(_Draw, this._Nodes); if (ssc.ShowMe) { //If the chart has static series, then show the StaticSeriesCtl GJL AddTab("Static Series", ssc); } if (_Nodes.Count == 1) { FiltersCtl fc = new FiltersCtl(_Draw, _Nodes[0]); AddTab("Filters", fc); } } else if (type == "System.Drawing.Image") { ImageCtl imgc = new ImageCtl(_Draw, this._Nodes); AddTab("System.Drawing.Image", imgc); } else if (type == "Table") { XmlNode table = _Nodes[0]; TableCtl tc = new TableCtl(_Draw, this._Nodes); AddTab("Table", tc); FiltersCtl fc = new FiltersCtl(_Draw, table); AddTab("Filters", fc); XmlNode details = _Draw.GetNamedChildNode(table, "Details"); if (details != null) { // if no details then we don't need details sorting GroupingCtl grpc = new GroupingCtl(_Draw, details); AddTab("Grouping", grpc); SortingCtl srtc = new SortingCtl(_Draw, details); AddTab("Sorting", srtc); } if (_TableColumn != null) { TableColumnCtl tcc = new TableColumnCtl(_Draw, _TableColumn); AddTab("Table Column", tcc); } if (_TableRow != null) { TableRowCtl trc = new TableRowCtl(_Draw, _TableRow); AddTab("Table Row", trc); } } else if (type == "fyi:Grid") { GridCtl gc = new GridCtl(_Draw, this._Nodes); AddTab("Grid", gc); } else if (type == "Matrix") { XmlNode matrix = _Nodes[0]; MatrixCtl mc = new MatrixCtl(_Draw, this._Nodes); AddTab("Matrix", mc); FiltersCtl fc = new FiltersCtl(_Draw, matrix); AddTab("Filters", fc); } else if (type == "Subreport" && _Nodes.Count == 1) { XmlNode subreport = _Nodes[0]; SubreportCtl src = new SubreportCtl(_Draw, subreport); AddTab("Subreport", src); } 
else if (aNode.Name == "CustomReportItem") { XmlNode cri = _Nodes[0]; CustomReportItemCtl cric = new CustomReportItemCtl(_Draw, _Nodes); AddTab(type, cric); } // Position tab PositionCtl pc = new PositionCtl(_Draw, this._Nodes); AddTab("Name/Position", pc); // Border tab StyleBorderCtl bc = new StyleBorderCtl(_Draw, null, this._Nodes); AddTab("Border", bc); if (! (type == "Line" || type == "Subreport")) { // Style tab StyleCtl sc = new StyleCtl(_Draw, this._Nodes); AddTab("Style", sc); // Interactivity tab InteractivityCtl ic = new InteractivityCtl(_Draw, this._Nodes); AddTab("Interactivity", ic); } } private void BuildChartAxisTabs(PropertyTypeEnum type) { string propName; if (type == PropertyTypeEnum.CategoryAxis) { this.Text = "Chart Category (X) Axis"; propName = "CategoryAxis"; } else { this.Text = "Chart Value (Y) Axis"; propName = "ValueAxis"; } XmlNode cNode = _Nodes[0]; XmlNode aNode = _Draw.GetCreateNamedChildNode(cNode, propName); XmlNode axNode = _Draw.GetCreateNamedChildNode(aNode, "Axis"); // Now we replace the node array with a new one containing only the legend _Nodes = new List<XmlNode>(); _Nodes.Add(axNode); EnsureStyle(); // Make sure we have Style nodes // Chart Axis ChartAxisCtl cac = new ChartAxisCtl(_Draw, this._Nodes); AddTab("Axis", cac); // Style Text StyleTextCtl stc = new StyleTextCtl(_Draw, this._Nodes); AddTab("Text", stc); // Border tab StyleBorderCtl bc = new StyleBorderCtl(_Draw, null, this._Nodes); AddTab("Border", bc); // Style tab StyleCtl sc = new StyleCtl(_Draw, this._Nodes); AddTab("Style", sc); } private void BuildChartLegendTabs() { this.Text = "Chart Legend Properties"; XmlNode cNode = _Nodes[0]; XmlNode lNode = _Draw.GetCreateNamedChildNode(cNode, "Legend"); // Now we replace the node array with a new one containing only the legend _Nodes = new List<XmlNode>(); _Nodes.Add(lNode); EnsureStyle(); // Make sure we have Style nodes // Chart Legend ChartLegendCtl clc = new ChartLegendCtl(_Draw, this._Nodes); AddTab("Legend", 
clc); // Style Text StyleTextCtl stc = new StyleTextCtl(_Draw, this._Nodes); AddTab("Text", stc); // Border tab StyleBorderCtl bc = new StyleBorderCtl(_Draw, null, this._Nodes); AddTab("Border", bc); // Style tab StyleCtl sc = new StyleCtl(_Draw, this._Nodes); AddTab("Style", sc); } private void BuildTitle(PropertyTypeEnum type) { XmlNode cNode = _Nodes[0]; _Nodes = new List<XmlNode>(); // replace with a new one if (type == PropertyTypeEnum.ChartTitle) { this.Text = "Chart Title"; XmlNode lNode = _Draw.GetCreateNamedChildNode(cNode, "Title"); _Nodes.Add(lNode); // Working on the title } else if (type == PropertyTypeEnum.CategoryAxisTitle) { this.Text = "Category (X) Axis Title"; XmlNode caNode = _Draw.GetCreateNamedChildNode(cNode, "CategoryAxis"); XmlNode aNode = _Draw.GetCreateNamedChildNode(caNode, "Axis"); XmlNode tNode = _Draw.GetCreateNamedChildNode(aNode, "Title"); _Nodes.Add(tNode); // Working on the title } // 20022008 AJM GJL else if (type == PropertyTypeEnum.ValueAxis2Title) { this.Text = "Value (Y) Axis (Right) Title"; XmlNode caNode = _Draw.GetCreateNamedChildNode(cNode, "ValueAxis"); XmlNode aNode = _Draw.GetCreateNamedChildNode(caNode, "Axis"); XmlNode tNode = _Draw.GetCreateNamedChildNode(aNode, "fyi:Title2"); _Nodes.Add(tNode); // Working on the title } else { this.Text = "Value (Y) Axis Title"; XmlNode caNode = _Draw.GetCreateNamedChildNode(cNode, "ValueAxis"); XmlNode aNode = _Draw.GetCreateNamedChildNode(caNode, "Axis"); XmlNode tNode = _Draw.GetCreateNamedChildNode(aNode, "Title"); _Nodes.Add(tNode); // Working on the title } EnsureStyle(); // Make sure we have Style nodes // Style Text StyleTextCtl stc = new StyleTextCtl(_Draw, this._Nodes); AddTab("Text", stc); // Border tab StyleBorderCtl bc = new StyleBorderCtl(_Draw, null, this._Nodes); AddTab("Border", bc); // Style tab StyleCtl sc = new StyleCtl(_Draw, this._Nodes); AddTab("Style", sc); } private void EnsureStyle() { // Make sure we have Style nodes for all the nodes foreach (XmlNode 
pNode in this._Nodes)
    {
        // Created for its side effect only; the returned node is not needed here.
        XmlNode stNode = _Draw.GetCreateNamedChildNode(pNode, "Style");
    }
    return;
}

/// <summary>
/// Creates a tab page hosting the given property-panel control, registers the
/// control for Apply/IsValid processing, and adds the page to the tab control.
/// </summary>
/// <param name="name">Caption of the tab page.</param>
/// <param name="uc">Property-panel control (must implement IProperty) to host.</param>
private void AddTab(string name, UserControl uc)
{
    // Create the hosting tab page.
    TabPage tp = new TabPage();
    tp.Location = new System.Drawing.Point(4, 22);
    tp.Name = name + "1";
    tp.Size = new System.Drawing.Size(552, 284);
    tp.TabIndex = 1;
    tp.Text = name;
    // Track the panel so bApply_Click/IsValid can iterate all pages.
    _TabPanels.Add(uc);
    tp.Controls.Add(uc);
    uc.Dock = System.Windows.Forms.DockStyle.Fill;
    uc.Location = new System.Drawing.Point(0, 0);
    uc.Name = name + "1";
    uc.Size = new System.Drawing.Size(552, 284);
    uc.TabIndex = 0;
    tcProps.Controls.Add(tp);
}

/// <summary>
/// Clean up any resources being used.
/// </summary>
/// <param name="disposing">True when called from Dispose(); false from the finalizer.</param>
protected override void Dispose( bool disposing )
{
    if( disposing )
    {
        if(components != null)
        {
            components.Dispose();
        }
    }
    base.Dispose( disposing );
}

#region Windows Form Designer generated code
/// <summary>
/// Required method for Designer support - do not modify
/// the contents of this method with the code editor.
/// </summary>
private void InitializeComponent()
{
    this.panel1 = new System.Windows.Forms.Panel();
    this.bDelete = new Oranikle.Studio.Controls.StyledButton();
    this.bApply = new Oranikle.Studio.Controls.StyledButton();
    this.bOK = new Oranikle.Studio.Controls.StyledButton();
    this.bCancel = new Oranikle.Studio.Controls.StyledButton();
    this.tcProps = new Oranikle.Studio.Controls.CtrlStyledTab();
    this.panel1.SuspendLayout();
    this.SuspendLayout();
    //
    // panel1
    //
    this.panel1.BackColor = System.Drawing.Color.FromArgb(((int)(((byte)(228)))), ((int)(((byte)(241)))), ((int)(((byte)(249)))));
    this.panel1.CausesValidation = false;
    this.panel1.Controls.Add(this.bDelete);
    this.panel1.Controls.Add(this.bApply);
    this.panel1.Controls.Add(this.bOK);
    this.panel1.Controls.Add(this.bCancel);
    this.panel1.Dock = System.Windows.Forms.DockStyle.Bottom;
    this.panel1.Location = new System.Drawing.Point(0, 331);
    this.panel1.Name = "panel1";
    this.panel1.Size = new System.Drawing.Size(765, 40);
    this.panel1.TabIndex = 1;
    //
    // bDelete
    //
this.bDelete.BackColor = System.Drawing.Color.FromArgb(((int)(((byte)(245)))), ((int)(((byte)(245)))), ((int)(((byte)(245))))); this.bDelete.BackColor2 = System.Drawing.Color.FromArgb(((int)(((byte)(225)))), ((int)(((byte)(225)))), ((int)(((byte)(225))))); this.bDelete.BackFillMode = System.Drawing.Drawing2D.LinearGradientMode.ForwardDiagonal; this.bDelete.BorderColor = System.Drawing.Color.FromArgb(((int)(((byte)(200)))), ((int)(((byte)(200)))), ((int)(((byte)(200))))); this.bDelete.FlatStyle = System.Windows.Forms.FlatStyle.Flat; this.bDelete.Font = new System.Drawing.Font("Arial", 9F); this.bDelete.ForeColor = System.Drawing.Color.FromArgb(((int)(((byte)(90)))), ((int)(((byte)(90)))), ((int)(((byte)(90))))); this.bDelete.ImageAlign = System.Drawing.ContentAlignment.MiddleLeft; this.bDelete.Location = new System.Drawing.Point(8, 8); this.bDelete.Name = "bDelete"; this.bDelete.OverriddenSize = null; this.bDelete.Size = new System.Drawing.Size(75, 21); this.bDelete.TabIndex = 3; this.bDelete.Text = "Delete"; this.bDelete.UseVisualStyleBackColor = true; this.bDelete.Visible = false; this.bDelete.Click += new System.EventHandler(this.bDelete_Click); // // bApply // this.bApply.BackColor = System.Drawing.Color.FromArgb(((int)(((byte)(245)))), ((int)(((byte)(245)))), ((int)(((byte)(245))))); this.bApply.BackColor2 = System.Drawing.Color.FromArgb(((int)(((byte)(225)))), ((int)(((byte)(225)))), ((int)(((byte)(225))))); this.bApply.BackFillMode = System.Drawing.Drawing2D.LinearGradientMode.ForwardDiagonal; this.bApply.BorderColor = System.Drawing.Color.FromArgb(((int)(((byte)(200)))), ((int)(((byte)(200)))), ((int)(((byte)(200))))); this.bApply.FlatStyle = System.Windows.Forms.FlatStyle.Flat; this.bApply.Font = new System.Drawing.Font("Arial", 9F); this.bApply.ForeColor = System.Drawing.Color.FromArgb(((int)(((byte)(90)))), ((int)(((byte)(90)))), ((int)(((byte)(90))))); this.bApply.ImageAlign = System.Drawing.ContentAlignment.MiddleLeft; this.bApply.Location = new 
System.Drawing.Point(678, 8); this.bApply.Name = "bApply"; this.bApply.OverriddenSize = null; this.bApply.Size = new System.Drawing.Size(75, 21); this.bApply.TabIndex = 2; this.bApply.Text = "Apply"; this.bApply.UseVisualStyleBackColor = true; this.bApply.Click += new System.EventHandler(this.bApply_Click); // // bOK // this.bOK.BackColor = System.Drawing.Color.FromArgb(((int)(((byte)(245)))), ((int)(((byte)(245)))), ((int)(((byte)(245))))); this.bOK.BackColor2 = System.Drawing.Color.FromArgb(((int)(((byte)(225)))), ((int)(((byte)(225)))), ((int)(((byte)(225))))); this.bOK.BackFillMode = System.Drawing.Drawing2D.LinearGradientMode.ForwardDiagonal; this.bOK.BorderColor = System.Drawing.Color.FromArgb(((int)(((byte)(200)))), ((int)(((byte)(200)))), ((int)(((byte)(200))))); this.bOK.FlatStyle = System.Windows.Forms.FlatStyle.Flat; this.bOK.Font = new System.Drawing.Font("Arial", 9F); this.bOK.ForeColor = System.Drawing.Color.FromArgb(((int)(((byte)(90)))), ((int)(((byte)(90)))), ((int)(((byte)(90))))); this.bOK.ImageAlign = System.Drawing.ContentAlignment.MiddleLeft; this.bOK.Location = new System.Drawing.Point(518, 8); this.bOK.Name = "bOK"; this.bOK.OverriddenSize = null; this.bOK.Size = new System.Drawing.Size(75, 21); this.bOK.TabIndex = 0; this.bOK.Text = "OK"; this.bOK.UseVisualStyleBackColor = true; this.bOK.Click += new System.EventHandler(this.bOK_Click); // // bCancel // this.bCancel.BackColor = System.Drawing.Color.FromArgb(((int)(((byte)(245)))), ((int)(((byte)(245)))), ((int)(((byte)(245))))); this.bCancel.BackColor2 = System.Drawing.Color.FromArgb(((int)(((byte)(225)))), ((int)(((byte)(225)))), ((int)(((byte)(225))))); this.bCancel.BackFillMode = System.Drawing.Drawing2D.LinearGradientMode.ForwardDiagonal; this.bCancel.BorderColor = System.Drawing.Color.FromArgb(((int)(((byte)(200)))), ((int)(((byte)(200)))), ((int)(((byte)(200))))); this.bCancel.CausesValidation = false; this.bCancel.DialogResult = System.Windows.Forms.DialogResult.Cancel; 
this.bCancel.FlatStyle = System.Windows.Forms.FlatStyle.Flat; this.bCancel.Font = new System.Drawing.Font("Arial", 9F); this.bCancel.ForeColor = System.Drawing.Color.FromArgb(((int)(((byte)(90)))), ((int)(((byte)(90)))), ((int)(((byte)(90))))); this.bCancel.ImageAlign = System.Drawing.ContentAlignment.MiddleLeft; this.bCancel.Location = new System.Drawing.Point(598, 8); this.bCancel.Name = "bCancel"; this.bCancel.OverriddenSize = null; this.bCancel.Size = new System.Drawing.Size(75, 21); this.bCancel.TabIndex = 1; this.bCancel.Text = "Cancel"; this.bCancel.UseVisualStyleBackColor = true; // // tcProps // this.tcProps.BorderColor = System.Drawing.Color.FromArgb(((int)(((byte)(228)))), ((int)(((byte)(241)))), ((int)(((byte)(249))))); this.tcProps.Dock = System.Windows.Forms.DockStyle.Fill; this.tcProps.DontSlantMiddle = false; this.tcProps.LeftSpacing = 0; this.tcProps.Location = new System.Drawing.Point(0, 0); this.tcProps.myBackColor = System.Drawing.Color.Transparent; this.tcProps.Name = "tcProps"; this.tcProps.SelectedIndex = 0; this.tcProps.Size = new System.Drawing.Size(765, 331); this.tcProps.SizeMode = System.Windows.Forms.TabSizeMode.Fixed; this.tcProps.TabFont = new System.Drawing.Font("Tahoma", 9F, System.Drawing.FontStyle.Regular, System.Drawing.GraphicsUnit.Point, ((byte)(0))); this.tcProps.TabIndex = 0; this.tcProps.TabSlant = 2; this.tcProps.TabStop = false; this.tcProps.TabTextColor = System.Drawing.Color.FromArgb(((int)(((byte)(64)))), ((int)(((byte)(64)))), ((int)(((byte)(64))))); this.tcProps.TabTextHAlignment = System.Drawing.StringAlignment.Near; this.tcProps.TabTextVAlignment = System.Drawing.StringAlignment.Center; this.tcProps.TagPageSelectedColor = System.Drawing.Color.White; this.tcProps.TagPageUnselectedColor = System.Drawing.Color.LightGray; // // PropertyDialog // this.AcceptButton = this.bOK; this.AutoScaleBaseSize = new System.Drawing.Size(5, 13); this.CancelButton = this.bCancel; this.ClientSize = new System.Drawing.Size(765, 371); 
this.Controls.Add(this.tcProps);
    this.Controls.Add(this.panel1);
    this.FormBorderStyle = System.Windows.Forms.FormBorderStyle.FixedDialog;
    this.MaximizeBox = false;
    this.MinimizeBox = false;
    this.Name = "PropertyDialog";
    this.ShowInTaskbar = false;
    this.StartPosition = System.Windows.Forms.FormStartPosition.CenterParent;
    this.Text = "Properties";
    this.Closing += new System.ComponentModel.CancelEventHandler(this.PropertyDialog_Closing);
    this.panel1.ResumeLayout(false);
    this.ResumeLayout(false);
}
#endregion

/// <summary>
/// Apply button: validates every tab panel, then applies each panel's
/// changes to the report XML and forces a repaint.
/// </summary>
private void bApply_Click(object sender, System.EventArgs e)
{
    if (!IsValid())
        return;
    this._Changed = true;
    foreach (IProperty ip in _TabPanels)
    {
        ip.Apply();
    }
    this._Draw.Invalidate();    // Force screen to redraw
}

/// <summary>
/// OK button: applies all changes and closes the dialog with DialogResult.OK.
/// </summary>
private void bOK_Click(object sender, System.EventArgs e)
{
    // NOTE(review): bApply_Click validates again, so this IsValid() call is
    // redundant; kept because it is harmless.
    if (!IsValid())
        return;
    bApply_Click(sender, e);    // Apply does all the work
    this.DialogResult = DialogResult.OK;
}

/// <summary>
/// Returns true when every tab panel validates; otherwise selects the
/// first failing tab and returns false.
/// </summary>
private bool IsValid()
{
    int index = 0;
    foreach (IProperty ip in _TabPanels)
    {
        if (!ip.IsValid())
        {
            // Bring the offending tab to the front so the user sees the problem.
            tcProps.SelectedIndex = index;
            return false;
        }
        index++;
    }
    return true;
}

/// <summary>
/// On close: if this dialog was editing a Grouping, remove the Grouping node
/// again when the user never supplied any GroupExpressions (an empty
/// Grouping is not valid RDL).
/// </summary>
private void PropertyDialog_Closing(object sender, System.ComponentModel.CancelEventArgs e)
{
    if (_Type == PropertyTypeEnum.Grouping)
    {
        // Need to check if grouping value is still required
        XmlNode aNode = _Nodes[0];

        // We have to create a grouping here but will need to kill it if no definition follows it
        XmlNode gNode = _Draw.GetNamedChildNode(aNode, "Grouping");
        if (gNode != null &&
            _Draw.GetNamedChildNode(gNode, "GroupExpressions") == null)
        {
            // Not a valid group if no GroupExpressions
            aNode.RemoveChild(gNode);
        }
    }
}

/// <summary>
/// Delete button: confirms with the user, then flags the dataset for
/// deletion and closes the dialog with DialogResult.OK.
/// </summary>
private void bDelete_Click(object sender, System.EventArgs e)
{
    if (MessageBox.Show(this, "Are you sure you want to delete this dataset?", "DataSet", MessageBoxButtons.YesNo) == DialogResult.Yes)
    {
        _Delete = true;
        this.DialogResult = DialogResult.OK;
    }
}
}

/// <summary>
/// Contract implemented by every property-panel tab hosted by PropertyDialog.
/// </summary>
internal interface IProperty
{
    void Apply();       // Write the panel's edits back to the report XML.
    bool IsValid();     // True when the panel's current input is acceptable.
}
}
namespace System.Windows.Forms
{
    using System;
    using System.ComponentModel;
    using System.Reactive;
    using System.Reactive.Linq;

    /// <summary>
    /// IObservable adapters for the events declared on <see cref="WebBrowserBase"/>.
    /// Each method wraps exactly one event with Observable.FromEventPattern, so the
    /// handler is attached on subscription and detached when the subscription is disposed.
    /// </summary>
    [EditorBrowsable(EditorBrowsableState.Never)]
    public static class ObservableWebBrowserBaseEvents
    {
        /// <summary>Wraps the BackgroundImageLayoutChanged event as an observable sequence.</summary>
        /// <param name="instance">The control whose event is observed.</param>
        /// <returns>A sequence yielding one value per BackgroundImageLayoutChanged event.</returns>
        public static IObservable<EventPattern<EventArgs>> BackgroundImageLayoutChangedObservable(this WebBrowserBase instance)
        {
            return Observable.FromEventPattern<EventHandler, EventArgs>(
                h => instance.BackgroundImageLayoutChanged += h,
                h => instance.BackgroundImageLayoutChanged -= h);
        }

        /// <summary>Wraps the Enter event as an observable sequence.</summary>
        /// <param name="instance">The control whose event is observed.</param>
        /// <returns>A sequence yielding one value per Enter event.</returns>
        public static IObservable<EventPattern<EventArgs>> EnterObservable(this WebBrowserBase instance)
        {
            return Observable.FromEventPattern<EventHandler, EventArgs>(
                h => instance.Enter += h,
                h => instance.Enter -= h);
        }

        /// <summary>Wraps the Leave event as an observable sequence.</summary>
        /// <param name="instance">The control whose event is observed.</param>
        /// <returns>A sequence yielding one value per Leave event.</returns>
        public static IObservable<EventPattern<EventArgs>> LeaveObservable(this WebBrowserBase instance)
        {
            return Observable.FromEventPattern<EventHandler, EventArgs>(
                h => instance.Leave += h,
                h => instance.Leave -= h);
        }

        /// <summary>Wraps the MouseCaptureChanged event as an observable sequence.</summary>
        /// <param name="instance">The control whose event is observed.</param>
        /// <returns>A sequence yielding one value per MouseCaptureChanged event.</returns>
        public static IObservable<EventPattern<EventArgs>> MouseCaptureChangedObservable(this WebBrowserBase instance)
        {
            return Observable.FromEventPattern<EventHandler, EventArgs>(
                h => instance.MouseCaptureChanged += h,
                h => instance.MouseCaptureChanged -= h);
        }

        /// <summary>Wraps the MouseClick event as an observable sequence.</summary>
        /// <param name="instance">The control whose event is observed.</param>
        /// <returns>A sequence yielding one value per MouseClick event.</returns>
        public static IObservable<EventPattern<MouseEventArgs>> MouseClickObservable(this WebBrowserBase instance)
        {
            return Observable.FromEventPattern<MouseEventHandler, MouseEventArgs>(
                h => instance.MouseClick += h,
                h => instance.MouseClick -= h);
        }

        /// <summary>Wraps the MouseDoubleClick event as an observable sequence.</summary>
        /// <param name="instance">The control whose event is observed.</param>
        /// <returns>A sequence yielding one value per MouseDoubleClick event.</returns>
        public static IObservable<EventPattern<MouseEventArgs>> MouseDoubleClickObservable(this WebBrowserBase instance)
        {
            return Observable.FromEventPattern<MouseEventHandler, MouseEventArgs>(
                h => instance.MouseDoubleClick += h,
                h => instance.MouseDoubleClick -= h);
        }

        /// <summary>Wraps the BackColorChanged event as an observable sequence.</summary>
        /// <param name="instance">The control whose event is observed.</param>
        /// <returns>A sequence yielding one value per BackColorChanged event.</returns>
        public static IObservable<EventPattern<EventArgs>> BackColorChangedObservable(this WebBrowserBase instance)
        {
            return Observable.FromEventPattern<EventHandler, EventArgs>(
                h => instance.BackColorChanged += h,
                h => instance.BackColorChanged -= h);
        }

        /// <summary>Wraps the BackgroundImageChanged event as an observable sequence.</summary>
        /// <param name="instance">The control whose event is observed.</param>
        /// <returns>A sequence yielding one value per BackgroundImageChanged event.</returns>
        public static IObservable<EventPattern<EventArgs>> BackgroundImageChangedObservable(this WebBrowserBase instance)
        {
            return Observable.FromEventPattern<EventHandler, EventArgs>(
                h => instance.BackgroundImageChanged += h,
                h => instance.BackgroundImageChanged -= h);
        }

        /// <summary>Wraps the BindingContextChanged event as an observable sequence.</summary>
        /// <param name="instance">The control whose event is observed.</param>
        /// <returns>A sequence yielding one value per BindingContextChanged event.</returns>
        public static IObservable<EventPattern<EventArgs>> BindingContextChangedObservable(this WebBrowserBase instance)
        {
            return Observable.FromEventPattern<EventHandler, EventArgs>(
                h => instance.BindingContextChanged += h,
                h => instance.BindingContextChanged -= h);
        }

        /// <summary>Wraps the CursorChanged event as an observable sequence.</summary>
        /// <param name="instance">The control whose event is observed.</param>
        /// <returns>A sequence yielding one value per CursorChanged event.</returns>
        public static IObservable<EventPattern<EventArgs>> CursorChangedObservable(this WebBrowserBase instance)
        {
            return Observable.FromEventPattern<EventHandler, EventArgs>(
                h => instance.CursorChanged += h,
                h => instance.CursorChanged -= h);
        }

        /// <summary>Wraps the EnabledChanged event as an observable sequence.</summary>
        /// <param name="instance">The control whose event is observed.</param>
        /// <returns>A sequence yielding one value per EnabledChanged event.</returns>
        public static IObservable<EventPattern<EventArgs>> EnabledChangedObservable(this WebBrowserBase instance)
        {
            return Observable.FromEventPattern<EventHandler, EventArgs>(
                h => instance.EnabledChanged += h,
                h => instance.EnabledChanged -= h);
        }

        /// <summary>Wraps the FontChanged event as an observable sequence.</summary>
        /// <param name="instance">The control whose event is observed.</param>
        /// <returns>A sequence yielding one value per FontChanged event.</returns>
        public static IObservable<EventPattern<EventArgs>> FontChangedObservable(this WebBrowserBase instance)
        {
            return Observable.FromEventPattern<EventHandler, EventArgs>(
                h => instance.FontChanged += h,
                h => instance.FontChanged -= h);
        }

        /// <summary>Wraps the ForeColorChanged event as an observable sequence.</summary>
        /// <param name="instance">The control whose event is observed.</param>
        /// <returns>A sequence yielding one value per ForeColorChanged event.</returns>
        public static IObservable<EventPattern<EventArgs>> ForeColorChangedObservable(this WebBrowserBase instance)
        {
            return Observable.FromEventPattern<EventHandler, EventArgs>(
                h => instance.ForeColorChanged += h,
                h => instance.ForeColorChanged -= h);
        }

        /// <summary>Wraps the RightToLeftChanged event as an observable sequence.</summary>
        /// <param name="instance">The control whose event is observed.</param>
        /// <returns>A sequence yielding one value per RightToLeftChanged event.</returns>
        public static IObservable<EventPattern<EventArgs>> RightToLeftChangedObservable(this WebBrowserBase instance)
        {
            return Observable.FromEventPattern<EventHandler, EventArgs>(
                h => instance.RightToLeftChanged += h,
                h => instance.RightToLeftChanged -= h);
        }

        /// <summary>Wraps the TextChanged event as an observable sequence.</summary>
        /// <param name="instance">The control whose event is observed.</param>
        /// <returns>A sequence yielding one value per TextChanged event.</returns>
        public static IObservable<EventPattern<EventArgs>> TextChangedObservable(this WebBrowserBase instance)
        {
            return Observable.FromEventPattern<EventHandler, EventArgs>(
                h => instance.TextChanged += h,
                h => instance.TextChanged -= h);
        }

        /// <summary>Wraps the Click event as an observable sequence.</summary>
        /// <param name="instance">The control whose event is observed.</param>
        /// <returns>A sequence yielding one value per Click event.</returns>
        public static IObservable<EventPattern<EventArgs>> ClickObservable(this WebBrowserBase instance)
        {
            return Observable.FromEventPattern<EventHandler, EventArgs>(
                h => instance.Click += h,
                h => instance.Click -= h);
        }

        /// <summary>Wraps the DragDrop event as an observable sequence.</summary>
        /// <param name="instance">The control whose event is observed.</param>
        /// <returns>A sequence yielding one value per DragDrop event.</returns>
        public static IObservable<EventPattern<DragEventArgs>> DragDropObservable(this WebBrowserBase instance)
        {
            return Observable.FromEventPattern<DragEventHandler, DragEventArgs>(
                h => instance.DragDrop += h,
                h => instance.DragDrop -= h);
        }

        /// <summary>Wraps the DragEnter event as an observable sequence.</summary>
        /// <param name="instance">The control whose event is observed.</param>
        /// <returns>A sequence yielding one value per DragEnter event.</returns>
        public static IObservable<EventPattern<DragEventArgs>> DragEnterObservable(this WebBrowserBase instance)
        {
            return Observable.FromEventPattern<DragEventHandler, DragEventArgs>(
                h => instance.DragEnter += h,
                h => instance.DragEnter -= h);
        }

        /// <summary>Wraps the DragOver event as an observable sequence.</summary>
        /// <param name="instance">The control whose event is observed.</param>
        /// <returns>A sequence yielding one value per DragOver event.</returns>
        public static IObservable<EventPattern<DragEventArgs>> DragOverObservable(this WebBrowserBase instance)
        {
            return Observable.FromEventPattern<DragEventHandler, DragEventArgs>(
                h => instance.DragOver += h,
                h => instance.DragOver -= h);
        }

        /// <summary>Wraps the DragLeave event as an observable sequence.</summary>
        /// <param name="instance">The control whose event is observed.</param>
        /// <returns>A sequence yielding one value per DragLeave event.</returns>
        public static IObservable<EventPattern<EventArgs>> DragLeaveObservable(this WebBrowserBase instance)
        {
            return Observable.FromEventPattern<EventHandler, EventArgs>(
                h => instance.DragLeave += h,
                h => instance.DragLeave -= h);
        }

        /// <summary>Wraps the GiveFeedback event as an observable sequence.</summary>
        /// <param name="instance">The control whose event is observed.</param>
        /// <returns>A sequence yielding one value per GiveFeedback event.</returns>
        public static IObservable<EventPattern<GiveFeedbackEventArgs>> GiveFeedbackObservable(this WebBrowserBase instance)
        {
            return Observable.FromEventPattern<GiveFeedbackEventHandler, GiveFeedbackEventArgs>(
                h => instance.GiveFeedback += h,
                h => instance.GiveFeedback -= h);
        }

        /// <summary>Wraps the HelpRequested event as an observable sequence.</summary>
        /// <param name="instance">The control whose event is observed.</param>
        /// <returns>A sequence yielding one value per HelpRequested event.</returns>
        public static IObservable<EventPattern<HelpEventArgs>> HelpRequestedObservable(this WebBrowserBase instance)
        {
            return Observable.FromEventPattern<HelpEventHandler, HelpEventArgs>(
                h => instance.HelpRequested += h,
                h => instance.HelpRequested -= h);
        }

        /// <summary>Wraps the Paint event as an observable sequence.</summary>
        /// <param name="instance">The control whose event is observed.</param>
        /// <returns>A sequence yielding one value per Paint event.</returns>
        public static IObservable<EventPattern<PaintEventArgs>> PaintObservable(this WebBrowserBase instance)
        {
            return Observable.FromEventPattern<PaintEventHandler, PaintEventArgs>(
                h => instance.Paint += h,
                h => instance.Paint -= h);
        }

        /// <summary>Wraps the QueryContinueDrag event as an observable sequence.</summary>
        /// <param name="instance">The control whose event is observed.</param>
        /// <returns>A sequence yielding one value per QueryContinueDrag event.</returns>
        public static IObservable<EventPattern<QueryContinueDragEventArgs>> QueryContinueDragObservable(this WebBrowserBase instance)
        {
            return Observable.FromEventPattern<QueryContinueDragEventHandler, QueryContinueDragEventArgs>(
                h => instance.QueryContinueDrag += h,
                h => instance.QueryContinueDrag -= h);
        }

        /// <summary>Wraps the QueryAccessibilityHelp event as an observable sequence.</summary>
        /// <param name="instance">The control whose event is observed.</param>
        /// <returns>A sequence yielding one value per QueryAccessibilityHelp event.</returns>
        public static IObservable<EventPattern<QueryAccessibilityHelpEventArgs>> QueryAccessibilityHelpObservable(this WebBrowserBase instance)
        {
            return Observable.FromEventPattern<QueryAccessibilityHelpEventHandler, QueryAccessibilityHelpEventArgs>(
                h => instance.QueryAccessibilityHelp += h,
                h => instance.QueryAccessibilityHelp -= h);
        }

        /// <summary>Wraps the DoubleClick event as an observable sequence.</summary>
        /// <param name="instance">The control whose event is observed.</param>
        /// <returns>A sequence yielding one value per DoubleClick event.</returns>
        public static IObservable<EventPattern<EventArgs>> DoubleClickObservable(this WebBrowserBase instance)
        {
            return Observable.FromEventPattern<EventHandler, EventArgs>(
                h => instance.DoubleClick += h,
                h => instance.DoubleClick -= h);
        }

        /// <summary>Wraps the ImeModeChanged event as an observable sequence.</summary>
        /// <param name="instance">The control whose event is observed.</param>
        /// <returns>A sequence yielding one value per ImeModeChanged event.</returns>
        public static IObservable<EventPattern<EventArgs>> ImeModeChangedObservable(this WebBrowserBase instance)
        {
            return Observable.FromEventPattern<EventHandler, EventArgs>(
                h => instance.ImeModeChanged += h,
                h => instance.ImeModeChanged -= h);
        }

        /// <summary>Wraps the KeyDown event as an observable sequence.</summary>
        /// <param name="instance">The control whose event is observed.</param>
        /// <returns>A sequence yielding one value per KeyDown event.</returns>
        public static IObservable<EventPattern<KeyEventArgs>> KeyDownObservable(this WebBrowserBase instance)
        {
            return Observable.FromEventPattern<KeyEventHandler, KeyEventArgs>(
                h => instance.KeyDown += h,
                h => instance.KeyDown -= h);
        }

        /// <summary>Wraps the KeyPress event as an observable sequence.</summary>
        /// <param name="instance">The control whose event is observed.</param>
        /// <returns>A sequence yielding one value per KeyPress event.</returns>
        public static IObservable<EventPattern<KeyPressEventArgs>> KeyPressObservable(this WebBrowserBase instance)
        {
            return Observable.FromEventPattern<KeyPressEventHandler, KeyPressEventArgs>(
                h => instance.KeyPress += h,
                h => instance.KeyPress -= h);
        }

        /// <summary>Wraps the KeyUp event as an observable sequence.</summary>
        /// <param name="instance">The control whose event is observed.</param>
        /// <returns>A sequence yielding one value per KeyUp event.</returns>
        public static IObservable<EventPattern<KeyEventArgs>> KeyUpObservable(this WebBrowserBase instance)
        {
            return Observable.FromEventPattern<KeyEventHandler, KeyEventArgs>(
                h => instance.KeyUp += h,
                h => instance.KeyUp -= h);
        }

        /// <summary>Wraps the Layout event as an observable sequence.</summary>
        /// <param name="instance">The control whose event is observed.</param>
        /// <returns>A sequence yielding one value per Layout event.</returns>
        public static IObservable<EventPattern<LayoutEventArgs>> LayoutObservable(this WebBrowserBase instance)
        {
            return Observable.FromEventPattern<LayoutEventHandler, LayoutEventArgs>(
                h => instance.Layout += h,
                h => instance.Layout -= h);
        }

        /// <summary>Wraps the MouseDown event as an observable sequence.</summary>
        /// <param name="instance">The control whose event is observed.</param>
        /// <returns>A sequence yielding one value per MouseDown event.</returns>
        public static IObservable<EventPattern<MouseEventArgs>> MouseDownObservable(this WebBrowserBase instance)
        {
            return Observable.FromEventPattern<MouseEventHandler, MouseEventArgs>(
                h => instance.MouseDown += h,
                h => instance.MouseDown -= h);
        }

        /// <summary>Wraps the MouseEnter event as an observable sequence.</summary>
        /// <param name="instance">The control whose event is observed.</param>
        /// <returns>A sequence yielding one value per MouseEnter event.</returns>
        public static IObservable<EventPattern<EventArgs>> MouseEnterObservable(this WebBrowserBase instance)
        {
            return Observable.FromEventPattern<EventHandler, EventArgs>(
                h => instance.MouseEnter += h,
                h => instance.MouseEnter -= h);
        }

        /// <summary>Wraps the MouseLeave event as an observable sequence.</summary>
        /// <param name="instance">The control whose event is observed.</param>
        /// <returns>A sequence yielding one value per MouseLeave event.</returns>
        public static IObservable<EventPattern<EventArgs>> MouseLeaveObservable(this WebBrowserBase instance)
        {
            return Observable.FromEventPattern<EventHandler, EventArgs>(
                h => instance.MouseLeave += h,
                h => instance.MouseLeave -= h);
        }

        /// <summary>Wraps the MouseHover event as an observable sequence.</summary>
        /// <param name="instance">The control whose event is observed.</param>
        /// <returns>A sequence yielding one value per MouseHover event.</returns>
        public static IObservable<EventPattern<EventArgs>> MouseHoverObservable(this WebBrowserBase instance)
        {
            return Observable.FromEventPattern<EventHandler, EventArgs>(
                h => instance.MouseHover += h,
                h => instance.MouseHover -= h);
        }

        /// <summary>Wraps the MouseMove event as an observable sequence.</summary>
        /// <param name="instance">The control whose event is observed.</param>
        /// <returns>A sequence yielding one value per MouseMove event.</returns>
        public static IObservable<EventPattern<MouseEventArgs>> MouseMoveObservable(this WebBrowserBase instance)
        {
            return Observable.FromEventPattern<MouseEventHandler, MouseEventArgs>(
                h => instance.MouseMove += h,
                h => instance.MouseMove -= h);
        }

        /// <summary>Wraps the MouseUp event as an observable sequence.</summary>
        /// <param name="instance">The control whose event is observed.</param>
        /// <returns>A sequence yielding one value per MouseUp event.</returns>
        public static IObservable<EventPattern<MouseEventArgs>> MouseUpObservable(this WebBrowserBase instance)
        {
            return Observable.FromEventPattern<MouseEventHandler, MouseEventArgs>(
                h => instance.MouseUp += h,
                h => instance.MouseUp -= h);
        }

        /// <summary>Wraps the MouseWheel event as an observable sequence.</summary>
        /// <param name="instance">The control whose event is observed.</param>
        /// <returns>A sequence yielding one value per MouseWheel event.</returns>
        public static IObservable<EventPattern<MouseEventArgs>> MouseWheelObservable(this WebBrowserBase instance)
        {
            return Observable.FromEventPattern<MouseEventHandler, MouseEventArgs>(
                h => instance.MouseWheel += h,
                h => instance.MouseWheel -= h);
        }

        /// <summary>Wraps the ChangeUICues event as an observable sequence.</summary>
        /// <param name="instance">The control whose event is observed.</param>
        /// <returns>A sequence yielding one value per ChangeUICues event.</returns>
        public static IObservable<EventPattern<UICuesEventArgs>> ChangeUICuesObservable(this WebBrowserBase instance)
        {
            return Observable.FromEventPattern<UICuesEventHandler, UICuesEventArgs>(
                h => instance.ChangeUICues += h,
                h => instance.ChangeUICues -= h);
        }

        /// <summary>Wraps the StyleChanged event as an observable sequence.</summary>
        /// <param name="instance">The control whose event is observed.</param>
        /// <returns>A sequence yielding one value per StyleChanged event.</returns>
        public static IObservable<EventPattern<EventArgs>> StyleChangedObservable(this WebBrowserBase instance)
        {
            return Observable.FromEventPattern<EventHandler, EventArgs>(
                h => instance.StyleChanged += h,
                h => instance.StyleChanged -= h);
        }
    }
}
using UnityEngine;
#if UNITY_EDITOR
using UnityEditor;
#endif
using System;
using System.Collections;
using System.Collections.Generic;

/*
    The AssetBundle Manager provides a High-Level API for working with AssetBundles.
    The AssetBundle Manager will take care of loading AssetBundles and their associated Asset Dependencies.
        Initialize()
            Initializes the AssetBundle index object. This contains the standard Unity AssetBundleIndex data as well as an index of what assets are in what asset bundles
        LoadAssetAsync()
            Loads a given asset from a given AssetBundle and handles all the dependencies.
        LoadLevelAsync()
            Loads a given scene from a given AssetBundle and handles all the dependencies.
        LoadDependencies()
            Loads all the dependent AssetBundles for a given AssetBundle.
        BaseDownloadingURL
            Sets the base downloading url which is used for automatic downloading dependencies.
        SimulateAssetBundleInEditor
            Sets Simulation Mode in the Editor.
        Variants
            Sets the active variant.
        RemapVariantName()
            Resolves the correct AssetBundle according to the active variant.
*/

namespace UMA.AssetBundles
{
    /// <summary>
    /// After an asset bundle's download or cache-retrieval operation is complete a LoadedAssetBundle object is created for it.
    /// LoadedAssetBundle contains the reference count which can be used to unload dependent assetBundles automatically.
    /// </summary>
    public class LoadedAssetBundle
    {
        // The loaded bundle itself; null when this instance wraps raw string data instead.
        public AssetBundle m_AssetBundle;
        // Number of callers currently holding a reference; bundle is unloaded when this drops to zero.
        public int m_ReferencedCount;

        //to enable a json index we need to have a string/data field here
        public string m_data;

        // Raised after the underlying AssetBundle has been unloaded.
        internal event Action unload;

        // Unloads the compressed bundle data (keeping assets already loaded from it,
        // because Unload(false) is used) and notifies subscribers.
        internal void OnUnload()
        {
            m_AssetBundle.Unload(false);
            if (unload != null)
                unload();
        }

        public LoadedAssetBundle(AssetBundle assetBundle)
        {
            m_AssetBundle = assetBundle;
            m_ReferencedCount = 1;
        }

        // Used when the "bundle" is actually a raw data payload (e.g. a json index).
        public LoadedAssetBundle(string data)
        {
            m_AssetBundle = null;
            m_data = data;
            m_ReferencedCount = 1;
        }
    }

    /// <summary>
    /// Class takes care of loading assetBundle and its dependencies automatically, loading variants automatically.
/// </summary>
public class AssetBundleManager : MonoBehaviour
{
    public enum LogMode { All, JustErrors };
    public enum LogType { Info, Warning, Error };

    static LogMode m_LogMode = LogMode.All;
    // Base URL that bundle names are appended to when downloading.
    static string m_BaseDownloadingURL = "";
    //If we are using Encrypted Bundles DynamicAssetLoader will set the encryption key here.
    static string m_BundleEncryptionKey = "";
    static string[] m_ActiveVariants = { };
    static AssetBundleIndex m_AssetBundleIndex = null;
#if UNITY_EDITOR
    // -1 = not yet resolved, 0 = disabled, 1 = enabled (see SimulateAssetBundleInEditor).
    static int m_SimulateAssetBundleInEditor = -1;
    const string kSimulateAssetBundles = "SimulateAssetBundles";
    static SimpleWebServer webserver;
#endif
    // Bundles whose download/cache retrieval has completed, keyed by (unencrypted) bundle name.
    static Dictionary<string, LoadedAssetBundle> m_LoadedAssetBundles = new Dictionary<string, LoadedAssetBundle>();
    // Download failures keyed by bundle name; messages are prefixed with '-' once reported (see GetLoadedAssetBundle).
    static Dictionary<string, string> m_DownloadingErrors = new Dictionary<string, string>();
    // Names of bundles whose download is currently in flight.
    static List<string> m_DownloadingBundles = new List<string>();
    static List<AssetBundleLoadOperation> m_InProgressOperations = new List<AssetBundleLoadOperation>();
    // Dependency names recorded for each loaded bundle, used for progress and unloading.
    static Dictionary<string, string[]> m_Dependencies = new Dictionary<string, string[]>();

    // When true, forces simulation mode regardless of the other editor settings.
    public static bool SimulateOverride;

    public static LogMode logMode
    {
        get { return m_LogMode; }
        set { m_LogMode = value; }
    }

    /// <summary>
    /// The base downloading url which is used to generate the full
    /// downloading url with the assetBundle names.
    /// </summary>
    public static string BaseDownloadingURL
    {
        get { return m_BaseDownloadingURL; }
        set { m_BaseDownloadingURL = value; }
    }

    /// <summary>
    /// Encryption key for encrypted bundles; set by DynamicAssetLoader. Empty when bundles are not encrypted.
    /// </summary>
    public static string BundleEncryptionKey
    {
        get { return m_BundleEncryptionKey; }
        set { m_BundleEncryptionKey = value; }
    }

    public delegate string OverrideBaseDownloadingURLDelegate(string bundleName);

    /// <summary>
    /// Implements per-bundle base downloading URL override.
    /// The subscribers must return null values for unknown bundle names;
    /// </summary>
    public static event OverrideBaseDownloadingURLDelegate overrideBaseDownloadingURL;

    /// <summary>
    /// Variants which is used to define the active variants.
    /// </summary>
    public static string[] ActiveVariants
    {
        get { return m_ActiveVariants; }
        set { m_ActiveVariants = value; }
    }

    /// <summary>
    /// AssetBundleIndex object which can be used to check the contents of
    /// any asset bundle without having to download it first.
    /// </summary>
    public static AssetBundleIndex AssetBundleIndexObject
    {
        get { return m_AssetBundleIndex; }
        set { m_AssetBundleIndex = value; }
    }

    // Routes errors to Debug.LogError always; info/warnings only when logMode is All.
    private static void Log(LogType logType, string text)
    {
        if (logType == LogType.Error)
            Debug.LogError("[AssetBundleManager] " + text);
        else if (m_LogMode == LogMode.All)
            Debug.Log("[AssetBundleManager] " + text);
    }

#if UNITY_EDITOR
    // Flag to indicate if we want to simulate assetBundles in Editor without building them actually.
    //we dont want an editorPrefs for this now because there is no way of changing it!
    public static bool SimulateAssetBundleInEditor
    {
        get
        {
            if (SimulateOverride)
                return true;
            if (Application.isPlaying == false) // always simulate when out of play mode
                return true;
            if (m_SimulateAssetBundleInEditor == -1)
                m_SimulateAssetBundleInEditor = 0;

            return m_SimulateAssetBundleInEditor != 0;
        }
        set
        {
            int newValue = value ? 1 : 0;
            if (newValue != m_SimulateAssetBundleInEditor)
            {
                m_SimulateAssetBundleInEditor = newValue;
            }
        }
    }
#else
    // Outside the editor simulation is never available.
    public static bool SimulateAssetBundleInEditor
    {
        get { return false; }
    }
#endif

    // Returns the local root from which streaming-asset bundles are served; prefixed with
    // "file://" except on mobile/console platforms where streamingAssetsPath is already a URL.
    private static string GetStreamingAssetsPath()
    {
        if (Application.isEditor)
            return "file://" + System.Environment.CurrentDirectory.Replace("\\", "/"); // Use the build output folder directly.
        else if (Application.isMobilePlatform || Application.isConsolePlatform)
            return Application.streamingAssetsPath;
        else // For standalone player.
            return "file://" + Application.streamingAssetsPath;
    }

    /// <summary>
    /// Sets base downloading URL to a directory relative to the streaming assets directory. Asset bundles are loaded from a local directory.
/// </summary>
public static void SetSourceAssetBundleDirectory(string relativePath)
{
    BaseDownloadingURL = GetStreamingAssetsPath() + relativePath;
}

/// <summary>
/// Sets base downloading URL to a web URL. The directory pointed to by this URL
/// on the web-server should have the same structure as the AssetBundles directory
/// in the demo project root. For example, AssetBundles/iOS/xyz-scene must map to
/// absolutePath/iOS/xyz-scene.
/// If you are using assetBundle encryption this should be absolutePath/Encrypted/iOS/xyz-scene
/// </summary>
/// <param name="absolutePath">Absolute base URL; ignored when empty.</param>
public static void SetSourceAssetBundleURL(string absolutePath)
{
    // When bundles are encrypted they live under an extra "Encrypted/" folder on the server.
    string encryptedSuffix = m_BundleEncryptionKey != "" ? "Encrypted/" : "";
    if (absolutePath != "")
    {
        if (!absolutePath.EndsWith("/"))
            absolutePath += "/";
        Debug.Log("[AssetBundleManager] SetSourceAssetBundleURL to " + absolutePath + encryptedSuffix + Utility.GetPlatformName() + "/");
        BaseDownloadingURL = absolutePath + encryptedSuffix + Utility.GetPlatformName() + "/";
    }
}

/// <summary>
/// Retrieves an asset bundle that has previously been requested via LoadAssetBundle.
/// Returns null if the asset bundle or one of its dependencies have not been downloaded yet.
/// </summary>
/// <param name="assetBundleName">Name of the previously requested bundle.</param>
/// <param name="error">Receives any recorded download error for the bundle or a dependency.</param>
/// <returns>The loaded bundle, or null when it (or a dependency) is missing or errored.</returns>
static public LoadedAssetBundle GetLoadedAssetBundle(string assetBundleName, out string error)
{
    if (m_DownloadingErrors.TryGetValue(assetBundleName, out error))
    {
        // A '-' prefix marks an error that has already been reported once,
        // so the warning (and the editor fallback below) only fires the first time.
        if (!error.StartsWith("-"))
        {
            m_DownloadingErrors[assetBundleName] = "-" + error;
#if UNITY_EDITOR
            // Special handling when the *index* bundle itself failed: fall back to
            // simulation mode and explain the likely cause to the developer.
            if (assetBundleName == Utility.GetPlatformName().ToLower() + "index")
            {
                if (EditorPrefs.GetBool(Application.dataPath + "LocalAssetBundleServerEnabled") == false || SimpleWebServer.serverStarted == false)//when the user restarts Unity this might be true even if the server has not actually been started
                {
                    if (SimulateAssetBundleInEditor)
                    {
                        //we already outputted a message in DynamicAssetloader
                    }
                    else
                    {
                        Debug.LogWarning("AssetBundleManager could not download the AssetBundleIndex from the Remote Server URL you have set in DynamicAssetLoader. Have you set the URL correctly and uploaded your AssetBundles?");
                        error = "AssetBundleManager could not download the AssetBundleIndex from the Remote Server URL you have set in DynamicAssetLoader. Have you set the URL correctly and uploaded your AssetBundles?";
                    }
                }
                else
                {
                    //Otherwise the AssetBundles themselves will not have been built.
                    Debug.LogWarning("Switched to Simulation mode because no AssetBundles were found. Have you build them? (Go to 'Assets/AssetBundles/Build AssetBundles').");
                    error = "Switched to Simulation mode because no AssetBundles were found.Have you build them? (Go to 'Assets/AssetBundles/Build AssetBundles').";
                    //this needs to hide the loading infobox- or something needs too..
                }
                SimulateOverride = true;
            }
            else
#endif
                Debug.LogWarning("Could not return " + assetBundleName + " because of error:" + error);
        }
        return null;
    }

    LoadedAssetBundle bundle = null;
    m_LoadedAssetBundles.TryGetValue(assetBundleName, out bundle);
    if (bundle == null)
        return null;

    // No dependencies are recorded, only the bundle itself is required.
    string[] dependencies = null;
    if (!m_Dependencies.TryGetValue(assetBundleName, out dependencies))
        return bundle;

    // Otherwise Make sure all dependencies are loaded
    foreach (var dependency in dependencies)
    {
        if (m_DownloadingErrors.TryGetValue(dependency, out error))
            return null;

        // Wait all the dependent assetBundles being loaded.
        LoadedAssetBundle dependentBundle = null;
        m_LoadedAssetBundles.TryGetValue(dependency, out dependentBundle);
        if (dependentBundle == null)
            return null;
    }

    return bundle;
}

/// <summary>
/// Returns the download progress of an assetbundle, optionally including any bundles it is dependent on
/// </summary>
static public float GetBundleDownloadProgress(string assetBundleName, bool andDependencies)
{
    float overallProgress = 0;
    string error;
    if (m_DownloadingErrors.TryGetValue(assetBundleName, out error))
    {
        Debug.LogWarning(error);
        return 0;
    }
    if (m_LoadedAssetBundles.ContainsKey(assetBundleName))
    {
        overallProgress = 1f;
    }
    else
    {
        //find out its progress
        foreach (AssetBundleLoadOperation operation in m_InProgressOperations)
        {
            if (operation.GetType() == typeof(AssetBundleDownloadOperation) || operation.GetType().IsSubclassOf(typeof(AssetBundleDownloadOperation)))
            {
                AssetBundleDownloadOperation typedOperation = (AssetBundleDownloadOperation)operation;
                if (typedOperation.assetBundleName == assetBundleName)
                    // Cap at 0.99 until the operation has actually completed processing.
                    overallProgress = typedOperation.downloadProgress == 1f ? 0.99f : typedOperation.downloadProgress;
            }
        }
    }
    //deal with dependencies if necessary
    if (andDependencies)
    {
        string[] dependencies = null;
        m_Dependencies.TryGetValue(assetBundleName, out dependencies);
        if (dependencies != null)
        {
            if (dependencies.Length > 0)
            {
                foreach (string dependency in dependencies)
                {
                    if (m_LoadedAssetBundles.ContainsKey(dependency))
                    {
                        overallProgress += 1;
                    }
                    else //It must be in progress
                    {
                        foreach (AssetBundleLoadOperation operation in m_InProgressOperations)
                        {
                            if (operation.GetType() == typeof(AssetBundleDownloadOperation) || operation.GetType().IsSubclassOf(typeof(AssetBundleDownloadOperation)))
                            {
                                AssetBundleDownloadOperation typedOperation = (AssetBundleDownloadOperation)operation;
                                if (typedOperation.assetBundleName == dependency)
                                    overallProgress += typedOperation.downloadProgress == 1f ? 0.99f : typedOperation.downloadProgress;
                            }
                        }
                    }
                }
                //divide by num dependencies +1
                overallProgress = overallProgress / (dependencies.Length + 1);
            }
        }
    }
    return overallProgress;
}

/// <summary>
/// Returns the current LoadedAssetBundlesDictionary
/// </summary>
static public Dictionary<string, LoadedAssetBundle> GetLoadedAssetBundles()
{
    return m_LoadedAssetBundles;
}

/// <summary>
/// Returns true if certain asset bundle has been downloaded regardless of
/// whether its dependencies have been loaded.
/// </summary>
static public bool IsAssetBundleDownloaded(string assetBundleName)
{
    return m_LoadedAssetBundles.ContainsKey(assetBundleName);
}

/// <summary>
/// Returns true if any asset bundles are still downloading optionally filtered by name.
/// </summary>
static public bool AreBundlesDownloading(string assetBundleName = "")
{
    if (assetBundleName == "")
    {
        return (m_DownloadingBundles.Count > 0 && m_InProgressOperations.Count > 0);
    }
    else
    {
        if (m_DownloadingBundles.Contains(assetBundleName))
        {
            return true;
        }
        else
        {
            // NOTE(review): also treats a bundle as downloading when any in-flight bundle
            // name contains "<assetBundleName>/" - presumably bundles grouped in folders; verify against callers.
            foreach (string key in m_DownloadingBundles)
            {
                if (key.IndexOf(assetBundleName + "/") > -1)
                {
                    return true;
                }
            }
            return false;
        }
    }
}

// Returns true while the given operation is still in the in-progress list.
static public bool IsOperationInProgress(AssetBundleLoadOperation operation)
{
    if (m_InProgressOperations.Contains(operation))
        return true;
    else
        return false;
}

/// <summary>
/// Initializes asset bundle manager and starts download of index asset bundle
/// </summary>
/// <returns>Returns the index asset bundle download operation object.</returns>
// TODO I think that the index should be available if the device is offline.
// Right now I think ABM always tries to download the index and the game will break if it cant.
// What I think should happen is that if the game is offline it should still be able to get a cached index
static public AssetBundleLoadIndexOperation Initialize()
{
    return Initialize(Utility.GetPlatformName(), false, "");
}

static public AssetBundleLoadIndexOperation Initialize(bool useJsonIndex)
{
    return Initialize(Utility.GetPlatformName(), useJsonIndex, "");
}

static public AssetBundleLoadIndexOperation Initialize(bool useJsonIndex, string jsonIndexUrl)
{
    return Initialize(Utility.GetPlatformName(), useJsonIndex, jsonIndexUrl);
}

static public AssetBundleLoadIndexOperation Initialize(string indexAssetBundleName, bool useJsonIndex, string jsonIndexUrl)
{
    if (!SimulateAssetBundleInEditor)//dont show the indicator if we are not using asset bundles - TODO we need a more comprehensive solution for this scenerio
    {
        if (AssetBundleLoadingIndicator.Instance)
            AssetBundleLoadingIndicator.Instance.Show(indexAssetBundleName.ToLower() + "index", "Initializing...", "", "Initialized");
    }
#if UNITY_EDITOR
    Log(LogType.Info, "Simulation Mode: " + (SimulateAssetBundleInEditor ? "Enabled" : "Disabled"));
#endif

    // The manager is a hidden, scene-persistent singleton driven by its Update loop.
    var go = new GameObject("AssetBundleManager", typeof(AssetBundleManager));
    DontDestroyOnLoad(go);

#if UNITY_EDITOR
    // If we're in Editor simulation mode, we don't need the index assetBundle.
    if (SimulateAssetBundleInEditor)
        return null;
#endif
    //as of 05/08/2016 we dont use Unitys AssetBundleManifest at all we just use our AssetBundleIndex
    LoadAssetBundle(indexAssetBundleName.ToLower() + "index", true, useJsonIndex, jsonIndexUrl);
    var operation = new AssetBundleLoadIndexOperation(indexAssetBundleName.ToLower() + "index", indexAssetBundleName + "Index", typeof(AssetBundleIndex), useJsonIndex);
    m_InProgressOperations.Add(operation);
    return operation;
}

// Temporarily work around a il2cpp bug
static protected void LoadAssetBundle(string assetBundleName)
{
    LoadAssetBundle(assetBundleName, false);
}

/// <summary>
/// Starts the download of the asset bundle identified by the given name. Also downloads any asset bundles the given asset bundle is dependent on.
/// </summary>
/// <param name="assetBundleName">The bundle to load- if bundles are encrypted this should be the name of the UNENCRYPTED bundle.</param>
/// <param name="isLoadingAssetBundleIndex">If true does not check for the existance of the assetBundleIndex. This should be false unless you ARE downloading the index</param>
/// <param name="useJsonIndex">if true will attempt to download an asset called [platformname]index.json unless a specific json Url is supplied in the following param</param>
/// <param name="jsonIndexUrl">provides a specific url to download a json index from</param>
public static void LoadAssetBundle(string assetBundleName, bool isLoadingAssetBundleIndex = false, bool useJsonIndex = false, string jsonIndexUrl = "")
{
#if UNITY_EDITOR
    string fromLocalServer = (EditorPrefs.GetBool(Application.dataPath + "LocalAssetBundleServerEnabled") && SimpleWebServer.serverStarted) ? "from LocalServer " : "";
    string encrypted = BundleEncryptionKey != "" ? " (Encrypted)" : "";
    Log(LogType.Info, "Loading Asset Bundle " + fromLocalServer + (isLoadingAssetBundleIndex ? "Index: " : ": ") + assetBundleName + encrypted);

    // If we're in Editor simulation mode, we don't have to really load the assetBundle and its dependencies.
    if (SimulateAssetBundleInEditor)
        return;
#endif
    if (!isLoadingAssetBundleIndex)
    {
        // Everything except the index itself requires the index to already be loaded.
        if (m_AssetBundleIndex == null)
        {
            Debug.LogError("Please initialize AssetBundleIndex by calling AssetBundleManager.Initialize()");
            return;
        }
    }

    // Check if the assetBundle has already been processed.
    bool isAlreadyProcessed = LoadAssetBundleInternal(assetBundleName, isLoadingAssetBundleIndex, useJsonIndex, jsonIndexUrl);

    // Load dependencies.
    if (!isAlreadyProcessed && !isLoadingAssetBundleIndex)
        LoadDependencies(assetBundleName);
}

/// <summary>
/// Returns base downloading URL for the given asset bundle.
/// This URL may be overridden on per-bundle basis via overrideBaseDownloadingURL event.
/// </summary>
protected static string GetAssetBundleBaseDownloadingURL(string bundleName)
{
    if (overrideBaseDownloadingURL != null)
    {
        // First subscriber returning a non-empty URL wins.
        foreach (OverrideBaseDownloadingURLDelegate method in overrideBaseDownloadingURL.GetInvocationList())
        {
            string res = method(bundleName);
            if (!String.IsNullOrEmpty(res))
                return res;
        }
    }
    return m_BaseDownloadingURL;
}

/// <summary>
/// Checks who is responsible for determination of the correct asset bundle variant that should be loaded on this platform.
///
/// On most platforms, this is done by the AssetBundleManager itself. However, on
/// certain platforms (iOS at the moment) it's possible that an external asset bundle
/// variant resolution mechanism is used. In these cases, we use base asset bundle
/// name (without the variant tag) as the bundle identifier. The platform-specific
/// code is responsible for correctly loading the bundle.
/// </summary>
static protected bool UsesExternalBundleVariantResolutionMechanism(string baseAssetBundleName)
{
#if ENABLE_IOS_APP_SLICING
    var url = GetAssetBundleBaseDownloadingURL(baseAssetBundleName);
    if (url.ToLower().StartsWith("res://") ||
        url.ToLower().StartsWith("odr://"))
        return true;
#endif
    return false;
}

/// <summary>
/// Remaps the asset bundle name to the best fitting asset bundle variant.
/// </summary>
static protected string RemapVariantName(string assetBundleName)
{
    string[] bundlesWithVariant = m_AssetBundleIndex.GetAllAssetBundlesWithVariant();

    // Get base bundle name
    string baseName = assetBundleName.Split('.')[0];

    if (UsesExternalBundleVariantResolutionMechanism(baseName))
        return baseName;

    int bestFit = int.MaxValue;
    int bestFitIndex = -1;
    // Loop all the assetBundles with variant to find the best fit variant assetBundle.
    for (int i = 0; i < bundlesWithVariant.Length; i++)
    {
        string[] curSplit = bundlesWithVariant[i].Split('.');
        string curBaseName = curSplit[0];
        string curVariant = curSplit[1];

        if (curBaseName != baseName)
            continue;

        // Lower index in m_ActiveVariants means higher priority.
        int found = System.Array.IndexOf(m_ActiveVariants, curVariant);

        // If there is no active variant found. We still want to use the first
        if (found == -1)
            found = int.MaxValue - 1;

        if (found < bestFit)
        {
            bestFit = found;
            bestFitIndex = i;
        }
    }

    if (bestFit == int.MaxValue - 1)
    {
        Log(LogType.Warning, "Ambigious asset bundle variant chosen because there was no matching active variant: " + bundlesWithVariant[bestFitIndex]);
    }

    if (bestFitIndex != -1)
    {
        return bundlesWithVariant[bestFitIndex];
    }
    else
    {
        return assetBundleName;
    }
}

/// <summary>
/// Sets up download operation for the given asset bundle if it's not downloaded already.
/// </summary>
/// <param name="assetBundleToFind">Unencrypted bundle name to load.</param>
/// <param name="isLoadingAssetBundleIndex">True only when downloading the index bundle itself.</param>
/// <param name="useJsonIndex">When loading the index, request a ".json" index instead of a binary one.</param>
/// <param name="jsonIndexUrl">Optional explicit json index URL; "[PLATFORM]" is replaced with the platform name.</param>
/// <returns>True when the bundle was already loaded or is already downloading; false when a new download was started.</returns>
static protected bool LoadAssetBundleInternal(string assetBundleToFind, bool isLoadingAssetBundleIndex = false, bool useJsonIndex = false, string jsonIndexUrl = "")
{
    //encrypted bundles have the suffix 'encrypted' appended to the name TODO this should probably go in the index though and be settable in the UMAAssetBundleManagerSettings window
    //string encryptedSuffix = BundleEncryptionKey != "" ? "encrypted" : "";
    string assetBundleToGet = assetBundleToFind;
    if (BundleEncryptionKey != "" && m_AssetBundleIndex != null)
    {
        assetBundleToGet = m_AssetBundleIndex.GetAssetBundleEncryptedName(assetBundleToFind);
        Debug.Log("assetBundleToFind was " + assetBundleToFind + " assetBundleToGet was " + assetBundleToGet);
    }
    else if (BundleEncryptionKey != "" && m_AssetBundleIndex == null)
    {
        // No index yet (we are fetching it) - fall back to the naming convention.
        assetBundleToGet = assetBundleToFind + "encrypted";
    }

    // Already loaded.
    LoadedAssetBundle bundle = null;
    m_LoadedAssetBundles.TryGetValue(assetBundleToFind, out bundle);//encrypted or not this will have the assetbundlename without the 'encrypted' suffix
    if (bundle != null && bundle.m_AssetBundle != null)
    {
        bundle.m_ReferencedCount++;
        return true;
    }

    // @TODO: Do we need to consider the referenced count of WWWs?
    // users can call LoadAssetAsync()/LoadLevelAsync() several times then wait them to be finished which might have duplicate WWWs.
    if (m_DownloadingBundles.Contains(assetBundleToFind))
        return true;

    string bundleBaseDownloadingURL = GetAssetBundleBaseDownloadingURL(assetBundleToFind);

    //TODO These dont support encrypted bundles yet
    if (bundleBaseDownloadingURL.ToLower().StartsWith("odr://"))
    {
#if ENABLE_IOS_ON_DEMAND_RESOURCES
        Log(LogType.Info, "Requesting bundle " + assetBundleToGet + " through ODR");
        m_InProgressOperations.Add(new AssetBundleDownloadFromODROperation(assetBundleToGet));
#else
        // BUGFIX: the exception was previously constructed but never thrown, so an
        // unsupported odr:// request silently hung forever in m_DownloadingBundles.
        throw new ApplicationException("Can't load bundle " + assetBundleToFind + " through ODR: this Unity version or build target doesn't support it.");
#endif
    }
    else if (bundleBaseDownloadingURL.ToLower().StartsWith("res://"))
    {
#if ENABLE_IOS_APP_SLICING
        Log(LogType.Info, "Requesting bundle " + assetBundleToGet + " through asset catalog");
        m_InProgressOperations.Add(new AssetBundleOpenFromAssetCatalogOperation(assetBundleToGet));
#else
        // BUGFIX: same as above - the exception must actually be thrown.
        throw new ApplicationException("Can't load bundle " + assetBundleToFind + " through asset catalog: this Unity version or build target doesn't support it.");
#endif
    }
    else
    {
        if (!bundleBaseDownloadingURL.EndsWith("/"))
            bundleBaseDownloadingURL += "/";

        string url = bundleBaseDownloadingURL + assetBundleToGet;

        WWW download = null;

        // For index assetbundle, always download it as we don't have hash for it.
        //TODO make something to test if there is and internet connection and if not try to get a cached version of this so we can still access the stuff that has been previously cached
        //TODO2 Make the index cache somewhere when it is downloaded.
        if (isLoadingAssetBundleIndex)
        {
            if (useJsonIndex && jsonIndexUrl != "")
            {
                url = jsonIndexUrl.Replace("[PLATFORM]", Utility.GetPlatformName());
            }
            else if (useJsonIndex)
            {
                url = url + ".json";
            }
            download = new WWW(url);
            // BUGFIX: the null check must come first - the original condition read
            // download.error before comparing download to null, so the null guard could never help.
            if (download == null || !String.IsNullOrEmpty(download.error))
            {
                if (download != null && !String.IsNullOrEmpty(download.error))
                    Log(LogType.Warning, download.error);
                else
                    Log(LogType.Warning, " index new WWW(url) was NULL");
            }
        }
        else
        {
            // Non-index bundles can be version-checked against the cache via their hash.
            download = WWW.LoadFromCacheOrDownload(url, m_AssetBundleIndex.GetAssetBundleHash(assetBundleToFind), 0);
        }
        m_InProgressOperations.Add(new AssetBundleDownloadFromWebOperation(assetBundleToFind/* + encryptedSuffix*/, download, useJsonIndex));
    }
    m_DownloadingBundles.Add(assetBundleToFind);

    return false;
}

/// <summary>
/// Where we get all the dependencies from the index for the given asset bundle and load them all.
/// </summary>
/// <param name="assetBundleName">Bundle whose dependencies should be loaded.</param>
static protected void LoadDependencies(string assetBundleName)
{
    if (m_AssetBundleIndex == null)
    {
        Log(LogType.Error, "Please initialize AssetBundleIndex by calling AssetBundleManager.Initialize()");
        return;
    }

    // Get dependecies from the AssetBundleIndex object..
    string[] dependencies = m_AssetBundleIndex.GetAllDependencies(assetBundleName);
    if (dependencies.Length == 0)
        return;

    // Resolve each dependency name to its active variant before loading.
    for (int i = 0; i < dependencies.Length; i++)
        dependencies[i] = RemapVariantName(dependencies[i]);

    // Record and load all dependencies.
    m_Dependencies.Add(assetBundleName, dependencies);
    for (int i = 0; i < dependencies.Length; i++)
        LoadAssetBundleInternal(dependencies[i], false);
}

/// <summary>
/// WARNING Not working right with DynamicAssetLoader yet! Unloads all AssetBundles compressed data (not the assets loaded from the bundle) to free up memory
/// </summary>
static public void UnloadAllAssetBundles()
{
#if UNITY_EDITOR
    // If we're in Editor simulation mode, we don't have to load the manifest assetBundle.
if (SimulateAssetBundleInEditor)
        return;
#endif
    // Collect names first so we don't mutate m_LoadedAssetBundles while enumerating it.
    List<string> bundlesToUnload = new List<string>();
    foreach (KeyValuePair<string, LoadedAssetBundle> kp in m_LoadedAssetBundles)
    {
        if (kp.Key.IndexOf(Utility.GetPlatformName().ToLower() + "index") == -1)//dont try to unload the index...
            bundlesToUnload.Add(kp.Key);
    }
    foreach (string bundleName in bundlesToUnload)
    {
        UnloadAssetBundleInternal(bundleName);
        UnloadDependencies(bundleName);//I think its unloading dependencies thats causing an issue with UMA
    }
}

/// <summary>
/// Unloads assetbundle and its dependencies.
/// </summary>
/// <param name="assetBundleName">Name of the bundle to release.</param>
static public void UnloadAssetBundle(string assetBundleName)
{
#if UNITY_EDITOR
    // If we're in Editor simulation mode, we don't have to load the index assetBundle.
    if (SimulateAssetBundleInEditor)
        return;
#endif
    UnloadAssetBundleInternal(assetBundleName);
    UnloadDependencies(assetBundleName);
}

// Releases one reference on each recorded dependency of the bundle, then forgets the record.
static protected void UnloadDependencies(string assetBundleName)
{
    string[] dependencies = null;
    if (!m_Dependencies.TryGetValue(assetBundleName, out dependencies))
        return;

    // Loop dependencies.
    foreach (var dependency in dependencies)
    {
        UnloadAssetBundleInternal(dependency);
    }

    m_Dependencies.Remove(assetBundleName);
}

// Decrements the bundle's reference count and unloads it when the count reaches zero
// (or immediately when disregardRefrencedStatus is true).
static protected void UnloadAssetBundleInternal(string assetBundleName, bool disregardRefrencedStatus = false)
{
    string error;
    LoadedAssetBundle bundle = GetLoadedAssetBundle(assetBundleName, out error);
    if (bundle == null)
        return;

    if (--bundle.m_ReferencedCount == 0 || disregardRefrencedStatus)
    {
        bundle.OnUnload();
        m_LoadedAssetBundles.Remove(assetBundleName);

        Log(LogType.Info, assetBundleName + " has been unloaded successfully");
    }
}

void Update()
{
    // Update all in progress operations
    for (int i = 0; i < m_InProgressOperations.Count;)
    {
        var operation = m_InProgressOperations[i];
        if (operation.Update())
        {
            i++;
        }
        else
        {
            // NOTE(review): finished operations are removed BEFORE being processed -
            // presumably so ProcessFinishedOperation can safely alter the list; confirm before reordering.
            m_InProgressOperations.RemoveAt(i);
            ProcessFinishedOperation(operation);
        }
    }
}

// Records the result of a completed download: on success the bundle goes into
// m_LoadedAssetBundles, on failure the message goes into m_DownloadingErrors.
void ProcessFinishedOperation(AssetBundleLoadOperation operation)
{
    AssetBundleDownloadOperation download = operation as AssetBundleDownloadOperation;
    if (download == null)
        return;

    if (String.IsNullOrEmpty(download.error))
    {
        //Debug.Log("[AssetBundleManager] processed downloaded bundle " + download.assetBundleName);
        m_LoadedAssetBundles.Add(download.assetBundleName, download.assetBundle);
    }
    else
    {
        string msg = string.Format("Failed downloading bundle {0} from {1}: {2}", download.assetBundleName, download.GetSourceURL(), download.error);
        m_DownloadingErrors.Add(download.assetBundleName, msg);
    }

    m_DownloadingBundles.Remove(download.assetBundleName);
}

/// <summary>
/// Starts a load operation for an asset from the given asset bundle.
/// </summary>
static public AssetBundleLoadAssetOperation LoadAssetAsync(string assetBundleName, string assetName, System.Type type)
{
    Log(LogType.Info, "Loading " + assetName + " from " + assetBundleName + " bundle");

    AssetBundleLoadAssetOperation operation = null;
#if UNITY_EDITOR
    if (SimulateAssetBundleInEditor)
    {
        // Simulation mode: load straight from the AssetDatabase instead of a real bundle.
        string[] assetPaths = AssetDatabase.GetAssetPathsFromAssetBundleAndAssetName(assetBundleName, assetName);
        if (assetPaths.Length == 0)
        {
            Debug.LogError("There is no asset with name \"" + assetName + "\" in " + assetBundleName);
            return null;
        }

        // @TODO: Now we only get the main object from the first asset. Should consider type also.
        UnityEngine.Object target = AssetDatabase.LoadMainAssetAtPath(assetPaths[0]);
        operation = new AssetBundleLoadAssetOperationSimulation(target);
    }
    else
#endif
    {
        assetBundleName = RemapVariantName(assetBundleName);
        LoadAssetBundle(assetBundleName, false);
        operation = new AssetBundleLoadAssetOperationFull(assetBundleName, assetName, type);

        m_InProgressOperations.Add(operation);
    }

    return operation;
}

/// <summary>
/// Starts a load operation for a level from the given asset bundle.
/// </summary>
static public AssetBundleLoadOperation LoadLevelAsync(string assetBundleName, string levelName, bool isAdditive)
{
    Log(LogType.Info, "Loading " + levelName + " from " + assetBundleName + " bundle");

    AssetBundleLoadOperation operation = null;
#if UNITY_EDITOR
    if (SimulateAssetBundleInEditor)
    {
        operation = new AssetBundleLoadLevelSimulationOperation(assetBundleName, levelName, isAdditive);
    }
    else
#endif
    {
        assetBundleName = RemapVariantName(assetBundleName);
        LoadAssetBundle(assetBundleName, false);
        operation = new AssetBundleLoadLevelOperation(assetBundleName, levelName, isAdditive);

        m_InProgressOperations.Add(operation);
    }

    return operation;
}
} // End of AssetBundleManager.
}
#region License /* * EndPointListener.cs * * This code is derived from EndPointListener.cs (System.Net) of Mono * (http://www.mono-project.com). * * The MIT License * * Copyright (c) 2005 Novell, Inc. (http://www.novell.com) * Copyright (c) 2012-2015 sta.blockhead * * Permission is hereby granted, free of charge, to any person obtaining a copy * of this software and associated documentation files (the "Software"), to deal * in the Software without restriction, including without limitation the rights * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell * copies of the Software, and to permit persons to whom the Software is * furnished to do so, subject to the following conditions: * * The above copyright notice and this permission notice shall be included in * all copies or substantial portions of the Software. * * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN * THE SOFTWARE. 
*/
#endregion

#region Authors
/*
 * Authors:
 * - Gonzalo Paniagua Javier <gonzalo@novell.com>
 */
#endregion

#region Contributors
/*
 * Contributors:
 * - Liryna <liryna.stark@gmail.com>
 * - Nicholas Devenish
 */
#endregion

using System;
using System.Collections;
using System.Collections.Generic;
using System.IO;
using System.Net;
using System.Net.Sockets;
using System.Security.Cryptography;
using System.Security.Cryptography.X509Certificates;
using System.Threading;

namespace WebSocketSharp.Net
{
  /// <summary>
  /// Accepts TCP connections for a single IP endpoint and routes each incoming
  /// HTTP request to the <see cref="HttpListener"/> whose registered prefix best
  /// matches the request URL. Prefix collections are updated lock-free via
  /// copy-and-CAS (<see cref="Interlocked.CompareExchange{T}(ref T, T, T)"/>).
  /// </summary>
  internal sealed class EndPointListener
  {
    #region Private Fields

    private List<HttpListenerPrefix>                     _all; // host == '+'
    private static readonly string                       _defaultCertFolderPath;
    private IPEndPoint                                   _endpoint;
    private Dictionary<HttpListenerPrefix, HttpListener> _prefixes;
    private bool                                         _secure;
    private Socket                                       _socket;
    private ServerSslConfiguration                       _sslConfig;
    private List<HttpListenerPrefix>                     _unhandled; // host == '*'
    private Dictionary<HttpConnection, HttpConnection>   _unregistered;
    private object                                       _unregisteredSync;

    #endregion

    #region Static Constructor

    static EndPointListener ()
    {
      // Default folder searched for per-port certificate/key files.
      _defaultCertFolderPath =
        Environment.GetFolderPath (Environment.SpecialFolder.ApplicationData);
    }

    #endregion

    #region Internal Constructors

    // Binds and starts listening immediately; the first BeginAccept is issued
    // from the constructor. Throws ArgumentException if secure listening was
    // requested but no certificate could be located.
    internal EndPointListener (
      IPAddress address,
      int port,
      bool reuseAddress,
      bool secure,
      string certificateFolderPath,
      ServerSslConfiguration sslConfig)
    {
      if (secure) {
        var cert =
          getCertificate (port, certificateFolderPath, sslConfig.ServerCertificate);

        if (cert == null)
          throw new ArgumentException ("No server certificate could be found.");

        _secure = secure;
        _sslConfig = sslConfig;
        _sslConfig.ServerCertificate = cert;
      }

      _prefixes = new Dictionary<HttpListenerPrefix, HttpListener> ();

      _unregistered = new Dictionary<HttpConnection, HttpConnection> ();
      _unregisteredSync = ((ICollection) _unregistered).SyncRoot;

      _socket =
        new Socket (address.AddressFamily, SocketType.Stream, ProtocolType.Tcp);

      if (reuseAddress)
        _socket.SetSocketOption (
          SocketOptionLevel.Socket, SocketOptionName.ReuseAddress, true);

      _endpoint = new IPEndPoint (address, port);
      _socket.Bind (_endpoint);
      _socket.Listen (500);
      _socket.BeginAccept (onAccept, this);
    }

    #endregion

    #region Public Properties

    public IPAddress Address {
      get {
        return _endpoint.Address;
      }
    }

    public bool IsSecure {
      get {
        return _secure;
      }
    }

    public int Port {
      get {
        return _endpoint.Port;
      }
    }

    public ServerSslConfiguration SslConfiguration {
      get {
        return _sslConfig;
      }
    }

    #endregion

    #region Private Methods

    // Adds a '*' or '+' prefix to the given list; rejects duplicates by path.
    private static void addSpecial (
      List<HttpListenerPrefix> prefixes, HttpListenerPrefix prefix)
    {
      var path = prefix.Path;
      foreach (var pref in prefixes)
        if (pref.Path == path)
          throw new HttpListenerException (400, "The prefix is already in use."); // TODO: Code?

      prefixes.Add (prefix);
    }

    // Unregisters this endpoint from the EndPointManager once no prefixes of
    // any kind (host, '*', '+') remain.
    private void checkIfRemove ()
    {
      if (_prefixes.Count > 0)
        return;

      var list = _unhandled;
      if (list != null && list.Count > 0)
        return;

      list = _all;
      if (list != null && list.Count > 0)
        return;

      EndPointManager.RemoveEndPoint (this);
    }

    // Loads an RSA private key from a CSP-blob file (the "<port>.key" file).
    private static RSACryptoServiceProvider createRSAFromFile (string filename)
    {
      byte[] pvk = null;
      using (var fs = File.Open (filename, FileMode.Open, FileAccess.Read, FileShare.Read)) {
        pvk = new byte[fs.Length];
        fs.Read (pvk, 0, pvk.Length);
      }

      var rsa = new RSACryptoServiceProvider ();
      rsa.ImportCspBlob (pvk);

      return rsa;
    }

    // Looks for "<port>.cer" / "<port>.key" in the given folder (or the default
    // ApplicationData folder) and combines them into a certificate with private
    // key; falls back to the supplied default certificate on any failure.
    private static X509Certificate2 getCertificate (
      int port, string certificateFolderPath, X509Certificate2 defaultCertificate)
    {
      if (certificateFolderPath == null || certificateFolderPath.Length == 0)
        certificateFolderPath = _defaultCertFolderPath;

      try {
        var cer = Path.Combine (certificateFolderPath, String.Format ("{0}.cer", port));
        var key = Path.Combine (certificateFolderPath, String.Format ("{0}.key", port));
        if (File.Exists (cer) && File.Exists (key)) {
          var cert = new X509Certificate2 (cer);
          cert.PrivateKey = createRSAFromFile (key);

          return cert;
        }
      }
      catch {
        // Best effort: any problem reading the files means we fall back below.
      }

      return defaultCertificate;
    }

    // Longest-prefix match of 'path' against a '*'/'+' prefix list; returns the
    // owning listener and (via out) the matched prefix, or null if no match.
    private static HttpListener matchFromList (
      string host, string path, List<HttpListenerPrefix> list, out HttpListenerPrefix prefix)
    {
      prefix = null;
      if (list == null)
        return null;

      HttpListener bestMatch = null;
      var bestLen = -1;
      foreach (var pref in list) {
        var ppath = pref.Path;
        if (ppath.Length < bestLen)
          continue;

        if (path.StartsWith (ppath)) {
          bestLen = ppath.Length;
          bestMatch = pref.Listener;
          prefix = pref;
        }
      }

      return bestMatch;
    }

    // Accept-loop callback: immediately re-arms BeginAccept, then hands the
    // accepted socket off. Any accept failure (e.g. during Close) ends the loop.
    private static void onAccept (IAsyncResult asyncResult)
    {
      var lsnr = (EndPointListener) asyncResult.AsyncState;

      Socket sock = null;
      try {
        sock = lsnr._socket.EndAccept (asyncResult);
        lsnr._socket.BeginAccept (onAccept, lsnr);
      }
      catch {
        if (sock != null)
          sock.Close ();

        return;
      }

      processAccepted (sock, lsnr);
    }

    // Wraps the socket in an HttpConnection, tracks it for later cleanup, and
    // starts reading the request; closes whichever resource exists on failure.
    private static void processAccepted (Socket socket, EndPointListener listener)
    {
      HttpConnection conn = null;
      try {
        conn = new HttpConnection (socket, listener);
        lock (listener._unregisteredSync)
          listener._unregistered[conn] = conn;

        conn.BeginReadRequest ();
      }
      catch {
        if (conn != null) {
          conn.Close (true);
          return;
        }

        socket.Close ();
      }
    }

    // Removes the entry with the same path from a '*'/'+' list; returns whether
    // anything was removed.
    private static bool removeSpecial (
      List<HttpListenerPrefix> prefixes, HttpListenerPrefix prefix)
    {
      var path = prefix.Path;
      var cnt = prefixes.Count;
      for (var i = 0; i < cnt; i++) {
        if (prefixes[i].Path == path) {
          prefixes.RemoveAt (i);
          return true;
        }
      }

      return false;
    }

    // Finds the listener responsible for 'uri': host-specific prefixes first
    // (longest path wins, DNS hosts must match exactly), then '*' prefixes,
    // then '+' prefixes. Both the raw path and the path with a trailing slash
    // are tried.
    private HttpListener searchListener (Uri uri, out HttpListenerPrefix prefix)
    {
      prefix = null;
      if (uri == null)
        return null;

      var host = uri.Host;
      var dns = Uri.CheckHostName (host) == UriHostNameType.Dns;
      var port = uri.Port;
      var path = HttpUtility.UrlDecode (uri.AbsolutePath);
      var pathSlash = path[path.Length - 1] == '/' ? path : path + "/";

      HttpListener bestMatch = null;
      var bestLen = -1;
      if (host != null && host.Length > 0) {
        foreach (var pref in _prefixes.Keys) {
          var ppath = pref.Path;
          if (ppath.Length < bestLen)
            continue;

          if (pref.Port != port)
            continue;

          if (dns) {
            var phost = pref.Host;
            if (Uri.CheckHostName (phost) == UriHostNameType.Dns && phost != host)
              continue;
          }

          if (path.StartsWith (ppath) || pathSlash.StartsWith (ppath)) {
            bestLen = ppath.Length;
            bestMatch = _prefixes[pref];
            prefix = pref;
          }
        }

        if (bestLen != -1)
          return bestMatch;
      }

      var list = _unhandled;
      bestMatch = matchFromList (host, path, list, out prefix);
      if (path != pathSlash && bestMatch == null)
        bestMatch = matchFromList (host, pathSlash, list, out prefix);

      if (bestMatch != null)
        return bestMatch;

      list = _all;
      bestMatch = matchFromList (host, path, list, out prefix);
      if (path != pathSlash && bestMatch == null)
        bestMatch = matchFromList (host, pathSlash, list, out prefix);

      if (bestMatch != null)
        return bestMatch;

      return null;
    }

    #endregion

    #region Internal Methods

    // True when both "<port>.cer" and "<port>.key" exist in the given folder
    // (or the default ApplicationData folder when none is supplied).
    internal static bool CertificateExists (int port, string certificateFolderPath)
    {
      if (certificateFolderPath == null || certificateFolderPath.Length == 0)
        certificateFolderPath = _defaultCertFolderPath;

      var cer = Path.Combine (certificateFolderPath, String.Format ("{0}.cer", port));
      var key = Path.Combine (certificateFolderPath, String.Format ("{0}.key", port));

      return File.Exists (cer) && File.Exists (key);
    }

    // Called by a connection when it closes so we stop tracking it.
    internal void RemoveConnection (HttpConnection connection)
    {
      lock (_unregisteredSync)
        _unregistered.Remove (connection);
    }

    #endregion

    #region Public Methods

    // Registers a prefix for a listener. '*' and '+' hosts go into the shared
    // _unhandled/_all lists via a copy-and-CAS retry loop; everything else goes
    // into _prefixes the same way. Throws if another listener owns the prefix.
    public void AddPrefix (HttpListenerPrefix prefix, HttpListener listener)
    {
      List<HttpListenerPrefix> current, future;
      if (prefix.Host == "*") {
        do {
          current = _unhandled;
          future = current != null
                   ? new List<HttpListenerPrefix> (current)
                   : new List<HttpListenerPrefix> ();

          prefix.Listener = listener;
          addSpecial (future, prefix);
        }
        while (Interlocked.CompareExchange (ref _unhandled, future, current) != current);

        return;
      }

      if (prefix.Host == "+") {
        do {
          current = _all;
          future = current != null
                   ? new List<HttpListenerPrefix> (current)
                   : new List<HttpListenerPrefix> ();

          prefix.Listener = listener;
          addSpecial (future, prefix);
        }
        while (Interlocked.CompareExchange (ref _all, future, current) != current);

        return;
      }

      Dictionary<HttpListenerPrefix, HttpListener> prefs, prefs2;
      do {
        prefs = _prefixes;
        if (prefs.ContainsKey (prefix)) {
          if (prefs[prefix] != listener)
            throw new HttpListenerException (
              400, String.Format ("There's another listener for {0}.", prefix)); // TODO: Code?

          return;
        }

        prefs2 = new Dictionary<HttpListenerPrefix, HttpListener> (prefs);
        prefs2[prefix] = listener;
      }
      while (Interlocked.CompareExchange (ref _prefixes, prefs2, prefs) != prefs);
    }

    // Resolves the request URL to a listener/prefix pair and attaches them to
    // the context; returns false when no registered prefix matches.
    public bool BindContext (HttpListenerContext context)
    {
      HttpListenerPrefix pref;
      var lsnr = searchListener (context.Request.Url, out pref);
      if (lsnr == null)
        return false;

      context.Listener = lsnr;
      context.Connection.Prefix = pref;

      return true;
    }

    // Stops accepting and force-closes every tracked connection.
    public void Close ()
    {
      _socket.Close ();

      lock (_unregisteredSync) {
        var conns = new List<HttpConnection> (_unregistered.Keys);
        _unregistered.Clear ();
        foreach (var conn in conns)
          conn.Close (true);

        conns.Clear ();
      }
    }

    // Unregisters a prefix (mirror of AddPrefix, same CAS scheme) and removes
    // this endpoint entirely when nothing is left listening on it.
    public void RemovePrefix (HttpListenerPrefix prefix, HttpListener listener)
    {
      List<HttpListenerPrefix> current, future;
      if (prefix.Host == "*") {
        do {
          current = _unhandled;
          if (current == null)
            break;

          future = new List<HttpListenerPrefix> (current);
          if (!removeSpecial (future, prefix))
            break; // The prefix wasn't found.
        }
        while (Interlocked.CompareExchange (ref _unhandled, future, current) != current);

        checkIfRemove ();
        return;
      }

      if (prefix.Host == "+") {
        do {
          current = _all;
          if (current == null)
            break;

          future = new List<HttpListenerPrefix> (current);
          if (!removeSpecial (future, prefix))
            break; // The prefix wasn't found.
        }
        while (Interlocked.CompareExchange (ref _all, future, current) != current);

        checkIfRemove ();
        return;
      }

      Dictionary<HttpListenerPrefix, HttpListener> prefs, prefs2;
      do {
        prefs = _prefixes;
        if (!prefs.ContainsKey (prefix))
          break;

        prefs2 = new Dictionary<HttpListenerPrefix, HttpListener> (prefs);
        prefs2.Remove (prefix);
      }
      while (Interlocked.CompareExchange (ref _prefixes, prefs2, prefs) != prefs);

      checkIfRemove ();
    }

    // Detaches a context from its listener (no-op for null/unbound contexts).
    public void UnbindContext (HttpListenerContext context)
    {
      if (context == null || context.Listener == null)
        return;

      context.Listener.UnregisterContext (context);
    }

    #endregion
  }
}
/* * Copyright (c) 2015, InWorldz Halcyon Developers * All rights reserved. * * Redistribution and use in source and binary forms, with or without * modification, are permitted provided that the following conditions are met: * * * Redistributions of source code must retain the above copyright notice, this * list of conditions and the following disclaimer. * * * Redistributions in binary form must reproduce the above copyright notice, * this list of conditions and the following disclaimer in the documentation * and/or other materials provided with the distribution. * * * Neither the name of halcyon nor the names of its * contributors may be used to endorse or promote products derived from * this software without specific prior written permission. * * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE * DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE * FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL * DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR * SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER * CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, * OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
*/ using System; using System.Collections.Generic; using System.Linq; using System.Text; using OpenSim.Region.Physics.Manager; using log4net; using System.Reflection; using System.Threading; using System.Diagnostics; using OpenSim.Framework; using System.Runtime.InteropServices; using System.IO; using OpenSim.Region.Interfaces; namespace InWorldz.PhysxPhysics { internal class PhysxScene : PhysicsScene { [DllImport("kernel32.dll")] static extern bool SetThreadPriority(IntPtr hThread, ThreadPriorityLevel nPriority); [DllImport("kernel32.dll")] static extern IntPtr GetCurrentThread(); public const int TIMESTEP = 15; public const float TIMESTEP_IN_SECONDS = TIMESTEP / 1000.0f; public const float DILATED_TIMESTEP_IN_SECONDS = TIMESTEP_IN_SECONDS * 2.0f; public const int SIMULATE_DELAY_TO_BEGIN_DILATION = (int)(TIMESTEP * 1.9f); private const int UPDATE_WATCHDOG_FRAMES = 200; private const int CHECK_EXPIRED_KINEMATIC_FRAMES = (int) ((1.0f / TIMESTEP_IN_SECONDS) * 60.0f); private const int UPDATE_FPS_FRAMES = 30; private static readonly ILog m_log = LogManager.GetLogger(MethodBase.GetCurrentMethod().DeclaringType); private static Debugging.PhysxErrorCallback s_ErrorCallback = new Debugging.PhysxErrorCallback(); public PhysX.Material DEFAULT_MATERIAL; private static PhysX.Foundation _foundation; private static PhysX.Physics _physics; private PhysX.SceneDesc _sceneDesc; private PhysX.Scene _scene; private Meshing.TerrainMesher _terrainMesher; private TerrainManager _terrainMgr; private Meshing.MeshingStage _meshingStage; private Thread HeartbeatThread; private Thread TimingThread; private ManualResetEventSlim _timingSignal = new ManualResetEventSlim(true); private uint _lastSimulate; private int _frameNum; private OpenSim.Framework.LocklessQueue<Commands.ICommand> _currentCommandQueue = new OpenSim.Framework.LocklessQueue<Commands.ICommand>(); private OpenSim.Framework.LocklessQueue<Commands.ICommand> _waitingCommandQueue = new 
OpenSim.Framework.LocklessQueue<Commands.ICommand>(); private C5.HashSet<PhysxPrim> _allPrims = new C5.HashSet<PhysxPrim>(); private C5.HashSet<PhysxPrim> _dynPrims = new C5.HashSet<PhysxPrim>(); private C5.HashSet<PhysxPrim> _collisionRepeatPrims = new C5.HashSet<PhysxPrim>(); private delegate void TaintHandler(PhysxPrim prim, TaintType taint); private readonly Dictionary<TaintType, TaintHandler> _taintHandlers; private volatile bool _stop = false; private bool _simulating = false; private KinematicManager _kinematicManager = new KinematicManager(); private uint _lastFpsCalc = (uint)Environment.TickCount; private short _framesSinceLastFpsCalc = 0; private volatile float _currFps = 60.0f; private bool _gridmode = true; private class DelayedCommandInfo { public Commands.ICommand Initiator; public Dictionary<Type, LinkedListNode<Commands.ICommand>> TopCullables; public LinkedList<Commands.ICommand> Commands; } /// <summary> /// Stores commands that are being delayed pending execution of an async operation on a prim (such as meshing) /// This ensures a proper order of execution for physics commands /// </summary> private Dictionary<PhysxPrim, DelayedCommandInfo> _delayedCommands = new Dictionary<PhysxPrim, DelayedCommandInfo>(); /// <summary> /// Manages the agent actors in this scene /// </summary> private PhysX.ControllerManager _controllerManager; /// <summary> /// Holds the avatar/character actors in the scene /// </summary> private C5.HashSet<PhysxCharacter> _charActors = new C5.HashSet<PhysxCharacter>(); private SimulationEventCallbackDelegator _simEventDelegator; private MovingIntegerAverage _frameTimeAvg = new MovingIntegerAverage(10); private OpenMetaverse.UUID _regionId; internal OpenMetaverse.UUID RegionID { get { return _regionId; } } private Queue<Commands.ICommand> _freedCommands = new Queue<Commands.ICommand>(); public override float SimulationFPS { get { return _currFps; } } public override bool Simulating { get { return _simulating; } set { 
_simulating = value; } } internal PhysX.Scene SceneImpl { get { return _scene; } } internal Meshing.MeshingStage MeshingStageImpl { get { return _meshingStage; } } internal PhysX.ControllerManager ControllerManager { get { return _controllerManager; } } public override int SimulationFrameTimeAvg { get { return _frameTimeAvg.CalculateAverage(); } } public uint CurrentFrameNum { get { return (uint)_frameNum; } } public override OpenSim.Region.Interfaces.ITerrainChannel TerrainChannel { get; set; } public override RegionSettings RegionSettings { get; set; } public Debugging.ContactDebugManager ContactDebug { get; set; } public IEnumerable<PhysxPrim> DynamicPrims { get { return _dynPrims; } } IMesher _mesher; public override IMesher Mesher { get { return _mesher; } } internal OpenMetaverse.Vector2[] RegionWaterCurrents = null; internal OpenMetaverse.Vector2[] RegionWindGround = null; internal OpenMetaverse.Vector2[] RegionWindAloft = null; internal float[] RegionTerrainRanges = null; internal float[] RegionTerrainMaxHeights = null; public PhysxScene() { _taintHandlers = new Dictionary<TaintType, TaintHandler>() { {TaintType.MadeDynamic, HandlePrimMadeDynamic}, {TaintType.MadeStatic, HandlePrimMadeStatic}, {TaintType.ChangedScale, HandlePrimChangedShape}, {TaintType.ChangedShape, HandlePrimChangedShape} }; ContactDebug = new Debugging.ContactDebugManager(this); } private void CreateDefaults() { DEFAULT_MATERIAL = _physics.CreateMaterial(0.5f, 0.5f, 0.15f); } public override void Initialize(IMesher meshmerizer, Nini.Config.IConfigSource config, OpenMetaverse.UUID regionId) { _regionId = regionId; _mesher = meshmerizer; m_log.Info("[InWorldz.PhysxPhysics] Creating PhysX scene"); if (config.Configs["InWorldz.PhysxPhysics"] != null) { Settings.Instance.UseVisualDebugger = config.Configs["InWorldz.PhysxPhysics"].GetBoolean("use_visual_debugger", false); Settings.Instance.UseCCD = config.Configs["InWorldz.PhysxPhysics"].GetBoolean("use_ccd", true); Settings.Instance.Gravity = 
config.Configs["InWorldz.PhysxPhysics"].GetFloat("gravity", -9.8f); Settings.Instance.ThrowOnSdkError = config.Configs["InWorldz.PhysxPhysics"].GetBoolean("throw_on_sdk_error", false); Settings.Instance.InstrumentMeshing = config.Configs["InWorldz.PhysxPhysics"].GetBoolean("instrument_meshing", false); } else { Settings.Instance.UseVisualDebugger = false; Settings.Instance.UseCCD = true; Settings.Instance.Gravity = -9.8f; Settings.Instance.ThrowOnSdkError = false; Settings.Instance.InstrumentMeshing = false; } Nini.Config.IConfig startupcfg = config.Configs["Startup"]; if (startupcfg != null) _gridmode = startupcfg.GetBoolean("gridmode", false); if (_foundation == null) { _foundation = new PhysX.Foundation(s_ErrorCallback); _physics = new PhysX.Physics(_foundation); Material.BuiltinMaterialInit(_physics); } _sceneDesc = new PhysX.SceneDesc(null, Settings.Instance.UseCCD); _sceneDesc.Gravity = new PhysX.Math.Vector3(0f, 0f, Settings.Instance.Gravity); _simEventDelegator = new SimulationEventCallbackDelegator(); _simEventDelegator.OnContactCallback += this.OnContact; _simEventDelegator.OnTriggerCallback += this.OnTrigger; _sceneDesc.SimulationEventCallback = _simEventDelegator; _scene = _physics.CreateScene(_sceneDesc); Preload(); if (Settings.Instance.UseCCD) { _scene.SetFlag(PhysX.SceneFlag.SweptIntegration, true); } if (Settings.Instance.UseVisualDebugger && _physics.RemoteDebugger != null) { _physics.RemoteDebugger.Connect("localhost", null, null, PhysX.VisualDebugger.VisualDebuggerConnectionFlag.Debug, null); } _controllerManager = _scene.CreateControllerManager(); CreateDefaults(); _terrainMesher = new Meshing.TerrainMesher(_scene); _terrainMgr = new TerrainManager(_scene, _terrainMesher, regionId); _meshingStage = new Meshing.MeshingStage(_scene, meshmerizer, _terrainMesher); _meshingStage.OnShapeNeedsFreeing += new Meshing.MeshingStage.ShapeNeedsFreeingDelegate(_meshingStage_OnShapeNeedsFreeing); _kinematicManager = new KinematicManager(); //fire up our work 
loop HeartbeatThread = Watchdog.StartThread(new ThreadStart(Heartbeat), "Physics Heartbeat", ThreadPriority.Normal, false); TimingThread = Watchdog.StartThread(new ThreadStart(DoTiming), string.Format("Physics Timing"), ThreadPriority.Highest, false); } private void Preload() { using (PhysX.Collection coll = _scene.Physics.CreateCollection()) { using (MemoryStream ms = new MemoryStream()) { coll.Deserialize(ms); } } } void _meshingStage_OnShapeNeedsFreeing(PhysicsShape shape) { this.QueueCommand(new Commands.DestroyShapeCmd { Shape = shape }); } void DoTiming() { IntPtr thrdHandle = GetCurrentThread(); SetThreadPriority(thrdHandle, ThreadPriorityLevel.TimeCritical); while (!_stop) { _timingSignal.Set(); if (_frameNum % 100 == 0) { Watchdog.UpdateThread(); } Thread.Sleep(TIMESTEP); Interlocked.Increment(ref _frameNum); } } private void Heartbeat() { uint lastSimulateFrame = 0; while (!_stop) { uint startingFrame = (uint)Environment.TickCount; bool processedCommandsThisIteration = ProcessQueueCommands(); uint uframe = (uint)_frameNum; if (_simulating && (uframe > lastSimulateFrame)) { uint tickCount = (uint)Environment.TickCount; uint ticksSinceLastSimulate = Math.Max(tickCount - _lastSimulate, TIMESTEP); _lastSimulate = (uint)Environment.TickCount; lastSimulateFrame = uframe; if (ticksSinceLastSimulate >= SIMULATE_DELAY_TO_BEGIN_DILATION) { Simulate(DILATED_TIMESTEP_IN_SECONDS, ticksSinceLastSimulate, uframe, true); //m_log.DebugFormat("[PHYSICS]: Dilated simulate {0}", ticksSinceLastSimulate); } else { Simulate(ticksSinceLastSimulate * 0.001f, ticksSinceLastSimulate, uframe, false); } ++_framesSinceLastFpsCalc; if (uframe % UPDATE_WATCHDOG_FRAMES == 0) { Watchdog.UpdateThread(); } if (uframe % CHECK_EXPIRED_KINEMATIC_FRAMES == 0) { this.CheckForExpiredKinematics(); } if (uframe % UPDATE_FPS_FRAMES == 0) { this.UpdateFpsCalc(); //CheckForPhysicsLongFramesAndDebug(); } } _frameTimeAvg.AddValue((uint)Environment.TickCount - startingFrame); 
ContactDebug.OnFramePassed(); if (_currentCommandQueue.Count == 0) { _timingSignal.Wait(); } _timingSignal.Reset(); } } private void CheckForPhysicsLongFramesAndDebug() { throw new NotImplementedException(); } //Stopwatch sw = new Stopwatch(); public override float Simulate(float timeStep, uint ticksSinceLastSimulate, uint frameNum, bool dilated) { //sw.Start(); _scene.Simulate(timeStep); _scene.FetchResults(true); //sw.Stop(); //m_log.DebugFormat("Simulate took: {0}", sw.Elapsed); //sw.Reset(); ProcessDynamicPrimChanges(timeStep, ticksSinceLastSimulate, frameNum); ProcessCollisionRepeats(timeStep, ticksSinceLastSimulate, frameNum); //run avatar dynamics at 1/2 simulation speed (30fps nominal) if (frameNum % 2 == 0) { ProcessAvatarDynamics(timeStep, ticksSinceLastSimulate, frameNum); } return 0.0f; } private void UpdateFpsCalc() { uint msSinceLastCalc = (uint)Environment.TickCount - _lastFpsCalc; _currFps = _framesSinceLastFpsCalc / (msSinceLastCalc * 0.001f); _framesSinceLastFpsCalc = 0; _lastFpsCalc = (uint)Environment.TickCount; //Console.WriteLine("FPS: {0}", _currFps); //const float LOW_FPS_THRESHOLD = 54.0f; const float LOW_FPS_THRESHOLD = 45.0f; if (_currFps < LOW_FPS_THRESHOLD) { m_log.WarnFormat("[InWorldz.PhysxPhysics] Low physics FPS {0}", _currFps); } } private void CheckForExpiredKinematics() { _kinematicManager.CheckForExipiredKinematics(); } public override PhysicsActor AddAvatar(string avName, OpenMetaverse.Vector3 position, OpenMetaverse.Quaternion rotation, OpenMetaverse.Vector3 size, bool isFlying, OpenMetaverse.Vector3 initialVelocity) { Commands.CreateCharacterCmd cmd = new Commands.CreateCharacterCmd(size.Z, size.X, position, rotation, isFlying, initialVelocity); this.QueueCommand(cmd); cmd.FinshedEvent.Wait(); cmd.Dispose(); return cmd.FinalActor; } public override void RemoveAvatar(PhysicsActor actor) { this.QueueCommand(new Commands.RemoveCharacterCmd((PhysxCharacter)actor)); } public override void RemovePrim(PhysicsActor prim) { 
this.QueueCommand(new Commands.RemoveObjectCmd((PhysxPrim)prim)); } /// <summary> /// The AddPrimShape calls are pseudo synchronous by default. /// </summary> /// <param name="primName"></param> /// <param name="pbs"></param> /// <param name="position"></param> /// <param name="size"></param> /// <param name="rotation"></param> /// <returns></returns> public override PhysicsActor AddPrimShape(string primName, AddPrimShapeFlags flags, BulkShapeData shapeData) { Commands.CreateObjectCmd createObj = new Commands.CreateObjectCmd( null, primName, shapeData.Pbs, shapeData.Position, shapeData.Size, shapeData.Rotation, shapeData.Velocity, shapeData.AngularVelocity, Meshing.MeshingStage.SCULPT_MESH_LOD, flags, (Material)shapeData.Material, shapeData.PhysicsProperties, shapeData.SerializedShapes, shapeData.ObjectReceivedOn); this.QueueCommand(createObj); createObj.FinshedEvent.Wait(); //wait for meshing and all prerequisites to complete createObj.Dispose(); return createObj.FinalPrim; } public override void BulkAddPrimShapes(ICollection<BulkShapeData> shapeData, AddPrimShapeFlags flags) { Commands.BulkCreateObjectCmd createObjs = new Commands.BulkCreateObjectCmd(flags, shapeData); this.QueueCommand(createObjs); createObjs.FinishedEvent.Wait(); //wait for meshing and all prerequisites to complete createObjs.Dispose(); } public override void AddPhysicsActorTaint(PhysicsActor prim) { //throw new NotSupportedException("AddPhysicsActorTaint must be called with a taint type"); } public override void AddPhysicsActorTaint(PhysicsActor prim, TaintType taint) { TaintHandler handler; if (_taintHandlers.TryGetValue(taint, out handler)) { handler((PhysxPrim)prim, taint); } } private void HandlePrimMadeStatic(PhysxPrim prim, TaintType taint) { this.QueueCommand(new Commands.SetPhysicalityCmd(prim, false)); } private void HandlePrimMadeDynamic(PhysxPrim prim, TaintType taint) { this.QueueCommand(new Commands.SetPhysicalityCmd(prim, true)); } private void HandlePrimChangedShape(PhysxPrim 
prim, TaintType taint) { this.QueueCommand(new Commands.ChangedShapeCmd(prim)); } private void ProcessAvatarDynamics(float timeStep, uint ticksSinceLastSimulate, uint frameNum) { _controllerManager.ComputeInteractions(TimeSpan.FromMilliseconds(ticksSinceLastSimulate)); foreach (PhysxCharacter character in _charActors) { character.SyncWithPhysics(timeStep, ticksSinceLastSimulate, frameNum); } } private void ProcessDynamicPrimChanges(float timeStep, uint ticksSinceLastSimulate, uint frameNum) { foreach (PhysicsActor actor in _dynPrims) { actor.SyncWithPhysics(timeStep, ticksSinceLastSimulate, frameNum); } } private void ProcessCollisionRepeats(float timeStep, uint ticksSinceLastSimulate, uint frameNum) { //repeat collision notifications every 4 frames (7.5 fps nominal) if (ticksSinceLastSimulate > 0 && frameNum % 4 == 0) { foreach (PhysicsActor actor in _collisionRepeatPrims) { actor.DoCollisionRepeats(timeStep, ticksSinceLastSimulate, frameNum); } } } private bool ProcessQueueCommands() { OpenSim.Framework.LocklessQueue<Commands.ICommand> oldCurrentQueue = Interlocked.Exchange<OpenSim.Framework.LocklessQueue<Commands.ICommand>>(ref _currentCommandQueue, _waitingCommandQueue); try { if (oldCurrentQueue.Count == 0) { _waitingCommandQueue = oldCurrentQueue; return false; } while (oldCurrentQueue.Count > 0) { Commands.ICommand cmd; if (oldCurrentQueue.Dequeue(out cmd)) { //remember, each command that is executed from the queue may free other //commands that are waiting on that command to complete. therefore, after executing //each command from the current queue, we must check to see if new commands //have been put into the freed queue, and execute those. 
this ensures proper //ordering of commands relative to each object DelayOrExecuteCommand(cmd); ExecuteFreedCommands(); } } } catch (Exception e) { m_log.ErrorFormat("[PhysxScene]: ProcessQueueCommands exception:\n {0}", e); } _waitingCommandQueue = oldCurrentQueue; return true; } private void ExecuteFreedCommands() { while (_freedCommands.Count > 0) { DelayOrExecuteCommand(_freedCommands.Dequeue()); } } private void DelayOrExecuteCommand(Commands.ICommand cmd) { if (!this.CheckDelayedCommand(cmd)) { //Util.DebugOut(cmd.ToString()); cmd.Execute(this); } } private bool CheckDelayedCommand(Commands.ICommand cmd) { if (cmd.AffectsMultiplePrims()) { bool delay = false; Commands.IMultiPrimCommand mpCommand = (Commands.IMultiPrimCommand)cmd; IEnumerable<PhysxPrim> targets = mpCommand.GetTargetPrims(); foreach (PhysxPrim target in targets) { delay |= CheckAddDelay(cmd, target); } //if (delay) m_log.DebugFormat("[InWorldz.PhysX] Delaying physics command pending command completion"); return delay; } else { PhysxPrim target = cmd.GetTargetPrim(); if (target == null) { return false; } return CheckAddDelay(cmd, target); } } private bool CheckAddDelay(Commands.ICommand cmd, PhysxPrim target) { DelayedCommandInfo delayInfo; if (_delayedCommands.TryGetValue(target, out delayInfo) && delayInfo.Initiator != cmd) { //if we're already the last delayed command delayed behind the other command //for the given prim, we only need to be added once per command so we can safely //just return if (delayInfo.Commands.Count > 0 && delayInfo.Commands.Last.Value == cmd) { return true; } //before adding this new command to wait, check to see if it is cullable. 
//if the command is cullable, and has the same targets, we replace it with this command //maintaining its position in the queue LinkedListNode<Commands.ICommand> cmdNode; if (cmd.IsCullable && delayInfo.TopCullables != null && delayInfo.TopCullables.TryGetValue(cmd.GetType(), out cmdNode) && HasSameTargets(cmdNode.Value, cmd)) { cmdNode.Value = cmd; if (cmd.AffectsMultiplePrims()) ((Commands.IMultiPrimCommand)cmd).AddDelay(); return true; } else { cmdNode = delayInfo.Commands.AddLast(cmd); if (cmd.AffectsMultiplePrims()) ((Commands.IMultiPrimCommand)cmd).AddDelay(); if (cmd.IsCullable) { if (delayInfo.TopCullables == null) { delayInfo.TopCullables = new Dictionary<Type, LinkedListNode<Commands.ICommand>>(); } delayInfo.TopCullables.Add(cmd.GetType(), cmdNode); } return true; } } return false; } private bool HasSameTargets(Commands.ICommand cmd1, Commands.ICommand cmd2) { if (cmd1.AffectsMultiplePrims() != cmd2.AffectsMultiplePrims()) { m_log.ErrorFormat("[InWorldz.PhysxPhysics] Asked to check command targets for different command types!"); return false; } if (cmd1.AffectsMultiplePrims()) { IEnumerator<PhysxPrim> cmd1prims = ((Commands.IMultiPrimCommand)cmd1).GetTargetPrims().GetEnumerator(); IEnumerator<PhysxPrim> cmd2prims = ((Commands.IMultiPrimCommand)cmd2).GetTargetPrims().GetEnumerator(); bool cmd1end = false; bool cmd2end = false; while (true) { cmd1end = cmd1prims.MoveNext(); cmd2end = cmd2prims.MoveNext(); if (cmd1end || cmd2end) break; if (cmd1prims.Current != cmd2prims.Current) return false; } return cmd1end == cmd2end; } else { return cmd1.GetTargetPrim() == cmd2.GetTargetPrim(); } } public override void GetResults() { } public override void SetTerrain(float[] heightMap, int revision) { this._meshingStage.MeshHeightfield(heightMap, delegate(Tuple<PhysX.TriangleMesh, MemoryStream> meshedHeightfield) { this.QueueCommand(new Commands.SetTerrainCmd(meshedHeightfield, revision)); }); } /// <summary> /// Called by the loader before the scene loop is running 
/// </summary>
        /// <param name="heightMap">Raw terrain heights.</param>
        /// <param name="revision">Terrain revision stamp.</param>
        public override void SetStartupTerrain(float[] heightMap, int revision)
        {
            // BUG FIX: log message typo corrected ("starup" -> "startup").
            m_log.Info("[InWorldz.PhysxPhysics] Setting startup terrain");
            this.QueueCommand(new Commands.SetTerrainCmd(heightMap, true, revision));
        }

        /// <summary>
        /// Synchronously installs terrain via the terrain manager. Must be called
        /// from the scene thread (command execution context).
        /// </summary>
        public void SetTerrainSync(float[] heightMap, bool canLoadFromCache, int revision)
        {
            _terrainMgr.SetTerrainSync(heightMap, canLoadFromCache, revision);
        }

        /// <summary>
        /// Synchronously installs terrain that has already been meshed.
        /// </summary>
        internal void SetPremeshedTerrainSync(Tuple<PhysX.TriangleMesh, MemoryStream> premeshedTerrainData, int revision)
        {
            _terrainMgr.SetTerrainPremeshedSync(premeshedTerrainData, revision);
        }

        /// <summary>
        /// Water level is not simulated by this engine; intentionally a no-op.
        /// </summary>
        public override void SetWaterLevel(float baseheight)
        {
            //NOP
        }

        /// <summary>
        /// Shuts the scene down: stops the timing and heartbeat threads, then
        /// disposes all prims and the meshing/terrain subsystems.
        /// </summary>
        public override void Dispose()
        {
            _stop = true;
            TimingThread.Join();
            // Wake the heartbeat one last time so it can observe _stop and exit.
            _timingSignal.Set();
            HeartbeatThread.Join();

            _meshingStage.InformCachesToPerformDirectDeletes();

            foreach (PhysxPrim actor in _allPrims)
            {
                actor.Dispose();
            }

            _meshingStage.Stop();
            _meshingStage.Dispose();

            _terrainMgr.Dispose();
        }

        /// <summary>
        /// Collider statistics are not tracked; always returns an empty map.
        /// </summary>
        public override Dictionary<uint, float> GetTopColliders()
        {
            return new Dictionary<uint, float>();
        }

        public override bool IsThreaded
        {
            get { return false; }
        }

        /// <summary>
        /// Enqueues a command for the scene loop and wakes the timing thread.
        /// Safe to call from any thread.
        /// </summary>
        public void QueueCommand(Commands.ICommand command)
        {
            _currentCommandQueue.Enqueue(command);
            _timingSignal.Set();
        }

        /// <summary>
        /// Registers a newly created prim with the scene's tracking collections.
        /// Must be called from the scene thread.
        /// </summary>
        internal void AddPrimSync(PhysxPrim prim, bool physical, bool kinematicStatic)
        {
            _allPrims.Add(prim);
            if (physical) _dynPrims.Add(prim);
            if (kinematicStatic) _kinematicManager.KinematicChanged(prim);
        }

        internal void PrimMadeDynamic(PhysxPrim prim)
        {
            _dynPrims.Add(prim);
            _kinematicManager.KinematicRemoved(prim);
        }

        internal void PrimMadeStaticKinematic(PhysxPrim actor)
        {
            _dynPrims.Remove(actor);
            _kinematicManager.KinematicChanged(actor);
        }

        internal void UpdateKinematic(PhysxPrim actor)
        {
            _kinematicManager.KinematicChanged(actor);
        }

        /// <summary>
        /// Removes a prim from every tracking collection and disposes it.
        /// </summary>
        internal void RemovePrim(PhysxPrim prim)
        {
            _dynPrims.Remove(prim);
            _allPrims.Remove(prim);
            _collisionRepeatPrims.Remove(prim);
            _kinematicManager.KinematicRemoved(prim);
            _delayedCommands.Remove(prim);

            prim.Dispose();
        }
internal void PrimBecameChild(PhysxPrim prim)
        {
            // Same bookkeeping as RemovePrim, but the prim lives on as part of a
            // linkset so it is NOT disposed here.
            _dynPrims.Remove(prim);
            _allPrims.Remove(prim);
            _kinematicManager.KinematicRemoved(prim);
            _delayedCommands.Remove(prim);
        }

        /// <summary>
        /// Marks the start of a long-running operation initiated by
        /// <paramref name="initiator"/> on <paramref name="prim"/>; subsequent
        /// commands targeting the prim are queued until EndDelayCommands.
        /// </summary>
        internal void BeginDelayCommands(PhysxPrim prim, Commands.ICommand initiator)
        {
            DelayedCommandInfo info = new DelayedCommandInfo { Commands = new LinkedList<Commands.ICommand>(), Initiator = initiator };
            _delayedCommands.Add(prim, info);
        }

        /// <summary>
        /// Ends the delay window for a prim and releases any commands that are now
        /// ready (i.e. not still waiting on another prim's delay).
        /// </summary>
        internal void EndDelayCommands(PhysxPrim prim)
        {
            DelayedCommandInfo delayedCmds;
            if (_delayedCommands.TryGetValue(prim, out delayedCmds))
            {
                _delayedCommands.Remove(prim);

                foreach (Commands.ICommand cmd in delayedCmds.Commands)
                {
                    // Multi-prim commands may still be waiting on other prims;
                    // only enqueue when the last wait has been removed.
                    if (cmd.RemoveWaitAndCheckReady())
                    {
                        this.EnqueueFreedCommand(cmd);
                    }
                }
            }
        }

        /// <summary>
        /// Enqueues commands to be processed FIRST on the next physics spin
        /// This ensures that commands that were blocked and delayed for a specific
        /// object run first before other commands that may have gotten in just after
        /// the delay was released
        /// </summary>
        /// <param name="cmd"></param>
        private void EnqueueFreedCommand(Commands.ICommand cmd)
        {
            _freedCommands.Enqueue(cmd);
        }

        internal void AddCharacterSync(PhysxCharacter newChar)
        {
            _charActors.Add(newChar);
        }

        internal void RemoveCharacterSync(PhysxCharacter physxCharacter)
        {
            _charActors.Remove(physxCharacter);
        }

        /// <summary>
        /// PhysX contact callback. Routes each side of the contact pair to the
        /// prim or character it belongs to, skipping actors flagged as deleted.
        /// </summary>
        private void OnContact(PhysX.ContactPairHeader contactPairHeader, PhysX.ContactPair[] pairs)
        {
            if ((contactPairHeader.Flags & PhysX.ContactPairHeaderFlag.DeletedActor0) == 0)
            {
                // Try prim first; if actor 0 isn't a prim it may be a character.
                bool wasPrim = TryInformPrimOfContactChange(contactPairHeader, pairs, 0);
                if (!wasPrim)
                {
                    TryInformCharacterOfContactChange(contactPairHeader, pairs, 0);
                }
            }

            if ((contactPairHeader.Flags & PhysX.ContactPairHeaderFlag.DeletedActor1) == 0)
            {
                bool wasPrim = TryInformPrimOfContactChange(contactPairHeader, pairs, 1);
                if (!wasPrim)
                {
                    TryInformCharacterOfContactChange(contactPairHeader, pairs, 1);
                }
            }
        }

        /// <summary>
        /// Forwards the contact change to the actor's PhysxCharacter, if that is
        /// what its UserData holds.
        /// </summary>
        /// <returns>true if the actor was a character and was informed.</returns>
        private bool TryInformCharacterOfContactChange(PhysX.ContactPairHeader contactPairHeader, PhysX.ContactPair[] pairs, int actorIndex)
        {
            PhysxCharacter character = contactPairHeader.Actors[actorIndex].UserData as PhysxCharacter;
            if (character != null)
            {
                character.OnContactChangeSync(contactPairHeader, pairs, actorIndex);
                return true;
            }
            else
            {
                return false;
            }
        }

        /// <summary>
        /// Forwards the contact change to the actor's PhysxPrim, if that is what
        /// its UserData holds.
        /// </summary>
        /// <returns>true if the actor was a prim and was informed.</returns>
        private bool TryInformPrimOfContactChange(PhysX.ContactPairHeader contactPairHeader, PhysX.ContactPair[] pairs, int actorIndex)
        {
            PhysxPrim prim = contactPairHeader.Actors[actorIndex].UserData as PhysxPrim;
            if (prim != null)
            {
                prim.OnContactChangeSync(contactPairHeader, pairs, actorIndex);
                return true;
            }
            else
            {
                return false;
            }
        }

        /// <summary>
        /// PhysX trigger callback: dispatches each trigger pair to the prim that
        /// owns the trigger shape.
        /// </summary>
        void OnTrigger(PhysX.TriggerPair[] pairs)
        {
            foreach (var pair in pairs)
            {
                if (pair.TriggerShape != null)
                {
                    PhysxPrim triggerPrim = pair.TriggerShape.Actor.UserData as PhysxPrim;
                    if (triggerPrim != null)
                    {
                        triggerPrim.OnTrigger(pair);
                    }
                }
            }
        }

        /// <summary>
        /// Wakes every dynamic prim in the scene (e.g. after a global change that
        /// may affect resting objects).
        /// </summary>
        internal void WakeAllDynamics()
        {
            foreach (PhysxPrim prim in _dynPrims)
            {
                prim.WakeUp();
            }
        }

        public override IMaterial FindMaterialImpl(OpenMetaverse.Material materialEnum)
        {
            return Material.FindImpl(materialEnum);
        }

        // Opt a prim in/out of repeated collision event reporting.
        public void PrimWantsCollisionRepeat(PhysxPrim prim)
        {
            _collisionRepeatPrims.Add(prim);
        }

        public void PrimDisabledCollisionRepeat(PhysxPrim prim)
        {
            _collisionRepeatPrims.Remove(prim);
        }

        public void ForEachCharacter(Action<PhysxCharacter> eachCallback)
        {
            foreach (PhysxCharacter character in _charActors)
            {
                eachCallback(character);
            }
        }

        /// <summary>
        /// Starts a one-shot collision data collection; results arrive on
        /// ContactDebug_OnDataReady which unsubscribes itself when done.
        /// </summary>
        public override void DumpCollisionInfo()
        {
            ContactDebug.OnDataReady += new Debugging.ContactDebugManager.DataCallback(ContactDebug_OnDataReady);
            ContactDebug.BeginCollectingContactData();
        }
void ContactDebug_OnDataReady(IEnumerable<KeyValuePair<PhysX.Actor, int>> data)
        {
            // One-shot handler for the collision dump requested by DumpCollisionInfo;
            // logs each actor's contact count and then unsubscribes itself.
            m_log.InfoFormat("[InWorldz.PhysX.Debugging] Contact Dump --");
            foreach (var kvp in data)
            {
                if (kvp.Key.UserData == null) continue;

                PhysxPrim prim = kvp.Key.UserData as PhysxPrim;
                if (prim == null)
                {
                    // UserData set but not a prim (e.g. a character); log count only.
                    m_log.DebugFormat("[InWorldz.PhysX.Debugging]: (object) {0}", kvp.Value);
                }
                else
                {
                    OpenMetaverse.Vector3 pos = prim.Position;
                    // +0.5 rounds the position to the nearest integer meter for the log.
                    m_log.DebugFormat("[InWorldz.PhysX.Debugging]: {0} {1} at {2}/{3}/{4}", prim.SOPName, kvp.Value,
                        (int)(pos.X + 0.5), (int)(pos.Y + 0.5), (int)(pos.Z + 0.5));
                }
            }

            ContactDebug.OnDataReady -= new Debugging.ContactDebugManager.DataCallback(ContactDebug_OnDataReady);
        }

        internal void DisableKinematicTransitionTracking(PhysxPrim physxPrim)
        {
            _kinematicManager.KinematicRemoved(physxPrim);
        }

        internal void ChildPrimDeleted(PhysxPrim childPrim)
        {
            _collisionRepeatPrims.Remove(childPrim);
        }

        /// <summary>
        /// Queues a scene-thread command that installs the latest wind/current
        /// vectors and terrain range data onto the scene.
        /// </summary>
        public override void SendPhysicsWindData(OpenMetaverse.Vector2[] sea, OpenMetaverse.Vector2[] gnd, OpenMetaverse.Vector2[] air,
            float[] ranges, float[] maxheights)
        {
            QueueCommand(
                new Commands.GenericSyncCmd(
                    (PhysxScene scene) =>
                    {
                        scene.RegionWaterCurrents = sea;
                        scene.RegionWindGround = gnd;
                        scene.RegionWindAloft = air;
                        scene.RegionTerrainRanges = ranges;
                        scene.RegionTerrainMaxHeights = maxheights;
                    }
            ));
        }

        /// <summary>
        /// Synchronous raycast: queues the async version and blocks the caller
        /// until the physics thread delivers results.
        /// NOTE(review): waits at most 1000 ms and ignores the WaitOne result, so a
        /// busy scene can return an empty list; the event is intentionally not
        /// disposed because the callback may still Set() it after a timeout.
        /// </summary>
        public override List<ContactResult> RayCastWorld(OpenMetaverse.Vector3 start, OpenMetaverse.Vector3 direction, float distance, int hitAmounts)
        {
            List<ContactResult> contactResults = new List<ContactResult>();
            AutoResetEvent ev = new AutoResetEvent(false);
            RayCastWorld(start, direction, distance, hitAmounts, (r) => { contactResults = r; ev.Set(); });
            ev.WaitOne(1000);

            return contactResults;
        }

        /// <summary>
        /// Asynchronous raycast: the actual query runs on the scene thread and
        /// <paramref name="result"/> is invoked there with the hit list.
        /// </summary>
        public override void RayCastWorld(OpenMetaverse.Vector3 start, OpenMetaverse.Vector3 direction, float distance, int hitAmounts,
            Action<List<ContactResult>> result)
        {
            QueueCommand(
                new Commands.GenericSyncCmd(
                    (PhysxScene scene) =>
                    {
                        GetRayCastResults(start, direction, distance, hitAmounts, result, scene);
                    }
            ));
        }

        /// <summary>
        /// Performs the raycast against the PhysX scene, growing the hit buffer on
        /// overflow, then returns up to <paramref name="hitAmounts"/> hits sorted
        /// by distance. Runs on the scene thread.
        /// </summary>
        private void GetRayCastResults(OpenMetaverse.Vector3 start, OpenMetaverse.Vector3 direction, float distance, int hitAmounts,
            Action<List<ContactResult>> result, PhysxScene scene)
        {
            int buffercount = 16;
            int maxbuffercount = 1024;
            PhysX.RaycastHit[] hits = null;

            direction = OpenMetaverse.Vector3.Normalize(direction);

            //Increase the buffer count if the call indicates overflow. Prevent infinite loops.
            // NOTE(review): a null return from RaycastMultiple is treated as "buffer too
            // small"; buffer doubles 16 -> 1024 before giving up.
            while (hits == null && buffercount <= maxbuffercount)
            {
                hits = SceneImpl.RaycastMultiple(PhysUtil.OmvVectorToPhysx(start), PhysUtil.OmvVectorToPhysx(direction),
                    distance, PhysX.SceneQueryFlags.All, buffercount, null);
                buffercount *= 2;
            }

            List<ContactResult> contactResults = new List<ContactResult>();

            if (hits != null)
            {
                // Sort nearest-first before truncating to the requested hit count.
                List<PhysX.RaycastHit> hitsSorted = new List<PhysX.RaycastHit>(hits);
                hitsSorted.Sort((a, b) => a.Distance.CompareTo(b.Distance));

                int count = 0;
                foreach (PhysX.RaycastHit hit in hitsSorted)
                {
                    contactResults.Add(new ContactResult()
                    {
                        Distance = hit.Distance,
                        FaceIndex = hit.FaceIndex,
                        CollisionActor = hit.Shape.Actor.UserData as PhysicsActor,
                        Position = PhysUtil.PhysxVectorToOmv(hit.Impact),
                        Normal = PhysUtil.PhysxVectorToOmv(hit.Normal),
                    });

                    if (++count >= hitAmounts) break;
                }
            }

            result(contactResults);
        }
    }
}
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *   http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied.  See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */

namespace NVelocity.App
{
	using System;
	using System.IO;
	using System.Text;
	using Commons.Collections;
	using Context;
	using Exception;
	using Runtime;
	using Runtime.Log;

	/// <summary>
	/// Application-level facade over the singleton Velocity runtime
	/// (<see cref="RuntimeSingleton"/>). Provides:
	/// <ul>
	/// <li>simple runtime initialization methods,</li>
	/// <li>template merging and direct string/stream evaluation, suitable for
	/// dynamically constructed templates and token replacement,</li>
	/// <li>direct access to Velocimacros,</li>
	/// <li>runtime property configuration.</li>
	/// </ul>
	/// All members are static; every call simply delegates to the shared runtime.
	/// </summary>
	/// <author> <a href="mailto:geirm@optonline.net">Geir Magnusson Jr.</a>
	/// </author>
	/// <author> <a href="mailto:Christoph.Reck@dlr.de">Christoph Reck</a>
	/// </author>
	/// <author> <a href="mailto:jvanzyl@apache.org">Jason van Zyl</a>
	/// </author>
	/// <version> $Id: Velocity.java 687177 2008-08-19 22:00:32Z nbubna $
	/// </version>
	public class Velocity
	{
		/// <summary>
		/// Replaces the entire runtime configuration at once. Useful when the host
		/// application already keeps its settings in an ExtendedProperties object
		/// and the Velocity configuration is a subset of it.
		/// </summary>
		public static ExtendedProperties ExtendedProperties
		{
			set { RuntimeSingleton.Configuration = value; }
		}

		/// <summary>
		/// A convenience Log instance wrapping the current LogChute; use it to
		/// emit messages through Velocity's logging system.
		/// </summary>
		/// <since> 1.5
		/// </since>
		public static Log Log
		{
			get { return RuntimeSingleton.Log; }
		}

		/// <summary>
		/// Initializes the runtime engine with the default distribution properties.
		/// </summary>
		/// <throws> Exception When an error occurs during initialization. </throws>
		public static void Init()
		{
			RuntimeSingleton.Init();
		}

		/// <summary>
		/// Initializes the runtime engine with the defaults overlaid by the
		/// properties read from the given file.
		/// </summary>
		/// <param name="propsFilename">path of the properties file to load</param>
		/// <throws> Exception When an error occurs during initialization. </throws>
		public static void Init(string propsFilename)
		{
			RuntimeSingleton.Init(propsFilename);
		}

		/// <summary>
		/// Initializes the runtime engine with the defaults overlaid by the given
		/// properties object.
		/// </summary>
		/// <param name="p">properties to apply on top of the defaults</param>
		/// <throws> Exception When an error occurs during initialization. </throws>
		public static void Init(ExtendedProperties p)
		{
			RuntimeSingleton.Init(p);
		}

		/// <summary>Sets (replaces) a single runtime property.</summary>
		/// <param name="key">The property key.</param>
		/// <param name="value">The property value.</param>
		public static void SetProperty(string key, object value)
		{
			RuntimeSingleton.SetProperty(key, value);
		}

		/// <summary>Adds a value to a runtime property (multi-valued).</summary>
		/// <param name="key">The property key.</param>
		/// <param name="value">The property value.</param>
		public static void AddProperty(string key, object value)
		{
			RuntimeSingleton.AddProperty(key, value);
		}

		/// <summary>Clears a runtime property.</summary>
		/// <param name="key">key of the property to clear</param>
		public static void ClearProperty(string key)
		{
			RuntimeSingleton.ClearProperty(key);
		}

		/// <summary>Gets a runtime property.</summary>
		/// <param name="key">key of the property to retrieve</param>
		/// <returns>the property value, or null if the property is not set</returns>
		public static object GetProperty(string key)
		{
			return RuntimeSingleton.GetProperty(key);
		}

		/// <summary>
		/// Renders an input string using the context into the output writer.
		/// Intended for dynamically constructed templates or token replacement.
		/// </summary>
		/// <param name="context">context used while rendering</param>
		/// <param name="writer">writer that receives the rendered output</param>
		/// <param name="logTag">name used for this "template" in log messages</param>
		/// <param name="instring">VTL source to render</param>
		/// <returns>true on success; false otherwise (see the runtime log)</returns>
		/// <throws> ParseErrorException The template could not be parsed. </throws>
		/// <throws> MethodInvocationException A method on a context object could not be invoked. </throws>
		/// <throws> ResourceNotFoundException A referenced resource could not be loaded. </throws>
		/// <throws> IOException While rendering to the writer, an I/O problem occurred. </throws>
		public static bool Evaluate(IContext context, TextWriter writer, string logTag, string instring)
		{
			return RuntimeSingleton.RuntimeServices.Evaluate(context, writer, logTag, instring);
		}

		/// <summary>
		/// Renders the content of a reader using the context into the output
		/// writer. Intended for dynamically constructed templates or token
		/// replacement.
		/// </summary>
		/// <param name="context">context used while rendering</param>
		/// <param name="writer">writer that receives the rendered output</param>
		/// <param name="logTag">name used for this "template" in log messages</param>
		/// <param name="reader">reader supplying the VTL source to render</param>
		/// <returns>true on success; false otherwise (see the runtime log)</returns>
		/// <throws> ParseErrorException The template could not be parsed. </throws>
		/// <throws> MethodInvocationException A method on a context object could not be invoked. </throws>
		/// <throws> ResourceNotFoundException A referenced resource could not be loaded. </throws>
		/// <throws> IOException While reading from the reader or rendering to the writer. </throws>
		/// <since> Velocity v1.1
		/// </since>
		public static bool Evaluate(IContext context, TextWriter writer, string logTag, TextReader reader)
		{
			return RuntimeSingleton.RuntimeServices.Evaluate(context, writer, logTag, reader);
		}

		/// <summary>
		/// Invokes a registered Velocimacro with the given arguments, rendering it
		/// into the writer. Note: arguments are only accepted if present in the
		/// context.
		/// </summary>
		/// <param name="vmName">name of the Velocimacro to call</param>
		/// <param name="logTag">template name used in error logging; the vmName is used if null</param>
		/// <param name="parameters">argument keys in plain form ("foo", not "$foo")</param>
		/// <param name="context">context holding the data used for rendering</param>
		/// <param name="writer">writer that receives the rendered output</param>
		/// <returns>true if the Velocimacro exists and was invoked successfully</returns>
		public static bool InvokeVelocimacro(string vmName, string logTag, string[] parameters, IContext context,
		                                     TextWriter writer)
		{
			try
			{
				return RuntimeSingleton.RuntimeServices.InvokeVelocimacro(vmName, logTag, parameters, context, writer);
			}
			catch (IOException ex)
			{
				string message = "Velocity.InvokeVelocimacro(" + vmName + ") failed";
				Log.Error(message, ex);
				throw new VelocityException(message, ex);
			}
		}

		/// <summary>
		/// Merges the named template with the context and writes the rendered
		/// result into the writer.
		/// </summary>
		/// <param name="templateName">name of the template to merge</param>
		/// <param name="encoding">character encoding of the template</param>
		/// <param name="context">populated context used for the merge</param>
		/// <param name="writer">writer that receives the rendered template</param>
		/// <returns>true on success; errors are logged to the Velocity log</returns>
		/// <throws> ParseErrorException The template could not be parsed. </throws>
		/// <throws> MethodInvocationException A method on a context object could not be invoked. </throws>
		/// <throws> ResourceNotFoundException A referenced resource could not be loaded. </throws>
		/// <throws> Exception Any other exception. </throws>
		/// <since> Velocity v1.1
		/// </since>
		public static bool MergeTemplate(string templateName, string encoding, IContext context, TextWriter writer)
		{
			Template template = RuntimeSingleton.GetTemplate(templateName, encoding);

			// Guard clause: fail loudly if the resource loaders cannot find the template.
			if (template == null)
			{
				string message = "Velocity.mergeTemplate() was unable to load template '" + templateName + "'";
				Log.Error(message);
				throw new ResourceNotFoundException(message);
			}

			template.Merge(context, writer);
			return true;
		}

		/// <summary>
		/// Fetches a Template from the resource management system.
		/// </summary>
		/// <param name="name">file name of the desired template</param>
		/// <returns>the template</returns>
		/// <throws> ResourceNotFoundException if the template is not found in any available source </throws>
		/// <throws> ParseErrorException if the template cannot be parsed </throws>
		/// <throws> Exception if an error occurs during template initialization </throws>
		public static Template GetTemplate(string name)
		{
			return RuntimeSingleton.GetTemplate(name);
		}

		/// <summary>
		/// Fetches a Template from the resource management system using an
		/// explicit character encoding.
		/// </summary>
		/// <param name="name">file name of the desired template</param>
		/// <param name="encoding">character encoding used to read the template</param>
		/// <returns>the template</returns>
		/// <throws> ResourceNotFoundException if the template is not found in any available source </throws>
		/// <throws> ParseErrorException if the template cannot be parsed </throws>
		/// <throws> Exception if an error occurs during template initialization </throws>
		/// <since> Velocity v1.1
		/// </since>
		public static Template GetTemplate(string name, string encoding)
		{
			return RuntimeSingleton.GetTemplate(name, encoding);
		}

		/// <summary>
		/// Determines whether a resource can be located by the currently
		/// configured resource loaders. Unlike earlier implementations this does
		/// not change system state (it cannot be used to pre-load the resource
		/// cache).
		/// </summary>
		/// <param name="resourceName">name of the resource to search for</param>
		/// <returns>whether the resource was located</returns>
		public static bool ResourceExists(string resourceName)
		{
			return RuntimeSingleton.GetLoaderNameForResource(resourceName) != null;
		}

		/// <summary>
		/// Stores an application attribute: an arbitrary object set by the host
		/// application and visible to any component holding a RuntimeServices.
		/// This enables communication between the application and pluggable
		/// components such as loaders and loggers. No key rules are enforced, but
		/// using a target class name as the key helps avoid collisions.
		/// </summary>
		/// <param name="key">'name' under which the object is stored</param>
		/// <param name="value">object to store under this key</param>
		public static void SetApplicationAttribute(object key, object value)
		{
			RuntimeSingleton.RuntimeServices.SetApplicationAttribute(key, value);
		}
	}
}
using Android.Runtime;
using System;
using System.Linq;

// Hand-written JNI binding additions for the Facebook Android SDK share models.
// Each method lazily caches its JNI method id in a static field, then invokes
// the underlying Java method via JNIEnv.
namespace Xamarin.Facebook.Share.Model
{
    public partial class ShareContent
    {
        public partial class Builder
        {
            // Cached JNI method id for ShareContent.Builder.build(); looked up once.
            static IntPtr id_build;

            /// <summary>Calls the Java build() method and wraps the result.</summary>
            [Register("build", "()Lcom/facebook/share/model/ShareContent;", "")]
            public global::Java.Lang.Object Build()
            {
                if (id_build == IntPtr.Zero)
                    id_build = JNIEnv.GetMethodID(class_ref, "build", "()Lcom/facebook/share/model/ShareContent;");
                return global::Java.Lang.Object.GetObject<global::Java.Lang.Object>(JNIEnv.CallObjectMethod(Handle, id_build), JniHandleOwnership.TransferLocalRef);
            }

            //// This method is explicitly implemented as a member of an instantiated Xamarin.Facebook.Share.Model.IShareModelBuilder
            //global::Java.Lang.Object global::Xamarin.Facebook.Share.Model.IShareModelBuilder.ReadFrom (global::Android.OS.Parcel p0)
            //{
            //	return global::Java.Interop.JavaObjectExtensions.JavaCast<Java.Lang.Object>(ReadFrom (p0));
            //}

            // Cached JNI method id for readFrom(Parcel).
            static IntPtr id_readFrom_Landroid_os_Parcel_;

            /// <summary>Calls the Java readFrom(Parcel) method on the builder.</summary>
            [Register("readFrom", "(Landroid/os/Parcel;)Lcom/facebook/share/model/ShareContent$Builder;", "")]
            public global::Xamarin.Facebook.Share.Model.ShareContent.Builder ReadFrom(global::Android.OS.Parcel p0)
            {
                if (id_readFrom_Landroid_os_Parcel_ == IntPtr.Zero)
                    id_readFrom_Landroid_os_Parcel_ = JNIEnv.GetMethodID(class_ref, "readFrom", "(Landroid/os/Parcel;)Lcom/facebook/share/model/ShareContent$Builder;");
                global::Xamarin.Facebook.Share.Model.ShareContent.Builder __ret = global::Java.Lang.Object.GetObject<global::Xamarin.Facebook.Share.Model.ShareContent.Builder>(JNIEnv.CallObjectMethod(Handle, id_readFrom_Landroid_os_Parcel_, new JValue(p0)), JniHandleOwnership.TransferLocalRef);
                return __ret;
            }
        }
    }

    public partial class ShareMedia
    {
        public partial class Builder
        {
            // Cached JNI method id for ShareMedia.Builder.build(); looked up once.
            static IntPtr id_build;

            /// <summary>Calls the Java build() method and wraps the result.</summary>
            [Register("build", "()Lcom/facebook/share/model/ShareMedia;", "")]
            public global::Java.Lang.Object Build()
            {
                if (id_build == IntPtr.Zero)
                    id_build = JNIEnv.GetMethodID(class_ref, "build", "()Lcom/facebook/share/model/ShareMedia;");
                return global::Java.Lang.Object.GetObject<global::Java.Lang.Object>(JNIEnv.CallObjectMethod(Handle, id_build), JniHandleOwnership.TransferLocalRef);
            }

            //// This method is explicitly implemented as a member of an instantiated Xamarin.Facebook.Share.Model.IShareModelBuilder
            //global::Java.Lang.Object global::Xamarin.Facebook.Share.Model.IShareModelBuilder.ReadFrom (global::Android.OS.Parcel p0)
            //{
            //	return global::Java.Interop.JavaObjectExtensions.JavaCast<Java.Lang.Object>(ReadFrom (p0));
            //}

            // Cached JNI method id for readFrom(Parcel).
            static IntPtr id_readFrom_Landroid_os_Parcel_;

            /// <summary>Calls the Java readFrom(Parcel) method on the builder.</summary>
            [Register("readFrom", "(Landroid/os/Parcel;)Lcom/facebook/share/model/ShareMedia$Builder;", "")]
            public global::Xamarin.Facebook.Share.Model.ShareMedia.Builder ReadFrom(global::Android.OS.Parcel p0)
            {
                if (id_readFrom_Landroid_os_Parcel_ == IntPtr.Zero)
                    id_readFrom_Landroid_os_Parcel_ = JNIEnv.GetMethodID(class_ref, "readFrom", "(Landroid/os/Parcel;)Lcom/facebook/share/model/ShareMedia$Builder;");
                global::Xamarin.Facebook.Share.Model.ShareMedia.Builder __ret = global::Java.Lang.Object.GetObject<global::Xamarin.Facebook.Share.Model.ShareMedia.Builder>(JNIEnv.CallObjectMethod(Handle, id_readFrom_Landroid_os_Parcel_, new JValue(p0)), JniHandleOwnership.TransferLocalRef);
                return __ret;
            }
        }
    }

    public partial class ShareOpenGraphValueContainer
    {
        public partial class Builder
        {
            // Cached JNI method id for ShareOpenGraphValueContainer.Builder.build().
            static IntPtr id_build;

            /// <summary>Calls the Java build() method and wraps the result.</summary>
            [Register("build", "()Lcom/facebook/share/model/ShareOpenGraphValueContainer;", "")]
            public global::Java.Lang.Object Build()
            {
                if (id_build == IntPtr.Zero)
                    id_build = JNIEnv.GetMethodID(class_ref, "build", "()Lcom/facebook/share/model/ShareOpenGraphValueContainer;");
                return global::Java.Lang.Object.GetObject<global::Java.Lang.Object>(JNIEnv.CallObjectMethod(Handle, id_build), JniHandleOwnership.TransferLocalRef);
            }

            //// This method is explicitly implemented as a member of an instantiated Xamarin.Facebook.Share.Model.IShareModelBuilder
            //global::Java.Lang.Object global::Xamarin.Facebook.Share.Model.IShareModelBuilder.ReadFrom (global::Android.OS.Parcel p0)
            //{
            //	return global::Java.Interop.JavaObjectExtensions.JavaCast<Java.Lang.Object>(ReadFrom (p0));
            //}

            // Cached JNI method id for readFrom(Parcel).
            static IntPtr id_readFrom_Landroid_os_Parcel_;

            /// <summary>Calls the Java readFrom(Parcel) method on the builder.</summary>
            [Register("readFrom", "(Landroid/os/Parcel;)Lcom/facebook/share/model/ShareOpenGraphValueContainer$Builder;", "")]
            public global::Xamarin.Facebook.Share.Model.ShareOpenGraphValueContainer.Builder ReadFrom(global::Android.OS.Parcel p0)
            {
                if (id_readFrom_Landroid_os_Parcel_ == IntPtr.Zero)
                    id_readFrom_Landroid_os_Parcel_ = JNIEnv.GetMethodID(class_ref, "readFrom", "(Landroid/os/Parcel;)Lcom/facebook/share/model/ShareOpenGraphValueContainer$Builder;");
                global::Xamarin.Facebook.Share.Model.ShareOpenGraphValueContainer.Builder __ret = global::Java.Lang.Object.GetObject<global::Xamarin.Facebook.Share.Model.ShareOpenGraphValueContainer.Builder>(JNIEnv.CallObjectMethod(Handle, id_readFrom_Landroid_os_Parcel_, new JValue(p0)), JniHandleOwnership.TransferLocalRef);
                return __ret;
            }
        }
    }

    public partial class ShareMessengerActionButton
    {
        public partial class Builder
        {
            // Cached JNI method id for ShareMessengerActionButton.Builder.build().
            static IntPtr id_build;

            /// <summary>Calls the Java build() method and wraps the result.</summary>
            [Register("build", "()Lcom/facebook/share/model/ShareMessengerActionButton;", "")]
            public global::Java.Lang.Object Build()
            {
                if (id_build == IntPtr.Zero)
                    id_build = JNIEnv.GetMethodID(class_ref, "build", "()Lcom/facebook/share/model/ShareMessengerActionButton;");
                return global::Java.Lang.Object.GetObject<global::Java.Lang.Object>(JNIEnv.CallObjectMethod(Handle, id_build), JniHandleOwnership.TransferLocalRef);
            }

            //// This method is explicitly implemented as a member of an instantiated Xamarin.Facebook.Share.Model.IShareModelBuilder
            //global::Java.Lang.Object global::Xamarin.Facebook.Share.Model.IShareModelBuilder.ReadFrom (global::Android.OS.Parcel p0)
            //{
            //	return global::Java.Interop.JavaObjectExtensions.JavaCast<Java.Lang.Object>(ReadFrom (p0));
            //}

            // Cached JNI method id for readFrom(Parcel).
            static IntPtr id_readFrom_Landroid_os_Parcel_;

            [Register("readFrom", "(Landroid/os/Parcel;)Lcom/facebook/share/model/ShareMessengerActionButton$Builder;", "")]
public global::Xamarin.Facebook.Share.Model.ShareMessengerActionButton.Builder ReadFrom(global::Android.OS.Parcel p0) { if (id_readFrom_Landroid_os_Parcel_ == IntPtr.Zero) id_readFrom_Landroid_os_Parcel_ = JNIEnv.GetMethodID(class_ref, "readFrom", "(Landroid/os/Parcel;)Lcom/facebook/share/model/ShareMedia$Builder;"); global::Xamarin.Facebook.Share.Model.ShareMessengerActionButton.Builder __ret = global::Java.Lang.Object.GetObject<global::Xamarin.Facebook.Share.Model.ShareMessengerActionButton.Builder>(JNIEnv.CallObjectMethod(Handle, id_readFrom_Landroid_os_Parcel_, new JValue(p0)), JniHandleOwnership.TransferLocalRef); return __ret; } } } } namespace Xamarin.Facebook.Share.Widget { //public partial class DeviceShareButton //{ // static IntPtr id_setEnabled_Z; // // Metadata.xml XPath method reference: path="/api/package[@name='com.facebook.share.widget']/class[@name='DeviceShareButton']/method[@name='setEnabled' and count(parameter)=1 and parameter[1][@type='boolean']]" // [Register("setEnabled", "(Z)V", "")] // public unsafe void SetEnabled(bool enabled) // { // if (id_setEnabled_Z == IntPtr.Zero) // id_setEnabled_Z = JNIEnv.GetMethodID(class_ref, "setEnabled", "(Z)V"); // try // { // JValue* __args = stackalloc JValue[1]; // __args[0] = new JValue(enabled); // JNIEnv.CallVoidMethod(((global::Java.Lang.Object)this).Handle, id_setEnabled_Z, __args); // } // finally // { // } // } //} public partial class LikeView { static Delegate cb_setEnabled_Z; #pragma warning disable 0169 static Delegate GetSetEnabled_ZHandler() { if (cb_setEnabled_Z == null) cb_setEnabled_Z = JNINativeWrapper.CreateDelegate((Action<IntPtr, IntPtr, bool>)n_SetEnabled_Z); return cb_setEnabled_Z; } static void n_SetEnabled_Z(IntPtr jnienv, IntPtr native__this, bool enabled) { global::Xamarin.Facebook.Share.Widget.LikeView __this = global::Java.Lang.Object.GetObject<global::Xamarin.Facebook.Share.Widget.LikeView>(jnienv, native__this, JniHandleOwnership.DoNotTransfer); __this.SetEnabled(enabled); 
} #pragma warning restore 0169 static IntPtr id_setEnabled_Z; // Metadata.xml XPath method reference: path="/api/package[@name='com.facebook.share.widget']/class[@name='LikeView']/method[@name='setEnabled' and count(parameter)=1 and parameter[1][@type='boolean']]" [Register("setEnabled", "(Z)V", "GetSetEnabled_ZHandler")] public unsafe void SetEnabled(bool enabled) { if (id_setEnabled_Z == IntPtr.Zero) id_setEnabled_Z = JNIEnv.GetMethodID(class_ref, "setEnabled", "(Z)V"); try { JValue* __args = stackalloc JValue[1]; __args[0] = new JValue(enabled); if (GetType() == ThresholdType) JNIEnv.CallVoidMethod(((global::Java.Lang.Object)this).Handle, id_setEnabled_Z, __args); else JNIEnv.CallNonvirtualVoidMethod(((global::Java.Lang.Object)this).Handle, ThresholdClass, JNIEnv.GetMethodID(ThresholdClass, "setEnabled", "(Z)V"), __args); } finally { } } } // public partial class ShareButtonBase // { // static Delegate cb_setEnabled_Z; //#pragma warning disable 0169 // static Delegate GetSetEnabled_ZHandler() // { // if (cb_setEnabled_Z == null) // cb_setEnabled_Z = JNINativeWrapper.CreateDelegate((Action<IntPtr, IntPtr, bool>)n_SetEnabled_Z); // return cb_setEnabled_Z; // } // static void n_SetEnabled_Z(IntPtr jnienv, IntPtr native__this, bool enabled) // { // global::Xamarin.Facebook.Share.Widget.ShareButtonBase __this = global::Java.Lang.Object.GetObject<global::Xamarin.Facebook.Share.Widget.ShareButtonBase>(jnienv, native__this, JniHandleOwnership.DoNotTransfer); // __this.SetEnabled(enabled); // } //#pragma warning restore 0169 // static IntPtr id_setEnabled_Z; // // Metadata.xml XPath method reference: path="/api/package[@name='com.facebook.share.widget']/class[@name='ShareButtonBase']/method[@name='setEnabled' and count(parameter)=1 and parameter[1][@type='boolean']]" // [Register("setEnabled", "(Z)V", "GetSetEnabled_ZHandler")] // public unsafe void SetEnabled(bool enabled) // { // if (id_setEnabled_Z == IntPtr.Zero) // id_setEnabled_Z = JNIEnv.GetMethodID(class_ref, 
"setEnabled", "(Z)V"); // try // { // JValue* __args = stackalloc JValue[1]; // __args[0] = new JValue(enabled); // if (GetType() == ThresholdType) // JNIEnv.CallVoidMethod(((global::Java.Lang.Object)this).Handle, id_setEnabled_Z, __args); // else // JNIEnv.CallNonvirtualVoidMethod(((global::Java.Lang.Object)this).Handle, ThresholdClass, JNIEnv.GetMethodID(ThresholdClass, "setEnabled", "(Z)V"), __args); // } // finally // { // } // } //} //public partial class AppInviteDialog //{ // protected override global::System.Collections.IList _OrderedModeHandlers() // { // return OrderedModeHandlers.ToList(); // } //} //public partial class CreateAppGroupDialog //{ // protected override global::System.Collections.IList _OrderedModeHandlers() // { // return OrderedModeHandlers.ToList(); // } //} //public partial class GameRequestDialog //{ // protected override global::System.Collections.IList _OrderedModeHandlers() // { // return OrderedModeHandlers.ToList(); // } //} //public partial class JoinAppGroupDialog //{ // protected override global::System.Collections.IList _OrderedModeHandlers() // { // return OrderedModeHandlers.ToList(); // } //} //public partial class MessageDialog //{ // protected override global::System.Collections.IList _OrderedModeHandlers() // { // return OrderedModeHandlers.ToList(); // } //} public partial class ShareDialog { protected override global::System.Collections.IList _OrderedModeHandlers() { return OrderedModeHandlers.ToList(); } } } //namespace Xamarin.Facebook.Share //{ // public partial class DeviceShareDialog // { // protected override global::System.Collections.IList _OrderedModeHandlers() // { // return OrderedModeHandlers.ToList(); // } // } //}
/* * Copyright (c) Contributors, http://opensimulator.org/ * See CONTRIBUTORS.TXT for a full list of copyright holders. * * Redistribution and use in source and binary forms, with or without * modification, are permitted provided that the following conditions are met: * * Redistributions of source code must retain the above copyright * notice, this list of conditions and the following disclaimer. * * Redistributions in binary form must reproduce the above copyright * notice, this list of conditions and the following disclaimer in the * documentation and/or other materials provided with the distribution. * * Neither the name of the OpenSimulator Project nor the * names of its contributors may be used to endorse or promote products * derived from this software without specific prior written permission. * * THIS SOFTWARE IS PROVIDED BY THE DEVELOPERS ``AS IS'' AND ANY * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED * WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE * DISCLAIMED. IN NO EVENT SHALL THE CONTRIBUTORS BE LIABLE FOR ANY * DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES * (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; * LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND * ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS * SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
*/ using System; using System.Collections; using System.Collections.Generic; using System.Text.RegularExpressions; using Nini.Config; using Mono.Addins; using OpenMetaverse; using OpenSim.Framework; using OpenSim.Region.Framework.Interfaces; using OpenSim.Region.Framework.Scenes; // using log4net; // using System.Reflection; /***************************************************** * * WorldCommModule * * * Holding place for world comms - basically llListen * function implementation. * * lLListen(integer channel, string name, key id, string msg) * The name, id, and msg arguments specify the filtering * criteria. You can pass the empty string * (or NULL_KEY for id) for these to set a completely * open filter; this causes the listen() event handler to be * invoked for all chat on the channel. To listen only * for chat spoken by a specific object or avatar, * specify the name and/or id arguments. To listen * only for a specific command, specify the * (case-sensitive) msg argument. If msg is not empty, * listener will only hear strings which are exactly equal * to msg. You can also use all the arguments to establish * the most restrictive filtering criteria. * * It might be useful for each listener to maintain a message * digest, with a list of recent messages by UUID. This can * be used to prevent in-world repeater loops. However, the * linden functions do not have this capability, so for now * thats the way it works. * Instead it blocks messages originating from the same prim. * (not Object!) * * For LSL compliance, note the following: * (Tested again 1.21.1 on May 2, 2008) * 1. 'id' has to be parsed into a UUID. None-UUID keys are * to be replaced by the ZeroID key. (Well, TryParse does * that for us. * 2. Setting up an listen event from the same script, with the * same filter settings (including step 1), returns the same * handle as the original filter. * 3. (TODO) handles should be script-local. Starting from 1. 
* Might be actually easier to map the global handle into * script-local handle in the ScriptEngine. Not sure if its * worth the effort tho. * * **************************************************/ namespace OpenSim.Region.CoreModules.Scripting.WorldComm { [Extension(Path = "/OpenSim/RegionModules", NodeName = "RegionModule", Id = "WorldCommModule")] public class WorldCommModule : IWorldComm, INonSharedRegionModule { // private static readonly ILog m_log = // LogManager.GetLogger(MethodBase.GetCurrentMethod().DeclaringType); private const int DEBUG_CHANNEL = 2147483647; private ListenerManager m_listenerManager; private Queue m_pending; private Queue m_pendingQ; private Scene m_scene; private int m_whisperdistance = 10; private int m_saydistance = 20; private int m_shoutdistance = 100; #region INonSharedRegionModule Members public void Initialise(IConfigSource config) { // wrap this in a try block so that defaults will work if // the config file doesn't specify otherwise. int maxlisteners = 1000; int maxhandles = 65; try { m_whisperdistance = config.Configs["Chat"].GetInt( "whisper_distance", m_whisperdistance); m_saydistance = config.Configs["Chat"].GetInt( "say_distance", m_saydistance); m_shoutdistance = config.Configs["Chat"].GetInt( "shout_distance", m_shoutdistance); maxlisteners = config.Configs["LL-Functions"].GetInt( "max_listens_per_region", maxlisteners); maxhandles = config.Configs["LL-Functions"].GetInt( "max_listens_per_script", maxhandles); } catch (Exception) { } if (maxlisteners < 1) maxlisteners = int.MaxValue; if (maxhandles < 1) maxhandles = int.MaxValue; if (maxlisteners < maxhandles) maxlisteners = maxhandles; m_listenerManager = new ListenerManager(maxlisteners, maxhandles); m_pendingQ = new Queue(); m_pending = Queue.Synchronized(m_pendingQ); } public void PostInitialise() { } public void AddRegion(Scene scene) { m_scene = scene; m_scene.RegisterModuleInterface<IWorldComm>(this); m_scene.EventManager.OnChatFromClient += DeliverClientMessage; 
m_scene.EventManager.OnChatBroadcast += DeliverClientMessage; } public void RegionLoaded(Scene scene) { } public void RemoveRegion(Scene scene) { if (scene != m_scene) return; m_scene.UnregisterModuleInterface<IWorldComm>(this); m_scene.EventManager.OnChatBroadcast -= DeliverClientMessage; m_scene.EventManager.OnChatBroadcast -= DeliverClientMessage; } public void Close() { } public string Name { get { return "WorldCommModule"; } } public Type ReplaceableInterface { get { return null; } } #endregion #region IWorldComm Members public int ListenerCount { get { return m_listenerManager.ListenerCount; } } /// <summary> /// Create a listen event callback with the specified filters. /// The parameters localID,itemID are needed to uniquely identify /// the script during 'peek' time. Parameter hostID is needed to /// determine the position of the script. /// </summary> /// <param name="localID">localID of the script engine</param> /// <param name="itemID">UUID of the script engine</param> /// <param name="hostID">UUID of the SceneObjectPart</param> /// <param name="channel">channel to listen on</param> /// <param name="name">name to filter on</param> /// <param name="id"> /// key to filter on (user given, could be totally faked) /// </param> /// <param name="msg">msg to filter on</param> /// <returns>number of the scripts handle</returns> public int Listen(uint localID, UUID itemID, UUID hostID, int channel, string name, UUID id, string msg) { return m_listenerManager.AddListener(localID, itemID, hostID, channel, name, id, msg); } /// <summary> /// Create a listen event callback with the specified filters. /// The parameters localID,itemID are needed to uniquely identify /// the script during 'peek' time. Parameter hostID is needed to /// determine the position of the script. 
/// </summary> /// <param name="localID">localID of the script engine</param> /// <param name="itemID">UUID of the script engine</param> /// <param name="hostID">UUID of the SceneObjectPart</param> /// <param name="channel">channel to listen on</param> /// <param name="name">name to filter on</param> /// <param name="id"> /// key to filter on (user given, could be totally faked) /// </param> /// <param name="msg">msg to filter on</param> /// <param name="regexBitfield"> /// Bitfield indicating which strings should be processed as regex. /// </param> /// <returns>number of the scripts handle</returns> public int Listen(uint localID, UUID itemID, UUID hostID, int channel, string name, UUID id, string msg, int regexBitfield) { return m_listenerManager.AddListener(localID, itemID, hostID, channel, name, id, msg, regexBitfield); } /// <summary> /// Sets the listen event with handle as active (active = TRUE) or inactive (active = FALSE). /// The handle used is returned from Listen() /// </summary> /// <param name="itemID">UUID of the script engine</param> /// <param name="handle">handle returned by Listen()</param> /// <param name="active">temp. 
        /// activate or deactivate the Listen()</param>
        public void ListenControl(UUID itemID, int handle, int active)
        {
            if (active == 1)
                m_listenerManager.Activate(itemID, handle);
            else if (active == 0)
                // "Dectivate" is a historical misspelling that is part of
                // ListenerManager's public surface; kept for compatibility.
                m_listenerManager.Dectivate(itemID, handle);
            // Any other value of 'active' is silently ignored -- presumably
            // intentional; TODO confirm against the script engine's contract.
        }

        /// <summary>
        /// Removes the listen event callback with handle
        /// </summary>
        /// <param name="itemID">UUID of the script engine</param>
        /// <param name="handle">handle returned by Listen()</param>
        public void ListenRemove(UUID itemID, int handle)
        {
            m_listenerManager.Remove(itemID, handle);
        }

        /// <summary>
        /// Removes all listen event callbacks for the given itemID
        /// (script engine)
        /// </summary>
        /// <param name="itemID">UUID of the script engine</param>
        public void DeleteListener(UUID itemID)
        {
            m_listenerManager.DeleteListener(itemID);
        }

        // Fallback origin used for region-wide chat when the sender is neither
        // a known scene object nor an avatar.
        protected static Vector3 CenterOfRegion = new Vector3(128, 128, 20);

        /// <summary>
        /// Deliver a chat message, resolving the sender's position from its id:
        /// scene object part first, then avatar; region-wide chat from an
        /// unknown sender originates at CenterOfRegion, anything else is dropped.
        /// </summary>
        public void DeliverMessage(ChatTypeEnum type, int channel, string name, UUID id, string msg)
        {
            Vector3 position;
            SceneObjectPart source;
            ScenePresence avatar;

            if ((source = m_scene.GetSceneObjectPart(id)) != null)
                position = source.AbsolutePosition;
            else if ((avatar = m_scene.GetScenePresence(id)) != null)
                position = avatar.AbsolutePosition;
            else if (ChatTypeEnum.Region == type)
                position = CenterOfRegion;
            else
                return;

            DeliverMessage(type, channel, name, id, msg, position);
        }

        /// <summary>
        /// This method scans over the objects which registered an interest in listen callbacks.
        /// For everyone it finds, it checks if it fits the given filter. If it does, then
        /// enqueue the message for delivery to the objects listen event handler.
        /// The enqueued ListenerInfo no longer has filter values, but the actually trigged values.
        /// Objects that do an llSay have their messages delivered here and for nearby avatars,
        /// the OnChatFromClient event is used.
/// </summary> /// <param name="type">type of delvery (whisper,say,shout or regionwide)</param> /// <param name="channel">channel to sent on</param> /// <param name="name">name of sender (object or avatar)</param> /// <param name="id">key of sender (object or avatar)</param> /// <param name="msg">msg to sent</param> public void DeliverMessage(ChatTypeEnum type, int channel, string name, UUID id, string msg, Vector3 position) { // m_log.DebugFormat("[WorldComm] got[2] type {0}, channel {1}, name {2}, id {3}, msg {4}", // type, channel, name, id, msg); // Determine which listen event filters match the given set of arguments, this results // in a limited set of listeners, each belonging a host. If the host is in range, add them // to the pending queue. foreach (ListenerInfo li in m_listenerManager.GetListeners(UUID.Zero, channel, name, id, msg)) { // Dont process if this message is from yourself! if (li.GetHostID().Equals(id)) continue; SceneObjectPart sPart = m_scene.GetSceneObjectPart( li.GetHostID()); if (sPart == null) continue; double dis = Util.GetDistanceTo(sPart.AbsolutePosition, position); switch (type) { case ChatTypeEnum.Whisper: if (dis < m_whisperdistance) QueueMessage(new ListenerInfo(li, name, id, msg)); break; case ChatTypeEnum.Say: if (dis < m_saydistance) QueueMessage(new ListenerInfo(li, name, id, msg)); break; case ChatTypeEnum.Shout: if (dis < m_shoutdistance) QueueMessage(new ListenerInfo(li, name, id, msg)); break; case ChatTypeEnum.Region: QueueMessage(new ListenerInfo(li, name, id, msg)); break; } } } /// <summary> /// Delivers the message to a scene entity. /// </summary> /// <param name='target'> /// Target. /// </param> /// <param name='channel'> /// Channel. /// </param> /// <param name='name'> /// Name. /// </param> /// <param name='id'> /// Identifier. /// </param> /// <param name='msg'> /// Message. 
/// </param> public void DeliverMessageTo(UUID target, int channel, Vector3 pos, string name, UUID id, string msg) { if (channel == DEBUG_CHANNEL) return; if(target == UUID.Zero) return; // Is target an avatar? ScenePresence sp = m_scene.GetScenePresence(target); if (sp != null) { // Send message to avatar if (channel == 0) { // Channel 0 goes to viewer ONLY m_scene.SimChat(Utils.StringToBytes(msg), ChatTypeEnum.Broadcast, 0, pos, name, id, target, false, false); return; } // for now messages to prims don't cross regions if(sp.IsChildAgent) return; List<SceneObjectGroup> attachments = sp.GetAttachments(); if (attachments.Count == 0) return; // Get uuid of attachments List<UUID> targets = new List<UUID>(); foreach (SceneObjectGroup sog in attachments) { if (!sog.IsDeleted) { SceneObjectPart[] parts = sog.Parts; foreach(SceneObjectPart p in parts) targets.Add(p.UUID); } } foreach (ListenerInfo li in m_listenerManager.GetListeners(UUID.Zero, channel, name, id, msg)) { UUID liHostID = li.GetHostID(); if (liHostID.Equals(id)) continue; if (m_scene.GetSceneObjectPart(liHostID) == null) continue; if (targets.Contains(liHostID)) QueueMessage(new ListenerInfo(li, name, id, msg)); } return; } SceneObjectPart part = m_scene.GetSceneObjectPart(target); if (part == null) // Not even an object return; // No error foreach (ListenerInfo li in m_listenerManager.GetListeners(UUID.Zero, channel, name, id, msg)) { UUID liHostID = li.GetHostID(); // Dont process if this message is from yourself! if (liHostID.Equals(id)) continue; if (m_scene.GetSceneObjectPart(liHostID) == null) continue; if (liHostID.Equals(target)) { QueueMessage(new ListenerInfo(li, name, id, msg)); break; } } } protected void QueueMessage(ListenerInfo li) { lock (m_pending.SyncRoot) { m_pending.Enqueue(li); } } /// <summary> /// Are there any listen events ready to be dispatched? 
/// </summary> /// <returns>boolean indication</returns> public bool HasMessages() { return (m_pending.Count > 0); } /// <summary> /// Pop the first availlable listen event from the queue /// </summary> /// <returns>ListenerInfo with filter filled in</returns> public IWorldCommListenerInfo GetNextMessage() { ListenerInfo li = null; lock (m_pending.SyncRoot) { li = (ListenerInfo)m_pending.Dequeue(); } return li; } #endregion /******************************************************************** * * Listener Stuff * * *****************************************************************/ private void DeliverClientMessage(Object sender, OSChatMessage e) { if (null != e.Sender) { DeliverMessage(e.Type, e.Channel, e.Sender.Name, e.Sender.AgentId, e.Message, e.Position); } else { DeliverMessage(e.Type, e.Channel, e.From, UUID.Zero, e.Message, e.Position); } } public Object[] GetSerializationData(UUID itemID) { return m_listenerManager.GetSerializationData(itemID); } public void CreateFromData(uint localID, UUID itemID, UUID hostID, Object[] data) { m_listenerManager.AddFromData(localID, itemID, hostID, data); } } public class ListenerManager { private Dictionary<int, List<ListenerInfo>> m_listeners = new Dictionary<int, List<ListenerInfo>>(); private int m_maxlisteners; private int m_maxhandles; private int m_curlisteners; /// <summary> /// Total number of listeners /// </summary> public int ListenerCount { get { lock (m_listeners) return m_listeners.Count; } } public ListenerManager(int maxlisteners, int maxhandles) { m_maxlisteners = maxlisteners; m_maxhandles = maxhandles; m_curlisteners = 0; } public int AddListener(uint localID, UUID itemID, UUID hostID, int channel, string name, UUID id, string msg) { return AddListener(localID, itemID, hostID, channel, name, id, msg, 0); } public int AddListener(uint localID, UUID itemID, UUID hostID, int channel, string name, UUID id, string msg, int regexBitfield) { // do we already have a match on this particular filter event? 
List<ListenerInfo> coll = GetListeners(itemID, channel, name, id, msg); if (coll.Count > 0) { // special case, called with same filter settings, return same // handle (2008-05-02, tested on 1.21.1 server, still holds) return coll[0].GetHandle(); } lock (m_listeners) { if (m_curlisteners < m_maxlisteners) { int newHandle = GetNewHandle(itemID); if (newHandle > 0) { ListenerInfo li = new ListenerInfo(newHandle, localID, itemID, hostID, channel, name, id, msg, regexBitfield); List<ListenerInfo> listeners; if (!m_listeners.TryGetValue( channel, out listeners)) { listeners = new List<ListenerInfo>(); m_listeners.Add(channel, listeners); } listeners.Add(li); m_curlisteners++; return newHandle; } } } return -1; } public void Remove(UUID itemID, int handle) { lock (m_listeners) { foreach (KeyValuePair<int, List<ListenerInfo>> lis in m_listeners) { foreach (ListenerInfo li in lis.Value) { if (li.GetItemID().Equals(itemID) && li.GetHandle().Equals(handle)) { lis.Value.Remove(li); m_curlisteners--; if (lis.Value.Count == 0) m_listeners.Remove(lis.Key); // bailing of loop so this does not smoke // there should be only one, so we bail out early return; } } } } } public void DeleteListener(UUID itemID) { List<int> emptyChannels = new List<int>(); List<ListenerInfo> removedListeners = new List<ListenerInfo>(); lock (m_listeners) { foreach (KeyValuePair<int, List<ListenerInfo>> lis in m_listeners) { foreach (ListenerInfo li in lis.Value) { if (li.GetItemID().Equals(itemID)) { // store them first, else the enumerated bails on // us removedListeners.Add(li); } } foreach (ListenerInfo li in removedListeners) { lis.Value.Remove(li); m_curlisteners--; } removedListeners.Clear(); if (lis.Value.Count == 0) { // again, store first, remove later emptyChannels.Add(lis.Key); } } foreach (int channel in emptyChannels) { m_listeners.Remove(channel); } } } public void Activate(UUID itemID, int handle) { lock (m_listeners) { foreach (KeyValuePair<int, List<ListenerInfo>> lis in m_listeners) { 
foreach (ListenerInfo li in lis.Value) { if (li.GetItemID().Equals(itemID) && li.GetHandle() == handle) { li.Activate(); // only one, bail out return; } } } } } public void Dectivate(UUID itemID, int handle) { lock (m_listeners) { foreach (KeyValuePair<int, List<ListenerInfo>> lis in m_listeners) { foreach (ListenerInfo li in lis.Value) { if (li.GetItemID().Equals(itemID) && li.GetHandle() == handle) { li.Deactivate(); // only one, bail out return; } } } } } /// <summary> /// non-locked access, since its always called in the context of the /// lock /// </summary> /// <param name="itemID"></param> /// <returns></returns> private int GetNewHandle(UUID itemID) { List<int> handles = new List<int>(); // build a list of used keys for this specific itemID... foreach (KeyValuePair<int, List<ListenerInfo>> lis in m_listeners) { foreach (ListenerInfo li in lis.Value) { if (li.GetItemID().Equals(itemID)) handles.Add(li.GetHandle()); } } if(handles.Count >= m_maxhandles) return -1; // Note: 0 is NOT a valid handle for llListen() to return for (int i = 1; i <= m_maxhandles; i++) { if (!handles.Contains(i)) return i; } return -1; } /// These are duplicated from ScriptBaseClass /// http://opensimulator.org/mantis/view.php?id=6106#c21945 #region Constants for the bitfield parameter of osListenRegex /// <summary> /// process name parameter as regex /// </summary> public const int OS_LISTEN_REGEX_NAME = 0x1; /// <summary> /// process message parameter as regex /// </summary> public const int OS_LISTEN_REGEX_MESSAGE = 0x2; #endregion /// <summary> /// Get listeners matching the input parameters. /// </summary> /// <remarks> /// Theres probably a more clever and efficient way to do this, maybe /// with regex. /// PM2008: Ha, one could even be smart and define a specialized /// Enumerator. 
/// </remarks> /// <param name="itemID"></param> /// <param name="channel"></param> /// <param name="name"></param> /// <param name="id"></param> /// <param name="msg"></param> /// <returns></returns> public List<ListenerInfo> GetListeners(UUID itemID, int channel, string name, UUID id, string msg) { List<ListenerInfo> collection = new List<ListenerInfo>(); lock (m_listeners) { List<ListenerInfo> listeners; if (!m_listeners.TryGetValue(channel, out listeners)) { return collection; } foreach (ListenerInfo li in listeners) { if (!li.IsActive()) { continue; } if (!itemID.Equals(UUID.Zero) && !li.GetItemID().Equals(itemID)) { continue; } if (li.GetName().Length > 0 && ( ((li.RegexBitfield & OS_LISTEN_REGEX_NAME) != OS_LISTEN_REGEX_NAME && !li.GetName().Equals(name)) || ((li.RegexBitfield & OS_LISTEN_REGEX_NAME) == OS_LISTEN_REGEX_NAME && !Regex.IsMatch(name, li.GetName())) )) { continue; } if (!li.GetID().Equals(UUID.Zero) && !li.GetID().Equals(id)) { continue; } if (li.GetMessage().Length > 0 && ( ((li.RegexBitfield & OS_LISTEN_REGEX_MESSAGE) != OS_LISTEN_REGEX_MESSAGE && !li.GetMessage().Equals(msg)) || ((li.RegexBitfield & OS_LISTEN_REGEX_MESSAGE) == OS_LISTEN_REGEX_MESSAGE && !Regex.IsMatch(msg, li.GetMessage())) )) { continue; } collection.Add(li); } } return collection; } public Object[] GetSerializationData(UUID itemID) { List<Object> data = new List<Object>(); lock (m_listeners) { foreach (List<ListenerInfo> list in m_listeners.Values) { foreach (ListenerInfo l in list) { if (l.GetItemID() == itemID) data.AddRange(l.GetSerializationData()); } } } return (Object[])data.ToArray(); } public void AddFromData(uint localID, UUID itemID, UUID hostID, Object[] data) { int idx = 0; Object[] item = new Object[6]; int dataItemLength = 6; while (idx < data.Length) { dataItemLength = (idx + 7 == data.Length || (idx + 7 < data.Length && data[idx + 7] is bool)) ? 
7 : 6; item = new Object[dataItemLength]; Array.Copy(data, idx, item, 0, dataItemLength); ListenerInfo info = ListenerInfo.FromData(localID, itemID, hostID, item); lock (m_listeners) { if (!m_listeners.ContainsKey((int)item[2])) { m_listeners.Add((int)item[2], new List<ListenerInfo>()); } m_listeners[(int)item[2]].Add(info); } idx += dataItemLength; } } } public class ListenerInfo : IWorldCommListenerInfo { /// <summary> /// Listener is active or not /// </summary> private bool m_active; /// <summary> /// Assigned handle of this listener /// </summary> private int m_handle; /// <summary> /// Local ID from script engine /// </summary> private uint m_localID; /// <summary> /// ID of the host script engine /// </summary> private UUID m_itemID; /// <summary> /// ID of the host/scene part /// </summary> private UUID m_hostID; /// <summary> /// Channel /// </summary> private int m_channel; /// <summary> /// ID to filter messages from /// </summary> private UUID m_id; /// <summary> /// Object name to filter messages from /// </summary> private string m_name; /// <summary> /// The message /// </summary> private string m_message; public ListenerInfo(int handle, uint localID, UUID ItemID, UUID hostID, int channel, string name, UUID id, string message) { Initialise(handle, localID, ItemID, hostID, channel, name, id, message, 0); } public ListenerInfo(int handle, uint localID, UUID ItemID, UUID hostID, int channel, string name, UUID id, string message, int regexBitfield) { Initialise(handle, localID, ItemID, hostID, channel, name, id, message, regexBitfield); } public ListenerInfo(ListenerInfo li, string name, UUID id, string message) { Initialise(li.m_handle, li.m_localID, li.m_itemID, li.m_hostID, li.m_channel, name, id, message, 0); } public ListenerInfo(ListenerInfo li, string name, UUID id, string message, int regexBitfield) { Initialise(li.m_handle, li.m_localID, li.m_itemID, li.m_hostID, li.m_channel, name, id, message, regexBitfield); } private void Initialise(int 
handle, uint localID, UUID ItemID, UUID hostID, int channel, string name, UUID id, string message, int regexBitfield) { m_active = true; m_handle = handle; m_localID = localID; m_itemID = ItemID; m_hostID = hostID; m_channel = channel; m_name = name; m_id = id; m_message = message; RegexBitfield = regexBitfield; } public Object[] GetSerializationData() { Object[] data = new Object[7]; data[0] = m_active; data[1] = m_handle; data[2] = m_channel; data[3] = m_name; data[4] = m_id; data[5] = m_message; data[6] = RegexBitfield; return data; } public static ListenerInfo FromData(uint localID, UUID ItemID, UUID hostID, Object[] data) { ListenerInfo linfo = new ListenerInfo((int)data[1], localID, ItemID, hostID, (int)data[2], (string)data[3], (UUID)data[4], (string)data[5]); linfo.m_active = (bool)data[0]; if (data.Length >= 7) { linfo.RegexBitfield = (int)data[6]; } return linfo; } public UUID GetItemID() { return m_itemID; } public UUID GetHostID() { return m_hostID; } public int GetChannel() { return m_channel; } public uint GetLocalID() { return m_localID; } public int GetHandle() { return m_handle; } public string GetMessage() { return m_message; } public string GetName() { return m_name; } public bool IsActive() { return m_active; } public void Deactivate() { m_active = false; } public void Activate() { m_active = true; } public UUID GetID() { return m_id; } public int RegexBitfield { get; private set; } } }
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE file in the project root for more information.

namespace Microsoft.Xml
{
    using System;
    using Microsoft.Xml.Schema;
    using Microsoft.Xml.XPath;
    using System.Diagnostics;

    // Represents an attribute of the XMLElement object. Valid and default
    // values for the attribute are defined in a DTD or schema.
    public class XmlAttribute : XmlNode
    {
        // Interned name triple (prefix / local name / namespace URI), shared through the
        // owner document's name table; also carries the schema info for this attribute.
        private XmlName _name;

        // Tail of the circular singly-linked list of child nodes (the attribute's
        // text / entity-reference content). Null when the attribute has no children.
        private XmlLinkedNode _lastChild;

        // Constructs an attribute from an already-interned XmlName.
        // Name validity is only re-checked when the document is not loading:
        // during Load the reader has already validated the names.
        internal XmlAttribute(XmlName name, XmlDocument doc) : base(doc)
        {
            Debug.Assert(name != null);
            Debug.Assert(doc != null);

            this.parentNode = null;

            if (!doc.IsLoading)
            {
                XmlDocument.CheckName(name.Prefix);
                XmlDocument.CheckName(name.LocalName);
            }
            if (name.LocalName.Length == 0)
                throw new ArgumentException(ResXml.Xdom_Attr_Name);
            _name = name;
        }

        // Hash code of the interned local name; used by attribute collections for fast lookup.
        internal int LocalNameHash
        {
            get { return _name.HashCode; }
        }

        // Convenience constructor: interns (prefix, localName, namespaceURI) in the
        // document's name table, then defers to the primary constructor.
        protected internal XmlAttribute(string prefix, string localName, string namespaceURI, XmlDocument doc)
        : this(doc.AddAttrXmlName(prefix, localName, namespaceURI, null), doc)
        {
        }

        // The interned name record backing Name/LocalName/Prefix/NamespaceURI/SchemaInfo.
        internal XmlName XmlName
        {
            get { return _name; }
            set { _name = value; }
        }

        // Creates a duplicate of this node.
        public override XmlNode CloneNode(bool deep)
        {
            // CloneNode for attributes is deep irrespective of parameter 'deep' value
            Debug.Assert(OwnerDocument != null);
            XmlDocument doc = OwnerDocument;
            XmlAttribute attr = doc.CreateAttribute(Prefix, LocalName, NamespaceURI);
            attr.CopyChildren(doc, this, true);
            return attr;
        }

        // Gets the parent of this node (for nodes that can have parents).
        // Always null for attributes per the DOM spec; the owning element is exposed
        // via OwnerElement instead (which reads the internal parentNode field).
        public override XmlNode ParentNode
        {
            get { return null; }
        }

        // Gets the name of the node (the qualified name, e.g. "prefix:local").
        public override String Name
        {
            get { return _name.Name; }
        }

        // Gets the name of the node without the namespace prefix.
        public override String LocalName
        {
            get { return _name.LocalName; }
        }

        // Gets the namespace URI of this node.
        public override String NamespaceURI
        {
            get { return _name.NamespaceURI; }
        }

        // Gets or sets the namespace prefix of this node.
        // Setting the prefix re-interns the name, preserving local name, namespace and schema info.
        public override String Prefix
        {
            get { return _name.Prefix; }
            set { _name = _name.OwnerDocument.AddAttrXmlName(value, LocalName, NamespaceURI, SchemaInfo); }
        }

        // Gets the type of the current node.
        public override XmlNodeType NodeType
        {
            get { return XmlNodeType.Attribute; }
        }

        // Gets the XmlDocument that contains this node (via the interned name's owner).
        public override XmlDocument OwnerDocument
        {
            get { return _name.OwnerDocument; }
        }

        // Gets or sets the value of the node.
        public override String Value
        {
            get { return InnerText; }
            set { InnerText = value; } //use InnerText which has perf optimization
        }

        // Post-schema-validation info; the interned XmlName itself implements IXmlSchemaInfo.
        public override IXmlSchemaInfo SchemaInfo
        {
            get { return _name; }
        }

        // Setting InnerText must keep the document's element-id map consistent when a DTD
        // declares this attribute as an ID: snapshot the old value, apply the change, then
        // let the owner element's attribute collection re-key the map.
        public override String InnerText
        {
            set
            {
                if (PrepareOwnerElementInElementIdAttrMap())
                {
                    string innerText = base.InnerText;
                    base.InnerText = value;
                    ResetOwnerElementInElementIdAttrMap(innerText);
                }
                else
                {
                    base.InnerText = value;
                }
            }
        }

        // Returns true (and prepares the owner element's id-attr bookkeeping) only when a DTD
        // is present and this attribute belongs to an element; otherwise no id tracking is needed.
        internal bool PrepareOwnerElementInElementIdAttrMap()
        {
            XmlDocument ownerDocument = OwnerDocument;
            if (ownerDocument.DtdSchemaInfo != null)
            {
                // DTD exists
                XmlElement ownerElement = OwnerElement;
                if (ownerElement != null)
                {
                    return ownerElement.Attributes.PrepareParentInElementIdAttrMap(Prefix, LocalName);
                }
            }
            return false;
        }

        // Re-keys the owner element in the element-id map from the old attribute value to the current one.
        internal void ResetOwnerElementInElementIdAttrMap(string oldInnerText)
        {
            XmlElement ownerElement = OwnerElement;
            if (ownerElement != null)
            {
                ownerElement.Attributes.ResetParentInElementIdAttrMap(oldInnerText, InnerText);
            }
        }

        // Attributes can contain child nodes (text / entity references).
        internal override bool IsContainer
        {
            get { return true; }
        }

        //the function is provided only at Load time to speed up Load process
        // Appends newChild to the circular child list without the full InsertBefore machinery.
        internal override XmlNode AppendChildForLoad(XmlNode newChild, XmlDocument doc)
        {
            XmlNodeChangedEventArgs args = doc.GetInsertEventArgsForLoad(newChild, this);

            if (args != null)
                doc.BeforeEvent(args);

            XmlLinkedNode newNode = (XmlLinkedNode)newChild;

            if (_lastChild == null)
            { // if LastNode == null
                // First child: the circular list points at itself.
                newNode.next = newNode;
                _lastChild = newNode;
                newNode.SetParentForLoad(this);
            }
            else
            {
                XmlLinkedNode refNode = _lastChild; // refNode = LastNode;
                newNode.next = refNode.next;
                refNode.next = newNode;
                _lastChild = newNode; // LastNode = newNode;
                if (refNode.IsText && newNode.IsText)
                {
                    // Adjacent text nodes are nested rather than re-parented individually.
                    NestTextNodes(refNode, newNode);
                }
                else
                {
                    newNode.SetParentForLoad(this);
                }
            }

            if (args != null)
                doc.AfterEvent(args);

            return newNode;
        }

        // Tail of the child list; setter used by list-manipulation code in XmlNode.
        internal override XmlLinkedNode LastNode
        {
            get { return _lastChild; }
            set { _lastChild = value; }
        }

        // Only text and entity-reference nodes may appear inside an attribute.
        internal override bool IsValidChildType(XmlNodeType type)
        {
            return (type == XmlNodeType.Text) || (type == XmlNodeType.EntityReference);
        }

        // Gets a value indicating whether the value was explicitly set.
        public virtual bool Specified
        {
            get { return true; }
        }

        // Each child-list mutator below wraps the base operation with the same id-attr
        // map maintenance as the InnerText setter: snapshot old value, mutate, re-key.
        public override XmlNode InsertBefore(XmlNode newChild, XmlNode refChild)
        {
            XmlNode node;
            if (PrepareOwnerElementInElementIdAttrMap())
            {
                string innerText = InnerText;
                node = base.InsertBefore(newChild, refChild);
                ResetOwnerElementInElementIdAttrMap(innerText);
            }
            else
            {
                node = base.InsertBefore(newChild, refChild);
            }
            return node;
        }

        public override XmlNode InsertAfter(XmlNode newChild, XmlNode refChild)
        {
            XmlNode node;
            if (PrepareOwnerElementInElementIdAttrMap())
            {
                string innerText = InnerText;
                node = base.InsertAfter(newChild, refChild);
                ResetOwnerElementInElementIdAttrMap(innerText);
            }
            else
            {
                node = base.InsertAfter(newChild, refChild);
            }
            return node;
        }

        public override XmlNode ReplaceChild(XmlNode newChild, XmlNode oldChild)
        {
            XmlNode node;
            if (PrepareOwnerElementInElementIdAttrMap())
            {
                string innerText = InnerText;
                node = base.ReplaceChild(newChild, oldChild);
                ResetOwnerElementInElementIdAttrMap(innerText);
            }
            else
            {
                node = base.ReplaceChild(newChild, oldChild);
            }
            return node;
        }

        public override XmlNode RemoveChild(XmlNode oldChild)
        {
            XmlNode node;
            if (PrepareOwnerElementInElementIdAttrMap())
            {
                string innerText = InnerText;
                node = base.RemoveChild(oldChild);
                ResetOwnerElementInElementIdAttrMap(innerText);
            }
            else
            {
                node = base.RemoveChild(oldChild);
            }
            return node;
        }

        public override XmlNode PrependChild(XmlNode newChild)
        {
            XmlNode node;
            if (PrepareOwnerElementInElementIdAttrMap())
            {
                string innerText = InnerText;
                node = base.PrependChild(newChild);
                ResetOwnerElementInElementIdAttrMap(innerText);
            }
            else
            {
                node = base.PrependChild(newChild);
            }
            return node;
        }

        public override XmlNode AppendChild(XmlNode newChild)
        {
            XmlNode node;
            if (PrepareOwnerElementInElementIdAttrMap())
            {
                string innerText = InnerText;
                node = base.AppendChild(newChild);
                ResetOwnerElementInElementIdAttrMap(innerText);
            }
            else
            {
                node = base.AppendChild(newChild);
            }
            return node;
        }

        // DOM Level 2

        // Gets the XmlElement node that contains this attribute.
        // Backed by the internal parentNode field (ParentNode itself reports null for attributes).
        public virtual XmlElement OwnerElement
        {
            get { return parentNode as XmlElement; }
        }

        // Gets or sets the markup representing just the children of this node.
        public override string InnerXml
        {
            set
            {
                // Replace all existing content by re-parsing the supplied markup.
                RemoveAll();
                XmlLoader loader = new XmlLoader();
                loader.LoadInnerXmlAttribute(this, value);
            }
        }

        // Saves the node to the specified XmlWriter.
        public override void WriteTo(XmlWriter w)
        {
            w.WriteStartAttribute(Prefix, LocalName, NamespaceURI);
            WriteContentTo(w);
            w.WriteEndAttribute();
        }

        // Saves all the children of the node to the specified XmlWriter.
        public override void WriteContentTo(XmlWriter w)
        {
            for (XmlNode node = FirstChild; node != null; node = node.NextSibling)
            {
                node.WriteTo(w);
            }
        }

        // Base URI is inherited from the owning element; empty when detached.
        public override String BaseURI
        {
            get
            {
                if (OwnerElement != null)
                    return OwnerElement.BaseURI;
                return String.Empty;
            }
        }

        // Records the owning element (read back through OwnerElement, not ParentNode).
        internal override void SetParent(XmlNode node)
        {
            this.parentNode = node;
        }

        // xml:space scope is inherited from the owning element; None when detached.
        internal override XmlSpace XmlSpace
        {
            get
            {
                if (OwnerElement != null)
                    return OwnerElement.XmlSpace;
                return XmlSpace.None;
            }
        }

        // xml:lang scope is inherited from the owning element; empty when detached.
        internal override String XmlLang
        {
            get
            {
                if (OwnerElement != null)
                    return OwnerElement.XmlLang;
                return String.Empty;
            }
        }

        // XPath treats xmlns declarations as namespace nodes, all other attributes as attribute nodes.
        internal override XPathNodeType XPNodeType
        {
            get
            {
                if (IsNamespace)
                {
                    return XPathNodeType.Namespace;
                }
                return XPathNodeType.Attribute;
            }
        }

        // XPath local name: the default namespace declaration ("xmlns" with no prefix)
        // has an empty local name in the XPath data model.
        internal override string XPLocalName
        {
            get
            {
                if (_name.Prefix.Length == 0 && _name.LocalName == "xmlns")
                    return string.Empty;
                return _name.LocalName;
            }
        }

        // True when this attribute is a namespace declaration (its namespace URI is the
        // reserved xmlns namespace); uses reference equality on the interned string.
        internal bool IsNamespace
        {
            get { return Ref.Equal(_name.NamespaceURI, _name.OwnerDocument.strReservedXmlns); }
        }
    }
}
using Microsoft.IdentityModel; using Microsoft.IdentityModel.S2S.Protocols.OAuth2; using Microsoft.IdentityModel.S2S.Tokens; using Microsoft.SharePoint.Client; using Microsoft.SharePoint.Client.EventReceivers; using System; using System.Collections.Generic; using System.Collections.ObjectModel; using System.Globalization; using System.IdentityModel.Selectors; using System.IdentityModel.Tokens; using System.IO; using System.Linq; using System.Net; using System.Security.Cryptography.X509Certificates; using System.Security.Principal; using System.ServiceModel; using System.Text; using System.Web; using System.Web.Configuration; using System.Web.Script.Serialization; using AudienceRestriction = Microsoft.IdentityModel.Tokens.AudienceRestriction; using AudienceUriValidationFailedException = Microsoft.IdentityModel.Tokens.AudienceUriValidationFailedException; using SecurityTokenHandlerConfiguration = Microsoft.IdentityModel.Tokens.SecurityTokenHandlerConfiguration; using X509SigningCredentials = Microsoft.IdentityModel.SecurityTokenService.X509SigningCredentials; namespace SocialEventPlannerWeb { public static class TokenHelper { #region public fields /// <summary> /// SharePoint principal. /// </summary> public const string SharePointPrincipal = "00000003-0000-0ff1-ce00-000000000000"; /// <summary> /// Lifetime of HighTrust access token, 12 hours. /// </summary> public static readonly TimeSpan HighTrustAccessTokenLifetime = TimeSpan.FromHours(12.0); #endregion public fields #region public methods /// <summary> /// Retrieves the context token string from the specified request by looking for well-known parameter names in the /// POSTed form parameters and the querystring. Returns null if no context token is found. 
/// </summary> /// <param name="request">HttpRequest in which to look for a context token</param> /// <returns>The context token string</returns> public static string GetContextTokenFromRequest(HttpRequest request) { return GetContextTokenFromRequest(new HttpRequestWrapper(request)); } /// <summary> /// Retrieves the context token string from the specified request by looking for well-known parameter names in the /// POSTed form parameters and the querystring. Returns null if no context token is found. /// </summary> /// <param name="request">HttpRequest in which to look for a context token</param> /// <returns>The context token string</returns> public static string GetContextTokenFromRequest(HttpRequestBase request) { string[] paramNames = { "AppContext", "AppContextToken", "AccessToken", "SPAppToken" }; foreach (string paramName in paramNames) { if (!string.IsNullOrEmpty(request.Form[paramName])) { return request.Form[paramName]; } if (!string.IsNullOrEmpty(request.QueryString[paramName])) { return request.QueryString[paramName]; } } return null; } /// <summary> /// Validate that a specified context token string is intended for this application based on the parameters /// specified in web.config. Parameters used from web.config used for validation include ClientId, /// HostedAppHostNameOverride, HostedAppHostName, ClientSecret, and Realm (if it is specified). If HostedAppHostNameOverride is present, /// it will be used for validation. Otherwise, if the <paramref name="appHostName"/> is not /// null, it is used for validation instead of the web.config's HostedAppHostName. If the token is invalid, an /// exception is thrown. If the token is valid, TokenHelper's static STS metadata url is updated based on the token contents /// and a JsonWebSecurityToken based on the context token is returned. 
        /// </summary>
        /// <param name="contextTokenString">The context token to validate</param>
        /// <param name="appHostName">The URL authority, consisting of Domain Name System (DNS) host name or IP address and the port number, to use for token audience validation.
        /// If null, HostedAppHostName web.config setting is used instead. HostedAppHostNameOverride web.config setting, if present, will be used
        /// for validation instead of <paramref name="appHostName"/> .</param>
        /// <returns>A JsonWebSecurityToken based on the context token.</returns>
        public static SharePointContextToken ReadAndValidateContextToken(string contextTokenString, string appHostName = null)
        {
            JsonWebSecurityTokenHandler tokenHandler = CreateJsonWebSecurityTokenHandler();
            SecurityToken securityToken = tokenHandler.ReadToken(contextTokenString);
            JsonWebSecurityToken jsonToken = securityToken as JsonWebSecurityToken;
            SharePointContextToken token = SharePointContextToken.Create(jsonToken);

            // Side effect: update the static ACS endpoint fields from the token's STS authority.
            // The split assumes the authority contains at least one '.' (e.g. "accounts.accesscontrol.windows.net").
            string stsAuthority = (new Uri(token.SecurityTokenServiceUri)).Authority;
            int firstDot = stsAuthority.IndexOf('.');

            GlobalEndPointPrefix = stsAuthority.Substring(0, firstDot);
            AcsHostUrl = stsAuthority.Substring(firstDot + 1);

            // Validate signature/lifetime before checking the audience below.
            tokenHandler.ValidateToken(jsonToken);

            // Build the list of acceptable audiences: web.config override wins,
            // then the caller-supplied host name, then the web.config default.
            string[] acceptableAudiences;
            if (!String.IsNullOrEmpty(HostedAppHostNameOverride))
            {
                acceptableAudiences = HostedAppHostNameOverride.Split(';');
            }
            else if (appHostName == null)
            {
                acceptableAudiences = new[] { HostedAppHostName };
            }
            else
            {
                acceptableAudiences = new[] { appHostName };
            }

            bool validationSuccessful = false;
            string realm = Realm ?? token.Realm;
            foreach (var audience in acceptableAudiences)
            {
                // The expected audience is "clientId/audience@realm"-style principal;
                // comparison is case-insensitive.
                string principal = GetFormattedPrincipal(ClientId, audience, realm);
                if (StringComparer.OrdinalIgnoreCase.Equals(token.Audience, principal))
                {
                    validationSuccessful = true;
                    break;
                }
            }

            if (!validationSuccessful)
            {
                throw new AudienceUriValidationFailedException(
                    String.Format(CultureInfo.CurrentCulture,
                    "\"{0}\" is not the intended audience \"{1}\"", String.Join(";", acceptableAudiences), token.Audience));
            }

            return token;
        }

        /// <summary>
        /// Retrieves an access token from ACS to call the source of the specified context token at the specified
        /// targetHost. The targetHost must be registered for the principal that sent the context token.
        /// </summary>
        /// <param name="contextToken">Context token issued by the intended access token audience</param>
        /// <param name="targetHost">Url authority of the target principal</param>
        /// <returns>An access token with an audience matching the context token's source</returns>
        public static OAuth2AccessTokenResponse GetAccessToken(SharePointContextToken contextToken, string targetHost)
        {
            string targetPrincipalName = contextToken.TargetPrincipalName;

            // Extract the refreshToken from the context token
            string refreshToken = contextToken.RefreshToken;

            // No refresh token in the context token means no access token can be obtained.
            if (String.IsNullOrEmpty(refreshToken))
            {
                return null;
            }

            string targetRealm = Realm ?? contextToken.Realm;

            return GetAccessToken(refreshToken, targetPrincipalName, targetHost, targetRealm);
        }

        /// <summary>
        /// Uses the specified authorization code to retrieve an access token from ACS to call the specified principal
        /// at the specified targetHost. The targetHost must be registered for target principal. If specified realm is
        /// null, the "Realm" setting in web.config will be used instead.
/// </summary> /// <param name="authorizationCode">Authorization code to exchange for access token</param> /// <param name="targetPrincipalName">Name of the target principal to retrieve an access token for</param> /// <param name="targetHost">Url authority of the target principal</param> /// <param name="targetRealm">Realm to use for the access token's nameid and audience</param> /// <param name="redirectUri">Redirect URI registerd for this app</param> /// <returns>An access token with an audience of the target principal</returns> public static OAuth2AccessTokenResponse GetAccessToken( string authorizationCode, string targetPrincipalName, string targetHost, string targetRealm, Uri redirectUri) { if (targetRealm == null) { targetRealm = Realm; } string resource = GetFormattedPrincipal(targetPrincipalName, targetHost, targetRealm); string clientId = GetFormattedPrincipal(ClientId, null, targetRealm); // Create request for token. The RedirectUri is null here. This will fail if redirect uri is registered OAuth2AccessTokenRequest oauth2Request = OAuth2MessageFactory.CreateAccessTokenRequestWithAuthorizationCode( clientId, ClientSecret, authorizationCode, redirectUri, resource); // Get token OAuth2S2SClient client = new OAuth2S2SClient(); OAuth2AccessTokenResponse oauth2Response; try { oauth2Response = client.Issue(AcsMetadataParser.GetStsUrl(targetRealm), oauth2Request) as OAuth2AccessTokenResponse; } catch (WebException wex) { using (StreamReader sr = new StreamReader(wex.Response.GetResponseStream())) { string responseText = sr.ReadToEnd(); throw new WebException(wex.Message + " - " + responseText, wex); } } return oauth2Response; } /// <summary> /// Uses the specified refresh token to retrieve an access token from ACS to call the specified principal /// at the specified targetHost. The targetHost must be registered for target principal. If specified realm is /// null, the "Realm" setting in web.config will be used instead. 
/// </summary> /// <param name="refreshToken">Refresh token to exchange for access token</param> /// <param name="targetPrincipalName">Name of the target principal to retrieve an access token for</param> /// <param name="targetHost">Url authority of the target principal</param> /// <param name="targetRealm">Realm to use for the access token's nameid and audience</param> /// <returns>An access token with an audience of the target principal</returns> public static OAuth2AccessTokenResponse GetAccessToken( string refreshToken, string targetPrincipalName, string targetHost, string targetRealm) { if (targetRealm == null) { targetRealm = Realm; } string resource = GetFormattedPrincipal(targetPrincipalName, targetHost, targetRealm); string clientId = GetFormattedPrincipal(ClientId, null, targetRealm); OAuth2AccessTokenRequest oauth2Request = OAuth2MessageFactory.CreateAccessTokenRequestWithRefreshToken(clientId, ClientSecret, refreshToken, resource); // Get token OAuth2S2SClient client = new OAuth2S2SClient(); OAuth2AccessTokenResponse oauth2Response; try { oauth2Response = client.Issue(AcsMetadataParser.GetStsUrl(targetRealm), oauth2Request) as OAuth2AccessTokenResponse; } catch (WebException wex) { using (StreamReader sr = new StreamReader(wex.Response.GetResponseStream())) { string responseText = sr.ReadToEnd(); throw new WebException(wex.Message + " - " + responseText, wex); } } return oauth2Response; } /// <summary> /// Retrieves an app-only access token from ACS to call the specified principal /// at the specified targetHost. The targetHost must be registered for target principal. If specified realm is /// null, the "Realm" setting in web.config will be used instead. 
/// </summary> /// <param name="targetPrincipalName">Name of the target principal to retrieve an access token for</param> /// <param name="targetHost">Url authority of the target principal</param> /// <param name="targetRealm">Realm to use for the access token's nameid and audience</param> /// <returns>An access token with an audience of the target principal</returns> public static OAuth2AccessTokenResponse GetAppOnlyAccessToken( string targetPrincipalName, string targetHost, string targetRealm) { if (targetRealm == null) { targetRealm = Realm; } string resource = GetFormattedPrincipal(targetPrincipalName, targetHost, targetRealm); string clientId = GetFormattedPrincipal(ClientId, HostedAppHostName, targetRealm); OAuth2AccessTokenRequest oauth2Request = OAuth2MessageFactory.CreateAccessTokenRequestWithClientCredentials(clientId, ClientSecret, resource); oauth2Request.Resource = resource; // Get token OAuth2S2SClient client = new OAuth2S2SClient(); OAuth2AccessTokenResponse oauth2Response; try { oauth2Response = client.Issue(AcsMetadataParser.GetStsUrl(targetRealm), oauth2Request) as OAuth2AccessTokenResponse; } catch (WebException wex) { using (StreamReader sr = new StreamReader(wex.Response.GetResponseStream())) { string responseText = sr.ReadToEnd(); throw new WebException(wex.Message + " - " + responseText, wex); } } return oauth2Response; } /// <summary> /// Creates a client context based on the properties of a remote event receiver /// </summary> /// <param name="properties">Properties of a remote event receiver</param> /// <returns>A ClientContext ready to call the web where the event originated</returns> public static ClientContext CreateRemoteEventReceiverClientContext(SPRemoteEventProperties properties) { Uri sharepointUrl; if (properties.ListEventProperties != null) { sharepointUrl = new Uri(properties.ListEventProperties.WebUrl); } else if (properties.ItemEventProperties != null) { sharepointUrl = new Uri(properties.ItemEventProperties.WebUrl); } else if 
(properties.WebEventProperties != null) { sharepointUrl = new Uri(properties.WebEventProperties.FullUrl); } else { return null; } if (IsHighTrustApp()) { return GetS2SClientContextWithWindowsIdentity(sharepointUrl, null); } return CreateAcsClientContextForUrl(properties, sharepointUrl); } /// <summary> /// Creates a client context based on the properties of an app event /// </summary> /// <param name="properties">Properties of an app event</param> /// <param name="useAppWeb">True to target the app web, false to target the host web</param> /// <returns>A ClientContext ready to call the app web or the parent web</returns> public static ClientContext CreateAppEventClientContext(SPRemoteEventProperties properties, bool useAppWeb) { if (properties.AppEventProperties == null) { return null; } Uri sharepointUrl = useAppWeb ? properties.AppEventProperties.AppWebFullUrl : properties.AppEventProperties.HostWebFullUrl; if (IsHighTrustApp()) { return GetS2SClientContextWithWindowsIdentity(sharepointUrl, null); } return CreateAcsClientContextForUrl(properties, sharepointUrl); } /// <summary> /// Retrieves an access token from ACS using the specified authorization code, and uses that access token to /// create a client context /// </summary> /// <param name="targetUrl">Url of the target SharePoint site</param> /// <param name="authorizationCode">Authorization code to use when retrieving the access token from ACS</param> /// <param name="redirectUri">Redirect URI registerd for this app</param> /// <returns>A ClientContext ready to call targetUrl with a valid access token</returns> public static ClientContext GetClientContextWithAuthorizationCode( string targetUrl, string authorizationCode, Uri redirectUri) { return GetClientContextWithAuthorizationCode(targetUrl, SharePointPrincipal, authorizationCode, GetRealmFromTargetUrl(new Uri(targetUrl)), redirectUri); } /// <summary> /// Retrieves an access token from ACS using the specified authorization code, and uses that access token 
to /// create a client context /// </summary> /// <param name="targetUrl">Url of the target SharePoint site</param> /// <param name="targetPrincipalName">Name of the target SharePoint principal</param> /// <param name="authorizationCode">Authorization code to use when retrieving the access token from ACS</param> /// <param name="targetRealm">Realm to use for the access token's nameid and audience</param> /// <param name="redirectUri">Redirect URI registerd for this app</param> /// <returns>A ClientContext ready to call targetUrl with a valid access token</returns> public static ClientContext GetClientContextWithAuthorizationCode( string targetUrl, string targetPrincipalName, string authorizationCode, string targetRealm, Uri redirectUri) { Uri targetUri = new Uri(targetUrl); string accessToken = GetAccessToken(authorizationCode, targetPrincipalName, targetUri.Authority, targetRealm, redirectUri).AccessToken; return GetClientContextWithAccessToken(targetUrl, accessToken); } /// <summary> /// Uses the specified access token to create a client context /// </summary> /// <param name="targetUrl">Url of the target SharePoint site</param> /// <param name="accessToken">Access token to be used when calling the specified targetUrl</param> /// <returns>A ClientContext ready to call targetUrl with the specified access token</returns> public static ClientContext GetClientContextWithAccessToken(string targetUrl, string accessToken) { ClientContext clientContext = new ClientContext(targetUrl); clientContext.AuthenticationMode = ClientAuthenticationMode.Anonymous; clientContext.FormDigestHandlingEnabled = false; clientContext.ExecutingWebRequest += delegate(object oSender, WebRequestEventArgs webRequestEventArgs) { webRequestEventArgs.WebRequestExecutor.RequestHeaders["Authorization"] = "Bearer " + accessToken; }; return clientContext; } /// <summary> /// Retrieves an access token from ACS using the specified context token, and uses that access token to create /// a client context 
        /// </summary>
        /// <param name="targetUrl">Url of the target SharePoint site</param>
        /// <param name="contextTokenString">Context token received from the target SharePoint site</param>
        /// <param name="appHostUrl">Url authority of the hosted app. If this is null, the value in the HostedAppHostName
        /// of web.config will be used instead</param>
        /// <returns>A ClientContext ready to call targetUrl with a valid access token</returns>
        public static ClientContext GetClientContextWithContextToken(
            string targetUrl,
            string contextTokenString,
            string appHostUrl)
        {
            // Validate the incoming context token first, then exchange it for an access token.
            SharePointContextToken contextToken = ReadAndValidateContextToken(contextTokenString, appHostUrl);

            Uri targetUri = new Uri(targetUrl);

            string accessToken = GetAccessToken(contextToken, targetUri.Authority).AccessToken;

            return GetClientContextWithAccessToken(targetUrl, accessToken);
        }

        /// <summary>
        /// Returns the SharePoint url to which the app should redirect the browser to request consent and get back
        /// an authorization code.
        /// </summary>
        /// <param name="contextUrl">Absolute Url of the SharePoint site</param>
        /// <param name="scope">Space-delimited permissions to request from the SharePoint site in "shorthand" format
        /// (e.g. "Web.Read Site.Write")</param>
        /// <returns>Url of the SharePoint site's OAuth authorization page</returns>
        public static string GetAuthorizationUrl(string contextUrl, string scope)
        {
            return string.Format(
                "{0}{1}?IsDlg=1&client_id={2}&scope={3}&response_type=code",
                EnsureTrailingSlash(contextUrl),
                AuthorizationPage,
                ClientId,
                scope);
        }

        /// <summary>
        /// Returns the SharePoint url to which the app should redirect the browser to request consent and get back
        /// an authorization code.
        /// </summary>
        /// <param name="contextUrl">Absolute Url of the SharePoint site</param>
        /// <param name="scope">Space-delimited permissions to request from the SharePoint site in "shorthand" format
        /// (e.g. "Web.Read Site.Write")</param>
        /// <param name="redirectUri">Uri to which SharePoint should redirect the browser to after consent is
        /// granted</param>
        /// <returns>Url of the SharePoint site's OAuth authorization page</returns>
        public static string GetAuthorizationUrl(string contextUrl, string scope, string redirectUri)
        {
            return string.Format(
                "{0}{1}?IsDlg=1&client_id={2}&scope={3}&response_type=code&redirect_uri={4}",
                EnsureTrailingSlash(contextUrl),
                AuthorizationPage,
                ClientId,
                scope,
                redirectUri);
        }

        /// <summary>
        /// Returns the SharePoint url to which the app should redirect the browser to request a new context token.
        /// </summary>
        /// <param name="contextUrl">Absolute Url of the SharePoint site</param>
        /// <param name="redirectUri">Uri to which SharePoint should redirect the browser to with a context token</param>
        /// <returns>Url of the SharePoint site's context token redirect page</returns>
        public static string GetAppContextTokenRequestUrl(string contextUrl, string redirectUri)
        {
            return string.Format(
                "{0}{1}?client_id={2}&redirect_uri={3}",
                EnsureTrailingSlash(contextUrl),
                RedirectPage,
                ClientId,
                redirectUri);
        }

        /// <summary>
        /// Retrieves an S2S access token signed by the application's private certificate on behalf of the specified
        /// WindowsIdentity and intended for the SharePoint at the targetApplicationUri. If no Realm is specified in
        /// web.config, an auth challenge will be issued to the targetApplicationUri to discover it.
        /// </summary>
        /// <param name="targetApplicationUri">Url of the target SharePoint site</param>
        /// <param name="identity">Windows identity of the user on whose behalf to create the access token</param>
        /// <returns>An access token with an audience of the target principal</returns>
        public static string GetS2SAccessTokenWithWindowsIdentity(
            Uri targetApplicationUri,
            WindowsIdentity identity)
        {
            // Realm: use the configured value if present, otherwise discover it with an auth challenge.
            string realm = string.IsNullOrEmpty(Realm) ? GetRealmFromTargetUrl(targetApplicationUri) : Realm;

            // No identity means an app-only token (null claims).
            JsonWebTokenClaim[] claims = identity != null ? GetClaimsWithWindowsIdentity(identity) : null;

            return GetS2SAccessTokenWithClaims(targetApplicationUri.Authority, realm, claims);
        }

        /// <summary>
        /// Retrieves an S2S client context with an access token signed by the application's private certificate on
        /// behalf of the specified WindowsIdentity and intended for application at the targetApplicationUri using the
        /// targetRealm. If no Realm is specified in web.config, an auth challenge will be issued to the
        /// targetApplicationUri to discover it.
        /// </summary>
        /// <param name="targetApplicationUri">Url of the target SharePoint site</param>
        /// <param name="identity">Windows identity of the user on whose behalf to create the access token</param>
        /// <returns>A ClientContext using an access token with an audience of the target application</returns>
        public static ClientContext GetS2SClientContextWithWindowsIdentity(
            Uri targetApplicationUri,
            WindowsIdentity identity)
        {
            // Realm: use the configured value if present, otherwise discover it with an auth challenge.
            string realm = string.IsNullOrEmpty(Realm) ? GetRealmFromTargetUrl(targetApplicationUri) : Realm;

            // No identity means an app-only token (null claims).
            JsonWebTokenClaim[] claims = identity != null ? GetClaimsWithWindowsIdentity(identity) : null;

            string accessToken = GetS2SAccessTokenWithClaims(targetApplicationUri.Authority, realm, claims);

            return GetClientContextWithAccessToken(targetApplicationUri.ToString(), accessToken);
        }

        /// <summary>
        /// Get authentication realm from SharePoint
        /// </summary>
        /// <param name="targetApplicationUri">Url of the target SharePoint site</param>
        /// <returns>String representation of the realm GUID</returns>
        public static string GetRealmFromTargetUrl(Uri targetApplicationUri)
        {
            // Send a deliberately empty bearer header so the server replies 401 with a
            // WWW-Authenticate challenge that carries the realm.
            WebRequest request = WebRequest.Create(targetApplicationUri + "/_vti_bin/client.svc");
            request.Headers.Add("Authorization: Bearer ");

            try
            {
                using (request.GetResponse())
                {
                }
            }
            catch (WebException e)
            {
                // No HTTP response at all (e.g. network failure) - realm cannot be determined.
                if (e.Response == null)
                {
                    return null;
                }

                string bearerResponseHeader = e.Response.Headers["WWW-Authenticate"];
                if (string.IsNullOrEmpty(bearerResponseHeader))
                {
                    return null;
                }

                const string bearer = "Bearer realm=\"";
                int bearerIndex = bearerResponseHeader.IndexOf(bearer, StringComparison.Ordinal);
                if (bearerIndex < 0)
                {
                    return null;
                }

                int realmIndex = bearerIndex + bearer.Length;

                // 36 = length of a GUID in "D" format (xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx).
                if (bearerResponseHeader.Length >= realmIndex + 36)
                {
                    string targetRealm = bearerResponseHeader.Substring(realmIndex, 36);

                    Guid realmGuid;

                    if (Guid.TryParse(targetRealm, out realmGuid))
                    {
                        return targetRealm;
                    }
                }
            }
            // Success path (no 401 challenge) also yields null: no realm was advertised.
            return null;
        }

        /// <summary>
        /// Determines if this is a high trust app.
        /// </summary>
        /// <returns>True if this is a high trust app.</returns>
        public static bool IsHighTrustApp()
        {
            // High trust is defined by the presence of configured signing credentials.
            return SigningCredentials != null;
        }

        /// <summary>
        /// Ensures that the specified URL ends with '/' if it is not null or empty.
/// </summary>
/// <param name="url">The url.</param>
/// <returns>The url ending with '/' if it is not null or empty.</returns>
public static string EnsureTrailingSlash(string url)
{
    if (!string.IsNullOrEmpty(url) && url[url.Length - 1] != '/')
    {
        return url + "/";
    }
    return url;
}

#endregion

#region private fields

//
// Configuration Constants
//

// Relative SharePoint pages used in the low-trust OAuth authorization flow.
private const string AuthorizationPage = "_layouts/15/OAuthAuthorize.aspx";
private const string RedirectPage = "_layouts/15/AppRedirect.aspx";
// Well-known principal id of Azure Access Control Service (ACS).
private const string AcsPrincipalName = "00000001-0000-0000-c000-000000000000";
private const string AcsMetadataEndPointRelativeUrl = "metadata/json/1";
private const string S2SProtocol = "OAuth2";
private const string DelegationIssuance = "DelegationIssuance1.0";
private const string NameIdentifierClaimType = JsonWebTokenConstants.ReservedClaims.NameIdentifier;
private const string TrustedForImpersonationClaimType = "trustedfordelegation";
private const string ActorTokenClaimType = JsonWebTokenConstants.ReservedClaims.ActorToken;

//
// Environment Constants
//

private static string GlobalEndPointPrefix = "accounts";
private static string AcsHostUrl = "accesscontrol.windows.net";

//
// Hosted app configuration
//

// "HostedAppName"/"HostedAppSigningKey" are legacy web.config keys kept as
// fallbacks for "ClientId"/"ClientSecret".
private static readonly string ClientId = string.IsNullOrEmpty(WebConfigurationManager.AppSettings.Get("ClientId")) ? WebConfigurationManager.AppSettings.Get("HostedAppName") : WebConfigurationManager.AppSettings.Get("ClientId");
// IssuerId defaults to ClientId when not configured separately.
private static readonly string IssuerId = string.IsNullOrEmpty(WebConfigurationManager.AppSettings.Get("IssuerId")) ? ClientId : WebConfigurationManager.AppSettings.Get("IssuerId");
private static readonly string HostedAppHostNameOverride = WebConfigurationManager.AppSettings.Get("HostedAppHostNameOverride");
private static readonly string HostedAppHostName = WebConfigurationManager.AppSettings.Get("HostedAppHostName");
private static readonly string ClientSecret = string.IsNullOrEmpty(WebConfigurationManager.AppSettings.Get("ClientSecret")) ? WebConfigurationManager.AppSettings.Get("HostedAppSigningKey") : WebConfigurationManager.AppSettings.Get("ClientSecret");
// SecondaryClientSecret supports secret rollover: token validation accepts
// either secret (see CreateJsonWebSecurityTokenHandler).
private static readonly string SecondaryClientSecret = WebConfigurationManager.AppSettings.Get("SecondaryClientSecret");
private static readonly string Realm = WebConfigurationManager.AppSettings.Get("Realm");
// NOTE(review): ServiceNamespace intentionally reads the same "Realm" setting —
// in ACS the service namespace equals the realm GUID. Confirm before "fixing".
private static readonly string ServiceNamespace = WebConfigurationManager.AppSettings.Get("Realm");

// High-trust (S2S) configuration: both the certificate path and its password must
// be present, otherwise ClientCertificate (and SigningCredentials) stay null and
// IsHighTrustApp() returns false.
private static readonly string ClientSigningCertificatePath = WebConfigurationManager.AppSettings.Get("ClientSigningCertificatePath");
private static readonly string ClientSigningCertificatePassword = WebConfigurationManager.AppSettings.Get("ClientSigningCertificatePassword");
private static readonly X509Certificate2 ClientCertificate = (string.IsNullOrEmpty(ClientSigningCertificatePath) || string.IsNullOrEmpty(ClientSigningCertificatePassword)) ? null : new X509Certificate2(ClientSigningCertificatePath, ClientSigningCertificatePassword);
private static readonly X509SigningCredentials SigningCredentials = (ClientCertificate == null) ?
null : new X509SigningCredentials(ClientCertificate, SecurityAlgorithms.RsaSha256Signature, SecurityAlgorithms.Sha256Digest);

#endregion

#region private methods

// Builds a CSOM ClientContext for a remote event using the low-trust (ACS) flow:
// reads the context token off the event properties, validates it against the
// current request's host, then exchanges it for an access token.
private static ClientContext CreateAcsClientContextForUrl(SPRemoteEventProperties properties, Uri sharepointUrl)
{
    string contextTokenString = properties.ContextToken;

    if (String.IsNullOrEmpty(contextTokenString))
    {
        return null;
    }

    SharePointContextToken contextToken = ReadAndValidateContextToken(contextTokenString, OperationContext.Current.IncomingMessageHeaders.To.Host);
    string accessToken = GetAccessToken(contextToken, sharepointUrl.Authority).AccessToken;

    return GetClientContextWithAccessToken(sharepointUrl.ToString(), accessToken);
}

// NOTE(review): Path.Combine on a URL only works here because the base URL ends
// with '/' so no separator is inserted; plain string concatenation would be the
// safer idiom for URLs — confirm before changing.
private static string GetAcsMetadataEndpointUrl()
{
    return Path.Combine(GetAcsGlobalEndpointUrl(), AcsMetadataEndPointRelativeUrl);
}

// Formats "principal/host@realm", omitting "/host" when hostName is empty.
private static string GetFormattedPrincipal(string principalName, string hostName, string realm)
{
    if (!String.IsNullOrEmpty(hostName))
    {
        return String.Format(CultureInfo.InvariantCulture, "{0}/{1}@{2}", principalName, hostName, realm);
    }

    return String.Format(CultureInfo.InvariantCulture, "{0}@{1}", principalName, realm);
}

// Full ACS principal, e.g. "00000001-...-c000-.../accounts.accesscontrol.windows.net@<realm>".
private static string GetAcsPrincipalName(string realm)
{
    return GetFormattedPrincipal(AcsPrincipalName, new Uri(GetAcsGlobalEndpointUrl()).Host, realm);
}

// e.g. "https://accounts.accesscontrol.windows.net/" (note the trailing slash,
// which GetAcsMetadataEndpointUrl relies on).
private static string GetAcsGlobalEndpointUrl()
{
    return String.Format(CultureInfo.InvariantCulture, "https://{0}.{1}/", GlobalEndPointPrefix, AcsHostUrl);
}

// Configures a JWT handler that validates context tokens signed with the client
// secret (and the secondary secret, to tolerate secret rollover). Audience and
// certificate checks are disabled here; callers validate audience/realm separately.
private static JsonWebSecurityTokenHandler CreateJsonWebSecurityTokenHandler()
{
    JsonWebSecurityTokenHandler handler = new JsonWebSecurityTokenHandler();
    handler.Configuration = new SecurityTokenHandlerConfiguration();
    handler.Configuration.AudienceRestriction = new AudienceRestriction(AudienceUriMode.Never);
    handler.Configuration.CertificateValidator = X509CertificateValidator.None;

    // Secrets are stored base64-encoded in web.config.
    List<byte[]> securityKeys = new List<byte[]>();
    securityKeys.Add(Convert.FromBase64String(ClientSecret));
    if (!string.IsNullOrEmpty(SecondaryClientSecret))
    {
        securityKeys.Add(Convert.FromBase64String(SecondaryClientSecret));
    }

    // One token that carries every acceptable key (see MultipleSymmetricKeySecurityToken).
    List<SecurityToken> securityTokens = new List<SecurityToken>();
    securityTokens.Add(new MultipleSymmetricKeySecurityToken(securityKeys));

    handler.Configuration.IssuerTokenResolver =
        SecurityTokenResolver.CreateDefaultSecurityTokenResolver(
            new ReadOnlyCollection<SecurityToken>(securityTokens),
            false);
    // Every key maps to the ACS issuer for our service namespace.
    SymmetricKeyIssuerNameRegistry issuerNameRegistry = new SymmetricKeyIssuerNameRegistry();
    foreach (byte[] securitykey in securityKeys)
    {
        issuerNameRegistry.AddTrustedIssuer(securitykey, GetAcsPrincipalName(ServiceNamespace));
    }
    handler.Configuration.IssuerNameRegistry = issuerNameRegistry;
    return handler;
}

// High-trust S2S: issues a self-signed access token addressed to the target
// SharePoint host. A null claim set makes the token app-only.
// Note: targetRealm is passed as both source and target realm — in the
// high-trust topology the app and SharePoint share one realm.
private static string GetS2SAccessTokenWithClaims(
    string targetApplicationHostName,
    string targetRealm,
    IEnumerable<JsonWebTokenClaim> claims)
{
    return IssueToken(
        ClientId,
        IssuerId,
        targetRealm,
        SharePointPrincipal,
        targetRealm,
        targetApplicationHostName,
        true,
        claims,
        claims == null);
}

// Maps a WindowsIdentity to the claims SharePoint expects for S2S user identity:
// the user's SID as "nameid" plus the identity-provider hint "nii".
// NOTE(review): ToLower() is culture-sensitive; SID strings are ASCII so this is
// benign in practice, but ToLowerInvariant() would be the safer call — confirm.
private static JsonWebTokenClaim[] GetClaimsWithWindowsIdentity(WindowsIdentity identity)
{
    JsonWebTokenClaim[] claims = new JsonWebTokenClaim[]
    {
        new JsonWebTokenClaim(NameIdentifierClaimType, identity.User.Value.ToLower()),
        new JsonWebTokenClaim("nii", "urn:office:idp:activedirectory")
    };
    return claims;
}

// Issues the high-trust token pair: an inner "actor" token signed with the app
// certificate and, for user+app calls, an unsigned outer token carrying the user
// claims plus the actor token embedded as a claim.
private static string IssueToken(
    string sourceApplication,
    string issuerApplication,
    string sourceRealm,
    string targetApplication,
    string targetRealm,
    string targetApplicationHostName,
    bool trustedForDelegation,
    IEnumerable<JsonWebTokenClaim> claims,
    bool appOnly = false)
{
    if (null == SigningCredentials)
    {
        throw new InvalidOperationException("SigningCredentials was not initialized");
    }

    #region Actor token

    // Principals are realm-qualified as "id@realm" when a realm is known.
    string issuer = string.IsNullOrEmpty(sourceRealm) ? issuerApplication : string.Format("{0}@{1}", issuerApplication, sourceRealm);
    string nameid = string.IsNullOrEmpty(sourceRealm) ? sourceApplication : string.Format("{0}@{1}", sourceApplication, sourceRealm);
    string audience = string.Format("{0}/{1}@{2}", targetApplication, targetApplicationHostName, targetRealm);

    List<JsonWebTokenClaim> actorClaims = new List<JsonWebTokenClaim>();
    actorClaims.Add(new JsonWebTokenClaim(JsonWebTokenConstants.ReservedClaims.NameIdentifier, nameid));
    if (trustedForDelegation && !appOnly)
    {
        // Marks the app as trusted to act on behalf of the user in the outer token.
        actorClaims.Add(new JsonWebTokenClaim(TrustedForImpersonationClaimType, "true"));
    }

    // Create token
    JsonWebSecurityToken actorToken = new JsonWebSecurityToken(
        issuer: issuer,
        audience: audience,
        validFrom: DateTime.UtcNow,
        validTo: DateTime.UtcNow.Add(HighTrustAccessTokenLifetime),
        signingCredentials: SigningCredentials,
        claims: actorClaims);

    string actorTokenString = new JsonWebSecurityTokenHandler().WriteTokenAsString(actorToken);

    if (appOnly)
    {
        // App-only token is the same as actor token for delegated case
        return actorTokenString;
    }

    #endregion Actor token

    #region Outer token

    List<JsonWebTokenClaim> outerClaims = null == claims ? new List<JsonWebTokenClaim>() : new List<JsonWebTokenClaim>(claims);
    outerClaims.Add(new JsonWebTokenClaim(ActorTokenClaimType, actorTokenString));

    // The outer token is unsigned; SharePoint trusts it via the embedded actor token.
    JsonWebSecurityToken jsonToken = new JsonWebSecurityToken(
        nameid, // outer token issuer should match actor token nameid
        audience,
        DateTime.UtcNow,
        DateTime.UtcNow.Add(HighTrustAccessTokenLifetime),
        outerClaims);

    string accessToken = new JsonWebSecurityTokenHandler().WriteTokenAsString(jsonToken);

    #endregion Outer token

    return accessToken;
}

#endregion

#region AcsMetadataParser

// This class is used to get MetaData document from the global STS endpoint. It contains
// methods to parse the MetaData document and get endpoints and STS certificate.
/// <summary>
/// Downloads and parses the ACS metadata document published at the global STS
/// endpoint, exposing the ACS signing certificate and service endpoint URLs.
/// </summary>
public static class AcsMetadataParser
{
    /// <summary>
    /// Gets the X.509 certificate ACS uses to sign its tokens for the given realm.
    /// </summary>
    /// <param name="realm">Realm (tenant) GUID to query metadata for.</param>
    /// <returns>The ACS signing certificate.</returns>
    /// <exception cref="InvalidOperationException">The metadata document contains no signing key.</exception>
    public static X509Certificate2 GetAcsSigningCert(string realm)
    {
        JsonMetadataDocument document = GetMetadataDocument(realm);

        // The first key in the document is the current signing key.
        if (null != document.keys && document.keys.Count > 0)
        {
            JsonKey signingKey = document.keys[0];

            if (null != signingKey && null != signingKey.keyValue)
            {
                return new X509Certificate2(Encoding.UTF8.GetBytes(signingKey.keyValue.value));
            }
        }

        // Was `new Exception(...)`: a specific exception type is preferable (CA2201)
        // and remains compatible with existing catch (Exception) handlers.
        throw new InvalidOperationException("Metadata document does not contain ACS signing certificate.");
    }

    /// <summary>
    /// Gets the delegation-issuance service URL for the given realm.
    /// </summary>
    /// <param name="realm">Realm (tenant) GUID to query metadata for.</param>
    /// <returns>The delegation service endpoint URL.</returns>
    /// <exception cref="InvalidOperationException">The metadata document has no delegation endpoint.</exception>
    public static string GetDelegationServiceUrl(string realm)
    {
        JsonMetadataDocument document = GetMetadataDocument(realm);

        JsonEndpoint delegationEndpoint = document.endpoints.SingleOrDefault(e => e.protocol == DelegationIssuance);

        if (null != delegationEndpoint)
        {
            return delegationEndpoint.location;
        }
        throw new InvalidOperationException("Metadata document does not contain Delegation Service endpoint Url");
    }

    /// <summary>
    /// Downloads and deserializes the metadata document for the given realm.
    /// Performs a synchronous HTTP request to the global ACS endpoint.
    /// </summary>
    /// <param name="realm">Realm (tenant) GUID to query metadata for.</param>
    /// <returns>The deserialized metadata document (never null).</returns>
    /// <exception cref="InvalidOperationException">No document could be deserialized.</exception>
    private static JsonMetadataDocument GetMetadataDocument(string realm)
    {
        string acsMetadataEndpointUrlWithRealm = String.Format(CultureInfo.InvariantCulture, "{0}?realm={1}",
                                                               GetAcsMetadataEndpointUrl(),
                                                               realm);
        byte[] acsMetadata;
        using (WebClient webClient = new WebClient())
        {
            acsMetadata = webClient.DownloadData(acsMetadataEndpointUrlWithRealm);
        }
        string jsonResponseString = Encoding.UTF8.GetString(acsMetadata);

        JavaScriptSerializer serializer = new JavaScriptSerializer();
        JsonMetadataDocument document = serializer.Deserialize<JsonMetadataDocument>(jsonResponseString);

        if (null == document)
        {
            throw new InvalidOperationException("No metadata document found at the global endpoint " + acsMetadataEndpointUrlWithRealm);
        }

        return document;
    }

    /// <summary>
    /// Gets the OAuth2 security token service (STS) URL for the given realm.
    /// </summary>
    /// <param name="realm">Realm (tenant) GUID to query metadata for.</param>
    /// <returns>The STS endpoint URL.</returns>
    /// <exception cref="InvalidOperationException">The metadata document has no OAuth2 endpoint.</exception>
    public static string GetStsUrl(string realm)
    {
        JsonMetadataDocument document = GetMetadataDocument(realm);

        JsonEndpoint s2sEndpoint = document.endpoints.SingleOrDefault(e => e.protocol == S2SProtocol);

        if (null != s2sEndpoint)
        {
            return s2sEndpoint.location;
        }

        throw new InvalidOperationException("Metadata document does not contain STS endpoint url");
    }

    // DTOs mirroring the ACS metadata JSON. Property names are intentionally
    // lowercase to match the wire format consumed by JavaScriptSerializer.
    private class JsonMetadataDocument
    {
        public string serviceName { get; set; }
        public List<JsonEndpoint> endpoints { get; set; }
        public List<JsonKey> keys { get; set; }
    }

    private class JsonEndpoint
    {
        public string location { get; set; }
        public string protocol { get; set; }
        public string usage { get; set; }
    }

    private class JsonKeyValue
    {
        public string type { get; set; }
        public string value { get; set; }
    }

    private class JsonKey
    {
        public string usage { get; set; }
        public JsonKeyValue keyValue { get; set; }
    }
}

#endregion
}

/// <summary>
/// A JsonWebSecurityToken generated by SharePoint to authenticate to a 3rd party application and allow callbacks using a refresh token
/// </summary>
public class SharePointContextToken : JsonWebSecurityToken
{
    /// <summary>
    /// Creates a SharePointContextToken copying the issuer, audience, validity
    /// window and claims of the supplied token.
    /// </summary>
    public static SharePointContextToken Create(JsonWebSecurityToken contextToken)
    {
        return new SharePointContextToken(contextToken.Issuer, contextToken.Audience, contextToken.ValidFrom, contextToken.ValidTo, contextToken.Claims);
    }

    public SharePointContextToken(string issuer, string audience, DateTime validFrom, DateTime validTo, IEnumerable<JsonWebTokenClaim> claims)
        : base(issuer, audience, validFrom, validTo, claims)
    {
    }

    public SharePointContextToken(string issuer, string audience, DateTime validFrom, DateTime validTo, IEnumerable<JsonWebTokenClaim> claims, SecurityToken issuerToken, JsonWebSecurityToken actorToken)
        : base(issuer, audience, validFrom, validTo, claims, issuerToken, actorToken)
    {
    }

    public SharePointContextToken(string issuer, string audience, DateTime validFrom, DateTime validTo, IEnumerable<JsonWebTokenClaim> claims, SigningCredentials signingCredentials)
        : base(issuer, audience, validFrom, validTo, claims, signingCredentials)
    {
    }

    /// <summary>
    /// The context token's "nameid" claim, or null if absent.
    /// </summary>
    public string NameId
    {
        get
        {
            return GetClaimValue(this, "nameid");
        }
    }

    /// <summary>
    /// The principal name portion of the context token's "appctxsender" claim
    /// </summary>
    public string TargetPrincipalName
    {
        get
        {
            string appctxsender = GetClaimValue(this, "appctxsender");

            if (appctxsender == null)
            {
                return null;
            }

            // "appctxsender" has the form "<principal>@<realm>".
            return appctxsender.Split('@')[0];
        }
    }

    /// <summary>
    /// The context token's "refreshtoken" claim
    /// </summary>
    public string RefreshToken
    {
        get
        {
            return GetClaimValue(this, "refreshtoken");
        }
    }

    /// <summary>
    /// The context token's "CacheKey" claim
    /// </summary>
    public string CacheKey
    {
        get
        {
            return GetAppContextProperty("CacheKey");
        }
    }

    /// <summary>
    /// The context token's "SecurityTokenServiceUri" claim
    /// </summary>
    public string SecurityTokenServiceUri
    {
        get
        {
            return GetAppContextProperty("SecurityTokenServiceUri");
        }
    }

    /// <summary>
    /// The realm portion of the context token's "audience" claim
    /// </summary>
    public string Realm
    {
        get
        {
            string aud = Audience;
            if (aud == null)
            {
                return null;
            }

            // Audience has the form "<principal>/<host>@<realm>".
            string tokenRealm = aud.Substring(aud.IndexOf('@') + 1);
            return tokenRealm;
        }
    }

    /// <summary>
    /// Extracts a named property from the JSON payload of the token's "appctx"
    /// claim. Shared by CacheKey and SecurityTokenServiceUri, whose getters were
    /// previously duplicated line for line.
    /// </summary>
    /// <param name="propertyName">Key to read from the deserialized appctx dictionary.</param>
    /// <returns>The property value, or null when the token has no "appctx" claim.</returns>
    private string GetAppContextProperty(string propertyName)
    {
        string appctx = GetClaimValue(this, "appctx");
        if (appctx == null)
        {
            return null;
        }

        // ParseObjectFromJsonString requires a ClientContext instance; the URL is a throwaway.
        ClientContext ctx = new ClientContext("http://tempuri.org");
        Dictionary<string, object> dict = (Dictionary<string, object>)ctx.ParseObjectFromJsonString(appctx);
        return (string)dict[propertyName];
    }

    // Linear scan of the token's claims; ordinal comparison on the claim type.
    private static string GetClaimValue(JsonWebSecurityToken token, string claimType)
    {
        if (token == null)
        {
            throw new ArgumentNullException("token");
        }

        foreach (JsonWebTokenClaim claim in token.Claims)
        {
            if (StringComparer.Ordinal.Equals(claim.ClaimType, claimType))
            {
                return claim.Value;
            }
        }

        return null;
    }
}

/// <summary>
/// Represents a security token which contains multiple security keys that are generated using symmetric algorithms.
/// </summary>
public class MultipleSymmetricKeySecurityToken : SecurityToken
{
    /// <summary>
    /// Initializes a new instance of the MultipleSymmetricKeySecurityToken class.
    /// </summary>
    /// <param name="keys">An enumeration of Byte arrays that contain the symmetric keys.</param>
    public MultipleSymmetricKeySecurityToken(IEnumerable<byte[]> keys)
        : this(UniqueId.CreateUniqueId(), keys)
    {
    }

    /// <summary>
    /// Initializes a new instance of the MultipleSymmetricKeySecurityToken class.
    /// </summary>
    /// <param name="tokenId">The unique identifier of the security token.</param>
    /// <param name="keys">An enumeration of Byte arrays that contain the symmetric keys.</param>
    /// <exception cref="ArgumentNullException">keys is null.</exception>
    /// <exception cref="ArgumentException">tokenId is null or empty, or a key is empty.</exception>
    public MultipleSymmetricKeySecurityToken(string tokenId, IEnumerable<byte[]> keys)
    {
        if (keys == null)
        {
            throw new ArgumentNullException("keys");
        }

        if (String.IsNullOrEmpty(tokenId))
        {
            throw new ArgumentException("Value cannot be a null or empty string.", "tokenId");
        }

        foreach (byte[] key in keys)
        {
            if (key.Length <= 0)
            {
                // Message typo fixed ("greater then" -> "greater than").
                throw new ArgumentException("The key length must be greater than zero.", "keys");
            }
        }

        id = tokenId;
        effectiveTime = DateTime.UtcNow;
        securityKeys = CreateSymmetricSecurityKeys(keys);
    }

    /// <summary>
    /// Gets the unique identifier of the security token.
    /// </summary>
    public override string Id
    {
        get
        {
            return id;
        }
    }

    /// <summary>
    /// Gets the cryptographic keys associated with the security token.
    /// </summary>
    public override ReadOnlyCollection<SecurityKey> SecurityKeys
    {
        get
        {
            return securityKeys.AsReadOnly();
        }
    }

    /// <summary>
    /// Gets the first instant in time at which this security token is valid.
    /// </summary>
    public override DateTime ValidFrom
    {
        get
        {
            return effectiveTime;
        }
    }

    /// <summary>
    /// Gets the last instant in time at which this security token is valid.
    /// </summary>
    public override DateTime ValidTo
    {
        get
        {
            // Never expire
            return DateTime.MaxValue;
        }
    }

    /// <summary>
    /// Returns a value that indicates whether the key identifier for this instance can be resolved to the specified key identifier.
    /// </summary>
    /// <param name="keyIdentifierClause">A SecurityKeyIdentifierClause to compare to this instance</param>
    /// <returns>true if keyIdentifierClause is a SecurityKeyIdentifierClause and it has the same unique identifier as the Id property; otherwise, false.</returns>
    public override bool MatchesKeyIdentifierClause(SecurityKeyIdentifierClause keyIdentifierClause)
    {
        if (keyIdentifierClause == null)
        {
            throw new ArgumentNullException("keyIdentifierClause");
        }

        // Since this is a symmetric token and we do not have IDs to distinguish tokens, we just check for the
        // presence of a SymmetricIssuerKeyIdentifier. The actual mapping to the issuer takes place later
        // when the key is matched to the issuer.
        if (keyIdentifierClause is SymmetricIssuerKeyIdentifierClause)
        {
            return true;
        }
        return base.MatchesKeyIdentifierClause(keyIdentifierClause);
    }

    #region private members

    // Wraps each raw key in an InMemorySymmetricSecurityKey; order is preserved.
    private static List<SecurityKey> CreateSymmetricSecurityKeys(IEnumerable<byte[]> keys)
    {
        List<SecurityKey> symmetricKeys = new List<SecurityKey>();
        foreach (byte[] key in keys)
        {
            symmetricKeys.Add(new InMemorySymmetricSecurityKey(key));
        }
        return symmetricKeys;
    }

    // Set once in the constructor; the token is immutable after construction.
    private readonly string id;
    private readonly DateTime effectiveTime;
    private readonly List<SecurityKey> securityKeys;

    #endregion
}
}