prompt
large_stringlengths
70
991k
completion
large_stringlengths
0
1.02k
<|file_name|>base.py<|end_file_name|><|fim▁begin|><|fim▁hole|># -*- coding:utf-8 -*- import abc import platform from UserList import UserList class Monitor(object): @abc.abstractmethod def current(self): pass @abc.abstractmethod def percent(self, range): pass @abc.abstractmethod def reset(self): pass @abc.abstractmethod def max(self): pass @abc.abstractmethod def min(self): pass class Monitors(UserList): @abc.abstractmethod def percent(self, range): pass @abc.abstractmethod def reset(self): pass @abc.abstractmethod def max(self): pass @abc.abstractmethod def min(self): pass def get_monitors(): if platform.system() == "Windows": from .driver_win_wmi import WinWMIMonitors return WinWMIMonitors() elif platform.system() == "Darwin": from .driver_mac import MacMonitors return MacMonitors() elif platform.system() == "Linux": from .driver_linux import LinuxMonitors return LinuxMonitors() else: raise OSError()<|fim▁end|>
<|file_name|>reports.py<|end_file_name|><|fim▁begin|>from django.core.exceptions import ObjectDoesNotExist from django.db.models import Q from django.template.loader import render_to_string from django.utils.html import format_html from django.utils.safestring import mark_safe from django.utils.translation import ugettext as _ from django.utils.translation import ugettext_lazy from memoized import memoized from corehq import privileges from corehq.apps.accounting.models import BillingAccount from corehq.apps.accounting.utils import domain_has_privilege from corehq.apps.reports.datatables import DataTablesColumn, DataTablesHeader from corehq.apps.reports.dispatcher import UserManagementReportDispatcher from corehq.apps.reports.filters.users import ( ChangeActionFilter, ChangedByUserFilter, EnterpriseUserFilter, ) from corehq.apps.reports.filters.users import \ ExpandedMobileWorkerFilter as EMWF from corehq.apps.reports.generic import GenericTabularReport, GetParamsMixin, PaginatedReportMixin from corehq.apps.reports.standard import DatespanMixin, ProjectReport from corehq.apps.users.audit.change_messages import ( ASSIGNED_LOCATIONS_FIELD, CHANGE_MESSAGES_FIELDS, DOMAIN_FIELD, LOCATION_FIELD, PHONE_NUMBERS_FIELD, ROLE_FIELD, TWO_FACTOR_FIELD, get_messages, ) from corehq.apps.users.models import UserHistory from corehq.const import USER_DATETIME_FORMAT from corehq.util.timezones.conversions import ServerTime class UserHistoryReport(GetParamsMixin, DatespanMixin, GenericTabularReport, ProjectReport, PaginatedReportMixin): slug = 'user_history' name = ugettext_lazy("User History") section_name = ugettext_lazy("User Management") dispatcher = UserManagementReportDispatcher fields = [ 'corehq.apps.reports.filters.users.AffectedUserFilter', 'corehq.apps.reports.filters.users.ChangedByUserFilter', 'corehq.apps.reports.filters.dates.DatespanFilter', 'corehq.apps.reports.filters.users.ChangeActionFilter', 'corehq.apps.reports.filters.users.UserPropertyFilter', 
'corehq.apps.reports.filters.users.UserUploadRecordFilter', ] description = ugettext_lazy("History of user updates") ajax_pagination = True default_sort = {'changed_at': 'desc'} @classmethod def get_primary_properties(cls, domain): """ Get slugs and human-friendly names for the properties that are available for filtering and/or displayed by default in the report, without needing to click "See More". """ if domain_has_privilege(domain, privileges.APP_USER_PROFILES): user_data_label = _("profile or user data") else: user_data_label = _("user data") return { "username": _("username"), ROLE_FIELD: _("role"), "email": _("email"), DOMAIN_FIELD: _("project"), "is_active": _("is active"), "language": _("language"), PHONE_NUMBERS_FIELD: _("phone numbers"), LOCATION_FIELD: _("primary location"), "user_data": user_data_label, TWO_FACTOR_FIELD: _("two factor authentication disabled"), ASSIGNED_LOCATIONS_FIELD: _("assigned locations"), } @property def headers(self): h = [ DataTablesColumn(_("Affected User"), sortable=False), DataTablesColumn(_("Modified by User"), sortable=False), DataTablesColumn(_("Action"), prop_name='action'), DataTablesColumn(_("Via"), prop_name='changed_via'), DataTablesColumn(_("Changes"), sortable=False), DataTablesColumn(_("Change Message"), sortable=False), DataTablesColumn(_("Timestamp"), prop_name='changed_at'), ] return DataTablesHeader(*h) @property def total_records(self): return self._get_queryset().count() @memoized def _get_queryset(self): user_slugs = self.request.GET.getlist(EMWF.slug) user_ids = self._get_user_ids(user_slugs) # return empty queryset if no matching users were found if user_slugs and not user_ids: return UserHistory.objects.none() changed_by_user_slugs = self.request.GET.getlist(ChangedByUserFilter.slug) changed_by_user_ids = self._get_user_ids(changed_by_user_slugs) # return empty queryset if no matching users were found if changed_by_user_slugs and not changed_by_user_ids: return UserHistory.objects.none() user_property = 
self.request.GET.get('user_property') actions = self.request.GET.getlist('action') user_upload_record_id = self.request.GET.get('user_upload_record') query = self._build_query(user_ids, changed_by_user_ids, user_property, actions, user_upload_record_id) return query def _get_user_ids(self, slugs): es_query = self._get_users_es_query(slugs) return es_query.values_list('_id', flat=True) def _get_users_es_query(self, slugs): return EnterpriseUserFilter.user_es_query( self.domain, slugs, self.request.couch_user, ) def _build_query(self, user_ids, changed_by_user_ids, user_property, actions, user_upload_record_id): filters = Q(for_domain__in=self._for_domains()) if user_ids: filters = filters & Q(user_id__in=user_ids) if changed_by_user_ids: filters = filters & Q(changed_by__in=changed_by_user_ids) if user_property: filters = filters & self._get_property_filters(user_property) if actions and ChangeActionFilter.ALL not in actions: filters = filters & Q(action__in=actions) if user_upload_record_id: filters = filters & Q(user_upload_record_id=user_upload_record_id) if self.datespan: filters = filters & Q(changed_at__lt=self.datespan.enddate_adjusted, changed_at__gte=self.datespan.startdate) return UserHistory.objects.filter(filters) def _for_domains(self): return BillingAccount.get_account_by_domain(self.domain).get_domains() @staticmethod def _get_property_filters(user_property): if user_property in CHANGE_MESSAGES_FIELDS: query_filters = Q(change_messages__has_key=user_property) # to include CommCareUser creation from UI where a location can be assigned as a part of user creation # which is tracked only under "changes" and not "change messages" if user_property == LOCATION_FIELD: query_filters = query_filters | Q(changes__has_key='location_id') else: query_filters = Q(changes__has_key=user_property) return query_filters @property def rows(self): records = self._get_queryset().order_by(self.ordering)[ self.pagination.start:self.pagination.start + self.pagination.count ] 
for record in records: yield self._user_history_row(record, self.domain, self.timezone) @property def ordering(self): by, direction = list(self.get_sorting_block()[0].items())[0] return '-' + by if direction == 'desc' else by<|fim▁hole|> @memoized def _get_location_name(self, location_id): from corehq.apps.locations.models import SQLLocation if not location_id: return None try: location_object = SQLLocation.objects.get(location_id=location_id) except ObjectDoesNotExist: return None return location_object.display_name def _user_history_row(self, record, domain, timezone): return [ record.user_repr, record.changed_by_repr, _get_action_display(record.action), record.changed_via, self._user_history_details_cell(record.changes, domain), self._html_list(list(get_messages(record.change_messages))), ServerTime(record.changed_at).user_time(timezone).ui_string(USER_DATETIME_FORMAT), ] def _html_list(self, changes): items = [] if isinstance(changes, dict): for key, value in changes.items(): if isinstance(value, dict): value = self._html_list(value) elif isinstance(value, list): value = format_html(", ".join(value)) else: value = format_html(str(value)) items.append("<li>{}: {}</li>".format(key, value)) elif isinstance(changes, list): items = ["<li>{}</li>".format(format_html(change)) for change in changes] return mark_safe(f"<ul class='list-unstyled'>{''.join(items)}</ul>") def _user_history_details_cell(self, changes, domain): properties = UserHistoryReport.get_primary_properties(domain) properties.pop("user_data", None) primary_changes = {} all_changes = {} for key, value in changes.items(): if key == 'location_id': value = self._get_location_name(value) primary_changes[properties[LOCATION_FIELD]] = value all_changes[properties[LOCATION_FIELD]] = value elif key == 'user_data': for user_data_key, user_data_value in changes['user_data'].items(): all_changes[f"user data: {user_data_key}"] = user_data_value elif key in properties: primary_changes[properties[key]] = value 
all_changes[properties[key]] = value more_count = len(all_changes) - len(primary_changes) return render_to_string("reports/standard/partials/user_history_changes.html", { "primary_changes": self._html_list(primary_changes), "all_changes": self._html_list(all_changes), "more_count": more_count, }) def _get_action_display(logged_action): action = ugettext_lazy("Updated") if logged_action == UserHistory.CREATE: action = ugettext_lazy("Added") elif logged_action == UserHistory.DELETE: action = ugettext_lazy("Deleted") return action<|fim▁end|>
<|file_name|>logreader.py<|end_file_name|><|fim▁begin|>__author__ = 'bdeutsch' import re import numpy as np import pandas as pd # List cards drawn by me and played by opponent def get_cards(filename): # Open the file with open(filename) as f: mycards = [] oppcards = [] for line in f: # Generate my revealed card list m = re.search('name=(.+)id.+to FRIENDLY HAND', line) if m: mycards.append(m.group(1)) n = re.search('name=(.+)id.+to OPPOSING PLAY(?! \(Hero)', line) if n: oppcards.append(n.group(1)) for item in mycards: print item print '\n' for item in oppcards: print item # make a list of card IDs and names def get_ids(): # Create an empty list of IDs idlist = [] with open('test_game') as f: # For each line for line in f: # Find the entity ids m = re.search('[\[ ]id=(\d+) ', line) # if one is found if m: # Check that we haven't found it yet, convert to an integer id = int(m.group(1)) # Add it to the list if id not in idlist: idlist.append(id) # Sort the ids idlist.sort() # Convert to dataframe d = pd.DataFrame(index=idlist) # Rename the index d.index.name = "Entity ID" # Create an empty column for names d["Name"] = np.nan #print d return d # make a list of card names only if followed by id def get_names(): with open('test_game') as f: for line in f: # Find the entity ids m = re.search('[\[ ]name=([\w ]+?) id=', line) if m: print m.group(1) def get_ids_names(df): with open('test_game') as f: namedict = {} for line in f: # Find combinations of entities and names m = re.search('[\[ ]name=([\w ]+?) 
id=(\d+)', line) if m: ent_id = int(m.group(2)) name = m.group(1) df.ix[ent_id, 'Name'] = name #print m.group(2), m.group(1) return df idlist = [] with open('test_game') as f: # For each line for line in f: # Find the entity ids m = re.search('[\[ ]id=(\d+) ', line) # if one is found if m: # Check that we haven't found it yet, convert to an integer id = int(m.group(1)) # Add it to the list if id not in idlist: idlist.append(id) # Sort the ids idlist.sort() # Convert to dataframe df = pd.DataFrame(index=idlist) # Rename the index df.index.name = "Entity ID" # Create an empty column for names df["Name"] = np.nan df["CardId"] = np.nan df["Player"] = np.nan with open('test_game') as f: updates = [] for line in f: # Find lists of the innermost nested brackets m = re.findall(r"\[([^\[]+?)]", line) # If it's not just the command designation bracket ("zone", e.g.) if len(m)>1: # for each set of bracket contents for item in m[1:]: # add to the list of updates updates.append(item) for item in updates: # find the id m = re.search("id=(\d+)", item) if m: # Assign ID variable id = int(m.group(1)) # find name and assign<|fim▁hole|> df.ix[id, "Name"] = name # find cardId and assign n = re.search("cardId=(\w.+?) ", item) if n: cardId = n.group(1) df.ix[id, "CardId"] = cardId # find player n = re.search("player=(\d)", item) if n: player = n.group(1) df.ix[id, "Player"] = player # update the dataframe for each update # get rid of the "zone" and "power" markers. # collect the entries into a list # Put card IDs into a DataFrame #df = get_ids_names(get_ids()) pd.set_option('display.max_rows', 200) print df # get_cards('test_game')<|fim▁end|>
n = re.search("name=(.+?) \w+?=", item) if n: name = n.group(1)
<|file_name|>index.tsx<|end_file_name|><|fim▁begin|>export { useDebouncedInput } from './useDebouncedInput';<|fim▁hole|>export { DebouncedInput } from './DebouncedInput';<|fim▁end|>
<|file_name|>manage.rs<|end_file_name|><|fim▁begin|>use super::*; use mime; use std::path::PathBuf; use std::path::Path; #[derive(Debug)] pub struct Fieldset { pub q_variants: Vec<(PathBuf, Option<String>, mime::Mime)>, pub answer_audio: Option<(PathBuf, Option<String>, mime::Mime)>, pub answer_text: String, } pub struct NewQuestion { pub q_name: String, pub q_explanation: String, pub question_text: String, pub skill_nugget: String, } pub fn create_quiz(conn: &Connection, new_q: NewQuestion, mut answers: Vec<Fieldset>, audio_dir: &Path) -> Result<QuizQuestion> { use schema::{quiz_questions, question_answers}; info!("Creating quiz!"); // Sanity check if answers.is_empty() { warn!("Can't create a question with 0 answers!"); return Err(ErrorKind::FormParseError.into()); } for a in &answers { if a.q_variants.is_empty() { warn!("Can't create a question with 0 audio files for question!"); return Err(ErrorKind::FormParseError.into()); } } let nugget = skill::get_create_by_name(&*conn, &new_q.skill_nugget)?; let new_quiz = NewQuizQuestion { q_name: &new_q.q_name, q_explanation: &new_q.q_explanation, question_text: &new_q.question_text, skill_id: nugget.id, skill_level: 2, // FIXME }; let quiz: QuizQuestion = diesel::insert_into(quiz_questions::table).values(&new_quiz) .get_result(&**conn) .chain_err(|| "Couldn't create a new question!")?; info!("{:?}", &quiz); let mut narrator = None; for fieldset in &mut answers { let mut a_bundle = None; let a_audio_id = match fieldset.answer_audio { Some(ref mut a) => { Some(audio::save(&*conn, &mut narrator, a, &mut a_bundle, audio_dir)?.id) } None => None, }; let mut q_bundle = None; for mut q_audio in &mut fieldset.q_variants { audio::save(&*conn, &mut narrator, &mut q_audio, &mut q_bundle, audio_dir)?; } let q_bundle = q_bundle.expect("The audio bundle is initialized now."); let new_answer = NewAnswer { question_id: quiz.id, answer_text: &fieldset.answer_text, a_audio_bundle: a_audio_id, q_audio_bundle: q_bundle.id, }; let answer: 
Answer = diesel::insert_into(question_answers::table).values(&new_answer) .get_result(&**conn) .chain_err(|| "Couldn't create a new answer!")?; info!("{:?}", &answer); } Ok(quiz) } #[derive(Debug)] pub struct NewWordFromStrings<'a> { pub word: String, pub explanation: String, pub nugget: String, pub narrator: &'a str, pub files: Vec<(PathBuf, Option<String>, mime::Mime)>, pub skill_level: i32, pub priority: i32, } #[derive(Debug)] pub struct NewAudio<'a> { pub word: String, pub narrator: &'a str, pub files: Vec<(PathBuf, Option<String>, mime::Mime)>, } pub fn add_audio(conn: &Connection, w: NewAudio, audio_dir: &Path) -> Result<AudioBundle> { info!("Add audio {:?}", w); let mut narrator = Some(audio::get_create_narrator(conn, w.narrator)?); let mut bundle = Some(audio::get_create_bundle(conn, &w.word)?); for mut file in w.files { audio::save(&*conn, &mut narrator, &mut file, &mut bundle, audio_dir)?; } let bundle = bundle.expect("The audio bundle is initialized by now."); Ok(bundle) } pub fn create_or_update_word(conn: &Connection, mut w: NewWordFromStrings, audio_dir: &Path) -> Result<Word> { use schema::{words, audio_files}; info!("Create word {:?}", w); let nugget = skill::get_create_by_name(&*conn, &w.nugget)?; let mut audio_file = None; match conn.transaction(|| { let mut narrator = Some(audio::get_create_narrator(conn, w.narrator)?); let mut bundle = Some(audio::get_create_bundle(conn, &w.word)?); for mut file in &mut w.files { audio_file = Some(audio::save(&*conn, &mut narrator, &mut file, &mut bundle, audio_dir)?); } Ok(()) }) { Err(Error(ErrorKind::FileAlreadyExists(hash), ..)) => { audio_file = audio_files::table.filter(audio_files::file_sha2.eq(hash)) .get_result(&**conn) .optional()?; } Err(e) => return Err(e), Ok(()) => (), }; let audio_file = audio_file.expect("If we are here, everything was successful."); let word = words::table.filter(words::word.eq(&w.word)) .get_result(&**conn) .optional()?; if let Some(word) = word { info!("The word existed 
already. Returning."); return Ok(word); } else { let new_word = NewWord { word: &w.word, explanation: &w.explanation, audio_bundle: audio_file.bundle_id, skill_nugget: nugget.id, skill_level: w.skill_level, priority: w.priority, }; let word = diesel::insert_into(words::table).values(&new_word).get_result(&**conn)?; return Ok(word); } } pub fn get_question(conn: &Connection, id: i32) -> Result<Option<(QuizQuestion, Vec<Answer>)>> { if let Some((qq, aas, _)) = quiz::load_question(conn, id)? { Ok(Some((qq, aas))) } else { Ok(None) } } pub fn get_exercise(conn: &Connection, id: i32) -> Result<Option<(Exercise, Vec<ExerciseVariant>)>> { if let Some((qq, aas, _)) = quiz::load_exercise(conn, id)? { Ok(Some((qq, aas))) } else { Ok(None) } } pub fn get_word(conn: &Connection, id: i32) -> Result<Option<Word>> { Ok(schema::words::table.filter(schema::words::id.eq(id)) .get_result(&**conn) .optional()?) } pub fn publish_question(conn: &Connection, id: i32, published: bool) -> Result<()> { use schema::quiz_questions; diesel::update(quiz_questions::table .filter(quiz_questions::id.eq(id))) .set(quiz_questions::published.eq(published)) .execute(&**conn)?; Ok(()) } pub fn publish_exercise(conn: &Connection, id: i32, published: bool) -> Result<()> { use schema::exercises; diesel::update(exercises::table .filter(exercises::id.eq(id))) .set(exercises::published.eq(published)) .execute(&**conn)?; Ok(()) } pub fn publish_word(conn: &Connection, id: i32, published: bool) -> Result<()> { use schema::words; diesel::update(words::table.filter(words::id.eq(id))).set(words::published.eq(published)) .execute(&**conn)?; Ok(()) } pub fn update_word(conn: &Connection, id: i32, mut item: UpdateWord, image_dir: &Path) -> Result<Option<Word>> { use schema::words; item.explanation = item.explanation.try_map(|s| sanitize_links(&s, image_dir))?; let item = diesel::update(words::table.filter(words::id.eq(id))).set(&item) .get_result(&**conn) .optional()?; Ok(item) } pub fn update_exercise(conn: 
&Connection, id: i32, item: UpdateExercise) -> Result<Option<Exercise>> { use schema::exercises; let item = diesel::update(exercises::table.filter(exercises::id.eq(id))).set(&item) .get_result(&**conn) .optional()?; Ok(item) } pub fn update_question(conn: &Connection, id: i32, item: UpdateQuestion) -> Result<Option<QuizQuestion>> { use schema::quiz_questions; let item = diesel::update(quiz_questions::table.filter(quiz_questions::id.eq(id))).set(&item) .get_result(&**conn) .optional()?; Ok(item) } pub fn update_answer(conn: &Connection, id: i32, mut item: UpdateAnswer, image_dir: &Path) -> Result<Option<Answer>> { use schema::question_answers; item.answer_text = item.answer_text.try_map(|s| sanitize_links(&s, image_dir))?; let item = diesel::update(question_answers::table.filter(question_answers::id.eq(id))).set(&item) .get_result(&**conn) .optional()?; Ok(item) } pub fn update_variant(conn: &Connection, id: i32, item: UpdateExerciseVariant) -> Result<Option<ExerciseVariant>> { use schema::exercise_variants; let item = diesel::update(exercise_variants::table.filter(exercise_variants::id.eq(id))).set(&item) .get_result(&**conn) .optional()?; Ok(item) } pub fn remove_word(conn: &Connection, id: i32) -> Result<Option<Word>> { use schema::words; let word: Option<Word> = diesel::delete(words::table.filter(words::id.eq(id))).get_result(&**conn) .optional()?; Ok(word) } pub fn remove_question(conn: &Connection, id: i32) -> Result<bool> { use schema::{quiz_questions, question_answers}; diesel::delete(question_answers::table.filter(question_answers::question_id.eq(id))) .execute(&**conn)?; let count = diesel::delete(quiz_questions::table.filter(quiz_questions::id.eq(id))).execute(&**conn)?; Ok(count == 1) } pub fn remove_exercise(conn: &Connection, id: i32) -> Result<bool> { use schema::{exercises, exercise_variants}; diesel::delete(exercise_variants::table.filter(exercise_variants::exercise_id.eq(id))) .execute(&**conn)?; let count = 
diesel::delete(exercises::table.filter(exercises::id.eq(id))).execute(&**conn)?; Ok(count == 1) } pub fn post_question(conn: &Connection, question: NewQuizQuestion, mut answers: Vec<NewAnswer>) -> Result<i32> { use schema::{question_answers, quiz_questions}; debug!("Post question: {:?} and answers: {:?}", question, answers); let q: QuizQuestion = diesel::insert_into(quiz_questions::table).values(&question).get_result(&**conn)?; for aa in &mut answers { aa.question_id = q.id; diesel::insert_into(question_answers::table).values(&*aa).execute(&**conn)?; } Ok(q.id) } pub fn post_exercise(conn: &Connection, exercise: NewExercise, mut answers: Vec<ExerciseVariant>) -> Result<i32> { use schema::{exercises, exercise_variants}; conn.transaction(|| -> Result<i32> { let q: Exercise = diesel::insert_into(exercises::table).values(&exercise).get_result(&**conn)?; for aa in &mut answers { aa.exercise_id = q.id; diesel::insert_into(exercise_variants::table).values(&*aa).execute(&**conn)?; } Ok(q.id) }) .chain_err(|| ErrorKind::from("Transaction failed")) } pub fn del_due_and_pending_items(conn: &Connection, user_id: i32) -> Result<()> { use schema::{due_items, pending_items, question_data, exercise_data, e_asked_data, q_asked_data, e_answered_data, q_answered_data}; use diesel::expression::dsl::any; let p = diesel::update( pending_items::table .filter(pending_items::user_id.eq(user_id).and(pending_items::pending.eq(true))) ) .set(pending_items::pending.eq(false)) .execute(&**conn)?; let pending: Vec<PendingItem> = pending_items::table.filter(pending_items::user_id.eq(user_id)) .get_results(&**conn)?; let due_items = due_items::table.filter(due_items::user_id.eq(user_id)).select(due_items::id); let q = diesel::delete(question_data::table.filter(question_data::due.eq(any(due_items)))) .execute(&**conn)?; let e = diesel::delete(exercise_data::table.filter(exercise_data::due.eq(any(due_items)))) .execute(&**conn)?; let d = 
diesel::delete(due_items::table.filter(due_items::user_id.eq(user_id))).execute(&**conn)?; let mut asks = 0; let mut answers = 0; for p in &pending { answers += diesel::delete(e_answered_data::table.filter(e_answered_data::id.eq(p.id))) .execute(&**conn)?; answers += diesel::delete(q_answered_data::table.filter(q_answered_data::id.eq(p.id))) .execute(&**conn)?; asks += diesel::delete(e_asked_data::table.filter(e_asked_data::id.eq(p.id))).execute(&**conn)?; asks += diesel::delete(q_asked_data::table.filter(q_asked_data::id.eq(p.id))).execute(&**conn)?; } debug!("Deactivated {} pending items and deleted {} due items. ({} questions, {} exercises, \ {} asks, {} answers)", p, d, q, e, asks, answers); Ok(()) } pub fn replace_audio_bundle(conn: &Connection, bundle_id: i32, new_bundle_id: i32) -> Result<()> { use schema::{words, question_answers}; info!("Replacing old bundle references (id {}) with new ones (id {}).", bundle_id, new_bundle_id); conn.transaction(|| { let count = diesel::update( words::table.filter(words::audio_bundle.eq(bundle_id)) ).set(words::audio_bundle.eq(new_bundle_id)) .execute(&**conn)?; info!("{} audio bundles in words replaced with a new audio bundle.", count); let count = diesel::update( question_answers::table.filter(question_answers::a_audio_bundle.eq(bundle_id)) ).set(question_answers::a_audio_bundle.eq(new_bundle_id)) .execute(&**conn)?; info!("{} audio bundles in question_answers::a_audio_bundle replaced with a new audio \ bundle.", count); let count = diesel::update( question_answers::table.filter(question_answers::q_audio_bundle.eq(bundle_id)) ).set(question_answers::q_audio_bundle.eq(new_bundle_id)) .execute(&**conn)?; info!("{} audio bundles in question_answers::q_audio_bundle replaced with a new audio \ bundle.", count); Ok(()) }) } use ureq; use regex::Regex; use std::collections::HashMap; use std::sync::RwLock; lazy_static! 
{ static ref URL_REGEX: Regex = Regex::new(r#"['"](https?://.*?(\.[a-zA-Z0-9]{1,4})?)['"]"#) .expect("<- that is a valid regex there"); static ref EXTENSION_GUESS: Regex = Regex::new(r#"\.png|\.jpg|\.jpeg|\.gif"#) .expect("<- that is a valid regex there"); static ref CONVERTED_LINKS: RwLock<HashMap<String, String>> = RwLock::new(HashMap::<String, String>::new()); } pub fn sanitize_links(text: &str, image_dir: &Path) -> Result<String> { use rand::{thread_rng, Rng}; use std::fs; use std::io; use rand::distributions::Alphanumeric; info!("Sanitizing text: {}", text); let mut result = text.to_string(); for url_match in URL_REGEX.captures_iter(text) { let url = url_match.get(1).expect("The whole match won't match without this submatch.").as_str(); info!("Outbound link found: {}", url); if CONVERTED_LINKS.read() .expect("If the lock is poisoned, we're screwed anyway") .contains_key(url) { let new_url = &CONVERTED_LINKS.read().expect("If the lock is poisoned, we're screwed anyway") [url]; result = result.replace(url, new_url); } else { info!("Downloading the link target."); let desanitized_url = url.replace("&amp;", "&"); let resp = ureq::get(&desanitized_url).call(); assert!(resp.status() < 400); let extension = { let fuzzy_guess_url: Option<&str> = EXTENSION_GUESS.captures_iter(url) .next() .and_then(|c| c.get(0)) .map(|g| g.as_str()); let file_extension = url_match.get(2).map(|m| m.as_str()); let content_type = resp.header("Content-Type"); debug!("Original file extension: {:?}, Guess from URL: {:?}, Content type: {:?}", file_extension, fuzzy_guess_url, content_type); match content_type { Some("image/png") => ".png", Some("image/jpeg") => ".jpg", Some("image/gif") => ".gif", Some(_) | None => { file_extension.or_else(|| fuzzy_guess_url).unwrap_or(".noextension") } } };<|fim▁hole|> filename.push_str(extension); filename = format!("{}", chrono::offset::Utc::now().format(&filename)); new_path.push(&filename); let mut file = fs::File::create(new_path)?; io::copy(&mut 
resp.into_reader(), &mut file)?; info!("Saved the file to {:?}", file); let new_url = String::from("/api/images/") + &filename; result = result.replace(url, &new_url); CONVERTED_LINKS.write() .expect("If the lock is poisoned, we're screwed anyway") .insert(url.to_string(), new_url); } info!("Sanitized to: {}", &result); } Ok(result) } #[test] fn test_sanitize_links() { use tempdir; use std::fs; let tempdir = tempdir::TempDir::new("").unwrap(); assert_eq!(fs::read_dir(tempdir.path()).unwrap().count(), 0); let result = sanitize_links("Testing \"http://static4.depositphotos.\ com/1016045/326/i/950/depositphotos_3267906-stock-photo-cool-emoticon.\ jpg\" testing", tempdir.path()) .unwrap(); assert_eq!(fs::read_dir(tempdir.path()).unwrap().count(), 1); let result2 = sanitize_links("Testing \"http://static4.depositphotos.\ com/1016045/326/i/950/depositphotos_3267906-stock-photo-cool-emoticon.\ jpg\" testing", tempdir.path()) .unwrap(); assert_eq!(fs::read_dir(tempdir.path()).unwrap().count(), 1); assert_eq!(result.len(), 64); assert_eq!(result, result2); let result3 = sanitize_links("Testing \"https://c2.staticflickr.\ com/2/1216/1408154388_b34a66bdcf.jpg\" testing", tempdir.path()) .unwrap(); assert_eq!(fs::read_dir(tempdir.path()).unwrap().count(), 2); assert_eq!(result3.len(), 64); assert_ne!(result, result3); tempdir.close().unwrap(); }<|fim▁end|>
let mut new_path = image_dir.to_owned(); let mut filename = "%FT%H-%M-%SZ".to_string(); filename.extend(thread_rng().sample_iter(Alphanumeric).take(10));
<|file_name|>qtfm_ru.ts<|end_file_name|><|fim▁begin|><?xml version="1.0" encoding="utf-8"?> <!DOCTYPE TS> <TS version="2.0" language="ru" sourcelanguage="en"> <context> <name>MainWindow</name> <message> <source>New folder</source> <translation>Новая папка</translation> </message> <message> <source>Create a new folder</source> <translation>Создать новую папку</translation> </message> <message> <source>New file</source> <translation>Новый файл</translation> </message> <message> <source>Create a new file</source> <translation>Создать новый файл</translation> </message> <message> <source>New tab</source> <translation>Открыть вкладку</translation> </message> <message> <source>Middle-click things to open tab</source> <translation>Щелкните средней кнопкой мыши вещи, чтобы открыть вкладку</translation> </message> <message> <source>Close tab</source> <translation>Закрыть вкладку</translation> </message> <message> <source>Middle-click tabs to close</source> <translation>Щелкните средней кнопкой мыши вкладки, чтобы закрыть</translation> </message> <message> <source>Cut</source> <translation>Вырезать</translation> </message> <message> <source>Move the current file</source> <translation>Переместить текущий файл</translation> </message> <message> <source>Copy</source> <translation>Копировать</translation> </message> <message> <source>Copy the current file</source> <translation>Копировать текущий файл</translation> </message> <message> <source>Paste</source> <translation>Вставить</translation> </message> <message> <source>Paste the file here</source> <translatorcomment>1 Вставить файл сюда/здесь (2 вставить в эту папку) - вставить куда? правильнее чем вставить где?. 
второй вариант недостаточно соответствует оригиналу</translatorcomment> <translation>Вставить файл сюда</translation> </message> <message> <source>Up</source> <translation>Вверх</translation> </message> <message> <source>Go up one directory</source> <translation>Вверх на 1 директорию</translation> </message> <message> <source>Back</source> <translation>Назад</translation> </message> <message> <source>Go back one directory</source> <translation>Назад на одну директорию</translation> </message> <message> <source>Home</source> <translation>Домой</translation> </message> <message> <source>Go to home directory</source> <translation>В домашнюю директорию</translation> </message> <message> <source>Detail view</source> <translation>Детальный вид</translation> </message> <message> <source>Toggle detailed list</source> <translation>Переключить на детальный вид</translation> </message> <message> <source>Icon view</source> <translation>Значки</translation> </message> <message> <source>Toggle icon view</source> <translation>Переключить на показ значков</translation> </message> <message> <source>Hidden files</source> <translation>Скрытые файлы</translation> </message> <message> <source>Toggle hidden files</source> <translation>Показать скрытые файлы</translation> </message> <message> <source>Add bookmark</source> <translation>Добавить закладку</translation> </message> <message> <source>Add this folder to bookmarks</source> <translation>Добавить эту папку в закладки</translation> </message> <message> <source>Add separator</source> <translation>Добавить разделитель</translation> </message> <message> <source>Add separator to bookmarks list</source> <translation>Добавить разделитель в закладки</translation> </message> <message> <source>Remove bookmark</source> <translation>Удалить закладку</translation> </message> <message> <source>Remove this bookmark</source> <translation>Удалить эту закладку</translation> </message> <message> <source>Edit icon</source> <translation>Редактировать 
иконку</translation> </message> <message> <source>Change bookmark icon</source> <translation>Изменить иконку закладки</translation> </message> <message> <source>Wrap bookmarks</source> <translation>Переносить по столбцам</translation> </message> <message> <source>Delete</source> <translation>Удалить</translation> </message> <message> <source>Delete selected file</source> <translation>Удалить выбранный файл</translation> </message> <message> <source>Custom actions</source> <translation>Настраиваемые действия</translation> </message> <message> <source>Edit custom actions</source> <translation>Редактировать действия</translation> </message> <message> <source>Configure shortcuts</source> <translation>Настроить комбинации клавиш</translation> </message> <message> <source>Edit keybindings</source> <translation>Редактировать комбинации клавиш</translation> </message> <message> <source>Edit filetype</source> <translation>Редактировать типы файлов</translation> </message> <message> <source>Set default program for opening selected filetype</source> <translatorcomment>Программу по-умолчанию/стандартную программу - слишком длинно</translatorcomment> <translation>Выбрать программу для открытия указанного типа файлов</translation> </message> <message> <source>Rename</source> <translation>Переименовать</translation> </message> <message> <source>Rename file</source> <translation>Переименовать файл</translation> </message> <message> <source>Terminal</source> <translation>Терминал</translation> </message> <message> <source>Open virtual terminal</source> <translatorcomment>Виртуальный терминал?</translatorcomment> <translation>Открыть терминал</translation> </message> <message> <source>Open</source> <translation>Открыть</translation> </message> <message> <source>Open the file</source> <translation>Открыть файл</translation> </message> <message> <source>Enter folder</source> <translation>Войди в папку</translation> </message> <message> <source>Run</source> 
<translation>Запустить</translation> </message> <message> <source>Run this program</source> <translation>Запустить эту программу</translation> </message> <message> <source>Quit</source> <translation>Выход</translation> </message> <message> <source>Quit qtFM and stop the daemon</source> <translation>Выйти из qtFM и остановить демон</translation> </message> <message> <source>Close</source> <translation>Закрыть</translation> </message> <message> <source>Close qtFM</source> <translation>Закрыть qtFM</translation> </message> <message> <source>Show thumbs</source> <translation>Показывать миниатюры</translation> </message> <message> <source>View thumbnails in icon view</source> <translation>Показывать миниатюры при просмотре в виде значков</translation> </message> <message> <source>Properties</source> <translation>Свойства</translation> </message> <message> <source>View properties of selected items</source> <oldsource>View properties of selected folder</oldsource> <translation>Показать свойства выбранного объекта</translation> </message> <message> <source>Lock layout</source> <translation>Закрепить размещение</translation> </message> <message> <source>Refresh</source> <translation>Обновить</translation> </message> <message> <source>Cancel</source> <translation>Отмена</translation> </message> <message> <source>Zoom in</source> <translation>Увеличить</translation> </message> <message> <source>Zoom out</source> <translation>Уменьшить</translation> </message> <message> <source>Focus address</source> <translatorcomment>Не вижу в интерфейсе</translatorcomment> <translation>Фокус на строку адреса</translation> </message> <message> <source>Action</source> <translation>Действие</translation> </message> <message> <source>Shortcut</source> <translation>Комбинация</translation> </message> <message> <source>Duplicate shortcuts detected:&lt;p&gt;%1</source> <translatorcomment>Это про keybindings</translatorcomment> <translation>Обнаружены дубликаты комбинаций:&lt;p&gt;%1</translation> 
</message> <message> <source>Warning</source> <translation>Внимание</translation> </message> <message> <source>File</source> <translation>Файл</translation> </message> <message> <source>Edit</source> <translation>Правка</translation> </message> <message> <source>View</source> <translation>Вид</translation> </message> <message> <source>Layout</source> <translation>Размещение</translation> </message> <message> <source>Menu</source> <translation>Меню</translation> </message> <message> <source>Navigate</source> <translation>Управление</translation> </message> <message> <source>Address</source> <translation>Адрес</translation> </message> <message> <source>Zoom: %1</source> <translation>Увеличение: %1</translation> </message> <message> <source>Tree</source> <translation>Дерево</translation> </message> <message> <source>Bookmarks</source> <translation>Закладки</translation> </message> <message> <source>Setting</source> <translation>Настройки</translation> </message> <message> <source>Default terminal:</source> <translation>Терминал по-умолчанию:</translation> </message> <message> <source>Read only...cannot create folder</source> <translation>Только для чтения...невозможно создать папку</translation> </message> <message> <source>Read only...cannot create file</source> <translation>Только для чтения...невозможно создать файл</translation> </message> <message> <source>Delete confirmation</source> <translation>Подтверждение удаления</translation> </message> <message> <source>Do you want to confirm all delete operations?</source> <translation>Подтверждаете ВСЕ операции удаления?</translation> </message> <message> <source>Careful</source> <translation>Осторожно</translation> </message> <message> <source>Are you sure you want to delete &lt;p&gt;&lt;b&gt;&quot;</source> <translation>Вы действительно хотите удалить &lt;p&gt;&lt;b&gt;</translation> </message> <message> <source>Failed</source> <translation>Неудачно</translation> </message> <message> <source>Could not delete some 
items...do you have the right permissions?</source> <translatorcomment>items - файлы и папки, записи, пункты</translatorcomment> <translation>Некоторые файлы невозможно удалить... у Вас достаточно прав?</translation> </message> <message> <source>No paste for you!</source> <translatorcomment>Не вижу в интерфейсе</translatorcomment> <translation>Нечего вставлять!</translation> </message> <message> <source>File no longer exists!</source> <translation>Файл больше не существует!</translation> </message> <message> <source>Existing folder</source> <translation>Существующая папка</translation> </message> <message> <source>Merge</source> <translation>Объединить</translation> </message> <message> <source>Replace</source> <translation>Заменить</translation> </message> <message> <source>Copying...</source> <translation>Копирую...</translation> </message> <message> <source>Moving...</source> <translation>Перемещаю...</translation> </message> <message> <source>Paste failed...do you have write permissions?</source> <translation>Не удалось ...
у вас есть права на запись?</translation> </message> <message> <source>Too big!</source> <translation>Слишком большой!</translation> </message> <message> <source>There is not enough space on the destination drive!</source> <translation>Не хватает места на диске назначения!</translation> </message> <message> <source>Unlock layout</source> <translation>Разблокировать размещение</translation> </message> <message> <source>Configure filetype</source> <translation>Настройка типа файла</translation> </message> <message> <source>Input</source> <translation>Ввод</translation> </message> <message> <source>Error - Custom action</source> <translation>Ошибка - Настраиваемые действия</translation> </message> <message> <source>Output - Custom action</source> <translation>Вывод - Настраиваемые действия</translation> </message> <message> <source>Filetype:</source> <translation>тип файла:</translation> </message> <message> <source>Open with:</source> <translation>Открыть с помощью:</translation> </message> </context> <context> <name>customActionsDialog</name> <message> <source>Filetype</source> <translation>Тип файла</translation> </message> <message> <source>Text</source> <translation>Текст</translation> </message> <message> <source>Icon</source> <translation>Иконка</translation> </message> <message> <source>Command</source> <translation>Команда</translation> </message> <message> <source>Custom Actions</source> <translation>Настраиваемое действие</translation> </message> <message> <source>Usage</source> <translation>Использование</translation> </message> <message> <source>Use &apos;folder&apos; to match all folders.&lt;br&gt;Use a folder name to match a specific folder.&lt;br&gt;Set text to &apos;Open&apos; to override xdg default.&lt;p&gt;%f - selected files&lt;br&gt;%F - selected files with full path&lt;br&gt;%n - current filename&lt;/p&gt;&lt;p&gt;[] - tick checkbox to monitor output and errors.&lt;/p&gt;&lt;p&gt;See &lt;a 
href=&apos;http://www.qtfm.org/home/readme&apos;&gt;readme&lt;/a&gt; for more help.&lt;/p&gt;</source> <translation>Используйте &apos;folder&apos; для совпадения со всеми папками.&lt;br&gt;Используйте &apos;имя папки&apos; для совпадения с отдельными папками.&lt;br&gt;Используйте &apos;Open&apos; для переопределения xdg по-умолчанию.&lt;p&gt;%f - выбранные файлы&lt;br&gt;%F - выбранные файлы с указанием полного пути&lt;br&gt;%n - текущее имя файла&lt;/p&gt;&lt;p&gt;[] - отметьте чекбокс для того, чтобы программа возвращала вывод и ошибки в qtfm.&lt;/p&gt;&lt;p&gt;Больше информации см в &lt;a href=&apos;http://www.qtfm.org/home/readme&apos;&gt;readme&lt;/a&gt; .&lt;/p&gt;</translation> </message> <message> <source>Extract here</source> <translation>Извлечь сюда</translation> </message> <message> <source>Term here</source> <translation>Открыть терминал здесь</translation> </message> <message> <source>Compress</source> <translation>Сжать</translation> </message> </context> <context> <name>icondlg</name> <message> <source>Select icon</source> <translation>Выбрать иконку</translation> </message> </context> <context> <name>myModel</name> <message> <source>Name</source> <translation>Имя</translation> </message> <message> <source>Size</source> <translation>Размер</translation> </message> <message> <source>Type</source> <translation>Тип</translation> </message> <message> <source>Owner</source> <translation>Владелец</translation> </message> <message> <source>Date Modified</source> <translation>Изменен</translation> </message> </context> <context> <name>myProgressDialog</name> <message> <source>Initializing...</source> <translation>Инициализация...</translation> </message> <message> <source>&lt;p&gt;Transfer rate: %2 MB/s&lt;br&gt;Time remaining: %3&lt;/p&gt;</source> <translation>&lt;p&gt;Скорость: %2 MB/s&lt;br&gt;Оставшееся время: %3&lt;/p&gt;</translation> </message> </context> <context> <name>propertiesDialog</name> <message> <source>Properties</source> 
<translation>Свойства</translation> </message> <message> <source>Size:</source> <translation>Размер:</translation> </message> <message> <source>Contains:</source> <translation>Содержит:</translation> </message> <message> <source>Modified:</source> <translation>Изменен:</translation> </message> <message> <source>Read</source> <translation>Чтение</translation> </message> <message><|fim▁hole|> </message> <message> <source>Execute</source> <translation>Выполнение</translation> </message> <message> <source>Owner</source> <translation>Владелец</translation> </message> <message> <source>Group</source> <translation>Группа</translation> </message> <message> <source>Other</source> <translation>Другие</translation> </message> <message> <source>Numeric</source> <translatorcomment>Где это в интерфейсе?</translatorcomment> <translation>Числовой</translation> </message> <message> <source>&lt;b&gt;%1 files, %2 folders</source> <translation>&lt;b&gt;%1 файл(ов), %2 папка(ок)</translation> </message> <message> <source>&lt;b&gt;%1 folders</source> <translation>&lt;b&gt;%1 папка(ок)</translation> </message> <message> <source>&lt;b&gt;%1 files</source> <translation>&lt;b&gt;%1 файл(ов)</translation> </message> <message> <source>Total:</source> <translation>Всего:</translation> </message> <message> <source>%1 Files, %2 folders</source> <translation>%1 файлов, %2 папок</translation> </message> <message> <source>Filetype:</source> <translation>тип файла:</translation> </message> </context> </TS><|fim▁end|>
<source>Write</source> <translation>Запись</translation>
<|file_name|>build.rs<|end_file_name|><|fim▁begin|><|fim▁hole|>}<|fim▁end|>
fn main() { println!("cargo:rerun-if-changed=lib.c"); cc::Build::new().file("lib.c").compile("lib");
<|file_name|>0005_news_image.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*- # Generated by Django 1.11.5 on 2017-10-23 15:47 from __future__ import unicode_literals from django.db import migrations, models import oktansite.models class Migration(migrations.Migration): dependencies = [ ('oktansite', '0004_news_attachment'), ] operations = [<|fim▁hole|> ), ]<|fim▁end|>
migrations.AddField( model_name='news', name='image', field=models.ImageField(null=True, upload_to=oktansite.models.get_upload_path_news_attachment),
<|file_name|>shexmap-simple.js<|end_file_name|><|fim▁begin|>// shexmap-simple - Simple ShEx2 validator for HTML. // Copyright 2017 Eric Prud'hommeux // Release under MIT License. const ShEx = ShExWebApp; // @@ rename globally const ShExJsUrl = 'https://github.com/shexSpec/shex.js' const RdfJs = N3js; const ShExApi = ShEx.Api({ fetch: window.fetch.bind(window), rdfjs: RdfJs, jsonld: null }) const MapModule = ShEx.Map({rdfjs: RdfJs, Validator: ShEx.Validator}); ShEx.ShapeMap.start = ShEx.Validator.start const SharedForTests = {} // an object to share state with a test harness const START_SHAPE_LABEL = "START"; const START_SHAPE_INDEX_ENTRY = "- start -"; // specificially not a JSON-LD @id form. const INPUTAREA_TIMEOUT = 250; const NO_MANIFEST_LOADED = "no manifest loaded"; const LOG_PROGRESS = false; const DefaultBase = location.origin + location.pathname; const Caches = {}; Caches.inputSchema = makeSchemaCache($("#inputSchema textarea.schema")); Caches.inputData = makeTurtleCache($("#inputData textarea")); Caches.manifest = makeManifestCache($("#manifestDrop")); Caches.extension = makeExtensionCache($("#extensionDrop")); Caches.shapeMap = makeShapeMapCache($("#textMap")); // @@ rename to #shapeMap Caches.bindings = makeJSONCache($("#bindings1 textarea")); Caches.statics = makeJSONCache($("#staticVars textarea")); Caches.outputSchema = makeSchemaCache($("#outputSchema textarea")); // let ShExRSchema; // defined in calling page const ParseTriplePattern = (function () { const uri = "<[^>]*>|[a-zA-Z0-9_-]*:[a-zA-Z0-9_-]*"; const literal = "((?:" + "'(?:[^'\\\\]|\\\\')*'" + "|" + "\"(?:[^\"\\\\]|\\\\\")*\"" + "|" + "'''(?:(?:'|'')?[^'\\\\]|\\\\')*'''" + "|" + "\"\"\"(?:(?:\"|\"\")?[^\"\\\\]|\\\\\")*\"\"\"" + ")" + "(?:@[a-zA-Z-]+|\\^\\^(?:" + uri + "))?)"; const uriOrKey = uri + "|FOCUS|_"; // const termOrKey = uri + "|" + literal + "|FOCUS|_"; return "(\\s*{\\s*)("+ uriOrKey+")?(\\s*)("+ uri+"|a)?(\\s*)("+ uriOrKey+"|" + literal + ")?(\\s*)(})?(\\s*)"; })(); const 
Getables = [ {queryStringParm: "schema", location: Caches.inputSchema.selection, cache: Caches.inputSchema}, {queryStringParm: "data", location: Caches.inputData.selection, cache: Caches.inputData }, {queryStringParm: "manifest", location: Caches.manifest.selection, cache: Caches.manifest , fail: e => $("#manifestDrop li").text(NO_MANIFEST_LOADED)}, {queryStringParm: "extension", location: Caches.extension.selection, cache: Caches.extension }, {queryStringParm: "shape-map", location: $("#textMap"), cache: Caches.shapeMap }, {queryStringParm: "bindings", location: Caches.bindings.selection, cache: Caches.bindings }, {queryStringParm: "statics", location: Caches.statics.selection, cache: Caches.statics }, {queryStringParm: "outSchema", location: Caches.outputSchema.selection,cache: Caches.outputSchema}, ]; const QueryParams = Getables.concat([ {queryStringParm: "interface", location: $("#interface"), deflt: "human" }, {queryStringParm: "success", location: $("#success"), deflt: "proof" }, {queryStringParm: "regexpEngine", location: $("#regexpEngine"), deflt: "eval-threaded-nerr" }, ]); // utility functions function parseTurtle (text, meta, base) { const ret = new RdfJs.Store(); RdfJs.Parser._resetBlankNodePrefix(); const parser = new RdfJs.Parser({baseIRI: base, format: "text/turtle" }); const quads = parser.parse(text); if (quads !== undefined) ret.addQuads(quads); meta.base = parser._base; meta.prefixes = parser._prefixes; return ret; } const shexParser = ShEx.Parser.construct(DefaultBase, null, {index: true}); function parseShEx (text, meta, base) { shexParser._setOptions({duplicateShape: $("#duplicateShape").val()}); shexParser._setBase(base); const ret = shexParser.parse(text); // ret = ShEx.Util.canonicalize(ret, DefaultBase); meta.base = ret._base; // base set above. 
meta.prefixes = ret._prefixes || {}; // @@ revisit after separating shexj from meta and indexes return ret; } function sum (s) { // cheap way to identify identical strings return s.replace(/\s/g, "").split("").reduce(function (a,b){ a = ((a<<5) - a) + b.charCodeAt(0); return a&a },0); } // <n3.js-specific> function rdflib_termToLex (node, resolver) { if (node === "http://www.w3.org/1999/02/22-rdf-syntax-ns#type") return "a"; if (node === ShEx.Validator.start) return START_SHAPE_LABEL; if (node === resolver._base) return "<>"; if (node.indexOf(resolver._base) === 0/* && ['#', '?'].indexOf(node.substr(resolver._base.length)) !== -1 */) return "<" + node.substr(resolver._base.length) + ">"; if (node.indexOf(resolver._basePath) === 0 && ['#', '?', '/', '\\'].indexOf(node.substr(resolver._basePath.length)) === -1) return "<" + node.substr(resolver._basePath.length) + ">"; return ShEx.ShExTerm.intermalTermToTurtle(node, resolver.meta.base, resolver.meta.prefixes); } function rdflib_lexToTerm (lex, resolver) { return lex === START_SHAPE_LABEL ? ShEx.Validator.start : lex === "a" ? "http://www.w3.org/1999/02/22-rdf-syntax-ns#type" : new RdfJs.Lexer().tokenize(lex + " ") // need " " to parse "chat"@en .map(token => { const left = token.type === "typeIRI" ? "^^" : token.type === "langcode" ? "@" : token.type === "type" ? "^^" + resolver.meta.prefixes[token.prefix] : token.type === "prefixed" ? resolver.meta.prefixes[token.prefix] : token.type === "blank" ? "_:" : ""; const right = token.type === "IRI" || token.type === "typeIRI" ? resolver._resolveAbsoluteIRI(token) : token.value; return left + right; }).join(""); return lex === ShEx.Validator.start ? lex : lex[0] === "<" ? 
lex.substr(1, lex.length - 2) : lex; } // </n3.js-specific> // caches for textarea parsers function _makeCache (selection) { let _dirty = true; const ret = { selection: selection, parsed: null, // a Promise meta: { prefixes: {}, base: DefaultBase }, dirty: function (newVal) { const ret = _dirty; _dirty = newVal; return ret; }, get: function () { return selection.val(); }, set: async function (text, base) { _dirty = true; selection.val(text); this.meta.base = base; if (base !== DefaultBase) { this.url = base; // @@crappyHack1 -- parms should differntiate: // working base: base for URL resolution. // loaded base: place where you can GET current doc. // Note that Caches.manifest.set takes a 3rd parm. } }, refresh: async function () { if (!_dirty) return this.parsed; this.parsed = await this.parse(selection.val(), this.meta.base); await this.parsed; _dirty = false; return this.parsed; }, asyncGet: async function (url) { url = new URL(url, window.location).href const _cache = this; let resp try { resp = await fetch(url, {headers: { accept: 'text/shex,text/turtle,*/*;q=0.9, test/html;q=0.8', cache: 'no-cache' }}) } catch (e) { throw Error("unable to fetch <" + url + ">: " + '\n' + e.message); } if (!resp.ok) throw Error("fetch <" + url + "> got error response " + resp.status + ": " + resp.statusText); const data = await resp.text(); _cache.meta.base = url; try { await _cache.set(data, url, undefined, resp.headers.get('content-type')); } catch (e) { throw Error("error setting " + this.queryStringParm + " with <" + url + ">: " + '\n' + e.message); } $("#loadForm").dialog("close"); toggleControls(); return { url: url, data: data }; }, url: undefined // only set if inputarea caches some web resource. 
}; ret.meta.termToLex = function (trm) { return rdflib_termToLex(trm, new IRIResolver(ret.meta)); }; ret.meta.lexToTerm = function (lex) { return rdflib_lexToTerm(lex, new IRIResolver(ret.meta)); }; return ret; } function makeSchemaCache (selection) { const ret = _makeCache(selection); let graph = null; ret.language = null; ret.parse = async function (text, base) { const isJSON = text.match(/^\s*\{/); graph = isJSON ? null : tryN3(text); this.language = isJSON ? "ShExJ" : graph ? "ShExR" : "ShExC"; $("#results .status").text("parsing "+this.language+" schema...").show(); const schema = isJSON ? ShEx.Util.ShExJtoAS(JSON.parse(text)) : graph ? parseShExR() : parseShEx(text, ret.meta, base); $("#results .status").hide(); markEditMapDirty(); // ShapeMap validity may have changed. return schema; function tryN3 (text) { try { if (text.match(/^\s*$/)) return null; const db = parseTurtle (text, ret.meta, DefaultBase); // interpret empty schema as ShExC if (db.getQuads().length === 0) return null; return db; } catch (e) { return null; } } function parseShExR () { const graphParser = ShEx.Validator.construct( parseShEx(ShExRSchema, {}, base), // !! do something useful with the meta parm (prefixes and base) ShEx.Util.rdfjsDB(graph), {} ); const schemaRoot = graph.getQuads(null, ShEx.Util.RDF.type, "http://www.w3.org/ns/shex#Schema")[0].subject; // !!check const val = graphParser.validate(schemaRoot, ShEx.Validator.start); // start shape return ShEx.Util.ShExJtoAS(ShEx.Util.ShExRtoShExJ(ShEx.Util.valuesToSchema(ShEx.Util.valToValues(val)))); } }; ret.getItems = async function () { const obj = await this.refresh(); const start = "start" in obj ? [START_SHAPE_LABEL] : []; const rest = "shapes" in obj ? 
obj.shapes.map(se => Caches.inputSchema.meta.termToLex(se.id)) : []; return start.concat(rest); }; return ret; } function makeTurtleCache (selection) { const ret = _makeCache(selection); ret.parse = async function (text, base) { const res = ShEx.Util.rdfjsDB(parseTurtle(text, ret.meta, base)); markEditMapDirty(); // ShapeMap validity may have changed. return res; }; ret.getItems = async function () { const data = await this.refresh(); return data.getQuads().map(t => { return Caches.inputData.meta.termToLex(t.subject); // !!check }); }; return ret; } function makeManifestCache (selection) { const ret = _makeCache(selection); ret.set = async function (textOrObj, url, source) { $("#inputSchema .manifest li").remove(); $("#inputData .passes li, #inputData .fails li").remove(); if (typeof textOrObj !== "object") { if (url !== DefaultBase) { this.url = url; // @@crappyHack1 -- parms should differntiate: } try { // exceptions pass through to caller (asyncGet) textOrObj = JSON.parse(textOrObj); } catch (e) { $("#inputSchema .manifest").append($("<li/>").text(NO_MANIFEST_LOADED)); const throwMe = Error(e + '\n' + textOrObj); throwMe.action = 'load manifest' throw throwMe // @@DELME(2017-12-29) // transform deprecated examples.js structure // textOrObj = eval(textOrObj).reduce(function (acc, schema) { // function x (data, status) { // return { // schemaLabel: schema.name, // schema: schema.schema, // dataLabel: data.name, // data: data.data, // queryMap: data.queryMap, // outputSchema: data.outputSchema, // outputShape: data.outputShape, // staticVars: data.staticVars, // createRoot: data.createRoot, // status: status // }; // } // return acc.concat( // schema.passes.map(data => x(data, "conformant")), // schema.fails.map(data => x(data, "nonconformant")) // ); // }, []); } } if (!Array.isArray(textOrObj)) textOrObj = [textOrObj]; const demos = textOrObj.reduce((acc, elt) => { if ("action" in elt) { // compatibility with test suite structure. 
const action = elt.action; let schemaLabel = action.schema.substr(action.schema.lastIndexOf('/')+1); let dataLabel = elt["@id"]; let match = null; const emptyGraph = "-- empty graph --"; if ("comment" in elt) { if ((match = elt.comment.match(/^(.*?) \/ { (.*?) }$/))) { schemaLabel = match[1]; dataLabel = match[2] || emptyGraph; } else if ((match = elt.comment.match(/^(.*?) on { (.*?) }$/))) { schemaLabel = match[1]; dataLabel = match[2] || emptyGraph; } else if ((match = elt.comment.match(/^(.*?) as { (.*?) }$/))) { schemaLabel = match[2]; dataLabel = match[1] || emptyGraph; } } const queryMap = "map" in action ? null : ldToTurtle(action.focus, Caches.inputData.meta.termToLex) + "@" + ("shape" in action ? ldToTurtle(action.shape, Caches.inputSchema.meta.termToLex) : START_SHAPE_LABEL); const queryMapURL = "map" in action ? action.map : null; elt = Object.assign( { schemaLabel: schemaLabel, schemaURL: action.schema || url, // dataLabel: "comment" in elt ? elt.comment : (queryMap || dataURL), dataLabel: dataLabel, dataURL: action.data || DefaultBase }, (queryMap ? { queryMap: queryMap } : { queryMapURL: queryMapURL }), { status: elt["@type"] === "sht:ValidationFailure" ? "nonconformant" : "conformant" } ); if ("termResolver" in action || "termResolverURL" in action) { elt.meta = action.termResolver; elt.metaURL = action.termResolverURL || DefaultBase; } } ["schemaURL", "dataURL", "queryMapURL"].forEach(parm => { if (parm in elt) { elt[parm] = new URL(elt[parm], new URL(url, DefaultBase).href).href; } else { delete elt[parm]; } }); return acc.concat(elt); }, []); await prepareManifest(demos, url); $("#manifestDrop").show(); // may have been hidden if no manifest loaded. 
}; ret.parse = async function (text, base) { throw Error("should not try to parse manifest cache"); }; ret.getItems = async function () { throw Error("should not try to get manifest cache items"); }; return ret; function maybeGET(obj, base, key, accept) { // !!not used if (obj[key] != null) { // Take the passed data, guess base if not provided. if (!(key + "URL" in obj)) obj[key + "URL"] = base; obj[key] = Promise.resolve(obj[key]); } else if (key + "URL" in obj) { // absolutize the URL obj[key + "URL"] = ret.meta.lexToTerm("<"+obj[key + "URL"]+">"); // Load the remote resource. obj[key] = new Promise((resolve, reject) => { $.ajax({ accepts: { mycustomtype: accept }, url: ret.meta.lexToTerm("<"+obj[key + "URL"]+">"), dataType: "text" }).then(text => { resolve(text); }).fail(e => { results.append($("<pre/>").text( "Error " + e.status + " " + e.statusText + " on GET " + obj[key + "URL"] ).addClass("error")); reject(e); }); }); } else { // Ignore this parameter. obj[key] = Promise.resolve(obj[key]); } } } function makeExtensionCache (selection) { const ret = _makeCache(selection); ret.set = async function (code, url, source, mediaType) { this.url = url; // @@crappyHack1 -- parms should differntiate: try { // exceptions pass through to caller (asyncGet) // const resp = await fetch('http://localhost/checkouts/shexSpec/extensions/Eval/') // const text = await resp.text(); if (mediaType.startsWith('text/html')) return this.grepHtmlIndexForPackage(code, url, source) const extension = Function(`"use strict"; const module = {exports: {}}; ${code} return module.exports; `)() const name = extension.name; const id = "extension_" + name; // Delete any old li associated with this extension. const old = $(`.extensionControl[data-url="${extension.url}"]`) if (old.length) { results.append($("<div/>").append( $("<span/>").text(`removing old ${old.attr('data-name')} extension`) )); old.parent().remove(); } // Create a new li. 
const elt = $("<li/>", { class: "menuItem", title: extension.description }).append( $("<input/>", { type: "checkbox", checked: "checked", class: "extensionControl", id: id, "data-name": name, "data-url": extension.url }), $("<label/>", { for: id }).append( $("<a/>", {href: extension.url, text: name}) ) ); elt.insertBefore("#load-extension-button"); $("#" + id).data("code", extension); Caches.extension.url = url; // @@ cheesy hack that only works to remember one extension URL results.append($("<div/>").append( $("<span/>").text(`extension ${name} loaded from <${url}>`) )); } catch (e) { // $("#inputSchema .extension").append($("<li/>").text(NO_EXTENSION_LOADED)); const throwMe = Error(e + '\n' + code); throwMe.action = 'load extension' throw throwMe } // $("#extensionDrop").show(); // may have been hidden if no extension loaded. }; /* Poke around in HTML for a PACKAGE link in <table class="implementations"> <td property="code:softwareAgent" resource="https://github.com/shexSpec/shex.js">shexjs</td> <td><a property="shex:package" href="PACKAGE"/>...</td>... 
</table> */ ret.grepHtmlIndexForPackage = async function (code, url, source) { const jq = $(code); const impls = $(jq.find('table.implementations')) if (impls.length !== 1) { results.append($("<div/>").append( $("<span/>").text("unparsable extension index at " + url) ).addClass("error")); return; } const tr = $(impls).find(`tr td[resource="${ShExJsUrl}"]`).parent() if (tr.length !== 1) { results.append($("<div/>").append( $("<span/>").text("no entry for shexjs in index HTML at " + url) ).addClass("error")); return; } const href = tr.find('[property="shex:package"]').attr('href') if (!href) { results.append($("<div/>").append( $("<span/>").text("no package for shexjs in index HTML at " + url) ).addClass("error")); return; } const refd = await fetch(href); if (!refd.ok) { results.append($("<div/>").append( $("<span/>").text(`error fetching implementation: ${refd.status} (${refd.statusText}) for URL <${href}>`) ).addClass("error")); } else { code = await refd.text(); await this.set(code, url, source, refd.headers.get('content-type')); } }; ret.parse = async function (text, base) { throw Error("should not try to parse extension cache"); }; ret.getItems = async function () { throw Error("should not try to get extension cache items"); }; return ret; } function ldToTurtle (ld, termToLex) { return typeof ld === "object" ? 
      lit(ld) : termToLex(ld);

  // Serialize a JSON-LD literal object ({"@value", "@type"?, "@language"?}) as
  // a Turtle literal, escaping quote/CR/LF/TAB in the lexical form.
  function lit (o) {
    let ret = "\""+o["@value"].replace(/["\r\n\t]/g, (c) => {
      return {'"': "\\\"", "\r": "\\r", "\n": "\\n", "\t": "\\t"}[c];
    }) +"\"";
    // NOTE(review): if an input carried both "@type" and "@language" this would
    // emit both suffixes, which is not valid Turtle -- presumably literals here
    // have at most one of the two; confirm against callers' data.
    if ("@type" in o)
      ret += "^^<" + o["@type"] + ">";
    if ("@language" in o)
      ret += "@" + o["@language"];
    return ret;
  }
}

// Cache wrapper for the ShapeMap pane: "parsing" routes the text through the
// edit-map UI (text tab -> edit tab -> fixed tab) rather than a parser.
function makeShapeMapCache (selection) {
  const ret = _makeCache(selection);
  ret.parse = async function (text) {
    removeEditMapPair(null);
    $("#textMap").val(text);
    copyTextMapToEditMap();
    await copyEditMapToFixedMap();
  };
  // ret.parse = function (text, base) {  };
  ret.getItems = async function () {
    // NOTE(review): message says "manifest" but this is the ShapeMap cache --
    // looks copy-pasted; the string is runtime-visible so left unchanged here.
    throw Error("should not try to get manifest cache items");
  };
  return ret;
}

// Cache whose payload is plain JSON (used for bindings / static variables).
function makeJSONCache(selection) {
  const ret = _makeCache(selection);
  ret.parse = async function (text) {
    return Promise.resolve(JSON.parse(text));
  };
  return ret;
}

// controls for manifest buttons
// Render a manifest into `selector`: one <li><button> per entry. Entries with
// no inlined `text` are fetched from `entry.url`; clicking an item invokes
// `func(entry.name, entry, li, listItems, side)` via SharedForTests.promise.
async function paintManifest (selector, list, func, listItems, side) {
  $(selector).empty();
  await Promise.all(list.map(async entry => {
    // build button disabled and with leading "..." to indicate that it's being loaded
    const button = $("<button/>").text("..." + entry.label.substr(3)).attr("disabled", "disabled");
    const li = $("<li/>").append(button);
    $(selector).append(li);
    if (entry.text === undefined) {
      entry.text = await fetchOK(entry.url).catch(responseOrError => {
        // leave a message in the schema or data block
        return "# " + renderErrorMessage(
          responseOrError instanceof Error
            ? { url: entry.url, status: -1, statusText: responseOrError.message }
            : responseOrError,
          side);
      })
      textLoaded();
    } else {
      textLoaded();
    }

    // Wire up the click handler and index the item once its text is available.
    function textLoaded () {
      li.on("click", async () => {
        SharedForTests.promise = func(entry.name, entry, li, listItems, side);
      });
      listItems[side][sum(entry.text)] = li;
      // enable and get rid of the "..."
in the label now that it's loaded button.text(entry.label).removeAttr("disabled"); } })) setTextAreaHandlers(listItems); } function fetchOK (url) { return fetch(url).then(responseOrError => { if (!responseOrError.ok) { throw responseOrError; } return responseOrError.text() }); } function renderErrorMessage (response, what) { const message = "failed to load " + "queryMap" + " from <" + response.url + ">, got: " + response.status + " " + response.statusText; results.append($("<pre/>").text(message).addClass("error")); return message; } async function clearData () { // Clear out data textarea. await Caches.inputData.set("", DefaultBase); $("#inputData .status").text(" "); // Clear out every form of ShapeMap. $("#textMap").val("").removeClass("error"); makeFreshEditMap(); $("#fixedMap").empty(); results.clear(); } async function clearAll () { $("#results .status").hide(); await Caches.inputSchema.set("", DefaultBase); $(".inputShape").val(""); $("#inputSchema .status").text(" "); $("#inputSchema li.selected").removeClass("selected"); clearData(); $("#inputData .passes, #inputData .fails").hide(); $("#inputData .passes p:first").text(""); $("#inputData .fails p:first").text(""); $("#inputData .passes ul, #inputData .fails ul").empty(); } async function pickSchema (name, schemaTest, elt, listItems, side) { if ($(elt).hasClass("selected")) { await clearAll(); } else { await Caches.inputSchema.set(schemaTest.text, new URL((schemaTest.url || ""), DefaultBase).href); Caches.inputSchema.url = undefined; // @@ crappyHack1 $("#inputSchema .status").text(name); clearData(); const headings = { "passes": "Passing:", "fails": "Failing:", "indeterminant": "Data:" }; await Promise.all(Object.keys(headings).map(async function (key) { if (key in schemaTest) { $("#inputData ." + key + "").show(); $("#inputData ." + key + " p:first").text(headings[key]); await paintManifest("#inputData ." + key + " ul", schemaTest[key], pickData, listItems, "inputData"); } else { $("#inputData ." 
+ key + " ul").empty(); } })); $("#inputSchema li.selected").removeClass("selected"); $(elt).addClass("selected"); try { await Caches.inputSchema.refresh(); } catch (e) { failMessage(e, "parsing schema"); } } } async function pickData (name, dataTest, elt, listItems, side) { clearData(); if ($(elt).hasClass("selected")) { $(elt).removeClass("selected"); } else { // Update data pane. await Caches.inputData.set(dataTest.text, new URL((dataTest.url || ""), DefaultBase).href); Caches.inputData.url = undefined; // @@ crappyHack1 $("#inputData .status").text(name); $("#inputData li.selected").removeClass("selected"); $(elt).addClass("selected"); try { await Caches.inputData.refresh(); } catch (e) { failMessage(e, "parsing data"); } // Update ShapeMap pane. removeEditMapPair(null); if (dataTest.entry.queryMap !== undefined) { await queryMapLoaded(dataTest.entry.queryMap); } else if (dataTest.entry.queryMapURL !== undefined) { try { const resp = await fetchOK(dataTest.entry.queryMapURL) queryMapLoaded(resp); } catch (e) { renderErrorMessage(e, "queryMap"); } } else { results.append($("<div/>").text("No queryMap or queryMapURL supplied in manifest").addClass("warning")); } async function queryMapLoaded (text) { dataTest.entry.queryMap = text; try { $("#textMap").val(JSON.parse(dataTest.entry.queryMap).map(entry => `<${entry.node}>@<${entry.shape}>`).join(",\n")); } catch (e) { $("#textMap").val(dataTest.entry.queryMap); } await copyTextMapToEditMap(); Caches.outputSchema.set(dataTest.entry.outputSchema, dataTest.outputSchemaUrl); $("#outputSchema .status").text(name); Caches.statics.set(JSON.stringify(dataTest.entry.staticVars, null, " ")); $("#staticVars .status").text(name); $("#outputShape").val(dataTest.entry.outputShape); // targetSchema.start in Map-test $("#createRoot").val(dataTest.entry.createRoot); // createRoot in Map-test // callValidator(); } } } // Control results area content. 
const results = (function () { const resultsElt = document.querySelector("#results div"); const resultsSel = $("#results div"); return { replace: function (text) { return resultsSel.text(text); }, append: function (text) { return resultsSel.append(text); }, clear: function () { resultsSel.removeClass("passes fails error"); $("#results .status").text("").hide(); $("#shapeMap-tabs").removeAttr("title"); return resultsSel.text(""); }, start: function () { resultsSel.removeClass("passes fails error"); $("#results").addClass("running"); }, finish: function () { $("#results").removeClass("running"); const height = resultsSel.height(); resultsSel.height(1); resultsSel.animate({height:height}, 100); }, text: function () { return $(resultsElt).text(); } }; })(); let LastFailTime = 0; // Validation UI function disableResultsAndValidate (evt) { if (new Date().getTime() - LastFailTime < 100) { results.append( $("<div/>").addClass("warning").append( $("<h2/>").text("see shape map errors above"), $("<button/>").text("validate (ctl-enter)").on("click", disableResultsAndValidate), " again to continue." ) ); return; // return if < 100ms since last error. } results.clear(); results.start(); SharedForTests.promise = new Promise((resolve, reject) => { setTimeout(async function () { const errors = await copyEditMapToTextMap() // will update if #editMap is dirty if (errors.length === 0) resolve(await callValidator()) }, 0); }) } function hasFocusNode () { return $(".focus").map((idx, elt) => { return $(elt).val(); }).get().some(str => { return str.length > 0; }); } let Mapper = null async function callValidator (done) { $("#fixedMap .pair").removeClass("passes fails"); $("#results .status").hide(); let currentAction = "parsing input schema"; try { await Caches.inputSchema.refresh(); // @@ throw away parser stack? 
$("#schemaDialect").text(Caches.inputSchema.language); if (hasFocusNode()) { currentAction = "parsing input data"; $("#results .status").text("parsing data...").show(); const inputData = await Caches.inputData.refresh(); // need prefixes for ShapeMap // $("#shapeMap-tabs").tabs("option", "active", 2); // select fixedMap currentAction = "parsing shape map"; const fixedMap = fixedShapeMapToTerms($("#fixedMap tr").map((idx, tr) => { return { node: Caches.inputData.meta.lexToTerm($(tr).find("input.focus").val()), shape: Caches.inputSchema.meta.lexToTerm($(tr).find("input.inputShape").val()) }; }).get()); currentAction = "creating validator"; $("#results .status").text("creating validator...").show(); // const dataURL = "data:text/json," + // JSON.stringify( // ShEx.Util.AStoShExJ( // ShEx.Util.canonicalize( // Caches.inputSchema.refresh()))); const alreadLoaded = { schema: await Caches.inputSchema.refresh(), url: Caches.inputSchema.url || DefaultBase }; // shex-node loads IMPORTs and tests the schema for structural faults. try { const loaded = await ShExApi.load([alreadLoaded], [], [], []); let time; const validator = ShEx.Validator.construct( loaded.schema, inputData, { results: "api", regexModule: ShEx[$("#regexpEngine").val()] }); $(".extensionControl:checked").each(function () { $(this).data("code").register(validator, ShEx); }) Mapper = MapModule.register(validator, ShEx); currentAction = "validating"; $("#results .status").text("validating...").show(); time = new Date(); const ret = validator.validate(fixedMap, LOG_PROGRESS ? 
makeConsoleTracker() : null); time = new Date() - time; $("#shapeMap-tabs").attr("title", "last validation: " + time + " ms") // const dated = Object.assign({ _when: new Date().toISOString() }, ret); $("#results .status").text("rendering results...").show(); await Promise.all(ret.map(renderEntry)); // for debugging values and schema formats: // try { // const x = ShExUtil.valToValues(ret); // // const x = ShExUtil.ShExJtoAS(valuesToSchema(valToValues(ret))); // res = results.replace(JSON.stringify(x, null, " ")); // const y = ShExUtil.valuesToSchema(x); // res = results.append(JSON.stringify(y, null, " ")); // } catch (e) { // console.dir(e); // } finishRendering(); return { validationResults: ret }; // for tester or whoever is awaiting this promise } catch (e) { $("#results .status").text("validation errors:").show(); failMessage(e, currentAction); console.error(e); // dump details to console. return { validationError: e }; } } else { const outputLanguage = Caches.inputSchema.language === "ShExJ" ? "ShExC" : "ShExJ"; $("#results .status"). text("parsed "+Caches.inputSchema.language+" schema, generated "+outputLanguage+" "). append($("<button>(copy to input)</button>"). css("border-radius", ".5em"). on("click", async function () { await Caches.inputSchema.set($("#results div").text(), DefaultBase); })). append(":"). 
show(); let parsedSchema; if (Caches.inputSchema.language === "ShExJ") { const opts = { simplifyParentheses: false, base: Caches.inputSchema.meta.base, prefixes: Caches.inputSchema.meta.prefixes } new ShEx.Writer(opts).writeSchema(Caches.inputSchema.parsed, (error, text) => { if (error) { $("#results .status").text("unwritable ShExJ schema:\n" + error).show(); // res.addClass("error"); } else { results.append($("<pre/>").text(text).addClass("passes")); } }); } else { const pre = $("<pre/>"); pre.text(JSON.stringify(ShEx.Util.AStoShExJ(ShEx.Util.canonicalize(Caches.inputSchema.parsed)), null, " ")).addClass("passes"); results.append(pre); } results.finish(); return { transformation: { from: Caches.inputSchema.language, to: outputLanguage } } } } catch (e) { failMessage(e, currentAction); console.error(e); // dump details to console. return { inputError: e }; } function makeConsoleTracker () { function padding (depth) { return (new Array(depth + 1)).join(" "); } // AKA " ".repeat(depth) function sm (node, shape) { return `${Caches.inputData.meta.termToLex(node)}@${Caches.inputSchema.meta.termToLex(shape)}`; } const logger = { recurse: x => { console.log(`${padding(logger.depth)}↻ ${sm(x.node, x.shape)}`); return x; }, known: x => { console.log(`${padding(logger.depth)}↵ ${sm(x.node, x.shape)}`); return x; }, enter: (point, label) => { console.log(`${padding(logger.depth)}→ ${sm(point, label)}`); ++logger.depth; }, exit: (point, label, ret) => { --logger.depth; console.log(`${padding(logger.depth)}← ${sm(point, label)}`); }, depth: 0 }; return logger; } } async function renderEntry (entry) { const fails = entry.status === "nonconformant"; // locate FixedMap entry const shapeString = entry.shape === ShEx.Validator.start ? START_SHAPE_INDEX_ENTRY : entry.shape; const fixedMapEntry = $("#fixedMap .pair"+ "[data-node='"+entry.node+"']"+ "[data-shape='"+shapeString+"']"); const klass = (fails ^ fixedMapEntry.find(".shapeMap-joiner").hasClass("nonconformant")) ? 
"fails" : "passes"; const resultStr = fails ? "✗" : "✓"; let elt = null; if (!fails) { if ($("#success").val() === "query" || $("#success").val() === "remainder") { const proofStore = new RdfJs.Store(); ShEx.Util.getProofGraph(entry.appinfo, proofStore, RdfJs.DataFactory); entry.graph = proofStore.getQuads(); } if ($("#success").val() === "remainder") { const remainder = new RdfJs.Store(); remainder.addQuads((await Caches.inputData.refresh()).getQuads()); entry.graph.forEach(q => remainder.removeQuad(q)); entry.graph = remainder.getQuads(); } } if (entry.graph) { const wr = new RdfJs.Writer(Caches.inputData.meta); wr.addQuads(entry.graph); wr.end((error, results) => { if (error) throw error; entry.turtle = "" + "# node: " + entry.node + "\n" + "# shape: " + entry.shape + "\n" + results.trim(); elt = $("<pre/>").text(entry.turtle).addClass(klass); }); delete entry.graph; } else { let renderMe = entry switch ($("#interface").val()) { case "human": elt = $("<div class='human'/>").append( $("<span/>").text(resultStr), $("<span/>").text( `${Caches.inputData.meta.termToLex(entry.node)}@${fails ? "!" 
: ""}${Caches.inputSchema.meta.termToLex(entry.shape)}` )).addClass(klass); if (fails) elt.append($("<pre>").text(ShEx.Util.errsToSimple(entry.appinfo).join("\n"))); break; case "minimal": if (fails) entry.reason = ShEx.Util.errsToSimple(entry.appinfo).join("\n"); renderMe = Object.keys(entry).reduce((acc, key) => { if (key !== "appinfo") acc[key] = entry[key]; return acc }, {}); // falling through to default covers the appinfo case default: elt = $("<pre/>").text(JSON.stringify(renderMe, null, " ")).addClass(klass); } } results.append(elt); // update the FixedMap fixedMapEntry.addClass(klass).find("a").text(resultStr); const nodeLex = fixedMapEntry.find("input.focus").val(); const shapeLex = fixedMapEntry.find("input.inputShape").val(); const anchor = encodeURIComponent(nodeLex) + "@" + encodeURIComponent(shapeLex); elt.attr("id", anchor); fixedMapEntry.find("a").attr("href", "#" + anchor); fixedMapEntry.attr("title", entry.elapsed + " ms") if (entry.status === "conformant") { const resultBindings = ShEx.Util.valToExtension(entry.appinfo, MapModule.url); await Caches.bindings.set(JSON.stringify(resultBindings, null, " ")); } else { await Caches.bindings.set("{}"); } } function finishRendering (done) { $("#results .status").text("rendering results...").show(); // Add commas to JSON results. if ($("#interface").val() !== "human") $("#results div *").each((idx, elt) => { if (idx === 0) $(elt).prepend("["); $(elt).append(idx === $("#results div *").length - 1 ? 
"]" : ","); }); $("#results .status").hide(); // for debugging values and schema formats: // try { // const x = ShEx.Util.valToValues(ret); // // const x = ShEx.Util.ShExJtoAS(valuesToSchema(valToValues(ret))); // res = results.replace(JSON.stringify(x, null, " ")); // const y = ShEx.Util.valuesToSchema(x); // res = results.append(JSON.stringify(y, null, " ")); // } catch (e) { // console.dir(e); // } results.finish(); } function failMessage (e, action, text) { $("#results .status").empty().text("Errors encountered:").show() const div = $("<div/>").addClass("error"); div.append($("<h3/>").text("error " + action + ":\n")); div.append($("<pre/>").text(e.message)); if (text) div.append($("<pre/>").text(text)); results.append(div); LastFailTime = new Date().getTime(); } async function materialize () { SharedForTests.promise = materializeAsync() } async function materializeAsync () { if (Caches.bindings.get().trim().length === 0) { results.replace("You must validate data against a ShExMap schema to populate mappings bindings."). 
removeClass("passes fails").addClass("error"); return null; } results.start(); const parsing = "output schema"; try { const outputSchemaText = Caches.outputSchema.selection.val(); const outputSchemaIsJSON = outputSchemaText.match(/^\s*\{/); const outputSchema = await Caches.outputSchema.refresh(); // const resultBindings = Object.assign( // await Caches.statics.refresh(), // await Caches.bindings.refresh() // ); function _dup (obj) { return JSON.parse(JSON.stringify(obj)); } const resultBindings = _dup(await Caches.bindings.refresh()); if (Caches.statics.get().trim().length === 0) await Caches.statics.set("{ }"); const _t = await Caches.statics.refresh(); if (_t && Object.keys(_t) > 0) { if (!Array.isArray(resultBindings)) resultBindings = [resultBindings]; resultBindings.unshift(_t); } // const trivialMaterializer = Mapper.trivialMaterializer(outputSchema); const outputShapeMap = fixedShapeMapToTerms([{ node: Caches.inputData.meta.lexToTerm($("#createRoot").val()), shape: Caches.outputSchema.meta.lexToTerm($("#outputShape").val()) // resolve with Caches.outputSchema }]); const binder = Mapper.binder(resultBindings); await Caches.bindings.set(JSON.stringify(resultBindings, null, " ")); // const outputGraph = trivialMaterializer.materialize(binder, lexToTerm($("#createRoot").val()), outputShape); // binder = Mapper.binder(resultBindings); const generatedGraph = new RdfJs.Store(); $("#results div").empty(); $("#results .status").text("materializing data...").show(); outputShapeMap.forEach(pair => { try { const materializer = MapModule.materializer.construct(outputSchema, Mapper, {}); const res = materializer.validate(binder, pair.node, pair.shape); if ("errors" in res) { renderEntry( { node: pair.node, shape: pair.shape, status: "errors" in res ? 
"nonconformant" : "conformant", appinfo: res, elapsed: -1 }) // $("#results .status").text("validation errors:").show(); // $("#results .status").text("synthesis errors:").show(); // failMessage(e, currentAction); } else { // console.log("g:", ShEx.Util.valToTurtle(res)); generatedGraph.addQuads(ShEx.Util.valToN3js(res, RdfJs.DataFactory)); } } catch (e) { console.dir(e); } }); finishRendering(); $("#results .status").text("materialization results").show(); const writer = new RdfJs.Writer({ prefixes: Caches.outputSchema.parsed._prefixes }); writer.addQuads(generatedGraph.getQuads()); writer.end(function (error, result) { results.append( $("<div/>", {class: "passes"}).append( $("<span/>", {class: "shapeMap"}).append( "# ", $("<span/>", {class: "data"}).text($("#createRoot").val()), $("<span/>", {class: "valStatus"}).text("@"), $("<span/>", {class: "schema"}).text($("#outputShape").val()), ), $("<pre/>").text(result) ) ) // results.append($("<pre/>").text(result)); }); results.finish(); return { materializationResults: generatedGraph }; } catch (e) { results.replace("error parsing " + parsing + ":\n" + e). removeClass("passes fails").addClass("error"); // results.finish(); return null; } } function addEmptyEditMapPair (evt) { addEditMapPairs(null, $(evt.target).parent().parent()); markEditMapDirty(); return false; } function addEditMapPairs (pairs, target) { (pairs || [{node: {type: "empty"}}]).forEach(pair => { const nodeType = (typeof pair.node !== "object" || "@value" in pair.node) ? 
"node" : pair.node.type; let skip = false; let node, shape; switch (nodeType) { case "empty": node = shape = ""; break; case "node": node = ldToTurtle(pair.node, Caches.inputData.meta.termToLex); shape = startOrLdToTurtle(pair.shape); break; case "TriplePattern": node = renderTP(pair.node); shape = startOrLdToTurtle(pair.shape); break; case "Extension": failMessage(Error("unsupported extension: <" + pair.node.language + ">"), "parsing Query Map", pair.node.lexical); skip = true; // skip this entry. break; default: results.append($("<div/>").append( $("<span/>").text("unrecognized ShapeMap:"), $("<pre/>").text(JSON.stringify(pair)) ).addClass("error")); skip = true; // skip this entry. break; } if (!skip) { const spanElt = $("<tr/>", {class: "pair"}); const focusElt = $("<textarea/>", { rows: '1', type: 'text', class: 'data focus' }).text(node).on("change", markEditMapDirty); const joinerElt = $("<span>", { class: 'shapeMap-joiner' }).append("@").addClass(pair.status); joinerElt.append( $("<input>", {style: "border: none; width: .2em;", readonly: "readonly"}).val(pair.status === "nonconformant" ? "!" : " ").on("click", function (evt) { const status = $(this).parent().hasClass("nonconformant") ? "conformant" : "nonconformant"; $(this).parent().removeClass("conformant nonconformant"); $(this).parent().addClass(status); $(this).val(status === "nonconformant" ? "!" 
: ""); markEditMapDirty(); evt.preventDefault(); }) ); // if (pair.status === "nonconformant") { // joinerElt.append("!"); // } const shapeElt = $("<input/>", { type: 'text', value: shape, class: 'schema inputShape' }).on("change", markEditMapDirty); const addElt = $("<button/>", { class: "addPair", title: "add a node/shape pair"}).text("+"); const removeElt = $("<button/>", { class: "removePair", title: "remove this node/shape pair"}).text("-"); addElt.on("click", addEmptyEditMapPair); removeElt.on("click", removeEditMapPair); spanElt.append([focusElt, joinerElt, shapeElt, addElt, removeElt].map(elt => { return $("<td/>").append(elt); })); if (target) { target.after(spanElt); } else { $("#editMap").append(spanElt); } } }); if ($("#editMap .removePair").length === 1) $("#editMap .removePair").css("visibility", "hidden"); else $("#editMap .removePair").css("visibility", "visible"); $("#editMap .pair").each(idx => { addContextMenus("#editMap .pair:nth("+idx+") .focus", Caches.inputData); addContextMenus(".pair:nth("+idx+") .inputShape", Caches.inputSchema); }); return false; function renderTP (tp) { const ret = ["subject", "predicate", "object"].map(k => { const ld = tp[k]; if (ld === ShEx.ShapeMap.focus) return "FOCUS"; if (!ld) // ?? ShEx.Uti.any return "_"; return ldToTurtle(ld, Caches.inputData.meta.termToLex); }); return "{" + ret.join(" ") + "}"; } function startOrLdToTurtle (term) { return term === ShEx.Validator.start ? 
START_SHAPE_LABEL : ldToTurtle(term, Caches.inputSchema.meta.termToLex); } } function removeEditMapPair (evt) { markEditMapDirty(); if (evt) { $(evt.target).parent().parent().remove(); } else { $("#editMap .pair").remove(); } if ($("#editMap .removePair").length === 1) $("#editMap .removePair").css("visibility", "hidden"); return false; } function prepareControls () { $("#menu-button").on("click", toggleControls); $("#interface").on("change", setInterface); $("#success").on("change", setInterface); $("#regexpEngine").on("change", toggleControls); $("#validate").on("click", disableResultsAndValidate); $("#clear").on("click", clearAll); $("#materialize").on("click", materialize); $("#download-results-button").on("click", downloadResults); $("#loadForm").dialog({ autoOpen: false, modal: true, buttons: { "GET": function (evt, ui) { results.clear(); const target = Getables.find(g => g.queryStringParm === $("#loadForm span.whatToLoad").text()); const url = $("#loadInput").val(); const tips = $(".validateTips"); function updateTips (t) { tips .text( t ) .addClass( "ui-state-highlight" ); setTimeout(function() { tips.removeClass( "ui-state-highlight", 1500 ); }, 500 ); } if (url.length < 5) { $("#loadInput").addClass("ui-state-error"); updateTips("URL \"" + url + "\" is way too short."); return; } tips.removeClass("ui-state-highlight").text(); SharedForTests.promise = target.cache.asyncGet(url).catch(function (e) { updateTips(e.message); }); }, "Cancel": function() { $("#loadInput").removeClass("ui-state-error"); $("#loadForm").dialog("close"); toggleControls(); } }, close: function() { $("#loadInput").removeClass("ui-state-error"); $("#loadForm").dialog("close"); toggleControls(); } }); Getables.forEach(target => { const type = target.queryStringParm $("#load-"+type+"-button").click(evt => { const prefillURL = target.url ? target.url : target.cache.meta.base && target.cache.meta.base !== DefaultBase ? 
target.cache.meta.base : ""; $("#loadInput").val(prefillURL); $("#loadForm").attr("class", type).find("span.whatToLoad").text(type); $("#loadForm").dialog("open"); }); }); $("#about").dialog({ autoOpen: false, modal: true, width: "50%", buttons: { "Dismiss": dismissModal }, close: dismissModal }); $("#about-button").click(evt => { $("#about").dialog("open"); }); $("#shapeMap-tabs").tabs({ activate: async function (event, ui) { if (ui.oldPanel.get(0) === $("#editMap-tab").get(0)) await copyEditMapToTextMap(); else if (ui.oldPanel.get(0) === $("#textMap").get(0)) await copyTextMapToEditMap() } }); $("#textMap").on("change", evt => { results.clear(); SharedForTests.promise = copyTextMapToEditMap(); }); Caches.inputData.selection.on("change", dataInputHandler); // input + paste? // $("#copyEditMapToFixedMap").on("click", copyEditMapToFixedMap); // may add this button to tutorial function dismissModal (evt) { // $.unblockUI(); $("#about").dialog("close"); toggleControls(); return true; } // Prepare file uploads $("input.inputfile").each((idx, elt) => { $(elt).on("change", function (evt) { const reader = new FileReader(); reader.onload = function(evt) { if(evt.target.readyState != 2) return; if(evt.target.error) { alert("Error while reading file"); return; } $($(elt).attr("data-target")).val(evt.target.result); }; reader.readAsText(evt.target.files[0]); }); }); } async function dataInputHandler (evt) { const active = $('#shapeMap-tabs ul li.ui-tabs-active a').attr('href'); if (active === "#editMap-tab") return await copyEditMapToTextMap(); else // if (active === "#textMap") return await copyTextMapToEditMap(); } async function toggleControls (evt) { // don't use `return false` 'cause the browser doesn't wait around for a promise before looking at return false to decide the event is handled if (evt) evt.preventDefault(); const revealing = evt && $("#controls").css("display") !== "flex"; $("#controls").css("display", revealing ? 
"flex" : "none"); toggleControlsArrow(revealing ? "up" : "down"); if (revealing) { let target = evt.target; while (target.tagName !== "BUTTON") target = target.parentElement; if ($("#menuForm").css("position") === "absolute") { $("#controls"). css("top", 0). css("left", $("#menu-button").css("margin-left")); } else { const bottonBBox = target.getBoundingClientRect(); const controlsBBox = $("#menuForm").get(0).getBoundingClientRect(); const left = bottonBBox.right - bottonBBox.width; // - controlsBBox.width; $("#controls").css("top", bottonBBox.bottom).css("left", left); } $("#permalink a").removeAttr("href"); // can't click until ready const permalink = await getPermalink(); $("#permalink a").attr("href", permalink); } } function toggleControlsArrow (which) { // jQuery can't find() a prefixed attribute (xlink:href); fall back to DOM: if (document.getElementById("menu-button") === null) return; const down = $(document.getElementById("menu-button"). querySelectorAll('use[*|href="#down-arrow"]')); const up = $(document.getElementById("menu-button"). querySelectorAll('use[*|href="#up-arrow"]')); switch (which) { case "down": down.show(); up.hide(); break; case "up": down.hide(); up.show(); break; default: throw Error("toggleControlsArrow expected [up|down], got \"" + which + "\""); } } function setInterface (evt) { toggleControls(); customizeInterface(); } function downloadResults (evt) { const typed = [ { type: "text/plain", name: "results.txt" }, { type: "application/json", name: "results.json" } ][$("#interface").val() === "appinfo" ? 1 : 0]; const blob = new Blob([results.text()], {type: typed.type}); $("#download-results-button") .attr("href", window.URL.createObjectURL(blob)) .attr("download", typed.name); toggleControls(); console.log(results.text()); } /** * * location.search: e.g. "?schema=asdf&data=qwer&shape-map=ab%5Ecd%5E%5E_ef%5Egh" */ const parseQueryString = function(query) { if (query[0]==='?') query=query.substr(1); // optional leading '?' 
const map = {}; query.replace(/([^&,=]+)=?([^&,]*)(?:[&,]+|$)/g, function(match, key, value) { key=decodeURIComponent(key);value=decodeURIComponent(value); (map[key] = map[key] || []).push(value); }); return map; }; function markEditMapDirty () { $("#editMap").attr("data-dirty", true); } function markEditMapClean () { $("#editMap").attr("data-dirty", false); } /** getShapeMap -- zip a node list and a shape list into a ShapeMap * use {Caches.inputData,Caches.inputSchema}.meta.{prefix,base} to complete IRIs * @return array of encountered errors */ async function copyEditMapToFixedMap () { $("#fixedMap tbody").empty(); // empty out the fixed map. const fixedMapTab = $("#shapeMap-tabs").find('[href="#fixedMap-tab"]'); const restoreText = fixedMapTab.text(); fixedMapTab.text("resolving Fixed Map").addClass("running"); $("#fixedMap .pair").remove(); // clear out existing edit map (make optional?) const nodeShapePromises = $("#editMap .pair").get().reduce((acc, queryPair) => { $(queryPair).find(".error").removeClass("error"); // remove previous error markers const node = $(queryPair).find(".focus").val(); const shape = $(queryPair).find(".inputShape").val(); const status = $(queryPair).find(".shapeMap-joiner").hasClass("nonconformant") ? "nonconformant" : "conformant"; if (!node || !shape) return acc; const smparser = ShEx.ShapeMapParser.construct( Caches.shapeMap.meta.base, Caches.inputSchema.meta, Caches.inputData.meta); const nodes = []; try { const sm = smparser.parse(node + '@' + shape)[0]; const added = typeof sm.node === "string" || "@value" in sm.node ? 
Promise.resolve({nodes: [node], shape: shape, status: status}) : getQuads(sm.node.subject, sm.node.predicate, sm.node.object) .then(nodes => Promise.resolve({nodes: nodes, shape: shape, status: status})); return acc.concat(added); } catch (e) { // find which cell was broken try { smparser.parse(node + '@' + "START"); } catch (e) { $(queryPair).find(".focus").addClass("error"); } try { smparser.parse("<>" + '@' + shape); } catch (e) { $(queryPair).find(".inputShape").addClass("error"); } failMessage(e, "parsing Edit Map", node + '@' + shape); nodes = Promise.resolve([]); // skip this entry return acc; } }, []); const pairs = await Promise.all(nodeShapePromises) pairs.reduce((acc, pair) => { pair.nodes.forEach(node => { const nodeTerm = Caches.inputData.meta.lexToTerm(node + " "); // for langcode lookahead let shapeTerm = Caches.inputSchema.meta.lexToTerm(pair.shape); if (shapeTerm === ShEx.Validator.start) shapeTerm = START_SHAPE_INDEX_ENTRY; const key = nodeTerm + "|" + shapeTerm; if (key in acc) return; const spanElt = createEntry(node, nodeTerm, pair.shape, shapeTerm, pair.status); acc[key] = spanElt; // just needs the key so far. }); return acc; }, {}) // scroll inputs to right $("#fixedMap input").each((idx, focusElt) => { focusElt.scrollLeft = focusElt.scrollWidth; }); fixedMapTab.text(restoreText).removeClass("running"); return []; // no errors async function getQuads (s, p, o) { const get = s === ShEx.ShapeMap.focus ? "subject" : "object"; return (await Caches.inputData.refresh()).getQuads(mine(s), mine(p), mine(o)).map(t => { return Caches.inputData.meta.termToLex(t[get]);// !!check }); function mine (term) { return term === ShEx.ShapeMap.focus || term === ShEx.ShapeMap.wildcard ? 
null : term; } } function createEntry (node, nodeTerm, shape, shapeTerm, status) { const spanElt = $("<tr/>", {class: "pair" ,"data-node": nodeTerm ,"data-shape": shapeTerm }); const focusElt = $("<input/>", { type: 'text', value: node, class: 'data focus', disabled: "disabled" }); const joinerElt = $("<span>", { class: 'shapeMap-joiner' }).append("@").addClass(status); if (status === "nonconformant") { joinerElt.addClass("negated"); joinerElt.append("!"); } const shapeElt = $("<input/>", { type: 'text', value: shape, class: 'schema inputShape', disabled: "disabled" }); const removeElt = $("<button/>", { class: "removePair", title: "remove this node/shape pair"}).text("-"); removeElt.on("click", evt => { // Remove related result. let href, result; if ((href = $(evt.target).closest("tr").find("a").attr("href")) && (result = document.getElementById(href.substr(1)))) $(result).remove(); // Remove FixedMap entry. $(evt.target).closest("tr").remove(); }); spanElt.append([focusElt, joinerElt, shapeElt, removeElt, $("<a/>")].map(elt => { return $("<td/>").append(elt); })); $("#fixedMap").append(spanElt); return spanElt; } } function lexifyFirstColumn (row) { // !!not used return Caches.inputData.meta.termToLex(row[0]); // row[0] is the first column. } /** * @return list of errors encountered */ async function copyEditMapToTextMap () { if ($("#editMap").attr("data-dirty") === "true") { const text = $("#editMap .pair").get().reduce((acc, queryPair) => { const node = $(queryPair).find(".focus").val(); const shape = $(queryPair).find(".inputShape").val(); if (!node || !shape) return acc; const status = $(queryPair).find(".shapeMap-joiner").hasClass("nonconformant") ? "!" : ""; return acc.concat([node+"@"+status+shape]); }, []).join(",\n"); $("#textMap").empty().val(text); const ret = await copyEditMapToFixedMap(); markEditMapClean(); return ret; } else { return []; // no errors } } /** * Parse query map to populate #editMap and #fixedMap. * @returns list of errors. 
([] means everything was good.) */ async function copyTextMapToEditMap () { $("#textMap").removeClass("error"); const shapeMap = $("#textMap").val(); results.clear(); try { await Caches.inputSchema.refresh(); await Caches.inputData.refresh(); const smparser = ShEx.ShapeMapParser.construct( Caches.shapeMap.meta.base, Caches.inputSchema.meta, Caches.inputData.meta); const sm = smparser.parse(shapeMap); removeEditMapPair(null); addEditMapPairs(sm.length ? sm : null); const ret = await copyEditMapToFixedMap(); markEditMapClean(); results.clear(); return ret; } catch (e) { $("#textMap").addClass("error"); failMessage(e, "parsing Query Map"); makeFreshEditMap() return [e]; } } function makeFreshEditMap () { removeEditMapPair(null); addEditMapPairs(null, null); markEditMapClean(); return []; } /** fixedShapeMapToTerms -- map ShapeMap to API terms * @@TODO: add to ShExValidator so API accepts ShapeMap */ function fixedShapeMapToTerms (shapeMap) { return shapeMap; /*.map(pair => { return {node: Caches.inputData.meta.lexToTerm(pair.node + " "), shape: Caches.inputSchema.meta.lexToTerm(pair.shape)}; });*/ } /** * Load URL search parameters */ async function loadSearchParameters () { // don't overwrite if we arrived here from going back and forth in history if (Caches.inputSchema.selection.val() !== "" || Caches.inputData.selection.val() !== "") return Promise.resolve(); const iface = parseQueryString(location.search); toggleControlsArrow("down"); $(".manifest li").text("no manifest schemas loaded"); if ("examples" in iface) { // deprecated ?examples= interface iface.manifestURL = iface.examples; delete iface.examples; } if (!("manifest" in iface) && !("manifestURL" in iface)) { iface.manifestURL = ["../examples/manifest.json"]; } if ("output-map" in iface) parseShapeMap("output-map", function (node, shape) { // only works for one n/s pair $("#createNode").val(node); $("#outputShape").val(shape); }); // Load all known query parameters. 
Save load results into array like: /* [ [ "data", { "skipped": "skipped" } ], [ "manifest", { "fromUrl": { "url": "http://...", "data": "..." } } ], ] */ const loadedAsArray = await Promise.all(QueryParams.map(async input => { const label = input.queryStringParm; const parm = label; if (parm + "URL" in iface) { const url = iface[parm + "URL"][0]; if (url.length > 0) { // manifest= loads no manifest // !!! set anyways in asyncGet? input.cache.url = url; // all fooURL query parms are caches. try { const got = await input.cache.asyncGet(url) return [label, {fromUrl: got}] } catch(e) { if ("fail" in input) { input.fail(e); } else { input.location.val(e.message); } results.append($("<pre/>").text(e).addClass("error")); return [label, { loadFailure: e instanceof Error ? e : Error(e) }]; }; } } else if (parm in iface) { const prepend = input.location.prop("tagName") === "TEXTAREA" ? input.location.val() : ""; const value = prepend + iface[parm].join(""); const origValue = input.location.val(); try { if ("cache" in input) { await input.cache.set(value, location.href); } else { input.location.val(prepend + value); if (input.location.val() === null) throw Error(`Unable to set value to ${prepend + value}`) } return [label, { literal: value }] } catch (e) { input.location.val(origValue); if ("fail" in input) { input.fail(e); } results.append($("<pre/>").text( "error setting " + label + ":\n" + e + "\n" + value ).addClass("error")); return [label, { failure: e }] } } else if ("deflt" in input) { input.location.val(input.deflt); return [label, { deflt: "deflt" }]; // flag that it was a default } return [label, { skipped: "skipped" }] })) // convert loaded array into Object: /* { "data": { "skipped": "skipped" }, "manifest": { "fromUrl": { "url": "http://...", "data": "..." } }, } */ const loaded = loadedAsArray.reduce((acc, fromArray) => { acc[fromArray[0]] = fromArray[1] return acc }, {}) // Parse the shape-map using the prefixes and base. 
const shapeMapErrors = $("#textMap").val().trim().length > 0 ? copyTextMapToEditMap() : makeFreshEditMap(); customizeInterface(); $("body").keydown(async function (e) { // keydown because we need to preventDefault const code = e.keyCode || e.charCode; // standards anyone? if (e.ctrlKey && (code === 10 || code === 13)) { // ctrl-enter // const at = $(":focus"); const smErrors = await dataInputHandler(); if (smErrors.length === 0) $("#validate")/*.focus()*/.click(); // at.focus(); return false; // same as e.preventDefault(); } else if (e.ctrlKey && e.key === "\\") { $("#materialize").click(); return false; // same as e.preventDefault(); } else if (e.ctrlKey && e.key === "[") { bindingsToTable() return false; // same as e.preventDefault(); } else if (e.ctrlKey && e.key === "]") { tableToBindings() return false; // same as e.preventDefault(); } else { return true; } }); addContextMenus("#focus0", Caches.inputData); addContextMenus("#inputShape0", Caches.inputSchema); addContextMenus("#outputShape", Caches.outputSchema); if ("schemaURL" in iface || // some schema is non-empty ("schema" in iface && iface.schema.reduce((r, elt) => { return r+elt.length; }, 0)) && shapeMapErrors.length === 0) { return callValidator(); } return loaded; } function setTextAreaHandlers (listItems) { const textAreaCaches = ["inputSchema", "inputData", "shapeMap"] const timeouts = Object.keys(Caches).reduce((acc, k) => { acc[k] = undefined; return acc; }, {}); Object.keys(Caches).forEach(function (cache) { Caches[cache].selection.keyup(function (e) { // keyup to capture backspace const code = e.keyCode || e.charCode; // if (!(e.ctrlKey)) { // results.clear(); // } if (!(e.ctrlKey && (code === 10 || code === 13))) { later(e.target, cache, Caches[cache]); } }); }); function later (target, side, cache) { cache.dirty(true); if (timeouts[side]) clearTimeout(timeouts[side]); timeouts[side] = setTimeout(() => { timeouts[side] = undefined; const curSum = sum($(target).val()); if (curSum in 
listItems[side]) listItems[side][curSum].addClass("selected"); else $("#"+side+" .selected").removeClass("selected"); delete cache.url; }, INPUTAREA_TIMEOUT); } } /** * update location with a current values of some inputs */ async function getPermalink () { let parms = []; await copyEditMapToTextMap(); parms = parms.concat(QueryParams.reduce((acc, input) => { let parm = input.queryStringParm; let val = input.location.val(); if (input.cache && input.cache.url && // Specifically avoid loading from DefaultBase?schema=blah // because that will load the HTML page. !input.cache.url.startsWith(DefaultBase)) { parm += "URL"; val = input.cache.url; } return val.length > 0 ? acc.concat(parm + "=" + encodeURIComponent(val)) : acc; }, [])); const s = parms.join("&"); return location.origin + location.pathname + "?" + s; } function customizeInterface () { if ($("#interface").val() === "minimal") { $("#inputSchema .status").html("schema (<span id=\"schemaDialect\">ShEx</span>)").show(); $("#inputData .status").html("data (<span id=\"dataDialect\">Turtle</span>)").show(); $("#actions").parent().children().not("#actions").hide(); $("#title img, #title h1").hide(); $("#menuForm").css("position", "absolute").css( "left", $("#inputSchema .status").get(0).getBoundingClientRect().width - $("#menuForm").get(0).getBoundingClientRect().width ); $("#controls").css("position", "relative"); } else { $("#inputSchema .status").html("schema (<span id=\"schemaDialect\">ShEx</span>)").hide(); $("#inputData .status").html("data (<span id=\"dataDialect\">Turtle</span>)").hide(); $("#actions").parent().children().not("#actions").show(); $("#title img, #title h1").show(); $("#menuForm").removeAttr("style"); $("#controls").css("position", "absolute"); } } /** * Prepare drag and drop into text areas */ async function prepareDragAndDrop () { QueryParams.filter(q => { return "cache" in q; }).map(q => { return { location: q.location, targets: [{ ext: "", // Will match any file media: "", // or media type. 
target: q.cache }] }; }).concat([ {location: $("body"), targets: [ {media: "application/json", target: Caches.manifest}, {ext: ".shex", media: "text/shex", target: Caches.inputSchema},<|fim▁hole|> {ext: ".smap", media: "text/plain", target: Caches.shapeMap}]} ]).forEach(desc => { const droparea = desc.location; // kudos to http://html5demos.com/dnd-upload desc.location. on("drag dragstart dragend dragover dragenter dragleave drop", function (e) { e.preventDefault(); e.stopPropagation(); }). on("dragover dragenter", (evt) => { desc.location.addClass("hover"); }). on("dragend dragleave drop", (evt) => { desc.location.removeClass("hover"); }). on("drop", (evt) => { evt.preventDefault(); droparea.removeClass("droppable"); $("#results .status").removeClass("error"); results.clear(); let xfer = evt.originalEvent.dataTransfer; const prefTypes = [ {type: "files"}, {type: "application/json"}, {type: "text/uri-list"}, {type: "text/plain"} ]; const promises = []; if (prefTypes.find(l => { if (l.type.indexOf("/") === -1) { if (l.type in xfer && xfer[l.type].length > 0) { $("#results .status").text("handling "+xfer[l.type].length+" files...").show(); promises.push(readfiles(xfer[l.type], desc.targets)); return true; } } else { if (xfer.getData(l.type)) { const val = xfer.getData(l.type); $("#results .status").text("handling "+l.type+"...").show(); if (l.type === "application/json") { if (desc.location.get(0) === $("body").get(0)) { let parsed = JSON.parse(val); if (!(Array.isArray(parsed))) { parsed = [parsed]; } parsed.map(elt => { const action = "action" in elt ? 
elt.action: elt; action.schemaURL = action.schema; delete action.schema; action.dataURL = action.data; delete action.data; }); promises.push(Caches.manifest.set(parsed, DefaultBase, "drag and drop")); } else { promises.push(inject(desc.targets, DefaultBase, val, l.type)); } } else if (l.type === "text/uri-list") { $.ajax({ accepts: { mycustomtype: 'text/shex,text/turtle,*/*' }, url: val, dataType: "text" }).fail(function (jqXHR, textStatus) { const error = jqXHR.statusText === "OK" ? textStatus : jqXHR.statusText; results.append($("<pre/>").text("GET <" + val + "> failed: " + error)); }).done(function (data, status, jqXhr) { try { promises.push(inject(desc.targets, val, data, (jqXhr.getResponseHeader("Content-Type") || "unknown-media-type").split(/[ ;,]/)[0])); $("#loadForm").dialog("close"); toggleControls(); } catch (e) { results.append($("<pre/>").text("unable to evaluate <" + val + ">: " + (e.stack || e))); } }); } else if (l.type === "text/plain") { promises.push(inject(desc.targets, DefaultBase, val, l.type)); } $("#results .status").text("").hide(); // desc.targets.text(xfer.getData(l.type)); return true; async function inject (targets, url, data, mediaType) { const target = targets.length === 1 ? targets[0].target : targets.reduce((ret, elt) => { return ret ? ret : mediaType === elt.media ? elt.target : null; }, null); if (target) { const appendTo = $("#append").is(":checked") ? 
target.get() : ""; await target.set(appendTo + data, url, 'drag and drop', mediaType); } else { results.append("don't know what to do with " + mediaType + "\n"); } } } } return false; }) === undefined) results.append($("<pre/>").text( "drag and drop not recognized:\n" + JSON.stringify({ dropEffect: xfer.dropEffect, effectAllowed: xfer.effectAllowed, files: xfer.files.length, items: [].slice.call(xfer.items).map(i => { return {kind: i.kind, type: i.type}; }) }, null, 2) )); SharedForTests.promise = Promise.all(promises); }); }); /*async*/ function readfiles(files, targets) { // returns promise but doesn't use await const formData = new FormData(); let successes = 0; const promises = []; for (let i = 0; i < files.length; i++) { const file = files[i], name = file.name; const target = targets.reduce((ret, elt) => { return ret ? ret : name.endsWith(elt.ext) ? elt.target : null; }, null); if (target) { promises.push(new Promise((resolve, reject) => { formData.append("file", file); const reader = new FileReader(); reader.onload = (function (target) { return async function (event) { const appendTo = $("#append").is(":checked") ? target.get() : ""; await target.set(appendTo + event.target.result, DefaultBase); ++successes; resolve() }; })(target); reader.readAsText(file); })) } else { results.append("don't know what to do with " + name + "\n"); } } return Promise.all(promises).then(() => { $("#results .status").text("loaded "+successes+" files.").show(); }) } } async function prepareManifest (demoList, base) { const listItems = Object.keys(Caches).reduce((acc, k) => { acc[k] = {}; return acc; }, {}); const nesting = demoList.reduce(function (acc, elt) { const key = elt.schemaLabel + "|" + elt.schema; if (!(key in acc)) { // first entry with this schema acc[key] = { label: elt.schemaLabel, text: elt.schema, url: elt.schemaURL || (elt.schema ? 
base : undefined) }; } else { // nth entry with this schema } if ("dataLabel" in elt) { const dataEntry = { label: elt.dataLabel, text: elt.data, url: elt.dataURL || (elt.data ? base : undefined), outputSchemaUrl: elt.outputSchemaURL || (elt.outputSchema ? base : undefined), entry: elt }; const target = elt.status === "nonconformant" ? "fails" : elt.status === "conformant" ? "passes" : "indeterminant"; if (!(target in acc[key])) { // first entry with this data acc[key][target] = [dataEntry]; } else { // n'th entry with this data acc[key][target].push(dataEntry); } } else { // this is a schema-only example } return acc; }, {}); const nestingAsList = Object.keys(nesting).map(e => nesting[e]); await paintManifest("#inputSchema .manifest ul", nestingAsList, pickSchema, listItems, "inputSchema"); } function addContextMenus (inputSelector, cache) { // !!! terribly stateful; only one context menu at a time! const DATA_HANDLE = 'runCallbackThingie' let terms = null, nodeLex = null, target, scrollLeft, m, addSpace = ""; $(inputSelector).on('contextmenu', rightClickHandler) $.contextMenu({ trigger: 'none', selector: inputSelector, build: function($trigger, e) { // return callback set by the mouseup handler return $trigger.data(DATA_HANDLE)(); } }); async function buildMenuItemsPromise (elt, evt) { if (elt.hasClass("data")) { nodeLex = elt.val(); const shapeLex = elt.parent().parent().find(".schema").val() // Would like to use SMParser but that means users can't fix bad SMs. /* const sm = smparser.parse(nodeLex + '@START')[0]; const m = typeof sm.node === "string" || "@value" in sm.node ? null : tpToM(sm.node); */ m = nodeLex.match(RegExp("^"+ParseTriplePattern+"$")); if (m) { target = evt.target; const selStart = target.selectionStart; scrollLeft = target.scrollLeft; terms = [0, 1, 2].reduce((acc, ord) => { if (m[(ord+1)*2-1] !== undefined) { const at = acc.start + m[(ord+1)*2-1].length; const len = m[(ord+1)*2] ? 
m[(ord+1)*2].length : 0; return { start: at + len, tz: acc.tz.concat([[at, len]]), match: acc.match === null && at + len >= selStart ? ord : acc.match }; } else { return acc; } }, {start: 0, tz: [], match: null }); function norm (tz) { return tz.map(t => { return t.startsWith('!') ? "- " + t.substr(1) + " -" : Caches.inputData.meta.termToLex(t); // !!check }); } const queryMapKeywords = ["FOCUS", "_"]; const getTermsFunctions = [ () => { return queryMapKeywords.concat(norm(store.getSubjects())); }, () => { return norm(store.getPredicates()); }, () => { return queryMapKeywords.concat(norm(store.getObjects())); }, ]; const store = await Caches.inputData.refresh(); if (terms.match === null) return false; // prevent contextMenu from whining about an empty list return listToCTHash(getTermsFunctions[terms.match]()) } } terms = nodeLex = null; try { return listToCTHash(await cache.getItems()) } catch (e) { failMessage(e, cache === Caches.inputSchema ? "parsing schema" : "parsing data"); let items = {}; const failContent = "no choices found"; items[failContent] = failContent; return items } // hack to emulate regex parsing product /* function tpToM (tp) { return [nodeLex, '{', lex(tp.subject), " ", lex(tp.predicate), " ", lex(tp.object), "", "}", ""]; function lex (node) { return node === ShEx.ShapeMap.focus ? "FOCUS" : node === null ? 
"_" : Caches.inputData.meta.termToLex(node); } } */ } function rightClickHandler (e) { e.preventDefault(); const $this = $(this); $this.off('contextmenu', rightClickHandler); // when the items are ready, const p = buildMenuItemsPromise($this, e) p.then(items => { // store a callback on the trigger $this.data(DATA_HANDLE, function () { return { callback: menuCallback, items: items }; }); const _offset = $this.offset(); $this.contextMenu({ x: _offset.left + 10, y: _offset.top + 10 }) $this.on('contextmenu', rightClickHandler) }); } function menuCallback (key, options) { markEditMapDirty(); if (options.items[key].ignore) { // ignore the event } else if (terms) { const term = terms.tz[terms.match]; let val = nodeLex.substr(0, term[0]) + key + addSpace + nodeLex.substr(term[0] + term[1]); if (terms.match === 2 && !m[9]) val = val + "}"; else if (term[0] + term[1] === nodeLex.length) val = val + " "; $(options.selector).val(val); // target.scrollLeft = scrollLeft + val.length - nodeLex.length; target.scrollLeft = target.scrollWidth; } else { $(options.selector).val(key); } } function listToCTHash (items) { return items.reduce((acc, item) => { acc[item] = { name: item } return acc }, {}) } } function bindingsToTable () { let d = JSON.parse($("#bindings1 textarea").val()) let div = $("<div/>").css("overflow", "auto").css("border", "thin solid red") div.css("width", $("#bindings1 textarea").width()+10) div.css("height", $("#bindings1 textarea").height()+12) $("#bindings1 textarea").hide() let thead = $("<thead/>") let tbody = $("<tbody/>") let table = $("<table>").append(thead, tbody) $("#bindings1").append(div.append(table)) let vars = []; function varsIn (a) { return a.forEach(elt => { if (Array.isArray(elt)) { varsIn(elt) } else { let tr = $("<tr/>") let cols = [] Object.keys(elt).forEach(k => { if (vars.indexOf(k) === -1) vars.push(k) let i = vars.indexOf(k) cols[i] = elt[k] }) // tr.append(cols.map(c => $("<td/>").text(c))) for (let colI = 0; colI < cols.length; 
++colI) tr.append($("<td/>").text(cols[colI] ? Caches.inputData.meta.termToLex(n3ify(cols[colI])) : "").css("background-color", "#f7f7f7")) tbody.append(tr) } }) } varsIn(Array.isArray(d) ? d : [d]) vars.forEach(v => { thead.append($("<th/>").css("font-size", "small").text(v.substr(v.lastIndexOf("#")+1, 999))) }) } function tableToBindings () { $("#bindings1 div").remove() $("#bindings1 textarea").show() } prepareControls(); const dndPromise = prepareDragAndDrop(); // async 'cause it calls Cache.X.set("") const loads = loadSearchParameters(); const ready = Promise.all([ dndPromise, loads ]); if ('_testCallback' in window) { SharedForTests.promise = ready.then(ab => ({drop: ab[0], loads: ab[1]})); window._testCallback(SharedForTests); } ready.then(resolves => { if (!('_testCallback' in window)) console.log('search parameters:', resolves[1]); // Update UI to say we're done loading everything? }, e => { // Drop catch on the floor presuming thrower updated the UI. }); function n3ify (ldterm) { if (typeof ldterm !== "object") return ldterm; const ret = "\"" + ldterm.value + "\""; if ("language" in ldterm) return ret + "@" + ldterm.language; if ("type" in ldterm) return ret + "^^" + ldterm.type; return ret; }<|fim▁end|>
{ext: ".ttl", media: "text/turtle", target: Caches.inputData}, {ext: ".json", media: "application/json", target: Caches.manifest},
<|file_name|>delivery_note_dashboard.py<|end_file_name|><|fim▁begin|>from frappe import _ def get_data(): return { 'fieldname': 'delivery_note', 'non_standard_fieldnames': { 'Stock Entry': 'delivery_note_no', 'Quality Inspection': 'reference_name', 'Auto Repeat': 'reference_document', }, 'internal_links': { 'Sales Order': ['items', 'against_sales_order'], }, 'transactions': [ {<|fim▁hole|> 'label': _('Reference'), 'items': ['Sales Order', 'Quality Inspection'] }, { 'label': _('Returns'), 'items': ['Stock Entry'] }, { 'label': _('Subscription'), 'items': ['Auto Repeat'] }, ] }<|fim▁end|>
'label': _('Related'), 'items': ['Sales Invoice', 'Packing Slip', 'Delivery Trip'] }, {
<|file_name|>NodoTipo.cpp<|end_file_name|><|fim▁begin|>#include "NodoTipo.h" NodoTipo::NodoTipo(Tipo* obj) { setTipo(obj); setDer(NULL); setIzq(NULL); _listaISBN = new ListaISBN(); } NodoTipo*&NodoTipo::getIzq() { return _izq; } void NodoTipo::setIzq(NodoTipo* _izq) { this->_izq = _izq; } NodoTipo*& NodoTipo::getDer() { return _der; } void NodoTipo::setDer(NodoTipo* _der) { this->_der = _der; } Tipo* NodoTipo::getTipo()const { return _Tipo; } void NodoTipo::setTipo(Tipo* _Tipo) { this->_Tipo = _Tipo; } <|fim▁hole|> delete _listaISBN; delete _Tipo; } ListaISBN* NodoTipo::getListaISBN(){ return _listaISBN; } void NodoTipo::setListaISBN(ListaISBN* l){ _listaISBN = l; } void NodoTipo::agregarISBN(int isbn){ _listaISBN->Inserta(isbn); } bool NodoTipo::borrarISBN(int isbn){ return _listaISBN->borrar(isbn); } void NodoTipo::destruirISBN(){ _listaISBN->destruir(); } string NodoTipo::MostrarListaISBN(){ return _listaISBN->toString(); }<|fim▁end|>
NodoTipo::~NodoTipo() { _listaISBN->destruir();
<|file_name|>uart.rs<|end_file_name|><|fim▁begin|>// Zinc, the bare metal stack for rust. // Copyright 2014 Vladimir "farcaller" Pouzanov <farcaller@gmail.com> // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License.<|fim▁hole|>// // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. /*! UART interface. UARTConf is a MCU-specific struct. UART objects implement CharIO trait to perform actual data transmission. */ /// UART parity mode. #[derive(Copy)] pub enum Parity { /// Partity disabled. Disabled, /// Partity bit added to make number of 1s odd. Odd, /// Partity bit added to make number of 1s even. Even, /// Partity bit forced to 1. Forced1, /// Partity bit forced to 0. Forced0, }<|fim▁end|>
// You may obtain a copy of the License at
<|file_name|>htmlmetaelement.rs<|end_file_name|><|fim▁begin|>/* This Source Code Form is subject to the terms of the Mozilla Public * License, v. 2.0. If a copy of the MPL was not distributed with this * file, You can obtain one at http://mozilla.org/MPL/2.0/. */ use dom::attr::Attr; use dom::bindings::cell::DOMRefCell; use dom::bindings::codegen::Bindings::HTMLMetaElementBinding; use dom::bindings::codegen::Bindings::HTMLMetaElementBinding::HTMLMetaElementMethods; use dom::bindings::codegen::Bindings::NodeBinding::NodeMethods; use dom::bindings::inheritance::Castable; use dom::bindings::js::{MutNullableJS, Root, RootedReference}; use dom::bindings::str::DOMString; use dom::cssstylesheet::CSSStyleSheet; use dom::document::Document; use dom::element::{AttributeMutation, Element}; use dom::htmlelement::HTMLElement; use dom::htmlheadelement::HTMLHeadElement; use dom::node::{Node, UnbindContext, document_from_node, window_from_node}; use dom::virtualmethods::VirtualMethods; use dom_struct::dom_struct; use html5ever_atoms::LocalName; use parking_lot::RwLock; use servo_config::prefs::PREFS; use std::ascii::AsciiExt; use std::sync::Arc; use std::sync::atomic::AtomicBool; use style::attr::AttrValue; use style::str::HTML_SPACE_CHARACTERS; use style::stylesheets::{Stylesheet, CssRule, CssRules, Origin}; use style::viewport::ViewportRule; #[dom_struct] pub struct HTMLMetaElement { htmlelement: HTMLElement, #[ignore_heap_size_of = "Arc"] stylesheet: DOMRefCell<Option<Arc<Stylesheet>>>, cssom_stylesheet: MutNullableJS<CSSStyleSheet>, } impl HTMLMetaElement { fn new_inherited(local_name: LocalName, prefix: Option<DOMString>, document: &Document) -> HTMLMetaElement { HTMLMetaElement { htmlelement: HTMLElement::new_inherited(local_name, prefix, document), stylesheet: DOMRefCell::new(None), cssom_stylesheet: MutNullableJS::new(None), } } #[allow(unrooted_must_root)] pub fn new(local_name: LocalName, prefix: Option<DOMString>, document: &Document) -> Root<HTMLMetaElement> { 
Node::reflect_node(box HTMLMetaElement::new_inherited(local_name, prefix, document), document, HTMLMetaElementBinding::Wrap) } pub fn get_stylesheet(&self) -> Option<Arc<Stylesheet>> { self.stylesheet.borrow().clone() } pub fn get_cssom_stylesheet(&self) -> Option<Root<CSSStyleSheet>> { self.get_stylesheet().map(|sheet| { self.cssom_stylesheet.or_init(|| { CSSStyleSheet::new(&window_from_node(self), self.upcast::<Element>(), "text/css".into(), None, // todo handle location None, // todo handle title sheet) }) }) } fn process_attributes(&self) { let element = self.upcast::<Element>(); if let Some(name) = element.get_attribute(&ns!(), &local_name!("name")).r() { let name = name.value().to_ascii_lowercase(); let name = name.trim_matches(HTML_SPACE_CHARACTERS); if name == "viewport" { self.apply_viewport(); } if name == "referrer" { self.apply_referrer(); } } } fn apply_viewport(&self) { if !PREFS.get("layout.viewport.enabled").as_boolean().unwrap_or(false) { return; } let element = self.upcast::<Element>(); if let Some(content) = element.get_attribute(&ns!(), &local_name!("content")).r() { let content = content.value(); if !content.is_empty() { if let Some(translated_rule) = ViewportRule::from_meta(&**content) { *self.stylesheet.borrow_mut() = Some(Arc::new(Stylesheet { rules: CssRules::new(vec![CssRule::Viewport(Arc::new(RwLock::new(translated_rule)))]), origin: Origin::Author, base_url: window_from_node(self).get_url(), namespaces: Default::default(), media: Default::default(), // Viewport constraints are always recomputed on resize; they don't need to // force all styles to be recomputed. 
dirty_on_viewport_size_change: AtomicBool::new(false), disabled: AtomicBool::new(false), })); let doc = document_from_node(self); doc.invalidate_stylesheets(); } } } } fn process_referrer_attribute(&self) { let element = self.upcast::<Element>(); if let Some(name) = element.get_attribute(&ns!(), &local_name!("name")).r() { let name = name.value().to_ascii_lowercase(); let name = name.trim_matches(HTML_SPACE_CHARACTERS); if name == "referrer" { self.apply_referrer(); } } } /// https://html.spec.whatwg.org/multipage/#meta-referrer fn apply_referrer(&self) { if let Some(parent) = self.upcast::<Node>().GetParentElement() { if let Some(head) = parent.downcast::<HTMLHeadElement>() { head.set_document_referrer(); } } } } impl HTMLMetaElementMethods for HTMLMetaElement { // https://html.spec.whatwg.org/multipage/#dom-meta-name make_getter!(Name, "name"); // https://html.spec.whatwg.org/multipage/#dom-meta-name make_atomic_setter!(SetName, "name"); // https://html.spec.whatwg.org/multipage/#dom-meta-content make_getter!(Content, "content"); // https://html.spec.whatwg.org/multipage/#dom-meta-content make_setter!(SetContent, "content"); } impl VirtualMethods for HTMLMetaElement { fn super_type(&self) -> Option<&VirtualMethods> {<|fim▁hole|> if let Some(ref s) = self.super_type() { s.bind_to_tree(tree_in_doc); } if tree_in_doc { self.process_attributes(); } } fn parse_plain_attribute(&self, name: &LocalName, value: DOMString) -> AttrValue { match name { &local_name!("name") => AttrValue::from_atomic(value.into()), _ => self.super_type().unwrap().parse_plain_attribute(name, value), } } fn attribute_mutated(&self, attr: &Attr, mutation: AttributeMutation) { if let Some(s) = self.super_type() { s.attribute_mutated(attr, mutation); } self.process_referrer_attribute(); } fn unbind_from_tree(&self, context: &UnbindContext) { if let Some(ref s) = self.super_type() { s.unbind_from_tree(context); } if context.tree_in_doc { self.process_referrer_attribute(); } } }<|fim▁end|>
Some(self.upcast::<HTMLElement>() as &VirtualMethods) } fn bind_to_tree(&self, tree_in_doc: bool) {
<|file_name|>point_getter.rs<|end_file_name|><|fim▁begin|>// Copyright 2019 TiKV Project Authors. Licensed under Apache-2.0. use kvproto::kvrpcpb::IsolationLevel; use engine_traits::{CF_DEFAULT, CF_LOCK, CF_WRITE}; use std::borrow::Cow; use txn_types::{Key, Lock, TimeStamp, TsSet, Value, WriteRef, WriteType}; use crate::storage::kv::{Cursor, CursorBuilder, ScanMode, Snapshot, Statistics}; use crate::storage::mvcc::{default_not_found_error, NewerTsCheckState, Result}; /// `PointGetter` factory. pub struct PointGetterBuilder<S: Snapshot> { snapshot: S, multi: bool, fill_cache: bool, omit_value: bool, isolation_level: IsolationLevel, ts: TimeStamp, bypass_locks: TsSet, check_has_newer_ts_data: bool, } impl<S: Snapshot> PointGetterBuilder<S> { /// Initialize a new `PointGetterBuilder`. pub fn new(snapshot: S, ts: TimeStamp) -> Self { Self { snapshot, multi: true, fill_cache: true, omit_value: false, isolation_level: IsolationLevel::Si, ts, bypass_locks: Default::default(), check_has_newer_ts_data: false, } } /// Set whether or not to get multiple keys. /// /// Defaults to `true`. #[inline] pub fn multi(mut self, multi: bool) -> Self { self.multi = multi; self } /// Set whether or not read operations should fill the cache. /// /// Defaults to `true`. #[inline] pub fn fill_cache(mut self, fill_cache: bool) -> Self { self.fill_cache = fill_cache; self } /// Set whether values of the user key should be omitted. When `omit_value` is `true`, the /// length of returned value will be 0. /// /// Previously this option is called `key_only`. /// /// Defaults to `false`. #[inline] pub fn omit_value(mut self, omit_value: bool) -> Self { self.omit_value = omit_value; self } /// Set the isolation level. /// /// Defaults to `IsolationLevel::Si`. #[inline] pub fn isolation_level(mut self, isolation_level: IsolationLevel) -> Self { self.isolation_level = isolation_level; self } /// Set a set to locks that the reading process can bypass. /// /// Defaults to none. 
#[inline] pub fn bypass_locks(mut self, locks: TsSet) -> Self { self.bypass_locks = locks; self } /// Check whether there is data with newer ts. The result of `met_newer_ts_data` is Unknown /// if this option is not set. /// /// Default is false. #[inline] pub fn check_has_newer_ts_data(mut self, enabled: bool) -> Self { self.check_has_newer_ts_data = enabled; self } /// Build `PointGetter` from the current configuration. pub fn build(self) -> Result<PointGetter<S>> { // If we only want to get single value, we can use prefix seek. let write_cursor = CursorBuilder::new(&self.snapshot, CF_WRITE) .fill_cache(self.fill_cache) .prefix_seek(true) .scan_mode(if self.multi { ScanMode::Mixed } else { ScanMode::Forward }) .build()?; Ok(PointGetter { snapshot: self.snapshot, multi: self.multi, omit_value: self.omit_value, isolation_level: self.isolation_level, ts: self.ts, bypass_locks: self.bypass_locks, met_newer_ts_data: if self.check_has_newer_ts_data { NewerTsCheckState::NotMetYet } else { NewerTsCheckState::Unknown }, statistics: Statistics::default(), write_cursor, drained: false, }) } } /// This struct can be used to get the value of user keys. Internally, rollbacks are ignored and /// smaller version will be tried. If the isolation level is Si, locks will be checked first. /// /// Use `PointGetterBuilder` to build `PointGetter`. pub struct PointGetter<S: Snapshot> { snapshot: S, multi: bool, omit_value: bool, isolation_level: IsolationLevel, ts: TimeStamp, bypass_locks: TsSet, met_newer_ts_data: NewerTsCheckState, statistics: Statistics, write_cursor: Cursor<S::Iter>, /// Indicating whether or not this structure can serve more requests. It is meaningful only /// when `multi == false`, to protect from producing undefined values when trying to get /// multiple values under `multi == false`. drained: bool, } impl<S: Snapshot> PointGetter<S> { /// Take out and reset the statistics collected so far. 
#[inline] pub fn take_statistics(&mut self) -> Statistics { std::mem::take(&mut self.statistics) } /// Whether we met newer ts data. /// The result is always `Unknown` if `check_has_newer_ts_data` is not set. #[inline] pub fn met_newer_ts_data(&self) -> NewerTsCheckState { self.met_newer_ts_data } /// Get the value of a user key. /// /// If `multi == false`, this function must be called only once. Future calls return nothing. pub fn get(&mut self, user_key: &Key) -> Result<Option<Value>> { if !self.multi { // Protect from calling `get()` multiple times when `multi == false`. if self.drained { return Ok(None); } else { self.drained = true; } } match self.isolation_level { IsolationLevel::Si => { // Check for locks that signal concurrent writes in Si. self.load_and_check_lock(user_key)?; } IsolationLevel::Rc => {} } self.load_data(user_key) } /// Get a lock of a user key in the lock CF. If lock exists, it will be checked to /// see whether it conflicts with the given `ts`. /// /// In common cases we expect to get nothing in lock cf. Using a `get_cf` instead of `seek` /// is fast in such cases due to no need for RocksDB to continue move and skip deleted entries /// until find a user key. fn load_and_check_lock(&mut self, user_key: &Key) -> Result<()> { self.statistics.lock.get += 1; let lock_value = self.snapshot.get_cf(CF_LOCK, user_key)?; if let Some(ref lock_value) = lock_value { let lock = Lock::parse(lock_value)?; if self.met_newer_ts_data == NewerTsCheckState::NotMetYet { self.met_newer_ts_data = NewerTsCheckState::Met; } if let Err(e) = Lock::check_ts_conflict(Cow::Owned(lock), user_key, self.ts, &self.bypass_locks) { self.statistics.lock.processed_keys += 1; Err(e.into()) } else { Ok(()) } } else { Ok(()) } } /// Load the value. /// /// First, a correct version info in the Write CF will be sought. Then, value will be loaded /// from Default CF if necessary. 
fn load_data(&mut self, user_key: &Key) -> Result<Option<Value>> { let mut use_near_seek = false; let mut seek_key = user_key.clone(); if self.met_newer_ts_data == NewerTsCheckState::NotMetYet { seek_key = seek_key.append_ts(TimeStamp::max()); if !self .write_cursor .seek(&seek_key, &mut self.statistics.write)? { return Ok(None); } seek_key = seek_key.truncate_ts()?; use_near_seek = true; let cursor_key = self.write_cursor.key(&mut self.statistics.write); if !Key::is_user_key_eq(cursor_key, user_key.as_encoded().as_slice()) { return Ok(None); } if Key::decode_ts_from(cursor_key)? > self.ts { self.met_newer_ts_data = NewerTsCheckState::Met; } } seek_key = seek_key.append_ts(self.ts); let data_found = if use_near_seek { if self.write_cursor.key(&mut self.statistics.write) >= seek_key.as_encoded().as_slice() { // we call near_seek with ScanMode::Mixed set, if the key() > seek_key, // it will call prev() several times, whereas we just want to seek forward here // so cmp them in advance true } else { self.write_cursor .near_seek(&seek_key, &mut self.statistics.write)? } } else { self.write_cursor .seek(&seek_key, &mut self.statistics.write)? }; if !data_found { return Ok(None); } loop { // We may seek to another key. In this case, it means we cannot find the specified key. { let cursor_key = self.write_cursor.key(&mut self.statistics.write); if !Key::is_user_key_eq(cursor_key, user_key.as_encoded().as_slice()) { return Ok(None); } } let write = WriteRef::parse(self.write_cursor.value(&mut self.statistics.write))?; if !write.check_gc_fence_as_latest_version(self.ts) { return Ok(None); } match write.write_type { WriteType::Put => { self.statistics.write.processed_keys += 1; if self.omit_value { return Ok(Some(vec![])); } match write.short_value { Some(value) => { // Value is carried in `write`. 
self.statistics.processed_size += user_key.len() + value.len(); return Ok(Some(value.to_vec())); } None => { let start_ts = write.start_ts; let value = self.load_data_from_default_cf(start_ts, user_key)?; self.statistics.processed_size += user_key.len() + value.len(); return Ok(Some(value)); } } } WriteType::Delete => { return Ok(None); } WriteType::Lock | WriteType::Rollback => { // Continue iterate next `write`. } } if !self.write_cursor.next(&mut self.statistics.write) { return Ok(None); } } } /// Load the value from default CF. /// /// We assume that mostly the keys given to batch get keys are not very close to each other. /// `near_seek` will likely fall back to `seek` in such scenario, which takes 2x time /// compared to `get_cf`. Thus we use `get_cf` directly here. fn load_data_from_default_cf( &mut self, write_start_ts: TimeStamp, user_key: &Key, ) -> Result<Value> { self.statistics.data.get += 1; // TODO: We can avoid this clone. let value = self .snapshot .get_cf(CF_DEFAULT, &user_key.clone().append_ts(write_start_ts))?; if let Some(value) = value { self.statistics.data.processed_keys += 1; Ok(value) } else { Err(default_not_found_error( user_key.to_raw()?, "load_data_from_default_cf", )) } } } #[cfg(test)] mod tests { use super::*; use txn_types::SHORT_VALUE_MAX_LEN; use crate::storage::kv::{ CfStatistics, Engine, PerfStatisticsInstant, RocksEngine, TestEngineBuilder, }; use crate::storage::txn::tests::{ must_acquire_pessimistic_lock, must_cleanup_with_gc_fence, must_commit, must_gc, must_pessimistic_prewrite_delete, must_prewrite_delete, must_prewrite_lock, must_prewrite_put, must_rollback, }; fn new_multi_point_getter<E: Engine>(engine: &E, ts: TimeStamp) -> PointGetter<E::Snap> { let snapshot = engine.snapshot(Default::default()).unwrap(); PointGetterBuilder::new(snapshot, ts) .isolation_level(IsolationLevel::Si) .build() .unwrap() } fn new_single_point_getter<E: Engine>(engine: &E, ts: TimeStamp) -> PointGetter<E::Snap> { let snapshot = 
engine.snapshot(Default::default()).unwrap(); PointGetterBuilder::new(snapshot, ts) .isolation_level(IsolationLevel::Si) .multi(false) .build() .unwrap() } fn must_get_key<S: Snapshot>(point_getter: &mut PointGetter<S>, key: &[u8]) { assert!(point_getter.get(&Key::from_raw(key)).unwrap().is_some()); } fn must_get_value<S: Snapshot>(point_getter: &mut PointGetter<S>, key: &[u8], prefix: &[u8]) { let val = point_getter.get(&Key::from_raw(key)).unwrap().unwrap(); assert!(val.starts_with(prefix)); } fn must_met_newer_ts_data<E: Engine>( engine: &E, getter_ts: impl Into<TimeStamp>, key: &[u8], value: &[u8], expected_met_newer_ts_data: bool, ) { let snapshot = engine.snapshot(Default::default()).unwrap(); let ts = getter_ts.into(); let mut point_getter = PointGetterBuilder::new(snapshot.clone(), ts) .isolation_level(IsolationLevel::Si) .check_has_newer_ts_data(true) .build() .unwrap(); let val = point_getter.get(&Key::from_raw(key)).unwrap().unwrap(); assert_eq!(val, value); let expected = if expected_met_newer_ts_data { NewerTsCheckState::Met } else { NewerTsCheckState::NotMetYet }; assert_eq!(expected, point_getter.met_newer_ts_data()); let mut point_getter = PointGetterBuilder::new(snapshot, ts) .isolation_level(IsolationLevel::Si) .check_has_newer_ts_data(false) .build() .unwrap(); let val = point_getter.get(&Key::from_raw(key)).unwrap().unwrap(); assert_eq!(val, value); assert_eq!(NewerTsCheckState::Unknown, point_getter.met_newer_ts_data()); } fn must_get_none<S: Snapshot>(point_getter: &mut PointGetter<S>, key: &[u8]) { assert!(point_getter.get(&Key::from_raw(key)).unwrap().is_none()); } fn must_get_err<S: Snapshot>(point_getter: &mut PointGetter<S>, key: &[u8]) { assert!(point_getter.get(&Key::from_raw(key)).is_err()); } fn assert_seek_next_prev(stat: &CfStatistics, seek: usize, next: usize, prev: usize) { assert_eq!( stat.seek, seek, "expect seek to be {}, got {}", seek, stat.seek ); assert_eq!( stat.next, next, "expect next to be {}, got {}", next, stat.next ); 
assert_eq!( stat.prev, prev, "expect prev to be {}, got {}", prev, stat.prev ); } /// Builds a sample engine with the following data: /// LOCK bar (commit at 11) /// PUT bar -> barvvv... (commit at 5) /// PUT box -> boxvv.... (commit at 9) /// DELETE foo1 (commit at 9) /// PUT foo1 -> foo1vv... (commit at 3) /// LOCK foo2 (commit at 101) /// ... /// LOCK foo2 (commit at 23) /// LOCK foo2 (commit at 21) /// PUT foo2 -> foo2vv... (commit at 5) /// DELETE xxx (commit at 7) /// PUT zz -> zvzv.... (commit at 103) fn new_sample_engine() -> RocksEngine { let suffix = "v".repeat(SHORT_VALUE_MAX_LEN + 1); let engine = TestEngineBuilder::new().build().unwrap(); must_prewrite_put( &engine, b"foo1", &format!("foo1{}", suffix).into_bytes(), b"foo1", 2, ); must_commit(&engine, b"foo1", 2, 3); must_prewrite_put( &engine, b"foo2", &format!("foo2{}", suffix).into_bytes(), b"foo2", 4, ); must_prewrite_put( &engine, b"bar", &format!("bar{}", suffix).into_bytes(), b"foo2", 4, ); must_commit(&engine, b"foo2", 4, 5); must_commit(&engine, b"bar", 4, 5); must_prewrite_delete(&engine, b"xxx", b"xxx", 6); must_commit(&engine, b"xxx", 6, 7); must_prewrite_put( &engine, b"box", &format!("box{}", suffix).into_bytes(), b"box", 8, ); must_prewrite_delete(&engine, b"foo1", b"box", 8); must_commit(&engine, b"box", 8, 9); must_commit(&engine, b"foo1", 8, 9); must_prewrite_lock(&engine, b"bar", b"bar", 10); must_commit(&engine, b"bar", 10, 11); for i in 20..100 { if i % 2 == 0 { must_prewrite_lock(&engine, b"foo2", b"foo2", i); must_commit(&engine, b"foo2", i, i + 1); } } must_prewrite_put( &engine, b"zz", &format!("zz{}", suffix).into_bytes(), b"zz", 102, ); must_commit(&engine, b"zz", 102, 103); engine } /// Builds a sample engine that contains transactions on the way and some short /// values embedded in the write CF. The data is as follows: /// DELETE bar (start at 4) /// PUT bar -> barval (commit at 3) /// PUT foo1 -> foo1vv... (commit at 3) /// PUT foo2 -> foo2vv... 
(start at 4) fn new_sample_engine_2() -> RocksEngine { let suffix = "v".repeat(SHORT_VALUE_MAX_LEN + 1); let engine = TestEngineBuilder::new().build().unwrap(); must_prewrite_put( &engine, b"foo1", &format!("foo1{}", suffix).into_bytes(), b"foo1", 2, ); must_prewrite_put(&engine, b"bar", b"barval", b"foo1", 2); must_commit(&engine, b"foo1", 2, 3); must_commit(&engine, b"bar", 2, 3); must_prewrite_put( &engine, b"foo2", &format!("foo2{}", suffix).into_bytes(), b"foo2", 4, ); must_prewrite_delete(&engine, b"bar", b"foo2", 4); engine } /// No ts larger than get ts #[test] fn test_multi_basic_1() { let engine = new_sample_engine(); let mut getter = new_multi_point_getter(&engine, 200.into()); // Get a deleted key must_get_none(&mut getter, b"foo1"); let s = getter.take_statistics(); assert_seek_next_prev(&s.write, 1, 0, 0); assert_eq!(s.processed_size, 0); // Get again must_get_none(&mut getter, b"foo1"); let s = getter.take_statistics(); assert_seek_next_prev(&s.write, 1, 0, 0); assert_eq!(s.processed_size, 0); // Get a key that exists must_get_value(&mut getter, b"foo2", b"foo2v"); let s = getter.take_statistics(); // We have to check every version assert_seek_next_prev(&s.write, 1, 40, 0); assert_eq!( s.processed_size, Key::from_raw(b"foo2").len() + b"foo2".len() + "v".repeat(SHORT_VALUE_MAX_LEN + 1).len() ); // Get again must_get_value(&mut getter, b"foo2", b"foo2v"); let s = getter.take_statistics(); assert_seek_next_prev(&s.write, 1, 40, 0); assert_eq!( s.processed_size, Key::from_raw(b"foo2").len() + b"foo2".len() + "v".repeat(SHORT_VALUE_MAX_LEN + 1).len() ); // Get a smaller key must_get_none(&mut getter, b"foo1"); let s = getter.take_statistics(); assert_seek_next_prev(&s.write, 1, 0, 0); assert_eq!(s.processed_size, 0); // Get a key that does not exist must_get_none(&mut getter, b"z"); let s = getter.take_statistics(); assert_seek_next_prev(&s.write, 1, 0, 0); assert_eq!(s.processed_size, 0); // Get a key that exists must_get_value(&mut getter, b"zz", 
b"zzv"); let s = getter.take_statistics(); assert_seek_next_prev(&s.write, 1, 0, 0); assert_eq!( s.processed_size, Key::from_raw(b"zz").len() + b"zz".len() + "v".repeat(SHORT_VALUE_MAX_LEN + 1).len() ); // Get again must_get_value(&mut getter, b"zz", b"zzv"); let s = getter.take_statistics(); assert_seek_next_prev(&s.write, 1, 0, 0); assert_eq!( s.processed_size, Key::from_raw(b"zz").len() + b"zz".len() + "v".repeat(SHORT_VALUE_MAX_LEN + 1).len() ); } #[test] fn test_multi_tombstone() { let engine = TestEngineBuilder::new().build().unwrap(); must_prewrite_put(&engine, b"foo", b"bar", b"foo", 10); must_prewrite_put(&engine, b"foo1", b"bar1", b"foo", 10); must_prewrite_put(&engine, b"foo2", b"bar2", b"foo", 10); must_prewrite_put(&engine, b"foo3", b"bar3", b"foo", 10); must_commit(&engine, b"foo", 10, 20); must_commit(&engine, b"foo1", 10, 20); must_commit(&engine, b"foo2", 10, 20); must_commit(&engine, b"foo3", 10, 20); must_prewrite_delete(&engine, b"foo1", b"foo1", 30); must_prewrite_delete(&engine, b"foo2", b"foo1", 30); must_commit(&engine, b"foo1", 30, 40); must_commit(&engine, b"foo2", 30, 40); must_gc(&engine, b"foo", 50); must_gc(&engine, b"foo1", 50); must_gc(&engine, b"foo2", 50); must_gc(&engine, b"foo3", 50); let mut getter = new_multi_point_getter(&engine, TimeStamp::max()); let perf_statistics = PerfStatisticsInstant::new(); must_get_value(&mut getter, b"foo", b"bar"); assert_eq!(perf_statistics.delta().0.internal_delete_skipped_count, 0); let perf_statistics = PerfStatisticsInstant::new(); must_get_none(&mut getter, b"foo1"); assert_eq!(perf_statistics.delta().0.internal_delete_skipped_count, 2); let perf_statistics = PerfStatisticsInstant::new(); must_get_none(&mut getter, b"foo2"); assert_eq!(perf_statistics.delta().0.internal_delete_skipped_count, 2); let perf_statistics = PerfStatisticsInstant::new(); must_get_value(&mut getter, b"foo3", b"bar3"); assert_eq!(perf_statistics.delta().0.internal_delete_skipped_count, 0); } #[test] fn 
test_multi_with_iter_lower_bound() { let engine = TestEngineBuilder::new().build().unwrap(); must_prewrite_put(&engine, b"foo", b"bar", b"foo", 10); must_commit(&engine, b"foo", 10, 20); let snapshot = engine.snapshot(Default::default()).unwrap(); let write_cursor = CursorBuilder::new(&snapshot, CF_WRITE) .prefix_seek(true) .scan_mode(ScanMode::Mixed) .range(Some(Key::from_raw(b"a")), None) .build() .unwrap(); let mut getter = PointGetter { snapshot, multi: true, omit_value: false, isolation_level: IsolationLevel::Si, ts: TimeStamp::new(30), bypass_locks: Default::default(), met_newer_ts_data: NewerTsCheckState::NotMetYet, statistics: Statistics::default(), write_cursor, drained: false, }; must_get_value(&mut getter, b"foo", b"bar"); let s = getter.take_statistics(); assert_seek_next_prev(&s.write, 1, 0, 0); assert_eq!(s.processed_size, Key::from_raw(b"foo").len() + b"bar".len()); } /// Some ts larger than get ts #[test] fn test_multi_basic_2() { let engine = new_sample_engine(); let mut getter = new_multi_point_getter(&engine, 5.into()); must_get_value(&mut getter, b"bar", b"barv"); let s = getter.take_statistics(); assert_seek_next_prev(&s.write, 1, 0, 0); assert_eq!( s.processed_size, Key::from_raw(b"bar").len() + b"bar".len() + "v".repeat(SHORT_VALUE_MAX_LEN + 1).len() ); must_get_value(&mut getter, b"bar", b"barv"); let s = getter.take_statistics(); assert_seek_next_prev(&s.write, 1, 0, 0); assert_eq!( s.processed_size, Key::from_raw(b"bar").len() + b"bar".len() + "v".repeat(SHORT_VALUE_MAX_LEN + 1).len() ); must_get_none(&mut getter, b"bo"); let s = getter.take_statistics(); assert_seek_next_prev(&s.write, 1, 0, 0); assert_eq!(s.processed_size, 0); must_get_none(&mut getter, b"box"); let s = getter.take_statistics(); assert_seek_next_prev(&s.write, 1, 0, 0); assert_eq!(s.processed_size, 0); must_get_value(&mut getter, b"foo1", b"foo1"); let s = getter.take_statistics(); assert_seek_next_prev(&s.write, 1, 0, 0); assert_eq!( s.processed_size, 
Key::from_raw(b"foo1").len() + b"foo1".len() + "v".repeat(SHORT_VALUE_MAX_LEN + 1).len() ); must_get_none(&mut getter, b"zz"); let s = getter.take_statistics(); assert_seek_next_prev(&s.write, 1, 0, 0); assert_eq!(s.processed_size, 0); must_get_value(&mut getter, b"foo1", b"foo1"); let s = getter.take_statistics(); assert_seek_next_prev(&s.write, 1, 0, 0); assert_eq!( s.processed_size, Key::from_raw(b"foo1").len() + b"foo1".len() + "v".repeat(SHORT_VALUE_MAX_LEN + 1).len() ); must_get_value(&mut getter, b"bar", b"barv"); let s = getter.take_statistics(); assert_seek_next_prev(&s.write, 1, 0, 0); assert_eq!( s.processed_size, Key::from_raw(b"bar").len() + b"bar".len() + "v".repeat(SHORT_VALUE_MAX_LEN + 1).len() ); } /// All ts larger than get ts #[test] fn test_multi_basic_3() { let engine = new_sample_engine(); let mut getter = new_multi_point_getter(&engine, 2.into()); must_get_none(&mut getter, b"foo1"); let s = getter.take_statistics(); assert_seek_next_prev(&s.write, 1, 0, 0); assert_eq!(s.processed_size, 0); must_get_none(&mut getter, b"non_exist"); let s = getter.take_statistics(); assert_seek_next_prev(&s.write, 1, 0, 0); assert_eq!(s.processed_size, 0); must_get_none(&mut getter, b"foo1"); must_get_none(&mut getter, b"foo0"); let s = getter.take_statistics(); assert_seek_next_prev(&s.write, 2, 0, 0); assert_eq!(s.processed_size, 0); } /// There are some locks in the Lock CF. 
#[test] fn test_multi_locked() { let engine = new_sample_engine_2(); let mut getter = new_multi_point_getter(&engine, 1.into()); must_get_none(&mut getter, b"a"); must_get_none(&mut getter, b"bar"); must_get_none(&mut getter, b"foo1"); must_get_none(&mut getter, b"foo2"); let s = getter.take_statistics(); assert_seek_next_prev(&s.write, 4, 0, 0); assert_eq!(s.processed_size, 0); let mut getter = new_multi_point_getter(&engine, 3.into()); must_get_none(&mut getter, b"a"); must_get_value(&mut getter, b"bar", b"barv"); must_get_value(&mut getter, b"bar", b"barv"); must_get_value(&mut getter, b"foo1", b"foo1v"); must_get_value(&mut getter, b"foo1", b"foo1v"); must_get_none(&mut getter, b"foo2"); must_get_none(&mut getter, b"foo2"); let s = getter.take_statistics(); assert_seek_next_prev(&s.write, 7, 0, 0); assert_eq!( s.processed_size, (Key::from_raw(b"bar").len() + b"barval".len()) * 2 + (Key::from_raw(b"foo1").len() + b"foo1".len() + "v".repeat(SHORT_VALUE_MAX_LEN + 1).len()) * 2 ); let mut getter = new_multi_point_getter(&engine, 4.into()); must_get_none(&mut getter, b"a"); must_get_err(&mut getter, b"bar"); must_get_err(&mut getter, b"bar"); must_get_value(&mut getter, b"foo1", b"foo1v"); must_get_err(&mut getter, b"foo2"); must_get_none(&mut getter, b"zz"); let s = getter.take_statistics(); assert_seek_next_prev(&s.write, 3, 0, 0); assert_eq!( s.processed_size, Key::from_raw(b"foo1").len() + b"foo1".len() + "v".repeat(SHORT_VALUE_MAX_LEN + 1).len() ); } /// Single Point Getter can only get once. 
#[test] fn test_single_basic() { let engine = new_sample_engine_2(); let mut getter = new_single_point_getter(&engine, 1.into()); must_get_none(&mut getter, b"foo1"); let mut getter = new_single_point_getter(&engine, 3.into()); must_get_value(&mut getter, b"bar", b"barv"); must_get_none(&mut getter, b"bar"); must_get_none(&mut getter, b"foo1"); <|fim▁hole|> let mut getter = new_single_point_getter(&engine, 3.into()); must_get_none(&mut getter, b"foo2"); must_get_none(&mut getter, b"foo2"); let mut getter = new_single_point_getter(&engine, 4.into()); must_get_err(&mut getter, b"bar"); must_get_none(&mut getter, b"bar"); must_get_none(&mut getter, b"a"); must_get_none(&mut getter, b"foo1"); let mut getter = new_single_point_getter(&engine, 4.into()); must_get_value(&mut getter, b"foo1", b"foo1v"); must_get_none(&mut getter, b"foo1"); } #[test] fn test_omit_value() { let engine = new_sample_engine_2(); let snapshot = engine.snapshot(Default::default()).unwrap(); let mut getter = PointGetterBuilder::new(snapshot.clone(), 4.into()) .isolation_level(IsolationLevel::Si) .omit_value(true) .build() .unwrap(); must_get_err(&mut getter, b"bar"); must_get_key(&mut getter, b"foo1"); must_get_err(&mut getter, b"foo2"); must_get_none(&mut getter, b"foo3"); fn new_omit_value_single_point_getter<S>(snapshot: S, ts: TimeStamp) -> PointGetter<S> where S: Snapshot, { PointGetterBuilder::new(snapshot, ts) .isolation_level(IsolationLevel::Si) .omit_value(true) .multi(false) .build() .unwrap() } let mut getter = new_omit_value_single_point_getter(snapshot.clone(), 4.into()); must_get_err(&mut getter, b"bar"); must_get_none(&mut getter, b"bar"); let mut getter = new_omit_value_single_point_getter(snapshot.clone(), 4.into()); must_get_key(&mut getter, b"foo1"); must_get_none(&mut getter, b"foo1"); let mut getter = new_omit_value_single_point_getter(snapshot, 4.into()); must_get_none(&mut getter, b"foo3"); must_get_none(&mut getter, b"foo3"); } #[test] fn test_get_latest_value() { let 
engine = TestEngineBuilder::new().build().unwrap(); let (key, val) = (b"foo", b"bar"); must_prewrite_put(&engine, key, val, key, 10); must_commit(&engine, key, 10, 20); let mut getter = new_single_point_getter(&engine, TimeStamp::max()); must_get_value(&mut getter, key, val); // Ignore the primary lock if read with max ts. must_prewrite_delete(&engine, key, key, 30); let mut getter = new_single_point_getter(&engine, TimeStamp::max()); must_get_value(&mut getter, key, val); must_rollback(&engine, key, 30, false); // Should not ignore the secondary lock even though reading the latest version must_prewrite_delete(&engine, key, b"bar", 40); let mut getter = new_single_point_getter(&engine, TimeStamp::max()); must_get_err(&mut getter, key); must_rollback(&engine, key, 40, false); // Should get the latest committed value if there is a primary lock with a ts less than // the latest Write's commit_ts. // // write.start_ts(10) < primary_lock.start_ts(15) < write.commit_ts(20) must_acquire_pessimistic_lock(&engine, key, key, 15, 50); must_pessimistic_prewrite_delete(&engine, key, key, 15, 50, true); let mut getter = new_single_point_getter(&engine, TimeStamp::max()); must_get_value(&mut getter, key, val); } #[test] fn test_get_bypass_locks() { let engine = TestEngineBuilder::new().build().unwrap(); let (key, val) = (b"foo", b"bar"); must_prewrite_put(&engine, key, val, key, 10); must_commit(&engine, key, 10, 20); must_prewrite_delete(&engine, key, key, 30); let snapshot = engine.snapshot(Default::default()).unwrap(); let mut getter = PointGetterBuilder::new(snapshot, 60.into()) .isolation_level(IsolationLevel::Si) .bypass_locks(TsSet::from_u64s(vec![30, 40, 50])) .build() .unwrap(); must_get_value(&mut getter, key, val); let snapshot = engine.snapshot(Default::default()).unwrap(); let mut getter = PointGetterBuilder::new(snapshot, 60.into()) .isolation_level(IsolationLevel::Si) .bypass_locks(TsSet::from_u64s(vec![31, 29])) .build() .unwrap(); must_get_err(&mut getter, key); 
} #[test] fn test_met_newer_ts_data() { let engine = TestEngineBuilder::new().build().unwrap(); let (key, val1) = (b"foo", b"bar1"); must_prewrite_put(&engine, key, val1, key, 10); must_commit(&engine, key, 10, 20); let (key, val2) = (b"foo", b"bar2"); must_prewrite_put(&engine, key, val2, key, 30); must_commit(&engine, key, 30, 40); must_met_newer_ts_data(&engine, 20, key, val1, true); must_met_newer_ts_data(&engine, 30, key, val1, true); must_met_newer_ts_data(&engine, 40, key, val2, false); must_met_newer_ts_data(&engine, 50, key, val2, false); must_prewrite_lock(&engine, key, key, 60); must_met_newer_ts_data(&engine, 50, key, val2, true); must_met_newer_ts_data(&engine, 60, key, val2, true); } #[test] fn test_point_get_check_gc_fence() { let engine = TestEngineBuilder::new().build().unwrap(); // PUT, Read // `--------------^ must_prewrite_put(&engine, b"k1", b"v1", b"k1", 10); must_commit(&engine, b"k1", 10, 20); must_cleanup_with_gc_fence(&engine, b"k1", 20, 0, 50, true); // PUT, Read // `---------^ must_prewrite_put(&engine, b"k2", b"v2", b"k2", 11); must_commit(&engine, b"k2", 11, 20); must_cleanup_with_gc_fence(&engine, b"k2", 20, 0, 40, true); // PUT, Read // `-----^ must_prewrite_put(&engine, b"k3", b"v3", b"k3", 12); must_commit(&engine, b"k3", 12, 20); must_cleanup_with_gc_fence(&engine, b"k3", 20, 0, 30, true); // PUT, PUT, Read // `-----^ `----^ must_prewrite_put(&engine, b"k4", b"v4", b"k4", 13); must_commit(&engine, b"k4", 13, 14); must_prewrite_put(&engine, b"k4", b"v4x", b"k4", 15); must_commit(&engine, b"k4", 15, 20); must_cleanup_with_gc_fence(&engine, b"k4", 14, 0, 20, false); must_cleanup_with_gc_fence(&engine, b"k4", 20, 0, 30, true); // PUT, DEL, Read // `-----^ `----^ must_prewrite_put(&engine, b"k5", b"v5", b"k5", 13); must_commit(&engine, b"k5", 13, 14); must_prewrite_delete(&engine, b"k5", b"v5", 15); must_commit(&engine, b"k5", 15, 20); must_cleanup_with_gc_fence(&engine, b"k5", 14, 0, 20, false); must_cleanup_with_gc_fence(&engine, 
b"k5", 20, 0, 30, true); // PUT, LOCK, LOCK, Read // `------------------------^ must_prewrite_put(&engine, b"k6", b"v6", b"k6", 16); must_commit(&engine, b"k6", 16, 20); must_prewrite_lock(&engine, b"k6", b"k6", 25); must_commit(&engine, b"k6", 25, 26); must_prewrite_lock(&engine, b"k6", b"k6", 28); must_commit(&engine, b"k6", 28, 29); must_cleanup_with_gc_fence(&engine, b"k6", 20, 0, 50, true); // PUT, LOCK, LOCK, Read // `---------^ must_prewrite_put(&engine, b"k7", b"v7", b"k7", 16); must_commit(&engine, b"k7", 16, 20); must_prewrite_lock(&engine, b"k7", b"k7", 25); must_commit(&engine, b"k7", 25, 26); must_cleanup_with_gc_fence(&engine, b"k7", 20, 0, 27, true); must_prewrite_lock(&engine, b"k7", b"k7", 28); must_commit(&engine, b"k7", 28, 29); // PUT, Read // * (GC fence ts is 0) must_prewrite_put(&engine, b"k8", b"v8", b"k8", 17); must_commit(&engine, b"k8", 17, 30); must_cleanup_with_gc_fence(&engine, b"k8", 30, 0, 0, true); // PUT, LOCK, Read // `-----------^ must_prewrite_put(&engine, b"k9", b"v9", b"k9", 18); must_commit(&engine, b"k9", 18, 20); must_prewrite_lock(&engine, b"k9", b"k9", 25); must_commit(&engine, b"k9", 25, 26); must_cleanup_with_gc_fence(&engine, b"k9", 20, 0, 27, true); let expected_results = vec![ (b"k1", Some(b"v1")), (b"k2", None), (b"k3", None), (b"k4", None), (b"k5", None), (b"k6", Some(b"v6")), (b"k7", None), (b"k8", Some(b"v8")), (b"k9", None), ]; for (k, v) in &expected_results { let mut single_getter = new_single_point_getter(&engine, 40.into()); let value = single_getter.get(&Key::from_raw(*k)).unwrap(); assert_eq!(value, v.map(|v| v.to_vec())); } let mut multi_getter = new_multi_point_getter(&engine, 40.into()); for (k, v) in &expected_results { let value = multi_getter.get(&Key::from_raw(*k)).unwrap(); assert_eq!(value, v.map(|v| v.to_vec())); } } }<|fim▁end|>
let mut getter = new_single_point_getter(&engine, 3.into()); must_get_value(&mut getter, b"foo1", b"foo1v"); must_get_none(&mut getter, b"foo2");
<|file_name|>issue-57362-1.rs<|end_file_name|><|fim▁begin|>// Test for issue #57362, ensuring that the self ty is shown in cases of higher-ranked lifetimes // conflicts: the `expected` and `found` trait refs would otherwise be printed the same, leading // to confusing notes such as: // = note: expected type `Trait` // found type `Trait` // from issue #57362 trait Trait { fn f(self); } impl<T> Trait for fn(&T) { fn f(self) { println!("f"); } }<|fim▁hole|> a.f(); //~ ERROR no method named `f` } fn main() {}<|fim▁end|>
fn f() { let a: fn(_) = |_: &u8| {};
<|file_name|>views.py<|end_file_name|><|fim▁begin|>import itertools import sys from flask import abort, g, render_template, request, redirect, Blueprint, flash, url_for, current_app from flask.ext.login import login_required, current_user from realms.lib.util import to_canonical, remove_ext, gravatar_url from .models import PageNotFound blueprint = Blueprint('wiki', __name__) @blueprint.route("/_commit/<sha>/<path:name>") def commit(name, sha): if current_app.config.get('PRIVATE_WIKI') and current_user.is_anonymous(): return current_app.login_manager.unauthorized() cname = to_canonical(name) data = g.current_wiki.get_page(cname, sha=sha) if not data: abort(404) return render_template('wiki/page.html', name=name, page=data, commit=sha) @blueprint.route(r"/_compare/<path:name>/<regex('\w+'):fsha><regex('\.{2,3}'):dots><regex('\w+'):lsha>") def compare(name, fsha, dots, lsha): if current_app.config.get('PRIVATE_WIKI') and current_user.is_anonymous(): return current_app.login_manager.unauthorized() diff = g.current_wiki.compare(name, fsha, lsha) return render_template('wiki/compare.html', name=name, diff=diff, old=fsha, new=lsha) @blueprint.route("/_revert", methods=['POST']) @login_required def revert(): cname = to_canonical(request.form.get('name')) commit = request.form.get('commit') message = request.form.get('message', "Reverting %s" % cname) if not current_app.config.get('ALLOW_ANON') and current_user.is_anonymous(): return dict(error=True, message="Anonymous posting not allowed"), 403 if cname in current_app.config.get('WIKI_LOCKED_PAGES'): return dict(error=True, message="Page is locked"), 403 try: sha = g.current_wiki.revert_page(cname, commit, message=message, username=current_user.username, email=current_user.email) except PageNotFound as e: return dict(error=True, message=e.message), 404 if sha: flash("Page reverted") return dict(sha=sha) @blueprint.route("/_history/<path:name>") def history(name): if current_app.config.get('PRIVATE_WIKI') and 
current_user.is_anonymous(): return current_app.login_manager.unauthorized() hist = g.current_wiki.get_history(name) for item in hist: item['gravatar'] = gravatar_url(item['author_email']) return render_template('wiki/history.html', name=name, history=hist) @blueprint.route("/_edit/<path:name>") @login_required def edit(name): cname = to_canonical(name) page = g.current_wiki.get_page(name) if not page: # Page doesn't exist return redirect(url_for('wiki.create', name=cname)) name = remove_ext(page['path']) g.assets['js'].append('editor.js') return render_template('wiki/edit.html', name=name, content=page.get('data'), info=page.get('info'), sha=page.get('sha'), partials=page.get('partials')) @blueprint.route("/_create/", defaults={'name': None}) @blueprint.route("/_create/<path:name>") @login_required def create(name): cname = to_canonical(name) if name else "" if cname and g.current_wiki.get_page(cname): # Page exists, edit instead return redirect(url_for('wiki.edit', name=cname)) g.assets['js'].append('editor.js') return render_template('wiki/edit.html', name=cname, content="", info={}) def _get_subdir(path, depth): parts = path.split('/', depth) if len(parts) > depth: return parts[-2] def _tree_index(items, path=""): depth = len(path.split("/")) items = filter(lambda x: x['name'].startswith(path), items) items = sorted(items, key=lambda x: x['name']) for subdir, items in itertools.groupby(items, key=lambda x: _get_subdir(x['name'], depth)): if not subdir: for item in items: yield dict(item, dir=False) else: size = 0 ctime = sys.maxint mtime = 0 for item in items: size += item['size'] ctime = min(item['ctime'], ctime) mtime = max(item['mtime'], mtime) yield dict(name=path + subdir + "/", mtime=mtime, ctime=ctime, size=size, dir=True) @blueprint.route("/_index", defaults={"path": ""}) @blueprint.route("/_index/<path:path>") def index(path): if current_app.config.get('PRIVATE_WIKI') and current_user.is_anonymous(): return current_app.login_manager.unauthorized() 
items = g.current_wiki.get_index() if path: path = to_canonical(path) + "/" return render_template('wiki/index.html', index=_tree_index(items, path=path), path=path) @blueprint.route("/<path:name>", methods=['POST', 'PUT', 'DELETE']) @login_required def page_write(name): cname = to_canonical(name) if not cname: return dict(error=True, message="Invalid name") if not current_app.config.get('ALLOW_ANON') and current_user.is_anonymous(): return dict(error=True, message="Anonymous posting not allowed"), 403 if request.method == 'POST': # Create if cname in current_app.config.get('WIKI_LOCKED_PAGES'): return dict(error=True, message="Page is locked"), 403 sha = g.current_wiki.write_page(cname, request.form['content'], message=request.form['message'], create=True, username=current_user.username, email=current_user.email) elif request.method == 'PUT': edit_cname = to_canonical(request.form['name']) if edit_cname in current_app.config.get('WIKI_LOCKED_PAGES'): return dict(error=True, message="Page is locked"), 403 if edit_cname != cname: g.current_wiki.rename_page(cname, edit_cname) sha = g.current_wiki.write_page(edit_cname, request.form['content'], message=request.form['message'], username=current_user.username, email=current_user.email) return dict(sha=sha) elif request.method == 'DELETE': # DELETE if cname in current_app.config.get('WIKI_LOCKED_PAGES'): return dict(error=True, message="Page is locked"), 403 sha = g.current_wiki.delete_page(cname,<|fim▁hole|> email=current_user.email) return dict(sha=sha) @blueprint.route("/", defaults={'name': 'home'}) @blueprint.route("/<path:name>") def page(name): if current_app.config.get('PRIVATE_WIKI') and current_user.is_anonymous(): return current_app.login_manager.unauthorized() cname = to_canonical(name) if cname != name: return redirect(url_for('wiki.page', name=cname)) data = g.current_wiki.get_page(cname) if data: return render_template('wiki/page.html', name=cname, page=data, partials=data.get('partials')) else: return 
redirect(url_for('wiki.create', name=cname))<|fim▁end|>
username=current_user.username,
<|file_name|>__init__.py<|end_file_name|><|fim▁begin|># -*- encoding: utf-8 -*- ############################################################################## # # OpenERP, Open Source Management Solution # This module copyright (C) 2013 Savoir-faire Linux # (<http://www.savoirfairelinux.com>). # # This program is free software: you can redistribute it and/or modify<|fim▁hole|># This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU Affero General Public License for more details. # # You should have received a copy of the GNU Affero General Public License # along with this program. If not, see <http://www.gnu.org/licenses/>. # ############################################################################## import travel_accommodation_import # vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:<|fim▁end|>
# it under the terms of the GNU Affero General Public License as # published by the Free Software Foundation, either version 3 of the # License, or (at your option) any later version. #
<|file_name|>mini-files.js<|end_file_name|><|fim▁begin|>var _ = require("underscore"); var os = require("os"); var path = require("path"); var assert = require("assert"); // All of these functions are attached to files.js for the tool; // they live here because we need them in boot.js as well to avoid duplicating // a lot of the code. // // Note that this file does NOT contain any of the "perform I/O maybe // synchronously" functions from files.js; this is intentional, because we want // to make it very hard to accidentally use fs.*Sync functions in the app server // after bootup (since they block all concurrency!) var files = module.exports; var toPosixPath = function (p, partialPath) { // Sometimes, you can have a path like \Users\IEUser on windows, and this // actually means you want C:\Users\IEUser if (p[0] === "\\" && (! partialPath)) { p = process.env.SystemDrive + p; } p = p.replace(/\\/g, '/'); if (p[1] === ':' && ! partialPath) { // transform "C:/bla/bla" to "/c/bla/bla" p = '/' + p[0] + p.slice(2); } return p; }; var toDosPath = function (p, partialPath) { if (p[0] === '/' && ! partialPath) { if (! /^\/[A-Za-z](\/|$)/.test(p)) throw new Error("Surprising path: " + p); // transform a previously windows path back<|fim▁hole|> // "/C/something" to "c:/something" p = p[1] + ":" + p.slice(2); } p = p.replace(/\//g, '\\'); return p; }; var convertToOSPath = function (standardPath, partialPath) { if (process.platform === "win32") { return toDosPath(standardPath, partialPath); } return standardPath; }; var convertToStandardPath = function (osPath, partialPath) { if (process.platform === "win32") { return toPosixPath(osPath, partialPath); } return osPath; } var convertToOSLineEndings = function (fileContents) { return fileContents.replace(/\n/g, os.EOL); }; var convertToStandardLineEndings = function (fileContents) { // Convert all kinds of end-of-line chars to linuxy "\n". 
return fileContents.replace(new RegExp("\r\n", "g"), "\n") .replace(new RegExp("\r", "g"), "\n"); }; // Return the Unicode Normalization Form of the passed in path string, using // "Normalization Form Canonical Composition" const unicodeNormalizePath = (path) => { return (path) ? path.normalize('NFC') : path; }; // wrappings for path functions that always run as they were on unix (using // forward slashes) var wrapPathFunction = function (name, partialPaths) { var f = path[name]; assert.strictEqual(typeof f, "function"); return function (/* args */) { if (process.platform === 'win32') { var args = _.toArray(arguments); args = _.map(args, function (p, i) { // if partialPaths is turned on (for path.join mostly) // forget about conversion of absolute paths for Windows return toDosPath(p, partialPaths); }); var result = f.apply(path, args); if (typeof result === "string") { result = toPosixPath(result, partialPaths); } return result; } return f.apply(path, arguments); }; }; files.pathJoin = wrapPathFunction("join", true); files.pathNormalize = wrapPathFunction("normalize"); files.pathRelative = wrapPathFunction("relative"); files.pathResolve = wrapPathFunction("resolve"); files.pathDirname = wrapPathFunction("dirname"); files.pathBasename = wrapPathFunction("basename"); files.pathExtname = wrapPathFunction("extname"); // The path.isAbsolute function is implemented in Node v4. files.pathIsAbsolute = wrapPathFunction("isAbsolute"); files.pathSep = '/'; files.pathDelimiter = ':'; files.pathOsDelimiter = path.delimiter; files.convertToStandardPath = convertToStandardPath; files.convertToOSPath = convertToOSPath; files.convertToWindowsPath = toDosPath; files.convertToPosixPath = toPosixPath; files.convertToStandardLineEndings = convertToStandardLineEndings; files.convertToOSLineEndings = convertToOSLineEndings; files.unicodeNormalizePath = unicodeNormalizePath;<|fim▁end|>
<|file_name|>int_arithmetic_tests.rs<|end_file_name|><|fim▁begin|>extern crate libudis86_sys; extern crate regex; extern crate unwindmc; mod analysis_helper; mod gcc_tools; mod source_tester; use source_tester::*; #[test] fn add() { let code = " int add(int a, int b) { return a + b; }"; let expected = " int sub_000000(int arg0, int arg1) { int var0; var0 = arg1; var0 = var0 + arg0; return var0; } "; test_decompiler(code, expected); } #[test] fn subtract() { let code = " int subtract(int a, int b) { return a - b; }"; let expected = " int sub_000000(int arg0, int arg1) { int var0; var0 = arg0; var0 = var0 - arg1; return var0; } "; test_decompiler(code, expected); } #[test] fn negate() { let code = " int negate(int a) { return -a; }"; let expected = " int sub_000000(int arg0) { int var0; var0 = arg0; var0 = -var0; return var0; } "; test_decompiler(code, expected); } #[test] fn multiply() { let code = " int multiply(int a, int b) { return a * b; }"; let expected = " int sub_000000(int arg0, int arg1) { int var0; var0 = arg1; var0 = var0 * arg0; return var0; } "; test_decompiler(code, expected); } #[test] fn divide() { let code = " int divide(int a, int b) { return a / b; }"; let expected = " int sub_000000(int arg0, int arg1) { int var0; var0 = arg0; var0 = var0 / arg1; return var0; } "; test_decompiler(code, expected); } #[ignore = "TODO: support multiple-output instructions"] #[test] fn modulo() { let code = " int divide(int a, int b) { return a % b; }"; let expected = " int sub_000000(int arg0, int arg1) { int var0; var0 = arg0; var0 = var0 % arg1; return var0; } "; test_decompiler(code, expected); } #[test] fn not() { let code = " int _not(int a) { return ~a; }"; let expected = " int sub_000000(int arg0) { int var0; var0 = arg0; var0 = ~var0; return var0; } "; test_decompiler(code, expected); } #[test] fn and() { let code = " int _and(int a, int b) { return a & b; }"; let expected = " int sub_000000(int arg0, int arg1) { int var0; var0 = arg1; var0 = var0 & arg0; 
return var0; } "; test_decompiler(code, expected); } #[test] fn or() { let code = " int _or(int a, int b) { return a | b; }"; let expected = " int sub_000000(int arg0, int arg1) { int var0; var0 = arg1; var0 = var0 | arg0; return var0; } "; test_decompiler(code, expected); } #[test] fn xor() { let code = " int _xor(int a, int b) {<|fim▁hole|> { int var0; var0 = arg1; var0 = var0 ^ arg0; return var0; } "; test_decompiler(code, expected); } #[ignore = "TODO: support word registers"] #[test] fn left_shift() { let code = " int xor(int a, int b) { return a << b; }"; let expected = " int sub_000000(int arg0, int arg1) { int var0; var0 = arg0; var0 = var0 << arg1; return var0; } "; test_decompiler(code, expected); } #[ignore = "TODO: support word registers"] #[test] fn right_shift() { let code = " int xor(int a, int b) { return a >> b; }"; let expected = " int sub_000000(int arg0, int arg1) { int var0; var0 = arg0; var0 = var0 >> arg1; return var0; } "; test_decompiler(code, expected); }<|fim▁end|>
return a ^ b; }"; let expected = " int sub_000000(int arg0, int arg1)
<|file_name|>Courses.js<|end_file_name|><|fim▁begin|>"use strict"; import React from 'react' import Course from '../partials/Course.js' import Modal from '../partials/Modal.js' import MetricModal from '../partials/MetricModal.js' export default class Courses extends React.Component { constructor(props) { super(props); this.state = { course: props.course, sections: props.course.sections, showModal: false, showMetricModal: false, modalInfo: { modalType: "ADD_QUIZ", title: "Add Quiz" } }; } componentDidMount() { this.getCoursesAndSections(this.state.course.id); } componentWillReceiveProps(newProps) { if(newProps.course != undefined) { this.getCoursesAndSections(newProps.course.id); } } getCoursesAndSections(courseId) { if(courseId == -1) return; var me = this; $.when( $.post("/course/find", {id: courseId}), $.post("/section/find", {course: courseId}) ).then(function(course, sections) { console.log("course", course[0]); console.log("sections", sections[0]); if(course == undefined) return; // if there are no courses, then there are no sections me.setState({ course: course[0], sections: sections[0] }); }); } closeModal() { this.setState({ showModal: false, showMetricModal: false }); } showMetricModal(quiz) { console.log("showMetricModal!", quiz); var modalInfo = this.state.modalInfo; modalInfo.title = quiz.title; this.setState({ showModal: false, showMetricModal: true, modalInfo: modalInfo }); } showCourseModal() { var modalInfo = this.state.modalInfo; modalInfo.modalType = "ADD_COURSE"; modalInfo.title = "Add Course or Section"; this.setState({ showModal: true, showMetricModal: false, modalInfo: modalInfo }); } showQuizModal(quizIndex) { var modalInfo = this.state.modalInfo; modalInfo.title = "Add Quiz"; modalInfo.modalType = "ADD_QUIZ"; modalInfo.quizIndex = quizIndex; this.setState({ showModal: true, showMetricModal: false, modalInfo: modalInfo<|fim▁hole|> console.log("showQuizInModal::quizIndex", quizIndex); this.showQuizModal(quizIndex); } showStudentsModal(section) 
{ var modalInfo = this.state.modalInfo; modalInfo.modalType = "ADD_STUDENTS"; modalInfo.title = "Add Students"; modalInfo.section = section; this.setState({ showModal: true, showMetricModal: false, modalInfo: modalInfo }); } addQuizToCourse(quiz, quizIndex) { console.log("Adding quiz '" + quiz.title + "' in course " + this.props.course.title); var me = this; if(quizIndex > -1) { $.post('/quiz/update/' + quiz.id, { title: quiz.title }) .then(function(quiz) { console.log(quiz); var course = me.state.course; course.quizzes[quizIndex] = quiz; me.setState({course: course}); me.closeModal(); }); } else { $.post('/quiz/create/', { title: quiz.title, course: me.props.course.id } ) .then(function(quiz) { console.log(quiz); var course = me.state.course; course.quizzes.push(quiz); me.setState({course: course}); me.closeModal(); }); } } addSectionToCourse(section) { var me = this; if(section.title == '') { return; } $.post('/section/create/', { title: section.title, course: me.state.course.id }) .then(function(section) { console.log("created section", section); var sections = me.state.sections; sections.push(section); me.setState({sections: sections}); me.closeModal(); }); } addCourseToProfessor(course, term) { var me = this; this.props.addCourseToProfessor(course, term) .then(function(newCourse) { me.setState({course: newCourse}); me.closeModal(); }); } addStudentsToSection(sectionId, studentIds) { var me = this; this.props.addStudentsToSection(sectionId, studentIds) .then(function() { me.closeModal(); }); } deleteSectionFromCourse(sectionIndex) { var me = this; var sections = me.state.sections; if(sections[sectionIndex] == undefined) return $.when(null); $.post('/section/destroy/' + sections[sectionIndex].id) .then(function(section) { console.log("section", section); sections.splice(sectionIndex, 1); me.setState({sections: sections}); me.closeModal(); }); } deleteQuizFromCourse(quizIndex) { var me = this; var quizzes = this.state.course.quizzes; $.post('/quiz/find/' + 
quizzes[quizIndex].id) .then(function(quiz) { return $.post('/quiz/destroy/' + quizzes[quizIndex].id); }) // .then(function(quiz) { // if(quiz.questions.length == 0) return $.when(null); // var questionIds = quiz.questions.map(function(question){return question.id;}); // return $.post('/question/multidestroy', {ids: questionIds}); // }) .then(function() { quizzes.splice(quizIndex, 1); var course = me.state.course; course.quizzes = quizzes; me.setState({course: course}); me.closeModal(); }); } deleteCourseFromProfessor(course) { var me = this; this.props.deleteCourseFromProfessor(course) .then(function() { var course = { id: -1, title: "FAKE 101", quizzes: [], sections: [] }; me.setState({course: course}); }); } render() { return ( <div> <div id="courses" className="quizzlyContent"> {(() => { if(this.state.course.id > -1) { return ( <Course course={this.state.course} isCourse={true} ref={'course'} showQuizModal={this.showQuizModal.bind(this)} showQuizInModal={this.showQuizInModal.bind(this)} showMetricModal={this.showMetricModal.bind(this)} deleteQuizFromCourse={this.deleteQuizFromCourse.bind(this)} sectionIndex={-1} deleteCourseFromProfessor={this.deleteCourseFromProfessor.bind(this)} deleteSectionFromCourse={this.deleteSectionFromCourse.bind(this)} /> ); } })()} {this.state.sections.map(function(section, sectionIndex) { // this is section, not course! 
return ( <Course section={section} sectionIndex={sectionIndex} course={this.state.course} isCourse={false} key={sectionIndex} showQuizInModal={this.showQuizInModal.bind(this)} showMetricModal={this.showMetricModal.bind(this)} showStudentsModal={this.showStudentsModal.bind(this)} deleteSectionFromCourse={this.deleteSectionFromCourse.bind(this)} /> ); }, this)} <div className="addEntityButton" onClick={this.showCourseModal.bind(this)}>+</div> </div> {(() => { if(this.state.showModal) return ( <Modal modalInfo={this.state.modalInfo} showModal={this.state.showModal} course={this.state.course} quizzes={this.state.course.quizzes} key={this.state.showModal} closeModal={this.closeModal.bind(this)} addQuizToCourse={this.addQuizToCourse.bind(this)} addCourseToProfessor={this.addCourseToProfessor.bind(this)} addSectionToCourse={this.addSectionToCourse.bind(this)} addStudentsToSection={this.addStudentsToSection.bind(this)} /> ); })()} {(() => { if(this.state.showMetricModal) return ( <MetricModal modalInfo={this.state.modalInfo} showMetricModal={this.state.showMetricModal} key={this.state.showMetricModal} closeModal={this.closeModal.bind(this)} /> ); })()} </div> ); } }<|fim▁end|>
}); } showQuizInModal(quizIndex) {
<|file_name|>lib.rs<|end_file_name|><|fim▁begin|>/* * Copyright (c) Meta Platforms, Inc. and affiliates. * * This source code is licensed under the MIT license found in the * LICENSE file in the root directory of this source tree. */ //! This crate contains a GraphQL schema representation. #![deny(warnings)] #![deny(rust_2018_idioms)] #![deny(clippy::all)] pub mod definitions; mod errors; mod flatbuffer; mod graphql_schema; mod in_memory; mod schema; pub use crate::schema::SDLSchema; use common::{DiagnosticsResult, SourceLocationKey}; pub use definitions::{ Argument, ArgumentDefinitions, ArgumentValue, Directive, DirectiveValue, Enum, EnumID, EnumValue, Field, FieldID, InputObject, InputObjectID, Interface, InterfaceID, Object, ObjectID, Scalar, ScalarID, Type, TypeReference, TypeWithFields, Union, UnionID, }; pub use errors::{Result, SchemaError}; use flatbuffer::FlatBufferSchema; pub use flatbuffer::SchemaWrapper; pub use graphql_schema::Schema; use graphql_syntax::SchemaDocument; pub use graphql_syntax::{DirectiveLocation, TypeSystemDefinition}; pub use in_memory::InMemorySchema; const BUILTINS: &str = include_str!("./builtins.graphql"); pub use flatbuffer::serialize_as_flatbuffer;<|fim▁hole|> pub fn build_schema(sdl: &str) -> DiagnosticsResult<SDLSchema> { build_schema_with_extensions::<_, &str>(&[sdl], &[]) } pub fn build_schema_with_extensions<T: AsRef<str>, U: AsRef<str>>( server_sdls: &[T], extension_sdls: &[(U, SourceLocationKey)], ) -> DiagnosticsResult<SDLSchema> { let mut server_documents = vec![builtins()?]; let mut combined_sdl: String = String::new(); for server_sdl in server_sdls { combined_sdl.push_str(server_sdl.as_ref()); combined_sdl.push('\n'); } server_documents.push(graphql_syntax::parse_schema_document( &combined_sdl, SourceLocationKey::generated(), )?); let mut client_schema_documents = Vec::new(); for (extension_sdl, location_key) in extension_sdls { client_schema_documents.push(graphql_syntax::parse_schema_document( 
extension_sdl.as_ref(), *location_key, )?); } SDLSchema::build(&server_documents, &client_schema_documents) } pub fn build_schema_with_flat_buffer(bytes: Vec<u8>) -> SDLSchema { SDLSchema::FlatBuffer(SchemaWrapper::from_vec(bytes)) } pub fn build_schema_from_flat_buffer(bytes: &[u8]) -> DiagnosticsResult<FlatBufferSchema<'_>> { Ok(FlatBufferSchema::build(bytes)) } pub fn builtins() -> DiagnosticsResult<SchemaDocument> { graphql_syntax::parse_schema_document(BUILTINS, SourceLocationKey::generated()) }<|fim▁end|>
<|file_name|>index.js<|end_file_name|><|fim▁begin|>'use strict'; var url = require('url'); var zlib = require('zlib'); var _ = require('./helpers'); module.exports = function(options, callback){ var callbackDone = false, httpProtocol = options.url.indexOf('https') === 0 ? 'https' : 'http', requestData = url.parse(options.url), method = (options.method || 'get').toLowerCase(), isJson = options.json || false, headers = options.headers || {}, isPost = method === 'post', postBody = isPost ? JSON.stringify(options.body) : null, contentLength = !!postBody ? Buffer.byteLength(postBody) : null, timeout = options.timeout || 5, setHeader = function(v, k){ requestData.headers[k] = v; }; var respond = function(body, details){ body = body.toString('utf-8'); var error = details.response.statusCode !== 200 ? details.response.statusCode : null, response; if(isJson){ try { callback(error, JSON.parse(body), details); } catch(e){ return callback('json parsing error', null, details); }<|fim▁hole|> } }; requestData.headers = {}; requestData.method = method; _.each(headers, setHeader); setHeader('gzip', 'accept-encoding'); if(isPost){ setHeader(contentLength, 'content-length'); setHeader('application/json', 'content-type'); } var req = require(httpProtocol).request(requestData).on('response', function(response) { var body = []; var details = { response: { headers: response.headers, statusCode: response.statusCode } }; response.on('data', function(chunk){ body.push(chunk); }).on('end', function(){ body = Buffer.concat(body); if(!callbackDone){ callbackDone = true; if(response.headers['content-encoding'] === 'gzip'){ zlib.gunzip(body, function(err, dezipped) { if(!!err){ return callback(err); } respond(dezipped, details); }); } else { respond(body, details); } } }); }).on('error', function(e){ if(!callbackDone){ callbackDone = true; callback(e); } }); req.setTimeout(1000 * timeout, function(){ if(!callbackDone){ callbackDone = true; callback('timeout'); } }); if(isPost){ 
req.write(postBody); } req.end(); };<|fim▁end|>
} else { callback(error, body, details);
<|file_name|>thrift_hql.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python import sys<|fim▁hole|>import time from hypertable.thriftclient import * from hyperthrift.gen.ttypes import * if (len(sys.argv) < 2): print sys.argv[0], "<hql>" sys.exit(1); try: client = ThriftClient("localhost", 15867) namespace = client.open_namespace("/") res = client.hql_query(namespace, sys.argv[1]); print res client.close_namespace(namespace) except ClientException, e: print '%s' % (e.message)<|fim▁end|>
<|file_name|>rpcdump.cpp<|end_file_name|><|fim▁begin|>// Copyright (c) 2009-2012 Bitcoin Developers // Distributed under the MIT/X11 software license, see the accompanying // file COPYING or http://www.opensource.org/licenses/mit-license.php. #include "init.h" // for pwalletMain #include "bitcoinrpc.h" #include "ui_interface.h" #include "base58.h" #include <boost/lexical_cast.hpp> #define printf OutputDebugStringF using namespace json_spirit; using namespace std; class CTxDump { public: CBlockIndex *pindex; int64 nValue; bool fSpent; CWalletTx* ptx; int nOut; CTxDump(CWalletTx* ptx = NULL, int nOut = -1) { pindex = NULL; nValue = 0; fSpent = false; this->ptx = ptx; this->nOut = nOut;<|fim▁hole|>Value importprivkey(const Array& params, bool fHelp) { if (fHelp || params.size() < 1 || params.size() > 3) throw runtime_error( "importprivkey <Volumeprivkey> [label] [rescan=true]\n" "Adds a private key (as returned by dumpprivkey) to your wallet."); string strSecret = params[0].get_str(); string strLabel = ""; if (params.size() > 1) strLabel = params[1].get_str(); // Whether to perform rescan after import bool fRescan = true; if (params.size() > 2) fRescan = params[2].get_bool(); CBitcoinSecret vchSecret; bool fGood = vchSecret.SetString(strSecret); if (!fGood) throw JSONRPCError(RPC_INVALID_ADDRESS_OR_KEY, "Invalid private key"); CKey key = vchSecret.GetKey(); CPubKey pubkey = key.GetPubKey(); CKeyID vchAddress = pubkey.GetID(); { LOCK2(cs_main, pwalletMain->cs_wallet); pwalletMain->MarkDirty(); pwalletMain->SetAddressBookName(vchAddress, strLabel); if (!pwalletMain->AddKeyPubKey(key, pubkey)) throw JSONRPCError(RPC_WALLET_ERROR, "Error adding key to wallet"); if (fRescan) { pwalletMain->ScanForWalletTransactions(pindexGenesisBlock, true); pwalletMain->ReacceptWalletTransactions(); } } return Value::null; } Value dumpprivkey(const Array& params, bool fHelp) { if (fHelp || params.size() != 1) throw runtime_error( "dumpprivkey <Volumeaddress>\n" "Reveals the private key 
corresponding to <Volumeaddress>."); string strAddress = params[0].get_str(); CBitcoinAddress address; if (!address.SetString(strAddress)) throw JSONRPCError(RPC_INVALID_ADDRESS_OR_KEY, "Invalid Volume address"); CKeyID keyID; if (!address.GetKeyID(keyID)) throw JSONRPCError(RPC_TYPE_ERROR, "Address does not refer to a key"); CKey vchSecret; if (!pwalletMain->GetKey(keyID, vchSecret)) throw JSONRPCError(RPC_WALLET_ERROR, "Private key for address " + strAddress + " is not known"); return CBitcoinSecret(vchSecret).ToString(); }<|fim▁end|>
} };
<|file_name|>benchmarking_canvas.cc<|end_file_name|><|fim▁begin|>// Copyright (c) 2013 The Chromium Authors. All rights reserved. // Use of this source code is governed by a BSD-style license that can be // found in the LICENSE file. #include "skia/ext/benchmarking_canvas.h" #include <memory> #include <sstream> #include <utility> #include "base/check_op.h" #include "base/memory/ptr_util.h" #include "base/time/time.h" #include "third_party/skia/include/core/SkBitmap.h" #include "third_party/skia/include/core/SkColorFilter.h" #include "third_party/skia/include/core/SkImage.h" #include "third_party/skia/include/core/SkImageFilter.h" #include "third_party/skia/include/core/SkPaint.h" #include "third_party/skia/include/core/SkPath.h" #include "third_party/skia/include/core/SkPicture.h" #include "third_party/skia/include/core/SkRRect.h" #include "third_party/skia/include/core/SkRegion.h" #include "third_party/skia/include/core/SkString.h" #include "third_party/skia/include/core/SkTextBlob.h" namespace { class FlagsBuilder { public: FlagsBuilder(char separator) : separator_(separator) {} void addFlag(bool flag_val, const char flag_name[]) { if (!flag_val) return; if (!oss_.str().empty()) oss_ << separator_; oss_ << flag_name; } std::string str() const { return oss_.str(); } private: char separator_; std::ostringstream oss_; }; std::unique_ptr<base::Value> AsValue(bool b) { std::unique_ptr<base::Value> val(new base::Value(b)); return val; } std::unique_ptr<base::Value> AsValue(SkScalar scalar) { std::unique_ptr<base::Value> val(new base::Value(scalar)); return val; } std::unique_ptr<base::Value> AsValue(const SkSize& size) { std::unique_ptr<base::DictionaryValue> val(new base::DictionaryValue()); val->SetKey("width", base::Value::FromUniquePtrValue(AsValue(size.width()))); val->SetKey("height", base::Value::FromUniquePtrValue(AsValue(size.height()))); return std::move(val); } std::unique_ptr<base::Value> AsValue(const SkPoint& point) { 
std::unique_ptr<base::DictionaryValue> val(new base::DictionaryValue()); val->SetKey("x", base::Value::FromUniquePtrValue(AsValue(point.x()))); val->SetKey("y", base::Value::FromUniquePtrValue(AsValue(point.y()))); return std::move(val); } std::unique_ptr<base::Value> AsValue(const SkRect& rect) { std::unique_ptr<base::DictionaryValue> val(new base::DictionaryValue()); val->SetKey("left", base::Value::FromUniquePtrValue(AsValue(rect.fLeft))); val->SetKey("top", base::Value::FromUniquePtrValue(AsValue(rect.fTop))); val->SetKey("right", base::Value::FromUniquePtrValue(AsValue(rect.fRight))); val->SetKey("bottom", base::Value::FromUniquePtrValue(AsValue(rect.fBottom))); return std::move(val); } std::unique_ptr<base::Value> AsValue(const SkRRect& rrect) { base::Value radii_val(base::Value::Type::DICTIONARY); radii_val.SetKey("upper-left", base::Value::FromUniquePtrValue(AsValue( rrect.radii(SkRRect::kUpperLeft_Corner)))); radii_val.SetKey("upper-right", base::Value::FromUniquePtrValue( AsValue(rrect.radii(SkRRect::kUpperRight_Corner)))); radii_val.SetKey("lower-right", base::Value::FromUniquePtrValue( AsValue(rrect.radii(SkRRect::kLowerRight_Corner)))); radii_val.SetKey("lower-left", base::Value::FromUniquePtrValue(AsValue( rrect.radii(SkRRect::kLowerLeft_Corner)))); std::unique_ptr<base::DictionaryValue> val(new base::DictionaryValue()); val->SetKey("rect", base::Value::FromUniquePtrValue(AsValue(rrect.rect()))); val->SetKey("radii", std::move(radii_val)); return std::move(val); } std::unique_ptr<base::Value> AsValue(const SkMatrix& matrix) { std::unique_ptr<base::ListValue> val(new base::ListValue()); for (int i = 0; i < 9; ++i) val->Append(AsValue(matrix[i])); return std::move(val); } std::unique_ptr<base::Value> AsValue(SkColor color) { std::unique_ptr<base::DictionaryValue> val(new base::DictionaryValue()); val->SetInteger("a", SkColorGetA(color)); val->SetInteger("r", SkColorGetR(color)); val->SetInteger("g", SkColorGetG(color)); val->SetInteger("b", 
SkColorGetB(color)); return std::move(val); } std::unique_ptr<base::Value> AsValue(SkBlendMode mode) { std::unique_ptr<base::Value> val(new base::Value(SkBlendMode_Name(mode))); return val; } std::unique_ptr<base::Value> AsValue(SkCanvas::PointMode mode) { static const char* gModeStrings[] = { "Points", "Lines", "Polygon" }; DCHECK_LT(static_cast<size_t>(mode), SK_ARRAY_COUNT(gModeStrings)); std::unique_ptr<base::Value> val(new base::Value(gModeStrings[mode])); return val; } std::unique_ptr<base::Value> AsValue(const SkColorFilter& filter) { std::unique_ptr<base::DictionaryValue> val(new base::DictionaryValue()); if (filter.isAlphaUnchanged()) { FlagsBuilder builder('|'); builder.addFlag(true, "kAlphaUnchanged_Flag"); val->SetString("flags", builder.str()); } SkScalar color_matrix[20]; if (filter.asAColorMatrix(color_matrix)) { std::unique_ptr<base::ListValue> color_matrix_val(new base::ListValue()); for (unsigned i = 0; i < 20; ++i) color_matrix_val->Append(AsValue(color_matrix[i])); val->SetKey("color_matrix", base::Value::FromUniquePtrValue(std::move(color_matrix_val))); } return std::move(val); } std::unique_ptr<base::Value> AsValue(const SkImageFilter& filter) { std::unique_ptr<base::DictionaryValue> val(new base::DictionaryValue()); val->SetInteger("inputs", filter.countInputs()); SkColorFilter* color_filter; if (filter.asColorFilter(&color_filter)) { val->SetKey("color_filter", base::Value::FromUniquePtrValue(AsValue(*color_filter))); SkSafeUnref(color_filter); // ref'd in asColorFilter } return std::move(val); } std::unique_ptr<base::Value> AsValue(const SkPaint& paint) { std::unique_ptr<base::DictionaryValue> val(new base::DictionaryValue()); SkPaint default_paint; if (paint.getColor() != default_paint.getColor()) val->SetKey("Color", base::Value::FromUniquePtrValue(AsValue(paint.getColor()))); if (paint.getStyle() != default_paint.getStyle()) { static const char* gStyleStrings[] = { "Fill", "Stroke", "StrokeFill" }; 
DCHECK_LT(static_cast<size_t>(paint.getStyle()), SK_ARRAY_COUNT(gStyleStrings)); val->SetString("Style", gStyleStrings[paint.getStyle()]); } if (paint.asBlendMode() != default_paint.asBlendMode()) { val->SetKey("Xfermode", base::Value::FromUniquePtrValue(AsValue( paint.getBlendMode_or(SkBlendMode::kSrcOver)))); } if (paint.isAntiAlias() || paint.isDither()) { FlagsBuilder builder('|'); builder.addFlag(paint.isAntiAlias(), "AntiAlias"); builder.addFlag(paint.isDither(), "Dither"); val->SetString("Flags", builder.str()); } if (paint.getColorFilter()) val->SetKey("ColorFilter", base::Value::FromUniquePtrValue( AsValue(*paint.getColorFilter()))); if (paint.getImageFilter()) val->SetKey("ImageFilter", base::Value::FromUniquePtrValue( AsValue(*paint.getImageFilter()))); return std::move(val); } std::unique_ptr<base::Value> SaveLayerFlagsAsValue( SkCanvas::SaveLayerFlags flags) { std::unique_ptr<base::Value> val(new base::Value(static_cast<int>(flags))); return val; } std::unique_ptr<base::Value> AsValue(SkClipOp op) { static const char* gOpStrings[] = { "Difference", "Intersect", "Union", "XOR", "ReverseDifference", "Replace" }; size_t index = static_cast<size_t>(op); DCHECK_LT(index, SK_ARRAY_COUNT(gOpStrings)); std::unique_ptr<base::Value> val(new base::Value(gOpStrings[index])); return val; } std::unique_ptr<base::Value> AsValue(const SkRegion& region) { std::unique_ptr<base::DictionaryValue> val(new base::DictionaryValue()); val->SetKey("bounds", base::Value::FromUniquePtrValue( AsValue(SkRect::Make(region.getBounds())))); return std::move(val); } std::unique_ptr<base::Value> AsValue(const SkImage& image) { std::unique_ptr<base::DictionaryValue> val(new base::DictionaryValue()); val->SetKey("size", base::Value::FromUniquePtrValue(AsValue( SkSize::Make(image.width(), image.height())))); return std::move(val); } std::unique_ptr<base::Value> AsValue(const SkTextBlob& blob) { std::unique_ptr<base::DictionaryValue> val(new base::DictionaryValue()); val->SetKey("bounds", 
base::Value::FromUniquePtrValue(AsValue(blob.bounds()))); return std::move(val); } std::unique_ptr<base::Value> AsValue(const SkPath& path) { std::unique_ptr<base::DictionaryValue> val(new base::DictionaryValue()); static const char* gFillStrings[] = { "winding", "even-odd", "inverse-winding", "inverse-even-odd" }; size_t index = static_cast<size_t>(path.getFillType()); DCHECK_LT(index, SK_ARRAY_COUNT(gFillStrings)); val->SetString("fill-type", gFillStrings[index]); val->SetBoolean("convex", path.isConvex()); val->SetBoolean("is-rect", path.isRect(nullptr)); val->SetKey("bounds", base::Value::FromUniquePtrValue(AsValue(path.getBounds()))); static const char* gVerbStrings[] = { "move", "line", "quad", "conic", "cubic", "close", "done" }; static const int gPtsPerVerb[] = { 1, 1, 2, 2, 3, 0, 0 }; static const int gPtOffsetPerVerb[] = { 0, 1, 1, 1, 1, 0, 0 }; static_assert( SK_ARRAY_COUNT(gVerbStrings) == static_cast<size_t>(SkPath::kDone_Verb + 1), "gVerbStrings size mismatch"); static_assert( SK_ARRAY_COUNT(gVerbStrings) == SK_ARRAY_COUNT(gPtsPerVerb), "gPtsPerVerb size mismatch"); static_assert( SK_ARRAY_COUNT(gVerbStrings) == SK_ARRAY_COUNT(gPtOffsetPerVerb), "gPtOffsetPerVerb size mismatch"); base::Value verbs_val(base::Value::Type::LIST); SkPath::RawIter iter(const_cast<SkPath&>(path)); SkPoint points[4]; for (SkPath::Verb verb = iter.next(points); verb != SkPath::kDone_Verb; verb = iter.next(points)) { DCHECK_LT(static_cast<size_t>(verb), SK_ARRAY_COUNT(gVerbStrings)); base::Value verb_val(base::Value::Type::DICTIONARY); base::Value pts_val(base::Value::Type::LIST); for (int i = 0; i < gPtsPerVerb[verb]; ++i) pts_val.Append(base::Value::FromUniquePtrValue( AsValue(points[i + gPtOffsetPerVerb[verb]]))); verb_val.SetKey(gVerbStrings[verb], std::move(pts_val)); if (SkPath::kConic_Verb == verb) verb_val.SetKey("weight", base::Value::FromUniquePtrValue( AsValue(iter.conicWeight()))); verbs_val.Append(std::move(verb_val)); } val->SetKey("verbs", std::move(verbs_val)); 
return std::move(val); } template <typename T> std::unique_ptr<base::Value> AsListValue(const T array[], size_t count) {<|fim▁hole|> std::unique_ptr<base::ListValue> val(new base::ListValue()); for (size_t i = 0; i < count; ++i) val->Append(AsValue(array[i])); return std::move(val); } } // namespace namespace skia { class BenchmarkingCanvas::AutoOp { public: // AutoOp objects are always scoped within draw call frames, // so the paint is guaranteed to be valid for their lifetime. AutoOp(BenchmarkingCanvas* canvas, const char op_name[], const SkPaint* paint = nullptr) : canvas_(canvas), op_record_(new base::DictionaryValue()) { DCHECK(canvas); DCHECK(op_name); op_record_->SetString("cmd_string", op_name); op_params_ = op_record_->SetList("info", std::make_unique<base::ListValue>()); if (paint) { this->addParam("paint", AsValue(*paint)); filtered_paint_ = *paint; } start_ticks_ = base::TimeTicks::Now(); } ~AutoOp() { base::TimeDelta ticks = base::TimeTicks::Now() - start_ticks_; op_record_->SetDouble("cmd_time", ticks.InMillisecondsF()); canvas_->op_records_.Append(std::move(op_record_)); } void addParam(const char name[], std::unique_ptr<base::Value> value) { std::unique_ptr<base::DictionaryValue> param(new base::DictionaryValue()); param->SetKey(name, base::Value::FromUniquePtrValue(std::move(value))); op_params_->Append(std::move(param)); } const SkPaint* paint() const { return &filtered_paint_; } private: BenchmarkingCanvas* canvas_; std::unique_ptr<base::DictionaryValue> op_record_; base::ListValue* op_params_; base::TimeTicks start_ticks_; SkPaint filtered_paint_; }; BenchmarkingCanvas::BenchmarkingCanvas(SkCanvas* canvas) : INHERITED(canvas->imageInfo().width(), canvas->imageInfo().height()) { addCanvas(canvas); } BenchmarkingCanvas::~BenchmarkingCanvas() = default; size_t BenchmarkingCanvas::CommandCount() const { return op_records_.GetList().size(); } const base::ListValue& BenchmarkingCanvas::Commands() const { return op_records_; } double 
BenchmarkingCanvas::GetTime(size_t index) { const base::Value& op = op_records_.GetList()[index]; if (!op.is_dict()) return 0; return op.FindDoubleKey("cmd_time").value_or(0); } void BenchmarkingCanvas::willSave() { AutoOp op(this, "Save"); INHERITED::willSave(); } SkCanvas::SaveLayerStrategy BenchmarkingCanvas::getSaveLayerStrategy( const SaveLayerRec& rec) { AutoOp op(this, "SaveLayer", rec.fPaint); if (rec.fBounds) op.addParam("bounds", AsValue(*rec.fBounds)); if (rec.fSaveLayerFlags) op.addParam("flags", SaveLayerFlagsAsValue(rec.fSaveLayerFlags)); return INHERITED::getSaveLayerStrategy(rec); } void BenchmarkingCanvas::willRestore() { AutoOp op(this, "Restore"); INHERITED::willRestore(); } void BenchmarkingCanvas::didConcat44(const SkM44& m) { SkScalar values[16]; m.getColMajor(values); AutoOp op(this, "Concat"); op.addParam("matrix", AsListValue(values, 16)); INHERITED::didConcat44(m); } void BenchmarkingCanvas::didScale(SkScalar x, SkScalar y) { AutoOp op(this, "Scale"); op.addParam("scale-x", AsValue(x)); op.addParam("scale-y", AsValue(y)); INHERITED::didScale(x, y); } void BenchmarkingCanvas::didTranslate(SkScalar x, SkScalar y) { AutoOp op(this, "Translate"); op.addParam("translate-x", AsValue(x)); op.addParam("translate-y", AsValue(y)); INHERITED::didTranslate(x, y); } void BenchmarkingCanvas::didSetM44(const SkM44& m) { SkScalar values[16]; m.getColMajor(values); AutoOp op(this, "SetMatrix"); op.addParam("matrix", AsListValue(values, 16)); INHERITED::didSetM44(m); } void BenchmarkingCanvas::onClipRect(const SkRect& rect, SkClipOp region_op, SkCanvas::ClipEdgeStyle style) { AutoOp op(this, "ClipRect"); op.addParam("rect", AsValue(rect)); op.addParam("op", AsValue(region_op)); op.addParam("anti-alias", AsValue(style == kSoft_ClipEdgeStyle)); INHERITED::onClipRect(rect, region_op, style); } void BenchmarkingCanvas::onClipRRect(const SkRRect& rrect, SkClipOp region_op, SkCanvas::ClipEdgeStyle style) { AutoOp op(this, "ClipRRect"); op.addParam("rrect", 
AsValue(rrect)); op.addParam("op", AsValue(region_op)); op.addParam("anti-alias", AsValue(style == kSoft_ClipEdgeStyle)); INHERITED::onClipRRect(rrect, region_op, style); } void BenchmarkingCanvas::onClipPath(const SkPath& path, SkClipOp region_op, SkCanvas::ClipEdgeStyle style) { AutoOp op(this, "ClipPath"); op.addParam("path", AsValue(path)); op.addParam("op", AsValue(region_op)); op.addParam("anti-alias", AsValue(style == kSoft_ClipEdgeStyle)); INHERITED::onClipPath(path, region_op, style); } void BenchmarkingCanvas::onClipRegion(const SkRegion& region, SkClipOp region_op) { AutoOp op(this, "ClipRegion"); op.addParam("region", AsValue(region)); op.addParam("op", AsValue(region_op)); INHERITED::onClipRegion(region, region_op); } void BenchmarkingCanvas::onDrawPaint(const SkPaint& paint) { AutoOp op(this, "DrawPaint", &paint); INHERITED::onDrawPaint(*op.paint()); } void BenchmarkingCanvas::onDrawPoints(PointMode mode, size_t count, const SkPoint pts[], const SkPaint& paint) { AutoOp op(this, "DrawPoints", &paint); op.addParam("mode", AsValue(mode)); op.addParam("points", AsListValue(pts, count)); INHERITED::onDrawPoints(mode, count, pts, *op.paint()); } void BenchmarkingCanvas::onDrawRect(const SkRect& rect, const SkPaint& paint) { AutoOp op(this, "DrawRect", &paint); op.addParam("rect", AsValue(rect)); INHERITED::onDrawRect(rect, *op.paint()); } void BenchmarkingCanvas::onDrawOval(const SkRect& rect, const SkPaint& paint) { AutoOp op(this, "DrawOval", &paint); op.addParam("rect", AsValue(rect)); INHERITED::onDrawOval(rect, *op.paint()); } void BenchmarkingCanvas::onDrawRRect(const SkRRect& rrect, const SkPaint& paint) { AutoOp op(this, "DrawRRect", &paint); op.addParam("rrect", AsValue(rrect)); INHERITED::onDrawRRect(rrect, *op.paint()); } void BenchmarkingCanvas::onDrawDRRect(const SkRRect& outer, const SkRRect& inner, const SkPaint& paint) { AutoOp op(this, "DrawDRRect", &paint); op.addParam("outer", AsValue(outer)); op.addParam("inner", AsValue(inner)); 
INHERITED::onDrawDRRect(outer, inner, *op.paint()); } void BenchmarkingCanvas::onDrawPath(const SkPath& path, const SkPaint& paint) { AutoOp op(this, "DrawPath", &paint); op.addParam("path", AsValue(path)); INHERITED::onDrawPath(path, *op.paint()); } void BenchmarkingCanvas::onDrawPicture(const SkPicture* picture, const SkMatrix* matrix, const SkPaint* paint) { DCHECK(picture); AutoOp op(this, "DrawPicture", paint); op.addParam("picture", AsValue(picture)); if (matrix) op.addParam("matrix", AsValue(*matrix)); INHERITED::onDrawPicture(picture, matrix, op.paint()); } void BenchmarkingCanvas::onDrawImage2(const SkImage* image, SkScalar left, SkScalar top, const SkSamplingOptions& sampling, const SkPaint* paint) { DCHECK(image); AutoOp op(this, "DrawImage", paint); op.addParam("image", AsValue(*image)); op.addParam("left", AsValue(left)); op.addParam("top", AsValue(top)); INHERITED::onDrawImage2(image, left, top, sampling, op.paint()); } void BenchmarkingCanvas::onDrawImageRect2(const SkImage* image, const SkRect& src, const SkRect& dst, const SkSamplingOptions& sampling, const SkPaint* paint, SrcRectConstraint constraint) { DCHECK(image); AutoOp op(this, "DrawImageRect", paint); op.addParam("image", AsValue(*image)); op.addParam("src", AsValue(src)); op.addParam("dst", AsValue(dst)); INHERITED::onDrawImageRect2(image, src, dst, sampling, op.paint(), constraint); } void BenchmarkingCanvas::onDrawTextBlob(const SkTextBlob* blob, SkScalar x, SkScalar y, const SkPaint& paint) { DCHECK(blob); AutoOp op(this, "DrawTextBlob", &paint); op.addParam("blob", AsValue(*blob)); op.addParam("x", AsValue(x)); op.addParam("y", AsValue(y)); INHERITED::onDrawTextBlob(blob, x, y, *op.paint()); } } // namespace skia<|fim▁end|>
<|file_name|>lib.rs<|end_file_name|><|fim▁begin|>extern crate iron; extern crate mysql; // insert database connection into request use iron::{BeforeMiddleware, typemap}; use iron::prelude::{IronResult, Request}; use mysql::Pool; pub struct DBPool { pub pool: Pool, } impl typemap::Key for DBPool { type Value = Pool; } impl BeforeMiddleware for DBPool { fn before(&self, req: &mut Request) -> IronResult<()> { req.extensions.insert::<DBPool>(self.pool.clone()); Ok(())<|fim▁hole|> } }<|fim▁end|>
<|file_name|>CertCommands.cpp<|end_file_name|><|fim▁begin|>#include "pch.h" #include "CertCommands.h" #include "Console.h" #include "ProcessHost.h" #include<vector> #include "TerminalHelper.h" using namespace std; void CertsCommand::ProcessCommand(IConsole *pConsole, ParsedCommandLine *pCmdLine) { <|fim▁hole|> pConsole->WriteLine("SYNTAX: certs storename"); else { // Convert char* string to a wchar_t* string. std:string str = pCmdLine->GetArgs().at(1); std::wstring wsTmp(str.begin(), str.end()); HANDLE hStoreHandle = NULL; PCCERT_CONTEXT pCertContext = NULL; char * pszStoreName = "CA"; pConsole->WriteLine("Listing certs"); //-------------------------------------------------------------------- // Open a system certificate store. if (hStoreHandle = (*CertOpenStore)(CERT_STORE_PROV_SYSTEM_W, // The store provider type 0, // The encoding type is // not needed NULL, // Use the default HCRYPTPROV CERT_SYSTEM_STORE_CURRENT_USER, // Set the store location in a // registry location wsTmp.c_str() // The store name as a Unicode // string )) { pConsole->WriteLine("Opened certificate store"); } else { pConsole->WriteLine(GetLastErrorAsString()); return; } while (pCertContext = CertEnumCertificatesInStore( hStoreHandle, pCertContext)) { char buf[1000]; (*CertNameToStrA)(1, &pCertContext->pCertInfo->Subject, 2, (char *)buf, sizeof(buf)); pConsole->WriteLine("Found %s", buf); } //-------------------------------------------------------------------- // Clean up. if (!(*CertCloseStore)( hStoreHandle, 0)) { pConsole->WriteLine("Failed CertCloseStore"); } } } CommandInfo CertsCommand::GetInfo() { return CommandInfo("certs", "", "Certificate Manager"); }<|fim▁end|>
if (pCmdLine->GetArgs().size()<2)
<|file_name|>0005_footballtype_comments.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-<|fim▁hole|>from __future__ import unicode_literals from django.db import migrations, models class Migration(migrations.Migration): dependencies = [ ('type_page', '0004_auto_20170711_1241'), ] operations = [ migrations.AddField( model_name='footballtype', name='comments', field=models.CharField(max_length=128, null=True), ), ]<|fim▁end|>
# Generated by Django 1.11.2 on 2017-07-12 14:48
<|file_name|>htmltablecaptionelement.rs<|end_file_name|><|fim▁begin|>/* This Source Code Form is subject to the terms of the Mozilla Public * License, v. 2.0. If a copy of the MPL was not distributed with this * file, You can obtain one at http://mozilla.org/MPL/2.0/. */ use crate::dom::bindings::codegen::Bindings::HTMLTableCaptionElementBinding; use crate::dom::bindings::root::DomRoot; use crate::dom::document::Document; use crate::dom::htmlelement::HTMLElement; use crate::dom::node::Node; use dom_struct::dom_struct; use html5ever::{LocalName, Prefix}; #[dom_struct] pub struct HTMLTableCaptionElement { htmlelement: HTMLElement, } impl HTMLTableCaptionElement { fn new_inherited( local_name: LocalName, prefix: Option<Prefix>, document: &Document, ) -> HTMLTableCaptionElement { HTMLTableCaptionElement { htmlelement: HTMLElement::new_inherited(local_name, prefix, document), } } #[allow(unrooted_must_root)] pub fn new( local_name: LocalName, prefix: Option<Prefix>, document: &Document, ) -> DomRoot<HTMLTableCaptionElement> {<|fim▁hole|> Node::reflect_node( Box::new(HTMLTableCaptionElement::new_inherited( local_name, prefix, document, )), document, HTMLTableCaptionElementBinding::Wrap, ) } }<|fim▁end|>
<|file_name|>channelSync.go<|end_file_name|><|fim▁begin|>package main import ( "fmt" "time" ) func worker(done chan bool) { fmt.Println("Started") time.Sleep(time.Second) fmt.Println("Done") done <- true } <|fim▁hole|>func main() { done := make(chan bool) go worker(done) <-done }<|fim▁end|>
<|file_name|>__init__.py<|end_file_name|><|fim▁begin|>from flask import Flask, render_template, flash from flask_material_lite import Material_Lite from flask_appconfig import AppConfig from flask_wtf import Form, RecaptchaField from flask_wtf.file import FileField from wtforms import TextField, HiddenField, ValidationError, RadioField,\ BooleanField, SubmitField, IntegerField, FormField, validators from wtforms.validators import Required # straight from the wtforms docs: class TelephoneForm(Form): country_code = IntegerField('Country Code', [validators.required()]) area_code = IntegerField('Area Code/Exchange', [validators.required()]) number = TextField('Number') class ExampleForm(Form): field1 = TextField('First Field', description='This is field one.') field2 = TextField('Second Field', description='This is field two.', validators=[Required()]) hidden_field = HiddenField('You cannot see this', description='Nope') recaptcha = RecaptchaField('A sample recaptcha field') radio_field = RadioField('This is a radio field', choices=[ ('head_radio', 'Head radio'), ('radio_76fm', "Radio '76 FM"), ('lips_106', 'Lips 106'), ('wctr', 'WCTR'), ]) checkbox_field = BooleanField('This is a checkbox', description='Checkboxes can be tricky.') # subforms mobile_phone = FormField(TelephoneForm) # you can change the label as well office_phone = FormField(TelephoneForm, label='Your office phone') ff = FileField('Sample upload') submit_button = SubmitField('Submit Form') def validate_hidden_field(form, field): raise ValidationError('Always wrong') def create_app(configfile=None): app = Flask(__name__) AppConfig(app, configfile) # Flask-Appconfig is not necessary, but # highly recommend =) # https://github.com/mbr/flask-appconfig Material_Lite(app) # in a real app, these should be configured through Flask-Appconfig app.config['SECRET_KEY'] = 'devkey' app.config['RECAPTCHA_PUBLIC_KEY'] = \ '6Lfol9cSAAAAADAkodaYl9wvQCwBMr3qGR_PPHcw'<|fim▁hole|> @app.route('/', methods=('GET', 'POST')) 
def index(): form = ExampleForm() form.validate_on_submit() # to get error messages to the browser flash('critical message', 'critical') flash('error message', 'error') flash('warning message', 'warning') flash('info message', 'info') flash('debug message', 'debug') flash('different message', 'different') flash('uncategorized message') return render_template('index.html', form=form) return app if __name__ == '__main__': create_app().run(debug=True)<|fim▁end|>
<|file_name|>_msiecookiejar.py<|end_file_name|><|fim▁begin|>"""Microsoft Internet Explorer cookie loading on Windows. Copyright 2002-2003 Johnny Lee <typo_pl@hotmail.com> (MSIE Perl code) Copyright 2002-2006 John J Lee <jjl@pobox.com> (The Python port) This code is free software; you can redistribute it and/or modify it under the terms of the BSD or ZPL 2.1 licenses (see the file COPYING.txt included with the distribution). """ # XXX names and comments are not great here import os, re, time, struct, logging from _clientcookie import FileCookieJar, CookieJar, Cookie, \ MISSING_FILENAME_TEXT, LoadError if os.name == "nt": import _winreg debug = logging.getLogger("mechanize").debug def regload(path, leaf): key = _winreg.OpenKey(_winreg.HKEY_CURRENT_USER, path, 0, _winreg.KEY_ALL_ACCESS) try: value = _winreg.QueryValueEx(key, leaf)[0] except WindowsError: value = None return value WIN32_EPOCH = 0x019db1ded53e8000L # 1970 Jan 01 00:00:00 in Win32 FILETIME def epoch_time_offset_from_win32_filetime(filetime): """Convert from win32 filetime to seconds-since-epoch value. MSIE stores create and expire times as Win32 FILETIME, which is 64 bits of 100 nanosecond intervals since Jan 01 1601. mechanize expects time in 32-bit value expressed in seconds since the epoch (Jan 01 1970). """ if filetime < WIN32_EPOCH: raise ValueError("filetime (%d) is before epoch (%d)" % (filetime, WIN32_EPOCH)) return divmod((filetime - WIN32_EPOCH), 10000000L)[0] def binary_to_char(c): return "%02X" % ord(c) def binary_to_str(d): return "".join(map(binary_to_char, list(d))) class MSIEBase: magic_re = re.compile(r"Client UrlCache MMF Ver \d\.\d.*") padding = "\x0d\xf0\xad\x0b" msie_domain_re = re.compile(r"^([^/]+)(/.*)$") cookie_re = re.compile("Cookie\:.+\@([\x21-\xFF]+).*?" 
"(.+\@[\x21-\xFF]+\.txt)") # path under HKEY_CURRENT_USER from which to get location of index.dat reg_path = r"software\microsoft\windows" \ r"\currentversion\explorer\shell folders" reg_key = "Cookies" def __init__(self): self._delayload_domains = {} def _delayload_domain(self, domain): # if necessary, lazily load cookies for this domain delayload_info = self._delayload_domains.get(domain) if delayload_info is not None: cookie_file, ignore_discard, ignore_expires = delayload_info try: self.load_cookie_data(cookie_file, ignore_discard, ignore_expires) except (LoadError, IOError): debug("error reading cookie file, skipping: %s", cookie_file) else: del self._delayload_domains[domain] def _load_cookies_from_file(self, filename): debug("Loading MSIE cookies file: %s", filename) cookies = [] cookies_fh = open(filename) try: while 1: key = cookies_fh.readline() if key == "": break rl = cookies_fh.readline def getlong(rl=rl): return long(rl().rstrip()) def getstr(rl=rl): return rl().rstrip() key = key.rstrip() value = getstr() domain_path = getstr() flags = getlong() # 0x2000 bit is for secure I think lo_expire = getlong() hi_expire = getlong() lo_create = getlong() hi_create = getlong() sep = getstr() if "" in (key, value, domain_path, flags, hi_expire, lo_expire, hi_create, lo_create, sep) or (sep != "*"): break m = self.msie_domain_re.search(domain_path) if m: domain = m.group(1) path = m.group(2) cookies.append({"KEY": key, "VALUE": value, "DOMAIN": domain, "PATH": path, "FLAGS": flags, "HIXP": hi_expire, "LOXP": lo_expire, "HICREATE": hi_create, "LOCREATE": lo_create}) finally: cookies_fh.close() return cookies def load_cookie_data(self, filename, ignore_discard=False, ignore_expires=False): """Load cookies from file containing actual cookie data. Old cookies are kept unless overwritten by newly loaded ones. You should not call this method if the delayload attribute is set. I think each of these files contain all cookies for one user, domain, and path. 
filename: file containing cookies -- usually found in a file like C:\WINNT\Profiles\joe\Cookies\joe@blah[1].txt """ now = int(time.time()) cookie_data = self._load_cookies_from_file(filename) for cookie in cookie_data: flags = cookie["FLAGS"] secure = ((flags & 0x2000) != 0) filetime = (cookie["HIXP"] << 32) + cookie["LOXP"] expires = epoch_time_offset_from_win32_filetime(filetime) if expires < now: discard = True else: discard = False domain = cookie["DOMAIN"] initial_dot = domain.startswith(".") if initial_dot: domain_specified = True else: # MSIE 5 does not record whether the domain cookie-attribute # was specified. # Assuming it wasn't is conservative, because with strict # domain matching this will match less frequently; with regular # Netscape tail-matching, this will match at exactly the same # times that domain_specified = True would. It also means we # don't have to prepend a dot to achieve consistency with our # own & Mozilla's domain-munging scheme. domain_specified = False # assume path_specified is false # XXX is there other stuff in here? -- e.g. comment, commentURL? 
c = Cookie(0, cookie["KEY"], cookie["VALUE"], None, False, domain, domain_specified, initial_dot, cookie["PATH"], False, secure, expires, discard, None, None, {"flags": flags}) if not ignore_discard and c.discard: continue if not ignore_expires and c.is_expired(now): continue CookieJar.set_cookie(self, c) def load_from_registry(self, ignore_discard=False, ignore_expires=False, username=None): """ username: only required on win9x """ cookies_dir = regload(self.reg_path, self.reg_key) filename = os.path.normpath(os.path.join(cookies_dir, "INDEX.DAT")) self.load(filename, ignore_discard, ignore_expires, username) def _really_load(self, index, filename, ignore_discard, ignore_expires, username): now = int(time.time()) if username is None: username = os.environ['USERNAME'].lower() cookie_dir = os.path.dirname(filename) data = index.read(256) if len(data) != 256: raise LoadError("%s file is too short" % filename) # Cookies' index.dat file starts with 32 bytes of signature # followed by an offset to the first record, stored as a little- # endian DWORD. sig, size, data = data[:32], data[32:36], data[36:] size = struct.unpack("<L", size)[0] # check that sig is valid if not self.magic_re.match(sig) or size != 0x4000: raise LoadError("%s ['%s' %s] does not seem to contain cookies" % (str(filename), sig, size)) # skip to start of first record index.seek(size, 0) sector = 128 # size of sector in bytes while 1: data = "" # Cookies are usually in two contiguous sectors, so read in two # sectors and adjust if not a Cookie. to_read = 2 * sector d = index.read(to_read) if len(d) != to_read: break data = data + d # Each record starts with a 4-byte signature and a count # (little-endian DWORD) of sectors for the record. 
sig, size, data = data[:4], data[4:8], data[8:] size = struct.unpack("<L", size)[0] to_read = (size - 2) * sector ## from urllib import quote ## print "data", quote(data) ## print "sig", quote(sig) ## print "size in sectors", size ## print "size in bytes", size*sector ## print "size in units of 16 bytes", (size*sector) / 16 ## print "size to read in bytes", to_read ## print if sig != "URL ": assert sig in ("HASH", "LEAK", \ self.padding, "\x00\x00\x00\x00"), \ "unrecognized MSIE index.dat record: %s" % \ binary_to_str(sig) if sig == "\x00\x00\x00\x00": # assume we've got all the cookies, and stop break if sig == self.padding: continue # skip the rest of this record assert to_read >= 0 if size != 2: assert to_read != 0 index.seek(to_read, 1) continue # read in rest of record if necessary if size > 2: more_data = index.read(to_read) if len(more_data) != to_read: break data = data + more_data cookie_re = ("Cookie\:%s\@([\x21-\xFF]+).*?" % username + "(%s\@[\x21-\xFF]+\.txt)" % username) m = re.search(cookie_re, data, re.I) if m: cookie_file = os.path.join(cookie_dir, m.group(2)) if not self.delayload: try: self.load_cookie_data(cookie_file, ignore_discard, ignore_expires) except (LoadError, IOError): debug("error reading cookie file, skipping: %s", cookie_file) else: domain = m.group(1) i = domain.find("/") if i != -1: domain = domain[:i] self._delayload_domains[domain] = ( cookie_file, ignore_discard, ignore_expires) class MSIECookieJar(MSIEBase, FileCookieJar): """FileCookieJar that reads from the Windows MSIE cookies database. MSIECookieJar can read the cookie files of Microsoft Internet Explorer (MSIE) for Windows version 5 on Windows NT and version 6 on Windows XP and Windows 98. Other configurations may also work, but are untested. Saving cookies in MSIE format is NOT supported. If you save cookies, they'll be in the usual Set-Cookie3 format, which you can read back in using an instance of the plain old CookieJar class. 
Don't save using the same filename that you loaded cookies from, because you may succeed in clobbering your MSIE cookies index file! You should be able to have LWP share Internet Explorer's cookies like this (note you need to supply a username to load_from_registry if you're on Windows 9x or Windows ME): cj = MSIECookieJar(delayload=1) # find cookies index file in registry and load cookies from it cj.load_from_registry() opener = mechanize.build_opener(mechanize.HTTPCookieProcessor(cj)) response = opener.open("http://example.com/") Iterating over a delayloaded MSIECookieJar instance will not cause any cookies to be read from disk. To force reading of all cookies from disk, call read_all_cookies. Note that the following methods iterate over self: clear_temporary_cookies, clear_expired_cookies, __len__, __repr__, __str__ and as_string. Additional methods: load_from_registry(ignore_discard=False, ignore_expires=False, username=None) load_cookie_data(filename, ignore_discard=False, ignore_expires=False) read_all_cookies() """ def __init__(self, filename=None, delayload=False, policy=None): MSIEBase.__init__(self) FileCookieJar.__init__(self, filename, delayload, policy) def set_cookie(self, cookie): if self.delayload: self._delayload_domain(cookie.domain) CookieJar.set_cookie(self, cookie)<|fim▁hole|> def _cookies_for_request(self, request): """Return a list of cookies to be returned to server.""" domains = self._cookies.copy() domains.update(self._delayload_domains) domains = domains.keys() cookies = [] for domain in domains: cookies.extend(self._cookies_for_domain(domain, request)) return cookies def _cookies_for_domain(self, domain, request): if not self._policy.domain_return_ok(domain, request): return [] debug("Checking %s for cookies to return", domain) if self.delayload: self._delayload_domain(domain) return CookieJar._cookies_for_domain(self, domain, request) def read_all_cookies(self): """Eagerly read in all cookies.""" if self.delayload: for domain in 
self._delayload_domains.keys(): self._delayload_domain(domain) def load(self, filename, ignore_discard=False, ignore_expires=False, username=None): """Load cookies from an MSIE 'index.dat' cookies index file. filename: full path to cookie index file username: only required on win9x """ if filename is None: if self.filename is not None: filename = self.filename else: raise ValueError(MISSING_FILENAME_TEXT) index = open(filename, "rb") try: self._really_load(index, filename, ignore_discard, ignore_expires, username) finally: index.close()<|fim▁end|>
<|file_name|>solarSwitch.py<|end_file_name|><|fim▁begin|>import omf.cosim glw = omf.cosim.GridLabWorld('6267', 'localhost', 'GC-solarAdd.glm', '2000-01-01 0:00:00') glw.start()<|fim▁hole|>print (glw.readClock()) # Changing solar gen status. print (glw.read('test_solar', 'generator_status')) glw.write('test_solar','generator_status', 'OFFLINE') print ('Switched off solar') print (glw.read('test_solar', 'generator_status')) # Changing reactive power output. print (glw.read('test_solar_inverter', 'Q_Out')) glw.write('test_solar_inverter','Q_Out', '1000') print ('Change Q_Out') print (glw.read('test_solar_inverter', 'Q_Out')) #glw.waitUntil('2000-01-01 0:30:00') #print ('Stepped ahead 12 hours') print (glw.readClock()) glw.resume() print (glw.readClock()) glw.shutdown()<|fim▁end|>
<|file_name|>serializers.py<|end_file_name|><|fim▁begin|>from rest_framework.serializers import ( HyperlinkedIdentityField, ModelSerializer, SerializerMethodField, ) from comments.api.serializers import CommentSerializer from accounts.api.serializers import UserDetailSerializer from comments.models import Comment from posts.models import Post class PostCreateUpdateSerializer(ModelSerializer): class Meta: model = Post fields = [ #'id', 'title', #'slug', 'content', 'publish', ] post_detail_url = HyperlinkedIdentityField( view_name = 'posts-api:detail', lookup_field = 'slug', ) class PostDetailSerializer(ModelSerializer): url = post_detail_url user = UserDetailSerializer(read_only=True) image = SerializerMethodField() html = SerializerMethodField() comments = SerializerMethodField() class Meta: model = Post fields = [ 'url', 'id', 'user', 'title', 'slug', 'content', 'html', 'publish', 'image', 'comments', ] def get_html(self, obj): return obj.get_markdown() def get_image(self, obj): try: image = obj.image.url except: image = None return image def get_comments(self, obj): #content_type = obj.get_content_type<|fim▁hole|> class PostListSerializer(ModelSerializer): url = post_detail_url user = UserDetailSerializer(read_only=True) class Meta: model = Post fields = [ 'url', 'user', 'title', 'slug', 'content', 'publish', ]<|fim▁end|>
#object_id = obj.id c_qs = Comment.objects.filter_by_instance(obj) comments = CommentSerializer(c_qs, many=True).data return comments
<|file_name|>safe.rs<|end_file_name|><|fim▁begin|>use std::fmt; <|fim▁hole|>extern { fn ccosf(z: Complex) -> Complex; } // safe wrapper fn cos(z: Complex) -> Complex { unsafe { ccosf(z) } } fn main() { // z = 0 + 1i let z = Complex { re: 0., im: 1. }; println!("cos({}) = {}", z, cos(z)); } // Minimal implementation of single precision complex numbers #[repr(C)] #[deriving(Copy)] struct Complex { re: f32, im: f32, } impl fmt::Show for Complex { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { if self.im < 0. { write!(f, "{}-{}i", self.re, -self.im) } else { write!(f, "{}+{}i", self.re, self.im) } } }<|fim▁end|>
#[link(name = "m")]
<|file_name|>permissions.py<|end_file_name|><|fim▁begin|>from rest_framework import permissions class IsOwnerOrReadOnly(permissions.BasePermission): """ Object-level permission to only allow owners of an object to edit it. Assumes the model instance has an `owner` attribute. """ def has_object_permission(self, request, view, obj): # Read permissions are allowed to any request, # so we'll always allow GET, HEAD or OPTIONS requests. if request.method in permissions.SAFE_METHODS: return True # Instance must have an attribute named `owner`.<|fim▁hole|><|fim▁end|>
return obj.owner == request.user
<|file_name|>module.py<|end_file_name|><|fim▁begin|>#!/usr/bin/python # ex:set fileencoding=utf-8: from __future__ import unicode_literals from django.conf.urls import patterns from django.conf.urls import url from django.contrib.admin.sites import AlreadyRegistered from django.contrib.contenttypes.models import ContentType from django.core.exceptions import ImproperlyConfigured from django.db.models import signals from django.http import Http404 from django.utils import six from django.utils.text import slugify from rest_framework.reverse import reverse from djangobmf.core.relationship import DocumentRelationship from djangobmf.core.serializers.document import DocumentSerializer from djangobmf.core.workflow import Workflow from djangobmf.models import Document from djangobmf.permissions import ModulePermission from djangobmf.views import ModuleCreateView from djangobmf.views import ModuleDeleteView from djangobmf.views import ModuleDetail from djangobmf.views import ModuleFormAPI from djangobmf.views import ModuleUpdateView from djangobmf.views import ModuleWorkflowView from collections import OrderedDict import logging logger = logging.getLogger(__name__) class Module(object): """ Under the ``Module`-class the framework stores every informations needed to display and manage views and API's. It also provides many functions used in the whole framework. """ open_relation = None workflow_class = None workflow_field_name = "state" detail_view = ModuleDetail def __init__(self, bmfconfig): # validation if not hasattr(self, 'model'): raise ImproperlyConfigured( 'No model defined in %s.' 
% self.__class__ ) self.bmfconfig = bmfconfig self._class_reports = {} self._object_reports = {} self._relations = [] self.signals_setup() self.validate_workflow() # auto add document relationship if hasattr(self.model, '_bmfmeta') and self.model._bmfmeta.has_files: class FileDownload(DocumentRelationship): model_to = self.model serializer = DocumentSerializer self.add_relation(FileDownload, Document) # TODO: OLD OLD OLD self.create_view = self.create self.delete_view = self.delete self.update_view = self.update # --- misc ---------------------------------------------------------------- def get_contenttype(self): # pragma: no cover """ returns the models contenttype """ return ContentType.objects.get_for_model(self.model) # --- single views -------------------------------------------------------- # TODO def get_update_view(self): """ """ pass # TODO def get_delete_view(self): """ """ pass def get_detail_view(self, request, *args, **kwargs): """ generates a detail-view response """ if hasattr(self, '_detail_view'): return self._detail_view(request, *args, **kwargs) self._detail_view = self.detail_view.as_view( module=self, model=self.model ) return self._detail_view(request, *args, **kwargs) # --- serialization ------------------------------------------------------- # TODO def serialize_class(self, request=None): """ """ return OrderedDict([ ('app', self.model._meta.app_label), ('creates', self.get_create_views()), ('ct', self.get_contenttype().pk), ('model', self.model._meta.model_name), ('name', self.model._meta.verbose_name_plural), ('open_relation', self.open_relation), ('relations', self.get_relations(request)), ]) # TODO def serialize_object(self, obj): """ """ return {} # --- workflow ------------------------------------------------------------ # TODO def validate_workflow(self): """ """ if self.workflow_class: if not issubclass(self.workflow_class, Workflow): raise ImproperlyConfigured( "%s is not a Workflow in %s" % ( self.workflow_class.__name__, 
self.__name__<|fim▁hole|> # self.workflow = self.workflow_class() def has_workflow(self): """ """ return bool(self.workflow_class) # TODO def get_workflow_states(self, obj): """ """ pass # TODO def get_workflow_transitions(self, obj, state_name): """ """ pass # --- permissions --------------------------------------------------------- # TODO def get_permissions(self, obj): """ """ pass # --- Create views -------------------------------------------------------- def has_create_views(self): """ return True if the module has one or more create views """ return getattr(self, '_has_create_views', False) # TODO def get_create_views(self): """ """ if self.bmfconfig: namespace_api = '%s:moduleapi_%s_%s' % ( self.bmfconfig.label, self.model._meta.app_label, self.model._meta.model_name, ) return [{ 'name': i[1], 'url': reverse(namespace_api + ':create', kwargs={"key": i[0]}), } for i in self.list_creates()] return [] # TODO def get_create_view(self, name): """ """ pass # TODO def add_create_view(self, name, view): """ """ pass self._has_create_views = True # --- Clone views --------------------------------------------------------- def has_clone_views(self): """ return True if the module has one or more clone views """ return getattr(self, '_has_clone_views', False) # TODO def get_clone_views(self): """ """ pass # TODO def get_clone_view(self, name): """ """ pass # TODO def add_clone_view(self, name, view): """ """ pass self._has_clone_views = True # --- Functions for both report types ------------------------------------- def add_report(self, report): """ """ if not getattr(report, "renderer_class", None): raise ImproperlyConfigured( '%s needs a renderer_class attribute', report, ) if report.has_object: return self.add_object_report(report) else: return self.add_class_report(report) # --- Class specific reports ---------------------------------------------- # TODO def get_class_reports(self): """ """ pass # TODO def get_class_report(self, name): """ """ pass # TODO def 
add_class_report(self, report): """ """ self._class_reports[report.__name__] = { 'class': report, } # --- Object specific reports --------------------------------------------- def get_object_reports(self): """ Returns all available reports """ qs = self.bmfconfig.get_model("Report").objects.filter( contenttype=self.get_contenttype(), enabled=True ).values('pk', 'name', 'slug', 'renderer_view') items = [] for data in qs: cls = self._object_reports[data['renderer_view']] if data['renderer_view'] in self._object_reports: items.append({ 'name': data['name'], 'slug': data['slug'], 'verbose_name': cls['class'].verbose_name, 'has_form': bool(cls['class'].form_class), }) else: self.bmfconfig.get_model("Report").objects.filter(pk=data['pk']).update(enabled=False) return items def get_object_report(self, slug): """ """ obj = self.bmfconfig.get_model("Report").objects.get( contenttype=self.get_contenttype(), enabled=True, slug=slug, ) if not obj.renderer: logger.error('No renderer defined') raise Http404 if obj.renderer_view in self._object_reports: report = self._object_reports[obj.renderer_view] if not report["view"]: report["view"] = report["class"].as_view() return report['view'], obj.renderer else: raise Http404 def add_object_report(self, report): """ """ name = report.__module__ + '.' 
+ report.__name__ self._object_reports[name] = { 'class': report, 'view': None, # the view is added by get_object_report } # --- Class specific custom apis ------------------------------------------ # TODO def get_class_apis(self): """ """ pass # TODO def get_class_api(self, name): """ """ pass # TODO def add_class_api(self, name, view): """ """ pass # --- Object specific custom apis ----------------------------------------- # TODO def get_object_apis(self): """ """ pass # TODO def get_object_api(self, name): """ """ pass # TODO def add_object_api(self, name, view): """ """ pass # --- Object specific custom apis ----------------------------------------- def has_relations(self): """ return True if the module has one or more relations """ return bool(self._relations) # TODO def get_relations(self, request): """ """ relations = [] for relation in self._relations: perm = '%s.view_%s' info = (relation._model_to._meta.app_label, relation._model_to._meta.model_name) if not request.user.has_perms([perm % info]): continue data = OrderedDict([ ('app_label', relation._model_from._meta.app_label), ('model_name', relation._model_from._meta.model_name), ('name', relation.name), ('slug', relation.slug), ('template', relation.template), ]) relations.append(data) return relations # TODO def get_relation(self, name): """ """ pass # TODO def add_relation(self, cls, model_from): """ """ relation = cls() relation._model_from = model_from for obj in self._relations: if obj == relation: raise AlreadyRegistered( 'Can not register the relationship %s' % cls.__name__ ) self._relations.append(relation) # --- number ranges ------------------------------------------------------- def has_numberranges(self): """ """ pass # TODO def get_numberranges(self): """ """ pass # TODO def get_numberrange(self, name): """ """ pass # TODO def add_numberrange(self, name, number_range): """ """ pass # --- Signals ------------------------------------------------------------- def signals_setup(self): """ Bind 
own signal methods to the djangos signals """ logger.debug("Setup signals for %s", self.__class__.__name__) signals.pre_delete.connect(self.signal_pre_delete, sender=self.model) signals.pre_init.connect(self.signal_pre_init, sender=self.model) signals.pre_save.connect(self.signal_pre_save, sender=self.model) signals.post_delete.connect(self.signal_post_delete, sender=self.model) signals.post_init.connect(self.signal_post_init, sender=self.model) signals.post_save.connect(self.signal_post_save, sender=self.model) def signal_pre_delete(self, *args, **kwargs): """ This function is called bevor a model instance is deleted """ pass def signal_pre_init(self, *args, **kwargs): """ This function is called bevor a model instance is initialized """ pass def signal_pre_save(self, *args, **kwargs): """ This function is called bevor a model instance is saved """ pass def signal_post_delete(self, *args, **kwargs): """ This function is called after a model instance is deleted """ pass def signal_post_init(self, *args, **kwargs): """ This function is called after a model instance is initialized """ pass def signal_post_save(self, *args, **kwargs): """ This function is called after a model instance is saved """ pass # TODO: OLD OLD OLD OLD OLD OLD OLD OLD OLD OLD OLD OLD OLD OLD OLD OLD OLD detail = ModuleDetail create = ModuleCreateView delete = ModuleDeleteView update = ModuleUpdateView permissions = ModulePermission detail_urlpatterns = None api_urlpatterns = None def list_creates(self): if hasattr(self, 'listed_creates'): return self.listed_creates self.listed_creates = [] if isinstance(self.create, dict): for label, view in six.iteritems(self.create): key = slugify(label) if isinstance(view, (list, tuple)) and len(view) == 2: # overwrite the label, and use the correct the view function label = view[0] view = view[1] self.listed_creates.append((key, label, view)) elif issubclass(self.create, ModuleCreateView): self.listed_creates.append(('default', 'default', self.create)) 
return self.listed_creates def get_detail_urls(self): # add custom url patterns if self.detail_urlpatterns: return self.detail_urlpatterns return patterns('') def get_api_urls(self): creates = self.list_creates() urlpatterns = patterns( '', url( r'^update/(?P<pk>[0-9]+)/$', self.update.as_view( module=self, model=self.model ), name='update', ), url( r'^update/(?P<pk>[0-9]+)/form/$', ModuleFormAPI.as_view( module=self, model=self.model, form_view=self.update, ), name='update-form', ), url( r'^delete/(?P<pk>[0-9]+)/$', self.delete.as_view( module=self, model=self.model ), name='delete', ), ) if self.model._bmfmeta.can_clone: urlpatterns += patterns( '', url( r'^clone/(?P<pk>[0-9]+)/$', self.clone.as_view( module=self, model=self.model ), name='clone', ), url( r'^clone/(?P<pk>[0-9]+)/form/$', ModuleFormAPI.as_view( module=self, model=self.model, form_view=self.clone, ), name='clone-form', ), ) for key, label, view in creates: urlpatterns += patterns( '', url( r'^create/(?P<key>%s)/$' % key, view.as_view( module=self, model=self.model ), name='create', ), url( r'^create/(?P<key>%s)/form/$' % key, ModuleFormAPI.as_view( module=self, model=self.model, form_view=view, ), name='create-form', ), ) # workflow interactions if self.model._bmfmeta.has_workflow: urlpatterns += patterns( '', url( r'^workflow/(?P<pk>[0-9]+)/(?P<transition>\w+)/$', ModuleWorkflowView.as_view( module=self, model=self.model ), name='workflow', ), ) # add custom url patterns if self.api_urlpatterns: urlpatterns += self.api_urlpatterns return urlpatterns<|fim▁end|>
) )
<|file_name|>GraphQLErrorPage.tsx<|end_file_name|><|fim▁begin|>import React from "react" import ServerError from "./ServerError" import NotFoundError from "./NotFoundError" import OtherError from "./OtherError" import { ApolloError } from "@apollo/client" type Props = { /** GraphQL error object */ error: ApolloError } /** * Displays any errors found when issuing a GraphQL query or mutation. * Returns one of the other error components based on the error code. */ const GraphQLErrorPage = ({ error }: Props) => { if (!error || !error.message) return null if (error.networkError) { console.error(error.networkError) return <ServerError /> } let errorCode, errorMsg if (error.graphQLErrors && error.graphQLErrors[0].extensions) { errorCode = error.graphQLErrors[0].extensions.code errorMsg = error.graphQLErrors[0].message } if (errorCode === "Unavailable") { return <ServerError /><|fim▁hole|> <NotFoundError error={errorMsg.charAt(0).toUpperCase() + errorMsg.slice(1)} /> ) } return <OtherError /> } GraphQLErrorPage.defaultProps = { error: {}, } export default GraphQLErrorPage<|fim▁end|>
} if (errorCode === "NotFound" && errorMsg) { return (
<|file_name|>t_SymmetricMatrix_std.py<|end_file_name|><|fim▁begin|>#! /usr/bin/env python from openturns import * from math import * TESTPREAMBLE() try : # TEST NUMBER ZERO : DEFAULT CONSTRUCTOR AND STRING CONVERTER print "test number zero : default constructor and string converter" # Default constructor symmetricMatrix0 =SymmetricMatrix() # String converter print "symmetricMatrix0 = " , repr(symmetricMatrix0) # TEST NUMBER ONE : CONSTRUCTOR WITH SIZE, OPERATOR() AND STRING CONVERTER print "test number one : constructor with size, operator() and string converter" # Constructor with size symmetricMatrix1 = SymmetricMatrix(2) # Check operator() methods symmetricMatrix1[0,0]=1. symmetricMatrix1[1,0]=2. symmetricMatrix1[0,1]=3. symmetricMatrix1[1,1]=4. # String converter print "symmetricMatrix1 = " , repr(symmetricMatrix1) # TEST NUMBER TWO : COPY CONSTRUCTOR AND STRING CONVERTER print "test number two : copy constructor and string converter" # Copy constructor symmetricMatrix2 = SymmetricMatrix(symmetricMatrix1) # String converter print "symmetricMatrix2 = " , repr(symmetricMatrix2) # TEST NUMBER THREE : GET DIMENSIONS METHODS print "test number three : get dimensions methods" # Get dimension methods print "symmetricMatrix1's nbRows = " , symmetricMatrix1.getNbRows() print "symmetricMatrix1's nbColumns = " , symmetricMatrix1.getNbColumns() # TEST NUMBER FIVE : ASSIGNMENT METHOD print "test number five : assignment method" # Assignment method # No sense with pyton # TEST NUMBER SIX : TRANSPOSITION METHOD print "test number six : transposition method" # Check transpose method symmetricMatrix4 = symmetricMatrix1.transpose() print "symmetricMatrix1 transposed = " , repr(symmetricMatrix4) # TEST NUMBER SEVEN : ADDITION METHOD print "test number seven : addition method" # Check addition method : we check the operator and the symmetry of the operator, thus testing the comparison operator sum1 = symmetricMatrix1 + symmetricMatrix4 sum2 = symmetricMatrix4 + symmetricMatrix1 
print "sum1 = " , repr(sum1) print "sum2 = " , repr(sum2) print "sum1 equals sum2 = " , sum1 == sum2 <|fim▁hole|> # Check substraction method diff = symmetricMatrix1-symmetricMatrix4 print "diff = " , repr(diff) # TEST NUMBER NINE : MATRIX MULTIPLICATION METHOD print "test number nine : matrix multiplication method" # Check multiplication method prod = symmetricMatrix1*symmetricMatrix4 print "prod = " , repr(prod) # TEST NUMBER TEN : MULTIPLICATION WITH A NUMERICAL POINT METHOD print "test number ten : multiplication with a numerical point method" # Create the numerical point pt = NumericalPoint() pt.add(1.) pt.add(2.) print "pt = " , repr(pt) # Check the product method ptResult = symmetricMatrix1* pt print "ptResult = " , repr(ptResult) # TEST NUMBER ELEVEN : MULTIPLICATION AND DIVISION BY A NUMERICAL SCALAR METHODS print "test number eleven : multiplication and division by a numerical scalar methods" # Check the multiplication method s=3. scalprod1 = symmetricMatrix1 * s # bug PYTHON scalprod2 = s * symmetricMatrix1 scalprod3 = symmetricMatrix1 * s print "scalprod1 = " , repr(scalprod1) # print "scalprod2 = " , scalprod2 print "scalprod3 = " , repr(scalprod3) # print "scalprod1 equals scalprod2 = " , (scalprod1 == scalprod2) print "scalprod1 equals scalprod3 = " , (scalprod1 == scalprod3) # print "scalprod2 equals scalprod3 = " , (scalprod2 == scalprod3) # Check the division method scaldiv1 = symmetricMatrix1/s scaldiv2 = symmetricMatrix1/s print "scaldiv1 = " , repr(scaldiv1) print "scaldiv2 = " , repr(scaldiv2) print "scaldiv1 equals scaldiv2 = " , (scaldiv1 == scaldiv2) # TEST NUMBER TWELVE : ISEMPTY METHOD print "test number twelve : isEmpty method" # Check method isEmpty symmetricMatrix5 = SymmetricMatrix() symmetricMatrix6 = SymmetricMatrix() print "symmetricMatrix0 is empty = " , symmetricMatrix0.isEmpty() print "symmetricMatrix1 is empty = " , symmetricMatrix1.isEmpty() print "symmetricMatrix5 is empty = " , symmetricMatrix5.isEmpty() except : import sys 
print "t_SymmetricMatrix_std.py", sys.exc_type, sys.exc_value<|fim▁end|>
# TEST NUMBER EIGHT : SUBSTRACTION METHOD print "test number eight : substraction method"
<|file_name|>rand_utils.rs<|end_file_name|><|fim▁begin|>//! Utility functions for random functionality. //! //! This module provides sampling and shuffling which are used //! within the learning modules. use rand::{Rng, thread_rng}; /// ``` /// use rusty_machine::learning::toolkit::rand_utils; /// /// let mut pool = &mut [1,2,3,4]; /// let sample = rand_utils::reservoir_sample(pool, 3); /// /// println!("{:?}", sample); /// ``` pub fn reservoir_sample<T: Copy>(pool: &[T], reservoir_size: usize) -> Vec<T> { assert!(pool.len() >= reservoir_size, "Sample size is greater than total."); let mut pool_mut = &pool[..]; let mut res = pool_mut[..reservoir_size].to_vec(); pool_mut = &pool_mut[reservoir_size..]; let mut ele_seen = reservoir_size; let mut rng = thread_rng(); while !pool_mut.is_empty() { ele_seen += 1; let r = rng.gen_range(0, ele_seen); let p_0 = pool_mut[0]; pool_mut = &pool_mut[1..]; if r < reservoir_size { res[r] = p_0; } } res } /// The inside out Fisher-Yates algorithm. /// /// # Examples /// /// ``` /// use rusty_machine::learning::toolkit::rand_utils; /// /// // Collect the numbers 0..5 /// let a = (0..5).collect::<Vec<_>>(); /// /// // Perform a Fisher-Yates shuffle to get a random permutation /// let permutation = rand_utils::fisher_yates(&a); /// ``` pub fn fisher_yates<T: Copy>(arr: &[T]) -> Vec<T> { let n = arr.len(); let mut rng = thread_rng(); let mut shuffled_arr = Vec::with_capacity(n); unsafe { // We set the length here // We only access data which has been initialized in the algorithm shuffled_arr.set_len(n); } for i in 0..n { let j = rng.gen_range(0, i + 1); // If j isn't the last point in the active shuffled array if j != i { // Copy value at position j to the end of the shuffled array // This is safe as we only read initialized data (j < i) let x = shuffled_arr[j]; shuffled_arr[i] = x; } // Place value at end of active array into shuffled array shuffled_arr[j] = arr[i]; } shuffled_arr } /// The in place Fisher-Yates shuffle. 
/// /// # Examples /// /// ``` /// use rusty_machine::learning::toolkit::rand_utils; /// /// // Collect the numbers 0..5 /// let mut a = (0..5).collect::<Vec<_>>(); /// /// // Permute the values in place with Fisher-Yates /// rand_utils::in_place_fisher_yates(&mut a); /// ``` pub fn in_place_fisher_yates<T>(arr: &mut [T]) { let n = arr.len(); let mut rng = thread_rng(); for i in 0..n { // Swap i with a random point after it let j = rng.gen_range(0, n - i); arr.swap(i, i + j); } } #[cfg(test)] mod tests { use super::*; #[test] fn test_reservoir_sample() { let a = vec![1, 2, 3, 4, 5, 6, 7]; let b = reservoir_sample(&a, 3); assert_eq!(b.len(), 3); } #[test] fn test_fisher_yates() { let a = (0..10).collect::<Vec<_>>(); let b = fisher_yates(&a); for val in a.iter() { assert!(b.contains(val)); } } #[test] fn test_in_place_fisher_yates() { let mut a = (0..10).collect::<Vec<_>>(); in_place_fisher_yates(&mut a); for val in 0..10 {<|fim▁hole|><|fim▁end|>
assert!(a.contains(&val)); } } }
<|file_name|>mercator.js<|end_file_name|><|fim▁begin|>// @license // Redistribution and use in source and binary forms ... // Copyright 2012 Carnegie Mellon University. All rights reserved. // // Redistribution and use in source and binary forms, with or without modification, are // permitted provided that the following conditions are met: // // 1. Redistributions of source code must retain the above copyright notice, this list of // conditions and the following disclaimer. // // 2. Redistributions in binary form must reproduce the above copyright notice, this list // of conditions and the following disclaimer in the documentation and/or other materials // provided with the distribution. // // THIS SOFTWARE IS PROVIDED BY CARNEGIE MELLON UNIVERSITY ''AS IS'' AND ANY EXPRESS OR IMPLIED // WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND // FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL CARNEGIE MELLON UNIVERSITY OR // CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR // CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR // SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON // ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING // NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF // ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. // // The views and conclusions contained in the software and documentation are those of the // authors and should not be interpreted as representing official policies, either expressed // or implied, of Carnegie Mellon University. 
// // Author: // Randy Sargent (randy.sargent@cs.cmu.edu) "use strict"; var org; org = org || {}; org.gigapan = org.gigapan || {}; org.gigapan.timelapse = org.gigapan.timelapse || {}; org.gigapan.timelapse.MercatorProjection = function(west, north, east, south, width, height) { function rawProjectLat(lat) { return Math.log((1 + Math.sin(lat * Math.PI / 180)) / Math.cos(lat * Math.PI / 180)); } <|fim▁hole|> function interpolate(x, fromLow, fromHigh, toLow, toHigh) { return (x - fromLow) / (fromHigh - fromLow) * (toHigh - toLow) + toLow; } this.latlngToPoint = function(latlng) { var x = interpolate(latlng.lng, west, east, 0, width); var y = interpolate(rawProjectLat(latlng.lat), rawProjectLat(north), rawProjectLat(south), 0, height); return { "x": x, "y": y }; }; this.pointToLatlng = function(point) { var lng = interpolate(point.x, 0, width, west, east); var lat = rawUnprojectLat(interpolate(point.y, 0, height, rawProjectLat(north), rawProjectLat(south))); return { "lat": lat, "lng": lng }; }; };<|fim▁end|>
function rawUnprojectLat(y) { return (2 * Math.atan(Math.exp(y)) - Math.PI / 2) * 180 / Math.PI; }
<|file_name|>asttokens.py<|end_file_name|><|fim▁begin|># Copyright 2016 Grist Labs, Inc. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. import ast import bisect import token import tokenize import io import six from six.moves import xrange # pylint: disable=redefined-builtin from .line_numbers import LineNumbers from .util import Token, match_token, is_non_coding_token from .mark_tokens import MarkTokens class ASTTokens(object): """<|fim▁hole|> as tokens, and is used to mark and access token and position information. ``source_text`` must be a unicode or UTF8-encoded string. If you pass in UTF8 bytes, remember that all offsets you'll get are to the unicode text, which is available as the ``.text`` property. If ``parse`` is set, the ``source_text`` will be parsed with ``ast.parse()``, and the resulting tree marked with token info and made available as the ``.tree`` property. If ``tree`` is given, it will be marked and made available as the ``.tree`` property. In addition to the trees produced by the ``ast`` module, ASTTokens will also mark trees produced using ``astroid`` library <https://www.astroid.org>. If only ``source_text`` is given, you may use ``.mark_tokens(tree)`` to mark the nodes of an AST tree created separately. """ def __init__(self, source_text, parse=False, tree=None, filename='<unknown>'): self._filename = filename self._tree = ast.parse(source_text, filename) if parse else tree # Decode source after parsing to let Python 2 handle coding declarations. 
# (If the encoding was not utf-8 compatible, then even if it parses correctly, # we'll fail with a unicode error here.) if isinstance(source_text, six.binary_type): source_text = source_text.decode('utf8') self._text = source_text self._line_numbers = LineNumbers(source_text) # Tokenize the code. self._tokens = list(self._generate_tokens(source_text)) # Extract the start positions of all tokens, so that we can quickly map positions to tokens. self._token_offsets = [tok.startpos for tok in self._tokens] if self._tree: self.mark_tokens(self._tree) def mark_tokens(self, root_node): """ Given the root of the AST or Astroid tree produced from source_text, visits all nodes marking them with token and position information by adding ``.first_token`` and ``.last_token``attributes. This is done automatically in the constructor when ``parse`` or ``tree`` arguments are set, but may be used manually with a separate AST or Astroid tree. """ # The hard work of this class is done by MarkTokens MarkTokens(self).visit_tree(root_node) def _generate_tokens(self, text): """ Generates tokens for the given code. """ # This is technically an undocumented API for Python3, but allows us to use the same API as for # Python2. See http://stackoverflow.com/a/4952291/328565. 
for index, tok in enumerate(tokenize.generate_tokens(io.StringIO(text).readline)): tok_type, tok_str, start, end, line = tok yield Token(tok_type, tok_str, start, end, line, index, self._line_numbers.line_to_offset(start[0], start[1]), self._line_numbers.line_to_offset(end[0], end[1])) @property def text(self): """The source code passed into the constructor.""" return self._text @property def tokens(self): """The list of tokens corresponding to the source code from the constructor.""" return self._tokens @property def tree(self): """The root of the AST tree passed into the constructor or parsed from the source code.""" return self._tree @property def filename(self): """The filename that was parsed""" return self._filename def get_token_from_offset(self, offset): """ Returns the token containing the given character offset (0-based position in source text), or the preceeding token if the position is between tokens. """ return self._tokens[bisect.bisect(self._token_offsets, offset) - 1] def get_token(self, lineno, col_offset): """ Returns the token containing the given (lineno, col_offset) position, or the preceeding token if the position is between tokens. """ # TODO: add test for multibyte unicode. We need to translate offsets from ast module (which # are in utf8) to offsets into the unicode text. tokenize module seems to use unicode offsets # but isn't explicit. return self.get_token_from_offset(self._line_numbers.line_to_offset(lineno, col_offset)) def get_token_from_utf8(self, lineno, col_offset): """ Same as get_token(), but interprets col_offset as a UTF8 offset, which is what `ast` uses. """ return self.get_token(lineno, self._line_numbers.from_utf8_col(lineno, col_offset)) def next_token(self, tok, include_extra=False): """ Returns the next token after the given one. If include_extra is True, includes non-coding tokens from the tokenize module, such as NL and COMMENT. 
""" i = tok.index + 1 if not include_extra: while is_non_coding_token(self._tokens[i].type): i += 1 return self._tokens[i] def prev_token(self, tok, include_extra=False): """ Returns the previous token before the given one. If include_extra is True, includes non-coding tokens from the tokenize module, such as NL and COMMENT. """ i = tok.index - 1 if not include_extra: while is_non_coding_token(self._tokens[i].type): i -= 1 return self._tokens[i] def find_token(self, start_token, tok_type, tok_str=None, reverse=False): """ Looks for the first token, starting at start_token, that matches tok_type and, if given, the token string. Searches backwards if reverse is True. Returns ENDMARKER token if not found (you can check it with `token.ISEOF(t.type)`. """ t = start_token advance = self.prev_token if reverse else self.next_token while not match_token(t, tok_type, tok_str) and not token.ISEOF(t.type): t = advance(t, include_extra=True) return t def token_range(self, first_token, last_token, include_extra=False): """ Yields all tokens in order from first_token through and including last_token. If include_extra is True, includes non-coding tokens such as tokenize.NL and .COMMENT. """ for i in xrange(first_token.index, last_token.index + 1): if include_extra or not is_non_coding_token(self._tokens[i].type): yield self._tokens[i] def get_tokens(self, node, include_extra=False): """ Yields all tokens making up the given node. If include_extra is True, includes non-coding tokens such as tokenize.NL and .COMMENT. """ return self.token_range(node.first_token, node.last_token, include_extra=include_extra) def get_text_range(self, node): """ After mark_tokens() has been called, returns the (startpos, endpos) positions in source text corresponding to the given node. Returns (0, 0) for nodes (like `Load`) that don't correspond to any particular text. 
""" if not hasattr(node, 'first_token'): return (0, 0) start = node.first_token.startpos if any(match_token(t, token.NEWLINE) for t in self.get_tokens(node)): # Multi-line nodes would be invalid unless we keep the indentation of the first node. start = self._text.rfind('\n', 0, start) + 1 return (start, node.last_token.endpos) def get_text(self, node): """ After mark_tokens() has been called, returns the text corresponding to the given node. Returns '' for nodes (like `Load`) that don't correspond to any particular text. """ start, end = self.get_text_range(node) return self._text[start : end]<|fim▁end|>
ASTTokens maintains the text of Python code in several forms: as a string, as line numbers, and
<|file_name|>JSAtomic3D.cpp<|end_file_name|><|fim▁begin|>// // Copyright (c) 2014-2015, THUNDERBEAST GAMES LLC All rights reserved // // Permission is hereby granted, free of charge, to any person obtaining a copy // of this software and associated documentation files (the "Software"), to deal // in the Software without restriction, including without limitation the rights // to use, copy, modify, merge, publish, distribute, sublicense, and/or sell // copies of the Software, and to permit persons to whom the Software is // furnished to do so, subject to the following conditions: // // The above copyright notice and this permission notice shall be included in // all copies or substantial portions of the Software. // // THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR // IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, // FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE // AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER // LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, // OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN // THE SOFTWARE. 
// #include <Atomic/Atomic3D/StaticModel.h> #include <Atomic/Atomic3D/CustomGeometry.h> #include <Atomic/Atomic3D/BillboardSet.h> #include "JSAtomic3D.h" namespace Atomic { static int StaticModel_SetMaterialIndex(duk_context* ctx) { unsigned index = (unsigned) duk_require_number(ctx, 0); Material* material = js_to_class_instance<Material>(ctx, 1, 0); duk_push_this(ctx); // event receiver StaticModel* model = js_to_class_instance<StaticModel>(ctx, -1, 0); model->SetMaterial(index, material); return 0; } static int CustomGeometry_SetMaterialIndex(duk_context* ctx) { unsigned index = (unsigned)duk_require_number(ctx, 0); Material* material = js_to_class_instance<Material>(ctx, 1, 0); duk_push_this(ctx); // event receiver CustomGeometry* geometry = js_to_class_instance<CustomGeometry>(ctx, -1, 0); geometry->SetMaterial(index, material); return 0;<|fim▁hole|> duk_push_this(ctx); BillboardSet* billboardSet = js_to_class_instance<BillboardSet>(ctx, -1, 0); unsigned index = (unsigned)duk_to_number(ctx, 0); Billboard* billboard = billboardSet->GetBillboard(index); js_push_class_object_instance(ctx, billboard, "Billboard"); return 1; } void jsapi_init_atomic3d(JSVM* vm) { duk_context* ctx = vm->GetJSContext(); js_class_get_prototype(ctx, "Atomic", "StaticModel"); duk_push_c_function(ctx, StaticModel_SetMaterialIndex, 2); duk_put_prop_string(ctx, -2, "setMaterialIndex"); duk_pop(ctx); // pop AObject prototype js_class_get_prototype(ctx, "Atomic", "CustomGeometry"); duk_push_c_function(ctx, CustomGeometry_SetMaterialIndex, 2); duk_put_prop_string(ctx, -2, "setMaterialIndex"); duk_pop(ctx); // pop AObject prototype js_class_get_prototype(ctx, "Atomic", "BillboardSet"); duk_push_c_function(ctx, BillboardSet_GetBillboard, 1); duk_put_prop_string(ctx, -2, "getBillboard"); duk_pop(ctx); // pop AObject prototype } }<|fim▁end|>
} static int BillboardSet_GetBillboard(duk_context* ctx) {
<|file_name|>queue.py<|end_file_name|><|fim▁begin|>""" @author: Deniz Altinbuken, Emin Gun Sirer @note: Queue proxy<|fim▁hole|>@copyright: See LICENSE """ from concoord.clientproxy import ClientProxy class Queue: def __init__(self, bootstrap, timeout=60, debug=False, token=None): self.proxy = ClientProxy(bootstrap, timeout, debug, token) def __concoordinit__(self): return self.proxy.invoke_command('__init__') def append(self, item): return self.proxy.invoke_command('append', item) def remove(self): return self.proxy.invoke_command('remove') def get_size(self): return self.proxy.invoke_command('get_size') def get_queue(self): return self.proxy.invoke_command('get_queue') def __str__(self): return self.proxy.invoke_command('__str__')<|fim▁end|>
<|file_name|>initializers_v2.py<|end_file_name|><|fim▁begin|># Copyright 2020 The TensorFlow Authors. All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # ============================================================================== """Keras initializers for TF 2. """ # pylint: disable=g-classes-have-attributes from __future__ import absolute_import from __future__ import division from __future__ import print_function import math from tensorflow.python.framework import constant_op from tensorflow.python.framework import dtypes from tensorflow.python.keras import backend from tensorflow.python.ops import array_ops from tensorflow.python.ops import gen_linalg_ops from tensorflow.python.ops import linalg_ops from tensorflow.python.ops import math_ops from tensorflow.python.ops import random_ops from tensorflow.python.ops import stateless_random_ops from tensorflow.python.util.tf_export import keras_export _PARTITION_SHAPE = 'partition_shape' _PARTITION_OFFSET = 'partition_offset' @keras_export('keras.initializers.Initializer') class Initializer(object): """Initializer base class: all Keras initializers inherit from this class. Initializers should implement a `__call__` method with the following signature: ```python def __call__(self, shape, dtype=None, **kwargs): # returns a tensor of shape `shape` and dtype `dtype` # containing values drawn from a distribution of your choice. 
``` Optionally, you an also implement the method `get_config` and the class method `from_config` in order to support serialization -- just like with any Keras object. Here's a simple example: a random normal initializer. ```python import tensorflow as tf class ExampleRandomNormal(tf.keras.initializers.Initializer): def __init__(self, mean, stddev): self.mean = mean self.stddev = stddev def __call__(self, shape, dtype=None, **kwargs): return tf.random.normal( shape, mean=self.mean, stddev=self.stddev, dtype=dtype) def get_config(self): # To support serialization return {"mean": self.mean, "stddev": self.stddev} ``` Note that we don't have to implement `from_config` in the example above since the constructor arguments of the class the keys in the config returned by `get_config` are the same. In this case, the default `from_config` works fine. """ def __call__(self, shape, dtype=None, **kwargs): """Returns a tensor object initialized as specified by the initializer. Args: shape: Shape of the tensor. dtype: Optional dtype of the tensor. **kwargs: Additional keyword arguments. """ raise NotImplementedError def get_config(self): """Returns the configuration of the initializer as a JSON-serializable dict. Returns: A JSON-serializable Python dict. """ return {} @classmethod def from_config(cls, config): """Instantiates an initializer from a configuration dictionary. Example: ```python initializer = RandomUniform(-1, 1) config = initializer.get_config() initializer = RandomUniform.from_config(config) ``` Args: config: A Python dictionary, the output of `get_config`. Returns: A `tf.keras.initializers.Initializer` instance. """ config.pop('dtype', None) return cls(**config) @keras_export('keras.initializers.Zeros', 'keras.initializers.zeros', v1=[]) class Zeros(Initializer): """Initializer that generates tensors initialized to 0. Also available via the shortcut function `tf.keras.initializers.zeros`. 
Examples: >>> # Standalone usage: >>> initializer = tf.keras.initializers.Zeros() >>> values = initializer(shape=(2, 2)) >>> # Usage in a Keras layer: >>> initializer = tf.keras.initializers.Zeros() >>> layer = tf.keras.layers.Dense(3, kernel_initializer=initializer) """ def __call__(self, shape, dtype=None, **kwargs): """Returns a tensor object initialized as specified by the initializer. Args: shape: Shape of the tensor. dtype: Optional dtype of the tensor. Only numeric or boolean dtypes are supported. If not specified, `tf.keras.backend.floatx()` is used, which default to `float32` unless you configured it otherwise (via `tf.keras.backend.set_floatx(float_dtype)`). **kwargs: Additional keyword arguments. """ _validate_kwargs(self.__class__.__name__, kwargs) dtype = _get_dtype(dtype) if not dtype.is_numpy_compatible or dtype == dtypes.string: raise ValueError('Expected numeric or boolean dtype, got %s.' % dtype) if _PARTITION_SHAPE in kwargs: shape = kwargs[_PARTITION_SHAPE] return array_ops.zeros(shape, dtype) @keras_export('keras.initializers.Ones', 'keras.initializers.ones', v1=[]) class Ones(Initializer): """Initializer that generates tensors initialized to 1. Also available via the shortcut function `tf.keras.initializers.ones`. Examples: >>> # Standalone usage: >>> initializer = tf.keras.initializers.Ones() >>> values = initializer(shape=(2, 2)) >>> # Usage in a Keras layer: >>> initializer = tf.keras.initializers.Ones() >>> layer = tf.keras.layers.Dense(3, kernel_initializer=initializer) """ def __call__(self, shape, dtype=None, **kwargs): """Returns a tensor object initialized as specified by the initializer. Args: shape: Shape of the tensor. dtype: Optional dtype of the tensor. Only numeric or boolean dtypes are supported. If not specified, `tf.keras.backend.floatx()` is used, which default to `float32` unless you configured it otherwise (via `tf.keras.backend.set_floatx(float_dtype)`). **kwargs: Additional keyword arguments. 
""" _validate_kwargs(self.__class__.__name__, kwargs) dtype = _get_dtype(dtype) if not dtype.is_numpy_compatible or dtype == dtypes.string: raise ValueError('Expected numeric or boolean dtype, got %s.' % dtype) if _PARTITION_SHAPE in kwargs: shape = kwargs[_PARTITION_SHAPE] return array_ops.ones(shape, dtype) @keras_export('keras.initializers.Constant', 'keras.initializers.constant', v1=[]) class Constant(Initializer): """Initializer that generates tensors with constant values. Also available via the shortcut function `tf.keras.initializers.constant`. Only scalar values are allowed. The constant value provided must be convertible to the dtype requested when calling the initializer. Examples: >>> # Standalone usage: >>> initializer = tf.keras.initializers.Constant(3.) >>> values = initializer(shape=(2, 2)) >>> # Usage in a Keras layer: >>> initializer = tf.keras.initializers.Constant(3.) >>> layer = tf.keras.layers.Dense(3, kernel_initializer=initializer) Args: value: A Python scalar. """ def __init__(self, value=0): self.value = value def __call__(self, shape, dtype=None, **kwargs): """Returns a tensor object initialized to `self.value`. Args: shape: Shape of the tensor. dtype: Optional dtype of the tensor. If not specified, `tf.keras.backend.floatx()` is used, which default to `float32` unless you configured it otherwise (via `tf.keras.backend.set_floatx(float_dtype)`). **kwargs: Additional keyword arguments. """ del kwargs return constant_op.constant( self.value, dtype=_get_dtype(dtype), shape=shape) def get_config(self): return {'value': self.value} @keras_export('keras.initializers.RandomUniform', 'keras.initializers.random_uniform', v1=[]) class RandomUniform(Initializer): """Initializer that generates tensors with a uniform distribution. Also available via the shortcut function `tf.keras.initializers.random_uniform`. Examples: >>> # Standalone usage: >>> initializer = tf.keras.initializers.RandomUniform(minval=0., maxval=1.) 
>>> values = initializer(shape=(2, 2)) >>> # Usage in a Keras layer: >>> initializer = tf.keras.initializers.RandomUniform(minval=0., maxval=1.) >>> layer = tf.keras.layers.Dense(3, kernel_initializer=initializer) Args: minval: A python scalar or a scalar tensor. Lower bound of the range of random values to generate (inclusive). maxval: A python scalar or a scalar tensor. Upper bound of the range of random values to generate (exclusive). seed: A Python integer. An initializer created with a given seed will always produce the same random tensor for a given shape and dtype. """ def __init__(self, minval=-0.05, maxval=0.05, seed=None): self.minval = minval self.maxval = maxval self.seed = seed self._random_generator = _RandomGenerator(seed) def __call__(self, shape, dtype=None, **kwargs): """Returns a tensor object initialized as specified by the initializer. Args: shape: Shape of the tensor. dtype: Optional dtype of the tensor. Only floating point and integer types are supported. If not specified, `tf.keras.backend.floatx()` is used, which default to `float32` unless you configured it otherwise (via `tf.keras.backend.set_floatx(float_dtype)`). **kwargs: Additional keyword arguments. """ _validate_kwargs(self.__class__.__name__, kwargs) dtype = _get_dtype(dtype) if not dtype.is_floating and not dtype.is_integer: raise ValueError('Expected float or integer dtype, got %s.' % dtype) if _PARTITION_SHAPE in kwargs: shape = kwargs[_PARTITION_SHAPE] return self._random_generator.random_uniform(shape, self.minval, self.maxval, dtype) def get_config(self): return { 'minval': self.minval, 'maxval': self.maxval, 'seed': self.seed } @keras_export('keras.initializers.RandomNormal', 'keras.initializers.random_normal', v1=[]) class RandomNormal(Initializer): """Initializer that generates tensors with a normal distribution. Also available via the shortcut function `tf.keras.initializers.random_normal`. 
Examples: >>> # Standalone usage: >>> initializer = tf.keras.initializers.RandomNormal(mean=0., stddev=1.) >>> values = initializer(shape=(2, 2)) >>> # Usage in a Keras layer: >>> initializer = tf.keras.initializers.RandomNormal(mean=0., stddev=1.) >>> layer = tf.keras.layers.Dense(3, kernel_initializer=initializer) Args: mean: a python scalar or a scalar tensor. Mean of the random values to generate. stddev: a python scalar or a scalar tensor. Standard deviation of the random values to generate. seed: A Python integer. An initializer created with a given seed will always produce the same random tensor for a given shape and dtype. """ def __init__(self, mean=0.0, stddev=0.05, seed=None): self.mean = mean self.stddev = stddev self.seed = seed self._random_generator = _RandomGenerator(seed) def __call__(self, shape, dtype=None, **kwargs): """Returns a tensor object initialized to random normal values. Args: shape: Shape of the tensor. dtype: Optional dtype of the tensor. Only floating point types are supported. If not specified, `tf.keras.backend.floatx()` is used, which default to `float32` unless you configured it otherwise (via `tf.keras.backend.set_floatx(float_dtype)`) **kwargs: Additional keyword arguments. """ _validate_kwargs(self.__class__.__name__, kwargs) dtype = _assert_float_dtype(_get_dtype(dtype)) if _PARTITION_SHAPE in kwargs: shape = kwargs[_PARTITION_SHAPE] return self._random_generator.random_normal(shape, self.mean, self.stddev, dtype) def get_config(self): return { 'mean': self.mean, 'stddev': self.stddev, 'seed': self.seed } @keras_export('keras.initializers.TruncatedNormal', 'keras.initializers.truncated_normal', v1=[]) class TruncatedNormal(Initializer): """Initializer that generates a truncated normal distribution. Also available via the shortcut function `tf.keras.initializers.truncated_normal`. 
The values generated are similar to values from a `tf.keras.initializers.RandomNormal` initializer except that values more than two standard deviations from the mean are discarded and re-drawn. Examples: >>> # Standalone usage: >>> initializer = tf.keras.initializers.TruncatedNormal(mean=0., stddev=1.) >>> values = initializer(shape=(2, 2)) >>> # Usage in a Keras layer: >>> initializer = tf.keras.initializers.TruncatedNormal(mean=0., stddev=1.) >>> layer = tf.keras.layers.Dense(3, kernel_initializer=initializer) Args: mean: a python scalar or a scalar tensor. Mean of the random values to generate. stddev: a python scalar or a scalar tensor. Standard deviation of the random values to generate before truncation. seed: A Python integer. An initializer created with a given seed will always produce the same random tensor for a given shape and dtype. """ def __init__(self, mean=0.0, stddev=0.05, seed=None): self.mean = mean self.stddev = stddev self.seed = seed self._random_generator = _RandomGenerator(seed) def __call__(self, shape, dtype=None, **kwargs): """Returns a tensor object initialized to random normal values (truncated). Args: shape: Shape of the tensor. dtype: Optional dtype of the tensor. Only floating point types are supported. If not specified, `tf.keras.backend.floatx()` is used, which default to `float32` unless you configured it otherwise (via `tf.keras.backend.set_floatx(float_dtype)`) **kwargs: Additional keyword arguments. 
""" _validate_kwargs(self.__class__.__name__, kwargs) dtype = _assert_float_dtype(_get_dtype(dtype)) if _PARTITION_SHAPE in kwargs: shape = kwargs[_PARTITION_SHAPE] return self._random_generator.truncated_normal(shape, self.mean, self.stddev, dtype) def get_config(self): return { 'mean': self.mean, 'stddev': self.stddev, 'seed': self.seed } @keras_export('keras.initializers.VarianceScaling', 'keras.initializers.variance_scaling', v1=[]) class VarianceScaling(Initializer): """Initializer capable of adapting its scale to the shape of weights tensors. Also available via the shortcut function `tf.keras.initializers.variance_scaling`. With `distribution="truncated_normal" or "untruncated_normal"`, samples are drawn from a truncated/untruncated normal distribution with a mean of zero and a standard deviation (after truncation, if used) `stddev = sqrt(scale / n)`, where `n` is: - number of input units in the weight tensor, if `mode="fan_in"` - number of output units, if `mode="fan_out"` - average of the numbers of input and output units, if `mode="fan_avg"` With `distribution="uniform"`, samples are drawn from a uniform distribution within `[-limit, limit]`, where `limit = sqrt(3 * scale / n)`. Examples: >>> # Standalone usage: >>> initializer = tf.keras.initializers.VarianceScaling( ... scale=0.1, mode='fan_in', distribution='uniform') >>> values = initializer(shape=(2, 2)) >>> # Usage in a Keras layer: >>> initializer = tf.keras.initializers.VarianceScaling( ... scale=0.1, mode='fan_in', distribution='uniform') >>> layer = tf.keras.layers.Dense(3, kernel_initializer=initializer) Args: scale: Scaling factor (positive float). mode: One of "fan_in", "fan_out", "fan_avg". distribution: Random distribution to use. One of "truncated_normal", "untruncated_normal" and "uniform". seed: A Python integer. An initializer created with a given seed will always produce the same random tensor for a given shape and dtype. 
""" def __init__(self, scale=1.0, mode='fan_in', distribution='truncated_normal', seed=None): if scale <= 0.: raise ValueError('`scale` must be positive float.') if mode not in {'fan_in', 'fan_out', 'fan_avg'}: raise ValueError('Invalid `mode` argument:', mode) distribution = distribution.lower() # Compatibility with keras-team/keras. if distribution == 'normal': distribution = 'truncated_normal' if distribution not in {'uniform', 'truncated_normal', 'untruncated_normal'}: raise ValueError('Invalid `distribution` argument:', distribution) self.scale = scale self.mode = mode self.distribution = distribution self.seed = seed self._random_generator = _RandomGenerator(seed) def __call__(self, shape, dtype=None, **kwargs): """Returns a tensor object initialized as specified by the initializer. Args: shape: Shape of the tensor. dtype: Optional dtype of the tensor. Only floating point types are supported. If not specified, `tf.keras.backend.floatx()` is used, which default to `float32` unless you configured it otherwise (via `tf.keras.backend.set_floatx(float_dtype)`) **kwargs: Additional keyword arguments. """ _validate_kwargs(self.__class__.__name__, kwargs) dtype = _assert_float_dtype(_get_dtype(dtype)) scale = self.scale fan_in, fan_out = _compute_fans(shape) if _PARTITION_SHAPE in kwargs: shape = kwargs[_PARTITION_SHAPE] if self.mode == 'fan_in': scale /= max(1., fan_in) elif self.mode == 'fan_out': scale /= max(1., fan_out) else: scale /= max(1., (fan_in + fan_out) / 2.) if self.distribution == 'truncated_normal': # constant from scipy.stats.truncnorm.std(a=-2, b=2, loc=0., scale=1.) 
stddev = math.sqrt(scale) / .87962566103423978 return self._random_generator.truncated_normal(shape, 0.0, stddev, dtype) elif self.distribution == 'untruncated_normal': stddev = math.sqrt(scale) return self._random_generator.random_normal(shape, 0.0, stddev, dtype) else: limit = math.sqrt(3.0 * scale) return self._random_generator.random_uniform(shape, -limit, limit, dtype) def get_config(self): return { 'scale': self.scale, 'mode': self.mode, 'distribution': self.distribution, 'seed': self.seed } @keras_export('keras.initializers.Orthogonal', 'keras.initializers.orthogonal', v1=[]) class Orthogonal(Initializer): """Initializer that generates an orthogonal matrix. Also available via the shortcut function `tf.keras.initializers.orthogonal`. If the shape of the tensor to initialize is two-dimensional, it is initialized with an orthogonal matrix obtained from the QR decomposition of a matrix of random numbers drawn from a normal distribution. If the matrix has fewer rows than columns then the output will have orthogonal rows. Otherwise, the output will have orthogonal columns. If the shape of the tensor to initialize is more than two-dimensional, a matrix of shape `(shape[0] * ... * shape[n - 2], shape[n - 1])` is initialized, where `n` is the length of the shape vector. The matrix is subsequently reshaped to give a tensor of the desired shape. Examples: >>> # Standalone usage: >>> initializer = tf.keras.initializers.Orthogonal() >>> values = initializer(shape=(2, 2)) >>> # Usage in a Keras layer: >>> initializer = tf.keras.initializers.Orthogonal() >>> layer = tf.keras.layers.Dense(3, kernel_initializer=initializer) Args: gain: multiplicative factor to apply to the orthogonal matrix seed: A Python integer. An initializer created with a given seed will always produce the same random tensor for a given shape and dtype. 
References: [Saxe et al., 2014](https://openreview.net/forum?id=_wzZwKpTDF_9C) ([pdf](https://arxiv.org/pdf/1312.6120.pdf)) """ def __init__(self, gain=1.0, seed=None): self.gain = gain self.seed = seed self._random_generator = _RandomGenerator(seed) def __call__(self, shape, dtype=None, **kwargs): """Returns a tensor object initialized to an orthogonal matrix. Args: shape: Shape of the tensor. dtype: Optional dtype of the tensor. Only floating point types are supported. If not specified, `tf.keras.backend.floatx()` is used, which default to `float32` unless you configured it otherwise (via `tf.keras.backend.set_floatx(float_dtype)`) **kwargs: Additional keyword arguments. """ _validate_kwargs(self.__class__.__name__, kwargs, support_partition=False) dtype = _assert_float_dtype(_get_dtype(dtype)) # Check the shape if len(shape) < 2: raise ValueError('The tensor to initialize must be ' 'at least two-dimensional') # Flatten the input shape with the last dimension remaining # its original shape so it works for conv2d num_rows = 1 for dim in shape[:-1]: num_rows *= dim num_cols = shape[-1] flat_shape = (max(num_cols, num_rows), min(num_cols, num_rows)) # Generate a random matrix a = self._random_generator.random_normal(flat_shape, dtype=dtype) # Compute the qr factorization q, r = gen_linalg_ops.qr(a, full_matrices=False) # Make Q uniform d = array_ops.tensor_diag_part(r) q *= math_ops.sign(d) if num_rows < num_cols: q = array_ops.matrix_transpose(q) return self.gain * array_ops.reshape(q, shape) def get_config(self): return {'gain': self.gain, 'seed': self.seed} @keras_export('keras.initializers.Identity', 'keras.initializers.identity', v1=[]) class Identity(Initializer): """Initializer that generates the identity matrix. Also available via the shortcut function `tf.keras.initializers.identity`. Only usable for generating 2D matrices. 
Examples: >>> # Standalone usage: >>> initializer = tf.keras.initializers.Identity() >>> values = initializer(shape=(2, 2)) >>> # Usage in a Keras layer: >>> initializer = tf.keras.initializers.Identity() >>> layer = tf.keras.layers.Dense(3, kernel_initializer=initializer) Args: gain: Multiplicative factor to apply to the identity matrix. """ def __init__(self, gain=1.0): self.gain = gain def __call__(self, shape, dtype=None, **kwargs): """Returns a tensor object initialized to a 2D identity matrix. Args: shape: Shape of the tensor. It should have exactly rank 2. dtype: Optional dtype of the tensor. Only floating point types are supported. If not specified, `tf.keras.backend.floatx()` is used, which default to `float32` unless you configured it otherwise (via `tf.keras.backend.set_floatx(float_dtype)`) **kwargs: Additional keyword arguments. """ _validate_kwargs(self.__class__.__name__, kwargs, support_partition=False) dtype = _assert_float_dtype(_get_dtype(dtype)) if len(shape) != 2: raise ValueError( 'Identity matrix initializer can only be used for 2D matrices.') initializer = linalg_ops.eye(*shape, dtype=dtype) return self.gain * initializer def get_config(self): return {'gain': self.gain} @keras_export('keras.initializers.GlorotUniform', 'keras.initializers.glorot_uniform', v1=[]) class GlorotUniform(VarianceScaling): """The Glorot uniform initializer, also called Xavier uniform initializer. Also available via the shortcut function `tf.keras.initializers.glorot_uniform`. Draws samples from a uniform distribution within `[-limit, limit]`, where `limit = sqrt(6 / (fan_in + fan_out))` (`fan_in` is the number of input units in the weight tensor and `fan_out` is the number of output units). 
Examples: >>> # Standalone usage: >>> initializer = tf.keras.initializers.GlorotUniform() >>> values = initializer(shape=(2, 2)) >>> # Usage in a Keras layer: >>> initializer = tf.keras.initializers.GlorotUniform() >>> layer = tf.keras.layers.Dense(3, kernel_initializer=initializer) Args: seed: A Python integer. An initializer created with a given seed will always produce the same random tensor for a given shape and dtype. References: [Glorot et al., 2010](http://proceedings.mlr.press/v9/glorot10a.html) ([pdf](http://jmlr.org/proceedings/papers/v9/glorot10a/glorot10a.pdf)) """ def __init__(self, seed=None): super(GlorotUniform, self).__init__( scale=1.0, mode='fan_avg', distribution='uniform', seed=seed) def get_config(self): return {'seed': self.seed} @keras_export('keras.initializers.GlorotNormal', 'keras.initializers.glorot_normal', v1=[]) class GlorotNormal(VarianceScaling): """The Glorot normal initializer, also called Xavier normal initializer. Also available via the shortcut function `tf.keras.initializers.glorot_normal`. Draws samples from a truncated normal distribution centered on 0 with `stddev = sqrt(2 / (fan_in + fan_out))` where `fan_in` is the number of input units in the weight tensor and `fan_out` is the number of output units in the weight tensor. Examples: >>> # Standalone usage: >>> initializer = tf.keras.initializers.GlorotNormal() >>> values = initializer(shape=(2, 2)) >>> # Usage in a Keras layer: >>> initializer = tf.keras.initializers.GlorotNormal() >>> layer = tf.keras.layers.Dense(3, kernel_initializer=initializer) Args: seed: A Python integer. An initializer created with a given seed will always produce the same random tensor for a given shape and dtype. 
References: [Glorot et al., 2010](http://proceedings.mlr.press/v9/glorot10a.html) ([pdf](http://jmlr.org/proceedings/papers/v9/glorot10a/glorot10a.pdf)) """ def __init__(self, seed=None): super(GlorotNormal, self).__init__( scale=1.0, mode='fan_avg', distribution='truncated_normal', seed=seed) def get_config(self): return {'seed': self.seed} @keras_export('keras.initializers.LecunNormal', 'keras.initializers.lecun_normal', v1=[]) class LecunNormal(VarianceScaling): """Lecun normal initializer. Also available via the shortcut function `tf.keras.initializers.lecun_normal`. Initializers allow you to pre-specify an initialization strategy, encoded in the Initializer object, without knowing the shape and dtype of the variable being initialized. Draws samples from a truncated normal distribution centered on 0 with `stddev = sqrt(1 / fan_in)` where `fan_in` is the number of input units in the weight tensor. Examples: >>> # Standalone usage: >>> initializer = tf.keras.initializers.LecunNormal() >>> values = initializer(shape=(2, 2)) >>> # Usage in a Keras layer: >>> initializer = tf.keras.initializers.LecunNormal() >>> layer = tf.keras.layers.Dense(3, kernel_initializer=initializer) Args: seed: A Python integer. Used to seed the random generator. References: - Self-Normalizing Neural Networks, [Klambauer et al., 2017] (https://papers.nips.cc/paper/6698-self-normalizing-neural-networks) ([pdf] (https://papers.nips.cc/paper/6698-self-normalizing-neural-networks.pdf)) - Efficient Backprop, [Lecun et al., 1998](http://yann.lecun.com/exdb/publis/pdf/lecun-98b.pdf) """ def __init__(self, seed=None): super(LecunNormal, self).__init__( scale=1., mode='fan_in', distribution='truncated_normal', seed=seed) def get_config(self): return {'seed': self.seed} @keras_export('keras.initializers.LecunUniform', 'keras.initializers.lecun_uniform', v1=[]) class LecunUniform(VarianceScaling): """Lecun uniform initializer. 
Also available via the shortcut function `tf.keras.initializers.lecun_uniform`. Draws samples from a uniform distribution within `[-limit, limit]`, where `limit = sqrt(3 / fan_in)` (`fan_in` is the number of input units in the weight tensor). Examples: >>> # Standalone usage: >>> initializer = tf.keras.initializers.LecunUniform() >>> values = initializer(shape=(2, 2)) >>> # Usage in a Keras layer: >>> initializer = tf.keras.initializers.LecunUniform() >>> layer = tf.keras.layers.Dense(3, kernel_initializer=initializer) Args: seed: A Python integer. An initializer created with a given seed will always produce the same random tensor for a given shape and dtype. References: - Self-Normalizing Neural Networks, [Klambauer et al., 2017](https://papers.nips.cc/paper/6698-self-normalizing-neural-networks) # pylint: disable=line-too-long ([pdf](https://papers.nips.cc/paper/6698-self-normalizing-neural-networks.pdf)) - Efficient Backprop, [Lecun et al., 1998](http://yann.lecun.com/exdb/publis/pdf/lecun-98b.pdf) """ def __init__(self, seed=None): super(LecunUniform, self).__init__( scale=1., mode='fan_in', distribution='uniform', seed=seed) def get_config(self): return {'seed': self.seed} @keras_export('keras.initializers.HeNormal', 'keras.initializers.he_normal', v1=[]) class HeNormal(VarianceScaling): """He normal initializer. Also available via the shortcut function `tf.keras.initializers.he_normal`. It draws samples from a truncated normal distribution centered on 0 with `stddev = sqrt(2 / fan_in)` where `fan_in` is the number of input units in the weight tensor. Examples: >>> # Standalone usage: >>> initializer = tf.keras.initializers.HeNormal() >>> values = initializer(shape=(2, 2)) >>> # Usage in a Keras layer: >>> initializer = tf.keras.initializers.HeNormal() >>> layer = tf.keras.layers.Dense(3, kernel_initializer=initializer) Args: seed: A Python integer. An initializer created with a given seed will always produce the same random tensor for a given shape and dtype. 
References: [He et al., 2015](https://www.cv-foundation.org/openaccess/content_iccv_2015/html/He_Delving_Deep_into_ICCV_2015_paper.html) # pylint: disable=line-too-long ([pdf](https://www.cv-foundation.org/openaccess/content_iccv_2015/papers/He_Delving_Deep_into_ICCV_2015_paper.pdf)) """ def __init__(self, seed=None): super(HeNormal, self).__init__( scale=2., mode='fan_in', distribution='truncated_normal', seed=seed) def get_config(self): return {'seed': self.seed} @keras_export('keras.initializers.HeUniform', 'keras.initializers.he_uniform', v1=[]) class HeUniform(VarianceScaling): """He uniform variance scaling initializer. Also available via the shortcut function `tf.keras.initializers.he_uniform`. Draws samples from a uniform distribution within `[-limit, limit]`, where `limit = sqrt(6 / fan_in)` (`fan_in` is the number of input units in the weight tensor). Examples: >>> # Standalone usage: >>> initializer = tf.keras.initializers.HeUniform() >>> values = initializer(shape=(2, 2)) >>> # Usage in a Keras layer: >>> initializer = tf.keras.initializers.HeUniform() >>> layer = tf.keras.layers.Dense(3, kernel_initializer=initializer) Args: seed: A Python integer. An initializer created with a given seed will always produce the same random tensor for a given shape and dtype. References: [He et al., 2015](https://www.cv-foundation.org/openaccess/content_iccv_2015/html/He_Delving_Deep_into_ICCV_2015_paper.html) # pylint: disable=line-too-long ([pdf](https://www.cv-foundation.org/openaccess/content_iccv_2015/papers/He_Delving_Deep_into_ICCV_2015_paper.pdf)) """ def __init__(self, seed=None): super(HeUniform, self).__init__( scale=2., mode='fan_in', distribution='uniform', seed=seed) def get_config(self): return {'seed': self.seed} def _get_dtype(dtype): if dtype is None: dtype = backend.floatx() return dtypes.as_dtype(dtype) def _assert_float_dtype(dtype): """Validate and return floating point type based on `dtype`. `dtype` must be a floating point type. 
Args: dtype: The data type to validate. Returns: Validated type. Raises: ValueError: if `dtype` is not a floating point type. """ dtype = dtypes.as_dtype(dtype) if not dtype.is_floating: raise ValueError('Expected floating point type, got %s.' % dtype) return dtype class _RandomGenerator(object): """Random generator that selects appropriate random ops.""" def __init__(self, seed=None): super(_RandomGenerator, self).__init__() if seed is not None: # Stateless random ops requires 2-int seed.<|fim▁hole|> def random_normal(self, shape, mean=0.0, stddev=1, dtype=dtypes.float32): """A deterministic random normal if seed is passed.""" if self.seed: op = stateless_random_ops.stateless_random_normal else: op = random_ops.random_normal return op( shape=shape, mean=mean, stddev=stddev, dtype=dtype, seed=self.seed) def random_uniform(self, shape, minval, maxval, dtype): """A deterministic random uniform if seed is passed.""" if self.seed: op = stateless_random_ops.stateless_random_uniform else: op = random_ops.random_uniform return op( shape=shape, minval=minval, maxval=maxval, dtype=dtype, seed=self.seed) def truncated_normal(self, shape, mean, stddev, dtype): """A deterministic truncated normal if seed is passed.""" if self.seed: op = stateless_random_ops.stateless_truncated_normal else: op = random_ops.truncated_normal return op( shape=shape, mean=mean, stddev=stddev, dtype=dtype, seed=self.seed) def _compute_fans(shape): """Computes the number of input and output units for a weight shape. Args: shape: Integer shape tuple or TF tensor shape. Returns: A tuple of integer scalars (fan_in, fan_out). """ if len(shape) < 1: # Just to avoid errors for constants. fan_in = fan_out = 1 elif len(shape) == 1: fan_in = fan_out = shape[0] elif len(shape) == 2: fan_in = shape[0] fan_out = shape[1] else: # Assuming convolution kernels (2D, 3D, or more). 
# kernel shape: (..., input_depth, depth) receptive_field_size = 1 for dim in shape[:-2]: receptive_field_size *= dim fan_in = shape[-2] * receptive_field_size fan_out = shape[-1] * receptive_field_size return int(fan_in), int(fan_out) def _validate_kwargs(cls_name, kwargs, support_partition=True): for kwarg in kwargs: if kwarg not in [_PARTITION_SHAPE, _PARTITION_OFFSET]: raise TypeError('Unknown keyword arguments: %s' % kwarg) elif not support_partition: raise ValueError('%s initializer doesn\'t support partition-related ' 'arguments' % cls_name)<|fim▁end|>
self.seed = [seed, 0] else: self.seed = None
<|file_name|>field.py<|end_file_name|><|fim▁begin|># encoding: utf-8 from mongoengine.fields import BaseField from marrow.package.canonical import name from marrow.package.loader import load class PythonReferenceField(BaseField): """A field that transforms a callable into a string reference using marrow.package on assignment, then back to the callable when accessing.""" def to_python(self, value): if callable(value): return value return load(value) def to_mongo(self, value): return name(value) def validate(self, value, clean=True): if not callable(value): self.error('Only callables may be used in a %s' % self.__class__.__name__)<|fim▁hole|> def prepare_query_value(self, op, value): if not callable(value): return value return name(value)<|fim▁end|>
<|file_name|>synapsecollection.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*- # # synapsecollection.py # # This file is part of NEST. # # Copyright (C) 2004 The NEST Initiative # # NEST is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation, either version 2 of the License, or # (at your option) any later version. # # NEST is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with NEST. If not, see <http://www.gnu.org/licenses/>. """ Example script to show some of the possibilities of the SynapseCollection class. We connect neurons, and get the SynapseCollection with a GetConnections call. To get a better understanding of the connections, we plot the weights between the source and targets. """ import nest import matplotlib.pyplot as plt import numpy as np def makeMatrix(sources, targets, weights): """ Returns a matrix with the weights between the source and target node_ids. """ aa = np.zeros((max(sources)+1, max(targets)+1)) for src, trg, wght in zip(sources, targets, weights): aa[src, trg] += wght return aa def plotMatrix(srcs, tgts, weights, title, pos): """ Plots weight matrix. """ plt.subplot(pos) plt.matshow(makeMatrix(srcs, tgts, weights), fignum=False) plt.xlim([min(tgts)-0.5, max(tgts)+0.5])<|fim▁hole|> plt.title(title) plt.colorbar(fraction=0.046, pad=0.04) """ Start with a simple, one_to_one example. We create the neurons, connect them, and get the connections. From this we can get the connected sources, targets, and weights. The corresponding matrix will be the identity matrix, as we have a one_to_one connection. 
""" nest.ResetKernel() nrns = nest.Create('iaf_psc_alpha', 10) nest.Connect(nrns, nrns, 'one_to_one') conns = nest.GetConnections(nrns, nrns) # This returns a SynapseCollection # We can get desired information of the SynapseCollection with simple get() call. g = conns.get(['source', 'target', 'weight']) srcs = g['source'] tgts = g['target'] weights = g['weight'] # Plot the matrix consisting of the weights between the sources and targets plt.figure(figsize=(12, 10)) plotMatrix(srcs, tgts, weights, 'Uniform weight', 121) """ Add some weights to the connections, and plot the updated weight matrix. """ # We can set data of the connections with a simple set() call. w = [{'weight': x*1.0} for x in range(1, 11)] conns.set(w) weights = conns.weight plotMatrix(srcs, tgts, weights, 'Set weight', 122) """ We can also plot an all_to_all connection, with uniformly distributed weights, and different number of sources and targets. """ nest.ResetKernel() pre = nest.Create('iaf_psc_alpha', 10) post = nest.Create('iaf_psc_delta', 5) nest.Connect(pre, post, syn_spec={'weight': {'distribution': 'uniform', 'low': 0.5, 'high': 4.5}}) # Get a SynapseCollection with all connections conns = nest.GetConnections() srcs = conns.source tgts = conns.target weights = conns.weight plt.figure(figsize=(12, 10)) plotMatrix(srcs, tgts, weights, 'All to all connection', 111) """ Lastly, we'll do an exmple that is a bit more complex. We connect different neurons with different rules, synapse models and weight distributions, and get different SynapseCollections by calling GetConnections with different inputs. 
""" nest.ResetKernel() nrns = nest.Create('iaf_psc_alpha', 15) nest.Connect(nrns[:5], nrns[:5], 'one_to_one', {'synapse_model': 'stdp_synapse', 'weight': {'distribution': 'normal', 'mu': 5.0, 'sigma': 2.0}}) nest.Connect(nrns[:10], nrns[5:12], {'rule': 'pairwise_bernoulli', 'p': 0.4}, {'weight': 4.0}) nest.Connect(nrns[5:10], nrns[:5], {'rule': 'fixed_total_number', 'N': 5}, {'weight': 3.0}) nest.Connect(nrns[10:], nrns[:12], 'all_to_all', {'synapse_model': 'stdp_synapse', 'weight': {'distribution': 'uniform', 'low': 1., 'high': 5.}}) nest.Connect(nrns, nrns[12:], {'rule': 'fixed_indegree', 'indegree': 3}) # First get a SynapseCollection consisting of all the connections conns = nest.GetConnections() srcs = conns.source tgts = conns.target weights = conns.weight plt.figure(figsize=(14, 12)) plotMatrix(list(srcs), list(tgts), weights, 'All connections', 221) # Get SynapseCollection consisting of a subset of connections conns = nest.GetConnections(nrns[:10], nrns[:10]) g = conns.get(['source', 'target', 'weight']) srcs = g['source'] tgts = g['target'] weights = g['weight'] plotMatrix(srcs, tgts, weights, 'Connections of the first ten neurons', 222) # Get SynapseCollection consisting of just the stdp_synapses conns = nest.GetConnections(synapse_model='stdp_synapse') g = conns.get(['source', 'target', 'weight']) srcs = g['source'] tgts = g['target'] weights = g['weight'] plotMatrix(srcs, tgts, weights, 'Connections with stdp_synapse', 223) # Get SynapseCollection consisting of the fixed_total_number connections, but set # weight before plotting conns = nest.GetConnections(nrns[5:10], nrns[:5]) w = [{'weight': x*1.0} for x in range(1, 6)] conns.set(w) g = conns.get(['source', 'target', 'weight']) srcs = g['source'] tgts = g['target'] weights = g['weight'] plotMatrix(srcs, tgts, weights, 'fixed_total_number, set weight', 224) plt.show()<|fim▁end|>
plt.xlabel('target') plt.ylim([max(srcs)+0.5, min(srcs)-0.5]) plt.ylabel('source')
<|file_name|>pailgun_service.py<|end_file_name|><|fim▁begin|># coding=utf-8 # Copyright 2015 Pants project contributors (see CONTRIBUTORS.md). # Licensed under the Apache License, Version 2.0 (see LICENSE). from __future__ import (absolute_import, division, generators, nested_scopes, print_function, unicode_literals, with_statement) import logging import select import sys import traceback from contextlib import contextmanager from pants.init.options_initializer import BuildConfigInitializer, OptionsInitializer from pants.option.options_bootstrapper import OptionsBootstrapper from pants.pantsd.pailgun_server import PailgunServer from pants.pantsd.service.pants_service import PantsService class PailgunService(PantsService): """A service that runs the Pailgun server.""" def __init__(self, bind_addr, exiter_class, runner_class, target_roots_calculator, scheduler_service): """ :param tuple bind_addr: The (hostname, port) tuple to bind the Pailgun server to. :param class exiter_class: The `Exiter` class to be used for Pailgun runs. :param class runner_class: The `PantsRunner` class to be used for Pailgun runs. :param class target_roots_calculator: The `TargetRootsCalculator` class to be used for target root parsing. :param SchedulerService scheduler_service: The SchedulerService instance for access to the<|fim▁hole|> self._bind_addr = bind_addr self._exiter_class = exiter_class self._runner_class = runner_class self._target_roots_calculator = target_roots_calculator self._scheduler_service = scheduler_service self._logger = logging.getLogger(__name__) self._pailgun = None @property def pailgun(self): if not self._pailgun: self._pailgun = self._setup_pailgun() return self._pailgun @property def pailgun_port(self): return self.pailgun.server_port def _setup_pailgun(self): """Sets up a PailgunServer instance.""" # Constructs and returns a runnable PantsRunner. 
def runner_factory(sock, arguments, environment): exiter = self._exiter_class(sock) graph_helper = None deferred_exc = None self._logger.debug('execution commandline: %s', arguments) options_bootstrapper = OptionsBootstrapper(args=arguments) build_config = BuildConfigInitializer.get(options_bootstrapper) options = OptionsInitializer.create(options_bootstrapper, build_config) graph_helper, target_roots = None, None try: self._logger.debug('warming the product graph via %s', self._scheduler_service) # N.B. This call is made in the pre-fork daemon context for reach and reuse of the # resident scheduler. graph_helper, target_roots = self._scheduler_service.warm_product_graph( options, self._target_roots_calculator ) except Exception: deferred_exc = sys.exc_info() self._logger.warning( 'encountered exception during SchedulerService.warm_product_graph(), deferring:\n%s', ''.join(traceback.format_exception(*deferred_exc)) ) return self._runner_class( sock, exiter, arguments, environment, target_roots, graph_helper, self.fork_lock, deferred_exc ) # Plumb the daemon's lifecycle lock to the `PailgunServer` to safeguard teardown. @contextmanager def lifecycle_lock(): with self.lifecycle_lock: yield return PailgunServer(self._bind_addr, runner_factory, lifecycle_lock) def run(self): """Main service entrypoint. Called via Thread.start() via PantsDaemon.run().""" self._logger.info('starting pailgun server on port {}'.format(self.pailgun_port)) try: # Manually call handle_request() in a loop vs serve_forever() for interruptability. while not self.is_killed: self.pailgun.handle_request() except select.error: # SocketServer can throw `error: (9, 'Bad file descriptor')` on teardown. Ignore it. self._logger.warning('pailgun service shutting down') def terminate(self): """Override of PantsService.terminate() that cleans up when the Pailgun server is terminated.""" # Tear down the Pailgun TCPServer. 
if self.pailgun: self.pailgun.server_close() super(PailgunService, self).terminate()<|fim▁end|>
resident scheduler. """ super(PailgunService, self).__init__()
<|file_name|>feature_gate.rs<|end_file_name|><|fim▁begin|>// Copyright 2013 The Rust Project Developers. See the COPYRIGHT // file at the top-level directory of this distribution and at // http://rust-lang.org/COPYRIGHT. // // Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or // http://www.apache.org/licenses/LICENSE-2.0> or the MIT license // <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your // option. This file may not be copied, modified, or distributed // except according to those terms. //! Feature gating //! //! This module implements the gating necessary for preventing certain compiler //! features from being used by default. This module will crawl a pre-expanded //! AST to ensure that there are no features which are used that are not //! enabled. //! //! Features are enabled in programs via the crate-level attributes of //! `#![feature(...)]` with a comma-separated list of features. //! //! For the purpose of future feature-tracking, once code for detection of feature //! gate usage is added, *do not remove it again* even once the feature //! becomes stable. use self::Status::*; use self::AttributeType::*; use abi::Abi; use ast::NodeId; use ast; use attr; use attr::AttrMetaMethods; use codemap::{CodeMap, Span}; use diagnostic::SpanHandler; use visit; use visit::Visitor; use parse::token::{self, InternedString}; use std::ascii::AsciiExt; // If you change this list without updating src/doc/reference.md, @cmr will be sad // Don't ever remove anything from this list; set them to 'Removed'. // The version numbers here correspond to the version in which the current status // was set. This is most important for knowing when a particular feature became // stable (active). // NB: The featureck.py script parses this information directly out of the source // so take care when modifying it. 
const KNOWN_FEATURES: &'static [(&'static str, &'static str, Status)] = &[ ("globs", "1.0.0", Accepted), ("macro_rules", "1.0.0", Accepted), ("struct_variant", "1.0.0", Accepted), ("asm", "1.0.0", Active), ("managed_boxes", "1.0.0", Removed), ("non_ascii_idents", "1.0.0", Active), ("thread_local", "1.0.0", Active), ("link_args", "1.0.0", Active), ("plugin_registrar", "1.0.0", Active), ("log_syntax", "1.0.0", Active), ("trace_macros", "1.0.0", Active), ("concat_idents", "1.0.0", Active), ("intrinsics", "1.0.0", Active), ("lang_items", "1.0.0", Active), ("simd", "1.0.0", Active), ("default_type_params", "1.0.0", Accepted), ("quote", "1.0.0", Active), ("link_llvm_intrinsics", "1.0.0", Active), ("linkage", "1.0.0", Active), ("struct_inherit", "1.0.0", Removed), ("quad_precision_float", "1.0.0", Removed), ("rustc_diagnostic_macros", "1.0.0", Active), ("unboxed_closures", "1.0.0", Active), ("reflect", "1.0.0", Active), ("import_shadowing", "1.0.0", Removed), ("advanced_slice_patterns", "1.0.0", Active), ("tuple_indexing", "1.0.0", Accepted), ("associated_types", "1.0.0", Accepted), ("visible_private_types", "1.0.0", Active), ("slicing_syntax", "1.0.0", Accepted), ("box_syntax", "1.0.0", Active), ("on_unimplemented", "1.0.0", Active), ("simd_ffi", "1.0.0", Active), ("allocator", "1.0.0", Active), ("if_let", "1.0.0", Accepted), ("while_let", "1.0.0", Accepted), ("plugin", "1.0.0", Active), ("start", "1.0.0", Active), ("main", "1.0.0", Active), ("fundamental", "1.0.0", Active), // A temporary feature gate used to enable parser extensions needed // to bootstrap fix for #5723. ("issue_5723_bootstrap", "1.0.0", Accepted), // A way to temporarily opt out of opt in copy. This will *never* be accepted. 
("opt_out_copy", "1.0.0", Removed), // OIBIT specific features ("optin_builtin_traits", "1.0.0", Active), // macro reexport needs more discussion and stabilization ("macro_reexport", "1.0.0", Active), // These are used to test this portion of the compiler, they don't actually // mean anything ("test_accepted_feature", "1.0.0", Accepted), ("test_removed_feature", "1.0.0", Removed), // Allows use of #[staged_api] ("staged_api", "1.0.0", Active), // Allows using items which are missing stability attributes ("unmarked_api", "1.0.0", Active), // Allows using #![no_std] ("no_std", "1.0.0", Active), // Allows using `box` in patterns; RFC 469 ("box_patterns", "1.0.0", Active), // Allows using the unsafe_no_drop_flag attribute (unlikely to // switch to Accepted; see RFC 320) ("unsafe_no_drop_flag", "1.0.0", Active), // Allows the use of custom attributes; RFC 572 ("custom_attribute", "1.0.0", Active), // Allows the use of #[derive(Anything)] as sugar for // #[derive_Anything]. ("custom_derive", "1.0.0", Active), // Allows the use of rustc_* attributes; RFC 572 ("rustc_attrs", "1.0.0", Active), // Allows the use of #[allow_internal_unstable]. This is an // attribute on macro_rules! and can't use the attribute handling // below (it has to be checked before expansion possibly makes // macros disappear). ("allow_internal_unstable", "1.0.0", Active), // #23121. Array patterns have some hazards yet. ("slice_patterns", "1.0.0", Active), // Allows use of unary negate on unsigned integers, e.g. -e for e: u8 ("negate_unsigned", "1.0.0", Active), // Allows the definition of associated constants in `trait` or `impl` // blocks. ("associated_consts", "1.0.0", Active), // Allows the definition of `const fn` functions. ("const_fn", "1.2.0", Active), ]; // (changing above list without updating src/doc/reference.md makes @cmr sad) enum Status { /// Represents an active feature that is currently being implemented or /// currently being considered for addition/removal. 
Active, /// Represents a feature which has since been removed (it was once Active) Removed, /// This language feature has since been Accepted (it was once Active) Accepted, } // Attributes that have a special meaning to rustc or rustdoc pub const KNOWN_ATTRIBUTES: &'static [(&'static str, AttributeType)] = &[ // Normal attributes ("warn", Normal), ("allow", Normal), ("forbid", Normal), ("deny", Normal), ("macro_reexport", Normal), ("macro_use", Normal), ("macro_export", Normal), ("plugin_registrar", Normal), ("cfg", Normal), ("cfg_attr", Normal), ("main", Normal), ("start", Normal), ("test", Normal), ("bench", Normal), ("simd", Normal), ("repr", Normal), ("path", Normal), ("abi", Normal), ("automatically_derived", Normal), ("no_mangle", Normal), ("no_link", Normal), ("derive", Normal), ("should_panic", Normal), ("ignore", Normal), ("no_implicit_prelude", Normal), ("reexport_test_harness_main", Normal), ("link_args", Normal), ("macro_escape", Normal), ("staged_api", Gated("staged_api", "staged_api is for use by rustc only")), ("plugin", Gated("plugin", "compiler plugins are experimental \ and possibly buggy")), ("no_std", Gated("no_std", "no_std is experimental")), ("lang", Gated("lang_items", "language items are subject to change")), ("linkage", Gated("linkage", "the `linkage` attribute is experimental \ and not portable across platforms")), ("thread_local", Gated("thread_local", "`#[thread_local]` is an experimental feature, and does not \ currently handle destructors. 
There is no corresponding \ `#[task_local]` mapping to the task model")), ("rustc_on_unimplemented", Gated("on_unimplemented", "the `#[rustc_on_unimplemented]` attribute \ is an experimental feature")), ("allocator", Gated("allocator", "the `#[allocator]` attribute is an experimental feature")), ("rustc_variance", Gated("rustc_attrs", "the `#[rustc_variance]` attribute \ is an experimental feature")), ("rustc_error", Gated("rustc_attrs", "the `#[rustc_error]` attribute \ is an experimental feature")), ("rustc_move_fragments", Gated("rustc_attrs", "the `#[rustc_move_fragments]` attribute \ is an experimental feature")), ("allow_internal_unstable", Gated("allow_internal_unstable", EXPLAIN_ALLOW_INTERNAL_UNSTABLE)), ("fundamental", Gated("fundamental", "the `#[fundamental]` attribute \ is an experimental feature")), // FIXME: #14408 whitelist docs since rustdoc looks at them ("doc", Whitelisted), // FIXME: #14406 these are processed in trans, which happens after the // lint pass ("cold", Whitelisted), ("export_name", Whitelisted), ("inline", Whitelisted), ("link", Whitelisted), ("link_name", Whitelisted), ("link_section", Whitelisted), ("no_builtins", Whitelisted), ("no_mangle", Whitelisted), ("no_stack_check", Whitelisted), ("no_debug", Whitelisted), ("omit_gdb_pretty_printer_section", Whitelisted), ("unsafe_no_drop_flag", Gated("unsafe_no_drop_flag", "unsafe_no_drop_flag has unstable semantics \ and may be removed in the future")), // used in resolve ("prelude_import", Whitelisted), // FIXME: #14407 these are only looked at on-demand so we can't // guarantee they'll have already been checked ("deprecated", Whitelisted), ("must_use", Whitelisted), ("stable", Whitelisted), ("unstable", Whitelisted), ("rustc_paren_sugar", Gated("unboxed_closures", "unboxed_closures are still evolving")), ("rustc_reflect_like", Gated("reflect", "defining reflective traits is still evolving")), // Crate level attributes ("crate_name", CrateLevel), ("crate_type", CrateLevel), ("crate_id", 
CrateLevel), ("feature", CrateLevel), ("no_start", CrateLevel), ("no_main", CrateLevel), ("no_builtins", CrateLevel), ("recursion_limit", CrateLevel), ]; #[derive(PartialEq, Copy, Clone, Debug)] pub enum AttributeType { /// Normal, builtin attribute that is consumed /// by the compiler before the unused_attribute check Normal, /// Builtin attribute that may not be consumed by the compiler /// before the unused_attribute check. These attributes /// will be ignored by the unused_attribute lint Whitelisted, /// Is gated by a given feature gate and reason /// These get whitelisted too Gated(&'static str, &'static str), /// Builtin attribute that is only allowed at the crate level CrateLevel, } /// A set of features to be used by later passes. pub struct Features { pub unboxed_closures: bool, pub rustc_diagnostic_macros: bool, pub visible_private_types: bool, pub allow_quote: bool, pub allow_asm: bool, pub allow_log_syntax: bool, pub allow_concat_idents: bool, pub allow_trace_macros: bool, pub allow_internal_unstable: bool, pub allow_custom_derive: bool, pub simd_ffi: bool, pub unmarked_api: bool, pub negate_unsigned: bool, /// spans of #![feature] attrs for stable language features. 
for error reporting pub declared_stable_lang_features: Vec<Span>, /// #![feature] attrs for non-language (library) features pub declared_lib_features: Vec<(InternedString, Span)>, pub const_fn: bool, } impl Features { pub fn new() -> Features { Features { unboxed_closures: false, rustc_diagnostic_macros: false, visible_private_types: false, allow_quote: false, allow_asm: false, allow_log_syntax: false, allow_concat_idents: false, allow_trace_macros: false, allow_internal_unstable: false, allow_custom_derive: false, simd_ffi: false, unmarked_api: false, negate_unsigned: false, declared_stable_lang_features: Vec::new(), declared_lib_features: Vec::new(), const_fn: false, } } } struct Context<'a> { features: Vec<&'static str>, span_handler: &'a SpanHandler, cm: &'a CodeMap, plugin_attributes: &'a [(String, AttributeType)], } impl<'a> Context<'a> { fn gate_feature(&self, feature: &str, span: Span, explain: &str) { let has_feature = self.has_feature(feature); debug!("gate_feature(feature = {:?}, span = {:?}); has? 
{}", feature, span, has_feature); if !has_feature { emit_feature_err(self.span_handler, feature, span, explain); } } fn has_feature(&self, feature: &str) -> bool { self.features.iter().any(|&n| n == feature) } fn check_attribute(&self, attr: &ast::Attribute, is_macro: bool) { debug!("check_attribute(attr = {:?})", attr); let name = &*attr.name(); for &(n, ty) in KNOWN_ATTRIBUTES { if n == name { if let Gated(gate, desc) = ty { self.gate_feature(gate, attr.span, desc); } debug!("check_attribute: {:?} is known, {:?}", name, ty); return; } } for &(ref n, ref ty) in self.plugin_attributes { if &*n == name { // Plugins can't gate attributes, so we don't check for it // unlike the code above; we only use this loop to // short-circuit to avoid the checks below debug!("check_attribute: {:?} is registered by a plugin, {:?}", name, ty); return; } } if name.starts_with("rustc_") { self.gate_feature("rustc_attrs", attr.span, "unless otherwise specified, attributes \ with the prefix `rustc_` \ are reserved for internal compiler diagnostics"); } else if name.starts_with("derive_") { self.gate_feature("custom_derive", attr.span, "attributes of the form `#[derive_*]` are reserved \ for the compiler"); } else { // Only run the custom attribute lint during regular // feature gate checking. 
Macro gating runs // before the plugin attributes are registered // so we skip this then if !is_macro { self.gate_feature("custom_attribute", attr.span, &format!("The attribute `{}` is currently \ unknown to the compiler and \ may have meaning \ added to it in the future", name)); } } } } pub fn emit_feature_err(diag: &SpanHandler, feature: &str, span: Span, explain: &str) { diag.span_err(span, explain); // #23973: do not suggest `#![feature(...)]` if we are in beta/stable if option_env!("CFG_DISABLE_UNSTABLE_FEATURES").is_some() { return; } diag.fileline_help(span, &format!("add #![feature({})] to the \ crate attributes to enable", feature)); } pub fn emit_feature_warn(diag: &SpanHandler, feature: &str, span: Span, explain: &str) { diag.span_warn(span, explain); // #23973: do not suggest `#![feature(...)]` if we are in beta/stable if option_env!("CFG_DISABLE_UNSTABLE_FEATURES").is_some() { return; } if diag.handler.can_emit_warnings { diag.fileline_help(span, &format!("add #![feature({})] to the \ crate attributes to silence this warning", feature)); } } pub const EXPLAIN_ASM: &'static str = "inline assembly is not stable enough for use and is subject to change"; pub const EXPLAIN_LOG_SYNTAX: &'static str = "`log_syntax!` is not stable enough for use and is subject to change"; pub const EXPLAIN_CONCAT_IDENTS: &'static str = "`concat_idents` is not stable enough for use and is subject to change"; pub const EXPLAIN_TRACE_MACROS: &'static str = "`trace_macros` is not stable enough for use and is subject to change"; pub const EXPLAIN_ALLOW_INTERNAL_UNSTABLE: &'static str = "allow_internal_unstable side-steps feature gating and stability checks"; pub const EXPLAIN_CUSTOM_DERIVE: &'static str = "`#[derive]` for custom traits is not stable enough for use and is subject to change"; struct MacroVisitor<'a> { context: &'a Context<'a> } impl<'a, 'v> Visitor<'v> for MacroVisitor<'a> { fn visit_mac(&mut self, mac: &ast::Mac) { let ast::MacInvocTT(ref path, _, _) = mac.node; 
let id = path.segments.last().unwrap().identifier; // Issue 22234: If you add a new case here, make sure to also // add code to catch the macro during or after expansion. // // We still keep this MacroVisitor (rather than *solely* // relying on catching cases during or after expansion) to // catch uses of these macros within conditionally-compiled // code, e.g. `#[cfg]`-guarded functions. if id == token::str_to_ident("asm") { self.context.gate_feature("asm", path.span, EXPLAIN_ASM); } else if id == token::str_to_ident("log_syntax") { self.context.gate_feature("log_syntax", path.span, EXPLAIN_LOG_SYNTAX); } else if id == token::str_to_ident("trace_macros") { self.context.gate_feature("trace_macros", path.span, EXPLAIN_TRACE_MACROS); } else if id == token::str_to_ident("concat_idents") { self.context.gate_feature("concat_idents", path.span, EXPLAIN_CONCAT_IDENTS); } } fn visit_attribute(&mut self, attr: &'v ast::Attribute) { self.context.check_attribute(attr, true); } } struct PostExpansionVisitor<'a> { context: &'a Context<'a> } impl<'a> PostExpansionVisitor<'a> { fn gate_feature(&self, feature: &str, span: Span, explain: &str) { if !self.context.cm.span_allows_unstable(span) { self.context.gate_feature(feature, span, explain) } } } impl<'a, 'v> Visitor<'v> for PostExpansionVisitor<'a> { fn visit_attribute(&mut self, attr: &ast::Attribute) { if !self.context.cm.span_allows_unstable(attr.span) { self.context.check_attribute(attr, false); } } fn visit_name(&mut self, sp: Span, name: ast::Name) { if !token::get_name(name).is_ascii() { self.gate_feature("non_ascii_idents", sp, "non-ascii idents are not fully supported."); } } fn visit_item(&mut self, i: &ast::Item) { match i.node { ast::ItemExternCrate(_) => { if attr::contains_name(&i.attrs[..], "macro_reexport") { self.gate_feature("macro_reexport", i.span, "macros reexports are experimental \ and possibly buggy"); } } ast::ItemForeignMod(ref foreign_module) => { if attr::contains_name(&i.attrs[..], "link_args") { 
self.gate_feature("link_args", i.span, "the `link_args` attribute is not portable \ across platforms, it is recommended to \ use `#[link(name = \"foo\")]` instead") } if foreign_module.abi == Abi::RustIntrinsic { self.gate_feature("intrinsics", i.span, "intrinsics are subject to change") } } ast::ItemFn(..) => { if attr::contains_name(&i.attrs[..], "plugin_registrar") { self.gate_feature("plugin_registrar", i.span, "compiler plugins are experimental and possibly buggy"); } if attr::contains_name(&i.attrs[..], "start") { self.gate_feature("start", i.span, "a #[start] function is an experimental \ feature whose signature may change \ over time"); } if attr::contains_name(&i.attrs[..], "main") { self.gate_feature("main", i.span, "declaration of a nonstandard #[main] \ function may change over time, for now \ a top-level `fn main()` is required"); } } ast::ItemStruct(..) => { if attr::contains_name(&i.attrs[..], "simd") { self.gate_feature("simd", i.span, "SIMD types are experimental and possibly buggy"); } } ast::ItemDefaultImpl(..) 
=> { self.gate_feature("optin_builtin_traits", i.span, "default trait implementations are experimental \ and possibly buggy"); } ast::ItemImpl(_, polarity, _, _, _, _) => { match polarity { ast::ImplPolarity::Negative => { self.gate_feature("optin_builtin_traits", i.span, "negative trait bounds are not yet fully implemented; \ use marker types for now"); }, _ => {} } } _ => {} } visit::walk_item(self, i); } fn visit_foreign_item(&mut self, i: &ast::ForeignItem) { let links_to_llvm = match attr::first_attr_value_str_by_name(&i.attrs, "link_name") { Some(val) => val.starts_with("llvm."), _ => false }; if links_to_llvm { self.gate_feature("link_llvm_intrinsics", i.span, "linking to LLVM intrinsics is experimental"); } visit::walk_foreign_item(self, i) } fn visit_expr(&mut self, e: &ast::Expr) {<|fim▁hole|> "box expression syntax is experimental; \ you can call `Box::new` instead."); } _ => {} } visit::walk_expr(self, e); } fn visit_pat(&mut self, pattern: &ast::Pat) { match pattern.node { ast::PatVec(_, Some(_), ref last) if !last.is_empty() => { self.gate_feature("advanced_slice_patterns", pattern.span, "multiple-element slice matches anywhere \ but at the end of a slice (e.g. \ `[0, ..xs, 0]`) are experimental") } ast::PatVec(..) => { self.gate_feature("slice_patterns", pattern.span, "slice pattern syntax is experimental"); } ast::PatBox(..) => { self.gate_feature("box_patterns", pattern.span, "box pattern syntax is experimental"); } _ => {} } visit::walk_pat(self, pattern) } fn visit_fn(&mut self, fn_kind: visit::FnKind<'v>, fn_decl: &'v ast::FnDecl, block: &'v ast::Block, span: Span, _node_id: NodeId) { // check for const fn declarations match fn_kind { visit::FkItemFn(_, _, _, ast::Constness::Const, _, _) => { self.gate_feature("const_fn", span, "const fn is unstable"); } _ => { // stability of const fn methods are covered in // visit_trait_item and visit_impl_item below; this is // because default methods don't pass through this // point. 
} } match fn_kind { visit::FkItemFn(_, _, _, _, abi, _) if abi == Abi::RustIntrinsic => { self.gate_feature("intrinsics", span, "intrinsics are subject to change") } visit::FkItemFn(_, _, _, _, abi, _) | visit::FkMethod(_, &ast::MethodSig { abi, .. }, _) if abi == Abi::RustCall => { self.gate_feature("unboxed_closures", span, "rust-call ABI is subject to change") } _ => {} } visit::walk_fn(self, fn_kind, fn_decl, block, span); } fn visit_trait_item(&mut self, ti: &'v ast::TraitItem) { match ti.node { ast::ConstTraitItem(..) => { self.gate_feature("associated_consts", ti.span, "associated constants are experimental") } ast::MethodTraitItem(ref sig, _) => { if sig.constness == ast::Constness::Const { self.gate_feature("const_fn", ti.span, "const fn is unstable"); } } _ => {} } visit::walk_trait_item(self, ti); } fn visit_impl_item(&mut self, ii: &'v ast::ImplItem) { match ii.node { ast::ConstImplItem(..) => { self.gate_feature("associated_consts", ii.span, "associated constants are experimental") } ast::MethodImplItem(ref sig, _) => { if sig.constness == ast::Constness::Const { self.gate_feature("const_fn", ii.span, "const fn is unstable"); } } _ => {} } visit::walk_impl_item(self, ii); } } fn check_crate_inner<F>(cm: &CodeMap, span_handler: &SpanHandler, krate: &ast::Crate, plugin_attributes: &[(String, AttributeType)], check: F) -> Features where F: FnOnce(&mut Context, &ast::Crate) { let mut cx = Context { features: Vec::new(), span_handler: span_handler, cm: cm, plugin_attributes: plugin_attributes, }; let mut accepted_features = Vec::new(); let mut unknown_features = Vec::new(); for attr in &krate.attrs { if !attr.check_name("feature") { continue } match attr.meta_item_list() { None => { span_handler.span_err(attr.span, "malformed feature attribute, \ expected #![feature(...)]"); } Some(list) => { for mi in list { let name = match mi.node { ast::MetaWord(ref word) => (*word).clone(), _ => { span_handler.span_err(mi.span, "malformed feature, expected just \ one 
word"); continue } }; match KNOWN_FEATURES.iter() .find(|& &(n, _, _)| name == n) { Some(&(name, _, Active)) => { cx.features.push(name); } Some(&(_, _, Removed)) => { span_handler.span_err(mi.span, "feature has been removed"); } Some(&(_, _, Accepted)) => { accepted_features.push(mi.span); } None => { unknown_features.push((name, mi.span)); } } } } } } check(&mut cx, krate); // FIXME (pnkfelix): Before adding the 99th entry below, change it // to a single-pass (instead of N calls to `.has_feature`). Features { unboxed_closures: cx.has_feature("unboxed_closures"), rustc_diagnostic_macros: cx.has_feature("rustc_diagnostic_macros"), visible_private_types: cx.has_feature("visible_private_types"), allow_quote: cx.has_feature("quote"), allow_asm: cx.has_feature("asm"), allow_log_syntax: cx.has_feature("log_syntax"), allow_concat_idents: cx.has_feature("concat_idents"), allow_trace_macros: cx.has_feature("trace_macros"), allow_internal_unstable: cx.has_feature("allow_internal_unstable"), allow_custom_derive: cx.has_feature("custom_derive"), simd_ffi: cx.has_feature("simd_ffi"), unmarked_api: cx.has_feature("unmarked_api"), negate_unsigned: cx.has_feature("negate_unsigned"), declared_stable_lang_features: accepted_features, declared_lib_features: unknown_features, const_fn: cx.has_feature("const_fn"), } } pub fn check_crate_macros(cm: &CodeMap, span_handler: &SpanHandler, krate: &ast::Crate) -> Features { check_crate_inner(cm, span_handler, krate, &[] as &'static [_], |ctx, krate| visit::walk_crate(&mut MacroVisitor { context: ctx }, krate)) } pub fn check_crate(cm: &CodeMap, span_handler: &SpanHandler, krate: &ast::Crate, plugin_attributes: &[(String, AttributeType)]) -> Features { check_crate_inner(cm, span_handler, krate, plugin_attributes, |ctx, krate| visit::walk_crate(&mut PostExpansionVisitor { context: ctx }, krate)) }<|fim▁end|>
match e.node { ast::ExprBox(..) | ast::ExprUnary(ast::UnOp::UnUniq, _) => { self.gate_feature("box_syntax", e.span,
<|file_name|>count_lines_fixed.py<|end_file_name|><|fim▁begin|>#! /usr/bin/python f = open("birds.txt", "r") data = f.read() f.close() <|fim▁hole|>for l in lines: if not l: # Can also do this: if len(l) == 0 lines.remove(l) print("Right: The number of lines is", len(lines))<|fim▁end|>
lines = data.split("\n") print("Wrong: The number of lines is", len(lines))
<|file_name|>navigation-redirect-body.py<|end_file_name|><|fim▁begin|>import os filename = os.path.basename(__file__)<|fim▁hole|> return 302, [('Location', './%s?redirect' % filename)], '' return [('Content-Type', 'text/plain')], request.request_path<|fim▁end|>
def main(request, response): if request.method == 'POST':
<|file_name|>wMotion.cc<|end_file_name|><|fim▁begin|>// // Created by anton on 21.03.15.<|fim▁hole|> #include "Matrix.h" #include <nan.h> v8::Persistent<FunctionTemplate> wMotion::constructor; int buffersSize; unsigned char *cur; float *recent; float *bg; int motionThreshold, presenceThreshold; float motionWeight, presenceWeight; void wMotion::Init(Handle<Object> target) { NanScope(); //Class Local<FunctionTemplate> ctor = NanNew<FunctionTemplate>(wMotion::New); NanAssignPersistent(constructor, ctor); ctor->InstanceTemplate()->SetInternalFieldCount(1); ctor->SetClassName(NanNew("wMotion")); // Prototype NODE_SET_PROTOTYPE_METHOD(ctor, "process", Process); target->Set(NanNew("wMotion"), ctor->GetFunction()); } NAN_METHOD(wMotion::New) { NanScope(); NanReturnValue(args.Holder()); } wMotion::wMotion(): ObjectWrap() { if( cur ) delete[] cur; if( bg ) delete[] bg; if( recent ) delete[] recent; buffersSize = 0; cur = NULL; bg = recent = NULL; motionThreshold = 8; // min 0 max 255 presenceThreshold = 8; // min 0 max 255 motionWeight = 0.1; // min 0 max 2 presenceWeight = 0.0001; // min 0 max 2 } void quarterScale( unsigned char *to, unsigned char *from, int w, int h ) { for( int y=0; y<h-1; y+=2 ) { int yw = (y*w); for( int x=0; x<w-1; x+=2 ) { to[ (yw/4) + (x/2) ] = ( from[ yw + x ] +from[ yw + x + 1 ] +from[ yw + w + x ] +from[ yw + w + x + 1 ] ) / 4; } } } NAN_METHOD(wMotion::Reset) { NanScope(); buffersSize = -1; NanReturnNull(); } NAN_METHOD(wMotion::Process) { NanScope(); Matrix *src = ObjectWrap::Unwrap<Matrix>(args[0]->ToObject()); cv::Mat yuv; cv::cvtColor(src->mat, yuv, CV_RGB2YCrCb); motionThreshold = args[1]->IntegerValue(); presenceThreshold = args[2]->IntegerValue(); motionWeight = (float)args[3]->NumberValue(); presenceWeight = (float)args[4]->NumberValue(); int sz = yuv.cols*yuv.rows; int sz4 = ( (yuv.cols/2)*(yuv.rows/2)); unsigned char *Y = yuv.data; unsigned char *U = Y + sz; unsigned char *V = U + sz4; if( buffersSize != sz4 ) { if( cur ) delete[] cur; 
if( recent ) delete[] recent; if( bg ) delete[] bg; buffersSize = sz4; cur = new unsigned char[sz4]; recent = new float[sz4]; bg = new float[sz4]; quarterScale( cur, Y, yuv.cols, yuv.rows ); for( int i=0; i<sz4; i++ ) recent[i]=bg[i]=cur[i]; } else { quarterScale( cur, Y, yuv.cols, yuv.rows ); } unsigned char mthresh = motionThreshold; unsigned char pthresh = presenceThreshold; unsigned char *P = U; unsigned char *M = V; float pw = presenceWeight; float pwn = 1.-pw; float mw = motionWeight; float mwn = 1.-mw; for( int i=0; i<sz4; i++ ) { // 0-255, threshold //M[i] = abs( late[i]-cur[i] )>mthresh?255:0; //P[i] = abs( bg[i]-cur[i] )>pthresh?255:0; // good looking M[i] = abs( recent[i]-cur[i] )>mthresh?64:128; P[i] = abs( bg[i]-cur[i] )>pthresh?64:128; // "real" //M[i] = 128+(late[i]-cur[i]); //P[i] = 128+(bg[i]-cur[i]); bg[i] *= pwn; bg[i] += cur[i]*pw; recent[i] *= mwn; recent[i] += cur[i]*mw; } v8::Local<v8::Array> arr = NanNew<Array>(2); v8::Handle<v8::Object> currentArray = NanNew<v8::Object>(); currentArray->SetIndexedPropertiesToExternalArrayData(&cur, v8::kExternalUnsignedByteArray, sz4); v8::Handle<v8::Object> recentArray = NanNew<v8::Object>(); recentArray->SetIndexedPropertiesToExternalArrayData(&cur, v8::kExternalUnsignedByteArray, sz4); arr->Set(0, currentArray); arr->Set(1, recentArray); NanReturnValue(arr); }<|fim▁end|>
// #include "wMotion.h"
<|file_name|>platformOverrides.js<|end_file_name|><|fim▁begin|><|fim▁hole|> scriptElem.setAttribute('src', 'scripts/android2.3-jscompat.js'); if (document.body) { document.body.appendChild(scriptElem); } else { document.head.appendChild(scriptElem); } }());<|fim▁end|>
(function () { // Append the bind() polyfill var scriptElem = document.createElement('script');
<|file_name|>step_create_disk.go<|end_file_name|><|fim▁begin|>package iso import ( "fmt"<|fim▁hole|> "github.com/mitchellh/packer/packer" "strconv" ) // This step creates the virtual disk that will be used as the // hard drive for the virtual machine. type stepCreateDisk struct{} func (s *stepCreateDisk) Run(state multistep.StateBag) multistep.StepAction { config := state.Get("config").(*config) driver := state.Get("driver").(parallelscommon.Driver) ui := state.Get("ui").(packer.Ui) vmName := state.Get("vmName").(string) command := []string{ "set", vmName, "--device-set", "hdd0", "--size", strconv.FormatUint(uint64(config.DiskSize), 10), "--iface", config.HardDriveInterface, } ui.Say("Creating hard drive...") err := driver.Prlctl(command...) if err != nil { err := fmt.Errorf("Error creating hard drive: %s", err) state.Put("error", err) ui.Error(err.Error()) return multistep.ActionHalt } return multistep.ActionContinue } func (s *stepCreateDisk) Cleanup(state multistep.StateBag) {}<|fim▁end|>
"github.com/mitchellh/multistep" parallelscommon "github.com/mitchellh/packer/builder/parallels/common"
<|file_name|>miscsettings_menu.cpp<|end_file_name|><|fim▁begin|>/* $port: miscsettings_menu.cpp,v 1.3 2010/12/05 22:32:12 tuxbox-cvs Exp $ miscsettings_menu implementation - Neutrino-GUI Copyright (C) 2010 T. Graf 'dbt' Homepage: http://www.dbox2-tuning.net/ License: GPL This program is free software; you can redistribute it and/or modify it under the terms of the GNU General Public License as published by the Free Software Foundation; either version 2 of the License, or (at your option) any later version. This program is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details. You should have received a copy of the GNU General Public License along with this program; if not, write to the Free Software Foundation, Inc., 675 Mass Ave, Cambridge, MA 02139, USA. */ #ifdef HAVE_CONFIG_H #include <config.h> #endif #include <global.h> #include <neutrino.h> #include <mymenu.h> #include <neutrino_menue.h> #include <system/setting_helpers.h> #include <system/helpers.h> #include <system/debug.h> #include <gui/miscsettings_menu.h> #include <gui/cec_setup.h> #include <gui/filebrowser.h> #include <gui/keybind_setup.h> #include <gui/plugins.h> #include <gui/sleeptimer.h> #include <gui/zapit_setup.h> #if HAVE_SPARK_HARDWARE || HAVE_DUCKBOX_HARDWARE #include <gui/kerneloptions.h> #endif #include <gui/widget/icons.h> #include <gui/widget/stringinput.h> #include <gui/widget/messagebox.h> #include <driver/screen_max.h> #include <driver/scanepg.h> #include <zapit/femanager.h> #include <eitd/sectionsd.h> #include <cs_api.h> #include <video.h> extern CPlugins * g_PluginList; extern cVideo *videoDecoder; CMiscMenue::CMiscMenue() { width = 40; epg_save = NULL; epg_save_standby = NULL; epg_save_frequently = NULL; epg_read = NULL; epg_dir = NULL; } CMiscMenue::~CMiscMenue() { } int CMiscMenue::exec(CMenuTarget* parent, const 
std::string &actionKey) { printf("init extended settings menu...\n"); if(parent != NULL) parent->hide(); if(actionKey == "epgdir") { const char *action_str = "epg"; if(chooserDir(g_settings.epg_dir, true, action_str)) CNeutrinoApp::getInstance()->SendSectionsdConfig(); return menu_return::RETURN_REPAINT; } else if(actionKey == "plugin_dir") { const char *action_str = "plugin"; if(chooserDir(g_settings.plugin_hdd_dir, false, action_str)) g_PluginList->loadPlugins(); return menu_return::RETURN_REPAINT; } else if(actionKey == "movieplayer_plugin") { CMenuWidget MoviePluginSelector(LOCALE_MOVIEPLAYER_DEFPLUGIN, NEUTRINO_ICON_FEATURES); MoviePluginSelector.addItem(GenericMenuSeparator); char id[5]; int cnt = 0; int enabled_count = 0; for(unsigned int count=0;count < (unsigned int) g_PluginList->getNumberOfPlugins();count++) { if (!g_PluginList->isHidden(count)) { sprintf(id, "%d", count); enabled_count++; MoviePluginSelector.addItem(new CMenuForwarder(g_PluginList->getName(count), true, NULL, new CMoviePluginChangeExec(), id, CRCInput::convertDigitToKey(count)), (cnt == 0)); cnt++; } } MoviePluginSelector.exec(NULL, ""); return menu_return::RETURN_REPAINT; } else if(actionKey == "info") { unsigned num = CEitManager::getInstance()->getEventsCount(); char str[128]; sprintf(str, "Event count: %d", num); ShowMsg(LOCALE_MESSAGEBOX_INFO, str, CMessageBox::mbrBack, CMessageBox::mbBack); return menu_return::RETURN_REPAINT; } else if(actionKey == "energy") { return showMiscSettingsMenuEnergy(); } else if(actionKey == "channellist") { return showMiscSettingsMenuChanlist(); } return showMiscSettingsMenu(); } #define MISCSETTINGS_FB_DESTINATION_OPTION_COUNT 3 const CMenuOptionChooser::keyval MISCSETTINGS_FB_DESTINATION_OPTIONS[MISCSETTINGS_FB_DESTINATION_OPTION_COUNT] = { { 0, LOCALE_OPTIONS_NULL }, { 1, LOCALE_OPTIONS_SERIAL }, { 2, LOCALE_OPTIONS_FB } }; #define MISCSETTINGS_FILESYSTEM_IS_UTF8_OPTION_COUNT 2 const CMenuOptionChooser::keyval 
MISCSETTINGS_FILESYSTEM_IS_UTF8_OPTIONS[MISCSETTINGS_FILESYSTEM_IS_UTF8_OPTION_COUNT] = { { 0, LOCALE_FILESYSTEM_IS_UTF8_OPTION_ISO8859_1 }, { 1, LOCALE_FILESYSTEM_IS_UTF8_OPTION_UTF8 } }; #define CHANNELLIST_NEW_ZAP_MODE_OPTION_COUNT 3 const CMenuOptionChooser::keyval CHANNELLIST_NEW_ZAP_MODE_OPTIONS[CHANNELLIST_NEW_ZAP_MODE_OPTION_COUNT] = { { 0, LOCALE_CHANNELLIST_NEW_ZAP_MODE_OFF }, { 1, LOCALE_CHANNELLIST_NEW_ZAP_MODE_ALLOW }, { 2, LOCALE_CHANNELLIST_NEW_ZAP_MODE_ACTIVE } }; #ifdef CPU_FREQ #if HAVE_SPARK_HARDWARE || HAVE_DUCKBOX_HARDWARE #define CPU_FREQ_OPTION_COUNT 6 const CMenuOptionChooser::keyval_ext CPU_FREQ_OPTIONS[CPU_FREQ_OPTION_COUNT] = { { 0, LOCALE_CPU_FREQ_DEFAULT, NULL }, { 450, NONEXISTANT_LOCALE, "450 Mhz"}, { 500, NONEXISTANT_LOCALE, "500 Mhz"}, { 550, NONEXISTANT_LOCALE, "550 Mhz"}, { 600, NONEXISTANT_LOCALE, "600 Mhz"}, { 650, NONEXISTANT_LOCALE, "650 Mhz"} }; #define CPU_FREQ_OPTION_STANDBY_COUNT 11 const CMenuOptionChooser::keyval_ext CPU_FREQ_OPTIONS_STANDBY[CPU_FREQ_OPTION_STANDBY_COUNT] = { { 0, LOCALE_CPU_FREQ_DEFAULT, NULL }, { 200, NONEXISTANT_LOCALE, "200 Mhz"}, { 250, NONEXISTANT_LOCALE, "250 Mhz"}, { 300, NONEXISTANT_LOCALE, "300 Mhz"}, { 350, NONEXISTANT_LOCALE, "350 Mhz"}, { 400, NONEXISTANT_LOCALE, "400 Mhz"}, { 450, NONEXISTANT_LOCALE, "450 Mhz"}, { 500, NONEXISTANT_LOCALE, "500 Mhz"}, { 550, NONEXISTANT_LOCALE, "550 Mhz"}, { 600, NONEXISTANT_LOCALE, "600 Mhz"}, { 650, NONEXISTANT_LOCALE, "650 Mhz"} }; #else #define CPU_FREQ_OPTION_COUNT 13 const CMenuOptionChooser::keyval_ext CPU_FREQ_OPTIONS[CPU_FREQ_OPTION_COUNT] = { { 0, LOCALE_CPU_FREQ_DEFAULT, NULL }, { 50, NONEXISTANT_LOCALE, "50 Mhz"}, { 100, NONEXISTANT_LOCALE, "100 Mhz"}, { 150, NONEXISTANT_LOCALE, "150 Mhz"}, { 200, NONEXISTANT_LOCALE, "200 Mhz"}, { 250, NONEXISTANT_LOCALE, "250 Mhz"}, { 300, NONEXISTANT_LOCALE, "300 Mhz"}, { 350, NONEXISTANT_LOCALE, "350 Mhz"}, { 400, NONEXISTANT_LOCALE, "400 Mhz"}, { 450, NONEXISTANT_LOCALE, "450 Mhz"}, { 500, NONEXISTANT_LOCALE, 
"500 Mhz"}, { 550, NONEXISTANT_LOCALE, "550 Mhz"}, { 600, NONEXISTANT_LOCALE, "600 Mhz"} }; #endif #endif /*CPU_FREQ*/ const CMenuOptionChooser::keyval EPG_SCAN_OPTIONS[] = { { CEpgScan::SCAN_CURRENT, LOCALE_MISCSETTINGS_EPG_SCAN_BQ }, { CEpgScan::SCAN_FAV, LOCALE_MISCSETTINGS_EPG_SCAN_FAV }, { CEpgScan::SCAN_SEL, LOCALE_MISCSETTINGS_EPG_SCAN_SEL } }; #define EPG_SCAN_OPTION_COUNT (sizeof(EPG_SCAN_OPTIONS)/sizeof(CMenuOptionChooser::keyval)) const CMenuOptionChooser::keyval EPG_SCAN_MODE_OPTIONS[] = { { CEpgScan::MODE_OFF, LOCALE_OPTIONS_OFF }, { CEpgScan::MODE_STANDBY, LOCALE_MISCSETTINGS_EPG_SCAN_STANDBY }, { CEpgScan::MODE_LIVE, LOCALE_MISCSETTINGS_EPG_SCAN_LIVE }, { CEpgScan::MODE_ALWAYS, LOCALE_MISCSETTINGS_EPG_SCAN_ALWAYS } }; #define EPG_SCAN_MODE_OPTION_COUNT (sizeof(EPG_SCAN_MODE_OPTIONS)/sizeof(CMenuOptionChooser::keyval)) #define SLEEPTIMER_MIN_OPTION_COUNT 7 const CMenuOptionChooser::keyval_ext SLEEPTIMER_MIN_OPTIONS[SLEEPTIMER_MIN_OPTION_COUNT] = { { 0, NONEXISTANT_LOCALE, "EPG" }, { 30, NONEXISTANT_LOCALE, "30 min" }, { 60, NONEXISTANT_LOCALE, "60 min" }, { 90, NONEXISTANT_LOCALE, "90 min" }, { 120, NONEXISTANT_LOCALE, "120 min" }, { 150, NONEXISTANT_LOCALE, "150 min" } }; //show misc settings menue int CMiscMenue::showMiscSettingsMenu() { //misc settings fanNotifier = new CFanControlNotifier(); sectionsdConfigNotifier = new CSectionsdConfigNotifier(); CMenuWidget misc_menue(LOCALE_MAINSETTINGS_HEAD, NEUTRINO_ICON_SETTINGS, width, MN_WIDGET_ID_MISCSETUP); misc_menue.addIntroItems(LOCALE_MISCSETTINGS_HEAD); //general CMenuWidget misc_menue_general(LOCALE_MISCSETTINGS_HEAD, NEUTRINO_ICON_SETTINGS, width, MN_WIDGET_ID_MISCSETUP_GENERAL); showMiscSettingsMenuGeneral(&misc_menue_general); CMenuForwarder * mf = new CMenuForwarder(LOCALE_MISCSETTINGS_GENERAL, true, NULL, &misc_menue_general, NULL, CRCInput::RC_red); mf->setHint("", LOCALE_MENU_HINT_MISC_GENERAL); misc_menue.addItem(mf); //energy, shutdown if (g_info.hw_caps->can_shutdown) { mf = new 
CMenuForwarder(LOCALE_MISCSETTINGS_ENERGY, true, NULL, this, "energy", CRCInput::RC_green); mf->setHint("", LOCALE_MENU_HINT_MISC_ENERGY); misc_menue.addItem(mf); } //epg CMenuWidget misc_menue_epg(LOCALE_MISCSETTINGS_HEAD, NEUTRINO_ICON_SETTINGS, width, MN_WIDGET_ID_MISCSETUP_EPG); showMiscSettingsMenuEpg(&misc_menue_epg); mf = new CMenuForwarder(LOCALE_MISCSETTINGS_EPG_HEAD, true, NULL, &misc_menue_epg, NULL, CRCInput::RC_yellow); mf->setHint("", LOCALE_MENU_HINT_MISC_EPG); misc_menue.addItem(mf); //filebrowser settings CMenuWidget misc_menue_fbrowser(LOCALE_MISCSETTINGS_HEAD, NEUTRINO_ICON_SETTINGS, width, MN_WIDGET_ID_MISCSETUP_FILEBROWSER); showMiscSettingsMenuFBrowser(&misc_menue_fbrowser); mf = new CMenuForwarder(LOCALE_FILEBROWSER_HEAD, true, NULL, &misc_menue_fbrowser, NULL, CRCInput::RC_blue); mf->setHint("", LOCALE_MENU_HINT_MISC_FILEBROWSER); misc_menue.addItem(mf); misc_menue.addItem(GenericMenuSeparatorLine); //cec settings CCECSetup cecsetup; if (g_info.hw_caps->can_cec) { mf = new CMenuForwarder(LOCALE_VIDEOMENU_HDMI_CEC, true, NULL, &cecsetup, NULL, CRCInput::RC_1); mf->setHint("", LOCALE_MENU_HINT_MISC_CEC); misc_menue.addItem(mf); } if (!g_info.hw_caps->can_shutdown) { /* we don't have the energy menu, but put the sleeptimer directly here */ mf = new CMenuDForwarder(LOCALE_MISCSETTINGS_SLEEPTIMER, true, NULL, new CSleepTimerWidget(true)); mf->setHint("", LOCALE_MENU_HINT_INACT_TIMER); misc_menue.addItem(mf); } //channellist mf = new CMenuForwarder(LOCALE_MISCSETTINGS_CHANNELLIST, true, NULL, this, "channellist", CRCInput::RC_2); mf->setHint("", LOCALE_MENU_HINT_MISC_CHANNELLIST); misc_menue.addItem(mf); //start channels CZapitSetup zapitsetup; mf = new CMenuForwarder(LOCALE_ZAPITSETUP_HEAD, true, NULL, &zapitsetup, NULL, CRCInput::RC_3); mf->setHint("", LOCALE_MENU_HINT_MISC_ZAPIT); misc_menue.addItem(mf); #ifdef CPU_FREQ //CPU CMenuWidget misc_menue_cpu(LOCALE_MAINSETTINGS_HEAD, NEUTRINO_ICON_SETTINGS, width); 
showMiscSettingsMenuCPUFreq(&misc_menue_cpu); mf = new CMenuForwarder(LOCALE_MISCSETTINGS_CPU, true, NULL, &misc_menue_cpu, NULL, CRCInput::RC_4); mf->setHint("", LOCALE_MENU_HINT_MISC_CPUFREQ); misc_menue.addItem(mf); #endif /*CPU_FREQ*/ #if HAVE_SPARK_HARDWARE || HAVE_DUCKBOX_HARDWARE // kerneloptions CKernelOptions kernelOptions; mf = new CMenuForwarder(LOCALE_KERNELOPTIONS_HEAD, true, NULL, &kernelOptions, NULL, CRCInput::RC_5); mf->setHint("", LOCALE_MENU_HINT_MISC_KERNELOPTIONS); misc_menue.addItem(mf); #endif int res = misc_menue.exec(NULL, ""); delete fanNotifier; delete sectionsdConfigNotifier; return res; } const CMenuOptionChooser::keyval DEBUG_MODE_OPTIONS[DEBUG_MODES] = { { DEBUG_NORMAL , LOCALE_DEBUG_LEVEL_1 }, { DEBUG_INFO , LOCALE_DEBUG_LEVEL_2 }, { DEBUG_DEBUG , LOCALE_DEBUG_LEVEL_3 } }; //general settings void CMiscMenue::showMiscSettingsMenuGeneral(CMenuWidget *ms_general) { ms_general->addIntroItems(LOCALE_MISCSETTINGS_GENERAL); //standby after boot CMenuOptionChooser * mc = new CMenuOptionChooser(LOCALE_EXTRA_START_TOSTANDBY, &g_settings.power_standby, OPTIONS_OFF0_ON1_OPTIONS, OPTIONS_OFF0_ON1_OPTION_COUNT, true); mc->setHint("", LOCALE_MENU_HINT_START_TOSTANDBY); ms_general->addItem(mc); mc = new CMenuOptionChooser(LOCALE_EXTRA_CACHE_TXT, (int *)&g_settings.cacheTXT, OPTIONS_OFF0_ON1_OPTIONS, OPTIONS_OFF0_ON1_OPTION_COUNT, true); mc->setHint("", LOCALE_MENU_HINT_CACHE_TXT); ms_general->addItem(mc); //fan speed if (g_info.has_fan) { #if defined (BOXMODEL_IPBOX9900) || defined (BOXMODEL_IPBOX99) CMenuOptionNumberChooser * mn = new CMenuOptionNumberChooser(LOCALE_FAN_SPEED, &g_settings.fan_speed, true, 0, 1, fanNotifier, CRCInput::RC_nokey, NULL, 0, 0, LOCALE_OPTIONS_OFF); #else CMenuOptionNumberChooser * mn = new CMenuOptionNumberChooser(LOCALE_FAN_SPEED, &g_settings.fan_speed, true, 1, 14, fanNotifier, CRCInput::RC_nokey, NULL, 0, 0, LOCALE_OPTIONS_OFF); #endif mn->setHint("", LOCALE_MENU_HINT_FAN_SPEED); ms_general->addItem(mn); } 
ms_general->addItem(GenericMenuSeparatorLine); CMenuForwarder * mf = new CMenuForwarder(LOCALE_PLUGINS_HDD_DIR, true, g_settings.plugin_hdd_dir, this, "plugin_dir"); mf->setHint("", LOCALE_MENU_HINT_PLUGINS_HDD_DIR); ms_general->addItem(mf); mf = new CMenuForwarder(LOCALE_MPKEY_PLUGIN, true, g_settings.movieplayer_plugin, this, "movieplayer_plugin"); mf->setHint("", LOCALE_MENU_HINT_MOVIEPLAYER_PLUGIN); ms_general->addItem(mf); //set debug level ms_general->addItem(new CMenuSeparator(CMenuSeparator::LINE | CMenuSeparator::STRING, LOCALE_DEBUG)); CMenuOptionChooser * md = new CMenuOptionChooser(LOCALE_DEBUG_LEVEL, &debug, DEBUG_MODE_OPTIONS, DEBUG_MODES, true); // mc->setHint("", LOCALE_MENU_HINT_START_TOSTANDBY); ms_general->addItem(md); } #define VIDEOMENU_HDMI_CEC_MODE_OPTION_COUNT 2 const CMenuOptionChooser::keyval VIDEOMENU_HDMI_CEC_MODE_OPTIONS[VIDEOMENU_HDMI_CEC_MODE_OPTION_COUNT] = { { VIDEO_HDMI_CEC_MODE_OFF , LOCALE_OPTIONS_OFF }, { VIDEO_HDMI_CEC_MODE_TUNER , LOCALE_OPTIONS_ON } }; //energy and shutdown settings int CMiscMenue::showMiscSettingsMenuEnergy() { CMenuWidget *ms_energy = new CMenuWidget(LOCALE_MISCSETTINGS_HEAD, NEUTRINO_ICON_SETTINGS, width, MN_WIDGET_ID_MISCSETUP_ENERGY); ms_energy->addIntroItems(LOCALE_MISCSETTINGS_ENERGY); CMenuOptionChooser *m1 = new CMenuOptionChooser(LOCALE_MISCSETTINGS_SHUTDOWN_REAL_RCDELAY, &g_settings.shutdown_real_rcdelay, OPTIONS_OFF0_ON1_OPTIONS, OPTIONS_OFF0_ON1_OPTION_COUNT, !g_settings.shutdown_real); m1->setHint("", LOCALE_MENU_HINT_SHUTDOWN_RCDELAY); std::string shutdown_count = to_string(g_settings.shutdown_count); if (shutdown_count.length() < 3) shutdown_count.insert(0, 3 - shutdown_count.length(), ' '); CStringInput * miscSettings_shutdown_count = new CStringInput(LOCALE_MISCSETTINGS_SHUTDOWN_COUNT, &shutdown_count, 3, LOCALE_MISCSETTINGS_SHUTDOWN_COUNT_HINT1, LOCALE_MISCSETTINGS_SHUTDOWN_COUNT_HINT2, "0123456789 "); CMenuForwarder *m2 = new CMenuDForwarder(LOCALE_MISCSETTINGS_SHUTDOWN_COUNT, 
!g_settings.shutdown_real, shutdown_count, miscSettings_shutdown_count); m2->setHint("", LOCALE_MENU_HINT_SHUTDOWN_COUNT); COnOffNotifier * miscNotifier = new COnOffNotifier(1); miscNotifier->addItem(m1); miscNotifier->addItem(m2); CMenuOptionChooser * mc = new CMenuOptionChooser(LOCALE_MISCSETTINGS_SHUTDOWN_REAL, &g_settings.shutdown_real, OPTIONS_OFF1_ON0_OPTIONS, OPTIONS_OFF1_ON0_OPTION_COUNT, true, miscNotifier); mc->setHint("", LOCALE_MENU_HINT_SHUTDOWN_REAL); ms_energy->addItem(mc); ms_energy->addItem(m1); ms_energy->addItem(m2); m2 = new CMenuDForwarder(LOCALE_MISCSETTINGS_SLEEPTIMER, true, NULL, new CSleepTimerWidget(true)); m2->setHint("", LOCALE_MENU_HINT_INACT_TIMER); ms_energy->addItem(m2); CMenuOptionChooser * m4 = new CMenuOptionChooser(LOCALE_MISCSETTINGS_SLEEPTIMER_MIN, &g_settings.sleeptimer_min, SLEEPTIMER_MIN_OPTIONS, SLEEPTIMER_MIN_OPTION_COUNT, true); m4->setHint("", LOCALE_MENU_HINT_SLEEPTIMER_MIN); ms_energy->addItem(m4); int res = ms_energy->exec(NULL, ""); g_settings.shutdown_count = atoi(shutdown_count.c_str()); delete ms_energy; delete miscNotifier; return res; } //EPG settings void CMiscMenue::showMiscSettingsMenuEpg(CMenuWidget *ms_epg) { ms_epg->addIntroItems(LOCALE_MISCSETTINGS_EPG_HEAD); ms_epg->addKey(CRCInput::RC_info, this, "info"); epg_save = new CMenuOptionChooser(LOCALE_MISCSETTINGS_EPG_SAVE, &g_settings.epg_save, OPTIONS_OFF0_ON1_OPTIONS, OPTIONS_OFF0_ON1_OPTION_COUNT, true, this); epg_save->setHint("", LOCALE_MENU_HINT_EPG_SAVE); epg_save_standby = new CMenuOptionChooser(LOCALE_MISCSETTINGS_EPG_SAVE_STANDBY, &g_settings.epg_save_standby, OPTIONS_OFF0_ON1_OPTIONS, OPTIONS_OFF0_ON1_OPTION_COUNT, g_settings.epg_save); epg_save_standby->setHint("", LOCALE_MENU_HINT_EPG_SAVE_STANDBY); epg_save_frequently = new CMenuOptionChooser(LOCALE_MISCSETTINGS_EPG_SAVE_FREQUENTLY, &g_settings.epg_save_frequently, OPTIONS_OFF0_ON1_OPTIONS, OPTIONS_OFF0_ON1_OPTION_COUNT, g_settings.epg_save, sectionsdConfigNotifier); 
epg_save_frequently->setHint("", LOCALE_MENU_HINT_EPG_SAVE_FREQUENTLY); epg_read = new CMenuOptionChooser(LOCALE_MISCSETTINGS_EPG_READ, &g_settings.epg_read, OPTIONS_OFF0_ON1_OPTIONS, OPTIONS_OFF0_ON1_OPTION_COUNT, true, this); epg_read->setHint("", LOCALE_MENU_HINT_EPG_READ); epg_dir = new CMenuForwarder(LOCALE_MISCSETTINGS_EPG_DIR, (g_settings.epg_save || g_settings.epg_read), g_settings.epg_dir, this, "epgdir"); epg_dir->setHint("", LOCALE_MENU_HINT_EPG_DIR); epg_cache = to_string(g_settings.epg_cache); if (epg_cache.length() < 2) epg_cache.insert(0, 2 - epg_cache.length(), ' '); CStringInput * miscSettings_epg_cache = new CStringInput(LOCALE_MISCSETTINGS_EPG_CACHE, &epg_cache, 2,LOCALE_MISCSETTINGS_EPG_CACHE_HINT1, LOCALE_MISCSETTINGS_EPG_CACHE_HINT2 , "0123456789 ", sectionsdConfigNotifier); CMenuForwarder * mf = new CMenuDForwarder(LOCALE_MISCSETTINGS_EPG_CACHE, true, epg_cache, miscSettings_epg_cache); mf->setHint("", LOCALE_MENU_HINT_EPG_CACHE); epg_extendedcache = to_string(g_settings.epg_extendedcache); if (epg_extendedcache.length() < 3) epg_extendedcache.insert(0, 3 - epg_extendedcache.length(), ' '); CStringInput * miscSettings_epg_cache_e = new CStringInput(LOCALE_MISCSETTINGS_EPG_EXTENDEDCACHE, &epg_extendedcache, 3,LOCALE_MISCSETTINGS_EPG_EXTENDEDCACHE_HINT1, LOCALE_MISCSETTINGS_EPG_EXTENDEDCACHE_HINT2 , "0123456789 ", sectionsdConfigNotifier); CMenuForwarder * mf1 = new CMenuDForwarder(LOCALE_MISCSETTINGS_EPG_EXTENDEDCACHE, true, epg_extendedcache, miscSettings_epg_cache_e); mf1->setHint("", LOCALE_MENU_HINT_EPG_EXTENDEDCACHE); epg_old_events = to_string(g_settings.epg_old_events); if (epg_old_events.length() < 3) epg_old_events.insert(0, 3 - epg_old_events.length(), ' '); CStringInput * miscSettings_epg_old_events = new CStringInput(LOCALE_MISCSETTINGS_EPG_OLD_EVENTS, &epg_old_events, 3,LOCALE_MISCSETTINGS_EPG_OLD_EVENTS_HINT1, LOCALE_MISCSETTINGS_EPG_OLD_EVENTS_HINT2 , "0123456789 ", sectionsdConfigNotifier); CMenuForwarder * mf2 = new 
CMenuDForwarder(LOCALE_MISCSETTINGS_EPG_OLD_EVENTS, true, epg_old_events, miscSettings_epg_old_events); mf2->setHint("", LOCALE_MENU_HINT_EPG_OLD_EVENTS); epg_max_events = to_string(g_settings.epg_max_events); if (epg_max_events.length() < 6) epg_max_events.insert(0, 6 - epg_max_events.length(), ' '); CStringInput * miscSettings_epg_max_events = new CStringInput(LOCALE_MISCSETTINGS_EPG_MAX_EVENTS, &epg_max_events, 6,LOCALE_MISCSETTINGS_EPG_MAX_EVENTS_HINT1, LOCALE_MISCSETTINGS_EPG_MAX_EVENTS_HINT2 , "0123456789 ", sectionsdConfigNotifier); CMenuForwarder * mf3 = new CMenuDForwarder(LOCALE_MISCSETTINGS_EPG_MAX_EVENTS, true, epg_max_events, miscSettings_epg_max_events); mf3->setHint("", LOCALE_MENU_HINT_EPG_MAX_EVENTS); epg_scan = new CMenuOptionChooser(LOCALE_MISCSETTINGS_EPG_SCAN_BOUQUETS, &g_settings.epg_scan, EPG_SCAN_OPTIONS, EPG_SCAN_OPTION_COUNT, g_settings.epg_scan_mode != CEpgScan::MODE_OFF && g_settings.epg_save_mode == 0); epg_scan->setHint("", LOCALE_MENU_HINT_EPG_SCAN); CMenuOptionChooser * mc3 = new CMenuOptionChooser(LOCALE_MISCSETTINGS_EPG_SCAN, &g_settings.epg_scan_mode, EPG_SCAN_MODE_OPTIONS, CFEManager::getInstance()->getEnabledCount() > 1 ? 
EPG_SCAN_MODE_OPTION_COUNT : 2, true, this); mc3->setHint("", LOCALE_MENU_HINT_EPG_SCAN_MODE); CMenuOptionChooser * mc4 = new CMenuOptionChooser(LOCALE_MISCSETTINGS_EPG_SAVE_MODE, &g_settings.epg_save_mode, OPTIONS_OFF0_ON1_OPTIONS, OPTIONS_OFF0_ON1_OPTION_COUNT, true, this); mc4->setHint("", LOCALE_MENU_HINT_EPG_SAVE_MODE); ms_epg->addItem(epg_save); ms_epg->addItem(epg_save_standby); ms_epg->addItem(epg_save_frequently); ms_epg->addItem(epg_read); ms_epg->addItem(epg_dir); ms_epg->addItem(GenericMenuSeparatorLine); ms_epg->addItem(mf); ms_epg->addItem(mf1); ms_epg->addItem(mf2); ms_epg->addItem(mf3); ms_epg->addItem(mc4); ms_epg->addItem(GenericMenuSeparatorLine); ms_epg->addItem(mc3); ms_epg->addItem(epg_scan); } //filebrowser settings void CMiscMenue::showMiscSettingsMenuFBrowser(CMenuWidget *ms_fbrowser) { ms_fbrowser->addIntroItems(LOCALE_FILEBROWSER_HEAD); CMenuOptionChooser * mc; mc = new CMenuOptionChooser(LOCALE_FILESYSTEM_IS_UTF8 , &g_settings.filesystem_is_utf8 , MISCSETTINGS_FILESYSTEM_IS_UTF8_OPTIONS, MISCSETTINGS_FILESYSTEM_IS_UTF8_OPTION_COUNT, true ); mc->setHint("", LOCALE_MENU_HINT_FILESYSTEM_IS_UTF8); ms_fbrowser->addItem(mc); mc = new CMenuOptionChooser(LOCALE_FILEBROWSER_SHOWRIGHTS , &g_settings.filebrowser_showrights , MESSAGEBOX_NO_YES_OPTIONS , MESSAGEBOX_NO_YES_OPTION_COUNT , true ); mc->setHint("", LOCALE_MENU_HINT_FILEBROWSER_SHOWRIGHTS); ms_fbrowser->addItem(mc); mc = new CMenuOptionChooser(LOCALE_FILEBROWSER_DENYDIRECTORYLEAVE, &g_settings.filebrowser_denydirectoryleave, MESSAGEBOX_NO_YES_OPTIONS , MESSAGEBOX_NO_YES_OPTION_COUNT , true ); mc->setHint("", LOCALE_MENU_HINT_FILEBROWSER_DENYDIRECTORYLEAVE); ms_fbrowser->addItem(mc); } //channellist int CMiscMenue::showMiscSettingsMenuChanlist() { CMenuWidget * ms_chanlist = new CMenuWidget(LOCALE_MISCSETTINGS_HEAD, NEUTRINO_ICON_SETTINGS, width, MN_WIDGET_ID_MISCSETUP_CHANNELLIST); ms_chanlist->addIntroItems(LOCALE_MISCSETTINGS_CHANNELLIST); bool make_hd_list = g_settings.make_hd_list; 
bool make_webtv_list = g_settings.make_webtv_list; bool show_empty_favorites = g_settings.show_empty_favorites; CMenuOptionChooser * mc; mc = new CMenuOptionChooser(LOCALE_CHANNELLIST_MAKE_HDLIST , &g_settings.make_hd_list , OPTIONS_OFF0_ON1_OPTIONS, OPTIONS_OFF0_ON1_OPTION_COUNT, true); mc->setHint("", LOCALE_MENU_HINT_MAKE_HDLIST); ms_chanlist->addItem(mc); mc = new CMenuOptionChooser(LOCALE_CHANNELLIST_MAKE_WEBTVLIST , &g_settings.make_webtv_list , OPTIONS_OFF0_ON1_OPTIONS, OPTIONS_OFF0_ON1_OPTION_COUNT, true); mc->setHint("", LOCALE_MENU_HINT_MAKE_WEBTVLIST); ms_chanlist->addItem(mc); mc = new CMenuOptionChooser(LOCALE_CHANNELLIST_MAKE_NEWLIST, &g_settings.make_new_list , OPTIONS_OFF0_ON1_OPTIONS, OPTIONS_OFF0_ON1_OPTION_COUNT, true); mc->setHint("", LOCALE_MENU_HINT_MAKE_NEWLIST); ms_chanlist->addItem(mc); mc = new CMenuOptionChooser(LOCALE_CHANNELLIST_MAKE_REMOVEDLIST, &g_settings.make_removed_list , OPTIONS_OFF0_ON1_OPTIONS, OPTIONS_OFF0_ON1_OPTION_COUNT, true); mc->setHint("", LOCALE_MENU_HINT_MAKE_REMOVEDLIST); ms_chanlist->addItem(mc); mc = new CMenuOptionChooser(LOCALE_CHANNELLIST_KEEP_NUMBERS, &g_settings.keep_channel_numbers , OPTIONS_OFF0_ON1_OPTIONS, OPTIONS_OFF0_ON1_OPTION_COUNT, true); mc->setHint("", LOCALE_MENU_HINT_KEEP_NUMBERS); ms_chanlist->addItem(mc); mc = new CMenuOptionChooser(LOCALE_EXTRA_ZAP_CYCLE , &g_settings.zap_cycle , OPTIONS_OFF0_ON1_OPTIONS, OPTIONS_OFF0_ON1_OPTION_COUNT, true); mc->setHint("", LOCALE_MENU_HINT_ZAP_CYCLE); ms_chanlist->addItem(mc); mc = new CMenuOptionChooser(LOCALE_CHANNELLIST_NEW_ZAP_MODE, &g_settings.channellist_new_zap_mode, CHANNELLIST_NEW_ZAP_MODE_OPTIONS, CHANNELLIST_NEW_ZAP_MODE_OPTION_COUNT, true ); mc->setHint("", LOCALE_MENU_HINT_NEW_ZAP_MODE); ms_chanlist->addItem(mc); mc = new CMenuOptionChooser(LOCALE_CHANNELLIST_NUMERIC_ADJUST, &g_settings.channellist_numeric_adjust, OPTIONS_OFF0_ON1_OPTIONS, OPTIONS_OFF0_ON1_OPTION_COUNT, true); mc->setHint("", LOCALE_MENU_HINT_NUMERIC_ADJUST); 
ms_chanlist->addItem(mc); mc = new CMenuOptionChooser(LOCALE_CHANNELLIST_SHOW_EMPTY_FAVS, &g_settings.show_empty_favorites, OPTIONS_OFF0_ON1_OPTIONS, OPTIONS_OFF0_ON1_OPTION_COUNT, true); mc->setHint("", LOCALE_MENU_HINT_CHANNELLIST_SHOW_EMPTY_FAVS); ms_chanlist->addItem(mc); int res = ms_chanlist->exec(NULL, ""); delete ms_chanlist; if (make_hd_list != g_settings.make_hd_list || make_webtv_list != g_settings.make_webtv_list || show_empty_favorites != g_settings.show_empty_favorites) g_RCInput->postMsg(NeutrinoMessages::EVT_SERVICESCHANGED, 0); return res; } #ifdef CPU_FREQ //CPU void CMiscMenue::showMiscSettingsMenuCPUFreq(CMenuWidget *ms_cpu) { ms_cpu->addIntroItems(LOCALE_MISCSETTINGS_CPU); CCpuFreqNotifier * cpuNotifier = new CCpuFreqNotifier(); ms_cpu->addItem(new CMenuOptionChooser(LOCALE_CPU_FREQ_NORMAL, &g_settings.cpufreq, CPU_FREQ_OPTIONS, CPU_FREQ_OPTION_COUNT, true, cpuNotifier)); #if HAVE_SPARK_HARDWARE || HAVE_DUCKBOX_HARDWARE ms_cpu->addItem(new CMenuOptionChooser(LOCALE_CPU_FREQ_STANDBY, &g_settings.standby_cpufreq, CPU_FREQ_OPTIONS_STANDBY, CPU_FREQ_OPTION_STANDBY_COUNT, true)); #else ms_cpu->addItem(new CMenuOptionChooser(LOCALE_CPU_FREQ_STANDBY, &g_settings.standby_cpufreq, CPU_FREQ_OPTIONS, CPU_FREQ_OPTION_COUNT, true)); #endif } #endif /*CPU_FREQ*/ bool CMiscMenue::changeNotify(const neutrino_locale_t OptionName, void * /*data*/) { int ret = menu_return::RETURN_NONE; if (ARE_LOCALES_EQUAL(OptionName, LOCALE_VIDEOMENU_HDMI_CEC)) { printf("[neutrino CEC Settings] %s set CEC settings...\n", __FUNCTION__); g_settings.hdmi_cec_standby = 0; g_settings.hdmi_cec_view_on = 0; if (g_settings.hdmi_cec_mode != VIDEO_HDMI_CEC_MODE_OFF) { g_settings.hdmi_cec_standby = 1; g_settings.hdmi_cec_view_on = 1; g_settings.hdmi_cec_mode = VIDEO_HDMI_CEC_MODE_TUNER; } videoDecoder->SetCECAutoStandby(g_settings.hdmi_cec_standby == 1); videoDecoder->SetCECAutoView(g_settings.hdmi_cec_view_on == 1); videoDecoder->SetCECMode((VIDEO_HDMI_CEC_MODE)g_settings.hdmi_cec_mode); 
} else if (ARE_LOCALES_EQUAL(OptionName, LOCALE_MISCSETTINGS_EPG_SAVE)) { if (g_settings.epg_save) g_settings.epg_read = true; epg_save_standby->setActive(g_settings.epg_save); epg_save_frequently->setActive(g_settings.epg_save); epg_dir->setActive(g_settings.epg_save || g_settings.epg_read); CNeutrinoApp::getInstance()->SendSectionsdConfig(); ret = menu_return::RETURN_REPAINT; } else if (ARE_LOCALES_EQUAL(OptionName, LOCALE_MISCSETTINGS_EPG_READ)) { epg_dir->setActive(g_settings.epg_save || g_settings.epg_read); } else if (ARE_LOCALES_EQUAL(OptionName, LOCALE_MISCSETTINGS_EPG_SCAN)) { epg_scan->setActive(g_settings.epg_scan_mode != CEpgScan::MODE_OFF && g_settings.epg_save_mode == 0); } else if (ARE_LOCALES_EQUAL(OptionName, LOCALE_MISCSETTINGS_EPG_SAVE_MODE)) { g_settings.epg_scan = CEpgScan::SCAN_FAV;<|fim▁hole|> ret = menu_return::RETURN_REPAINT; } return ret; }<|fim▁end|>
epg_scan->setActive(g_settings.epg_scan_mode != CEpgScan::MODE_OFF && g_settings.epg_save_mode == 0);
<|file_name|>simple.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*- from sitemaps import SiteMapRoot, SiteMap from datetime import datetime def generate_sitemap(): """ build the sitemap """ sitemap = SiteMap() sitemap.append("http://www.xxx.com", datetime.now(), "weekly", 0.9) sitemap.append("http://www.xxx.com/a1", datetime.now(), "monthly", 0.7) sitemap.save_xml("sitemap.xml") def generate_sitemap_gz(): """ get the gzip sitemap format """ sitemap = SiteMap() sitemap.append("http://www.xxx.com", datetime.now(), "weekly", 0.9) sitemap.append("http://www.xxx.com/a1", datetime.now(), "monthly", 0.7)<|fim▁hole|> xml_string = sitemap.to_string sitemap_root = SiteMapRoot("http://www.new.com", "root_sitemap.xml", False) sitemap_root.append("sitemap1.xml.gz", xml_string) sitemap_root.save_xml() if __name__ == "__main__": generate_sitemap() generate_sitemap_gz()<|fim▁end|>
<|file_name|>configure.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python import glob, os, sys import sipconfig from PyQt4 import pyqtconfig def get_diana_version(): depends = filter(lambda line: line.startswith("Depends:"), open("debian/control").readlines()) for line in depends: pieces = line.split() for piece in pieces: name_pieces = piece.strip(",").split("-") if len(name_pieces) == 2 and name_pieces[0] == "diana": return name_pieces[1] return None def get_python_diana_version(): line = open("debian/changelog").readline() pieces = line.split() return pieces[1][1:-1] if __name__ == "__main__": if len(sys.argv) not in (1, 3, 5): sys.stderr.write("Usage: %s [<directory containing diana headers> <directory containing libdiana>] " "[<directory containing metlibs headers> <directory containing metlibs libraries>]\n" % sys.argv[0]) sys.exit(1) if len(sys.argv) == 5: metlibs_inc_dir = sys.argv[3] metlibs_lib_dir = sys.argv[4] else: metlibs_inc_dir = "/usr/include/metlibs" metlibs_lib_dir = "/usr/lib" if len(sys.argv) >= 3: diana_inc_dir = sys.argv[1] diana_lib_dir = sys.argv[2] else: diana_inc_dir = "/usr/include/diana" diana_lib_dir = "/usr/lib" qt_pkg_dir = os.getenv("qt_pkg_dir") python_diana_pkg_dir = os.getenv("python_diana_pkg_dir") dest_pkg_dir = os.path.join(python_diana_pkg_dir, "metno") config = pyqtconfig.Configuration() # The name of the SIP build file generated by SIP and used by the build # system. sip_files_dir = "sip" modules = ["std", "metlibs", "diana"] if not os.path.exists("modules"): os.mkdir("modules") # Run SIP to generate the code. 
output_dirs = [] for module in modules: output_dir = os.path.join("modules", module) build_file = module + ".sbf" build_path = os.path.join(output_dir, build_file) if not os.path.exists(output_dir): os.mkdir(output_dir) sip_file = os.path.join("sip", module, module+".sip") command = " ".join([config.sip_bin, "-c", output_dir, "-b", build_path, "-I"+config.sip_inc_dir, "-I"+config.pyqt_sip_dir, "-I"+diana_inc_dir, "-I/usr/include", "-I"+metlibs_inc_dir, "-I"+qt_pkg_dir+"/include", "-I"+qt_pkg_dir+"/share/sip/PyQt4", "-Isip", config.pyqt_sip_flags, "-w", "-o", # generate docstrings for signatures<|fim▁hole|> sys.stdout.write(command+"\n") sys.stdout.flush() if os.system(command) != 0: sys.exit(1) # Create the Makefile (within the diana directory). makefile = pyqtconfig.QtGuiModuleMakefile( config, build_file, dir=output_dir, install_dir=dest_pkg_dir, qt=["QtCore", "QtGui", "QtNetwork", "QtXml", "QtXmlPatterns"] ) if module == "diana": makefile.extra_include_dirs += [ diana_inc_dir, os.path.join(diana_inc_dir, "PaintGL"), metlibs_inc_dir, qt_pkg_dir+"/include" ] makefile.extra_lib_dirs += [diana_lib_dir, qt_pkg_dir+"/lib"] makefile.extra_lflags += ["-Wl,-rpath="+diana_lib_dir, "-Wl,-fPIC"] makefile.extra_libs += ["diana"] if module == "metlibs": makefile.extra_include_dirs.append(diana_inc_dir) makefile.extra_include_dirs.append("/usr/include/metlibs") makefile.extra_lib_dirs += [diana_lib_dir, "/usr/lib", metlibs_lib_dir, qt_pkg_dir+"/lib"] makefile.extra_lflags += ["-Wl,-rpath="+diana_lib_dir, "-Wl,-fPIC"] makefile.extra_libs += ["miLogger", "coserver", "diana"] makefile.generate() output_dirs.append(output_dir) # Update the metno package version. 
diana_version = get_diana_version() python_diana_version = get_python_diana_version() if not diana_version or not python_diana_version: sys.stderr.write("Failed to find version information for Diana (%s) " "or python-diana (%s)\n" % (repr(diana_version), repr(python_diana_version))) sys.exit(1) f = open("python/metno/versions.py", "w") f.write('\ndiana_version = "%s"\npython_diana_version = "%s"\n' % ( diana_version, python_diana_version)) # Generate the top-level Makefile. python_files = glob.glob(os.path.join("python", "metno", "*.py")) sipconfig.ParentMakefile( configuration = config, subdirs = output_dirs, installs = [(python_files, dest_pkg_dir)] ).generate() sys.exit()<|fim▁end|>
sip_file])
<|file_name|>test_raise_error.py<|end_file_name|><|fim▁begin|># Copyright (c) 2014 Katsuya Noguchi # # Permission is hereby granted, free of charge, to any person obtaining a # copy of this software and associated documentation files (the # "Software"), to deal in the Software without restriction, including # without limitation the rights to use, copy, modify, merge, publish, dis- # tribute, sublicense, and/or sell copies of the Software, and to permit # persons to whom the Software is furnished to do so, subject to the fol- # lowing conditions: # # The above copyright notice and this permission notice shall be included # in all copies or substantial portions of the Software. # # THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS # OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABIL- # ITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT<|fim▁hole|> import unittest import slack.http_client from slack.exception import SlackError, \ InvalidAuthError, \ NotAuthedError, \ AccountInactiveError, \ ChannelNotFoundError, \ ChannelArchivedError, \ NotInChannelError, \ RateLimitedError class TestRaiseErrorClient(unittest.TestCase): def test_ok_response(self): # does not raise error if response is ok slack.http_client._raise_error_if_not_ok({ 'ok': True }) def test_invalid_auth(self): self.assertRaises(InvalidAuthError, slack.http_client._raise_error_if_not_ok, { 'ok': False, 'error': 'invalid_auth' }) def test_not_authed(self): self.assertRaises(NotAuthedError, slack.http_client._raise_error_if_not_ok, { 'ok': False, 'error': 'not_authed' }) def test_account_inactive(self): self.assertRaises(AccountInactiveError, slack.http_client._raise_error_if_not_ok, { 'ok': False, 'error': 'account_inactive' }) def test_channel_not_found(self): self.assertRaises(ChannelNotFoundError, slack.http_client._raise_error_if_not_ok, { 'ok': False, 'error': 'channel_not_found' }) def test_is_archived(self): 
self.assertRaises(ChannelArchivedError, slack.http_client._raise_error_if_not_ok, { 'ok': False, 'error': 'is_archived' }) def test_not_in_channel(self): self.assertRaises(NotInChannelError, slack.http_client._raise_error_if_not_ok, { 'ok': False, 'error': 'not_in_channel' }) def test_rate_limited(self): self.assertRaises(RateLimitedError, slack.http_client._raise_error_if_not_ok, { 'ok': False, 'error': 'rate_limited' }) def test_slack_error(self): self.assertRaises(SlackError, slack.http_client._raise_error_if_not_ok, { 'ok': False, 'error': 'unknown_error' })<|fim▁end|>
# SHALL THE AUTHOR BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, # WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, # OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS # IN THE SOFTWARE.
<|file_name|>hcHandlerUpdateDemographic.js<|end_file_name|><|fim▁begin|>var _hc_windowTimeout; var _hc_newDemographicHandler = function(args) { clearTimeout(_hc_windowTimeout); var window = jQuery("#_hc_window"); jQuery(window).find("#_hc_message, #_hc_read, #_hc_actions, #_hc_match, #_hc_noMatch, #_hc_matchSearch, #_hc_closeBtn").hide(); jQuery(window).find("._hc_mismatch").removeClass("_hc_mismatch"); jQuery(window).find("#_hc_errors").children().remove(); if (!(typeof args["error"] == "undefined")) { jQuery(window).find("#_hc_closeBtn").hide(); jQuery(window).find("#_hc_status_text_success").hide(); jQuery(window).find("#_hc_status_text_error, #_hc_message_tryAgain, #_hc_message").show(); jQuery(window).find("#_hc_status_icon").attr("class", "_hc_inlineBlock _hc_status_error"); if (args["error"] == "INVALID") { jQuery(window).find("#_hc_message_readError").css("display", "inline-block"); jQuery(window).find("#_hc_message_issuerError").css("display", "none"); } else if (args["error"] == "ISSUER") { jQuery(window).find("#_hc_message_readError").css("display", "none"); jQuery(window).find("#_hc_message_issuerError").css("display", "inline-block"); } else { jQuery(window).find("#_hc_message_readError").css("display", "none"); jQuery(window).find("#_hc_message_issuerError").css("display", "none"); } _hc_windowTimeout = setTimeout(function() { jQuery("#_hc_window").css("display", "none"); }, 3000); } else { jQuery(window).find("#_hc_status_text_success, #_hc_read, #_hc_layout").show(); jQuery(window).find("#_hc_message, #_hc_status_text_error").hide(); jQuery(window).find("#_hc_status_icon").attr("class", "_hc_inlineBlock _hc_status_success"); <|fim▁hole|> jQuery(window).find("#_hc_layout_hin_ver").text(args["hinVer"]); jQuery(window).find("#_hc_layout_info_dob").text(args["dob"].substring(0,4) + "/" + args["dob"].substring(4,6) + "/" + args["dob"].substring(6,8)); jQuery(window).find("#_hc_layout_info_sex").text((args["sex"] == "1" ? "M" : (args["sex"] == "2" ? 
"F" : ""))); var issueDate = (args["issueDate"].substring(0,2) <= 30 ? "20" : "19") + args["issueDate"]; jQuery(window).find("#_hc_layout_valid_from").text(issueDate.substring(0,4) + "/" + issueDate.substring(4,6) + "/" + issueDate.substring(6,8)); var hinExp = (args["hinExp"].substring(0,2) <= 30 ? "20" : "19") + args["hinExp"]; jQuery(window).find("#_hc_layout_valid_to").text(hinExp.substring(0,4) + "/" + hinExp.substring(4,6)); if (hinExp != "0000") { var hinExp = (args["hinExp"].substring(0,2) <= 30 ? "20" : "19") + args["hinExp"] + args["dob"].substring(6,8); jQuery(window).find("#_hc_layout_valid_to").text(hinExp.substring(0,4) + "/" + hinExp.substring(4,6) + "/" + hinExp.substring(6,8)); var date = new Date(); var hinExpDate = new Date(hinExp.substring(0,4) + "/" + hinExp.substring(4,6) + "/" + hinExp.substring(6, 8)); if (hinExpDate <= new Date()) { jQuery(window).find("#_hc_layout_valid_to").addClass("_hc_mismatch"); jQuery(window).find("#_hc_errors").append("<div class='_hc_error'>This health card has expired.</div>"); } jQuery("input[name='end_date_year']").val(hinExp.substring(0,4)); jQuery("input[name='end_date_month']").val(hinExp.substring(4,6)); jQuery("input[name='end_date_date']").val(hinExp.substring(6,8)); } else { jQuery(window).find("#_hc_layout_valid_to").text("No Expiry"); jQuery("input[name='end_date_year']").val(""); jQuery("input[name='end_date_month']").val(""); jQuery("input[name='end_date_date']").val(""); } // Add all of these values to the correct fields on the page jQuery("input[name='keyword']").val(""); jQuery("select[name='hc_type']").val("ON"); if (jQuery("select[name='hc_type']").val() != "ON"){ jQuery("input[name='hc_type']").css({'background-color' : 'yellow'}); jQuery("input[name='hc_type']").val("ON"); } if (jQuery("input[name='last_name']").val() != args["lastName"]){ jQuery("input[name='last_name']").css({'background-color' : 'yellow'}); jQuery("input[name='last_name']").val(args["lastName"]); } if 
(jQuery("input[name='first_name']").val() != args["firstName"]){ jQuery("input[name='first_name']").css({'background-color' : 'yellow'}); jQuery("input[name='first_name']").val(args["firstName"]); } if (jQuery("input[name='hin']").val() != args["hin"]){ jQuery("input[name='hin']").css({'background-color' : 'yellow'}); jQuery("input[name='hin']").val(args["hin"]); } if (jQuery("input[name='year_of_birth']").val() != args["dob"].substring(0,4)){ jQuery("input[name='year_of_birth']").css({'background-color' : 'yellow'}); jQuery("input[name='year_of_birth']").val(args["dob"].substring(0,4)); } if (jQuery("input[name='month_of_birth']").val() != args["dob"].substring(4,6)){ jQuery("input[name='month_of_birth']").css({'background-color' : 'yellow'}); jQuery("input[name='month_of_birth']").val(args["dob"].substring(4,6)); } if (jQuery("input[name='date_of_birth']").val() != args["dob"].substring(6,8)){ jQuery("input[name='date_of_birth']").css({'background-color' : 'yellow'}); jQuery("input[name='date_of_birth']").val(args["dob"].substring(6,8)); } if (jQuery("input[name='ver']").val() != args["hinVer"]){ jQuery("input[name='ver']").css({'background-color' : 'yellow'}); jQuery("input[name='ver']").val(args["hinVer"]); } if (jQuery("input[name='sex']").val() != (args["sex"] == "1" ? "M" : (args["sex"] == "2" ? "F" : ""))){ jQuery("input[name='sex']").css({'background-color' : 'yellow'}); jQuery("input[name='sex']").val((args["sex"] == "1" ? "M" : (args["sex"] == "2" ? 
"F" : ""))); } if (jQuery("input[name='eff_date_year']").val() != issueDate.substring(0,4)){ jQuery("input[name='eff_date_year']").css({'background-color' : 'yellow'}); jQuery("input[name='eff_date_year']").val(issueDate.substring(0,4)); } if (jQuery("input[name='eff_date_year']").val() != issueDate.substring(0,4)){ jQuery("input[name='eff_date_year']").css({'background-color' : 'yellow'}); jQuery("input[name='eff_date_year']").val(issueDate.substring(0,4)); } if (jQuery("input[name='eff_date_year']").val() != issueDate.substring(0,4)){ jQuery("input[name='eff_date_year']").css({'background-color' : 'yellow'}); jQuery("input[name='eff_date_year']").val(issueDate.substring(0,4)); } showEdit(); _hc_windowTimeout = setTimeout(function() { jQuery("#_hc_window").css("display", "none"); }, 3000); } jQuery(window).css("display", "block"); } jQuery(document).ready(function() { jQuery("#_hc_window #_hc_matchSearch img").attr("src", window.pageContext + "/images/DMSLoader.gif"); jQuery("#_hc_window #_hc_closeBtn").click(function() { jQuery("#_hc_window").hide(); }); new HealthCardHandler(_hc_newDemographicHandler); });<|fim▁end|>
jQuery(window).find("#_hc_layout_name").text(args["lastName"] + ", " + args["firstName"]); jQuery(window).find("#_hc_layout_hin_num").html(args["hin"].substring(0,4) + "&#149; " + args["hin"].substring(4,7) + "&#149; " + args["hin"].substring(7,10) + "&#149;");
<|file_name|>capability.go<|end_file_name|><|fim▁begin|>// Copyright 2018 The gVisor Authors. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. package cmd import ( "fmt" specs "github.com/opencontainers/runtime-spec/specs-go" "github.com/syndtr/gocapability/capability" "gvisor.dev/gvisor/pkg/log" ) var allCapTypes = []capability.CapType{ capability.BOUNDS, capability.EFFECTIVE, capability.PERMITTED, capability.INHERITABLE, capability.AMBIENT, } // applyCaps applies the capabilities in the spec to the current thread. // // Note that it must be called with current thread locked. func applyCaps(caps *specs.LinuxCapabilities) error { // Load current capabilities to trim the ones not permitted. curCaps, err := capability.NewPid2(0) if err != nil { return err } if err := curCaps.Load(); err != nil { return err } // Create an empty capability set to populate. newCaps, err := capability.NewPid2(0) if err != nil { return err } for _, c := range allCapTypes { if !newCaps.Empty(c) { panic("unloaded capabilities must be empty") } set, err := trimCaps(getCaps(c, caps), curCaps) if err != nil { return err } newCaps.Set(c, set...) 
} if err := newCaps.Apply(capability.CAPS | capability.BOUNDS | capability.AMBS); err != nil { return err } log.Infof("Capabilities applied: %+v", newCaps) return nil } func getCaps(which capability.CapType, caps *specs.LinuxCapabilities) []string { switch which { case capability.BOUNDS: return caps.Bounding case capability.EFFECTIVE: return caps.Effective case capability.PERMITTED: return caps.Permitted case capability.INHERITABLE: return caps.Inheritable case capability.AMBIENT: return caps.Ambient } panic(fmt.Sprint("invalid capability type:", which)) } func trimCaps(names []string, setter capability.Capabilities) ([]capability.Cap, error) { wantedCaps, err := capsFromNames(names) if err != nil { return nil, err } // Trim down capabilities that aren't possible to acquire. var caps []capability.Cap for _, c := range wantedCaps { // Capability rules are more complicated than this, but this catches most // problems with tests running with non-privileged user. if setter.Get(capability.PERMITTED, c) { caps = append(caps, c) } else { log.Warningf("Capability %q is not permitted, dropping it.", c) } } return caps, nil } func capsFromNames(names []string) ([]capability.Cap, error) { var caps []capability.Cap for _, name := range names { cap, ok := capFromName[name] if !ok { return nil, fmt.Errorf("invalid capability %q", name) } caps = append(caps, cap) } return caps, nil } var capFromName = map[string]capability.Cap{ "CAP_CHOWN": capability.CAP_CHOWN, "CAP_DAC_OVERRIDE": capability.CAP_DAC_OVERRIDE, "CAP_DAC_READ_SEARCH": capability.CAP_DAC_READ_SEARCH, "CAP_FOWNER": capability.CAP_FOWNER, "CAP_FSETID": capability.CAP_FSETID, "CAP_KILL": capability.CAP_KILL, "CAP_SETGID": capability.CAP_SETGID, "CAP_SETUID": capability.CAP_SETUID, "CAP_SETPCAP": capability.CAP_SETPCAP, "CAP_LINUX_IMMUTABLE": capability.CAP_LINUX_IMMUTABLE, "CAP_NET_BIND_SERVICE": capability.CAP_NET_BIND_SERVICE, "CAP_NET_BROADCAST": capability.CAP_NET_BROADCAST, "CAP_NET_ADMIN": 
capability.CAP_NET_ADMIN, "CAP_NET_RAW": capability.CAP_NET_RAW, "CAP_IPC_LOCK": capability.CAP_IPC_LOCK, "CAP_IPC_OWNER": capability.CAP_IPC_OWNER, "CAP_SYS_MODULE": capability.CAP_SYS_MODULE, "CAP_SYS_RAWIO": capability.CAP_SYS_RAWIO, "CAP_SYS_CHROOT": capability.CAP_SYS_CHROOT, "CAP_SYS_PTRACE": capability.CAP_SYS_PTRACE, "CAP_SYS_PACCT": capability.CAP_SYS_PACCT, "CAP_SYS_ADMIN": capability.CAP_SYS_ADMIN, "CAP_SYS_BOOT": capability.CAP_SYS_BOOT, "CAP_SYS_NICE": capability.CAP_SYS_NICE, "CAP_SYS_RESOURCE": capability.CAP_SYS_RESOURCE, "CAP_SYS_TIME": capability.CAP_SYS_TIME, "CAP_SYS_TTY_CONFIG": capability.CAP_SYS_TTY_CONFIG, "CAP_MKNOD": capability.CAP_MKNOD, "CAP_LEASE": capability.CAP_LEASE, "CAP_AUDIT_WRITE": capability.CAP_AUDIT_WRITE, "CAP_AUDIT_CONTROL": capability.CAP_AUDIT_CONTROL, "CAP_SETFCAP": capability.CAP_SETFCAP, "CAP_MAC_OVERRIDE": capability.CAP_MAC_OVERRIDE, "CAP_MAC_ADMIN": capability.CAP_MAC_ADMIN, "CAP_SYSLOG": capability.CAP_SYSLOG,<|fim▁hole|><|fim▁end|>
"CAP_WAKE_ALARM": capability.CAP_WAKE_ALARM, "CAP_BLOCK_SUSPEND": capability.CAP_BLOCK_SUSPEND, "CAP_AUDIT_READ": capability.CAP_AUDIT_READ, }
<|file_name|>samples.feature.module.ts<|end_file_name|><|fim▁begin|>import { NgModule } from "@angular/core"; import { SamplesModule } from "samples/samples.module"; import { SamplesRoutingModule } from "./samples.routing.module"; @NgModule({<|fim▁hole|> SamplesRoutingModule ] }) export class SamplesFeatureModule {}<|fim▁end|>
imports: [ SamplesModule,
<|file_name|>TestHelloWorld.java<|end_file_name|><|fim▁begin|>package xyz.zyzhu.model; <|fim▁hole|>import org.junit.Assert.*; public class TestHelloWorld { @Test public void tesySayHello() { Assert.assertEquals("hello world",new HelloWorld().sayHello()); } }<|fim▁end|>
import org.junit.*;
<|file_name|>test_player.py<|end_file_name|><|fim▁begin|>from game.table import Table from mahjong.constants import EAST, NORTH, SOUTH, WEST from utils.decisions_logger import MeldPrint from utils.test_helpers import make_meld, string_to_136_array def test_can_call_riichi_and_tempai(): table = Table() player = table.player player.in_tempai = False player.in_riichi = False player.scores = 2000 player.table.count_of_remaining_tiles = 40 assert player.formal_riichi_conditions() is False player.in_tempai = True assert player.formal_riichi_conditions() is True def test_can_call_riichi_and_already_in_riichi(): table = Table() player = table.player player.in_tempai = True player.in_riichi = True player.scores = 2000 player.table.count_of_remaining_tiles = 40 assert player.formal_riichi_conditions() is False player.in_riichi = False assert player.formal_riichi_conditions() is True def test_can_call_riichi_and_scores():<|fim▁hole|> player.in_tempai = True player.in_riichi = False player.scores = 0 player.table.count_of_remaining_tiles = 40 assert player.formal_riichi_conditions() is False player.scores = 1000 assert player.formal_riichi_conditions() is True def test_can_call_riichi_and_remaining_tiles(): table = Table() player = table.player player.in_tempai = True player.in_riichi = False player.scores = 2000 player.table.count_of_remaining_tiles = 3 assert player.formal_riichi_conditions() is False player.table.count_of_remaining_tiles = 5 assert player.formal_riichi_conditions() is True def test_can_call_riichi_and_open_hand(): table = Table() player = table.player player.in_tempai = True player.in_riichi = False player.scores = 2000 player.melds = [MeldPrint()] player.table.count_of_remaining_tiles = 40 assert player.formal_riichi_conditions() is False player.melds = [] assert player.formal_riichi_conditions() is True def test_players_wind(): table = Table() player = table.player dealer_seat = 0 table.init_round(0, 0, 0, 0, dealer_seat, []) assert player.player_wind == 
EAST assert table.get_player(1).player_wind == SOUTH dealer_seat = 1 table.init_round(0, 0, 0, 0, dealer_seat, []) assert player.player_wind == NORTH assert table.get_player(1).player_wind == EAST dealer_seat = 2 table.init_round(0, 0, 0, 0, dealer_seat, []) assert player.player_wind == WEST assert table.get_player(1).player_wind == NORTH dealer_seat = 3 table.init_round(0, 0, 0, 0, dealer_seat, []) assert player.player_wind == SOUTH assert table.get_player(1).player_wind == WEST def test_player_called_meld_and_closed_hand(): table = Table() player = table.player tiles = string_to_136_array(sou="123678", pin="3599", honors="555") player.init_hand(tiles) assert len(player.closed_hand) == 13 player.add_called_meld(make_meld(MeldPrint.PON, honors="555")) assert len(player.closed_hand) == 10<|fim▁end|>
table = Table() player = table.player
<|file_name|>stand-alone-comment.js<|end_file_name|><|fim▁begin|>// https://github.com/isaacs/sax-js/issues/124 require(__dirname).test ( { xml : "<!-- stand alone comment -->" <|fim▁hole|> , expect : [ [ "comment", " stand alone comment " ] ] , strict : true , opt : {} } )<|fim▁end|>
<|file_name|>mod_file_nonascii_with_path_allowed-aux.rs<|end_file_name|><|fim▁begin|><|fim▁hole|>pub trait Foo {}<|fim▁end|>
<|file_name|>tokens_test.py<|end_file_name|><|fim▁begin|>from temp_tools import TestClient from test_template import ApiTestTemplate class TokensTest(ApiTestTemplate): def setUp(self): super(TokensTest, self).setUp() TestClient.execute("""TRUNCATE auth_token""") self.test_token_data = {'description': 'Test token 1', 'scope_push': True, 'scope_pull': True} def test_tokens_root(self): # test unauthorized r = TestClient.post('api/v1/projects/%s/tokens' % self.project_id, data=self.test_token_data, headers=TestClient.get_job_authorization(self.job_id)) self.assertEqual(r['message'], 'Unauthorized') # test token creation r = TestClient.post('api/v1/projects/%s/tokens' % self.project_id, data=self.test_token_data, headers=TestClient.get_user_authorization(self.user_id)) self.assertEqual(r['message'], 'Successfully added token') self.assertEqual(r['status'], 200) # test token receiving r = TestClient.get('api/v1/projects/%s/tokens' % self.project_id, headers=TestClient.get_user_authorization(self.user_id)) self.assertGreater(len(r), 0)<|fim▁hole|> self.assertEqual(r[0]['scope_pull'], self.test_token_data['scope_pull']) def test_tokens_delete(self): r = TestClient.execute_one(''' INSERT INTO auth_token (description, scope_push, scope_pull, project_id) VALUES (%s, %s, %s, %s) RETURNING id ''', [self.test_token_data['description'], self.test_token_data['scope_push'], self.test_token_data['scope_pull'], self.project_id]) token_id = r['id'] r = TestClient.execute_one("""SELECT count(*) FROM auth_token WHERE id = '%s'""" % token_id) self.assertGreater(r[0], 0) r = TestClient.delete('api/v1/projects/%s/tokens/%s' % (self.project_id, token_id), headers=TestClient.get_user_authorization(self.user_id)) self.assertEqual(r['message'], 'Successfully deleted token') self.assertEqual(r['status'], 200) r = TestClient.execute_one("""SELECT count(*) FROM auth_token WHERE id = '%s'""" % token_id) self.assertEqual(r[0], 0)<|fim▁end|>
self.assertEqual(r[0]['description'], self.test_token_data['description']) self.assertEqual(r[0]['scope_push'], self.test_token_data['scope_push'])
<|file_name|>index.js<|end_file_name|><|fim▁begin|>import { request } from '@octokit/request'; import { getUserAgent } from 'universal-user-agent'; const VERSION = "4.5.6"; class GraphqlError extends Error { constructor(request, response) { const message = response.data.errors[0].message; super(message); Object.assign(this, response.data); Object.assign(this, { headers: response.headers }); this.name = "GraphqlError"; this.request = request; // Maintains proper stack trace (only available on V8) /* istanbul ignore next */ if (Error.captureStackTrace) { Error.captureStackTrace(this, this.constructor); } } } const NON_VARIABLE_OPTIONS = [ "method", "baseUrl", "url", "headers", "request", "query", "mediaType", ]; const GHES_V3_SUFFIX_REGEX = /\/api\/v3\/?$/; function graphql(request, query, options) { if (typeof query === "string" && options && "query" in options) { return Promise.reject(new Error(`[@octokit/graphql] "query" cannot be used as variable name`)); } const parsedOptions = typeof query === "string" ? 
Object.assign({ query }, options) : query; const requestOptions = Object.keys(parsedOptions).reduce((result, key) => { if (NON_VARIABLE_OPTIONS.includes(key)) { result[key] = parsedOptions[key]; return result; } if (!result.variables) { result.variables = {}; } result.variables[key] = parsedOptions[key]; return result; }, {}); // workaround for GitHub Enterprise baseUrl set with /api/v3 suffix // https://github.com/octokit/auth-app.js/issues/111#issuecomment-657610451 const baseUrl = parsedOptions.baseUrl || request.endpoint.DEFAULTS.baseUrl; if (GHES_V3_SUFFIX_REGEX.test(baseUrl)) { requestOptions.url = baseUrl.replace(GHES_V3_SUFFIX_REGEX, "/api/graphql"); } return request(requestOptions).then((response) => { if (response.data.errors) { const headers = {};<|fim▁hole|> throw new GraphqlError(requestOptions, { headers, data: response.data, }); } return response.data.data; }); } function withDefaults(request$1, newDefaults) { const newRequest = request$1.defaults(newDefaults); const newApi = (query, options) => { return graphql(newRequest, query, options); }; return Object.assign(newApi, { defaults: withDefaults.bind(null, newRequest), endpoint: request.endpoint, }); } const graphql$1 = withDefaults(request, { headers: { "user-agent": `octokit-graphql.js/${VERSION} ${getUserAgent()}`, }, method: "POST", url: "/graphql", }); function withCustomRequest(customRequest) { return withDefaults(customRequest, { method: "POST", url: "/graphql", }); } export { graphql$1 as graphql, withCustomRequest }; //# sourceMappingURL=index.js.map<|fim▁end|>
for (const key of Object.keys(response.headers)) { headers[key] = response.headers[key]; }
<|file_name|>html.rs<|end_file_name|><|fim▁begin|>/* This Source Code Form is subject to the terms of the Mozilla Public * License, v. 2.0. If a copy of the MPL was not distributed with this * file, You can obtain one at http://mozilla.org/MPL/2.0/. */ #![allow(unrooted_must_root)] use document_loader::DocumentLoader; use dom::bindings::codegen::Bindings::DocumentBinding::DocumentMethods; use dom::bindings::codegen::Bindings::HTMLTemplateElementBinding::HTMLTemplateElementMethods; use dom::bindings::codegen::Bindings::NodeBinding::NodeMethods; use dom::bindings::codegen::InheritTypes::{CharacterDataCast, DocumentTypeCast}; use dom::bindings::codegen::InheritTypes::{ElementCast, HTMLFormElementDerived}; use dom::bindings::codegen::InheritTypes::{HTMLScriptElementCast, HTMLTemplateElementCast}; use dom::bindings::codegen::InheritTypes::{NodeCast, ProcessingInstructionCast}; use dom::bindings::js::{JS, Root}; use dom::bindings::js::{RootedReference}; use dom::characterdata::CharacterDataTypeId; use dom::comment::Comment; use dom::document::Document; use dom::document::{DocumentSource, IsHTMLDocument}; use dom::documenttype::DocumentType; use dom::element::{Element, ElementCreator}; use dom::htmlscriptelement::HTMLScriptElement; use dom::node::{Node, NodeTypeId}; use dom::node::{document_from_node, window_from_node}; use dom::servohtmlparser; use dom::servohtmlparser::{FragmentContext, ServoHTMLParser}; use encoding::types::Encoding; use html5ever::Attribute; use html5ever::serialize::TraversalScope; use html5ever::serialize::TraversalScope::{ChildrenOnly, IncludeNode}; use html5ever::serialize::{AttrRef, Serializable, Serializer}; use html5ever::tree_builder::{NextParserState, NodeOrText, QuirksMode, TreeSink}; use msg::constellation_msg::PipelineId; use parse::Parser; use std::borrow::Cow; use std::io::{self, Write}; use string_cache::QualName; use tendril::StrTendril; use url::Url; use util::str::DOMString; impl<'a> TreeSink for servohtmlparser::Sink { type Handle = 
JS<Node>; fn get_document(&mut self) -> JS<Node> { let doc = self.document.root(); let node = NodeCast::from_ref(doc.r()); JS::from_ref(node) } fn get_template_contents(&self, target: JS<Node>) -> JS<Node> { let target = target.root(); let template = HTMLTemplateElementCast::to_ref(&*target) .expect("tried to get template contents of non-HTMLTemplateElement in HTML parsing"); JS::from_ref(NodeCast::from_ref(&*template.Content())) } fn same_node(&self, x: JS<Node>, y: JS<Node>) -> bool { x == y } fn elem_name(&self, target: JS<Node>) -> QualName { let node: Root<Node> = target.root(); let elem = ElementCast::to_ref(node.r()) .expect("tried to get name of non-Element in HTML parsing"); QualName { ns: elem.namespace().clone(), local: elem.local_name().clone(), } } fn create_element(&mut self, name: QualName, attrs: Vec<Attribute>) -> JS<Node> { let doc = self.document.root(); let elem = Element::create(name, None, doc.r(), ElementCreator::ParserCreated); for attr in attrs { elem.r().set_attribute_from_parser(attr.name, attr.value.into(), None); } let node = NodeCast::from_ref(elem.r()); JS::from_ref(node) } fn create_comment(&mut self, text: StrTendril) -> JS<Node> { let doc = self.document.root(); let comment = Comment::new(text.into(), doc.r()); let node = NodeCast::from_root(comment); JS::from_rooted(&node) } fn append_before_sibling(&mut self, sibling: JS<Node>, new_node: NodeOrText<JS<Node>>) -> Result<(), NodeOrText<JS<Node>>> { // If there is no parent, return the node to the parser. 
let sibling: Root<Node> = sibling.root(); let parent = match sibling.r().GetParentNode() { Some(p) => p, None => return Err(new_node), }; let child = self.get_or_create(new_node); assert!(parent.r().InsertBefore(child.r(), Some(sibling.r())).is_ok()); Ok(()) } fn parse_error(&mut self, msg: Cow<'static, str>) { debug!("Parse error: {}", msg); } fn set_quirks_mode(&mut self, mode: QuirksMode) { let doc = self.document.root(); doc.r().set_quirks_mode(mode); } fn append(&mut self, parent: JS<Node>, child: NodeOrText<JS<Node>>) { let parent: Root<Node> = parent.root(); let child = self.get_or_create(child); // FIXME(#3701): Use a simpler algorithm and merge adjacent text nodes assert!(parent.r().AppendChild(child.r()).is_ok()); } fn append_doctype_to_document(&mut self, name: StrTendril, public_id: StrTendril, system_id: StrTendril) { let doc = self.document.root(); let doc_node = NodeCast::from_ref(doc.r()); let doctype = DocumentType::new( name.into(), Some(public_id.into()), Some(system_id.into()), doc.r()); let node: Root<Node> = NodeCast::from_root(doctype); assert!(doc_node.AppendChild(node.r()).is_ok()); } fn add_attrs_if_missing(&mut self, target: JS<Node>, attrs: Vec<Attribute>) { let node: Root<Node> = target.root(); let elem = ElementCast::to_ref(node.r()) .expect("tried to set attrs on non-Element in HTML parsing"); for attr in attrs { elem.set_attribute_from_parser(attr.name, attr.value.into(), None); } } fn remove_from_parent(&mut self, target: JS<Node>) { let node = target.root(); if let Some(ref parent) = node.r().GetParentNode() { parent.r().RemoveChild(node.r()).unwrap(); } }<|fim▁hole|> let node: Root<Node> = node.root(); let script: Option<&HTMLScriptElement> = HTMLScriptElementCast::to_ref(node.r()); script.map(|script| script.mark_already_started()); } fn complete_script(&mut self, node: JS<Node>) -> NextParserState { let node: Root<Node> = node.root(); let script: Option<&HTMLScriptElement> = HTMLScriptElementCast::to_ref(node.r()); if let 
Some(script) = script { return script.prepare(); } NextParserState::Continue } fn reparent_children(&mut self, node: JS<Node>, new_parent: JS<Node>) { let new_parent = new_parent.root(); let new_parent = new_parent.r(); let old_parent = node.root(); let old_parent = old_parent.r(); while let Some(ref child) = old_parent.GetFirstChild() { new_parent.AppendChild(child.r()).unwrap(); } } } impl<'a> Serializable for &'a Node { fn serialize<'wr, Wr: Write>(&self, serializer: &mut Serializer<'wr, Wr>, traversal_scope: TraversalScope) -> io::Result<()> { let node = *self; match (traversal_scope, node.type_id()) { (_, NodeTypeId::Element(..)) => { let elem = ElementCast::to_ref(node).unwrap(); let name = QualName::new(elem.namespace().clone(), elem.local_name().clone()); if traversal_scope == IncludeNode { let attrs = elem.attrs().iter().map(|at| { let attr = at.root(); let qname = QualName::new(attr.r().namespace().clone(), attr.r().local_name().clone()); let value = attr.r().value().clone(); (qname, value) }).collect::<Vec<_>>(); let attr_refs = attrs.iter().map(|&(ref qname, ref value)| { let ar: AttrRef = (&qname, &**value); ar }); try!(serializer.start_elem(name.clone(), attr_refs)); } let children = if let Some(tpl) = HTMLTemplateElementCast::to_ref(node) { // https://github.com/w3c/DOM-Parsing/issues/1 NodeCast::from_ref(&*tpl.Content()).children() } else { node.children() }; for handle in children { try!(handle.r().serialize(serializer, IncludeNode)); } if traversal_scope == IncludeNode { try!(serializer.end_elem(name.clone())); } Ok(()) }, (ChildrenOnly, NodeTypeId::Document) => { for handle in node.children() { try!(handle.r().serialize(serializer, IncludeNode)); } Ok(()) }, (ChildrenOnly, _) => Ok(()), (IncludeNode, NodeTypeId::DocumentType) => { let doctype = DocumentTypeCast::to_ref(node).unwrap(); serializer.write_doctype(&doctype.name()) }, (IncludeNode, NodeTypeId::CharacterData(CharacterDataTypeId::Text)) => { let cdata = 
CharacterDataCast::to_ref(node).unwrap(); serializer.write_text(&cdata.data()) }, (IncludeNode, NodeTypeId::CharacterData(CharacterDataTypeId::Comment)) => { let cdata = CharacterDataCast::to_ref(node).unwrap(); serializer.write_comment(&cdata.data()) }, (IncludeNode, NodeTypeId::CharacterData(CharacterDataTypeId::ProcessingInstruction)) => { let pi = ProcessingInstructionCast::to_ref(node).unwrap(); let data = CharacterDataCast::from_ref(pi).data(); serializer.write_processing_instruction(&pi.target(), &data) }, (IncludeNode, NodeTypeId::DocumentFragment) => Ok(()), (IncludeNode, NodeTypeId::Document) => panic!("Can't serialize Document node itself"), } } } pub enum ParseContext<'a> { Fragment(FragmentContext<'a>), Owner(Option<PipelineId>), } pub fn parse_html(document: &Document, input: String, url: &Url, context: ParseContext) { let parser = match context { ParseContext::Owner(owner) => ServoHTMLParser::new(Some(url.clone()), document, owner), ParseContext::Fragment(fc) => ServoHTMLParser::new_for_fragment(Some(url.clone()), document, fc), }; parser.r().parse_chunk(input.into()); } // https://html.spec.whatwg.org/multipage/#parsing-html-fragments pub fn parse_html_fragment(context_node: &Node, input: DOMString, output: &Node) { let window = window_from_node(context_node); let context_document = document_from_node(context_node); let context_document = context_document.r(); let url = context_document.url(); // Step 1. let loader = DocumentLoader::new(&*context_document.loader()); let document = Document::new(window.r(), Some(url.clone()), IsHTMLDocument::HTMLDocument, None, None, DocumentSource::FromParser, loader); // Step 2. document.r().set_quirks_mode(context_document.quirks_mode()); // Step 11. 
let form = context_node.inclusive_ancestors() .find(|element| element.r().is_htmlformelement()); let fragment_context = FragmentContext { context_elem: context_node, form_elem: form.r(), }; parse_html(document.r(), input, &url, ParseContext::Fragment(fragment_context)); // Step 14. let root_element = document.r().GetDocumentElement().expect("no document element"); let root_node = NodeCast::from_ref(root_element.r()); for child in root_node.children() { output.AppendChild(child.r()).unwrap(); } }<|fim▁end|>
fn mark_script_already_started(&mut self, node: JS<Node>) {
<|file_name|>distributed_mongo.py<|end_file_name|><|fim▁begin|>from dipde.internals.internalpopulation import InternalPopulation from dipde.internals.externalpopulation import ExternalPopulation from dipde.internals.network import Network from dipde.internals.connection import Connection as Connection # from dipde.profiling import profile_simulation, extract_value from mongodistributedconfiguration import MongoDistributedConfiguration import sys import os import logging import time logging.disable(logging.CRITICAL) number_of_processes = int(os.environ.get('NUMBER_OF_NODES',2)) try: rank = int(sys.argv[1]) except: rank = 0 # Settings: t0 = 0. dt = .0001 dv = .0001 tf = .1 update_method = 'approx' approx_order = None tol = 1e-14 # Run simulation: b1 = ExternalPopulation(100) b2 = ExternalPopulation(100) b3 = ExternalPopulation(100) b4 = ExternalPopulation(100) i1 = InternalPopulation(v_min=0, v_max=.02, dv=dv, update_method=update_method, approx_order=approx_order, tol=tol) i2 = InternalPopulation(v_min=0, v_max=.02, dv=dv, update_method=update_method, approx_order=approx_order, tol=tol) i3 = InternalPopulation(v_min=0, v_max=.02, dv=dv, update_method=update_method, approx_order=approx_order, tol=tol) i4 = InternalPopulation(v_min=0, v_max=.02, dv=dv, update_method=update_method, approx_order=approx_order, tol=tol) b1_i1 = Connection(b1, i1, 1, weights=.005) b2_i2 = Connection(b2, i2, 1, weights=.005) b3_i3 = Connection(b3, i3, 1, weights=.005) b4_i4 = Connection(b4, i4, 1, weights=.005) network = Network([b1, b2, b3, b4, i1, i2, i3, i4], [b1_i1, b2_i2, b3_i3, b4_i4]) run_dict = {'t0':t0, 'dt':dt, 'tf':tf, 'distributed_configuration':MongoDistributedConfiguration(rank, number_of_processes=number_of_processes)} import re from dipde.internals.network import Network import cProfile import pstats import StringIO import logging as logging_module prof = cProfile.Profile() prof.runcall(network.run, **run_dict) stream = StringIO.StringIO() p = pstats.Stats(prof, 
stream=stream) p.strip_dirs().sort_stats('cumtime').print_stats(20) if rank == 0: print stream.getvalue() # network.run(**run_dict) # print time.time() - t0 # profile = profile_simulation(simulation, run_dict, logging=False) # total_time = extract_value(profile, 'simulation.py', 'run') # parallel_overhead = extract_value(profile, 'distributedconfiguration.py', 'update') # parallel_win = extract_value(profile, 'internalpopulation.py', 'update') # if rank == 0: print 'total time: %s' % total_time # if rank == 0: print 'parallel_overhead: %s' % parallel_overhead # print 'parallel_win: %s' % parallel_win # 1001 0.001 0.000 0.193 0.000 distributedconfiguration.py:28(update) # if rank == 0: # print profile <|fim▁hole|> # # Visualize: # i1 = simulation.population_list[1] # fig, ax = plt.subplots(figsize=(3,3)) # i1.plot(ax=ax) # plt.xlim([0,tf]) # plt.ylim(ymin=0) # plt.xlabel('Time (s)') # plt.ylabel('Firing Rate (Hz)') # fig.tight_layout() # plt.show() # # # # # # # # class ThreadCallback(threading.Thread): # # def __init__(self, obj, callback, sleep_time): # super(self.__class__, self).__init__() # self.daemon = True # self.callback = callback # self.sleep_time = sleep_time # self.obj = obj # self.start() # # # def run(self): # while True: # self.callback(self.obj) # time.sleep(self.sleep_time) # # def sleep_callback(self): # time.sleep(1) # pass # # def firing_rate_callback(self): # print self.recent_full_firing_rate_dict # tmp = ThreadCallback(simulation, firing_rate_callback, .5)<|fim▁end|>
# print extract_value(profile, 'cumtime', 'simulation.py', 'run')
<|file_name|>test_score_objects.py<|end_file_name|><|fim▁begin|>import pickle import tempfile import shutil import os import numbers import numpy as np from sklearn.utils.testing import assert_almost_equal from sklearn.utils.testing import assert_array_equal from sklearn.utils.testing import assert_raises from sklearn.utils.testing import assert_raises_regexp from sklearn.utils.testing import assert_true from sklearn.utils.testing import ignore_warnings from sklearn.utils.testing import assert_not_equal from sklearn.utils.testing import assert_warns_message from sklearn.base import BaseEstimator from sklearn.metrics import (f1_score, r2_score, roc_auc_score, fbeta_score, log_loss, precision_score, recall_score) from sklearn.metrics.cluster import adjusted_rand_score from sklearn.metrics.scorer import (check_scoring, _PredictScorer, _passthrough_scorer) from sklearn.metrics import make_scorer, get_scorer, SCORERS from sklearn.svm import LinearSVC from sklearn.pipeline import make_pipeline from sklearn.cluster import KMeans from sklearn.dummy import DummyRegressor from sklearn.linear_model import Ridge, LogisticRegression from sklearn.tree import DecisionTreeClassifier, DecisionTreeRegressor from sklearn.datasets import make_blobs from sklearn.datasets import make_classification from sklearn.datasets import make_multilabel_classification from sklearn.datasets import load_diabetes from sklearn.model_selection import train_test_split, cross_val_score from sklearn.model_selection import GridSearchCV from sklearn.multiclass import OneVsRestClassifier from sklearn.externals import joblib REGRESSION_SCORERS = ['r2', 'neg_mean_absolute_error', 'neg_mean_squared_error', 'neg_mean_squared_log_error', 'neg_median_absolute_error', 'mean_absolute_error', 'mean_squared_error', 'median_absolute_error'] CLF_SCORERS = ['accuracy', 'f1', 'f1_weighted', 'f1_macro', 'f1_micro', 'roc_auc', 'average_precision', 'precision', 'precision_weighted', 'precision_macro', 'precision_micro', 
'recall', 'recall_weighted', 'recall_macro', 'recall_micro', 'neg_log_loss', 'log_loss', 'adjusted_rand_score' # not really, but works ] MULTILABEL_ONLY_SCORERS = ['precision_samples', 'recall_samples', 'f1_samples'] def _make_estimators(X_train, y_train, y_ml_train): # Make estimators that make sense to test various scoring methods sensible_regr = DummyRegressor(strategy='median') sensible_regr.fit(X_train, y_train) sensible_clf = DecisionTreeClassifier(random_state=0) sensible_clf.fit(X_train, y_train) sensible_ml_clf = DecisionTreeClassifier(random_state=0) sensible_ml_clf.fit(X_train, y_ml_train) return dict( [(name, sensible_regr) for name in REGRESSION_SCORERS] + [(name, sensible_clf) for name in CLF_SCORERS] + [(name, sensible_ml_clf) for name in MULTILABEL_ONLY_SCORERS] ) X_mm, y_mm, y_ml_mm = None, None, None ESTIMATORS = None TEMP_FOLDER = None def setup_module(): # Create some memory mapped data global X_mm, y_mm, y_ml_mm, TEMP_FOLDER, ESTIMATORS TEMP_FOLDER = tempfile.mkdtemp(prefix='sklearn_test_score_objects_') X, y = make_classification(n_samples=30, n_features=5, random_state=0) _, y_ml = make_multilabel_classification(n_samples=X.shape[0], random_state=0) filename = os.path.join(TEMP_FOLDER, 'test_data.pkl') joblib.dump((X, y, y_ml), filename) X_mm, y_mm, y_ml_mm = joblib.load(filename, mmap_mode='r') ESTIMATORS = _make_estimators(X_mm, y_mm, y_ml_mm) def teardown_module(): global X_mm, y_mm, y_ml_mm, TEMP_FOLDER, ESTIMATORS # GC closes the mmap file descriptors X_mm, y_mm, y_ml_mm, ESTIMATORS = None, None, None, None shutil.rmtree(TEMP_FOLDER) class EstimatorWithoutFit(object): """Dummy estimator to test check_scoring""" pass class EstimatorWithFit(BaseEstimator): """Dummy estimator to test check_scoring""" def fit(self, X, y): return self class EstimatorWithFitAndScore(object): """Dummy estimator to test check_scoring""" def fit(self, X, y): return self def score(self, X, y): return 1.0 class EstimatorWithFitAndPredict(object): """Dummy estimator 
to test check_scoring""" def fit(self, X, y): self.y = y return self def predict(self, X): return self.y class DummyScorer(object): """Dummy scorer that always returns 1.""" def __call__(self, est, X, y): return 1 def test_all_scorers_repr(): # Test that all scorers have a working repr for name, scorer in SCORERS.items(): repr(scorer) def test_check_scoring(): # Test all branches of check_scoring estimator = EstimatorWithoutFit() pattern = (r"estimator should be an estimator implementing 'fit' method," r" .* was passed") assert_raises_regexp(TypeError, pattern, check_scoring, estimator) estimator = EstimatorWithFitAndScore() estimator.fit([[1]], [1]) scorer = check_scoring(estimator) assert_true(scorer is _passthrough_scorer) assert_almost_equal(scorer(estimator, [[1]], [1]), 1.0) estimator = EstimatorWithFitAndPredict() estimator.fit([[1]], [1]) pattern = (r"If no scoring is specified, the estimator passed should have" r" a 'score' method\. The estimator .* does not\.") assert_raises_regexp(TypeError, pattern, check_scoring, estimator) scorer = check_scoring(estimator, "accuracy") assert_almost_equal(scorer(estimator, [[1]], [1]), 1.0) estimator = EstimatorWithFit() scorer = check_scoring(estimator, "accuracy") assert_true(isinstance(scorer, _PredictScorer)) estimator = EstimatorWithFit() scorer = check_scoring(estimator, allow_none=True) assert_true(scorer is None) def test_check_scoring_gridsearchcv(): # test that check_scoring works on GridSearchCV and pipeline. # slightly redundant non-regression test. grid = GridSearchCV(LinearSVC(), param_grid={'C': [.1, 1]}) scorer = check_scoring(grid, "f1") assert_true(isinstance(scorer, _PredictScorer)) pipe = make_pipeline(LinearSVC()) scorer = check_scoring(pipe, "f1") assert_true(isinstance(scorer, _PredictScorer)) # check that cross_val_score definitely calls the scorer # and doesn't make any assumptions about the estimator apart from having a # fit. 
scores = cross_val_score(EstimatorWithFit(), [[1], [2], [3]], [1, 0, 1], scoring=DummyScorer()) assert_array_equal(scores, 1) def test_make_scorer(): # Sanity check on the make_scorer factory function. f = lambda *args: 0 assert_raises(ValueError, make_scorer, f, needs_threshold=True, needs_proba=True) def test_classification_scores(): # Test classification scorers. X, y = make_blobs(random_state=0, centers=2) X_train, X_test, y_train, y_test = train_test_split(X, y, random_state=0) clf = LinearSVC(random_state=0) clf.fit(X_train, y_train) for prefix, metric in [('f1', f1_score), ('precision', precision_score), ('recall', recall_score)]: score1 = get_scorer('%s_weighted' % prefix)(clf, X_test, y_test) score2 = metric(y_test, clf.predict(X_test), pos_label=None, average='weighted') assert_almost_equal(score1, score2) score1 = get_scorer('%s_macro' % prefix)(clf, X_test, y_test) score2 = metric(y_test, clf.predict(X_test), pos_label=None, average='macro') assert_almost_equal(score1, score2) score1 = get_scorer('%s_micro' % prefix)(clf, X_test, y_test) score2 = metric(y_test, clf.predict(X_test), pos_label=None, average='micro') assert_almost_equal(score1, score2) score1 = get_scorer('%s' % prefix)(clf, X_test, y_test) score2 = metric(y_test, clf.predict(X_test), pos_label=1) assert_almost_equal(score1, score2) # test fbeta score that takes an argument scorer = make_scorer(fbeta_score, beta=2) score1 = scorer(clf, X_test, y_test) score2 = fbeta_score(y_test, clf.predict(X_test), beta=2) assert_almost_equal(score1, score2) # test that custom scorer can be pickled unpickled_scorer = pickle.loads(pickle.dumps(scorer)) score3 = unpickled_scorer(clf, X_test, y_test) assert_almost_equal(score1, score3) # smoke test the repr: repr(fbeta_score) def test_regression_scorers(): # Test regression scorers. 
diabetes = load_diabetes() X, y = diabetes.data, diabetes.target X_train, X_test, y_train, y_test = train_test_split(X, y, random_state=0) clf = Ridge() clf.fit(X_train, y_train) score1 = get_scorer('r2')(clf, X_test, y_test) score2 = r2_score(y_test, clf.predict(X_test)) assert_almost_equal(score1, score2) def test_thresholded_scorers(): # Test scorers that take thresholds. X, y = make_blobs(random_state=0, centers=2) X_train, X_test, y_train, y_test = train_test_split(X, y, random_state=0) clf = LogisticRegression(random_state=0) clf.fit(X_train, y_train) score1 = get_scorer('roc_auc')(clf, X_test, y_test) score2 = roc_auc_score(y_test, clf.decision_function(X_test)) score3 = roc_auc_score(y_test, clf.predict_proba(X_test)[:, 1]) assert_almost_equal(score1, score2) assert_almost_equal(score1, score3) logscore = get_scorer('neg_log_loss')(clf, X_test, y_test) logloss = log_loss(y_test, clf.predict_proba(X_test)) assert_almost_equal(-logscore, logloss) # same for an estimator without decision_function clf = DecisionTreeClassifier() clf.fit(X_train, y_train) score1 = get_scorer('roc_auc')(clf, X_test, y_test) score2 = roc_auc_score(y_test, clf.predict_proba(X_test)[:, 1]) assert_almost_equal(score1, score2) # test with a regressor (no decision_function) reg = DecisionTreeRegressor() reg.fit(X_train, y_train) score1 = get_scorer('roc_auc')(reg, X_test, y_test) score2 = roc_auc_score(y_test, reg.predict(X_test)) assert_almost_equal(score1, score2) # Test that an exception is raised on more than two classes X, y = make_blobs(random_state=0, centers=3) X_train, X_test, y_train, y_test = train_test_split(X, y, random_state=0) clf.fit(X_train, y_train) assert_raises(ValueError, get_scorer('roc_auc'), clf, X_test, y_test) def test_thresholded_scorers_multilabel_indicator_data(): # Test that the scorer work with multilabel-indicator format # for multilabel and multi-output multi-class classifier X, y = make_multilabel_classification(allow_unlabeled=False, random_state=0) 
X_train, X_test, y_train, y_test = train_test_split(X, y, random_state=0) # Multi-output multi-class predict_proba clf = DecisionTreeClassifier() clf.fit(X_train, y_train) y_proba = clf.predict_proba(X_test) score1 = get_scorer('roc_auc')(clf, X_test, y_test) score2 = roc_auc_score(y_test, np.vstack(p[:, -1] for p in y_proba).T) assert_almost_equal(score1, score2) # Multi-output multi-class decision_function # TODO Is there any yet? clf = DecisionTreeClassifier() clf.fit(X_train, y_train) clf._predict_proba = clf.predict_proba clf.predict_proba = None clf.decision_function = lambda X: [p[:, 1] for p in clf._predict_proba(X)] y_proba = clf.decision_function(X_test) score1 = get_scorer('roc_auc')(clf, X_test, y_test) score2 = roc_auc_score(y_test, np.vstack(p for p in y_proba).T) assert_almost_equal(score1, score2) # Multilabel predict_proba clf = OneVsRestClassifier(DecisionTreeClassifier()) clf.fit(X_train, y_train) score1 = get_scorer('roc_auc')(clf, X_test, y_test) score2 = roc_auc_score(y_test, clf.predict_proba(X_test)) assert_almost_equal(score1, score2) # Multilabel decision function clf = OneVsRestClassifier(LinearSVC(random_state=0)) clf.fit(X_train, y_train) score1 = get_scorer('roc_auc')(clf, X_test, y_test) score2 = roc_auc_score(y_test, clf.decision_function(X_test)) assert_almost_equal(score1, score2) def test_unsupervised_scorers(): # Test clustering scorers against gold standard labeling. # We don't have any real unsupervised Scorers yet. X, y = make_blobs(random_state=0, centers=2) X_train, X_test, y_train, y_test = train_test_split(X, y, random_state=0) km = KMeans(n_clusters=3) km.fit(X_train) score1 = get_scorer('adjusted_rand_score')(km, X_test, y_test) score2 = adjusted_rand_score(y_test, km.predict(X_test)) assert_almost_equal(score1, score2) @ignore_warnings def test_raises_on_score_list(): # Test that when a list of scores is returned, we raise proper errors. 
X, y = make_blobs(random_state=0) f1_scorer_no_average = make_scorer(f1_score, average=None)<|fim▁hole|> scoring=f1_scorer_no_average) grid_search = GridSearchCV(clf, scoring=f1_scorer_no_average, param_grid={'max_depth': [1, 2]}) assert_raises(ValueError, grid_search.fit, X, y) @ignore_warnings def test_scorer_sample_weight(): # Test that scorers support sample_weight or raise sensible errors # Unlike the metrics invariance test, in the scorer case it's harder # to ensure that, on the classifier output, weighted and unweighted # scores really should be unequal. X, y = make_classification(random_state=0) _, y_ml = make_multilabel_classification(n_samples=X.shape[0], random_state=0) split = train_test_split(X, y, y_ml, random_state=0) X_train, X_test, y_train, y_test, y_ml_train, y_ml_test = split sample_weight = np.ones_like(y_test) sample_weight[:10] = 0 # get sensible estimators for each metric estimator = _make_estimators(X_train, y_train, y_ml_train) for name, scorer in SCORERS.items(): if name in MULTILABEL_ONLY_SCORERS: target = y_ml_test else: target = y_test try: weighted = scorer(estimator[name], X_test, target, sample_weight=sample_weight) ignored = scorer(estimator[name], X_test[10:], target[10:]) unweighted = scorer(estimator[name], X_test, target) assert_not_equal(weighted, unweighted, msg="scorer {0} behaves identically when " "called with sample weights: {1} vs " "{2}".format(name, weighted, unweighted)) assert_almost_equal(weighted, ignored, err_msg="scorer {0} behaves differently when " "ignoring samples and setting sample_weight to" " 0: {1} vs {2}".format(name, weighted, ignored)) except TypeError as e: assert_true("sample_weight" in str(e), "scorer {0} raises unhelpful exception when called " "with sample weights: {1}".format(name, str(e))) @ignore_warnings # UndefinedMetricWarning for P / R scores def check_scorer_memmap(scorer_name): scorer, estimator = SCORERS[scorer_name], ESTIMATORS[scorer_name] if scorer_name in MULTILABEL_ONLY_SCORERS: 
score = scorer(estimator, X_mm, y_ml_mm) else: score = scorer(estimator, X_mm, y_mm) assert isinstance(score, numbers.Number), scorer_name def test_scorer_memmap_input(): # Non-regression test for #6147: some score functions would # return singleton memmap when computed on memmap data instead of scalar # float values. for name in SCORERS.keys(): yield check_scorer_memmap, name def test_deprecated_names(): X, y = make_blobs(random_state=0, centers=2) X_train, X_test, y_train, y_test = train_test_split(X, y, random_state=0) clf = LogisticRegression(random_state=0) clf.fit(X_train, y_train) for name in ('mean_absolute_error', 'mean_squared_error', 'median_absolute_error', 'log_loss'): warning_msg = "Scoring method %s was renamed to" % name for scorer in (get_scorer(name), SCORERS[name]): assert_warns_message(DeprecationWarning, warning_msg, scorer, clf, X, y) assert_warns_message(DeprecationWarning, warning_msg, cross_val_score, clf, X, y, scoring=name) def test_scoring_is_not_metric(): assert_raises_regexp(ValueError, 'make_scorer', check_scoring, LogisticRegression(), f1_score) assert_raises_regexp(ValueError, 'make_scorer', check_scoring, LogisticRegression(), roc_auc_score) assert_raises_regexp(ValueError, 'make_scorer', check_scoring, Ridge(), r2_score) assert_raises_regexp(ValueError, 'make_scorer', check_scoring, KMeans(), adjusted_rand_score)<|fim▁end|>
clf = DecisionTreeClassifier() assert_raises(ValueError, cross_val_score, clf, X, y,
<|file_name|>test_connectors.py<|end_file_name|><|fim▁begin|>from twisted.internet.defer import inlineCallbacks, returnValue from vumi.connectors import ( BaseConnector, ReceiveInboundConnector, ReceiveOutboundConnector, IgnoreMessage) from vumi.tests.utils import LogCatcher from vumi.worker import BaseWorker from vumi.message import TransportUserMessage from vumi.middleware.tests.utils import RecordingMiddleware from vumi.tests.helpers import VumiTestCase, MessageHelper, WorkerHelper class DummyWorker(BaseWorker): def setup_connectors(self): pass def setup_worker(self): pass def teardown_worker(self): pass class BaseConnectorTestCase(VumiTestCase): connector_class = None def setUp(self): self.msg_helper = self.add_helper(MessageHelper()) self.worker_helper = self.add_helper(WorkerHelper()) @inlineCallbacks def mk_connector(self, worker=None, connector_name=None, prefetch_count=None, middlewares=None, setup=False): if worker is None: worker = yield self.worker_helper.get_worker(DummyWorker, {}) if connector_name is None: connector_name = "dummy_connector" connector = self.connector_class(worker, connector_name, prefetch_count=prefetch_count, middlewares=middlewares) if setup: yield connector.setup() returnValue(connector) @inlineCallbacks def mk_consumer(self, *args, **kwargs): conn = yield self.mk_connector(*args, **kwargs) consumer = yield conn._setup_consumer('inbound', TransportUserMessage, lambda msg: None) returnValue((conn, consumer)) class TestBaseConnector(BaseConnectorTestCase): connector_class = BaseConnector @inlineCallbacks def test_creation(self): conn = yield self.mk_connector(connector_name="foo") self.assertEqual(conn.name, "foo") self.assertTrue(isinstance(conn.worker, BaseWorker)) @inlineCallbacks def test_middlewares_consume(self): worker = yield self.worker_helper.get_worker(DummyWorker, {}) middlewares = [RecordingMiddleware( str(i), {'consume_priority': 0, 'publish_priority': 0}, worker) for i in range(3)] conn, consumer = yield 
self.mk_consumer( worker=worker, connector_name='foo', middlewares=middlewares) consumer.unpause() msgs = [] conn._set_default_endpoint_handler('inbound', msgs.append) msg = self.msg_helper.make_inbound("inbound") yield self.worker_helper.dispatch_inbound(msg, 'foo') record = msgs[0].payload.pop('record') self.assertEqual(record, [(str(i), 'inbound', 'foo') for i in range(3)]) @inlineCallbacks def test_middlewares_publish(self): worker = yield self.worker_helper.get_worker(DummyWorker, {}) middlewares = [RecordingMiddleware( str(i), {'consume_priority': 0, 'publish_priority': 0}, worker) for i in range(3)] conn = yield self.mk_connector( worker=worker, connector_name='foo', middlewares=middlewares) yield conn._setup_publisher('outbound') msg = self.msg_helper.make_outbound("outbound") yield conn._publish_message('outbound', msg, 'dummy_endpoint') msgs = self.worker_helper.get_dispatched_outbound('foo') record = msgs[0].payload.pop('record') self.assertEqual(record, [[str(i), 'outbound', 'foo'] for i in range(2, -1, -1)]) @inlineCallbacks def test_pretech_count(self): conn, consumer = yield self.mk_consumer(prefetch_count=10) self.assertEqual(consumer.channel.qos_prefetch_count, 10) @inlineCallbacks def test_setup_raises(self): conn = yield self.mk_connector() self.assertRaises(NotImplementedError, conn.setup) @inlineCallbacks def test_teardown(self): conn, consumer = yield self.mk_consumer() self.assertTrue(consumer.keep_consuming) yield conn.teardown() self.assertFalse(consumer.keep_consuming) @inlineCallbacks def test_paused(self): conn, consumer = yield self.mk_consumer() consumer.pause() self.assertTrue(conn.paused) consumer.unpause() self.assertFalse(conn.paused) @inlineCallbacks def test_pause(self): conn, consumer = yield self.mk_consumer() consumer.unpause() self.assertFalse(consumer.paused) conn.pause() self.assertTrue(consumer.paused) @inlineCallbacks def test_unpause(self): conn, consumer = yield self.mk_consumer() consumer.pause() 
self.assertTrue(consumer.paused) conn.unpause() self.assertFalse(consumer.paused) @inlineCallbacks def test_setup_publisher(self): conn = yield self.mk_connector(connector_name='foo') publisher = yield conn._setup_publisher('outbound') self.assertEqual(publisher.routing_key, 'foo.outbound') @inlineCallbacks def test_setup_consumer(self): conn, consumer = yield self.mk_consumer(connector_name='foo') self.assertTrue(consumer.paused) self.assertEqual(consumer.routing_key, 'foo.inbound') self.assertEqual(consumer.message_class, TransportUserMessage) @inlineCallbacks def test_set_endpoint_handler(self): conn, consumer = yield self.mk_consumer(connector_name='foo') consumer.unpause() msgs = [] conn._set_endpoint_handler('inbound', msgs.append, 'dummy_endpoint') msg = self.msg_helper.make_inbound("inbound") msg.set_routing_endpoint('dummy_endpoint') yield self.worker_helper.dispatch_inbound(msg, 'foo') self.assertEqual(msgs, [msg]) @inlineCallbacks def test_set_none_endpoint_handler(self): conn, consumer = yield self.mk_consumer(connector_name='foo') consumer.unpause() msgs = [] conn._set_endpoint_handler('inbound', msgs.append, None) msg = self.msg_helper.make_inbound("inbound") yield self.worker_helper.dispatch_inbound(msg, 'foo') self.assertEqual(msgs, [msg]) @inlineCallbacks def test_set_default_endpoint_handler(self): conn, consumer = yield self.mk_consumer(connector_name='foo') consumer.unpause() msgs = [] conn._set_default_endpoint_handler('inbound', msgs.append) msg = self.msg_helper.make_inbound("inbound") yield self.worker_helper.dispatch_inbound(msg, 'foo') self.assertEqual(msgs, [msg]) @inlineCallbacks def test_publish_message_with_endpoint(self): conn = yield self.mk_connector(connector_name='foo') yield conn._setup_publisher('outbound') msg = self.msg_helper.make_outbound("outbound") yield conn._publish_message('outbound', msg, 'dummy_endpoint') msgs = self.worker_helper.get_dispatched_outbound('foo') self.assertEqual(msgs, [msg]) class 
TestReceiveInboundConnector(BaseConnectorTestCase): connector_class = ReceiveInboundConnector @inlineCallbacks def test_setup(self): conn = yield self.mk_connector(connector_name='foo') yield conn.setup() conn.unpause() with LogCatcher() as lc: msg = self.msg_helper.make_inbound("inbound") yield self.worker_helper.dispatch_inbound(msg, 'foo') [msg_log] = lc.messages() self.assertTrue(msg_log.startswith("No inbound handler for 'foo'")) with LogCatcher() as lc: event = self.msg_helper.make_ack() yield self.worker_helper.dispatch_event(event, 'foo') [event_log] = lc.messages() self.assertTrue(event_log.startswith("No event handler for 'foo'")) msg = self.msg_helper.make_outbound("outbound") yield conn.publish_outbound(msg) msgs = self.worker_helper.get_dispatched_outbound('foo') self.assertEqual(msgs, [msg]) @inlineCallbacks def test_default_inbound_handler(self): conn = yield self.mk_connector(connector_name='foo', setup=True) with LogCatcher() as lc: conn.default_inbound_handler( self.msg_helper.make_inbound("inbound")) [log] = lc.messages() self.assertTrue(log.startswith("No inbound handler for 'foo'")) @inlineCallbacks def test_default_event_handler(self): conn = yield self.mk_connector(connector_name='foo', setup=True) with LogCatcher() as lc: conn.default_event_handler(self.msg_helper.make_ack()) [log] = lc.messages() self.assertTrue(log.startswith("No event handler for 'foo'")) @inlineCallbacks def test_set_inbound_handler(self): msgs = [] conn = yield self.mk_connector(connector_name='foo', setup=True) conn.unpause() conn.set_inbound_handler(msgs.append) msg = self.msg_helper.make_inbound("inbound") yield self.worker_helper.dispatch_inbound(msg, 'foo') self.assertEqual(msgs, [msg]) @inlineCallbacks def test_set_default_inbound_handler(self): msgs = [] conn = yield self.mk_connector(connector_name='foo', setup=True) conn.unpause() conn.set_default_inbound_handler(msgs.append) msg = self.msg_helper.make_inbound("inbound") yield 
self.worker_helper.dispatch_inbound(msg, 'foo') self.assertEqual(msgs, [msg]) @inlineCallbacks def test_set_event_handler(self): msgs = [] conn = yield self.mk_connector(connector_name='foo', setup=True) conn.unpause() conn.set_event_handler(msgs.append) msg = self.msg_helper.make_ack() yield self.worker_helper.dispatch_event(msg, 'foo') self.assertEqual(msgs, [msg]) @inlineCallbacks def test_set_default_event_handler(self): msgs = [] conn = yield self.mk_connector(connector_name='foo', setup=True) conn.unpause() conn.set_default_event_handler(msgs.append) msg = self.msg_helper.make_ack() yield self.worker_helper.dispatch_event(msg, 'foo') self.assertEqual(msgs, [msg]) @inlineCallbacks def test_publish_outbound(self): conn = yield self.mk_connector(connector_name='foo', setup=True) msg = self.msg_helper.make_outbound("outbound") yield conn.publish_outbound(msg) msgs = self.worker_helper.get_dispatched_outbound('foo') self.assertEqual(msgs, [msg]) @inlineCallbacks def test_inbound_handler_ignore_message(self): def im_handler(msg): raise IgnoreMessage() conn = yield self.mk_connector(connector_name='foo', setup=True) conn.unpause() conn.set_default_inbound_handler(im_handler) msg = self.msg_helper.make_inbound("inbound") with LogCatcher() as lc: yield self.worker_helper.dispatch_inbound(msg, 'foo') [log] = lc.messages() self.assertTrue(log.startswith( "Ignoring msg due to IgnoreMessage(): <Message")) class TestReceiveOutboundConnector(BaseConnectorTestCase): connector_class = ReceiveOutboundConnector @inlineCallbacks def test_setup(self): conn = yield self.mk_connector(connector_name='foo')<|fim▁hole|> yield conn.setup() conn.unpause() with LogCatcher() as lc: msg = self.msg_helper.make_outbound("outbound") yield self.worker_helper.dispatch_outbound(msg, 'foo') [log] = lc.messages() self.assertTrue(log.startswith("No outbound handler for 'foo'")) msg = self.msg_helper.make_inbound("inbound") yield conn.publish_inbound(msg) msgs = 
self.worker_helper.get_dispatched_inbound('foo') self.assertEqual(msgs, [msg]) msg = self.msg_helper.make_ack() yield conn.publish_event(msg) msgs = self.worker_helper.get_dispatched_events('foo') self.assertEqual(msgs, [msg]) @inlineCallbacks def test_default_outbound_handler(self): conn = yield self.mk_connector(connector_name='foo', setup=True) with LogCatcher() as lc: conn.default_outbound_handler( self.msg_helper.make_outbound("outbound")) [log] = lc.messages() self.assertTrue(log.startswith("No outbound handler for 'foo'")) @inlineCallbacks def test_set_outbound_handler(self): msgs = [] conn = yield self.mk_connector(connector_name='foo', setup=True) conn.unpause() conn.set_outbound_handler(msgs.append) msg = self.msg_helper.make_outbound("outbound") yield self.worker_helper.dispatch_outbound(msg, 'foo') self.assertEqual(msgs, [msg]) @inlineCallbacks def test_set_default_outbound_handler(self): msgs = [] conn = yield self.mk_connector(connector_name='foo', setup=True) conn.unpause() conn.set_default_outbound_handler(msgs.append) msg = self.msg_helper.make_outbound("outbound") yield self.worker_helper.dispatch_outbound(msg, 'foo') self.assertEqual(msgs, [msg]) @inlineCallbacks def test_publish_inbound(self): conn = yield self.mk_connector(connector_name='foo', setup=True) msg = self.msg_helper.make_inbound("inbound") yield conn.publish_inbound(msg) msgs = self.worker_helper.get_dispatched_inbound('foo') self.assertEqual(msgs, [msg]) @inlineCallbacks def test_publish_event(self): conn = yield self.mk_connector(connector_name='foo', setup=True) msg = self.msg_helper.make_ack() yield conn.publish_event(msg) msgs = self.worker_helper.get_dispatched_events('foo') self.assertEqual(msgs, [msg]) @inlineCallbacks def test_outbound_handler_nack_message(self): def im_handler(msg): raise IgnoreMessage() conn = yield self.mk_connector(connector_name='foo', setup=True) conn.unpause() conn.set_default_outbound_handler(im_handler) msg = self.msg_helper.make_inbound("inbound") 
with LogCatcher() as lc: yield self.worker_helper.dispatch_outbound(msg, 'foo') [log] = lc.messages() self.assertTrue(log.startswith( "Ignoring msg (with NACK) due to IgnoreMessage(): <Message")) [event] = self.worker_helper.get_dispatched_events('foo') self.assertEqual(event['event_type'], 'nack')<|fim▁end|>
<|file_name|>generate_private_key.go<|end_file_name|><|fim▁begin|>/* * Copyright (C) 2015-2020 Virgil Security Inc. * * All rights reserved. * * Redistribution and use in source and binary forms, with or without * modification, are permitted provided that the following conditions are * met: * * (1) Redistributions of source code must retain the above copyright * notice, this list of conditions and the following disclaimer. *<|fim▁hole|> * distribution. * * (3) Neither the name of the copyright holder nor the names of its * contributors may be used to endorse or promote products derived from * this software without specific prior written permission. * * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ''AS IS'' AND ANY EXPRESS OR * IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED * WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE * DISCLAIMED. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, * INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES * (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR * SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) * HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, * STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING * IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE * POSSIBILITY OF SUCH DAMAGE. * * Lead Maintainer: Virgil Security Inc. 
<support@virgilsecurity.com> */ package kms import ( "encoding/base64" "fmt" "github.com/VirgilSecurity/virgil-sdk-go/v6/crypto/wrapper/phe" "github.com/urfave/cli/v2" "github.com/VirgilSecurity/virgil-cli/utils" ) // // KMSPrivateKey generates KMS Private Key // func KMSPrivateKey() *cli.Command { return &cli.Command{ Name: "client-private", Aliases: []string{"pk"}, Usage: "Generate a new KMS Client Private key", Action: func(context *cli.Context) error { err := printKMSPrivateKey() if err != nil { return utils.CliExit(err) } return err }, } } func GenerateKMSPrivateKey() ([]byte, error) { kmsClient := phe.NewUokmsClient() if err := kmsClient.SetupDefaults(); err != nil { return []byte{}, err } return kmsClient.GenerateClientPrivateKey() } func printKMSPrivateKey() error { key, err := GenerateKMSPrivateKey() if err != nil { return err } fmt.Println(base64.StdEncoding.EncodeToString(key)) return nil }<|fim▁end|>
* (2) Redistributions in binary form must reproduce the above copyright * notice, this list of conditions and the following disclaimer in * the documentation and/or other materials provided with the
<|file_name|>DecisionTreeLearner.cpp<|end_file_name|><|fim▁begin|>#include "DecisionTreeLearner.h" #include "RowListUtil.h" #include <stdexcept> #include <vector> #include <sstream> using namespace std; using namespace ml; typedef vector<DecisionTree*>::iterator DTIterator; typedef vector<DecisionTree*>::const_iterator DTCIterator; typedef vector<DecisionTree*>::reverse_iterator DTRIterator; void DecisionTreeLearner::copy(const DecisionTreeLearner & other) { trees = other.trees; rand = other.rand; } void DecisionTreeLearner::free() { for (DTIterator i = trees.begin(); i != trees.end(); i++) { DecisionTree::RetireInstance(*i); } trees.clear(); } DecisionTreeLearner::DecisionTreeLearner() : trees(), rand() { } DecisionTreeLearner::DecisionTreeLearner(const Rand & r) : trees(), rand(r) { } DecisionTreeLearner::DecisionTreeLearner(const DecisionTreeLearner & other) : trees(other.trees), rand(other.rand) { } DecisionTreeLearner & DecisionTreeLearner::operator = (const DecisionTreeLearner & other) { if (&other != this) { free(); copy(other); } return *this; } DecisionTreeLearner::~DecisionTreeLearner() { free(); } void DecisionTreeLearner::train(Matrix & features, Matrix & labels, Matrix *testSet, Matrix * testLabels) { free(); if (features.rows() != labels.rows()) { stringstream ss; ss << "Features and Labels must be equinumerous" << " at " << __LINE__ << " in " << __FILE__; throw std::runtime_error(ss.str()); } std::vector<int> FeatureAttrs; FeatureAttrs.resize(features.cols());<|fim▁hole|> FeatureCommonValues.resize(features.cols()); for (int i = 0; i < FeatureAttrs.size(); i++) { FeatureCommonValues[i] = features.mostCommonValue(i); FeatureAttrs[i] = features.valueCount(i); } trees.resize(labels.cols()); Matrix TurnedLabels; TransposeMatrix(labels, TurnedLabels); for (int i = 0; i < trees.size(); i++) { trees[i] = DecisionTree::CreateInstance(FeatureAttrs, FeatureCommonValues, labels.valueCount(i), labels.mostCommonValue(i), features, TurnedLabels[i]); } } void 
DecisionTreeLearner::predict(const std::vector<double> & features, std::vector<double> & labels) { for (int i = 0; i < trees.size(); i++) { labels[i] = (trees[i])->Classify(features); } }<|fim▁end|>
std::vector<int> FeatureCommonValues;
<|file_name|>lockdirectory.go<|end_file_name|><|fim▁begin|>// Copyright 2014 Canonical Ltd. // Licensed under the AGPLv3, see LICENCE file for details. package upgrades import ( "fmt" "path" "launchpad.net/juju-core/utils/exec" ) var ubuntuHome = "/home/ubuntu" // Previously the lock directory was created when the uniter started. This // allows serialization of all of the hook execution across units running on a // single machine. This lock directory is now also used but the juju-run // command on the host machine. juju-run also gets a lock on the hook // execution fslock prior to execution. However, the lock directory was owned // by root, and the juju-run process was being executed by the ubuntu user, so // we need to change the ownership of the lock directory to ubuntu:ubuntu. // Also we need to make sure that this directory exists on machines with no // units. func ensureLockDirExistsAndUbuntuWritable(context Context) error { lockDir := path.Join(context.AgentConfig().DataDir(), "locks") // We only try to change ownership if there is an ubuntu user // defined, and we determine this by the existance of the home dir. command := fmt.Sprintf(""+ "mkdir -p %s\n"+ "[ -e %s ] && chown ubuntu:ubuntu %s\n", lockDir, ubuntuHome, lockDir) logger.Tracef("command: %s", command) result, err := exec.RunCommands(exec.RunParams{ Commands: command, }) if err != nil { return err<|fim▁hole|> } logger.Tracef("stdout: %s", result.Stdout) return nil }<|fim▁end|>
<|file_name|>decentral_module_management.cpp<|end_file_name|><|fim▁begin|><|fim▁hole|> * * This file depends on decentral_module_management.hpp **/ #include "decentral_module_management.hpp" namespace module_management { ModManDecentral::ModManDecentral(ros::NodeHandle roshandle):ModManBase(roshandle, false) { std::ostringstream temp; temp.str(""); temp << name.project_namespace << "robot_registration"; logIn = node.rosHandle.serviceClient<voraus::robot_registration>(temp.str().c_str()); robot.state = voraus::Voraus::unregistered; robot.lastHeartbeatWithMyIdCounter = 0; temp.str(""); temp << name.project_namespace << "known_robot_list"; serverHeartbeat = node.rosHandle.subscribe(temp.str().c_str(), 50, &ModManDecentral::serverHeartbeatCallback, this); temp.str(""); temp << name.project_namespace << "robot_state"; robotHeartbeat = node.rosHandle.advertise<voraus::heartbeat>(temp.str().c_str(), 50); } int ModManDecentral::loop(const ros::TimerEvent& event) { if (robot.state == voraus::Voraus::idle || robot.state == voraus::Voraus::working || robot.state == voraus::Voraus::failure) { //the robot will be unregistered if he didn't send a heartbeat-message after this time if (robot.lastHeartbeatTime < event.current_real - robot.maxTimeWithoutHeartbeat) { ROS_ERROR("%s master is lost. 
Trying to reconnect.", name.full.c_str()); registerAtServer(); } //a warning will be printed if the master didn't send a heartbeat-message after this time if (robot.lastHeartbeatTime < event.current_real - robot.maxTimeWithoutHeartbeat * 0.7) { ROS_WARN("%s no heartbeat from master since %.2f sec", name.full.c_str(), event.current_real.toSec() - robot.lastHeartbeatTime.toSec()); } sendHeartbeat(); } } int ModManDecentral::sendHeartbeat() { voraus::heartbeat temp_heartbeat; temp_heartbeat.header.stamp = ros::Time::now(); temp_heartbeat.current_known_robot_count = 1; temp_heartbeat.id = name.id; temp_heartbeat.name = name.name; temp_heartbeat.robotState = robot.state; robotHeartbeat.publish(temp_heartbeat); return 0; } int ModManDecentral::registerAtServer() { voraus::robot_registration logInTemp; logInTemp.request.name = name.name; if (logIn.call(logInTemp)) { robot.state = voraus::Voraus::idle; name.id = logInTemp.response.id; robot.maxTimeWithoutHeartbeat = ros::Duration(logInTemp.response.timeout); central.task_scheduling = logInTemp.response.task_scheduling; central.localisation = logInTemp.response.localisation; central.path_planing = logInTemp.response.path_planing; central.demo = logInTemp.response.demo; } else { robot.state = voraus::Voraus::unregistered; name.id = 0; robot.maxTimeWithoutHeartbeat = ros::Duration(3600); central.task_scheduling = true; central.localisation = true; central.path_planing = true; central.demo = true; } robot.lastHeartbeatWithMyIdCounter = 0; buildFullName(); if (name.id == 0) { ROS_ERROR("%s server unreachable", name.full.c_str()); } else { std::ostringstream temp; temp.str(""); temp << "Module-Config: "; temp << "task_scheduling = " << ((central.task_scheduling) ? "central" : "decentral"); temp << "; localisation = " << ((central.localisation) ? "central" : "decentral"); temp << "; path_planing = " << ((central.path_planing) ? "central" : "decentral"); temp << "; demo = " << ((central.demo) ? 
"central" : "decentral"); ROS_INFO("%s Connected to server; TimeOut: %.2fs; %s", name.full.c_str(), robot.maxTimeWithoutHeartbeat.toSec(), temp.str().c_str()); } return 0; } void ModManDecentral::serverHeartbeatCallback(const voraus::heartbeat event) { if (event.id == name.id) { robot.lastHeartbeatWithMyIdCounter = 0; if (event.robotState != robot.state) { ROS_WARN("%s server has a wrong state for this robot (%i instead of %i)", name.full.c_str(), event.robotState, robot.state);} } else { robot.lastHeartbeatWithMyIdCounter++; } if (robot.lastHeartbeatWithMyIdCounter >= event.current_known_robot_count * 2) { registerAtServer(); } robot.lastHeartbeatTime = event.header.stamp; } }<|fim▁end|>
/** \file decentral_module_management.cpp * Contains the class functions of the decentral module managemener in * the \link voraus::Voraus Voraus \endlink framework. * Author : Martin Seidel
<|file_name|>common.py<|end_file_name|><|fim▁begin|>import util, pexpect, time, math from pymavlink import mavwp # a list of pexpect objects to read while waiting for # messages. This keeps the output to stdout flowing expect_list = [] def expect_list_clear(): '''clear the expect list''' global expect_list for p in expect_list[:]: expect_list.remove(p) def expect_list_extend(list): '''extend the expect list''' global expect_list expect_list.extend(list) def idle_hook(mav): '''called when waiting for a mavlink message''' global expect_list for p in expect_list: util.pexpect_drain(p) def message_hook(mav, msg): '''called as each mavlink msg is received''' idle_hook(mav) def expect_callback(e): '''called when waiting for a expect pattern''' global expect_list for p in expect_list: if p == e:<|fim▁hole|>def get_distance(loc1, loc2): '''get ground distance between two locations''' dlat = loc2.lat - loc1.lat dlong = loc2.lng - loc1.lng return math.sqrt((dlat*dlat) + (dlong*dlong)) * 1.113195e5 def get_bearing(loc1, loc2): '''get bearing from loc1 to loc2''' off_x = loc2.lng - loc1.lng off_y = loc2.lat - loc1.lat bearing = 90.00 + math.atan2(-off_y, off_x) * 57.2957795 if bearing < 0: bearing += 360.00 return bearing; def wait_altitude(mav, alt_min, alt_max, timeout=30): climb_rate = 0 previous_alt = 0 '''wait for a given altitude range''' tstart = time.time() print("Waiting for altitude between %u and %u" % (alt_min, alt_max)) while time.time() < tstart + timeout: m = mav.recv_match(type='VFR_HUD', blocking=True) climb_rate = m.alt - previous_alt previous_alt = m.alt print("Wait Altitude: Cur:%u, min_alt:%u, climb_rate: %u" % (m.alt, alt_min , climb_rate)) if abs(climb_rate) > 0: tstart = time.time(); if m.alt >= alt_min and m.alt <= alt_max: print("Altitude OK") return True print("Failed to attain altitude range") return False def wait_groundspeed(mav, gs_min, gs_max, timeout=30): '''wait for a given ground speed range''' tstart = time.time() print("Waiting for 
groundspeed between %.1f and %.1f" % (gs_min, gs_max)) while time.time() < tstart + timeout: m = mav.recv_match(type='VFR_HUD', blocking=True) print("Wait groundspeed %.1f, target:%.1f" % (m.groundspeed, gs_min)) if m.groundspeed >= gs_min and m.groundspeed <= gs_max: return True print("Failed to attain groundspeed range") return False def wait_roll(mav, roll, accuracy, timeout=30): '''wait for a given roll in degrees''' tstart = time.time() print("Waiting for roll of %u" % roll) while time.time() < tstart + timeout: m = mav.recv_match(type='ATTITUDE', blocking=True) r = math.degrees(m.roll) print("Roll %u" % r) if math.fabs(r - roll) <= accuracy: print("Attained roll %u" % roll) return True print("Failed to attain roll %u" % roll) return False def wait_pitch(mav, pitch, accuracy, timeout=30): '''wait for a given pitch in degrees''' tstart = time.time() print("Waiting for pitch of %u" % pitch) while time.time() < tstart + timeout: m = mav.recv_match(type='ATTITUDE', blocking=True) r = math.degrees(m.pitch) print("Pitch %u" % r) if math.fabs(r - pitch) <= accuracy: print("Attained pitch %u" % pitch) return True print("Failed to attain pitch %u" % pitch) return False def wait_heading(mav, heading, accuracy=5, timeout=30): '''wait for a given heading''' tstart = time.time() print("Waiting for heading %u with accuracy %u" % (heading, accuracy)) while time.time() < tstart + timeout: m = mav.recv_match(type='VFR_HUD', blocking=True) print("Heading %u" % m.heading) if math.fabs(m.heading - heading) <= accuracy: print("Attained heading %u" % heading) return True print("Failed to attain heading %u" % heading) return False def wait_distance(mav, distance, accuracy=5, timeout=30): '''wait for flight of a given distance''' tstart = time.time() start = mav.location() while time.time() < tstart + timeout: pos = mav.location() delta = get_distance(start, pos) print("Distance %.2f meters" % delta) if math.fabs(delta - distance) <= accuracy: print("Attained distance %.2f meters OK" 
% delta) return True if delta > (distance + accuracy): print("Failed distance - overshoot delta=%f distance=%f" % (delta, distance)) return False print("Failed to attain distance %u" % distance) return False def wait_location(mav, loc, accuracy=5, timeout=30, target_altitude=None, height_accuracy=-1): '''wait for arrival at a location''' tstart = time.time() if target_altitude is None: target_altitude = loc.alt print("Waiting for location %.4f,%.4f at altitude %.1f height_accuracy=%.1f" % ( loc.lat, loc.lng, target_altitude, height_accuracy)) while time.time() < tstart + timeout: pos = mav.location() delta = get_distance(loc, pos) print("Distance %.2f meters alt %.1f" % (delta, pos.alt)) if delta <= accuracy: if height_accuracy != -1 and math.fabs(pos.alt - target_altitude) > height_accuracy: continue print("Reached location (%.2f meters)" % delta) return True print("Failed to attain location") return False def wait_waypoint(mav, wpnum_start, wpnum_end, allow_skip=True, max_dist=2, timeout=400, mode=None): '''wait for waypoint ranges''' tstart = time.time() # this message arrives after we set the current WP start_wp = mav.waypoint_current() current_wp = start_wp print("\ntest: wait for waypoint ranges start=%u end=%u\n\n" % (wpnum_start, wpnum_end)) # if start_wp != wpnum_start: # print("test: Expected start waypoint %u but got %u" % (wpnum_start, start_wp)) # return False while time.time() < tstart + timeout: seq = mav.waypoint_current() m = mav.recv_match(type='NAV_CONTROLLER_OUTPUT', blocking=True) wp_dist = m.wp_dist m = mav.recv_match(type='VFR_HUD', blocking=True) # if we exited the required mode, finish if mode is not None and mav.flightmode != mode: print('Exited %s mode' % mode) return True print("test: WP %u (wp_dist=%u Alt=%d), current_wp: %u, wpnum_end: %u" % (seq, wp_dist, m.alt, current_wp, wpnum_end)) if seq == current_wp+1 or (seq > current_wp+1 and allow_skip): print("test: Starting new waypoint %u" % seq) tstart = time.time() current_wp = seq # 
the wp_dist check is a hack until we can sort out the right seqnum # for end of mission #if current_wp == wpnum_end or (current_wp == wpnum_end-1 and wp_dist < 2): if (current_wp == wpnum_end and wp_dist < max_dist): print("Reached final waypoint %u" % seq) return True if (seq >= 255): print("Reached final waypoint %u" % seq) return True if seq > current_wp+1: print("Failed: Skipped waypoint! Got wp %u expected %u" % (seq, current_wp+1)) return False print("Failed: Timed out waiting for waypoint %u of %u" % (wpnum_end, wpnum_end)) return False def save_wp(mavproxy, mav): mavproxy.send('rc 7 2000\n') mav.recv_match(condition='RC_CHANNELS_RAW.chan7_raw==2000', blocking=True) mavproxy.send('rc 7 1000\n') mav.recv_match(condition='RC_CHANNELS_RAW.chan7_raw==1000', blocking=True) def wait_mode(mav, mode): '''wait for a flight mode to be engaged''' print("Waiting for mode %s" % mode) mav.recv_match(condition='MAV.flightmode.upper()=="%s".upper()' % mode, blocking=True) print("Got mode %s" % mode) def mission_count(filename): '''load a mission from a file and return number of waypoints''' wploader = mavwp.MAVWPLoader() wploader.load(filename) num_wp = wploader.count() return num_wp<|fim▁end|>
continue util.pexpect_drain(p)
<|file_name|>ThreadUserTime.java<|end_file_name|><|fim▁begin|>/* * Copyright (c) 2004, 2015, Oracle and/or its affiliates. All rights reserved. * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER. * * This code is free software; you can redistribute it and/or modify it * under the terms of the GNU General Public License version 2 only, as * published by the Free Software Foundation. * * This code is distributed in the hope that it will be useful, but WITHOUT * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License * version 2 for more details (a copy is included in the LICENSE file that * accompanied this code). * * You should have received a copy of the GNU General Public License version * 2 along with this work; if not, write to the Free Software Foundation, * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA. * * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA * or visit www.oracle.com if you need additional information or have any * questions. 
*/ /* * @test * @bug 4997799 * @summary Basic test of ThreadMXBean.getThreadUserTime and * getCurrentThreadUserTime.<|fim▁hole|> import java.lang.management.*; public class ThreadUserTime { private static ThreadMXBean mbean = ManagementFactory.getThreadMXBean(); private static boolean testFailed = false; private static boolean done = false; private static Object obj = new Object(); private static final int NUM_THREADS = 10; private static Thread[] threads = new Thread[NUM_THREADS]; private static long[] times = new long[NUM_THREADS]; // careful about this value private static final int DELTA = 100; public static void main(String[] argv) throws Exception { if (!mbean.isCurrentThreadCpuTimeSupported()) { return; } // disable user time if (mbean.isThreadCpuTimeEnabled()) { mbean.setThreadCpuTimeEnabled(false); } Thread curThread = Thread.currentThread(); long t = mbean.getCurrentThreadUserTime(); if (t != -1) { throw new RuntimeException("Invalid CurrenThreadUserTime returned = " + t + " expected = -1"); } if (mbean.isThreadCpuTimeSupported()) { long t1 = mbean.getThreadUserTime(curThread.getId()); if (t1 != -1) { throw new RuntimeException("Invalid ThreadUserTime returned = " + t1 + " expected = -1"); } } // Enable CPU Time measurement if (!mbean.isThreadCpuTimeEnabled()) { mbean.setThreadCpuTimeEnabled(true); } if (!mbean.isThreadCpuTimeEnabled()) { throw new RuntimeException("ThreadUserTime is expected to be enabled"); } long time = mbean.getCurrentThreadUserTime(); if (time < 0) { throw new RuntimeException("Invalid user time returned = " + time); } if (!mbean.isThreadCpuTimeSupported()) { return; } // Expected to be time1 >= time long time1 = mbean.getThreadUserTime(curThread.getId()); if (time1 < time) { throw new RuntimeException("User time " + time1 + " expected >= " + time); } System.out.println(curThread.getName() + " Current Thread User Time = " + time + " user time = " + time1); for (int i = 0; i < NUM_THREADS; i++) { threads[i] = new MyThread("MyThread-" 
+ i); threads[i].start(); } waitUntilThreadBlocked(); for (int i = 0; i < NUM_THREADS; i++) { times[i] = mbean.getThreadUserTime(threads[i].getId()); } goSleep(200); for (int i = 0; i < NUM_THREADS; i++) { long newTime = mbean.getThreadUserTime(threads[i].getId()); if (times[i] > newTime) { throw new RuntimeException("TEST FAILED: " + threads[i].getName() + " previous user user time = " + times[i] + " > current user user time = " + newTime); } if ((times[i] + DELTA) < newTime) { throw new RuntimeException("TEST FAILED: " + threads[i].getName() + " user time = " + newTime + " previous user time " + times[i] + " out of expected range"); } System.out.println(threads[i].getName() + " Previous User Time = " + times[i] + " Current User time = " + newTime); } synchronized (obj) { done = true; obj.notifyAll(); } for (int i = 0; i < NUM_THREADS; i++) { try { threads[i].join(); } catch (InterruptedException e) { System.out.println("Unexpected exception is thrown."); e.printStackTrace(System.out); testFailed = true; break; } } if (testFailed) { throw new RuntimeException("TEST FAILED"); } System.out.println("Test passed"); } private static void goSleep(long ms) throws Exception { try { Thread.sleep(ms); } catch (InterruptedException e) { System.out.println("Unexpected exception is thrown."); throw e; } } private static void waitUntilThreadBlocked() throws Exception { int count = 0; while (count != NUM_THREADS) { goSleep(100); count = 0; for (int i = 0; i < NUM_THREADS; i++) { ThreadInfo info = mbean.getThreadInfo(threads[i].getId()); if (info.getThreadState() == Thread.State.WAITING) { count++; } } } } static class MyThread extends Thread { public MyThread(String name) { super(name); } public void run() { double sum = 0; for (int i = 0; i < 5000; i++) { double r = Math.random(); double x = Math.pow(3, r); sum += x - r; } synchronized (obj) { while (!done) { try { obj.wait(); } catch (InterruptedException e) { System.out.println("Unexpected exception is thrown."); 
e.printStackTrace(System.out); testFailed = true; break; } } } sum = 0; for (int i = 0; i < 5000; i++) { double r = Math.random(); double x = Math.pow(3, r); sum += x - r; } long time1 = mbean.getCurrentThreadCpuTime(); long utime1 = mbean.getCurrentThreadUserTime(); long time2 = mbean.getThreadCpuTime(getId()); long utime2 = mbean.getThreadUserTime(getId()); System.out.println(getName() + ":"); System.out.println("CurrentThreadUserTime = " + utime1 + " ThreadUserTime = " + utime2); System.out.println("CurrentThreadCpuTime = " + time1 + " ThreadCpuTime = " + time2); if (time1 > time2) { throw new RuntimeException("TEST FAILED: " + getName() + " CurrentThreadCpuTime = " + time1 + " > ThreadCpuTime = " + time2); } if (utime1 > utime2) { throw new RuntimeException("TEST FAILED: " + getName() + " CurrentThreadUserTime = " + utime1 + " > ThreadUserTime = " + utime2); } } } }<|fim▁end|>
* @author Mandy Chung * @modules java.management */
<|file_name|>3b54bf9e29f7_nec_plugin_sharednet.py<|end_file_name|><|fim▁begin|># vim: tabstop=4 shiftwidth=4 softtabstop=4 # # Copyright 2013 OpenStack Foundation # # Licensed under the Apache License, Version 2.0 (the "License"); you may<|fim▁hole|># # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. # """NEC plugin sharednet Revision ID: 3b54bf9e29f7 Revises: 511471cc46b Create Date: 2013-02-17 09:21:48.287134 """ # revision identifiers, used by Alembic. revision = '3b54bf9e29f7' down_revision = '511471cc46b' # Change to ['*'] if this migration applies to all plugins migration_for_plugins = [ 'neutron.plugins.nec.nec_plugin.NECPluginV2' ] from alembic import op import sqlalchemy as sa from neutron.db import migration def upgrade(active_plugin=None, options=None): if not migration.should_run(active_plugin, migration_for_plugins): return op.create_table( 'ofctenantmappings', sa.Column('ofc_id', sa.String(length=255), nullable=False), sa.Column('quantum_id', sa.String(length=36), nullable=False), sa.PrimaryKeyConstraint('quantum_id'), sa.UniqueConstraint('ofc_id') ) op.create_table( 'ofcnetworkmappings', sa.Column('ofc_id', sa.String(length=255), nullable=False), sa.Column('quantum_id', sa.String(length=36), nullable=False), sa.PrimaryKeyConstraint('quantum_id'), sa.UniqueConstraint('ofc_id') ) op.create_table( 'ofcportmappings', sa.Column('ofc_id', sa.String(length=255), nullable=False), sa.Column('quantum_id', sa.String(length=36), nullable=False), sa.PrimaryKeyConstraint('quantum_id'), sa.UniqueConstraint('ofc_id') ) op.create_table( 'ofcfiltermappings', sa.Column('ofc_id', sa.String(length=255), nullable=False), sa.Column('quantum_id', 
sa.String(length=36), nullable=False), sa.PrimaryKeyConstraint('quantum_id'), sa.UniqueConstraint('ofc_id') ) def downgrade(active_plugin=None, options=None): if not migration.should_run(active_plugin, migration_for_plugins): return op.drop_table('ofcfiltermappings') op.drop_table('ofcportmappings') op.drop_table('ofcnetworkmappings') op.drop_table('ofctenantmappings')<|fim▁end|>
# not use this file except in compliance with the License. You may obtain # a copy of the License at
<|file_name|>to_tx2.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*- """ Created on Thu Nov 6 15:32:36 2014 @author: commun Converts filtered data sets to tx2 format used in Aarhusinv. CHANGELOG: """ #import of needed modules import numpy as np #~ def to_tx2(path_filt, path_tx2, electrode_spacing, ngates=20, pulselength=2): #~ path_filt = '../shiprock_filt/v3/l6sk0n_1_1b.dat' path_filt = '../shiprock_filt/v4/l8_1_mg_4b.dat' #~ path_tx2 = '../shiprock_tx2/' path_tx2 = '../' electrode_spacing = 2 pulselength=2 ngates=20 frags = path_filt.split('/') lid = frags[-1][:-4] ngates = ngates elec_sp = electrode_spacing pl = pulselength path_filt = path_filt path_tx2 = path_tx2 filt = np.genfromtxt(path_filt, delimiter='\t', skip_header=1) ######################################################################## # DATA WRANGLING # ######################################################################## out = np.zeros((len(filt), 28+4*ngates)) #x-electrode position along measuring tape (m) out[:,0:4] = (filt[:,0:4]-1)*elec_sp #x-position of electrodes #eg use of gps coordinates if available out[:,4:8] = (filt[:,0:4]-1)*elec_sp #y-position of electrodes #~ out[:,8:12] #z-position of electrodes #~ out[:,12:16] #apparent resistivity a = (filt[:,0]-1)*elec_sp b = (filt[:,1]-1)*elec_sp m = (filt[:,2]-1)*elec_sp n = (filt[:,3]-1)*elec_sp am = np.sqrt((m-a)**2) an = np.sqrt((n-a)**2) bm = np.sqrt((m-b)**2) bn = np.sqrt((n-b)**2) k = 2*np.pi*(1/(1/am-1/an-1/bm+1/bn)) #~ out[:,16] = filt[:,4] out[:,16] = k[:]*filt[:,4] #deviation resistance out[:,17] = filt[:,6]/10 #resistance flag (0 keeps data, 1 removes data) #~ out[:,18] #number of ip gates out[:,19] = ngates #ip values [mV/V] per gate out[:,20:20+ngates] = filt[:,9:29] #mdelay [ms] #~ out[:,20+ngates] = filt[0,29] out[:,20+ngates] = filt[0,29] #gate lengths [ms] #~ out[:,21+ngates:21+ngates*2] = filt[:,30:50] out[:,21+ngates:21+ngates*2] = filt[:,30:50] #deviation of every window #for syscal files put 0.0 until proper error 
model is introduced #for ares: use values given by device #check for device c, d = np.shape(filt) if d>51: out[:,21+ngates*2:21+ngates*3] = filt[:,51:] else: out[:,21+ngates*2:21+ngates*3] = 0.1 #ip flag #~ out[:,21+ngats*3:21+ngates*4] #stacking out[:,21+ngates*4] = filt[:,50] <|fim▁hole|>out[:,23+ngates*4] = pl/2 #Ton = pulse length [s] out[:,24+ngates*4] = pl*1000 #Toff = pulse length [s] out[:,25+ngates*4] = pl*1000 #Tend = time used to collect decay curve [s] out[:,26+ngates*4] = (np.sum(filt[0,30:50])+filt[0,29]) #Tstart = time at which the decay curve is starting to be measured out[:,27+ngates*4] = filt[0,29] ####################################################################### # WRITE TO FILE # ####################################################################### M = [] G = [] STD = [] IP_flag = [] Mfmt = [] Gfmt = [] STDfmt = [] IP_flagfmt = [] for num in range(ngates): M.append('M' + str(num+1)) Mfmt.append('%.3f\t') G.append('Gate' + str(num+1)) Gfmt.append('%.3f\t') STD.append('Std' + str(num+1)) STDfmt.append('%.2f\t') IP_flag.append('IP_Flag' + str(num+1)) IP_flagfmt.append('%d\t') #~ print(M) M = '\t'.join(M) G = '\t'.join(G) STD = '\t'.join(STD) IP_flag = '\t'.join(IP_flag) Mfmt = ''.join(Mfmt) Gfmt = ''.join(Gfmt) STDfmt = ''.join(STDfmt) IP_flagfmt = ''.join(IP_flagfmt) np.savetxt(path_tx2 + lid + '.tx2', out, #~ fmt='%3.6f', fmt='%.2f\t%.2f\t%.2f\t%.2f\t%.2f\t%.2f\t%.2f\t%.2f\t%.2f\t%.2f\t%.2f\t%.2f\t%.2f\t%.2f\t%.2f\t%.2f\t' + #electrode positions '%f\t%.3f\t%d\t%d\t' + #resistance, devR, ResFlag, Ngates Mfmt + '%.3f\t' + Gfmt + STDfmt + IP_flagfmt + '%d\t%.2f\t%.2f\t%.2f\t%.2f\t%.2f\t%.2f', delimiter='\t', header='xA xB xM xN UTMxA UTMxB UTMxM UTMxN UTMyA UTMyB\t' + 'UTMyM UTMyN zA zB zM zN Res Dev ResFlag Ngates\t' + M + '\tMdly\t' + G + '\t' + STD + '\t' + IP_flag + '\tStack Current\tWaveType Ton Toff Tend Tstart', comments='')<|fim▁end|>
#current = (50% of pulse length) out[:,22+ngates*4] = pl/2 #wave type
<|file_name|>integrations.py<|end_file_name|><|fim▁begin|>from funkybomb import Template, Text from application.util import route from templates import documentation from templates.util import template @route('/docs/integrations') @template(documentation.tmpl) async def docs_integrations_home(req): tmpl = Template() tmpl.p + 'Coming soon.' <|fim▁hole|> 'content': tmpl, 'headline': Text('Integrations') } @route('/docs/integrations/flask') @template(documentation.tmpl) async def docs_integrations_flask(req): tmpl = Template() tmpl.p + 'Coming soon.' return { 'content': tmpl, 'headline': Text('Integrating with Flask') }<|fim▁end|>
return {
<|file_name|>lock.test.js<|end_file_name|><|fim▁begin|><|fim▁hole|> module("lock"); test('create', function () { var lock1 = $.lock(), lock2 = $.lock(false), lock3 = $.lock(true); expect(3); ok(!lock1.isLocked()); ok(!lock2.isLocked()); ok(lock3.isLocked()); }); var lock = $.lock(); test('lock', function () { lock.unlock().lock(); expect(1); ok(lock.isLocked()); }); test('unlock', function () { lock.lock().unlock(); expect(1); ok(!lock.isLocked()); }); })<|fim▁end|>
$(function(){ function notOk(s, m) {equal(!!s,false,m);}
<|file_name|>genericpilot.go<|end_file_name|><|fim▁begin|>package genericpilot import ( "sync" "time" "github.com/golang/glog" "k8s.io/client-go/kubernetes" "k8s.io/client-go/tools/record" clientset "github.com/jetstack/navigator/pkg/client/clientset/versioned" listersv1alpha1 "github.com/jetstack/navigator/pkg/client/listers/navigator/v1alpha1" "github.com/jetstack/navigator/pkg/pilot/genericpilot/controller" "github.com/jetstack/navigator/pkg/pilot/genericpilot/leaderelection" "github.com/jetstack/navigator/pkg/pilot/genericpilot/processmanager" ) type GenericPilot struct { Options Options // TODO: remove use of the kubernetes clientset. Absorb required // functionality into the navigator api group kubeClientset kubernetes.Interface client clientset.Interface pilotLister listersv1alpha1.PilotLister recorder record.EventRecorder controller *controller.Controller // process is a reference to a process manager for the application this // Pilot manages process processmanager.Interface // shutdown is true when the process has been told to gracefully exit shutdown bool // lock is used internally to coordinate updates to fields on the // GenericPilot structure lock sync.Mutex elector leaderelection.Interface } func (g *GenericPilot) Run() error { glog.Infof("Starting generic pilot controller") // setup healthz handlers g.serveHealthz() ctrlStopCh := make(chan struct{}) defer close(ctrlStopCh) var err error<|fim▁hole|> case <-g.Options.StopCh: glog.Infof("Shutdown signal received") case <-g.waitForProcess(): if err = g.process.Error(); err != nil { glog.Errorf("Underlying process failed with error: %s", err) } else { glog.Errorf("Underlying process unexpectedly exited") } case err = <-g.runController(ctrlStopCh): if err != nil { glog.Errorf("Control loop failed with error: %s", err) } else { glog.Errorf("Control loop unexpectedly exited") } case err = <-g.runElector(ctrlStopCh): if err != nil { glog.Errorf("Leader elector failed with error: %s", err) } else { 
glog.Errorf("Leader elector unexpectedly exited") } } thisPilot, err := g.controller.ThisPilot() if err != nil { return err } return g.stop(thisPilot) } // waitForProcess will return a chan that will be closed once the underlying // subprocess exits. This function exists to 'mask' the fact the process may // not ever exist/be started (as starting the process relies on the Pilot // resource existing in the API). func (g *GenericPilot) waitForProcess() <-chan struct{} { out := make(chan struct{}) go func() { defer close(out) for { if g.process != nil { break } time.Sleep(2) } <-g.process.Wait() }() return out } func (g *GenericPilot) runController(stopCh <-chan struct{}) <-chan error { out := make(chan error, 1) go func() { defer close(out) out <- g.controller.Run(stopCh) }() return out } func (g *GenericPilot) runElector(stopCh <-chan struct{}) <-chan error { out := make(chan error, 1) go func() { defer close(out) out <- g.elector.Run() }() return out } func (g *GenericPilot) Elector() leaderelection.Interface { return g.elector }<|fim▁end|>
// block until told to shutdown select {
<|file_name|>recovery.go<|end_file_name|><|fim▁begin|>package cart import ( "bytes" "fmt" "io" "io/ioutil" "log" "net/http/httputil" "runtime" ) var ( dunno = []byte("???") centerDot = []byte("·") dot = []byte(".") slash = []byte("/") ) // Recovery returns a middleware that recovers from any panics and writes a 500 if there was one. func Recovery() Handler { return RecoveryWithWriter(DefaultErrorWriter) } func RecoveryWithWriter(out io.Writer) Handler { var logger *log.Logger if out != nil { logger = log.New(out, "\n\n\x1b[31m", log.LstdFlags) } return func(c *Context, next Next) { defer func() { if err := recover(); err != nil { if logger != nil { stack := stack(3) httprequest, _ := httputil.DumpRequest(c.Request, false) logger.Printf("[Recovery] panic recovered:\n%s\n%s\n%s%s", string(httprequest), err, stack, reset) } c.AbortWithStatus(500) } }() next() } } func RecoveryRender(out io.Writer) Handler { var logger *log.Logger if out != nil { logger = log.New(out, "\n\n\x1b[31m", log.LstdFlags) } return func(c *Context, next Next) { defer func() {<|fim▁hole|> if err := recover(); err != nil { httprequest, _ := httputil.DumpRequest(c.Request, false) if logger != nil { stack := stack(3) logger.Printf("[Recovery] panic recovered:\n%s\n%s\n%s%s", string(httprequest), err, stack, reset) } c.AbortRender(500, string(httprequest), err) } }() next() } } // stack returns a nicely formated stack frame, skipping skip frames func stack(skip int) []byte { buf := new(bytes.Buffer) // the returned data // As we loop, we open files and read them. These variables record the currently // loaded file. var lines [][]byte var lastFile string for i := skip; ; i++ { // Skip the expected number of frames pc, file, line, ok := runtime.Caller(i) if !ok { break } // Print this much at least. If we can't find the source, it won't show. 
fmt.Fprintf(buf, "%s:%d (0x%x)\n", file, line, pc) if file != lastFile { data, err := ioutil.ReadFile(file) if err != nil { continue } lines = bytes.Split(data, []byte{'\n'}) lastFile = file } fmt.Fprintf(buf, "\t%s: %s\n", function(pc), source(lines, line)) } return buf.Bytes() } // source returns a space-trimmed slice of the n'th line. func source(lines [][]byte, n int) []byte { n-- // in stack trace, lines are 1-indexed but our array is 0-indexed if n < 0 || n >= len(lines) { return dunno } return bytes.TrimSpace(lines[n]) } // function returns, if possible, the name of the function containing the PC. func function(pc uintptr) []byte { fn := runtime.FuncForPC(pc) if fn == nil { return dunno } name := []byte(fn.Name()) // The name includes the path name to the package, which is unnecessary // since the file name is already included. Plus, it has center dots. // That is, we see // runtime/debug.*T·ptrmethod // and want // *T.ptrmethod // Also the package path might contains dot (e.g. code.google.com/...), // so first eliminate the path prefix if lastslash := bytes.LastIndex(name, slash); lastslash >= 0 { name = name[lastslash+1:] } if period := bytes.Index(name, dot); period >= 0 { name = name[period+1:] } name = bytes.Replace(name, centerDot, dot, -1) return name }<|fim▁end|>
<|file_name|>mp_viewport_pan_tool.py<|end_file_name|><|fim▁begin|>from traits.api import Int, Tuple from enable.tools.api import ViewportPanTool class MPViewportPanTool(ViewportPanTool): cur_bid = Int(-1) _last_blob_pos = Tuple def normal_blob_down(self, event): if self.cur_bid == -1 and self.is_draggable(event.x, event.y): self.cur_bid = event.bid self.drag_start(event) def dragging_blob_up(self, event): if event.bid == self.cur_bid: self.cur_bid = -1 self.drag_end(event) def dragging_blob_move(self, event): if event.bid == self.cur_bid: self._last_blob_pos = (event.x, event.y) self.dragging(event) def drag_start(self, event): if self.component: self.original_padding = self.component.padding<|fim▁hole|> event.net_transform()) else: event.window.set_mouse_owner(self, event.net_transform()) self._last_blob_pos = (event.x, event.y) self.mouse_down_position = (event.x,event.y) self.event_state = "dragging" event.handled = True ViewportPanTool.drag_start(self, event) return def drag_end(self, event): event.x, event.y = self._last_blob_pos if hasattr(event, "bid"): event.window.release_blob(event.bid) self.event_state = "normal" ViewportPanTool.drag_end(self, event)<|fim▁end|>
if hasattr(event, "bid"): event.window.capture_blob(self, event.bid,
<|file_name|>prescript.py<|end_file_name|><|fim▁begin|>import os import sys import argparse from pandaharvester.harvesterconfig import harvester_config from pandaharvester.harvestermisc.selfcheck import harvesterPackageInfo def main(): oparser = argparse.ArgumentParser(prog='prescript', add_help=True) oparser.add_argument('-f', '--local_info_file', action='store', dest='local_info_file', help='path of harvester local info file') if len(sys.argv) == 1: print('No argument or flag specified. Did nothing') sys.exit(0) args = oparser.parse_args(sys.argv[1:]) local_info_file = os.path.normpath(args.local_info_file) <|fim▁hole|> print('Harvester package changed') #TODO pass hpi.renew_local_info() else: print('Harvester package unchanged. Skipped') if __name__ == '__main__': main()<|fim▁end|>
hpi = harvesterPackageInfo(local_info_file=local_info_file) if hpi.package_changed:
<|file_name|>config.py<|end_file_name|><|fim▁begin|>import copy import datetime import json import os from typing import Dict, List, Optional import jinja2 import jsonschema import yaml from ray_release.anyscale_util import find_cloud_by_name from ray_release.exception import ReleaseTestConfigError from ray_release.logger import logger from ray_release.util import deep_update class Test(dict): pass DEFAULT_WHEEL_WAIT_TIMEOUT = 7200 # Two hours DEFAULT_COMMAND_TIMEOUT = 1800 DEFAULT_BUILD_TIMEOUT = 1800 DEFAULT_CLUSTER_TIMEOUT = 1800 DEFAULT_CLOUD_ID = "cld_4F7k8814aZzGG8TNUGPKnc" DEFAULT_ENV = { "DATESTAMP": str(datetime.datetime.now().strftime("%Y%m%d")), "TIMESTAMP": str(int(datetime.datetime.now().timestamp())), "EXPIRATION_1D": str( (datetime.datetime.now() + datetime.timedelta(days=1)).strftime("%Y-%m-%d") ), "EXPIRATION_2D": str( (datetime.datetime.now() + datetime.timedelta(days=2)).strftime("%Y-%m-%d") ), "EXPIRATION_3D": str( (datetime.datetime.now() + datetime.timedelta(days=3)).strftime("%Y-%m-%d") ), } RELEASE_PACKAGE_DIR = os.path.abspath(os.path.join(os.path.dirname(__file__), "..")) RELEASE_TEST_SCHEMA_FILE = os.path.join( RELEASE_PACKAGE_DIR, "ray_release", "schema.json" ) class TestEnvironment(dict): pass _test_env = None <|fim▁hole|>def get_test_environment(): global _test_env if _test_env: return _test_env _test_env = TestEnvironment(**DEFAULT_ENV) return _test_env def set_test_env_var(key: str, value: str): test_env = get_test_environment() test_env[key] = value def get_test_env_var(key: str, default: Optional[str] = None): test_env = get_test_environment() return test_env.get(key, default) def read_and_validate_release_test_collection(config_file: str) -> List[Test]: """Read and validate test collection from config file""" with open(config_file, "rt") as fp: test_config = yaml.safe_load(fp) validate_release_test_collection(test_config) return test_config def load_schema_file(path: Optional[str] = None) -> Dict: path = path or 
RELEASE_TEST_SCHEMA_FILE with open(path, "rt") as fp: return json.load(fp) def validate_release_test_collection(test_collection: List[Test]): try: schema = load_schema_file() except Exception as e: raise ReleaseTestConfigError( f"Could not load release test validation schema: {e}" ) from e num_errors = 0 for test in test_collection: error = validate_test(test, schema) if error: logger.error( f"Failed to validate test {test.get('name', '(unnamed)')}: {error}" ) num_errors += 1 if num_errors > 0: raise ReleaseTestConfigError( f"Release test configuration error: Found {num_errors} test " f"validation errors." ) def validate_test(test: Test, schema: Optional[Dict] = None) -> Optional[str]: schema = schema or load_schema_file() try: jsonschema.validate(test, schema=schema) except (jsonschema.ValidationError, jsonschema.SchemaError) as e: return str(e.message) except Exception as e: return str(e) def find_test(test_collection: List[Test], test_name: str) -> Optional[Test]: """Find test with `test_name` in `test_collection`""" for test in test_collection: if test["name"] == test_name: return test return None def as_smoke_test(test: Test) -> Test: if "smoke_test" not in test: logger.warning( f"Requested smoke test, but test with name {test['name']} does " f"not have any smoke test configuration." 
) return test smoke_test_config = test.pop("smoke_test") new_test = deep_update(test, smoke_test_config) return new_test def get_wheels_sanity_check(commit: Optional[str] = None): if not commit: cmd = ( "python -c 'import ray; print(" '"No commit sanity check available, but this is the ' "Ray wheel commit:\", ray.__commit__)'" ) else: cmd = ( f"python -c 'import ray; " f'assert ray.__commit__ == "{commit}", ray.__commit__\'' ) return cmd def load_and_render_yaml_template( template_path: str, env: Optional[Dict] = None ) -> Optional[Dict]: if not template_path: return None if not os.path.exists(template_path): raise ReleaseTestConfigError( f"Cannot load yaml template from {template_path}: Path not found." ) with open(template_path, "rt") as f: content = f.read() render_env = copy.deepcopy(os.environ) if env: render_env.update(env) try: content = jinja2.Template(content).render(env=env) return yaml.safe_load(content) except Exception as e: raise ReleaseTestConfigError( f"Error rendering/loading yaml template: {e}" ) from e def load_test_cluster_env(test: Test, ray_wheels_url: str) -> Optional[Dict]: cluster_env_file = test["cluster"]["cluster_env"] cluster_env_path = os.path.join( RELEASE_PACKAGE_DIR, test.get("working_dir", ""), cluster_env_file ) env = get_test_environment() commit = env.get("RAY_COMMIT", None) env["RAY_WHEELS_SANITY_CHECK"] = get_wheels_sanity_check(commit) env["RAY_WHEELS"] = ray_wheels_url return load_and_render_yaml_template(cluster_env_path, env=env) def load_test_cluster_compute(test: Test) -> Optional[Dict]: cluster_compute_file = test["cluster"]["cluster_compute"] cluster_compute_path = os.path.join( RELEASE_PACKAGE_DIR, test.get("working_dir", ""), cluster_compute_file ) env = get_test_environment() cloud_id = get_test_cloud_id(test) env["ANYSCALE_CLOUD_ID"] = cloud_id return load_and_render_yaml_template(cluster_compute_path, env=env) def get_test_cloud_id(test: Test) -> str: cloud_id = test["cluster"].get("cloud_id", None) cloud_name = 
test["cluster"].get("cloud_name", None) if cloud_id and cloud_name: raise RuntimeError( f"You can't supply both a `cloud_name` ({cloud_name}) and a " f"`cloud_id` ({cloud_id}) in the test cluster configuration. " f"Please provide only one." ) elif cloud_name and not cloud_id: cloud_id = find_cloud_by_name(cloud_name) if not cloud_id: raise RuntimeError(f"Couldn't find cloud with name `{cloud_name}`.") else: cloud_id = cloud_id or DEFAULT_CLOUD_ID return cloud_id<|fim▁end|>
<|file_name|>benchmarks.rs<|end_file_name|><|fim▁begin|>#![feature(test)] extern crate test; use test::Bencher; use lonlat_bng; use rand::distributions::{IndependentSample, Range}; <|fim▁hole|> let between_lat = Range::new(49.871159, 55.811741); let mut rng = rand::thread_rng(); let mut lon_vec = vec![between_lon.ind_sample(&mut rng); num_coords]; let mut lat_vec = vec![between_lat.ind_sample(&mut rng); num_coords]; b.iter(|| { lonlat_bng::convert_to_bng_threaded_vec(&mut lon_vec, &mut lat_vec); }); }<|fim▁end|>
#[bench] fn bench_threads(b: &mut Bencher) { let num_coords = 100000; let between_lon = Range::new(-6.379880, 1.768960);
<|file_name|>winuser.rs<|end_file_name|><|fim▁begin|>// Copyright © 2015-2017 winapi-rs developers // Licensed under the Apache License, Version 2.0 // <LICENSE-APACHE or http://www.apache.org/licenses/LICENSE-2.0> or the MIT license // <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your option. // All files in the project carrying such notice may not be copied, modified, or distributed // except according to those terms. //! USER procedure declarations, constant definitions and macros use ctypes::{c_int, c_long, c_short, c_uint}; use shared::basetsd::{ DWORD_PTR, INT32, INT_PTR, PDWORD_PTR, UINT16, UINT32, UINT64, UINT_PTR, ULONG_PTR, }; use shared::guiddef::{GUID, LPCGUID}; use shared::minwindef::{ ATOM, BOOL, BYTE, DWORD, HINSTANCE, HIWORD, HKL, HRGN, HWINSTA, INT, LOWORD, LPARAM, LPBYTE, LPDWORD, LPVOID, LPWORD, LRESULT, PBYTE, PUINT, PULONG, TRUE, UCHAR, UINT, ULONG, USHORT, WORD, WPARAM, }; use shared::windef::{ COLORREF, HACCEL, HBITMAP, HBRUSH, HCURSOR, HDC, HDESK, HICON, HMENU, HMONITOR, HWINEVENTHOOK,HWND, LPRECT, POINT, RECT, }; use um::minwinbase::LPSECURITY_ATTRIBUTES; use um::wingdi::{BLENDFUNCTION, DEVMODEA, DEVMODEW, LOGFONTA, LOGFONTW}; use um::winnt::{ ACCESS_MASK, BOOLEAN, CHAR, HANDLE, LONG, LPCSTR, LPCWSTR, LPSTR, LPWSTR, LUID, PSECURITY_DESCRIPTOR, PSECURITY_INFORMATION, PVOID, SHORT, VOID, WCHAR, }; use vc::limits::UINT_MAX; use vc::vadefs::va_list; pub type HDWP = HANDLE; pub type MENUTEMPLATEA = VOID; pub type MENUTEMPLATEW = VOID; pub type LPMENUTEMPLATEA = PVOID; pub type LPMENUTEMPLATEW = PVOID; FN!{stdcall WNDPROC( HWND, UINT, WPARAM, LPARAM, ) -> LRESULT} FN!{stdcall DLGPROC( HWND, UINT, WPARAM, LPARAM, ) -> INT_PTR} FN!{stdcall TIMERPROC( HWND, UINT, UINT_PTR, DWORD, ) -> ()} FN!{stdcall GRAYSTRINGPROC( HDC, LPARAM, c_int, ) -> BOOL} FN!{stdcall WNDENUMPROC( HWND, LPARAM, ) -> BOOL} FN!{stdcall HOOKPROC( code: c_int, wParam: WPARAM, lParam: LPARAM, ) -> LRESULT} FN!{stdcall SENDASYNCPROC( HWND, UINT, ULONG_PTR, LRESULT, ) -> 
()} FN!{stdcall PROPENUMPROCA( HWND, LPCSTR, HANDLE, ) -> BOOL} FN!{stdcall PROPENUMPROCW( HWND, LPCWSTR, HANDLE, ) -> BOOL} FN!{stdcall PROPENUMPROCEXA( HWND, LPSTR, HANDLE, ULONG_PTR, ) -> BOOL} FN!{stdcall PROPENUMPROCEXW( HWND, LPWSTR, HANDLE, ULONG_PTR, ) -> BOOL} FN!{stdcall EDITWORDBREAKPROCA( lpch: LPSTR, ichCurrent: c_int, cch: c_int, code: c_int, ) -> c_int} FN!{stdcall EDITWORDBREAKPROCW( lpch: LPWSTR, ichCurrent: c_int, cch: c_int, code: c_int, ) -> c_int} FN!{stdcall DRAWSTATEPROC( hdc: HDC, lData: LPARAM, wData: WPARAM, cx: c_int, cy: c_int, ) -> BOOL} FN!{stdcall NAMEENUMPROCA( LPSTR, LPARAM, ) -> BOOL} FN!{stdcall NAMEENUMPROCW( LPWSTR, LPARAM, ) -> BOOL} pub type WINSTAENUMPROCA = NAMEENUMPROCA; pub type DESKTOPENUMPROCA = NAMEENUMPROCA; pub type WINSTAENUMPROCW = NAMEENUMPROCW; pub type DESKTOPENUMPROCW = NAMEENUMPROCW; #[inline] pub fn IS_INTRESOURCE(r: ULONG_PTR) -> bool { (r >> 16) == 0 } #[inline] pub fn MAKEINTRESOURCEA(i: WORD) -> LPSTR { i as ULONG_PTR as LPSTR } #[inline] pub fn MAKEINTRESOURCEW(i: WORD) -> LPWSTR { i as ULONG_PTR as LPWSTR } macro_rules! 
MAKEINTRESOURCE { ($i:expr) => { $i as ULONG_PTR as LPWSTR } } pub const RT_CURSOR: LPWSTR = MAKEINTRESOURCE!(1); pub const RT_BITMAP: LPWSTR = MAKEINTRESOURCE!(2); pub const RT_ICON: LPWSTR = MAKEINTRESOURCE!(3); pub const RT_MENU: LPWSTR = MAKEINTRESOURCE!(4); pub const RT_DIALOG: LPWSTR = MAKEINTRESOURCE!(5); pub const RT_STRING: LPWSTR = MAKEINTRESOURCE!(6); pub const RT_FONTDIR: LPWSTR = MAKEINTRESOURCE!(7); pub const RT_FONT: LPWSTR = MAKEINTRESOURCE!(8); pub const RT_ACCELERATOR: LPWSTR = MAKEINTRESOURCE!(9); pub const RT_RCDATA: LPWSTR = MAKEINTRESOURCE!(10); pub const RT_MESSAGETABLE: LPWSTR = MAKEINTRESOURCE!(11); pub const DIFFERENCE: WORD = 11; pub const RT_GROUP_CURSOR: LPWSTR = MAKEINTRESOURCE!(1 + DIFFERENCE); pub const RT_GROUP_ICON: LPWSTR = MAKEINTRESOURCE!(3 + DIFFERENCE); pub const RT_VERSION: LPWSTR = MAKEINTRESOURCE!(16); pub const RT_DLGINCLUDE: LPWSTR = MAKEINTRESOURCE!(17); pub const RT_PLUGPLAY: LPWSTR = MAKEINTRESOURCE!(19); pub const RT_VXD: LPWSTR = MAKEINTRESOURCE!(20); pub const RT_ANICURSOR: LPWSTR = MAKEINTRESOURCE!(21); pub const RT_ANIICON: LPWSTR = MAKEINTRESOURCE!(22); pub const RT_HTML: LPWSTR = MAKEINTRESOURCE!(23); pub const RT_MANIFEST: LPWSTR = MAKEINTRESOURCE!(24); pub const CREATEPROCESS_MANIFEST_RESOURCE_ID: LPWSTR = MAKEINTRESOURCE!(1); pub const ISOLATIONAWARE_MANIFEST_RESOURCE_ID: LPWSTR = MAKEINTRESOURCE!(2); pub const ISOLATIONAWARE_NOSTATICIMPORT_MANIFEST_RESOURCE_ID: LPWSTR = MAKEINTRESOURCE!(3); pub const MINIMUM_RESERVED_MANIFEST_RESOURCE_ID: LPWSTR = MAKEINTRESOURCE!(1); pub const MAXIMUM_RESERVED_MANIFEST_RESOURCE_ID: LPWSTR = MAKEINTRESOURCE!(16); extern "system" { pub fn wvsprintfA( _: LPSTR, _: LPCSTR, arglist: va_list, ) -> c_int; pub fn wvsprintfW( _: LPWSTR, _: LPCWSTR, arglist: va_list, ) -> c_int; } extern "C" { pub fn wsprintfA( _: LPSTR, _: LPCSTR, ... ) -> c_int; pub fn wsprintfW( _: LPWSTR, _: LPCWSTR, ... 
) -> c_int; } pub const SETWALLPAPER_DEFAULT: LPWSTR = -1isize as LPWSTR; pub const SB_HORZ: UINT = 0; pub const SB_VERT: UINT = 1; pub const SB_CTL: UINT = 2;<|fim▁hole|>pub const SB_LINERIGHT: LPARAM = 1; pub const SB_PAGEUP: LPARAM = 2; pub const SB_PAGELEFT: LPARAM = 2; pub const SB_PAGEDOWN: LPARAM = 3; pub const SB_PAGERIGHT: LPARAM = 3; pub const SB_THUMBPOSITION: LPARAM = 4; pub const SB_THUMBTRACK: LPARAM = 5; pub const SB_TOP: LPARAM = 6; pub const SB_LEFT: LPARAM = 6; pub const SB_BOTTOM: LPARAM = 7; pub const SB_RIGHT: LPARAM = 7; pub const SB_ENDSCROLL: LPARAM = 8; pub const SW_HIDE: c_int = 0; pub const SW_SHOWNORMAL: c_int = 1; pub const SW_NORMAL: c_int = 1; pub const SW_SHOWMINIMIZED: c_int = 2; pub const SW_SHOWMAXIMIZED: c_int = 3; pub const SW_MAXIMIZE: c_int = 3; pub const SW_SHOWNOACTIVATE: c_int = 4; pub const SW_SHOW: c_int = 5; pub const SW_MINIMIZE: c_int = 6; pub const SW_SHOWMINNOACTIVE: c_int = 7; pub const SW_SHOWNA: c_int = 8; pub const SW_RESTORE: c_int = 9; pub const SW_SHOWDEFAULT: c_int = 10; pub const SW_FORCEMINIMIZE: c_int = 11; pub const SW_MAX: c_int = 11; pub const HIDE_WINDOW: c_int = 0; pub const SHOW_OPENWINDOW: c_int = 1; pub const SHOW_ICONWINDOW: c_int = 2; pub const SHOW_FULLSCREEN: c_int = 3; pub const SHOW_OPENNOACTIVATE: c_int = 4; pub const SW_PARENTCLOSING: LPARAM = 1; pub const SW_OTHERZOOM: LPARAM = 2; pub const SW_PARENTOPENING: LPARAM = 3; pub const SW_OTHERUNZOOM: LPARAM = 4; pub const AW_HOR_POSITIVE: DWORD = 0x00000001; pub const AW_HOR_NEGATIVE: DWORD = 0x00000002; pub const AW_VER_POSITIVE: DWORD = 0x00000004; pub const AW_VER_NEGATIVE: DWORD = 0x00000008; pub const AW_CENTER: DWORD = 0x00000010; pub const AW_HIDE: DWORD = 0x00010000; pub const AW_ACTIVATE: DWORD = 0x00020000; pub const AW_SLIDE: DWORD = 0x00040000; pub const AW_BLEND: DWORD = 0x00080000; pub const KF_EXTENDED: WORD = 0x0100; pub const KF_DLGMODE: WORD = 0x0800; pub const KF_MENUMODE: WORD = 0x1000; pub const KF_ALTDOWN: WORD = 0x2000; 
pub const KF_REPEAT: WORD = 0x4000; pub const KF_UP: WORD = 0x8000; pub const VK_LBUTTON: c_int = 0x01; pub const VK_RBUTTON: c_int = 0x02; pub const VK_CANCEL: c_int = 0x03; pub const VK_MBUTTON: c_int = 0x04; pub const VK_XBUTTON1: c_int = 0x05; pub const VK_XBUTTON2: c_int = 0x06; pub const VK_BACK: c_int = 0x08; pub const VK_TAB: c_int = 0x09; pub const VK_CLEAR: c_int = 0x0C; pub const VK_RETURN: c_int = 0x0D; pub const VK_SHIFT: c_int = 0x10; pub const VK_CONTROL: c_int = 0x11; pub const VK_MENU: c_int = 0x12; pub const VK_PAUSE: c_int = 0x13; pub const VK_CAPITAL: c_int = 0x14; pub const VK_KANA: c_int = 0x15; pub const VK_HANGEUL: c_int = 0x15; pub const VK_HANGUL: c_int = 0x15; pub const VK_JUNJA: c_int = 0x17; pub const VK_FINAL: c_int = 0x18; pub const VK_HANJA: c_int = 0x19; pub const VK_KANJI: c_int = 0x19; pub const VK_ESCAPE: c_int = 0x1B; pub const VK_CONVERT: c_int = 0x1C; pub const VK_NONCONVERT: c_int = 0x1D; pub const VK_ACCEPT: c_int = 0x1E; pub const VK_MODECHANGE: c_int = 0x1F; pub const VK_SPACE: c_int = 0x20; pub const VK_PRIOR: c_int = 0x21; pub const VK_NEXT: c_int = 0x22; pub const VK_END: c_int = 0x23; pub const VK_HOME: c_int = 0x24; pub const VK_LEFT: c_int = 0x25; pub const VK_UP: c_int = 0x26; pub const VK_RIGHT: c_int = 0x27; pub const VK_DOWN: c_int = 0x28; pub const VK_SELECT: c_int = 0x29; pub const VK_PRINT: c_int = 0x2A; pub const VK_EXECUTE: c_int = 0x2B; pub const VK_SNAPSHOT: c_int = 0x2C; pub const VK_INSERT: c_int = 0x2D; pub const VK_DELETE: c_int = 0x2E; pub const VK_HELP: c_int = 0x2F; pub const VK_LWIN: c_int = 0x5B; pub const VK_RWIN: c_int = 0x5C; pub const VK_APPS: c_int = 0x5D; pub const VK_SLEEP: c_int = 0x5F; pub const VK_NUMPAD0: c_int = 0x60; pub const VK_NUMPAD1: c_int = 0x61; pub const VK_NUMPAD2: c_int = 0x62; pub const VK_NUMPAD3: c_int = 0x63; pub const VK_NUMPAD4: c_int = 0x64; pub const VK_NUMPAD5: c_int = 0x65; pub const VK_NUMPAD6: c_int = 0x66; pub const VK_NUMPAD7: c_int = 0x67; pub const 
VK_NUMPAD8: c_int = 0x68; pub const VK_NUMPAD9: c_int = 0x69; pub const VK_MULTIPLY: c_int = 0x6A; pub const VK_ADD: c_int = 0x6B; pub const VK_SEPARATOR: c_int = 0x6C; pub const VK_SUBTRACT: c_int = 0x6D; pub const VK_DECIMAL: c_int = 0x6E; pub const VK_DIVIDE: c_int = 0x6F; pub const VK_F1: c_int = 0x70; pub const VK_F2: c_int = 0x71; pub const VK_F3: c_int = 0x72; pub const VK_F4: c_int = 0x73; pub const VK_F5: c_int = 0x74; pub const VK_F6: c_int = 0x75; pub const VK_F7: c_int = 0x76; pub const VK_F8: c_int = 0x77; pub const VK_F9: c_int = 0x78; pub const VK_F10: c_int = 0x79; pub const VK_F11: c_int = 0x7A; pub const VK_F12: c_int = 0x7B; pub const VK_F13: c_int = 0x7C; pub const VK_F14: c_int = 0x7D; pub const VK_F15: c_int = 0x7E; pub const VK_F16: c_int = 0x7F; pub const VK_F17: c_int = 0x80; pub const VK_F18: c_int = 0x81; pub const VK_F19: c_int = 0x82; pub const VK_F20: c_int = 0x83; pub const VK_F21: c_int = 0x84; pub const VK_F22: c_int = 0x85; pub const VK_F23: c_int = 0x86; pub const VK_F24: c_int = 0x87; pub const VK_NAVIGATION_VIEW: c_int = 0x88; pub const VK_NAVIGATION_MENU: c_int = 0x89; pub const VK_NAVIGATION_UP: c_int = 0x8A; pub const VK_NAVIGATION_DOWN: c_int = 0x8B; pub const VK_NAVIGATION_LEFT: c_int = 0x8C; pub const VK_NAVIGATION_RIGHT: c_int = 0x8D; pub const VK_NAVIGATION_ACCEPT: c_int = 0x8E; pub const VK_NAVIGATION_CANCEL: c_int = 0x8F; pub const VK_NUMLOCK: c_int = 0x90; pub const VK_SCROLL: c_int = 0x91; pub const VK_OEM_NEC_EQUAL: c_int = 0x92; pub const VK_OEM_FJ_JISHO: c_int = 0x92; pub const VK_OEM_FJ_MASSHOU: c_int = 0x93; pub const VK_OEM_FJ_TOUROKU: c_int = 0x94; pub const VK_OEM_FJ_LOYA: c_int = 0x95; pub const VK_OEM_FJ_ROYA: c_int = 0x96; pub const VK_LSHIFT: c_int = 0xA0; pub const VK_RSHIFT: c_int = 0xA1; pub const VK_LCONTROL: c_int = 0xA2; pub const VK_RCONTROL: c_int = 0xA3; pub const VK_LMENU: c_int = 0xA4; pub const VK_RMENU: c_int = 0xA5; pub const VK_BROWSER_BACK: c_int = 0xA6; pub const VK_BROWSER_FORWARD: c_int 
= 0xA7;
// Virtual-key codes (VK_*): browser/volume/media/launch keys, OEM punctuation and
// vendor keys, Xbox gamepad buttons and thumbstick directions, and IME/OEM control
// codes (values from WinUser.h). Also in this section: SetWindowsHookEx hook ids
// (WH_*), hook codes passed to hook procedures (HC_*, HCBT_*), CBT-hook payload
// structs and their pointer aliases, session-change notification codes (WTS_*,
// presumably the WM_WTSSESSION_CHANGE wParam values — see MSDN), message-filter
// codes (MSGF_*) and the first of the shell-hook codes (HSHELL_*).
pub const VK_BROWSER_REFRESH: c_int = 0xA8; pub const VK_BROWSER_STOP: c_int = 0xA9; pub const VK_BROWSER_SEARCH: c_int = 0xAA; pub const VK_BROWSER_FAVORITES: c_int = 0xAB; pub const VK_BROWSER_HOME: c_int = 0xAC; pub const VK_VOLUME_MUTE: c_int = 0xAD; pub const VK_VOLUME_DOWN: c_int = 0xAE; pub const VK_VOLUME_UP: c_int = 0xAF; pub const VK_MEDIA_NEXT_TRACK: c_int = 0xB0; pub const VK_MEDIA_PREV_TRACK: c_int = 0xB1; pub const VK_MEDIA_STOP: c_int = 0xB2; pub const VK_MEDIA_PLAY_PAUSE: c_int = 0xB3; pub const VK_LAUNCH_MAIL: c_int = 0xB4; pub const VK_LAUNCH_MEDIA_SELECT: c_int = 0xB5; pub const VK_LAUNCH_APP1: c_int = 0xB6; pub const VK_LAUNCH_APP2: c_int = 0xB7; pub const VK_OEM_1: c_int = 0xBA; pub const VK_OEM_PLUS: c_int = 0xBB; pub const VK_OEM_COMMA: c_int = 0xBC; pub const VK_OEM_MINUS: c_int = 0xBD; pub const VK_OEM_PERIOD: c_int = 0xBE; pub const VK_OEM_2: c_int = 0xBF; pub const VK_OEM_3: c_int = 0xC0; pub const VK_GAMEPAD_A: c_int = 0xC3; pub const VK_GAMEPAD_B: c_int = 0xC4; pub const VK_GAMEPAD_X: c_int = 0xC5; pub const VK_GAMEPAD_Y: c_int = 0xC6; pub const VK_GAMEPAD_RIGHT_SHOULDER: c_int = 0xC7; pub const VK_GAMEPAD_LEFT_SHOULDER: c_int = 0xC8; pub const VK_GAMEPAD_LEFT_TRIGGER: c_int = 0xC9; pub const VK_GAMEPAD_RIGHT_TRIGGER: c_int = 0xCA; pub const VK_GAMEPAD_DPAD_UP: c_int = 0xCB; pub const VK_GAMEPAD_DPAD_DOWN: c_int = 0xCC; pub const VK_GAMEPAD_DPAD_LEFT: c_int = 0xCD; pub const VK_GAMEPAD_DPAD_RIGHT: c_int = 0xCE; pub const VK_GAMEPAD_MENU: c_int = 0xCF; pub const VK_GAMEPAD_VIEW: c_int = 0xD0; pub const VK_GAMEPAD_LEFT_THUMBSTICK_BUTTON: c_int = 0xD1; pub const VK_GAMEPAD_RIGHT_THUMBSTICK_BUTTON: c_int = 0xD2; pub const VK_GAMEPAD_LEFT_THUMBSTICK_UP: c_int = 0xD3; pub const VK_GAMEPAD_LEFT_THUMBSTICK_DOWN: c_int = 0xD4; pub const VK_GAMEPAD_LEFT_THUMBSTICK_RIGHT: c_int = 0xD5; pub const VK_GAMEPAD_LEFT_THUMBSTICK_LEFT: c_int = 0xD6; pub const VK_GAMEPAD_RIGHT_THUMBSTICK_UP: c_int = 0xD7; pub const VK_GAMEPAD_RIGHT_THUMBSTICK_DOWN:
c_int = 0xD8; pub const VK_GAMEPAD_RIGHT_THUMBSTICK_RIGHT: c_int = 0xD9; pub const VK_GAMEPAD_RIGHT_THUMBSTICK_LEFT: c_int = 0xDA; pub const VK_OEM_4: c_int = 0xDB; pub const VK_OEM_5: c_int = 0xDC; pub const VK_OEM_6: c_int = 0xDD; pub const VK_OEM_7: c_int = 0xDE; pub const VK_OEM_8: c_int = 0xDF; pub const VK_OEM_AX: c_int = 0xE1; pub const VK_OEM_102: c_int = 0xE2; pub const VK_ICO_HELP: c_int = 0xE3; pub const VK_ICO_00: c_int = 0xE4; pub const VK_PROCESSKEY: c_int = 0xE5; pub const VK_ICO_CLEAR: c_int = 0xE6; pub const VK_PACKET: c_int = 0xE7; pub const VK_OEM_RESET: c_int = 0xE9; pub const VK_OEM_JUMP: c_int = 0xEA; pub const VK_OEM_PA1: c_int = 0xEB; pub const VK_OEM_PA2: c_int = 0xEC; pub const VK_OEM_PA3: c_int = 0xED; pub const VK_OEM_WSCTRL: c_int = 0xEE; pub const VK_OEM_CUSEL: c_int = 0xEF; pub const VK_OEM_ATTN: c_int = 0xF0; pub const VK_OEM_FINISH: c_int = 0xF1; pub const VK_OEM_COPY: c_int = 0xF2; pub const VK_OEM_AUTO: c_int = 0xF3; pub const VK_OEM_ENLW: c_int = 0xF4; pub const VK_OEM_BACKTAB: c_int = 0xF5; pub const VK_ATTN: c_int = 0xF6; pub const VK_CRSEL: c_int = 0xF7; pub const VK_EXSEL: c_int = 0xF8; pub const VK_EREOF: c_int = 0xF9; pub const VK_PLAY: c_int = 0xFA; pub const VK_ZOOM: c_int = 0xFB; pub const VK_NONAME: c_int = 0xFC; pub const VK_PA1: c_int = 0xFD; pub const VK_OEM_CLEAR: c_int = 0xFE; pub const WH_MIN: c_int = -1; pub const WH_MSGFILTER: c_int = -1; pub const WH_JOURNALRECORD: c_int = 0; pub const WH_JOURNALPLAYBACK: c_int = 1; pub const WH_KEYBOARD: c_int = 2; pub const WH_GETMESSAGE: c_int = 3; pub const WH_CALLWNDPROC: c_int = 4; pub const WH_CBT: c_int = 5; pub const WH_SYSMSGFILTER: c_int = 6; pub const WH_MOUSE: c_int = 7; pub const WH_HARDWARE: c_int = 8; pub const WH_DEBUG: c_int = 9; pub const WH_SHELL: c_int = 10; pub const WH_FOREGROUNDIDLE: c_int = 11; pub const WH_CALLWNDPROCRET: c_int = 12; pub const WH_KEYBOARD_LL: c_int = 13; pub const WH_MOUSE_LL: c_int = 14; pub const WH_MAX: c_int = 14; pub const
WH_MINHOOK: c_int = WH_MIN; pub const WH_MAXHOOK: c_int = WH_MAX; pub const HC_ACTION: c_int = 0; pub const HC_GETNEXT: c_int = 1; pub const HC_SKIP: c_int = 2; pub const HC_NOREMOVE: c_int = 3; pub const HC_NOREM: c_int = HC_NOREMOVE; pub const HC_SYSMODALON: c_int = 4; pub const HC_SYSMODALOFF: c_int = 5; pub const HCBT_MOVESIZE: c_int = 0; pub const HCBT_MINMAX: c_int = 1; pub const HCBT_QS: c_int = 2; pub const HCBT_CREATEWND: c_int = 3; pub const HCBT_DESTROYWND: c_int = 4; pub const HCBT_ACTIVATE: c_int = 5; pub const HCBT_CLICKSKIPPED: c_int = 6; pub const HCBT_KEYSKIPPED: c_int = 7; pub const HCBT_SYSCOMMAND: c_int = 8; pub const HCBT_SETFOCUS: c_int = 9; STRUCT!{struct CBT_CREATEWNDA { lpcs: *mut CREATESTRUCTA, hwndInsertAfter: HWND, }} pub type LPCBT_CREATEWNDA = *mut CBT_CREATEWNDA; STRUCT!{struct CBT_CREATEWNDW { lpcs: *mut CREATESTRUCTW, hwndInsertAfter: HWND, }} pub type LPCBT_CREATEWNDW = *mut CBT_CREATEWNDW; STRUCT!{struct CBTACTIVATESTRUCT { fMouse: BOOL, hWndActive: HWND, }} pub type LPCBTACTIVATESTRUCT = *mut CBTACTIVATESTRUCT; STRUCT!{struct WTSSESSION_NOTIFICATION { cbSize: DWORD, dwSessionId: DWORD, }} pub type PWTSSESSION_NOTIFICATION = *mut WTSSESSION_NOTIFICATION; pub const WTS_CONSOLE_CONNECT: WPARAM = 0x1; pub const WTS_CONSOLE_DISCONNECT: WPARAM = 0x2; pub const WTS_REMOTE_CONNECT: WPARAM = 0x3; pub const WTS_REMOTE_DISCONNECT: WPARAM = 0x4; pub const WTS_SESSION_LOGON: WPARAM = 0x5; pub const WTS_SESSION_LOGOFF: WPARAM = 0x6; pub const WTS_SESSION_LOCK: WPARAM = 0x7; pub const WTS_SESSION_UNLOCK: WPARAM = 0x8; pub const WTS_SESSION_REMOTE_CONTROL: WPARAM = 0x9; pub const WTS_SESSION_CREATE: WPARAM = 0xa; pub const WTS_SESSION_TERMINATE: WPARAM = 0xb; pub const MSGF_DIALOGBOX: c_int = 0; pub const MSGF_MESSAGEBOX: c_int = 1; pub const MSGF_MENU: c_int = 2; pub const MSGF_SCROLLBAR: c_int = 5; pub const MSGF_NEXTWINDOW: c_int = 6; pub const MSGF_MAX: c_int = 8; pub const MSGF_USER: c_int = 4096; pub const HSHELL_WINDOWCREATED: c_int = 1;
pub const HSHELL_WINDOWDESTROYED: c_int = 2; pub const HSHELL_ACTIVATESHELLWINDOW: c_int = 3; pub const HSHELL_WINDOWACTIVATED: c_int = 4; pub const HSHELL_GETMINRECT: c_int = 5; pub const HSHELL_REDRAW: c_int = 6; pub const HSHELL_TASKMAN: c_int = 7; pub const HSHELL_LANGUAGE: c_int = 8; pub const HSHELL_SYSMENU: c_int = 9; pub const HSHELL_ENDTASK: c_int = 10; pub const HSHELL_ACCESSIBILITYSTATE: c_int = 11; pub const HSHELL_APPCOMMAND: c_int = 12; pub const HSHELL_WINDOWREPLACED: c_int = 13; pub const HSHELL_WINDOWREPLACING: c_int = 14; pub const HSHELL_MONITORCHANGED: c_int = 16; pub const HSHELL_HIGHBIT: c_int = 0x8000; pub const HSHELL_FLASH: c_int = HSHELL_REDRAW | HSHELL_HIGHBIT; pub const HSHELL_RUDEAPPACTIVATED: c_int = HSHELL_WINDOWACTIVATED | HSHELL_HIGHBIT; pub const APPCOMMAND_BROWSER_BACKWARD: c_short = 1; pub const APPCOMMAND_BROWSER_FORWARD: c_short = 2; pub const APPCOMMAND_BROWSER_REFRESH: c_short = 3; pub const APPCOMMAND_BROWSER_STOP: c_short = 4; pub const APPCOMMAND_BROWSER_SEARCH: c_short = 5; pub const APPCOMMAND_BROWSER_FAVORITES: c_short = 6; pub const APPCOMMAND_BROWSER_HOME: c_short = 7; pub const APPCOMMAND_VOLUME_MUTE: c_short = 8; pub const APPCOMMAND_VOLUME_DOWN: c_short = 9; pub const APPCOMMAND_VOLUME_UP: c_short = 10; pub const APPCOMMAND_MEDIA_NEXTTRACK: c_short = 11; pub const APPCOMMAND_MEDIA_PREVIOUSTRACK: c_short = 12; pub const APPCOMMAND_MEDIA_STOP: c_short = 13; pub const APPCOMMAND_MEDIA_PLAY_PAUSE: c_short = 14; pub const APPCOMMAND_LAUNCH_MAIL: c_short = 15; pub const APPCOMMAND_LAUNCH_MEDIA_SELECT: c_short = 16; pub const APPCOMMAND_LAUNCH_APP1: c_short = 17; pub const APPCOMMAND_LAUNCH_APP2: c_short = 18; pub const APPCOMMAND_BASS_DOWN: c_short = 19; pub const APPCOMMAND_BASS_BOOST: c_short = 20; pub const APPCOMMAND_BASS_UP: c_short = 21; pub const APPCOMMAND_TREBLE_DOWN: c_short = 22; pub const APPCOMMAND_TREBLE_UP: c_short = 23; pub const APPCOMMAND_MICROPHONE_VOLUME_MUTE: c_short = 24; pub const 
APPCOMMAND_MICROPHONE_VOLUME_DOWN: c_short = 25; pub const APPCOMMAND_MICROPHONE_VOLUME_UP: c_short = 26; pub const APPCOMMAND_HELP: c_short = 27; pub const APPCOMMAND_FIND: c_short = 28; pub const APPCOMMAND_NEW: c_short = 29; pub const APPCOMMAND_OPEN: c_short = 30; pub const APPCOMMAND_CLOSE: c_short = 31; pub const APPCOMMAND_SAVE: c_short = 32; pub const APPCOMMAND_PRINT: c_short = 33; pub const APPCOMMAND_UNDO: c_short = 34; pub const APPCOMMAND_REDO: c_short = 35; pub const APPCOMMAND_COPY: c_short = 36; pub const APPCOMMAND_CUT: c_short = 37; pub const APPCOMMAND_PASTE: c_short = 38; pub const APPCOMMAND_REPLY_TO_MAIL: c_short = 39; pub const APPCOMMAND_FORWARD_MAIL: c_short = 40; pub const APPCOMMAND_SEND_MAIL: c_short = 41; pub const APPCOMMAND_SPELL_CHECK: c_short = 42; pub const APPCOMMAND_DICTATE_OR_COMMAND_CONTROL_TOGGLE: c_short = 43; pub const APPCOMMAND_MIC_ON_OFF_TOGGLE: c_short = 44; pub const APPCOMMAND_CORRECTION_LIST: c_short = 45; pub const APPCOMMAND_MEDIA_PLAY: c_short = 46; pub const APPCOMMAND_MEDIA_PAUSE: c_short = 47; pub const APPCOMMAND_MEDIA_RECORD: c_short = 48; pub const APPCOMMAND_MEDIA_FAST_FORWARD: c_short = 49; pub const APPCOMMAND_MEDIA_REWIND: c_short = 50; pub const APPCOMMAND_MEDIA_CHANNEL_UP: c_short = 51; pub const APPCOMMAND_MEDIA_CHANNEL_DOWN: c_short = 52; pub const APPCOMMAND_DELETE: c_short = 53; pub const APPCOMMAND_DWM_FLIP3D: c_short = 54; pub const FAPPCOMMAND_MOUSE: WORD = 0x8000; pub const FAPPCOMMAND_KEY: WORD = 0; pub const FAPPCOMMAND_OEM: WORD = 0x1000; pub const FAPPCOMMAND_MASK: WORD = 0xF000; #[inline] pub fn GET_APPCOMMAND_LPARAM(lParam: LPARAM) -> c_short { (HIWORD(lParam as DWORD) & !FAPPCOMMAND_MASK) as c_short } #[inline] pub fn GET_DEVICE_LPARAM(lParam: LPARAM) -> WORD { HIWORD(lParam as DWORD) & FAPPCOMMAND_MASK } pub use self::GET_DEVICE_LPARAM as GET_MOUSEORKEY_LPARAM; pub use shared::minwindef::LOWORD as GET_FLAGS_LPARAM; pub use self::GET_FLAGS_LPARAM as GET_KEYSTATE_LPARAM; STRUCT!{struct 
SHELLHOOKINFO { hwnd: HWND, rc: RECT, }} pub type LPSHELLHOOKINFO = *mut SHELLHOOKINFO; STRUCT!{struct EVENTMSG { message: UINT, paramL: UINT, paramH: UINT, time: DWORD, hwnd: HWND, }} pub type PEVENTMSGMSG = *mut EVENTMSG; pub type NPEVENTMSGMSG = *mut EVENTMSG; pub type LPEVENTMSGMSG = *mut EVENTMSG; pub type PEVENTMSG = *mut EVENTMSG; pub type NPEVENTMSG = *mut EVENTMSG; pub type LPEVENTMSG = *mut EVENTMSG; STRUCT!{struct CWPSTRUCT { lParam: LPARAM, wParam: WPARAM, message: UINT, hwnd: HWND, }} pub type PCWPSTRUCT = *mut CWPSTRUCT; pub type NPCWPSTRUCT = *mut CWPSTRUCT; pub type LPCWPSTRUCT = *mut CWPSTRUCT; STRUCT!{struct CWPRETSTRUCT { lResult: LRESULT, lParam: LPARAM, wParam: WPARAM, message: UINT, hwnd: HWND, }} pub type PCWPRETSTRUCT = *mut CWPRETSTRUCT; pub type NPCWPRETSTRUCT = *mut CWPRETSTRUCT; pub type LPCWPRETSTRUCT = *mut CWPRETSTRUCT; pub const LLKHF_EXTENDED: DWORD = (KF_EXTENDED >> 8) as DWORD; pub const LLKHF_INJECTED: DWORD = 0x00000010; pub const LLKHF_ALTDOWN: DWORD = (KF_ALTDOWN >> 8) as DWORD; pub const LLKHF_UP: DWORD = (KF_UP >> 8) as DWORD; pub const LLKHF_LOWER_IL_INJECTED: DWORD = 0x00000002; pub const LLMHF_INJECTED: DWORD = 0x00000001; pub const LLMHF_LOWER_IL_INJECTED: DWORD = 0x00000002; STRUCT!{struct KBDLLHOOKSTRUCT { vkCode: DWORD, scanCode: DWORD, flags: DWORD, time: DWORD, dwExtraInfo: ULONG_PTR, }} pub type LPKBDLLHOOKSTRUCT = *mut KBDLLHOOKSTRUCT; pub type PKBDLLHOOKSTRUCT = *mut KBDLLHOOKSTRUCT; STRUCT!{struct MSLLHOOKSTRUCT { pt: POINT, mouseData: DWORD, flags: DWORD, time: DWORD, dwExtraInfo: ULONG_PTR, }} pub type LPMSLLHOOKSTRUCT = *mut MSLLHOOKSTRUCT; pub type PMSLLHOOKSTRUCT = *mut MSLLHOOKSTRUCT; STRUCT!{struct DEBUGHOOKINFO { idThread: DWORD, idThreadInstaller: DWORD, lParam: LPARAM, wParam: WPARAM, code: c_int, }} pub type PDEBUGHOOKINFO = *mut DEBUGHOOKINFO; pub type NPDEBUGHOOKINFO = *mut DEBUGHOOKINFO; pub type LPDEBUGHOOKINFO = *mut DEBUGHOOKINFO; STRUCT!{struct MOUSEHOOKSTRUCT { pt: POINT, hwnd: HWND, 
wHitTestCode: UINT, dwExtraInfo: ULONG_PTR, }} pub type LPMOUSEHOOKSTRUCT = *mut MOUSEHOOKSTRUCT; pub type PMOUSEHOOKSTRUCT = *mut MOUSEHOOKSTRUCT; STRUCT!{struct MOUSEHOOKSTRUCTEX { parent: MOUSEHOOKSTRUCT, mouseData: DWORD, }} pub type LPMOUSEHOOKSTRUCTEX = *mut MOUSEHOOKSTRUCTEX; pub type PMOUSEHOOKSTRUCTEX = *mut MOUSEHOOKSTRUCTEX; STRUCT!{struct HARDWAREHOOKSTRUCT { hwnd: HWND, message: UINT, wParam: WPARAM, lParam: LPARAM, }} pub type LPHARDWAREHOOKSTRUCT = *mut HARDWAREHOOKSTRUCT; pub type PHARDWAREHOOKSTRUCT = *mut HARDWAREHOOKSTRUCT; pub const HKL_PREV: HKL = 0 as HKL; pub const HKL_NEXT: HKL = 1 as HKL; pub const KLF_ACTIVATE: UINT = 0x00000001; pub const KLF_SUBSTITUTE_OK: UINT = 0x00000002; pub const KLF_REORDER: UINT = 0x00000008; pub const KLF_REPLACELANG: UINT = 0x00000010; pub const KLF_NOTELLSHELL: UINT = 0x00000080; pub const KLF_SETFORPROCESS: UINT = 0x00000100; pub const KLF_SHIFTLOCK: UINT = 0x00010000; pub const KLF_RESET: UINT = 0x40000000; pub const INPUTLANGCHANGE_SYSCHARSET: WPARAM = 0x0001; pub const INPUTLANGCHANGE_FORWARD: WPARAM = 0x0002; pub const INPUTLANGCHANGE_BACKWARD: WPARAM = 0x0004; pub const KL_NAMELENGTH: usize = 9; extern "system" { pub fn LoadKeyboardLayoutA( pwszKLID: LPCSTR, Flags: DWORD, ) -> HKL; pub fn LoadKeyboardLayoutW( pwszKLID: LPCWSTR, Flags: DWORD, ) -> HKL; pub fn ActivateKeyboardLayout( hkl: HKL, Flags: UINT, ) -> HKL; pub fn ToUnicodeEx( wVirtKey: UINT, wScanCode: UINT, lpKeyState: *const BYTE, pwszBuff: LPWSTR, cchBuff: c_int, wFlags: UINT, dwhkl: HKL, ) -> c_int; pub fn UnloadKeyboardLayout( hkl: HKL, ) -> BOOL; pub fn GetKeyboardLayoutNameA( pwszKLID: LPSTR, ) -> BOOL; pub fn GetKeyboardLayoutNameW( pwszKLID: LPWSTR, ) -> BOOL; pub fn GetKeyboardLayoutList( nBuff: c_int, lpList: *mut HKL, ) -> c_int; pub fn GetKeyboardLayout( idThread: DWORD, ) -> HKL; } STRUCT!{struct MOUSEMOVEPOINT { x: c_int, y: c_int, time: DWORD, dwExtraInfo: ULONG_PTR, }} pub type PMOUSEMOVEPOINT = *mut MOUSEMOVEPOINT; pub type 
LPMOUSEMOVEPOINT = *mut MOUSEMOVEPOINT; pub const GMMP_USE_DISPLAY_POINTS: DWORD = 1; pub const GMMP_USE_HIGH_RESOLUTION_POINTS: DWORD = 2; extern "system" { pub fn GetMouseMovePointsEx( cbSize: UINT, lppt: LPMOUSEMOVEPOINT, lpptBuf: LPMOUSEMOVEPOINT, nBufPoints: c_int, resolution: DWORD, ) -> c_int; } pub const DESKTOP_READOBJECTS: DWORD = 0x0001; pub const DESKTOP_CREATEWINDOW: DWORD = 0x0002; pub const DESKTOP_CREATEMENU: DWORD = 0x0004; pub const DESKTOP_HOOKCONTROL: DWORD = 0x0008; pub const DESKTOP_JOURNALRECORD: DWORD = 0x0010; pub const DESKTOP_JOURNALPLAYBACK: DWORD = 0x0020; pub const DESKTOP_ENUMERATE: DWORD = 0x0040; pub const DESKTOP_WRITEOBJECTS: DWORD = 0x0080; pub const DESKTOP_SWITCHDESKTOP: DWORD = 0x0100; pub const DF_ALLOWOTHERACCOUNTHOOK: DWORD = 0x0001; extern "system" { pub fn CreateDesktopA( lpszDesktop: LPCSTR, lpszDevice: LPCSTR, pDevmode: *mut DEVMODEA, dwFlags: DWORD, dwDesiredAccess: ACCESS_MASK, lpsa: LPSECURITY_ATTRIBUTES, ) -> HDESK; pub fn CreateDesktopW( lpszDesktop: LPCWSTR, lpszDevice: LPCWSTR, pDevmode: *mut DEVMODEW, dwFlags: DWORD, dwDesiredAccess: ACCESS_MASK, lpsa: LPSECURITY_ATTRIBUTES, ) -> HDESK; pub fn CreateDesktopExA( lpszDesktop: LPCSTR, lpszDevice: LPCSTR, pDevmode: *mut DEVMODEA, dwFlags: DWORD, dwDesiredAccess: ACCESS_MASK, lpsa: LPSECURITY_ATTRIBUTES, ulHeapSize: ULONG, pvoid: PVOID, ) -> HDESK; pub fn CreateDesktopExW( lpszDesktop: LPCWSTR, lpszDevice: LPCWSTR, pDevmode: *mut DEVMODEW, dwFlags: DWORD, dwDesiredAccess: ACCESS_MASK, lpsa: LPSECURITY_ATTRIBUTES, ulHeapSize: ULONG, pvoid: PVOID, ) -> HDESK; pub fn OpenDesktopA( lpszDesktop: LPCSTR, dwFlags: DWORD, fInherit: BOOL, dwDesiredAccess: ACCESS_MASK, ) -> HDESK; pub fn OpenDesktopW( lpszDesktop: LPCWSTR, dwFlags: DWORD, fInherit: BOOL, dwDesiredAccess: ACCESS_MASK, ) -> HDESK; pub fn OpenInputDesktop( dwFlags: DWORD, fInherit: BOOL, dwDesiredAccess: ACCESS_MASK, ) -> HDESK; pub fn EnumDesktopsA( hwinsta: HWINSTA, lpEnumFunc: DESKTOPENUMPROCA, lParam: LPARAM, 
) -> BOOL; pub fn EnumDesktopsW( hwinsta: HWINSTA, lpEnumFunc: DESKTOPENUMPROCW, lParam: LPARAM, ) -> BOOL; pub fn EnumDesktopWindows( hDesktop: HDESK, lpfn: WNDENUMPROC, lParam: LPARAM, ) -> BOOL; pub fn SwitchDesktop( hDesktop: HDESK, ) -> BOOL; pub fn SetThreadDesktop( hDesktop: HDESK, ) -> BOOL; pub fn CloseDesktop( hDesktop: HDESK, ) -> BOOL; pub fn GetThreadDesktop( dwThreadId: DWORD, ) -> HDESK; } pub const WINSTA_ENUMDESKTOPS: DWORD = 0x0001; pub const WINSTA_READATTRIBUTES: DWORD = 0x0002; pub const WINSTA_ACCESSCLIPBOARD: DWORD = 0x0004; pub const WINSTA_CREATEDESKTOP: DWORD = 0x0008; pub const WINSTA_WRITEATTRIBUTES: DWORD = 0x0010; pub const WINSTA_ACCESSGLOBALATOMS: DWORD = 0x0020; pub const WINSTA_EXITWINDOWS: DWORD = 0x0040; pub const WINSTA_ENUMERATE: DWORD = 0x0100; pub const WINSTA_READSCREEN: DWORD = 0x0200; pub const WINSTA_ALL_ACCESS: DWORD = WINSTA_ENUMDESKTOPS | WINSTA_READATTRIBUTES | WINSTA_ACCESSCLIPBOARD | WINSTA_CREATEDESKTOP | WINSTA_WRITEATTRIBUTES | WINSTA_ACCESSGLOBALATOMS | WINSTA_EXITWINDOWS | WINSTA_ENUMERATE | WINSTA_READSCREEN; pub const CWF_CREATE_ONLY: DWORD = 0x00000001; pub const WSF_VISIBLE: DWORD = 0x0001; extern "system" { pub fn CreateWindowStationA( lpwinsta: LPCSTR, dwFlags: DWORD, dwDesiredAccess: ACCESS_MASK, lpsa: LPSECURITY_ATTRIBUTES, ) -> HWINSTA; pub fn CreateWindowStationW( lpwinsta: LPCWSTR, dwFlags: DWORD, dwDesiredAccess: ACCESS_MASK, lpsa: LPSECURITY_ATTRIBUTES, ) -> HWINSTA; pub fn OpenWindowStationA( lpszWinSta: LPCSTR, fInherit: BOOL, dwDesiredAccess: ACCESS_MASK, ) -> HWINSTA; pub fn OpenWindowStationW( lpszWinSta: LPCWSTR, fInherit: BOOL, dwDesiredAccess: ACCESS_MASK, ) -> HWINSTA; pub fn EnumWindowStationsA( lpEnumFunc: WINSTAENUMPROCA, lParam: LPARAM, ) -> BOOL; pub fn EnumWindowStationsW( lpEnumFunc: WINSTAENUMPROCW, lParam: LPARAM, ) -> BOOL; pub fn CloseWindowStation( hWinSta: HWINSTA, ) -> BOOL; pub fn SetProcessWindowStation( hWinSta: HWINSTA, ) -> BOOL; pub fn GetProcessWindowStation( ) -> 
HWINSTA; pub fn SetUserObjectSecurity( hObj: HANDLE, pSIRequested: PSECURITY_INFORMATION, pSID: PSECURITY_DESCRIPTOR, ) -> BOOL; pub fn GetUserObjectSecurity( hObj: HANDLE, pSIRequested: PSECURITY_INFORMATION, pSID: PSECURITY_DESCRIPTOR, nLength: DWORD, lpnLengthNeeded: LPDWORD, ) -> BOOL; } pub const UOI_FLAGS: DWORD = 1; pub const UOI_NAME: DWORD = 2; pub const UOI_TYPE: DWORD = 3; pub const UOI_USER_SID: DWORD = 4; pub const UOI_HEAPSIZE: DWORD = 5; pub const UOI_IO: DWORD = 6; pub const UOI_TIMERPROC_EXCEPTION_SUPPRESSION: DWORD = 7; STRUCT!{struct USEROBJECTFLAGS { fInherit: BOOL, fReserved: BOOL, dwFlags: DWORD, }} pub type PUSEROBJECTFLAGS = *mut USEROBJECTFLAGS; extern "system" { pub fn GetUserObjectInformationA( hObj: HANDLE, nIndex: c_int, pvInfo: PVOID, nLength: DWORD, lpnLengthNeeded: LPDWORD, ) -> BOOL; pub fn GetUserObjectInformationW( hObj: HANDLE, nIndex: c_int, pvInfo: PVOID, nLength: DWORD, lpnLengthNeeded: LPDWORD, ) -> BOOL; pub fn SetUserObjectInformationA( hObj: HANDLE, nIndex: c_int, pvInfo: PVOID, nLength: DWORD, ) -> BOOL; pub fn SetUserObjectInformationW( hObj: HANDLE, nIndex: c_int, pvInfo: PVOID, nLength: DWORD, ) -> BOOL; } STRUCT!{struct WNDCLASSEXA { cbSize: UINT, style: UINT, lpfnWndProc: WNDPROC, cbClsExtra: c_int, cbWndExtra: c_int, hInstance: HINSTANCE, hIcon: HICON, hCursor: HCURSOR, hbrBackground: HBRUSH, lpszMenuName: LPCSTR, lpszClassName: LPCSTR, hIconSm: HICON, }} pub type PWNDCLASSEXA = *mut WNDCLASSEXA; pub type NPWNDCLASSEXA = *mut WNDCLASSEXA; pub type LPWNDCLASSEXA = *mut WNDCLASSEXA; STRUCT!{struct WNDCLASSEXW { cbSize: UINT, style: UINT, lpfnWndProc: WNDPROC, cbClsExtra: c_int, cbWndExtra: c_int, hInstance: HINSTANCE, hIcon: HICON, hCursor: HCURSOR, hbrBackground: HBRUSH, lpszMenuName: LPCWSTR, lpszClassName: LPCWSTR, hIconSm: HICON, }} pub type PWNDCLASSEXW = *mut WNDCLASSEXW; pub type NPWNDCLASSEXW = *mut WNDCLASSEXW; pub type LPWNDCLASSEXW = *mut WNDCLASSEXW; STRUCT!{struct WNDCLASSA { style: UINT, lpfnWndProc: 
WNDPROC, cbClsExtra: c_int, cbWndExtra: c_int, hInstance: HINSTANCE, hIcon: HICON, hCursor: HCURSOR, hbrBackground: HBRUSH, lpszMenuName: LPCSTR, lpszClassName: LPCSTR, }} pub type PWNDCLASSA = *mut WNDCLASSA; pub type NPWNDCLASSA = *mut WNDCLASSA; pub type LPWNDCLASSA = *mut WNDCLASSA; STRUCT!{struct WNDCLASSW { style: UINT, lpfnWndProc: WNDPROC, cbClsExtra: c_int, cbWndExtra: c_int, hInstance: HINSTANCE, hIcon: HICON, hCursor: HCURSOR, hbrBackground: HBRUSH, lpszMenuName: LPCWSTR, lpszClassName: LPCWSTR, }} pub type PWNDCLASSW = *mut WNDCLASSW; pub type NPWNDCLASSW = *mut WNDCLASSW; pub type LPWNDCLASSW = *mut WNDCLASSW; extern "system" { pub fn IsHungAppWindow( hwnd: HWND, ) -> BOOL; pub fn DisableProcessWindowsGhosting(); } STRUCT!{struct MSG { hwnd: HWND, message: UINT, wParam: WPARAM, lParam: LPARAM, time: DWORD, pt: POINT, }} pub type PMSG = *mut MSG; pub type NPMSG = *mut MSG; pub type LPMSG = *mut MSG; //POINTSTOPOINT //POINTTOPOINTS //MAKEWPARAM //MAKELPARAM //MAKELRESULT pub const GWL_WNDPROC: c_int = -4; pub const GWL_HINSTANCE: c_int = -6; pub const GWL_HWNDPARENT: c_int = -8; pub const GWL_STYLE: c_int = -16; pub const GWL_EXSTYLE: c_int = -20; pub const GWL_USERDATA: c_int = -21; pub const GWL_ID: c_int = -12; pub const GWLP_WNDPROC: c_int = -4; pub const GWLP_HINSTANCE: c_int = -6; pub const GWLP_HWNDPARENT: c_int = -8; pub const GWLP_USERDATA: c_int = -21; pub const GWLP_ID: c_int = -12; pub const GCL_MENUNAME: c_int = -8; pub const GCL_HBRBACKGROUND: c_int = -10; pub const GCL_HCURSOR: c_int = -12; pub const GCL_HICON: c_int = -14; pub const GCL_HMODULE: c_int = -16; pub const GCL_CBWNDEXTRA: c_int = -18; pub const GCL_CBCLSEXTRA: c_int = -20; pub const GCL_WNDPROC: c_int = -24; pub const GCL_STYLE: c_int = -26; pub const GCW_ATOM: c_int = -32; pub const GCL_HICONSM: c_int = -34; pub const GCLP_MENUNAME: c_int = -8; pub const GCLP_HBRBACKGROUND: c_int = -10; pub const GCLP_HCURSOR: c_int = -12; pub const GCLP_HICON: c_int = -14; pub const 
GCLP_HMODULE: c_int = -16; pub const GCLP_WNDPROC: c_int = -24; pub const GCLP_HICONSM: c_int = -34; pub const WM_NULL: UINT = 0x0000; pub const WM_CREATE: UINT = 0x0001; pub const WM_DESTROY: UINT = 0x0002; pub const WM_MOVE: UINT = 0x0003; pub const WM_SIZE: UINT = 0x0005; pub const WM_ACTIVATE: UINT = 0x0006; pub const WA_INACTIVE: WORD = 0; pub const WA_ACTIVE: WORD = 1; pub const WA_CLICKACTIVE: WORD = 2; pub const WM_SETFOCUS: UINT = 0x0007; pub const WM_KILLFOCUS: UINT = 0x0008; pub const WM_ENABLE: UINT = 0x000A; pub const WM_SETREDRAW: UINT = 0x000B; pub const WM_SETTEXT: UINT = 0x000C; pub const WM_GETTEXT: UINT = 0x000D; pub const WM_GETTEXTLENGTH: UINT = 0x000E; pub const WM_PAINT: UINT = 0x000F; pub const WM_CLOSE: UINT = 0x0010; pub const WM_QUERYENDSESSION: UINT = 0x0011; pub const WM_QUERYOPEN: UINT = 0x0013; pub const WM_ENDSESSION: UINT = 0x0016; pub const WM_QUIT: UINT = 0x0012; pub const WM_ERASEBKGND: UINT = 0x0014; pub const WM_SYSCOLORCHANGE: UINT = 0x0015; pub const WM_SHOWWINDOW: UINT = 0x0018; pub const WM_WININICHANGE: UINT = 0x001A; pub const WM_SETTINGCHANGE: UINT = WM_WININICHANGE; pub const WM_DEVMODECHANGE: UINT = 0x001B; pub const WM_ACTIVATEAPP: UINT = 0x001C; pub const WM_FONTCHANGE: UINT = 0x001D; pub const WM_TIMECHANGE: UINT = 0x001E; pub const WM_CANCELMODE: UINT = 0x001F; pub const WM_SETCURSOR: UINT = 0x0020; pub const WM_MOUSEACTIVATE: UINT = 0x0021; pub const WM_CHILDACTIVATE: UINT = 0x0022; pub const WM_QUEUESYNC: UINT = 0x0023; pub const WM_GETMINMAXINFO: UINT = 0x0024; STRUCT!{struct MINMAXINFO { ptReserved: POINT, ptMaxSize: POINT, ptMaxPosition: POINT, ptMinTrackSize: POINT, ptMaxTrackSize: POINT, }} pub type PMINMAXINFO = *mut MINMAXINFO; pub type LPMINMAXINFO = *mut MINMAXINFO; pub const WM_PAINTICON: UINT = 0x0026; pub const WM_ICONERASEBKGND: UINT = 0x0027; pub const WM_NEXTDLGCTL: UINT = 0x0028; pub const WM_SPOOLERSTATUS: UINT = 0x002A; pub const WM_DRAWITEM: UINT = 0x002B; pub const WM_MEASUREITEM: UINT = 
0x002C; pub const WM_DELETEITEM: UINT = 0x002D; pub const WM_VKEYTOITEM: UINT = 0x002E; pub const WM_CHARTOITEM: UINT = 0x002F; pub const WM_SETFONT: UINT = 0x0030; pub const WM_GETFONT: UINT = 0x0031; pub const WM_SETHOTKEY: UINT = 0x0032; pub const WM_GETHOTKEY: UINT = 0x0033; pub const WM_QUERYDRAGICON: UINT = 0x0037; pub const WM_COMPAREITEM: UINT = 0x0039; pub const WM_GETOBJECT: UINT = 0x003D; pub const WM_COMPACTING: UINT = 0x0041; pub const WM_COMMNOTIFY: UINT = 0x0044; pub const WM_WINDOWPOSCHANGING: UINT = 0x0046; pub const WM_WINDOWPOSCHANGED: UINT = 0x0047; pub const WM_POWER: UINT = 0x0048; pub const PWR_OK: WPARAM = 1; pub const PWR_FAIL: WPARAM = -1isize as WPARAM; pub const PWR_SUSPENDREQUEST: WPARAM = 1; pub const PWR_SUSPENDRESUME: WPARAM = 2; pub const PWR_CRITICALRESUME: WPARAM = 3; pub const WM_COPYDATA: UINT = 0x004A; pub const WM_CANCELJOURNAL: UINT = 0x004B; STRUCT!{struct COPYDATASTRUCT { dwData: ULONG_PTR, cbData: DWORD, lpData: PVOID, }} pub type PCOPYDATASTRUCT = *mut COPYDATASTRUCT; STRUCT!{struct MDINEXTMENU { hmenuIn: HMENU, hmenuNext: HMENU, hwndNext: HWND, }} pub type PMDINEXTMENU = *mut MDINEXTMENU; pub type LPMDINEXTMENU = *mut MDINEXTMENU; pub const WM_NOTIFY: UINT = 0x004E; pub const WM_INPUTLANGCHANGEREQUEST: UINT = 0x0050; pub const WM_INPUTLANGCHANGE: UINT = 0x0051; pub const WM_TCARD: UINT = 0x0052; pub const WM_HELP: UINT = 0x0053; pub const WM_USERCHANGED: UINT = 0x0054; pub const WM_NOTIFYFORMAT: UINT = 0x0055; pub const NFR_ANSI: LRESULT = 1; pub const NFR_UNICODE: LRESULT = 2; pub const NF_QUERY: LPARAM = 3; pub const NF_REQUERY: LPARAM = 4; pub const WM_CONTEXTMENU: UINT = 0x007B; pub const WM_STYLECHANGING: UINT = 0x007C; pub const WM_STYLECHANGED: UINT = 0x007D; pub const WM_DISPLAYCHANGE: UINT = 0x007E; pub const WM_GETICON: UINT = 0x007F; pub const WM_SETICON: UINT = 0x0080; pub const WM_NCCREATE: UINT = 0x0081; pub const WM_NCDESTROY: UINT = 0x0082; pub const WM_NCCALCSIZE: UINT = 0x0083; pub const WM_NCHITTEST: 
UINT = 0x0084; pub const WM_NCPAINT: UINT = 0x0085; pub const WM_NCACTIVATE: UINT = 0x0086; pub const WM_GETDLGCODE: UINT = 0x0087; pub const WM_SYNCPAINT: UINT = 0x0088; pub const WM_NCMOUSEMOVE: UINT = 0x00A0; pub const WM_NCLBUTTONDOWN: UINT = 0x00A1; pub const WM_NCLBUTTONUP: UINT = 0x00A2; pub const WM_NCLBUTTONDBLCLK: UINT = 0x00A3; pub const WM_NCRBUTTONDOWN: UINT = 0x00A4; pub const WM_NCRBUTTONUP: UINT = 0x00A5; pub const WM_NCRBUTTONDBLCLK: UINT = 0x00A6; pub const WM_NCMBUTTONDOWN: UINT = 0x00A7; pub const WM_NCMBUTTONUP: UINT = 0x00A8; pub const WM_NCMBUTTONDBLCLK: UINT = 0x00A9; pub const WM_NCXBUTTONDOWN: UINT = 0x00AB; pub const WM_NCXBUTTONUP: UINT = 0x00AC; pub const WM_NCXBUTTONDBLCLK: UINT = 0x00AD; pub const WM_INPUT_DEVICE_CHANGE: UINT = 0x00FE; pub const WM_INPUT: UINT = 0x00FF; pub const WM_KEYFIRST: UINT = 0x0100; pub const WM_KEYDOWN: UINT = 0x0100; pub const WM_KEYUP: UINT = 0x0101; pub const WM_CHAR: UINT = 0x0102; pub const WM_DEADCHAR: UINT = 0x0103; pub const WM_SYSKEYDOWN: UINT = 0x0104; pub const WM_SYSKEYUP: UINT = 0x0105; pub const WM_SYSCHAR: UINT = 0x0106; pub const WM_SYSDEADCHAR: UINT = 0x0107; pub const WM_UNICHAR: UINT = 0x0109; pub const WM_KEYLAST: UINT = 0x0109; pub const UNICODE_NOCHAR: WPARAM = 0xFFFF; pub const WM_IME_STARTCOMPOSITION: UINT = 0x010D; pub const WM_IME_ENDCOMPOSITION: UINT = 0x010E; pub const WM_IME_COMPOSITION: UINT = 0x010F; pub const WM_IME_KEYLAST: UINT = 0x010F; pub const WM_INITDIALOG: UINT = 0x0110; pub const WM_COMMAND: UINT = 0x0111; pub const WM_SYSCOMMAND: UINT = 0x0112; pub const WM_TIMER: UINT = 0x0113; pub const WM_HSCROLL: UINT = 0x0114; pub const WM_VSCROLL: UINT = 0x0115; pub const WM_INITMENU: UINT = 0x0116; pub const WM_INITMENUPOPUP: UINT = 0x0117; pub const WM_GESTURE: UINT = 0x0119; pub const WM_GESTURENOTIFY: UINT = 0x011A; pub const WM_MENUSELECT: UINT = 0x011F; pub const WM_MENUCHAR: UINT = 0x0120; pub const WM_ENTERIDLE: UINT = 0x0121; pub const WM_MENURBUTTONUP: UINT = 0x0122; 
// Window messages (WM_*) continued: menu messages, UI-state codes (UIS_*/UISF_*),
// control-color messages, mouse-button/wheel messages with their
// wParam-unpacking helper fns (GET_WHEEL_DELTA_WPARAM etc.) and XBUTTON ids,
// power-broadcast events (PBT_*), MDI messages, pointer/touch messages, IME,
// hover/leave, tablet, DPI, clipboard and DWM messages, the WM_APP/WM_USER
// ranges, sizing-edge codes (WMSZ_*) and the WM_NCHITTEST result codes (HT*).
pub const WM_MENUDRAG: UINT = 0x0123; pub const WM_MENUGETOBJECT: UINT = 0x0124; pub const WM_UNINITMENUPOPUP: UINT = 0x0125; pub const WM_MENUCOMMAND: UINT = 0x0126; pub const WM_CHANGEUISTATE: UINT = 0x0127; pub const WM_UPDATEUISTATE: UINT = 0x0128; pub const WM_QUERYUISTATE: UINT = 0x0129; pub const UIS_SET: WORD = 1; pub const UIS_CLEAR: WORD = 2; pub const UIS_INITIALIZE: WORD = 3; pub const UISF_HIDEFOCUS: WORD = 0x1; pub const UISF_HIDEACCEL: WORD = 0x2; pub const UISF_ACTIVE: WORD = 0x4; pub const WM_CTLCOLORMSGBOX: UINT = 0x0132; pub const WM_CTLCOLOREDIT: UINT = 0x0133; pub const WM_CTLCOLORLISTBOX: UINT = 0x0134; pub const WM_CTLCOLORBTN: UINT = 0x0135; pub const WM_CTLCOLORDLG: UINT = 0x0136; pub const WM_CTLCOLORSCROLLBAR: UINT = 0x0137; pub const WM_CTLCOLORSTATIC: UINT = 0x0138; pub const MN_GETHMENU: UINT = 0x01E1; pub const WM_MOUSEFIRST: UINT = 0x0200; pub const WM_MOUSEMOVE: UINT = 0x0200; pub const WM_LBUTTONDOWN: UINT = 0x0201; pub const WM_LBUTTONUP: UINT = 0x0202; pub const WM_LBUTTONDBLCLK: UINT = 0x0203; pub const WM_RBUTTONDOWN: UINT = 0x0204; pub const WM_RBUTTONUP: UINT = 0x0205; pub const WM_RBUTTONDBLCLK: UINT = 0x0206; pub const WM_MBUTTONDOWN: UINT = 0x0207; pub const WM_MBUTTONUP: UINT = 0x0208; pub const WM_MBUTTONDBLCLK: UINT = 0x0209; pub const WM_MOUSEWHEEL: UINT = 0x020A; pub const WM_XBUTTONDOWN: UINT = 0x020B; pub const WM_XBUTTONUP: UINT = 0x020C; pub const WM_XBUTTONDBLCLK: UINT = 0x020D; pub const WM_MOUSEHWHEEL: UINT = 0x020E; pub const WM_MOUSELAST: UINT = 0x020E; pub const WHEEL_DELTA: c_short = 120; #[inline] pub fn GET_WHEEL_DELTA_WPARAM(wParam: WPARAM) -> c_short { HIWORD(wParam as DWORD) as c_short } pub const WHEEL_PAGESCROLL: UINT = UINT_MAX; #[inline] pub fn GET_KEYSTATE_WPARAM(wParam: WPARAM) -> WORD { LOWORD(wParam as DWORD) } #[inline] pub fn GET_NCHITTEST_WPARAM(wParam: WPARAM) -> c_short { LOWORD(wParam as DWORD) as c_short } #[inline] pub fn GET_XBUTTON_WPARAM(wParam: WPARAM) -> WORD { HIWORD(wParam as
DWORD) } pub const XBUTTON1: WORD = 0x0001; pub const XBUTTON2: WORD = 0x0002; pub const WM_PARENTNOTIFY: UINT = 0x0210; pub const WM_ENTERMENULOOP: UINT = 0x0211; pub const WM_EXITMENULOOP: UINT = 0x0212; pub const WM_NEXTMENU: UINT = 0x0213; pub const WM_SIZING: UINT = 0x0214; pub const WM_CAPTURECHANGED: UINT = 0x0215; pub const WM_MOVING: UINT = 0x0216; pub const WM_POWERBROADCAST: UINT = 0x0218; pub const PBT_APMQUERYSUSPEND: WPARAM = 0x0000; pub const PBT_APMQUERYSTANDBY: WPARAM = 0x0001; pub const PBT_APMQUERYSUSPENDFAILED: WPARAM = 0x0002; pub const PBT_APMQUERYSTANDBYFAILED: WPARAM = 0x0003; pub const PBT_APMSUSPEND: WPARAM = 0x0004; pub const PBT_APMSTANDBY: WPARAM = 0x0005; pub const PBT_APMRESUMECRITICAL: WPARAM = 0x0006; pub const PBT_APMRESUMESUSPEND: WPARAM = 0x0007; pub const PBT_APMRESUMESTANDBY: WPARAM = 0x0008; pub const PBTF_APMRESUMEFROMFAILURE: LPARAM = 0x00000001; pub const PBT_APMBATTERYLOW: WPARAM = 0x0009; pub const PBT_APMPOWERSTATUSCHANGE: WPARAM = 0x000A; pub const PBT_APMOEMEVENT: WPARAM = 0x000B; pub const PBT_APMRESUMEAUTOMATIC: WPARAM = 0x0012; pub const PBT_POWERSETTINGCHANGE: WPARAM = 0x8013; STRUCT!{struct POWERBROADCAST_SETTING { PowerSetting: GUID, DataLength: DWORD, Data: [UCHAR; 1], }} pub type PPOWERBROADCAST_SETTING = *mut POWERBROADCAST_SETTING; pub const WM_DEVICECHANGE: UINT = 0x0219; pub const WM_MDICREATE: UINT = 0x0220; pub const WM_MDIDESTROY: UINT = 0x0221; pub const WM_MDIACTIVATE: UINT = 0x0222; pub const WM_MDIRESTORE: UINT = 0x0223; pub const WM_MDINEXT: UINT = 0x0224; pub const WM_MDIMAXIMIZE: UINT = 0x0225; pub const WM_MDITILE: UINT = 0x0226; pub const WM_MDICASCADE: UINT = 0x0227; pub const WM_MDIICONARRANGE: UINT = 0x0228; pub const WM_MDIGETACTIVE: UINT = 0x0229; pub const WM_MDISETMENU: UINT = 0x0230; pub const WM_ENTERSIZEMOVE: UINT = 0x0231; pub const WM_EXITSIZEMOVE: UINT = 0x0232; pub const WM_DROPFILES: UINT = 0x0233; pub const WM_MDIREFRESHMENU: UINT = 0x0234; pub const WM_POINTERDEVICECHANGE: UINT
= 0x238; pub const WM_POINTERDEVICEINRANGE: UINT = 0x239; pub const WM_POINTERDEVICEOUTOFRANGE: UINT = 0x23A; pub const WM_TOUCH: UINT = 0x0240; pub const WM_NCPOINTERUPDATE: UINT = 0x0241; pub const WM_NCPOINTERDOWN: UINT = 0x0242; pub const WM_NCPOINTERUP: UINT = 0x0243; pub const WM_POINTERUPDATE: UINT = 0x0245; pub const WM_POINTERDOWN: UINT = 0x0246; pub const WM_POINTERUP: UINT = 0x0247; pub const WM_POINTERENTER: UINT = 0x0249; pub const WM_POINTERLEAVE: UINT = 0x024A; pub const WM_POINTERACTIVATE: UINT = 0x024B; pub const WM_POINTERCAPTURECHANGED: UINT = 0x024C; pub const WM_TOUCHHITTESTING: UINT = 0x024D; pub const WM_POINTERWHEEL: UINT = 0x024E; pub const WM_POINTERHWHEEL: UINT = 0x024F; pub const DM_POINTERHITTEST: UINT = 0x0250; pub const WM_POINTERROUTEDTO: UINT = 0x0251; pub const WM_POINTERROUTEDAWAY: UINT = 0x0252; pub const WM_POINTERROUTEDRELEASED: UINT = 0x0253; pub const WM_IME_SETCONTEXT: UINT = 0x0281; pub const WM_IME_NOTIFY: UINT = 0x0282; pub const WM_IME_CONTROL: UINT = 0x0283; pub const WM_IME_COMPOSITIONFULL: UINT = 0x0284; pub const WM_IME_SELECT: UINT = 0x0285; pub const WM_IME_CHAR: UINT = 0x0286; pub const WM_IME_REQUEST: UINT = 0x0288; pub const WM_IME_KEYDOWN: UINT = 0x0290; pub const WM_IME_KEYUP: UINT = 0x0291; pub const WM_MOUSEHOVER: UINT = 0x02A1; pub const WM_MOUSELEAVE: UINT = 0x02A3; pub const WM_NCMOUSEHOVER: UINT = 0x02A0; pub const WM_NCMOUSELEAVE: UINT = 0x02A2; pub const WM_WTSSESSION_CHANGE: UINT = 0x02B1; pub const WM_TABLET_FIRST: UINT = 0x02c0; pub const WM_TABLET_LAST: UINT = 0x02df; pub const WM_DPICHANGED: UINT = 0x02E0; pub const WM_CUT: UINT = 0x0300; pub const WM_COPY: UINT = 0x0301; pub const WM_PASTE: UINT = 0x0302; pub const WM_CLEAR: UINT = 0x0303; pub const WM_UNDO: UINT = 0x0304; pub const WM_RENDERFORMAT: UINT = 0x0305; pub const WM_RENDERALLFORMATS: UINT = 0x0306; pub const WM_DESTROYCLIPBOARD: UINT = 0x0307; pub const WM_DRAWCLIPBOARD: UINT = 0x0308; pub const WM_PAINTCLIPBOARD: UINT = 0x0309; pub
const WM_VSCROLLCLIPBOARD: UINT = 0x030A; pub const WM_SIZECLIPBOARD: UINT = 0x030B; pub const WM_ASKCBFORMATNAME: UINT = 0x030C; pub const WM_CHANGECBCHAIN: UINT = 0x030D; pub const WM_HSCROLLCLIPBOARD: UINT = 0x030E; pub const WM_QUERYNEWPALETTE: UINT = 0x030F; pub const WM_PALETTEISCHANGING: UINT = 0x0310; pub const WM_PALETTECHANGED: UINT = 0x0311; pub const WM_HOTKEY: UINT = 0x0312; pub const WM_PRINT: UINT = 0x0317; pub const WM_PRINTCLIENT: UINT = 0x0318; pub const WM_APPCOMMAND: UINT = 0x0319; pub const WM_THEMECHANGED: UINT = 0x031A; pub const WM_CLIPBOARDUPDATE: UINT = 0x031D; pub const WM_DWMCOMPOSITIONCHANGED: UINT = 0x031E; pub const WM_DWMNCRENDERINGCHANGED: UINT = 0x031F; pub const WM_DWMCOLORIZATIONCOLORCHANGED: UINT = 0x0320; pub const WM_DWMWINDOWMAXIMIZEDCHANGE: UINT = 0x0321; pub const WM_DWMSENDICONICTHUMBNAIL: UINT = 0x0323; pub const WM_DWMSENDICONICLIVEPREVIEWBITMAP: UINT = 0x0326; pub const WM_GETTITLEBARINFOEX: UINT = 0x033F; pub const WM_HANDHELDFIRST: UINT = 0x0358; pub const WM_HANDHELDLAST: UINT = 0x035F; pub const WM_AFXFIRST: UINT = 0x0360; pub const WM_AFXLAST: UINT = 0x037F; pub const WM_PENWINFIRST: UINT = 0x0380; pub const WM_PENWINLAST: UINT = 0x038F; pub const WM_APP: UINT = 0x8000; pub const WM_USER: UINT = 0x0400; pub const WMSZ_LEFT: UINT = 1; pub const WMSZ_RIGHT: UINT = 2; pub const WMSZ_TOP: UINT = 3; pub const WMSZ_TOPLEFT: UINT = 4; pub const WMSZ_TOPRIGHT: UINT = 5; pub const WMSZ_BOTTOM: UINT = 6; pub const WMSZ_BOTTOMLEFT: UINT = 7; pub const WMSZ_BOTTOMRIGHT: UINT = 8; pub const HTERROR: LRESULT = (-2); pub const HTTRANSPARENT: LRESULT = (-1); pub const HTNOWHERE: LRESULT = 0; pub const HTCLIENT: LRESULT = 1; pub const HTCAPTION: LRESULT = 2; pub const HTSYSMENU: LRESULT = 3; pub const HTGROWBOX: LRESULT = 4; pub const HTSIZE: LRESULT = HTGROWBOX; pub const HTMENU: LRESULT = 5; pub const HTHSCROLL: LRESULT = 6; pub const HTVSCROLL: LRESULT = 7; pub const HTMINBUTTON: LRESULT = 8; pub const HTMAXBUTTON: LRESULT = 9;
// HT* hit-test results, continued: sizing edges/corners plus the newer object/close/help codes.
pub const HTLEFT: LRESULT = 10; pub const HTRIGHT: LRESULT = 11; pub const HTTOP: LRESULT = 12; pub const HTTOPLEFT: LRESULT = 13; pub const HTTOPRIGHT: LRESULT = 14; pub const HTBOTTOM: LRESULT = 15; pub const HTBOTTOMLEFT: LRESULT = 16; pub const HTBOTTOMRIGHT: LRESULT = 17; pub const HTBORDER: LRESULT = 18; pub const HTREDUCE: LRESULT = HTMINBUTTON; pub const HTZOOM: LRESULT = HTMAXBUTTON; pub const HTSIZEFIRST: LRESULT = HTLEFT; pub const HTSIZELAST: LRESULT = HTBOTTOMRIGHT; pub const HTOBJECT: LRESULT = 19; pub const HTCLOSE: LRESULT = 20; pub const HTHELP: LRESULT = 21;
// SMTO_*: SendMessageTimeout behaviour flags.
pub const SMTO_NORMAL: UINT = 0x0000; pub const SMTO_BLOCK: UINT = 0x0001; pub const SMTO_ABORTIFHUNG: UINT = 0x0002; pub const SMTO_NOTIMEOUTIFNOTHUNG: UINT = 0x0008; pub const SMTO_ERRORONEXIT: UINT = 0x0020;
// MA_*: WM_MOUSEACTIVATE return values; ICON_*: icon-size selectors (WM_GETICON / WM_SETICON).
pub const MA_ACTIVATE: UINT = 1; pub const MA_ACTIVATEANDEAT: UINT = 2; pub const MA_NOACTIVATE: UINT = 3; pub const MA_NOACTIVATEANDEAT: UINT = 4; pub const ICON_SMALL: UINT = 0; pub const ICON_BIG: UINT = 1; pub const ICON_SMALL2: UINT = 2;
// Registers a system-wide window message identified by name (ANSI / wide variants).
extern "system" {
    pub fn RegisterWindowMessageA( lpString: LPCSTR, ) -> UINT;
    pub fn RegisterWindowMessageW( lpString: LPCWSTR, ) -> UINT;
}
// SIZE_*: wParam of WM_SIZE. The SIZExxx names are the legacy aliases kept for old code.
pub const SIZE_RESTORED: WPARAM = 0; pub const SIZE_MINIMIZED: WPARAM = 1; pub const SIZE_MAXIMIZED: WPARAM = 2; pub const SIZE_MAXSHOW: WPARAM = 3; pub const SIZE_MAXHIDE: WPARAM = 4; pub const SIZENORMAL: WPARAM = SIZE_RESTORED; pub const SIZEICONIC: WPARAM = SIZE_MINIMIZED; pub const SIZEFULLSCREEN: WPARAM = SIZE_MAXIMIZED; pub const SIZEZOOMSHOW: WPARAM = SIZE_MAXSHOW; pub const SIZEZOOMHIDE: WPARAM = SIZE_MAXHIDE;
// WINDOWPOS: size/position/Z-order change data carried by WM_WINDOWPOSCHANGING / -CHANGED.
STRUCT!{struct WINDOWPOS {
    hwnd: HWND,
    hwndInsertAfter: HWND,
    x: c_int,
    y: c_int,
    cx: c_int,
    cy: c_int,
    flags: UINT,
}}
pub type LPWINDOWPOS = *mut WINDOWPOS; pub type PWINDOWPOS = *mut WINDOWPOS;
// NCCALCSIZE_PARAMS: lParam payload of WM_NCCALCSIZE (three rectangles plus the pending WINDOWPOS).
STRUCT!{struct NCCALCSIZE_PARAMS {
    rgrc: [RECT; 3],
    lppos: PWINDOWPOS,
}}
pub type LPNCCALCSIZE_PARAMS = *mut NCCALCSIZE_PARAMS;
// WVR_*: WM_NCCALCSIZE return-value flags.
pub const WVR_ALIGNTOP: LRESULT = 0x0010; pub const WVR_ALIGNLEFT: LRESULT = 0x0020; pub const WVR_ALIGNBOTTOM: LRESULT = 0x0040; pub const WVR_ALIGNRIGHT: LRESULT = 0x0080; pub const WVR_HREDRAW: LRESULT = 0x0100; pub const WVR_VREDRAW: LRESULT = 0x0200; pub const WVR_REDRAW: LRESULT = WVR_HREDRAW | WVR_VREDRAW; pub const WVR_VALIDRECTS: LRESULT = 0x0400;
// MK_*: button/modifier state bits in the wParam of mouse messages.
pub const MK_LBUTTON: WPARAM = 0x0001; pub const MK_RBUTTON: WPARAM = 0x0002; pub const MK_SHIFT: WPARAM = 0x0004; pub const MK_CONTROL: WPARAM = 0x0008; pub const MK_MBUTTON: WPARAM = 0x0010; pub const MK_XBUTTON1: WPARAM = 0x0020; pub const MK_XBUTTON2: WPARAM = 0x0040;
// TME_* flags and the TRACKMOUSEEVENT request structure for TrackMouseEvent.
pub const TME_HOVER: DWORD = 0x00000001; pub const TME_LEAVE: DWORD = 0x00000002; pub const TME_NONCLIENT: DWORD = 0x00000010; pub const TME_QUERY: DWORD = 0x40000000; pub const TME_CANCEL: DWORD = 0x80000000; pub const HOVER_DEFAULT: DWORD = 0xFFFFFFFF;
STRUCT!{struct TRACKMOUSEEVENT {
    cbSize: DWORD,
    dwFlags: DWORD,
    hwndTrack: HWND,
    dwHoverTime: DWORD,
}}
pub type LPTRACKMOUSEEVENT = *mut TRACKMOUSEEVENT;
extern "system" {
    pub fn TrackMouseEvent( lpEventTrack: LPTRACKMOUSEEVENT, ) -> BOOL;
}
// WS_*: window style bits. WS_MINIMIZEBOX/WS_MAXIMIZEBOX deliberately reuse the WS_GROUP /
// WS_TABSTOP values -- which meaning applies depends on whether the window has a system menu.
pub const WS_OVERLAPPED: DWORD = 0x00000000; pub const WS_POPUP: DWORD = 0x80000000; pub const WS_CHILD: DWORD = 0x40000000; pub const WS_MINIMIZE: DWORD = 0x20000000; pub const WS_VISIBLE: DWORD = 0x10000000; pub const WS_DISABLED: DWORD = 0x08000000; pub const WS_CLIPSIBLINGS: DWORD = 0x04000000; pub const WS_CLIPCHILDREN: DWORD = 0x02000000; pub const WS_MAXIMIZE: DWORD = 0x01000000; pub const WS_CAPTION: DWORD = 0x00C00000; pub const WS_BORDER: DWORD = 0x00800000; pub const WS_DLGFRAME: DWORD = 0x00400000; pub const WS_VSCROLL: DWORD = 0x00200000; pub const WS_HSCROLL: DWORD = 0x00100000; pub const WS_SYSMENU: DWORD = 0x00080000; pub const WS_THICKFRAME: DWORD = 0x00040000; pub const WS_GROUP: DWORD = 0x00020000; pub const WS_TABSTOP: DWORD = 0x00010000; pub const WS_MINIMIZEBOX: DWORD = 0x00020000; pub const WS_MAXIMIZEBOX: DWORD = 0x00010000; pub const WS_TILED: DWORD = WS_OVERLAPPED; pub const WS_ICONIC: DWORD = WS_MINIMIZE; pub const WS_SIZEBOX: DWORD = WS_THICKFRAME;
// WS_TILEDWINDOW refers to WS_OVERLAPPEDWINDOW before its declaration -- legal for Rust consts
// (item order is irrelevant at module scope).
pub const WS_TILEDWINDOW: DWORD = WS_OVERLAPPEDWINDOW; pub const WS_OVERLAPPEDWINDOW: DWORD = WS_OVERLAPPED | WS_CAPTION | WS_SYSMENU | WS_THICKFRAME | WS_MINIMIZEBOX | WS_MAXIMIZEBOX; pub const WS_POPUPWINDOW: DWORD = WS_POPUP | WS_BORDER | WS_SYSMENU; pub const WS_CHILDWINDOW: DWORD = WS_CHILD;
// WS_EX_*: extended window styles (CreateWindowEx dwExStyle).
pub const WS_EX_DLGMODALFRAME: DWORD = 0x00000001; pub const WS_EX_NOPARENTNOTIFY: DWORD = 0x00000004; pub const WS_EX_TOPMOST: DWORD = 0x00000008; pub const WS_EX_ACCEPTFILES: DWORD = 0x00000010; pub const WS_EX_TRANSPARENT: DWORD = 0x00000020; pub const WS_EX_MDICHILD: DWORD = 0x00000040; pub const WS_EX_TOOLWINDOW: DWORD = 0x00000080; pub const WS_EX_WINDOWEDGE: DWORD = 0x00000100; pub const WS_EX_CLIENTEDGE: DWORD = 0x00000200; pub const WS_EX_CONTEXTHELP: DWORD = 0x00000400; pub const WS_EX_RIGHT: DWORD = 0x00001000; pub const WS_EX_LEFT: DWORD = 0x00000000; pub const WS_EX_RTLREADING: DWORD = 0x00002000; pub const WS_EX_LTRREADING: DWORD = 0x00000000; pub const WS_EX_LEFTSCROLLBAR: DWORD = 0x00004000; pub const WS_EX_RIGHTSCROLLBAR: DWORD = 0x00000000; pub const WS_EX_CONTROLPARENT: DWORD = 0x00010000; pub const WS_EX_STATICEDGE: DWORD = 0x00020000; pub const WS_EX_APPWINDOW: DWORD = 0x00040000; pub const WS_EX_OVERLAPPEDWINDOW: DWORD = WS_EX_WINDOWEDGE | WS_EX_CLIENTEDGE; pub const WS_EX_PALETTEWINDOW: DWORD = WS_EX_WINDOWEDGE | WS_EX_TOOLWINDOW | WS_EX_TOPMOST; pub const WS_EX_LAYERED: DWORD = 0x00080000; pub const WS_EX_NOINHERITLAYOUT: DWORD = 0x00100000; pub const WS_EX_NOREDIRECTIONBITMAP: DWORD = 0x00200000; pub const WS_EX_LAYOUTRTL: DWORD = 0x00400000; pub const WS_EX_COMPOSITED: DWORD = 0x02000000; pub const WS_EX_NOACTIVATE: DWORD = 0x08000000;
// CS_*: window class styles (first part; the group continues after this chunk boundary).
pub const CS_VREDRAW: UINT = 0x0001; pub const CS_HREDRAW: UINT = 0x0002; pub const CS_DBLCLKS: UINT = 0x0008; pub const CS_OWNDC: UINT = 0x0020; pub const CS_CLASSDC: UINT = 0x0040; pub const CS_PARENTDC: UINT = 0x0080; pub const CS_NOCLOSE: UINT = 0x0200;
// CS_* window class styles, continued.
pub const CS_SAVEBITS: UINT = 0x0800; pub const CS_BYTEALIGNCLIENT: UINT = 0x1000; pub const CS_BYTEALIGNWINDOW: UINT = 0x2000; pub const CS_GLOBALCLASS: UINT = 0x4000; pub const CS_IME: UINT = 0x00010000; pub const CS_DROPSHADOW: UINT = 0x00020000;
// PRF_*: WM_PRINT / WM_PRINTCLIENT drawing-option flags.
pub const PRF_CHECKVISIBLE: UINT = 0x00000001; pub const PRF_NONCLIENT: UINT = 0x00000002; pub const PRF_CLIENT: UINT = 0x00000004; pub const PRF_ERASEBKGND: UINT = 0x00000008; pub const PRF_CHILDREN: UINT = 0x00000010; pub const PRF_OWNED: UINT = 0x00000020;
// BDR_* border-element bits and the EDGE_* combinations used with DrawEdge.
pub const BDR_RAISEDOUTER: UINT = 0x0001; pub const BDR_SUNKENOUTER: UINT = 0x0002; pub const BDR_RAISEDINNER: UINT = 0x0004; pub const BDR_SUNKENINNER: UINT = 0x0008; pub const BDR_OUTER: UINT = BDR_RAISEDOUTER | BDR_SUNKENOUTER; pub const BDR_INNER: UINT = BDR_RAISEDINNER | BDR_SUNKENINNER; pub const BDR_RAISED: UINT = BDR_RAISEDOUTER | BDR_RAISEDINNER; pub const BDR_SUNKEN: UINT = BDR_SUNKENOUTER | BDR_SUNKENINNER; pub const EDGE_RAISED: UINT = BDR_RAISEDOUTER | BDR_RAISEDINNER; pub const EDGE_SUNKEN: UINT = BDR_SUNKENOUTER | BDR_SUNKENINNER; pub const EDGE_ETCHED: UINT = BDR_SUNKENOUTER | BDR_RAISEDINNER; pub const EDGE_BUMP: UINT = BDR_RAISEDOUTER | BDR_SUNKENINNER;
// BF_*: which borders of the rectangle DrawEdge draws.
pub const BF_LEFT: UINT = 0x0001; pub const BF_TOP: UINT = 0x0002; pub const BF_RIGHT: UINT = 0x0004; pub const BF_BOTTOM: UINT = 0x0008; pub const BF_TOPLEFT: UINT = BF_TOP | BF_LEFT; pub const BF_TOPRIGHT: UINT = BF_TOP | BF_RIGHT; pub const BF_BOTTOMLEFT: UINT = BF_BOTTOM | BF_LEFT; pub const BF_BOTTOMRIGHT: UINT = BF_BOTTOM | BF_RIGHT; pub const BF_RECT: UINT = BF_LEFT | BF_TOP | BF_RIGHT | BF_BOTTOM; pub const BF_DIAGONAL: UINT = 0x0010; pub const BF_DIAGONAL_ENDTOPRIGHT: UINT = BF_DIAGONAL | BF_TOP | BF_RIGHT; pub const BF_DIAGONAL_ENDTOPLEFT: UINT = BF_DIAGONAL | BF_TOP | BF_LEFT; pub const BF_DIAGONAL_ENDBOTTOMLEFT: UINT = BF_DIAGONAL | BF_BOTTOM | BF_LEFT; pub const BF_DIAGONAL_ENDBOTTOMRIGHT: UINT = BF_DIAGONAL | BF_BOTTOM | BF_RIGHT; pub const BF_MIDDLE: UINT = 0x0800; pub const BF_SOFT: UINT = 0x1000; pub const BF_ADJUST: UINT = 0x2000; pub const BF_FLAT: UINT = 0x4000; pub const BF_MONO: UINT = 0x8000;
extern "system" {
    pub fn DrawEdge( hdc: HDC, qrc: LPRECT, edge: UINT, grfFlags: UINT, ) -> BOOL;
}
// DFC_* control classes and DFCS_* state flags for DrawFrameControl. The DFCS_* values are
// reused across classes (e.g. 0x0001 is CAPTIONMIN, MENUCHECK, SCROLLDOWN or BUTTONRADIOIMAGE
// depending on the DFC_* passed alongside).
pub const DFC_CAPTION: UINT = 1; pub const DFC_MENU: UINT = 2; pub const DFC_SCROLL: UINT = 3; pub const DFC_BUTTON: UINT = 4; pub const DFC_POPUPMENU: UINT = 5; pub const DFCS_CAPTIONCLOSE: UINT = 0x0000; pub const DFCS_CAPTIONMIN: UINT = 0x0001; pub const DFCS_CAPTIONMAX: UINT = 0x0002; pub const DFCS_CAPTIONRESTORE: UINT = 0x0003; pub const DFCS_CAPTIONHELP: UINT = 0x0004; pub const DFCS_MENUARROW: UINT = 0x0000; pub const DFCS_MENUCHECK: UINT = 0x0001; pub const DFCS_MENUBULLET: UINT = 0x0002; pub const DFCS_MENUARROWRIGHT: UINT = 0x0004; pub const DFCS_SCROLLUP: UINT = 0x0000; pub const DFCS_SCROLLDOWN: UINT = 0x0001; pub const DFCS_SCROLLLEFT: UINT = 0x0002; pub const DFCS_SCROLLRIGHT: UINT = 0x0003; pub const DFCS_SCROLLCOMBOBOX: UINT = 0x0005; pub const DFCS_SCROLLSIZEGRIP: UINT = 0x0008; pub const DFCS_SCROLLSIZEGRIPRIGHT: UINT = 0x0010; pub const DFCS_BUTTONCHECK: UINT = 0x0000; pub const DFCS_BUTTONRADIOIMAGE: UINT = 0x0001; pub const DFCS_BUTTONRADIOMASK: UINT = 0x0002; pub const DFCS_BUTTONRADIO: UINT = 0x0004; pub const DFCS_BUTTON3STATE: UINT = 0x0008; pub const DFCS_BUTTONPUSH: UINT = 0x0010; pub const DFCS_INACTIVE: UINT = 0x0100; pub const DFCS_PUSHED: UINT = 0x0200; pub const DFCS_CHECKED: UINT = 0x0400; pub const DFCS_TRANSPARENT: UINT = 0x0800; pub const DFCS_HOT: UINT = 0x1000; pub const DFCS_ADJUSTRECT: UINT = 0x2000; pub const DFCS_FLAT: UINT = 0x4000; pub const DFCS_MONO: UINT = 0x8000;
extern "system" {
    pub fn DrawFrameControl( hdc: HDC, lprc: LPRECT, uType: UINT, uState: UINT, ) -> BOOL;
}
// DC_*: DrawCaption option flags.
pub const DC_ACTIVE: UINT = 0x0001; pub const DC_SMALLCAP: UINT = 0x0002; pub const DC_ICON: UINT = 0x0004; pub const DC_TEXT: UINT = 0x0008; pub const DC_INBUTTON: UINT = 0x0010; pub const DC_GRADIENT: UINT = 0x0020; pub const DC_BUTTONS: UINT = 0x1000;
extern "system" {
    pub fn DrawCaption( hwnd: HWND, hdc: HDC, lprect: *const RECT, flags: UINT, ) -> BOOL;
}
// Minimize/restore animation helpers.
pub const IDANI_OPEN: c_int = 1; pub const IDANI_CAPTION: c_int = 3;
extern "system" {
    pub fn DrawAnimatedRects( hwnd: HWND, idAni: c_int, lprcFrom: *const RECT, lprcTo: *const RECT, ) -> BOOL;
}
// CF_*: standard clipboard formats, display-format variants, and the private/GDI-object ranges.
pub const CF_TEXT: UINT = 1; pub const CF_BITMAP: UINT = 2; pub const CF_METAFILEPICT: UINT = 3; pub const CF_SYLK: UINT = 4; pub const CF_DIF: UINT = 5; pub const CF_TIFF: UINT = 6; pub const CF_OEMTEXT: UINT = 7; pub const CF_DIB: UINT = 8; pub const CF_PALETTE: UINT = 9; pub const CF_PENDATA: UINT = 10; pub const CF_RIFF: UINT = 11; pub const CF_WAVE: UINT = 12; pub const CF_UNICODETEXT: UINT = 13; pub const CF_ENHMETAFILE: UINT = 14; pub const CF_HDROP: UINT = 15; pub const CF_LOCALE: UINT = 16; pub const CF_DIBV5: UINT = 17; pub const CF_MAX: UINT = 18; pub const CF_OWNERDISPLAY: UINT = 0x0080; pub const CF_DSPTEXT: UINT = 0x0081; pub const CF_DSPBITMAP: UINT = 0x0082; pub const CF_DSPMETAFILEPICT: UINT = 0x0083; pub const CF_DSPENHMETAFILE: UINT = 0x008E; pub const CF_PRIVATEFIRST: UINT = 0x0200; pub const CF_PRIVATELAST: UINT = 0x02FF; pub const CF_GDIOBJFIRST: UINT = 0x0300; pub const CF_GDIOBJLAST: UINT = 0x03FF;
// Accelerator-table flags and the ACCEL entry.
pub const FVIRTKEY: BYTE = TRUE as BYTE; pub const FNOINVERT: BYTE = 0x02; pub const FSHIFT: BYTE = 0x04; pub const FCONTROL: BYTE = 0x08; pub const FALT: BYTE = 0x10;
STRUCT!{struct ACCEL {
    fVirt: BYTE,
    key: WORD,
    cmd: WORD,
}}
pub type LPACCEL = *mut ACCEL;
// PAINTSTRUCT: BeginPaint/EndPaint bookkeeping (fRestore/fIncUpdate/rgbReserved are system-reserved).
STRUCT!{struct PAINTSTRUCT {
    hdc: HDC,
    fErase: BOOL,
    rcPaint: RECT,
    fRestore: BOOL,
    fIncUpdate: BOOL,
    rgbReserved: [BYTE; 32],
}}
pub type PPAINTSTRUCT = *mut PAINTSTRUCT; pub type NPPAINTSTRUCT = *mut PAINTSTRUCT; pub type LPPAINTSTRUCT = *mut PAINTSTRUCT;
// CREATESTRUCT: creation parameters passed via WM_CREATE / WM_NCCREATE (ANSI and wide variants).
STRUCT!{struct CREATESTRUCTA {
    lpCreateParams: LPVOID,
    hInstance: HINSTANCE,
    hMenu: HMENU,
    hwndParent: HWND,
    cy: c_int,
    cx: c_int,
    y: c_int,
    x: c_int,
    style: LONG,
    lpszName: LPCSTR,
    lpszClass: LPCSTR,
    dwExStyle: DWORD,
}}
pub type LPCREATESTRUCTA = *mut CREATESTRUCTA;
STRUCT!{struct CREATESTRUCTW {
    lpCreateParams: LPVOID,
    hInstance: HINSTANCE,
    hMenu: HMENU,
    hwndParent: HWND,
    cy: c_int,
    cx: c_int,
    y: c_int,
    x: c_int,
    style: LONG,
    lpszName: LPCWSTR,
    lpszClass: LPCWSTR,
    dwExStyle: DWORD,
}}
pub type LPCREATESTRUCTW = *mut CREATESTRUCTW;
// WINDOWPLACEMENT + WPF_* flags (Get/SetWindowPlacement).
STRUCT!{struct WINDOWPLACEMENT {
    length: UINT,
    flags: UINT,
    showCmd: UINT,
    ptMinPosition: POINT,
    ptMaxPosition: POINT,
    rcNormalPosition: RECT,
}}
pub type PWINDOWPLACEMENT = *mut WINDOWPLACEMENT; pub type LPWINDOWPLACEMENT = *mut WINDOWPLACEMENT;
pub const WPF_SETMINPOSITION: UINT = 0x0001; pub const WPF_RESTORETOMAXIMIZED: UINT = 0x0002; pub const WPF_ASYNCWINDOWPLACEMENT: UINT = 0x0004;
// NMHDR: common header of WM_NOTIFY payloads; STYLESTRUCT: WM_STYLECHANGING/-CHANGED payload.
STRUCT!{struct NMHDR {
    hwndFrom: HWND,
    idFrom: UINT_PTR,
    code: UINT,
}}
pub type LPNMHDR = *mut NMHDR;
STRUCT!{struct STYLESTRUCT {
    styleOld: DWORD,
    styleNew: DWORD,
}}
pub type LPSTYLESTRUCT = *mut STYLESTRUCT;
// Owner-draw support: control types (ODT_*), draw actions (ODA_*) and item states (ODS_*).
pub const ODT_MENU: UINT = 1; pub const ODT_LISTBOX: UINT = 2; pub const ODT_COMBOBOX: UINT = 3; pub const ODT_BUTTON: UINT = 4; pub const ODT_STATIC: UINT = 5; pub const ODA_DRAWENTIRE: UINT = 0x0001; pub const ODA_SELECT: UINT = 0x0002; pub const ODA_FOCUS: UINT = 0x0004; pub const ODS_SELECTED: UINT = 0x0001; pub const ODS_GRAYED: UINT = 0x0002; pub const ODS_DISABLED: UINT = 0x0004; pub const ODS_CHECKED: UINT = 0x0008; pub const ODS_FOCUS: UINT = 0x0010; pub const ODS_DEFAULT: UINT = 0x0020; pub const ODS_COMBOBOXEDIT: UINT = 0x1000; pub const ODS_HOTLIGHT: UINT = 0x0040; pub const ODS_INACTIVE: UINT = 0x0080; pub const ODS_NOACCEL: UINT = 0x0100; pub const ODS_NOFOCUSRECT: UINT = 0x0200;
// Owner-draw message payloads: WM_MEASUREITEM, WM_DRAWITEM, WM_DELETEITEM, WM_COMPAREITEM.
STRUCT!{struct MEASUREITEMSTRUCT {
    CtlType: UINT,
    CtlID: UINT,
    itemID: UINT,
    itemWidth: UINT,
    itemHeight: UINT,
    itemData: ULONG_PTR,
}}
pub type PMEASUREITEMSTRUCT = *mut MEASUREITEMSTRUCT; pub type LPMEASUREITEMSTRUCT = *mut MEASUREITEMSTRUCT;
STRUCT!{struct DRAWITEMSTRUCT {
    CtlType: UINT,
    CtlID: UINT,
    itemID: UINT,
    itemAction: UINT,
    itemState: UINT,
    hwndItem: HWND,
    hDC: HDC,
    rcItem: RECT,
    itemData: ULONG_PTR,
}}
pub type PDRAWITEMSTRUCT = *mut DRAWITEMSTRUCT; pub type LPDRAWITEMSTRUCT = *mut DRAWITEMSTRUCT;
STRUCT!{struct DELETEITEMSTRUCT {
    CtlType: UINT,
    CtlID: UINT,
    itemID: UINT,
    hwndItem: HWND,
    itemData: ULONG_PTR,
}}
pub type PDELETEITEMSTRUCT = *mut DELETEITEMSTRUCT; pub type LPDELETEITEMSTRUCT = *mut DELETEITEMSTRUCT;
STRUCT!{struct COMPAREITEMSTRUCT {
    CtlType: UINT,
    CtlID: UINT,
    hwndItem: HWND,
    itemID1: UINT,
    itemData1: ULONG_PTR,
    itemID2: UINT,
    itemData2: ULONG_PTR,
    dwLocaleId: DWORD,
}}
pub type PCOMPAREITEMSTRUCT = *mut COMPAREITEMSTRUCT; pub type LPCOMPAREITEMSTRUCT = *mut COMPAREITEMSTRUCT;
// Message-loop primitives.
extern "system" {
    pub fn GetMessageA( lpMsg: LPMSG, hWnd: HWND, wMsgFilterMin: UINT, wMsgFilterMax: UINT, ) -> BOOL;
    pub fn GetMessageW( lpMsg: LPMSG, hWnd: HWND, wMsgFilterMin: UINT, wMsgFilterMax: UINT, ) -> BOOL;
    pub fn TranslateMessage( lpmsg: *const MSG, ) -> BOOL;
    pub fn DispatchMessageA( lpmsg: *const MSG, ) -> LRESULT;
    pub fn DispatchMessageW( lpmsg: *const MSG, ) -> LRESULT;
    pub fn SetMessageQueue( cMessagesMax: c_int, ) -> BOOL;
    pub fn PeekMessageA( lpMsg: LPMSG, hWnd: HWND, wMsgFilterMin: UINT, wMsgFilterMax: UINT, wRemoveMsg: UINT, ) -> BOOL;
    pub fn PeekMessageW( lpMsg: LPMSG, hWnd: HWND, wMsgFilterMin: UINT, wMsgFilterMax: UINT, wRemoveMsg: UINT, ) -> BOOL;
}
// PM_*: PeekMessage removal/queue-status flags (QS_* masks shifted into the high word).
pub const PM_NOREMOVE: UINT = 0x0000; pub const PM_REMOVE: UINT = 0x0001; pub const PM_NOYIELD: UINT = 0x0002; pub const PM_QS_INPUT: UINT = QS_INPUT << 16; pub const PM_QS_POSTMESSAGE: UINT = (QS_POSTMESSAGE | QS_HOTKEY | QS_TIMER) << 16; pub const PM_QS_PAINT: UINT = QS_PAINT << 16; pub const PM_QS_SENDMESSAGE: UINT = QS_SENDMESSAGE << 16;
// Global hotkey registration; MOD_* are the fsModifiers bits, IDHOT_* the reserved system ids.
extern "system" {
    pub fn RegisterHotKey( hwnd: HWND, id: c_int, fsModifiers: UINT, vk: UINT, ) -> BOOL;
    pub fn UnregisterHotKey( hWnd: HWND, id: c_int, ) -> BOOL;
}
pub const MOD_ALT: LPARAM = 0x0001; pub const MOD_CONTROL: LPARAM = 0x0002; pub const MOD_SHIFT: LPARAM = 0x0004; pub const MOD_WIN: LPARAM = 0x0008; pub const MOD_NOREPEAT: LPARAM = 0x4000; pub const IDHOT_SNAPWINDOW: WPARAM = -1isize as usize; pub const IDHOT_SNAPDESKTOP: WPARAM = -2isize as usize;
// Session-end reasons (WM_ENDSESSION lParam) and ExitWindowsEx shutdown flags.
pub const ENDSESSION_CLOSEAPP: UINT = 0x00000001; pub const ENDSESSION_CRITICAL: UINT = 0x40000000; pub const ENDSESSION_LOGOFF: UINT = 0x80000000; pub const EWX_LOGOFF: UINT = 0x00000000; pub const EWX_SHUTDOWN: UINT = 0x00000001; pub const EWX_REBOOT: UINT = 0x00000002; pub const EWX_FORCE: UINT = 0x00000004; pub const EWX_POWEROFF: UINT = 0x00000008; pub const EWX_FORCEIFHUNG: UINT = 0x00000010; pub const EWX_QUICKRESOLVE: UINT = 0x00000020; pub const EWX_RESTARTAPPS: UINT = 0x00000040; pub const EWX_HYBRID_SHUTDOWN: UINT = 0x00400000; pub const EWX_BOOTOPTIONS: UINT = 0x01000000;
// ExitWindows (C-header macro; not bound here)
extern "system" {
    pub fn ExitWindowsEx( uFlags: UINT, dwReason: DWORD, ) -> BOOL;
    pub fn SwapMouseButton( fSwap: BOOL, ) -> BOOL;
    pub fn GetMessagePos() -> DWORD;
    pub fn GetMessageTime() -> LONG;
    pub fn GetMessageExtraInfo() -> LPARAM;
    pub fn GetUnpredictedMessagePos() -> DWORD;
    pub fn IsWow64Message() -> BOOL;
    pub fn SetMessageExtraInfo( lParam: LPARAM, ) -> LPARAM;
    pub fn SendMessageA( hWnd: HWND, Msg: UINT, wParam: WPARAM, lParam: LPARAM, ) -> LRESULT;
    pub fn SendMessageW( hWnd: HWND, Msg: UINT, wParam: WPARAM, lParam: LPARAM, ) -> LRESULT;
    pub fn SendMessageTimeoutA( hWnd: HWND, Msg: UINT, wParam: WPARAM, lParam: LPARAM, fuFlags: UINT, uTimeout: UINT, lpdwResult: PDWORD_PTR, ) -> LRESULT;
    pub fn SendMessageTimeoutW( hWnd: HWND, Msg: UINT, wParam: WPARAM, lParam: LPARAM, fuFlags: UINT, uTimeout: UINT, lpdwResult: PDWORD_PTR, ) -> LRESULT;
    pub fn SendNotifyMessageA( hWnd: HWND, msg: UINT, wParam: WPARAM, lParam: LPARAM, ) -> BOOL;
    pub fn SendNotifyMessageW( hWnd: HWND, msg: UINT, wParam: WPARAM, lParam: LPARAM, ) -> BOOL;
    pub fn SendMessageCallbackA( hWnd: HWND, Msg: UINT, wParam: WPARAM, lParam: LPARAM, lpResultCallBack: SENDASYNCPROC, dwData: ULONG_PTR, ) -> BOOL;
    pub fn SendMessageCallbackW( hWnd: HWND, Msg: UINT, wParam: WPARAM, lParam: LPARAM, lpResultCallBack: SENDASYNCPROC, dwData: ULONG_PTR, ) -> BOOL;
}
// Broadcast of system messages to multiple recipient classes.
STRUCT!{struct BSMINFO {
    cbSize: UINT,
    hdesk: HDESK,
    hwnd: HWND,
    luid: LUID,
}}
pub type PBSMINFO = *mut BSMINFO;
extern "system" {
    pub fn BroadcastSystemMessageExA( flags: DWORD, lpInfo: LPDWORD, Msg: UINT, wParam: WPARAM, lParam: LPARAM, pbsmInfo: PBSMINFO, ) -> c_long;
    pub fn BroadcastSystemMessageExW( flags: DWORD, lpInfo: LPDWORD, Msg: UINT, wParam: WPARAM, lParam: LPARAM, pbsmInfo: PBSMINFO, ) -> c_long;
    pub fn BroadcastSystemMessageA( flags: DWORD, lpInfo: LPDWORD, Msg: UINT, wParam: WPARAM, lParam: LPARAM, ) -> LONG;
    pub fn BroadcastSystemMessageW( flags: DWORD, lpInfo: LPDWORD, Msg: UINT, wParam: WPARAM, lParam: LPARAM, ) -> LONG;
}
// BSM_* recipient masks and BSF_* behaviour flags for BroadcastSystemMessage.
pub const BSM_ALLCOMPONENTS: DWORD = 0x00000000; pub const BSM_VXDS: DWORD = 0x00000001; pub const BSM_NETDRIVER: DWORD = 0x00000002; pub const BSM_INSTALLABLEDRIVERS: DWORD = 0x00000004; pub const BSM_APPLICATIONS: DWORD = 0x00000008; pub const BSM_ALLDESKTOPS: DWORD = 0x00000010; pub const BSF_QUERY: DWORD = 0x00000001; pub const BSF_IGNORECURRENTTASK: DWORD = 0x00000002; pub const BSF_FLUSHDISK: DWORD = 0x00000004; pub const BSF_NOHANG: DWORD = 0x00000008; pub const BSF_POSTMESSAGE: DWORD = 0x00000010; pub const BSF_FORCEIFHUNG: DWORD = 0x00000020; pub const BSF_NOTIMEOUTIFNOTHUNG: DWORD = 0x00000040; pub const BSF_ALLOWSFW: DWORD = 0x00000080; pub const BSF_SENDNOTIFYMESSAGE: DWORD = 0x00000100; pub const BSF_RETURNHDESK: DWORD = 0x00000200; pub const BSF_LUID: DWORD = 0x00000400; pub const BROADCAST_QUERY_DENY: DWORD = 0x424D5144;
// Device-change notification registration.
pub type HDEVNOTIFY = PVOID; pub type PHDEVNOTIFY = *mut HDEVNOTIFY;
pub const DEVICE_NOTIFY_WINDOW_HANDLE: DWORD = 0x00000000; pub const DEVICE_NOTIFY_SERVICE_HANDLE: DWORD = 0x00000001; pub const DEVICE_NOTIFY_ALL_INTERFACE_CLASSES: DWORD = 0x00000004;
extern "system" {
    pub fn RegisterDeviceNotificationA( hRecipient: HANDLE, notificationFilter: LPVOID, flags: DWORD, ) -> HDEVNOTIFY;
    pub fn RegisterDeviceNotificationW( hRecipient: HANDLE, notificationFilter: LPVOID, flags: DWORD, ) -> HDEVNOTIFY;
    pub fn UnregisterDeviceNotification( Handle: HDEVNOTIFY, ) -> BOOL;
}
// Power-setting / suspend-resume notifications, and message posting.
pub type HPOWERNOTIFY = PVOID; pub type PHPOWERNOTIFY = *mut HPOWERNOTIFY;
extern "system" {
    pub fn RegisterPowerSettingNotification( hRecipient: HANDLE, PowerSettingGuid: LPCGUID, Flags: DWORD, ) -> HPOWERNOTIFY;
    pub fn UnregisterPowerSettingNotification( Handle: HPOWERNOTIFY, ) -> BOOL;
    pub fn RegisterSuspendResumeNotification( hRecipient: HANDLE, Flags: DWORD, ) -> HPOWERNOTIFY;
    pub fn UnregisterSuspendResumeNotification( Handle: HPOWERNOTIFY, ) -> BOOL;
    pub fn PostMessageA( hWnd: HWND, Msg: UINT, wParam: WPARAM, lParam: LPARAM, ) -> BOOL;
    pub fn PostMessageW( hWnd: HWND, Msg: UINT, wParam: WPARAM, lParam: LPARAM, ) -> BOOL;
    pub fn PostThreadMessageA( idThread: DWORD, msg: UINT, wParam: WPARAM, lParam: LPARAM, ) -> BOOL;
    pub fn PostThreadMessageW( idThread: DWORD, msg: UINT, wParam: WPARAM, lParam: LPARAM, ) -> BOOL;
}
// PostAppMessageA
// PostAppMessageW
// (C-header macros; not bound here)
// Sentinel HWND values for PostMessage/FindWindowEx-style calls.
pub const HWND_BROADCAST: HWND = 0xffff as HWND; pub const HWND_MESSAGE: HWND = -3isize as HWND;
// Input attachment, message replies, and window-procedure plumbing.
extern "system" {
    pub fn AttachThreadInput( idAttach: DWORD, idAttachTo: DWORD, fAttach: BOOL, ) -> BOOL;
    pub fn ReplyMessage( lResult: LRESULT, ) -> BOOL;
    pub fn WaitMessage() -> BOOL;
    pub fn WaitForInputIdle( hProcess: HANDLE, dwMilliseconds: DWORD, ) -> DWORD;
    pub fn DefWindowProcA( hWnd: HWND, Msg: UINT, wParam: WPARAM, lParam: LPARAM, ) -> LRESULT;
    pub fn DefWindowProcW( hWnd: HWND, Msg: UINT, wParam: WPARAM, lParam: LPARAM, ) -> LRESULT;
    pub fn PostQuitMessage( nExitCode: c_int, );
    pub fn CallWindowProcA( lpPrevWndFunc: WNDPROC, hWnd: HWND, Msg: UINT, wParam: WPARAM, lParam: LPARAM, ) -> LRESULT;
    pub fn CallWindowProcW( lpPrevWndFunc: WNDPROC, hWnd: HWND, Msg: UINT, wParam: WPARAM, lParam: LPARAM, ) -> LRESULT;
    pub fn InSendMessage() -> BOOL;
    pub fn InSendMessageEx( lpReserved: LPVOID, ) -> DWORD;
}
// ISMEX_*: InSendMessageEx result bits.
pub const ISMEX_NOSEND: DWORD = 0x00000000; pub const ISMEX_SEND: DWORD = 0x00000001; pub const ISMEX_NOTIFY: DWORD = 0x00000002; pub const ISMEX_CALLBACK: DWORD = 0x00000004; pub const ISMEX_REPLIED: DWORD = 0x00000008;
// Double-click timing and window-class registration / query.
extern "system" {
    pub fn GetDoubleClickTime() -> UINT;
    pub fn SetDoubleClickTime( uInterval: UINT, ) -> BOOL;
    pub fn RegisterClassA( lpWndClass: *const WNDCLASSA, ) -> ATOM;
    pub fn RegisterClassW( lpWndClass: *const WNDCLASSW, ) -> ATOM;
    pub fn UnregisterClassA( lpClassName: LPCSTR, hInstance: HINSTANCE, ) -> BOOL;
    pub fn UnregisterClassW( lpClassName: LPCWSTR, hInstance: HINSTANCE, ) -> BOOL;
    pub fn GetClassInfoA( hInstance: HINSTANCE, lpClassName: LPCSTR, lpWndClass: LPWNDCLASSA, ) -> BOOL;
    pub fn GetClassInfoW( hInstance: HINSTANCE, lpClassName: LPCWSTR, lpWndClass: LPWNDCLASSW, ) -> BOOL;
    pub fn RegisterClassExA( lpWndClass: *const WNDCLASSEXA, ) -> ATOM;
    pub fn RegisterClassExW( lpWndClass: *const WNDCLASSEXW, ) -> ATOM;
    pub fn GetClassInfoExA( hinst: HINSTANCE, lpszClass: LPCSTR, lpwcx: LPWNDCLASSEXA, ) -> BOOL;
    pub fn GetClassInfoExW( hinst: HINSTANCE, lpszClass: LPCWSTR, lpwcx: LPWNDCLASSEXW, ) -> BOOL;
}
// CW_USEDEFAULT: "let the system pick" sentinel for CreateWindowEx position/size.
pub const CW_USEDEFAULT: c_int = 0x80000000u32 as i32; pub const HWND_DESKTOP: HWND = 0 as HWND;
FN!{stdcall PREGISTERCLASSNAMEW(
    LPCWSTR,
) -> BOOLEAN}
// Window creation (the CreateWindowA/W C macros are not bound; use the Ex variants).
extern "system" {
    pub fn CreateWindowExA( dwExStyle: DWORD, lpClassName: LPCSTR, lpWindowName: LPCSTR, dwStyle: DWORD, x: c_int, y: c_int, nWidth: c_int, nHeight: c_int, hWndParent: HWND, hMenu: HMENU, hInstance: HINSTANCE, lpParam: LPVOID, ) -> HWND;
    pub fn CreateWindowExW( dwExStyle: DWORD, lpClassName: LPCWSTR, lpWindowName: LPCWSTR, dwStyle: DWORD, x: c_int, y: c_int, nWidth: c_int, nHeight: c_int, hWndParent: HWND, hMenu: HMENU, hInstance: HINSTANCE, lpParam: LPVOID, ) -> HWND;
}
// CreateWindowA
// CreateWindowW
extern "system" {
    pub fn IsWindow( hWnd: HWND, ) -> BOOL;
    pub fn IsMenu( hMenu: HMENU, ) -> BOOL;
    pub fn IsChild( hWndParent: HWND, hWnd: HWND, ) -> BOOL;
    pub fn DestroyWindow( hWnd: HWND, ) -> BOOL;
    pub fn ShowWindow( hWnd: HWND, nCmdShow: c_int, ) -> BOOL;
    pub fn AnimateWindow( hWnd: HWND, dwTime: DWORD, dwFlags: DWORD, ) -> BOOL;
    pub fn UpdateLayeredWindow( hWnd: HWND, hdcDst: HDC, pptDst: *mut POINT, psize: *mut SIZE, hdcSrc: HDC, pptSrc: *mut POINT, crKey: COLORREF, pblend: *mut BLENDFUNCTION, dwFlags: DWORD, ) -> BOOL;
}
// Layered-window update descriptor for UpdateLayeredWindowIndirect.
STRUCT!{struct UPDATELAYEREDWINDOWINFO {
    cbSize: DWORD,
    hdcDst: HDC,
    pptDst: *const POINT,
    psize: *const SIZE,
    hdcSrc: HDC,
    pptSrc: *const POINT,
    crKey: COLORREF,
    pblend: *const BLENDFUNCTION,
    dwFlags: DWORD,
    prcDirty: *const RECT,
}}
pub type PUPDATELAYEREDWINDOWINFO = *mut UPDATELAYEREDWINDOWINFO;
extern "system" {
    pub fn UpdateLayeredWindowIndirect( hWnd: HWND, pULWInfo: *mut UPDATELAYEREDWINDOWINFO, ) -> BOOL;
    pub fn GetLayeredWindowAttributes( hwnd: HWND, pcrKey: *mut COLORREF, pbAlpha: *mut BYTE, pdwFlags: *mut DWORD, ) -> BOOL;
}
// PrintWindow flags and layered-window attribute flags (LWA_* / ULW_*).
pub const PW_CLIENTONLY: DWORD = 0x00000001; pub const PW_RENDERFULLCONTENT: DWORD = 0x00000002;
extern "system" {
    pub fn PrintWindow( hwnd: HWND, hdcBlt: HDC, nFlags: UINT, ) -> BOOL;
    pub fn SetLayeredWindowAttributes( hwnd: HWND, crKey: COLORREF, bAlpha: BYTE, dwFlags: DWORD, ) -> BOOL;
}
pub const LWA_COLORKEY: DWORD = 0x00000001; pub const LWA_ALPHA: DWORD = 0x00000002; pub const ULW_COLORKEY: DWORD = 0x00000001; pub const ULW_ALPHA: DWORD = 0x00000002; pub const ULW_OPAQUE: DWORD = 0x00000004; pub const ULW_EX_NORESIZE: DWORD = 0x00000008;
// Taskbar/caption flashing.
extern "system" {
    pub fn ShowWindowAsync( hWnd: HWND, nCmdShow: c_int, ) -> BOOL;
    pub fn FlashWindow( hwnd: HWND, bInvert: BOOL, ) -> BOOL;
}
STRUCT!{struct FLASHWINFO {
    cbSize: UINT,
    hwnd: HWND,
    dwFlags: DWORD,
    uCount: UINT,
    dwTimeout: DWORD,
}}
pub type PFLASHWINFO = *mut FLASHWINFO;
extern "system" {
    pub fn FlashWindowEx( pfwi: PFLASHWINFO, ) -> BOOL;
}
pub const FLASHW_STOP: DWORD = 0; pub const FLASHW_CAPTION: DWORD = 0x00000001; pub const FLASHW_TRAY: DWORD = 0x00000002; pub const FLASHW_ALL: DWORD = FLASHW_CAPTION | FLASHW_TRAY; pub const FLASHW_TIMER: DWORD = 0x00000004; pub const FLASHW_TIMERNOFG: DWORD = 0x0000000C;
// Window move/size/placement.
extern "system" {
    pub fn ShowOwnedPopups( hWnd: HWND, fShow: BOOL, ) -> BOOL;
    pub fn OpenIcon( hWnd: HWND, ) -> BOOL;
    pub fn CloseWindow( hWnd: HWND, ) -> BOOL;
    pub fn MoveWindow( hWnd: HWND, X: c_int, Y: c_int, nWidth: c_int, nHeight: c_int, bRepaint: BOOL, ) -> BOOL;
    pub fn SetWindowPos( hWnd: HWND, hWndInsertAfter: HWND, X: c_int, Y: c_int, cx: c_int, cy: c_int, uFlags: UINT, ) -> BOOL;
    pub fn GetWindowPlacement( hWnd: HWND, lpwndpl: *mut WINDOWPLACEMENT, ) -> BOOL;
    pub fn SetWindowPlacement( hWnd: HWND, lpwndpl: *const WINDOWPLACEMENT, ) -> BOOL;
}
// Display-affinity (screen-capture protection) and deferred multi-window positioning.
pub const WDA_NONE: DWORD = 0x00000000; pub const WDA_MONITOR: DWORD = 0x00000001;
extern "system" {
    pub fn GetWindowDisplayAffinity( hWnd: HWND, pdwAffinity: *mut DWORD, ) -> BOOL;
    pub fn SetWindowDisplayAffinity( hWnd: HWND, dwAffinity: DWORD, ) -> BOOL;
    pub fn BeginDeferWindowPos( nNumWindows: c_int, ) -> HDWP;
    // NOTE(review): parameter name "hWndInserAfter" [sic] is inherited from upstream; renaming
    // it would be a gratuitous interface change for an FFI binding, so it is kept as-is.
    pub fn DeferWindowPos( hWinPosInfo: HDWP, hWnd: HWND, hWndInserAfter: HWND, x: c_int, y: c_int, cx: c_int, cy: c_int, uFlags: UINT, ) -> HDWP;
    pub fn EndDeferWindowPos( hWinPosInfo: HDWP, ) -> BOOL;
    pub fn IsWindowVisible( hWnd: HWND, ) -> BOOL;
    pub fn IsIconic( hWnd: HWND, ) -> BOOL;
    pub fn AnyPopup() -> BOOL;
    pub fn BringWindowToTop( hWnd: HWND, ) -> BOOL;
    pub fn IsZoomed( hwnd: HWND, ) -> BOOL;
}
// SWP_*: SetWindowPos / DeferWindowPos flags, plus the special Z-order HWND sentinels.
pub const SWP_NOSIZE: UINT = 0x0001; pub const SWP_NOMOVE: UINT = 0x0002; pub const SWP_NOZORDER: UINT = 0x0004; pub const SWP_NOREDRAW: UINT = 0x0008; pub const SWP_NOACTIVATE: UINT = 0x0010; pub const SWP_FRAMECHANGED: UINT = 0x0020; pub const SWP_SHOWWINDOW: UINT = 0x0040; pub const SWP_HIDEWINDOW: UINT = 0x0080; pub const SWP_NOCOPYBITS: UINT = 0x0100; pub const SWP_NOOWNERZORDER: UINT = 0x0200; pub const SWP_NOSENDCHANGING: UINT = 0x0400; pub const SWP_DRAWFRAME: UINT = SWP_FRAMECHANGED; pub const SWP_NOREPOSITION: UINT = SWP_NOOWNERZORDER; pub const SWP_DEFERERASE: UINT = 0x2000; pub const SWP_ASYNCWINDOWPOS: UINT = 0x4000;
pub const HWND_TOP: HWND = 0 as HWND; pub const HWND_BOTTOM: HWND = 1 as HWND; pub const HWND_TOPMOST: HWND = -1isize as HWND; pub const HWND_NOTOPMOST: HWND = -2isize as HWND;
// In-memory dialog-template headers (followed in memory by variable-length data).
STRUCT!{struct DLGTEMPLATE {
    style: DWORD,
    dwExtendedStyle: DWORD,
    cdit: WORD,
    x: c_short,
    y: c_short,
    cx: c_short,
    cy: c_short,
}}
pub type LPDLGTEMPLATEA = *mut DLGTEMPLATE; pub type LPDLGTEMPLATEW = *mut DLGTEMPLATE; pub type LPCDLGTEMPLATEA = *const DLGTEMPLATE; pub type LPCDLGTEMPLATEW = *const DLGTEMPLATE;
STRUCT!{struct DLGITEMTEMPLATE {
    style: DWORD,
    dwExtendedStyle: DWORD,
    x: c_short,
    y: c_short,
    cx: c_short,
    cy: c_short,
    id: WORD,
}}
pub type PDLGITEMTEMPLATEA = *mut DLGITEMTEMPLATE; pub type PDLGITEMTEMPLATEW = *mut DLGITEMTEMPLATE; pub type LPDLGITEMTEMPLATEA = *mut DLGITEMTEMPLATE; pub type LPDLGITEMTEMPLATEW = *mut DLGITEMTEMPLATE;
// Modeless dialog creation (the parameterless C macros are not bound).
extern "system" {
    pub fn CreateDialogParamA( hInstance: HINSTANCE, lpTemplateName: LPCSTR, hWndParent: HWND, lpDialogFunc: DLGPROC, dwInitParam: LPARAM, ) -> HWND;
    pub fn CreateDialogParamW( hInstance: HINSTANCE, lpTemplateName: LPCWSTR, hWndParent: HWND, lpDialogFunc: DLGPROC, dwInitParam: LPARAM, ) -> HWND;
    pub fn CreateDialogIndirectParamA( hInstance: HINSTANCE, lpTemplate: LPCDLGTEMPLATEA, hWndParent: HWND, lpDialogFunc: DLGPROC, dwInitParam: LPARAM, ) -> HWND;
    pub fn CreateDialogIndirectParamW( hInstance: HINSTANCE, lpTemplate: LPCDLGTEMPLATEW, hWndParent: HWND, lpDialogFunc: DLGPROC, dwInitParam: LPARAM, ) -> HWND;
}
// CreateDialogA
// CreateDialogW
// CreateDialogIndirectA
// CreateDialogIndirectW
// Modal dialog creation.
extern "system" {
    pub fn DialogBoxParamA( hInstance: HINSTANCE, lpTemplateName: LPCSTR, hWndParent: HWND, lpDialogFunc: DLGPROC, dwInitParam: LPARAM, ) -> INT_PTR;
    pub fn DialogBoxParamW( hInstance: HINSTANCE, lpTemplateName: LPCWSTR, hWndParent: HWND, lpDialogFunc: DLGPROC, dwInitParam: LPARAM, ) -> INT_PTR;
    pub fn DialogBoxIndirectParamA( hInstance: HINSTANCE, hDialogTemplate: LPCDLGTEMPLATEA, hWndParent: HWND, lpDialogFunc: DLGPROC, dwInitParam: LPARAM, ) -> INT_PTR;
    pub fn DialogBoxIndirectParamW( hInstance: HINSTANCE, hDialogTemplate: LPCDLGTEMPLATEW, hWndParent: HWND, lpDialogFunc: DLGPROC, dwInitParam: LPARAM, ) -> INT_PTR;
}
// DialogBoxA
// DialogBoxW
// DialogBoxIndirectA
// DialogBoxIndirectW
// Dialog item access, message filtering, clipboard, and OEM character conversion.
// NOTE(review): this extern block continues past the end of this chunk (no closing brace here).
extern "system" {
    pub fn EndDialog( hDlg: HWND, nResult: INT_PTR, ) -> BOOL;
    pub fn GetDlgItem( hDlg: HWND, nIDDlgItem: c_int, ) -> HWND;
    pub fn SetDlgItemInt( hDlg: HWND, nIDDlgItem: c_int, uValue: UINT, bSigned: BOOL, ) -> BOOL;
    pub fn GetDlgItemInt( hDlg: HWND, nIDDlgItem: c_int, lpTranslated: *mut BOOL, bSigned: BOOL, ) -> UINT;
    pub fn SetDlgItemTextA( hDlg: HWND, nIDDlgItem: c_int, lpString: LPCSTR, ) -> BOOL;
    pub fn SetDlgItemTextW( hDlg: HWND, nIDDlgItem: c_int, lpString: LPCWSTR, ) -> BOOL;
    pub fn GetDlgItemTextA( hDlg: HWND, nIDDlgItem: c_int, lpString: LPSTR, nMaxCount: c_int, ) -> UINT;
    pub fn GetDlgItemTextW( hDlg: HWND, nIDDlgItem: c_int, lpString: LPWSTR, nMaxCount: c_int, ) -> UINT;
    pub fn CheckDlgButton( hDlg: HWND, nIDButton: c_int, uCheck: UINT, ) -> BOOL;
    // NOTE(review): "nIDLasatButton" [sic] -- upstream parameter-name typo, kept as-is.
    pub fn CheckRadioButton( hDlg: HWND, nIDFirstButton: c_int, nIDLasatButton: c_int, nIDCheckButton: c_int, ) -> BOOL;
    pub fn IsDlgButtonChecked( hDlg: HWND, nIDButton: c_int, ) -> UINT;
    pub fn SendDlgItemMessageA( hDlg: HWND, nIDDlgItem: c_int, Msg: UINT, wParam: WPARAM, lParam: LPARAM, ) -> LRESULT;
    pub fn SendDlgItemMessageW( hDlg: HWND, nIDDlgItem: c_int, Msg: UINT, wParam: WPARAM, lParam: LPARAM, ) -> LRESULT;
    pub fn GetNextDlgGroupItem( hDlg: HWND, hCtl: HWND, bPrevious: BOOL, ) -> HWND;
    pub fn GetNextDlgTabItem( hDlg: HWND, hCtl: HWND, bPrevious: BOOL, ) -> HWND;
    pub fn GetDlgCtrlID( hwnd: HWND, ) -> c_int;
    pub fn GetDialogBaseUnits() -> LONG;
    pub fn DefDlgProcA( hDlg: HWND, msg: UINT, wParam: WPARAM, lParam: LPARAM, ) -> LRESULT;
    pub fn DefDlgProcW( hDlg: HWND, msg: UINT, wParam: WPARAM, lParam: LPARAM, ) -> LRESULT;
    pub fn CallMsgFilterA( lpMsg: LPMSG, nCode: c_int, ) -> BOOL;
    pub fn CallMsgFilterW( lpMsg: LPMSG, nCode: c_int, ) -> BOOL;
    pub fn OpenClipboard( hWnd: HWND, ) -> BOOL;
    pub fn CloseClipboard() -> BOOL;
    pub fn GetClipboardSequenceNumber() -> DWORD;
    pub fn GetClipboardOwner() -> HWND;
    pub fn SetClipboardViewer( hWndNewViewer: HWND, ) -> HWND;
    pub fn GetClipboardViewer() -> HWND;
    pub fn ChangeClipboardChain( hwndRemove: HWND, hwndNewNext: HWND, ) -> BOOL;
    pub fn SetClipboardData( uFormat: UINT, hMem: HANDLE, ) -> HANDLE;
    pub fn GetClipboardData( uFormat: UINT, ) -> HANDLE;
    pub fn RegisterClipboardFormatA( lpszFormat: LPCSTR, ) -> UINT;
    pub fn RegisterClipboardFormatW( lpszFormat: LPCWSTR, ) -> UINT;
    pub fn CountClipboardFormats() -> c_int;
    pub fn EnumClipboardFormats( format: UINT, ) -> UINT;
    pub fn GetClipboardFormatNameA( format: UINT, lpszFormatName: LPSTR, cchMaxCount: c_int, ) -> c_int;
    pub fn GetClipboardFormatNameW( format: UINT, lpszFormatName: LPWSTR, cchMaxCount: c_int, ) -> c_int;
    pub fn EmptyClipboard() -> BOOL;
    pub fn IsClipboardFormatAvailable( format: UINT, ) -> BOOL;
    pub fn GetPriorityClipboardFormat( paFormatPriorityList: *mut UINT, cFormats: c_int, ) -> c_int;
    pub fn GetOpenClipboardWindow() -> HWND;
    pub fn AddClipboardFormatListener( hWnd: HWND, ) -> BOOL;
    pub fn RemoveClipboardFormatListener( hWnd: HWND, ) -> BOOL;
    pub fn GetUpdatedClipboardFormats( lpuiFormats: PUINT, cFormats: UINT, pcFormatsOUT: PUINT, ) -> BOOL;
    pub fn CharToOemA( pSrc: LPCSTR, pDst: LPSTR, ) -> BOOL;
    pub fn CharToOemW( pSrc: LPCWSTR, pDst: LPSTR, ) -> BOOL;
    pub fn OemToCharA( pSrc: LPCSTR, pDst: LPSTR, ) -> BOOL;
    pub fn OemToCharW( pSrc: LPCSTR, pDst: LPWSTR, ) -> BOOL;
    pub fn CharToOemBuffA( lpszSrc: LPCSTR, lpszDst: LPSTR, cchDstLength: DWORD, ) -> BOOL;
    pub fn CharToOemBuffW( lpszSrc: LPCWSTR, lpszDst: LPSTR, cchDstLength: DWORD, ) -> BOOL;
    pub fn OemToCharBuffA( lpszSrc: LPCSTR, lpszDst: LPSTR, cchDstLength: DWORD, ) -> BOOL;
    pub fn OemToCharBuffW( lpszSrc: LPCSTR, lpszDst: LPWSTR, cchDstLength: DWORD, ) -> BOOL;
    pub fn CharUpperA( lpsz: LPSTR, ) -> LPSTR;
    pub fn CharUpperW( lpsz: LPWSTR, ) -> LPWSTR;
pub fn CharUpperBuffA( lpsz: LPSTR, cchLength: DWORD, ) -> DWORD; pub fn CharUpperBuffW( lpsz: LPWSTR, cchLength: DWORD, ) -> DWORD; pub fn CharLowerA( lpsz: LPSTR, ) -> LPSTR; pub fn CharLowerW( lpsz: LPWSTR, ) -> LPWSTR; pub fn CharLowerBuffA( lpsz: LPSTR, cchLength: DWORD, ) -> DWORD; pub fn CharLowerBuffW( lpsz: LPWSTR, cchLength: DWORD, ) -> DWORD; pub fn CharNextA( lpsz: LPCSTR, ) -> LPSTR; pub fn CharNextW( lpsz: LPCWSTR, ) -> LPWSTR; pub fn CharPrevA( lpszStart: LPCSTR, lpszCurrent: LPCSTR, ) -> LPSTR; pub fn CharPrevW( lpszStart: LPCWSTR, lpszCurrent: LPCWSTR, ) -> LPWSTR; pub fn CharNextExA( codePage: WORD, lpCurrentChar: LPSTR, dwFlags: DWORD, ) -> LPSTR; pub fn CharPrevExA( codePage: WORD, lpStart: LPCSTR, lpCurrentChar: LPCSTR, dwFlags: DWORD, ) -> LPSTR; } // AnsiToOem // OemToAnsi // AnsiToOemBuff // OemToAnsiBuff // AnsiUpper // AnsiUpperBuff // AnsiLower // AnsiLowerBuff // AnsiNext // AnsiPrev extern "system" { pub fn IsCharAlphaA( ch: CHAR, ) -> BOOL; pub fn IsCharAlphaW( ch: WCHAR, ) -> BOOL; pub fn IsCharAlphaNumericA( ch: CHAR, ) -> BOOL; pub fn IsCharAlphaNumericW( ch: WCHAR, ) -> BOOL; pub fn IsCharUpperA( ch: CHAR, ) -> BOOL; pub fn IsCharUpperW( ch: WCHAR, ) -> BOOL; pub fn IsCharLowerA( ch: CHAR, ) -> BOOL; pub fn IsCharLowerW( ch: WCHAR, ) -> BOOL; pub fn SetFocus( hWnd: HWND, ) -> HWND; pub fn GetActiveWindow() -> HWND; pub fn GetFocus() -> HWND; pub fn GetKBCodePage() -> UINT; pub fn GetKeyState( nVirtKey: c_int, ) -> SHORT; pub fn GetAsyncKeyState( vKey: c_int, ) -> SHORT; pub fn GetKeyboardState( lpKeyState: PBYTE, ) -> BOOL; pub fn SetKeyboardState( lpKeyState: LPBYTE, ) -> BOOL; pub fn GetKeyNameTextA( lparam: LONG, lpString: LPSTR, cchSize: c_int, ) -> c_int; pub fn GetKeyNameTextW( lParam: LONG, lpString: LPWSTR, cchSize: c_int, ) -> c_int; pub fn GetKeyboardType( nTypeFlag: c_int, ) -> c_int; pub fn ToAscii( uVirtKey: UINT, uScanCode: UINT, lpKeyState: *const BYTE, lpChar: LPWORD, uFlags: UINT, ) -> c_int; pub fn ToAsciiEx( 
uVirtKey: UINT, uScanCode: UINT, lpKeyState: *const BYTE, lpChar: LPWORD, uFlags: UINT, dwhkl: HKL, ) -> c_int; pub fn ToUnicode( wVirtKey: UINT, wScanCode: UINT, lpKeyState: *const BYTE, lwszBuff: LPWSTR, cchBuff: c_int, wFlags: UINT, ) -> c_int; pub fn OemKeyScan( wOemChar: WORD, ) -> DWORD; pub fn VkKeyScanA( ch: CHAR, ) -> SHORT; pub fn VkKeyScanW( ch: WCHAR, ) -> SHORT; pub fn VkKeyScanExA( ch: CHAR, dwhkl: HKL, ) -> SHORT; pub fn VkKeyScanExW( ch: WCHAR, dwhkl: HKL, ) -> SHORT; } pub const KEYEVENTF_EXTENDEDKEY: DWORD = 0x0001; pub const KEYEVENTF_KEYUP: DWORD = 0x0002; pub const KEYEVENTF_UNICODE: DWORD = 0x0004; pub const KEYEVENTF_SCANCODE: DWORD = 0x0008; extern "system" { pub fn keybd_event( bVk: BYTE, bScan: BYTE, dwFlags: DWORD, dwExtraInfo: ULONG_PTR, ); } pub const MOUSEEVENTF_MOVE: DWORD = 0x0001; pub const MOUSEEVENTF_LEFTDOWN: DWORD = 0x0002; pub const MOUSEEVENTF_LEFTUP: DWORD = 0x0004; pub const MOUSEEVENTF_RIGHTDOWN: DWORD = 0x0008; pub const MOUSEEVENTF_RIGHTUP: DWORD = 0x0010; pub const MOUSEEVENTF_MIDDLEDOWN: DWORD = 0x0020; pub const MOUSEEVENTF_MIDDLEUP: DWORD = 0x0040; pub const MOUSEEVENTF_XDOWN: DWORD = 0x0080; pub const MOUSEEVENTF_XUP: DWORD = 0x0100; pub const MOUSEEVENTF_WHEEL: DWORD = 0x0800; pub const MOUSEEVENTF_HWHEEL: DWORD = 0x01000; pub const MOUSEEVENTF_MOVE_NOCOALESCE: DWORD = 0x2000; pub const MOUSEEVENTF_VIRTUALDESK: DWORD = 0x4000; pub const MOUSEEVENTF_ABSOLUTE: DWORD = 0x8000; extern "system" { pub fn mouse_event( dwFlags: DWORD, dx: DWORD, dy: DWORD, dwData: DWORD, dwExtraInfo: ULONG_PTR, ); } STRUCT!{struct MOUSEINPUT { dx: LONG, dy: LONG, mouseData: DWORD, dwFlags: DWORD, time: DWORD, dwExtraInfo: ULONG_PTR, }} pub type PMOUSEINPUT = *mut MOUSEINPUT; pub type LPMOUSEINPUT = *mut MOUSEINPUT; STRUCT!{struct KEYBDINPUT { wVk: WORD, wScan: WORD, dwFlags: DWORD, time: DWORD, dwExtraInfo: ULONG_PTR, }} pub type PKEYBDINPUT = *mut KEYBDINPUT; pub type LPKEYBDINPUT = *mut KEYBDINPUT; STRUCT!{struct HARDWAREINPUT { uMsg: 
DWORD, wParamL: WORD, wParamH: WORD, }} pub type PHARDWAREINPUT = *mut HARDWAREINPUT; pub type LPHARDWAREINPUT= *mut HARDWAREINPUT; pub const INPUT_MOUSE: DWORD = 0; pub const INPUT_KEYBOARD: DWORD = 1; pub const INPUT_HARDWARE: DWORD = 2; #[cfg(target_arch = "x86")] STRUCT!{struct INPUT { type_: DWORD, u: [u32; 6], }} #[cfg(target_arch = "x86_64")] STRUCT!{struct INPUT { type_: DWORD, u: [u64; 4], }} UNION!{INPUT, u, mi, mi_mut, MOUSEINPUT} UNION!{INPUT, u, ki, ki_mut, KEYBDINPUT} UNION!{INPUT, u, hi, hi_mut, HARDWAREINPUT} pub type PINPUT = *mut INPUT; pub type LPINPUT = *mut INPUT; extern "system" { pub fn SendInput( cInputs: UINT, pInputs: LPINPUT, cbSize: c_int, ) -> UINT; } DECLARE_HANDLE!(HTOUCHINPUT, HTOUCHINPUT__); STRUCT!{struct TOUCHINPUT { x: LONG, y: LONG, hSource: HANDLE, dwID: DWORD, dwFlags: DWORD, dwMask: DWORD, dwTime: DWORD, dwExtraInfo: ULONG_PTR, cxContact: DWORD, cyContact: DWORD, }} pub type PTOUCHINPUT = *mut TOUCHINPUT; pub type PCTOUCHINPUT = *const TOUCHINPUT; // TOUCH_COORD_TO_PIXEL pub const TOUCHEVENTF_MOVE: DWORD = 0x0001; pub const TOUCHEVENTF_DOWN: DWORD = 0x0002; pub const TOUCHEVENTF_UP: DWORD = 0x0004; pub const TOUCHEVENTF_INRANGE: DWORD = 0x0008; pub const TOUCHEVENTF_PRIMARY: DWORD = 0x0010; pub const TOUCHEVENTF_NOCOALESCE: DWORD = 0x0020; pub const TOUCHEVENTF_PEN: DWORD = 0x0040; pub const TOUCHEVENTF_PALM: DWORD = 0x0080; pub const TOUCHINPUTMASKF_TIMEFROMSYSTEM: DWORD = 0x0001; pub const TOUCHINPUTMASKF_EXTRAINFO: DWORD = 0x0002; pub const TOUCHINPUTMASKF_CONTACTAREA: DWORD = 0x0004; extern "system" { pub fn GetTouchInputInfo( hTouchInput: HTOUCHINPUT, cInputs: c_uint, pInputs: PTOUCHINPUT, cbSize: c_int, ) -> BOOL; pub fn CloseTouchInputHandle( hTouchInput: HTOUCHINPUT, ) -> BOOL; } pub const TWF_FINETOUCH: DWORD = 0x00000001; pub const TWF_WANTPALM: DWORD = 0x00000002; extern "system" { pub fn RegisterTouchWindow( hWnd: HWND, flags: ULONG, ) -> BOOL; pub fn UnregisterTouchWindow( hwnd: HWND, ) -> BOOL; pub fn 
IsTouchWindow( hwnd: HWND, pulFlags: PULONG, ) -> BOOL; } ENUM!{enum POINTER_INPUT_TYPE { PT_POINTER = 0x00000001, PT_TOUCH = 0x00000002, PT_PEN = 0x00000003, PT_MOUSE = 0x00000004, PT_TOUCHPAD = 0x00000005, }} ENUM!{enum POINTER_FLAGS { POINTER_FLAG_NONE = 0x00000000, POINTER_FLAG_NEW = 0x00000001, POINTER_FLAG_INRANGE = 0x00000002, POINTER_FLAG_INCONTACT = 0x00000004, POINTER_FLAG_FIRSTBUTTON = 0x00000010, POINTER_FLAG_SECONDBUTTON = 0x00000020, POINTER_FLAG_THIRDBUTTON = 0x00000040, POINTER_FLAG_FOURTHBUTTON = 0x00000080, POINTER_FLAG_FIFTHBUTTON = 0x00000100, POINTER_FLAG_PRIMARY = 0x00002000, POINTER_FLAG_CONFIDENCE = 0x00004000, POINTER_FLAG_CANCELED = 0x00008000, POINTER_FLAG_DOWN = 0x00010000, POINTER_FLAG_UPDATE = 0x00020000, POINTER_FLAG_UP = 0x00040000, POINTER_FLAG_WHEEL = 0x00080000, POINTER_FLAG_HWHEEL = 0x00100000, POINTER_FLAG_CAPTURECHANGED = 0x00200000, POINTER_FLAG_HASTRANSFORM = 0x00400000, }} pub const POINTER_MOD_SHIFT: DWORD = 0x0004; pub const POINTER_MOD_CTRL: DWORD = 0x0008; ENUM!{enum POINTER_BUTTON_CHANGE_TYPE { POINTER_CHANGE_NONE, POINTER_CHANGE_FIRSTBUTTON_DOWN, POINTER_CHANGE_FIRSTBUTTON_UP, POINTER_CHANGE_SECONDBUTTON_DOWN, POINTER_CHANGE_SECONDBUTTON_UP, POINTER_CHANGE_THIRDBUTTON_DOWN, POINTER_CHANGE_THIRDBUTTON_UP, POINTER_CHANGE_FOURTHBUTTON_DOWN, POINTER_CHANGE_FOURTHBUTTON_UP, POINTER_CHANGE_FIFTHBUTTON_DOWN, POINTER_CHANGE_FIFTHBUTTON_UP, }} STRUCT!{struct POINTER_INFO { pointerType: POINTER_INPUT_TYPE, pointerId: UINT32, frameId: UINT32, pointerFlags: POINTER_FLAGS, sourceDevice: HANDLE, hwndTarget: HWND, ptPixelLocation: POINT, ptHimetricLocation: POINT, ptPixelLocationRaw: POINT, ptHimetricLocationRaw: POINT, dwTime: DWORD, historyCount: UINT32, InputData: INT32, dwKeyStates: DWORD, PerformanceCount: UINT64, ButtonChangeType: POINTER_BUTTON_CHANGE_TYPE, }} ENUM!{enum TOUCH_FLAGS { TOUCH_FLAG_NONE = 0x00000000, }} ENUM!{enum TOUCH_MASK { TOUCH_MASK_NONE = 0x00000000, TOUCH_MASK_CONTACTAREA = 0x00000001, 
TOUCH_MASK_ORIENTATION = 0x00000002, TOUCH_MASK_PRESSURE = 0x00000004, }} STRUCT!{struct POINTER_TOUCH_INFO { pointerInfo: POINTER_INFO, touchFlags: TOUCH_FLAGS, touchMask: TOUCH_MASK, rcContact: RECT, rcContactRaw: RECT, orientation: UINT32, pressure: UINT32, }} ENUM!{enum PEN_FLAGS { PEN_FLAG_NONE = 0x00000000, PEN_FLAG_BARREL = 0x00000001, PEN_FLAG_INVERTED = 0x00000002, PEN_FLAG_ERASER = 0x00000004, }} ENUM!{enum PEN_MASK { PEN_MASK_NONE = 0x00000000, PEN_MASK_PRESSURE = 0x00000001, PEN_MASK_ROTATION = 0x00000002, PEN_MASK_TILT_X = 0x00000004, PEN_MASK_TILT_Y = 0x00000008, }} STRUCT!{struct POINTER_PEN_INFO { pointerInfo: POINTER_INFO, penFlags: PEN_FLAGS, penMask: PEN_MASK, pressure: UINT32, rotation: UINT32, tiltX: INT32, tiltY: INT32, }} pub const POINTER_MESSAGE_FLAG_NEW: DWORD = 0x00000001; pub const POINTER_MESSAGE_FLAG_INRANGE: DWORD = 0x00000002; pub const POINTER_MESSAGE_FLAG_INCONTACT: DWORD = 0x00000004; pub const POINTER_MESSAGE_FLAG_FIRSTBUTTON: DWORD = 0x00000010; pub const POINTER_MESSAGE_FLAG_SECONDBUTTON: DWORD = 0x00000020; pub const POINTER_MESSAGE_FLAG_THIRDBUTTON: DWORD = 0x00000040; pub const POINTER_MESSAGE_FLAG_FOURTHBUTTON: DWORD = 0x00000080; pub const POINTER_MESSAGE_FLAG_FIFTHBUTTON: DWORD = 0x00000100; pub const POINTER_MESSAGE_FLAG_PRIMARY: DWORD = 0x00002000; pub const POINTER_MESSAGE_FLAG_CONFIDENCE: DWORD = 0x00004000; pub const POINTER_MESSAGE_FLAG_CANCELED: DWORD = 0x00008000; pub const PA_ACTIVATE: UINT = MA_ACTIVATE; pub const PA_NOACTIVATE: UINT = MA_NOACTIVATE; pub const MAX_TOUCH_COUNT: UINT32 = 256; pub const TOUCH_FEEDBACK_DEFAULT: DWORD = 0x1; pub const TOUCH_FEEDBACK_INDIRECT: DWORD = 0x2; pub const TOUCH_FEEDBACK_NONE: DWORD = 0x3; extern "system" { pub fn InitializeTouchInjection( maxCount: UINT32, dwMode: DWORD, ) -> BOOL; pub fn InjectTouchInput( count: UINT32, contacts: *const POINTER_TOUCH_INFO, ) -> BOOL; } STRUCT!{struct USAGE_PROPERTIES { level: USHORT, page: USHORT, usage: USHORT, logicalMinimum: INT32, 
logicalMaximum: INT32, unit: USHORT, exponent: USHORT, count: BYTE, physicalMinimum: INT32, physicalMaximum: INT32, }} pub type PUSAGE_PROPERTIES = *mut USAGE_PROPERTIES; UNION2!{union POINTER_TYPE_INFO_u { [u64; 17] [u64; 18], touchInfo touchInfo_mut: POINTER_TOUCH_INFO, penInfo penInfo_mut: POINTER_PEN_INFO, }} STRUCT!{struct POINTER_TYPE_INFO { type_: POINTER_INPUT_TYPE, u: POINTER_TYPE_INFO_u, }} pub type PPOINTER_TYPE_INFO = *mut POINTER_TYPE_INFO; STRUCT!{struct INPUT_INJECTION_VALUE { page: USHORT, usage: USHORT, value: INT32, index: USHORT, }} pub type PINPUT_INJECTION_VALUE = *mut INPUT_INJECTION_VALUE; extern "system" { pub fn GetPointerType( pointerId: UINT32, pointerType: *mut POINTER_INPUT_TYPE, ) -> BOOL; pub fn GetPointerCursorId( pointerId: UINT32, cursorId: *mut UINT32, ) -> BOOL; pub fn GetPointerInfo( pointerId: UINT32, pointerInfo: *mut POINTER_INFO, ) -> BOOL; pub fn GetPointerInfoHistory( pointerId: UINT32, entriesCount: *mut UINT32, pointerInfo: *mut POINTER_INFO, ) -> BOOL; pub fn GetPointerFrameInfo( pointerId: UINT32, pointerCount: *mut UINT32, pointerInfo: *mut POINTER_INFO, ) -> BOOL; pub fn GetPointerFrameInfoHistory( pointerId: UINT32, entriesCount: *mut UINT32, pointerCount: *mut UINT32, pointerInfo: *mut POINTER_INFO, ) -> BOOL; pub fn GetPointerTouchInfo( pointerId: UINT32, touchInfo: *mut POINTER_TOUCH_INFO, ) -> BOOL; pub fn GetPointerTouchInfoHistory( pointerId: UINT32, entriesCount: *mut UINT32, touchInfo: *mut POINTER_TOUCH_INFO, ) -> BOOL; pub fn GetPointerFrameTouchInfo( pointerId: UINT32, pointerCount: *mut UINT32, touchInfo: *mut POINTER_TOUCH_INFO, ) -> BOOL; pub fn GetPointerFrameTouchInfoHistory( pointerId: UINT32, entriesCount: *mut UINT32, pointerCount: *mut UINT32, touchInfo: *mut POINTER_TOUCH_INFO, ) -> BOOL; pub fn GetPointerPenInfo( pointerId: UINT32, penInfo: *mut POINTER_PEN_INFO, ) -> BOOL; pub fn GetPointerPenInfoHistory( pointerId: UINT32, entriesCount: *mut UINT32, penInfo: *mut POINTER_PEN_INFO, ) -> BOOL; 
pub fn GetPointerFramePenInfo( pointerId: UINT32, pointerCount: *mut UINT32, penInfo: *mut POINTER_PEN_INFO, ) -> BOOL; pub fn GetPointerFramePenInfoHistory( pointerId: UINT32, entriesCount: *mut UINT32, pointerCount: *mut UINT32, penInfo: *mut POINTER_PEN_INFO, ) -> BOOL; pub fn SkipPointerFrameMessages( pointerId: UINT32, ) -> BOOL; pub fn RegisterPointerInputTarget( hwnd: HWND, pointerType: POINTER_INPUT_TYPE, ) -> BOOL; pub fn UnregisterPointerInputTarget( hwnd: HWND, pointerType: POINTER_INPUT_TYPE, ) -> BOOL; pub fn RegisterPointerInputTargetEx( hwnd: HWND, pointerType: POINTER_INPUT_TYPE, fObserve: BOOL, ) -> BOOL; pub fn UnregisterPointerInputTargetEx( hwnd: HWND, pointerType: POINTER_INPUT_TYPE, ) -> BOOL; pub fn EnableMouseInPointer( fEnable: BOOL, ) -> BOOL; pub fn IsMouseInPointerEnabled() -> BOOL; } pub const TOUCH_HIT_TESTING_DEFAULT: ULONG = 0x0; pub const TOUCH_HIT_TESTING_CLIENT: ULONG = 0x1; pub const TOUCH_HIT_TESTING_NONE: ULONG = 0x2; extern "system" { pub fn RegisterTouchHitTestingWindow( hwnd: HWND, value: ULONG, ) -> BOOL; } STRUCT!{struct TOUCH_HIT_TESTING_PROXIMITY_EVALUATION { score: UINT16, adjustedPoint: POINT, }} pub type PTOUCH_HIT_TESTING_PROXIMITY_EVALUATION = *mut TOUCH_HIT_TESTING_PROXIMITY_EVALUATION; STRUCT!{struct TOUCH_HIT_TESTING_INPUT { pointerId: UINT32, point: POINT, boundingBox: RECT, nonOccludedBoundingBox: RECT, orientation: UINT32, }} pub type PTOUCH_HIT_TESTING_INPUT = *mut TOUCH_HIT_TESTING_INPUT; pub const TOUCH_HIT_TESTING_PROXIMITY_CLOSEST: UINT16 = 0x0; pub const TOUCH_HIT_TESTING_PROXIMITY_FARTHEST: UINT16 = 0xFFF; extern "system" { pub fn EvaluateProximityToRect( controlBoundingBox: *const RECT, pHitTestingInput: *const TOUCH_HIT_TESTING_INPUT, pProximityEval: *mut TOUCH_HIT_TESTING_PROXIMITY_EVALUATION, ) -> BOOL; pub fn EvaluateProximityToPolygon( numVertices: UINT32, controlPolygon: *const POINT, pHitTestingInput: *const TOUCH_HIT_TESTING_INPUT, pProximityEval: *mut TOUCH_HIT_TESTING_PROXIMITY_EVALUATION, ) 
-> BOOL; pub fn PackTouchHitTestingProximityEvaluation( pHitTestingInput: *const TOUCH_HIT_TESTING_INPUT, pProximityEval: *const TOUCH_HIT_TESTING_PROXIMITY_EVALUATION, ) -> LRESULT; } ENUM!{enum FEEDBACK_TYPE { FEEDBACK_TOUCH_CONTACTVISUALIZATION = 1, FEEDBACK_PEN_BARRELVISUALIZATION = 2, FEEDBACK_PEN_TAP = 3, FEEDBACK_PEN_DOUBLETAP = 4, FEEDBACK_PEN_PRESSANDHOLD = 5, FEEDBACK_PEN_RIGHTTAP = 6, FEEDBACK_TOUCH_TAP = 7, FEEDBACK_TOUCH_DOUBLETAP = 8, FEEDBACK_TOUCH_PRESSANDHOLD = 9, FEEDBACK_TOUCH_RIGHTTAP = 10, FEEDBACK_GESTURE_PRESSANDTAP = 11, FEEDBACK_MAX = 0xFFFFFFFF, }} pub const GWFS_INCLUDE_ANCESTORS: DWORD = 0x00000001; extern "system" { pub fn GetWindowFeedbackSetting( hwnd: HWND, feedback: FEEDBACK_TYPE, dwFlags: DWORD, pSize: *mut UINT32, config: *mut VOID, ) -> BOOL; pub fn SetWindowFeedbackSetting( hwnd: HWND, feedback: FEEDBACK_TYPE, dwFlags: DWORD, size: UINT32, configuration: *const VOID, ) -> BOOL; } STRUCT!{struct INPUT_TRANSFORM { m: [[f32; 4]; 4], }} extern "system" { pub fn GetPointerInputTransform( pointerId: UINT32, historyCount: UINT32, inputTransform: *mut INPUT_TRANSFORM, ) -> BOOL; } STRUCT!{struct LASTINPUTINFO { cbSize: UINT, dwTime: DWORD, }} pub type PLASTINPUTINFO = *mut LASTINPUTINFO; extern "system" { pub fn GetLastInputInfo( plii: PLASTINPUTINFO, ) -> BOOL; pub fn MapVirtualKeyA( nCode: UINT, uMapType: UINT, ) -> UINT; pub fn MapVirtualKeyW( nCode: UINT, uMapType: UINT, ) -> UINT; pub fn MapVirtualKeyExA( nCode: UINT, uMapType: UINT, dwhkl: HKL, ) -> UINT; pub fn MapVirtualKeyExW( nCode: UINT, uMapType: UINT, dwhkl: HKL, ) -> UINT; } pub const MAPVK_VK_TO_VSC: UINT = 0; pub const MAPVK_VSC_TO_VK: UINT = 1; pub const MAPVK_VK_TO_CHAR: UINT = 2; pub const MAPVK_VSC_TO_VK_EX: UINT = 3; pub const MAPVK_VK_TO_VSC_EX: UINT = 4; extern "system" { pub fn GetInputState() -> BOOL; pub fn GetQueueStatus( flags: UINT, ) -> DWORD; pub fn GetCapture() -> HWND; pub fn SetCapture( hWnd: HWND, ) -> HWND; pub fn ReleaseCapture() -> BOOL; pub fn 
MsgWaitForMultipleObjects(
        nCount: DWORD,
        pHandles: *const HANDLE,
        fWaitAll: BOOL,
        dwMilliseconds: DWORD,
        dwWakeMask: DWORD,
    ) -> DWORD;
    pub fn MsgWaitForMultipleObjectsEx(
        nCount: DWORD,
        pHandles: *const HANDLE,
        dwMilliseconds: DWORD,
        dwWakeMask: DWORD,
        dwFlags: DWORD,
    ) -> DWORD;
}
// MWMO_*: dwFlags values for MsgWaitForMultipleObjectsEx.
pub const MWMO_WAITALL: UINT = 0x0001;
pub const MWMO_ALERTABLE: UINT = 0x0002;
pub const MWMO_INPUTAVAILABLE: UINT = 0x0004;
// QS_*: queue-status bits, used both as GetQueueStatus results and as wake
// masks for MsgWaitForMultipleObjects(Ex).
pub const QS_KEY: UINT = 0x0001;
pub const QS_MOUSEMOVE: UINT = 0x0002;
pub const QS_MOUSEBUTTON: UINT = 0x0004;
pub const QS_POSTMESSAGE: UINT = 0x0008;
pub const QS_TIMER: UINT = 0x0010;
pub const QS_PAINT: UINT = 0x0020;
pub const QS_SENDMESSAGE: UINT = 0x0040;
pub const QS_HOTKEY: UINT = 0x0080;
pub const QS_ALLPOSTMESSAGE: UINT = 0x0100;
pub const QS_RAWINPUT: UINT = 0x0400;
pub const QS_TOUCH: UINT = 0x0800;
pub const QS_POINTER: UINT = 0x1000;
// Composite masks, combined exactly as in winuser.h.
pub const QS_MOUSE: UINT = QS_MOUSEMOVE | QS_MOUSEBUTTON;
pub const QS_INPUT: UINT = QS_MOUSE | QS_KEY | QS_RAWINPUT | QS_TOUCH | QS_POINTER;
pub const QS_ALLEVENTS: UINT = QS_INPUT | QS_POSTMESSAGE | QS_TIMER | QS_PAINT | QS_HOTKEY;
pub const QS_ALLINPUT: UINT = QS_INPUT | QS_POSTMESSAGE | QS_TIMER | QS_PAINT | QS_HOTKEY | QS_SENDMESSAGE;
// Bounds for SetTimer's uElapse parameter.
pub const USER_TIMER_MAXIMUM: UINT = 0x7FFFFFFF;
pub const USER_TIMER_MINIMUM: UINT = 0x0000000A;
extern "system" {
    pub fn SetTimer(
        hWnd: HWND,
        nIDEvent: UINT_PTR,
        uElapse: UINT,
        lpTimerFunc: TIMERPROC,
    ) -> UINT_PTR;
}
// TIMERV_*: uToleranceDelay values for SetCoalescableTimer.
pub const TIMERV_DEFAULT_COALESCING: ULONG = 0;
pub const TIMERV_NO_COALESCING: ULONG = 0xFFFFFFFF;
pub const TIMERV_COALESCING_MIN: ULONG = 1;
pub const TIMERV_COALESCING_MAX: ULONG = 0x7FFFFFF5;
extern "system" {
    pub fn SetCoalescableTimer(
        hWnd: HWND,
        nIDEvent: UINT_PTR,
        uElapse: UINT,
        lpTimerFunc: TIMERPROC,
        uToleranceDelay: ULONG,
    ) -> UINT_PTR;
    pub fn KillTimer(
        hWnd: HWND,
        uIDEvent: UINT_PTR,
    ) -> BOOL;
    pub fn IsWindowUnicode(
        hWnd: HWND,
    ) -> BOOL;
    pub fn EnableWindow(
        hWnd: HWND,
        bEnable: BOOL,
    ) -> BOOL;
    pub fn IsWindowEnabled(
        hWnd: HWND,
    ) -> BOOL;
    pub
fn LoadAcceleratorsA( hInstance: HINSTANCE, lpTableName: LPCSTR, ) -> HACCEL; pub fn LoadAcceleratorsW( hInstance: HINSTANCE, lpTableName: LPCWSTR, ) -> HACCEL; pub fn CreateAcceleratorTableA( paccel: LPACCEL, cAccel: c_int, ) -> HACCEL; pub fn CreateAcceleratorTableW( paccel: LPACCEL, cAccel: c_int, ) -> HACCEL; pub fn DestroyAcceleratorTable( hAccel: HACCEL, ) -> BOOL; pub fn CopyAcceleratorTableA( hAccelSrc: HACCEL, lpAccelDst: LPACCEL, cAccelEntries: c_int, ) -> c_int; pub fn CopyAcceleratorTableW( hAccelSrc: HACCEL, lpAccelDst: LPACCEL, cAccelEntries: c_int, ) -> c_int; pub fn TranslateAcceleratorA( hWnd: HWND, hAccTable: HACCEL, lpMsg: LPMSG, ) -> c_int; pub fn TranslateAcceleratorW( hWnd: HWND, hAccTable: HACCEL, lpMsg: LPMSG, ) -> c_int; } pub const SM_CXSCREEN: c_int = 0; pub const SM_CYSCREEN: c_int = 1; pub const SM_CXVSCROLL: c_int = 2; pub const SM_CYHSCROLL: c_int = 3; pub const SM_CYCAPTION: c_int = 4; pub const SM_CXBORDER: c_int = 5; pub const SM_CYBORDER: c_int = 6; pub const SM_CXDLGFRAME: c_int = 7; pub const SM_CYDLGFRAME: c_int = 8; pub const SM_CYVTHUMB: c_int = 9; pub const SM_CXHTHUMB: c_int = 10; pub const SM_CXICON: c_int = 11; pub const SM_CYICON: c_int = 12; pub const SM_CXCURSOR: c_int = 13; pub const SM_CYCURSOR: c_int = 14; pub const SM_CYMENU: c_int = 15; pub const SM_CXFULLSCREEN: c_int = 16; pub const SM_CYFULLSCREEN: c_int = 17; pub const SM_CYKANJIWINDOW: c_int = 18; pub const SM_MOUSEPRESENT: c_int = 19; pub const SM_CYVSCROLL: c_int = 20; pub const SM_CXHSCROLL: c_int = 21; pub const SM_DEBUG: c_int = 22; pub const SM_SWAPBUTTON: c_int = 23; pub const SM_RESERVED1: c_int = 24; pub const SM_RESERVED2: c_int = 25; pub const SM_RESERVED3: c_int = 26; pub const SM_RESERVED4: c_int = 27; pub const SM_CXMIN: c_int = 28; pub const SM_CYMIN: c_int = 29; pub const SM_CXSIZE: c_int = 30; pub const SM_CYSIZE: c_int = 31; pub const SM_CXFRAME: c_int = 32; pub const SM_CYFRAME: c_int = 33; pub const SM_CXMINTRACK: c_int = 34; pub const 
SM_CYMINTRACK: c_int = 35; pub const SM_CXDOUBLECLK: c_int = 36; pub const SM_CYDOUBLECLK: c_int = 37; pub const SM_CXICONSPACING: c_int = 38; pub const SM_CYICONSPACING: c_int = 39; pub const SM_MENUDROPALIGNMENT: c_int = 40; pub const SM_PENWINDOWS: c_int = 41; pub const SM_DBCSENABLED: c_int = 42; pub const SM_CMOUSEBUTTONS: c_int = 43; pub const SM_CXFIXEDFRAME: c_int = SM_CXDLGFRAME; pub const SM_CYFIXEDFRAME: c_int = SM_CYDLGFRAME; pub const SM_CXSIZEFRAME: c_int = SM_CXFRAME; pub const SM_CYSIZEFRAME: c_int = SM_CYFRAME; pub const SM_SECURE: c_int = 44; pub const SM_CXEDGE: c_int = 45; pub const SM_CYEDGE: c_int = 46; pub const SM_CXMINSPACING: c_int = 47; pub const SM_CYMINSPACING: c_int = 48; pub const SM_CXSMICON: c_int = 49; pub const SM_CYSMICON: c_int = 50; pub const SM_CYSMCAPTION: c_int = 51; pub const SM_CXSMSIZE: c_int = 52; pub const SM_CYSMSIZE: c_int = 53; pub const SM_CXMENUSIZE: c_int = 54; pub const SM_CYMENUSIZE: c_int = 55; pub const SM_ARRANGE: c_int = 56; pub const SM_CXMINIMIZED: c_int = 57; pub const SM_CYMINIMIZED: c_int = 58; pub const SM_CXMAXTRACK: c_int = 59; pub const SM_CYMAXTRACK: c_int = 60; pub const SM_CXMAXIMIZED: c_int = 61; pub const SM_CYMAXIMIZED: c_int = 62; pub const SM_NETWORK: c_int = 63; pub const SM_CLEANBOOT: c_int = 67; pub const SM_CXDRAG: c_int = 68; pub const SM_CYDRAG: c_int = 69; pub const SM_SHOWSOUNDS: c_int = 70; pub const SM_CXMENUCHECK: c_int = 71; pub const SM_CYMENUCHECK: c_int = 72; pub const SM_SLOWMACHINE: c_int = 73; pub const SM_MIDEASTENABLED: c_int = 74; pub const SM_MOUSEWHEELPRESENT: c_int = 75; pub const SM_XVIRTUALSCREEN: c_int = 76; pub const SM_YVIRTUALSCREEN: c_int = 77; pub const SM_CXVIRTUALSCREEN: c_int = 78; pub const SM_CYVIRTUALSCREEN: c_int = 79; pub const SM_CMONITORS: c_int = 80; pub const SM_SAMEDISPLAYFORMAT: c_int = 81; pub const SM_IMMENABLED: c_int = 82; pub const SM_CXFOCUSBORDER: c_int = 83; pub const SM_CYFOCUSBORDER: c_int = 84; pub const SM_TABLETPC: c_int = 86; pub 
const SM_MEDIACENTER: c_int = 87; pub const SM_STARTER: c_int = 88; pub const SM_SERVERR2: c_int = 89; pub const SM_MOUSEHORIZONTALWHEELPRESENT: c_int = 91; pub const SM_CXPADDEDBORDER: c_int = 92; pub const SM_DIGITIZER: c_int = 94; pub const SM_MAXIMUMTOUCHES: c_int = 95; pub const SM_CMETRICS: c_int = 97; pub const SM_REMOTESESSION: c_int = 0x1000; pub const SM_SHUTTINGDOWN: c_int = 0x2000; pub const SM_REMOTECONTROL: c_int = 0x2001; pub const SM_CARETBLINKINGENABLED: c_int = 0x2002; pub const SM_CONVERTIBLESLATEMODE: c_int = 0x2003; pub const SM_SYSTEMDOCKED: c_int = 0x2004; extern "system" { pub fn GetSystemMetrics( nIndex: c_int, ) -> c_int; pub fn GetSystemMetricsForDpi( nIndex: c_int, dpi: UINT, ) -> c_int; pub fn LoadMenuA( hInstance: HINSTANCE, lpMenuName: LPCSTR, ) -> HMENU; pub fn LoadMenuW( hInstance: HINSTANCE, lpMenuName: LPCWSTR, ) -> HMENU; pub fn LoadMenuIndirectA( lpMenuTemplate: *const MENUTEMPLATEA, ) -> HMENU; pub fn LoadMenuIndirectW( lpMenuTemplate: *const MENUTEMPLATEW, ) -> HMENU; pub fn GetMenu( hWnd: HWND, ) -> HMENU; pub fn SetMenu( hWnd: HWND, hMenu: HMENU, ) -> BOOL; pub fn ChangeMenuA( hMenu: HMENU, cmd: UINT, lpszNewItem: LPCSTR, cmdInsert: UINT, flags: UINT, ) -> BOOL; pub fn ChangeMenuW( hMenu: HMENU, cmd: UINT, lpszNewItem: LPCWSTR, cmdInsert: UINT, flags: UINT, ) -> BOOL; pub fn HiliteMenuItem( hWnd: HWND, hMenu: HMENU, uIDHiliteItem: UINT, uHilite: UINT, ) -> BOOL; pub fn GetMenuStringA( hMenu: HMENU, uIDItem: UINT, lpString: LPSTR, cchMax: c_int, flags: UINT, ) -> c_int; pub fn GetMenuStringW( hMenu: HMENU, uIDItem: UINT, lpString: LPWSTR, cchMax: c_int, flags: UINT, ) -> c_int; pub fn GetMenuState( hMenu: HMENU, uId: UINT, uFlags: UINT, ) -> UINT; pub fn DrawMenuBar( hwnd: HWND, ) -> BOOL; } pub const PMB_ACTIVE: DWORD = 0x00000001; extern "system" { pub fn GetSystemMenu( hWnd: HWND, bRevert: BOOL, ) -> HMENU; pub fn CreateMenu() -> HMENU; pub fn CreatePopupMenu() ->HMENU; pub fn DestroyMenu( hMenu: HMENU, ) -> BOOL; pub fn 
CheckMenuItem( hMenu: HMENU, uIDCheckItem: UINT, uCheck: UINT, ) -> DWORD; pub fn EnableMenuItem( hMenu: HMENU, uIDEnableItem: UINT, uEnable: UINT, ) -> BOOL; pub fn GetSubMenu( hMenu: HMENU, nPos: c_int, ) -> HMENU; pub fn GetMenuItemID( hMenu: HMENU, nPos: c_int, ) -> UINT; pub fn GetMenuItemCount( hMenu: HMENU, ) -> c_int; pub fn InsertMenuA( hMenu: HMENU, uPosition: UINT, uFlags: UINT, uIDNewItem: UINT_PTR, lpNewItem: LPCSTR, ) -> BOOL; pub fn InsertMenuW( hMenu: HMENU, uPosition: UINT, uFlags: UINT, uIDNewItem: UINT_PTR, lpNewItem: LPCWSTR, ) -> BOOL; pub fn AppendMenuA( hMenu: HMENU, uFlags: UINT, uIDNewItem: UINT_PTR, lpNewItem: LPCSTR, ) -> BOOL; pub fn AppendMenuW( hMenu: HMENU, uFlags: UINT, uIDNewItem: UINT_PTR, lpNewItem: LPCWSTR, ) -> BOOL; pub fn ModifyMenuA( hMnu: HMENU, uPosition: UINT, uFlags: UINT, uIDNewItem: UINT_PTR, lpNewItem: LPCSTR, ) -> BOOL; pub fn ModifyMenuW( hMnu: HMENU, uPosition: UINT, uFlags: UINT, uIDNewItem: UINT_PTR, lpNewItem: LPCWSTR, ) -> BOOL; pub fn RemoveMenu( hMenu: HMENU, uPosition: UINT, uFlags: UINT, ) -> BOOL; pub fn DeleteMenu( hMenu: HMENU, uPosition: UINT, uFlags: UINT, ) -> BOOL; pub fn SetMenuItemBitmaps( hMenu: HMENU, uPosition: UINT, uFlags: UINT, hBitmapUnchecked: HBITMAP, hBitmapChecked: HBITMAP, ) -> BOOL; pub fn GetMenuCheckMarkDimensions() -> LONG; pub fn TrackPopupMenu( hMenu: HMENU, uFlags: UINT, x: c_int, y: c_int, nReserved: c_int, hWnd: HWND, prcRect: *const RECT, ) -> BOOL; } pub const MNC_IGNORE: DWORD = 0; pub const MNC_CLOSE: DWORD = 1; pub const MNC_EXECUTE: DWORD = 2; pub const MNC_SELECT: DWORD = 3; STRUCT!{struct TPMPARAMS { cbSize: UINT, rcExclude: RECT, }} pub type LPTPMPARAMS = *mut TPMPARAMS; extern "system" { pub fn TrackPopupMenuEx( hMenu: HMENU, uFlags: UINT, x: INT, y: INT, hwnd: HWND, lptpm: LPTPMPARAMS, ) -> BOOL; pub fn CalculatePopupWindowPosition( anchorPoint: *const POINT, windowSize: *const SIZE, flags: UINT, excludeRect: *mut RECT, popupWindowPosition: *mut RECT, ) -> BOOL; } pub 
const MNS_NOCHECK: DWORD = 0x80000000;
pub const MNS_MODELESS: DWORD = 0x40000000;
pub const MNS_DRAGDROP: DWORD = 0x20000000;
pub const MNS_AUTODISMISS: DWORD = 0x10000000;
pub const MNS_NOTIFYBYPOS: DWORD = 0x08000000;
pub const MNS_CHECKORBMP: DWORD = 0x04000000;
// MIM_*: MENUINFO.fMask bits selecting which fields Get/SetMenuInfo use.
pub const MIM_MAXHEIGHT: DWORD = 0x00000001;
pub const MIM_BACKGROUND: DWORD = 0x00000002;
pub const MIM_HELPID: DWORD = 0x00000004;
pub const MIM_MENUDATA: DWORD = 0x00000008;
pub const MIM_STYLE: DWORD = 0x00000010;
pub const MIM_APPLYTOSUBMENUS: DWORD = 0x80000000;
STRUCT!{struct MENUINFO {
    cbSize: DWORD,
    fMask: DWORD,
    dwStyle: DWORD,
    cyMax: UINT,
    hbrBack: HBRUSH,
    dwContextHelpID: DWORD,
    dwMenuData: ULONG_PTR,
}}
pub type LPMENUINFO = *mut MENUINFO;
pub type LPCMENUINFO = *const MENUINFO;
extern "system" {
    pub fn GetMenuInfo(
        hMenu: HMENU,
        lpcmi: LPMENUINFO,
    ) -> BOOL;
    pub fn SetMenuInfo(
        hMenu: HMENU,
        lpcmi: LPCMENUINFO,
    ) -> BOOL;
    // Fixed: winuser.h declares `BOOL EndMenu(void);`. The previous binding
    // took four bogus parameters, misdeclaring the stdcall frame — calling it
    // with arguments could corrupt the stack on x86. Any existing caller that
    // passed arguments was already invoking the API incorrectly.
    pub fn EndMenu() -> BOOL;
}
// WM_MENUDRAG return values.
pub const MND_CONTINUE: DWORD = 0;
pub const MND_ENDMENU: DWORD = 1;
STRUCT!{struct MENUGETOBJECTINFO {
    dwFlags: DWORD,
    uPos: UINT,
    hmenu: HMENU,
    riid: PVOID,
    pvObj: PVOID,
}}
pub type PMENUGETOBJECTINFO = *mut MENUGETOBJECTINFO;
pub const MNGOF_TOPGAP: DWORD = 0x00000001;
pub const MNGOF_BOTTOMGAP: DWORD = 0x00000002;
pub const MNGO_NOINTERFACE: DWORD = 0x00000000;
pub const MNGO_NOERROR: DWORD = 0x00000001;
// MIIM_*: MENUITEMINFO.fMask bits.
pub const MIIM_STATE: DWORD = 0x00000001;
pub const MIIM_ID: DWORD = 0x00000002;
pub const MIIM_SUBMENU: DWORD = 0x00000004;
pub const MIIM_CHECKMARKS: DWORD = 0x00000008;
pub const MIIM_TYPE: DWORD = 0x00000010;
pub const MIIM_DATA: DWORD = 0x00000020;
pub const MIIM_STRING: DWORD = 0x00000040;
pub const MIIM_BITMAP: DWORD = 0x00000080;
pub const MIIM_FTYPE: DWORD = 0x00000100;
// Magic HBITMAP sentinels for MENUITEMINFO.hbmpItem.
pub const HBMMENU_CALLBACK: HBITMAP = -1isize as HBITMAP;
pub const HBMMENU_SYSTEM: HBITMAP = 1 as HBITMAP;
pub const HBMMENU_MBAR_RESTORE: HBITMAP = 2 as HBITMAP;
pub const
HBMMENU_MBAR_MINIMIZE: HBITMAP = 3 as HBITMAP; pub const HBMMENU_MBAR_CLOSE: HBITMAP = 5 as HBITMAP; pub const HBMMENU_MBAR_CLOSE_D: HBITMAP = 6 as HBITMAP; pub const HBMMENU_MBAR_MINIMIZE_D: HBITMAP = 7 as HBITMAP; pub const HBMMENU_POPUP_CLOSE: HBITMAP = 8 as HBITMAP; pub const HBMMENU_POPUP_RESTORE: HBITMAP = 9 as HBITMAP; pub const HBMMENU_POPUP_MAXIMIZE: HBITMAP = 10 as HBITMAP; pub const HBMMENU_POPUP_MINIMIZE: HBITMAP = 11 as HBITMAP; STRUCT!{struct MENUITEMINFOA { cbSize: UINT, fMask: UINT, fType: UINT, fState: UINT, wID: UINT, hSubMenu: HMENU, hbmpChecked: HBITMAP, hbmpUnchecked: HBITMAP, dwItemData: ULONG_PTR, dwTypeData: LPSTR, cch: UINT, hbmpItem: HBITMAP, }} pub type LPMENUITEMINFOA = *mut MENUITEMINFOA; pub type LPCMENUITEMINFOA = *const MENUITEMINFOA; STRUCT!{struct MENUITEMINFOW { cbSize: UINT, fMask: UINT, fType: UINT, fState: UINT, wID: UINT, hSubMenu: HMENU, hbmpChecked: HBITMAP, hbmpUnchecked: HBITMAP, dwItemData: ULONG_PTR, dwTypeData: LPWSTR, cch: UINT, hbmpItem: HBITMAP, }} pub type LPMENUITEMINFOW = *mut MENUITEMINFOW; pub type LPCMENUITEMINFOW = *const MENUITEMINFOW; extern "system" { pub fn InsertMenuItemA( hmenu: HMENU, item: UINT, fByPosition: BOOL, lpmi: LPCMENUITEMINFOA, ) -> BOOL; pub fn InsertMenuItemW( hmenu: HMENU, item: UINT, fByPosition: BOOL, lpmi: LPCMENUITEMINFOW, ) -> BOOL; pub fn GetMenuItemInfoA( hMenu: HMENU, uItem: UINT, fByPosition: BOOL, lpmii: LPMENUITEMINFOA ) -> BOOL; pub fn GetMenuItemInfoW( hMenu: HMENU, uItem: UINT, fByPosition: BOOL, lpmii: LPMENUITEMINFOW ) -> BOOL; pub fn SetMenuItemInfoA( hmenu: HMENU, item: UINT, fByPositon: BOOL, lpmii: LPCMENUITEMINFOA, ) -> BOOL; pub fn SetMenuItemInfoW( hmenu: HMENU, item: UINT, fByPositon: BOOL, lpmii: LPCMENUITEMINFOW, ) -> BOOL; } pub const GMDI_USEDISABLED: DWORD = 0x0001; pub const GMDI_GOINTOPOPUPS: DWORD = 0x0002; extern "system" { pub fn GetMenuDefaultItem( hMenu: HMENU, fByPos: UINT, gmdiFlags: UINT, ) -> UINT; pub fn SetMenuDefaultItem( hMenu: HMENU, uItem: 
UINT, fByPos: UINT, ) -> BOOL; pub fn GetMenuItemRect( hWnd: HWND, hMenu: HMENU, uItem: UINT, lprcItem: LPRECT, ) -> BOOL; pub fn MenuItemFromPoint( hWnd: HWND, hMenu: HMENU, ptScreen: POINT, ) -> c_int; } pub const TPM_LEFTBUTTON: UINT = 0x0000; pub const TPM_RIGHTBUTTON: UINT = 0x0002; pub const TPM_LEFTALIGN: UINT = 0x0000; pub const TPM_CENTERALIGN: UINT = 0x0004; pub const TPM_RIGHTALIGN: UINT = 0x0008; pub const TPM_TOPALIGN: UINT = 0x0000; pub const TPM_VCENTERALIGN: UINT = 0x0010; pub const TPM_BOTTOMALIGN: UINT = 0x0020; pub const TPM_HORIZONTAL: UINT = 0x0000; pub const TPM_VERTICAL: UINT = 0x0040; pub const TPM_NONOTIFY: UINT = 0x0080; pub const TPM_RETURNCMD: UINT = 0x0100; pub const TPM_RECURSE: UINT = 0x0001; pub const TPM_HORPOSANIMATION: UINT = 0x0400; pub const TPM_HORNEGANIMATION: UINT = 0x0800; pub const TPM_VERPOSANIMATION: UINT = 0x1000; pub const TPM_VERNEGANIMATION: UINT = 0x2000; pub const TPM_NOANIMATION: UINT = 0x4000; pub const TPM_LAYOUTRTL: UINT = 0x8000; pub const TPM_WORKAREA: UINT = 0x10000; STRUCT!{struct DROPSTRUCT { hwndSource: HWND, hwndSink: HWND, wFmt: DWORD, dwData: ULONG_PTR, ptDrop: POINT, dwControlData: DWORD, }} pub type PDROPSTRUCT = *mut DROPSTRUCT; pub type LPDROPSTRUCT = *mut DROPSTRUCT; pub const DOF_EXECUTABLE: DWORD = 0x8001; pub const DOF_DOCUMENT: DWORD = 0x8002; pub const DOF_DIRECTORY: DWORD = 0x8003; pub const DOF_MULTIPLE: DWORD = 0x8004; pub const DOF_PROGMAN: DWORD = 0x0001; pub const DOF_SHELLDATA: DWORD = 0x0002; pub const DO_DROPFILE: DWORD = 0x454C4946; pub const DO_PRINTFILE: DWORD = 0x544E5250; extern "system" { pub fn DragObject( hwndParent: HWND, hwndFrom: HWND, fmt: UINT, data: ULONG_PTR, hcur: HCURSOR, ) -> DWORD; pub fn DragDetect( hwnd: HWND, pt: POINT, ) -> BOOL; pub fn DrawIcon( hDC: HDC, x: c_int, y: c_int, hIcon: HICON, ) -> BOOL; } pub const DT_TOP: UINT = 0x00000000; pub const DT_LEFT: UINT = 0x00000000; pub const DT_CENTER: UINT = 0x00000001; pub const DT_RIGHT: UINT = 0x00000002; pub 
const DT_VCENTER: UINT = 0x00000004; pub const DT_BOTTOM: UINT = 0x00000008; pub const DT_WORDBREAK: UINT = 0x00000010; pub const DT_SINGLELINE: UINT = 0x00000020; pub const DT_EXPANDTABS: UINT = 0x00000040; pub const DT_TABSTOP: UINT = 0x00000080; pub const DT_NOCLIP: UINT = 0x00000100; pub const DT_EXTERNALLEADING: UINT = 0x00000200; pub const DT_CALCRECT: UINT = 0x00000400; pub const DT_NOPREFIX: UINT = 0x00000800; pub const DT_INTERNAL: UINT = 0x00001000; pub const DT_EDITCONTROL: UINT = 0x00002000; pub const DT_PATH_ELLIPSIS: UINT = 0x00004000; pub const DT_END_ELLIPSIS: UINT = 0x00008000; pub const DT_MODIFYSTRING: UINT = 0x00010000; pub const DT_RTLREADING: UINT = 0x00020000; pub const DT_WORD_ELLIPSIS: UINT = 0x00040000; pub const DT_NOFULLWIDTHCHARBREAK: UINT = 0x00080000; pub const DT_HIDEPREFIX: UINT = 0x00100000; pub const DT_PREFIXONLY: UINT = 0x00200000; STRUCT!{struct DRAWTEXTPARAMS { cbSize: UINT, iTabLength: c_int, iLeftMargin: c_int, iRightMargin: c_int, uiLengthDrawn: UINT, }} pub type LPDRAWTEXTPARAMS = *mut DRAWTEXTPARAMS; extern "system" { pub fn DrawTextA( hdc: HDC, lpchText: LPCSTR, cchText: c_int, lprc: LPRECT, format: UINT, ) -> c_int; pub fn DrawTextW( hdc: HDC, lpchText: LPCWSTR, cchText: c_int, lprc: LPRECT, format: UINT, ) -> c_int; pub fn DrawTextExA( hdc: HDC, lpchText: LPCSTR, cchText: c_int, lprc: LPRECT, format: UINT, lpdtp: LPDRAWTEXTPARAMS, ) -> c_int; pub fn DrawTextExW( hdc: HDC, lpchText: LPCWSTR, cchText: c_int, lprc: LPRECT, format: UINT, lpdtp: LPDRAWTEXTPARAMS, ) -> c_int; pub fn GrayStringA( hDC: HDC, hBrush: HBRUSH, lpOutputFunc: GRAYSTRINGPROC, lpData: LPARAM, nCount: c_int, X: c_int, Y: c_int, nWidth: c_int, nHeight: c_int, ) -> BOOL; pub fn GrayStringW( hDC: HDC, hBrush: HBRUSH, lpOutputFunc: GRAYSTRINGPROC, lpData: LPARAM, nCount: c_int, X: c_int, Y: c_int, nWidth: c_int, nHeight: c_int, ) -> BOOL; } pub const DST_COMPLEX: UINT = 0x0000; pub const DST_TEXT: UINT = 0x0001; pub const DST_PREFIXTEXT: UINT = 0x0002; pub 
const DST_ICON: UINT = 0x0003; pub const DST_BITMAP: UINT = 0x0004; pub const DSS_NORMAL: UINT = 0x0000; pub const DSS_UNION: UINT = 0x0010; pub const DSS_DISABLED: UINT = 0x0020; pub const DSS_MONO: UINT = 0x0080; pub const DSS_HIDEPREFIX: UINT = 0x0200; pub const DSS_PREFIXONLY: UINT = 0x0400; pub const DSS_RIGHT: UINT = 0x8000; extern "system" { pub fn DrawStateA( hdc: HDC, hbrFore: HBRUSH, qfnCallBack: DRAWSTATEPROC, lData: LPARAM, wData: WPARAM, x: c_int, y: c_int, cx: c_int, cy: c_int, uFlags: UINT, ) -> BOOL; pub fn DrawStateW( hdc: HDC, hbrFore: HBRUSH, qfnCallBack: DRAWSTATEPROC, lData: LPARAM, wData: WPARAM, x: c_int, y: c_int, cx: c_int, cy: c_int, uFlags: UINT, ) -> BOOL; pub fn TabbedTextOutA( hdc: HDC, x: c_int, y: c_int, lpString: LPCSTR, chCount: c_int, nTabPositions: c_int, lpnTabStopPositions: *const INT, nTabOrigin: c_int, ) -> LONG; pub fn TabbedTextOutW( hdc: HDC, x: c_int, y: c_int, lpString: LPCWSTR, chCount: c_int, nTabPositions: c_int, lpnTabStopPositions: *const INT, nTabOrigin: c_int, ) -> LONG; pub fn GetTabbedTextExtentA( hdc: HDC, lpString: LPCSTR, chCount: c_int, nTabPositions: c_int, lpnTabStopPositions: *const INT, ) -> DWORD; pub fn GetTabbedTextExtentW( hdc: HDC, lpString: LPCWSTR, chCount: c_int, nTabPositions: c_int, lpnTabStopPositions: *const INT, ) -> DWORD; pub fn UpdateWindow( hWnd: HWND, ) -> BOOL; pub fn SetActiveWindow( hWnd: HWND, ) -> HWND; pub fn GetForegroundWindow() -> HWND; pub fn PaintDesktop( hdc: HDC, ) -> BOOL; pub fn SwitchToThisWindow( hwnd: HWND, fUnknown: BOOL, ); pub fn SetForegroundWindow( hWnd: HWND, ) -> BOOL; pub fn AllowSetForegroundWindow( dwProcessId: DWORD, ) -> BOOL; } pub const ASFW_ANY: DWORD = -1i32 as u32; extern "system" { pub fn LockSetForegroundWindow( uLockCode: UINT, ) -> BOOL; } pub const LSFW_LOCK: UINT = 1; pub const LSFW_UNLOCK: UINT = 2; extern "system" { pub fn WindowFromDC( hDC: HDC, ) -> HWND; pub fn GetDC( hWnd: HWND, ) -> HDC; pub fn GetDCEx( hWnd: HWND, hrgnClip: HRGN, flags: 
DWORD, ) -> HDC; } pub const DCX_WINDOW: DWORD = 0x00000001; pub const DCX_CACHE: DWORD = 0x00000002; pub const DCX_NORESETATTRS: DWORD = 0x00000004; pub const DCX_CLIPCHILDREN: DWORD = 0x00000008; pub const DCX_CLIPSIBLINGS: DWORD = 0x00000010; pub const DCX_PARENTCLIP: DWORD = 0x00000020; pub const DCX_EXCLUDERGN: DWORD = 0x00000040; pub const DCX_INTERSECTRGN: DWORD = 0x00000080; pub const DCX_EXCLUDEUPDATE: DWORD = 0x00000100; pub const DCX_INTERSECTUPDATE: DWORD = 0x00000200; pub const DCX_LOCKWINDOWUPDATE: DWORD = 0x00000400; pub const DCX_VALIDATE: DWORD = 0x00200000; /********* * CUTOFF * *********/ pub const IDOK: c_int = 1; pub const IDCANCEL: c_int = 2; pub const IDABORT: c_int = 3; pub const IDRETRY: c_int = 4; pub const IDIGNORE: c_int = 5; pub const IDYES: c_int = 6; pub const IDNO: c_int = 7; pub const IDCLOSE: c_int = 8; pub const IDHELP: c_int = 9; pub const IDTRYAGAIN: c_int = 10; pub const IDCONTINUE: c_int = 11; pub const IDTIMEOUT: c_int = 32000; // Edit Control Styles // pub const ES_LEFT: DWORD = 0x0000; pub const ES_CENTER: DWORD = 0x0001; pub const ES_RIGHT: DWORD = 0x0002; pub const ES_MULTILINE: DWORD = 0x0004; pub const ES_UPPERCASE: DWORD = 0x0008; pub const ES_LOWERCASE: DWORD = 0x0010; pub const ES_PASSWORD: DWORD = 0x0020; pub const ES_AUTOVSCROLL: DWORD = 0x0040; pub const ES_AUTOHSCROLL: DWORD = 0x0080; pub const ES_NOHIDESEL: DWORD = 0x0100; pub const ES_OEMCONVERT: DWORD = 0x0400; pub const ES_READONLY: DWORD = 0x0800; pub const ES_WANTRETURN: DWORD = 0x1000; pub const ES_NUMBER: DWORD = 0x2000; // Edit Control Notification Codes // pub const EN_SETFOCUS: WORD = 0x0100; pub const EN_KILLFOCUS: WORD = 0x0200; pub const EN_CHANGE: WORD = 0x0300; pub const EN_UPDATE: WORD = 0x0400; pub const EN_ERRSPACE: WORD = 0x0500; pub const EN_MAXTEXT: WORD = 0x0501; pub const EN_HSCROLL: WORD = 0x0601; pub const EN_VSCROLL: WORD = 0x0602; pub const EN_ALIGN_LTR_EC: WORD = 0x0700; pub const EN_ALIGN_RTL_EC: WORD = 0x0701; // Edit control 
EM_SETMARGIN parameters pub const EC_LEFTMARGIN: WORD = 0x0001; pub const EC_RIGHTMARGIN: WORD = 0x0002; pub const EC_USEFONTINFO: WORD = 0xffff; // wParam of EM_GET/SETIMESTATUS pub const EMSIS_COMPOSITIONSTRING: WORD = 0x0001; // lParam for EMSIS_COMPOSITIONSTRING pub const EIMES_GETCOMPSTRATONCE: WORD = 0x0001; pub const EIMES_CANCELCOMPSTRINFOCUS: WORD = 0x0002; pub const EIMES_COMPLETECOMPSTRKILLFOCUS: WORD = 0x0004; // Edit Control Messages // pub const EM_GETSEL: WORD = 0x00B0; pub const EM_SETSEL: WORD = 0x00B1; pub const EM_GETRECT: WORD = 0x00B2; pub const EM_SETRECT: WORD = 0x00B3; pub const EM_SETRECTNP: WORD = 0x00B4; pub const EM_SCROLL: WORD = 0x00B5; pub const EM_LINESCROLL: WORD = 0x00B6; pub const EM_SCROLLCARET: WORD = 0x00B7; pub const EM_GETMODIFY: WORD = 0x00B8; pub const EM_SETMODIFY: WORD = 0x00B9; pub const EM_GETLINECOUNT: WORD = 0x00BA; pub const EM_LINEINDEX: WORD = 0x00BB; pub const EM_SETHANDLE: WORD = 0x00BC; pub const EM_GETHANDLE: WORD = 0x00BD; pub const EM_GETTHUMB: WORD = 0x00BE; pub const EM_LINELENGTH: WORD = 0x00C1; pub const EM_REPLACESEL: WORD = 0x00C2; pub const EM_GETLINE: WORD = 0x00C4; pub const EM_LIMITTEXT: WORD = 0x00C5; pub const EM_CANUNDO: WORD = 0x00C6; pub const EM_UNDO: WORD = 0x00C7; pub const EM_FMTLINES: WORD = 0x00C8; pub const EM_LINEFROMCHAR: WORD = 0x00C9; pub const EM_SETTABSTOPS: WORD = 0x00CB; pub const EM_SETPASSWORDCHAR: WORD = 0x00CC; pub const EM_EMPTYUNDOBUFFER: WORD = 0x00CD; pub const EM_GETFIRSTVISIBLELINE: WORD = 0x00CE; pub const EM_SETREADONLY: WORD = 0x00CF; pub const EM_SETWORDBREAKPROC: WORD = 0x00D0; pub const EM_GETWORDBREAKPROC: WORD = 0x00D1; pub const EM_GETPASSWORDCHAR: WORD = 0x00D2; pub const EM_SETMARGINS: WORD = 0x00D3; pub const EM_GETMARGINS: WORD = 0x00D4; pub const EM_SETLIMITTEXT: WORD = EM_LIMITTEXT; pub const EM_GETLIMITTEXT: WORD = 0x00D5; pub const EM_POSFROMCHAR: WORD = 0x00D6; pub const EM_CHARFROMPOS: WORD = 0x00D7; pub const EM_SETIMESTATUS: WORD = 0x00D8; pub const 
EM_GETIMESTATUS: WORD = 0x00D9; // EDITWORDBREAKPROC code values // pub const WB_LEFT: WORD = 0; pub const WB_RIGHT: WORD = 1; pub const WB_ISDELIMITER: WORD = 2; pub const BN_CLICKED: WORD = 0; pub const BN_PAINT: WORD = 1; pub const BN_HILITE: WORD = 2; pub const BN_UNHILITE: WORD = 3; pub const BN_DISABLE: WORD = 4; pub const BN_DOUBLECLICKED: WORD = 5; pub const BN_PUSHED: WORD = BN_HILITE; pub const BN_UNPUSHED: WORD = BN_UNHILITE; pub const BN_DBLCLK: WORD = BN_DOUBLECLICKED; pub const BN_SETFOCUS: WORD = 6; pub const BN_KILLFOCUS: WORD = 7; pub const BS_PUSHBUTTON: DWORD = 0x00000000; pub const BS_DEFPUSHBUTTON: DWORD = 0x00000001; pub const BS_CHECKBOX: DWORD = 0x00000002; pub const BS_AUTOCHECKBOX: DWORD = 0x00000003; pub const BS_RADIOBUTTON: DWORD = 0x00000004; pub const BS_3STATE: DWORD = 0x00000005; pub const BS_AUTO3STATE: DWORD = 0x00000006; pub const BS_GROUPBOX: DWORD = 0x00000007; pub const BS_USERBUTTON: DWORD = 0x00000008; pub const BS_AUTORADIOBUTTON: DWORD = 0x00000009; pub const BS_PUSHBOX: DWORD = 0x0000000A; pub const BS_OWNERDRAW: DWORD = 0x0000000B; pub const BS_TYPEMASK: DWORD = 0x0000000F; pub const BS_LEFTTEXT: DWORD = 0x00000020; pub const BS_TEXT: DWORD = 0x00000000; pub const BS_ICON: DWORD = 0x00000040; pub const BS_BITMAP: DWORD = 0x00000080; pub const BS_LEFT: DWORD = 0x00000100; pub const BS_RIGHT: DWORD = 0x00000200; pub const BS_CENTER: DWORD = 0x00000300; pub const BS_TOP: DWORD = 0x00000400; pub const BS_BOTTOM: DWORD = 0x00000800; pub const BS_VCENTER: DWORD = 0x00000C00; pub const BS_PUSHLIKE: DWORD = 0x00001000; pub const BS_MULTILINE: DWORD = 0x00002000; pub const BS_NOTIFY: DWORD = 0x00004000; pub const BS_FLAT: DWORD = 0x00008000; pub const BS_RIGHTBUTTON: DWORD = BS_LEFTTEXT; pub const BM_GETCHECK: UINT = 0x00F0; pub const BM_SETCHECK: UINT = 0x00F1; pub const BM_GETSTATE: UINT = 0x00F2; pub const BM_SETSTATE: UINT = 0x00F3; pub const BM_SETSTYLE: UINT = 0x00F4; pub const BM_CLICK: UINT = 0x00F5; pub const 
BM_GETIMAGE: UINT = 0x00F6; pub const BM_SETIMAGE: UINT = 0x00F7; pub const BM_SETDONTCLICK: UINT = 0x00F8; pub const BST_UNCHECKED: WPARAM = 0x0000; pub const BST_CHECKED: WPARAM = 0x0001; pub const BST_INDETERMINATE: WPARAM = 0x0002; pub const BST_PUSHED: LRESULT = 0x0004; pub const BST_FOCUS: LRESULT = 0x0008; pub const SS_LEFT: DWORD = 0x00000000; pub const SS_CENTER: DWORD = 0x00000001; pub const SS_RIGHT: DWORD = 0x00000002; pub const SS_ICON: DWORD = 0x00000003; pub const SS_BLACKRECT: DWORD = 0x00000004; pub const SS_GRAYRECT: DWORD = 0x00000005; pub const SS_WHITERECT: DWORD = 0x00000006; pub const SS_BLACKFRAME: DWORD = 0x00000007; pub const SS_GRAYFRAME: DWORD = 0x00000008; pub const SS_WHITEFRAME: DWORD = 0x00000009; pub const SS_USERITEM: DWORD = 0x0000000A; pub const SS_SIMPLE: DWORD = 0x0000000B; pub const SS_LEFTNOWORDWRAP: DWORD = 0x0000000C; pub const SS_OWNERDRAW: DWORD = 0x0000000D; pub const SS_BITMAP: DWORD = 0x0000000E; pub const SS_ENHMETAFILE: DWORD = 0x0000000F; pub const SS_ETCHEDHORZ: DWORD = 0x00000010; pub const SS_ETCHEDVERT: DWORD = 0x00000011; pub const SS_ETCHEDFRAME: DWORD = 0x00000012; pub const SS_TYPEMASK: DWORD = 0x0000001F; pub const SS_REALSIZECONTROL: DWORD = 0x00000040; pub const SS_NOPREFIX: DWORD = 0x00000080; pub const SS_NOTIFY: DWORD = 0x00000100; pub const SS_CENTERIMAGE: DWORD = 0x00000200; pub const SS_RIGHTJUST: DWORD = 0x00000400; pub const SS_REALSIZEIMAGE: DWORD = 0x00000800; pub const SS_SUNKEN: DWORD = 0x00001000; pub const SS_EDITCONTROL: DWORD = 0x00002000; pub const SS_ENDELLIPSIS: DWORD = 0x00004000; pub const SS_PATHELLIPSIS: DWORD = 0x00008000; pub const SS_WORDELLIPSIS: DWORD = 0x0000C000; pub const SS_ELLIPSISMASK: DWORD = 0x0000C000; pub const STM_SETICON: UINT = 0x0170; pub const STM_GETICON: UINT = 0x0171; pub const STM_SETIMAGE: UINT = 0x0172; pub const STM_GETIMAGE: UINT = 0x0173; pub const STN_CLICKED: WORD = 0; pub const STN_DBLCLK: WORD = 1; pub const STN_ENABLE: WORD = 2; pub const 
STN_DISABLE: WORD = 3; // completes the `pub const` ending the previous flattened line
pub const STM_MSGMAX: WORD = 0x0174;
// Dialog box styles (DS_*)
pub const DS_ABSALIGN: DWORD = 0x01;
pub const DS_SYSMODAL: DWORD = 0x02;
pub const DS_LOCALEDIT: DWORD = 0x20;
pub const DS_SETFONT: DWORD = 0x40;
pub const DS_MODALFRAME: DWORD = 0x80;
pub const DS_NOIDLEMSG: DWORD = 0x100;
pub const DS_SETFOREGROUND: DWORD = 0x200;
pub const DS_3DLOOK: DWORD = 0x0004;
pub const DS_FIXEDSYS: DWORD = 0x0008;
pub const DS_NOFAILCREATE: DWORD = 0x0010;
pub const DS_CONTROL: DWORD = 0x0400;
pub const DS_CENTER: DWORD = 0x0800;
pub const DS_CENTERMOUSE: DWORD = 0x1000;
pub const DS_CONTEXTHELP: DWORD = 0x2000;
pub const DS_SHELLFONT: DWORD = DS_SETFONT | DS_FIXEDSYS;
pub const DS_USEPIXELS: DWORD = 0x8000;
// Dialog manager messages (DM_*)
pub const DM_GETDEFID: UINT = WM_USER + 0;
pub const DM_SETDEFID: UINT = WM_USER + 1;
pub const DM_REPOSITION: UINT = WM_USER + 2;
pub const DC_HASDEFID: WORD = 0x534B;
// Dialog control codes (DLGC_*)
pub const DLGC_WANTARROWS: LRESULT = 0x0001;
pub const DLGC_WANTTAB: LRESULT = 0x0002;
pub const DLGC_WANTALLKEYS: LRESULT = 0x0004;
pub const DLGC_WANTMESSAGE: LRESULT = 0x0004;
pub const DLGC_HASSETSEL: LRESULT = 0x0008;
pub const DLGC_DEFPUSHBUTTON: LRESULT = 0x0010;
pub const DLGC_UNDEFPUSHBUTTON: LRESULT = 0x0020;
pub const DLGC_RADIOBUTTON: LRESULT = 0x0040;
pub const DLGC_WANTCHARS: LRESULT = 0x0080;
pub const DLGC_STATIC: LRESULT = 0x0100;
pub const DLGC_BUTTON: LRESULT = 0x2000;
// List box return values
pub const LB_OKAY: LRESULT = 0;
pub const LB_ERR: LRESULT = -1;
pub const LB_ERRSPACE: LRESULT = -2;
// List box notification codes (LBN_*)
pub const LBN_ERRSPACE: WORD = -2i16 as WORD;
pub const LBN_SELCHANGE: WORD = 1;
pub const LBN_DBLCLK: WORD = 2;
pub const LBN_SELCANCEL: WORD = 3;
pub const LBN_SETFOCUS: WORD = 4;
pub const LBN_KILLFOCUS: WORD = 5;
// List box messages (LB_*)
pub const LB_ADDSTRING: UINT = 0x0180;
pub const LB_INSERTSTRING: UINT = 0x0181;
pub const LB_DELETESTRING: UINT = 0x0182;
pub const LB_SELITEMRANGEEX: UINT = 0x0183;
pub const LB_RESETCONTENT: UINT = 0x0184;
pub const LB_SETSEL: UINT = 0x0185;
pub const LB_SETCURSEL: UINT = 0x0186;
// The declaration below is completed by `0x0187;` at the start of the next
// flattened line.
pub const LB_GETSEL: UINT =
0x0187; pub const LB_GETCURSEL: UINT = 0x0188; pub const LB_GETTEXT: UINT = 0x0189; pub const LB_GETTEXTLEN: UINT = 0x018A; pub const LB_GETCOUNT: UINT = 0x018B; pub const LB_SELECTSTRING: UINT = 0x018C; pub const LB_DIR: UINT = 0x018D; pub const LB_GETTOPINDEX: UINT = 0x018E; pub const LB_FINDSTRING: UINT = 0x018F; pub const LB_GETSELCOUNT: UINT = 0x0190; pub const LB_GETSELITEMS: UINT = 0x0191; pub const LB_SETTABSTOPS: UINT = 0x0192; pub const LB_GETHORIZONTALEXTENT: UINT = 0x0193; pub const LB_SETHORIZONTALEXTENT: UINT = 0x0194; pub const LB_SETCOLUMNWIDTH: UINT = 0x0195; pub const LB_ADDFILE: UINT = 0x0196; pub const LB_SETTOPINDEX: UINT = 0x0197; pub const LB_GETITEMRECT: UINT = 0x0198; pub const LB_GETITEMDATA: UINT = 0x0199; pub const LB_SETITEMDATA: UINT = 0x019A; pub const LB_SELITEMRANGE: UINT = 0x019B; pub const LB_SETANCHORINDEX: UINT = 0x019C; pub const LB_GETANCHORINDEX: UINT = 0x019D; pub const LB_SETCARETINDEX: UINT = 0x019E; pub const LB_GETCARETINDEX: UINT = 0x019F; pub const LB_SETITEMHEIGHT: UINT = 0x01A0; pub const LB_GETITEMHEIGHT: UINT = 0x01A1; pub const LB_FINDSTRINGEXACT: UINT = 0x01A2; pub const LB_SETLOCALE: UINT = 0x01A5; pub const LB_GETLOCALE: UINT = 0x01A6; pub const LB_SETCOUNT: UINT = 0x01A7; pub const LB_INITSTORAGE: UINT = 0x01A8; pub const LB_ITEMFROMPOINT: UINT = 0x01A9; pub const LB_MULTIPLEADDSTRING: UINT = 0x01B1; pub const LB_GETLISTBOXINFO: UINT = 0x01B2; pub const LB_MSGMAX: UINT = 0x01B3; pub const LBS_NOTIFY: DWORD = 0x0001; pub const LBS_SORT: DWORD = 0x0002; pub const LBS_NOREDRAW: DWORD = 0x0004; pub const LBS_MULTIPLESEL: DWORD = 0x0008; pub const LBS_OWNERDRAWFIXED: DWORD = 0x0010; pub const LBS_OWNERDRAWVARIABLE: DWORD = 0x0020; pub const LBS_HASSTRINGS: DWORD = 0x0040; pub const LBS_USETABSTOPS: DWORD = 0x0080; pub const LBS_NOINTEGRALHEIGHT: DWORD = 0x0100; pub const LBS_MULTICOLUMN: DWORD = 0x0200; pub const LBS_WANTKEYBOARDINPUT: DWORD = 0x0400; pub const LBS_EXTENDEDSEL: DWORD = 0x0800; pub const 
LBS_DISABLENOSCROLL: DWORD = 0x1000; pub const LBS_NODATA: DWORD = 0x2000; pub const LBS_NOSEL: DWORD = 0x4000; pub const LBS_COMBOBOX: DWORD = 0x8000; pub const LBS_STANDARD: DWORD = LBS_NOTIFY | LBS_SORT | WS_VSCROLL | WS_BORDER; pub const CB_OKAY: LRESULT = 0; pub const CB_ERR: LRESULT = -1; pub const CB_ERRSPACE: LRESULT = -2; pub const CBN_ERRSPACE: WORD = -1i16 as WORD; pub const CBN_SELCHANGE: WORD = 1; pub const CBN_DBLCLK: WORD = 2; pub const CBN_SETFOCUS: WORD = 3; pub const CBN_KILLFOCUS: WORD = 4; pub const CBN_EDITCHANGE: WORD = 5; pub const CBN_EDITUPDATE: WORD = 6; pub const CBN_DROPDOWN: WORD = 7; pub const CBN_CLOSEUP: WORD = 8; pub const CBN_SELENDOK: WORD = 9; pub const CBN_SELENDCANCEL: WORD = 10; pub const CBS_SIMPLE: DWORD = 0x0001; pub const CBS_DROPDOWN: DWORD = 0x0002; pub const CBS_DROPDOWNLIST: DWORD = 0x0003; pub const CBS_OWNERDRAWFIXED: DWORD = 0x0010; pub const CBS_OWNERDRAWVARIABLE: DWORD = 0x0020; pub const CBS_AUTOHSCROLL: DWORD = 0x0040; pub const CBS_OEMCONVERT: DWORD = 0x0080; pub const CBS_SORT: DWORD = 0x0100; pub const CBS_HASSTRINGS: DWORD = 0x0200; pub const CBS_NOINTEGRALHEIGHT: DWORD = 0x0400; pub const CBS_DISABLENOSCROLL: DWORD = 0x0800; pub const CBS_UPPERCASE: DWORD = 0x2000; pub const CBS_LOWERCASE: DWORD = 0x4000; pub const CB_MULTIPLEADDSTRING: UINT = 0x0163; pub const CB_GETCOMBOBOXINFO: UINT = 0x0164; pub const CB_MSGMAX: UINT = 0x0165; pub const SBS_HORZ: DWORD = 0x0000; pub const SBS_VERT: DWORD = 0x0001; pub const SBS_TOPALIGN: DWORD = 0x0002; pub const SBS_LEFTALIGN: DWORD = 0x0002; pub const SBS_BOTTOMALIGN: DWORD = 0x0004; pub const SBS_RIGHTALIGN: DWORD = 0x0004; pub const SBS_SIZEBOXTOPLEFTALIGN: DWORD = 0x0002; pub const SBS_SIZEBOXBOTTOMRIGHTALIGN: DWORD = 0x0004; pub const SBS_SIZEBOX: DWORD = 0x0008; pub const SBS_SIZEGRIP: DWORD = 0x0010; pub const SBM_SETPOS: UINT = 0x00E0; pub const SBM_GETPOS: UINT = 0x00E1; pub const SBM_SETRANGE: UINT = 0x00E2; pub const SBM_SETRANGEREDRAW: UINT = 0x00E6; pub 
const SBM_GETRANGE: UINT = 0x00E3; // the `pub` for this item ends the previous flattened line
pub const SBM_ENABLE_ARROWS: UINT = 0x00E4;
pub const SBM_SETSCROLLINFO: UINT = 0x00E9;
pub const SBM_GETSCROLLINFO: UINT = 0x00EA;
pub const SBM_GETSCROLLBARINFO: UINT = 0x00EB;
pub const CCHILDREN_SCROLLBAR: usize = 5;
// CDS_* flags and DISP_CHANGE_* return codes (see ChangeDisplaySettings in
// the Win32 API).
pub const CDS_UPDATEREGISTRY: DWORD = 0x00000001;
pub const CDS_TEST: DWORD = 0x00000002;
pub const CDS_FULLSCREEN: DWORD = 0x00000004;
pub const CDS_GLOBAL: DWORD = 0x00000008;
pub const CDS_SET_PRIMARY: DWORD = 0x00000010;
pub const CDS_VIDEOPARAMETERS: DWORD = 0x00000020;
pub const CDS_ENABLE_UNSAFE_MODES: DWORD = 0x00000100;
pub const CDS_DISABLE_UNSAFE_MODES: DWORD = 0x00000200;
pub const CDS_RESET: DWORD = 0x40000000;
pub const CDS_RESET_EX: DWORD = 0x20000000;
pub const CDS_NORESET: DWORD = 0x10000000;
pub const DISP_CHANGE_SUCCESSFUL: LONG = 0;
pub const DISP_CHANGE_RESTART: LONG = 1;
pub const DISP_CHANGE_FAILED: LONG = -1;
pub const DISP_CHANGE_BADMODE: LONG = -2;
pub const DISP_CHANGE_NOTUPDATED: LONG = -3;
pub const DISP_CHANGE_BADFLAGS: LONG = -4;
pub const DISP_CHANGE_BADPARAM: LONG = -5;
pub const DISP_CHANGE_BADDUALVIEW: LONG = -6;
pub const EDD_GET_DEVICE_INTERFACE_NAME: DWORD = 0x00000001;
pub const ENUM_CURRENT_SETTINGS: DWORD = 0xFFFFFFFF;
pub const ENUM_REGISTRY_SETTINGS: DWORD = 0xFFFFFFFE;
// GetWindow relationship codes (GW_*)
pub const GW_HWNDFIRST: UINT = 0;
pub const GW_HWNDLAST: UINT = 1;
pub const GW_HWNDNEXT: UINT = 2;
pub const GW_HWNDPREV: UINT = 3;
pub const GW_OWNER: UINT = 4;
pub const GW_CHILD: UINT = 5;
pub const GW_ENABLEDPOPUP: UINT = 6;
pub const GW_MAX: UINT = 6;
// MDI tiling flags (MDITILE_*)
pub const MDITILE_VERTICAL: UINT = 0x0000;
pub const MDITILE_HORIZONTAL: UINT = 0x0001;
pub const MDITILE_SKIPDISABLED: UINT = 0x0002;
pub const MDITILE_ZORDER: UINT = 0x0004;
// MessageBox button combinations (MB_*)
pub const MB_OK: DWORD = 0x00000000;
pub const MB_OKCANCEL: DWORD = 0x00000001;
pub const MB_ABORTRETRYIGNORE: DWORD = 0x00000002;
pub const MB_YESNOCANCEL: DWORD = 0x00000003;
pub const MB_YESNO: DWORD = 0x00000004;
pub const MB_RETRYCANCEL: DWORD = 0x00000005;
// The `pub const` below is completed by `MB_CANCELTRYCONTINUE: ...` at the
// start of the next flattened line.
pub const
MB_CANCELTRYCONTINUE: DWORD = 0x00000006; pub const MB_ICONHAND: DWORD = 0x00000010; pub const MB_ICONQUESTION: DWORD = 0x00000020; pub const MB_ICONEXCLAMATION: DWORD = 0x00000030; pub const MB_ICONASTERISK: DWORD = 0x00000040; pub const MB_USERICON: DWORD = 0x00000080; pub const MB_ICONWARNING: DWORD = MB_ICONEXCLAMATION; pub const MB_ICONERROR: DWORD = MB_ICONHAND; pub const MB_ICONINFORMATION: DWORD = MB_ICONASTERISK; pub const MB_ICONSTOP: DWORD = MB_ICONHAND; pub const MB_DEFBUTTON1: DWORD = 0x00000000; pub const MB_DEFBUTTON2: DWORD = 0x00000100; pub const MB_DEFBUTTON3: DWORD = 0x00000200; pub const MB_DEFBUTTON4: DWORD = 0x00000300; pub const MB_APPLMODAL: DWORD = 0x00000000; pub const MB_SYSTEMMODAL: DWORD = 0x00001000; pub const MB_TASKMODAL: DWORD = 0x00002000; pub const MB_HELP: DWORD = 0x00004000; pub const MB_NOFOCUS: DWORD = 0x00008000; pub const MB_SETFOREGROUND: DWORD = 0x00010000; pub const MB_DEFAULT_DESKTOP_ONLY: DWORD = 0x00020000; pub const MB_TOPMOST: DWORD = 0x00040000; pub const MB_RIGHT: DWORD = 0x00080000; pub const MB_RTLREADING: DWORD = 0x00100000; pub const MB_SERVICE_NOTIFICATION: DWORD = 0x00200000; pub const MB_SERVICE_NOTIFICATION_NT3X: DWORD = 0x00040000; pub const MB_TYPEMASK: DWORD = 0x0000000F; pub const MB_ICONMASK: DWORD = 0x000000F0; pub const MB_DEFMASK: DWORD = 0x00000F00; pub const MB_MODEMASK: DWORD = 0x00003000; pub const MB_MISCMASK: DWORD = 0x0000C000; pub const MF_BITMAP: UINT = 0x00000004; pub const MF_CHECKED: UINT = 0x00000008; pub const MF_DISABLED: UINT = 0x00000002; pub const MF_ENABLED: UINT = 0x00000000; pub const MF_GRAYED: UINT = 0x00000001; pub const MF_MENUBARBREAK: UINT = 0x00000020; pub const MF_MENUBREAK: UINT = 0x00000040; pub const MF_UNHILITE: UINT = 0x00000000; pub const MF_HILITE: UINT = 0x00000080; pub const MF_DEFAULT: UINT = 0x00001000; pub const MF_SYSMENU: UINT = 0x00002000; pub const MF_HELP: UINT = 0x00004000; pub const MF_RIGHTJUSTIFY: UINT = 0x00004000; pub const MF_MOUSESELECT: UINT = 
0x00008000; pub const MF_END: UINT = 0x00000080; pub const MF_OWNERDRAW: UINT = 0x00000100; pub const MF_POPUP: UINT = 0x00000010; pub const MF_INSERT: UINT = 0x00000000; pub const MF_CHANGE: UINT = 0x00000080; pub const MF_APPEND: UINT = 0x00000100; pub const MF_DELETE: UINT = 0x00000200; pub const MF_REMOVE: UINT = 0x00001000; pub const MF_BYCOMMAND: UINT = 0x00000000; pub const MF_BYPOSITION: UINT = 0x00000400; pub const MF_SEPARATOR: UINT = 0x00000800; pub const MF_STRING: UINT = 0x00000000; pub const MF_UNCHECKED: UINT = 0x00000000; pub const MFT_STRING: UINT = MF_STRING; pub const MFT_BITMAP: UINT = MF_BITMAP; pub const MFT_MENUBARBREAK: UINT = MF_MENUBARBREAK; pub const MFT_MENUBREAK: UINT = MF_MENUBREAK; pub const MFT_OWNERDRAW: UINT = MF_OWNERDRAW; pub const MFT_RADIOCHECK: UINT = 0x00000200; pub const MFT_SEPARATOR: UINT = MF_SEPARATOR; pub const MFT_RIGHTORDER: UINT = 0x00002000; pub const MFT_RIGHTJUSTIFY: UINT = MF_RIGHTJUSTIFY; pub const MFS_GRAYED: UINT = 0x00000003; pub const MFS_DISABLED: UINT = MFS_GRAYED; pub const MFS_CHECKED: UINT = MF_CHECKED; pub const MFS_HILITE: UINT = MF_HILITE; pub const MFS_ENABLED: UINT = MF_ENABLED; pub const MFS_UNCHECKED: UINT = MF_UNCHECKED; pub const MFS_UNHILITE: UINT = MF_UNHILITE; pub const MFS_DEFAULT: UINT = MF_DEFAULT; FN!{stdcall MSGBOXCALLBACK( LPHELPINFO, ) -> ()} FN!{stdcall WINEVENTPROC( HWINEVENTHOOK, DWORD, HWND, LONG, LONG, DWORD, DWORD, ) -> ()} STRUCT!{struct SCROLLBARINFO { cbSize: DWORD, rcScrollBar: RECT, dxyLineButton: c_int, xyThumbTop: c_int, xyThumbBottom: c_int, reserved: c_int, rgstate: [DWORD; CCHILDREN_SCROLLBAR + 1], }} pub type PSCROLLBARINFO = *mut SCROLLBARINFO; pub type LPSCROLLBARINFO = *mut SCROLLBARINFO; STRUCT!{struct SCROLLINFO { cbSize: UINT, fMask: UINT, nMin: c_int, nMax: c_int, nPage: UINT, nPos: c_int, nTrackPos: c_int, }} pub type LPSCROLLINFO = *mut SCROLLINFO; pub type LPCSCROLLINFO = *const SCROLLINFO; STRUCT!{struct SIZE { cx: LONG, cy: LONG, }} pub type PSIZE = *mut 
SIZE; pub type LPSIZE = *mut SIZE; pub type SIZEL = SIZE; pub type PSIZEL = *mut SIZEL; pub type LPSIZEL = *mut SIZEL; //8855 (Win 7 SDK) STRUCT!{struct ICONINFO { fIcon: BOOL, xHotspot: DWORD, yHotspot: DWORD, hbmMask: HBITMAP, hbmColor: HBITMAP, }} pub type PICONINFO = *mut ICONINFO; //9066 // Color indexes for use in GetSysColor and SetSysColor // 0-18 (after incrementing) are also valid in RegisterClass's WNDCLASS pub const COLOR_SCROLLBAR: c_int = 0; pub const COLOR_BACKGROUND: c_int = 1; pub const COLOR_ACTIVECAPTION: c_int = 2; pub const COLOR_INACTIVECAPTION: c_int = 3; pub const COLOR_MENU: c_int = 4; pub const COLOR_WINDOW: c_int = 5; pub const COLOR_WINDOWFRAME: c_int = 6; pub const COLOR_MENUTEXT: c_int = 7; pub const COLOR_WINDOWTEXT: c_int = 8; pub const COLOR_CAPTIONTEXT: c_int = 9; pub const COLOR_ACTIVEBORDER: c_int = 10; pub const COLOR_INACTIVEBORDER: c_int = 11; pub const COLOR_APPWORKSPACE: c_int = 12; pub const COLOR_HIGHLIGHT: c_int = 13; pub const COLOR_HIGHLIGHTTEXT: c_int = 14; pub const COLOR_BTNFACE: c_int = 15; pub const COLOR_BTNSHADOW: c_int = 16; pub const COLOR_GRAYTEXT: c_int = 17; pub const COLOR_BTNTEXT: c_int = 18; pub const COLOR_INACTIVECAPTIONTEXT: c_int = 19; pub const COLOR_BTNHIGHLIGHT: c_int = 20; // Introduced in Windows 95 (winver 0x0400): pub const COLOR_3DDKSHADOW: c_int = 21; pub const COLOR_3DLIGHT: c_int = 22; pub const COLOR_INFOTEXT: c_int = 23; pub const COLOR_INFOBK: c_int = 24; pub const COLOR_DESKTOP: c_int = COLOR_BACKGROUND; pub const COLOR_3DFACE: c_int = COLOR_BTNFACE; pub const COLOR_3DSHADOW: c_int = COLOR_BTNSHADOW; pub const COLOR_3DHIGHLIGHT: c_int = COLOR_BTNHIGHLIGHT; pub const COLOR_3DHILIGHT: c_int = COLOR_BTNHIGHLIGHT; pub const COLOR_BTNHILIGHT: c_int = COLOR_BTNHIGHLIGHT; // Introduced in Windows 2000 (winver 0x0500) pub const COLOR_HOTLIGHT: c_int = 26; pub const COLOR_GRADIENTACTIVECAPTION: c_int = 27; pub const COLOR_GRADIENTINACTIVECAPTION: c_int = 28; // Introduced in Windows XP (winver 
0x0501) pub const COLOR_MENUHILIGHT: c_int = 29; pub const COLOR_MENUBAR: c_int = 30; //10069 pub const IDC_ARROW: LPCWSTR = 32512 as LPCWSTR; pub const IDC_IBEAM: LPCWSTR = 32513 as LPCWSTR; pub const IDC_WAIT: LPCWSTR = 32514 as LPCWSTR; pub const IDC_CROSS: LPCWSTR = 32515 as LPCWSTR; pub const IDC_UPARROW: LPCWSTR = 32516 as LPCWSTR; pub const IDC_SIZE: LPCWSTR = 32640 as LPCWSTR; pub const IDC_ICON: LPCWSTR = 32641 as LPCWSTR; pub const IDC_SIZENWSE: LPCWSTR = 32642 as LPCWSTR; pub const IDC_SIZENESW: LPCWSTR = 32643 as LPCWSTR; pub const IDC_SIZEWE: LPCWSTR = 32644 as LPCWSTR; pub const IDC_SIZENS: LPCWSTR = 32645 as LPCWSTR; pub const IDC_SIZEALL: LPCWSTR = 32646 as LPCWSTR; pub const IDC_NO: LPCWSTR = 32648 as LPCWSTR; pub const IDC_HAND: LPCWSTR = 32649 as LPCWSTR; pub const IDC_APPSTARTING: LPCWSTR = 32650 as LPCWSTR; pub const IDC_HELP: LPCWSTR = 32651 as LPCWSTR; //10492 pub const IDI_APPLICATION: LPCWSTR = 32512 as LPCWSTR; pub const IDI_HAND: LPCWSTR = 32513 as LPCWSTR; pub const IDI_QUESTION: LPCWSTR = 32514 as LPCWSTR; pub const IDI_EXCLAMATION: LPCWSTR = 32515 as LPCWSTR; pub const IDI_ASTERISK: LPCWSTR = 32516 as LPCWSTR; pub const IDI_WINLOGO: LPCWSTR = 32517 as LPCWSTR; pub const IDI_SHIELD: LPCWSTR = 32518 as LPCWSTR; pub const IDI_WARNING: LPCWSTR = IDI_EXCLAMATION; pub const IDI_ERROR: LPCWSTR = IDI_HAND; pub const IDI_INFORMATION: LPCWSTR = IDI_ASTERISK; pub const SPI_GETBEEP: UINT = 0x0001; pub const SPI_SETBEEP: UINT = 0x0002; pub const SPI_GETMOUSE: UINT = 0x0003; pub const SPI_SETMOUSE: UINT = 0x0004; pub const SPI_GETBORDER: UINT = 0x0005; pub const SPI_SETBORDER: UINT = 0x0006; pub const SPI_GETKEYBOARDSPEED: UINT = 0x000A; pub const SPI_SETKEYBOARDSPEED: UINT = 0x000B; pub const SPI_LANGDRIVER: UINT = 0x000C; pub const SPI_ICONHORIZONTALSPACING: UINT = 0x000D; pub const SPI_GETSCREENSAVETIMEOUT: UINT = 0x000E; pub const SPI_SETSCREENSAVETIMEOUT: UINT = 0x000F; pub const SPI_GETSCREENSAVEACTIVE: UINT = 0x0010; pub const 
SPI_SETSCREENSAVEACTIVE: UINT = 0x0011; pub const SPI_GETGRIDGRANULARITY: UINT = 0x0012; pub const SPI_SETGRIDGRANULARITY: UINT = 0x0013; pub const SPI_SETDESKWALLPAPER: UINT = 0x0014; pub const SPI_SETDESKPATTERN: UINT = 0x0015; pub const SPI_GETKEYBOARDDELAY: UINT = 0x0016; pub const SPI_SETKEYBOARDDELAY: UINT = 0x0017; pub const SPI_ICONVERTICALSPACING: UINT = 0x0018; pub const SPI_GETICONTITLEWRAP: UINT = 0x0019; pub const SPI_SETICONTITLEWRAP: UINT = 0x001A; pub const SPI_GETMENUDROPALIGNMENT: UINT = 0x001B; pub const SPI_SETMENUDROPALIGNMENT: UINT = 0x001C; pub const SPI_SETDOUBLECLKWIDTH: UINT = 0x001D; pub const SPI_SETDOUBLECLKHEIGHT: UINT = 0x001E; pub const SPI_GETICONTITLELOGFONT: UINT = 0x001F; pub const SPI_SETDOUBLECLICKTIME: UINT = 0x0020; pub const SPI_SETMOUSEBUTTONSWAP: UINT = 0x0021; pub const SPI_SETICONTITLELOGFONT: UINT = 0x0022; pub const SPI_GETFASTTASKSWITCH: UINT = 0x0023; pub const SPI_SETFASTTASKSWITCH: UINT = 0x0024; pub const SPI_SETDRAGFULLWINDOWS: UINT = 0x0025; pub const SPI_GETDRAGFULLWINDOWS: UINT = 0x0026; pub const SPI_GETNONCLIENTMETRICS: UINT = 0x0029; pub const SPI_SETNONCLIENTMETRICS: UINT = 0x002A; pub const SPI_GETMINIMIZEDMETRICS: UINT = 0x002B; pub const SPI_SETMINIMIZEDMETRICS: UINT = 0x002C; pub const SPI_GETICONMETRICS: UINT = 0x002D; pub const SPI_SETICONMETRICS: UINT = 0x002E; pub const SPI_SETWORKAREA: UINT = 0x002F; pub const SPI_GETWORKAREA: UINT = 0x0030; pub const SPI_SETPENWINDOWS: UINT = 0x0031; pub const SPI_GETHIGHCONTRAST: UINT = 0x0042; pub const SPI_SETHIGHCONTRAST: UINT = 0x0043; pub const SPI_GETKEYBOARDPREF: UINT = 0x0044; pub const SPI_SETKEYBOARDPREF: UINT = 0x0045; pub const SPI_GETSCREENREADER: UINT = 0x0046; pub const SPI_SETSCREENREADER: UINT = 0x0047; pub const SPI_GETANIMATION: UINT = 0x0048; pub const SPI_SETANIMATION: UINT = 0x0049; pub const SPI_GETFONTSMOOTHING: UINT = 0x004A; pub const SPI_SETFONTSMOOTHING: UINT = 0x004B; pub const SPI_SETDRAGWIDTH: UINT = 0x004C; pub const 
SPI_SETDRAGHEIGHT: UINT = 0x004D;
// SystemParametersInfo (SPI_*) action codes: power management, handheld mode,
// system cursor/icon reload, default input language and screen-saver state.
// Values mirror winuser.h exactly; do not renumber.
pub const SPI_SETHANDHELD: UINT = 0x004E;
pub const SPI_GETLOWPOWERTIMEOUT: UINT = 0x004F;
pub const SPI_GETPOWEROFFTIMEOUT: UINT = 0x0050;
pub const SPI_SETLOWPOWERTIMEOUT: UINT = 0x0051;
pub const SPI_SETPOWEROFFTIMEOUT: UINT = 0x0052;
pub const SPI_GETLOWPOWERACTIVE: UINT = 0x0053;
pub const SPI_GETPOWEROFFACTIVE: UINT = 0x0054;
pub const SPI_SETLOWPOWERACTIVE: UINT = 0x0055;
pub const SPI_SETPOWEROFFACTIVE: UINT = 0x0056;
pub const SPI_SETCURSORS: UINT = 0x0057;
pub const SPI_SETICONS: UINT = 0x0058;
pub const SPI_GETDEFAULTINPUTLANG: UINT = 0x0059;
pub const SPI_SETDEFAULTINPUTLANG: UINT = 0x005A;
pub const SPI_SETLANGTOGGLE: UINT = 0x005B;
pub const SPI_GETWINDOWSEXTENSION: UINT = 0x005C;
pub const SPI_SETMOUSETRAILS: UINT = 0x005D;
pub const SPI_GETMOUSETRAILS: UINT = 0x005E;
pub const SPI_SETSCREENSAVERRUNNING: UINT = 0x0061;
// Legacy alias kept by winuser.h for the same action code.
pub const SPI_SCREENSAVERRUNNING: UINT = SPI_SETSCREENSAVERRUNNING;
// Accessibility parameters. Note these are numerically lower (0x0032..0x0041)
// than the block above; the ordering follows winuser.h declaration order.
pub const SPI_GETFILTERKEYS: UINT = 0x0032;
pub const SPI_SETFILTERKEYS: UINT = 0x0033;
pub const SPI_GETTOGGLEKEYS: UINT = 0x0034;
pub const SPI_SETTOGGLEKEYS: UINT = 0x0035;
pub const SPI_GETMOUSEKEYS: UINT = 0x0036;
pub const SPI_SETMOUSEKEYS: UINT = 0x0037;
pub const SPI_GETSHOWSOUNDS: UINT = 0x0038;
pub const SPI_SETSHOWSOUNDS: UINT = 0x0039;
pub const SPI_GETSTICKYKEYS: UINT = 0x003A;
pub const SPI_SETSTICKYKEYS: UINT = 0x003B;
pub const SPI_GETACCESSTIMEOUT: UINT = 0x003C;
pub const SPI_SETACCESSTIMEOUT: UINT = 0x003D;
pub const SPI_GETSERIALKEYS: UINT = 0x003E;
pub const SPI_SETSERIALKEYS: UINT = 0x003F;
pub const SPI_GETSOUNDSENTRY: UINT = 0x0040;
pub const SPI_SETSOUNDSENTRY: UINT = 0x0041;
// Mouse hover / wheel / menu-delay parameters.
pub const SPI_GETSNAPTODEFBUTTON: UINT = 0x005F;
pub const SPI_SETSNAPTODEFBUTTON: UINT = 0x0060;
pub const SPI_GETMOUSEHOVERWIDTH: UINT = 0x0062;
pub const SPI_SETMOUSEHOVERWIDTH: UINT = 0x0063;
pub const SPI_GETMOUSEHOVERHEIGHT: UINT = 0x0064;
pub const SPI_SETMOUSEHOVERHEIGHT: UINT = 0x0065;
pub const SPI_GETMOUSEHOVERTIME: UINT = 0x0066;
pub const SPI_SETMOUSEHOVERTIME: UINT = 0x0067;
pub const SPI_GETWHEELSCROLLLINES: UINT = 0x0068;
pub const SPI_SETWHEELSCROLLLINES: UINT = 0x0069;
pub const SPI_GETMENUSHOWDELAY: UINT = 0x006A;
pub const SPI_SETMENUSHOWDELAY: UINT = 0x006B;
pub const SPI_GETWHEELSCROLLCHARS: UINT = 0x006C;
pub const SPI_SETWHEELSCROLLCHARS: UINT = 0x006D;
pub const SPI_GETSHOWIMEUI: UINT = 0x006E;
pub const SPI_SETSHOWIMEUI: UINT = 0x006F;
pub const SPI_GETMOUSESPEED: UINT = 0x0070;
pub const SPI_SETMOUSESPEED: UINT = 0x0071;
pub const SPI_GETSCREENSAVERRUNNING: UINT = 0x0072;
pub const SPI_GETDESKWALLPAPER: UINT = 0x0073;
// Vista-and-later desktop / timeout / dock-threshold parameters.
pub const SPI_GETAUDIODESCRIPTION: UINT = 0x0074;
pub const SPI_SETAUDIODESCRIPTION: UINT = 0x0075;
pub const SPI_GETSCREENSAVESECURE: UINT = 0x0076;
pub const SPI_SETSCREENSAVESECURE: UINT = 0x0077;
pub const SPI_GETHUNGAPPTIMEOUT: UINT = 0x0078;
pub const SPI_SETHUNGAPPTIMEOUT: UINT = 0x0079;
pub const SPI_GETWAITTOKILLTIMEOUT: UINT = 0x007A;
pub const SPI_SETWAITTOKILLTIMEOUT: UINT = 0x007B;
pub const SPI_GETWAITTOKILLSERVICETIMEOUT: UINT = 0x007C;
pub const SPI_SETWAITTOKILLSERVICETIMEOUT: UINT = 0x007D;
pub const SPI_GETMOUSEDOCKTHRESHOLD: UINT = 0x007E;
pub const SPI_SETMOUSEDOCKTHRESHOLD: UINT = 0x007F;
pub const SPI_GETPENDOCKTHRESHOLD: UINT = 0x0080;
pub const SPI_SETPENDOCKTHRESHOLD: UINT = 0x0081;
pub const SPI_GETWINARRANGING: UINT = 0x0082;
pub const SPI_SETWINARRANGING: UINT = 0x0083;
pub const SPI_GETMOUSEDRAGOUTTHRESHOLD: UINT = 0x0084;
pub const SPI_SETMOUSEDRAGOUTTHRESHOLD: UINT = 0x0085;
pub const SPI_GETPENDRAGOUTTHRESHOLD: UINT = 0x0086;
pub const SPI_SETPENDRAGOUTTHRESHOLD: UINT = 0x0087;
pub const SPI_GETMOUSESIDEMOVETHRESHOLD: UINT = 0x0088;
pub const SPI_SETMOUSESIDEMOVETHRESHOLD: UINT = 0x0089;
pub const SPI_GETPENSIDEMOVETHRESHOLD: UINT = 0x008A;
pub const SPI_SETPENSIDEMOVETHRESHOLD: UINT = 0x008B;
pub const SPI_GETDRAGFROMMAXIMIZE: UINT = 0x008C;
pub const SPI_SETDRAGFROMMAXIMIZE: UINT = 0x008D;
pub const SPI_GETSNAPSIZING: UINT = 0x008E;
pub const SPI_SETSNAPSIZING: UINT = 0x008F;
pub const SPI_GETDOCKMOVING: UINT = 0x0090;
pub const SPI_SETDOCKMOVING: UINT = 0x0091;
// UI-effect parameters (0x1000 range: window tracking, animations, fades).
pub const SPI_GETACTIVEWINDOWTRACKING: UINT = 0x1000;
pub const SPI_SETACTIVEWINDOWTRACKING: UINT = 0x1001;
pub const SPI_GETMENUANIMATION: UINT = 0x1002;
pub const SPI_SETMENUANIMATION: UINT = 0x1003;
pub const SPI_GETCOMBOBOXANIMATION: UINT = 0x1004;
pub const SPI_SETCOMBOBOXANIMATION: UINT = 0x1005;
pub const SPI_GETLISTBOXSMOOTHSCROLLING: UINT = 0x1006;
pub const SPI_SETLISTBOXSMOOTHSCROLLING: UINT = 0x1007;
pub const SPI_GETGRADIENTCAPTIONS: UINT = 0x1008;
pub const SPI_SETGRADIENTCAPTIONS: UINT = 0x1009;
pub const SPI_GETKEYBOARDCUES: UINT = 0x100A;
pub const SPI_SETKEYBOARDCUES: UINT = 0x100B;
// Aliases kept by winuser.h for the keyboard-cues parameters.
pub const SPI_GETMENUUNDERLINES: UINT = SPI_GETKEYBOARDCUES;
pub const SPI_SETMENUUNDERLINES: UINT = SPI_SETKEYBOARDCUES;
pub const SPI_GETACTIVEWNDTRKZORDER: UINT = 0x100C;
pub const SPI_SETACTIVEWNDTRKZORDER: UINT = 0x100D;
pub const SPI_GETHOTTRACKING: UINT = 0x100E;
pub const SPI_SETHOTTRACKING: UINT = 0x100F;
pub const SPI_GETMENUFADE: UINT = 0x1012;
pub const SPI_SETMENUFADE: UINT = 0x1013;
pub const SPI_GETSELECTIONFADE: UINT = 0x1014;
pub const SPI_SETSELECTIONFADE: UINT = 0x1015;
pub const SPI_GETTOOLTIPANIMATION: UINT = 0x1016;
pub const SPI_SETTOOLTIPANIMATION: UINT = 0x1017;
pub const SPI_GETTOOLTIPFADE: UINT = 0x1018;
pub const SPI_SETTOOLTIPFADE: UINT = 0x1019;
pub const SPI_GETCURSORSHADOW: UINT = 0x101A;
pub const SPI_SETCURSORSHADOW: UINT = 0x101B;
pub const SPI_GETMOUSESONAR: UINT = 0x101C;
pub const SPI_SETMOUSESONAR: UINT = 0x101D;
pub const SPI_GETMOUSECLICKLOCK: UINT = 0x101E;
pub const SPI_SETMOUSECLICKLOCK: UINT = 0x101F;
pub const SPI_GETMOUSEVANISH: UINT = 0x1020;
pub const SPI_SETMOUSEVANISH: UINT = 0x1021;
pub const SPI_GETFLATMENU: UINT = 0x1022;
pub const SPI_SETFLATMENU: UINT = 0x1023;
pub const SPI_GETDROPSHADOW: UINT = 0x1024;
pub const SPI_SETDROPSHADOW: UINT = 0x1025;
pub const SPI_GETBLOCKSENDINPUTRESETS: UINT = 0x1026;
pub const SPI_SETBLOCKSENDINPUTRESETS: UINT = 0x1027;
pub const SPI_GETUIEFFECTS: UINT = 0x103E;
pub const SPI_SETUIEFFECTS: UINT = 0x103F;
pub const SPI_GETDISABLEOVERLAPPEDCONTENT: UINT = 0x1040;
pub const SPI_SETDISABLEOVERLAPPEDCONTENT: UINT = 0x1041;
pub const SPI_GETCLIENTAREAANIMATION: UINT = 0x1042;
pub const SPI_SETCLIENTAREAANIMATION: UINT = 0x1043;
pub const SPI_GETCLEARTYPE: UINT = 0x1048;
pub const SPI_SETCLEARTYPE: UINT = 0x1049;
pub const SPI_GETSPEECHRECOGNITION: UINT = 0x104A;
pub const SPI_SETSPEECHRECOGNITION: UINT = 0x104B;
// Per-value SPI parameters (0x2000 range: timeouts, caret, font smoothing).
pub const SPI_GETFOREGROUNDLOCKTIMEOUT: UINT = 0x2000;
pub const SPI_SETFOREGROUNDLOCKTIMEOUT: UINT = 0x2001;
pub const SPI_GETACTIVEWNDTRKTIMEOUT: UINT = 0x2002;
pub const SPI_SETACTIVEWNDTRKTIMEOUT: UINT = 0x2003;
pub const SPI_GETFOREGROUNDFLASHCOUNT: UINT = 0x2004;
pub const SPI_SETFOREGROUNDFLASHCOUNT: UINT = 0x2005;
pub const SPI_GETCARETWIDTH: UINT = 0x2006;
pub const SPI_SETCARETWIDTH: UINT = 0x2007;
pub const SPI_GETMOUSECLICKLOCKTIME: UINT = 0x2008;
pub const SPI_SETMOUSECLICKLOCKTIME: UINT = 0x2009;
pub const SPI_GETFONTSMOOTHINGTYPE: UINT = 0x200A;
pub const SPI_SETFONTSMOOTHINGTYPE: UINT = 0x200B;
// Font-smoothing type values used with SPI_GET/SETFONTSMOOTHINGTYPE.
pub const FE_FONTSMOOTHINGSTANDARD: UINT = 0x0001;
pub const FE_FONTSMOOTHINGCLEARTYPE: UINT = 0x0002;
pub const SPI_GETFONTSMOOTHINGCONTRAST: UINT = 0x200C;
pub const SPI_SETFONTSMOOTHINGCONTRAST: UINT = 0x200D;
pub const SPI_GETFOCUSBORDERWIDTH: UINT = 0x200E;
pub const SPI_SETFOCUSBORDERWIDTH: UINT = 0x200F;
pub const SPI_GETFOCUSBORDERHEIGHT: UINT = 0x2010;
pub const SPI_SETFOCUSBORDERHEIGHT: UINT = 0x2011;
pub const SPI_GETFONTSMOOTHINGORIENTATION: UINT = 0x2012;
pub const SPI_SETFONTSMOOTHINGORIENTATION: UINT = 0x2013;
// Subpixel orientation values used with SPI_GET/SETFONTSMOOTHINGORIENTATION.
pub const FE_FONTSMOOTHINGORIENTATIONBGR: UINT = 0x0000;
pub const FE_FONTSMOOTHINGORIENTATIONRGB: UINT = 0x0001;
pub const SPI_GETMINIMUMHITRADIUS: UINT = 0x2014;
pub const SPI_SETMINIMUMHITRADIUS: UINT = 0x2015;
pub const SPI_GETMESSAGEDURATION: UINT = 0x2016;
pub const SPI_SETMESSAGEDURATION: UINT = 0x2017;
//11264
// NOTE(review): the bare //NNNN markers appear to be line numbers into the
// corresponding winuser.h — confirm against the crate's conventions.
// Combo-box (CB_*) window messages.
pub const CB_GETEDITSEL: UINT = 0x0140;
pub const CB_LIMITTEXT: UINT = 0x0141;
pub const CB_SETEDITSEL: UINT = 0x0142;
pub const CB_ADDSTRING: UINT = 0x0143;
pub const CB_DELETESTRING: UINT = 0x0144;
pub const CB_DIR: UINT = 0x0145;
pub const CB_GETCOUNT: UINT = 0x0146;
pub const CB_GETCURSEL: UINT = 0x0147;
pub const CB_GETLBTEXT: UINT = 0x0148;
pub const CB_GETLBTEXTLEN: UINT = 0x0149;
pub const CB_INSERTSTRING: UINT = 0x014A;
pub const CB_RESETCONTENT: UINT = 0x014B;
pub const CB_FINDSTRING: UINT = 0x014C;
pub const CB_SELECTSTRING: UINT = 0x014D;
pub const CB_SETCURSEL: UINT = 0x014E;
pub const CB_SHOWDROPDOWN: UINT = 0x014F;
pub const CB_GETITEMDATA: UINT = 0x0150;
pub const CB_SETITEMDATA: UINT = 0x0151;
pub const CB_GETDROPPEDCONTROLRECT: UINT = 0x0152;
pub const CB_SETITEMHEIGHT: UINT = 0x0153;
pub const CB_GETITEMHEIGHT: UINT = 0x0154;
pub const CB_SETEXTENDEDUI: UINT = 0x0155;
pub const CB_GETEXTENDEDUI: UINT = 0x0156;
pub const CB_GETDROPPEDSTATE: UINT = 0x0157;
pub const CB_FINDSTRINGEXACT: UINT = 0x0158;
pub const CB_SETLOCALE: UINT = 0x0159;
pub const CB_GETLOCALE: UINT = 0x015A;
pub const CB_GETTOPINDEX: UINT = 0x015b;
pub const CB_SETTOPINDEX: UINT = 0x015c;
pub const CB_GETHORIZONTALEXTENT: UINT = 0x015d;
pub const CB_SETHORIZONTALEXTENT: UINT = 0x015e;
pub const CB_GETDROPPEDWIDTH: UINT = 0x015f;
pub const CB_SETDROPPEDWIDTH: UINT = 0x0160;
pub const CB_INITSTORAGE: UINT = 0x0161;
//12141
// Non-client metrics (ANSI / wide variants differ only in the LOGFONT type).
// Used with SPI_GETNONCLIENTMETRICS / SPI_SETNONCLIENTMETRICS.
STRUCT!{struct NONCLIENTMETRICSA {
    cbSize: UINT,
    iBorderWidth: c_int,
    iScrollWidth: c_int,
    iScrollHeight: c_int,
    iCaptionWidth: c_int,
    iCaptionHeight: c_int,
    lfCaptionFont: LOGFONTA,
    iSmCaptionWidth: c_int,
    iSmCaptionHeight: c_int,
    lfSmCaptionFont: LOGFONTA,
    iMenuWidth: c_int,
    iMenuHeight: c_int,
    lfMenuFont: LOGFONTA,
    lfStatusFont: LOGFONTA,
    lfMessageFont: LOGFONTA,
    iPaddedBorderWidth: c_int,
}}
pub type LPNONCLIENTMETRICSA = *mut NONCLIENTMETRICSA;
STRUCT!{struct NONCLIENTMETRICSW {
    cbSize: UINT,
    iBorderWidth: c_int,
    iScrollWidth: c_int,
    iScrollHeight: c_int,
    iCaptionWidth: c_int,
    iCaptionHeight: c_int,
    lfCaptionFont: LOGFONTW,
    iSmCaptionWidth: c_int,
    iSmCaptionHeight: c_int,
    lfSmCaptionFont: LOGFONTW,
    iMenuWidth: c_int,
    iMenuHeight: c_int,
    lfMenuFont: LOGFONTW,
    lfStatusFont: LOGFONTW,
    lfMessageFont: LOGFONTW,
    iPaddedBorderWidth: c_int,
}}
pub type LPNONCLIENTMETRICSW = *mut NONCLIENTMETRICSW;
//12869
// MonitorFromWindow/MonitorFromPoint fallback flags.
pub const MONITOR_DEFAULTTONULL: DWORD = 0x00000000;
pub const MONITOR_DEFAULTTOPRIMARY: DWORD = 0x00000001;
pub const MONITOR_DEFAULTTONEAREST: DWORD = 0x00000002;
//12900
pub const MONITORINFOF_PRIMARY: DWORD = 1;
pub const CCHDEVICENAME: usize = 32;
// Monitor info as returned by GetMonitorInfo; the EX variants append the
// device name (ANSI / wide).
STRUCT!{struct MONITORINFO {
    cbSize: DWORD,
    rcMonitor: RECT,
    rcWork: RECT,
    dwFlags: DWORD,
}}
pub type LPMONITORINFO = *mut MONITORINFO;
STRUCT!{struct MONITORINFOEXA {
    cbSize: DWORD,
    rcMonitor: RECT,
    rcWork: RECT,
    dwFlags: DWORD,
    szDevice: [CHAR; CCHDEVICENAME],
}}
pub type LPMONITORINFOEXA = *mut MONITORINFOEXA;
STRUCT!{struct MONITORINFOEXW {
    cbSize: DWORD,
    rcMonitor: RECT,
    rcWork: RECT,
    dwFlags: DWORD,
    szDevice: [WCHAR; CCHDEVICENAME],
}}
pub type LPMONITORINFOEXW = *mut MONITORINFOEXW;
//12971
// Callback type for EnumDisplayMonitors.
FN!{stdcall MONITORENUMPROC(
    HMONITOR,
    HDC,
    LPRECT,
    LPARAM,
) -> BOOL}
//14098
// Raw Input API definitions.
DECLARE_HANDLE!(HRAWINPUT, HRAWINPUT__);
// Extracts the input code (RIM_INPUT / RIM_INPUTSINK) from the wParam of a
// WM_INPUT message.
#[inline] pub fn GET_RAWINPUT_CODE_WPARAM(wParam: WPARAM) -> WPARAM { wParam & 0xff }
pub const RIM_INPUT: WPARAM = 0;
pub const RIM_INPUTSINK: WPARAM = 1;
STRUCT!{struct RAWINPUTHEADER {
    dwType: DWORD,
    dwSize: DWORD,
    hDevice: HANDLE,
    wParam: WPARAM,
}}
pub type PRAWINPUTHEADER = *mut RAWINPUTHEADER;
pub type LPRAWINPUTHEADER = *mut RAWINPUTHEADER;
// RAWINPUTHEADER.dwType discriminants.
pub const RIM_TYPEMOUSE: DWORD = 0;
pub const RIM_TYPEKEYBOARD: DWORD = 1;
pub const RIM_TYPEHID: DWORD = 2;
STRUCT!{struct RAWMOUSE {
    usFlags: USHORT,
    memory_padding: USHORT, // 16bit Padding for 32bit align in following union
    usButtonFlags: USHORT,
    usButtonData: USHORT,
    ulRawButtons: ULONG,
    lLastX: LONG,
    lLastY: LONG,
    ulExtraInformation: ULONG,
}}
pub type PRAWMOUSE = *mut RAWMOUSE;
pub type LPRAWMOUSE = *mut RAWMOUSE;
// RAWMOUSE.usButtonFlags values.
pub const RI_MOUSE_LEFT_BUTTON_DOWN: USHORT = 0x0001;
pub const RI_MOUSE_LEFT_BUTTON_UP: USHORT = 0x0002;
pub const RI_MOUSE_RIGHT_BUTTON_DOWN: USHORT = 0x0004;
pub const RI_MOUSE_RIGHT_BUTTON_UP: USHORT = 0x0008;
pub const RI_MOUSE_MIDDLE_BUTTON_DOWN: USHORT = 0x0010;
pub const RI_MOUSE_MIDDLE_BUTTON_UP: USHORT = 0x0020;
// Numbered-button aliases for the three named buttons above.
pub const RI_MOUSE_BUTTON_1_DOWN: USHORT = RI_MOUSE_LEFT_BUTTON_DOWN;
pub const RI_MOUSE_BUTTON_1_UP: USHORT = RI_MOUSE_LEFT_BUTTON_UP;
pub const RI_MOUSE_BUTTON_2_DOWN: USHORT = RI_MOUSE_RIGHT_BUTTON_DOWN;
pub const RI_MOUSE_BUTTON_2_UP: USHORT = RI_MOUSE_RIGHT_BUTTON_UP;
pub const RI_MOUSE_BUTTON_3_DOWN: USHORT = RI_MOUSE_MIDDLE_BUTTON_DOWN;
pub const RI_MOUSE_BUTTON_3_UP: USHORT = RI_MOUSE_MIDDLE_BUTTON_UP;
pub const RI_MOUSE_BUTTON_4_DOWN: USHORT = 0x0040;
pub const RI_MOUSE_BUTTON_4_UP: USHORT = 0x0080;
pub const RI_MOUSE_BUTTON_5_DOWN: USHORT = 0x0100;
pub const RI_MOUSE_BUTTON_5_UP: USHORT = 0x0200;
pub const RI_MOUSE_WHEEL: USHORT = 0x0400;
// RAWMOUSE.usFlags values.
pub const MOUSE_MOVE_RELATIVE: USHORT = 0;
pub const MOUSE_MOVE_ABSOLUTE: USHORT = 1;
pub const MOUSE_VIRTUAL_DESKTOP: USHORT = 0x02;
pub const MOUSE_ATTRIBUTES_CHANGED: USHORT = 0x04;
pub const MOUSE_MOVE_NOCOALESCE: USHORT = 0x08;
STRUCT!{struct RAWKEYBOARD {
    MakeCode: USHORT,
    Flags: USHORT,
    Reserved: USHORT,
    VKey: USHORT,
    Message: UINT,
    ExtraInformation: ULONG,
}}
pub type PRAWKEYBOARD = *mut RAWKEYBOARD;
pub type LPRAWKEYBOARD = *mut RAWKEYBOARD;
pub const KEYBOARD_OVERRUN_MAKE_CODE: DWORD = 0xFF;
// RAWKEYBOARD.Flags values.
pub const RI_KEY_MAKE: DWORD = 0;
pub const RI_KEY_BREAK: DWORD = 1;
pub const RI_KEY_E0: DWORD = 2;
pub const RI_KEY_E1: DWORD = 4;
pub const RI_KEY_TERMSRV_SET_LED: DWORD = 8;
pub const RI_KEY_TERMSRV_SHADOW: DWORD = 0x10;
// Variable-length HID report; bRawData is a zero-sized placeholder for the
// trailing dwSizeHid * dwCount bytes.
STRUCT!{struct RAWHID {
    dwSizeHid: DWORD,
    dwCount: DWORD,
    bRawData: [BYTE; 0],
}}
pub type PRAWHID = *mut RAWHID;
pub type LPRAWHID = *mut RAWHID;
// The `mouse` field stands in for the C union of RAWMOUSE/RAWKEYBOARD/RAWHID;
// the UNION! accessors below provide the other views.
STRUCT!{struct RAWINPUT {
    header: RAWINPUTHEADER,
    mouse: RAWMOUSE,
}}
// Union accessors over RAWINPUT's payload (all share the `mouse` storage).
UNION!(RAWINPUT, mouse, mouse, mouse_mut, RAWMOUSE);
UNION!(RAWINPUT, mouse, keyboard, keyboard_mut, RAWKEYBOARD);
UNION!(RAWINPUT, mouse, hid, hid_mut, RAWHID);
pub type PRAWINPUT = *mut RAWINPUT;
pub type LPRAWINPUT = *mut RAWINPUT;
// GetRawInputData command flags.
pub const RID_INPUT: DWORD = 0x10000003;
pub const RID_HEADER: DWORD = 0x10000005;
// GetRawInputDeviceInfo command flags.
pub const RIDI_PREPARSEDDATA: DWORD = 0x20000005;
pub const RIDI_DEVICENAME: DWORD = 0x20000007;
pub const RIDI_DEVICEINFO: DWORD = 0x2000000b;
STRUCT!{struct RID_DEVICE_INFO_MOUSE {
    dwId: DWORD,
    dwNumberOfButtons: DWORD,
    dwSampleRate: DWORD,
    fHasHorizontalWheel: BOOL,
}}
pub type PRID_DEVICE_INFO_MOUSE = *mut RID_DEVICE_INFO_MOUSE;
STRUCT!{struct RID_DEVICE_INFO_KEYBOARD {
    dwType: DWORD,
    dwSubType: DWORD,
    dwKeyboardMode: DWORD,
    dwNumberOfFunctionKeys: DWORD,
    dwNumberOfIndicators: DWORD,
    dwNumberOfKeysTotal: DWORD,
}}
pub type PRID_DEVICE_INFO_KEYBOARD = *mut RID_DEVICE_INFO_KEYBOARD;
STRUCT!{struct RID_DEVICE_INFO_HID {
    dwVendorId: DWORD,
    dwProductId: DWORD,
    dwVersionNumber: DWORD,
    usUsagePage: USHORT,
    usUsage: USHORT,
}}
pub type PRID_DEVICE_INFO_HID = *mut RID_DEVICE_INFO_HID;
// The `keyboard` field stands in for the C union of the three INFO structs
// above (keyboard is the largest variant); see the UNION! accessors below.
STRUCT!{struct RID_DEVICE_INFO {
    cbSize: DWORD,
    dwType: DWORD,
    keyboard: RID_DEVICE_INFO_KEYBOARD,
}}
UNION!(RID_DEVICE_INFO, keyboard, mouse, mouse_mut, RID_DEVICE_INFO_MOUSE);
UNION!(RID_DEVICE_INFO, keyboard, keyboard, keyboard_mut, RID_DEVICE_INFO_KEYBOARD);
UNION!(RID_DEVICE_INFO, keyboard, hid, hid_mut, RID_DEVICE_INFO_HID);
pub type PRID_DEVICE_INFO = *mut RID_DEVICE_INFO;
pub type LPRID_DEVICE_INFO = *mut RID_DEVICE_INFO;
// Registration record for RegisterRawInputDevices.
STRUCT!{struct RAWINPUTDEVICE {
    usUsagePage: USHORT,
    usUsage: USHORT,
    dwFlags: DWORD,
    hwndTarget: HWND,
}}
pub type PRAWINPUTDEVICE = *mut RAWINPUTDEVICE;
pub type LPRAWINPUTDEVICE = *mut RAWINPUTDEVICE;
pub type PCRAWINPUTDEVICE = *const RAWINPUTDEVICE;
// RAWINPUTDEVICE.dwFlags values. Note CAPTUREMOUSE and NOHOTKEYS deliberately
// share 0x200 in winuser.h (CAPTUREMOUSE is only meaningful with NOLEGACY).
pub const RIDEV_REMOVE: DWORD = 0x00000001;
pub const RIDEV_EXCLUDE: DWORD = 0x00000010;
pub const RIDEV_PAGEONLY: DWORD = 0x00000020;
pub const RIDEV_NOLEGACY: DWORD = 0x00000030;
pub const RIDEV_INPUTSINK: DWORD = 0x00000100;
pub const RIDEV_CAPTUREMOUSE: DWORD = 0x00000200;
pub const RIDEV_NOHOTKEYS: DWORD = 0x00000200;
pub const RIDEV_APPKEYS: DWORD = 0x00000400;
pub const RIDEV_EXINPUTSINK: DWORD = 0x00001000;
pub const RIDEV_DEVNOTIFY: DWORD = 0x00002000;
pub const RIDEV_EXMODEMASK: DWORD = 0x000000F0;
// WM_INPUT_DEVICE_CHANGE wParam values.
pub const GIDC_ARRIVAL: DWORD = 1;
pub const GIDC_REMOVAL: DWORD = 2;
STRUCT!{struct RAWINPUTDEVICELIST {
    hDevice: HANDLE,
    dwType: DWORD,
}}
pub type PRAWINPUTDEVICELIST = *mut RAWINPUTDEVICELIST;
STRUCT!{struct CHANGEFILTERSTRUCT {
    cbSize: DWORD,
    ExtStatus: DWORD,
}}
pub type PCHANGEFILTERSTRUCT = *mut CHANGEFILTERSTRUCT;
// MessageBoxIndirect parameter blocks (ANSI / wide string variants).
STRUCT!{struct MSGBOXPARAMSA {
    cbSize: UINT,
    hwndOwner: HWND,
    hInstance: HINSTANCE,
    lpszText: LPCSTR,
    lpszCaption: LPCSTR,
    dwStyle: DWORD,
    lpszIcon: LPCSTR,
    dwContextHelpId: DWORD_PTR,
    lpfnMsgBoxCallback: MSGBOXCALLBACK,
    dwLanguageId: DWORD,
}}
pub type PMSGBOXPARAMSA = *mut MSGBOXPARAMSA;
pub type LPMSGBOXPARAMSA = *mut MSGBOXPARAMSA;
STRUCT!{struct MSGBOXPARAMSW {
    cbSize: UINT,
    hwndOwner: HWND,
    hInstance: HINSTANCE,
    lpszText: LPCWSTR,
    lpszCaption: LPCWSTR,
    dwStyle: DWORD,
    lpszIcon: LPCWSTR,
    dwContextHelpId: DWORD_PTR,
    lpfnMsgBoxCallback: MSGBOXCALLBACK,
    dwLanguageId: DWORD,
}}
pub type PMSGBOXPARAMSW = *mut MSGBOXPARAMSW;
pub type LPMSGBOXPARAMSW = *mut MSGBOXPARAMSW;
// WM_HELP lParam payload.
STRUCT!{struct HELPINFO {
    cbSize: UINT,
    iContextType: c_int,
    iCtrlId: c_int,
    hItemHandle: HANDLE,
    dwContextId: DWORD,
    MousePos: POINT,
}}
pub type LPHELPINFO = *mut HELPINFO;
// SCROLLINFO.fMask flags.
pub const SIF_RANGE: UINT = 0x0001;
pub const SIF_PAGE: UINT = 0x0002;
pub const SIF_POS: UINT = 0x0004;
pub const SIF_DISABLENOSCROLL: UINT = 0x0008;
pub const SIF_TRACKPOS: UINT = 0x0010;
pub const SIF_ALL: UINT = SIF_RANGE | SIF_PAGE | SIF_POS | SIF_TRACKPOS;
// ScrollWindowEx flags.
pub const SW_SCROLLCHILDREN: UINT = 0x0001;
pub const SW_INVALIDATE: UINT = 0x0002;
pub const SW_ERASE: UINT = 0x0004;
pub const SW_SMOOTHSCROLL: UINT = 0x0010;
// LoadImage (LR_*) flags.
pub const LR_DEFAULTCOLOR: UINT = 0x00000000;
pub const LR_MONOCHROME: UINT = 0x00000001;
pub const LR_COLOR: UINT = 0x00000002;
pub const LR_COPYRETURNORG: UINT = 0x00000004;
pub const LR_COPYDELETEORG: UINT = 0x00000008;
pub const LR_LOADFROMFILE: UINT = 0x00000010;
pub const LR_LOADTRANSPARENT: UINT = 0x00000020;
pub const LR_DEFAULTSIZE: UINT = 0x00000040;
pub const LR_VGACOLOR: UINT = 0x00000080;
pub const LR_LOADMAP3DCOLORS: UINT = 0x00001000;
pub const LR_CREATEDIBSECTION: UINT = 0x00002000;
pub const LR_COPYFROMRESOURCE: UINT = 0x00004000;
pub const LR_SHARED: UINT = 0x00008000;
// LoadImage image types.
pub const IMAGE_BITMAP: UINT = 0;
pub const IMAGE_ICON: UINT = 1;
pub const IMAGE_CURSOR: UINT = 2;
pub const IMAGE_ENHMETAFILE: UINT = 3;
//RedrawWindow() flags
pub const RDW_INVALIDATE: UINT = 0x0001;
pub const RDW_INTERNALPAINT: UINT = 0x0002;
pub const RDW_ERASE: UINT = 0x0004;
pub const RDW_VALIDATE: UINT = 0x0008;
pub const RDW_NOINTERNALPAINT: UINT = 0x0010;
pub const RDW_NOERASE: UINT = 0x0020;
pub const RDW_NOCHILDREN: UINT = 0x0040;
pub const RDW_ALLCHILDREN: UINT = 0x0080;
pub const RDW_UPDATENOW: UINT = 0x0100;
pub const RDW_ERASENOW: UINT = 0x0200;
pub const RDW_FRAME: UINT = 0x0400;
pub const RDW_NOFRAME: UINT = 0x0800;
// if WINVER >= 0x0601
// GetSystemMetrics(SM_DIGITIZER) flag values
pub const NID_INTEGRATED_TOUCH: UINT = 0x00000001;
pub const NID_EXTERNAL_TOUCH: UINT = 0x00000002;
pub const NID_INTEGRATED_PEN: UINT = 0x00000004;
pub const NID_EXTERNAL_PEN: UINT = 0x00000008;
pub const NID_MULTI_INPUT: UINT = 0x00000040;
pub const NID_READY: UINT = 0x00000080;
// end if WINVER >= 0x0601
// System Menu Command Values (WM_SYSCOMMAND wParam)
pub const SC_SIZE: WPARAM = 0xF000;
pub const SC_MOVE: WPARAM = 0xF010;
pub const SC_MINIMIZE: WPARAM = 0xF020;
pub const SC_MAXIMIZE: WPARAM = 0xF030;
pub const SC_NEXTWINDOW: WPARAM = 0xF040;
pub const SC_PREVWINDOW: WPARAM = 0xF050;
pub const SC_CLOSE: WPARAM = 0xF060;
pub const SC_VSCROLL: WPARAM = 0xF070;
pub const SC_HSCROLL: WPARAM = 0xF080;
pub const SC_MOUSEMENU: WPARAM = 0xF090;
pub const SC_KEYMENU: WPARAM = 0xF100;
pub const SC_ARRANGE: WPARAM = 0xF110;
pub const SC_RESTORE: WPARAM = 0xF120;
pub const SC_TASKLIST: WPARAM = 0xF130;
pub const SC_SCREENSAVE: WPARAM = 0xF140;
pub const SC_HOTKEY: WPARAM = 0xF150;
// if WINVER >= 0x0400
pub const SC_DEFAULT: WPARAM = 0xF160;
pub const SC_MONITORPOWER: WPARAM = 0xF170;
pub const SC_CONTEXTHELP: WPARAM = 0xF180;
pub const SC_SEPARATOR: WPARAM = 0xF00F;
// endif WINVER >= 0x0400
// ANIMATIONINFO and related fields
STRUCT!{struct ANIMATIONINFO {
    cbSize: UINT,
    iMinAnimate: c_int,
}}
pub type LPANIMATIONINFO = *mut ANIMATIONINFO;
// SystemParametersInfo fWinIni update/broadcast flags.
pub const SPIF_UPDATEINIFILE: UINT = 0x0001;
pub const SPIF_SENDWININICHANGE: UINT = 0x0002;
pub const SPIF_SENDCHANGE: UINT = SPIF_SENDWININICHANGE;
// Raw user32 imports; callers are responsible for upholding the Win32
// contracts of these functions.
extern "system" {
    pub fn MessageBoxA(
        hWnd: HWND,
        lpText: LPCSTR,
        lpCaption: LPCSTR,
        uType: UINT,
    ) -> c_int;
    pub fn MessageBoxW(
        hWnd: HWND,
        lpText: LPCWSTR,
        lpCaption: LPCWSTR,
        uType: UINT,
    ) -> c_int;
    pub fn LoadIconA(
        hInstance: HINSTANCE,
        lpIconName: LPCSTR,
    ) -> HICON;
    pub fn LoadIconW(
        hInstance: HINSTANCE,
        lpIconName: LPCWSTR,
    ) -> HICON;
}
pub const SB_BOTH: UINT = 3; pub const SB_LINEUP: LPARAM = 0; pub const SB_LINELEFT: LPARAM = 0; pub const SB_LINEDOWN: LPARAM = 1;
<|file_name|>cornetto-add-counts.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python # -*- coding: utf-8 -*- """ add word counts to Cornetto lexical units database file The word count file should have three columns, delimited by white space, containing (1) the count, (2) the lemma, (3) the main POS tag. The tagset is assumed to be the Spoken Dutch Corpus tagset, and the character encoding must be ISO-8859-1. The counts appear as the value of the feature "count" on <form> elements. The updated lexical units xml database is written to standard output. Since we have only the lemma and the POS, and no word sense, the frequency information is added to each matching lexical unit regardless of its sense (i.e. the value of the "c_seq_nr" attribute). """ # TODO: # - deal with multiword counts __author__ = 'Erwin Marsi <e.marsi@gmail.com>' __version__ = '0.6' from sys import stderr, stdout from xml.etree.cElementTree import iterparse, SubElement, tostring, ElementTree from cornetto.argparse import ArgumentParser, RawDescriptionHelpFormatter def read_counts(file): if not hasattr(file, "read"): file = open(file) counts = {} totals = dict(noun=0, verb=0, adj=0, other=0) for l in file: try: count, form, tag = l.strip().split() except ValueError: stderr.write("Warning; ill-formed line: %s\n" % repr(l)) continue # translate CGN tagset to word category if tag in ("N", "VNW", "TW", "SPEC"): cat = "noun" elif tag in ("WW"): cat = "verb" elif tag in ("ADJ", "BW"): cat = "adj" else: # LET LID TSW VG VZ cat = "other" # Cornetto word forms are stored in unicode form = form.decode("iso-8859-1") count = int(count) if form not in counts: counts[form] = dict(noun=0, verb=0, adj=0, other=0) counts[form][cat] += count totals[cat] += count return counts, totals def add_count_attrib(counts, totals, cdb_lu_file): parser = iterparse(cdb_lu_file) for event, elem in parser: if elem.tag == "form": # following the ElementTree conventions, # word form will be ascii or unicode form = 
elem.get("form-spelling") # lower case because Cornette is not consistent cat = elem.get("form-cat").lower() # fix category flaws in current release of Cornetto if cat == "adjective": cat = "adj" elif cat == "adverb": cat = "other" <|fim▁hole|> try: count = counts[form][cat] except KeyError: # form not found count = 0 elem.set("count", str(count)) # Finally, add totals, per category and overall, to the doc root # Note that all words _not_ in Cornetto are not included in these totals totals["all"] = sum(totals.values()) for cat, count in totals.items(): parser.root.set("count-total-%s" % cat, str(count)) return ElementTree(parser.root) parser = ArgumentParser(description=__doc__, version="%(prog)s version " + __version__, formatter_class=RawDescriptionHelpFormatter) parser.add_argument("cdb_lu", type=file, help="xml file containing the lexical units") parser.add_argument("word_counts", type=file, help="tabular file containing the word counts") args = parser.parse_args() counts, totals = read_counts(args.word_counts) etree = add_count_attrib(counts, totals, args.cdb_lu) etree.write(stdout, encoding="utf-8") #def add_statistics_elem(counts, cdb_lu_file): #""" #adds a separate <statistics> element, #which accomodates for other counts for other sources #""" #parser = iterparse(cdb_lu_file) #for event, elem in parser: #if elem.tag == "cdb_lu": #try: #count = counts[form][cat] #except KeyError: #count = 0 #freq_el = SubElement(elem, "statistics") #SubElement(freq_el, "count", scr="uvt").text = str(count) #elif elem.tag == "form": ## following the ElementTree conventions, ## word form will be ascii or unicode #form = elem.get("form-spelling") #cat = elem.get("form-cat") #return ElementTree(parser.root)<|fim▁end|>
<|file_name|>Customer.py<|end_file_name|><|fim▁begin|>class Customer: def __init__(self, firstname, lastname, country, address, postcode, city, email, phone, password): self.firstname = firstname self.lastname = lastname<|fim▁hole|> self.city= city self.email = email self.phone = phone self.password = password def __repr__(self): return "%s:%s:%s" % (self.email, self.firstname, self.lastname)<|fim▁end|>
self.country = country self.address = address self.postcode = postcode
<|file_name|>api_report.go<|end_file_name|><|fim▁begin|>package app import ( "net/http" "time" "golang.org/x/net/context" "github.com/weaveworks/scope/probe/host" "github.com/weaveworks/scope/report" ) // Raw report handler func makeRawReportHandler(rep Reporter) CtxHandlerFunc { return func(ctx context.Context, w http.ResponseWriter, r *http.Request) { report, err := rep.Report(ctx, time.Now()) if err != nil { respondWith(w, http.StatusInternalServerError, err) return } respondWith(w, http.StatusOK, report) } } type probeDesc struct { ID string `json:"id"` Hostname string `json:"hostname"` Version string `json:"version"` LastSeen time.Time `json:"lastSeen"` } // Probe handler func makeProbeHandler(rep Reporter) CtxHandlerFunc { return func(ctx context.Context, w http.ResponseWriter, r *http.Request) { r.ParseForm() if _, sparse := r.Form["sparse"]; sparse { // if we have reports, we must have connected probes hasProbes, err := rep.HasReports(ctx, time.Now()) if err != nil { respondWith(w, http.StatusInternalServerError, err) } respondWith(w, http.StatusOK, hasProbes) return }<|fim▁hole|> return } result := []probeDesc{} for _, n := range rpt.Host.Nodes { id, _ := n.Latest.Lookup(report.ControlProbeID) hostname, _ := n.Latest.Lookup(host.HostName) version, dt, _ := n.Latest.LookupEntry(host.ScopeVersion) result = append(result, probeDesc{ ID: id, Hostname: hostname, Version: version, LastSeen: dt, }) } respondWith(w, http.StatusOK, result) } }<|fim▁end|>
rpt, err := rep.Report(ctx, time.Now()) if err != nil { respondWith(w, http.StatusInternalServerError, err)
<|file_name|>device_ds.cpp<|end_file_name|><|fim▁begin|>#include <algorithm> #include <sstream> #include <math.h> #include "device_ds.h" #include "device_ds_stream.h" #include "device_ds_buffer.h" #include "debug.h" #include "utility.h" namespace audiere { static const int DEFAULT_BUFFER_LENGTH = 1000; // one second DSAudioDevice* DSAudioDevice::create(const ParameterList& parameters) { ADR_GUARD("DSAudioDevice::create"); // parse parameters int stream_buffer_length = parameters.getInt("buffer", 0); if (stream_buffer_length <= 0) { stream_buffer_length = DEFAULT_BUFFER_LENGTH;<|fim▁hole|> min_buffer_length = std::max(1, min_buffer_length); bool global_focus = parameters.getBoolean("global", true); // initialize COM HRESULT rv = CoInitialize(NULL); if (FAILED(rv)) { return 0; } ADR_LOG("COM initialized properly"); // register anonymous window class // don't worry about failure, if it fails, the window creation will fail WNDCLASSA wc; wc.style = 0; wc.lpfnWndProc = DefWindowProc; wc.cbClsExtra = 0; wc.cbWndExtra = 0; wc.hInstance = GetModuleHandle(NULL); wc.hIcon = NULL; wc.hCursor = NULL; wc.hbrBackground = NULL; wc.lpszMenuName = NULL; wc.lpszClassName = "AudiereHiddenWindow"; RegisterClassA(&wc); // create anonymous window HWND anonymous_window = CreateWindowA( "AudiereHiddenWindow", "", WS_POPUP, 0, 0, 0, 0, NULL, NULL, GetModuleHandle(NULL), NULL); if (!anonymous_window) { return NULL; } ADR_LOG("Anonymous window created successfully"); // create the DirectSound object IDirectSound* direct_sound; rv = CoCreateInstance( CLSID_DirectSound, NULL, CLSCTX_INPROC_SERVER, IID_IDirectSound, (void**)&direct_sound); if (FAILED(rv) || !direct_sound) { DestroyWindow(anonymous_window); return 0; } ADR_LOG("Created DS object"); LPGUID guid = NULL; GUID stack_guid; // so we can point 'guid' to an object that won't be destroyed std::string guid_string = parameters.getValue("device_guid", ""); if (!guid_string.empty()) { if (UuidFromStringA((unsigned char*)guid_string.c_str(), 
&stack_guid) == RPC_S_OK) { guid = &stack_guid; } } // initialize the DirectSound device rv = direct_sound->Initialize(guid); if (FAILED(rv)) { DestroyWindow(anonymous_window); direct_sound->Release(); return 0; } ADR_LOG("Initialized DS object"); // set the cooperative level rv = direct_sound->SetCooperativeLevel(anonymous_window, DSSCL_NORMAL); if (FAILED(rv)) { DestroyWindow(anonymous_window); direct_sound->Release(); return 0; } ADR_LOG("Set cooperative level"); return new DSAudioDevice( global_focus, stream_buffer_length, min_buffer_length, anonymous_window, direct_sound); } DSAudioDevice::DSAudioDevice( bool global_focus, int stream_buffer_length, int min_buffer_length, HWND anonymous_window, IDirectSound* direct_sound) { m_global_focus = global_focus; m_buffer_length = stream_buffer_length; m_min_buffer_length = min_buffer_length; m_anonymous_window = anonymous_window; m_direct_sound = direct_sound; } DSAudioDevice::~DSAudioDevice() { ADR_ASSERT(m_open_streams.empty(), "DirectSound device should not die with open streams"); ADR_ASSERT(m_open_buffers.empty(), "DirectSound device should not die with open buffers"); // shut down DirectSound if (m_direct_sound) { m_direct_sound->Release(); m_direct_sound = NULL; } // if the anonymous window is open, close it if (m_anonymous_window) { DestroyWindow(m_anonymous_window); m_anonymous_window = NULL; } CoUninitialize(); } void DSAudioDevice::update() { ADR_GUARD("DSAudioDevice::update"); { /* Put the critical section in its own scope so we don't hold the lock while sleeping. 
--MattC */ SYNCHRONIZED(this); // enumerate all open streams StreamList::iterator i = m_open_streams.begin(); while (i != m_open_streams.end()) { DSOutputStream* s = *i++; s->update(); } // enumerate all open buffers BufferList::iterator j = m_open_buffers.begin(); while (j != m_open_buffers.end()) { DSOutputBuffer* b = *j++; b->update(); } } Sleep(50); } OutputStream* DSAudioDevice::openStream(SampleSource* source) { if (!source) { return 0; } ADR_GUARD("DSAudioDevice::openStream"); int channel_count, sample_rate; SampleFormat sample_format; source->getFormat(channel_count, sample_rate, sample_format); const int frame_size = channel_count * GetSampleSize(sample_format); // calculate an ideal buffer size const int buffer_length = sample_rate * m_buffer_length / 1000; // define the wave format WAVEFORMATEX wfx; memset(&wfx, 0, sizeof(wfx)); wfx.wFormatTag = WAVE_FORMAT_PCM; wfx.nChannels = channel_count; wfx.nSamplesPerSec = sample_rate; wfx.nAvgBytesPerSec = sample_rate * frame_size; wfx.nBlockAlign = frame_size; wfx.wBitsPerSample = GetSampleSize(sample_format) * 8; wfx.cbSize = sizeof(wfx); DSBUFFERDESC dsbd; memset(&dsbd, 0, sizeof(dsbd)); dsbd.dwSize = sizeof(dsbd); dsbd.dwFlags = DSBCAPS_GETCURRENTPOSITION2 | DSBCAPS_CTRLPAN | DSBCAPS_CTRLVOLUME | DSBCAPS_CTRLFREQUENCY; if (m_global_focus) { dsbd.dwFlags |= DSBCAPS_GLOBALFOCUS; } dsbd.dwBufferBytes = frame_size * buffer_length; dsbd.lpwfxFormat = &wfx; // create the DirectSound buffer IDirectSoundBuffer* buffer; HRESULT result = m_direct_sound->CreateSoundBuffer(&dsbd, &buffer, NULL); if (FAILED(result) || !buffer) { return 0; } ADR_LOG("CreateSoundBuffer succeeded"); // now create the output stream DSOutputStream* stream = new DSOutputStream( this, buffer, buffer_length, source); // add it the list of streams and return SYNCHRONIZED(this); m_open_streams.push_back(stream); return stream; } OutputStream* DSAudioDevice::openBuffer( void* samples, int frame_count, int channel_count, int sample_rate, SampleFormat 
sample_format) { ADR_GUARD("DSAudioDevice::openBuffer"); const int frame_size = channel_count * GetSampleSize(sample_format); WAVEFORMATEX wfx; memset(&wfx, 0, sizeof(wfx)); wfx.wFormatTag = WAVE_FORMAT_PCM; wfx.nChannels = channel_count; wfx.nSamplesPerSec = sample_rate; wfx.nAvgBytesPerSec = sample_rate * frame_size; wfx.nBlockAlign = frame_size; wfx.wBitsPerSample = GetSampleSize(sample_format) * 8; wfx.cbSize = sizeof(wfx); DSBUFFERDESC dsbd; memset(&dsbd, 0, sizeof(dsbd)); dsbd.dwSize = sizeof(dsbd); dsbd.dwFlags = DSBCAPS_GETCURRENTPOSITION2 | DSBCAPS_CTRLPAN | DSBCAPS_CTRLVOLUME | DSBCAPS_CTRLFREQUENCY | DSBCAPS_STATIC | DSBCAPS_CTRLPOSITIONNOTIFY; if (m_global_focus) { dsbd.dwFlags |= DSBCAPS_GLOBALFOCUS; } const int buffer_frame_count = std::max(m_min_buffer_length, frame_count); const int buffer_size = buffer_frame_count * frame_size; dsbd.dwBufferBytes = buffer_size; dsbd.lpwfxFormat = &wfx; // create the DS buffer IDirectSoundBuffer* buffer; HRESULT result = m_direct_sound->CreateSoundBuffer( &dsbd, &buffer, NULL); if (FAILED(result) || !buffer) { return 0; } ADR_IF_DEBUG { DSBCAPS caps; caps.dwSize = sizeof(caps); result = buffer->GetCaps(&caps); if (FAILED(result)) { buffer->Release(); return 0; } else { std::ostringstream ss; ss << "actual buffer size: " << caps.dwBufferBytes << std::endl << "buffer_size: " << buffer_size; ADR_LOG(ss.str().c_str()); } } void* data; DWORD data_size; result = buffer->Lock(0, buffer_size, &data, &data_size, 0, 0, 0); if (FAILED(result)) { buffer->Release(); return 0; } ADR_IF_DEBUG { std::ostringstream ss; ss << "buffer size: " << buffer_size << std::endl << "data size: " << data_size << std::endl << "frame count: " << frame_count; ADR_LOG(ss.str().c_str()); } const int actual_size = frame_count * frame_size; memcpy(data, samples, actual_size); memset((u8*)data + actual_size, 0, buffer_size - actual_size); buffer->Unlock(data, data_size, 0, 0); DSOutputBuffer* b = new DSOutputBuffer( this, buffer, buffer_frame_count, 
frame_size); SYNCHRONIZED(this); m_open_buffers.push_back(b); return b; } const char* ADR_CALL DSAudioDevice::getName() { return "directsound"; } void DSAudioDevice::removeStream(DSOutputStream* stream) { SYNCHRONIZED(this); m_open_streams.remove(stream); } void DSAudioDevice::removeBuffer(DSOutputBuffer* buffer) { SYNCHRONIZED(this); m_open_buffers.remove(buffer); } int DSAudioDevice::Volume_AudiereToDirectSound(float volume) { if (volume == 0) { return -10000; } else { double attenuate = 1000 * log(1 / volume); return int(-attenuate); } } int DSAudioDevice::Pan_AudiereToDirectSound(float pan) { if (pan < 0) { return -Pan_AudiereToDirectSound(-pan); } else { return -Volume_AudiereToDirectSound(1 - pan); } } }<|fim▁end|>
} int min_buffer_length = parameters.getInt("min_buffer_size", 0);
<|file_name|>main.rs<|end_file_name|><|fim▁begin|>use redox::Box; use redox::{cmp, env}; use redox::collections::BTreeMap; use redox::fs::{self, File}; use redox::io::{Read, Seek, SeekFrom}; use redox::time::{self, Duration}; use redox::vec::Vec; use redox::string::{String, ToString}; use orbital::{event, BmpFile, Color, EventOption, MouseEvent, Window}; struct FileType { description: &'static str, icon: BmpFile, } impl FileType { fn new(desc: &'static str, icon: &str) -> FileType { FileType { description: desc, icon: load_icon(icon) } } } struct FileTypesInfo { file_types: BTreeMap<&'static str, FileType>, } impl FileTypesInfo { pub fn new () -> FileTypesInfo { let mut file_types = BTreeMap::<&'static str, FileType>::new(); file_types.insert("/", FileType::new("Folder", "inode-directory")); file_types.insert("wav", FileType::new("WAV audio", "audio-x-wav")); file_types.insert("bin", FileType::new("Executable", "application-x-executable")); file_types.insert("bmp", FileType::new("Bitmap Image", "image-x-generic")); file_types.insert("rs", FileType::new("Rust source code", "text-x-makefile")); file_types.insert("crate", FileType::new("Rust crate", "application-x-archive")); file_types.insert("rlib", FileType::new("Static Rust library", "application-x-object")); file_types.insert("asm", FileType::new("Assembly source", "text-x-makefile")); file_types.insert("list", FileType::new("Disassembly source", "text-x-makefile")); file_types.insert("c", FileType::new("C source code", "text-x-csrc")); file_types.insert("cpp", FileType::new("C++ source code", "text-x-c++src")); file_types.insert("h", FileType::new("C header", "text-x-chdr")); file_types.insert("sh", FileType::new("Shell script", "text-x-script")); file_types.insert("lua", FileType::new("Lua script", "text-x-script")); file_types.insert("txt", FileType::new("Plain text document", "text-x-generic")); file_types.insert("md", FileType::new("Markdown document", "text-x-generic")); file_types.insert("toml", 
FileType::new("TOML document", "text-x-generic")); file_types.insert("json", FileType::new("JSON document", "text-x-generic")); file_types.insert("REDOX", FileType::new("Redox package", "text-x-generic")); file_types.insert("", FileType::new("Unknown file", "unknown")); FileTypesInfo { file_types: file_types } } pub fn description_for(&self, file_name: &str) -> String { if file_name.ends_with('/') { self.file_types["/"].description.to_string() } else { let pos = file_name.rfind('.').unwrap_or(0) + 1; let ext = &file_name[pos..]; if self.file_types.contains_key(ext) { self.file_types[ext].description.to_string() } else { self.file_types[""].description.to_string() } } } pub fn icon_for(&self, file_name: &str) -> &BmpFile { if file_name.ends_with('/') { &self.file_types["/"].icon } else { let pos = file_name.rfind('.').unwrap_or(0) + 1; let ext = &file_name[pos..]; if self.file_types.contains_key(ext) { &self.file_types[ext].icon } else { &self.file_types[""].icon } } } } enum FileManagerCommand { ChangeDir(String), Execute(String), Redraw, Quit, } pub struct FileManager { file_types_info: FileTypesInfo, files: Vec<String>, file_sizes: Vec<String>, selected: isize, last_mouse_event: MouseEvent, click_time: Duration, window: Box<Window>, } fn load_icon(path: &str) -> BmpFile { let mut vec: Vec<u8> = Vec::new(); if let Some(mut file) = File::open(&("file:///ui/mimetypes/".to_string() + path + ".bmp")) { file.read_to_end(&mut vec); } BmpFile::from_data(&vec) } impl FileManager { pub fn new() -> Self { FileManager { file_types_info: FileTypesInfo::new(), files: Vec::new(), file_sizes: Vec::new(), selected: -1, last_mouse_event: MouseEvent { x: 0, y: 0, left_button: false, middle_button: false, right_button: false, }, click_time: Duration::new(0, 0), window: Window::new(-1,-1,0,0,"").unwrap(), } } fn draw_content(&mut self) { self.window.set(Color::WHITE); let mut i = 0; let mut row = 0; let column = { let mut tmp = [0, 0]; for string in self.files.iter() { if tmp[0] < 
string.len() { tmp[0] = string.len(); } } tmp[0] += 1; for file_size in self.file_sizes.iter() { if tmp[1] < file_size.len() { tmp[1] = file_size.len(); } } tmp[1] += tmp[0] + 1; tmp }; for (file_name, file_size) in self.files.iter().zip(self.file_sizes.iter()) { if i == self.selected { let width = self.window.width(); self.window.rect(0, 32 * row as isize, width, 32, Color::rgba(224, 224, 224, 255)); } let icon = self.file_types_info.icon_for(&file_name); self.window.image(0, 32 * row as isize, icon.width(), icon.height(), icon.as_slice()); let mut col = 0; for c in file_name.chars() { if c == '\n' { col = 0; row += 1; } else if c == '\t' { col += 8 - col % 8; } else { if col < self.window.width() / 8 && row < self.window.height() / 32 { self.window.char(8 * col as isize + 40, 32 * row as isize + 8, c, Color::BLACK); col += 1; } } if col >= self.window.width() / 8 { col = 0; row += 1; } } col = column[0]; for c in file_size.chars() { if c == '\n' { col = 0; row += 1; } else if c == '\t' { col += 8 - col % 8; } else { if col < self.window.width() / 8 && row < self.window.height() / 32 { self.window.char(8 * col as isize + 40, 32 * row as isize + 8, c, Color::BLACK); col += 1; } } if col >= self.window.width() / 8 { col = 0; row += 1; } } col = column[1]; let description = self.file_types_info.description_for(&file_name); for c in description.chars() { if c == '\n' { col = 0; row += 1; } else if c == '\t' { col += 8 - col % 8; } else { if col < self.window.width() / 8 && row < self.window.height() / 32 { self.window.char(8 * col as isize + 40, 32 * row as isize + 8, c, Color::BLACK); col += 1; } } if col >= self.window.width() / 8 { col = 0; row += 1; } } row += 1; i += 1; } self.window.sync(); } fn set_path(&mut self, path: &str) { let mut width = [48, 48, 48]; let mut height = 0; if let Some(readdir) = fs::read_dir(path) { self.files.clear(); for entry in readdir { self.files.push(entry.path().to_string()); self.file_sizes.push( // When the entry is a folder if 
entry.path().ends_with('/') { let count = match fs::read_dir(&(path.to_string() + entry.path())) { Some(entry_readdir) => entry_readdir.count(), None => 0 }; if count == 1 { "1 entry".to_string() } else { format!("{} entries", count) } } else { match File::open(&(path.to_string() + entry.path())) { Some(mut file) => match file.seek(SeekFrom::End(0)) { Some(size) => { if size >= 1_000_000_000 { format!("{:.1} GB", (size as f64)/1_000_000_000.0) } else if size >= 1_000_000 { format!("{:.1} MB", (size as f64)/1_000_000.0) } else if size >= 1_000 { format!("{:.1} KB", (size as f64)/1_000.0) } else { format!("{:.1} bytes", size) } } None => "Failed to seek".to_string() }, None => "Failed to open".to_string() } } ); // Unwrapping the last file size will not panic since it has // been at least pushed once in the vector let description = self.file_types_info.description_for(entry.path()); width[0] = cmp::max(width[0], 48 + (entry.path().len()) * 8); width[1] = cmp::max(width[1], 8 + (self.file_sizes.last().unwrap().len()) * 8); width[2] = cmp::max(width[2], 8 + (description.len()) * 8); } if height < self.files.len() * 32 { height = self.files.len() * 32; } } // TODO: HACK ALERT - should use resize whenver that gets added self.window.sync_path(); self.window = Window::new(self.window.x(), self.window.y(), width.iter().sum(), height, &path).unwrap(); self.draw_content(); } fn event_loop(&mut self) -> Option<FileManagerCommand> { let mut redraw = false; let mut command = None; if let Some(event) = self.window.poll() { match event.to_option() { EventOption::Key(key_event) => { if key_event.pressed { match key_event.scancode { event::K_ESC => return Some(FileManagerCommand::Quit), event::K_HOME => self.selected = 0, event::K_UP => if self.selected > 0 { self.selected -= 1; redraw = true; }, event::K_END => self.selected = self.files.len() as isize - 1, event::K_DOWN => if self.selected < self.files.len() as isize - 1 { self.selected += 1; redraw = true; }, _ => match 
key_event.character { '\0' => (), '\n' => { if self.selected >= 0 && self.selected < self.files.len() as isize { match self.files.get(self.selected as usize) { Some(file) => { if file.ends_with('/') { command = Some(FileManagerCommand::ChangeDir(file.clone())); } else { command = Some(FileManagerCommand::Execute(file.clone())); } }, None => (), } } } _ => { let mut i = 0; for file in self.files.iter() { if file.starts_with(key_event.character) { self.selected = i; break; } i += 1; } } }, } if command.is_none() && redraw { command = Some(FileManagerCommand::Redraw); } } } EventOption::Mouse(mouse_event) => { redraw = true; let mut i = 0; let mut row = 0; for file in self.files.iter() { let mut col = 0; for c in file.chars() { if mouse_event.y >= 32 * row as isize && mouse_event.y < 32 * row as isize + 32 { self.selected = i; } if c == '\n' { col = 0; row += 1; } else if c == '\t' { col += 8 - col % 8; } else { if col < self.window.width() / 8 && row < self.window.height() / 32 { col += 1; } } if col >= self.window.width() / 8 { col = 0; row += 1; } } row += 1; i += 1; } //Check for double click if mouse_event.left_button { let click_time = Duration::realtime(); if click_time - self.click_time < Duration::new(0, 500 * time::NANOS_PER_MILLI) && self.last_mouse_event.x == mouse_event.x && self.last_mouse_event.y == mouse_event.y { if self.selected >= 0 && self.selected < self.files.len() as isize { if let Some(file) = self.files.get(self.selected as usize) { if file.ends_with('/') { command = Some(FileManagerCommand::ChangeDir(file.clone()));<|fim▁hole|> } } self.click_time = Duration::new(0, 0); } else { self.click_time = click_time; } } self.last_mouse_event = mouse_event; if command.is_none() && redraw { command = Some(FileManagerCommand::Redraw); } } EventOption::Quit(quit_event) => command = Some(FileManagerCommand::Quit), _ => (), } } command } fn main(&mut self, path: &str) { let mut current_path = path.to_string(); self.set_path(path); loop { if let Some(event) 
= self.event_loop() { match event { FileManagerCommand::ChangeDir(dir) => { current_path = current_path + &dir; self.set_path(&current_path); }, FileManagerCommand::Execute(cmd) => { //TODO: What is the best way to request a launch? File::open(&("orbital://launch/".to_string() + &current_path + &cmd)); } , FileManagerCommand::Redraw => (), FileManagerCommand::Quit => break, }; self.draw_content(); } } } } pub fn main() { match env::args().get(1) { Some(arg) => FileManager::new().main(arg), None => FileManager::new().main("file:/"), } }<|fim▁end|>
} else { command = Some(FileManagerCommand::Execute(file.clone())); }
<|file_name|>Thumbnail.py<|end_file_name|><|fim▁begin|>from asposeslides import Settings from com.aspose.slides import Presentation from com.aspose.slides import SaveFormat from javax import ImageIO from java.io import File class Thumbnail: def __init__(self): # Generating a Thumbnail from a Slide self.create_thumbnail() # Generating a Thumbnail from a Slide with User Defined Dimensions self.create_thumbnail_custom_size() # Generating a Thumbnail from a Slide in Notes Slides View self.create_thumbnail_in_notes_slides_view() # Generating a Thumbnail of User Defined Window from a Slide self.create_thumbnail_of_user_defined_window() def create_thumbnail(dataDir): dataDir = Settings.dataDir + 'WorkingWithSlidesInPresentation/Thumbnail/' # Instantiate Presentation class that represents the presentation file pres = Presentation(dataDir + 'demo.pptx') # Access the first slide slide = pres.getSlides().get_Item(0) # Create a full scale image image = slide.getThumbnail() # Save the image to disk in JPEG format imageIO = ImageIO imageIO.write(image, "jpeg", File(dataDir + "ContentBG_tnail.jpg")) print "Created thumbnail, please check the output file." . PHP_EOL def create_thumbnail_custom_size(dataDir): # Instantiate Presentation class that represents the presentation file pres = Presentation(dataDir + 'demo.pptx') # Access the first slide slide = pres.getSlides().get_Item(0) # User defined dimension desired_x = 1200 desired_y = 800 # Getting scaled value of X and Y scale_x = (1.0 / java_values(pres.getSlideSize().getSize().getWidth())) * desired_x scale_y = (1.0 / java_values(pres.getSlideSize().getSize().getHeight())) * desired_y # Create a full scale image image = slide.getThumbnail(scale_x, scale_y) # Save the image to disk in JPEG format <|fim▁hole|> imageIO = ImageIO() imageIO.write(image, "jpeg", File(dataDir + "ContentBG_tnail.jpg")) print "Created thumbnail with custom size, please check the output file.". 
PHP_EOL def create_thumbnail_in_notes_slides_view(dataDir): # Instantiate Presentation class that represents the presentation file pres = Presentation(dataDir + 'demo.pptx') # Access the first slide slide = pres.getSlides().get_Item(0) # User defined dimension desired_x = 1200 desired_y = 800 # Getting scaled value of X and Y scale_x = (1.0 / java_values(pres.getSlideSize().getSize().getWidth())) * desired_x scale_y = (1.0 / java_values(pres.getSlideSize().getSize().getHeight())) * desired_y # Create a full scale image image = slide.getNotesSlide().getThumbnail(scale_x, scale_y) # Save the image to disk in JPEG format imageIO = ImageIO() imageIO.write(image, "jpeg", File(dataDir + "ContentBG_tnail.jpg")) print "Created thumbnail in notes slides view, please check the output file." . PHP_EOL def create_thumbnail_of_user_defined_window(dataDir): # Instantiate Presentation class that represents the presentation file pres = Presentation(dataDir + 'demo.pptx') # Access the first slide slide = pres.getSlides().get_Item(0) # Create a full scale image image = slide.getThumbnail(1,1) # Getting the image of desired window inside generated slide thumnbnail # BufferedImage window = image.getSubimage(windowX, windowY, windowsWidth, windowHeight) window_image = image.getSubimage(100, 100, 200, 200) # Save the image to disk in JPEG format imageIO = ImageIO() imageIO.write(image, "jpeg", File(dataDir + "ContentBG_tnail.jpg")) print "Created thumbnail of user defined window, please check the output file." . PHP_EOL if __name__ == '__main__': Thumbnail()<|fim▁end|>
<|file_name|>css_section.rs<|end_file_name|><|fim▁begin|>// This file was generated by gir (5c017c9) from gir-files (71d73f0) // DO NOT EDIT use CssSectionType; use ffi; use glib::translate::*; glib_wrapper! { pub struct CssSection(Shared<ffi::GtkCssSection>); match fn { ref => |ptr| ffi::gtk_css_section_ref(ptr), unref => |ptr| ffi::gtk_css_section_unref(ptr), } } impl CssSection { pub fn get_end_line(&self) -> u32 { unsafe { ffi::gtk_css_section_get_end_line(self.to_glib_none().0) } } pub fn get_end_position(&self) -> u32 {<|fim▁hole|> ffi::gtk_css_section_get_end_position(self.to_glib_none().0) } } //pub fn get_file(&self) -> /*Ignored*/Option<gio::File> { // unsafe { TODO: call ffi::gtk_css_section_get_file() } //} pub fn get_parent(&self) -> Option<CssSection> { unsafe { from_glib_none(ffi::gtk_css_section_get_parent(self.to_glib_none().0)) } } pub fn get_section_type(&self) -> CssSectionType { unsafe { from_glib(ffi::gtk_css_section_get_section_type(self.to_glib_none().0)) } } pub fn get_start_line(&self) -> u32 { unsafe { ffi::gtk_css_section_get_start_line(self.to_glib_none().0) } } pub fn get_start_position(&self) -> u32 { unsafe { ffi::gtk_css_section_get_start_position(self.to_glib_none().0) } } }<|fim▁end|>
unsafe {
<|file_name|>Projects.js<|end_file_name|><|fim▁begin|>define([ 'jquery', 'underscore', 'backbone', 'app' ], function ( $, _, Backbone, app ) { var Models = {}, Collections = {}, Views = {}; Models.Project = Backbone.Model.extend(); Collections.Projects = Backbone.Model.extend({ model: Models.Project, url: function() { return app.api('projects/' + this.get('platform') + (this.get('uri') ? '/' + this.get('uri') : '')); }, parse: function(res) { return { projects: res }; } }); Models.Project = Backbone.Model.extend({ url: function() { return app.api('projects?' + this.get('params')); } });<|fim▁hole|> Collections: Collections, Views: Views }; });<|fim▁end|>
return { Models: Models,
<|file_name|>copy.rs<|end_file_name|><|fim▁begin|>/* * Copyright (C) 2012 The Android Open Source Project * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software<|fim▁hole|> * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ #pragma version(1) #pragma rs java_package_name(com.android.rs.image2) void root(const uchar4 *v_in, uchar4 *v_out) { *v_out = *v_in; }<|fim▁end|>
<|file_name|>appdirs.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*- # # Picard, the next-generation MusicBrainz tagger # # Copyright (C) 2021 Philipp Wolfer # # This program is free software; you can redistribute it and/or # modify it under the terms of the GNU General Public License # as published by the Free Software Foundation; either version 2 # of the License, or (at your option) any later version. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with this program; if not, write to the Free Software # Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA. import os import os.path <|fim▁hole|> QCoreApplication, QStandardPaths, ) from picard import ( PICARD_APP_NAME, PICARD_ORG_NAME, ) # Ensure the application is properly configured for the paths to work QCoreApplication.setApplicationName(PICARD_APP_NAME) QCoreApplication.setOrganizationName(PICARD_ORG_NAME) def config_folder(): return os.path.normpath(os.environ.get('PICARD_CONFIG_DIR', QStandardPaths.writableLocation(QStandardPaths.AppConfigLocation))) def cache_folder(): return os.path.normpath(os.environ.get('PICARD_CACHE_DIR', QStandardPaths.writableLocation(QStandardPaths.CacheLocation))) def plugin_folder(): # FIXME: This really should be in QStandardPaths.AppDataLocation instead, # but this is a breaking change that requires data migration return os.path.normpath(os.environ.get('PICARD_PLUGIN_DIR', os.path.join(config_folder(), 'plugins')))<|fim▁end|>
from PyQt5.QtCore import (
<|file_name|>test_rule.py<|end_file_name|><|fim▁begin|>from __future__ import absolute_import import os import sys import shutil import unittest import xml.dom.minidom parentdir = os.path.dirname(os.path.dirname(os.path.abspath(__file__))) sys.path.insert(0, parentdir) from pcs_test_functions import pcs,ac import rule empty_cib = "empty.xml" temp_cib = "temp.xml" class DateValueTest(unittest.TestCase): def testParse(self): for value, item in enumerate(rule.DateCommonValue.allowed_items, 1): self.assertEquals( str(value), rule.DateCommonValue("%s=%s" % (item, value)).parts[item] ) value = rule.DateCommonValue( "hours=1 monthdays=2 weekdays=3 yeardays=4 months=5 weeks=6 " "years=7 weekyears=8 moon=9" ) self.assertEquals("1", value.parts["hours"]) self.assertEquals("2", value.parts["monthdays"]) self.assertEquals("3", value.parts["weekdays"]) self.assertEquals("4", value.parts["yeardays"]) self.assertEquals("5", value.parts["months"]) self.assertEquals("6", value.parts["weeks"]) self.assertEquals("7", value.parts["years"]) self.assertEquals("8", value.parts["weekyears"]) self.assertEquals("9", value.parts["moon"]) value = rule.DateCommonValue("hours=1 monthdays=2 hours=3") self.assertEquals("2", value.parts["monthdays"]) self.assertEquals("3", value.parts["hours"]) value = rule.DateCommonValue(" hours=1 monthdays=2 hours=3 ") self.assertEquals("2", value.parts["monthdays"]) self.assertEquals("3", value.parts["hours"]) self.assertSyntaxError( "missing one of 'hours=', 'monthdays=', 'weekdays=', 'yeardays=', " "'months=', 'weeks=', 'years=', 'weekyears=', 'moon=' in date-spec", "", rule.DateSpecValue ) self.assertSyntaxError( "missing value after 'hours=' in date-spec", "hours=", rule.DateSpecValue ) self.assertSyntaxError( "missing =value after 'hours' in date-spec", "hours", rule.DateSpecValue ) self.assertSyntaxError( "unexpected 'foo=bar' in date-spec", "foo=bar", rule.DateSpecValue ) self.assertSyntaxError( "unexpected 'foo=bar' in date-spec", "hours=1 foo=bar", 
rule.DateSpecValue ) def testDurationValidate(self): for value, item in enumerate(rule.DateCommonValue.allowed_items, 1): self.assertEquals( str(value), rule.DateDurationValue("%s=%s" % (item, value)).parts[item] ) for item in rule.DateCommonValue.allowed_items: self.assertSyntaxError( "invalid %s '%s' in 'duration'" % (item, "foo"), "%s=foo" % item, rule.DateDurationValue ) self.assertSyntaxError( "invalid %s '%s' in 'duration'" % (item, "-1"), "%s=-1" % item, rule.DateDurationValue ) self.assertSyntaxError( "invalid %s '%s' in 'duration'" % (item, "2foo"), "%s=2foo" % item, rule.DateDurationValue ) def testDateSpecValidation(self): for item in rule.DateCommonValue.allowed_items: value = 1 self.assertEquals( str(value), rule.DateSpecValue("%s=%s" % (item, value)).parts[item] ) self.assertEquals( "%s-%s" % (value, value + 1), rule.DateSpecValue( "%s=%s-%s" % (item, value, value + 1) ).parts[item] ) self.assertEquals( "hours=9-16 weekdays=1-5", str(rule.DateSpecValue("hours=9-16 weekdays=1-5")) ) for item in rule.DateCommonValue.allowed_items: self.assertSyntaxError( "invalid %s '%s' in 'date-spec'" % (item, "foo"), "%s=foo" % item, rule.DateSpecValue ) self.assertSyntaxError( "invalid %s '%s' in 'date-spec'" % (item, "1-foo"), "%s=1-foo" % item, rule.DateSpecValue ) self.assertSyntaxError( "invalid %s '%s' in 'date-spec'" % (item, "foo-1"), "%s=foo-1" % item, rule.DateSpecValue ) self.assertSyntaxError( "invalid %s '%s' in 'date-spec'" % (item, "1-2-3"), "%s=1-2-3" % item, rule.DateSpecValue ) self.assertSyntaxError( "invalid %s '%s' in 'date-spec'" % (item, "2-1"), "%s=2-1" % item, rule.DateSpecValue ) self.assertSyntaxError( "invalid hours '24' in 'date-spec'", "hours=24", rule.DateSpecValue ) self.assertSyntaxError( "invalid monthdays '32' in 'date-spec'", "monthdays=32", rule.DateSpecValue ) self.assertSyntaxError( "invalid weekdays '8' in 'date-spec'", "weekdays=8", rule.DateSpecValue ) self.assertSyntaxError( "invalid yeardays '367' in 'date-spec'", 
"yeardays=367", rule.DateSpecValue ) self.assertSyntaxError( "invalid months '13' in 'date-spec'", "months=13", rule.DateSpecValue ) self.assertSyntaxError( "invalid weeks '54' in 'date-spec'", "weeks=54", rule.DateSpecValue ) self.assertSyntaxError( "invalid weekyears '54' in 'date-spec'", "weekyears=54", rule.DateSpecValue ) self.assertSyntaxError( "invalid moon '8' in 'date-spec'", "moon=8", rule.DateSpecValue ) self.assertSyntaxError( "invalid hours '12-8' in 'date-spec'", "hours=12-8", rule.DateSpecValue ) def assertSyntaxError(self, syntax_error, parts_string, value_class=None): value_class = value_class if value_class else rule.DateCommonValue self.assertRaises(rule.SyntaxError, value_class, parts_string) try: value_class(parts_string) except rule.SyntaxError as e: self.assertEquals(syntax_error, str(e)) class ParserTest(unittest.TestCase): def setUp(self): self.parser = rule.RuleParser() def testEmptyInput(self): self.assertRaises(rule.UnexpectedEndOfInput, self.parser.parse, []) def testSingleLiteral(self): self.assertSyntaxError( "missing one of 'eq', 'ne', 'lt', 'gt', 'lte', 'gte', 'in_range', " "'defined', 'not_defined', 'date-spec'", ["#uname"] ) self.assertSyntaxError( "missing one of 'eq', 'ne', 'lt', 'gt', 'lte', 'gte', 'in_range', " "'defined', 'not_defined', 'date-spec'", ["string", "node1"] ) def testSingleLiteralDatespec(self): self.assertEquals( "(date-spec (literal hours=1))", str(self.parser.parse(["date-spec", "hours=1"])) ) self.assertEquals( "(date-spec (literal hours=1-14 months=1 monthdays=20-30))", str(self.parser.parse([ "date-spec", "hours=1-14 months=1 monthdays=20-30" ])) ) self.assertUnexpectedEndOfInput(["date-spec"]) def testSimpleExpression(self): self.assertEquals( "(eq (literal #uname) (literal node1))", str(self.parser.parse(["#uname", "eq", "node1"])) ) self.assertEquals( "(ne (literal #uname) (literal node2))", str(self.parser.parse(["#uname", "ne", "node2"])) ) self.assertEquals(<|fim▁hole|> ) self.assertEquals( "(gte 
(literal int) (literal 123))", str(self.parser.parse(["int", "gte", "123"])) ) self.assertEquals( "(lt (literal int) (literal 123))", str(self.parser.parse(["int", "lt", "123"])) ) self.assertEquals( "(lte (literal int) (literal 123))", str(self.parser.parse(["int", "lte", "123"])) ) def testSimpleExpressionBad(self): self.assertSyntaxError( "unexpected 'eq'", ["eq"] ) self.assertUnexpectedEndOfInput(["#uname", "eq"]) self.assertSyntaxError( "unexpected 'node1'", ["#uname", "node1"] ) self.assertSyntaxError( "unexpected 'eq'", ["eq", "#uname"] ) self.assertSyntaxError( "unexpected 'eq'", ["eq", "lt"] ) self.assertSyntaxError( "unexpected 'string' before 'eq'", ["string", "#uname", "eq", "node1"] ) self.assertSyntaxError( "unexpected 'date-spec' before 'eq'", ["date-spec", "hours=1", "eq", "node1"] ) self.assertSyntaxError( "unexpected 'date-spec' after 'eq'", ["#uname", "eq", "date-spec", "hours=1"] ) self.assertSyntaxError( "unexpected 'duration' before 'eq'", ["duration", "hours=1", "eq", "node1"] ) self.assertSyntaxError( "unexpected 'duration' after 'eq'", ["#uname", "eq", "duration", "hours=1"] ) def testDefinedExpression(self): self.assertEquals( "(defined (literal pingd))", str(self.parser.parse(["defined", "pingd"])) ) self.assertEquals( "(not_defined (literal pingd))", str(self.parser.parse(["not_defined", "pingd"])) ) def testDefinedExpressionBad(self): self.assertUnexpectedEndOfInput(["defined"]) self.assertUnexpectedEndOfInput(["not_defined"]) self.assertSyntaxError( "unexpected 'eq'", ["defined", "eq"] ) self.assertSyntaxError( "unexpected 'and'", ["defined", "and"] ) self.assertSyntaxError( "unexpected 'string' after 'defined'", ["defined", "string", "pingd"] ) self.assertSyntaxError( "unexpected 'date-spec' after 'defined'", ["defined", "date-spec", "hours=1"] ) self.assertSyntaxError( "unexpected 'duration' after 'defined'", ["defined", "duration", "hours=1"] ) def testTypeExpression(self): self.assertEquals( "(eq (literal #uname) (string (literal 
node1)))", str(self.parser.parse(["#uname", "eq", "string", "node1"])) ) self.assertEquals( "(eq (literal #uname) (integer (literal 12345)))", str(self.parser.parse(["#uname", "eq", "integer", "12345"])) ) self.assertEquals( "(eq (literal #uname) (integer (literal -12345)))", str(self.parser.parse(["#uname", "eq", "integer", "-12345"])) ) self.assertEquals( "(eq (literal #uname) (version (literal 1)))", str(self.parser.parse(["#uname", "eq", "version", "1"])) ) self.assertEquals( "(eq (literal #uname) (version (literal 1.2.3)))", str(self.parser.parse(["#uname", "eq", "version", "1.2.3"])) ) self.assertEquals( "(eq (literal #uname) (string (literal string)))", str(self.parser.parse(["#uname", "eq", "string", "string"])) ) self.assertEquals( "(eq (literal #uname) (string (literal and)))", str(self.parser.parse(["#uname", "eq", "string", "and"])) ) self.assertEquals( "(and " "(ne (literal #uname) (string (literal integer))) " "(ne (literal #uname) (string (literal version)))" ")", str(self.parser.parse([ "#uname", "ne", "string", "integer", "and", "#uname", "ne", "string", "version" ])) ) def testTypeExpressionBad(self): self.assertUnexpectedEndOfInput(["string"]) self.assertUnexpectedEndOfInput(["#uname", "eq", "string"]) self.assertSyntaxError( "unexpected 'string' before 'eq'", ["string", "#uname", "eq", "node1"] ) self.assertSyntaxError( "invalid integer value 'node1'", ["#uname", "eq", "integer", "node1"] ) self.assertSyntaxError( "invalid version value 'node1'", ["#uname", "eq", "version", "node1"] ) def testDateExpression(self): self.assertEquals( "(gt (literal date) (literal 2014-06-26))", str(self.parser.parse(["date", "gt", "2014-06-26"])) ) self.assertEquals( "(lt (literal date) (literal 2014-06-26))", str(self.parser.parse(["date", "lt", "2014-06-26"])) ) self.assertEquals( "(in_range " "(literal date) (literal 2014-06-26) (literal 2014-07-26)" ")", str(self.parser.parse([ "date", "in_range", "2014-06-26", "to", "2014-07-26" ])) ) self.assertEquals( 
"(in_range " "(literal date) " "(literal 2014-06-26) (duration (literal years=1))" ")", str(self.parser.parse([ "date", "in_range", "2014-06-26", "to", "duration", "years=1" ])) ) def testDateExpressionBad(self): self.assertUnexpectedEndOfInput( ["date", "in_range"] ) self.assertSyntaxError( "missing 'to'", ["date", "in_range", '2014-06-26'] ) self.assertUnexpectedEndOfInput( ["date", "in_range", "2014-06-26", "to"] ) self.assertSyntaxError( "unexpected 'in_range'", ["in_range", '2014-06-26', "to", "2014-07-26"] ) self.assertSyntaxError( "expecting 'to', got 'eq'", ["date", "in_range", '#uname', "eq", "node1", "to", "2014-07-26"] ) self.assertSyntaxError( "invalid date '#uname' in 'in_range ... to'", ["date", "in_range", "2014-06-26", "to", '#uname', "eq", "node1"] ) self.assertSyntaxError( "unexpected 'defined' after 'in_range'", ["date", "in_range", "defined", "pingd", "to", "2014-07-26"] ) self.assertSyntaxError( "unexpected 'defined' after 'in_range ... to'", ["date", "in_range", "2014-06-26", "to", "defined", "pingd"] ) self.assertSyntaxError( "unexpected 'string' before 'in_range'", ["string", "date", "in_range", '2014-06-26', "to", "2014-07-26"] ) self.assertSyntaxError( "unexpected 'string' after 'in_range'", ["date", "in_range", "string", '2014-06-26', "to", "2014-07-26"] ) self.assertSyntaxError( "unexpected 'string' after 'in_range ... to'", ["date", "in_range", '2014-06-26', "to", "string", "2014-07-26"] ) self.assertSyntaxError( "unexpected 'string' after '2014-06-26'", ["date", "in_range", '2014-06-26', "string", "to", "2014-07-26"] ) self.assertSyntaxError( "unexpected '#uname' before 'in_range'", ["#uname", "in_range", '2014-06-26', "to", "2014-07-26"] ) self.assertSyntaxError( "invalid date '2014-13-26' in 'in_range ... to'", ["date", "in_range", '2014-13-26', "to", "2014-07-26"] ) self.assertSyntaxError( "invalid date '2014-13-26' in 'in_range ... 
to'", ["date", "in_range", '2014-06-26', "to", "2014-13-26"] ) def testAndOrExpression(self): self.assertEquals( "(and " "(ne (literal #uname) (literal node1)) " "(ne (literal #uname) (literal node2))" ")", str(self.parser.parse([ "#uname", "ne", "node1", "and", "#uname", "ne", "node2" ])) ) self.assertEquals( "(or " "(eq (literal #uname) (literal node1)) " "(eq (literal #uname) (literal node2))" ")", str(self.parser.parse([ "#uname", "eq", "node1", "or", "#uname", "eq", "node2" ])) ) self.assertEquals( "(and " "(and " "(ne (literal #uname) (literal node1)) " "(ne (literal #uname) (literal node2))" ") " "(ne (literal #uname) (literal node3))" ")", str(self.parser.parse([ "#uname", "ne", "node1", "and", "#uname", "ne", "node2", "and", "#uname", "ne", "node3" ])) ) self.assertEquals( "(or " "(and " "(ne (literal #uname) (literal node1)) " "(ne (literal #uname) (literal node2))" ") " "(eq (literal #uname) (literal node3))" ")", str(self.parser.parse([ "#uname", "ne", "node1", "and", "#uname", "ne", "node2", "or", "#uname", "eq", "node3" ])) ) self.assertEquals( "(and " "(or " "(eq (literal #uname) (literal node1)) " "(eq (literal #uname) (literal node2))" ") " "(ne (literal #uname) (literal node3))" ")", str(self.parser.parse([ "#uname", "eq", "node1", "or", "#uname", "eq", "node2", "and", "#uname", "ne", "node3" ])) ) self.assertEquals( "(and " "(defined (literal pingd)) " "(lte (literal pingd) (literal 1))" ")", str(self.parser.parse([ "defined", "pingd", "and", "pingd", "lte", "1" ])) ) self.assertEquals( "(or " "(gt (literal pingd) (literal 1)) " "(not_defined (literal pingd))" ")", str(self.parser.parse([ "pingd", "gt", "1", "or", "not_defined", "pingd" ])) ) def testAndOrExpressionDateSpec(self): self.assertEquals( "(and " "(ne (literal #uname) (literal node1)) " "(date-spec (literal hours=1-12))" ")", str(self.parser.parse([ "#uname", "ne", "node1", "and", "date-spec", "hours=1-12" ])) ) self.assertEquals( "(or " "(date-spec (literal monthdays=1-12)) " "(ne 
(literal #uname) (literal node1))" ")", str(self.parser.parse([ "date-spec", "monthdays=1-12", "or", "#uname", "ne", "node1" ])) ) self.assertEquals( "(or " "(date-spec (literal monthdays=1-10)) " "(date-spec (literal monthdays=11-20))" ")", str(self.parser.parse([ "date-spec", "monthdays=1-10", "or", "date-spec", "monthdays=11-20" ])) ) def testAndOrExpressionDate(self): self.assertEquals( "(and " "(ne (literal #uname) (literal node1)) " "(in_range " "(literal date) (literal 2014-06-26) (literal 2014-07-26)" ")" ")", str(self.parser.parse([ "#uname", "ne", "node1", "and", "date", "in_range", "2014-06-26", "to", "2014-07-26" ])) ) self.assertEquals( "(and " "(in_range " "(literal date) (literal 2014-06-26) (literal 2014-07-26)" ") " "(ne (literal #uname) (literal node1))" ")", str(self.parser.parse([ "date", "in_range", "2014-06-26", "to", "2014-07-26", "and", "#uname", "ne", "node1" ])) ) def testAndOrExpressionBad(self): self.assertSyntaxError( "unexpected 'and'", ["and"] ) self.assertSyntaxError( "unexpected 'or'", ["or"] ) self.assertSyntaxError( "unexpected '#uname' before 'and'", ["#uname", "and", "node1"] ) self.assertSyntaxError( "unexpected '#uname' before 'or'", ["#uname", "or", "node1"] ) self.assertSyntaxError( "unexpected '#uname' before 'or'", ["#uname", "or", "eq"] ) self.assertSyntaxError( "unexpected 'node2' after 'and'", ["#uname", "eq", "node1", "and", "node2"] ) self.assertUnexpectedEndOfInput(["#uname", "eq", "node1", "and"]) self.assertUnexpectedEndOfInput( ["#uname", "eq", "node1", "and", "#uname", "eq"] ) self.assertSyntaxError( "unexpected 'and'", ["and", "#uname", "eq", "node1"] ) self.assertSyntaxError( "unexpected 'duration' after 'and'", ["#uname", "ne", "node1", "and", "duration", "hours=1"] ) self.assertSyntaxError( "unexpected 'duration' before 'or'", ["duration", "monthdays=1", "or", "#uname", "ne", "node1"] ) def testParenthesizedExpression(self): self.assertSyntaxError( "missing one of 'eq', 'ne', 'lt', 'gt', 'lte', 'gte', 
'in_range', " "'defined', 'not_defined', 'date-spec'", ["(", "#uname", ")"] ) self.assertEquals( "(date-spec (literal hours=1))", str(self.parser.parse(["(", "date-spec", "hours=1", ")"])) ) self.assertEquals( "(eq (literal #uname) (literal node1))", str(self.parser.parse(["(", "#uname", "eq", "node1", ")"])) ) self.assertEquals( "(defined (literal pingd))", str(self.parser.parse(["(", "defined", "pingd", ")"])) ) self.assertEquals( "(and " "(ne (literal #uname) (literal node1)) " "(ne (literal #uname) (literal node2))" ")", str(self.parser.parse([ "(", "#uname", "ne", "node1", "and", "#uname", "ne", "node2", ")" ])) ) self.assertEquals( "(and " "(ne (literal #uname) (literal node1)) " "(ne (literal #uname) (literal node2))" ")", str(self.parser.parse([ "(", "#uname", "ne", "node1", ")", "and", "(", "#uname", "ne", "node2", ")" ])) ) self.assertEquals( "(or " "(and " "(ne (literal #uname) (literal node1)) " "(ne (literal #uname) (literal node2))" ") " "(eq (literal #uname) (literal node3))" ")", str(self.parser.parse([ "(", "#uname", "ne", "node1", "and", "#uname", "ne", "node2", ")", "or", "#uname", "eq", "node3" ])) ) self.assertEquals( "(and " "(ne (literal #uname) (literal node1)) " "(or " "(ne (literal #uname) (literal node2)) " "(eq (literal #uname) (literal node3))" ")" ")", str(self.parser.parse([ "#uname", "ne", "node1", "and", "(", "#uname", "ne", "node2", "or", "#uname", "eq", "node3", ")" ])) ) self.assertEquals( "(and " "(ne (literal #uname) (literal node1)) " "(or " "(ne (literal #uname) (literal node2)) " "(eq (literal #uname) (literal node3))" ")" ")", str(self.parser.parse([ "(", "(", "(", "#uname", "ne", "node1", ")", "and", "(", "(", "(", "#uname", "ne", "node2", ")", "or", "(", "#uname", "eq", "node3", ")", ")", ")", ")", ")" ])) ) self.assertEquals( "(in_range " "(literal date) (literal 2014-06-26) (literal 2014-07-26)" ")", str(self.parser.parse([ "(", "date", "in_range", "2014-06-26", "to", "2014-07-26", ")" ])) ) def 
testParenthesizedExpressionBad(self): self.assertUnexpectedEndOfInput(["("]) self.assertSyntaxError( "unexpected ')'", ["(", ")"] ) self.assertSyntaxError( "missing ')'", ["(", "#uname"] ) self.assertUnexpectedEndOfInput(["(", "#uname", "eq"]) self.assertSyntaxError( "missing ')'", ["(", "#uname", "eq", "node1"] ) def assertUnexpectedEndOfInput(self, program): self.assertRaises(rule.UnexpectedEndOfInput, self.parser.parse, program) def assertSyntaxError(self, syntax_error, program): self.assertRaises( rule.SyntaxError, self.parser.parse, program ) try: self.parser.parse(program) except rule.SyntaxError as e: self.assertEquals(syntax_error, str(e)) class CibBuilderTest(unittest.TestCase): def setUp(self): self.parser = rule.RuleParser() self.builder = rule.CibBuilder() def testSingleLiteralDatespec(self): self.assertExpressionXml( ["date-spec", "hours=1"], """ <rsc_location id="location-dummy"> <rule id="location-dummy-rule"> <date_expression id="location-dummy-rule-expr" operation="date_spec"> <date_spec hours="1" id="location-dummy-rule-expr-datespec"/> </date_expression> </rule> </rsc_location> """ ) self.assertExpressionXml( ["date-spec", "hours=1-14 monthdays=20-30 months=1"], """ <rsc_location id="location-dummy"> <rule id="location-dummy-rule"> <date_expression id="location-dummy-rule-expr" operation="date_spec"> <date_spec hours="1-14" id="location-dummy-rule-expr-datespec" monthdays="20-30" months="1"/> </date_expression> </rule> </rsc_location> """ ) def testSimpleExpression(self): self.assertExpressionXml( ["#uname", "eq", "node1"], """ <rsc_location id="location-dummy"> <rule id="location-dummy-rule"> <expression attribute="#uname" id="location-dummy-rule-expr" operation="eq" value="node1"/> </rule> </rsc_location> """ ) self.assertExpressionXml( ["#uname", "ne", "node1"], """ <rsc_location id="location-dummy"> <rule id="location-dummy-rule"> <expression attribute="#uname" id="location-dummy-rule-expr" operation="ne" value="node1"/> </rule> 
</rsc_location> """ ) self.assertExpressionXml( ["#uname", "gt", "node1"], """ <rsc_location id="location-dummy"> <rule id="location-dummy-rule"> <expression attribute="#uname" id="location-dummy-rule-expr" operation="gt" value="node1"/> </rule> </rsc_location> """ ) self.assertExpressionXml( ["#uname", "gte", "node1"], """ <rsc_location id="location-dummy"> <rule id="location-dummy-rule"> <expression attribute="#uname" id="location-dummy-rule-expr" operation="gte" value="node1"/> </rule> </rsc_location> """ ) self.assertExpressionXml( ["#uname", "lt", "node1"], """ <rsc_location id="location-dummy"> <rule id="location-dummy-rule"> <expression attribute="#uname" id="location-dummy-rule-expr" operation="lt" value="node1"/> </rule> </rsc_location> """ ) self.assertExpressionXml( ["#uname", "lte", "node1"], """ <rsc_location id="location-dummy"> <rule id="location-dummy-rule"> <expression attribute="#uname" id="location-dummy-rule-expr" operation="lte" value="node1"/> </rule> </rsc_location> """ ) def testTypeExpression(self): self.assertExpressionXml( ["#uname", "eq", "string", "node1"], """ <rsc_location id="location-dummy"> <rule id="location-dummy-rule"> <expression attribute="#uname" id="location-dummy-rule-expr" operation="eq" type="string" value="node1"/> </rule> </rsc_location> """ ) self.assertExpressionXml( ["#uname", "eq", "integer", "12345"], """ <rsc_location id="location-dummy"> <rule id="location-dummy-rule"> <expression attribute="#uname" id="location-dummy-rule-expr" operation="eq" type="number" value="12345"/> </rule> </rsc_location> """ ) self.assertExpressionXml( ["#uname", "eq", "version", "1.2.3"], """ <rsc_location id="location-dummy"> <rule id="location-dummy-rule"> <expression attribute="#uname" id="location-dummy-rule-expr" operation="eq" type="version" value="1.2.3"/> </rule> </rsc_location> """ ) def testDefinedExpression(self): self.assertExpressionXml( ["defined", "pingd"], """ <rsc_location id="location-dummy"> <rule 
id="location-dummy-rule"> <expression attribute="pingd" id="location-dummy-rule-expr" operation="defined"/> </rule> </rsc_location> """ ) self.assertExpressionXml( ["not_defined", "pingd"], """ <rsc_location id="location-dummy"> <rule id="location-dummy-rule"> <expression attribute="pingd" id="location-dummy-rule-expr" operation="not_defined"/> </rule> </rsc_location> """ ) def testDateExpression(self): self.assertExpressionXml( ["date", "gt", "2014-06-26"], """ <rsc_location id="location-dummy"> <rule id="location-dummy-rule"> <date_expression id="location-dummy-rule-expr" operation="gt" start="2014-06-26"/> </rule> </rsc_location> """ ) self.assertExpressionXml( ["date", "lt", "2014-06-26"], """ <rsc_location id="location-dummy"> <rule id="location-dummy-rule"> <date_expression end="2014-06-26" id="location-dummy-rule-expr" operation="lt"/> </rule> </rsc_location> """ ) self.assertExpressionXml( ["date", "in_range", "2014-06-26", "to", "2014-07-26"], """ <rsc_location id="location-dummy"> <rule id="location-dummy-rule"> <date_expression end="2014-07-26" id="location-dummy-rule-expr" operation="in_range" start="2014-06-26"/> </rule> </rsc_location> """ ) self.assertExpressionXml( ["date", "in_range", "2014-06-26", "to", "duration", "years=1"], """ <rsc_location id="location-dummy"> <rule id="location-dummy-rule"> <date_expression id="location-dummy-rule-expr" operation="in_range" start="2014-06-26"> <duration id="location-dummy-rule-expr-duration" years="1"/> </date_expression> </rule> </rsc_location> """ ) def testNotDateExpression(self): self.assertExpressionXml( ["date", "eq", "2014-06-26"], """ <rsc_location id="location-dummy"> <rule id="location-dummy-rule"> <expression attribute="date" id="location-dummy-rule-expr" operation="eq" value="2014-06-26"/> </rule> </rsc_location> """ ) self.assertExpressionXml( ["date", "gt", "string", "2014-06-26"], """ <rsc_location id="location-dummy"> <rule id="location-dummy-rule"> <expression attribute="date" 
id="location-dummy-rule-expr" operation="gt" type="string" value="2014-06-26"/> </rule> </rsc_location> """ ) self.assertExpressionXml( ["date", "gt", "integer", "12345"], """ <rsc_location id="location-dummy"> <rule id="location-dummy-rule"> <expression attribute="date" id="location-dummy-rule-expr" operation="gt" type="number" value="12345"/> </rule> </rsc_location> """ ) self.assertExpressionXml( ["date", "gt", "version", "1.2.3"], """ <rsc_location id="location-dummy"> <rule id="location-dummy-rule"> <expression attribute="date" id="location-dummy-rule-expr" operation="gt" type="version" value="1.2.3"/> </rule> </rsc_location> """ ) def testAndOrExpression(self): self.assertExpressionXml( ["#uname", "ne", "node1", "and", "#uname", "ne", "node2"], """ <rsc_location id="location-dummy"> <rule boolean-op="and" id="location-dummy-rule"> <expression attribute="#uname" id="location-dummy-rule-expr" operation="ne" value="node1"/> <expression attribute="#uname" id="location-dummy-rule-expr-1" operation="ne" value="node2"/> </rule> </rsc_location> """ ) self.assertExpressionXml( ["#uname", "eq", "node1", "or", "#uname", "eq", "node2"], """ <rsc_location id="location-dummy"> <rule boolean-op="or" id="location-dummy-rule"> <expression attribute="#uname" id="location-dummy-rule-expr" operation="eq" value="node1"/> <expression attribute="#uname" id="location-dummy-rule-expr-1" operation="eq" value="node2"/> </rule> </rsc_location> """ ) self.assertExpressionXml( [ "#uname", "ne", "node1", "and", "#uname", "ne", "node2", "and", "#uname", "ne", "node3" ], """ <rsc_location id="location-dummy"> <rule boolean-op="and" id="location-dummy-rule"> <expression attribute="#uname" id="location-dummy-rule-expr" operation="ne" value="node1"/> <expression attribute="#uname" id="location-dummy-rule-expr-1" operation="ne" value="node2"/> <expression attribute="#uname" id="location-dummy-rule-expr-2" operation="ne" value="node3"/> </rule> </rsc_location> """ ) self.assertExpressionXml( [ 
"#uname", "ne", "node1", "and", "#uname", "ne", "node2", "or", "#uname", "eq", "node3" ], """ <rsc_location id="location-dummy"> <rule boolean-op="or" id="location-dummy-rule"> <rule boolean-op="and" id="location-dummy-rule-rule"> <expression attribute="#uname" id="location-dummy-rule-rule-expr" operation="ne" value="node1"/> <expression attribute="#uname" id="location-dummy-rule-rule-expr-1" operation="ne" value="node2"/> </rule> <expression attribute="#uname" id="location-dummy-rule-expr" operation="eq" value="node3"/> </rule> </rsc_location> """ ) self.assertExpressionXml( [ "#uname", "eq", "node1", "or", "#uname", "eq", "node2", "and", "#uname", "ne", "node3" ], """ <rsc_location id="location-dummy"> <rule boolean-op="and" id="location-dummy-rule"> <rule boolean-op="or" id="location-dummy-rule-rule"> <expression attribute="#uname" id="location-dummy-rule-rule-expr" operation="eq" value="node1"/> <expression attribute="#uname" id="location-dummy-rule-rule-expr-1" operation="eq" value="node2"/> </rule> <expression attribute="#uname" id="location-dummy-rule-expr" operation="ne" value="node3"/> </rule> </rsc_location> """ ) self.assertExpressionXml( ["defined", "pingd", "and", "pingd", "lte", "1"], """ <rsc_location id="location-dummy"> <rule boolean-op="and" id="location-dummy-rule"> <expression attribute="pingd" id="location-dummy-rule-expr" operation="defined"/> <expression attribute="pingd" id="location-dummy-rule-expr-1" operation="lte" value="1"/> </rule> </rsc_location> """ ) self.assertExpressionXml( ["pingd", "gt", "1", "or", "not_defined", "pingd"], """ <rsc_location id="location-dummy"> <rule boolean-op="or" id="location-dummy-rule"> <expression attribute="pingd" id="location-dummy-rule-expr" operation="gt" value="1"/> <expression attribute="pingd" id="location-dummy-rule-expr-1" operation="not_defined"/> </rule> </rsc_location> """ ) def testAndOrExpressionDateSpec(self): self.assertExpressionXml( ["#uname", "ne", "node1", "and", "date-spec", 
"hours=1-12"], """ <rsc_location id="location-dummy"> <rule boolean-op="and" id="location-dummy-rule"> <expression attribute="#uname" id="location-dummy-rule-expr" operation="ne" value="node1"/> <date_expression id="location-dummy-rule-expr-1" operation="date_spec"> <date_spec hours="1-12" id="location-dummy-rule-expr-1-datespec"/> </date_expression> </rule> </rsc_location> """ ) self.assertExpressionXml( ["date-spec", "monthdays=1-12", "or", "#uname", "ne", "node1"], """ <rsc_location id="location-dummy"> <rule boolean-op="or" id="location-dummy-rule"> <date_expression id="location-dummy-rule-expr" operation="date_spec"> <date_spec id="location-dummy-rule-expr-datespec" monthdays="1-12"/> </date_expression> <expression attribute="#uname" id="location-dummy-rule-expr-1" operation="ne" value="node1"/> </rule> </rsc_location> """ ) self.assertExpressionXml( ["date-spec", "monthdays=1-10", "or", "date-spec", "monthdays=11-20"], """ <rsc_location id="location-dummy"> <rule boolean-op="or" id="location-dummy-rule"> <date_expression id="location-dummy-rule-expr" operation="date_spec"> <date_spec id="location-dummy-rule-expr-datespec" monthdays="1-10"/> </date_expression> <date_expression id="location-dummy-rule-expr-1" operation="date_spec"> <date_spec id="location-dummy-rule-expr-1-datespec" monthdays="11-20"/> </date_expression> </rule> </rsc_location> """ ) def testParenthesizedExpression(self): self.assertExpressionXml( [ "(", "#uname", "ne", "node1", "and", "#uname", "ne", "node2", ")", "or", "#uname", "eq", "node3" ], """ <rsc_location id="location-dummy"> <rule boolean-op="or" id="location-dummy-rule"> <rule boolean-op="and" id="location-dummy-rule-rule"> <expression attribute="#uname" id="location-dummy-rule-rule-expr" operation="ne" value="node1"/> <expression attribute="#uname" id="location-dummy-rule-rule-expr-1" operation="ne" value="node2"/> </rule> <expression attribute="#uname" id="location-dummy-rule-expr" operation="eq" value="node3"/> </rule> 
</rsc_location> """ ) self.assertExpressionXml( [ "#uname", "ne", "node1", "and", "(", "#uname", "ne", "node2", "or", "#uname", "eq", "node3", ")" ], """ <rsc_location id="location-dummy"> <rule boolean-op="and" id="location-dummy-rule"> <expression attribute="#uname" id="location-dummy-rule-expr" operation="ne" value="node1"/> <rule boolean-op="or" id="location-dummy-rule-rule"> <expression attribute="#uname" id="location-dummy-rule-rule-expr" operation="ne" value="node2"/> <expression attribute="#uname" id="location-dummy-rule-rule-expr-1" operation="eq" value="node3"/> </rule> </rule> </rsc_location> """ ) self.assertExpressionXml( [ "(", "#uname", "ne", "node1", "and", "#uname", "ne", "node2", ")", "or", "(", "#uname", "ne", "node3", "and", "#uname", "ne", "node4", ")", ], """ <rsc_location id="location-dummy"> <rule boolean-op="or" id="location-dummy-rule"> <rule boolean-op="and" id="location-dummy-rule-rule"> <expression attribute="#uname" id="location-dummy-rule-rule-expr" operation="ne" value="node1"/> <expression attribute="#uname" id="location-dummy-rule-rule-expr-1" operation="ne" value="node2"/> </rule> <rule boolean-op="and" id="location-dummy-rule-rule-1"> <expression attribute="#uname" id="location-dummy-rule-rule-1-expr" operation="ne" value="node3"/> <expression attribute="#uname" id="location-dummy-rule-rule-1-expr-1" operation="ne" value="node4"/> </rule> </rule> </rsc_location> """ ) self.assertExpressionXml( [ "(", "#uname", "ne", "node1", "and", "#uname", "ne", "node2", ")", "and", "(", "#uname", "ne", "node3", "and", "#uname", "ne", "node4", ")", ], """ <rsc_location id="location-dummy"> <rule boolean-op="and" id="location-dummy-rule"> <expression attribute="#uname" id="location-dummy-rule-expr" operation="ne" value="node1"/> <expression attribute="#uname" id="location-dummy-rule-expr-1" operation="ne" value="node2"/> <expression attribute="#uname" id="location-dummy-rule-expr-2" operation="ne" value="node3"/> <expression attribute="#uname" 
id="location-dummy-rule-expr-3" operation="ne" value="node4"/> </rule> </rsc_location> """ ) def assertExpressionXml(self, rule_expression, rule_xml): cib_dom = xml.dom.minidom.parse("empty.xml") constraints = cib_dom.getElementsByTagName("constraints")[0] constraint_el = constraints.appendChild( cib_dom.createElement("rsc_location") ) constraint_el.setAttribute("id", "location-dummy") ac( self.builder.build( constraint_el, self.parser.parse(rule_expression) ).parentNode.toprettyxml(indent=" "), rule_xml.lstrip().rstrip(" ") ) class TokenPreprocessorTest(unittest.TestCase): def setUp(self): self.preprocessor = rule.TokenPreprocessor() def testNoChanges(self): self.assertEquals([], self.preprocessor.run([])) self.assertEquals( ["#uname", "eq", "node1"], self.preprocessor.run(["#uname", "eq", "node1"]) ) def testDateSpec(self): self.assertEquals( ["date-spec"], self.preprocessor.run(["date-spec"]) ) self.assertEquals( ["date-spec", "hours=14"], self.preprocessor.run(["date-spec", "hours=14"]) ) self.assertEquals( ["date-spec", "hours weeks=6 months= moon=1"], self.preprocessor.run( ["date-spec", "hours", "weeks=6", "months=", "moon=1"] ) ) self.assertEquals( ["date-spec", "foo", "hours=14"], self.preprocessor.run(["date-spec", "foo", "hours=14"]) ) self.assertEquals( ["date-spec", "hours=14", "foo", "hours=14"], self.preprocessor.run(["date-spec", "hours=14", "foo", "hours=14"]) ) self.assertEquals( [ "date-spec", "hours=1 monthdays=2 weekdays=3 yeardays=4 months=5 " "weeks=6 years=7 weekyears=8 moon=9" ], self.preprocessor.run([ "date-spec", "hours=1", "monthdays=2", "weekdays=3", "yeardays=4", "months=5","weeks=6", "years=7", "weekyears=8", "moon=9" ]) ) self.assertEquals( ["#uname", "eq", "node1", "or", "date-spec", "hours=14"], self.preprocessor.run([ "#uname", "eq", "node1", "or", "date-spec", "hours=14" ]) ) self.assertEquals( ["date-spec", "hours=14", "or", "#uname", "eq", "node1"], self.preprocessor.run([ "date-spec", "hours=14", "or", "#uname", "eq", 
"node1", ]) ) def testDuration(self): self.assertEquals( ["duration"], self.preprocessor.run(["duration"]) ) self.assertEquals( ["duration", "hours=14"], self.preprocessor.run(["duration", "hours=14"]) ) self.assertEquals( ["duration", "hours weeks=6 months= moon=1"], self.preprocessor.run( ["duration", "hours", "weeks=6", "months=", "moon=1"] ) ) self.assertEquals( ["duration", "foo", "hours=14"], self.preprocessor.run(["duration", "foo", "hours=14"]) ) self.assertEquals( ["duration", "hours=14", "foo", "hours=14"], self.preprocessor.run(["duration", "hours=14", "foo", "hours=14"]) ) self.assertEquals( [ "duration", "hours=1 monthdays=2 weekdays=3 yeardays=4 months=5 " "weeks=6 years=7 weekyears=8 moon=9" ], self.preprocessor.run([ "duration", "hours=1", "monthdays=2", "weekdays=3", "yeardays=4", "months=5","weeks=6", "years=7", "weekyears=8", "moon=9" ]) ) self.assertEquals( ["#uname", "eq", "node1", "or", "duration", "hours=14"], self.preprocessor.run([ "#uname", "eq", "node1", "or", "duration", "hours=14" ]) ) self.assertEquals( ["duration", "hours=14", "or", "#uname", "eq", "node1"], self.preprocessor.run([ "duration", "hours=14", "or", "#uname", "eq", "node1", ]) ) def testOperationDatespec(self): self.assertEquals( ["date-spec", "weeks=6 moon=1"], self.preprocessor.run( ["date-spec", "operation=date_spec", "weeks=6", "moon=1"] ) ) self.assertEquals( ["date-spec", "weeks=6 moon=1"], self.preprocessor.run( ["date-spec", "weeks=6", "operation=date_spec", "moon=1"] ) ) self.assertEquals( ["date-spec", "weeks=6", "foo", "moon=1"], self.preprocessor.run( ["date-spec", "weeks=6", "operation=date_spec", "foo", "moon=1"] ) ) self.assertEquals( ["date-spec", "weeks=6", "foo", "operation=date_spec", "moon=1"], self.preprocessor.run( ["date-spec", "weeks=6", "foo", "operation=date_spec", "moon=1"] ) ) self.assertEquals( ["date-spec", "weeks=6 moon=1"], self.preprocessor.run( ["date-spec", "weeks=6", "moon=1", "operation=date_spec"] ) ) self.assertEquals( ["date-spec", 
"weeks=6 moon=1", "foo"], self.preprocessor.run( ["date-spec", "weeks=6", "moon=1", "operation=date_spec", "foo"] ) ) self.assertEquals( ["date-spec"], self.preprocessor.run( ["date-spec", "operation=date_spec"] ) ) self.assertEquals( ["date-spec", "weeks=6", "operation=foo", "moon=1"], self.preprocessor.run( ["date-spec", "weeks=6", "operation=foo", "moon=1"] ) ) def testDateLegacySyntax(self): # valid syntax self.assertEquals( ["date", "gt", "2014-06-26"], self.preprocessor.run([ "date", "start=2014-06-26", "gt" ]) ) self.assertEquals( ["date", "lt", "2014-06-26"], self.preprocessor.run([ "date", "end=2014-06-26", "lt" ]) ) self.assertEquals( ["date", "in_range", "2014-06-26", "to", "2014-07-26"], self.preprocessor.run([ "date", "start=2014-06-26", "end=2014-07-26", "in_range" ]) ) self.assertEquals( ["date", "in_range", "2014-06-26", "to", "2014-07-26"], self.preprocessor.run([ "date", "end=2014-07-26", "start=2014-06-26", "in_range" ]) ) self.assertEquals( ["date", "gt", "2014-06-26", "foo"], self.preprocessor.run([ "date", "start=2014-06-26", "gt", "foo" ]) ) self.assertEquals( ["date", "lt", "2014-06-26", "foo"], self.preprocessor.run([ "date", "end=2014-06-26", "lt", "foo" ]) ) self.assertEquals( ["date", "in_range", "2014-06-26", "to", "2014-07-26", "foo"], self.preprocessor.run([ "date", "start=2014-06-26", "end=2014-07-26", "in_range", "foo" ]) ) self.assertEquals( ["date", "in_range", "2014-06-26", "to", "2014-07-26", "foo"], self.preprocessor.run([ "date", "end=2014-07-26", "start=2014-06-26", "in_range", "foo" ]) ) # invalid syntax - no change self.assertEquals( ["date"], self.preprocessor.run([ "date" ]) ) self.assertEquals( ["date", "start=2014-06-26"], self.preprocessor.run([ "date", "start=2014-06-26" ]) ) self.assertEquals( ["date", "end=2014-06-26"], self.preprocessor.run([ "date", "end=2014-06-26" ]) ) self.assertEquals( ["date", "start=2014-06-26", "end=2014-07-26"], self.preprocessor.run([ "date", "start=2014-06-26", "end=2014-07-26" ]) ) 
self.assertEquals( ["date", "start=2014-06-26", "end=2014-07-26", "lt"], self.preprocessor.run([ "date", "start=2014-06-26", "end=2014-07-26", "lt" ]) ) self.assertEquals( ["date", "start=2014-06-26", "lt", "foo"], self.preprocessor.run([ "date", "start=2014-06-26", "lt", "foo" ]) ) self.assertEquals( ["date", "start=2014-06-26", "end=2014-07-26", "gt", "foo"], self.preprocessor.run([ "date", "start=2014-06-26", "end=2014-07-26", "gt", "foo" ]) ) self.assertEquals( ["date", "end=2014-06-26", "gt"], self.preprocessor.run([ "date", "end=2014-06-26", "gt" ]) ) self.assertEquals( ["date", "start=2014-06-26", "in_range", "foo"], self.preprocessor.run([ "date", "start=2014-06-26", "in_range", "foo" ]) ) self.assertEquals( ["date", "end=2014-07-26", "in_range"], self.preprocessor.run([ "date", "end=2014-07-26", "in_range" ]) ) self.assertEquals( ["foo", "start=2014-06-26", "gt"], self.preprocessor.run([ "foo", "start=2014-06-26", "gt" ]) ) self.assertEquals( ["foo", "end=2014-06-26", "lt"], self.preprocessor.run([ "foo", "end=2014-06-26", "lt" ]) ) def testParenthesis(self): self.assertEquals( ["("], self.preprocessor.run(["("]) ) self.assertEquals( [")"], self.preprocessor.run([")"]) ) self.assertEquals( ["(", "(", ")", ")"], self.preprocessor.run(["(", "(", ")", ")"]) ) self.assertEquals( ["(", "(", ")", ")"], self.preprocessor.run(["(())"]) ) self.assertEquals( ["a", "(", "b", ")", "c"], self.preprocessor.run(["a", "(", "b", ")", "c"]) ) self.assertEquals( ["a", "(", "b", "c", ")", "d"], self.preprocessor.run(["a", "(", "b", "c", ")", "d"]) ) self.assertEquals( ["a", ")", "b", "(", "c"], self.preprocessor.run(["a", ")", "b", "(", "c"]) ) self.assertEquals( ["a", "(", "b", ")", "c"], self.preprocessor.run(["a", "(b)", "c"]) ) self.assertEquals( ["a", "(", "b", ")", "c"], self.preprocessor.run(["a(", "b", ")c"]) ) self.assertEquals( ["a", "(", "b", ")", "c"], self.preprocessor.run(["a(b)c"]) ) self.assertEquals( ["aA", "(", "bB", ")", "cC"], 
self.preprocessor.run(["aA(bB)cC"]) ) self.assertEquals( ["(", "aA", "(", "bB", ")", "cC", ")"], self.preprocessor.run(["(aA(bB)cC)"]) ) self.assertEquals( ["(", "aA", "(", "(", "bB", ")", "cC", ")"], self.preprocessor.run(["(aA(", "(bB)cC)"]) ) self.assertEquals( ["(", "aA", "(", "(", "(", "bB", ")", "cC", ")"], self.preprocessor.run(["(aA(", "(", "(bB)cC)"]) ) class ExportAsExpressionTest(unittest.TestCase): def test_success(self): self.assertXmlExport( """ <rule id="location-dummy-rule" score="INFINITY"> <expression attribute="#uname" id="location-dummy-rule-expr" operation="eq" value="node1"/> </rule> """, "#uname eq node1", "#uname eq string node1" ) self.assertXmlExport( """ <rule id="location-dummy-rule" score="INFINITY"> <expression attribute="foo" id="location-dummy-rule-expr" operation="gt" type="version" value="1.2.3"/> </rule> """, "foo gt version 1.2.3", "foo gt version 1.2.3" ) self.assertXmlExport( """ <rule boolean-op="or" id="complexRule" score="INFINITY"> <rule boolean-op="and" id="complexRule-rule-1" score="0"> <date_expression id="complexRule-rule-1-expr" operation="date_spec"> <date_spec id="complexRule-rule-1-expr-datespec" weekdays="1-5" hours="12-23"/> </date_expression> <date_expression id="complexRule-rule-1-expr-1" operation="in_range" start="2014-07-26"> <duration id="complexRule-rule-1-expr-1-duration" months="1"/> </date_expression> </rule> <rule boolean-op="and" id="complexRule-rule" score="0"> <expression attribute="foo" id="complexRule-rule-expr-1" operation="gt" type="version" value="1.2"/> <expression attribute="#uname" id="complexRule-rule-expr" operation="eq" value="node3 4"/> </rule> </rule> """, "(date-spec hours=12-23 weekdays=1-5 and date in_range 2014-07-26 to duration months=1) or (foo gt version 1.2 and #uname eq \"node3 4\")", "(#uname eq string \"node3 4\" and foo gt version 1.2) or (date in_range 2014-07-26 to duration months=1 and date-spec hours=12-23 weekdays=1-5)" ) def assertXmlExport(self, rule_xml, export, 
export_normalized): ac( export + "\n", rule.ExportAsExpression().get_string( xml.dom.minidom.parseString(rule_xml).documentElement, normalize=False ) + "\n" ) ac( export_normalized + "\n", rule.ExportAsExpression().get_string( xml.dom.minidom.parseString(rule_xml).documentElement, normalize=True ) + "\n" ) class DomRuleAddTest(unittest.TestCase): def setUp(self): shutil.copy(empty_cib, temp_cib) output, returnVal = pcs(temp_cib, "resource create dummy1 Dummy") assert returnVal == 0 and output == "" def test_success_xml(self): self.assertExpressionXml( ["#uname", "eq", "node1"], """ <rsc_location id="location-dummy"> <rule id="location-dummy-rule" score="INFINITY"> <expression attribute="#uname" id="location-dummy-rule-expr" operation="eq" value="node1"/> </rule> </rsc_location> """ ) self.assertExpressionXml( ["id=myRule", "#uname", "eq", "node1"], """ <rsc_location id="location-dummy"> <rule id="myRule" score="INFINITY"> <expression attribute="#uname" id="myRule-expr" operation="eq" value="node1"/> </rule> </rsc_location> """ ) self.assertExpressionXml( ["score=INFINITY", "#uname", "eq", "node1"], """ <rsc_location id="location-dummy"> <rule id="location-dummy-rule" score="INFINITY"> <expression attribute="#uname" id="location-dummy-rule-expr" operation="eq" value="node1"/> </rule> </rsc_location> """ ) self.assertExpressionXml( ["score=100", "#uname", "eq", "node1"], """ <rsc_location id="location-dummy"> <rule id="location-dummy-rule" score="100"> <expression attribute="#uname" id="location-dummy-rule-expr" operation="eq" value="node1"/> </rule> </rsc_location> """ ) self.assertExpressionXml( ["score-attribute=pingd", "#uname", "eq", "node1"], """ <rsc_location id="location-dummy"> <rule id="location-dummy-rule" score-attribute="pingd"> <expression attribute="#uname" id="location-dummy-rule-expr" operation="eq" value="node1"/> </rule> </rsc_location> """ ) self.assertExpressionXml( ["role=master", "#uname", "eq", "node1"], """ <rsc_location id="location-dummy"> 
<rule id="location-dummy-rule" role="master" score="INFINITY"> <expression attribute="#uname" id="location-dummy-rule-expr" operation="eq" value="node1"/> </rule> </rsc_location> """ ) self.assertExpressionXml( ["role=slave", "#uname", "eq", "node1"], """ <rsc_location id="location-dummy"> <rule id="location-dummy-rule" role="slave" score="INFINITY"> <expression attribute="#uname" id="location-dummy-rule-expr" operation="eq" value="node1"/> </rule> </rsc_location> """ ) self.assertExpressionXml( ["score=100", "id=myRule", "role=master", "#uname", "eq", "node1"], """ <rsc_location id="location-dummy"> <rule id="myRule" role="master" score="100"> <expression attribute="#uname" id="myRule-expr" operation="eq" value="node1"/> </rule> </rsc_location> """ ) def test_success(self): output, returnVal = pcs( temp_cib, "constraint location dummy1 rule #uname eq node1" ) ac(output, "") self.assertEquals(0, returnVal) output, returnVal = pcs( temp_cib, "constraint location dummy1 rule id=MyRule score=100 role=master #uname eq node2" ) ac(output, "") self.assertEquals(0, returnVal) output, returnVal = pcs( temp_cib, "constraint location dummy1 rule id=complexRule (#uname eq node3 and foo gt version 1.2) or (date-spec hours=12-23 weekdays=1-5 and date in_range 2014-07-26 to duration months=1)" ) ac(output, "") self.assertEquals(0, returnVal) output, returnVal = pcs(temp_cib, "constraint location show --full") ac(output, """\ Location Constraints: Resource: dummy1 Constraint: location-dummy1 Rule: score=INFINITY (id:location-dummy1-rule) Expression: #uname eq node1 (id:location-dummy1-rule-expr) Constraint: location-dummy1-1 Rule: score=100 role=master (id:MyRule) Expression: #uname eq node2 (id:MyRule-expr) Constraint: location-dummy1-2 Rule: score=INFINITY boolean-op=or (id:complexRule) Rule: score=0 boolean-op=and (id:complexRule-rule) Expression: #uname eq node3 (id:complexRule-rule-expr) Expression: foo gt version 1.2 (id:complexRule-rule-expr-1) Rule: score=0 boolean-op=and 
(id:complexRule-rule-1) Expression: (id:complexRule-rule-1-expr) Date Spec: hours=12-23 weekdays=1-5 (id:complexRule-rule-1-expr-datespec) Expression: date in_range 2014-07-26 to duration (id:complexRule-rule-1-expr-1) Duration: months=1 (id:complexRule-rule-1-expr-1-duration) """) self.assertEquals(0, returnVal) output, returnVal = pcs(temp_cib, "constraint location show") ac(output, """\ Location Constraints: Resource: dummy1 Constraint: location-dummy1 Rule: score=INFINITY Expression: #uname eq node1 Constraint: location-dummy1-1 Rule: score=100 role=master Expression: #uname eq node2 Constraint: location-dummy1-2 Rule: score=INFINITY boolean-op=or Rule: score=0 boolean-op=and Expression: #uname eq node3 Expression: foo gt version 1.2 Rule: score=0 boolean-op=and Expression: Date Spec: hours=12-23 weekdays=1-5 Expression: date in_range 2014-07-26 to duration Duration: months=1 """) self.assertEquals(0, returnVal) def test_invalid_score(self): output, returnVal = pcs( temp_cib, "constraint location dummy1 rule score=pingd defined pingd" ) ac( output, "Warning: invalid score 'pingd', setting score-attribute=pingd " "instead\n" ) self.assertEquals(0, returnVal) output, returnVal = pcs(temp_cib, "constraint location show --full") ac(output, """\ Location Constraints: Resource: dummy1 Constraint: location-dummy1 Rule: score-attribute=pingd (id:location-dummy1-rule) Expression: defined pingd (id:location-dummy1-rule-expr) """) self.assertEquals(0, returnVal) def test_invalid_rule(self): output, returnVal = pcs( temp_cib, "constraint location dummy1 rule score=100" ) ac(output, "Error: no rule expression was specified\n") self.assertEquals(1, returnVal) output, returnVal = pcs( temp_cib, "constraint location dummy1 rule #uname eq" ) ac( output, "Error: '#uname eq' is not a valid rule expression: unexpected end " "of rule\n" ) self.assertEquals(1, returnVal) output, returnVal = pcs( temp_cib, "constraint location dummy1 rule string #uname eq node1" ) ac( output, "Error: 
'string #uname eq node1' is not a valid rule expression: " "unexpected 'string' before 'eq'\n" ) self.assertEquals(1, returnVal) def test_ivalid_options(self): output, returnVal = pcs( temp_cib, "constraint location dummy1 rule role=foo #uname eq node1" ) ac(output, "Error: invalid role 'foo', use 'master' or 'slave'\n") self.assertEquals(1, returnVal) output, returnVal = pcs( temp_cib, "constraint location dummy1 rule score=100 score-attribute=pingd #uname eq node1" ) ac(output, "Error: can not specify both score and score-attribute\n") self.assertEquals(1, returnVal) output, returnVal = pcs( temp_cib, "constraint location dummy1 rule id=1foo #uname eq node1" ) ac( output, "Error: invalid rule id '1foo', '1' is not a valid first character " "for a rule id\n" ) self.assertEquals(1, returnVal) output, returnVal = pcs(temp_cib, "constraint location show --full") ac(output, "Location Constraints:\n") self.assertEquals(0, returnVal) output, returnVal = pcs( temp_cib, "constraint location dummy1 rule id=MyRule #uname eq node1" ) ac(output, "") self.assertEquals(0, returnVal) output, returnVal = pcs(temp_cib, "constraint location show --full") ac(output, """\ Location Constraints: Resource: dummy1 Constraint: location-dummy1 Rule: score=INFINITY (id:MyRule) Expression: #uname eq node1 (id:MyRule-expr) """) self.assertEquals(0, returnVal) output, returnVal = pcs( temp_cib, "constraint location dummy1 rule id=MyRule #uname eq node1" ) ac( output, "Error: id 'MyRule' is already in use, please specify another one\n" ) self.assertEquals(1, returnVal) def assertExpressionXml(self, rule_expression, rule_xml): cib_dom = xml.dom.minidom.parse("empty.xml") constraints = cib_dom.getElementsByTagName("constraints")[0] constraint_el = constraints.appendChild( cib_dom.createElement("rsc_location") ) constraint_el.setAttribute("id", "location-dummy") options, rule_argv = rule.parse_argv(rule_expression) rule.dom_rule_add(constraint_el, options, rule_argv) ac( 
constraint_el.toprettyxml(indent=" "), rule_xml.lstrip().rstrip(" ") ) if __name__ == "__main__": unittest.main()<|fim▁end|>
"(gt (literal int) (literal 123))", str(self.parser.parse(["int", "gt", "123"]))
<|file_name|>generated.rs<|end_file_name|><|fim▁begin|>// ================================================================= // // * WARNING * // // This file is generated! // // Changes made to this file will be overwritten. If changes are // required to the generated code, the service_crategen project // must be updated to generate the changes. // // ================================================================= use std::error::Error; use std::fmt; use async_trait::async_trait; use rusoto_core::credential::ProvideAwsCredentials; use rusoto_core::region; use rusoto_core::request::{BufferedHttpResponse, DispatchSignedRequest}; use rusoto_core::{Client, RusotoError}; use rusoto_core::param::{Params, ServiceParams}; use rusoto_core::proto; use rusoto_core::signature::SignedRequest; #[allow(unused_imports)] use serde::{Deserialize, Serialize}; use serde_json; /// <p>CDN Authorization credentials</p> #[derive(Clone, Debug, Default, Deserialize, PartialEq, Serialize)] pub struct Authorization { /// <p>The Amazon Resource Name (ARN) for the secret in Secrets Manager that your Content Distribution Network (CDN) uses for authorization to access your endpoint.</p> #[serde(rename = "CdnIdentifierSecret")] pub cdn_identifier_secret: String, /// <p>The Amazon Resource Name (ARN) for the IAM role that allows MediaPackage to communicate with AWS Secrets Manager.</p> #[serde(rename = "SecretsRoleArn")] pub secrets_role_arn: String, } /// <p>A Channel resource configuration.</p> #[derive(Clone, Debug, Default, Deserialize, PartialEq)] #[cfg_attr(any(test, feature = "serialize_structs"), derive(Serialize))] pub struct Channel { /// <p>The Amazon Resource Name (ARN) assigned to the Channel.</p> #[serde(rename = "Arn")] #[serde(skip_serializing_if = "Option::is_none")] pub arn: Option<String>, /// <p>A short text description of the Channel.</p> #[serde(rename = "Description")] #[serde(skip_serializing_if = "Option::is_none")] pub description: Option<String>, #[serde(rename = 
"EgressAccessLogs")] #[serde(skip_serializing_if = "Option::is_none")] pub egress_access_logs: Option<EgressAccessLogs>, #[serde(rename = "HlsIngest")] #[serde(skip_serializing_if = "Option::is_none")] pub hls_ingest: Option<HlsIngest>, /// <p>The ID of the Channel.</p> #[serde(rename = "Id")] #[serde(skip_serializing_if = "Option::is_none")] pub id: Option<String>, #[serde(rename = "IngressAccessLogs")] #[serde(skip_serializing_if = "Option::is_none")] pub ingress_access_logs: Option<IngressAccessLogs>, #[serde(rename = "Tags")] #[serde(skip_serializing_if = "Option::is_none")] pub tags: Option<::std::collections::HashMap<String, String>>, } /// <p>A Common Media Application Format (CMAF) encryption configuration.</p> #[derive(Clone, Debug, Default, Deserialize, PartialEq, Serialize)] pub struct CmafEncryption { /// <p>Time (in seconds) between each encryption key rotation.</p> #[serde(rename = "KeyRotationIntervalSeconds")] #[serde(skip_serializing_if = "Option::is_none")] pub key_rotation_interval_seconds: Option<i64>, #[serde(rename = "SpekeKeyProvider")] pub speke_key_provider: SpekeKeyProvider, } /// <p>A Common Media Application Format (CMAF) packaging configuration.</p> #[derive(Clone, Debug, Default, Deserialize, PartialEq)] #[cfg_attr(any(test, feature = "serialize_structs"), derive(Serialize))] pub struct CmafPackage { #[serde(rename = "Encryption")] #[serde(skip_serializing_if = "Option::is_none")] pub encryption: Option<CmafEncryption>, /// <p>A list of HLS manifest configurations</p> #[serde(rename = "HlsManifests")] #[serde(skip_serializing_if = "Option::is_none")] pub hls_manifests: Option<Vec<HlsManifest>>, /// <p>Duration (in seconds) of each segment. 
Actual segments will be /// rounded to the nearest multiple of the source segment duration.</p> #[serde(rename = "SegmentDurationSeconds")] #[serde(skip_serializing_if = "Option::is_none")] pub segment_duration_seconds: Option<i64>, /// <p>An optional custom string that is prepended to the name of each segment. If not specified, it defaults to the ChannelId.</p> #[serde(rename = "SegmentPrefix")] #[serde(skip_serializing_if = "Option::is_none")] pub segment_prefix: Option<String>, #[serde(rename = "StreamSelection")] #[serde(skip_serializing_if = "Option::is_none")] pub stream_selection: Option<StreamSelection>, } /// <p>A Common Media Application Format (CMAF) packaging configuration.</p> #[derive(Clone, Debug, Default, PartialEq, Serialize)] #[cfg_attr(feature = "deserialize_structs", derive(Deserialize))] pub struct CmafPackageCreateOrUpdateParameters { #[serde(rename = "Encryption")] #[serde(skip_serializing_if = "Option::is_none")] pub encryption: Option<CmafEncryption>, /// <p>A list of HLS manifest configurations</p> #[serde(rename = "HlsManifests")] #[serde(skip_serializing_if = "Option::is_none")] pub hls_manifests: Option<Vec<HlsManifestCreateOrUpdateParameters>>, /// <p>Duration (in seconds) of each segment. Actual segments will be /// rounded to the nearest multiple of the source segment duration.</p> #[serde(rename = "SegmentDurationSeconds")] #[serde(skip_serializing_if = "Option::is_none")] pub segment_duration_seconds: Option<i64>, /// <p>An optional custom string that is prepended to the name of each segment. 
If not specified, it defaults to the ChannelId.</p> #[serde(rename = "SegmentPrefix")] #[serde(skip_serializing_if = "Option::is_none")] pub segment_prefix: Option<String>, #[serde(rename = "StreamSelection")] #[serde(skip_serializing_if = "Option::is_none")] pub stream_selection: Option<StreamSelection>, } /// <p>the option to configure log subscription.</p> #[derive(Clone, Debug, Default, PartialEq, Serialize)] #[cfg_attr(feature = "deserialize_structs", derive(Deserialize))] pub struct ConfigureLogsRequest { #[serde(rename = "EgressAccessLogs")] #[serde(skip_serializing_if = "Option::is_none")] pub egress_access_logs: Option<EgressAccessLogs>, /// <p>The ID of the channel to log subscription.</p> #[serde(rename = "Id")] pub id: String, #[serde(rename = "IngressAccessLogs")] #[serde(skip_serializing_if = "Option::is_none")] pub ingress_access_logs: Option<IngressAccessLogs>, } #[derive(Clone, Debug, Default, Deserialize, PartialEq)] #[cfg_attr(any(test, feature = "serialize_structs"), derive(Serialize))] pub struct ConfigureLogsResponse { /// <p>The Amazon Resource Name (ARN) assigned to the Channel.</p> #[serde(rename = "Arn")] #[serde(skip_serializing_if = "Option::is_none")] pub arn: Option<String>, /// <p>A short text description of the Channel.</p> #[serde(rename = "Description")] #[serde(skip_serializing_if = "Option::is_none")] pub description: Option<String>, #[serde(rename = "EgressAccessLogs")] #[serde(skip_serializing_if = "Option::is_none")] pub egress_access_logs: Option<EgressAccessLogs>, #[serde(rename = "HlsIngest")] #[serde(skip_serializing_if = "Option::is_none")] pub hls_ingest: Option<HlsIngest>, /// <p>The ID of the Channel.</p> #[serde(rename = "Id")] #[serde(skip_serializing_if = "Option::is_none")] pub id: Option<String>, #[serde(rename = "IngressAccessLogs")] #[serde(skip_serializing_if = "Option::is_none")] pub ingress_access_logs: Option<IngressAccessLogs>, #[serde(rename = "Tags")] #[serde(skip_serializing_if = "Option::is_none")] pub 
tags: Option<::std::collections::HashMap<String, String>>, } /// <p>A new Channel configuration.</p> #[derive(Clone, Debug, Default, PartialEq, Serialize)] #[cfg_attr(feature = "deserialize_structs", derive(Deserialize))] pub struct CreateChannelRequest { /// <p>A short text description of the Channel.</p> #[serde(rename = "Description")] #[serde(skip_serializing_if = "Option::is_none")] pub description: Option<String>, /// <p>The ID of the Channel. The ID must be unique within the region and it /// cannot be changed after a Channel is created.</p> #[serde(rename = "Id")] pub id: String, #[serde(rename = "Tags")] #[serde(skip_serializing_if = "Option::is_none")] pub tags: Option<::std::collections::HashMap<String, String>>, } #[derive(Clone, Debug, Default, Deserialize, PartialEq)] #[cfg_attr(any(test, feature = "serialize_structs"), derive(Serialize))] pub struct CreateChannelResponse { /// <p>The Amazon Resource Name (ARN) assigned to the Channel.</p> #[serde(rename = "Arn")] #[serde(skip_serializing_if = "Option::is_none")] pub arn: Option<String>, /// <p>A short text description of the Channel.</p> #[serde(rename = "Description")] #[serde(skip_serializing_if = "Option::is_none")] pub description: Option<String>, #[serde(rename = "EgressAccessLogs")] #[serde(skip_serializing_if = "Option::is_none")] pub egress_access_logs: Option<EgressAccessLogs>, #[serde(rename = "HlsIngest")] #[serde(skip_serializing_if = "Option::is_none")] pub hls_ingest: Option<HlsIngest>, /// <p>The ID of the Channel.</p> #[serde(rename = "Id")] #[serde(skip_serializing_if = "Option::is_none")] pub id: Option<String>, #[serde(rename = "IngressAccessLogs")] #[serde(skip_serializing_if = "Option::is_none")] pub ingress_access_logs: Option<IngressAccessLogs>, #[serde(rename = "Tags")] #[serde(skip_serializing_if = "Option::is_none")] pub tags: Option<::std::collections::HashMap<String, String>>, } /// <p>Configuration parameters used to create a new HarvestJob.</p> #[derive(Clone, Debug, 
Default, PartialEq, Serialize)] #[cfg_attr(feature = "deserialize_structs", derive(Deserialize))] pub struct CreateHarvestJobRequest { /// <p>The end of the time-window which will be harvested</p> #[serde(rename = "EndTime")] pub end_time: String, /// <p>The ID of the HarvestJob. The ID must be unique within the region /// and it cannot be changed after the HarvestJob is submitted</p> #[serde(rename = "Id")] pub id: String, /// <p>The ID of the OriginEndpoint that the HarvestJob will harvest from. /// This cannot be changed after the HarvestJob is submitted.</p> #[serde(rename = "OriginEndpointId")] pub origin_endpoint_id: String, #[serde(rename = "S3Destination")] pub s3_destination: S3Destination, /// <p>The start of the time-window which will be harvested</p> #[serde(rename = "StartTime")] pub start_time: String, } #[derive(Clone, Debug, Default, Deserialize, PartialEq)] #[cfg_attr(any(test, feature = "serialize_structs"), derive(Serialize))] pub struct CreateHarvestJobResponse { /// <p>The Amazon Resource Name (ARN) assigned to the HarvestJob.</p> #[serde(rename = "Arn")] #[serde(skip_serializing_if = "Option::is_none")] pub arn: Option<String>, /// <p>The ID of the Channel that the HarvestJob will harvest from.</p> #[serde(rename = "ChannelId")] #[serde(skip_serializing_if = "Option::is_none")] pub channel_id: Option<String>, /// <p>The time the HarvestJob was submitted</p> #[serde(rename = "CreatedAt")] #[serde(skip_serializing_if = "Option::is_none")] pub created_at: Option<String>, /// <p>The end of the time-window which will be harvested.</p> #[serde(rename = "EndTime")] #[serde(skip_serializing_if = "Option::is_none")] pub end_time: Option<String>, /// <p>The ID of the HarvestJob. 
The ID must be unique within the region /// and it cannot be changed after the HarvestJob is submitted.</p> #[serde(rename = "Id")] #[serde(skip_serializing_if = "Option::is_none")] pub id: Option<String>, /// <p>The ID of the OriginEndpoint that the HarvestJob will harvest from. /// This cannot be changed after the HarvestJob is submitted.</p> #[serde(rename = "OriginEndpointId")] #[serde(skip_serializing_if = "Option::is_none")] pub origin_endpoint_id: Option<String>, #[serde(rename = "S3Destination")] #[serde(skip_serializing_if = "Option::is_none")] pub s3_destination: Option<S3Destination>, /// <p>The start of the time-window which will be harvested.</p> #[serde(rename = "StartTime")] #[serde(skip_serializing_if = "Option::is_none")] pub start_time: Option<String>, /// <p>The current status of the HarvestJob. Consider setting up a CloudWatch Event to listen for /// HarvestJobs as they succeed or fail. In the event of failure, the CloudWatch Event will /// include an explanation of why the HarvestJob failed.</p> #[serde(rename = "Status")] #[serde(skip_serializing_if = "Option::is_none")] pub status: Option<String>, } /// <p>Configuration parameters used to create a new OriginEndpoint.</p> #[derive(Clone, Debug, Default, PartialEq, Serialize)] #[cfg_attr(feature = "deserialize_structs", derive(Deserialize))] pub struct CreateOriginEndpointRequest { #[serde(rename = "Authorization")] #[serde(skip_serializing_if = "Option::is_none")] pub authorization: Option<Authorization>, /// <p>The ID of the Channel that the OriginEndpoint will be associated with. 
/// This cannot be changed after the OriginEndpoint is created.</p> #[serde(rename = "ChannelId")] pub channel_id: String, #[serde(rename = "CmafPackage")] #[serde(skip_serializing_if = "Option::is_none")] pub cmaf_package: Option<CmafPackageCreateOrUpdateParameters>, #[serde(rename = "DashPackage")] #[serde(skip_serializing_if = "Option::is_none")] pub dash_package: Option<DashPackage>, /// <p>A short text description of the OriginEndpoint.</p> #[serde(rename = "Description")] #[serde(skip_serializing_if = "Option::is_none")] pub description: Option<String>, #[serde(rename = "HlsPackage")] #[serde(skip_serializing_if = "Option::is_none")] pub hls_package: Option<HlsPackage>, /// <p>The ID of the OriginEndpoint. The ID must be unique within the region /// and it cannot be changed after the OriginEndpoint is created.</p> #[serde(rename = "Id")] pub id: String, /// <p>A short string that will be used as the filename of the OriginEndpoint URL (defaults to &quot;index&quot;).</p> #[serde(rename = "ManifestName")] #[serde(skip_serializing_if = "Option::is_none")] pub manifest_name: Option<String>, #[serde(rename = "MssPackage")] #[serde(skip_serializing_if = "Option::is_none")] pub mss_package: Option<MssPackage>, /// <p>Control whether origination of video is allowed for this OriginEndpoint. If set to ALLOW, the OriginEndpoint /// may by requested, pursuant to any other form of access control. If set to DENY, the OriginEndpoint may not be /// requested. This can be helpful for Live to VOD harvesting, or for temporarily disabling origination</p> #[serde(rename = "Origination")] #[serde(skip_serializing_if = "Option::is_none")] pub origination: Option<String>, /// <p>Maximum duration (seconds) of content to retain for startover playback. 
/// If not specified, startover playback will be disabled for the OriginEndpoint.</p> #[serde(rename = "StartoverWindowSeconds")] #[serde(skip_serializing_if = "Option::is_none")] pub startover_window_seconds: Option<i64>, #[serde(rename = "Tags")] #[serde(skip_serializing_if = "Option::is_none")] pub tags: Option<::std::collections::HashMap<String, String>>, /// <p>Amount of delay (seconds) to enforce on the playback of live content. /// If not specified, there will be no time delay in effect for the OriginEndpoint.</p> #[serde(rename = "TimeDelaySeconds")] #[serde(skip_serializing_if = "Option::is_none")] pub time_delay_seconds: Option<i64>, /// <p>A list of source IP CIDR blocks that will be allowed to access the OriginEndpoint.</p> #[serde(rename = "Whitelist")] #[serde(skip_serializing_if = "Option::is_none")] pub whitelist: Option<Vec<String>>, } #[derive(Clone, Debug, Default, Deserialize, PartialEq)] #[cfg_attr(any(test, feature = "serialize_structs"), derive(Serialize))] pub struct CreateOriginEndpointResponse { /// <p>The Amazon Resource Name (ARN) assigned to the OriginEndpoint.</p> #[serde(rename = "Arn")] #[serde(skip_serializing_if = "Option::is_none")] pub arn: Option<String>, #[serde(rename = "Authorization")] #[serde(skip_serializing_if = "Option::is_none")] pub authorization: Option<Authorization>, /// <p>The ID of the Channel the OriginEndpoint is associated with.</p> #[serde(rename = "ChannelId")] #[serde(skip_serializing_if = "Option::is_none")] pub channel_id: Option<String>, #[serde(rename = "CmafPackage")] #[serde(skip_serializing_if = "Option::is_none")] pub cmaf_package: Option<CmafPackage>, #[serde(rename = "DashPackage")] #[serde(skip_serializing_if = "Option::is_none")] pub dash_package: Option<DashPackage>, /// <p>A short text description of the OriginEndpoint.</p> #[serde(rename = "Description")] #[serde(skip_serializing_if = "Option::is_none")] pub description: Option<String>, #[serde(rename = "HlsPackage")] 
#[serde(skip_serializing_if = "Option::is_none")] pub hls_package: Option<HlsPackage>, /// <p>The ID of the OriginEndpoint.</p> #[serde(rename = "Id")] #[serde(skip_serializing_if = "Option::is_none")] pub id: Option<String>, /// <p>A short string appended to the end of the OriginEndpoint URL.</p> #[serde(rename = "ManifestName")] #[serde(skip_serializing_if = "Option::is_none")] pub manifest_name: Option<String>, #[serde(rename = "MssPackage")] #[serde(skip_serializing_if = "Option::is_none")] pub mss_package: Option<MssPackage>, /// <p>Control whether origination of video is allowed for this OriginEndpoint. If set to ALLOW, the OriginEndpoint /// may by requested, pursuant to any other form of access control. If set to DENY, the OriginEndpoint may not be /// requested. This can be helpful for Live to VOD harvesting, or for temporarily disabling origination</p> #[serde(rename = "Origination")] #[serde(skip_serializing_if = "Option::is_none")] pub origination: Option<String>, /// <p>Maximum duration (seconds) of content to retain for startover playback. /// If not specified, startover playback will be disabled for the OriginEndpoint.</p> #[serde(rename = "StartoverWindowSeconds")] #[serde(skip_serializing_if = "Option::is_none")] pub startover_window_seconds: Option<i64>, #[serde(rename = "Tags")] #[serde(skip_serializing_if = "Option::is_none")] pub tags: Option<::std::collections::HashMap<String, String>>, /// <p>Amount of delay (seconds) to enforce on the playback of live content. 
/// If not specified, there will be no time delay in effect for the OriginEndpoint.</p> #[serde(rename = "TimeDelaySeconds")] #[serde(skip_serializing_if = "Option::is_none")] pub time_delay_seconds: Option<i64>, /// <p>The URL of the packaged OriginEndpoint for consumption.</p> #[serde(rename = "Url")] #[serde(skip_serializing_if = "Option::is_none")] pub url: Option<String>, /// <p>A list of source IP CIDR blocks that will be allowed to access the OriginEndpoint.</p> #[serde(rename = "Whitelist")] #[serde(skip_serializing_if = "Option::is_none")] pub whitelist: Option<Vec<String>>, } /// <p>A Dynamic Adaptive Streaming over HTTP (DASH) encryption configuration.</p> #[derive(Clone, Debug, Default, Deserialize, PartialEq, Serialize)] pub struct DashEncryption { /// <p>Time (in seconds) between each encryption key rotation.</p> #[serde(rename = "KeyRotationIntervalSeconds")] #[serde(skip_serializing_if = "Option::is_none")] pub key_rotation_interval_seconds: Option<i64>, #[serde(rename = "SpekeKeyProvider")] pub speke_key_provider: SpekeKeyProvider, } /// <p>A Dynamic Adaptive Streaming over HTTP (DASH) packaging configuration.</p> #[derive(Clone, Debug, Default, Deserialize, PartialEq, Serialize)] pub struct DashPackage { #[serde(rename = "AdTriggers")] #[serde(skip_serializing_if = "Option::is_none")] pub ad_triggers: Option<Vec<String>>, #[serde(rename = "AdsOnDeliveryRestrictions")] #[serde(skip_serializing_if = "Option::is_none")] pub ads_on_delivery_restrictions: Option<String>, #[serde(rename = "Encryption")] #[serde(skip_serializing_if = "Option::is_none")] pub encryption: Option<DashEncryption>, /// <p>Determines the position of some tags in the Media Presentation Description (MPD). When set to FULL, elements like SegmentTemplate and ContentProtection are included in each Representation. 
When set to COMPACT, duplicate elements are combined and presented at the AdaptationSet level.</p> #[serde(rename = "ManifestLayout")] #[serde(skip_serializing_if = "Option::is_none")] pub manifest_layout: Option<String>, /// <p>Time window (in seconds) contained in each manifest.</p> #[serde(rename = "ManifestWindowSeconds")] #[serde(skip_serializing_if = "Option::is_none")] pub manifest_window_seconds: Option<i64>, /// <p>Minimum duration (in seconds) that a player will buffer media before starting the presentation.</p> #[serde(rename = "MinBufferTimeSeconds")] #[serde(skip_serializing_if = "Option::is_none")] pub min_buffer_time_seconds: Option<i64>, /// <p>Minimum duration (in seconds) between potential changes to the Dynamic Adaptive Streaming over HTTP (DASH) Media Presentation Description (MPD).</p> #[serde(rename = "MinUpdatePeriodSeconds")] #[serde(skip_serializing_if = "Option::is_none")] pub min_update_period_seconds: Option<i64>, /// <p>A list of triggers that controls when the outgoing Dynamic Adaptive Streaming over HTTP (DASH) /// Media Presentation Description (MPD) will be partitioned into multiple periods. If empty, the content will not /// be partitioned into more than one period. If the list contains &quot;ADS&quot;, new periods will be created where /// the Channel source contains SCTE-35 ad markers.</p> #[serde(rename = "PeriodTriggers")] #[serde(skip_serializing_if = "Option::is_none")] pub period_triggers: Option<Vec<String>>, /// <p>The Dynamic Adaptive Streaming over HTTP (DASH) profile type. When set to &quot;HBBTV<em>1</em>5&quot;, HbbTV 1.5 compliant output is enabled.</p> #[serde(rename = "Profile")] #[serde(skip_serializing_if = "Option::is_none")] pub profile: Option<String>, /// <p>Duration (in seconds) of each segment. 
Actual segments will be /// rounded to the nearest multiple of the source segment duration.</p> #[serde(rename = "SegmentDurationSeconds")] #[serde(skip_serializing_if = "Option::is_none")] pub segment_duration_seconds: Option<i64>, /// <p>Determines the type of SegmentTemplate included in the Media Presentation Description (MPD). When set to NUMBER<em>WITH</em>TIMELINE, a full timeline is presented in each SegmentTemplate, with $Number$ media URLs. When set to TIME<em>WITH</em>TIMELINE, a full timeline is presented in each SegmentTemplate, with $Time$ media URLs. When set to NUMBER<em>WITH</em>DURATION, only a duration is included in each SegmentTemplate, with $Number$ media URLs.</p> #[serde(rename = "SegmentTemplateFormat")] #[serde(skip_serializing_if = "Option::is_none")] pub segment_template_format: Option<String>, #[serde(rename = "StreamSelection")] #[serde(skip_serializing_if = "Option::is_none")] pub stream_selection: Option<StreamSelection>, /// <p>Duration (in seconds) to delay live content before presentation.</p> #[serde(rename = "SuggestedPresentationDelaySeconds")] #[serde(skip_serializing_if = "Option::is_none")] pub suggested_presentation_delay_seconds: Option<i64>, /// <p>Determines the type of UTCTiming included in the Media Presentation Description (MPD)</p> #[serde(rename = "UtcTiming")] #[serde(skip_serializing_if = "Option::is_none")] pub utc_timing: Option<String>, /// <p>Specifies the value attribute of the UTCTiming field when utcTiming is set to HTTP-ISO or HTTP-HEAD</p> #[serde(rename = "UtcTimingUri")] #[serde(skip_serializing_if = "Option::is_none")] pub utc_timing_uri: Option<String>, } #[derive(Clone, Debug, Default, PartialEq, Serialize)] #[cfg_attr(feature = "deserialize_structs", derive(Deserialize))] pub struct DeleteChannelRequest { /// <p>The ID of the Channel to delete.</p> #[serde(rename = "Id")] pub id: String, } #[derive(Clone, Debug, Default, Deserialize, PartialEq)] #[cfg_attr(any(test, feature = "serialize_structs"), 
derive(Serialize))] pub struct DeleteChannelResponse {} #[derive(Clone, Debug, Default, PartialEq, Serialize)] #[cfg_attr(feature = "deserialize_structs", derive(Deserialize))] pub struct DeleteOriginEndpointRequest { /// <p>The ID of the OriginEndpoint to delete.</p> #[serde(rename = "Id")] pub id: String, } #[derive(Clone, Debug, Default, Deserialize, PartialEq)] #[cfg_attr(any(test, feature = "serialize_structs"), derive(Serialize))] pub struct DeleteOriginEndpointResponse {} #[derive(Clone, Debug, Default, PartialEq, Serialize)] #[cfg_attr(feature = "deserialize_structs", derive(Deserialize))] pub struct DescribeChannelRequest { /// <p>The ID of a Channel.</p> #[serde(rename = "Id")] pub id: String, } #[derive(Clone, Debug, Default, Deserialize, PartialEq)] #[cfg_attr(any(test, feature = "serialize_structs"), derive(Serialize))] pub struct DescribeChannelResponse { /// <p>The Amazon Resource Name (ARN) assigned to the Channel.</p> #[serde(rename = "Arn")] #[serde(skip_serializing_if = "Option::is_none")] pub arn: Option<String>, /// <p>A short text description of the Channel.</p> #[serde(rename = "Description")] #[serde(skip_serializing_if = "Option::is_none")] pub description: Option<String>, #[serde(rename = "EgressAccessLogs")] #[serde(skip_serializing_if = "Option::is_none")] pub egress_access_logs: Option<EgressAccessLogs>, #[serde(rename = "HlsIngest")] #[serde(skip_serializing_if = "Option::is_none")] pub hls_ingest: Option<HlsIngest>, /// <p>The ID of the Channel.</p> #[serde(rename = "Id")] #[serde(skip_serializing_if = "Option::is_none")] pub id: Option<String>, #[serde(rename = "IngressAccessLogs")] #[serde(skip_serializing_if = "Option::is_none")] pub ingress_access_logs: Option<IngressAccessLogs>, #[serde(rename = "Tags")] #[serde(skip_serializing_if = "Option::is_none")] pub tags: Option<::std::collections::HashMap<String, String>>, } #[derive(Clone, Debug, Default, PartialEq, Serialize)] #[cfg_attr(feature = "deserialize_structs", 
derive(Deserialize))] pub struct DescribeHarvestJobRequest { /// <p>The ID of the HarvestJob.</p> #[serde(rename = "Id")] pub id: String, } #[derive(Clone, Debug, Default, Deserialize, PartialEq)] #[cfg_attr(any(test, feature = "serialize_structs"), derive(Serialize))] pub struct DescribeHarvestJobResponse { /// <p>The Amazon Resource Name (ARN) assigned to the HarvestJob.</p> #[serde(rename = "Arn")] #[serde(skip_serializing_if = "Option::is_none")] pub arn: Option<String>, /// <p>The ID of the Channel that the HarvestJob will harvest from.</p> #[serde(rename = "ChannelId")] #[serde(skip_serializing_if = "Option::is_none")] pub channel_id: Option<String>, /// <p>The time the HarvestJob was submitted</p> #[serde(rename = "CreatedAt")] #[serde(skip_serializing_if = "Option::is_none")] pub created_at: Option<String>, /// <p>The end of the time-window which will be harvested.</p> #[serde(rename = "EndTime")] #[serde(skip_serializing_if = "Option::is_none")] pub end_time: Option<String>, /// <p>The ID of the HarvestJob. The ID must be unique within the region /// and it cannot be changed after the HarvestJob is submitted.</p> #[serde(rename = "Id")] #[serde(skip_serializing_if = "Option::is_none")] pub id: Option<String>, /// <p>The ID of the OriginEndpoint that the HarvestJob will harvest from. /// This cannot be changed after the HarvestJob is submitted.</p> #[serde(rename = "OriginEndpointId")] #[serde(skip_serializing_if = "Option::is_none")] pub origin_endpoint_id: Option<String>, #[serde(rename = "S3Destination")] #[serde(skip_serializing_if = "Option::is_none")] pub s3_destination: Option<S3Destination>, /// <p>The start of the time-window which will be harvested.</p> #[serde(rename = "StartTime")] #[serde(skip_serializing_if = "Option::is_none")] pub start_time: Option<String>, /// <p>The current status of the HarvestJob. Consider setting up a CloudWatch Event to listen for /// HarvestJobs as they succeed or fail. 
In the event of failure, the CloudWatch Event will /// include an explanation of why the HarvestJob failed.</p> #[serde(rename = "Status")] #[serde(skip_serializing_if = "Option::is_none")] pub status: Option<String>, } #[derive(Clone, Debug, Default, PartialEq, Serialize)] #[cfg_attr(feature = "deserialize_structs", derive(Deserialize))] pub struct DescribeOriginEndpointRequest { /// <p>The ID of the OriginEndpoint.</p> #[serde(rename = "Id")] pub id: String, } #[derive(Clone, Debug, Default, Deserialize, PartialEq)] #[cfg_attr(any(test, feature = "serialize_structs"), derive(Serialize))] pub struct DescribeOriginEndpointResponse { /// <p>The Amazon Resource Name (ARN) assigned to the OriginEndpoint.</p> #[serde(rename = "Arn")] #[serde(skip_serializing_if = "Option::is_none")] pub arn: Option<String>, #[serde(rename = "Authorization")] #[serde(skip_serializing_if = "Option::is_none")] pub authorization: Option<Authorization>, /// <p>The ID of the Channel the OriginEndpoint is associated with.</p> #[serde(rename = "ChannelId")] #[serde(skip_serializing_if = "Option::is_none")] pub channel_id: Option<String>, #[serde(rename = "CmafPackage")] #[serde(skip_serializing_if = "Option::is_none")] pub cmaf_package: Option<CmafPackage>, #[serde(rename = "DashPackage")] #[serde(skip_serializing_if = "Option::is_none")] pub dash_package: Option<DashPackage>, /// <p>A short text description of the OriginEndpoint.</p> #[serde(rename = "Description")] #[serde(skip_serializing_if = "Option::is_none")] pub description: Option<String>, #[serde(rename = "HlsPackage")] #[serde(skip_serializing_if = "Option::is_none")] pub hls_package: Option<HlsPackage>, /// <p>The ID of the OriginEndpoint.</p> #[serde(rename = "Id")] #[serde(skip_serializing_if = "Option::is_none")] pub id: Option<String>, /// <p>A short string appended to the end of the OriginEndpoint URL.</p> #[serde(rename = "ManifestName")] #[serde(skip_serializing_if = "Option::is_none")] pub manifest_name: Option<String>, 
#[serde(rename = "MssPackage")] #[serde(skip_serializing_if = "Option::is_none")] pub mss_package: Option<MssPackage>, /// <p>Control whether origination of video is allowed for this OriginEndpoint. If set to ALLOW, the OriginEndpoint /// may by requested, pursuant to any other form of access control. If set to DENY, the OriginEndpoint may not be /// requested. This can be helpful for Live to VOD harvesting, or for temporarily disabling origination</p> #[serde(rename = "Origination")] #[serde(skip_serializing_if = "Option::is_none")] pub origination: Option<String>, /// <p>Maximum duration (seconds) of content to retain for startover playback. /// If not specified, startover playback will be disabled for the OriginEndpoint.</p> #[serde(rename = "StartoverWindowSeconds")] #[serde(skip_serializing_if = "Option::is_none")] pub startover_window_seconds: Option<i64>, #[serde(rename = "Tags")] #[serde(skip_serializing_if = "Option::is_none")] pub tags: Option<::std::collections::HashMap<String, String>>, /// <p>Amount of delay (seconds) to enforce on the playback of live content. 
/// If not specified, there will be no time delay in effect for the OriginEndpoint.</p> #[serde(rename = "TimeDelaySeconds")] #[serde(skip_serializing_if = "Option::is_none")] pub time_delay_seconds: Option<i64>, /// <p>The URL of the packaged OriginEndpoint for consumption.</p> #[serde(rename = "Url")] #[serde(skip_serializing_if = "Option::is_none")] pub url: Option<String>, /// <p>A list of source IP CIDR blocks that will be allowed to access the OriginEndpoint.</p> #[serde(rename = "Whitelist")] #[serde(skip_serializing_if = "Option::is_none")] pub whitelist: Option<Vec<String>>, } /// <p>Configure egress access logging.</p> #[derive(Clone, Debug, Default, Deserialize, PartialEq, Serialize)] pub struct EgressAccessLogs { /// <p>Customize the log group name.</p> #[serde(rename = "LogGroupName")] #[serde(skip_serializing_if = "Option::is_none")] pub log_group_name: Option<String>, } /// <p>A HarvestJob resource configuration</p> #[derive(Clone, Debug, Default, Deserialize, PartialEq)] #[cfg_attr(any(test, feature = "serialize_structs"), derive(Serialize))] pub struct HarvestJob { /// <p>The Amazon Resource Name (ARN) assigned to the HarvestJob.</p> #[serde(rename = "Arn")] #[serde(skip_serializing_if = "Option::is_none")] pub arn: Option<String>, /// <p>The ID of the Channel that the HarvestJob will harvest from.</p> #[serde(rename = "ChannelId")] #[serde(skip_serializing_if = "Option::is_none")] pub channel_id: Option<String>, /// <p>The time the HarvestJob was submitted</p> #[serde(rename = "CreatedAt")] #[serde(skip_serializing_if = "Option::is_none")] pub created_at: Option<String>, /// <p>The end of the time-window which will be harvested.</p> #[serde(rename = "EndTime")] #[serde(skip_serializing_if = "Option::is_none")] pub end_time: Option<String>, /// <p>The ID of the HarvestJob. 
The ID must be unique within the region /// and it cannot be changed after the HarvestJob is submitted.</p> #[serde(rename = "Id")] #[serde(skip_serializing_if = "Option::is_none")] pub id: Option<String>, /// <p>The ID of the OriginEndpoint that the HarvestJob will harvest from. /// This cannot be changed after the HarvestJob is submitted.</p> #[serde(rename = "OriginEndpointId")] #[serde(skip_serializing_if = "Option::is_none")] pub origin_endpoint_id: Option<String>, #[serde(rename = "S3Destination")] #[serde(skip_serializing_if = "Option::is_none")] pub s3_destination: Option<S3Destination>, /// <p>The start of the time-window which will be harvested.</p> #[serde(rename = "StartTime")] #[serde(skip_serializing_if = "Option::is_none")] pub start_time: Option<String>, /// <p>The current status of the HarvestJob. Consider setting up a CloudWatch Event to listen for /// HarvestJobs as they succeed or fail. In the event of failure, the CloudWatch Event will /// include an explanation of why the HarvestJob failed.</p> #[serde(rename = "Status")] #[serde(skip_serializing_if = "Option::is_none")] pub status: Option<String>, } /// <p>An HTTP Live Streaming (HLS) encryption configuration.</p> #[derive(Clone, Debug, Default, Deserialize, PartialEq, Serialize)] pub struct HlsEncryption { /// <p>A constant initialization vector for encryption (optional). 
/// When not specified the initialization vector will be periodically rotated.</p> #[serde(rename = "ConstantInitializationVector")] #[serde(skip_serializing_if = "Option::is_none")] pub constant_initialization_vector: Option<String>, /// <p>The encryption method to use.</p> #[serde(rename = "EncryptionMethod")] #[serde(skip_serializing_if = "Option::is_none")] pub encryption_method: Option<String>, /// <p>Interval (in seconds) between each encryption key rotation.</p> #[serde(rename = "KeyRotationIntervalSeconds")] #[serde(skip_serializing_if = "Option::is_none")] pub key_rotation_interval_seconds: Option<i64>, /// <p>When enabled, the EXT-X-KEY tag will be repeated in output manifests.</p> #[serde(rename = "RepeatExtXKey")] #[serde(skip_serializing_if = "Option::is_none")] pub repeat_ext_x_key: Option<bool>, #[serde(rename = "SpekeKeyProvider")] pub speke_key_provider: SpekeKeyProvider, } /// <p>An HTTP Live Streaming (HLS) ingest resource configuration.</p> #[derive(Clone, Debug, Default, Deserialize, PartialEq)] #[cfg_attr(any(test, feature = "serialize_structs"), derive(Serialize))] pub struct HlsIngest { /// <p>A list of endpoints to which the source stream should be sent.</p> #[serde(rename = "IngestEndpoints")] #[serde(skip_serializing_if = "Option::is_none")] pub ingest_endpoints: Option<Vec<IngestEndpoint>>, } /// <p>A HTTP Live Streaming (HLS) manifest configuration.</p> #[derive(Clone, Debug, Default, Deserialize, PartialEq)] #[cfg_attr(any(test, feature = "serialize_structs"), derive(Serialize))] pub struct HlsManifest { /// <p>This setting controls how ad markers are included in the packaged OriginEndpoint. /// &quot;NONE&quot; will omit all SCTE-35 ad markers from the output. /// &quot;PASSTHROUGH&quot; causes the manifest to contain a copy of the SCTE-35 ad /// markers (comments) taken directly from the input HTTP Live Streaming (HLS) manifest. 
/// &quot;SCTE35_ENHANCED&quot; generates ad markers and blackout tags based on SCTE-35 /// messages in the input source. /// &quot;DATERANGE&quot; inserts EXT-X-DATERANGE tags to signal ad and program transition events /// in HLS and CMAF manifests. For this option, you must set a programDateTimeIntervalSeconds value /// that is greater than 0.</p> #[serde(rename = "AdMarkers")] #[serde(skip_serializing_if = "Option::is_none")] pub ad_markers: Option<String>, /// <p>The ID of the manifest. The ID must be unique within the OriginEndpoint and it cannot be changed after it is created.</p> #[serde(rename = "Id")] pub id: String, /// <p>When enabled, an I-Frame only stream will be included in the output.</p> #[serde(rename = "IncludeIframeOnlyStream")] #[serde(skip_serializing_if = "Option::is_none")] pub include_iframe_only_stream: Option<bool>, /// <p>An optional short string appended to the end of the OriginEndpoint URL. If not specified, defaults to the manifestName for the OriginEndpoint.</p> #[serde(rename = "ManifestName")] #[serde(skip_serializing_if = "Option::is_none")] pub manifest_name: Option<String>, /// <p>The HTTP Live Streaming (HLS) playlist type. /// When either &quot;EVENT&quot; or &quot;VOD&quot; is specified, a corresponding EXT-X-PLAYLIST-TYPE /// entry will be included in the media playlist.</p> #[serde(rename = "PlaylistType")] #[serde(skip_serializing_if = "Option::is_none")] pub playlist_type: Option<String>, /// <p>Time window (in seconds) contained in each parent manifest.</p> #[serde(rename = "PlaylistWindowSeconds")] #[serde(skip_serializing_if = "Option::is_none")] pub playlist_window_seconds: Option<i64>, /// <p>The interval (in seconds) between each EXT-X-PROGRAM-DATE-TIME tag /// inserted into manifests. Additionally, when an interval is specified /// ID3Timed Metadata messages will be generated every 5 seconds using the /// ingest time of the content. 
/// If the interval is not specified, or set to 0, then /// no EXT-X-PROGRAM-DATE-TIME tags will be inserted into manifests and no /// ID3Timed Metadata messages will be generated. Note that irrespective /// of this parameter, if any ID3 Timed Metadata is found in HTTP Live Streaming (HLS) input, /// it will be passed through to HLS output.</p> #[serde(rename = "ProgramDateTimeIntervalSeconds")] #[serde(skip_serializing_if = "Option::is_none")] pub program_date_time_interval_seconds: Option<i64>, /// <p>The URL of the packaged OriginEndpoint for consumption.</p> #[serde(rename = "Url")] #[serde(skip_serializing_if = "Option::is_none")] pub url: Option<String>, } /// <p>A HTTP Live Streaming (HLS) manifest configuration.</p> #[derive(Clone, Debug, Default, PartialEq, Serialize)] #[cfg_attr(feature = "deserialize_structs", derive(Deserialize))] pub struct HlsManifestCreateOrUpdateParameters { /// <p>This setting controls how ad markers are included in the packaged OriginEndpoint. /// &quot;NONE&quot; will omit all SCTE-35 ad markers from the output. /// &quot;PASSTHROUGH&quot; causes the manifest to contain a copy of the SCTE-35 ad /// markers (comments) taken directly from the input HTTP Live Streaming (HLS) manifest. /// &quot;SCTE35_ENHANCED&quot; generates ad markers and blackout tags based on SCTE-35 /// messages in the input source. /// &quot;DATERANGE&quot; inserts EXT-X-DATERANGE tags to signal ad and program transition events /// in HLS and CMAF manifests. For this option, you must set a programDateTimeIntervalSeconds value /// that is greater than 0.</p> #[serde(rename = "AdMarkers")] #[serde(skip_serializing_if = "Option::is_none")] pub ad_markers: Option<String>, #[serde(rename = "AdTriggers")] #[serde(skip_serializing_if = "Option::is_none")] pub ad_triggers: Option<Vec<String>>, #[serde(rename = "AdsOnDeliveryRestrictions")] #[serde(skip_serializing_if = "Option::is_none")] pub ads_on_delivery_restrictions: Option<String>, /// <p>The ID of the manifest. 
The ID must be unique within the OriginEndpoint and it cannot be changed after it is created.</p> #[serde(rename = "Id")] pub id: String, /// <p>When enabled, an I-Frame only stream will be included in the output.</p> #[serde(rename = "IncludeIframeOnlyStream")] #[serde(skip_serializing_if = "Option::is_none")] pub include_iframe_only_stream: Option<bool>, /// <p>An optional short string appended to the end of the OriginEndpoint URL. If not specified, defaults to the manifestName for the OriginEndpoint.</p> #[serde(rename = "ManifestName")] #[serde(skip_serializing_if = "Option::is_none")] pub manifest_name: Option<String>, /// <p>The HTTP Live Streaming (HLS) playlist type. /// When either &quot;EVENT&quot; or &quot;VOD&quot; is specified, a corresponding EXT-X-PLAYLIST-TYPE /// entry will be included in the media playlist.</p> #[serde(rename = "PlaylistType")] #[serde(skip_serializing_if = "Option::is_none")] pub playlist_type: Option<String>, /// <p>Time window (in seconds) contained in each parent manifest.</p> #[serde(rename = "PlaylistWindowSeconds")] #[serde(skip_serializing_if = "Option::is_none")] pub playlist_window_seconds: Option<i64>, /// <p>The interval (in seconds) between each EXT-X-PROGRAM-DATE-TIME tag /// inserted into manifests. Additionally, when an interval is specified /// ID3Timed Metadata messages will be generated every 5 seconds using the /// ingest time of the content. /// If the interval is not specified, or set to 0, then /// no EXT-X-PROGRAM-DATE-TIME tags will be inserted into manifests and no /// ID3Timed Metadata messages will be generated. 
Note that irrespective /// of this parameter, if any ID3 Timed Metadata is found in HTTP Live Streaming (HLS) input, /// it will be passed through to HLS output.</p> #[serde(rename = "ProgramDateTimeIntervalSeconds")] #[serde(skip_serializing_if = "Option::is_none")] pub program_date_time_interval_seconds: Option<i64>, } /// <p>An HTTP Live Streaming (HLS) packaging configuration.</p> #[derive(Clone, Debug, Default, Deserialize, PartialEq, Serialize)] pub struct HlsPackage { /// <p>This setting controls how ad markers are included in the packaged OriginEndpoint. /// &quot;NONE&quot; will omit all SCTE-35 ad markers from the output. /// &quot;PASSTHROUGH&quot; causes the manifest to contain a copy of the SCTE-35 ad /// markers (comments) taken directly from the input HTTP Live Streaming (HLS) manifest. /// &quot;SCTE35_ENHANCED&quot; generates ad markers and blackout tags based on SCTE-35 /// messages in the input source. /// &quot;DATERANGE&quot; inserts EXT-X-DATERANGE tags to signal ad and program transition events /// in HLS and CMAF manifests. For this option, you must set a programDateTimeIntervalSeconds value /// that is greater than 0.</p> #[serde(rename = "AdMarkers")] #[serde(skip_serializing_if = "Option::is_none")] pub ad_markers: Option<String>, #[serde(rename = "AdTriggers")] #[serde(skip_serializing_if = "Option::is_none")] pub ad_triggers: Option<Vec<String>>, #[serde(rename = "AdsOnDeliveryRestrictions")] #[serde(skip_serializing_if = "Option::is_none")] pub ads_on_delivery_restrictions: Option<String>, #[serde(rename = "Encryption")] #[serde(skip_serializing_if = "Option::is_none")] pub encryption: Option<HlsEncryption>, /// <p>When enabled, an I-Frame only stream will be included in the output.</p> #[serde(rename = "IncludeIframeOnlyStream")] #[serde(skip_serializing_if = "Option::is_none")] pub include_iframe_only_stream: Option<bool>, /// <p>The HTTP Live Streaming (HLS) playlist type. 
/// When either &quot;EVENT&quot; or &quot;VOD&quot; is specified, a corresponding EXT-X-PLAYLIST-TYPE /// entry will be included in the media playlist.</p> #[serde(rename = "PlaylistType")] #[serde(skip_serializing_if = "Option::is_none")] pub playlist_type: Option<String>, /// <p>Time window (in seconds) contained in each parent manifest.</p> #[serde(rename = "PlaylistWindowSeconds")] #[serde(skip_serializing_if = "Option::is_none")] pub playlist_window_seconds: Option<i64>, /// <p>The interval (in seconds) between each EXT-X-PROGRAM-DATE-TIME tag /// inserted into manifests. Additionally, when an interval is specified /// ID3Timed Metadata messages will be generated every 5 seconds using the /// ingest time of the content. /// If the interval is not specified, or set to 0, then /// no EXT-X-PROGRAM-DATE-TIME tags will be inserted into manifests and no /// ID3Timed Metadata messages will be generated. Note that irrespective /// of this parameter, if any ID3 Timed Metadata is found in HTTP Live Streaming (HLS) input, /// it will be passed through to HLS output.</p> #[serde(rename = "ProgramDateTimeIntervalSeconds")] #[serde(skip_serializing_if = "Option::is_none")] pub program_date_time_interval_seconds: Option<i64>, /// <p>Duration (in seconds) of each fragment. 
Actual fragments will be /// rounded to the nearest multiple of the source fragment duration.</p> #[serde(rename = "SegmentDurationSeconds")] #[serde(skip_serializing_if = "Option::is_none")] pub segment_duration_seconds: Option<i64>, #[serde(rename = "StreamSelection")] #[serde(skip_serializing_if = "Option::is_none")] pub stream_selection: Option<StreamSelection>, /// <p>When enabled, audio streams will be placed in rendition groups in the output.</p> #[serde(rename = "UseAudioRenditionGroup")] #[serde(skip_serializing_if = "Option::is_none")] pub use_audio_rendition_group: Option<bool>, } /// <p>An endpoint for ingesting source content for a Channel.</p> #[derive(Clone, Debug, Default, Deserialize, PartialEq)] #[cfg_attr(any(test, feature = "serialize_structs"), derive(Serialize))] pub struct IngestEndpoint { /// <p>The system generated unique identifier for the IngestEndpoint</p> #[serde(rename = "Id")] #[serde(skip_serializing_if = "Option::is_none")] pub id: Option<String>, /// <p>The system generated password for ingest authentication.</p> #[serde(rename = "Password")] #[serde(skip_serializing_if = "Option::is_none")] pub password: Option<String>, /// <p>The ingest URL to which the source stream should be sent.</p> #[serde(rename = "Url")] #[serde(skip_serializing_if = "Option::is_none")] pub url: Option<String>, /// <p>The system generated username for ingest authentication.</p> #[serde(rename = "Username")] #[serde(skip_serializing_if = "Option::is_none")] pub username: Option<String>, } /// <p>Configure ingress access logging.</p> #[derive(Clone, Debug, Default, Deserialize, PartialEq, Serialize)] pub struct IngressAccessLogs { /// <p>Customize the log group name.</p> #[serde(rename = "LogGroupName")] #[serde(skip_serializing_if = "Option::is_none")] pub log_group_name: Option<String>, } #[derive(Clone, Debug, Default, PartialEq, Serialize)] #[cfg_attr(feature = "deserialize_structs", derive(Deserialize))] pub struct ListChannelsRequest { /// <p>Upper 
bound on number of records to return.</p> #[serde(rename = "MaxResults")] #[serde(skip_serializing_if = "Option::is_none")] pub max_results: Option<i64>, /// <p>A token used to resume pagination from the end of a previous request.</p> #[serde(rename = "NextToken")] #[serde(skip_serializing_if = "Option::is_none")] pub next_token: Option<String>, } #[derive(Clone, Debug, Default, Deserialize, PartialEq)] #[cfg_attr(any(test, feature = "serialize_structs"), derive(Serialize))] pub struct ListChannelsResponse { /// <p>A list of Channel records.</p> #[serde(rename = "Channels")] #[serde(skip_serializing_if = "Option::is_none")] pub channels: Option<Vec<Channel>>, /// <p>A token that can be used to resume pagination from the end of the collection.</p> #[serde(rename = "NextToken")] #[serde(skip_serializing_if = "Option::is_none")] pub next_token: Option<String>, } #[derive(Clone, Debug, Default, PartialEq, Serialize)] #[cfg_attr(feature = "deserialize_structs", derive(Deserialize))] pub struct ListHarvestJobsRequest { /// <p>When specified, the request will return only HarvestJobs associated with the given Channel ID.</p> #[serde(rename = "IncludeChannelId")] #[serde(skip_serializing_if = "Option::is_none")] pub include_channel_id: Option<String>, /// <p>When specified, the request will return only HarvestJobs in the given status.</p> #[serde(rename = "IncludeStatus")] #[serde(skip_serializing_if = "Option::is_none")] pub include_status: Option<String>, /// <p>The upper bound on the number of records to return.</p> #[serde(rename = "MaxResults")] #[serde(skip_serializing_if = "Option::is_none")] pub max_results: Option<i64>, /// <p>A token used to resume pagination from the end of a previous request.</p> #[serde(rename = "NextToken")] #[serde(skip_serializing_if = "Option::is_none")] pub next_token: Option<String>, } #[derive(Clone, Debug, Default, Deserialize, PartialEq)] #[cfg_attr(any(test, feature = "serialize_structs"), derive(Serialize))] pub struct 
ListHarvestJobsResponse { /// <p>A list of HarvestJob records.</p> #[serde(rename = "HarvestJobs")] #[serde(skip_serializing_if = "Option::is_none")] pub harvest_jobs: Option<Vec<HarvestJob>>, /// <p>A token that can be used to resume pagination from the end of the collection.</p> #[serde(rename = "NextToken")] #[serde(skip_serializing_if = "Option::is_none")] pub next_token: Option<String>, } #[derive(Clone, Debug, Default, PartialEq, Serialize)] #[cfg_attr(feature = "deserialize_structs", derive(Deserialize))] pub struct ListOriginEndpointsRequest { /// <p>When specified, the request will return only OriginEndpoints associated with the given Channel ID.</p> #[serde(rename = "ChannelId")] #[serde(skip_serializing_if = "Option::is_none")] pub channel_id: Option<String>, /// <p>The upper bound on the number of records to return.</p> #[serde(rename = "MaxResults")] #[serde(skip_serializing_if = "Option::is_none")] pub max_results: Option<i64>, /// <p>A token used to resume pagination from the end of a previous request.</p> #[serde(rename = "NextToken")] #[serde(skip_serializing_if = "Option::is_none")] pub next_token: Option<String>, } #[derive(Clone, Debug, Default, Deserialize, PartialEq)] #[cfg_attr(any(test, feature = "serialize_structs"), derive(Serialize))] pub struct ListOriginEndpointsResponse { /// <p>A token that can be used to resume pagination from the end of the collection.</p> #[serde(rename = "NextToken")] #[serde(skip_serializing_if = "Option::is_none")] pub next_token: Option<String>, /// <p>A list of OriginEndpoint records.</p> #[serde(rename = "OriginEndpoints")] #[serde(skip_serializing_if = "Option::is_none")] pub origin_endpoints: Option<Vec<OriginEndpoint>>, } #[derive(Clone, Debug, Default, PartialEq, Serialize)] #[cfg_attr(feature = "deserialize_structs", derive(Deserialize))] pub struct ListTagsForResourceRequest { #[serde(rename = "ResourceArn")] pub resource_arn: String, } #[derive(Clone, Debug, Default, Deserialize, PartialEq)] 
#[cfg_attr(any(test, feature = "serialize_structs"), derive(Serialize))] pub struct ListTagsForResourceResponse { #[serde(rename = "Tags")] #[serde(skip_serializing_if = "Option::is_none")]<|fim▁hole|> /// <p>A Microsoft Smooth Streaming (MSS) encryption configuration.</p> #[derive(Clone, Debug, Default, Deserialize, PartialEq, Serialize)] pub struct MssEncryption { #[serde(rename = "SpekeKeyProvider")] pub speke_key_provider: SpekeKeyProvider, } /// <p>A Microsoft Smooth Streaming (MSS) packaging configuration.</p> #[derive(Clone, Debug, Default, Deserialize, PartialEq, Serialize)] pub struct MssPackage { #[serde(rename = "Encryption")] #[serde(skip_serializing_if = "Option::is_none")] pub encryption: Option<MssEncryption>, /// <p>The time window (in seconds) contained in each manifest.</p> #[serde(rename = "ManifestWindowSeconds")] #[serde(skip_serializing_if = "Option::is_none")] pub manifest_window_seconds: Option<i64>, /// <p>The duration (in seconds) of each segment.</p> #[serde(rename = "SegmentDurationSeconds")] #[serde(skip_serializing_if = "Option::is_none")] pub segment_duration_seconds: Option<i64>, #[serde(rename = "StreamSelection")] #[serde(skip_serializing_if = "Option::is_none")] pub stream_selection: Option<StreamSelection>, } /// <p>An OriginEndpoint resource configuration.</p> #[derive(Clone, Debug, Default, Deserialize, PartialEq)] #[cfg_attr(any(test, feature = "serialize_structs"), derive(Serialize))] pub struct OriginEndpoint { /// <p>The Amazon Resource Name (ARN) assigned to the OriginEndpoint.</p> #[serde(rename = "Arn")] #[serde(skip_serializing_if = "Option::is_none")] pub arn: Option<String>, #[serde(rename = "Authorization")] #[serde(skip_serializing_if = "Option::is_none")] pub authorization: Option<Authorization>, /// <p>The ID of the Channel the OriginEndpoint is associated with.</p> #[serde(rename = "ChannelId")] #[serde(skip_serializing_if = "Option::is_none")] pub channel_id: Option<String>, #[serde(rename = "CmafPackage")] 
#[serde(skip_serializing_if = "Option::is_none")]
    pub cmaf_package: Option<CmafPackage>,
    #[serde(rename = "DashPackage")]
    #[serde(skip_serializing_if = "Option::is_none")]
    pub dash_package: Option<DashPackage>,
    /// <p>A short text description of the OriginEndpoint.</p>
    #[serde(rename = "Description")]
    #[serde(skip_serializing_if = "Option::is_none")]
    pub description: Option<String>,
    #[serde(rename = "HlsPackage")]
    #[serde(skip_serializing_if = "Option::is_none")]
    pub hls_package: Option<HlsPackage>,
    /// <p>The ID of the OriginEndpoint.</p>
    #[serde(rename = "Id")]
    #[serde(skip_serializing_if = "Option::is_none")]
    pub id: Option<String>,
    /// <p>A short string appended to the end of the OriginEndpoint URL.</p>
    #[serde(rename = "ManifestName")]
    #[serde(skip_serializing_if = "Option::is_none")]
    pub manifest_name: Option<String>,
    #[serde(rename = "MssPackage")]
    #[serde(skip_serializing_if = "Option::is_none")]
    pub mss_package: Option<MssPackage>,
    /// <p>Control whether origination of video is allowed for this OriginEndpoint. If set to ALLOW, the OriginEndpoint
    /// may by requested, pursuant to any other form of access control. If set to DENY, the OriginEndpoint may not be
    /// requested. This can be helpful for Live to VOD harvesting, or for temporarily disabling origination</p>
    #[serde(rename = "Origination")]
    #[serde(skip_serializing_if = "Option::is_none")]
    pub origination: Option<String>,
    /// <p>Maximum duration (seconds) of content to retain for startover playback.
    /// If not specified, startover playback will be disabled for the OriginEndpoint.</p>
    #[serde(rename = "StartoverWindowSeconds")]
    #[serde(skip_serializing_if = "Option::is_none")]
    pub startover_window_seconds: Option<i64>,
    #[serde(rename = "Tags")]
    #[serde(skip_serializing_if = "Option::is_none")]
    pub tags: Option<::std::collections::HashMap<String, String>>,
    /// <p>Amount of delay (seconds) to enforce on the playback of live content.
    /// If not specified, there will be no time delay in effect for the OriginEndpoint.</p>
    #[serde(rename = "TimeDelaySeconds")]
    #[serde(skip_serializing_if = "Option::is_none")]
    pub time_delay_seconds: Option<i64>,
    /// <p>The URL of the packaged OriginEndpoint for consumption.</p>
    #[serde(rename = "Url")]
    #[serde(skip_serializing_if = "Option::is_none")]
    pub url: Option<String>,
    /// <p>A list of source IP CIDR blocks that will be allowed to access the OriginEndpoint.</p>
    #[serde(rename = "Whitelist")]
    #[serde(skip_serializing_if = "Option::is_none")]
    pub whitelist: Option<Vec<String>>,
}

#[derive(Clone, Debug, Default, PartialEq, Serialize)]
#[cfg_attr(feature = "deserialize_structs", derive(Deserialize))]
pub struct RotateChannelCredentialsRequest {
    /// <p>The ID of the channel to update.</p>
    #[serde(rename = "Id")]
    pub id: String,
}

#[derive(Clone, Debug, Default, Deserialize, PartialEq)]
#[cfg_attr(any(test, feature = "serialize_structs"), derive(Serialize))]
pub struct RotateChannelCredentialsResponse {
    /// <p>The Amazon Resource Name (ARN) assigned to the Channel.</p>
    #[serde(rename = "Arn")]
    #[serde(skip_serializing_if = "Option::is_none")]
    pub arn: Option<String>,
    /// <p>A short text description of the Channel.</p>
    #[serde(rename = "Description")]
    #[serde(skip_serializing_if = "Option::is_none")]
    pub description: Option<String>,
    #[serde(rename = "EgressAccessLogs")]
    #[serde(skip_serializing_if = "Option::is_none")]
    pub egress_access_logs: Option<EgressAccessLogs>,
    #[serde(rename = "HlsIngest")]
    #[serde(skip_serializing_if = "Option::is_none")]
    pub hls_ingest: Option<HlsIngest>,
    /// <p>The ID of the Channel.</p>
    #[serde(rename = "Id")]
    #[serde(skip_serializing_if = "Option::is_none")]
    pub id: Option<String>,
    #[serde(rename = "IngressAccessLogs")]
    #[serde(skip_serializing_if = "Option::is_none")]
    pub ingress_access_logs: Option<IngressAccessLogs>,
    #[serde(rename = "Tags")]
    #[serde(skip_serializing_if = "Option::is_none")]
    pub tags: Option<::std::collections::HashMap<String, String>>,
}

#[derive(Clone, Debug, Default, PartialEq, Serialize)]
#[cfg_attr(feature = "deserialize_structs", derive(Deserialize))]
pub struct RotateIngestEndpointCredentialsRequest {
    /// <p>The ID of the channel the IngestEndpoint is on.</p>
    #[serde(rename = "Id")]
    pub id: String,
    /// <p>The id of the IngestEndpoint whose credentials should be rotated</p>
    #[serde(rename = "IngestEndpointId")]
    pub ingest_endpoint_id: String,
}

#[derive(Clone, Debug, Default, Deserialize, PartialEq)]
#[cfg_attr(any(test, feature = "serialize_structs"), derive(Serialize))]
pub struct RotateIngestEndpointCredentialsResponse {
    /// <p>The Amazon Resource Name (ARN) assigned to the Channel.</p>
    #[serde(rename = "Arn")]
    #[serde(skip_serializing_if = "Option::is_none")]
    pub arn: Option<String>,
    /// <p>A short text description of the Channel.</p>
    #[serde(rename = "Description")]
    #[serde(skip_serializing_if = "Option::is_none")]
    pub description: Option<String>,
    #[serde(rename = "EgressAccessLogs")]
    #[serde(skip_serializing_if = "Option::is_none")]
    pub egress_access_logs: Option<EgressAccessLogs>,
    #[serde(rename = "HlsIngest")]
    #[serde(skip_serializing_if = "Option::is_none")]
    pub hls_ingest: Option<HlsIngest>,
    /// <p>The ID of the Channel.</p>
    #[serde(rename = "Id")]
    #[serde(skip_serializing_if = "Option::is_none")]
    pub id: Option<String>,
    #[serde(rename = "IngressAccessLogs")]
    #[serde(skip_serializing_if = "Option::is_none")]
    pub ingress_access_logs: Option<IngressAccessLogs>,
    #[serde(rename = "Tags")]
    #[serde(skip_serializing_if = "Option::is_none")]
    pub tags: Option<::std::collections::HashMap<String, String>>,
}

/// <p>Configuration parameters for where in an S3 bucket to place the harvested content</p>
#[derive(Clone, Debug, Default, Deserialize, PartialEq, Serialize)]
pub struct S3Destination {
    /// <p>The name of an S3 bucket within which harvested content will be exported</p>
    #[serde(rename = "BucketName")]
    pub bucket_name: String,
    /// <p>The key in the specified S3 bucket where the harvested top-level manifest will be placed.</p>
    #[serde(rename = "ManifestKey")]
    pub manifest_key: String,
    /// <p>The IAM role used to write to the specified S3 bucket</p>
    #[serde(rename = "RoleArn")]
    pub role_arn: String,
}

/// <p>A configuration for accessing an external Secure Packager and Encoder Key Exchange (SPEKE) service that will provide encryption keys.</p>
#[derive(Clone, Debug, Default, Deserialize, PartialEq, Serialize)]
pub struct SpekeKeyProvider {
    /// <p>An Amazon Resource Name (ARN) of a Certificate Manager certificate
    /// that MediaPackage will use for enforcing secure end-to-end data
    /// transfer with the key provider service.</p>
    #[serde(rename = "CertificateArn")]
    #[serde(skip_serializing_if = "Option::is_none")]
    pub certificate_arn: Option<String>,
    /// <p>The resource ID to include in key requests.</p>
    #[serde(rename = "ResourceId")]
    pub resource_id: String,
    /// <p>An Amazon Resource Name (ARN) of an IAM role that AWS Elemental
    /// MediaPackage will assume when accessing the key provider service.</p>
    #[serde(rename = "RoleArn")]
    pub role_arn: String,
    /// <p>The system IDs to include in key requests.</p>
    #[serde(rename = "SystemIds")]
    pub system_ids: Vec<String>,
    /// <p>The URL of the external key provider service.</p>
    #[serde(rename = "Url")]
    pub url: String,
}

/// <p>A StreamSelection configuration.</p>
#[derive(Clone, Debug, Default, Deserialize, PartialEq, Serialize)]
pub struct StreamSelection {
    /// <p>The maximum video bitrate (bps) to include in output.</p>
    #[serde(rename = "MaxVideoBitsPerSecond")]
    #[serde(skip_serializing_if = "Option::is_none")]
    pub max_video_bits_per_second: Option<i64>,
    /// <p>The minimum video bitrate (bps) to include in output.</p>
    #[serde(rename = "MinVideoBitsPerSecond")]
    #[serde(skip_serializing_if = "Option::is_none")]
    pub min_video_bits_per_second: Option<i64>,
    /// <p>A directive that determines the order of streams in the output.</p>
    #[serde(rename = "StreamOrder")]
    #[serde(skip_serializing_if = "Option::is_none")]
    pub stream_order: Option<String>,
}

#[derive(Clone, Debug, Default, PartialEq, Serialize)]
#[cfg_attr(feature = "deserialize_structs", derive(Deserialize))]
pub struct TagResourceRequest {
    #[serde(rename = "ResourceArn")]
    pub resource_arn: String,
    #[serde(rename = "Tags")]
    pub tags: ::std::collections::HashMap<String, String>,
}

#[derive(Clone, Debug, Default, PartialEq, Serialize)]
#[cfg_attr(feature = "deserialize_structs", derive(Deserialize))]
pub struct UntagResourceRequest {
    #[serde(rename = "ResourceArn")]
    pub resource_arn: String,
    /// <p>The key(s) of tag to be deleted</p>
    #[serde(rename = "TagKeys")]
    pub tag_keys: Vec<String>,
}

/// <p>Configuration parameters used to update the Channel.</p>
#[derive(Clone, Debug, Default, PartialEq, Serialize)]
#[cfg_attr(feature = "deserialize_structs", derive(Deserialize))]
pub struct UpdateChannelRequest {
    /// <p>A short text description of the Channel.</p>
    #[serde(rename = "Description")]
    #[serde(skip_serializing_if = "Option::is_none")]
    pub description: Option<String>,
    /// <p>The ID of the Channel to update.</p>
    #[serde(rename = "Id")]
    pub id: String,
}

#[derive(Clone, Debug, Default, Deserialize, PartialEq)]
#[cfg_attr(any(test, feature = "serialize_structs"), derive(Serialize))]
pub struct UpdateChannelResponse {
    /// <p>The Amazon Resource Name (ARN) assigned to the Channel.</p>
    #[serde(rename = "Arn")]
    #[serde(skip_serializing_if = "Option::is_none")]
    pub arn: Option<String>,
    /// <p>A short text description of the Channel.</p>
    #[serde(rename = "Description")]
    #[serde(skip_serializing_if = "Option::is_none")]
    pub description: Option<String>,
    #[serde(rename = "EgressAccessLogs")]
    #[serde(skip_serializing_if = "Option::is_none")]
    pub egress_access_logs: Option<EgressAccessLogs>,
    #[serde(rename = "HlsIngest")]
    #[serde(skip_serializing_if = "Option::is_none")]
    pub hls_ingest: Option<HlsIngest>,
    /// <p>The ID of the Channel.</p>
    #[serde(rename = "Id")]
#[serde(skip_serializing_if = "Option::is_none")]
    pub id: Option<String>,
    #[serde(rename = "IngressAccessLogs")]
    #[serde(skip_serializing_if = "Option::is_none")]
    pub ingress_access_logs: Option<IngressAccessLogs>,
    #[serde(rename = "Tags")]
    #[serde(skip_serializing_if = "Option::is_none")]
    pub tags: Option<::std::collections::HashMap<String, String>>,
}

/// <p>Configuration parameters used to update an existing OriginEndpoint.</p>
#[derive(Clone, Debug, Default, PartialEq, Serialize)]
#[cfg_attr(feature = "deserialize_structs", derive(Deserialize))]
pub struct UpdateOriginEndpointRequest {
    #[serde(rename = "Authorization")]
    #[serde(skip_serializing_if = "Option::is_none")]
    pub authorization: Option<Authorization>,
    #[serde(rename = "CmafPackage")]
    #[serde(skip_serializing_if = "Option::is_none")]
    pub cmaf_package: Option<CmafPackageCreateOrUpdateParameters>,
    #[serde(rename = "DashPackage")]
    #[serde(skip_serializing_if = "Option::is_none")]
    pub dash_package: Option<DashPackage>,
    /// <p>A short text description of the OriginEndpoint.</p>
    #[serde(rename = "Description")]
    #[serde(skip_serializing_if = "Option::is_none")]
    pub description: Option<String>,
    #[serde(rename = "HlsPackage")]
    #[serde(skip_serializing_if = "Option::is_none")]
    pub hls_package: Option<HlsPackage>,
    /// <p>The ID of the OriginEndpoint to update.</p>
    #[serde(rename = "Id")]
    pub id: String,
    /// <p>A short string that will be appended to the end of the Endpoint URL.</p>
    #[serde(rename = "ManifestName")]
    #[serde(skip_serializing_if = "Option::is_none")]
    pub manifest_name: Option<String>,
    #[serde(rename = "MssPackage")]
    #[serde(skip_serializing_if = "Option::is_none")]
    pub mss_package: Option<MssPackage>,
    /// <p>Control whether origination of video is allowed for this OriginEndpoint. If set to ALLOW, the OriginEndpoint
    /// may by requested, pursuant to any other form of access control. If set to DENY, the OriginEndpoint may not be
    /// requested. This can be helpful for Live to VOD harvesting, or for temporarily disabling origination</p>
    #[serde(rename = "Origination")]
    #[serde(skip_serializing_if = "Option::is_none")]
    pub origination: Option<String>,
    /// <p>Maximum duration (in seconds) of content to retain for startover playback.
    /// If not specified, startover playback will be disabled for the OriginEndpoint.</p>
    #[serde(rename = "StartoverWindowSeconds")]
    #[serde(skip_serializing_if = "Option::is_none")]
    pub startover_window_seconds: Option<i64>,
    /// <p>Amount of delay (in seconds) to enforce on the playback of live content.
    /// If not specified, there will be no time delay in effect for the OriginEndpoint.</p>
    #[serde(rename = "TimeDelaySeconds")]
    #[serde(skip_serializing_if = "Option::is_none")]
    pub time_delay_seconds: Option<i64>,
    /// <p>A list of source IP CIDR blocks that will be allowed to access the OriginEndpoint.</p>
    #[serde(rename = "Whitelist")]
    #[serde(skip_serializing_if = "Option::is_none")]
    pub whitelist: Option<Vec<String>>,
}

#[derive(Clone, Debug, Default, Deserialize, PartialEq)]
#[cfg_attr(any(test, feature = "serialize_structs"), derive(Serialize))]
pub struct UpdateOriginEndpointResponse {
    /// <p>The Amazon Resource Name (ARN) assigned to the OriginEndpoint.</p>
    #[serde(rename = "Arn")]
    #[serde(skip_serializing_if = "Option::is_none")]
    pub arn: Option<String>,
    #[serde(rename = "Authorization")]
    #[serde(skip_serializing_if = "Option::is_none")]
    pub authorization: Option<Authorization>,
    /// <p>The ID of the Channel the OriginEndpoint is associated with.</p>
    #[serde(rename = "ChannelId")]
    #[serde(skip_serializing_if = "Option::is_none")]
    pub channel_id: Option<String>,
    #[serde(rename = "CmafPackage")]
    #[serde(skip_serializing_if = "Option::is_none")]
    pub cmaf_package: Option<CmafPackage>,
    #[serde(rename = "DashPackage")]
    #[serde(skip_serializing_if = "Option::is_none")]
    pub dash_package: Option<DashPackage>,
    /// <p>A short text description of the OriginEndpoint.</p>
    #[serde(rename = "Description")]
    #[serde(skip_serializing_if = "Option::is_none")]
    pub description: Option<String>,
    #[serde(rename = "HlsPackage")]
    #[serde(skip_serializing_if = "Option::is_none")]
    pub hls_package: Option<HlsPackage>,
    /// <p>The ID of the OriginEndpoint.</p>
    #[serde(rename = "Id")]
    #[serde(skip_serializing_if = "Option::is_none")]
    pub id: Option<String>,
    /// <p>A short string appended to the end of the OriginEndpoint URL.</p>
    #[serde(rename = "ManifestName")]
    #[serde(skip_serializing_if = "Option::is_none")]
    pub manifest_name: Option<String>,
    #[serde(rename = "MssPackage")]
    #[serde(skip_serializing_if = "Option::is_none")]
    pub mss_package: Option<MssPackage>,
    /// <p>Control whether origination of video is allowed for this OriginEndpoint. If set to ALLOW, the OriginEndpoint
    /// may by requested, pursuant to any other form of access control. If set to DENY, the OriginEndpoint may not be
    /// requested. This can be helpful for Live to VOD harvesting, or for temporarily disabling origination</p>
    #[serde(rename = "Origination")]
    #[serde(skip_serializing_if = "Option::is_none")]
    pub origination: Option<String>,
    /// <p>Maximum duration (seconds) of content to retain for startover playback.
    /// If not specified, startover playback will be disabled for the OriginEndpoint.</p>
    #[serde(rename = "StartoverWindowSeconds")]
    #[serde(skip_serializing_if = "Option::is_none")]
    pub startover_window_seconds: Option<i64>,
    #[serde(rename = "Tags")]
    #[serde(skip_serializing_if = "Option::is_none")]
    pub tags: Option<::std::collections::HashMap<String, String>>,
    /// <p>Amount of delay (seconds) to enforce on the playback of live content.
/// If not specified, there will be no time delay in effect for the OriginEndpoint.</p> #[serde(rename = "TimeDelaySeconds")] #[serde(skip_serializing_if = "Option::is_none")] pub time_delay_seconds: Option<i64>, /// <p>The URL of the packaged OriginEndpoint for consumption.</p> #[serde(rename = "Url")] #[serde(skip_serializing_if = "Option::is_none")] pub url: Option<String>, /// <p>A list of source IP CIDR blocks that will be allowed to access the OriginEndpoint.</p> #[serde(rename = "Whitelist")] #[serde(skip_serializing_if = "Option::is_none")] pub whitelist: Option<Vec<String>>, } /// Errors returned by ConfigureLogs #[derive(Debug, PartialEq)] pub enum ConfigureLogsError { /// <p>The client is not authorized to access the requested resource.</p> Forbidden(String), /// <p>An unexpected error occurred.</p> InternalServerError(String), /// <p>The requested resource does not exist.</p> NotFound(String), /// <p>An unexpected error occurred.</p> ServiceUnavailable(String), /// <p>The client has exceeded their resource or throttling limits.</p> TooManyRequests(String), /// <p>The parameters sent in the request are not valid.</p> UnprocessableEntity(String), } impl ConfigureLogsError { pub fn from_response(res: BufferedHttpResponse) -> RusotoError<ConfigureLogsError> { if let Some(err) = proto::json::Error::parse_rest(&res) { match err.typ.as_str() { "ForbiddenException" => { return RusotoError::Service(ConfigureLogsError::Forbidden(err.msg)) } "InternalServerErrorException" => { return RusotoError::Service(ConfigureLogsError::InternalServerError(err.msg)) } "NotFoundException" => { return RusotoError::Service(ConfigureLogsError::NotFound(err.msg)) } "ServiceUnavailableException" => { return RusotoError::Service(ConfigureLogsError::ServiceUnavailable(err.msg)) } "TooManyRequestsException" => { return RusotoError::Service(ConfigureLogsError::TooManyRequests(err.msg)) } "UnprocessableEntityException" => { return 
RusotoError::Service(ConfigureLogsError::UnprocessableEntity(err.msg)) } "ValidationException" => return RusotoError::Validation(err.msg), _ => {} } } RusotoError::Unknown(res) } } impl fmt::Display for ConfigureLogsError { #[allow(unused_variables)] fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { match *self { ConfigureLogsError::Forbidden(ref cause) => write!(f, "{}", cause), ConfigureLogsError::InternalServerError(ref cause) => write!(f, "{}", cause), ConfigureLogsError::NotFound(ref cause) => write!(f, "{}", cause), ConfigureLogsError::ServiceUnavailable(ref cause) => write!(f, "{}", cause), ConfigureLogsError::TooManyRequests(ref cause) => write!(f, "{}", cause), ConfigureLogsError::UnprocessableEntity(ref cause) => write!(f, "{}", cause), } } } impl Error for ConfigureLogsError {} /// Errors returned by CreateChannel #[derive(Debug, PartialEq)] pub enum CreateChannelError { /// <p>The client is not authorized to access the requested resource.</p> Forbidden(String), /// <p>An unexpected error occurred.</p> InternalServerError(String), /// <p>The requested resource does not exist.</p> NotFound(String), /// <p>An unexpected error occurred.</p> ServiceUnavailable(String), /// <p>The client has exceeded their resource or throttling limits.</p> TooManyRequests(String), /// <p>The parameters sent in the request are not valid.</p> UnprocessableEntity(String), } impl CreateChannelError { pub fn from_response(res: BufferedHttpResponse) -> RusotoError<CreateChannelError> { if let Some(err) = proto::json::Error::parse_rest(&res) { match err.typ.as_str() { "ForbiddenException" => { return RusotoError::Service(CreateChannelError::Forbidden(err.msg)) } "InternalServerErrorException" => { return RusotoError::Service(CreateChannelError::InternalServerError(err.msg)) } "NotFoundException" => { return RusotoError::Service(CreateChannelError::NotFound(err.msg)) } "ServiceUnavailableException" => { return 
RusotoError::Service(CreateChannelError::ServiceUnavailable(err.msg)) } "TooManyRequestsException" => { return RusotoError::Service(CreateChannelError::TooManyRequests(err.msg)) } "UnprocessableEntityException" => { return RusotoError::Service(CreateChannelError::UnprocessableEntity(err.msg)) } "ValidationException" => return RusotoError::Validation(err.msg), _ => {} } } RusotoError::Unknown(res) } } impl fmt::Display for CreateChannelError { #[allow(unused_variables)] fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { match *self { CreateChannelError::Forbidden(ref cause) => write!(f, "{}", cause), CreateChannelError::InternalServerError(ref cause) => write!(f, "{}", cause), CreateChannelError::NotFound(ref cause) => write!(f, "{}", cause), CreateChannelError::ServiceUnavailable(ref cause) => write!(f, "{}", cause), CreateChannelError::TooManyRequests(ref cause) => write!(f, "{}", cause), CreateChannelError::UnprocessableEntity(ref cause) => write!(f, "{}", cause), } } } impl Error for CreateChannelError {} /// Errors returned by CreateHarvestJob #[derive(Debug, PartialEq)] pub enum CreateHarvestJobError { /// <p>The client is not authorized to access the requested resource.</p> Forbidden(String), /// <p>An unexpected error occurred.</p> InternalServerError(String), /// <p>The requested resource does not exist.</p> NotFound(String), /// <p>An unexpected error occurred.</p> ServiceUnavailable(String), /// <p>The client has exceeded their resource or throttling limits.</p> TooManyRequests(String), /// <p>The parameters sent in the request are not valid.</p> UnprocessableEntity(String), } impl CreateHarvestJobError { pub fn from_response(res: BufferedHttpResponse) -> RusotoError<CreateHarvestJobError> { if let Some(err) = proto::json::Error::parse_rest(&res) { match err.typ.as_str() { "ForbiddenException" => { return RusotoError::Service(CreateHarvestJobError::Forbidden(err.msg)) } "InternalServerErrorException" => { return 
RusotoError::Service(CreateHarvestJobError::InternalServerError( err.msg, )) } "NotFoundException" => { return RusotoError::Service(CreateHarvestJobError::NotFound(err.msg)) } "ServiceUnavailableException" => { return RusotoError::Service(CreateHarvestJobError::ServiceUnavailable(err.msg)) } "TooManyRequestsException" => { return RusotoError::Service(CreateHarvestJobError::TooManyRequests(err.msg)) } "UnprocessableEntityException" => { return RusotoError::Service(CreateHarvestJobError::UnprocessableEntity( err.msg, )) } "ValidationException" => return RusotoError::Validation(err.msg), _ => {} } } RusotoError::Unknown(res) } } impl fmt::Display for CreateHarvestJobError { #[allow(unused_variables)] fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { match *self { CreateHarvestJobError::Forbidden(ref cause) => write!(f, "{}", cause), CreateHarvestJobError::InternalServerError(ref cause) => write!(f, "{}", cause), CreateHarvestJobError::NotFound(ref cause) => write!(f, "{}", cause), CreateHarvestJobError::ServiceUnavailable(ref cause) => write!(f, "{}", cause), CreateHarvestJobError::TooManyRequests(ref cause) => write!(f, "{}", cause), CreateHarvestJobError::UnprocessableEntity(ref cause) => write!(f, "{}", cause), } } } impl Error for CreateHarvestJobError {} /// Errors returned by CreateOriginEndpoint #[derive(Debug, PartialEq)] pub enum CreateOriginEndpointError { /// <p>The client is not authorized to access the requested resource.</p> Forbidden(String), /// <p>An unexpected error occurred.</p> InternalServerError(String), /// <p>The requested resource does not exist.</p> NotFound(String), /// <p>An unexpected error occurred.</p> ServiceUnavailable(String), /// <p>The client has exceeded their resource or throttling limits.</p> TooManyRequests(String), /// <p>The parameters sent in the request are not valid.</p> UnprocessableEntity(String), } impl CreateOriginEndpointError { pub fn from_response(res: BufferedHttpResponse) -> 
RusotoError<CreateOriginEndpointError> { if let Some(err) = proto::json::Error::parse_rest(&res) { match err.typ.as_str() { "ForbiddenException" => { return RusotoError::Service(CreateOriginEndpointError::Forbidden(err.msg)) } "InternalServerErrorException" => { return RusotoError::Service(CreateOriginEndpointError::InternalServerError( err.msg, )) } "NotFoundException" => { return RusotoError::Service(CreateOriginEndpointError::NotFound(err.msg)) } "ServiceUnavailableException" => { return RusotoError::Service(CreateOriginEndpointError::ServiceUnavailable( err.msg, )) } "TooManyRequestsException" => { return RusotoError::Service(CreateOriginEndpointError::TooManyRequests( err.msg, )) } "UnprocessableEntityException" => { return RusotoError::Service(CreateOriginEndpointError::UnprocessableEntity( err.msg, )) } "ValidationException" => return RusotoError::Validation(err.msg), _ => {} } } RusotoError::Unknown(res) } } impl fmt::Display for CreateOriginEndpointError { #[allow(unused_variables)] fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { match *self { CreateOriginEndpointError::Forbidden(ref cause) => write!(f, "{}", cause), CreateOriginEndpointError::InternalServerError(ref cause) => write!(f, "{}", cause), CreateOriginEndpointError::NotFound(ref cause) => write!(f, "{}", cause), CreateOriginEndpointError::ServiceUnavailable(ref cause) => write!(f, "{}", cause), CreateOriginEndpointError::TooManyRequests(ref cause) => write!(f, "{}", cause), CreateOriginEndpointError::UnprocessableEntity(ref cause) => write!(f, "{}", cause), } } } impl Error for CreateOriginEndpointError {} /// Errors returned by DeleteChannel #[derive(Debug, PartialEq)] pub enum DeleteChannelError { /// <p>The client is not authorized to access the requested resource.</p> Forbidden(String), /// <p>An unexpected error occurred.</p> InternalServerError(String), /// <p>The requested resource does not exist.</p> NotFound(String), /// <p>An unexpected error occurred.</p> 
ServiceUnavailable(String), /// <p>The client has exceeded their resource or throttling limits.</p> TooManyRequests(String), /// <p>The parameters sent in the request are not valid.</p> UnprocessableEntity(String), } impl DeleteChannelError { pub fn from_response(res: BufferedHttpResponse) -> RusotoError<DeleteChannelError> { if let Some(err) = proto::json::Error::parse_rest(&res) { match err.typ.as_str() { "ForbiddenException" => { return RusotoError::Service(DeleteChannelError::Forbidden(err.msg)) } "InternalServerErrorException" => { return RusotoError::Service(DeleteChannelError::InternalServerError(err.msg)) } "NotFoundException" => { return RusotoError::Service(DeleteChannelError::NotFound(err.msg)) } "ServiceUnavailableException" => { return RusotoError::Service(DeleteChannelError::ServiceUnavailable(err.msg)) } "TooManyRequestsException" => { return RusotoError::Service(DeleteChannelError::TooManyRequests(err.msg)) } "UnprocessableEntityException" => { return RusotoError::Service(DeleteChannelError::UnprocessableEntity(err.msg)) } "ValidationException" => return RusotoError::Validation(err.msg), _ => {} } } RusotoError::Unknown(res) } } impl fmt::Display for DeleteChannelError { #[allow(unused_variables)] fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { match *self { DeleteChannelError::Forbidden(ref cause) => write!(f, "{}", cause), DeleteChannelError::InternalServerError(ref cause) => write!(f, "{}", cause), DeleteChannelError::NotFound(ref cause) => write!(f, "{}", cause), DeleteChannelError::ServiceUnavailable(ref cause) => write!(f, "{}", cause), DeleteChannelError::TooManyRequests(ref cause) => write!(f, "{}", cause), DeleteChannelError::UnprocessableEntity(ref cause) => write!(f, "{}", cause), } } } impl Error for DeleteChannelError {} /// Errors returned by DeleteOriginEndpoint #[derive(Debug, PartialEq)] pub enum DeleteOriginEndpointError { /// <p>The client is not authorized to access the requested resource.</p> Forbidden(String), /// <p>An 
unexpected error occurred.</p> InternalServerError(String), /// <p>The requested resource does not exist.</p> NotFound(String), /// <p>An unexpected error occurred.</p> ServiceUnavailable(String), /// <p>The client has exceeded their resource or throttling limits.</p> TooManyRequests(String), /// <p>The parameters sent in the request are not valid.</p> UnprocessableEntity(String), } impl DeleteOriginEndpointError { pub fn from_response(res: BufferedHttpResponse) -> RusotoError<DeleteOriginEndpointError> { if let Some(err) = proto::json::Error::parse_rest(&res) { match err.typ.as_str() { "ForbiddenException" => { return RusotoError::Service(DeleteOriginEndpointError::Forbidden(err.msg)) } "InternalServerErrorException" => { return RusotoError::Service(DeleteOriginEndpointError::InternalServerError( err.msg, )) } "NotFoundException" => { return RusotoError::Service(DeleteOriginEndpointError::NotFound(err.msg)) } "ServiceUnavailableException" => { return RusotoError::Service(DeleteOriginEndpointError::ServiceUnavailable( err.msg, )) } "TooManyRequestsException" => { return RusotoError::Service(DeleteOriginEndpointError::TooManyRequests( err.msg, )) } "UnprocessableEntityException" => { return RusotoError::Service(DeleteOriginEndpointError::UnprocessableEntity( err.msg, )) } "ValidationException" => return RusotoError::Validation(err.msg), _ => {} } } RusotoError::Unknown(res) } } impl fmt::Display for DeleteOriginEndpointError { #[allow(unused_variables)] fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { match *self { DeleteOriginEndpointError::Forbidden(ref cause) => write!(f, "{}", cause), DeleteOriginEndpointError::InternalServerError(ref cause) => write!(f, "{}", cause), DeleteOriginEndpointError::NotFound(ref cause) => write!(f, "{}", cause), DeleteOriginEndpointError::ServiceUnavailable(ref cause) => write!(f, "{}", cause), DeleteOriginEndpointError::TooManyRequests(ref cause) => write!(f, "{}", cause), DeleteOriginEndpointError::UnprocessableEntity(ref 
cause) => write!(f, "{}", cause), } } } impl Error for DeleteOriginEndpointError {} /// Errors returned by DescribeChannel #[derive(Debug, PartialEq)] pub enum DescribeChannelError { /// <p>The client is not authorized to access the requested resource.</p> Forbidden(String), /// <p>An unexpected error occurred.</p> InternalServerError(String), /// <p>The requested resource does not exist.</p> NotFound(String), /// <p>An unexpected error occurred.</p> ServiceUnavailable(String), /// <p>The client has exceeded their resource or throttling limits.</p> TooManyRequests(String), /// <p>The parameters sent in the request are not valid.</p> UnprocessableEntity(String), } impl DescribeChannelError { pub fn from_response(res: BufferedHttpResponse) -> RusotoError<DescribeChannelError> { if let Some(err) = proto::json::Error::parse_rest(&res) { match err.typ.as_str() { "ForbiddenException" => { return RusotoError::Service(DescribeChannelError::Forbidden(err.msg)) } "InternalServerErrorException" => { return RusotoError::Service(DescribeChannelError::InternalServerError(err.msg)) } "NotFoundException" => { return RusotoError::Service(DescribeChannelError::NotFound(err.msg)) } "ServiceUnavailableException" => { return RusotoError::Service(DescribeChannelError::ServiceUnavailable(err.msg)) } "TooManyRequestsException" => { return RusotoError::Service(DescribeChannelError::TooManyRequests(err.msg)) } "UnprocessableEntityException" => { return RusotoError::Service(DescribeChannelError::UnprocessableEntity(err.msg)) } "ValidationException" => return RusotoError::Validation(err.msg), _ => {} } } RusotoError::Unknown(res) } } impl fmt::Display for DescribeChannelError { #[allow(unused_variables)] fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { match *self { DescribeChannelError::Forbidden(ref cause) => write!(f, "{}", cause), DescribeChannelError::InternalServerError(ref cause) => write!(f, "{}", cause), DescribeChannelError::NotFound(ref cause) => write!(f, "{}", cause), 
DescribeChannelError::ServiceUnavailable(ref cause) => write!(f, "{}", cause), DescribeChannelError::TooManyRequests(ref cause) => write!(f, "{}", cause), DescribeChannelError::UnprocessableEntity(ref cause) => write!(f, "{}", cause), } } } impl Error for DescribeChannelError {} /// Errors returned by DescribeHarvestJob #[derive(Debug, PartialEq)] pub enum DescribeHarvestJobError { /// <p>The client is not authorized to access the requested resource.</p> Forbidden(String), /// <p>An unexpected error occurred.</p> InternalServerError(String), /// <p>The requested resource does not exist.</p> NotFound(String), /// <p>An unexpected error occurred.</p> ServiceUnavailable(String), /// <p>The client has exceeded their resource or throttling limits.</p> TooManyRequests(String), /// <p>The parameters sent in the request are not valid.</p> UnprocessableEntity(String), } impl DescribeHarvestJobError { pub fn from_response(res: BufferedHttpResponse) -> RusotoError<DescribeHarvestJobError> { if let Some(err) = proto::json::Error::parse_rest(&res) { match err.typ.as_str() { "ForbiddenException" => { return RusotoError::Service(DescribeHarvestJobError::Forbidden(err.msg)) } "InternalServerErrorException" => { return RusotoError::Service(DescribeHarvestJobError::InternalServerError( err.msg, )) } "NotFoundException" => { return RusotoError::Service(DescribeHarvestJobError::NotFound(err.msg)) } "ServiceUnavailableException" => { return RusotoError::Service(DescribeHarvestJobError::ServiceUnavailable( err.msg, )) } "TooManyRequestsException" => { return RusotoError::Service(DescribeHarvestJobError::TooManyRequests(err.msg)) } "UnprocessableEntityException" => { return RusotoError::Service(DescribeHarvestJobError::UnprocessableEntity( err.msg, )) } "ValidationException" => return RusotoError::Validation(err.msg), _ => {} } } RusotoError::Unknown(res) } } impl fmt::Display for DescribeHarvestJobError { #[allow(unused_variables)] fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { 
match *self { DescribeHarvestJobError::Forbidden(ref cause) => write!(f, "{}", cause), DescribeHarvestJobError::InternalServerError(ref cause) => write!(f, "{}", cause), DescribeHarvestJobError::NotFound(ref cause) => write!(f, "{}", cause), DescribeHarvestJobError::ServiceUnavailable(ref cause) => write!(f, "{}", cause), DescribeHarvestJobError::TooManyRequests(ref cause) => write!(f, "{}", cause), DescribeHarvestJobError::UnprocessableEntity(ref cause) => write!(f, "{}", cause), } } } impl Error for DescribeHarvestJobError {} /// Errors returned by DescribeOriginEndpoint #[derive(Debug, PartialEq)] pub enum DescribeOriginEndpointError { /// <p>The client is not authorized to access the requested resource.</p> Forbidden(String), /// <p>An unexpected error occurred.</p> InternalServerError(String), /// <p>The requested resource does not exist.</p> NotFound(String), /// <p>An unexpected error occurred.</p> ServiceUnavailable(String), /// <p>The client has exceeded their resource or throttling limits.</p> TooManyRequests(String), /// <p>The parameters sent in the request are not valid.</p> UnprocessableEntity(String), } impl DescribeOriginEndpointError { pub fn from_response(res: BufferedHttpResponse) -> RusotoError<DescribeOriginEndpointError> { if let Some(err) = proto::json::Error::parse_rest(&res) { match err.typ.as_str() { "ForbiddenException" => { return RusotoError::Service(DescribeOriginEndpointError::Forbidden(err.msg)) } "InternalServerErrorException" => { return RusotoError::Service(DescribeOriginEndpointError::InternalServerError( err.msg, )) } "NotFoundException" => { return RusotoError::Service(DescribeOriginEndpointError::NotFound(err.msg)) } "ServiceUnavailableException" => { return RusotoError::Service(DescribeOriginEndpointError::ServiceUnavailable( err.msg, )) } "TooManyRequestsException" => { return RusotoError::Service(DescribeOriginEndpointError::TooManyRequests( err.msg, )) } "UnprocessableEntityException" => { return 
RusotoError::Service(DescribeOriginEndpointError::UnprocessableEntity( err.msg, )) } "ValidationException" => return RusotoError::Validation(err.msg), _ => {} } } RusotoError::Unknown(res) } } impl fmt::Display for DescribeOriginEndpointError { #[allow(unused_variables)] fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { match *self { DescribeOriginEndpointError::Forbidden(ref cause) => write!(f, "{}", cause), DescribeOriginEndpointError::InternalServerError(ref cause) => write!(f, "{}", cause), DescribeOriginEndpointError::NotFound(ref cause) => write!(f, "{}", cause), DescribeOriginEndpointError::ServiceUnavailable(ref cause) => write!(f, "{}", cause), DescribeOriginEndpointError::TooManyRequests(ref cause) => write!(f, "{}", cause), DescribeOriginEndpointError::UnprocessableEntity(ref cause) => write!(f, "{}", cause), } } } impl Error for DescribeOriginEndpointError {} /// Errors returned by ListChannels #[derive(Debug, PartialEq)] pub enum ListChannelsError { /// <p>The client is not authorized to access the requested resource.</p> Forbidden(String), /// <p>An unexpected error occurred.</p> InternalServerError(String), /// <p>The requested resource does not exist.</p> NotFound(String), /// <p>An unexpected error occurred.</p> ServiceUnavailable(String), /// <p>The client has exceeded their resource or throttling limits.</p> TooManyRequests(String), /// <p>The parameters sent in the request are not valid.</p> UnprocessableEntity(String), } impl ListChannelsError { pub fn from_response(res: BufferedHttpResponse) -> RusotoError<ListChannelsError> { if let Some(err) = proto::json::Error::parse_rest(&res) { match err.typ.as_str() { "ForbiddenException" => { return RusotoError::Service(ListChannelsError::Forbidden(err.msg)) } "InternalServerErrorException" => { return RusotoError::Service(ListChannelsError::InternalServerError(err.msg)) } "NotFoundException" => { return RusotoError::Service(ListChannelsError::NotFound(err.msg)) } "ServiceUnavailableException" => 
{ return RusotoError::Service(ListChannelsError::ServiceUnavailable(err.msg)) } "TooManyRequestsException" => { return RusotoError::Service(ListChannelsError::TooManyRequests(err.msg)) } "UnprocessableEntityException" => { return RusotoError::Service(ListChannelsError::UnprocessableEntity(err.msg)) } "ValidationException" => return RusotoError::Validation(err.msg), _ => {} } } RusotoError::Unknown(res) } } impl fmt::Display for ListChannelsError { #[allow(unused_variables)] fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { match *self { ListChannelsError::Forbidden(ref cause) => write!(f, "{}", cause), ListChannelsError::InternalServerError(ref cause) => write!(f, "{}", cause), ListChannelsError::NotFound(ref cause) => write!(f, "{}", cause), ListChannelsError::ServiceUnavailable(ref cause) => write!(f, "{}", cause), ListChannelsError::TooManyRequests(ref cause) => write!(f, "{}", cause), ListChannelsError::UnprocessableEntity(ref cause) => write!(f, "{}", cause), } } } impl Error for ListChannelsError {} /// Errors returned by ListHarvestJobs #[derive(Debug, PartialEq)] pub enum ListHarvestJobsError { /// <p>The client is not authorized to access the requested resource.</p> Forbidden(String), /// <p>An unexpected error occurred.</p> InternalServerError(String), /// <p>The requested resource does not exist.</p> NotFound(String), /// <p>An unexpected error occurred.</p> ServiceUnavailable(String), /// <p>The client has exceeded their resource or throttling limits.</p> TooManyRequests(String), /// <p>The parameters sent in the request are not valid.</p> UnprocessableEntity(String), } impl ListHarvestJobsError { pub fn from_response(res: BufferedHttpResponse) -> RusotoError<ListHarvestJobsError> { if let Some(err) = proto::json::Error::parse_rest(&res) { match err.typ.as_str() { "ForbiddenException" => { return RusotoError::Service(ListHarvestJobsError::Forbidden(err.msg)) } "InternalServerErrorException" => { return 
RusotoError::Service(ListHarvestJobsError::InternalServerError(err.msg)) } "NotFoundException" => { return RusotoError::Service(ListHarvestJobsError::NotFound(err.msg)) } "ServiceUnavailableException" => { return RusotoError::Service(ListHarvestJobsError::ServiceUnavailable(err.msg)) } "TooManyRequestsException" => { return RusotoError::Service(ListHarvestJobsError::TooManyRequests(err.msg)) } "UnprocessableEntityException" => { return RusotoError::Service(ListHarvestJobsError::UnprocessableEntity(err.msg)) } "ValidationException" => return RusotoError::Validation(err.msg), _ => {} } } RusotoError::Unknown(res) } } impl fmt::Display for ListHarvestJobsError { #[allow(unused_variables)] fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { match *self { ListHarvestJobsError::Forbidden(ref cause) => write!(f, "{}", cause), ListHarvestJobsError::InternalServerError(ref cause) => write!(f, "{}", cause), ListHarvestJobsError::NotFound(ref cause) => write!(f, "{}", cause), ListHarvestJobsError::ServiceUnavailable(ref cause) => write!(f, "{}", cause), ListHarvestJobsError::TooManyRequests(ref cause) => write!(f, "{}", cause), ListHarvestJobsError::UnprocessableEntity(ref cause) => write!(f, "{}", cause), } } } impl Error for ListHarvestJobsError {} /// Errors returned by ListOriginEndpoints #[derive(Debug, PartialEq)] pub enum ListOriginEndpointsError { /// <p>The client is not authorized to access the requested resource.</p> Forbidden(String), /// <p>An unexpected error occurred.</p> InternalServerError(String), /// <p>The requested resource does not exist.</p> NotFound(String), /// <p>An unexpected error occurred.</p> ServiceUnavailable(String), /// <p>The client has exceeded their resource or throttling limits.</p> TooManyRequests(String), /// <p>The parameters sent in the request are not valid.</p> UnprocessableEntity(String), } impl ListOriginEndpointsError { pub fn from_response(res: BufferedHttpResponse) -> RusotoError<ListOriginEndpointsError> { if let Some(err) 
= proto::json::Error::parse_rest(&res) { match err.typ.as_str() { "ForbiddenException" => { return RusotoError::Service(ListOriginEndpointsError::Forbidden(err.msg)) } "InternalServerErrorException" => { return RusotoError::Service(ListOriginEndpointsError::InternalServerError( err.msg, )) } "NotFoundException" => { return RusotoError::Service(ListOriginEndpointsError::NotFound(err.msg)) } "ServiceUnavailableException" => { return RusotoError::Service(ListOriginEndpointsError::ServiceUnavailable( err.msg, )) } "TooManyRequestsException" => { return RusotoError::Service(ListOriginEndpointsError::TooManyRequests(err.msg)) } "UnprocessableEntityException" => { return RusotoError::Service(ListOriginEndpointsError::UnprocessableEntity( err.msg, )) } "ValidationException" => return RusotoError::Validation(err.msg), _ => {} } } RusotoError::Unknown(res) } } impl fmt::Display for ListOriginEndpointsError { #[allow(unused_variables)] fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { match *self { ListOriginEndpointsError::Forbidden(ref cause) => write!(f, "{}", cause), ListOriginEndpointsError::InternalServerError(ref cause) => write!(f, "{}", cause), ListOriginEndpointsError::NotFound(ref cause) => write!(f, "{}", cause), ListOriginEndpointsError::ServiceUnavailable(ref cause) => write!(f, "{}", cause), ListOriginEndpointsError::TooManyRequests(ref cause) => write!(f, "{}", cause), ListOriginEndpointsError::UnprocessableEntity(ref cause) => write!(f, "{}", cause), } } } impl Error for ListOriginEndpointsError {} /// Errors returned by ListTagsForResource #[derive(Debug, PartialEq)] pub enum ListTagsForResourceError {} impl ListTagsForResourceError { pub fn from_response(res: BufferedHttpResponse) -> RusotoError<ListTagsForResourceError> { if let Some(err) = proto::json::Error::parse_rest(&res) { match err.typ.as_str() { "ValidationException" => return RusotoError::Validation(err.msg), _ => {} } } RusotoError::Unknown(res) } } impl fmt::Display for 
ListTagsForResourceError { #[allow(unused_variables)] fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { match *self {} } } impl Error for ListTagsForResourceError {} /// Errors returned by RotateChannelCredentials #[derive(Debug, PartialEq)] pub enum RotateChannelCredentialsError { /// <p>The client is not authorized to access the requested resource.</p> Forbidden(String), /// <p>An unexpected error occurred.</p> InternalServerError(String), /// <p>The requested resource does not exist.</p> NotFound(String), /// <p>An unexpected error occurred.</p> ServiceUnavailable(String), /// <p>The client has exceeded their resource or throttling limits.</p> TooManyRequests(String), /// <p>The parameters sent in the request are not valid.</p> UnprocessableEntity(String), } impl RotateChannelCredentialsError { pub fn from_response(res: BufferedHttpResponse) -> RusotoError<RotateChannelCredentialsError> { if let Some(err) = proto::json::Error::parse_rest(&res) { match err.typ.as_str() { "ForbiddenException" => { return RusotoError::Service(RotateChannelCredentialsError::Forbidden(err.msg)) } "InternalServerErrorException" => { return RusotoError::Service( RotateChannelCredentialsError::InternalServerError(err.msg), ) } "NotFoundException" => { return RusotoError::Service(RotateChannelCredentialsError::NotFound(err.msg)) } "ServiceUnavailableException" => { return RusotoError::Service(RotateChannelCredentialsError::ServiceUnavailable( err.msg, )) } "TooManyRequestsException" => { return RusotoError::Service(RotateChannelCredentialsError::TooManyRequests( err.msg, )) } "UnprocessableEntityException" => { return RusotoError::Service( RotateChannelCredentialsError::UnprocessableEntity(err.msg), ) } "ValidationException" => return RusotoError::Validation(err.msg), _ => {} } } RusotoError::Unknown(res) } } impl fmt::Display for RotateChannelCredentialsError { #[allow(unused_variables)] fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { match *self { 
RotateChannelCredentialsError::Forbidden(ref cause) => write!(f, "{}", cause), RotateChannelCredentialsError::InternalServerError(ref cause) => write!(f, "{}", cause), RotateChannelCredentialsError::NotFound(ref cause) => write!(f, "{}", cause), RotateChannelCredentialsError::ServiceUnavailable(ref cause) => write!(f, "{}", cause), RotateChannelCredentialsError::TooManyRequests(ref cause) => write!(f, "{}", cause), RotateChannelCredentialsError::UnprocessableEntity(ref cause) => write!(f, "{}", cause), } } } impl Error for RotateChannelCredentialsError {} /// Errors returned by RotateIngestEndpointCredentials #[derive(Debug, PartialEq)] pub enum RotateIngestEndpointCredentialsError { /// <p>The client is not authorized to access the requested resource.</p> Forbidden(String), /// <p>An unexpected error occurred.</p> InternalServerError(String), /// <p>The requested resource does not exist.</p> NotFound(String), /// <p>An unexpected error occurred.</p> ServiceUnavailable(String), /// <p>The client has exceeded their resource or throttling limits.</p> TooManyRequests(String), /// <p>The parameters sent in the request are not valid.</p> UnprocessableEntity(String), } impl RotateIngestEndpointCredentialsError { pub fn from_response( res: BufferedHttpResponse, ) -> RusotoError<RotateIngestEndpointCredentialsError> { if let Some(err) = proto::json::Error::parse_rest(&res) { match err.typ.as_str() { "ForbiddenException" => { return RusotoError::Service(RotateIngestEndpointCredentialsError::Forbidden( err.msg, )) } "InternalServerErrorException" => { return RusotoError::Service( RotateIngestEndpointCredentialsError::InternalServerError(err.msg), ) } "NotFoundException" => { return RusotoError::Service(RotateIngestEndpointCredentialsError::NotFound( err.msg, )) } "ServiceUnavailableException" => { return RusotoError::Service( RotateIngestEndpointCredentialsError::ServiceUnavailable(err.msg), ) } "TooManyRequestsException" => { return RusotoError::Service( 
RotateIngestEndpointCredentialsError::TooManyRequests(err.msg), ) } "UnprocessableEntityException" => { return RusotoError::Service( RotateIngestEndpointCredentialsError::UnprocessableEntity(err.msg), ) } "ValidationException" => return RusotoError::Validation(err.msg), _ => {} } } RusotoError::Unknown(res) } } impl fmt::Display for RotateIngestEndpointCredentialsError { #[allow(unused_variables)] fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { match *self { RotateIngestEndpointCredentialsError::Forbidden(ref cause) => write!(f, "{}", cause), RotateIngestEndpointCredentialsError::InternalServerError(ref cause) => { write!(f, "{}", cause) } RotateIngestEndpointCredentialsError::NotFound(ref cause) => write!(f, "{}", cause), RotateIngestEndpointCredentialsError::ServiceUnavailable(ref cause) => { write!(f, "{}", cause) } RotateIngestEndpointCredentialsError::TooManyRequests(ref cause) => { write!(f, "{}", cause) } RotateIngestEndpointCredentialsError::UnprocessableEntity(ref cause) => { write!(f, "{}", cause) } } } } impl Error for RotateIngestEndpointCredentialsError {} /// Errors returned by TagResource #[derive(Debug, PartialEq)] pub enum TagResourceError {} impl TagResourceError { pub fn from_response(res: BufferedHttpResponse) -> RusotoError<TagResourceError> { if let Some(err) = proto::json::Error::parse_rest(&res) { match err.typ.as_str() { "ValidationException" => return RusotoError::Validation(err.msg), _ => {} } } RusotoError::Unknown(res) } } impl fmt::Display for TagResourceError { #[allow(unused_variables)] fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { match *self {} } } impl Error for TagResourceError {} /// Errors returned by UntagResource #[derive(Debug, PartialEq)] pub enum UntagResourceError {} impl UntagResourceError { pub fn from_response(res: BufferedHttpResponse) -> RusotoError<UntagResourceError> { if let Some(err) = proto::json::Error::parse_rest(&res) { match err.typ.as_str() { "ValidationException" => return 
RusotoError::Validation(err.msg), _ => {} } } RusotoError::Unknown(res) } } impl fmt::Display for UntagResourceError { #[allow(unused_variables)] fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { match *self {} } } impl Error for UntagResourceError {} /// Errors returned by UpdateChannel #[derive(Debug, PartialEq)] pub enum UpdateChannelError { /// <p>The client is not authorized to access the requested resource.</p> Forbidden(String), /// <p>An unexpected error occurred.</p> InternalServerError(String), /// <p>The requested resource does not exist.</p> NotFound(String), /// <p>An unexpected error occurred.</p> ServiceUnavailable(String), /// <p>The client has exceeded their resource or throttling limits.</p> TooManyRequests(String), /// <p>The parameters sent in the request are not valid.</p> UnprocessableEntity(String), } impl UpdateChannelError { pub fn from_response(res: BufferedHttpResponse) -> RusotoError<UpdateChannelError> { if let Some(err) = proto::json::Error::parse_rest(&res) { match err.typ.as_str() { "ForbiddenException" => { return RusotoError::Service(UpdateChannelError::Forbidden(err.msg)) } "InternalServerErrorException" => { return RusotoError::Service(UpdateChannelError::InternalServerError(err.msg)) } "NotFoundException" => { return RusotoError::Service(UpdateChannelError::NotFound(err.msg)) } "ServiceUnavailableException" => { return RusotoError::Service(UpdateChannelError::ServiceUnavailable(err.msg)) } "TooManyRequestsException" => { return RusotoError::Service(UpdateChannelError::TooManyRequests(err.msg)) } "UnprocessableEntityException" => { return RusotoError::Service(UpdateChannelError::UnprocessableEntity(err.msg)) } "ValidationException" => return RusotoError::Validation(err.msg), _ => {} } } RusotoError::Unknown(res) } } impl fmt::Display for UpdateChannelError { #[allow(unused_variables)] fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { match *self { UpdateChannelError::Forbidden(ref cause) => write!(f, "{}", cause), 
UpdateChannelError::InternalServerError(ref cause) => write!(f, "{}", cause), UpdateChannelError::NotFound(ref cause) => write!(f, "{}", cause), UpdateChannelError::ServiceUnavailable(ref cause) => write!(f, "{}", cause), UpdateChannelError::TooManyRequests(ref cause) => write!(f, "{}", cause), UpdateChannelError::UnprocessableEntity(ref cause) => write!(f, "{}", cause), } } } impl Error for UpdateChannelError {} /// Errors returned by UpdateOriginEndpoint #[derive(Debug, PartialEq)] pub enum UpdateOriginEndpointError { /// <p>The client is not authorized to access the requested resource.</p> Forbidden(String), /// <p>An unexpected error occurred.</p> InternalServerError(String), /// <p>The requested resource does not exist.</p> NotFound(String), /// <p>An unexpected error occurred.</p> ServiceUnavailable(String), /// <p>The client has exceeded their resource or throttling limits.</p> TooManyRequests(String), /// <p>The parameters sent in the request are not valid.</p> UnprocessableEntity(String), } impl UpdateOriginEndpointError { pub fn from_response(res: BufferedHttpResponse) -> RusotoError<UpdateOriginEndpointError> { if let Some(err) = proto::json::Error::parse_rest(&res) { match err.typ.as_str() { "ForbiddenException" => { return RusotoError::Service(UpdateOriginEndpointError::Forbidden(err.msg)) } "InternalServerErrorException" => { return RusotoError::Service(UpdateOriginEndpointError::InternalServerError( err.msg, )) } "NotFoundException" => { return RusotoError::Service(UpdateOriginEndpointError::NotFound(err.msg)) } "ServiceUnavailableException" => { return RusotoError::Service(UpdateOriginEndpointError::ServiceUnavailable( err.msg, )) } "TooManyRequestsException" => { return RusotoError::Service(UpdateOriginEndpointError::TooManyRequests( err.msg, )) } "UnprocessableEntityException" => { return RusotoError::Service(UpdateOriginEndpointError::UnprocessableEntity( err.msg, )) } "ValidationException" => return RusotoError::Validation(err.msg), _ => {} } } 
RusotoError::Unknown(res) } } impl fmt::Display for UpdateOriginEndpointError { #[allow(unused_variables)] fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { match *self { UpdateOriginEndpointError::Forbidden(ref cause) => write!(f, "{}", cause), UpdateOriginEndpointError::InternalServerError(ref cause) => write!(f, "{}", cause), UpdateOriginEndpointError::NotFound(ref cause) => write!(f, "{}", cause), UpdateOriginEndpointError::ServiceUnavailable(ref cause) => write!(f, "{}", cause), UpdateOriginEndpointError::TooManyRequests(ref cause) => write!(f, "{}", cause), UpdateOriginEndpointError::UnprocessableEntity(ref cause) => write!(f, "{}", cause), } } } impl Error for UpdateOriginEndpointError {} /// Trait representing the capabilities of the MediaPackage API. MediaPackage clients implement this trait. #[async_trait] pub trait MediaPackage { /// <p>Changes the Channel&#39;s properities to configure log subscription</p> async fn configure_logs( &self, input: ConfigureLogsRequest, ) -> Result<ConfigureLogsResponse, RusotoError<ConfigureLogsError>>; /// <p>Creates a new Channel.</p> async fn create_channel( &self, input: CreateChannelRequest, ) -> Result<CreateChannelResponse, RusotoError<CreateChannelError>>; /// <p>Creates a new HarvestJob record.</p> async fn create_harvest_job( &self, input: CreateHarvestJobRequest, ) -> Result<CreateHarvestJobResponse, RusotoError<CreateHarvestJobError>>; /// <p>Creates a new OriginEndpoint record.</p> async fn create_origin_endpoint( &self, input: CreateOriginEndpointRequest, ) -> Result<CreateOriginEndpointResponse, RusotoError<CreateOriginEndpointError>>; /// <p>Deletes an existing Channel.</p> async fn delete_channel( &self, input: DeleteChannelRequest, ) -> Result<DeleteChannelResponse, RusotoError<DeleteChannelError>>; /// <p>Deletes an existing OriginEndpoint.</p> async fn delete_origin_endpoint( &self, input: DeleteOriginEndpointRequest, ) -> Result<DeleteOriginEndpointResponse, 
RusotoError<DeleteOriginEndpointError>>; /// <p>Gets details about a Channel.</p> async fn describe_channel( &self, input: DescribeChannelRequest, ) -> Result<DescribeChannelResponse, RusotoError<DescribeChannelError>>; /// <p>Gets details about an existing HarvestJob.</p> async fn describe_harvest_job( &self, input: DescribeHarvestJobRequest, ) -> Result<DescribeHarvestJobResponse, RusotoError<DescribeHarvestJobError>>; /// <p>Gets details about an existing OriginEndpoint.</p> async fn describe_origin_endpoint( &self, input: DescribeOriginEndpointRequest, ) -> Result<DescribeOriginEndpointResponse, RusotoError<DescribeOriginEndpointError>>; /// <p>Returns a collection of Channels.</p> async fn list_channels( &self, input: ListChannelsRequest, ) -> Result<ListChannelsResponse, RusotoError<ListChannelsError>>; /// <p>Returns a collection of HarvestJob records.</p> async fn list_harvest_jobs( &self, input: ListHarvestJobsRequest, ) -> Result<ListHarvestJobsResponse, RusotoError<ListHarvestJobsError>>; /// <p>Returns a collection of OriginEndpoint records.</p> async fn list_origin_endpoints( &self, input: ListOriginEndpointsRequest, ) -> Result<ListOriginEndpointsResponse, RusotoError<ListOriginEndpointsError>>; async fn list_tags_for_resource( &self, input: ListTagsForResourceRequest, ) -> Result<ListTagsForResourceResponse, RusotoError<ListTagsForResourceError>>; /// <p>Changes the Channel&#39;s first IngestEndpoint&#39;s username and password. WARNING - This API is deprecated. 
Please use RotateIngestEndpointCredentials instead</p> async fn rotate_channel_credentials( &self, input: RotateChannelCredentialsRequest, ) -> Result<RotateChannelCredentialsResponse, RusotoError<RotateChannelCredentialsError>>; /// <p>Rotate the IngestEndpoint&#39;s username and password, as specified by the IngestEndpoint&#39;s id.</p> async fn rotate_ingest_endpoint_credentials( &self, input: RotateIngestEndpointCredentialsRequest, ) -> Result< RotateIngestEndpointCredentialsResponse, RusotoError<RotateIngestEndpointCredentialsError>, >; async fn tag_resource( &self, input: TagResourceRequest, ) -> Result<(), RusotoError<TagResourceError>>; async fn untag_resource( &self, input: UntagResourceRequest, ) -> Result<(), RusotoError<UntagResourceError>>; /// <p>Updates an existing Channel.</p> async fn update_channel( &self, input: UpdateChannelRequest, ) -> Result<UpdateChannelResponse, RusotoError<UpdateChannelError>>; /// <p>Updates an existing OriginEndpoint.</p> async fn update_origin_endpoint( &self, input: UpdateOriginEndpointRequest, ) -> Result<UpdateOriginEndpointResponse, RusotoError<UpdateOriginEndpointError>>; } /// A client for the MediaPackage API. #[derive(Clone)] pub struct MediaPackageClient { client: Client, region: region::Region, } impl MediaPackageClient { /// Creates a client backed by the default tokio event loop. /// /// The client will use the default credentials provider and tls client. 
// Constructors for the generated MediaPackage client. The enclosing
// `impl MediaPackageClient` header lives in the previous chunk of the file.
    // Primary constructor: shares the process-wide default rusoto `Client`
    // (default credentials provider and TLS connector).
    pub fn new(region: region::Region) -> MediaPackageClient {
        MediaPackageClient {
            client: Client::shared(),
            region,
        }
    }

    // Build a client with a caller-supplied credentials provider and HTTP
    // dispatcher — useful for tests and custom transports.
    pub fn new_with<P, D>(
        request_dispatcher: D,
        credentials_provider: P,
        region: region::Region,
    ) -> MediaPackageClient
    where
        P: ProvideAwsCredentials + Send + Sync + 'static,
        D: DispatchSignedRequest + Send + Sync + 'static,
    {
        MediaPackageClient {
            client: Client::new_with(credentials_provider, request_dispatcher),
            region,
        }
    }

    // Wrap an already-configured rusoto `Client`.
    pub fn new_with_client(client: Client, region: region::Region) -> MediaPackageClient {
        MediaPackageClient { client, region }
    }
}

#[async_trait]
impl MediaPackage for MediaPackageClient {
    /// <p>Changes the Channel&#39;s properties to configure log subscription</p>
    #[allow(unused_mut)]
    async fn configure_logs(
        &self,
        input: ConfigureLogsRequest,
    ) -> Result<ConfigureLogsResponse, RusotoError<ConfigureLogsError>> {
        let request_uri = format!("/channels/{id}/configure_logs", id = input.id);

        let mut request = SignedRequest::new("PUT", "mediapackage", &self.region, &request_uri);
        request.set_content_type("application/x-amz-json-1.1".to_owned());

        // Serialize the request body as JSON. Generated request shapes are
        // always serializable, so this unwrap cannot fail in practice.
        let encoded = Some(serde_json::to_vec(&input).unwrap());
        request.set_payload(encoded);

        let mut response = self
            .client
            .sign_and_dispatch(request)
            .await
            .map_err(RusotoError::from)?;
        if response.status.as_u16() == 200 {
            let mut response = response.buffer().await.map_err(RusotoError::HttpDispatch)?;
            let result = proto::json::ResponsePayload::new(&response)
                .deserialize::<ConfigureLogsResponse, _>()?;

            Ok(result)
        } else {
            // Non-200: buffer the body so the generated error parser can
            // inspect the JSON error payload.
            let response = response.buffer().await.map_err(RusotoError::HttpDispatch)?;
            Err(ConfigureLogsError::from_response(response))
        }
    }

    /// <p>Creates a new Channel.</p>
    #[allow(unused_mut)]
    // NOTE(review): this method's body continues in the next chunk of the file.
    async fn create_channel(
        &self,
        input: CreateChannelRequest,
    ) -> Result<CreateChannelResponse, RusotoError<CreateChannelError>> {
        let request_uri = "/channels";

        let mut request = SignedRequest::new("POST", "mediapackage", &self.region, &request_uri);
request.set_content_type("application/x-amz-json-1.1".to_owned()); let encoded = Some(serde_json::to_vec(&input).unwrap()); request.set_payload(encoded); let mut response = self .client .sign_and_dispatch(request) .await .map_err(RusotoError::from)?; if response.status.as_u16() == 200 { let mut response = response.buffer().await.map_err(RusotoError::HttpDispatch)?; let result = proto::json::ResponsePayload::new(&response) .deserialize::<CreateChannelResponse, _>()?; Ok(result) } else { let response = response.buffer().await.map_err(RusotoError::HttpDispatch)?; Err(CreateChannelError::from_response(response)) } } /// <p>Creates a new HarvestJob record.</p> #[allow(unused_mut)] async fn create_harvest_job( &self, input: CreateHarvestJobRequest, ) -> Result<CreateHarvestJobResponse, RusotoError<CreateHarvestJobError>> { let request_uri = "/harvest_jobs"; let mut request = SignedRequest::new("POST", "mediapackage", &self.region, &request_uri); request.set_content_type("application/x-amz-json-1.1".to_owned()); let encoded = Some(serde_json::to_vec(&input).unwrap()); request.set_payload(encoded); let mut response = self .client .sign_and_dispatch(request) .await .map_err(RusotoError::from)?; if response.status.as_u16() == 200 { let mut response = response.buffer().await.map_err(RusotoError::HttpDispatch)?; let result = proto::json::ResponsePayload::new(&response) .deserialize::<CreateHarvestJobResponse, _>()?; Ok(result) } else { let response = response.buffer().await.map_err(RusotoError::HttpDispatch)?; Err(CreateHarvestJobError::from_response(response)) } } /// <p>Creates a new OriginEndpoint record.</p> #[allow(unused_mut)] async fn create_origin_endpoint( &self, input: CreateOriginEndpointRequest, ) -> Result<CreateOriginEndpointResponse, RusotoError<CreateOriginEndpointError>> { let request_uri = "/origin_endpoints"; let mut request = SignedRequest::new("POST", "mediapackage", &self.region, &request_uri); 
request.set_content_type("application/x-amz-json-1.1".to_owned()); let encoded = Some(serde_json::to_vec(&input).unwrap()); request.set_payload(encoded); let mut response = self .client .sign_and_dispatch(request) .await .map_err(RusotoError::from)?; if response.status.as_u16() == 200 { let mut response = response.buffer().await.map_err(RusotoError::HttpDispatch)?; let result = proto::json::ResponsePayload::new(&response) .deserialize::<CreateOriginEndpointResponse, _>()?; Ok(result) } else { let response = response.buffer().await.map_err(RusotoError::HttpDispatch)?; Err(CreateOriginEndpointError::from_response(response)) } } /// <p>Deletes an existing Channel.</p> #[allow(unused_mut)] async fn delete_channel( &self, input: DeleteChannelRequest, ) -> Result<DeleteChannelResponse, RusotoError<DeleteChannelError>> { let request_uri = format!("/channels/{id}", id = input.id); let mut request = SignedRequest::new("DELETE", "mediapackage", &self.region, &request_uri); request.set_content_type("application/x-amz-json-1.1".to_owned()); let mut response = self .client .sign_and_dispatch(request) .await .map_err(RusotoError::from)?; if response.status.as_u16() == 202 { let mut response = response.buffer().await.map_err(RusotoError::HttpDispatch)?; let result = proto::json::ResponsePayload::new(&response) .deserialize::<DeleteChannelResponse, _>()?; Ok(result) } else { let response = response.buffer().await.map_err(RusotoError::HttpDispatch)?; Err(DeleteChannelError::from_response(response)) } } /// <p>Deletes an existing OriginEndpoint.</p> #[allow(unused_mut)] async fn delete_origin_endpoint( &self, input: DeleteOriginEndpointRequest, ) -> Result<DeleteOriginEndpointResponse, RusotoError<DeleteOriginEndpointError>> { let request_uri = format!("/origin_endpoints/{id}", id = input.id); let mut request = SignedRequest::new("DELETE", "mediapackage", &self.region, &request_uri); request.set_content_type("application/x-amz-json-1.1".to_owned()); let mut response = self .client 
.sign_and_dispatch(request) .await .map_err(RusotoError::from)?; if response.status.as_u16() == 202 { let mut response = response.buffer().await.map_err(RusotoError::HttpDispatch)?; let result = proto::json::ResponsePayload::new(&response) .deserialize::<DeleteOriginEndpointResponse, _>()?; Ok(result) } else { let response = response.buffer().await.map_err(RusotoError::HttpDispatch)?; Err(DeleteOriginEndpointError::from_response(response)) } } /// <p>Gets details about a Channel.</p> #[allow(unused_mut)] async fn describe_channel( &self, input: DescribeChannelRequest, ) -> Result<DescribeChannelResponse, RusotoError<DescribeChannelError>> { let request_uri = format!("/channels/{id}", id = input.id); let mut request = SignedRequest::new("GET", "mediapackage", &self.region, &request_uri); request.set_content_type("application/x-amz-json-1.1".to_owned()); let mut response = self .client .sign_and_dispatch(request) .await .map_err(RusotoError::from)?; if response.status.as_u16() == 200 { let mut response = response.buffer().await.map_err(RusotoError::HttpDispatch)?; let result = proto::json::ResponsePayload::new(&response) .deserialize::<DescribeChannelResponse, _>()?; Ok(result) } else { let response = response.buffer().await.map_err(RusotoError::HttpDispatch)?; Err(DescribeChannelError::from_response(response)) } } /// <p>Gets details about an existing HarvestJob.</p> #[allow(unused_mut)] async fn describe_harvest_job( &self, input: DescribeHarvestJobRequest, ) -> Result<DescribeHarvestJobResponse, RusotoError<DescribeHarvestJobError>> { let request_uri = format!("/harvest_jobs/{id}", id = input.id); let mut request = SignedRequest::new("GET", "mediapackage", &self.region, &request_uri); request.set_content_type("application/x-amz-json-1.1".to_owned()); let mut response = self .client .sign_and_dispatch(request) .await .map_err(RusotoError::from)?; if response.status.as_u16() == 200 { let mut response = response.buffer().await.map_err(RusotoError::HttpDispatch)?; let 
result = proto::json::ResponsePayload::new(&response) .deserialize::<DescribeHarvestJobResponse, _>()?; Ok(result) } else { let response = response.buffer().await.map_err(RusotoError::HttpDispatch)?; Err(DescribeHarvestJobError::from_response(response)) } } /// <p>Gets details about an existing OriginEndpoint.</p> #[allow(unused_mut)] async fn describe_origin_endpoint( &self, input: DescribeOriginEndpointRequest, ) -> Result<DescribeOriginEndpointResponse, RusotoError<DescribeOriginEndpointError>> { let request_uri = format!("/origin_endpoints/{id}", id = input.id); let mut request = SignedRequest::new("GET", "mediapackage", &self.region, &request_uri); request.set_content_type("application/x-amz-json-1.1".to_owned()); let mut response = self .client .sign_and_dispatch(request) .await .map_err(RusotoError::from)?; if response.status.as_u16() == 200 { let mut response = response.buffer().await.map_err(RusotoError::HttpDispatch)?; let result = proto::json::ResponsePayload::new(&response) .deserialize::<DescribeOriginEndpointResponse, _>()?; Ok(result) } else { let response = response.buffer().await.map_err(RusotoError::HttpDispatch)?; Err(DescribeOriginEndpointError::from_response(response)) } } /// <p>Returns a collection of Channels.</p> #[allow(unused_mut)] async fn list_channels( &self, input: ListChannelsRequest, ) -> Result<ListChannelsResponse, RusotoError<ListChannelsError>> { let request_uri = "/channels"; let mut request = SignedRequest::new("GET", "mediapackage", &self.region, &request_uri); request.set_content_type("application/x-amz-json-1.1".to_owned()); let mut params = Params::new(); if let Some(ref x) = input.max_results { params.put("maxResults", x); } if let Some(ref x) = input.next_token { params.put("nextToken", x); } request.set_params(params); let mut response = self .client .sign_and_dispatch(request) .await .map_err(RusotoError::from)?; if response.status.as_u16() == 200 { let mut response = 
response.buffer().await.map_err(RusotoError::HttpDispatch)?; let result = proto::json::ResponsePayload::new(&response) .deserialize::<ListChannelsResponse, _>()?; Ok(result) } else { let response = response.buffer().await.map_err(RusotoError::HttpDispatch)?; Err(ListChannelsError::from_response(response)) } } /// <p>Returns a collection of HarvestJob records.</p> #[allow(unused_mut)] async fn list_harvest_jobs( &self, input: ListHarvestJobsRequest, ) -> Result<ListHarvestJobsResponse, RusotoError<ListHarvestJobsError>> { let request_uri = "/harvest_jobs"; let mut request = SignedRequest::new("GET", "mediapackage", &self.region, &request_uri); request.set_content_type("application/x-amz-json-1.1".to_owned()); let mut params = Params::new(); if let Some(ref x) = input.include_channel_id { params.put("includeChannelId", x); } if let Some(ref x) = input.include_status { params.put("includeStatus", x); } if let Some(ref x) = input.max_results { params.put("maxResults", x); } if let Some(ref x) = input.next_token { params.put("nextToken", x); } request.set_params(params); let mut response = self .client .sign_and_dispatch(request) .await .map_err(RusotoError::from)?; if response.status.as_u16() == 200 { let mut response = response.buffer().await.map_err(RusotoError::HttpDispatch)?; let result = proto::json::ResponsePayload::new(&response) .deserialize::<ListHarvestJobsResponse, _>()?; Ok(result) } else { let response = response.buffer().await.map_err(RusotoError::HttpDispatch)?; Err(ListHarvestJobsError::from_response(response)) } } /// <p>Returns a collection of OriginEndpoint records.</p> #[allow(unused_mut)] async fn list_origin_endpoints( &self, input: ListOriginEndpointsRequest, ) -> Result<ListOriginEndpointsResponse, RusotoError<ListOriginEndpointsError>> { let request_uri = "/origin_endpoints"; let mut request = SignedRequest::new("GET", "mediapackage", &self.region, &request_uri); request.set_content_type("application/x-amz-json-1.1".to_owned()); let mut params 
= Params::new(); if let Some(ref x) = input.channel_id { params.put("channelId", x); } if let Some(ref x) = input.max_results { params.put("maxResults", x); } if let Some(ref x) = input.next_token { params.put("nextToken", x); } request.set_params(params); let mut response = self .client .sign_and_dispatch(request) .await .map_err(RusotoError::from)?; if response.status.as_u16() == 200 { let mut response = response.buffer().await.map_err(RusotoError::HttpDispatch)?; let result = proto::json::ResponsePayload::new(&response) .deserialize::<ListOriginEndpointsResponse, _>()?; Ok(result) } else { let response = response.buffer().await.map_err(RusotoError::HttpDispatch)?; Err(ListOriginEndpointsError::from_response(response)) } } #[allow(unused_mut)] async fn list_tags_for_resource( &self, input: ListTagsForResourceRequest, ) -> Result<ListTagsForResourceResponse, RusotoError<ListTagsForResourceError>> { let request_uri = format!("/tags/{resource_arn}", resource_arn = input.resource_arn); let mut request = SignedRequest::new("GET", "mediapackage", &self.region, &request_uri); request.set_content_type("application/x-amz-json-1.1".to_owned()); let mut response = self .client .sign_and_dispatch(request) .await .map_err(RusotoError::from)?; if response.status.as_u16() == 200 { let mut response = response.buffer().await.map_err(RusotoError::HttpDispatch)?; let result = proto::json::ResponsePayload::new(&response) .deserialize::<ListTagsForResourceResponse, _>()?; Ok(result) } else { let response = response.buffer().await.map_err(RusotoError::HttpDispatch)?; Err(ListTagsForResourceError::from_response(response)) } } /// <p>Changes the Channel&#39;s first IngestEndpoint&#39;s username and password. WARNING - This API is deprecated. 
Please use RotateIngestEndpointCredentials instead</p> #[allow(unused_mut)] async fn rotate_channel_credentials( &self, input: RotateChannelCredentialsRequest, ) -> Result<RotateChannelCredentialsResponse, RusotoError<RotateChannelCredentialsError>> { let request_uri = format!("/channels/{id}/credentials", id = input.id); let mut request = SignedRequest::new("PUT", "mediapackage", &self.region, &request_uri); request.set_content_type("application/x-amz-json-1.1".to_owned()); let mut response = self .client .sign_and_dispatch(request) .await .map_err(RusotoError::from)?; if response.status.as_u16() == 200 { let mut response = response.buffer().await.map_err(RusotoError::HttpDispatch)?; let result = proto::json::ResponsePayload::new(&response) .deserialize::<RotateChannelCredentialsResponse, _>()?; Ok(result) } else { let response = response.buffer().await.map_err(RusotoError::HttpDispatch)?; Err(RotateChannelCredentialsError::from_response(response)) } } /// <p>Rotate the IngestEndpoint&#39;s username and password, as specified by the IngestEndpoint&#39;s id.</p> #[allow(unused_mut)] async fn rotate_ingest_endpoint_credentials( &self, input: RotateIngestEndpointCredentialsRequest, ) -> Result< RotateIngestEndpointCredentialsResponse, RusotoError<RotateIngestEndpointCredentialsError>, > { let request_uri = format!( "/channels/{id}/ingest_endpoints/{ingest_endpoint_id}/credentials", id = input.id, ingest_endpoint_id = input.ingest_endpoint_id ); let mut request = SignedRequest::new("PUT", "mediapackage", &self.region, &request_uri); request.set_content_type("application/x-amz-json-1.1".to_owned()); let mut response = self .client .sign_and_dispatch(request) .await .map_err(RusotoError::from)?; if response.status.as_u16() == 200 { let mut response = response.buffer().await.map_err(RusotoError::HttpDispatch)?; let result = proto::json::ResponsePayload::new(&response) .deserialize::<RotateIngestEndpointCredentialsResponse, _>()?; Ok(result) } else { let response = 
// NOTE(review): this chunk opens inside the error branch of
// `rotate_ingest_endpoint_credentials`; the surrounding `else` block starts
// in the previous chunk of the file.
response.buffer().await.map_err(RusotoError::HttpDispatch)?;
            Err(RotateIngestEndpointCredentialsError::from_response(
                response,
            ))
        }
    }

    // Adds tags to the resource identified by `input.resource_arn`.
    // Returns `Ok(())` on HTTP 204 — the service sends no response body.
    #[allow(unused_mut)]
    async fn tag_resource(
        &self,
        input: TagResourceRequest,
    ) -> Result<(), RusotoError<TagResourceError>> {
        let request_uri = format!("/tags/{resource_arn}", resource_arn = input.resource_arn);

        let mut request = SignedRequest::new("POST", "mediapackage", &self.region, &request_uri);
        request.set_content_type("application/x-amz-json-1.1".to_owned());

        // Serialize the request body as JSON; generated shapes are always
        // serializable, so the unwrap cannot fail in practice.
        let encoded = Some(serde_json::to_vec(&input).unwrap());
        request.set_payload(encoded);

        let mut response = self
            .client
            .sign_and_dispatch(request)
            .await
            .map_err(RusotoError::from)?;
        if response.status.as_u16() == 204 {
            let mut response = response.buffer().await.map_err(RusotoError::HttpDispatch)?;
            // No payload to deserialize: `drop(response)` evaluates to `()`,
            // which is exactly the Ok value of this method.
            let result = ::std::mem::drop(response);

            Ok(result)
        } else {
            let response = response.buffer().await.map_err(RusotoError::HttpDispatch)?;
            Err(TagResourceError::from_response(response))
        }
    }

    // Removes the given tag keys from the resource identified by
    // `input.resource_arn`. Tag keys travel as repeated `tagKeys` query
    // parameters; the request has no body. Returns `Ok(())` on HTTP 204.
    #[allow(unused_mut)]
    async fn untag_resource(
        &self,
        input: UntagResourceRequest,
    ) -> Result<(), RusotoError<UntagResourceError>> {
        let request_uri = format!("/tags/{resource_arn}", resource_arn = input.resource_arn);

        let mut request = SignedRequest::new("DELETE", "mediapackage", &self.region, &request_uri);
        request.set_content_type("application/x-amz-json-1.1".to_owned());

        let mut params = Params::new();
        for item in input.tag_keys.iter() {
            params.put("tagKeys", item);
        }
        request.set_params(params);

        let mut response = self
            .client
            .sign_and_dispatch(request)
            .await
            .map_err(RusotoError::from)?;
        if response.status.as_u16() == 204 {
            let mut response = response.buffer().await.map_err(RusotoError::HttpDispatch)?;
            // As above: empty response body, `()` result.
            let result = ::std::mem::drop(response);

            Ok(result)
        } else {
            let response = response.buffer().await.map_err(RusotoError::HttpDispatch)?;
            Err(UntagResourceError::from_response(response))
        }
    }

    /// <p>Updates an existing Channel.</p>
    #[allow(unused_mut)]
    // NOTE(review): this method's body continues in the next chunk of the file.
    async fn update_channel(
        &self,
input: UpdateChannelRequest, ) -> Result<UpdateChannelResponse, RusotoError<UpdateChannelError>> { let request_uri = format!("/channels/{id}", id = input.id); let mut request = SignedRequest::new("PUT", "mediapackage", &self.region, &request_uri); request.set_content_type("application/x-amz-json-1.1".to_owned()); let encoded = Some(serde_json::to_vec(&input).unwrap()); request.set_payload(encoded); let mut response = self .client .sign_and_dispatch(request) .await .map_err(RusotoError::from)?; if response.status.as_u16() == 200 { let mut response = response.buffer().await.map_err(RusotoError::HttpDispatch)?; let result = proto::json::ResponsePayload::new(&response) .deserialize::<UpdateChannelResponse, _>()?; Ok(result) } else { let response = response.buffer().await.map_err(RusotoError::HttpDispatch)?; Err(UpdateChannelError::from_response(response)) } } /// <p>Updates an existing OriginEndpoint.</p> #[allow(unused_mut)] async fn update_origin_endpoint( &self, input: UpdateOriginEndpointRequest, ) -> Result<UpdateOriginEndpointResponse, RusotoError<UpdateOriginEndpointError>> { let request_uri = format!("/origin_endpoints/{id}", id = input.id); let mut request = SignedRequest::new("PUT", "mediapackage", &self.region, &request_uri); request.set_content_type("application/x-amz-json-1.1".to_owned()); let encoded = Some(serde_json::to_vec(&input).unwrap()); request.set_payload(encoded); let mut response = self .client .sign_and_dispatch(request) .await .map_err(RusotoError::from)?; if response.status.as_u16() == 200 { let mut response = response.buffer().await.map_err(RusotoError::HttpDispatch)?; let result = proto::json::ResponsePayload::new(&response) .deserialize::<UpdateOriginEndpointResponse, _>()?; Ok(result) } else { let response = response.buffer().await.map_err(RusotoError::HttpDispatch)?; Err(UpdateOriginEndpointError::from_response(response)) } } }<|fim▁end|>
pub tags: Option<::std::collections::HashMap<String, String>>, }